aboutsummaryrefslogtreecommitdiffstats
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/args.rs10
-rw-r--r--src/errors.rs5
-rw-r--r--src/file_op.rs140
-rw-r--r--src/renderer.rs83
4 files changed, 195 insertions, 43 deletions
diff --git a/src/args.rs b/src/args.rs
index f117b1c..17f6c84 100644
--- a/src/args.rs
+++ b/src/args.rs
@@ -165,13 +165,17 @@ pub struct CliArgs {
/// When specified via environment variable, a path always needs to be specified.
#[arg(short = 'u', long = "upload-files", value_hint = ValueHint::FilePath, num_args(0..=1), value_delimiter(','), env = "MINISERVE_ALLOWED_UPLOAD_DIR")]
pub allowed_upload_dir: Option<Vec<PathBuf>>,
-
+
/// Configure amount of concurrent uploads when visiting the website. Must have
/// upload-files option enabled for this setting to matter.
- ///
+ ///
/// For example, a value of 4 would mean that the web browser will only upload
/// 4 files at a time to the web server when using the web browser interface.
- #[arg(long = "web-upload-files-concurrency", env = "MINISERVE_WEB_UPLOAD_CONCURRENCY", default_value = "0")]
+ #[arg(
+ long = "web-upload-files-concurrency",
+ env = "MINISERVE_WEB_UPLOAD_CONCURRENCY",
+ default_value = "0"
+ )]
pub web_upload_concurrency: usize,
/// Enable creating directories
diff --git a/src/errors.rs b/src/errors.rs
index f0e22ab..24997fc 100644
--- a/src/errors.rs
+++ b/src/errors.rs
@@ -43,6 +43,10 @@ pub enum RuntimeError {
#[error("File already exists, and the overwrite_files option has not been set")]
DuplicateFileError,
+ /// Uploaded hash not correct
+ #[error("File hash that was provided did not match end result of uploaded file")]
+ UploadHashMismatchError,
+
/// Upload not allowed
#[error("Upload not allowed to this directory")]
UploadForbiddenError,
@@ -86,6 +90,7 @@ impl ResponseError for RuntimeError {
use StatusCode as S;
match self {
E::IoError(_, _) => S::INTERNAL_SERVER_ERROR,
+ E::UploadHashMismatchError => S::BAD_REQUEST,
E::MultipartError(_) => S::BAD_REQUEST,
E::DuplicateFileError => S::CONFLICT,
E::UploadForbiddenError => S::FORBIDDEN,
diff --git a/src/file_op.rs b/src/file_op.rs
index 76a7234..367517a 100644
--- a/src/file_op.rs
+++ b/src/file_op.rs
@@ -4,10 +4,10 @@ use std::io::ErrorKind;
use std::path::{Component, Path, PathBuf};
use actix_web::{http::header, web, HttpRequest, HttpResponse};
-use futures::{StreamExt, TryFutureExt};
-use futures::TryStreamExt;
+use futures::{StreamExt, TryStreamExt};
use serde::Deserialize;
-use tokio::fs::File;
+use sha2::{Digest, Sha256};
+use tempfile::NamedTempFile;
use tokio::io::AsyncWriteExt;
use crate::{
@@ -15,6 +15,18 @@ use crate::{
file_utils::sanitize_path,
};
+enum FileHash {
+ SHA256(String),
+}
+
+impl FileHash {
+ pub fn get_hasher(&self) -> impl Digest {
+ match self {
+ Self::SHA256(_) => Sha256::new(),
+ }
+ }
+}
+
/// Saves file data from a multipart form field (`field`) to `file_path`, optionally overwriting
/// existing file.
///
@@ -23,31 +35,84 @@ async fn save_file(
field: &mut actix_multipart::Field,
file_path: PathBuf,
overwrite_files: bool,
+ file_hash: Option<&FileHash>,
) -> Result<u64, RuntimeError> {
if !overwrite_files && file_path.exists() {
return Err(RuntimeError::DuplicateFileError);
}
- let file = match File::create(&file_path).await {
- Err(err) if err.kind() == ErrorKind::PermissionDenied => Err(
+ let named_temp_file = match tokio::task::spawn_blocking(|| NamedTempFile::new()).await {
+ Err(err) => Err(RuntimeError::MultipartError(format!(
+ "Failed to complete spawned task to create named temp file. {}",
+ err
+ ))),
+ Ok(Err(err)) if err.kind() == ErrorKind::PermissionDenied => Err(
RuntimeError::InsufficientPermissionsError(file_path.display().to_string()),
),
- Err(err) => Err(RuntimeError::IoError(
- format!("Failed to create {}", file_path.display()),
- err,
+ Ok(Err(err)) => Err(RuntimeError::IoError(
+ format!("Failed to create temporary file {}", file_path.display()),
+ err,
)),
- Ok(v) => Ok(v),
+ Ok(Ok(file)) => Ok(file),
}?;
- let (_, written_len) = field
- .map_err(|x| RuntimeError::MultipartError(x.to_string()))
- .try_fold((file, 0u64), |(mut file, written_len), bytes| async move {
- file.write_all(bytes.as_ref())
- .map_err(|e| RuntimeError::IoError("Failed to write to file".to_string(), e))
- .await?;
- Ok((file, written_len + bytes.len() as u64))
- })
- .await?;
+ let (file, temp_path) = named_temp_file.keep().map_err(|err| {
+ RuntimeError::IoError("Failed to keep temporary file".into(), err.error.into())
+ })?;
+ let mut temp_file = tokio::fs::File::from_std(file);
+
+ let mut written_len = 0;
+ let mut hasher = file_hash.as_ref().map(|h| h.get_hasher());
+ let mut error: Option<RuntimeError> = None;
+
+ while let Some(Ok(bytes)) = field.next().await {
+ if let Some(hasher) = hasher.as_mut() {
+ hasher.update(&bytes)
+ }
+ if let Err(e) = temp_file.write_all(&bytes).await {
+ error = Some(RuntimeError::IoError(
+ "Failed to write to file".to_string(),
+ e,
+ ));
+ break;
+ }
+ written_len += bytes.len() as u64;
+ }
+
+ drop(temp_file);
+
+ if let Some(e) = error {
+ let _ = tokio::fs::remove_file(temp_path).await;
+ return Err(e);
+ }
+
+ // There isn't a way to get notified when a request is cancelled
+ // by the user in actix it seems. References:
+ // - https://github.com/actix/actix-web/issues/1313
+ // - https://github.com/actix/actix-web/discussions/3011
+ // Therefore, we are relying on the fact that the web UI
+ // uploads a hash of the file.
+ if let Some(hasher) = hasher {
+ if let Some(FileHash::SHA256(expected_hash)) = file_hash {
+ let actual_hash = hex::encode(hasher.finalize());
+ if &actual_hash != expected_hash {
+ let _ = tokio::fs::remove_file(&temp_path).await;
+ return Err(RuntimeError::UploadHashMismatchError);
+ }
+ }
+ }
+
+ if let Err(e) = tokio::fs::rename(&temp_path, &file_path).await {
+ let _ = tokio::fs::remove_file(&temp_path).await;
+ return Err(RuntimeError::IoError(
+ format!(
+ "Failed to move temporary file {} to {}",
+ temp_path.display(),
+ file_path.display()
+ ),
+ e,
+ ));
+ }
Ok(written_len)
}
@@ -60,6 +125,7 @@ async fn handle_multipart(
allow_mkdir: bool,
allow_hidden_paths: bool,
allow_symlinks: bool,
+ file_hash: Option<&FileHash>,
) -> Result<u64, RuntimeError> {
let field_name = field.name().expect("No name field found").to_string();
@@ -168,15 +234,13 @@ async fn handle_multipart(
}
}
- match save_file(&mut field, path.join(filename_path), overwrite_files).await {
- Ok(bytes) => Ok(bytes),
- Err(err) => {
- // Required for file upload. If entire stream is not consumed, javascript
- // XML HTTP Request will never complete.
- while field.next().await.is_some() {}
- Err(err)
- },
- }
+ save_file(
+ &mut field,
+ path.join(filename_path),
+ overwrite_files,
+ file_hash,
+ )
+ .await
}
/// Query parameters used by upload and rm APIs
@@ -226,6 +290,27 @@ pub async fn upload_file(
)),
}?;
+ let mut file_hash: Option<FileHash> = None;
+ if let Some(hash) = req
+ .headers()
+ .get("X-File-Hash")
+ .and_then(|h| h.to_str().ok())
+ {
+        if let Some(hash_function) = req
+            .headers()
+            .get("X-File-Hash-Function")
+            .and_then(|h| h.to_str().ok())
+        {
+            match hash_function.to_ascii_uppercase().as_str() {
+ "SHA256" => {
+ file_hash = Some(FileHash::SHA256(hash.to_string()));
+ }
+ _ => {}
+ }
+ }
+ }
+
+ let hash_ref = file_hash.as_ref();
actix_multipart::Multipart::new(req.headers(), payload)
.map_err(|x| RuntimeError::MultipartError(x.to_string()))
.and_then(|field| {
@@ -236,6 +321,7 @@ pub async fn upload_file(
conf.mkdir_enabled,
conf.show_hidden,
!conf.no_symlinks,
+ hash_ref,
)
})
.try_collect::<Vec<u64>>()
diff --git a/src/renderer.rs b/src/renderer.rs
index 035309d..8a87228 100644
--- a/src/renderer.rs
+++ b/src/renderer.rs
@@ -193,6 +193,9 @@ pub fn page(
div.upload_container {
div.upload_header {
h4 style="margin:0px" id="upload_title" {}
+ svg id="upload-toggle" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor" class="size-6" {
+ path stroke-linecap="round" stroke-linejoin="round" d="m4.5 15.75 7.5-7.5 7.5 7.5" {}
+ }
}
div.upload_action {
p id="upload_action_text" { "Starting upload..." }
@@ -663,6 +666,7 @@ fn page_header(title: &str, file_upload: bool, web_file_concurrency: usize, favi
const uploadCancelButton = document.querySelector('#upload_cancel');
const uploadList = document.querySelector('#upload_file_list');
const fileUploadItemTemplate = document.querySelector('#upload_file_item');
+ const uploadWidgetToggle = document.querySelector('#upload-toggle');
const dropContainer = document.querySelector('#drop-container');
const dragForm = document.querySelector('.drag-form');
@@ -696,6 +700,19 @@ fn page_header(title: &str, file_upload: bool, web_file_concurrency: usize, favi
dragForm.style.display = 'none';
};
+ uploadWidgetToggle.addEventListener('click', function (e) {
+ e.preventDefault();
+ if (uploadArea.style.height === "100vh") {
+ uploadArea.style = ""
+ document.body.style = ""
+ uploadWidgetToggle.style = ""
+ } else {
+ uploadArea.style.height = "100vh"
+ document.body.style = "overflow: hidden"
+ uploadWidgetToggle.style = "transform: rotate(180deg)"
+ }
+ })
+
uploadCancelButton.addEventListener('click', function (e) {
e.preventDefault();
CANCEL_UPLOAD = true;
@@ -706,7 +723,7 @@ fn page_header(title: &str, file_upload: bool, web_file_concurrency: usize, favi
uploadFiles()
})
- function updateUploadText() {
+ function updateUploadTextAndList() {
const queryLength = (state) => document.querySelectorAll(`[data-state='${state}']`).length;
const total = document.querySelectorAll("[data-state]").length;
const uploads = queryLength(UPLOADING);
@@ -733,16 +750,19 @@ fn page_header(title: &str, file_upload: bool, web_file_concurrency: usize, favi
uploadTitle.textContent = headerText
uploadActionText.textContent = statuses.join(', ')
- // Update list of uploads
- Array.from(uploadList.querySelectorAll('li'))
- .sort(({ dataset: { state: a }}, {dataset: { state: b }}) => UPLOAD_ITEM_ORDER[a] >= UPLOAD_ITEM_ORDER[b])
- .forEach((item) => item.parentNode.appendChild(item))
+ const items = Array.from(uploadList.querySelectorAll('li'));
+ items.sort((a, b) => UPLOAD_ITEM_ORDER[a.dataset.state] - UPLOAD_ITEM_ORDER[b.dataset.state]);
+ items.forEach((item, index) => {
+ if (uploadList.children[index] !== item) {
+ uploadList.insertBefore(item, uploadList.children[index]);
+ }
+ });
}
async function doWork(iterator, i) {
for (let [index, item] of iterator) {
await item();
- updateUploadText();
+ updateUploadTextAndList();
}
}
@@ -754,17 +774,17 @@ fn page_header(title: &str, file_upload: bool, web_file_concurrency: usize, favi
const workers = Array(concurrency).fill(iterator).map(doWork)
Promise.allSettled(workers)
.finally(() => {
- updateUploadText();
+ updateUploadTextAndList();
form.reset();
setTimeout(() => { uploadArea.classList.remove('active'); }, 1000)
setTimeout(() => { window.location.reload(); }, 1500)
})
- updateUploadText();
+ updateUploadTextAndList();
uploadArea.classList.add('active')
uploadList.scrollTo(0, 0)
}
-
+
function formatBytes(bytes, decimals) {
if (bytes == 0) return '0 Bytes';
var k = 1024,
@@ -774,6 +794,19 @@ fn page_header(title: &str, file_upload: bool, web_file_concurrency: usize, favi
return parseFloat((bytes / Math.pow(k, i)).toFixed(dm)) + ' ' + sizes[i];
}
+ document.querySelector('input[type="file"]').addEventListener('change', async (e) => {
+ const file = e.target.files[0];
+                const hash = await get256FileHash(file);
+ console.log('File hash:', hash);
+ });
+
+ async function get256FileHash(file) {
+ const arrayBuffer = await file.arrayBuffer();
+ const hashBuffer = await crypto.subtle.digest('SHA-256', arrayBuffer);
+ const hashArray = Array.from(new Uint8Array(hashBuffer));
+ return hashArray.map(b => b.toString(16).padStart(2, '0')).join('');
+ }
+
function uploadFile(file) {
const fileUploadItem = fileUploadItemTemplate.content.cloneNode(true)
const itemContainer = fileUploadItem.querySelector(".upload_file_item")
@@ -783,16 +816,38 @@ fn page_header(title: &str, file_upload: bool, web_file_concurrency: usize, favi
const percentText = fileUploadItem.querySelector(".file_upload_percent")
const bar = fileUploadItem.querySelector(".file_progress_bar")
const cancel = fileUploadItem.querySelector(".file_cancel_upload")
+ let preCancel = false;
- itemContainer.dataset.state = 'pending'
+ itemContainer.dataset.state = PENDING
name.textContent = file.name
size.textContent = formatBytes(file.size)
percentText.textContent = "0%"
-
+
uploadList.append(fileUploadItem)
+ function preCancelUpload() {
+ console.log('cancelled')
+ preCancel = true;
+ itemText.classList.add(CANCELLED);
+ bar.classList.add(CANCELLED);
+ itemContainer.dataset.state = CANCELLED;
+ itemContainer.style.background = 'var(--upload_modal_file_upload_complete_background)';
+ cancel.disabled = true;
+ cancel.removeEventListener("click", preCancelUpload);
+ uploadCancelButton.removeEventListener("click", preCancelUpload);
+ updateUploadTextAndList();
+ }
+
+ uploadCancelButton.addEventListener("click", preCancelUpload)
+ cancel.addEventListener("click", preCancelUpload)
+
return async () => {
- return new Promise((resolve, reject) => {
+ if (preCancel) {
+ return Promise.resolve()
+ }
+
+ return new Promise(async (resolve, reject) => {
+ const fileHash = await get256FileHash(file);
const xhr = new XMLHttpRequest();
const formData = new FormData();
formData.append('file', file);
@@ -865,12 +920,14 @@ fn page_header(title: &str, file_upload: bool, web_file_concurrency: usize, favi
if (CANCEL_UPLOAD) {
cancelUpload()
} else {
- itemContainer.dataset.state = 'uploading'
+ itemContainer.dataset.state = UPLOADING
xhr.addEventListener('readystatechange', onReadyStateChange);
xhr.addEventListener("error", onError);
xhr.addEventListener("abort", onAbort);
xhr.upload.addEventListener('progress', onProgress);
xhr.open('post', form.getAttribute("action"), true);
+ xhr.setRequestHeader('X-File-Hash', fileHash);
+ xhr.setRequestHeader('X-File-Hash-Function', 'SHA256');
xhr.send(formData);
}
})