feat: support max_upload_dir_size config #335

Merged
1 change: 1 addition & 0 deletions Cargo.toml
@@ -46,6 +46,7 @@ tracing = "0.1.40"
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
uts2ts = "0.4.1"
path-clean = "1.0.1"
fs_extra = "1.3.0"

[dependencies.config]
version = "0.14.0"
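The new fs_extra dependency is only used to measure the size of the upload directory before a file is accepted. As a standalone illustration of the call it provides (the "./upload" path here is just an example, not taken from the change):

use std::error::Error;

fn main() -> Result<(), Box<dyn Error>> {
    // fs_extra::dir::get_size recursively sums the size of everything under the given path.
    let size_in_bytes = fs_extra::dir::get_size("./upload")?;
    println!("upload directory currently holds {size_in_bytes} bytes");
    Ok(())
}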
9 changes: 9 additions & 0 deletions fixtures/test-server-upload-dir-limit/config.toml
@@ -0,0 +1,9 @@
[server]
address = "127.0.0.1:8000"
max_content_length = "10KB"
max_uploads = "20KB"
upload_path = "./upload"

[paste]
default_extension = "txt"
duplicate_files = true
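In this fixture, max_content_length caps the size of a single upload while the new max_uploads value caps the total size of the upload directory, so a 9 KB file passes the per-request check but still counts toward the 20 KB directory budget.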
18 changes: 18 additions & 0 deletions fixtures/test-server-upload-dir-limit/test.sh
@@ -0,0 +1,18 @@
#!/usr/bin/env bash

setup() {
truncate -s 9KB bigfile1 bigfile2 bigfile3
}

run_test() {
result=$(curl -s -F "file=@bigfile1" localhost:8000)
result=$(curl -s -F "file=@bigfile2" localhost:8000)

result=$(curl -s -F "file=@bigfile3" localhost:8000)
test "upload directory size limit exceeded" = "$result"
}

teardown() {
rm bigfile*
rm -r upload
}
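Each fixture file is truncated to 9 KB, so the first two uploads leave the directory at roughly 18 KB, under the 20 KB max_uploads limit, while the third would push it to about 27 KB; that is why only the last request is expected to return the limit-exceeded message.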
2 changes: 2 additions & 0 deletions src/config.rs
@@ -43,6 +43,8 @@ pub struct ServerConfig {
pub max_content_length: Byte,
/// Storage path.
pub upload_path: PathBuf,
/// Maximum upload directory size.
pub max_uploads: Option<Byte>,
/// Request timeout.
#[serde(default, with = "humantime_serde")]
pub timeout: Option<Duration>,
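Because the new field is an Option, omitting max_uploads from the configuration keeps the previous behaviour: the directory-size check added in paste.rs below only runs when a limit is configured.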
22 changes: 21 additions & 1 deletion src/paste.rs
@@ -4,12 +4,15 @@ use crate::header::ContentDisposition;
use crate::util;
use actix_web::{error, Error};
use awc::Client;
use std::convert::{TryFrom, TryInto};
use std::fs::{self, File};
use std::io::{Error as IoError, ErrorKind as IoErrorKind, Result as IoResult, Write};
use std::path::{Path, PathBuf};
use std::str;
use std::sync::RwLock;
use std::{
convert::{TryFrom, TryInto},
ops::Add,
};
use url::Url;

/// Type of the data to store.
@@ -109,6 +112,23 @@ impl Paste {
}
}
}

if let Some(max_dir_size) = config.server.max_uploads {
// Converting the payload length (usize) to u64 cannot fail on supported platforms; unwrap_or_default is only a safety net.
let file_size = u64::try_from(self.data.len()).unwrap_or_default();
let upload_dir = self.type_.get_path(&config.server.upload_path)?;
let current_size_of_upload_dir = fs_extra::dir::get_size(upload_dir)
.map_err(|_| error::ErrorInternalServerError("Internal server error occurred"))?;

let expected_size_of_upload_dir = current_size_of_upload_dir.add(file_size);

if expected_size_of_upload_dir > max_dir_size {
return Err(error::ErrorInsufficientStorage(
"upload directory size limit exceeded",
));
}
}

let mut file_name = match PathBuf::from(file_name)
.file_name()
.and_then(|v| v.to_str())
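Read in isolation, the new block converts the incoming payload length to bytes, measures the current size of the upload directory, and rejects the request with 507 Insufficient Storage when the sum would exceed the configured limit. Below is a self-contained sketch of that check with the fs_extra and actix-web pieces swapped for plain std equivalents; the function and parameter names are illustrative rather than part of the crate:

use std::fs;
use std::io::{Error as IoError, ErrorKind as IoErrorKind, Result as IoResult};
use std::path::Path;

/// Illustrative only: fails when adding `incoming_len` bytes to `upload_dir`
/// would push its total size above `max_dir_size` bytes.
fn check_upload_dir_limit(upload_dir: &Path, incoming_len: u64, max_dir_size: u64) -> IoResult<()> {
    // Sum the sizes of the entries in the (flat) upload directory;
    // fs_extra::dir::get_size does the same thing recursively.
    let mut current_size = 0u64;
    for entry in fs::read_dir(upload_dir)? {
        current_size += entry?.metadata()?.len();
    }
    // checked_add guards against a theoretical overflow instead of silently wrapping.
    let expected_size = current_size.checked_add(incoming_len).unwrap_or(u64::MAX);
    if expected_size > max_dir_size {
        return Err(IoError::new(
            IoErrorKind::Other,
            "upload directory size limit exceeded",
        ));
    }
    Ok(())
}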