minish 2025-05-24 00:42:33 -04:00
parent 9752430f81
commit a3b9d08e63
Signed by: min
GPG Key ID: FEECFF24EF0CE9E9
9 changed files with 540 additions and 93 deletions

Cargo.lock (generated)

@ -4,13 +4,19 @@ version = 4
[[package]]
name = "addr2line"
version = "0.24.2"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1"
checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb"
dependencies = [
"gimli",
]
[[package]]
name = "adler"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
name = "adler2"
version = "2.0.0"
@ -32,12 +38,6 @@ dependencies = [
"libc",
]
[[package]]
name = "anyhow"
version = "1.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"
[[package]]
name = "argh"
version = "0.1.13"
@ -182,17 +182,17 @@ dependencies = [
[[package]]
name = "backtrace"
version = "0.3.74"
version = "0.3.71"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a"
checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d"
dependencies = [
"addr2line",
"cc",
"cfg-if",
"libc",
"miniz_oxide",
"miniz_oxide 0.7.4",
"object",
"rustc-demangle",
"windows-targets",
]
[[package]]
@ -224,29 +224,32 @@ dependencies = [
[[package]]
name = "breeze"
version = "0.2.9"
version = "0.3.0"
dependencies = [
"anyhow",
"argh",
"atomic-time",
"axum",
"axum-extra",
"base64 0.21.7",
"bytes",
"color-eyre",
"dashmap",
"headers",
"hmac",
"http",
"img-parts",
"rand",
"rand 0.9.0",
"serde",
"serde_with",
"sha2",
"tikv-jemallocator",
"tokio",
"tokio-stream",
"tokio-util",
"toml",
"tower",
"tracing",
"tracing-subscriber",
"twox-hash",
"walkdir",
]
@ -296,6 +299,33 @@ dependencies = [
"windows-targets",
]
[[package]]
name = "color-eyre"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55146f5e46f237f7423d74111267d4597b59b0dad0ffaf7303bce9945d843ad5"
dependencies = [
"backtrace",
"color-spantrace",
"eyre",
"indenter",
"once_cell",
"owo-colors",
"tracing-error",
]
[[package]]
name = "color-spantrace"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd6be1b2a7e382e2b98b43b2adcca6bb0e465af0bdd38123873ae61eb17a72c2"
dependencies = [
"once_cell",
"owo-colors",
"tracing-core",
"tracing-error",
]
[[package]]
name = "core-foundation-sys"
version = "0.8.7"
@ -403,6 +433,7 @@ checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
"block-buffer",
"crypto-common",
"subtle",
]
[[package]]
@ -411,6 +442,16 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
[[package]]
name = "eyre"
version = "0.6.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec"
dependencies = [
"indenter",
"once_cell",
]
[[package]]
name = "fnv"
version = "1.0.7"
@ -475,6 +516,17 @@ dependencies = [
"version_check",
]
[[package]]
name = "getrandom"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592"
dependencies = [
"cfg-if",
"libc",
"wasi 0.11.0+wasi-snapshot-preview1",
]
[[package]]
name = "getrandom"
version = "0.3.1"
@ -489,9 +541,9 @@ dependencies = [
[[package]]
name = "gimli"
version = "0.31.1"
version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253"
[[package]]
name = "h2"
@ -560,6 +612,15 @@ version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
name = "hmac"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e"
dependencies = [
"digest",
]
[[package]]
name = "http"
version = "1.2.0"
@ -679,9 +740,15 @@ checksum = "dfded0de32cc78ecad0061b3c6a263cec6bce298fc1e670a4926b6723664ed87"
dependencies = [
"bytes",
"crc32fast",
"miniz_oxide",
"miniz_oxide 0.8.2",
]
[[package]]
name = "indenter"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683"
[[package]]
name = "indexmap"
version = "1.9.3"
@ -766,6 +833,15 @@ version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
[[package]]
name = "miniz_oxide"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08"
dependencies = [
"adler",
]
[[package]]
name = "miniz_oxide"
version = "0.8.2"
@ -813,9 +889,9 @@ dependencies = [
[[package]]
name = "object"
version = "0.36.7"
version = "0.32.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87"
checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441"
dependencies = [
"memchr",
]
@ -832,6 +908,12 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "owo-colors"
version = "3.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f"
[[package]]
name = "parking_lot_core"
version = "0.9.10"
@ -902,17 +984,38 @@ dependencies = [
"proc-macro2",
]
[[package]]
name = "rand"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
"libc",
"rand_chacha 0.3.1",
"rand_core 0.6.4",
]
[[package]]
name = "rand"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94"
dependencies = [
"rand_chacha",
"rand_core",
"rand_chacha 0.9.0",
"rand_core 0.9.0",
"zerocopy 0.8.17",
]
[[package]]
name = "rand_chacha"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core 0.6.4",
]
[[package]]
name = "rand_chacha"
version = "0.9.0"
@ -920,7 +1023,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
dependencies = [
"ppv-lite86",
"rand_core",
"rand_core 0.9.0",
]
[[package]]
name = "rand_core"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom 0.2.16",
]
[[package]]
@ -929,7 +1041,7 @@ version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b08f3c9802962f7e1b25113931d94f43ed9725bebc59db9d0c3e9a23b67e15ff"
dependencies = [
"getrandom",
"getrandom 0.3.1",
"zerocopy 0.8.17",
]
@ -1085,6 +1197,17 @@ dependencies = [
"digest",
]
[[package]]
name = "sha2"
version = "0.10.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
dependencies = [
"cfg-if",
"cpufeatures",
"digest",
]
[[package]]
name = "sharded-slab"
version = "0.1.7"
@ -1140,6 +1263,12 @@ version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "subtle"
version = "2.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]]
name = "syn"
version = "2.0.94"
@ -1365,6 +1494,16 @@ dependencies = [
"valuable",
]
[[package]]
name = "tracing-error"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b1581020d7a273442f5b45074a6a57d5757ad0a47dac0e9f0bd57b81936f3db"
dependencies = [
"tracing",
"tracing-subscriber",
]
[[package]]
name = "tracing-log"
version = "0.2.0"
@ -1390,6 +1529,15 @@ dependencies = [
"tracing-log",
]
[[package]]
name = "twox-hash"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7b17f197b3050ba473acf9181f7b1d3b66d1cf7356c6cc57886662276e65908"
dependencies = [
"rand 0.8.5",
]
[[package]]
name = "typenum"
version = "1.17.0"

Cargo.toml

@ -1,20 +1,31 @@
[package]
name = "breeze"
version = "0.2.9"
edition = "2021"
version = "0.3.0"
edition = "2024"
[profile.dev.package]
tikv-jemalloc-sys = { opt-level = 3 }
[dependencies]
argh = "0.1.12"
atomic-time = "0.1.4"
axum = { version = "0.8.1", features = ["macros", "http2"] }
axum-extra = { version = "0.10.0", default-features = false, features = [
"tracing",
"typed-header",
] }
axum = { version = "0.8.1", features = ["macros", "http2"] }
tower = "0.5"
http = "1.2"
base64 = "0.21"
bytes = "1"
color-eyre = "0.6"
dashmap = { version = "6.1.0", features = ["inline"] }
headers = "0.4"
hmac = "0.12.1"
http = "1.2"
img-parts = "0.3"
rand = "0.9"
serde = { version = "1.0", features = ["derive"] }
serde_with = "3.12"
sha2 = "0.10.9"
tokio = { version = "1", features = [
"rt-multi-thread",
"macros",
@ -22,21 +33,13 @@ tokio = { version = "1", features = [
"fs",
"signal",
] }
tokio-util = { version = "0.7", features = ["io"] }
tokio-stream = "0.1"
tokio-util = { version = "0.7", features = ["io"] }
toml = "0.8.2"
tracing = "0.1"
tracing-subscriber = "0.3"
bytes = "1"
rand = "0.9"
twox-hash = "2"
walkdir = "2"
anyhow = "1.0"
serde = { version = "1.0", features = ["derive"] }
serde_with = "3.12"
toml = "0.8.2"
argh = "0.1.12"
dashmap = { version = "6.1.0", features = ["inline"] }
atomic-time = "0.1.4"
img-parts = "0.3"
[target.'cfg(not(target_env = "msvc"))'.dependencies]
tikv-jemallocator = "0.6"

README.md

@ -1,27 +1,34 @@
# breeze
breeze is a simple, performant file upload server.
The primary instance is https://picture.wtf.
## Features
- Basic upload API tailored towards ShareX
- Streamed uploading
- Streamed downloading (on larger files)
- Pause/continue download support with `Range` header
- Upload caching in memory
- Support for ShareX file deletion URLs
- Temporary uploads
- Automatic exif data removal
## Installation
On picture.wtf, breeze's primary instance, it is run using a NixOS module. If you would like to do that too, it is provided by the Nix flake in this repository.
It is very much possible to run and deploy breeze without doing that, though. Containerised and bare-metal deployments are also supported. Instructions for those are below.
On picture.wtf, breeze is run with the NixOS module provided by `flake.nix`. [Take a look at the config](https://git.min.rip/min/infra/src/branch/main/nixos/hosts/silver/services/breeze.nix) if you want!
Containerised and bare-metal deployments are also supported. Instructions for those are below.
To begin, clone the Git repository:
```bash
git clone https://git.min.rip/min/breeze.git
```
If you would like to run it as a Docker container, here is an example `docker-compose.yaml` that may be useful for reference.
If you want to run it as a Docker container, here is an example `docker-compose.yaml` that may be useful for reference.
```
version: '3.6'
@ -39,25 +46,51 @@ services:
ports:
- 8383:8000
```
With this configuration, it is expected that:
* there is a clone of the Git repository in the `./breeze` folder
* there is a `breeze.toml` config file in the current directory
* there is a directory at `/srv/uploads` for storing uploads
* port 8383 will be made accessible to the Internet somehow (either forwarding the port through your firewall directly, or passing it through a reverse proxy)
* you want the uploads to be owned by the user on your system with id 1000. (this is usually your user)
- there is a clone of the Git repository in the `./breeze` folder
- there is a `breeze.toml` config file in the current directory
- there is a directory at `/srv/uploads` for storing uploads
- port 8383 will be made accessible to the Internet somehow (either forwarding the port through your firewall directly, or passing it through a reverse proxy)
- you want the uploads to be owned by the user on your system with id 1000. (this is usually your user)
It can also be installed directly if you have the Rust toolchain:
```bash
cd breeze
cargo install --path .
# then, you can run w/ a path to your `breeze.toml` config file
breeze --config /path/to/breeze.toml
```
### Exposing publicly
If you want to expose a breeze server to the internet, I highly recommend using a reverse proxy instead of just forwarding its HTTP port.
Caddy is probably the easiest to set up if you are new to reverse proxies. Here is an example `Caddyfile` for the Docker Compose file above (assuming `yourdomain.com` is a domain that points to your server's IP).
```
yourdomain.com {
# enable compression
encode
# forward request to breeze
reverse_proxy 127.0.0.1:8383
}
```
## Usage
### Hosting
Configuration is read from a TOML file.
The config file path is specified using the `-c`/`--config` command line switch.
Here is an example config file:
```toml
[engine]
# The base URL that the HTTP server will be accessible on.
@ -127,13 +160,15 @@ level = "warn"
```
### Uploading
The HTTP API is pretty simple, and it's easy to make a ShareX configuration for it.
Uploads should be sent to `/new?name={original filename}` as a POST request. If the server uses upload keys, it should be sent to `/new?name={original filename}&key={upload key}`. The uploaded file's content should be sent as raw binary in the request body.
Additionally, you may specify `&lastfor={time in seconds}` to make your upload temporary, or `&keepexif=true` to tell the server not to clear EXIF data on image uploads. (if you don't know what EXIF data is, just leave it as default. you'll know if you need it)
You can also specify `&lastfor={time in seconds}` to make your upload temporary, or `&keepexif=true` to tell the server not to clear EXIF data on image uploads. (if you don't know what EXIF data is, you can leave it as default. you'll know if you need it)
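Outside of ShareX, any HTTP client works. As a rough illustration, here is a hedged Rust sketch of an upload client using the `reqwest` and `tokio` crates (neither is part of breeze; the filename, key, and domain are made up):

```rust
use std::error::Error;

#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    // read the file to upload; `cat.png` is just an example
    let data = tokio::fs::read("cat.png").await?;

    // POST the raw bytes; `key` is only needed if the server sets `engine.upload_key`
    let res = reqwest::Client::new()
        .post("https://yourdomain.com/new?name=cat.png&key=hunter2")
        .body(data)
        .send()
        .await?;

    // if deletion URLs are enabled, one is returned in this header
    if let Some(del) = res.headers().get("Breeze-Deletion-Url") {
        println!("deletion url: {}", del.to_str()?);
    }

    // the response body is the uploaded file's URL
    println!("file url: {}", res.text().await?);
    Ok(())
}
```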
Here's an example ShareX configuration for it (with a key):
```json
{
"Version": "14.1.0",

flake.nix

@ -77,6 +77,8 @@
with lib; let
cfg = config.services.breeze;
settingsFormat = pkgs.formats.toml {};
defaultUser = "breeze";
defaultGroup = "breeze";
in {
options = {
services.breeze = {
@ -90,13 +92,13 @@
user = mkOption {
type = types.str;
default = "breeze";
default = defaultUser;
description = "User that `breeze` will run under";
};
group = mkOption {
type = types.str;
default = "breeze";
default = defaultGroup;
description = "Group that `breeze` will run under";
};
@ -111,7 +113,7 @@
default = {};
description = ''
The *.toml configuration to run `breeze` with.
There is no formal documentation, but there is an example in the [readme](https://git.min.rip/min/breeze/src/branch/main/README.md).
The options aren't formally documented, but the [readme](https://git.min.rip/min/breeze/src/branch/main/README.md) provides examples.
'';
};
@ -132,16 +134,29 @@
This is useful for loading it from a secret management system.
'';
};
deletionSecretFile = mkOption {
type = types.nullOr types.path;
default = null;
description = ''
File to load the `engine.deletion_secret` from, if desired.
This is useful for loading it from a secret management system.
'';
};
};
};
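For illustration, a host using this module might wire the new option up like so (a hedged sketch; the secret path is made up):

```nix
services.breeze = {
  enable = true;
  # spliced into the generated config at service start via replace-secret,
  # so the secret never lands in the world-readable Nix store
  deletionSecretFile = "/run/secrets/breeze-deletion-secret";
};
```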
config = mkIf cfg.enable {
users.users.${cfg.user} = {
users.users = mkIf (cfg.user == defaultUser) {
${cfg.user} = {
isSystemUser = true;
inherit (cfg) group;
};
};
users.groups.${cfg.group} = {};
users.groups = mkIf (cfg.group == defaultGroup) {
${cfg.group} = {};
};
systemd.tmpfiles.rules = [
"d '${cfg.configDir}' 0750 ${cfg.user} ${cfg.group} - -"
@ -149,6 +164,7 @@
services.breeze.settings = mkMerge [
(mkIf (cfg.uploadKeyFile != null) {engine.upload_key = "@UPLOAD_KEY@";})
(mkIf (cfg.deletionSecretFile != null) {engine.deletion_secret = "@DELETION_SECRET@";})
];
systemd.services.breeze = let
@ -164,6 +180,9 @@
''
+ lib.optionalString (cfg.uploadKeyFile != null) ''
${pkgs.replace-secret}/bin/replace-secret '@UPLOAD_KEY@' "${cfg.uploadKeyFile}" ${cfgFile}
''
+ lib.optionalString (cfg.deletionSecretFile != null) ''
${pkgs.replace-secret}/bin/replace-secret '@DELETION_SECRET@' "${cfg.deletionSecretFile}" ${cfgFile}
'';
serviceConfig = rec {

src/config.rs

@ -27,6 +27,12 @@ pub struct EngineConfig {
#[serde(default)]
pub upload_key: String,
/// Secret key to use when generating or verifying deletion tokens.
/// Leave blank to disable.
///
/// If this secret is leaked, anyone can delete any file. Be careful!!!
pub deletion_secret: Option<String>,
/// Configuration for disk system
pub disk: DiskConfig,
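In `breeze.toml` terms, enabling deletion URLs might look like this (a sketch; the value is illustrative, and leaving the key unset keeps deletion disabled):

```toml
[engine]
# if this secret is leaked, anyone can delete any file
deletion_secret = "some-long-random-string"
```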

src/delete.rs (new file)

@ -0,0 +1,86 @@
use std::sync::Arc;
use axum::extract::{Query, State};
use base64::{Engine, prelude::BASE64_URL_SAFE_NO_PAD};
use bytes::{Buf, BytesMut};
use hmac::Mac;
use http::StatusCode;
use serde::Deserialize;
use crate::engine::update_hmac;
#[derive(Deserialize)]
pub struct DeleteRequest {
name: String,
hash: String,
hmac: String,
}
pub async fn delete(
State(engine): State<Arc<crate::engine::Engine>>,
Query(req): Query<DeleteRequest>,
) -> (StatusCode, &'static str) {
let Some(mut hmac) = engine.deletion_hmac.clone() else {
return (StatusCode::CONFLICT, "Deletion is not enabled");
};
// -- decode provided data
// decode user-given hmac
let Ok(provided_hmac) = BASE64_URL_SAFE_NO_PAD.decode(req.hmac) else {
return (StatusCode::BAD_REQUEST, "Could not decode hmac");
};
// decode hash from base64
let Ok(mut provided_hash_data) = BASE64_URL_SAFE_NO_PAD
.decode(req.hash)
.map(|v| BytesMut::from(&v[..]))
else {
return (StatusCode::BAD_REQUEST, "Could not decode partial hash");
};
// read hash
if provided_hash_data.len() != 16 {
return (StatusCode::BAD_REQUEST, "Partial hash length is invalid");
}
let provided_hash = provided_hash_data.get_u128();
// -- verify it
// check if info is valid
let is_hmac_valid = {
// update hmac
update_hmac(&mut hmac, &req.name, provided_hash);
// verify..
hmac.verify_slice(&provided_hmac).is_ok()
};
if !is_hmac_valid {
return (StatusCode::BAD_REQUEST, "Hmac is invalid");
}
// -- ensure hash matches
// okay, now check if we compute the same hash as the req
// this makes sure it's (probably) the same file
let actual_hash = match engine.get_hash(&req.name).await {
Ok(Some(h)) => h,
Ok(None) => return (StatusCode::NOT_FOUND, "File not found"),
Err(err) => {
tracing::error!(%err, "failed to get hash");
return (StatusCode::INTERNAL_SERVER_ERROR, "Internal server error!!");
}
};
// compare
if provided_hash != actual_hash {
return (StatusCode::BAD_REQUEST, "Partial hash did not match");
}
// -- delete file
// everything seems okay so try to delete
if let Err(err) = engine.remove(&req.name).await {
tracing::error!(%err, "failed to delete upload");
return (StatusCode::INTERNAL_SERVER_ERROR, "Delete failed");
}
(StatusCode::OK, "Deleted successfully!")
}
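The token scheme above is plain HMAC-SHA256 over the saved name and the partial content hash. As a minimal standalone sketch of that sign/verify round trip with the same `hmac` and `sha2` crates (the key, name, and hash value here are made up):

```rust
use hmac::{Hmac, Mac};
use sha2::Sha256;

type HmacSha256 = Hmac<Sha256>;

fn main() {
    let key = b"example deletion secret";

    // sign: feed in the same fields `update_hmac` mixes together
    let mut mac = HmacSha256::new_from_slice(key).expect("HMAC accepts any key length");
    mac.update(b"abcdefgh.png");
    mac.update(&42u128.to_be_bytes());
    let tag = mac.finalize().into_bytes();

    // verify: recompute over the same fields; comparison is constant-time
    let mut mac = HmacSha256::new_from_slice(key).unwrap();
    mac.update(b"abcdefgh.png");
    mac.update(&42u128.to_be_bytes());
    assert!(mac.verify_slice(&tag).is_ok());
}
```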

src/engine.rs

@ -1,15 +1,18 @@
use std::{
io::SeekFrom,
ops::Bound,
sync::{
atomic::{AtomicUsize, Ordering},
Arc,
atomic::{AtomicUsize, Ordering},
},
time::Duration,
};
use anyhow::Context as _;
use axum::body::BodyDataStream;
use base64::{Engine as _, prelude::BASE64_URL_SAFE_NO_PAD};
use bytes::{BufMut, Bytes, BytesMut};
use color_eyre::eyre::{self, WrapErr};
use hmac::Mac;
use img_parts::{DynImage, ImageEXIF};
use rand::distr::{Alphanumeric, SampleString};
use tokio::{
@ -18,6 +21,7 @@ use tokio::{
};
use tokio_stream::StreamExt;
use tracing::{debug, error, info};
use twox_hash::XxHash3_128;
use crate::{cache, config, disk};
@ -29,6 +33,7 @@ pub enum UploadData {
Disk(tokio::io::Take<File>),
}
/// Upload data and metadata needed to build a view response
pub struct UploadResponse {
pub full_len: u64,
pub range: (u64, u64),
@ -39,8 +44,11 @@ pub struct UploadResponse {
/// Some are rejections.
pub enum ProcessOutcome {
/// The upload was successful.
/// We give the user their file's URL
Success(String),
/// We give the user their file's URL (and deletion URL if one was created)
Success {
url: String,
deletion_url: Option<String>,
},
/// Occurs when an upload exceeds the chosen maximum file size.
UploadTooLarge,
@ -64,6 +72,9 @@ pub enum GetOutcome {
RangeNotSatisfiable,
}
/// Type alias to make using HMAC SHA256 easier
type HmacSha256 = hmac::Hmac<sha2::Sha256>;
/// breeze engine
pub struct Engine {
/// Cached count of uploaded files
@ -72,6 +83,9 @@ pub struct Engine {
/// Engine configuration
pub cfg: config::EngineConfig,
/// HMAC state initialised with the deletion secret (if present)
pub deletion_hmac: Option<HmacSha256>,
/// The in-memory cache that cached uploads are stored in
cache: Arc<cache::Cache>,
@ -79,6 +93,7 @@ pub struct Engine {
disk: disk::Disk,
}
/// Try to parse a `Range` header into an easier format to work with
fn resolve_range(range: Option<headers::Range>, full_len: u64) -> Option<(u64, u64)> {
let last_byte = full_len - 1;
@ -109,9 +124,40 @@ fn resolve_range(range: Option<headers::Range>, full_len: u64) -> Option<(u64, u
Some((start, end))
}
/// Calculate HMAC of field values.
pub fn update_hmac(hmac: &mut HmacSha256, saved_name: &str, hash: u128) {
// mix deletion req fields into one buf
let mut field_bytes = BytesMut::new();
field_bytes.put(saved_name.as_bytes());
field_bytes.put_u128(hash);
// take the hmac
hmac.update(&field_bytes);
}
/// How many bytes of a file should be used for hash calculation.
const SAMPLE_WANTED_BYTES: usize = 32768;
/// Format some info about an upload and hash it
///
/// This should not change between versions!!
/// That would break deletion urls
fn calculate_hash(len: u64, data_sample: Bytes) -> u128 {
let mut buf = BytesMut::new();
buf.put_u64(len);
buf.put(data_sample);
XxHash3_128::oneshot(&buf)
}
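Since this hash doubles as a compatibility boundary for old deletion URLs, a hypothetical regression test (not part of this commit) could pin the behaviour down:

```rust
#[cfg(test)]
mod hash_tests {
    use super::calculate_hash;
    use bytes::Bytes;

    #[test]
    fn hash_covers_len_and_sample() {
        let sample = Bytes::from_static(b"first bytes of the file");

        // same length + same sample => same hash, every version
        assert_eq!(
            calculate_hash(1024, sample.clone()),
            calculate_hash(1024, sample.clone())
        );

        // a different length must produce a different hash input
        assert_ne!(
            calculate_hash(1024, sample.clone()),
            calculate_hash(1025, sample)
        );
    }
}
```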
impl Engine {
/// Creates a new instance of the engine
pub fn with_config(cfg: config::EngineConfig) -> Self {
let deletion_hmac = cfg
.deletion_secret
.as_ref()
.map(|s| HmacSha256::new_from_slice(s.as_bytes()).unwrap());
let cache = cache::Cache::with_config(cfg.cache.clone());
let disk = disk::Disk::with_config(cfg.disk.clone());
@ -123,6 +169,7 @@ impl Engine {
Self {
// initialise our cached upload count. this doesn't include temp uploads!
upl_count: AtomicUsize::new(disk.count()),
deletion_hmac,
cfg,
@ -142,7 +189,7 @@ impl Engine {
&self,
saved_name: &str,
range: Option<headers::Range>,
) -> anyhow::Result<GetOutcome> {
) -> eyre::Result<GetOutcome> {
let data = if let Some(u) = self.cache.get(saved_name) {
u
} else {
@ -185,7 +232,7 @@ impl Engine {
let range_len = (end - start) + 1;
f.seek(std::io::SeekFrom::Start(start)).await?;
f.seek(SeekFrom::Start(start)).await?;
let f = f.take(range_len);
let res = UploadResponse {
@ -232,6 +279,45 @@ impl Engine {
false
}
/// Try to read a file and calculate a hash for it.
pub async fn get_hash(&self, saved_name: &str) -> eyre::Result<Option<u128>> {
// read out sample data and full len
let (data_sample, len) = if let Some(full_data) = self.cache.get(saved_name) {
// we found it in cache! take as many bytes as we can
let taking = full_data.len().min(SAMPLE_WANTED_BYTES);
let data = full_data.slice(0..taking);
let len = full_data.len() as u64;
tracing::info!("data len is {}", data.len());
(data, len)
} else {
// not in cache, so try disk
let Some(mut f) = self.disk.open(saved_name).await? else {
// not found there either so we just don't have it
return Ok(None);
};
// find len..
let len = f.seek(SeekFrom::End(0)).await?;
f.rewind().await?;
// only take wanted # of bytes for read
let mut f = f.take(SAMPLE_WANTED_BYTES as u64);
// try to read
let mut data = Vec::with_capacity(SAMPLE_WANTED_BYTES);
f.read_to_end(&mut data).await?;
let data = Bytes::from(data);
(data, len)
};
// calculate hash
Ok(Some(calculate_hash(len, data_sample)))
}
/// Generate a new saved name for an upload.
///
/// If it picks a name that already exists, it will try again.
@ -258,14 +344,14 @@ impl Engine {
/// Wipe out an upload from all storage.
///
/// This is for deleting failed uploads only!!
pub async fn remove(&self, saved_name: &str) -> anyhow::Result<()> {
info!("!! removing upload: {saved_name}");
pub async fn remove(&self, saved_name: &str) -> eyre::Result<()> {
info!(saved_name, "!! removing upload");
self.cache.remove(saved_name);
self.disk
.remove(saved_name)
.await
.context("failed to remove file from disk")?;
.wrap_err("failed to remove file from disk")?;
info!("!! successfully removed upload");
@ -283,7 +369,7 @@ impl Engine {
mut stream: BodyDataStream,
lifetime: Option<Duration>,
keep_exif: bool,
) -> anyhow::Result<()> {
) -> eyre::Result<(Bytes, u64)> {
// if we're using cache, make some space to store the upload in
let mut data = if use_cache {
BytesMut::with_capacity(provided_len.try_into()?)
@ -311,6 +397,11 @@ impl Engine {
&& !keep_exif
&& provided_len <= self.cfg.max_strip_len;
// buffer of sampled data for the deletion hash
let mut hash_sample = BytesMut::with_capacity(SAMPLE_WANTED_BYTES);
// actual number of bytes processed
let mut observed_len = 0;
// read and save upload
while let Some(chunk) = stream.next().await {
// if we error on a chunk, fail out
@ -322,15 +413,27 @@ impl Engine {
if let Some(ref tx) = tx {
debug!("sending chunk to i/o task");
tx.send(chunk.clone())
.context("failed to send chunk to i/o task!")?;
.wrap_err("failed to send chunk to i/o task!")?;
}
}
// add to sample if we need to
let wanted = SAMPLE_WANTED_BYTES - hash_sample.len();
if wanted != 0 {
// take as many bytes as we can ...
let taking = chunk.len().min(wanted);
hash_sample.extend_from_slice(&chunk[0..taking]);
}
// record new len
observed_len += chunk.len() as u64;
if use_cache {
debug!("receiving data into buffer");
if data.len() + chunk.len() > data.capacity() {
info!("the amount of data sent exceeds the content-length provided by the client! caching will be cancelled for this upload.");
info!(
"the amount of data sent exceeds the content-length provided by the client! caching will be cancelled for this upload."
);
// if we receive too much data, drop the buffer and stop using cache (it is still okay to use disk, probably)
data = BytesMut::new();
@ -365,7 +468,7 @@ impl Engine {
if let Some(ref tx) = tx {
debug!("sending filled buffer to i/o task");
tx.send(data.clone())
.context("failed to send coalesced buffer to i/o task!")?;
.wrap_err("failed to send coalesced buffer to i/o task!")?;
}
data
@ -384,7 +487,7 @@ impl Engine {
};
}
Ok(())
Ok((hash_sample.freeze(), observed_len))
}
pub async fn process(
@ -394,7 +497,7 @@ impl Engine {
stream: BodyDataStream,
lifetime: Option<Duration>,
keep_exif: bool,
) -> anyhow::Result<ProcessOutcome> {
) -> eyre::Result<ProcessOutcome> {
// if the upload size is greater than our max file size, deny it now
if self.cfg.max_upload_len.is_some_and(|l| provided_len > l) {
return Ok(ProcessOutcome::UploadTooLarge);
@ -428,22 +531,47 @@ impl Engine {
)
.await;
// handle result
let (hash_sample, len) = match save_result {
// Okay so just extract metadata
Ok(m) => m,
// If anything fails, delete the upload and return the error
if save_result.is_err() {
Err(err) => {
error!("failed processing upload!");
self.remove(&saved_name).await?;
save_result?;
return Err(err);
}
};
// if deletion urls are enabled, create one
let deletion_url = self.deletion_hmac.clone().map(|mut hmac| {
// calculate hash of file metadata
let hash = calculate_hash(len, hash_sample);
let mut hash_bytes = BytesMut::new();
hash_bytes.put_u128(hash);
let hash_b64 = BASE64_URL_SAFE_NO_PAD.encode(&hash_bytes);
// take hmac
update_hmac(&mut hmac, &saved_name, hash);
let out = hmac.finalize().into_bytes();
let out_b64 = BASE64_URL_SAFE_NO_PAD.encode(out);
// format deletion url
format!(
"{}/del?name={saved_name}&hash={hash_b64}&hmac={out_b64}",
self.cfg.base_url
)
});
// format and send back the url
let url = format!("{}/p/{}", self.cfg.base_url, saved_name);
let url = format!("{}/p/{saved_name}", self.cfg.base_url);
// if all goes well, increment the cached upload counter
self.upl_count.fetch_add(1, Ordering::Relaxed);
info!("finished processing upload!");
Ok(ProcessOutcome::Success(url))
Ok(ProcessOutcome::Success { url, deletion_url })
}
}

src/main.rs

@ -1,6 +1,7 @@
use std::{path::PathBuf, sync::Arc};
use argh::FromArgs;
use color_eyre::eyre::{self, bail, Context};
use engine::Engine;
use axum::{
@ -12,6 +13,7 @@ use tracing::{info, warn};
mod cache;
mod config;
mod delete;
mod disk;
mod engine;
mod index;
@ -34,19 +36,22 @@ struct Args {
}
#[tokio::main]
async fn main() {
async fn main() -> eyre::Result<()> {
// Install color-eyre
color_eyre::install()?;
// Read & parse args
let args: Args = argh::from_env();
// Read & parse config
let cfg: config::Config = {
let config_str = fs::read_to_string(args.config).await.expect(
let config_str = fs::read_to_string(args.config).await.wrap_err(
"failed to read config file! make sure it exists and you have read permissions",
);
)?;
toml::from_str(&config_str).unwrap_or_else(|e| {
panic!("invalid config! ensure proper fields and structure. reference config is in readme.\n{e}");
})
toml::from_str(&config_str).wrap_err(
"invalid config! ensure proper fields and structure. reference config is in readme",
)?
};
// Set up tracing
@ -58,7 +63,7 @@ async fn main() {
{
let save_path = cfg.engine.disk.save_path.clone();
if !save_path.exists() || !save_path.is_dir() {
panic!("the save path does not exist or is not a directory! this is invalid");
bail!("the save path does not exist or is not a directory! this is invalid");
}
}
if cfg.engine.upload_key.is_empty() {
@ -72,6 +77,7 @@ async fn main() {
let app = Router::new()
.route("/new", post(new::new))
.route("/p/{saved_name}", get(view::view))
.route("/del", get(delete::delete))
.route("/", get(index::index))
.route("/robots.txt", get(index::robots_txt))
.with_state(Arc::new(engine));
@ -80,11 +86,13 @@ async fn main() {
info!("starting server.");
let listener = TcpListener::bind(&cfg.http.listen_on)
.await
.expect("failed to bind to given `http.listen_on` address! make sure it's valid, and the port isn't already bound");
.wrap_err("failed to bind to given `http.listen_on` address! make sure it's valid, and the port isn't already bound")?;
axum::serve(listener, app)
.with_graceful_shutdown(shutdown_signal())
.await
.expect("failed to start server");
.wrap_err("failed to start server")?;
Ok(())
}
async fn shutdown_signal() {

src/new.rs

@ -8,12 +8,13 @@ use std::{
use axum::{
body::Body,
extract::{Query, State},
response::{IntoResponse, Response},
};
use axum_extra::TypedHeader;
use headers::ContentLength;
use http::StatusCode;
use http::{HeaderValue, StatusCode};
use serde::Deserialize;
use serde_with::{serde_as, DurationSeconds};
use serde_with::{DurationSeconds, serde_as};
use tracing::error;
use crate::engine::ProcessOutcome;
@ -43,7 +44,7 @@ pub async fn new(
Query(req): Query<NewRequest>,
TypedHeader(ContentLength(content_length)): TypedHeader<ContentLength>,
body: Body,
) -> Result<String, StatusCode> {
) -> Result<Response, StatusCode> {
// check upload key, if i need to
if !engine.cfg.upload_key.is_empty() && req.key.unwrap_or_default() != engine.cfg.upload_key {
return Err(StatusCode::FORBIDDEN);
@ -100,7 +101,20 @@ pub async fn new(
{
Ok(outcome) => match outcome {
// 200 OK
ProcessOutcome::Success(url) => Ok(url),
ProcessOutcome::Success { url, deletion_url } => {
let mut res = url.into_response();
// insert deletion url header if needed
if let Some(deletion_url) = deletion_url {
let deletion_url = HeaderValue::from_str(&deletion_url)
.expect("deletion url contains invalid chars");
let headers = res.headers_mut();
headers.insert("Breeze-Deletion-Url", deletion_url);
}
Ok(res)
}
// 413 Payload Too Large
ProcessOutcome::UploadTooLarge | ProcessOutcome::TemporaryUploadTooLarge => {