Compare commits
No commits in common. "main" and "jemalloc" have entirely different histories.
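
Cargo.lock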
|
@ -4,19 +4,13 @@ version = 4
|
|||
|
||||
[[package]]
|
||||
name = "addr2line"
|
||||
version = "0.21.0"
|
||||
version = "0.24.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb"
|
||||
checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1"
|
||||
dependencies = [
|
||||
"gimli",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "adler"
|
||||
version = "1.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
|
||||
|
||||
[[package]]
|
||||
name = "adler2"
|
||||
version = "2.0.0"
|
||||
|
@ -38,6 +32,12 @@ dependencies = [
|
|||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anyhow"
|
||||
version = "1.0.95"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"
|
||||
|
||||
[[package]]
|
||||
name = "argh"
|
||||
version = "0.1.13"
|
||||
|
@ -182,17 +182,17 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "backtrace"
|
||||
version = "0.3.71"
|
||||
version = "0.3.74"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d"
|
||||
checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a"
|
||||
dependencies = [
|
||||
"addr2line",
|
||||
"cc",
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"miniz_oxide 0.7.4",
|
||||
"miniz_oxide",
|
||||
"object",
|
||||
"rustc-demangle",
|
||||
"windows-targets",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -224,32 +224,30 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "breeze"
|
||||
version = "0.3.0"
|
||||
version = "0.2.8"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"argh",
|
||||
"atomic-time",
|
||||
"axum",
|
||||
"axum-extra",
|
||||
"base64 0.21.7",
|
||||
"bytes",
|
||||
"color-eyre",
|
||||
"dashmap",
|
||||
"headers",
|
||||
"hmac",
|
||||
"http",
|
||||
"img-parts",
|
||||
"rand 0.9.0",
|
||||
"rand",
|
||||
"rayon",
|
||||
"serde",
|
||||
"serde_with",
|
||||
"sha2",
|
||||
"tikv-jemallocator",
|
||||
"tokio",
|
||||
"tokio-stream",
|
||||
"tokio-util",
|
||||
"toml",
|
||||
"tower",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
"twox-hash",
|
||||
"walkdir",
|
||||
]
|
||||
|
||||
|
@ -299,33 +297,6 @@ dependencies = [
|
|||
"windows-targets",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "color-eyre"
|
||||
version = "0.6.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "55146f5e46f237f7423d74111267d4597b59b0dad0ffaf7303bce9945d843ad5"
|
||||
dependencies = [
|
||||
"backtrace",
|
||||
"color-spantrace",
|
||||
"eyre",
|
||||
"indenter",
|
||||
"once_cell",
|
||||
"owo-colors",
|
||||
"tracing-error",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "color-spantrace"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cd6be1b2a7e382e2b98b43b2adcca6bb0e465af0bdd38123873ae61eb17a72c2"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"owo-colors",
|
||||
"tracing-core",
|
||||
"tracing-error",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "core-foundation-sys"
|
||||
version = "0.8.7"
|
||||
|
@ -350,6 +321,25 @@ dependencies = [
|
|||
"cfg-if",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-deque"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51"
|
||||
dependencies = [
|
||||
"crossbeam-epoch",
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-epoch"
|
||||
version = "0.9.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e"
|
||||
dependencies = [
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-utils"
|
||||
version = "0.8.21"
|
||||
|
@ -413,6 +403,7 @@ dependencies = [
|
|||
"lock_api",
|
||||
"once_cell",
|
||||
"parking_lot_core",
|
||||
"rayon",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -433,25 +424,20 @@ checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
|
|||
dependencies = [
|
||||
"block-buffer",
|
||||
"crypto-common",
|
||||
"subtle",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "either"
|
||||
version = "1.13.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
|
||||
|
||||
[[package]]
|
||||
name = "equivalent"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
|
||||
|
||||
[[package]]
|
||||
name = "eyre"
|
||||
version = "0.6.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec"
|
||||
dependencies = [
|
||||
"indenter",
|
||||
"once_cell",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fnv"
|
||||
version = "1.0.7"
|
||||
|
@ -482,6 +468,23 @@ version = "0.3.31"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
|
||||
|
||||
[[package]]
|
||||
name = "futures-io"
|
||||
version = "0.3.31"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6"
|
||||
|
||||
[[package]]
|
||||
name = "futures-macro"
|
||||
version = "0.3.31"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "futures-sink"
|
||||
version = "0.3.31"
|
||||
|
@ -501,9 +504,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
|
||||
dependencies = [
|
||||
"futures-core",
|
||||
"futures-macro",
|
||||
"futures-task",
|
||||
"pin-project-lite",
|
||||
"pin-utils",
|
||||
"slab",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -518,32 +523,20 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "getrandom"
|
||||
version = "0.2.16"
|
||||
version = "0.2.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592"
|
||||
checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"wasi 0.11.0+wasi-snapshot-preview1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "getrandom"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"wasi 0.13.3+wasi-0.2.2",
|
||||
"windows-targets",
|
||||
"wasi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gimli"
|
||||
version = "0.28.1"
|
||||
version = "0.31.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253"
|
||||
checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
|
||||
|
||||
[[package]]
|
||||
name = "h2"
|
||||
|
@ -612,15 +605,6 @@ version = "0.4.3"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
|
||||
|
||||
[[package]]
|
||||
name = "hmac"
|
||||
version = "0.12.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e"
|
||||
dependencies = [
|
||||
"digest",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "http"
|
||||
version = "1.2.0"
|
||||
|
@ -740,15 +724,9 @@ checksum = "dfded0de32cc78ecad0061b3c6a263cec6bce298fc1e670a4926b6723664ed87"
|
|||
dependencies = [
|
||||
"bytes",
|
||||
"crc32fast",
|
||||
"miniz_oxide 0.8.2",
|
||||
"miniz_oxide",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "indenter"
|
||||
version = "0.3.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683"
|
||||
|
||||
[[package]]
|
||||
name = "indexmap"
|
||||
version = "1.9.3"
|
||||
|
@ -833,15 +811,6 @@ version = "0.3.17"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
|
||||
|
||||
[[package]]
|
||||
name = "miniz_oxide"
|
||||
version = "0.7.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08"
|
||||
dependencies = [
|
||||
"adler",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "miniz_oxide"
|
||||
version = "0.8.2"
|
||||
|
@ -858,7 +827,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"wasi 0.11.0+wasi-snapshot-preview1",
|
||||
"wasi",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
|
@ -889,9 +858,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "object"
|
||||
version = "0.32.2"
|
||||
version = "0.36.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441"
|
||||
checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
@ -909,10 +878,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
|
||||
|
||||
[[package]]
|
||||
name = "owo-colors"
|
||||
version = "3.5.0"
|
||||
name = "parking_lot"
|
||||
version = "0.12.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f"
|
||||
checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27"
|
||||
dependencies = [
|
||||
"lock_api",
|
||||
"parking_lot_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "parking_lot_core"
|
||||
|
@ -963,7 +936,7 @@ version = "0.2.20"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04"
|
||||
dependencies = [
|
||||
"zerocopy 0.7.35",
|
||||
"zerocopy",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -991,19 +964,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"rand_chacha 0.3.1",
|
||||
"rand_core 0.6.4",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94"
|
||||
dependencies = [
|
||||
"rand_chacha 0.9.0",
|
||||
"rand_core 0.9.0",
|
||||
"zerocopy 0.8.17",
|
||||
"rand_chacha",
|
||||
"rand_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1013,17 +975,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
|
||||
dependencies = [
|
||||
"ppv-lite86",
|
||||
"rand_core 0.6.4",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_chacha"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
|
||||
dependencies = [
|
||||
"ppv-lite86",
|
||||
"rand_core 0.9.0",
|
||||
"rand_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1032,17 +984,27 @@ version = "0.6.4"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
|
||||
dependencies = [
|
||||
"getrandom 0.2.16",
|
||||
"getrandom",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_core"
|
||||
version = "0.9.0"
|
||||
name = "rayon"
|
||||
version = "1.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b08f3c9802962f7e1b25113931d94f43ed9725bebc59db9d0c3e9a23b67e15ff"
|
||||
checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa"
|
||||
dependencies = [
|
||||
"getrandom 0.3.1",
|
||||
"zerocopy 0.8.17",
|
||||
"either",
|
||||
"rayon-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rayon-core"
|
||||
version = "1.12.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2"
|
||||
dependencies = [
|
||||
"crossbeam-deque",
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1197,17 +1159,6 @@ dependencies = [
|
|||
"digest",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sha2"
|
||||
version = "0.10.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"cpufeatures",
|
||||
"digest",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sharded-slab"
|
||||
version = "0.1.7"
|
||||
|
@ -1263,12 +1214,6 @@ version = "0.11.1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
|
||||
|
||||
[[package]]
|
||||
name = "subtle"
|
||||
version = "2.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.94"
|
||||
|
@ -1357,6 +1302,7 @@ dependencies = [
|
|||
"bytes",
|
||||
"libc",
|
||||
"mio",
|
||||
"parking_lot",
|
||||
"pin-project-lite",
|
||||
"signal-hook-registry",
|
||||
"socket2",
|
||||
|
@ -1394,8 +1340,12 @@ checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078"
|
|||
dependencies = [
|
||||
"bytes",
|
||||
"futures-core",
|
||||
"futures-io",
|
||||
"futures-sink",
|
||||
"futures-util",
|
||||
"hashbrown 0.14.5",
|
||||
"pin-project-lite",
|
||||
"slab",
|
||||
"tokio",
|
||||
]
|
||||
|
||||
|
@ -1494,16 +1444,6 @@ dependencies = [
|
|||
"valuable",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tracing-error"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8b1581020d7a273442f5b45074a6a57d5757ad0a47dac0e9f0bd57b81936f3db"
|
||||
dependencies = [
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tracing-log"
|
||||
version = "0.2.0"
|
||||
|
@ -1529,15 +1469,6 @@ dependencies = [
|
|||
"tracing-log",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "twox-hash"
|
||||
version = "2.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e7b17f197b3050ba473acf9181f7b1d3b66d1cf7356c6cc57886662276e65908"
|
||||
dependencies = [
|
||||
"rand 0.8.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typenum"
|
||||
version = "1.17.0"
|
||||
|
@ -1578,15 +1509,6 @@ version = "0.11.0+wasi-snapshot-preview1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
|
||||
|
||||
[[package]]
|
||||
name = "wasi"
|
||||
version = "0.13.3+wasi-0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2"
|
||||
dependencies = [
|
||||
"wit-bindgen-rt",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen"
|
||||
version = "0.2.99"
|
||||
|
@ -1772,15 +1694,6 @@ dependencies = [
|
|||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wit-bindgen-rt"
|
||||
version = "0.33.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy"
|
||||
version = "0.7.35"
|
||||
|
@ -1788,16 +1701,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0"
|
||||
dependencies = [
|
||||
"byteorder",
|
||||
"zerocopy-derive 0.7.35",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy"
|
||||
version = "0.8.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "aa91407dacce3a68c56de03abe2760159582b846c6a4acd2f456618087f12713"
|
||||
dependencies = [
|
||||
"zerocopy-derive 0.8.17",
|
||||
"zerocopy-derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1810,14 +1714,3 @@ dependencies = [
|
|||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy-derive"
|
||||
version = "0.8.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "06718a168365cad3d5ff0bb133aad346959a2074bd4a85c121255a11304a8626"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||

Cargo.toml (44 lines changed)
@@ -1,45 +1,37 @@
[package]
name = "breeze"
version = "0.3.0"
edition = "2024"
version = "0.2.8"
edition = "2021"

[profile.dev.package]
tikv-jemalloc-sys = { opt-level = 3 }

[dependencies]
argh = "0.1.12"
atomic-time = "0.1.4"
axum = { version = "0.8.1", features = ["macros", "http2"] }
axum-extra = { version = "0.10.0", default-features = false, features = [
"tracing",
"typed-header",
] }
base64 = "0.21"
bytes = "1"
color-eyre = "0.6"
dashmap = { version = "6.1.0", features = ["inline"] }
headers = "0.4"
hmac = "0.12.1"
axum = { version = "0.8.1", features = ["macros", "http2"] }
tower = "0.5"
http = "1.2"
img-parts = "0.3"
rand = "0.9"
serde = { version = "1.0", features = ["derive"] }
serde_with = "3.12"
sha2 = "0.10.9"
tokio = { version = "1", features = [
"rt-multi-thread",
"macros",
"net",
"fs",
"signal",
] }
headers = "0.4"
tokio = { version = "1", features = ["full"] }
tokio-util = { version = "0.7", features = ["full"] }
tokio-stream = "0.1"
tokio-util = { version = "0.7", features = ["io"] }
toml = "0.8.2"
tracing = "0.1"
tracing-subscriber = "0.3"
twox-hash = "2"
bytes = "1"
rand = "0.8.5"
walkdir = "2"
anyhow = "1.0"
serde = { version = "1.0", features = ["derive"] }
serde_with = "3.12"
toml = "0.8.2"
argh = "0.1.12"
dashmap = { version = "6.1.0", features = ["rayon", "inline"] }
rayon = "1.8"
atomic-time = "0.1.4"
img-parts = "0.3"

[target.'cfg(not(target_env = "msvc"))'.dependencies]
tikv-jemallocator = "0.6"

README.md (64 lines changed)
@@ -1,34 +1,27 @@
# breeze

breeze is a simple, performant file upload server.

The primary instance is https://picture.wtf.

## Features

- Basic upload API tailored towards ShareX
- Streamed uploading
- Streamed downloading (on larger files)
- Pause/continue download support with `Range` header
- Upload caching in memory
- Support for ShareX file deletion URLs
- Temporary uploads
- Automatic exif data removal

## Installation
On picture.wtf, breeze's primary instance, it is run using a NixOS module. If you would like to do that too, it is provided by the Nix flake in this repository.

On picture.wtf, breeze is run with the NixOS module provided by `flake.nix`. [Take a look at the config](https://git.min.rip/min/infra/src/branch/main/nixos/hosts/silver/services/breeze.nix) if you want!

Containerised and bare-metal deployments are also supported. Instructions for those are below.
It is very much possible to run and deploy breeze without doing that, though. Containerised and bare-metal deployments are also supported. Instructions for those are below.

To begin, clone the Git repository:

```bash
git clone https://git.min.rip/min/breeze.git
```

If you want to run it as a Docker container, here is an example `docker-compose.yaml` that may be useful for reference.

If you would like to run it as a Docker container, here is an example `docker-compose.yaml` that may be useful for reference.
```
version: '3.6'

@@ -46,51 +39,25 @@ services:
ports:
- 8383:8000
```

With this configuration, it is expected that:

- there is a clone of the Git repository in the `./breeze` folder
- there is a `breeze.toml` config file in the current directory
- there is a directory at `/srv/uploads` for storing uploads
- port 8383 will be made accessible to the Internet somehow (either forwarding the port through your firewall directly, or passing it through a reverse proxy)
- you want the uploads to be owned by the user on your system with id 1000. (this is usually your user)
* there is a clone of the Git repository in the `./breeze` folder
* there is a `breeze.toml` config file in the current directory
* there is a directory at `/srv/uploads` for storing uploads
* port 8383 will be made accessible to the Internet somehow (either forwarding the port through your firewall directly, or passing it through a reverse proxy)
* you want the uploads to be owned by the user on your system with id 1000. (this is usually your user)

It can also be installed directly if the Rust toolchain is installed:

```bash
cd breeze
cargo install --path .

# then, you can run w/ a path to your `breeze.toml` config file
breeze --config /path/to/breeze.toml
```

### Exposing publicly

If you want to expose a breeze server to the internet, I highly recommend using a reverse proxy instead of just forwarding its HTTP port.

Caddy is probably the easiest to set up if you are new to reverse proxies. Here is an example `Caddyfile` for the Docker Compose file above (assuming `yourdomain.com` is a domain that points to your server's IP).

```
yourdomain.com {
# enable compression
encode

# forward request to breeze
reverse_proxy 127.0.0.1:8383
}
```

## Usage

### Hosting

Configuration is read through a toml file.

The config file path is specified using the `-c`/`--config` command line switch.

Here is an example config file:

```toml
[engine]
# The base URL that the HTTP server will be accessible on.
@@ -103,12 +70,6 @@ base_url = "http://127.0.0.1:8000"
# If it is not set, no key will be required.
upload_key = "hiiiiiiii"

# OPTIONAL - If set, the secret key used to verify ShareX deletion URLs.
# If it is not set, deletion URLs will not be created or made usable.
# WARNING: Do not share this!! If somebody else obtains it, they can
# generate deletion URLs for any upload!!
deletion_secret = "asdfhjkasdhjfashjlfhjkaskdfjkhdjkh"

# OPTIONAL - specifies what to show when the site is visited on http
# It is sent with text/plain content type.
# There are two variables you can use:
@@ -166,17 +127,13 @@ level = "warn"
```

### Uploading

The HTTP API is pretty simple, and it's easy to make a ShareX configuration for it.

Uploads should be sent to `/new?name={original filename}` as a POST request. If the server uses upload keys, it should be sent to `/new?name={original filename}&key={upload key}`. The uploaded file's content should be sent as raw binary in the request body.

Also you can specify `&lastfor={time in seconds}` to make your upload temporary, or `&keepexif=true` to tell the server not to clear EXIF data on image uploads. (if you don't know what EXIF data is, you can leave it as default. you'll know if you need it)

The endpoint's response will just be the URL of the upload in plain text, and the deletion URL will be sent in the `Breeze-Deletion-Url` header (if it's enabled).
Additionally, you may specify `&lastfor={time in seconds}` to make your upload temporary, or `&keepexif=true` to tell the server not to clear EXIF data on image uploads. (if you don't know what EXIF data is, just leave it as default. you'll know if you need it)
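
Outside of ShareX, the same endpoint can be exercised with any HTTP client. The following `curl` sketch is illustrative only: the host and upload key are the placeholder values from the example config above, and `screenshot.png` is a hypothetical file.

```bash
# upload a file as raw binary; the response body is the upload's URL in plain text
# (add -i to also see response headers such as Breeze-Deletion-Url, when deletion URLs are enabled)
curl -X POST --data-binary @screenshot.png \
  "http://127.0.0.1:8000/new?name=screenshot.png&key=hiiiiiiii"

# the same upload, but temporary: it will last for one hour
curl -X POST --data-binary @screenshot.png \
  "http://127.0.0.1:8000/new?name=screenshot.png&key=hiiiiiiii&lastfor=3600"
```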

Here's an example ShareX configuration for it (with a key):

```json
{
"Version": "14.1.0",
@@ -188,7 +145,6 @@ Here's an example ShareX configuration for it (with a key):
"name": "{filename}",
"key": "hiiiiiiii"
},
"Body": "Binary",
"DeletionURL": "{header:Breeze-Deletion-Url}"
"Body": "Binary"
}
```

flake.lock (12 lines changed)
@@ -2,11 +2,11 @@
"nodes": {
"crane": {
"locked": {
"lastModified": 1748047550,
"narHash": "sha256-t0qLLqb4C1rdtiY8IFRH5KIapTY/n3Lqt57AmxEv9mk=",
"lastModified": 1734808813,
"narHash": "sha256-3aH/0Y6ajIlfy7j52FGZ+s4icVX0oHhqBzRdlOeztqg=",
"owner": "ipetkov",
"repo": "crane",
"rev": "b718a78696060df6280196a6f992d04c87a16aef",
"rev": "72e2d02dbac80c8c86bf6bf3e785536acf8ee926",
"type": "github"
},
"original": {
@@ -35,11 +35,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1747958103,
"narHash": "sha256-qmmFCrfBwSHoWw7cVK4Aj+fns+c54EBP8cGqp/yK410=",
"lastModified": 1735821806,
"narHash": "sha256-cuNapx/uQeCgeuhUhdck3JKbgpsml259sjUQnWM7zW8=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "fe51d34885f7b5e3e7b59572796e1bcb427eccb1",
"rev": "d6973081434f88088e5321f83ebafe9a1167c367",
"type": "github"
},
"original": {

flake.nix (33 lines changed)
@@ -77,8 +77,6 @@
with lib; let
cfg = config.services.breeze;
settingsFormat = pkgs.formats.toml {};
defaultUser = "breeze";
defaultGroup = "breeze";
in {
options = {
services.breeze = {
@@ -92,13 +90,13 @@

user = mkOption {
type = types.str;
default = defaultUser;
default = "breeze";
description = "User that `breeze` will run under";
};

group = mkOption {
type = types.str;
default = defaultGroup;
default = "breeze";
description = "Group that `breeze` will run under";
};

@@ -113,7 +111,7 @@
default = {};
description = ''
The *.toml configuration to run `breeze` with.
The options aren't formally documented, but the [readme](https://git.min.rip/min/breeze/src/branch/main/README.md) provides examples.
There is no formal documentation, but there is an example in the [readme](https://git.min.rip/min/breeze/src/branch/main/README.md).
'';
};

@@ -134,29 +132,16 @@
This is useful for loading it from a secret management system.
'';
};

deletionSecretFile = mkOption {
type = types.nullOr types.path;
default = null;
description = ''
File to load the `engine.deletion_secret` from, if desired.
This is useful for loading it from a secret management system.
'';
};
};
};

config = mkIf cfg.enable {
users.users = mkIf (cfg.user == defaultUser) {
${cfg.user} = {
isSystemUser = true;
inherit (cfg) group;
};
users.users.${cfg.user} = {
isSystemUser = true;
inherit (cfg) group;
};

users.groups = mkIf (cfg.group == defaultGroup) {
${cfg.group} = {};
};
users.groups.${cfg.group} = {};

systemd.tmpfiles.rules = [
"d '${cfg.configDir}' 0750 ${cfg.user} ${cfg.group} - -"
@@ -164,7 +149,6 @@

services.breeze.settings = mkMerge [
(mkIf (cfg.uploadKeyFile != null) {engine.upload_key = "@UPLOAD_KEY@";})
(mkIf (cfg.deletionSecretFile != null) {engine.deletion_secret = "@DELETION_SECRET@";})
];

systemd.services.breeze = let
@@ -180,9 +164,6 @@
''
+ lib.optionalString (cfg.uploadKeyFile != null) ''
${pkgs.replace-secret}/bin/replace-secret '@UPLOAD_KEY@' "${cfg.uploadKeyFile}" ${cfgFile}
''
+ lib.optionalString (cfg.deletionSecretFile != null) ''
${pkgs.replace-secret}/bin/replace-secret '@DELETION_SECRET@' "${cfg.deletionSecretFile}" ${cfgFile}
'';

serviceConfig = rec {

src/cache.rs (17 lines changed)
@@ -6,6 +6,7 @@ use std::{
use atomic_time::AtomicSystemTime;
use bytes::Bytes;
use dashmap::{mapref::one::Ref, DashMap};
use rayon::prelude::*;
use tokio::time;

use crate::config;
@@ -80,7 +81,7 @@ impl Cache {
let mut sorted: Vec<_> = self.map.iter().collect();

// Sort by least recently used
sorted.sort_unstable_by_key(|e| e.last_used());
sorted.par_sort_unstable_by(|e1, e2| e1.last_used().cmp(&e2.last_used()));

// Total bytes we would be removing
let mut total = 0;
@@ -141,12 +142,12 @@ impl Cache {
// How far we went above the limit
let needed = new_total - self.cfg.mem_capacity;

self.next_out(needed).iter().for_each(|k| {
self.next_out(needed).par_iter().for_each(|k| {
// Remove the element, and ignore the result
// The only reason it should be failing is if it couldn't find it,
// in which case it was already removed
self.remove(k);
});
})
}

// Atomically add to total cached data length
@@ -169,7 +170,7 @@ impl Cache {
///
/// It exists so we can run the expiry check before
/// actually working with any entries, so no weird bugs happen
fn get_(&self, key: &str) -> Option<Ref<String, Entry>> {
fn _get(&self, key: &str) -> Option<Ref<String, Entry>> {
let e = self.map.get(key)?;

// if the entry is expired get rid of it now
@@ -189,7 +190,7 @@ impl Cache {

/// Get an item from the cache, if it exists.
pub fn get(&self, key: &str) -> Option<Bytes> {
let e = self.get_(key)?;
let e = self._get(key)?;

if e.update_used {
e.last_used.store(SystemTime::now(), Ordering::Relaxed);
@@ -205,7 +206,7 @@ impl Cache {
/// We don't use [`DashMap::contains_key`] here because it would just do
/// the exact same thing I do here, but without running the expiry check logic
pub fn has(&self, key: &str) -> bool {
self.get_(key).is_some()
self._get(key).is_some()
}

/// Returns if an upload is able to be cached
@@ -234,7 +235,7 @@ impl Cache {
// If we fail to compare the times, it gets added to the list anyways
let expired: Vec<_> = self
.map
.iter()
.par_iter()
.filter_map(|e| {
let elapsed = now.duration_since(e.last_used()).unwrap_or(Duration::MAX);
let is_expired = elapsed >= e.lifetime;
@@ -251,7 +252,7 @@ impl Cache {
if !expired.is_empty() {
// Use a retain call, should be less locks that way
// (instead of many remove calls)
self.map.retain(|k, _| !expired.contains(k));
self.map.retain(|k, _| !expired.contains(k))
}
}
}

src/config.rs

@@ -27,12 +27,6 @@ pub struct EngineConfig {
#[serde(default)]
pub upload_key: String,

/// Secret key to use when generating or verifying deletion tokens.
/// Leave blank to disable.
///
/// If this secret is leaked, anyone can delete any file. Be careful!!!
pub deletion_secret: Option<String>,

/// Configuration for disk system
pub disk: DiskConfig,

src/delete.rs

@@ -1,86 +0,0 @@
use std::sync::Arc;

use axum::extract::{Query, State};
use base64::{Engine, prelude::BASE64_URL_SAFE_NO_PAD};
use bytes::{Buf, BytesMut};
use hmac::Mac;
use http::StatusCode;
use serde::Deserialize;

use crate::engine::update_hmac;

#[derive(Deserialize)]
pub struct DeleteRequest {
name: String,
hash: String,
hmac: String,
}

pub async fn delete(
State(engine): State<Arc<crate::engine::Engine>>,
Query(req): Query<DeleteRequest>,
) -> (StatusCode, &'static str) {
let Some(mut hmac) = engine.deletion_hmac.clone() else {
return (StatusCode::CONFLICT, "Deletion is not enabled");
};

// -- decode provided data

// decode user-given hmac
let Ok(provided_hmac) = BASE64_URL_SAFE_NO_PAD.decode(req.hmac) else {
return (StatusCode::BAD_REQUEST, "Could not decode hmac");
};

// decode hash from base64
let Ok(mut provided_hash_data) = BASE64_URL_SAFE_NO_PAD
.decode(req.hash)
.map(|v| BytesMut::from(&v[..]))
else {
return (StatusCode::BAD_REQUEST, "Could not decode partial hash");
};
// read hash
if provided_hash_data.len() != 16 {
return (StatusCode::BAD_REQUEST, "Partial hash length is invalid");
}
let provided_hash = provided_hash_data.get_u128();

// -- verify it

// check if info is valid
let is_hmac_valid = {
// update hmac
update_hmac(&mut hmac, &req.name, provided_hash);
// verify..
hmac.verify_slice(&provided_hmac).is_ok()
};
if !is_hmac_valid {
return (StatusCode::BAD_REQUEST, "Hmac is invalid");
}

// -- ensure hash matches

// okay, now check if we compute the same hash as the req
// this makes sure it's (probably) the same file
let actual_hash = match engine.get_hash(&req.name).await {
Ok(Some(h)) => h,
Ok(None) => return (StatusCode::NOT_FOUND, "File not found"),
Err(err) => {
tracing::error!(%err, "failed to get hash");
return (StatusCode::INTERNAL_SERVER_ERROR, "Internal server error!!");
}
};
// compare
if provided_hash != actual_hash {
return (StatusCode::BAD_REQUEST, "Partial hash did not match");
}

// -- delete file

// everything seems okay so try to delete
if let Err(err) = engine.remove(&req.name).await {
tracing::error!(%err, "failed to delete upload");
return (StatusCode::INTERNAL_SERVER_ERROR, "Delete failed");
}

(StatusCode::OK, "Deleted successfully!")
}
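
The removed handler above reads `name`, `hash`, and `hmac` from the query string; `src/engine.rs` further down builds those same fields into the deletion URL returned after an upload. Purely as an illustration (the host and the two base64url values below are placeholders, not real tokens), a deletion request looks like:

```bash
# GET the deletion URL that breeze handed back for an upload;
# the hash and hmac values are generated by the server, never by the client
curl "http://127.0.0.1:8000/del?name=abc123.png&hash=<base64url-hash>&hmac=<base64url-hmac>"
```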

src/disk.rs (11 lines changed)
@@ -1,4 +1,4 @@
use std::path::{Path, PathBuf};
use std::path::PathBuf;

use bytes::Bytes;
use tokio::{
@@ -32,11 +32,8 @@ impl Disk {

/// Formats the path on disk for a `saved_name`.
fn path_for(&self, saved_name: &str) -> PathBuf {
// try to prevent path traversal by ignoring everything except the file name
let name = Path::new(saved_name).file_name().unwrap_or_default();

let mut p: PathBuf = self.cfg.save_path.clone();
p.push(name);
let mut p = self.cfg.save_path.clone();
p.push(saved_name);

p
}
@@ -68,7 +65,7 @@ impl Disk {
}

/// Create a background I/O task
pub fn start_save(&self, saved_name: &str) -> mpsc::UnboundedSender<Bytes> {
pub async fn start_save(&self, saved_name: &str) -> mpsc::UnboundedSender<Bytes> {
// start a task that handles saving files to disk (we can save to cache/disk in parallel that way)
let (tx, mut rx): (mpsc::UnboundedSender<Bytes>, mpsc::UnboundedReceiver<Bytes>) =
mpsc::unbounded_channel();

src/engine.rs (198 lines changed)
@@ -1,27 +1,22 @@
use std::{
io::SeekFrom,
ops::Bound,
sync::{
Arc,
atomic::{AtomicUsize, Ordering},
Arc,
},
time::Duration,
};

use axum::body::BodyDataStream;
use base64::{Engine as _, prelude::BASE64_URL_SAFE_NO_PAD};
use bytes::{BufMut, Bytes, BytesMut};
use color_eyre::eyre::{self, WrapErr};
use hmac::Mac;
use img_parts::{DynImage, ImageEXIF};
use rand::distr::{Alphanumeric, SampleString};
use rand::distributions::{Alphanumeric, DistString};
use tokio::{
fs::File,
io::{AsyncReadExt, AsyncSeekExt},
};
use tokio_stream::StreamExt;
use tracing::{debug, error, info};
use twox_hash::XxHash3_128;

use crate::{cache, config, disk};

@@ -33,7 +28,6 @@ pub enum UploadData {
Disk(tokio::io::Take<File>),
}

/// Upload data and metadata needed to build a view response
pub struct UploadResponse {
pub full_len: u64,
pub range: (u64, u64),
@@ -44,11 +38,8 @@ pub struct UploadResponse {
/// Some are rejections.
pub enum ProcessOutcome {
/// The upload was successful.
/// We give the user their file's URL (and deletion URL if one was created)
Success {
url: String,
deletion_url: Option<String>,
},
/// We give the user their file's URL
Success(String),

/// Occurs when an upload exceeds the chosen maximum file size.
UploadTooLarge,
@@ -72,9 +63,6 @@ pub enum GetOutcome {
RangeNotSatisfiable,
}

/// Type alias to make using HMAC SHA256 easier
type HmacSha256 = hmac::Hmac<sha2::Sha256>;

/// breeze engine
pub struct Engine {
/// Cached count of uploaded files
@@ -83,9 +71,6 @@ pub struct Engine {
/// Engine configuration
pub cfg: config::EngineConfig,

/// HMAC state initialised with the deletion secret (if present)
pub deletion_hmac: Option<HmacSha256>,

/// The in-memory cache that cached uploads are stored in
cache: Arc<cache::Cache>,

@@ -93,7 +78,6 @@ pub struct Engine {
disk: disk::Disk,
}

/// Try to parse a `Range` header into an easier format to work with
fn resolve_range(range: Option<headers::Range>, full_len: u64) -> Option<(u64, u64)> {
let last_byte = full_len - 1;

@@ -124,40 +108,9 @@ fn resolve_range(range: Option<headers::Range>, full_len: u64) -> Option<(u64, u
Some((start, end))
}

/// Calculate HMAC of field values.
pub fn update_hmac(hmac: &mut HmacSha256, saved_name: &str, hash: u128) {
// mix deletion req fields into one buf
let mut field_bytes = BytesMut::new();
field_bytes.put(saved_name.as_bytes());
field_bytes.put_u128(hash);

// take the hmac
hmac.update(&field_bytes);
}

/// How many bytes of a file should be used for hash calculation.
const SAMPLE_WANTED_BYTES: usize = 32768;

/// Format some info about an upload and hash it
///
/// This should not change between versions!!
/// That would break deletion urls
fn calculate_hash(len: u64, data_sample: Bytes) -> u128 {
let mut buf = BytesMut::new();
buf.put_u64(len);
buf.put(data_sample);

XxHash3_128::oneshot(&buf)
}

impl Engine {
/// Creates a new instance of the engine
pub fn with_config(cfg: config::EngineConfig) -> Self {
let deletion_hmac = cfg
.deletion_secret
.as_ref()
.map(|s| HmacSha256::new_from_slice(s.as_bytes()).unwrap());

let cache = cache::Cache::with_config(cfg.cache.clone());
let disk = disk::Disk::with_config(cfg.disk.clone());

@@ -169,7 +122,6 @@ impl Engine {
Self {
// initialise our cached upload count. this doesn't include temp uploads!
upl_count: AtomicUsize::new(disk.count()),
deletion_hmac,

cfg,

@@ -189,12 +141,14 @@ impl Engine {
&self,
saved_name: &str,
range: Option<headers::Range>,
) -> eyre::Result<GetOutcome> {
) -> anyhow::Result<GetOutcome> {
let data = if let Some(u) = self.cache.get(saved_name) {
u
} else {
// now, check if we have it on disk
let Some(mut f) = self.disk.open(saved_name).await? else {
let mut f = if let Some(f) = self.disk.open(saved_name).await? {
f
} else {
// file didn't exist
return Ok(GetOutcome::NotFound);
};
@@ -226,13 +180,15 @@ impl Engine {

data
} else {
let Some((start, end)) = resolve_range(range, full_len) else {
let (start, end) = if let Some(range) = resolve_range(range, full_len) {
range
} else {
return Ok(GetOutcome::RangeNotSatisfiable);
};

let range_len = (end - start) + 1;

f.seek(SeekFrom::Start(start)).await?;
f.seek(std::io::SeekFrom::Start(start)).await?;
let f = f.take(range_len);

let res = UploadResponse {
@@ -245,7 +201,9 @@ impl Engine {
};

let full_len = data.len() as u64;
let Some((start, end)) = resolve_range(range, full_len) else {
let (start, end) = if let Some(range) = resolve_range(range, full_len) {
range
} else {
return Ok(GetOutcome::RangeNotSatisfiable);
};

@@ -279,52 +237,13 @@ impl Engine {
false
}

/// Try to read a file and calculate a hash for it.
pub async fn get_hash(&self, saved_name: &str) -> eyre::Result<Option<u128>> {
// readout sample data and full len
let (data_sample, len) = if let Some(full_data) = self.cache.get(saved_name) {
// we found it in cache! take as many bytes as we can
let taking = full_data.len().min(SAMPLE_WANTED_BYTES);
let data = full_data.slice(0..taking);

let len = full_data.len() as u64;

tracing::info!("data len is {}", data.len());

(data, len)
} else {
// not in cache, so try disk
let Some(mut f) = self.disk.open(saved_name).await? else {
// not found there either so we just dont have it
return Ok(None);
};

// find len..
let len = f.seek(SeekFrom::End(0)).await?;
f.rewind().await?;

// only take wanted # of bytes for read
let mut f = f.take(SAMPLE_WANTED_BYTES as u64);

// try to read
let mut data = Vec::with_capacity(SAMPLE_WANTED_BYTES);
f.read_to_end(&mut data).await?;
let data = Bytes::from(data);

(data, len)
};

// calculate hash
Ok(Some(calculate_hash(len, data_sample)))
}

/// Generate a new saved name for an upload.
///
/// If it picks a name that already exists, it will try again.
pub async fn gen_saved_name(&self, ext: Option<String>) -> String {
loop {
// generate a 6-character alphanumeric string
let mut saved_name: String = Alphanumeric.sample_string(&mut rand::rng(), 6);
let mut saved_name: String = Alphanumeric.sample_string(&mut rand::thread_rng(), 6);

// if we have an extension, add it now
if let Some(ref ext) = ext {
@@ -344,14 +263,11 @@ impl Engine {
/// Wipe out an upload from all storage.
///
/// This is for deleting failed uploads only!!
pub async fn remove(&self, saved_name: &str) -> eyre::Result<()> {
info!(saved_name, "!! removing upload");
pub async fn remove(&self, saved_name: &str) -> anyhow::Result<()> {
info!("!! removing upload: {saved_name}");

self.cache.remove(saved_name);
self.disk
.remove(saved_name)
.await
.wrap_err("failed to remove file from disk")?;
self.disk.remove(saved_name).await?;

info!("!! successfully removed upload");

@@ -369,7 +285,7 @@ impl Engine {
mut stream: BodyDataStream,
lifetime: Option<Duration>,
keep_exif: bool,
) -> eyre::Result<(Bytes, u64)> {
) -> anyhow::Result<()> {
// if we're using cache, make some space to store the upload in
let mut data = if use_cache {
BytesMut::with_capacity(provided_len.try_into()?)
@@ -379,12 +295,12 @@ impl Engine {

// don't begin a disk save if we're using temporary lifetimes
let tx = if lifetime.is_none() {
Some(self.disk.start_save(saved_name))
Some(self.disk.start_save(saved_name).await)
} else {
None
};

// whether or not we are going to coalesce the data
// whether or not we're gonna coalesce the data
// in order to strip the exif data at the end,
// instead of just sending it off to the i/o task
let coalesce_and_strip = use_cache
@@ -397,11 +313,6 @@ impl Engine {
&& !keep_exif
&& provided_len <= self.cfg.max_strip_len;

// buffer of sampled data for the deletion hash
let mut hash_sample = BytesMut::with_capacity(SAMPLE_WANTED_BYTES);
// actual number of bytes processed
let mut observed_len = 0;

// read and save upload
while let Some(chunk) = stream.next().await {
// if we error on a chunk, fail out
@@ -412,28 +323,15 @@ impl Engine {
if !coalesce_and_strip {
if let Some(ref tx) = tx {
debug!("sending chunk to i/o task");
tx.send(chunk.clone())
.wrap_err("failed to send chunk to i/o task!")?;
tx.send(chunk.clone())?;
}
}

// add to sample if we need to
let wanted = SAMPLE_WANTED_BYTES - hash_sample.len();
if wanted != 0 {
// take as many bytes as we can ...
let taking = chunk.len().min(wanted);
hash_sample.extend_from_slice(&chunk[0..taking]);
}
// record new len
observed_len += chunk.len() as u64;

if use_cache {
debug!("receiving data into buffer");

if data.len() + chunk.len() > data.capacity() {
info!(
"the amount of data sent exceeds the content-length provided by the client! caching will be cancelled for this upload."
);
info!("the amount of data sent exceeds the content-length provided by the client! caching will be cancelled for this upload.");

// if we receive too much data, drop the buffer and stop using cache (it is still okay to use disk, probably)
data = BytesMut::new();
@@ -467,8 +365,7 @@ impl Engine {
// send what we did over to the i/o task, all in one chunk
if let Some(ref tx) = tx {
debug!("sending filled buffer to i/o task");
tx.send(data.clone())
.wrap_err("failed to send coalesced buffer to i/o task!")?;
tx.send(data.clone())?;
}

data
@@ -487,7 +384,7 @@ impl Engine {
};
}

Ok((hash_sample.freeze(), observed_len))
Ok(())
}

pub async fn process(
@@ -497,7 +394,7 @@ impl Engine {
stream: BodyDataStream,
lifetime: Option<Duration>,
keep_exif: bool,
) -> eyre::Result<ProcessOutcome> {
) -> anyhow::Result<ProcessOutcome> {
// if the upload size is greater than our max file size, deny it now
if self.cfg.max_upload_len.is_some_and(|l| provided_len > l) {
return Ok(ProcessOutcome::UploadTooLarge);
@@ -531,47 +428,22 @@ impl Engine {
)
.await;

// handle result
let (hash_sample, len) = match save_result {
// Okay so just extract metadata
Ok(m) => m,
// If anything fails, delete the upload and return the error
Err(err) => {
error!("failed processing upload!");
// If anything fails, delete the upload and return the error
if save_result.is_err() {
error!("failed processing upload!");

self.remove(&saved_name).await?;
return Err(err);
}
};

// if deletion urls are enabled, create one
let deletion_url = self.deletion_hmac.clone().map(|mut hmac| {
// calculate hash of file metadata
let hash = calculate_hash(len, hash_sample);
let mut hash_bytes = BytesMut::new();
hash_bytes.put_u128(hash);
let hash_b64 = BASE64_URL_SAFE_NO_PAD.encode(&hash_bytes);

// take hmac
update_hmac(&mut hmac, &saved_name, hash);
let out = hmac.finalize().into_bytes();
let out_b64 = BASE64_URL_SAFE_NO_PAD.encode(out);

// format deletion url
format!(
"{}/del?name={saved_name}&hash={hash_b64}&hmac={out_b64}",
self.cfg.base_url
)
});
self.remove(&saved_name).await?;
save_result?;
}

// format and send back the url
let url = format!("{}/p/{saved_name}", self.cfg.base_url);
let url = format!("{}/p/{}", self.cfg.base_url, saved_name);

// if all goes well, increment the cached upload counter
self.upl_count.fetch_add(1, Ordering::Relaxed);

info!("finished processing upload!");

Ok(ProcessOutcome::Success { url, deletion_url })
Ok(ProcessOutcome::Success(url))
}
}

src/main.rs (30 lines changed)
@@ -1,7 +1,6 @@
use std::{path::PathBuf, sync::Arc};

use argh::FromArgs;
use color_eyre::eyre::{self, bail, Context};
use engine::Engine;

use axum::{
@@ -13,7 +12,6 @@ use tracing::{info, warn};

mod cache;
mod config;
mod delete;
mod disk;
mod engine;
mod index;
@@ -36,22 +34,19 @@ struct Args {
}

#[tokio::main]
async fn main() -> eyre::Result<()> {
// Install color-eyre
color_eyre::install()?;

async fn main() {
// Read & parse args
let args: Args = argh::from_env();

// Read & parse config
let cfg: config::Config = {
let config_str = fs::read_to_string(args.config).await.wrap_err(
let config_str = fs::read_to_string(args.config).await.expect(
"failed to read config file! make sure it exists and you have read permissions",
)?;
);

toml::from_str(&config_str).wrap_err(
"invalid config! ensure proper fields and structure. reference config is in readme",
)?
toml::from_str(&config_str).unwrap_or_else(|e| {
panic!("invalid config! ensure proper fields and structure. reference config is in readme.\n{e}");
})
};

// Set up tracing
@@ -63,7 +58,7 @@ async fn main() -> eyre::Result<()> {
{
let save_path = cfg.engine.disk.save_path.clone();
if !save_path.exists() || !save_path.is_dir() {
bail!("the save path does not exist or is not a directory! this is invalid");
panic!("the save path does not exist or is not a directory! this is invalid");
}
}
if cfg.engine.upload_key.is_empty() {
@@ -77,7 +72,6 @@ async fn main() -> eyre::Result<()> {
let app = Router::new()
.route("/new", post(new::new))
.route("/p/{saved_name}", get(view::view))
.route("/del", get(delete::delete))
.route("/", get(index::index))
.route("/robots.txt", get(index::robots_txt))
.with_state(Arc::new(engine));
@@ -86,13 +80,11 @@ async fn main() -> eyre::Result<()> {
info!("starting server.");
let listener = TcpListener::bind(&cfg.http.listen_on)
.await
.wrap_err("failed to bind to given `http.listen_on` address! make sure it's valid, and the port isn't already bound")?;
.expect("failed to bind to given `http.listen_on` address! make sure it's valid, and the port isn't already bound");
axum::serve(listener, app)
.with_graceful_shutdown(shutdown_signal())
.await
.wrap_err("failed to start server")?;

Ok(())
.expect("failed to start server");
}

async fn shutdown_signal() {
@@ -114,8 +106,8 @@ async fn shutdown_signal() {
let terminate = std::future::pending::<()>();

tokio::select! {
() = ctrl_c => {},
() = terminate => {},
_ = ctrl_c => {},
_ = terminate => {},
}

info!("shutting down!");

src/new.rs (30 lines changed)
@@ -8,14 +8,12 @@ use std::{
use axum::{
body::Body,
extract::{Query, State},
response::{IntoResponse, Response},
};
use axum_extra::TypedHeader;
use headers::ContentLength;
use http::{HeaderValue, StatusCode};
use http::StatusCode;
use serde::Deserialize;
use serde_with::{DurationSeconds, serde_as};
use tracing::error;
use serde_with::{serde_as, DurationSeconds};

use crate::engine::ProcessOutcome;

@@ -44,7 +42,7 @@ pub async fn new(
Query(req): Query<NewRequest>,
TypedHeader(ContentLength(content_length)): TypedHeader<ContentLength>,
body: Body,
) -> Result<Response, StatusCode> {
) -> Result<String, StatusCode> {
// check upload key, if i need to
if !engine.cfg.upload_key.is_empty() && req.key.unwrap_or_default() != engine.cfg.upload_key {
return Err(StatusCode::FORBIDDEN);
@@ -92,7 +90,7 @@ pub async fn new(

// pass it off to the engine to be processed
// --
// also, error responses here don't get presented properly in ShareX most of the time
// also, error responses here don't get represented properly in ShareX most of the time
// they don't expect the connection to close before they're done uploading, i think
// so it will just present the user with a "connection closed" error
match engine
@@ -101,20 +99,7 @@ pub async fn new(
{
Ok(outcome) => match outcome {
// 200 OK
ProcessOutcome::Success { url, deletion_url } => {
let mut res = url.into_response();

// insert deletion url header if needed
if let Some(deletion_url) = deletion_url {
let deletion_url = HeaderValue::from_str(&deletion_url)
.expect("deletion url contains invalid chars");

let headers = res.headers_mut();
headers.insert("Breeze-Deletion-Url", deletion_url);
}

Ok(res)
}
ProcessOutcome::Success(url) => Ok(url),

// 413 Payload Too Large
ProcessOutcome::UploadTooLarge | ProcessOutcome::TemporaryUploadTooLarge => {
@@ -126,9 +111,6 @@ pub async fn new(
},

// 500 Internal Server Error
Err(err) => {
error!("failed to process upload!! {err:#}");
Err(StatusCode::INTERNAL_SERVER_ERROR)
}
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
}
}

src/view.rs (17 lines changed)
@@ -10,7 +10,6 @@ use axum_extra::TypedHeader;
use headers::Range;
use http::{HeaderValue, StatusCode};
use tokio_util::io::ReaderStream;
use tracing::error;

use crate::engine::{GetOutcome, UploadData, UploadResponse};

@@ -92,23 +91,19 @@ pub async fn view(
Path(original_path): Path<PathBuf>,
range: Option<TypedHeader<Range>>,
) -> Result<UploadResponse, ViewError> {
// try to extract the file name (if it's the only component)
// this makes paths like `asdf%2fabcdef.png` invalid
let saved_name = match original_path.file_name().map(OsStr::to_str) {
Some(Some(n)) if original_path.components().count() == 1 => n,
_ => return Err(ViewError::NotFound),
let saved_name = if let Some(Some(n)) = original_path.file_name().map(OsStr::to_str) {
n
} else {
return Err(ViewError::NotFound);
};

let range = range.map(|TypedHeader(range)| range);
let range = range.map(|th| th.0);

// get result from the engine
match engine.get(saved_name, range).await {
Ok(GetOutcome::Success(res)) => Ok(res),
Ok(GetOutcome::NotFound) => Err(ViewError::NotFound),
Ok(GetOutcome::RangeNotSatisfiable) => Err(ViewError::RangeNotSatisfiable),
Err(err) => {
error!("failed to get upload!! {err:#}");
Err(ViewError::InternalServerError)
}
Err(_) => Err(ViewError::InternalServerError),
}
}