diff --git a/.gitignore b/.gitignore index 3bf25c0..b9abfe4 100644 --- a/.gitignore +++ b/.gitignore @@ -4,9 +4,6 @@ debug/ target/ -# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries -# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html -Cargo.lock # These are backup files generated by rustfmt **/*.rs.bk diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..2c24fb5 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,2245 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "addr2line" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "adler2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anstream" +version = "0.6.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" + +[[package]] +name = "anstyle-parse" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" +dependencies = [ + "anstyle", + "windows-sys 0.52.0", +] + +[[package]] +name = "anyhow" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" + +[[package]] +name = "arc-swap" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" + +[[package]] +name = "async-stream" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.76", +] + +[[package]] +name = "async-trait" +version = "0.1.81" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.76", +] + +[[package]] +name = "asyncio-utils" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6bdd45809a42d6d5452f0048d5344ec63a2e2919b06143af6e70051e6ccca29" +dependencies = [ + "tokio", +] + +[[package]] +name = "atomic" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c59bdb34bc650a32731b31bd8f0829cc15d24a708ee31559e0bb34f2bc320cba" + +[[package]] +name = "atomic" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d818003e740b63afc82337e3160717f4f63078720a810b7b903e70a5d1d2994" +dependencies = [ + "bytemuck", +] + +[[package]] +name = "autocfg" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" + +[[package]] +name = "backtrace" +version = "0.3.73" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide 0.7.4", + "object", + "rustc-demangle", +] + +[[package]] +name = "binascii" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "383d29d513d8764dcdc42ea295d979eb99c3c9f00607b3692cf68a431f7dca72" + +[[package]] +name = "bitflags" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + +[[package]] +name = "bytemuck" +version = "1.17.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773d90827bc3feecfb67fab12e24de0749aad83c74b9504ecde46237b5cd24e2" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8318a53db07bb3f8dca91a600466bdb3f2eaadeedfdbcf02e1accbad9271ba50" + +[[package]] +name = "cc" +version = "1.1.15" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57b6a275aa2903740dc87da01c62040406b8812552e97129a63ea8850a17c6e6" +dependencies = [ + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "wasm-bindgen", + "windows-targets 0.52.6", +] + +[[package]] +name = "clap" +version = "4.5.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed6719fffa43d0d87e5fd8caeab59be1554fb028cd30edc88fc4369b17971019" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.5.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "501d359d5f3dcaf6ecdeee48833ae73ec6e42723a1e52419c79abf9507eec0a0" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.76", +] + +[[package]] +name = "clap_lex" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" + +[[package]] +name = "colorchoice" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" + +[[package]] +name = "cookie" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7efb37c3e1ccb1ff97164ad95ac1606e8ccd35b3fa0a7d99a304c7f4a428cc24" +dependencies = [ + "percent-encoding", + "time", + "version_check", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "crc32fast" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", +] + +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "destructure_traitobject" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3c877555693c14d2f84191cfd3ad8582790fc52b5e2274b40b59cf5f5cea25c7" + +[[package]] +name = "devise" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1d90b0c4c777a2cad215e3c7be59ac7c15adf45cf76317009b7d096d46f651d" +dependencies = [ + "devise_codegen", + "devise_core", +] + +[[package]] +name = "devise_codegen" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71b28680d8be17a570a2334922518be6adc3f58ecc880cbb404eaeb8624fd867" +dependencies = [ + "devise_core", + "quote", +] + +[[package]] +name = "devise_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b035a542cf7abf01f2e3c4d5a7acbaebfefe120ae4efc7bde3df98186e4b8af7" +dependencies = [ + "bitflags", + "proc-macro2", + "proc-macro2-diagnostics", + "quote", + "syn 2.0.76", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", +] + +[[package]] +name = "either" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" + +[[package]] +name = "encoding_rs" +version = "0.8.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "fastrand" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6" + +[[package]] +name = "figment" +version = "0.10.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cb01cd46b0cf372153850f4c6c272d9cbea2da513e07538405148f95bd789f3" +dependencies = [ + "atomic 0.6.0", + "pear", + "serde", + "toml", + "uncased", + "version_check", +] + +[[package]] +name = "flate2" +version = "1.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "324a1be68054ef05ad64b861cc9eaf1d623d2d8cb25b4bf2cb9cdd902b4bf253" +dependencies = [ + "crc32fast", + "miniz_oxide 0.8.0", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "futures" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +dependencies = [ + 
"futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-executor" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" + +[[package]] +name = "futures-macro" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.76", +] + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "generator" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cc16584ff22b460a382b7feec54b23d2908d858152e5739a120b949293bd74e" +dependencies = [ + "cc", + "libc", + "log", + "rustversion", + "windows", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "gimli" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" + +[[package]] +name = "glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + +[[package]] +name = "h2" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 0.2.12", + "indexmap 2.4.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + 
+[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "hermit-abi" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http 0.2.12", + "pin-project-lite", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http 1.1.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +dependencies = [ + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "hyper" +version = "0.14.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a152ddd61dfaec7273fe8419ab357f33aee0d914c5f4efbf0d96fa749eea5ec9" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http 0.2.12", + "http-body 0.4.6", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = 
"iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "serde", +] + +[[package]] +name = "indexmap" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93ead53efc7ea8ed3cfb0c79fc8023fbb782a5432b52830b6518941cebe6505c" +dependencies = [ + "equivalent", + "hashbrown 0.14.5", +] + +[[package]] +name = "inlinable_string" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8fae54786f62fb2918dcfae3d568594e50eb9b5c25bf04371af6fe7516452fb" + +[[package]] +name = "is-terminal" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "261f68e344040fbd0edea105bef17c66edf46f984ddb1115b775ce31be948f4b" +dependencies = [ + "hermit-abi 0.4.0", + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" + +[[package]] +name = "itoa" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" + +[[package]] +name = "js-sys" +version = "0.3.70" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1868808506b929d7b0cfa8f75951347aa71bb21144b7791bae35d9bccfcfe37a" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "libc" +version = "0.2.158" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" + +[[package]] +name = "linux-raw-sys" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" + +[[package]] +name = "lock_api" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" +dependencies = [ + "serde", +] + +[[package]] +name = "log-mdc" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a94d21414c1f4a51209ad204c1776a3d0765002c76c6abcb602a6f09f1e881c7" + +[[package]] +name = "log4rs" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0816135ae15bd0391cf284eab37e6e3ee0a6ee63d2ceeb659862bd8d0a984ca6" +dependencies = [ + "anyhow", + "arc-swap", + "chrono", + "derivative", + "flate2", + "fnv", + "humantime", + "libc", + "log", + "log-mdc", + "once_cell", + "parking_lot", + "rand", + "serde", + "serde-value", + 
"serde_json", + "serde_yaml", + "thiserror", + "thread-id", + "typemap-ors", + "winapi", +] + +[[package]] +name = "loom" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff50ecb28bb86013e935fb6683ab1f6d3a20016f123c76fd4c27470076ac30f5" +dependencies = [ + "cfg-if", + "generator", + "scoped-tls", + "serde", + "serde_json", + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest", +] + +[[package]] +name = "memchr" +version = "2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "miniz_oxide" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" +dependencies = [ + "adler", +] + +[[package]] +name = "miniz_oxide" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" +dependencies = [ + "adler2", +] + +[[package]] +name = "mio" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" +dependencies = [ + "hermit-abi 0.3.9", + "libc", + "wasi", + "windows-sys 0.52.0", +] + +[[package]] +name = "multer" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01acbdc23469fd8fe07ab135923371d5f5a422fbf9c522158677c8eb15bc51c2" +dependencies = [ + "bytes", + "encoding_rs", + "futures-util", + "http 0.2.12", + "httparse", + "log", + "memchr", + "mime", + "spin", + "tokio", + "tokio-util", + "version_check", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi 0.3.9", + "libc", +] + +[[package]] +name = "object" +version = "0.36.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "084f1a5821ac4c651660a94a7153d27ac9d8a53736203f58b31945ded098070a" 
+dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "ordered-float" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" +dependencies = [ + "num-traits", +] + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "parking_lot" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets 0.52.6", +] + +[[package]] +name = "pear" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bdeeaa00ce488657faba8ebf44ab9361f9365a97bd39ffb8a60663f57ff4b467" +dependencies = [ + "inlinable_string", + "pear_codegen", + "yansi 1.0.1", +] + +[[package]] +name = "pear_codegen" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bab5b985dc082b345f812b7df84e1bef27e7207b39e448439ba8bd69c93f147" +dependencies = [ + "proc-macro2", + "proc-macro2-diagnostics", + "quote", + "syn 2.0.76", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pin-project" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.76", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "proc-macro2" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "proc-macro2-diagnostics" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.76", + "version_check", + "yansi 1.0.1", +] + +[[package]] +name = "quote" +version = "1.0.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "redox_syscall" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4" +dependencies = [ + "bitflags", +] + +[[package]] +name = "ref-cast" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf0a6f84d5f1d581da8b41b47ec8600871962f2a528115b542b362d4b744931" +dependencies = [ + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcc303e793d3734489387d205e9b186fac9c6cfacedd98cbb2e8a5943595f3e6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.76", +] + +[[package]] +name = "regex" +version = "1.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata 0.4.7", + "regex-syntax 0.8.4", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-automata" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.8.4", +] + +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" + +[[package]] +name = "rocket" +version = "0.5.0-rc.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"58734f7401ae5cfd129685b48f61182331745b357b96f2367f01aebaf1cc9cc9" +dependencies = [ + "async-stream", + "async-trait", + "atomic 0.5.3", + "binascii", + "bytes", + "either", + "figment", + "futures", + "indexmap 1.9.3", + "is-terminal", + "log", + "memchr", + "multer", + "num_cpus", + "parking_lot", + "pin-project-lite", + "rand", + "ref-cast", + "rocket_codegen", + "rocket_http", + "serde", + "state", + "tempfile", + "time", + "tokio", + "tokio-stream", + "tokio-util", + "ubyte", + "version_check", + "yansi 0.5.1", +] + +[[package]] +name = "rocket_codegen" +version = "0.5.0-rc.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7093353f14228c744982e409259fb54878ba9563d08214f2d880d59ff2fc508b" +dependencies = [ + "devise", + "glob", + "indexmap 1.9.3", + "proc-macro2", + "quote", + "rocket_http", + "syn 2.0.76", + "unicode-xid", +] + +[[package]] +name = "rocket_http" +version = "0.5.0-rc.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "936012c99162a03a67f37f9836d5f938f662e26f2717809761a9ac46432090f4" +dependencies = [ + "cookie", + "either", + "futures", + "http 0.2.12", + "hyper", + "indexmap 1.9.3", + "log", + "memchr", + "pear", + "percent-encoding", + "pin-project-lite", + "ref-cast", + "serde", + "smallvec", + "stable-pattern", + "state", + "time", + "tokio", + "uncased", +] + +[[package]] +name = "rust-s3-server" +version = "0.1.0" +dependencies = [ + "asyncio-utils", + "chrono", + "clap", + "futures", + "http-body 0.4.6", + "http-body-util", + "httpdate", + "hyper", + "lazy_static", + "log", + "log4rs", + "md-5", + "pin-project", + "rand", + "regex", + "rocket", + "serde", + "serde_json", + "tokio", + "ubyte", + "urlencoding", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustix" +version = "0.38.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a85d50532239da68e9addb745ba38ff4612a242c1c7ceea689c4bc7c2f43c36f" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustversion" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" + +[[package]] +name = "ryu" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" + +[[package]] +name = "scoped-tls" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "serde" +version = "1.0.209" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99fce0ffe7310761ca6bf9faf5115afbc19688edd00171d81b1bb1b116c63e09" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde-value" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" +dependencies = [ + "ordered-float", + "serde", +] + +[[package]] +name = "serde_derive" 
+version = "1.0.209" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5831b979fd7b5439637af1752d535ff49f4860c0f341d1baeb6faf0f4242170" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.76", +] + +[[package]] +name = "serde_json" +version = "1.0.127" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8043c06d9f82bd7271361ed64f415fe5e12a77fdb52e573e7f06a516dea329ad" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "serde_spanned" +version = "0.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_yaml" +version = "0.9.34+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" +dependencies = [ + "indexmap 2.4.0", + "itoa", + "ryu", + "serde", + "unsafe-libyaml", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +dependencies = [ + "libc", +] + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + +[[package]] +name = "socket2" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "stable-pattern" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4564168c00635f88eaed410d5efa8131afa8d8699a612c80c455a0ba05c21045" +dependencies = [ + "memchr", +] + +[[package]] +name = "state" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbe866e1e51e8260c9eed836a042a5e7f6726bb2b411dffeaa712e19c388f23b" +dependencies = [ + "loom", +] + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.76" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "578e081a14e0cefc3279b0472138c513f37b41a08d5a3cca9b6e4e8ceb6cd525" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "tempfile" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64" +dependencies = [ + "cfg-if", + "fastrand", + "once_cell", + "rustix", + "windows-sys 0.59.0", +] + +[[package]] +name = "thiserror" +version = "1.0.63" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.63" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.76", +] + +[[package]] +name = "thread-id" +version = "4.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfe8f25bbdd100db7e1d34acf7fd2dc59c4bf8f7483f505eaa7d4f12f76cc0ea" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if", + "once_cell", +] + +[[package]] +name = "time" +version = "0.3.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tokio" +version = "1.40.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.52.0", +] + +[[package]] +name = "tokio-macros" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.76", +] + +[[package]] +name = "tokio-stream" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml" +version = "0.8.19" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.22.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d" +dependencies = [ + "indexmap 2.4.0", + "serde", + "serde_spanned", + "toml_datetime", + "winnow", +] + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.76", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "typemap-ors" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a68c24b707f02dd18f1e4ccceb9d49f2058c2fb86384ef9972592904d7a28867" +dependencies = [ + "unsafe-any-ors", +] + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "ubyte" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f720def6ce1ee2fc44d40ac9ed6d3a59c361c80a75a7aa8e75bb9baed31cf2ea" +dependencies = [ + "serde", +] + +[[package]] +name = "uncased" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1b88fcfe09e89d3866a5c11019378088af2d24c3fbd4f0543f96b479ec90697" +dependencies = [ + "serde", + "version_check", +] + 
+[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "unicode-xid" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "229730647fbc343e3a80e463c1db7f78f3855d3f3739bee0dda773c9a037c90a" + +[[package]] +name = "unsafe-any-ors" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0a303d30665362d9680d7d91d78b23f5f899504d4f08b3c4cf08d055d87c0ad" +dependencies = [ + "destructure_traitobject", +] + +[[package]] +name = "unsafe-libyaml" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" + +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5" +dependencies = [ + "cfg-if", + "once_cell", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.76", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.76", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484" + +[[package]] 
+name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "winnow" +version = "0.6.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f" +dependencies = [ + "memchr", +] + +[[package]] +name = "yansi" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" + +[[package]] +name = "yansi" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" + +[[package]] +name = "zerocopy" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ + "byteorder", + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.76", +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..c7eaad1 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "rust-s3-server" 
+version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +rand = "0.8" +lazy_static = "1.4" +serde = { version = "1", features = ["derive"] } +serde_json = "1.0" +regex = "1.9" +tokio = { version = "1", features = ["full"]} +hyper = { version = "0.14", features = ["full"] } +http-body = "0.4" +http-body-util = "0.1.0-rc.3" +rocket = "=0.5.0-rc.3" +ubyte = "0.10" +chrono = "0.4" +urlencoding = "2.1" +httpdate = "1" +pin-project = "1.1" +md-5 = "0.10" +clap = { version = "4.3", features = ["derive"] } +futures = "0.3" +log = "0.4" +log4rs = { version="1.2.0", features = ["gzip", "background_rotation"] } +asyncio-utils = "0.4" + +[[bin]] +name="rusts3" +path="src/main.rs" + +[[bin]] +name="test" +path="src/test.rs" diff --git a/LICENSE b/LICENSE index 137069b..261eeb9 100644 --- a/LICENSE +++ b/LICENSE @@ -1,73 +1,201 @@ -Apache License -Version 2.0, January 2004 -http://www.apache.org/licenses/ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION -1. Definitions. + 1. Definitions. -"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. -"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. -"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. -"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. -"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. -"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. 
+ "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. -"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). -"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. -"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
-"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. -2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. -3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. -4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: - (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and - (b) You must cause any modified files to carry prominent notices stating that You changed the files; and + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and - (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and - (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. - You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 
+ You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. -5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. -6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. -7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. -8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. -9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. -END OF TERMS AND CONDITIONS + END OF TERMS AND CONDITIONS -APPENDIX: How to apply the Apache License to your work. + APPENDIX: How to apply the Apache License to your work. -To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. 
(Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. -Copyright [yyyy] [name of copyright owner] + Copyright [yyyy] [name of copyright owner] -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at -http://www.apache.org/licenses/LICENSE-2.0 + http://www.apache.org/licenses/LICENSE-2.0 -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md index af8ae9f..29b9a93 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,79 @@ -# s3-server-rs +# rust-s3-server +An S3-compatible object storage server written in Rust, ideal for local test environments. + + +# Building + +```bash +$ cargo build --release +``` + +# Running + +## Getting help +```bash +$ target/release/rusts3 --help +Usage: rusts3 [OPTIONS] + +Options: + -b, --base-dir <BASE_DIR> [default: ./rusts3-data] + --bind-address <BIND_ADDRESS> Bind IP address [default: 0.0.0.0] + --bind-port <BIND_PORT> Bind port number [default: 8000] + --log-conf <LOG_CONF> Log4rs config file [default: ] + -h, --help Print help +``` + +## Example usage: +```bash +$ target/release/rusts3 -b "test-data" --bind-address "192.168.44.172" --bind-port 18000 --log-conf log4rs.yaml +``` + +### Testing +Creating a bucket: +```bash +$ aws --endpoint-url http://192.168.44.172:18000 s3 mb s3://new-bucket +make_bucket: new-bucket +``` + +Uploading an object: +```bash +$ aws --endpoint-url http://192.168.44.172:18000 s3 cp ~/Downloads/zulu.dmg s3://new-bucket/some-path/zulu.dmg +upload: ./zulu.dmg to s3://new-bucket/some-path/zulu.dmg +``` + +Downloading an object: +```bash +$ aws --endpoint-url http://192.168.44.172:18000 s3 cp s3://new-bucket/some-path/zulu.dmg ./new.dmg +download: s3://new-bucket/some-path/zulu.dmg to ./new.dmg + +# files should be the same +$ diff ~/Downloads/zulu.dmg ./new.dmg +``` + +Listing objects: +```bash +$ aws --endpoint-url http://192.168.44.172:18000 s3 ls s3://new-bucket + PRE some-path/ +$ aws --endpoint-url http://192.168.44.172:18000 s3 ls s3://new-bucket/some-path/ + PRE some-path/ +2023-08-13 11:45:25 615835 zulu.dmg +``` + +Deleting an object: +```bash +$ aws --endpoint-url http://192.168.44.172:18000 s3 rm s3://new-bucket/some-path/zulu.dmg +delete: s3://new-bucket/some-path/zulu.dmg +``` + +----- + +Environment variables for pointing the AWS CLI and SDKs at a local instance: +```shell +export AWS_ACCESS_KEY_ID=ABCD +export AWS_SECRET_ACCESS_KEY=EF1234 +export AWS_ENDPOINT_URL=http://127.0.0.1:8001 +``` + + + -S3 Server in Rust \ No newline at end of file diff --git a/log4rs.yml b/log4rs.yml new file mode 100644 index 0000000..80530ae ---
/dev/null +++ b/log4rs.yml @@ -0,0 +1,32 @@ +refresh_rate: 60 seconds + +appenders: + stdout: + kind: console + default: + kind: rolling_file + path: "/var/log/rusts3.log" + append: true + encoder: + pattern: "{d(%Y-%m-%d %H:%M:%S%.3f %Z)} {({l}):5.5} {f}:{L} - {m}{n}" + policy: + kind: compound + trigger: + kind: size + limit: 10 mb + roller: + kind: fixed_window + pattern: "/var/log/rusts3.{}.log.gz" + count: 20 + base: 1 +root: + level: info + appenders: + - stdout + +loggers: + rusts3: + level: info + appenders: + - default + additive: false diff --git a/src/cachedfile.rs b/src/cachedfile.rs new file mode 100644 index 0000000..3e979bf --- /dev/null +++ b/src/cachedfile.rs @@ -0,0 +1,44 @@ +use rocket::http::Status; +use std::time::SystemTime; +use asyncio_utils::LimitSeekerReader; +use tokio::fs::File; +//limit_reader::LimitedReader; + +pub struct CachedFile { + pub reader:LimitSeekerReader, + pub file_name:String, + pub size:usize, + pub modified_time:SystemTime, + pub etag:String, + pub partial: bool, +} + +#[rocket::async_trait] +impl<'r,'o> rocket::response::Responder<'r,'r> for CachedFile +{ + + fn respond_to(self, _req: &'r rocket::Request) -> rocket::response::Result<'r> { + //let etag = self.1.sha256sum().unwrap(); + //let last_modified = self.1.meta().unwrap().modified().unwrap(); + let etag = self.etag; + let htd = httpdate::fmt_http_date(self.modified_time); + + let response = rocket::response::Response::build().sized_body( + self.size, self.reader).finalize(); + + let mut actual_builder = &mut rocket::response::Response::build_from(response); + actual_builder = actual_builder.raw_header("Cache-control", "max-age=86400") + .raw_header("Last-Modified", htd) // 24h (24*60*60) + .raw_header("ETag", etag) + .raw_header("Content-Type", "application-octetstream") + .raw_header("Content-Disposition", format!("attachment; filename=\"{}\"", self.file_name)); + if self.partial { + actual_builder = actual_builder.status(Status::PartialContent); + } + + actual_builder.raw_header("content-length", format!("{}", self.size)); + + return Ok(actual_builder.finalize()); + + } +} diff --git a/src/chunk_to_raw.rs b/src/chunk_to_raw.rs new file mode 100644 index 0000000..367f71e --- /dev/null +++ b/src/chunk_to_raw.rs @@ -0,0 +1,133 @@ +use tokio::fs::File; +use std::error::Error; +use md5::{Md5, Digest}; +use tokio::io::{AsyncRead, AsyncReadExt, AsyncWriteExt}; +use std::i64; +use asyncio_utils::UndoReader; + + +pub async fn copy_chunk_to_raw(input_raw:&mut D, output: &mut File, chunked:bool) -> Result<(usize, String), Box> + where D:AsyncRead + Unpin +{ + let mut input_obj = UndoReader::new(input_raw, None); + let input = &mut input_obj; + let mut hasher:Md5 = Md5::new(); + let mut total:usize = 0; + if chunked { + loop { + // it is chunked + let header = read_chunk_header(input).await?; + let mut tokens = header.split(";"); + let size = tokens.next().expect("No size info!"); + let size = i64::from_str_radix(size, 16)?; + if size == 0 { + break; + } + let size = size as usize; + let copied = copy_fixed_bytes(input, output, size, &mut hasher).await?; + skip_n_bytes(input, 2).await?; + total += copied; + } + let final_hash = format!("{:x}", hasher.finalize()); + return Ok((total, final_hash)); + } else { + // raw + let copied = copy_direct(input, output, &mut hasher).await?; + let final_hash = format!("{:x}", hasher.finalize()); + + return Ok((copied, final_hash)); + } + +} + +async fn try_read_full(input: &mut UndoReader, buff:&mut[u8]) -> Result> + where D:AsyncRead + Unpin +{ + let target = 
buff.len(); + let mut nread:usize = 0; + while nread < target { + let rr = input.read(&mut buff[nread..]).await?; + if rr == 0 { + return Ok(nread); + } + + nread += rr; + } + return Ok(nread); +} +async fn skip_n_bytes(input: &mut UndoReader, how_many:usize) -> Result<(), Box> + where D:AsyncRead + Unpin +{ + let mut buf = vec![0u8; how_many]; + let mut nread:usize = 0; + while nread < how_many { + let rr = input.read(&mut buf[nread..]).await?; + if rr == 0 { + panic!("Insufficient read!"); + } + nread += rr; + } + Ok(()) +} +async fn read_chunk_header(input: &mut UndoReader) -> Result> + where D:AsyncRead + Unpin +{ + let mut buf = [0u8; 512]; + let rr = try_read_full(input, &mut buf).await?; + for i in 0 .. rr - 1 { + if buf[i] == 0x0d && buf[i+1] == 0x0a { + if buf.len() > i + 2 { + input.unread(&buf[i + 2..]); + } + // new line found! + return Ok(std::str::from_utf8(&buf[0..i]).unwrap().to_string()); + } + } + if buf[1] == 0x3b && buf[0] == 0x30 { + // 0 bytes + return Ok(std::str::from_utf8(&buf[0..rr]).unwrap().to_string()); + } + panic!("Expecing chunk header but not found!"); +} + +pub async fn copy_direct(reader:&mut UndoReader, out:&mut tokio::fs::File, hasher:&mut Md5) -> Result> + where D:tokio::io::AsyncRead + Unpin +{ + let mut buf = [0u8; 4096]; + let mut copied: usize = 0; + loop { + let rr = reader.read(&mut buf).await?; + if rr == 0 { + break; + } + out.write_all(&mut buf[..rr]).await?; + hasher.update(&mut buf[..rr]); + copied += rr; + } + return Ok(copied); +} + +async fn copy_fixed_bytes(input: &mut UndoReader, output: &mut File, bytes:usize, hasher:&mut Md5) -> Result> + where D:tokio::io::AsyncRead + Unpin +{ + let mut remaining = bytes; + let mut buffer = [0u8; 4096]; + // Copy as whole chunk + while remaining > 0 { + let mut rr = input.read(&mut buffer).await?; + if rr > remaining { + // overread, put it back + let to_put_back = &buffer[remaining..rr]; + input.unread(to_put_back); + // only write up to remaining bytes + rr = remaining; + } + let wr = output.write(&mut buffer[..rr]).await?; + if wr != rr { + panic!("Incomplete write!"); + } + hasher.update(&buffer[..rr]); + remaining = remaining - wr; + } + Ok(bytes) +} \ No newline at end of file diff --git a/src/fsapi.rs b/src/fsapi.rs new file mode 100644 index 0000000..388da74 --- /dev/null +++ b/src/fsapi.rs @@ -0,0 +1,987 @@ +use std::path::PathBuf; +use std::error::Error; +use std::fs::{Metadata, metadata, self}; +use crate::s3error::S3Error; +use crate::sequencing::{self, Sequence}; +use crate::chunk_to_raw::copy_chunk_to_raw; + +use std::collections::HashMap; +use rand::{distributions::Alphanumeric, Rng}; +use serde_json; +use md5::{Md5, Digest}; + +use tokio::io::{AsyncReadExt, AsyncWriteExt}; +use std::fmt; +use regex::Regex; +use serde::{Deserialize, Serialize}; +use tokio::fs as tokiofs; +use std::sync::Arc; +use std::io::Read; +use std::io::Write; +use chrono::offset::Utc; +use chrono::DateTime; + +#[derive(Debug)] +pub enum S3FSErrorKind { + InvalidBucketName, + InvalidObjectKey, + BucketNotFound, + BucketAlreadyExists, + KeyNotFound, + InvalidMeta, + IncompleteWrite, + InputOutput, + ObjectTooLarge +} + +impl std::fmt::Display for S3FSErrorKind { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{:?}", &self) + } +} +#[derive(Debug)] +pub struct S3FSError { + pub kind: S3FSErrorKind, + pub message: Option +} + +impl std::fmt::Display for S3FSError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut h = String::new(); + match &self.message { + None => {}, + 
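+            // When a message is present it is appended after the kind, e.g. `S3FSError: Kind: BucketNotFound Message: my-bucket`.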
Some(c) => + { + let msg = format!(" Message: {}", c); + h.push_str(&msg); + } + }; + write!(f, "S3FSError: Kind: {}{}", &self.kind, &h) + } +} + +impl Error for S3FSError { +} + +impl S3FSError { + pub fn io(msg: Option) -> S3FSError { + S3FSError { kind: S3FSErrorKind::InputOutput, message: msg } + } + + pub fn invalid_bucket_name (name: &str) -> S3FSError { + S3FSError { kind: S3FSErrorKind::InvalidBucketName, message: Some(name.to_string()) } + } + + pub fn bucket_not_found(name: &str) -> S3FSError { + S3FSError {kind:S3FSErrorKind::BucketNotFound, message:Some(name.to_string())} + } + + pub fn of_kind_and_message(kind:S3FSErrorKind, message:&str) -> S3FSError { + S3FSError {kind:kind, message:Some(message.to_string())} + } + + pub fn of_kind(kind:S3FSErrorKind) -> S3FSError { + S3FSError {kind:kind, message: None} + } + pub fn boxed(self) -> Box { + return Box::new(self); + } +} +pub struct FS { + base: PathBuf, + seq:Arc, +} + +impl FS { + pub fn new() -> FS { + let result = FS { base: PathBuf::from("."), seq:Arc::new(Default::default()) }; + return result; + } + + pub fn set_base(&mut self, new_base:&str) { + self.base = PathBuf::from(new_base); + } + + pub fn initialize(&self) { + self.create_staging_directory(); + } + + pub fn create_staging_directory(&self) { + let staging_dir = self.get_staging_dir(); + let _ = std::fs::create_dir_all(staging_dir); + } + pub fn make_bucket(&self,bucket:&str) -> Result> { + if !Bucket::valid_bucket(&bucket) { + return Err(S3FSError::of_kind_and_message( + S3FSErrorKind::InvalidBucketName, bucket).into()); + } + if self.get_bucket(&bucket).is_some() { + return Err(S3FSError::of_kind_and_message( + S3FSErrorKind::BucketAlreadyExists, + bucket).into()); + } + + let mut base_path = PathBuf::from(&self.base); + base_path.push(&bucket); + let mb_result = std::fs::create_dir_all(base_path); + if mb_result.is_err() { + return Err(S3FSError::of_kind_and_message(S3FSErrorKind::InputOutput, + &format!("Failed to create directory {}: `{}`", + bucket, + mb_result.err().unwrap())).into()); + } + let result = self.get_bucket(bucket); + if result.is_none() { + return Err(S3FSError::of_kind_and_message(S3FSErrorKind::InputOutput, + "Bucket did not exist after creation").into()); + } + + return Ok(result.unwrap()); + + } + pub fn get_staging_dir(&self) -> String { + let mut target = self.base.clone(); + target.push("_staging"); + return target.to_str().unwrap().to_string(); + } + + pub fn get_bucket(&self, name:&str) -> Option{ + if !Bucket::valid_bucket(name) { + return None; + } + let mut path_buf = PathBuf::from(&self.base); + path_buf.push(name); + let meta = + std::fs::metadata(path_buf.clone()); + if meta.is_err() { + return None; + } + let meta = meta.unwrap(); + if !meta.is_dir() { + return None; + } + + let result = Bucket::from(path_buf.to_str().unwrap(), + &self.get_staging_dir(), Arc::clone(&self.seq)); + + if result.is_err() { + return None; + } + + return Some(result.unwrap()); + } + + pub fn get_all_buckets(&self) -> HashMap { + let base_path: PathBuf = self.base.clone(); + let mut staging_dir = base_path.clone(); + staging_dir.push("_staging"); + let _ = std::fs::create_dir_all(&staging_dir); + let mut result = HashMap::new(); + + let read_result = std::fs::read_dir(self.base.clone()); + if read_result.is_err() { + return result; + } + + let dir_entries = read_result.unwrap(); + for next in dir_entries { + if next.is_err() { + continue; + } + + let next = next.unwrap(); + if !next.path().is_dir() { + continue; + } + let name = next.file_name(); + 
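+            // Only directories directly under `base` whose names are valid UTF-8 and pass
+            // `Bucket::valid_bucket` (which also rejects the reserved `_staging` directory) are surfaced
+            // as buckets; entries failing any check, or for which `Bucket::from` errors, are skipped silently.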
let name = name.to_str(); + if name.is_none() { + continue; + } + + let name = name.unwrap(); + if !Bucket::valid_bucket(name) { + continue; + } + + let staging_dir_local = staging_dir.clone(); + let bucket = Bucket::from(next.path().to_str().unwrap(), + staging_dir_local.to_str().unwrap(), + Arc::clone(&self.seq)); + if bucket.is_err() { + continue; + } + let bucket = bucket.unwrap(); + + result.insert(name.to_string(), bucket); + } + return result; + } +} + +#[derive(Debug, Clone)] +pub struct Bucket { + base: PathBuf, + base_path:String, + staging: PathBuf, + seq:Arc +} + +#[derive(Deserialize, Serialize, Debug)] +pub struct FileMeta { + pub etag: String, + pub size: usize, +} + +impl Bucket { + pub const META_SUFFIX:&str = "@@META@@"; + pub fn from(path: &str, staging_path:&str, seq:Arc) -> Result> { + let pathb = PathBuf::from(path); + let pathbx = pathb.canonicalize()?; + let pathbxc = pathbx.clone(); + let mut path_base = pathbxc.as_path().to_str().unwrap().to_string(); + if !path_base.ends_with("/") { + path_base.push_str("/"); + } + Ok(Bucket { + base: pathbx, + base_path: path_base, + staging: PathBuf::from(staging_path), + seq + }) + } + + fn get_sibling(&self, obj:S3Object) -> Vec { + let parent = obj.target.parent(); + if parent.is_none() { + return vec!(); + } + let parent = parent.unwrap().to_path_buf(); + let mut parent_key = obj.key; + if parent_key.ends_with("/") { + parent_key = parent_key.trim_end_matches("/").into(); + } + + let last_slash = parent_key.rfind("/"); + match last_slash { + Some(idx) => { + let self_key_str = &parent_key[0..idx]; + parent_key = self_key_str.into(); + }, + None =>{ + parent_key = "".into(); + } + } + //self_key = self_key.trim_end_matches("").into(); + let parent_obj = S3Object { + bucket:self, + target: parent, + kind: FileType::Directory, + key: parent_key, + }; + return self.get_children(parent_obj); + } + + fn get_children<'a>(&'a self, parent:S3Object) -> Vec>{ + let path = parent.target.clone(); + let parent_key = parent.key; + let dir_iter = std::fs::read_dir(path.clone()); + if dir_iter.is_err() { + return vec!(); + } + let dir_iter = dir_iter.unwrap(); + let mut result = Vec::new(); + for next in dir_iter { + if next.is_err() { + continue; + } + let next = next.unwrap(); + let name = next.file_name(); + let name = name.to_str(); + if name.is_none() { + // invalid name + continue; + } + let name = name.unwrap(); + if name == "." || name == ".." 
|| name.ends_with(Bucket::META_SUFFIX) { + continue; + } + let meta = next.metadata(); + if meta.is_err() { + continue; + } + let meta = meta.unwrap(); + + if meta.is_dir() { + let mut this_path = path.clone(); + this_path.push(name); + let entry = S3Object { + bucket: self, + target: this_path, + kind: FileType::Directory, + key: format!("{parent_key}/{name}") + }; + result.push(entry); + } else if meta.is_file() { + let mut this_path = path.clone(); + this_path.push(name); + let entry = S3Object { + bucket: self, + target: this_path, + kind: FileType::File, + key: format!("{parent_key}/{name}") + }; + result.push(entry); + } + + } + result.sort_by(|a, b| a.target.partial_cmp(&b.target).unwrap()); + return result; + } + + fn collect_children<'a, 'b>(&'b self, obj:S3Object, result:&mut Vec>) + where 'b : 'a + { + let children = self.get_children(obj); + for next_child in children { + let child:S3Object = next_child.clone(); + result.push(child); + if next_child.is_dir() { + self.collect_children(next_child, result); + } + } + } + + pub fn list_objects(&self, prefix:&str, after:&str, limit: usize) -> Vec { + if !Bucket::valid_key(prefix) { + return vec!(); + } + let target = self.file_for_key(prefix); + let file_name = target.get_short_name(); + let parent = target.target.parent(); + let mut result = Vec::new(); + + if parent.is_none() { + return vec!(); + } + + if target.is_dir() { + result.push(target.clone()); + self.collect_children(target, &mut result); + } else { + let siblings = self.get_sibling(target); + for next in siblings { + let next_file_name = next.get_short_name(); + if next_file_name.starts_with(&file_name) { + if next.is_dir() { + self.collect_children(next, &mut result); + } else { + result.push(next.clone()); + } + } + } + } + return result.into_iter() + .filter(|x| x.kind == FileType::File) + .filter(|x| x.has_meta()) + .filter(|x| x.object_key() > after) + .take(limit) + .collect(); + } + + pub fn list_objects_short(&self, key:&str, after:&str, limit:usize) -> Vec { + if !Bucket::valid_key(key) { + return vec!(); + } + let mut path = self.base.clone(); + path.push(key); + let target = self.file_for_key(key); + let file_name = target.get_short_name(); + let parent = target.target.parent(); + let mut result = Vec::new(); + if parent.is_none() { + return vec!(); + } + if target.is_dir() { + result.push(target.clone()); + let children = self.get_children(target); + for next in children { + result.push(next); + } + } else { + let siblings = self.get_sibling(target); + for next in siblings { + let next_file_name = next.get_short_name(); + if next_file_name.starts_with(&file_name) { + result.push(next); + } + } + } + return result.into_iter() + .filter(|x| x.kind == FileType::Directory || (x.kind == FileType::File && x.has_meta())) + .filter(|x| x.object_key() > after) + .take(limit) + .collect(); + + } + + pub fn gen_upload_id(&self) -> String { + let s: String = rand::thread_rng() + .sample_iter(&Alphanumeric) + .take(10) + .map(char::from) + .collect(); + return format!("upload_{s}"); + } + + + pub fn get_creation_time(&self) -> Result> { + let meta = std::fs::metadata(&self.base)?; + let created = meta.created()?; + let datetime: DateTime = created.into(); + // 2009-10-12T17:50:30.000Z + return Ok(format!("{}", datetime.format("%Y-%m-%dT%H:%M:%S%.3f%:z"))); + } + pub fn cleanup_upload_id(&self, id:&str) { + let ids = id.to_string(); + //target.push(id.to_string()); + let mut counter = 0; + loop { + counter +=1; + let mut file_target = self.staging.clone(); + 
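+            // Staged parts are named `{uploadId}_{n}`, each with a `{uploadId}_{n}@@META@@` sidecar.
+            // This loop probes increasing part numbers and removes whatever exists: the first 10 numbers
+            // are always probed, after that the first missing part ends the scan, and 1000 probes is a hard cap.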
file_target.push(format!("{id}_{counter}")); + let mut meta_target = self.staging.clone(); + + meta_target.push(format!("{}_{}{}", ids, counter, Self::META_SUFFIX)); + if meta_target.exists() { + let _ = std::fs::remove_file(meta_target); + } + if file_target.exists() { + let _ = std::fs::remove_file(file_target); + } else { + if counter > 10 { + break; + } + } + if counter > 1000 { + break; + } + } + } + + pub fn valid_bucket(bucket:&str) -> bool { + if bucket == "_staging" { + return false; + } + let reg = "^[a-zA-Z0-9.\\-_]{1,255}$"; + let pattern = Regex::new(reg).unwrap(); + if pattern.is_match(bucket) { + return true; + } + return false; + } + fn valid_key(key:&str) -> bool { + let reg = ".*[\\\\><|:&\\$].*"; + let pattern = Regex::new(reg).unwrap(); + if pattern.is_match(key) { + return false; + } + + if key.contains("/./") || key.contains("/../") || key.contains("//") { + return false; + } + + if key.starts_with("/") { + return false; + } + if key.ends_with(Self::META_SUFFIX) { + return false; + } + + return true; + + } + pub fn list_all_objects(&self) -> Vec { + let mut files = Vec::new(); + Self::scan(&self.base, &mut files); + let mut result = Vec::new(); + let base = &self.base_path; + for item in files { + let full_path = String::from(item.to_str().unwrap()); + let key = full_path.strip_prefix(base).unwrap(); + if !full_path.ends_with(Self::META_SUFFIX) { + result.push( + S3Object { + bucket:&self, + target: item, + kind: FileType::File, + key:String::from(key) + } + ); + } + } + return result; + } + + fn scan(directory:&PathBuf, result: &mut Vec) { + let paths = fs::read_dir(directory.as_path()); + if let Err(_) = paths { + return; + } + let paths = paths.unwrap(); + for next in paths { + match next { + Ok(dir) => { + let next_path = dir.path(); + let meta = fs::metadata(next_path.as_path()); + match meta { + Err(_) => continue, + Ok(metadata) => { + if metadata.is_dir() { + // directory + Self::scan(&next_path, result); + } else if metadata.is_file(){ + result.push(next_path); + } + } + } + }, + Err(_) => { + continue; + } + } + } + } + + pub fn delete_object(&self, key:&str) -> bool { + if !Self::valid_key(key) { + return false; + } + let full_path = format!("{}{}", &self.base_path, &key); + let full_meta_path = format!("{}{}{}", &self.base_path, &key, Self::META_SUFFIX); + let pb = PathBuf::from(full_path); + let pbc = pb.clone(); + let _ = std::fs::remove_file(&pb); + let _ = std::fs::remove_file(&full_meta_path); + let _ = std::fs::remove_dir(&pb); + if key.contains("/") { + let parent = pbc.parent().unwrap(); + let _ = std::fs::remove_dir(parent); + } + return true; + } + + pub fn read_object(&self, key:&str) -> Result> { + if !Self::valid_key(key) { + return Err(S3FSError::of_kind_and_message( + S3FSErrorKind::InvalidObjectKey, key).into()); + } + let file = self.file_for_key(key); + let path = file.target; + return Ok(std::fs::File::open(path.as_path())?); + } + + pub fn get_object_meta(&self, key:&str) -> Result> { + if !Self::valid_key(key) { + return Err(S3FSError::of_kind_and_message(S3FSErrorKind::InvalidObjectKey, key).into()); + } + let meta_file = self.meta_file_for_key(key); + let path = meta_file.target; + let mut input_file = std::fs::File::open(path.as_path())?; + let mut buffer = [0u8; 4096]; + let mut nread:usize = 0; + loop { + let rr = input_file.read(&mut buffer[nread..])?; + if rr == 0 { + break; + } + nread += rr; + } + let string = std::str::from_utf8(&buffer[0..nread]).unwrap(); + let result:FileMeta = serde_json::from_str(string)?; + return 
Ok(result); + } + + pub async fn merge_part(&self, key:&str, upload_id:&str) -> Result<(usize, String), Box> { + let staging_path = self.staging.clone(); + let mut counter = 0; + let dest = self.file_for_key(key); + dest.ensure_parent(); + let mut dest_file = tokio::fs::File::create(dest.target).await?; + let mut total:usize = 0; + let mut hasher = Md5::new(); + loop { + counter = counter + 1; + let mut next_file_p = staging_path.clone(); + next_file_p.push(format!("{}_{}", upload_id, counter)); + let mut next_meta_file_p = staging_path.clone(); + next_meta_file_p.push(format!("{}_{}{}", upload_id, counter, Self::META_SUFFIX)); + if next_file_p.exists() { + let mut next_f = tokio::fs::File::open(next_file_p.as_path()).await?; + let copied = Self::merge_into_with_hash(&mut dest_file, &mut next_f, &mut hasher).await?; + total += copied; + let _ = std::fs::remove_file(next_file_p); + let _ = std::fs::remove_file(next_meta_file_p); + } else { + break; + } + } + let hash = format!("{:x}", hasher.finalize()); + + let meta_dest_f = self.meta_file_for_key(key); + let mut meta_dest = std::fs::File::create(meta_dest_f.path())?; + Self::save_meta(&mut meta_dest, FileMeta { + etag: hash.clone(), + size: total + })?; + return Ok((total, hash)); + } + + async fn merge_into_with_hash(dst:&mut tokio::fs::File, src:&mut tokio::fs::File, hasher:&mut Md5) ->Result> { + let mut buf = [0u8; 4096]; + let mut copied = 0; + loop { + let rr = src.read(&mut buf).await?; + if rr == 0 { + break; + } + hasher.update(&buf[..rr]); + let wr = dst.write(&mut buf[..rr]).await?; + if wr != rr { + return Err(Box::new(S3Error::io_error())) + } + copied += wr; + } + return Ok(copied); + } + + pub async fn save_object_part(&self, _key:&str, upload_id:&str, part_number:u32,reader: &mut D, chunked:bool) -> Result<(usize,String), Box> + where D:tokio::io::AsyncRead + Unpin + { + let mut path = self.staging.clone(); + path.push(format!("{upload_id}_{part_number}")); + let object_path = path; + let mut path_after = self.staging.clone(); + path_after.push(format!("{upload_id}_{part_number}")); + + let mut meta_path = self.staging.clone(); + meta_path.push(format!("{}_{}{}", upload_id, part_number, Self::META_SUFFIX)); + let object_meta_path = meta_path; + let mut tmp_file = tokiofs::File::create(object_path.clone()).await.unwrap(); + let mut tmp_meta = std::fs::File::create(object_meta_path.clone()).unwrap(); + + let (copied, md5) = copy_chunk_to_raw(reader, &mut tmp_file, chunked).await?; + + Self::save_meta(&mut tmp_meta, FileMeta{etag:md5.clone(), size:copied})?; + return Ok((copied, md5)); + } + + + pub async fn save_object(&self, key:&str, reader: &mut D, chunked:bool) -> Result<(usize, String), Box> + where D:tokio::io::AsyncRead + Unpin + { + if !Self::valid_key(key) { + return Err(S3FSError::of_kind_and_message( + S3FSErrorKind::InvalidObjectKey, key).into()) + } + let file = self.file_for_key(key); + let meta = self.meta_file_for_key(key); + + let seq = self.seq.next(); + let staging = self.staging.clone(); + let staging = staging.as_path().to_str().unwrap(); + let object_tmp_name = format!("{staging}/{seq}"); + let object_tmp_meta_name = format!("{}/{}{}", staging, seq, Self::META_SUFFIX); + let mut tmp_file = tokiofs::File::create(object_tmp_name.clone()).await.unwrap(); + let mut tmp_meta = std::fs::File::create(object_tmp_meta_name.clone()).unwrap(); + file.ensure_parent(); + + let (copied_size, md5) = copy_chunk_to_raw( + reader, &mut tmp_file, chunked).await?; + + Self::save_meta(&mut tmp_meta, 
FileMeta{etag:md5.clone(), size:copied_size})?; + + let _ = std::fs::rename(object_tmp_name, file.target); + + let _ = std::fs::rename(object_tmp_meta_name, meta.target); + return Ok((copied_size, md5)); + } + + fn save_meta(out:&mut std::fs::File, meta:FileMeta) -> Result> + { + let to_write = serde_json::to_string(&meta)?; + let written = out.write(to_write.as_bytes())?; + return Ok(written); + } + + + fn try_canicalize(input:PathBuf) ->PathBuf { + let can = input.clone().canonicalize(); + if can.is_err() { + return input; + } + return can.unwrap(); + } + fn file_for_key(&self, key:&str)->S3Object { + let full_path = format!("{}{}", self.base_path, key); + let path_buf = Self::try_canicalize(PathBuf::from(full_path)); + let actual_key = path_buf.to_str().unwrap().to_string(); + let base_string = self.base_path.clone(); + let mut key_short:String = "".into(); + if actual_key.len() > base_string.len() { + key_short = (&actual_key[base_string.len()..]).to_string(); + } + let meta = std::fs::metadata(&path_buf); + let mut obj_type = FileType::Uninitialized; + match meta { + Err(_) => { + + }, + Ok(some_meta) => { + if some_meta.is_dir() { + obj_type = FileType::Directory; + } else if some_meta.is_file() { + obj_type = FileType::File; + } + } + } + return S3Object { + bucket: &self, + target:path_buf, + kind: obj_type, + key: key_short + } + } + + pub fn get_object_by_key(&self, key:&str) -> Option { + let file = self.file_for_key(key); + if !file.exists() { + return None; + } + + if !file.meta().unwrap().is_file() { + return None; + } + return Some(file); + } + + fn meta_file_for_key(&self, key:&str) -> S3Object { + let full_path = format!("{}{}{}", self.base_path, key, Self::META_SUFFIX); + let path_buf = Self::try_canicalize(PathBuf::from(full_path)); + let meta = std::fs::metadata(&path_buf); + let mut obj_type = FileType::Uninitialized; + match meta { + Err(_) => { + + }, + Ok(some_meta) => { + if some_meta.is_dir() { + obj_type = FileType::Directory; + } else if some_meta.is_file() { + obj_type = FileType::File; + } + } + } + return S3Object { + bucket: &self, + target: path_buf, + kind: obj_type, + key: String::from(key) + } + } + pub fn list_objects_old(&self, prefix:&str) -> Vec { + let mut result = Vec::new(); + let all = self.list_all_objects(); + for next in all { + if next.key.starts_with(prefix) { + result.push(next) + } + } + return result; + } + + pub fn list_objects_short_old(&self, prefix:&str) -> Vec { + return vec!(); + } + +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub enum FileType { + Uninitialized, + File, + Directory +} +#[derive(Debug, Clone)] +pub struct S3Object<'a> { + bucket: &'a Bucket, + target: PathBuf, + kind: FileType, + key: String +} + + +impl<'a> S3Object<'a> { + pub async fn open_for_read(&self) -> Result> { + let meta = self.meta()?; + if meta.is_dir() { + return Err(S3FSError::of_kind_and_message(S3FSErrorKind::InvalidObjectKey, + "read target is folder").into()); + } + let open_result = tokiofs::File::open(self.target.clone()).await?; + return Ok(open_result); + } + + pub fn kind(&self) -> &FileType { + &self.kind + } + + pub fn read_as_string(&self) -> Result>{ + let bytes = self.read_as_bytes()?; + return Ok(String::from_utf8(bytes)?); + } + + pub fn read_as_bytes(&self) ->Result, Box> { + let mut file = std::fs::File::open(self.target.clone())?; + let meta = file.metadata()?; + if meta.len() > 10 * 1024 * 1024 { + return Err(Box::new(S3FSError::of_kind_and_message(S3FSErrorKind::ObjectTooLarge, + "Object larger than 10MiB can't be read as a 
whole!"))); + } + let mut buf = Vec::new(); + let _ = file.read_to_end(&mut buf)?; + return Ok(buf); + } + + pub fn is_dir(&self) -> bool { + let meta = self.meta(); + if meta.is_err() { + return false; + } + return meta.unwrap().is_dir(); + } + pub fn is_file(&self) -> bool { + let meta = self.meta(); + if meta.is_err() { + return false; + } + return meta.unwrap().is_file(); + } + + pub fn exists(&self) -> bool { + let meta = self.meta(); + if meta.is_err() { + return false; + } + return true; + } + + pub fn len(&self) -> Result> { + let meta = self.meta()?; + Ok(meta.len()) + } + + pub fn bucket(&self) -> &Bucket { + return self.bucket; + } + + pub fn metafile(&self) -> S3Object { + return self.bucket.meta_file_for_key(&self.key); + } + + + pub fn checksum(&self) -> Result> { + let meta = self.bucket.get_object_meta(&self.key)?; + return Ok(meta.etag); + } + + pub fn last_modified_formatted(&self) -> Result> { + let modified = self.meta()?.modified()?; + let datetime: DateTime = modified.into(); + // 2009-10-12T17:50:30.000Z + return Ok(format!("{}", datetime.format("%Y-%m-%dT%H:%M:%S%.3f%:z"))); + } + + pub fn object_key(&self) -> &str { + return &self.key; + } + + pub fn get_short_name(&self) -> String { + return self.target.file_name().unwrap().to_str().unwrap().to_string(); + } + + pub fn object_key_encoded(&self) -> String { + let to_encode = &self.key; + return urlencoding::encode(to_encode).to_string(); + } + + pub fn ensure_parent(&self) -> bool { + let parent = self.target.parent().unwrap(); + let created = std::fs::create_dir_all(parent); + if created.is_err() { + return false; + } + return true; + } + + fn type_for(path: &PathBuf) -> Option { + let meta = std::fs::metadata(path); + if meta.is_err() { + return None; + } + + let meta = meta.unwrap(); + if meta.is_dir() { + return Some(FileType::Directory); + } + + if meta.is_file() { + return Some(FileType::File); + } + return None; + } + + pub fn meta(&self) -> Result> { + Ok(metadata(self.target.as_path())?) 
+ } + + pub fn has_meta(&self) -> bool { + let meta_file = self.metafile(); + return meta_file.is_file(); + } + + pub fn full_path(&self) -> Option { + let target_str = self.target.to_str()?; + return Some(target_str.to_string()); + } + + pub fn path(&self) -> &PathBuf { + &self.target + } + + pub async fn format(&self) -> String { + let object_key = self.object_key(); + if self.kind == FileType::Directory { + let mut object_key = object_key.clone().to_string(); + if !object_key.ends_with("/") { + object_key.push_str("/"); + } + return format!(r#"{object_key}"#); + } + let object_last_modified = self.last_modified_formatted().unwrap(); + let mut object_etag = self.checksum(); + if object_etag.is_err() { + object_etag = Ok("".into()); + } + let object_etag = object_etag.unwrap(); + let object_size = self.len().unwrap(); + let entry_xml = format!(r#" + {object_key} + {object_last_modified} + "{object_etag}" + {object_size} + STANDARD + "#); + return entry_xml; + } +} diff --git a/src/ioutil.rs b/src/ioutil.rs new file mode 100644 index 0000000..156666c --- /dev/null +++ b/src/ioutil.rs @@ -0,0 +1,9 @@ +use crate::fsapi::Bucket; +use std::error::Error; + +pub fn delete_file_and_meta(path: &str) -> Result<(), Box> { + let meta = format!("{}{}", path, Bucket::META_SUFFIX); + let _ = std::fs::remove_file(path); + let _ = std::fs::remove_file(meta); + return Ok(()); +} \ No newline at end of file diff --git a/src/main.rs b/src/main.rs new file mode 100644 index 0000000..c30419e --- /dev/null +++ b/src/main.rs @@ -0,0 +1,650 @@ +pub mod fsapi; +pub mod sequencing; +pub mod cachedfile; +pub mod s3error; +pub mod s3resp; +pub mod chunk_to_raw; +pub mod request_guards; +pub mod ioutil; +pub mod states; + +use asyncio_utils::LimitSeekerReader; +use lazy_static::__Deref; +use regex::Regex; +use states::CliArg; +use request_guards::{ChunkedEncoding, CreateBucket, AbortMultipartUpload, AuthCheckPassed, CompleteMultipartUpload, CreateMultipartUpload, GetObject}; +use s3resp::S3Response; +use cachedfile::CachedFile; +use rocket::Data; +use rocket::response::Debug; +use ubyte::ToByteUnit; +use std::error::Error; +use std::io::SeekFrom; + +use fsapi::{FS, Bucket}; +#[macro_use] +extern crate rocket; + +extern crate log; +#[allow(unused)] +use log::{info, debug, warn, error, trace}; + +use rocket::State; + +#[derive(FromForm, Debug)] +struct ListQuery { + delimiter: Option, + #[allow(unused)] + #[field(name = "encoding-type")] + encoding_type: Option, + marker:Option, + #[field(name = "max-keys")] + max_keys:Option, + prefix: Option, + #[field(name="list-type")] + list_type: Option, + #[field(name="continuation-token")] + continuation_token: Option +} +#[get("/?", rank=99)] +async fn list_objects_ext(bucket:String, query:ListQuery, #[allow(unused)] marker:request_guards::ListObjects, fs:&State) + -> Option { + let action = "list_objects_ext"; + info!("{action}: bucket `{bucket}`, query `{query:?}`"); + return list_objects(bucket, query, fs).await; +} + +#[get("/?", rank=98)] +async fn list_objects_ext_v2(bucket:String, query:ListQuery, #[allow(unused)] marker:request_guards::ListObjectsV2, fs:&State) +-> Option { + let action = "list_objects_ext_v2"; + info!("{action}: bucket `{bucket}`, query `{query:?}`"); + return list_objects_v2(bucket, query, fs).await; +} + +fn list_bucket<'a>(bucket:&'a Bucket, prefix:&str, delimiter:&Option, after:&str, limit:usize) -> Vec> { + match delimiter { + Some(_str) => { + bucket.list_objects_short(prefix, after, limit) + }, + None => { + bucket.list_objects(prefix, after, 
limit) + } + } +} + +async fn list_objects_v2(bucket:String, query:ListQuery, fs:&State) -> Option { + let action = "list_objects_v2"; + let bucket_arg = bucket.clone(); + let bucket = fs.get_bucket(&bucket); + match bucket { + None => { + return None; + } + _ => { + + } + } + #[allow(unused)] + let list_type = query.list_type.unwrap_or(-1); + + let bucket = bucket.unwrap(); + let actual_prefix:String = query.prefix.unwrap_or(String::from("")); + let ct = query.continuation_token.unwrap_or(String::from("")); + let max_keys = query.max_keys.unwrap_or(100); + let objects:Vec = list_bucket(&bucket, &actual_prefix, &query.delimiter, &ct, max_keys); + let mut is_truncated = false; + let delimiter = query.delimiter.unwrap_or(String::from("")); + if delimiter != "/" && delimiter != "" { + panic!("delimeter must be / or empty"); + } + let mut objects_string = String::new(); + info!("{action}: returned {} objects", objects.len()); + let mut next_ct = "".to_string(); + for next in objects.iter() { + let entry_xml = next.format().await; + objects_string.push_str(&entry_xml); + } + if objects.len() > 0 { + let last_obj = objects.last().unwrap(); + next_ct = last_obj.object_key().to_string(); + is_truncated = true; + next_ct = format!("{next_ct}"); + } + let copy_count = objects.len(); + let result = format!(r###" + + {bucket_arg} + {actual_prefix} + {copy_count} + {ct} + {next_ct} + {max_keys} + {delimiter} + {is_truncated} + {objects_string} + url + + "###); + println!("{result}"); + return Some(result); +} +async fn list_objects(bucket:String, query:ListQuery, fs:&State) -> Option { + let action = "list_objects"; + let bucket_arg = bucket.clone(); + let bucket = fs.get_bucket(&bucket); + match bucket { + None => { + return None; + } + _ => { + + } + } + #[allow(unused)] + let list_type = query.list_type.unwrap_or(-1); + + let bucket = bucket.unwrap(); + let actual_prefix:String = query.prefix.unwrap_or(String::from("")); + let marker = query.marker.unwrap_or(String::from("")); + let max_keys = query.max_keys.unwrap_or(100); + let objects:Vec = list_bucket(&bucket, &actual_prefix, &query.delimiter, &marker, max_keys); + let mut is_truncated = false; + let delimiter = query.delimiter.unwrap_or(String::from("")); + if delimiter != "/" && delimiter != "" { + panic!("delimeter must be / or empty"); + } + let mut objects_string = String::new(); + info!("{action}: returned {} objects", objects.len()); + let mut next_marker = "".to_string(); + for (index, next) in objects.iter().enumerate() { + let entry_xml = next.format().await; + objects_string.push_str(&entry_xml); + } + if objects.len() > 0 { + let last_obj = objects.last().unwrap(); + next_marker = last_obj.object_key().to_string(); + is_truncated = true; + next_marker = format!("{next_marker}"); + } + let result = format!(r###" + + {bucket_arg} + {actual_prefix} + {marker} + {next_marker} + {max_keys} + {delimiter} + {is_truncated} + {objects_string} + url + + "###); + println!("{result}"); + return Some(result); +} + +#[get("/?", rank=4)] +async fn get_bucket_location(bucket:String, #[allow(unused)] marker:request_guards::GetBucketLocation, #[allow(unused)] location:String) -> Option { + let action = "get_bucket_location"; + info!("{action}: bucket: `{bucket}`. 
using hardcoded `ap-southeast-1`"); + return Some(String::from( + r#" + ap-southeast-1 + "# + )); +} + +#[allow(non_snake_case)] +#[put("//?&", data = "", rank=3)] +async fn put_object_part<'a>(bucket:String, key:std::path::PathBuf, partNumber:u32, uploadId:String, chunked:ChunkedEncoding, + fs:&State, #[allow(unused)] marker:request_guards::PutObjectPart, data: rocket::data::Data<'a>) + -> Result, Debug>> { + let action = "put_object_part"; + let key = key.to_str().unwrap(); + info!("{action}: object `{bucket}/{key}` uploadId `{uploadId}`, partNumber `{partNumber}`, chunked `{chunked:?}`"); + let bucket_obj = fs.get_bucket(&bucket); + if bucket_obj.is_none() { + info!("{action}: bucket `{bucket}` not found"); + return Ok(S3Response::not_found()) + } + + + let bucket_obj = bucket_obj.unwrap(); + use rocket::data::ToByteUnit; + let mut data_stream = data.open(5.gigabytes()); + let write_result = bucket_obj.save_object_part(&key, &uploadId, partNumber, &mut data_stream, chunked.0).await; + if write_result.is_err() { + info!("{action}: io error: {}", write_result.err().unwrap()); + return Ok(S3Response::server_error()); + } + let wr = write_result.unwrap(); + info!("{action}: `{bucket}/{key}` `{uploadId}@{partNumber}` written {} bytes, md5 is `{}`", wr.0, wr.1); + //data_stream.stream_to(writer).await; + let mut ok = S3Response::ok(); + ok.add_header("ETag", &format!("{}", wr.1)); + return Ok(ok); +} + +#[put("//", data = "", rank=2)] +async fn put_object<'a>(bucket:String, key:std::path::PathBuf, + chunked:ChunkedEncoding, + fs:&State, + #[allow(unused)] + marker:request_guards::PutObject, + data: rocket::data::Data<'a>) -> Result, Debug>> { + let action = "put_object"; + let key = key.to_str().unwrap(); + let bucket_obj = fs.get_bucket(&bucket); + if bucket_obj.is_none() { + warn!("{action}: bucket {bucket} does not exist"); + return Ok(S3Response::not_found()) + } + + + let bucket = bucket_obj.unwrap(); + use rocket::data::ToByteUnit; + let mut data_stream = data.open(5.gigabytes()); + //test(&data_stream); + let write_result = bucket.save_object(&key, &mut data_stream, chunked.0).await; + if write_result.is_err() { + warn!("{action}: io error: {}", write_result.err().unwrap()); + return Ok(S3Response::server_error()); + } + let wr = write_result.unwrap(); + info!("{action}: `{key}` written {} bytes, md5 is `{}`", wr.0, wr.1); + //data_stream.stream_to(writer).await; + let mut ok = S3Response::ok(); + ok.add_header("ETag", &format!("{}", wr.1)); + return Ok(ok); + +} + + +#[delete("/?", rank=2)] +async fn cleanup_multipart_upload<'r>(bucket:String, + #[allow(unused)] + marker:AbortMultipartUpload, + fs:&State, + #[allow(non_snake_case)] + uploadId:String) -> Result, Debug>> { + + let action = "cleanup_multipart_upload"; + let bucket_obj = fs.get_bucket(&bucket); + if bucket_obj.is_none() { + warn!("{action}: bucket `{bucket}` not found"); + return Err(Debug(Box::new(s3error::S3Error::not_found()))); + } + let bucket = bucket_obj.unwrap(); + + bucket.cleanup_upload_id(&uploadId); + info!("{action}: cleaned up upload id {uploadId}"); + Ok(S3Response::ok()) +} + +#[allow(non_snake_case)] +#[post("//?", data="", rank=5)] +async fn complete_multipart_upload<'a>(bucket:String, key_full:std::path::PathBuf, uploadId:String, + #[allow(unused)] data:rocket::data::Data<'a>, fs:&State, + #[allow(unused)] marker:CompleteMultipartUpload +) -> Result, Debug>> { + let action = "complete_multipart_upload"; + let key = key_full.to_str().unwrap().to_string(); + let bucket_obj = fs.get_bucket(&bucket); + if 
bucket_obj.is_none() { + warn!("{action}: bucket `{bucket}` not found"); + return Ok(S3Response::not_found()); + } + + let bucket_obj = bucket_obj.unwrap(); + + let (size, hash) = bucket_obj.merge_part(&key, &uploadId).await?; + info!("{action}: merged `{uploadId}` into `{bucket}/{key}`, {size}+`{hash}`"); + let response = format!(r#" + + http://127.0.0.1:8000/{bucket}/{key} + {bucket} + {key} + "{hash}" + "#); + + let mut ok = S3Response::ok(); + ok.body(&response); + return Ok(ok); + +} + +#[post("//?", data="", rank=99)] +async fn create_multipart_upload<'a>(bucket:String, fs:&State, key_full:std::path::PathBuf, + #[allow(unused)] uploads:String, + #[allow(unused)] marker: CreateMultipartUpload, + #[allow(unused)] data:Data<'a>) -> Result, Debug>> { + let action = "create_multipart_upload"; + let key = key_full.to_str().unwrap().to_string(); + + let bucket_name = bucket.clone(); + info!("{action}: object: `{bucket}/{key}`"); + let bucket_obj = fs.get_bucket(&bucket); + if bucket_obj.is_none() { + warn!("{action}: bucket `{bucket}` not found"); + return Err(Debug(Box::new(s3error::S3Error::not_found()))); + } + + let bucket = bucket_obj.unwrap(); + let part = bucket.gen_upload_id(); + info!("{action}: uploadId -> `{part}`"); + let result = format!(r###" + + {bucket_name} + {key} + {part} + "###); + + let mut ok = S3Response::ok(); + ok.body(&result); + + return Ok(ok); +} + +#[get("//")] +async fn get_object(bucket:String, + #[allow(unused)] + marker: GetObject, + key_full:std::path::PathBuf, fs:&State, range:request_guards::RangeHeader) -> Option { + let action = "get_object"; + info!("{action}: bucket: `{bucket}`, key: `{key_full:?}`, range: `{range:?}`"); + let key = key_full.to_str().unwrap().to_string(); + let bucket_obj = fs.get_bucket(&bucket); + if bucket_obj.is_none() { + warn!("{action}: bucket `{bucket}` not found"); + return None; + } + let bucket_obj = bucket_obj.unwrap(); + let obj = bucket_obj.get_object_by_key(&key); + if obj.is_none() { + warn!("{action}: object `{bucket}/{key}` not found"); + return None; + } + let obj = obj.unwrap(); + let path = obj.path().as_path(); + let mut tokiof = tokio::fs::File::open(path).await.unwrap(); + //return Some(NamedFile::open(obj.path().as_path().to_str().unwrap()); + let etag = obj.checksum().unwrap(); + let last_modified = obj.meta().unwrap().modified().unwrap(); + let file_size = obj.len().unwrap(); + let mut content_length: usize = file_size as usize; + let begin = range.begin; + let end = range.end; + + if begin.is_some() { + use tokio::io::AsyncSeekExt; + let seek_target = begin.unwrap(); + let seek_result = tokiof.seek(SeekFrom::Start(seek_target as u64)).await; + match seek_result { + Err(some_err) => { + error!("{action}: seek to {seek_target} failed: {some_err:?}"); + return None; + }, + Ok(_) => {} + } + if end.is_some() { + // x -y range + content_length = end.unwrap() - begin.unwrap() + 1; + } else { + content_length = file_size as usize - begin.unwrap() as usize; + } + } + info!("{action}: delegating to `LimitedReader` with content-length -> `{content_length}`"); + let reader = LimitSeekerReader::new(tokiof, Some(content_length)); + return Some(CachedFile { + reader: reader, + etag, + modified_time: last_modified, + size: content_length, + file_name: obj.get_short_name(), + partial: begin.is_some() || end.is_some(), + }); +} + +fn parse_delete_keys(data:&str) -> Vec<(String, String)> { + let data = data.replace("\r", ""); + let data = data.replace("\n", ""); + println!("Parsing {data}"); + let p = 
r"\s*(\S+?)\s*(\s*(\S+?)\s*)?\s*"; + + let pattern = Regex::new(p).unwrap(); + let mut start = 0; + let mut result = vec!(); + loop { + let find_result = pattern.captures_at(&data, start); + if find_result.is_none() { + break; + } + + let find_result = find_result.unwrap(); + let match_full_length = find_result.get(0).unwrap().end(); + start = match_full_length; + + let key = find_result.get(1).unwrap().as_str(); + let version = find_result.get(3); + let mut version_string = ""; + if version.is_some() { + version_string = version.unwrap().as_str(); + } + result.push((String::from(key), String::from(version_string))); + } + info!("{:?}", result); + return result; +} + + +#[post("//?", data="", rank=4)] +async fn delete_object_multi_alt<'r>( + bucket:String, + #[allow(unused)] + delete:String, + empty:String, + #[allow(unused)] + marker:request_guards::DeleteObjects, fs:&State, + data: Data<'_>) -> Result, Debug>> { + return delete_object_multi(bucket, delete, marker, fs, data).await; + +} +#[post("/?", data="", rank=3)] +#[allow(non_snake_case)] +async fn delete_object_multi<'r>(bucket:String, + #[allow(unused)] + delete:String, + #[allow(unused)] + marker:request_guards::DeleteObjects, fs:&State, + data: Data<'_> +) -> Result, Debug>> { + let action = "delete_object_multi"; + let bucket_obj = fs.get_bucket(&bucket); + if bucket_obj.is_none() { + warn!("{action}: bucket `{bucket}` not found"); + return Ok(S3Response::not_found()); + } + + let bucket_obj = bucket_obj.unwrap(); + let body = data.open(10.mebibytes()).into_string().await; + if body.is_err() { + warn!("{action}: body parse error: {}", body.err().unwrap()); + return Ok(S3Response::invalid_request()); + } + let body = body.unwrap(); + let body = body.as_str(); + let keys_and_versions = parse_delete_keys(body); + let mut result = true; + for (key, version) in &keys_and_versions { + if version != "" { + warn!("{action}: versioning not supported but `{version}` is requested."); + } + let local_result = bucket_obj.delete_object(&key); + if !local_result { + result = false; + } + } + let mut item_string = String::new(); + for (key, version) in &keys_and_versions { + if version == "" { + let my_str = format!(r#" + {key} + "#, ); + item_string.push_str(&my_str); + } else { + let my_str = format!(r#" + false + {key} + {version} + "#); + item_string.push_str(&my_str); + } + } + let response = format!(r###" + + {item_string} + "###); + if result { + info!("{action}: delete `{bucket}/{keys_and_versions:?}` succeeded."); + } else { + warn!("{action}: delete `{bucket}/{keys_and_versions:?}` not found/error."); + } + let mut ok = S3Response::ok(); + ok.body(&response); + return Ok(ok); +} +#[delete("//?", rank=1)] +#[allow(non_snake_case)] +async fn delete_object<'r>(bucket:String, full_key:std::path::PathBuf, + #[allow(non_snake_case)] + #[allow(unused)] + versionId:Option, + #[allow(unused)] + marker:request_guards::DeleteObject, fs:&State +) -> Result, Debug>> { + let action = "delete_object"; + let key = full_key.to_str().unwrap().to_string(); + let bucket_obj = fs.get_bucket(&bucket); + if bucket_obj.is_none() { + warn!("{action}: bucket `{bucket}` not found"); + return Ok(S3Response::not_found()); + } + + let bucket_obj = bucket_obj.unwrap(); + let result = bucket_obj.delete_object(&key); + if result { + info!("{action}: delete `{bucket}/{key}` succeeded."); + return Ok(S3Response::ok()); + } else { + warn!("{action}: delete `{bucket}/{key}` not found/error."); + return Ok(S3Response::not_found()); + } +} + +#[put("/", data = "", rank=1)] 
+async fn create_bucket<'r>(bucket:String, + #[allow(unused)] + marker:CreateBucket, fs:&State, + #[allow(unused)] + data: rocket::data::Data<'r> +) -> Result, Debug>> { + let action = "create_bucket"; + info!("{action}: creating bucket `{bucket}`"); + let cr = fs.make_bucket(&bucket); + match cr { + Ok(_) => { + info!("{action}: creating bucket `{bucket}` ok"); + }, + Err(some_err) => { + error!("{action}: creating bucket `{bucket}` err: `{some_err}`"); + } + } + Ok(S3Response::ok()) +} + +#[get("/backdoor?")] +async fn backdoor(secret_token:String, + #[allow(unused)] auth:AuthCheckPassed) -> String { + let action = "backdoor"; + info!("{action}: backdoor accessed `{secret_token}` revealed"); + format!("Secret Revealed: {secret_token}") +} + + +#[get("/")] +async fn list_all_buckets<'r>(fs:&State) -> Result, Debug>> { + let action = "list_all_buckets"; + let mut buckets = String::new(); + let bucket_list = fs.get_all_buckets(); + info!("{action}: found {} buckets", bucket_list.len()); + for (name, bucket) in bucket_list.iter() { + let creation_time = bucket.get_creation_time()?; + let next_bucket = format!(r#" + + {creation_time} + {name} + "#); + buckets.push_str(&next_bucket); + + } + + let body = format!(r#" + {buckets}"#); + let mut ok = S3Response::ok(); + ok.body(&body); + + Ok(ok) +} + +pub fn setup_logger(log_conf_file: &str) -> Result<(), Box> { + if log_conf_file.len() > 0 { + log4rs::init_file(log_conf_file, Default::default())?; + println!("logs will be sent according to config file {log_conf_file}"); + } else { + println!("no log config file specified."); + use log::LevelFilter; + use log4rs::append::console::ConsoleAppender; + use log4rs::config::{Appender, Config, Root}; + let stdout = ConsoleAppender::builder().build(); + + let config = Config::builder() + .appender(Appender::builder().build("stdout", Box::new(stdout))) + .build(Root::builder().appender("stdout").build(LevelFilter::Info)) + .unwrap(); + let _ = log4rs::init_config(config)?; + } + Ok(()) +} + +#[launch] +fn rocket() -> _ { + use clap::Parser; + let args = CliArg::parse(); + + setup_logger(args.log4rs_config_file()).unwrap(); + //trace!("I AM TRACE"); + //debug!("I AM DEBUG"); + //info!("I AM INFO"); + //warn!("I AM WARN"); + //error!("I AM ERROR"); + + let mut base_fs = FS::new(); + base_fs.set_base(args.base_dir()); + base_fs.initialize(); + + + let figment = rocket::Config::figment() + .merge(("port", args.bind_port())) + .merge(("address", args.bind_address())) + .merge(("log_level", "normal")) + ; + rocket::custom(figment) + .manage(args) + .manage(base_fs) + .mount("/", routes![list_objects_ext, list_objects_ext_v2, get_object, + put_object, put_object_part, create_multipart_upload, + cleanup_multipart_upload, list_all_buckets, + create_bucket, delete_object, delete_object_multi, delete_object_multi_alt, get_bucket_location, + complete_multipart_upload, backdoor]) +} diff --git a/src/request_guards.rs b/src/request_guards.rs new file mode 100644 index 0000000..1216988 --- /dev/null +++ b/src/request_guards.rs @@ -0,0 +1,409 @@ +use std::convert::Infallible; +use lazy_static::lazy_static; +use regex::Regex; +use rocket::{request::FromRequest, http::Status}; + +#[derive(Debug, Clone)] +pub struct RangeHeader { + pub begin: Option, + pub end: Option, +} + +lazy_static! { + pub static ref RANGE_START:Regex = Regex::new(r"^bytes=(\d+)-$").unwrap(); + pub static ref RANGE_START_END:Regex = Regex::new(r"^bytes=(\d+)-(\d+)$").unwrap(); +} + +lazy_static! 
{ + pub static ref CREATE_BUCKET_URI_PATTERN:Regex = { + Regex::new(r"^[a-zA-Z0-9.\\-_]{1,255}$").unwrap() + }; +} + +#[derive(Debug)] +pub struct ChunkedEncoding(pub bool); + +impl RangeHeader { + fn match_str_and_get_group(input:&str, r:&Regex) -> Option> { + match r.captures(input) { + Some(group) => { + let number_of_groups = group.len(); + let mut result = Vec::with_capacity(number_of_groups); + for i in 0..number_of_groups { + result.push(group.get(i).unwrap().as_str().to_string()); + } + Some(result) + }, + None => None + } + } + fn parse_range(hdr:Option<&str>) -> (Option, Option) { + if hdr.is_none() { + return (None, None); + } + + let hdr_str = hdr.unwrap(); + + let start_try = Self::match_str_and_get_group(hdr_str, &RANGE_START); + if start_try.is_some() { + let start_try = start_try.unwrap(); + let g1 = start_try.get(1).unwrap(); + let parse_result:usize = g1.parse().unwrap(); + return (Some(parse_result), None); + } + + let start_end_try = Self::match_str_and_get_group(hdr_str, &RANGE_START_END); + if start_end_try.is_some() { + let start_end_try = start_end_try.unwrap(); + let g1 = start_end_try.get(1).unwrap(); + let g2 = start_end_try.get(2).unwrap(); + let parse1:usize = g1.parse().unwrap(); + let parse2:usize = g2.parse().unwrap(); + return (Some(parse1), Some(parse2)); + } + (None, None) + } + +} +#[rocket::async_trait] +impl<'r> FromRequest<'r> for ChunkedEncoding { + type Error = Infallible; + //Transfer-Encoding + async fn from_request(request: &'r rocket::Request<'_>) -> + rocket::request::Outcome { + let range = request.headers().get_one("transfer-encoding"); + let flag1 = range.is_some() && range.unwrap().to_ascii_lowercase() == "chunked"; + + let decoded_length = request.headers().get_one("x-amz-decoded-content-length"); + let content_length = request.headers().get_one("content-length"); + let mut flag2 = false; + if decoded_length.is_some() && content_length.is_some() { + let decoded_length = decoded_length.unwrap(); + let content_length = content_length.unwrap(); + if decoded_length.len() > 0 && content_length.len() > 0 { + let decoded_length:i32 = decoded_length.parse().unwrap(); + let content_length: i32 = content_length.parse().unwrap(); + flag2 = content_length > decoded_length; + } + + } + rocket::request::Outcome::Success( + ChunkedEncoding(flag1 || flag2) + ) + } +} +#[rocket::async_trait] +impl<'r> FromRequest<'r> for RangeHeader { + type Error = Infallible; + + async fn from_request(request: &'r rocket::Request<'_>) -> + rocket::request::Outcome { + + let range = request.headers().get_one("range"); + let (begin, end) = Self::parse_range(range); + + + rocket::request::Outcome::Success( + RangeHeader { + begin, end + } + ) + } +} + + + +#[derive(Debug, Clone)] +pub struct CreateBucket(bool); + +#[rocket::async_trait] +impl<'r> FromRequest<'r> for CreateBucket { + type Error = Infallible; + + async fn from_request(request: &'r rocket::Request<'_>) -> + rocket::request::Outcome { + let method = request.method().as_str(); + let is_put = method == "PUT"; + let segments = request.uri().path().segments().len(); + if is_put && segments == 1 { + rocket::request::Outcome::Success( + CreateBucket(true) + ) + } else { + rocket::request::Outcome::Forward(()) + } + } +} +pub struct ListBucket; +#[rocket::async_trait] +impl<'r> FromRequest<'r> for ListBucket { + type Error = Infallible; + + #[allow(unused)] + async fn from_request(request: &'r rocket::Request<'_>) -> + rocket::request::Outcome { + rocket::request::Outcome::Success( + ListBucket + ) + } +} + +pub 
struct PutObject; +#[rocket::async_trait] +impl<'r> FromRequest<'r> for PutObject { + type Error = Infallible; + + async fn from_request(request: &'r rocket::Request<'_>) -> + rocket::request::Outcome { + let segments = request.uri().path().segments().len(); + + let method = request.method().as_str(); + let is_put = method == "PUT"; + let upload_id = request.query_value::("uploadId"); + let part_number = request.query_value::("partNumber"); + if is_put && part_number.is_none() && upload_id.is_none() && segments > 1 { + rocket::request::Outcome::Success( + PutObject + ) + } else { + rocket::request::Outcome::Forward(()) + } + } +} +pub struct CreateMultipartUpload; +#[rocket::async_trait] +impl<'r> FromRequest<'r> for CreateMultipartUpload { + type Error = Infallible; + + async fn from_request(request: &'r rocket::Request<'_>) -> + rocket::request::Outcome { + let method = request.method().as_str(); + let is_post = method == "POST"; + let segments = request.uri().path().segments().len(); + let uploads = request.query_value::("uploads"); + if is_post && segments > 1 && uploads.is_some() { + rocket::request::Outcome::Success( + CreateMultipartUpload + ) + } else { + rocket::request::Outcome::Forward(()) + } + } +} +pub struct UploadMultipart; + +pub struct AbortMultipartUpload; +#[rocket::async_trait] +impl<'r> FromRequest<'r> for AbortMultipartUpload { + type Error = Infallible; + + async fn from_request(request: &'r rocket::Request<'_>) -> + rocket::request::Outcome { + let method = request.method().as_str(); + let is_delete = method == "DELETE"; + let segments = request.uri().path().segments().len(); + let upload_id = request.query_value::("uploadId"); + if is_delete && segments == 1 && upload_id.is_some() { + rocket::request::Outcome::Success( + AbortMultipartUpload + ) + } else { + rocket::request::Outcome::Forward(()) + } + } +} + +pub struct DeleteObjects; +#[rocket::async_trait] +impl<'r> FromRequest<'r> for DeleteObjects { + type Error = Infallible; + + async fn from_request(request: &'r rocket::Request<'_>) -> + rocket::request::Outcome { + let method = request.method().as_str(); + let is_delete = method == "POST"; + let segments = request.uri().path().segments().len(); + let delete = request.query_value::("delete"); + if is_delete && segments <= 2 && delete.is_some(){ + rocket::request::Outcome::Success( + DeleteObjects + ) + } else { + rocket::request::Outcome::Forward(()) + } + } +} +pub struct DeleteObject; +#[rocket::async_trait] +impl<'r> FromRequest<'r> for DeleteObject { + type Error = Infallible; + + async fn from_request(request: &'r rocket::Request<'_>) -> + rocket::request::Outcome { + let method = request.method().as_str(); + let is_delete = method == "DELETE"; + let segments = request.uri().path().segments().len(); + if is_delete && segments > 1 { + rocket::request::Outcome::Success( + DeleteObject + ) + } else { + rocket::request::Outcome::Forward(()) + } + } +} + +pub struct CompleteMultipartUpload; +#[rocket::async_trait] +impl<'r> FromRequest<'r> for CompleteMultipartUpload { + type Error = Infallible; + + async fn from_request(request: &'r rocket::Request<'_>) -> + rocket::request::Outcome { + let method = request.method().as_str(); + let is_post = method == "POST"; + let segments = request.uri().path().segments().len(); + let upload_id = request.query_value::("uploadId"); + if is_post && segments > 1 && upload_id.is_some() { + rocket::request::Outcome::Success( + CompleteMultipartUpload + ) + } else { + rocket::request::Outcome::Forward(()) + } + } +} + +pub 
struct GetObject; +#[rocket::async_trait] +impl<'r> FromRequest<'r> for GetObject { + type Error = Infallible; + + async fn from_request(request: &'r rocket::Request<'_>) -> + rocket::request::Outcome { + let method = request.method().as_str(); + let is_get = method == "GET"; + let segments = request.uri().path().segments().len(); + let location = request.query_value::("location"); + if is_get && location.is_none() && segments > 1{ + rocket::request::Outcome::Success( + GetObject + ) + } else { + rocket::request::Outcome::Forward(()) + } + } +} + +pub struct GetBucketLocation; +#[rocket::async_trait] +impl<'r> FromRequest<'r> for GetBucketLocation { + type Error = Infallible; + + async fn from_request(request: &'r rocket::Request<'_>) -> + rocket::request::Outcome { + let method = request.method().as_str(); + let is_delete = method == "GET"; + let segments = request.uri().path().segments().len(); + let location = request.query_value::("location"); + if is_delete && location.is_some() && segments == 1{ + rocket::request::Outcome::Success( + GetBucketLocation + ) + } else { + rocket::request::Outcome::Forward(()) + } + } +} + +pub struct PutObjectPart; +#[rocket::async_trait] +impl<'r> FromRequest<'r> for PutObjectPart { + type Error = Infallible; + + async fn from_request(request: &'r rocket::Request<'_>) -> + rocket::request::Outcome { + let method = request.method().as_str(); + let is_put = method == "PUT"; + let segments = request.uri().path().segments().len(); + + let upload_id = request.query_value::("uploadId"); + let part_number = request.query_value::("partNumber"); + if is_put && part_number.is_some() && segments > 1 && upload_id.is_some() { + rocket::request::Outcome::Success( + PutObjectPart + ) + } else { + rocket::request::Outcome::Forward(()) + } + } +} +pub struct ListObjects; +#[rocket::async_trait] +impl<'r> FromRequest<'r> for ListObjects { + type Error = Infallible; + + async fn from_request(request: &'r rocket::Request<'_>) -> + rocket::request::Outcome { + let method = request.method().as_str(); + let is_get = method == "GET"; + let segments = request.uri().path().segments().len(); + let location = request.query_value::("location"); + let list_type = request.query_value::("list-type"); + if is_get && location.is_none() && segments > 0 && list_type.is_none(){ + rocket::request::Outcome::Success( + ListObjects + ) + } else { + rocket::request::Outcome::Forward(()) + } + } +} + + +pub struct ListObjectsV2; +#[rocket::async_trait] +impl<'r> FromRequest<'r> for ListObjectsV2 { + type Error = Infallible; + + async fn from_request(request: &'r rocket::Request<'_>) -> + rocket::request::Outcome { + let method = request.method().as_str(); + let is_get = method == "GET"; + let segments = request.uri().path().segments().len(); + let location = request.query_value::("location"); + let list_type = request.query_value::("list-type"); + if is_get && location.is_none() && segments > 0 && list_type.is_some() && list_type.unwrap().unwrap() == "2" { + rocket::request::Outcome::Success( + ListObjectsV2 + ) + } else { + rocket::request::Outcome::Forward(()) + } + } +} +pub struct AuthCheckPassed; +#[derive(Debug)] +pub enum AuthError { + NoKeySupplied, + InvalidKey, + InvalidSignature, +} +#[rocket::async_trait] +impl<'r> FromRequest<'r> for AuthCheckPassed { + type Error = AuthError; + + async fn from_request(request: &'r rocket::Request<'_>) -> + rocket::request::Outcome { + let api_key = request.headers().get_one("x-api-key"); + if api_key.is_some() { + if api_key.unwrap() == "secret" { 
+ return rocket::request::Outcome::Success(AuthCheckPassed); + } else { + return rocket::request::Outcome::Failure((Status::Forbidden, AuthError::InvalidKey)); + } + } else { + return rocket::request::Outcome::Failure((Status::Forbidden, AuthError::NoKeySupplied)); + } + } +} diff --git a/src/s3error.rs b/src/s3error.rs new file mode 100644 index 0000000..944d9be --- /dev/null +++ b/src/s3error.rs @@ -0,0 +1,50 @@ +use std::fmt; +#[derive(Debug)] +pub struct S3Error { + pub kind:ErrorKind, + pub message:String, +} + +#[derive(Debug)] +pub enum ErrorKind { + InvalidKey, + NotFound, + InputOutput +} + +impl S3Error { + pub fn invalid_key() -> Self { + S3Error { + kind: ErrorKind::InvalidKey, + message: String::from("") + } + } + + pub fn not_found() -> Self { + S3Error { + kind: ErrorKind::NotFound, + message: String::from("") + } + } + + pub fn io_error() -> Self { + S3Error { + kind: ErrorKind::InputOutput, + message: String::from("") + } + } +} +impl fmt::Display for ErrorKind { + fn fmt(&self, w: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { + return write!(w, "{:?}", self); + } +} +impl fmt::Display for S3Error { + fn fmt(&self, w: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { + return write!(w, "{:?}", self); + } +} + +impl std::error::Error for S3Error { + +} \ No newline at end of file diff --git a/src/s3resp.rs b/src/s3resp.rs new file mode 100644 index 0000000..40d57c9 --- /dev/null +++ b/src/s3resp.rs @@ -0,0 +1,77 @@ +use rocket::{http::Status, response::Responder}; +use std::{io::Cursor, convert::Infallible}; +use rocket::http::Header; +use rocket::response::Debug; +pub struct S3Response<'r> { + body: String, + headers: Vec>, + status: Status, +} + +impl<'r,'o> Responder<'r, 'r> for S3Response<'r> + where 'o:'r +{ + fn respond_to(self, _req: &'r rocket::Request) -> Result, Status> { + let size = self.body.len(); + use rocket::response::Builder; + let mut resp:Builder<'r> = rocket::Response::build(); + resp.status(self.status); + resp.sized_body(size, Cursor::new(self.body)); + for next in self.headers { + resp.header(next); + } + + return Ok(resp.finalize()); + } +} + +impl<'r> Into, Debug>> for S3Response<'r> { + fn into(self) -> Result, Debug> { + return Ok(self); + } +} +impl <'r> S3Response<'r> { + pub fn ok() -> Self { + S3Response { body: "".to_string(), headers: vec![], status: Status::Ok } + } + + pub fn not_found() -> Self { + S3Response { + body: "".to_string(), + headers: vec!(), + status: Status::NotFound + } + } + + pub fn invalid_request() -> Self { + S3Response { + body: "".to_string(), + headers: vec![], + status: Status::BadRequest + } + } + + pub fn server_error() -> Self { + S3Response { + body: "".to_string(), + headers: vec![], + status: Status::InternalServerError + } + } + + pub fn add_header(&mut self, key:&str, value:&str) -> &mut Self { + let new_header = Header::new(key.to_string(), value.to_string()); + self.headers.push(new_header); + return self; + } + + pub fn status_code(&mut self, code: u16) -> &mut Self { + self.status = Status::new(code); + return self; + } + + pub fn body(&mut self, text:&str) -> &mut Self { + self.body = String::from(text); + return self; + } +} \ No newline at end of file diff --git a/src/sequencing.rs b/src/sequencing.rs new file mode 100644 index 0000000..88f1a67 --- /dev/null +++ b/src/sequencing.rs @@ -0,0 +1,24 @@ +use std::sync::atomic::AtomicU64; + +#[derive(Debug)] +pub struct Sequence { + seq:AtomicU64 +} + +impl Sequence { + pub fn next(&self) -> String { + let start = 
std::time::SystemTime::now(); + let time_part = start + .duration_since(std::time::UNIX_EPOCH) + .expect("Time went backwards") + .as_millis(); + return format!("{}_{}", time_part, self.seq.fetch_add(1, std::sync::atomic::Ordering::SeqCst)); + } +} +impl Default for Sequence { + fn default() -> Self { + return Sequence { + seq: AtomicU64::new(0), + } + } +} \ No newline at end of file diff --git a/src/states.rs b/src/states.rs new file mode 100644 index 0000000..be914dd --- /dev/null +++ b/src/states.rs @@ -0,0 +1,46 @@ +use clap::Parser; + +#[cfg(debug_assertions)] +static BASE_DIR:&str = "./rusts3-data-debug"; + +#[cfg(not(debug_assertions))] +static BASE_DIR:&str = "./rusts3-data"; + +#[cfg(debug_assertions)] +static PORT:i32 = 8001; + +#[cfg(not(debug_assertions))] +static PORT:i32 = 8000; + +#[derive(Parser, Debug, Clone)] +pub struct CliArg { + #[arg(short, long, default_value_t = String::from(BASE_DIR))] + base_dir: String, + + #[arg(long, default_value_t = String::from("0.0.0.0"), help="Bind IP address")] + bind_address: String, + + #[arg(long, default_value_t = PORT, help="Bind port number")] + bind_port: i32, + + #[arg(long, default_value_t = String::from(""), help="Log4rs config file")] + log_conf:String +} + +impl CliArg { + pub fn bind_port(&self) -> i32 { + self.bind_port + } + + pub fn bind_address(&self) -> &str { + &self.bind_address + } + + pub fn base_dir(&self) -> &str { + &self.base_dir + } + + pub fn log4rs_config_file(&self) -> &str { + &self.log_conf + } +} \ No newline at end of file diff --git a/src/test.rs b/src/test.rs new file mode 100644 index 0000000..b550bf5 --- /dev/null +++ b/src/test.rs @@ -0,0 +1,29 @@ +pub mod s3error; +pub mod chunk_to_raw; +pub mod sequencing; +pub mod fsapi; +use fsapi::{Bucket, FS}; +use tokio::fs::File; +use std::path::PathBuf; +#[tokio::main] +async fn main() { + let mut fs = FS::new(); + fs.set_base("rusts3-data-debug"); + fs.initialize(); + let b = fs.get_bucket("abcde").unwrap(); + let result = b.list_objects("te", "", 10); + for next in result { + println!("{} -> {:?}", next.object_key(), next.kind()); + } + + let mut v = Vec::<i32>::new(); + test(&mut v); +} + +fn test(v:&mut Vec<i32>) { + v.push(1); + if v.len() < 5 { + test(v); + } +} +
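
A few notes on the patch follow, each with a small self-contained sketch. ioutil::delete_file_and_meta discards both remove_file results with "let _ =" bindings and always returns Ok, so a permission error or a locked file disappears silently. The variant below is only a sketch: the ".meta" literal stands in for Bucket::META_SUFFIX, whose actual value is defined in fsapi.rs rather than in this diff. It still tolerates a missing sidecar file but surfaces real I/O failures.

    use std::io;

    // Stand-in for Bucket::META_SUFFIX; the real constant lives in fsapi.rs and is not shown in this diff.
    const META_SUFFIX: &str = ".meta";

    // Deleting a file that is already gone is fine; any other I/O failure is reported.
    fn remove_if_exists(path: &str) -> io::Result<()> {
        match std::fs::remove_file(path) {
            Ok(()) => Ok(()),
            Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(()),
            Err(e) => Err(e),
        }
    }

    // Same shape as ioutil::delete_file_and_meta, but propagates real errors instead of discarding them.
    fn delete_file_and_meta(path: &str) -> io::Result<()> {
        remove_if_exists(path)?;
        remove_if_exists(&format!("{path}{META_SUFFIX}"))
    }

    fn main() -> io::Result<()> {
        std::fs::write("demo.bin", b"payload")?;
        delete_file_and_meta("demo.bin")?; // the missing .meta sidecar is not an error
        Ok(())
    }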
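
The entry XML built by format() and the list and delete response templates in main.rs interpolate object keys directly into the body. A key containing '&', '<' or '>' (legal, if discouraged, in S3 keys) would produce malformed XML that strict clients reject. A minimal escaping helper, shown as a sketch rather than something the patch currently calls, is cheap insurance:

    // Minimal XML text-node escaping for object keys interpolated into response templates.
    // '&' must be replaced first so the other entities are not double-escaped.
    fn xml_escape(key: &str) -> String {
        key.replace('&', "&amp;")
            .replace('<', "&lt;")
            .replace('>', "&gt;")
            .replace('"', "&quot;")
    }

    fn main() {
        assert_eq!(xml_escape("reports&2024/<draft>.txt"),
                   "reports&amp;2024/&lt;draft&gt;.txt");
    }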
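
Both list handlers set IsTruncated to true and emit a next marker or continuation token whenever the page is non-empty, even when the listing is already exhausted, so a well-behaved client always pays one extra request that comes back empty. A stricter heuristic, sketched below and still conservative (an exactly full final page will also claim truncation), is to report truncation only when the page came back full:

    // Report truncation only when the page is full; an empty or short page means the walk is done.
    fn is_truncated(returned: usize, max_keys: usize) -> bool {
        max_keys > 0 && returned == max_keys
    }

    fn main() {
        assert!(is_truncated(100, 100));  // full page: there may be more keys
        assert!(!is_truncated(37, 100));  // short page: listing exhausted
        assert!(!is_truncated(0, 100));   // empty page: nothing left at all
    }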
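
get_object computes the Content-Length for ranged reads inline; pulled out into a function, the arithmetic is easier to check. Both bounds of a bytes=b-e range are inclusive, hence the +1. This is a restatement of the handler's math, not a new code path:

    fn ranged_content_length(file_size: usize, begin: Option<usize>, end: Option<usize>) -> usize {
        match (begin, end) {
            (Some(b), Some(e)) => e - b + 1,     // "bytes=b-e": inclusive on both ends
            (Some(b), None) => file_size - b,    // "bytes=b-": read through to EOF
            _ => file_size,                      // no usable Range header: the whole object
        }
    }

    fn main() {
        assert_eq!(ranged_content_length(1000, Some(0), Some(499)), 500);
        assert_eq!(ranged_content_length(1000, Some(900), None), 100);
        assert_eq!(ranged_content_length(1000, None, None), 1000);
    }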
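
parse_delete_keys walks the DeleteObjects request body with a regex and repeated captures_at calls instead of an XML parser. The stand-in below is hypothetical: the element names and the exact pattern are assumptions about the standard Object/Key/VersionId body shape and are not copied from the patch. It shows the same advance-past-each-match loop, including the empty version string used when no VersionId is present:

    use regex::Regex;

    fn parse_delete_keys(data: &str) -> Vec<(String, String)> {
        // Strip newlines first so the lazy (\S+?) captures cannot straddle line breaks.
        let data = data.replace("\r", "").replace("\n", "");
        // Hypothetical pattern for <Object><Key>...</Key><VersionId>...</VersionId></Object> blocks.
        let p = r"<Object>\s*<Key>(\S+?)</Key>\s*(<VersionId>\s*(\S+?)\s*</VersionId>)?\s*</Object>";
        let pattern = Regex::new(p).unwrap();

        let mut start = 0;
        let mut result = Vec::new();
        // captures_at resumes the scan at `start`, so each iteration consumes one Object block.
        while let Some(caps) = pattern.captures_at(&data, start) {
            start = caps.get(0).unwrap().end();
            let key = caps.get(1).unwrap().as_str().to_string();
            let version = caps.get(3).map(|m| m.as_str().to_string()).unwrap_or_default();
            result.push((key, version));
        }
        result
    }

    fn main() {
        let body = "<Delete>\
            <Object><Key>a.txt</Key></Object>\
            <Object><Key>b.txt</Key><VersionId>v1</VersionId></Object>\
            </Delete>";
        assert_eq!(
            parse_delete_keys(body),
            vec![("a.txt".to_string(), "".to_string()), ("b.txt".to_string(), "v1".to_string())]
        );
    }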
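
The request guards exist because nearly every S3 operation shares the same two URL shapes, /bucket and /bucket/key, and is distinguished only by HTTP method, path depth, and which query parameters are present; each guard forwards when its predicate fails so Rocket falls through to the next route by rank. The function below collapses that dispatch into one table for illustration. It is a simplified reading of the guards (ListObjectsV2, for example, additionally requires list-type=2), not a replacement for them:

    // One-glance summary of how method + path depth + query keys select the S3 operation.
    fn classify(method: &str, segments: usize, query_keys: &[&str]) -> Option<&'static str> {
        let has = |k: &str| query_keys.contains(&k);
        let op = match method {
            "PUT" if segments == 1 => "CreateBucket",
            "PUT" if segments > 1 && has("uploadId") && has("partNumber") => "PutObjectPart",
            "PUT" if segments > 1 && !has("uploadId") && !has("partNumber") => "PutObject",
            "POST" if segments > 1 && has("uploads") => "CreateMultipartUpload",
            "POST" if segments > 1 && has("uploadId") => "CompleteMultipartUpload",
            "POST" if segments <= 2 && has("delete") => "DeleteObjects",
            "DELETE" if segments == 1 && has("uploadId") => "AbortMultipartUpload",
            "DELETE" if segments > 1 => "DeleteObject",
            "GET" if segments == 0 => "ListAllBuckets", // the bare GET / route needs no guard
            "GET" if segments == 1 && has("location") => "GetBucketLocation",
            "GET" if segments > 1 && !has("location") => "GetObject",
            "GET" if !has("location") && has("list-type") => "ListObjectsV2",
            "GET" if !has("location") => "ListObjects",
            _ => return None,
        };
        Some(op)
    }

    fn main() {
        assert_eq!(classify("PUT", 1, &[]), Some("CreateBucket"));
        assert_eq!(classify("PUT", 2, &["uploadId", "partNumber"]), Some("PutObjectPart"));
        assert_eq!(classify("GET", 1, &["list-type"]), Some("ListObjectsV2"));
        assert_eq!(classify("DELETE", 1, &[]), None);
    }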
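
The ChunkedEncoding guard decides whether a body needs aws-chunked decoding from two signals: an explicit Transfer-Encoding: chunked header, or a Content-Length larger than x-amz-decoded-content-length, the difference presumably being the per-chunk framing that chunk_to_raw strips out. The same heuristic as a plain function, sketched here for unit testing; it also parses defensively instead of unwrapping header values:

    fn is_aws_chunked(
        transfer_encoding: Option<&str>,
        content_length: Option<&str>,
        decoded_length: Option<&str>,
    ) -> bool {
        // Signal 1: the client labels the body as chunked outright.
        let flag1 = transfer_encoding
            .map(|v| v.eq_ignore_ascii_case("chunked"))
            .unwrap_or(false);

        // Signal 2: the wire length exceeds the declared decoded length.
        let flag2 = match (content_length, decoded_length) {
            (Some(cl), Some(dl)) if !cl.is_empty() && !dl.is_empty() => {
                match (cl.parse::<i64>(), dl.parse::<i64>()) {
                    (Ok(cl), Ok(dl)) => cl > dl,
                    _ => false, // unparsable headers: assume not chunked rather than panic
                }
            }
            _ => false,
        };

        flag1 || flag2
    }

    fn main() {
        assert!(is_aws_chunked(Some("chunked"), None, None));
        assert!(is_aws_chunked(None, Some("1300"), Some("1024")));
        assert!(!is_aws_chunked(None, Some("1024"), Some("1024")));
        assert!(!is_aws_chunked(None, None, Some("1024")));
    }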
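
RangeHeader recognises exactly two shapes, bytes=N- and bytes=N-M; anything else, including the suffix form bytes=-N, degrades to a full-object read. The same parsing outside the guard, as a self-contained sketch that assumes only the regex crate (the guard keeps the compiled patterns in lazy_static instead of rebuilding them per call):

    use regex::Regex;

    fn parse_range(hdr: Option<&str>) -> (Option<usize>, Option<usize>) {
        let start_only = Regex::new(r"^bytes=(\d+)-$").unwrap();
        let start_end = Regex::new(r"^bytes=(\d+)-(\d+)$").unwrap();

        let Some(hdr) = hdr else { return (None, None) };

        if let Some(caps) = start_only.captures(hdr) {
            // Open-ended range: read from begin to the end of the object.
            return (caps[1].parse().ok(), None);
        }
        if let Some(caps) = start_end.captures(hdr) {
            // Closed range: both bounds are inclusive.
            return (caps[1].parse().ok(), caps[2].parse().ok());
        }
        (None, None)
    }

    fn main() {
        assert_eq!(parse_range(Some("bytes=100-")), (Some(100), None));
        assert_eq!(parse_range(Some("bytes=0-499")), (Some(0), Some(499)));
        assert_eq!(parse_range(Some("bytes=-500")), (None, None)); // suffix form is not supported
        assert_eq!(parse_range(None), (None, None));
    }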
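
Sequence::next in src/sequencing.rs builds identifiers from wall-clock milliseconds plus a process-local atomic counter, so two calls landing in the same millisecond still differ; presumably this is what bucket.gen_upload_id() builds on, though that method lives in fsapi.rs outside this diff. A self-contained copy, with a derived Default standing in for the hand-written impl, to show the intended usage:

    use std::sync::atomic::{AtomicU64, Ordering};
    use std::time::{SystemTime, UNIX_EPOCH};

    // Same fields and logic as src/sequencing.rs; the derived Default yields the
    // same zero-initialised counter as the hand-written impl in the patch.
    #[derive(Debug, Default)]
    struct Sequence {
        seq: AtomicU64,
    }

    impl Sequence {
        fn next(&self) -> String {
            let millis = SystemTime::now()
                .duration_since(UNIX_EPOCH)
                .expect("Time went backwards")
                .as_millis();
            // The millisecond prefix keeps ids roughly sortable; the counter suffix breaks ties.
            format!("{}_{}", millis, self.seq.fetch_add(1, Ordering::SeqCst))
        }
    }

    fn main() {
        let seq = Sequence::default();
        let (a, b) = (seq.next(), seq.next());
        assert_ne!(a, b); // distinct even within the same millisecond
        println!("{a} {b}");
    }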