From e42ee5ff463eeded604124d138d5bc2bf34cba83 Mon Sep 17 00:00:00 2001
From: Mike Solar
Date: Fri, 28 Nov 2025 17:30:31 +0800
Subject: [PATCH 01/10] Add S3 Support.

Add S3-compatible object storage as an alternative media storage
backend, alongside the existing local filesystem storage.

Server:
 - New `storage` module with a `StorageBackend` enum (LocalFs / S3).
 - New CLI flags: --storage-backend (local|s3), --s3-endpoint,
   --s3-region, --s3-bucket, --s3-access-key, --s3-secret-key,
   --s3-prefix, --s3-public-url.
 - Playback, thumbnail and subtitle URLs are now built from the
   backend's media base URL instead of hardcoding url_base + "/videos".
 - Ingested originals and subtitle files are mirrored to the bucket
   when the S3 backend is active.

Client:
 - New "Transcode" checkbox in the upload UI. The preference is sent
   as the X-CLAPSHOT-TRANSCODE header and mapped to the new
   TranscodePreference (Auto/Force/Skip) in the video pipeline.
---
 .gitignore                                    |   1 +
 client/src/App.svelte                         |   9 +
 .../src/lib/asset_browser/FileUpload.svelte   |   6 +-
 server/Cargo.lock                             | 887 +++++++++++++++++-
 server/Cargo.toml                             |   3 +
 server/src/api_server/file_upload.rs          |  21 +-
 server/src/api_server/server_state.rs         |   6 +
 server/src/api_server/test_utils.rs           |   3 +
 server/src/api_server/user_session.rs         |   2 +-
 server/src/api_server/ws_handers.rs           |   6 +-
 server/src/grpc/db_models.rs                  |  21 +-
 server/src/grpc/grpc_server.rs                |   6 +-
 server/src/lib.rs                             |  11 +-
 server/src/main.rs                            |  63 +-
 server/src/storage.rs                         | 187 ++++
 server/src/tests/integration_test.rs          |   6 +-
 server/src/video_pipeline/incoming_monitor.rs |   7 +-
 server/src/video_pipeline/metadata_reader.rs  |  15 +-
 server/src/video_pipeline/mod.rs              |  43 +-
 19 files changed, 1217 insertions(+), 86 deletions(-)
 create mode 100644 server/src/storage.rs

diff --git a/.gitignore b/.gitignore
index c083ba63..1f87460a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
 dist_deb/
 .DS_Store
 .claude
+.idea
\ No newline at end of file
diff --git a/client/src/App.svelte b/client/src/App.svelte
index 639c4558..8e202f3c 100644
--- a/client/src/App.svelte
+++ b/client/src/App.svelte
@@ -433,6 +433,7 @@ else
 
 let uploadUrl: string = $state("");
+let transcodePreferred: boolean = $state(true);
 
 // -------------------------------------------------------------
 
@@ -1131,11 +1132,19 @@ function onMediaFileListPopupAction(e: { detail: { action: Proto3.ActionDef, ite
 {#if pit.folderListing.allowUpload}
+                <label>
+                    <input type="checkbox" bind:checked={transcodePreferred}/> Transcode
+                </label>
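+                <!-- NOTE: transcodePreferred must also be passed to the FileUpload component below;
+                     it is sent with each upload as the X-CLAPSHOT-TRANSCODE header (see FileUpload.svelte). -->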
diff --git a/client/src/lib/asset_browser/FileUpload.svelte b/client/src/lib/asset_browser/FileUpload.svelte index 0affe9b9..fdedffa7 100644 --- a/client/src/lib/asset_browser/FileUpload.svelte +++ b/client/src/lib/asset_browser/FileUpload.svelte @@ -14,6 +14,7 @@ let files = { // Passed to HTTP POST request: listingData: Object; mediaFileAddedAction: string|undefined; + transcodePreferred?: boolean; children?: import('svelte').Snippet; } @@ -21,6 +22,7 @@ let files = { postUrl, listingData, mediaFileAddedAction, + transcodePreferred = true, children }: Props = $props(); @@ -79,11 +81,13 @@ function upload() { ajax.addEventListener("abort", abortHandler, false); ajax.open("POST", postUrl); ajax.setRequestHeader("X-FILE-NAME", encodeURIComponent(file.name)); + ajax.setRequestHeader("X-CLAPSHOT-TRANSCODE", transcodePreferred ? "true" : "false"); let upload_cookies = { ...LocalStorageCookies.getAllNonExpired() }; if (mediaFileAddedAction) upload_cookies["media_file_added_action"] = mediaFileAddedAction; upload_cookies["listing_data_json"] = JSON.stringify(listingData); + upload_cookies["transcode_preference"] = transcodePreferred ? "true" : "false"; ajax.setRequestHeader("X-CLAPSHOT-COOKIES", JSON.stringify(upload_cookies)); ajax.send(formdata); @@ -143,4 +147,4 @@ function onDropFiles(e: any) { background-color: rgb(25, 33, 52); transition: background-color 0.1s ease-in-out } - \ No newline at end of file + diff --git a/server/Cargo.lock b/server/Cargo.lock index a3213fe9..ab067f21 100644 --- a/server/Cargo.lock +++ b/server/Cargo.lock @@ -38,6 +38,12 @@ dependencies = [ "memchr", ] +[[package]] +name = "allocator-api2" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + [[package]] name = "android_system_properties" version = "0.1.5" @@ -296,6 +302,378 @@ version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" +[[package]] +name = "aws-config" +version = "1.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c478f5b10ce55c9a33f87ca3404ca92768b144fc1bfdede7c0121214a8283a25" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-sdk-sts", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "fastrand", + "http 1.3.1", + "time", + "tokio", + "tracing", + "url", +] + +[[package]] +name = "aws-credential-types" +version = "1.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b01c9521fa01558f750d183c8c68c81b0155b9d193a4ba7f84c36bd1b6d04a06" +dependencies = [ + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "zeroize", +] + +[[package]] +name = "aws-lc-rs" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b5ce75405893cd713f9ab8e297d8e438f624dde7d706108285f7e17a25a180f" +dependencies = [ + "aws-lc-sys", + "zeroize", +] + +[[package]] +name = "aws-lc-sys" +version = "0.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "179c3777a8b5e70e90ea426114ffc565b2c1a9f82f6c4a0c5a34aa6ef5e781b6" +dependencies = [ + "cc", + "cmake", + "dunce", + "fs_extra", +] + +[[package]] +name = "aws-runtime" +version = "1.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c034a1bc1d70e16e7f4e4caf7e9f7693e4c9c24cd91cf17c2a0b21abaebc7c8b" +dependencies = [ + "aws-credential-types", + "aws-sigv4", + "aws-smithy-async", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "fastrand", + "http 0.2.12", + "http-body 0.4.6", + "percent-encoding", + "pin-project-lite", + "tracing", + "uuid", +] + +[[package]] +name = "aws-sdk-s3" +version = "1.103.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af040a86ae4378b7ed2f62c83b36be1848709bbbf5757ec850d0e08596a26be9" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-sigv4", + "aws-smithy-async", + "aws-smithy-checksums", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-smithy-xml", + "aws-types", + "bytes", + "fastrand", + "hex", + "hmac", + "http 0.2.12", + "http 1.3.1", + "http-body 0.4.6", + "lru", + "percent-encoding", + "regex-lite", + "sha2", + "tracing", + "url", +] + +[[package]] +name = "aws-sdk-sts" +version = "1.84.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91abcdbfb48c38a0419eb75e0eac772a4783a96750392680e4f3c25a8a0535b9" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-query", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-smithy-xml", + "aws-types", + "fastrand", + "http 0.2.12", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sigv4" +version = "1.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c35452ec3f001e1f2f6db107b6373f1f48f05ec63ba2c5c9fa91f07dad32af11" +dependencies = [ + "aws-credential-types", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "form_urlencoded", + "hex", + "hmac", + "http 0.2.12", + "http 1.3.1", + "percent-encoding", + "sha2", + "time", + "tracing", +] + +[[package]] +name = "aws-smithy-async" +version = "1.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "127fcfad33b7dfc531141fda7e1c402ac65f88aca5511a4d31e2e3d2cd01ce9c" +dependencies = [ + "futures-util", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "aws-smithy-checksums" +version = "0.63.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb9a26b2831e728924ec0089e92697a78a2f9cdcf90d81e8cfcc6a6c85080369" +dependencies = [ + "aws-smithy-http", + "aws-smithy-types", + "bytes", + "crc-fast", + "hex", + "http 0.2.12", + "http-body 0.4.6", + "md-5", + "pin-project-lite", + "sha1", + "sha2", + "tracing", +] + +[[package]] +name = "aws-smithy-eventstream" +version = "0.60.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e29a304f8319781a39808847efb39561351b1bb76e933da7aa90232673638658" +dependencies = [ + "aws-smithy-types", + "bytes", + "crc32fast", +] + +[[package]] +name = "aws-smithy-http" +version = "0.62.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "445d5d720c99eed0b4aa674ed00d835d9b1427dd73e04adaf2f94c6b2d6f9fca" +dependencies = [ + "aws-smithy-eventstream", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "bytes-utils", + "futures-core", + "futures-util", + "http 0.2.12", + "http 1.3.1", + "http-body 0.4.6", + "percent-encoding", + "pin-project-lite", + 
"pin-utils", + "tracing", +] + +[[package]] +name = "aws-smithy-http-client" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f108f1ca850f3feef3009bdcc977be201bca9a91058864d9de0684e64514bee0" +dependencies = [ + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "h2 0.3.27", + "h2 0.4.10", + "http 0.2.12", + "http 1.3.1", + "http-body 0.4.6", + "hyper 0.14.32", + "hyper 1.6.0", + "hyper-rustls 0.24.2", + "hyper-rustls 0.27.6", + "hyper-util", + "pin-project-lite", + "rustls 0.21.12", + "rustls 0.23.27", + "rustls-native-certs 0.8.2", + "rustls-pki-types", + "tokio", + "tower", + "tracing", +] + +[[package]] +name = "aws-smithy-json" +version = "0.61.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2db31f727935fc63c6eeae8b37b438847639ec330a9161ece694efba257e0c54" +dependencies = [ + "aws-smithy-types", +] + +[[package]] +name = "aws-smithy-observability" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d1881b1ea6d313f9890710d65c158bdab6fb08c91ea825f74c1c8c357baf4cc" +dependencies = [ + "aws-smithy-runtime-api", +] + +[[package]] +name = "aws-smithy-query" +version = "0.60.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d28a63441360c477465f80c7abac3b9c4d075ca638f982e605b7dc2a2c7156c9" +dependencies = [ + "aws-smithy-types", + "urlencoding", +] + +[[package]] +name = "aws-smithy-runtime" +version = "1.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e107ce0783019dbff59b3a244aa0c114e4a8c9d93498af9162608cd5474e796" +dependencies = [ + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-http-client", + "aws-smithy-observability", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "fastrand", + "http 0.2.12", + "http 1.3.1", + "http-body 0.4.6", + "http-body 1.0.1", + "pin-project-lite", + "pin-utils", + "tokio", + "tracing", +] + +[[package]] +name = "aws-smithy-runtime-api" +version = "1.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec7204f9fd94749a7c53b26da1b961b4ac36bf070ef1e0b94bb09f79d4f6c193" +dependencies = [ + "aws-smithy-async", + "aws-smithy-types", + "bytes", + "http 0.2.12", + "http 1.3.1", + "pin-project-lite", + "tokio", + "tracing", + "zeroize", +] + +[[package]] +name = "aws-smithy-types" +version = "1.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25f535879a207fce0db74b679cfc3e91a3159c8144d717d55f5832aea9eef46e" +dependencies = [ + "base64-simd", + "bytes", + "bytes-utils", + "http 0.2.12", + "http 1.3.1", + "http-body 0.4.6", + "http-body 1.0.1", + "http-body-util", + "itoa", + "num-integer", + "pin-project-lite", + "pin-utils", + "ryu", + "serde", + "time", +] + +[[package]] +name = "aws-smithy-xml" +version = "0.60.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eab77cdd036b11056d2a30a7af7b775789fb024bf216acc13884c6c97752ae56" +dependencies = [ + "xmlparser", +] + +[[package]] +name = "aws-types" +version = "1.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d79fb68e3d7fe5d4833ea34dc87d2e97d26d3086cb3da660bb6b1f76d98680b6" +dependencies = [ + "aws-credential-types", + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "rustc_version", + "tracing", +] + [[package]] name = "axum" version = "0.8.4" @@ -306,10 +684,10 @@ dependencies = [ "bytes", "form_urlencoded", "futures-util", - 
"http", - "http-body", + "http 1.3.1", + "http-body 1.0.1", "http-body-util", - "hyper", + "hyper 1.6.0", "hyper-util", "itoa", "matchit", @@ -338,8 +716,8 @@ checksum = "68464cd0412f486726fb3373129ef5d2993f90c34bc2bc1c1e9943b2f4fc7ca6" dependencies = [ "bytes", "futures-core", - "http", - "http-body", + "http 1.3.1", + "http-body 1.0.1", "http-body-util", "mime", "pin-project-lite", @@ -350,12 +728,28 @@ dependencies = [ "tracing", ] +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + [[package]] name = "base64" version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" +[[package]] +name = "base64-simd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "339abbe78e73178762e23bea9dfd08e697eb3f3301cd4be981c0f78ba5859195" +dependencies = [ + "outref", + "vsimd", +] + [[package]] name = "bitflags" version = "2.9.1" @@ -478,12 +872,25 @@ version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" +[[package]] +name = "bytes-utils" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dafe3a8757b027e2be6e4e5601ed563c55989fcf1546e933c66c8eb3a058d35" +dependencies = [ + "bytes", + "either", +] + [[package]] name = "cc" -version = "1.2.24" +version = "1.2.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16595d3be041c03b09d08d0858631facccee9221e579704070e6e9e4915d3bc7" +checksum = "c481bdbf0ed3b892f6f806287d72acd515b352a4ec27a208489b8c1bc839633a" dependencies = [ + "find-msvc-tools", + "jobserver", + "libc", "shlex", ] @@ -574,7 +981,10 @@ dependencies = [ "aspasia", "assert_fs", "async-std", - "base64", + "aws-config", + "aws-sdk-s3", + "aws-types", + "base64 0.22.1", "bytes", "chrono", "clap", @@ -641,6 +1051,15 @@ dependencies = [ "whoami", ] +[[package]] +name = "cmake" +version = "0.1.54" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7caa3f9de89ddbe2c607f4101924c5abec803763ae9534e4f4d7d8f84aa81f0" +dependencies = [ + "cc", +] + [[package]] name = "colorchoice" version = "1.0.3" @@ -656,6 +1075,26 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation-sys" version = "0.8.7" @@ -671,6 +1110,34 @@ dependencies = [ "libc", ] +[[package]] +name = "crc" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + +[[package]] 
+name = "crc-fast" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bf62af4cc77d8fe1c22dde4e721d87f2f54056139d8c412e1366b740305f56f" +dependencies = [ + "crc", + "digest", + "libc", + "rand 0.9.1", + "regex", +] + [[package]] name = "crc32fast" version = "1.4.2" @@ -842,6 +1309,7 @@ checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "crypto-common", + "subtle", ] [[package]] @@ -881,6 +1349,12 @@ dependencies = [ "syn 2.0.101", ] +[[package]] +name = "dunce" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" + [[package]] name = "either" version = "1.15.0" @@ -996,6 +1470,12 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "find-msvc-tools" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844" + [[package]] name = "fixedbitset" version = "0.5.7" @@ -1018,6 +1498,12 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + [[package]] name = "form_urlencoded" version = "1.2.1" @@ -1027,6 +1513,12 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + [[package]] name = "funty" version = "2.0.0" @@ -1208,6 +1700,25 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "h2" +version = "0.3.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 0.2.12", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + [[package]] name = "h2" version = "0.4.10" @@ -1219,7 +1730,7 @@ dependencies = [ "fnv", "futures-core", "futures-sink", - "http", + "http 1.3.1", "indexmap", "slab", "tokio", @@ -1241,6 +1752,11 @@ name = "hashbrown" version = "0.15.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "84b26c544d002229e640969970a2e74021aadf6e2f96372b9c58eff97de08eb3" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash", +] [[package]] name = "headers" @@ -1248,10 +1764,10 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b3314d5adb5d94bcdf56771f2e50dbbc80bb4bdf88967526706205ac9eff24eb" dependencies = [ - "base64", + "base64 0.22.1", "bytes", "headers-core", - "http", + "http 1.3.1", "httpdate", "mime", "sha1", @@ -1263,7 +1779,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4" dependencies = [ - "http", + "http 1.3.1", ] [[package]] @@ -1290,6 +1806,26 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +[[package]] +name = "hmac" +version = "0.12.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + [[package]] name = "http" version = "1.3.1" @@ -1301,6 +1837,17 @@ dependencies = [ "itoa", ] +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http 0.2.12", + "pin-project-lite", +] + [[package]] name = "http-body" version = "1.0.1" @@ -1308,7 +1855,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http", + "http 1.3.1", ] [[package]] @@ -1319,8 +1866,8 @@ checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" dependencies = [ "bytes", "futures-core", - "http", - "http-body", + "http 1.3.1", + "http-body 1.0.1", "pin-project-lite", ] @@ -1336,6 +1883,30 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" +[[package]] +name = "hyper" +version = "0.14.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2 0.3.27", + "http 0.2.12", + "http-body 0.4.6", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2 0.5.10", + "tokio", + "tower-service", + "tracing", + "want", +] + [[package]] name = "hyper" version = "1.6.0" @@ -1345,9 +1916,9 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "h2", - "http", - "http-body", + "h2 0.4.10", + "http 1.3.1", + "http-body 1.0.1", "httparse", "httpdate", "itoa", @@ -1357,19 +1928,36 @@ dependencies = [ "want", ] +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http 0.2.12", + "hyper 0.14.32", + "log", + "rustls 0.21.12", + "rustls-native-certs 0.6.3", + "tokio", + "tokio-rustls 0.24.1", +] + [[package]] name = "hyper-rustls" version = "0.27.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "03a01595e11bdcec50946522c32dde3fc6914743000a68b93000965f2f02406d" dependencies = [ - "http", - "hyper", + "http 1.3.1", + "hyper 1.6.0", "hyper-util", - "rustls", + "rustls 0.23.27", + "rustls-native-certs 0.8.2", "rustls-pki-types", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.2", "tower-service", "webpki-roots 1.0.0", ] @@ -1380,7 +1968,7 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" dependencies = [ - "hyper", + "hyper 1.6.0", "hyper-util", "pin-project-lite", "tokio", @@ -1397,9 +1985,9 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "http", - "http-body", - "hyper", + "http 1.3.1", + "http-body 1.0.1", + "hyper 1.6.0", "libc", "pin-project-lite", 
"socket2 0.5.10", @@ -1613,6 +2201,16 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" +[[package]] +name = "jobserver" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" +dependencies = [ + "getrandom 0.3.3", + "libc", +] + [[package]] name = "js-sys" version = "0.3.82" @@ -1726,6 +2324,15 @@ dependencies = [ "value-bag", ] +[[package]] +name = "lru" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" +dependencies = [ + "hashbrown 0.15.3", +] + [[package]] name = "lru-slab" version = "0.1.2" @@ -1747,6 +2354,16 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest", +] + [[package]] name = "memchr" version = "2.7.4" @@ -1826,7 +2443,7 @@ dependencies = [ "bytes", "futures-core", "futures-util", - "http", + "http 1.3.1", "httparse", "log", "memchr", @@ -1882,6 +2499,15 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + [[package]] name = "num-traits" version = "0.2.19" @@ -1922,6 +2548,18 @@ version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" +[[package]] +name = "openssl-probe" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + +[[package]] +name = "outref" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a80800c0488c3a21695ea981a54918fbb37abf04f4d0720c453632255e2ff0e" + [[package]] name = "parking" version = "2.2.1" @@ -1975,7 +2613,7 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "898bac3fa00d0ba57a4e8289837e965baa2dee8c3749f3b11d45a64b4223d9c3" dependencies = [ - "base64", + "base64 0.22.1", "serde", ] @@ -2298,7 +2936,7 @@ dependencies = [ "quinn-proto", "quinn-udp", "rustc-hash", - "rustls", + "rustls 0.23.27", "socket2 0.5.10", "thiserror 2.0.17", "tokio", @@ -2318,7 +2956,7 @@ dependencies = [ "rand 0.9.1", "ring", "rustc-hash", - "rustls", + "rustls 0.23.27", "rustls-pki-types", "slab", "thiserror 2.0.17", @@ -2464,6 +3102,12 @@ dependencies = [ "regex-syntax", ] +[[package]] +name = "regex-lite" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d942b98df5e658f56f20d592c7f868833fe38115e65c33003d8cd224b0155da" + [[package]] name = "regex-syntax" version = "0.8.5" @@ -2485,16 +3129,16 @@ version = "0.12.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d19c46a6fdd48bc4dab94b6103fccc55d34c67cc0ad04653aad4ea2a07cd7bbb" dependencies = [ - "base64", + "base64 0.22.1", "bytes", "futures-channel", "futures-core", "futures-util", - "http", - "http-body", + "http 1.3.1", + "http-body 1.0.1", "http-body-util", - "hyper", - "hyper-rustls", + "hyper 1.6.0", + "hyper-rustls 0.27.6", "hyper-util", "ipnet", "js-sys", @@ -2505,15 +3149,15 @@ dependencies = [ "percent-encoding", "pin-project-lite", "quinn", - "rustls", - "rustls-pemfile", + "rustls 0.23.27", + "rustls-pemfile 2.2.0", "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", "sync_wrapper", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.2", "tokio-util", "tower", "tower-service", @@ -2591,6 +3235,15 @@ version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + [[package]] name = "rustix" version = "1.0.7" @@ -2604,20 +3257,66 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "rustls" +version = "0.21.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" +dependencies = [ + "log", + "ring", + "rustls-webpki 0.101.7", + "sct", +] + [[package]] name = "rustls" version = "0.23.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "730944ca083c1c233a75c09f199e973ca499344a2b7ba9e755c457e86fb4a321" dependencies = [ + "aws-lc-rs", "once_cell", "ring", "rustls-pki-types", - "rustls-webpki", + "rustls-webpki 0.103.3", "subtle", "zeroize", ] +[[package]] +name = "rustls-native-certs" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" +dependencies = [ + "openssl-probe", + "rustls-pemfile 1.0.4", + "schannel", + "security-framework 2.11.1", +] + +[[package]] +name = "rustls-native-certs" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9980d917ebb0c0536119ba501e90834767bffc3d60641457fd84a1f3fd337923" +dependencies = [ + "openssl-probe", + "rustls-pki-types", + "schannel", + "security-framework 3.5.1", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", +] + [[package]] name = "rustls-pemfile" version = "2.2.0" @@ -2637,12 +3336,23 @@ dependencies = [ "zeroize", ] +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "rustls-webpki" version = "0.103.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e4a72fe2bcf7a6ac6fd7d0b9e5cb68aeb7d4c0a0271730218b3e92d43b4eb435" dependencies = [ + "aws-lc-rs", "ring", "rustls-pki-types", "untrusted", @@ -2678,6 +3388,15 @@ dependencies = [ "sdd", ] +[[package]] +name = "schannel" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" +dependencies = [ + "windows-sys 0.61.2", +] + [[package]] name = "scheduled-thread-pool" version = "0.2.7" @@ -2699,6 +3418,16 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "sdd" version = "3.0.8" @@ -2711,6 +3440,42 @@ version = "4.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags", + "core-foundation 0.9.4", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework" +version = "3.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" +dependencies = [ + "bitflags", + "core-foundation 0.10.1", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "semver" version = "1.0.27" @@ -3214,13 +3979,23 @@ dependencies = [ "syn 2.0.101", ] +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls 0.21.12", + "tokio", +] + [[package]] name = "tokio-rustls" version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" dependencies = [ - "rustls", + "rustls 0.23.27", "tokio", ] @@ -3341,13 +4116,13 @@ checksum = "eb7613188ce9f7df5bfe185db26c5814347d110db17920415cf2fbcad85e7203" dependencies = [ "async-trait", "axum", - "base64", + "base64 0.22.1", "bytes", - "h2", - "http", - "http-body", + "h2 0.4.10", + "http 1.3.1", + "http-body 1.0.1", "http-body-util", - "hyper", + "hyper 1.6.0", "hyper-timeout", "hyper-util", "percent-encoding", @@ -3589,7 +4364,7 @@ checksum = "eadc29d668c91fcc564941132e17b28a7ceb2f3ebf0b9dae3e03fd7a6748eb0d" dependencies = [ "bytes", "data-encoding", - "http", + "http 1.3.1", "httparse", "log", "rand 0.9.1", @@ -3606,7 +4381,7 @@ checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442" dependencies = [ "bytes", "data-encoding", - "http", + "http 1.3.1", "httparse", "log", "rand 0.9.1", @@ -3719,6 +4494,12 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "vsimd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64" + [[package]] name = "vte" 
version = "0.14.1" @@ -3765,10 +4546,10 @@ dependencies = [ "bytes", "futures-util", "headers", - "http", - "http-body", + "http 1.3.1", + "http-body 1.0.1", "http-body-util", - "hyper", + "hyper 1.6.0", "hyper-util", "log", "mime", @@ -4251,6 +5032,12 @@ dependencies = [ "rustix", ] +[[package]] +name = "xmlparser" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66fee0b777b0f5ac1c69bb06d361268faafa61cd4682ae064a171c16c433e9e4" + [[package]] name = "yoke" version = "0.8.0" diff --git a/server/Cargo.toml b/server/Cargo.toml index db74d6c9..b455c41c 100644 --- a/server/Cargo.toml +++ b/server/Cargo.toml @@ -122,6 +122,9 @@ indoc = "2.0.5" Inflector = "0.11.4" serial_test = "3.1.1" aspasia = "0.2.0" +aws-config = { version = "1.5.0", default-features = false, features = ["rustls"] } +aws-sdk-s3 = { version = "1.40.0", default-features = false, features = ["rustls"] } +aws-types = "1.3.0" [dev-dependencies] assert_fs = "1.0.13" diff --git a/server/src/api_server/file_upload.rs b/server/src/api_server/file_upload.rs index 529fdf89..12522bbd 100644 --- a/server/src/api_server/file_upload.rs +++ b/server/src/api_server/file_upload.rs @@ -10,6 +10,7 @@ use crate::video_pipeline::IncomingFile; use super::parse_auth_headers; use super::server_state::ServerState; use super::user_session::{org_authz_with_default, AuthzTopic, AuthzError}; +use crate::video_pipeline::TranscodePreference; use lib_clapshot_grpc::proto; use proto::org::authz_user_action_request as authz_req; @@ -33,7 +34,7 @@ pub async fn handle_multipart_upload( body: impl warp::Stream> + Unpin) -> Result, Infallible> { - let (user_id, user_name, is_admin, cookies, filtered_headers, remote_error) = parse_auth_headers(&hdrs, &server.default_user, &server.org_http_headers_regex); + let (user_id, user_name, is_admin, mut cookies, filtered_headers, remote_error) = parse_auth_headers(&hdrs, &server.default_user, &server.org_http_headers_regex); // If X-Remote-Error is set, return error response if let Some(error_msg) = remote_error { @@ -73,6 +74,17 @@ pub async fn handle_multipart_upload( } } + // Determine transcoding preference from header + let transcode_preference = hdrs.get("x-clapshot-transcode") + .and_then(|v| v.to_str().ok()) + .map(|s| s.to_ascii_lowercase()) + .map(|s| match s.as_str() { + "true" | "1" | "yes" => TranscodePreference::Force, + "false" | "0" | "no" => TranscodePreference::Skip, + _ => TranscodePreference::Auto, + }).unwrap_or(TranscodePreference::Auto); + cookies.insert("transcode_preference".into(), format!("{:?}", transcode_preference)); + // Parse the multipart stream let boundary = mime.get_param("boundary").map(|v| v.to_string()); let boundary = match boundary { @@ -165,7 +177,12 @@ pub async fn handle_multipart_upload( } } - if let Err(e) = upload_done.send(IncomingFile{ file_path: uploaded_file, user_id: user_id, cookies }) { + if let Err(e) = upload_done.send(IncomingFile{ + file_path: uploaded_file, + user_id, + cookies, + transcode_preference, + }) { tracing::error!("Failed to send upload ok signal: {:?}", e); return Ok(warp::reply::with_status("Internal error: failed to send upload ok signal".into(), warp::http::StatusCode::INTERNAL_SERVER_ERROR)); } diff --git a/server/src/api_server/server_state.rs b/server/src/api_server/server_state.rs index 0757bfeb..83faab33 100644 --- a/server/src/api_server/server_state.rs +++ b/server/src/api_server/server_state.rs @@ -5,6 +5,7 @@ use lib_clapshot_grpc::proto::org::OrganizerInfo; use parking_lot::{RwLock, 
MappedRwLockReadGuard, MappedRwLockWriteGuard, RwLockReadGuard, RwLockWriteGuard}; use std::sync::atomic::AtomicBool; use regex::Regex; +use crate::storage::StorageBackend; use tokio::sync::Mutex; use anyhow::anyhow; @@ -26,6 +27,8 @@ pub struct ServerState { pub db: Arc, pub media_files_dir: PathBuf, pub upload_dir: PathBuf, + pub media_base_url: String, + pub storage: StorageBackend, pub url_base: String, pub default_user: String, pub org_http_headers_regex: Regex, @@ -48,6 +51,7 @@ impl ServerState { media_files_dir: &Path, upload_dir: &Path, url_base: &str, + storage: StorageBackend, organizer_uri: Option, grpc_srv_listening_flag: Arc, default_user: String, @@ -58,6 +62,8 @@ impl ServerState { db, media_files_dir: media_files_dir.to_path_buf(), upload_dir: upload_dir.to_path_buf(), + media_base_url: storage.media_base_url().to_string(), + storage, grpc_srv_listening_flag, terminate_flag, url_base: url_base.to_string(), diff --git a/server/src/api_server/test_utils.rs b/server/src/api_server/test_utils.rs index 0f7ab700..d4307dfe 100644 --- a/server/src/api_server/test_utils.rs +++ b/server/src/api_server/test_utils.rs @@ -14,6 +14,7 @@ use tokio_tungstenite::tungstenite::Message; use crate::video_pipeline::IncomingFile; use crate::api_server::{UserMessage}; use crate::database::{DB, models}; +use crate::storage::StorageBackend; @@ -190,12 +191,14 @@ macro_rules! api_test { let ws_url = url_base.replace("http", "ws") + "/api/ws"; let media_files_dir = data_dir.join("videos"); let upload_dir = data_dir.join("upload"); + let storage = StorageBackend::local(media_files_dir.clone(), &url_base); let test_regex = validate_org_http_headers_regex("^X[-_]REMOTE[-_]").expect("Test regex failed"); let server_state = ServerState::new( db.clone(), &media_files_dir.clone(), &upload_dir.clone(), &url_base.clone(), + storage, None, grpc_srv_listening_flag.clone(), "anonymous".to_string(), diff --git a/server/src/api_server/user_session.rs b/server/src/api_server/user_session.rs index 64b66424..a90c3615 100644 --- a/server/src/api_server/user_session.rs +++ b/server/src/api_server/user_session.rs @@ -151,7 +151,7 @@ pub async fn org_authz<'a>( AuthzTopic::MediaFile(v, op) => authz_op::Op::MediaFileOp( authz_op::MediaFileOp { op: op.into(), - media_file: Some(v.to_proto3(&server.url_base, vec![])) }), // omit subtitles for authz check + media_file: Some(v.to_proto3(&server.media_base_url, vec![])) }), // omit subtitles for authz check AuthzTopic::Comment(c, op) => authz_op::Op::CommentOp( authz_op::CommentOp { op: op.into(), diff --git a/server/src/api_server/ws_handers.rs b/server/src/api_server/ws_handers.rs index 5af3dd62..99fff452 100644 --- a/server/src/api_server/ws_handers.rs +++ b/server/src/api_server/ws_handers.rs @@ -102,7 +102,7 @@ pub async fn msg_open_navigation_page(data: &OpenNavigationPage , ses: &mut User let mut media_files: Vec = Vec::new(); for m in models::MediaFile::get_by_user(&mut server.db.conn()?, &ses.user_id, DBPaging::default())? { let subs = models::Subtitle::get_by_media_file(&mut server.db.conn()?, &m.id, DBPaging::default())?; - media_files.push(m.to_proto3(&server.url_base, subs)); + media_files.push(m.to_proto3(&server.media_base_url, subs)); } let h_txt = if media_files.is_empty() { "
<h2>
You have no media yet.
</h2>
" } else { "
<h2>
All your media files
</h2>
" }; @@ -137,7 +137,7 @@ pub async fn send_open_media_file_cmd(server: &ServerState, session_id: &str, me let conn = &mut server.db.conn()?; let v_db = models::MediaFile::get(conn, &media_file_id.into())?; let subs = models::Subtitle::get_by_media_file(conn, media_file_id, DBPaging::default())?; - let v = v_db.to_proto3(&server.url_base, subs); + let v = v_db.to_proto3(&server.media_base_url, subs); if v.playback_url.is_none() { return Err(anyhow!("No playback file")); } @@ -421,6 +421,7 @@ pub async fn msg_add_subtitle(data: &AddSubtitle, ses: &mut UserSession, server: }; tokio::fs::write(&orig_sub_file, file_contents).await.context("Failed to write orig subtitle file")?; + server.storage.upload_if_exists(&orig_sub_file); // Convert to WebVTT if needed let playback_filename ={ @@ -457,6 +458,7 @@ pub async fn msg_add_subtitle(data: &AddSubtitle, ses: &mut UserSession, server: } temp_workaround_aspasia_webvtt_bug(&vtt_path)?; + server.storage.upload_if_exists(&vtt_path); Some(vtt_path.file_name().context("Bad filename")?.to_str().context("Bad filename")?.to_string()) }, Err(e) => return Err(anyhow!("Failed to parse subtitle file: {:?}", e)), diff --git a/server/src/grpc/db_models.rs b/server/src/grpc/db_models.rs index 6c1e7458..02be7939 100644 --- a/server/src/grpc/db_models.rs +++ b/server/src/grpc/db_models.rs @@ -53,7 +53,7 @@ impl models::MediaFile }) } - pub fn to_proto3(&self, url_base: &str, subtitles: Vec) -> proto::MediaFile + pub fn to_proto3(&self, media_base_url: &str, subtitles: Vec) -> proto::MediaFile { let duration = match (self.duration, self.total_frames, &self.fps) { (Some(dur), Some(total_frames), Some(fps)) => Some(proto::MediaFileDuration { @@ -75,12 +75,12 @@ impl models::MediaFile // Make preview data (thumb sheet and/or thumb url) let thumb_url = if matches!(self.has_thumbnail, Some(true)) { - Some(format!("{}/videos/{}/thumbs/thumb.webp", &url_base, &self.id)) + Some(format!("{}/thumbs/thumb.webp", format!("{}/{}", media_base_url, &self.id))) } else { None }; let thumb_sheet = match (self.thumb_sheet_cols, self.thumb_sheet_rows) { (Some(cols), Some(rows)) => Some(proto::media_file_preview_data::ThumbSheet { - url: format!("{}/videos/{}/thumbs/sheet-{}x{}.webp", &url_base, &self.id, cols, rows), + url: format!("{}/thumbs/sheet-{}x{}.webp", format!("{}/{}", media_base_url, &self.id), cols, rows), rows: rows as u32, cols: cols as u32, }), @@ -110,10 +110,10 @@ impl models::MediaFile added_time: Some(datetime_to_proto3(&self.added_time)), preview_data, processing_metadata, - subtitles: subtitles.into_iter().map(|s| s.to_proto3(url_base)).collect(), + subtitles: subtitles.into_iter().map(|s| s.to_proto3(media_base_url)).collect(), default_subtitle_id: self.default_subtitle_id.map(|id| id.to_string()), - playback_url: playback_uri.map(|uri| format!("{}/videos/{}/{}", url_base, &self.id, uri)), - orig_url: orig_uri.map(|uri| format!("{}/videos/{}/{}", url_base, &self.id, uri)) + playback_url: playback_uri.map(|uri| format!("{}/{}/{}", media_base_url, &self.id, uri)), + orig_url: orig_uri.map(|uri| format!("{}/{}/{}", media_base_url, &self.id, uri)) } } @@ -165,12 +165,13 @@ impl models::Subtitle }) } - pub fn to_proto3(&self, url_base: &str) -> proto::Subtitle + pub fn to_proto3(&self, media_base_url: &str) -> proto::Subtitle { - let orig_url = format!("{}/videos/{}/subs/orig/{}", url_base, &self.media_file_id, &self.orig_filename); + let base = format!("{}/{}", media_base_url, &self.media_file_id); + let orig_url = format!("{}/subs/orig/{}", base, &self.orig_filename); 
let playback_url = match &self.filename { - Some(f) => format!("{}/videos/{}/subs/{}", url_base, &self.media_file_id, f), - None => orig_url.clone() + Some(f) => format!("{}/subs/{}", base, f), + None => orig_url.clone(), }; proto::Subtitle { id: self.id.to_string(), diff --git a/server/src/grpc/grpc_server.rs b/server/src/grpc/grpc_server.rs index e616cdd2..68bab841 100644 --- a/server/src/grpc/grpc_server.rs +++ b/server/src/grpc/grpc_server.rs @@ -131,7 +131,7 @@ impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl }; let mut proto_items = Vec::with_capacity(items.len()); - for mf in items { proto_items.push(mf.to_proto3(&self.server.url_base, mf.get_subtitles(conn)?)); } + for mf in items { proto_items.push(mf.to_proto3(&self.server.media_base_url, mf.get_subtitles(conn)?)); } Ok(Response::new(org::DbMediaFileList { items: proto_items, @@ -231,7 +231,7 @@ impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl media_files: upsert_type!([ conn, req.media_files, models::MediaFile, models::MediaFileInsert, |it: &proto::MediaFile| it.id.is_empty(), - |it: &models::MediaFile| Ok(it.to_proto3(self.server.url_base.as_str(), it.get_subtitles(conn)?))])?, + |it: &models::MediaFile| Ok(it.to_proto3(self.server.media_base_url.as_str(), it.get_subtitles(conn)?))])?, comments: upsert_type!([ conn, req.comments, models::Comment, models::CommentInsert, |it: &proto::Comment| it.id.is_empty(), @@ -243,7 +243,7 @@ impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl subtitles: upsert_type!([ conn, req.subtitles, models::Subtitle, models::SubtitleInsert, |it: &proto::Subtitle| it.id.is_empty(), - |it: &models::Subtitle| Ok(it.to_proto3(self.server.url_base.as_str()))])?, + |it: &models::Subtitle| Ok(it.to_proto3(self.server.media_base_url.as_str()))])?, })) } diff --git a/server/src/lib.rs b/server/src/lib.rs index c839209c..8f4e5d03 100644 --- a/server/src/lib.rs +++ b/server/src/lib.rs @@ -12,6 +12,7 @@ pub mod api_server; pub mod database; pub mod tests; pub mod grpc; +pub mod storage; pub const PKG_VERSION: &'static str = env!("CARGO_PKG_VERSION"); pub const PKG_NAME: &'static str = env!("CARGO_PKG_NAME"); @@ -46,6 +47,7 @@ impl ClapshotInit { transcode_script: String, thumbnail_script: String, org_http_headers_regex: regex::Regex, + storage: crate::storage::StorageBackend, terminate_flag: Arc) -> anyhow::Result { @@ -61,7 +63,7 @@ impl ClapshotInit { } // Create subdirectories - for d in &["videos", "incoming", "videos"] { + for d in &["videos", "incoming", "upload"] { std::fs::create_dir_all(&data_dir.join(d))?; } @@ -84,6 +86,7 @@ impl ClapshotInit { &data_dir.join("videos"), &data_dir.join("upload"), &url_base, + storage.clone(), organizer_uri.clone(), grpc_srv_listening_flag.clone(), default_user, @@ -124,7 +127,7 @@ impl ClapshotInit { let vpp_thread = Some({ let db = db.clone(); thread::spawn(move || { video_pipeline::run_forever( - db, tf.clone(), dd, user_msg_tx, poll_interval, resubmit_delay, target_bitrate, upload_rx, n_workers, ingest_username_from, ts, ths)}) + db, tf.clone(), dd, storage.clone(), user_msg_tx, poll_interval, resubmit_delay, target_bitrate, upload_rx, n_workers, ingest_username_from, ts, ths)}) }); @@ -363,6 +366,7 @@ pub fn run_clapshot( transcode_script: String, thumbnail_script: String, org_http_headers_regex: regex::Regex, + storage: crate::storage::StorageBackend, ) -> anyhow::Result<()> { let terminate_flag = Arc::new(AtomicBool::new(false)); @@ -386,9 +390,10 @@ pub fn run_clapshot( 
transcode_script, thumbnail_script, org_http_headers_regex, + storage, terminate_flag.clone() )?; // Wait until termination clapshot.wait_for_termination() -} \ No newline at end of file +} diff --git a/server/src/main.rs b/server/src/main.rs index 73b4490a..5354a5d0 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -3,7 +3,7 @@ use clap::Parser; use clapshot_server::{ api_server::validate_org_http_headers_regex, grpc::{grpc_client::prepare_organizer, grpc_server::make_grpc_server_bind}, - run_clapshot, PKG_NAME, PKG_VERSION, + run_clapshot, storage::StorageBackend, PKG_NAME, PKG_VERSION, video_pipeline::IngestUsernameFrom, }; use std::{path::PathBuf, sync::Arc, str::FromStr}; @@ -127,6 +127,38 @@ struct Args { /// Case-insensitive matching. Default is disabled for security. #[arg(long, value_name="REGEX", default_value="^$")] org_http_headers: String, + + /// Storage backend (local or s3-compatible object storage) + #[arg(long, value_name="BACKEND", default_value="local")] + storage_backend: String, + + /// S3-compatible endpoint base URL, e.g. https://s3.example.com + #[arg(long, value_name="URL")] + s3_endpoint: Option, + + /// S3 region (required for S3 backend) + #[arg(long, value_name="REGION")] + s3_region: Option, + + /// S3 bucket (required for S3 backend) + #[arg(long, value_name="BUCKET")] + s3_bucket: Option, + + /// S3 access key (required for S3 backend) + #[arg(long, value_name="KEY")] + s3_access_key: Option, + + /// S3 secret key (required for S3 backend) + #[arg(long, value_name="SECRET")] + s3_secret_key: Option, + + /// Path/prefix inside the bucket where media files are stored + #[arg(long, value_name="PREFIX", default_value="videos")] + s3_prefix: String, + + /// Public base URL for accessing the bucket/prefix (used for playback URLs) + #[arg(long, value_name="URL")] + s3_public_url: Option, } fn main() -> anyhow::Result<()> { @@ -179,6 +211,32 @@ fn main() -> anyhow::Result<()> { // Validate and compile the org_http_headers regex let org_http_headers_regex = validate_org_http_headers_regex(&args.org_http_headers)?; + let storage = match args.storage_backend.as_str() { + "local" => StorageBackend::local(args.data_dir.join("videos"), &url_base), + "s3" => { + let endpoint = args.s3_endpoint.clone().ok_or_else(|| anyhow::anyhow!("--s3-endpoint is required for S3 backend"))?; + let region = args.s3_region.clone().ok_or_else(|| anyhow::anyhow!("--s3-region is required for S3 backend"))?; + let bucket = args.s3_bucket.clone().ok_or_else(|| anyhow::anyhow!("--s3-bucket is required for S3 backend"))?; + let access_key = args.s3_access_key.clone().ok_or_else(|| anyhow::anyhow!("--s3-access-key is required for S3 backend"))?; + let secret_key = args.s3_secret_key.clone().ok_or_else(|| anyhow::anyhow!("--s3-secret-key is required for S3 backend"))?; + + let public_base_url = args.s3_public_url.clone() + .unwrap_or_else(|| format!("{}/{}", endpoint.trim_end_matches('/'), bucket)); + + StorageBackend::s3( + args.data_dir.join("videos"), + bucket, + region, + access_key, + secret_key, + endpoint, + args.s3_prefix.clone(), + public_base_url, + )? + }, + other => bail!("Unknown storage backend '{}'. 
Valid options: local, s3", other),
+    };
+
     // Run the server (blocking)
     if let Err(e) = run_clapshot(
         args.data_dir.to_path_buf(),
@@ -198,9 +256,10 @@ fn main() -> anyhow::Result<()> {
         args.transcode_script,
         args.thumbnail_script,
         org_http_headers_regex,
+        storage,
     ) {
         error!("run_clapshot() failed: {}", e);
     }
 
     Ok(())
-}
\ No newline at end of file
+}
diff --git a/server/src/storage.rs b/server/src/storage.rs
new file mode 100644
index 00000000..2cdb453d
--- /dev/null
+++ b/server/src/storage.rs
@@ -0,0 +1,187 @@
+use std::fs::File;
+use std::io::Read;
+use std::path::{Path, PathBuf};
+use std::sync::Arc;
+
+use anyhow::Context;
+use aws_sdk_s3::{config::{Credentials, Region}, primitives::ByteStream, Client};
+use tokio::runtime::Runtime;
+
+/// Simple content type guessing for a handful of formats we serve.
+fn guess_content_type(path: &Path) -> &'static str {
+    match path.extension().and_then(|e| e.to_str()).map(|s| s.to_ascii_lowercase()) {
+        Some(ext) if ext == "mp4" => "video/mp4",
+        Some(ext) if ext == "mkv" => "video/x-matroska",
+        Some(ext) if ext == "webm" => "video/webm",
+        Some(ext) if ext == "mov" => "video/quicktime",
+        Some(ext) if ext == "webp" => "image/webp",
+        Some(ext) if ext == "png" => "image/png",
+        Some(ext) if ext == "jpg" || ext == "jpeg" => "image/jpeg",
+        Some(ext) if ext == "vtt" => "text/vtt",
+        Some(ext) if ext == "srt" => "application/x-subrip",
+        _ => "application/octet-stream",
+    }
+}
+
+#[derive(Clone)]
+pub enum StorageBackend {
+    LocalFs(LocalFsBackend),
+    S3(ObjectStorageBackend),
+}
+
+impl StorageBackend {
+    pub fn local(media_root: PathBuf, url_base: &str) -> Self {
+        let prefix = "videos".to_string();
+        let media_base_url = format!("{}/{}", url_base.trim_end_matches('/'), prefix);
+        StorageBackend::LocalFs(LocalFsBackend {
+            media_root,
+            prefix,
+            media_base_url,
+        })
+    }
+
+    pub fn s3(
+        media_root: PathBuf,
+        bucket: String,
+        region: String,
+        access_key: String,
+        secret_key: String,
+        endpoint: String,
+        prefix: String,
+        public_base_url: String,
+    ) -> anyhow::Result<Self> {
+        let media_base_url = format!("{}/{}", public_base_url.trim_end_matches('/'), prefix.trim_end_matches('/'));
+
+        let rt = Runtime::new().context("create tokio runtime for S3 client")?;
+        let client = {
+            // Static credentials from the CLI flags.
+            let credentials = Credentials::new(access_key, secret_key, None, None, "clapshot-cli");
+            let cfg = rt.block_on(async {
+                let base = aws_config::defaults(aws_config::BehaviorVersion::latest())
+                    .region(Region::new(region))
+                    .credentials_provider(credentials)
+                    .load()
+                    .await;
+                // Custom endpoint + path-style addressing, so S3-compatible stores
+                // (e.g. MinIO) work without virtual-hosted bucket DNS.
+                aws_sdk_s3::config::Builder::from(&base)
+                    .endpoint_url(endpoint)
+                    .force_path_style(true)
+                    .build()
+            });
+            Client::from_conf(cfg)
+        };
+
+        Ok(StorageBackend::S3(ObjectStorageBackend {
+            media_root,
+            prefix,
+            media_base_url,
+            client: Arc::new(client),
+            bucket,
+            rt: Arc::new(rt),
+        }))
+    }
+
+    pub fn media_base_url(&self) -> &str {
+        match self {
+            StorageBackend::LocalFs(b) => &b.media_base_url,
+            StorageBackend::S3(b) => &b.media_base_url,
+        }
+    }
+
+    pub fn media_root(&self) -> &Path {
+        match self {
+            StorageBackend::LocalFs(b) => &b.media_root,
+            StorageBackend::S3(b) => &b.media_root,
+        }
+    }
+
+    pub fn needs_remote_upload(&self) -> bool {
+        matches!(self, StorageBackend::S3(_))
+    }
+
+    /// Upload a file that lives under the media root. No-op for LocalFS.
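+    /// For the S3 backend, the file is mirrored to `<prefix>/<path relative
+    /// to the media root>` in the bucket, as computed by `key_for_path()`;
+    /// paths outside the media root are rejected.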
+    pub fn upload_local_path(&self, abs_path: &Path) -> anyhow::Result<()> {
+        match self {
+            StorageBackend::LocalFs(_) => Ok(()),
+            StorageBackend::S3(backend) => backend.upload(abs_path),
+        }
+    }
+
+    /// Upload file if it exists and log an error instead of bailing.
+    pub fn upload_if_exists(&self, abs_path: &Path) {
+        if !self.needs_remote_upload() {
+            return;
+        }
+        if !abs_path.exists() {
+            tracing::debug!(path=?abs_path, "Skipping upload for missing file");
+            return;
+        }
+        if let Err(e) = self.upload_local_path(abs_path) {
+            tracing::error!(path=?abs_path, details=%e, "Failed to upload asset to object storage");
+        }
+    }
+
+    fn key_for_path(&self, abs_path: &Path) -> anyhow::Result<String> {
+        let root = self.media_root();
+        let rel = abs_path.strip_prefix(root)
+            .with_context(|| format!("Path '{:?}' not under media root '{:?}'", abs_path, root))?;
+        let rel = rel.to_string_lossy().replace('\\', "/");
+        let prefix = match self {
+            StorageBackend::LocalFs(b) => &b.prefix,
+            StorageBackend::S3(b) => &b.prefix,
+        }
+        .trim_end_matches('/');
+
+        if prefix.is_empty() {
+            Ok(rel)
+        } else {
+            Ok(format!("{}/{}", prefix, rel))
+        }
+    }
+}
+
+#[derive(Clone)]
+pub struct LocalFsBackend {
+    pub media_root: PathBuf,
+    pub prefix: String,
+    pub media_base_url: String,
+}
+
+#[derive(Clone)]
+pub struct ObjectStorageBackend {
+    pub media_root: PathBuf,
+    pub prefix: String,
+    pub media_base_url: String,
+    pub bucket: String,
+    pub client: Arc<Client>,
+    pub rt: Arc<Runtime>,
+}
+
+impl ObjectStorageBackend {
+    fn upload(&self, abs_path: &Path) -> anyhow::Result<()> {
+        let key = StorageBackend::S3(self.clone()).key_for_path(abs_path)?;
+        let ct = guess_content_type(abs_path);
+        let mut file = File::open(abs_path).with_context(|| format!("Open file {:?}", abs_path))?;
+        let mut buffer = Vec::new();
+        file.read_to_end(&mut buffer)?;
+
+        // Run the upload on the dedicated S3 runtime and wait for the result
+        // over a channel. Calling `rt.block_on()` here directly would panic
+        // when this is invoked from inside the server's own tokio runtime
+        // (e.g. from the async subtitle WS handlers).
+        let (tx, rx) = std::sync::mpsc::channel();
+        let client = self.client.clone();
+        let bucket = self.bucket.clone();
+        self.rt.spawn(async move {
+            let res = client
+                .put_object()
+                .bucket(&bucket)
+                .key(&key)
+                .body(ByteStream::from(buffer))
+                .content_type(ct)
+                .send()
+                .await;
+            let _ = tx.send(res);
+        });
+        rx.recv().context("S3 upload task was dropped")?
+            .context("upload to object storage")?;
+
+        Ok(())
+    }
+}
diff --git a/server/src/tests/integration_test.rs b/server/src/tests/integration_test.rs
index 87e3d560..0233cdbc 100644
--- a/server/src/tests/integration_test.rs
+++ b/server/src/tests/integration_test.rs
@@ -24,6 +24,7 @@ mod integration_test
 
     use crate::api_server::tests::expect_user_msg;
     use crate::api_server::validate_org_http_headers_regex;
+    use crate::storage::StorageBackend;
     use crate::database::schema::media_files::{thumb_sheet_cols, thumb_sheet_rows};
     use crate::{expect_client_cmd, send_server_cmd};
 
@@ -65,6 +66,7 @@ mod integration_test
             file_path: PathBuf::from_str(data_dir.join("NASA_Red_Lettuce_excerpt.mov").to_str().unwrap())?,
             user_id: "nobody".to_string(),
             cookies: HashMap::new(),
+            transcode_preference: crate::video_pipeline::TranscodePreference::Auto,
         };
 
         arg_sender.send(args.clone())?;
@@ -131,9 +133,10 @@ mod integration_test
             let data_dir = $data_dir.path().to_path_buf();
             let url_base = url_base.clone();
             let org_uri = org_uri.clone();
+            let storage = crate::storage::StorageBackend::local(data_dir.join("videos"), &url_base);
             let tf = terminate_flag.clone();
             thread::spawn(move || {
-                let mut clapshot = crate::ClapshotInit::init_and_spawn_workers(data_dir, true, url_base, vec![], "127.0.0.1".into(), port, org_uri.clone(), grpc_server_bind, 4, target_bitrate, poll_interval, "anonymous".to_string(), poll_interval*5.0, $ingest_username_from, "scripts/clapshot-transcode".to_string(), "scripts/clapshot-thumbnail".to_string(), regex,
+        self.rt.block_on(async {
+            let stream = ByteStream::from(buffer);
+            self.client
+                .put_object()
+                .bucket(&self.bucket)
+                .key(&key)
+                .body(stream)
+                .content_type(ct)
+                .send()
+                .await
+        })
+        .context("upload to object storage")?;
+
+        Ok(())
+    }
+}
diff --git a/server/src/tests/integration_test.rs b/server/src/tests/integration_test.rs
index 87e3d560..0233cdbc 100644
--- a/server/src/tests/integration_test.rs
+++ b/server/src/tests/integration_test.rs
@@ -24,6 +24,7 @@ mod integration_test
     use crate::api_server::tests::expect_user_msg;
     use crate::api_server::validate_org_http_headers_regex;
+    use crate::storage::StorageBackend;
     use crate::database::schema::media_files::{thumb_sheet_cols, thumb_sheet_rows};
     use crate::{expect_client_cmd, send_server_cmd};
@@ -65,6 +66,7 @@ mod integration_test
             file_path: PathBuf::from_str(data_dir.join("NASA_Red_Lettuce_excerpt.mov").to_str().unwrap())?,
             user_id: "nobody".to_string(),
             cookies: HashMap::new(),
+            transcode_preference: crate::video_pipeline::TranscodePreference::Auto,
         };
         arg_sender.send(args.clone())?;
@@ -131,9 +133,10 @@ mod integration_test
         let data_dir = $data_dir.path().to_path_buf();
         let url_base = url_base.clone();
         let org_uri = org_uri.clone();
+        let storage = crate::storage::StorageBackend::local(data_dir.join("videos"), &url_base);
         let tf = terminate_flag.clone();
         thread::spawn(move || {
-            let mut clapshot = crate::ClapshotInit::init_and_spawn_workers(data_dir, true, url_base, vec![], "127.0.0.1".into(), port, org_uri.clone(), grpc_server_bind, 4, target_bitrate, poll_interval, "anonymous".to_string(), poll_interval*5.0, $ingest_username_from, "scripts/clapshot-transcode".to_string(), "scripts/clapshot-thumbnail".to_string(), regex, tf)?;
+            let mut clapshot = crate::ClapshotInit::init_and_spawn_workers(data_dir, true, url_base, vec![], "127.0.0.1".into(), port, org_uri.clone(), grpc_server_bind, 4, target_bitrate, poll_interval, "anonymous".to_string(), poll_interval*5.0, $ingest_username_from, "scripts/clapshot-transcode".to_string(), "scripts/clapshot-thumbnail".to_string(), regex, storage, tf)?;
             clapshot.wait_for_termination()
         })};
@@ -590,6 +593,7 @@ mod integration_test
             file_path: test_file.clone(),
             user_id: "test_user".to_string(),
             cookies: HashMap::new(),
+            transcode_preference: crate::video_pipeline::TranscodePreference::Auto,
         };
         let (tx, rx) = crossbeam_channel::unbounded();
diff --git a/server/src/video_pipeline/incoming_monitor.rs b/server/src/video_pipeline/incoming_monitor.rs
index a2b44ba9..823a55bf 100644
--- a/server/src/video_pipeline/incoming_monitor.rs
+++ b/server/src/video_pipeline/incoming_monitor.rs
@@ -119,7 +119,12 @@ pub fn run_forever(
                 tracing::info!("Submitting for processing.");
                 submission_time.insert(path.clone(), std::time::Instant::now());
                 if let Err(e) = incoming_sender.send(
-                    super::IncomingFile {file_path: path.clone(), user_id: username, cookies: HashMap::new()}) {
+                    super::IncomingFile {
+                        file_path: path.clone(),
+                        user_id: username,
+                        cookies: HashMap::new(),
+                        transcode_preference: super::TranscodePreference::Auto,
+                    }) {
                     tracing::error!(details=%e, "Failed to send incoming file to processing queue.");
                 }
             },
diff --git a/server/src/video_pipeline/metadata_reader.rs b/server/src/video_pipeline/metadata_reader.rs
index c7915888..72c917a7 100644
--- a/server/src/video_pipeline/metadata_reader.rs
+++ b/server/src/video_pipeline/metadata_reader.rs
@@ -49,7 +49,8 @@ pub struct Metadata {
     pub fps: Decimal,
     pub bitrate: u32,
     pub metadata_all: String,
-    pub upload_cookies: HashMap<String, String> // Cookies from the upload, not read from the file
+    pub upload_cookies: HashMap<String, String>, // Cookies from the upload, not read from the file
+    pub transcode_preference: super::TranscodePreference,
 }
 
 pub type MetadataResult = Result<Metadata, DetailedMsg>;
@@ -154,7 +155,8 @@ fn extract_variables(json: serde_json::Value, args: &IncomingFile, get_file_s
         fps: Decimal::from_str(video_track["FrameRate"].as_str().ok_or("FPS not found")?).map_err(|_| "Invalid FPS".to_string())?,
         bitrate,
         metadata_all: json.to_string(),
-        upload_cookies: args.cookies.clone()
+        upload_cookies: args.cookies.clone(),
+        transcode_preference: args.transcode_preference,
     })
 }
@@ -170,7 +172,8 @@ fn extract_variables(json: serde_json::Value, args: &IncomingFile, get_file_s
         fps: Decimal::from_u8(0).unwrap(),
         bitrate: audio_track["BitRate"].as_str().ok_or("Bitrate not found")?.parse().map_err(|_| "Invalid bitrate".to_string())?,
         metadata_all: json.to_string(),
-        upload_cookies: args.cookies.clone()
+        upload_cookies: args.cookies.clone(),
+        transcode_preference: args.transcode_preference,
     })
 }
@@ -186,7 +189,8 @@ fn extract_variables(json: serde_json::Value, args: &IncomingFile, get_file_s
         fps: Decimal::from_u8(0).unwrap(),
         bitrate: 0,
         metadata_all: json.to_string(),
-        upload_cookies: args.cookies.clone()
+        upload_cookies: args.cookies.clone(),
+        transcode_preference: args.transcode_preference,
     })
 } else {
     return Err("No video, audio or image track found".to_string());
 }
@@ -269,7 +273,8 @@ fn test_fixture(has_bitrate: bool, has_fps: bool) -> (IncomingFile, serde_json::
     let args = IncomingFile {
         file_path: PathBuf::from("test.mp4"),
         user_id: "test_user".to_string(),
-        cookies: Default::default()
+        cookies: Default::default(),
+        transcode_preference: super::TranscodePreference::Auto,
     };
     (args, json)
diff --git a/server/src/video_pipeline/mod.rs b/server/src/video_pipeline/mod.rs
index 7930ab62..9b3f4030 100644
--- a/server/src/video_pipeline/mod.rs
+++ b/server/src/video_pipeline/mod.rs
@@ -34,6 +34,7 @@ use crate::database::error::DBError;
 use crate::video_pipeline::metadata_reader::MediaType;
 use cleanup_rejected::clean_up_rejected_file;
 use crate::database::{DB, models, DbBasicQuery};
+use crate::storage::StorageBackend;
 
 #[derive(Debug, Clone)]
 pub enum IngestUsernameFrom {
@@ -58,12 +59,20 @@ pub const THUMB_SHEET_ROWS: u32 = 10;
 pub const THUMB_W: u32 = 160;
 pub const THUMB_H: u32 = 90;
 
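+/// Transcode handling requested by the uploader, carried with the file through
+/// metadata reading and ingestion:
+/// - `Auto`: decide from media type and bitrate (see auto_transcoding_need() below),
+/// - `Force`: always transcode to the target bitrate,
+/// - `Skip`: never transcode; the original file is served as-is.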
+#[derive(Debug, Clone, Copy)]
+pub enum TranscodePreference {
+    Auto,
+    Force,
+    Skip,
+}
+
 #[derive (Clone, Debug)]
 pub struct IncomingFile {
     pub file_path: PathBuf,
     pub user_id: String,
-    pub cookies: HashMap<String, String> // Cookies from client, if this was an HTTP upload
+    pub cookies: HashMap<String, String>, // Cookies from client, if this was an HTTP upload
+    pub transcode_preference: TranscodePreference,
 }
 
 #[derive(Debug, Clone)]
@@ -117,6 +126,7 @@ fn ingest_media_file(
     md: &metadata_reader::Metadata,
     data_dir: &Path,
     media_files_dir: &Path,
+    storage: &StorageBackend,
     target_bitrate: u32,
     db: &DB,
     user_msg_tx: &crossbeam_channel::Sender<UserMessage>,
@@ -185,6 +195,8 @@ fn ingest_media_file(
     std::fs::rename(&src, &src_moved)?;
     if !src_moved.exists() { bail!("Failed to move {:?} file to orig/", src_moved) }
 
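+    // Push the original to object storage right away (no-op for LocalFs). The
+    // object key mirrors the path relative to the media root (see
+    // storage::key_for_path), so the local tree remains the source of truth
+    // and the bucket only holds copies for serving.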
{:?}", msg); - to_md.send(IncomingFile {file_path: msg.file_path.clone(),user_id: msg.user_id, cookies: msg.cookies }).unwrap_or_else(|e| { + to_md.send(IncomingFile { + file_path: msg.file_path.clone(), + user_id: msg.user_id, + cookies: msg.cookies, + transcode_preference: msg.transcode_preference, + }).unwrap_or_else(|e| { tracing::error!("Error sending file to metadata reader: {:?}", e); clean_up_rejected_file(&data_dir, &msg.file_path, None).unwrap_or_else(|e| { tracing::error!("Cleanup of '{:?}' failed: {:?}", &msg.file_path, e); @@ -479,7 +503,7 @@ pub fn run_forever( })) }, Ok(vid) => { - let ing_res = ingest_media_file(&vid, &md, &data_dir, &media_files_dir, target_bitrate, &db, &user_msg_tx, &cmpr_in_tx).map_err(|e| { + let ing_res = ingest_media_file(&vid, &md, &data_dir, &media_files_dir, &storage, target_bitrate, &db, &user_msg_tx, &cmpr_in_tx).map_err(|e| { DetailedMsg { msg: "Media ingestion failed".into(), details: e.to_string(), @@ -564,6 +588,7 @@ pub fn run_forever( { let videos_dir = media_files_dir.clone(); let vid = logs.media_file_id.clone(); + let storage = storage.clone(); tracing::info!(media_file=%vid, log_info=%logs.stdout, "Transcoding completed"); @@ -592,6 +617,7 @@ pub fn run_forever( tracing::error!(details=%e, "Failed to create symlink {:?} -> {:?}", symlink_path, video_dst); return false; } + storage.upload_if_exists(&symlink_path); if let Err(e) = db.conn().and_then(|mut conn| models::MediaFile::set_recompressed(&mut conn, &vid)) { tracing::error!(details=%e, "Error marking media file as recompressed in DB"); @@ -627,6 +653,7 @@ pub fn run_forever( { let videos_dir = media_files_dir.clone(); let vid = logs.media_file_id.clone(); + let storage = storage.clone(); let mut db_errors = false; // Thumbnails (and/or sheet) done? @@ -652,6 +679,12 @@ pub fn run_forever( } } } + if let Some(dir) = thumb_dir { + storage.upload_if_exists(&dir.join("thumb.webp")); + if let Some((sheet_cols, sheet_rows)) = thumb_sheet_dims { + storage.upload_if_exists(&dir.join(format!("sheet-{}x{}.webp", sheet_cols, sheet_rows))); + } + } // Send MediaFileUpdated message to user user_msg_tx.send(UserMessage { From 7866eac7e6f2233fa30a615b6aeefd1d0ded97be Mon Sep 17 00:00:00 2001 From: Mike Solar Date: Fri, 28 Nov 2025 18:04:15 +0800 Subject: [PATCH 02/10] Fix compilation errors. --- .gitignore | 3 +- server/Cargo.lock | 5143 -------------------------------- server/Cargo.toml | 1 + server/src/api_server/tests.rs | 2 +- server/src/storage.rs | 25 +- 5 files changed, 20 insertions(+), 5154 deletions(-) delete mode 100644 server/Cargo.lock diff --git a/.gitignore b/.gitignore index 1f87460a..d4c9eb82 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ dist_deb/ .DS_Store .claude -.idea \ No newline at end of file +.idea +server/Cargo.lock \ No newline at end of file diff --git a/server/Cargo.lock b/server/Cargo.lock deleted file mode 100644 index ab067f21..00000000 --- a/server/Cargo.lock +++ /dev/null @@ -1,5143 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 4 - -[[package]] -name = "Inflector" -version = "0.11.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3" -dependencies = [ - "lazy_static", - "regex", -] - -[[package]] -name = "adler2" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" - -[[package]] -name = "ahash" -version = "0.7.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" -dependencies = [ - "getrandom 0.2.16", - "once_cell", - "version_check", -] - -[[package]] -name = "aho-corasick" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" -dependencies = [ - "memchr", -] - -[[package]] -name = "allocator-api2" -version = "0.2.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" - -[[package]] -name = "android_system_properties" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" -dependencies = [ - "libc", -] - -[[package]] -name = "anstream" -version = "0.6.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" -dependencies = [ - "anstyle", - "anstyle-parse", - "anstyle-query", - "anstyle-wincon", - "colorchoice", - "is_terminal_polyfill", - "utf8parse", -] - -[[package]] -name = "anstyle" -version = "1.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" - -[[package]] -name = "anstyle-parse" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" -dependencies = [ - "utf8parse", -] - -[[package]] -name = "anstyle-query" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" -dependencies = [ - "windows-sys 0.59.0", -] - -[[package]] -name = "anstyle-wincon" -version = "3.0.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6680de5231bd6ee4c6191b8a1325daa282b415391ec9d3a37bd34f2060dc73fa" -dependencies = [ - "anstyle", - "once_cell_polyfill", - "windows-sys 0.59.0", -] - -[[package]] -name = "anyhow" -version = "1.0.100" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" - -[[package]] -name = "arrayvec" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" - -[[package]] -name = "aspasia" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b7c4e7dfd1cb7a872dfa20b445664984f373780b035de6af5a373df3ae14b55" -dependencies = [ - "buildstructor", - "chardetng", - "encoding_rs", - "encoding_rs_io", - "nom", -] - -[[package]] -name = "assert_fs" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "a652f6cb1f516886fcfee5e7a5c078b9ade62cfcb889524efe5a64d682dd27a9" -dependencies = [ - "anstyle", - "doc-comment", - "globwalk", - "predicates", - "predicates-core", - "predicates-tree", - "tempfile", -] - -[[package]] -name = "async-channel" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" -dependencies = [ - "concurrent-queue", - "event-listener 2.5.3", - "futures-core", -] - -[[package]] -name = "async-channel" -version = "2.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" -dependencies = [ - "concurrent-queue", - "event-listener-strategy", - "futures-core", - "pin-project-lite", -] - -[[package]] -name = "async-executor" -version = "1.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb812ffb58524bdd10860d7d974e2f01cc0950c2438a74ee5ec2e2280c6c4ffa" -dependencies = [ - "async-task", - "concurrent-queue", - "fastrand", - "futures-lite", - "pin-project-lite", - "slab", -] - -[[package]] -name = "async-global-executor" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" -dependencies = [ - "async-channel 2.3.1", - "async-executor", - "async-io", - "async-lock", - "blocking", - "futures-lite", - "once_cell", -] - -[[package]] -name = "async-io" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1237c0ae75a0f3765f58910ff9cdd0a12eeb39ab2f4c7de23262f337f0aacbb3" -dependencies = [ - "async-lock", - "cfg-if", - "concurrent-queue", - "futures-io", - "futures-lite", - "parking", - "polling", - "rustix", - "slab", - "tracing", - "windows-sys 0.59.0", -] - -[[package]] -name = "async-lock" -version = "3.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" -dependencies = [ - "event-listener 5.4.0", - "event-listener-strategy", - "pin-project-lite", -] - -[[package]] -name = "async-std" -version = "1.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "730294c1c08c2e0f85759590518f6333f0d5a0a766a27d519c1b244c3dfd8a24" -dependencies = [ - "async-channel 1.9.0", - "async-global-executor", - "async-io", - "async-lock", - "crossbeam-utils", - "futures-channel", - "futures-core", - "futures-io", - "futures-lite", - "gloo-timers", - "kv-log-macro", - "log", - "memchr", - "once_cell", - "pin-project-lite", - "pin-utils", - "slab", - "wasm-bindgen-futures", -] - -[[package]] -name = "async-stream" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" -dependencies = [ - "async-stream-impl", - "futures-core", - "pin-project-lite", -] - -[[package]] -name = "async-stream-impl" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", -] - -[[package]] -name = "async-task" -version = "4.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" - -[[package]] -name = "async-trait" -version = "0.1.88" -source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", -] - -[[package]] -name = "atomic-waker" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" - -[[package]] -name = "autocfg" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" - -[[package]] -name = "aws-config" -version = "1.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c478f5b10ce55c9a33f87ca3404ca92768b144fc1bfdede7c0121214a8283a25" -dependencies = [ - "aws-credential-types", - "aws-runtime", - "aws-sdk-sts", - "aws-smithy-async", - "aws-smithy-http", - "aws-smithy-json", - "aws-smithy-runtime", - "aws-smithy-runtime-api", - "aws-smithy-types", - "aws-types", - "bytes", - "fastrand", - "http 1.3.1", - "time", - "tokio", - "tracing", - "url", -] - -[[package]] -name = "aws-credential-types" -version = "1.2.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b01c9521fa01558f750d183c8c68c81b0155b9d193a4ba7f84c36bd1b6d04a06" -dependencies = [ - "aws-smithy-async", - "aws-smithy-runtime-api", - "aws-smithy-types", - "zeroize", -] - -[[package]] -name = "aws-lc-rs" -version = "1.15.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b5ce75405893cd713f9ab8e297d8e438f624dde7d706108285f7e17a25a180f" -dependencies = [ - "aws-lc-sys", - "zeroize", -] - -[[package]] -name = "aws-lc-sys" -version = "0.34.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "179c3777a8b5e70e90ea426114ffc565b2c1a9f82f6c4a0c5a34aa6ef5e781b6" -dependencies = [ - "cc", - "cmake", - "dunce", - "fs_extra", -] - -[[package]] -name = "aws-runtime" -version = "1.5.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c034a1bc1d70e16e7f4e4caf7e9f7693e4c9c24cd91cf17c2a0b21abaebc7c8b" -dependencies = [ - "aws-credential-types", - "aws-sigv4", - "aws-smithy-async", - "aws-smithy-eventstream", - "aws-smithy-http", - "aws-smithy-runtime", - "aws-smithy-runtime-api", - "aws-smithy-types", - "aws-types", - "bytes", - "fastrand", - "http 0.2.12", - "http-body 0.4.6", - "percent-encoding", - "pin-project-lite", - "tracing", - "uuid", -] - -[[package]] -name = "aws-sdk-s3" -version = "1.103.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af040a86ae4378b7ed2f62c83b36be1848709bbbf5757ec850d0e08596a26be9" -dependencies = [ - "aws-credential-types", - "aws-runtime", - "aws-sigv4", - "aws-smithy-async", - "aws-smithy-checksums", - "aws-smithy-eventstream", - "aws-smithy-http", - "aws-smithy-json", - "aws-smithy-runtime", - "aws-smithy-runtime-api", - "aws-smithy-types", - "aws-smithy-xml", - "aws-types", - "bytes", - "fastrand", - "hex", - "hmac", - "http 0.2.12", - "http 1.3.1", - "http-body 0.4.6", - "lru", - "percent-encoding", - "regex-lite", - "sha2", - "tracing", - "url", -] - -[[package]] -name = "aws-sdk-sts" -version = "1.84.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91abcdbfb48c38a0419eb75e0eac772a4783a96750392680e4f3c25a8a0535b9" -dependencies = [ - "aws-credential-types", - "aws-runtime", - "aws-smithy-async", - "aws-smithy-http", - "aws-smithy-json", - "aws-smithy-query", - 
"aws-smithy-runtime", - "aws-smithy-runtime-api", - "aws-smithy-types", - "aws-smithy-xml", - "aws-types", - "fastrand", - "http 0.2.12", - "regex-lite", - "tracing", -] - -[[package]] -name = "aws-sigv4" -version = "1.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c35452ec3f001e1f2f6db107b6373f1f48f05ec63ba2c5c9fa91f07dad32af11" -dependencies = [ - "aws-credential-types", - "aws-smithy-eventstream", - "aws-smithy-http", - "aws-smithy-runtime-api", - "aws-smithy-types", - "bytes", - "form_urlencoded", - "hex", - "hmac", - "http 0.2.12", - "http 1.3.1", - "percent-encoding", - "sha2", - "time", - "tracing", -] - -[[package]] -name = "aws-smithy-async" -version = "1.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "127fcfad33b7dfc531141fda7e1c402ac65f88aca5511a4d31e2e3d2cd01ce9c" -dependencies = [ - "futures-util", - "pin-project-lite", - "tokio", -] - -[[package]] -name = "aws-smithy-checksums" -version = "0.63.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb9a26b2831e728924ec0089e92697a78a2f9cdcf90d81e8cfcc6a6c85080369" -dependencies = [ - "aws-smithy-http", - "aws-smithy-types", - "bytes", - "crc-fast", - "hex", - "http 0.2.12", - "http-body 0.4.6", - "md-5", - "pin-project-lite", - "sha1", - "sha2", - "tracing", -] - -[[package]] -name = "aws-smithy-eventstream" -version = "0.60.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e29a304f8319781a39808847efb39561351b1bb76e933da7aa90232673638658" -dependencies = [ - "aws-smithy-types", - "bytes", - "crc32fast", -] - -[[package]] -name = "aws-smithy-http" -version = "0.62.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "445d5d720c99eed0b4aa674ed00d835d9b1427dd73e04adaf2f94c6b2d6f9fca" -dependencies = [ - "aws-smithy-eventstream", - "aws-smithy-runtime-api", - "aws-smithy-types", - "bytes", - "bytes-utils", - "futures-core", - "futures-util", - "http 0.2.12", - "http 1.3.1", - "http-body 0.4.6", - "percent-encoding", - "pin-project-lite", - "pin-utils", - "tracing", -] - -[[package]] -name = "aws-smithy-http-client" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f108f1ca850f3feef3009bdcc977be201bca9a91058864d9de0684e64514bee0" -dependencies = [ - "aws-smithy-async", - "aws-smithy-runtime-api", - "aws-smithy-types", - "h2 0.3.27", - "h2 0.4.10", - "http 0.2.12", - "http 1.3.1", - "http-body 0.4.6", - "hyper 0.14.32", - "hyper 1.6.0", - "hyper-rustls 0.24.2", - "hyper-rustls 0.27.6", - "hyper-util", - "pin-project-lite", - "rustls 0.21.12", - "rustls 0.23.27", - "rustls-native-certs 0.8.2", - "rustls-pki-types", - "tokio", - "tower", - "tracing", -] - -[[package]] -name = "aws-smithy-json" -version = "0.61.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2db31f727935fc63c6eeae8b37b438847639ec330a9161ece694efba257e0c54" -dependencies = [ - "aws-smithy-types", -] - -[[package]] -name = "aws-smithy-observability" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d1881b1ea6d313f9890710d65c158bdab6fb08c91ea825f74c1c8c357baf4cc" -dependencies = [ - "aws-smithy-runtime-api", -] - -[[package]] -name = "aws-smithy-query" -version = "0.60.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d28a63441360c477465f80c7abac3b9c4d075ca638f982e605b7dc2a2c7156c9" -dependencies = [ - "aws-smithy-types", - "urlencoding", -] - 
-[[package]] -name = "aws-smithy-runtime" -version = "1.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e107ce0783019dbff59b3a244aa0c114e4a8c9d93498af9162608cd5474e796" -dependencies = [ - "aws-smithy-async", - "aws-smithy-http", - "aws-smithy-http-client", - "aws-smithy-observability", - "aws-smithy-runtime-api", - "aws-smithy-types", - "bytes", - "fastrand", - "http 0.2.12", - "http 1.3.1", - "http-body 0.4.6", - "http-body 1.0.1", - "pin-project-lite", - "pin-utils", - "tokio", - "tracing", -] - -[[package]] -name = "aws-smithy-runtime-api" -version = "1.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec7204f9fd94749a7c53b26da1b961b4ac36bf070ef1e0b94bb09f79d4f6c193" -dependencies = [ - "aws-smithy-async", - "aws-smithy-types", - "bytes", - "http 0.2.12", - "http 1.3.1", - "pin-project-lite", - "tokio", - "tracing", - "zeroize", -] - -[[package]] -name = "aws-smithy-types" -version = "1.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25f535879a207fce0db74b679cfc3e91a3159c8144d717d55f5832aea9eef46e" -dependencies = [ - "base64-simd", - "bytes", - "bytes-utils", - "http 0.2.12", - "http 1.3.1", - "http-body 0.4.6", - "http-body 1.0.1", - "http-body-util", - "itoa", - "num-integer", - "pin-project-lite", - "pin-utils", - "ryu", - "serde", - "time", -] - -[[package]] -name = "aws-smithy-xml" -version = "0.60.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eab77cdd036b11056d2a30a7af7b775789fb024bf216acc13884c6c97752ae56" -dependencies = [ - "xmlparser", -] - -[[package]] -name = "aws-types" -version = "1.3.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d79fb68e3d7fe5d4833ea34dc87d2e97d26d3086cb3da660bb6b1f76d98680b6" -dependencies = [ - "aws-credential-types", - "aws-smithy-async", - "aws-smithy-runtime-api", - "aws-smithy-types", - "rustc_version", - "tracing", -] - -[[package]] -name = "axum" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "021e862c184ae977658b36c4500f7feac3221ca5da43e3f25bd04ab6c79a29b5" -dependencies = [ - "axum-core", - "bytes", - "form_urlencoded", - "futures-util", - "http 1.3.1", - "http-body 1.0.1", - "http-body-util", - "hyper 1.6.0", - "hyper-util", - "itoa", - "matchit", - "memchr", - "mime", - "percent-encoding", - "pin-project-lite", - "rustversion", - "serde", - "serde_json", - "serde_path_to_error", - "serde_urlencoded", - "sync_wrapper", - "tokio", - "tower", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "axum-core" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68464cd0412f486726fb3373129ef5d2993f90c34bc2bc1c1e9943b2f4fc7ca6" -dependencies = [ - "bytes", - "futures-core", - "http 1.3.1", - "http-body 1.0.1", - "http-body-util", - "mime", - "pin-project-lite", - "rustversion", - "sync_wrapper", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "base64" -version = "0.21.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" - -[[package]] -name = "base64" -version = "0.22.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" - -[[package]] -name = "base64-simd" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "339abbe78e73178762e23bea9dfd08e697eb3f3301cd4be981c0f78ba5859195" -dependencies = [ - "outref", - "vsimd", -] - -[[package]] -name = "bitflags" -version = "2.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" - -[[package]] -name = "bitvec" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" -dependencies = [ - "funty", - "radium", - "tap", - "wyz", -] - -[[package]] -name = "block-buffer" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" -dependencies = [ - "generic-array", -] - -[[package]] -name = "blocking" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea" -dependencies = [ - "async-channel 2.3.1", - "async-task", - "futures-io", - "futures-lite", - "piper", -] - -[[package]] -name = "borsh" -version = "1.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad8646f98db542e39fc66e68a20b2144f6a732636df7c2354e74645faaa433ce" -dependencies = [ - "borsh-derive", - "cfg_aliases", -] - -[[package]] -name = "borsh-derive" -version = "1.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdd1d3c0c2f5833f22386f252fe8ed005c7f59fdcddeef025c01b4c3b9fd9ac3" -dependencies = [ - "once_cell", - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.101", -] - -[[package]] -name = "bstr" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "234113d19d0d7d613b40e86fb654acf958910802bcceab913a4f9e7cda03b1a4" -dependencies = [ - "memchr", - "serde", -] - -[[package]] -name = "buildstructor" -version = "0.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3907aac66c65520545ae3cb3c195306e20d5ed5c90bfbb992e061cf12a104d0" -dependencies = [ - "lazy_static", - "proc-macro2", - "quote", - "str_inflector", - "syn 2.0.101", - "thiserror 1.0.69", - "try_match", -] - -[[package]] -name = "bumpalo" -version = "3.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" - -[[package]] -name = "bytecheck" -version = "0.6.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23cdc57ce23ac53c931e88a43d06d070a6fd142f2617be5855eb75efc9beb1c2" -dependencies = [ - "bytecheck_derive", - "ptr_meta", - "simdutf8", -] - -[[package]] -name = "bytecheck_derive" -version = "0.6.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3db406d29fbcd95542e92559bed4d8ad92636d1ca8b3b72ede10b4bcc010e659" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "bytes" -version = "1.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" - -[[package]] -name = "bytes-utils" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dafe3a8757b027e2be6e4e5601ed563c55989fcf1546e933c66c8eb3a058d35" -dependencies = [ - "bytes", - "either", -] - -[[package]] -name = "cc" -version = "1.2.48" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c481bdbf0ed3b892f6f806287d72acd515b352a4ec27a208489b8c1bc839633a" -dependencies = [ - "find-msvc-tools", - "jobserver", - "libc", - "shlex", -] - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "cfg_aliases" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" - -[[package]] -name = "chardetng" -version = "0.1.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14b8f0b65b7b08ae3c8187e8d77174de20cb6777864c6b832d8ad365999cf1ea" -dependencies = [ - "cfg-if", - "encoding_rs", - "memchr", -] - -[[package]] -name = "chrono" -version = "0.4.42" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" -dependencies = [ - "iana-time-zone", - "js-sys", - "num-traits", - "serde", - "wasm-bindgen", - "windows-link 0.2.1", -] - -[[package]] -name = "clap" -version = "4.5.51" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c26d721170e0295f191a69bd9a1f93efcdb0aff38684b61ab5750468972e5f5" -dependencies = [ - "clap_builder", - "clap_derive", -] - -[[package]] -name = "clap_builder" -version = "4.5.51" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75835f0c7bf681bfd05abe44e965760fea999a5286c6eb2d59883634fd02011a" -dependencies = [ - "anstream", - "anstyle", - "clap_lex", - "strsim", - "terminal_size", -] - -[[package]] -name = "clap_derive" -version = "4.5.49" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "syn 2.0.101", -] - -[[package]] -name = "clap_lex" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" - -[[package]] -name = "clapshot-server" -version = "0.9.0" -dependencies = [ - "Inflector", - "anyhow", - "aspasia", - "assert_fs", - "async-std", - "aws-config", - "aws-sdk-s3", - "aws-types", - "base64 0.22.1", - "bytes", - "chrono", - "clap", - "crossbeam-channel", - "data-url", - "diesel", - "diesel_migrations", - "file-owner", - "flate2", - "futures", - "futures-util", - "hex", - "http-body-util", - "hyper-util", - "indoc", - "lib-clapshot-grpc", - "libc", - "log", - "mime", - "mpart-async", - "num_cpus", - "parking_lot", - "path-absolutize", - "pbjson", - "pbjson-build", - "pbjson-types", - "portpicker", - "prost", - "r2d2", - "rand 0.9.1", - "regex", - "reqwest", - "rust_decimal", - "semver", - "serde", - "serde_json", - "serial_test", - "sha2", - "signal-hook", - "tar", - "tempfile", - "thiserror 2.0.17", - "threadpool", - "time", - "timeago", - "tokio", - "tokio-stream", - "tokio-test", - "tokio-tungstenite 0.28.0", - "tonic", - "tonic-prost", - "tonic-prost-build", - "tonic-reflection", - "tower", - "tracing", - "tracing-appender", - "tracing-subscriber", - "tracing-test", - "unix-named-pipe", - "url", - "urlencoding", - "uuid", - "warp", - "whoami", -] - -[[package]] -name = "cmake" -version = "0.1.54" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e7caa3f9de89ddbe2c607f4101924c5abec803763ae9534e4f4d7d8f84aa81f0" -dependencies = [ - "cc", -] - -[[package]] -name = "colorchoice" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" - -[[package]] -name = "concurrent-queue" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "core-foundation" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "core-foundation" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "core-foundation-sys" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" - -[[package]] -name = "cpufeatures" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" -dependencies = [ - "libc", -] - -[[package]] -name = "crc" -version = "3.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d" -dependencies = [ - "crc-catalog", -] - -[[package]] -name = "crc-catalog" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" - -[[package]] -name = "crc-fast" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bf62af4cc77d8fe1c22dde4e721d87f2f54056139d8c412e1366b740305f56f" -dependencies = [ - "crc", - "digest", - "libc", - "rand 0.9.1", - "regex", -] - -[[package]] -name = "crc32fast" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "crossbeam-channel" -version = "0.5.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-deque" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" -dependencies = [ - "crossbeam-epoch", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.9.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" - -[[package]] -name = "crypto-common" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" -dependencies = [ - "generic-array", - "typenum", -] - -[[package]] -name = "darling" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0" -dependencies = [ - "darling_core", - "darling_macro", -] - -[[package]] -name = "darling_core" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim", - "syn 2.0.101", -] - -[[package]] -name = "darling_macro" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" -dependencies = [ - "darling_core", - "quote", - "syn 2.0.101", -] - -[[package]] -name = "data-encoding" -version = "2.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476" - -[[package]] -name = "data-url" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c297a1c74b71ae29df00c3e22dd9534821d60eb9af5a0192823fa2acea70c2a" - -[[package]] -name = "deranged" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587" -dependencies = [ - "powerfmt", -] - -[[package]] -name = "diesel" -version = "2.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e7624a3bb9fffd82fff016be9a7f163d20e5a89eb8d28f9daaa6b30fff37500" -dependencies = [ - "chrono", - "diesel_derives", - "downcast-rs", - "libsqlite3-sys", - "r2d2", - "sqlite-wasm-rs", - "time", -] - -[[package]] -name = "diesel_derives" -version = "2.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9daac6489a36e42570da165a10c424f3edcefdff70c5fd55e1847c23f3dd7562" -dependencies = [ - "diesel_table_macro_syntax", - "dsl_auto_type", - "proc-macro2", - "quote", - "syn 2.0.101", -] - -[[package]] -name = "diesel_migrations" -version = "2.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee060f709c3e3b1cadd83fcd0f61711f7a8cf493348f758d3a1c1147d70b3c97" -dependencies = [ - "diesel", - "migrations_internals", - "migrations_macros", -] - -[[package]] -name = "diesel_table_macro_syntax" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe2444076b48641147115697648dc743c2c00b61adade0f01ce67133c7babe8c" -dependencies = [ - "syn 2.0.101", -] - -[[package]] -name = "difflib" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" - -[[package]] -name = "digest" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" -dependencies = [ - "block-buffer", - "crypto-common", - "subtle", -] - -[[package]] -name = "displaydoc" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", -] - -[[package]] -name = "doc-comment" 
-version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" - -[[package]] -name = "downcast-rs" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "117240f60069e65410b3ae1bb213295bd828f707b5bec6596a1afc8793ce0cbc" - -[[package]] -name = "dsl_auto_type" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd122633e4bef06db27737f21d3738fb89c8f6d5360d6d9d7635dda142a7757e" -dependencies = [ - "darling", - "either", - "heck", - "proc-macro2", - "quote", - "syn 2.0.101", -] - -[[package]] -name = "dunce" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" - -[[package]] -name = "either" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" - -[[package]] -name = "encoding_rs" -version = "0.8.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "encoding_rs_io" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cc3c5651fb62ab8aa3103998dade57efdd028544bd300516baa31840c252a83" -dependencies = [ - "encoding_rs", -] - -[[package]] -name = "equivalent" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" - -[[package]] -name = "errno" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1" -dependencies = [ - "errno-dragonfly", - "libc", - "winapi", -] - -[[package]] -name = "errno" -version = "0.3.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cea14ef9355e3beab063703aa9dab15afd25f0667c341310c1e5274bb1d0da18" -dependencies = [ - "libc", - "windows-sys 0.59.0", -] - -[[package]] -name = "errno-dragonfly" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" -dependencies = [ - "cc", - "libc", -] - -[[package]] -name = "event-listener" -version = "2.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" - -[[package]] -name = "event-listener" -version = "5.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3492acde4c3fc54c845eaab3eed8bd00c7a7d881f78bfc801e43a93dec1331ae" -dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite", -] - -[[package]] -name = "event-listener-strategy" -version = "0.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" -dependencies = [ - "event-listener 5.4.0", - "pin-project-lite", -] - -[[package]] -name = "fastrand" -version = "2.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" - -[[package]] -name = "file-owner" -version = "0.1.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "36f03ecafcffaf8add486d5424ffd60a25690f9da6a026692bc6c657379fc60c" -dependencies = [ - "nix", -] - -[[package]] -name = "filetime" -version = "0.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" -dependencies = [ - "cfg-if", - "libc", - "libredox", - "windows-sys 0.59.0", -] - -[[package]] -name = "find-msvc-tools" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844" - -[[package]] -name = "fixedbitset" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" - -[[package]] -name = "flate2" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece" -dependencies = [ - "crc32fast", - "miniz_oxide", -] - -[[package]] -name = "fnv" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" - -[[package]] -name = "foldhash" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" - -[[package]] -name = "form_urlencoded" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" -dependencies = [ - "percent-encoding", -] - -[[package]] -name = "fs_extra" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" - -[[package]] -name = "funty" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" - -[[package]] -name = "futures" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" -dependencies = [ - "futures-channel", - "futures-core", - "futures-executor", - "futures-io", - "futures-sink", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-channel" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" -dependencies = [ - "futures-core", - "futures-sink", -] - -[[package]] -name = "futures-core" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" - -[[package]] -name = "futures-executor" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" -dependencies = [ - "futures-core", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-io" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" - -[[package]] -name = "futures-lite" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "f5edaec856126859abb19ed65f39e90fea3a9574b9707f13539acf4abf7eb532" -dependencies = [ - "fastrand", - "futures-core", - "futures-io", - "parking", - "pin-project-lite", -] - -[[package]] -name = "futures-macro" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", -] - -[[package]] -name = "futures-sink" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" - -[[package]] -name = "futures-task" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" - -[[package]] -name = "futures-util" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" -dependencies = [ - "futures-channel", - "futures-core", - "futures-io", - "futures-macro", - "futures-sink", - "futures-task", - "memchr", - "pin-project-lite", - "pin-utils", - "slab", -] - -[[package]] -name = "generic-array" -version = "0.14.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" -dependencies = [ - "typenum", - "version_check", -] - -[[package]] -name = "getrandom" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "wasi 0.11.0+wasi-snapshot-preview1", - "wasm-bindgen", -] - -[[package]] -name = "getrandom" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "r-efi", - "wasi 0.14.2+wasi-0.2.4", - "wasm-bindgen", -] - -[[package]] -name = "globset" -version = "0.4.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54a1028dfc5f5df5da8a56a73e6c153c9a9708ec57232470703592a3f18e49f5" -dependencies = [ - "aho-corasick", - "bstr", - "log", - "regex-automata", - "regex-syntax", -] - -[[package]] -name = "globwalk" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf760ebf69878d9fd8f110c89703d90ce35095324d1f1edcb595c63945ee757" -dependencies = [ - "bitflags", - "ignore", - "walkdir", -] - -[[package]] -name = "gloo-timers" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994" -dependencies = [ - "futures-channel", - "futures-core", - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "h2" -version = "0.3.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d" -dependencies = [ - "bytes", - "fnv", - "futures-core", - "futures-sink", - "futures-util", - "http 0.2.12", - "indexmap", - "slab", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "h2" -version = "0.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9421a676d1b147b16b82c9225157dc629087ef8ec4d5e2960f9437a90dac0a5" -dependencies = 
[
- "atomic-waker",
- "bytes",
- "fnv",
- "futures-core",
- "futures-sink",
- "http 1.3.1",
- "indexmap",
- "slab",
- "tokio",
- "tokio-util",
- "tracing",
-]
-
-[[package]]
-name = "hashbrown"
-version = "0.12.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
-dependencies = [
- "ahash",
-]
-
-[[package]]
-name = "hashbrown"
-version = "0.15.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "84b26c544d002229e640969970a2e74021aadf6e2f96372b9c58eff97de08eb3"
-dependencies = [
- "allocator-api2",
- "equivalent",
- "foldhash",
-]
-
-[[package]]
-name = "headers"
-version = "0.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b3314d5adb5d94bcdf56771f2e50dbbc80bb4bdf88967526706205ac9eff24eb"
-dependencies = [
- "base64 0.22.1",
- "bytes",
- "headers-core",
- "http 1.3.1",
- "httpdate",
- "mime",
- "sha1",
-]
-
-[[package]]
-name = "headers-core"
-version = "0.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4"
-dependencies = [
- "http 1.3.1",
-]
-
-[[package]]
-name = "heck"
-version = "0.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
-
-[[package]]
-name = "hermit-abi"
-version = "0.3.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024"
-
-[[package]]
-name = "hermit-abi"
-version = "0.5.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f154ce46856750ed433c8649605bf7ed2de3bc35fd9d2a9f30cddd873c80cb08"
-
-[[package]]
-name = "hex"
-version = "0.4.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
-
-[[package]]
-name = "hmac"
-version = "0.12.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e"
-dependencies = [
- "digest",
-]
-
-[[package]]
-name = "http"
-version = "0.2.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1"
-dependencies = [
- "bytes",
- "fnv",
- "itoa",
-]
-
-[[package]]
-name = "http"
-version = "1.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565"
-dependencies = [
- "bytes",
- "fnv",
- "itoa",
-]
-
-[[package]]
-name = "http-body"
-version = "0.4.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2"
-dependencies = [
- "bytes",
- "http 0.2.12",
- "pin-project-lite",
-]
-
-[[package]]
-name = "http-body"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184"
-dependencies = [
- "bytes",
- "http 1.3.1",
-]
-
-[[package]]
-name = "http-body-util"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a"
-dependencies = [
- "bytes",
- "futures-core",
- "http 1.3.1",
- "http-body 1.0.1",
- "pin-project-lite",
-]
-
-[[package]]
-name = "httparse"
-version = "1.10.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
-
-[[package]]
-name = "httpdate"
-version = "1.0.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
-
-[[package]]
-name = "hyper"
-version = "0.14.32"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7"
-dependencies = [
- "bytes",
- "futures-channel",
- "futures-core",
- "futures-util",
- "h2 0.3.27",
- "http 0.2.12",
- "http-body 0.4.6",
- "httparse",
- "httpdate",
- "itoa",
- "pin-project-lite",
- "socket2 0.5.10",
- "tokio",
- "tower-service",
- "tracing",
- "want",
-]
-
-[[package]]
-name = "hyper"
-version = "1.6.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80"
-dependencies = [
- "bytes",
- "futures-channel",
- "futures-util",
- "h2 0.4.10",
- "http 1.3.1",
- "http-body 1.0.1",
- "httparse",
- "httpdate",
- "itoa",
- "pin-project-lite",
- "smallvec",
- "tokio",
- "want",
-]
-
-[[package]]
-name = "hyper-rustls"
-version = "0.24.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590"
-dependencies = [
- "futures-util",
- "http 0.2.12",
- "hyper 0.14.32",
- "log",
- "rustls 0.21.12",
- "rustls-native-certs 0.6.3",
- "tokio",
- "tokio-rustls 0.24.1",
-]
-
-[[package]]
-name = "hyper-rustls"
-version = "0.27.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "03a01595e11bdcec50946522c32dde3fc6914743000a68b93000965f2f02406d"
-dependencies = [
- "http 1.3.1",
- "hyper 1.6.0",
- "hyper-util",
- "rustls 0.23.27",
- "rustls-native-certs 0.8.2",
- "rustls-pki-types",
- "tokio",
- "tokio-rustls 0.26.2",
- "tower-service",
- "webpki-roots 1.0.0",
-]
-
-[[package]]
-name = "hyper-timeout"
-version = "0.5.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0"
-dependencies = [
- "hyper 1.6.0",
- "hyper-util",
- "pin-project-lite",
- "tokio",
- "tower-service",
-]
-
-[[package]]
-name = "hyper-util"
-version = "0.1.13"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b1c293b6b3d21eca78250dc7dbebd6b9210ec5530e038cbfe0661b5c47ab06e8"
-dependencies = [
- "bytes",
- "futures-channel",
- "futures-core",
- "futures-util",
- "http 1.3.1",
- "http-body 1.0.1",
- "hyper 1.6.0",
- "libc",
- "pin-project-lite",
- "socket2 0.5.10",
- "tokio",
- "tower-service",
- "tracing",
-]
-
-[[package]]
-name = "iana-time-zone"
-version = "0.1.63"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8"
-dependencies = [
- "android_system_properties",
- "core-foundation-sys",
- "iana-time-zone-haiku",
- "js-sys",
- "log",
- "wasm-bindgen",
- "windows-core",
-]
-
-[[package]]
-name = "iana-time-zone-haiku"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f"
-dependencies = [
- "cc",
-]
-
-[[package]]
-name = "icu_collections"
-version = "2.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47"
-dependencies = [
- "displaydoc",
- "potential_utf",
- "yoke",
- "zerofrom",
- "zerovec",
-]
-
-[[package]]
-name = "icu_locale_core"
-version = "2.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a"
-dependencies = [
- "displaydoc",
- "litemap",
- "tinystr",
- "writeable",
- "zerovec",
-]
-
-[[package]]
-name = "icu_normalizer"
-version = "2.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979"
-dependencies = [
- "displaydoc",
- "icu_collections",
- "icu_normalizer_data",
- "icu_properties",
- "icu_provider",
- "smallvec",
- "zerovec",
-]
-
-[[package]]
-name = "icu_normalizer_data"
-version = "2.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3"
-
-[[package]]
-name = "icu_properties"
-version = "2.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b"
-dependencies = [
- "displaydoc",
- "icu_collections",
- "icu_locale_core",
- "icu_properties_data",
- "icu_provider",
- "potential_utf",
- "zerotrie",
- "zerovec",
-]
-
-[[package]]
-name = "icu_properties_data"
-version = "2.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632"
-
-[[package]]
-name = "icu_provider"
-version = "2.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af"
-dependencies = [
- "displaydoc",
- "icu_locale_core",
- "stable_deref_trait",
- "tinystr",
- "writeable",
- "yoke",
- "zerofrom",
- "zerotrie",
- "zerovec",
-]
-
-[[package]]
-name = "ident_case"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
-
-[[package]]
-name = "idna"
-version = "1.0.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e"
-dependencies = [
- "idna_adapter",
- "smallvec",
- "utf8_iter",
-]
-
-[[package]]
-name = "idna_adapter"
-version = "1.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344"
-dependencies = [
- "icu_normalizer",
- "icu_properties",
-]
-
-[[package]]
-name = "ignore"
-version = "0.4.23"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6d89fd380afde86567dfba715db065673989d6253f42b88179abd3eae47bda4b"
-dependencies = [
- "crossbeam-deque",
- "globset",
- "log",
- "memchr",
- "regex-automata",
- "same-file",
- "walkdir",
- "winapi-util",
-]
-
-[[package]]
-name = "indexmap"
-version = "2.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
-dependencies = [
- "equivalent",
- "hashbrown 0.15.3",
-]
-
-[[package]]
-name = "indoc"
-version = "2.0.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd"
-
-[[package]]
-name = "ipnet"
-version = "2.11.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130"
-
-[[package]]
-name = "is_terminal_polyfill"
-version = "1.70.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
-
-[[package]]
-name = "isolang"
-version = "2.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fe50d48c77760c55188549098b9a7f6e37ae980c586a24693d6b01c3b2010c3c"
-dependencies = [
- "phf",
-]
-
-[[package]]
-name = "itertools"
-version = "0.14.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285"
-dependencies = [
- "either",
-]
-
-[[package]]
-name = "itoa"
-version = "1.0.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
-
-[[package]]
-name = "jobserver"
-version = "0.1.34"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
-dependencies = [
- "getrandom 0.3.3",
- "libc",
-]
-
-[[package]]
-name = "js-sys"
-version = "0.3.82"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b011eec8cc36da2aab2d5cff675ec18454fad408585853910a202391cf9f8e65"
-dependencies = [
- "once_cell",
- "wasm-bindgen",
-]
-
-[[package]]
-name = "kv-log-macro"
-version = "1.0.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f"
-dependencies = [
- "log",
-]
-
-[[package]]
-name = "lazy_static"
-version = "1.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
-
-[[package]]
-name = "lib-clapshot-grpc"
-version = "0.9.0"
-dependencies = [
- "anyhow",
- "axum",
- "axum-core",
- "http-body-util",
- "hyper-util",
- "libc",
- "mio",
- "nix",
- "pbjson",
- "pbjson-build",
- "pbjson-types",
- "prost",
- "serde",
- "strip-ansi-escapes",
- "tokio",
- "tokio-stream",
- "tonic",
- "tonic-prost",
- "tonic-prost-build",
- "tonic-reflection",
- "tower",
- "tracing",
- "wait-timeout",
- "warp",
-]
-
-[[package]]
-name = "libc"
-version = "0.2.172"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
-
-[[package]]
-name = "libredox"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
-dependencies = [
- "bitflags",
- "libc",
- "redox_syscall",
-]
-
-[[package]]
-name = "libsqlite3-sys"
-version = "0.33.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "947e6816f7825b2b45027c2c32e7085da9934defa535de4a6a46b10a4d5257fa"
-dependencies = [
- "pkg-config",
- "vcpkg",
-]
-
-[[package]]
-name = "linux-raw-sys"
-version = "0.9.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12"
-
-[[package]]
-name = "litemap"
-version = "0.8.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956"
-
-[[package]]
-name = "lock_api"
-version = "0.4.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17"
-dependencies = [
- "autocfg",
- "scopeguard",
-]
-
-[[package]]
-name = "log"
-version = "0.4.27"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
-dependencies = [
- "value-bag",
-]
-
-[[package]]
-name = "lru"
-version = "0.12.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38"
-dependencies = [
- "hashbrown 0.15.3",
-]
-
-[[package]]
-name = "lru-slab"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154"
-
-[[package]]
-name = "matchers"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9"
-dependencies = [
- "regex-automata",
-]
-
-[[package]]
-name = "matchit"
-version = "0.8.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3"
-
-[[package]]
-name = "md-5"
-version = "0.10.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf"
-dependencies = [
- "cfg-if",
- "digest",
-]
-
-[[package]]
-name = "memchr"
-version = "2.7.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
-
-[[package]]
-name = "migrations_internals"
-version = "2.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "36c791ecdf977c99f45f23280405d7723727470f6689a5e6dbf513ac547ae10d"
-dependencies = [
- "serde",
- "toml",
-]
-
-[[package]]
-name = "migrations_macros"
-version = "2.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "36fc5ac76be324cfd2d3f2cf0fdf5d5d3c4f14ed8aaebadb09e304ba42282703"
-dependencies = [
- "migrations_internals",
- "proc-macro2",
- "quote",
-]
-
-[[package]]
-name = "mime"
-version = "0.3.17"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
-
-[[package]]
-name = "mime_guess"
-version = "2.0.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e"
-dependencies = [
- "mime",
- "unicase",
-]
-
-[[package]]
-name = "minimal-lexical"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
-
-[[package]]
-name = "miniz_oxide"
-version = "0.8.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a"
-dependencies = [
- "adler2",
-]
-
-[[package]]
-name = "mio"
-version = "1.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c"
-dependencies = [
- "libc",
- "log",
- "wasi 0.11.0+wasi-snapshot-preview1",
- "windows-sys 0.59.0",
-]
-
-[[package]]
-name = "mpart-async"
-version = "0.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f9bdeb8c63325c5d4ec66dff19cc42c5416b5bb27b2f555b3b9debdbe2d40d9b"
-dependencies = [
- "bytes",
- "futures-core",
- "futures-util",
- "http 1.3.1",
- "httparse",
- "log",
- "memchr",
- "mime_guess",
- "percent-encoding",
- "pin-project-lite",
- "rand 0.8.5",
- "thiserror 1.0.69",
- "tokio",
- "tokio-util",
-]
-
-[[package]]
-name = "multimap"
-version = "0.10.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1d87ecb2933e8aeadb3e3a02b828fed80a7528047e68b4f424523a0981a3a084"
-
-[[package]]
-name = "nix"
-version = "0.30.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6"
-dependencies = [
- "bitflags",
- "cfg-if",
- "cfg_aliases",
- "libc",
-]
-
-[[package]]
-name = "nom"
-version = "7.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
-dependencies = [
- "memchr",
- "minimal-lexical",
-]
-
-[[package]]
-name = "nu-ansi-term"
-version = "0.50.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399"
-dependencies = [
- "windows-sys 0.52.0",
-]
-
-[[package]]
-name = "num-conv"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
-
-[[package]]
-name = "num-integer"
-version = "0.1.46"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
-dependencies = [
- "num-traits",
-]
-
-[[package]]
-name = "num-traits"
-version = "0.2.19"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
-dependencies = [
- "autocfg",
-]
-
-[[package]]
-name = "num_cpus"
-version = "1.16.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
-dependencies = [
- "hermit-abi 0.3.9",
- "libc",
-]
-
-[[package]]
-name = "num_threads"
-version = "0.1.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9"
-dependencies = [
- "libc",
-]
-
-[[package]]
-name = "once_cell"
-version = "1.21.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
-
-[[package]]
-name = "once_cell_polyfill"
-version = "1.70.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad"
-
-[[package]]
-name = "openssl-probe"
-version = "0.1.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e"
-
-[[package]]
-name = "outref"
-version = "0.5.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a80800c0488c3a21695ea981a54918fbb37abf04f4d0720c453632255e2ff0e"
-
-[[package]]
-name = "parking"
-version = "2.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba"
-
-[[package]]
-name = "parking_lot"
-version = "0.12.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27"
-dependencies = [
- "lock_api",
- "parking_lot_core",
-]
-
-[[package]]
-name = "parking_lot_core"
-version = "0.9.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8"
-dependencies = [
- "cfg-if",
- "libc",
- "redox_syscall",
- "smallvec",
- "windows-targets 0.52.6",
-]
-
-[[package]]
-name = "path-absolutize"
-version = "3.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e4af381fe79fa195b4909485d99f73a80792331df0625188e707854f0b3383f5"
-dependencies = [
- "path-dedot",
-]
-
-[[package]]
-name = "path-dedot"
-version = "3.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "07ba0ad7e047712414213ff67533e6dd477af0a4e1d14fb52343e53d30ea9397"
-dependencies = [
- "once_cell",
-]
-
-[[package]]
-name = "pbjson"
-version = "0.8.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "898bac3fa00d0ba57a4e8289837e965baa2dee8c3749f3b11d45a64b4223d9c3"
-dependencies = [
- "base64 0.22.1",
- "serde",
-]
-
-[[package]]
-name = "pbjson-build"
-version = "0.8.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af22d08a625a2213a78dbb0ffa253318c5c79ce3133d32d296655a7bdfb02095"
-dependencies = [
- "heck",
- "itertools",
- "prost",
- "prost-types",
-]
-
-[[package]]
-name = "pbjson-types"
-version = "0.8.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e748e28374f10a330ee3bb9f29b828c0ac79831a32bab65015ad9b661ead526"
-dependencies = [
- "bytes",
- "chrono",
- "pbjson",
- "pbjson-build",
- "prost",
- "prost-build",
- "serde",
-]
-
-[[package]]
-name = "percent-encoding"
-version = "2.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
-
-[[package]]
-name = "petgraph"
-version = "0.7.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772"
-dependencies = [
- "fixedbitset",
- "indexmap",
-]
-
-[[package]]
-name = "phf"
-version = "0.11.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078"
-dependencies = [
- "phf_shared",
-]
-
-[[package]]
-name = "phf_shared"
-version = "0.11.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5"
-dependencies = [
- "siphasher",
-]
-
-[[package]]
-name = "pin-project"
-version = "1.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a"
-dependencies = [
- "pin-project-internal",
-]
-
-[[package]]
-name = "pin-project-internal"
-version = "1.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.101",
-]
-
-[[package]]
-name = "pin-project-lite"
-version = "0.2.16"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
-
-[[package]]
-name = "pin-utils"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
-
-[[package]]
-name = "piper"
-version = "0.2.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066"
-dependencies = [
- "atomic-waker",
- "fastrand",
- "futures-io",
-]
-
-[[package]]
-name = "pkg-config"
-version = "0.3.32"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
-
-[[package]]
-name = "polling"
-version = "3.8.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b53a684391ad002dd6a596ceb6c74fd004fdce75f4be2e3f615068abbea5fd50"
-dependencies = [
- "cfg-if",
- "concurrent-queue",
- "hermit-abi 0.5.1",
- "pin-project-lite",
- "rustix",
- "tracing",
- "windows-sys 0.59.0",
-]
-
-[[package]]
-name = "portpicker"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "be97d76faf1bfab666e1375477b23fde79eccf0276e9b63b92a39d676a889ba9"
-dependencies = [
- "rand 0.8.5",
-]
-
-[[package]]
-name = "potential_utf"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585"
-dependencies = [
- "zerovec",
-]
-
-[[package]]
-name = "powerfmt"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
-
-[[package]]
-name = "ppv-lite86"
-version = "0.2.21"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9"
-dependencies = [
- "zerocopy",
-]
-
-[[package]]
-name = "predicates"
-version = "3.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a5d19ee57562043d37e82899fade9a22ebab7be9cef5026b07fda9cdd4293573"
-dependencies = [
- "anstyle",
- "difflib",
- "predicates-core",
-]
-
-[[package]]
-name = "predicates-core"
-version = "1.0.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "727e462b119fe9c93fd0eb1429a5f7647394014cf3c04ab2c0350eeb09095ffa"
-
-[[package]]
-name = "predicates-tree"
-version = "1.0.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72dd2d6d381dfb73a193c7fca536518d7caee39fc8503f74e7dc0be0531b425c"
-dependencies = [
- "predicates-core",
- "termtree",
-]
-
-[[package]]
-name = "prettyplease"
-version = "0.2.32"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "664ec5419c51e34154eec046ebcba56312d5a2fc3b09a06da188e1ad21afadf6"
-dependencies = [
- "proc-macro2",
- "syn 2.0.101",
-]
-
-[[package]]
-name = "proc-macro-crate"
-version = "3.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "edce586971a4dfaa28950c6f18ed55e0406c1ab88bbce2c6f6293a7aaba73d35"
-dependencies = [
- "toml_edit",
-]
-
-[[package]]
-name = "proc-macro2"
-version = "1.0.95"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778"
-dependencies = [
- "unicode-ident",
-]
-
-[[package]]
-name = "prost"
-version = "0.14.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7231bd9b3d3d33c86b58adbac74b5ec0ad9f496b19d22801d773636feaa95f3d"
-dependencies = [
- "bytes",
- "prost-derive",
-]
-
-[[package]]
-name = "prost-build"
-version = "0.14.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac6c3320f9abac597dcbc668774ef006702672474aad53c6d596b62e487b40b1"
-dependencies = [
- "heck",
- "itertools",
- "log",
- "multimap",
- "once_cell",
- "petgraph",
- "prettyplease",
- "prost",
- "prost-types",
- "pulldown-cmark",
- "pulldown-cmark-to-cmark",
- "regex",
- "syn 2.0.101",
- "tempfile",
-]
-
-[[package]]
-name = "prost-derive"
-version = "0.14.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9120690fafc389a67ba3803df527d0ec9cbbc9cc45e4cc20b332996dfb672425"
-dependencies = [
- "anyhow",
- "itertools",
- "proc-macro2",
- "quote",
- "syn 2.0.101",
-]
-
-[[package]]
-name = "prost-types"
-version = "0.14.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b9b4db3d6da204ed77bb26ba83b6122a73aeb2e87e25fbf7ad2e84c4ccbf8f72"
-dependencies = [
- "prost",
-]
-
-[[package]]
-name = "ptr_meta"
-version = "0.1.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0738ccf7ea06b608c10564b31debd4f5bc5e197fc8bfe088f68ae5ce81e7a4f1"
-dependencies = [
- "ptr_meta_derive",
-]
-
-[[package]]
-name = "ptr_meta_derive"
-version = "0.1.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 1.0.109",
-]
-
-[[package]]
-name = "pulldown-cmark"
-version = "0.13.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e8bbe1a966bd2f362681a44f6edce3c2310ac21e4d5067a6e7ec396297a6ea0"
-dependencies = [
- "bitflags",
- "memchr",
- "unicase",
-]
-
-[[package]]
-name = "pulldown-cmark-to-cmark"
-version = "21.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8246feae3db61428fd0bb94285c690b460e4517d83152377543ca802357785f1"
-dependencies = [
- "pulldown-cmark",
-]
-
-[[package]]
-name = "quinn"
-version = "0.11.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "626214629cda6781b6dc1d316ba307189c85ba657213ce642d9c77670f8202c8"
-dependencies = [
- "bytes",
- "cfg_aliases",
- "pin-project-lite",
- "quinn-proto",
- "quinn-udp",
- "rustc-hash",
- "rustls 0.23.27",
- "socket2 0.5.10",
- "thiserror 2.0.17",
- "tokio",
- "tracing",
- "web-time",
-]
-
-[[package]]
-name = "quinn-proto"
-version = "0.11.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "49df843a9161c85bb8aae55f101bc0bac8bcafd637a620d9122fd7e0b2f7422e"
-dependencies = [
- "bytes",
- "getrandom 0.3.3",
- "lru-slab",
- "rand 0.9.1",
- "ring",
- "rustc-hash",
- "rustls 0.23.27",
- "rustls-pki-types",
- "slab",
- "thiserror 2.0.17",
- "tinyvec",
- "tracing",
- "web-time",
-]
-
-[[package]]
-name = "quinn-udp"
-version = "0.5.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee4e529991f949c5e25755532370b8af5d114acae52326361d68d47af64aa842"
-dependencies = [
- "cfg_aliases",
- "libc",
- "once_cell",
- "socket2 0.5.10",
- "tracing",
- "windows-sys 0.59.0",
-]
-
-[[package]]
-name = "quote"
-version = "1.0.40"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
-dependencies = [
- "proc-macro2",
-]
-
-[[package]]
-name = "r-efi"
-version = "5.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5"
-
-[[package]]
-name = "r2d2"
-version = "0.8.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "51de85fb3fb6524929c8a2eb85e6b6d363de4e8c48f9e2c2eac4944abc181c93"
-dependencies = [
- "log",
- "parking_lot",
- "scheduled-thread-pool",
-]
-
-[[package]]
-name = "radium"
-version = "0.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"
-
-[[package]]
-name = "rand"
-version = "0.8.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
-dependencies = [
- "libc",
- "rand_chacha 0.3.1",
- "rand_core 0.6.4",
-]
-
-[[package]]
-name = "rand"
-version = "0.9.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97"
-dependencies = [
- "rand_chacha 0.9.0",
- "rand_core 0.9.3",
-]
-
-[[package]]
-name = "rand_chacha"
-version = "0.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
-dependencies = [
- "ppv-lite86",
- "rand_core 0.6.4",
-]
-
-[[package]]
-name = "rand_chacha"
-version = "0.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
-dependencies = [
- "ppv-lite86",
- "rand_core 0.9.3",
-]
-
-[[package]]
-name = "rand_core"
-version = "0.6.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
-dependencies = [
- "getrandom 0.2.16",
-]
-
-[[package]]
-name = "rand_core"
-version = "0.9.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38"
-dependencies = [
- "getrandom 0.3.3",
-]
-
-[[package]]
-name = "redox_syscall"
-version = "0.5.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "928fca9cf2aa042393a8325b9ead81d2f0df4cb12e1e24cef072922ccd99c5af"
-dependencies = [
- "bitflags",
-]
-
-[[package]]
-name = "regex"
-version = "1.11.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
-dependencies = [
- "aho-corasick",
- "memchr",
- "regex-automata",
- "regex-syntax",
-]
-
-[[package]]
-name = "regex-automata"
-version = "0.4.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
-dependencies = [
- "aho-corasick",
- "memchr",
- "regex-syntax",
-]
-
-[[package]]
-name = "regex-lite"
-version = "0.1.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8d942b98df5e658f56f20d592c7f868833fe38115e65c33003d8cd224b0155da"
-
-[[package]]
-name = "regex-syntax"
-version = "0.8.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
-
-[[package]]
-name = "rend"
-version = "0.4.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "71fe3824f5629716b1589be05dacd749f6aa084c87e00e016714a8cdfccc997c"
-dependencies = [
- "bytecheck",
-]
-
-[[package]]
-name = "reqwest"
-version = "0.12.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d19c46a6fdd48bc4dab94b6103fccc55d34c67cc0ad04653aad4ea2a07cd7bbb"
-dependencies = [
- "base64 0.22.1",
- "bytes",
- "futures-channel",
- "futures-core",
- "futures-util",
- "http 1.3.1",
- "http-body 1.0.1",
- "http-body-util",
- "hyper 1.6.0",
- "hyper-rustls 0.27.6",
- "hyper-util",
- "ipnet",
- "js-sys",
- "log",
- "mime",
- "mime_guess",
- "once_cell",
- "percent-encoding",
- "pin-project-lite",
- "quinn",
- "rustls 0.23.27",
- "rustls-pemfile 2.2.0",
- "rustls-pki-types",
- "serde",
- "serde_json",
- "serde_urlencoded",
- "sync_wrapper",
- "tokio",
- "tokio-rustls 0.26.2",
- "tokio-util",
- "tower",
- "tower-service",
- "url",
- "wasm-bindgen",
- "wasm-bindgen-futures",
- "wasm-streams",
- "web-sys",
- "webpki-roots 0.26.11",
- "windows-registry",
-]
-
-[[package]]
-name = "ring"
-version = "0.17.14"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7"
-dependencies = [
- "cc",
- "cfg-if",
- "getrandom 0.2.16",
- "libc",
- "untrusted",
- "windows-sys 0.52.0",
-]
-
-[[package]]
-name = "rkyv"
-version = "0.7.45"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9008cd6385b9e161d8229e1f6549dd23c3d022f132a2ea37ac3a10ac4935779b"
-dependencies = [
- "bitvec",
- "bytecheck",
- "bytes",
- "hashbrown 0.12.3",
- "ptr_meta",
- "rend",
- "rkyv_derive",
- "seahash",
- "tinyvec",
- "uuid",
-]
-
-[[package]]
-name = "rkyv_derive"
-version = "0.7.45"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "503d1d27590a2b0a3a4ca4c94755aa2875657196ecbf401a42eff41d7de532c0"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 1.0.109",
-]
-
-[[package]]
-name = "rust_decimal"
-version = "1.37.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "faa7de2ba56ac291bd90c6b9bece784a52ae1411f9506544b3eae36dd2356d50"
-dependencies = [
- "arrayvec",
- "borsh",
- "bytes",
- "num-traits",
- "rand 0.8.5",
- "rkyv",
- "serde",
- "serde_json",
-]
-
-[[package]]
-name = "rustc-hash"
-version = "2.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
-
-[[package]]
-name = "rustc_version"
-version = "0.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92"
-dependencies = [
- "semver",
-]
-
-[[package]]
-name = "rustix"
-version = "1.0.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266"
-dependencies = [
- "bitflags",
- "errno 0.3.12",
- "libc",
- "linux-raw-sys",
- "windows-sys 0.59.0",
-]
-
-[[package]]
-name = "rustls"
-version = "0.21.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e"
-dependencies = [
- "log",
- "ring",
- "rustls-webpki 0.101.7",
- "sct",
-]
-
-[[package]]
-name = "rustls"
-version = "0.23.27"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "730944ca083c1c233a75c09f199e973ca499344a2b7ba9e755c457e86fb4a321"
-dependencies = [
- "aws-lc-rs",
- "once_cell",
- "ring",
- "rustls-pki-types",
- "rustls-webpki 0.103.3",
- "subtle",
- "zeroize",
-]
-
-[[package]]
-name = "rustls-native-certs"
-version = "0.6.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00"
-dependencies = [
- "openssl-probe",
- "rustls-pemfile 1.0.4",
- "schannel",
- "security-framework 2.11.1",
-]
-
-[[package]]
-name = "rustls-native-certs"
-version = "0.8.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9980d917ebb0c0536119ba501e90834767bffc3d60641457fd84a1f3fd337923"
-dependencies = [
- "openssl-probe",
- "rustls-pki-types",
- "schannel",
- "security-framework 3.5.1",
-]
-
-[[package]]
-name = "rustls-pemfile"
-version = "1.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c"
-dependencies = [
- "base64 0.21.7",
-]
-
-[[package]]
-name = "rustls-pemfile"
-version = "2.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50"
-dependencies = [
- "rustls-pki-types",
-]
-
-[[package]]
-name = "rustls-pki-types"
-version = "1.12.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79"
-dependencies = [
- "web-time",
- "zeroize",
-]
-
-[[package]]
-name = "rustls-webpki"
-version = "0.101.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765"
-dependencies = [
- "ring",
- "untrusted",
-]
-
-[[package]]
-name = "rustls-webpki"
-version = "0.103.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e4a72fe2bcf7a6ac6fd7d0b9e5cb68aeb7d4c0a0271730218b3e92d43b4eb435"
-dependencies = [
- "aws-lc-rs",
- "ring",
- "rustls-pki-types",
- "untrusted",
-]
-
-[[package]]
-name = "rustversion"
-version = "1.0.21"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d"
-
-[[package]]
-name = "ryu"
-version = "1.0.20"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
-
-[[package]]
-name = "same-file"
-version = "1.0.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
-dependencies = [
- "winapi-util",
-]
-
-[[package]]
-name = "scc"
-version = "2.3.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22b2d775fb28f245817589471dd49c5edf64237f4a19d10ce9a92ff4651a27f4"
-dependencies = [
- "sdd",
-]
-
-[[package]]
-name = "schannel"
-version = "0.1.28"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1"
-dependencies = [
- "windows-sys 0.61.2",
-]
-
-[[package]]
-name = "scheduled-thread-pool"
-version = "0.2.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3cbc66816425a074528352f5789333ecff06ca41b36b0b0efdfbb29edc391a19"
-dependencies = [
- "parking_lot",
-]
-
-[[package]]
-name = "scoped-tls"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294"
-
-[[package]]
-name = "scopeguard"
-version = "1.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
-
-[[package]]
-name = "sct"
-version = "0.7.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414"
-dependencies = [
- "ring",
- "untrusted",
-]
-
-[[package]]
-name = "sdd"
-version = "3.0.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "584e070911c7017da6cb2eb0788d09f43d789029b5877d3e5ecc8acf86ceee21"
-
-[[package]]
-name = "seahash"
-version = "4.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
-
-[[package]]
-name = "security-framework"
-version = "2.11.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02"
-dependencies = [
- "bitflags",
- "core-foundation 0.9.4",
- "core-foundation-sys",
- "libc",
- "security-framework-sys",
-]
-
-[[package]]
-name = "security-framework"
-version = "3.5.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef"
-dependencies = [
- "bitflags",
- "core-foundation 0.10.1",
- "core-foundation-sys",
- "libc",
- "security-framework-sys",
-]
-
-[[package]]
-name = "security-framework-sys"
-version = "2.15.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0"
-dependencies = [
- "core-foundation-sys",
- "libc",
-]
-
-[[package]]
-name = "semver"
-version = "1.0.27"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2"
-
-[[package]]
-name = "serde"
-version = "1.0.228"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
-dependencies = [
- "serde_core",
- "serde_derive",
-]
-
-[[package]]
-name = "serde_core"
-version = "1.0.228"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
-dependencies = [
- "serde_derive",
-]
-
-[[package]]
-name = "serde_derive"
-version = "1.0.228"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.101",
-]
-
-[[package]]
-name = "serde_json"
-version = "1.0.145"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c"
-dependencies = [
- "itoa",
- "memchr",
- "ryu",
- "serde",
- "serde_core",
-]
-
-[[package]]
-name = "serde_path_to_error"
-version = "0.1.17"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "59fab13f937fa393d08645bf3a84bdfe86e296747b506ada67bb15f10f218b2a"
-dependencies = [
- "itoa",
- "serde",
-]
-
-[[package]]
-name = "serde_spanned"
-version = "1.0.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e24345aa0fe688594e73770a5f6d1b216508b4f93484c0026d521acd30134392"
-dependencies = [
- "serde_core",
-]
-
-[[package]]
-name = "serde_urlencoded"
-version = "0.7.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
-dependencies = [
- "form_urlencoded",
- "itoa",
- "ryu",
- "serde",
-]
-
-[[package]]
-name = "serial_test"
-version = "3.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1b258109f244e1d6891bf1053a55d63a5cd4f8f4c30cf9a1280989f80e7a1fa9"
-dependencies = [
- "futures",
- "log",
- "once_cell",
- "parking_lot",
- "scc",
- "serial_test_derive",
-]
-
-[[package]]
-name = "serial_test_derive"
-version = "3.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d69265a08751de7844521fd15003ae0a888e035773ba05695c5c759a6f89eef"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.101",
-]
-
-[[package]]
-name = "sha1"
-version = "0.10.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
-dependencies = [
- "cfg-if",
- "cpufeatures",
- "digest",
-]
-
-[[package]]
-name = "sha2"
-version = "0.10.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
-dependencies = [
- "cfg-if",
- "cpufeatures",
- "digest",
-]
-
-[[package]]
-name = "sharded-slab"
-version = "0.1.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
-dependencies = [
- "lazy_static",
-]
-
-[[package]]
-name = "shlex"
-version = "1.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
-
-[[package]]
-name = "signal-hook"
-version = "0.3.18"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d881a16cf4426aa584979d30bd82cb33429027e42122b169753d6ef1085ed6e2"
-dependencies = [
- "libc",
- "signal-hook-registry",
-]
-
-[[package]]
-name = "signal-hook-registry"
-version = "1.4.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9203b8055f63a2a00e2f593bb0510367fe707d7ff1e5c872de2f537b339e5410"
-dependencies = [
- "libc",
-]
-
-[[package]]
-name = "simdutf8"
-version = "0.1.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e"
-
-[[package]]
-name = "siphasher"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d"
-
-[[package]]
-name = "slab"
-version = "0.4.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67"
-dependencies = [
- "autocfg",
-]
-
-[[package]]
-name = "smallvec"
-version = "1.15.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9"
-
-[[package]]
-name = "socket2"
-version = "0.5.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678"
-dependencies = [
- "libc",
- "windows-sys 0.52.0",
-]
-
-[[package]]
-name = "socket2"
-version = "0.6.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881"
-dependencies = [
- "libc",
- "windows-sys 0.60.2",
-]
-
-[[package]]
-name = "sqlite-wasm-rs"
-version = "0.4.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "35c6d746902bca4ddf16592357eacf0473631ea26b36072f0dd0b31fa5ccd1f4"
-dependencies = [
- "js-sys",
- "once_cell",
- "thiserror 2.0.17",
- "tokio",
- "wasm-bindgen",
- "wasm-bindgen-futures",
- "web-sys",
-]
-
-[[package]]
-name = "stable_deref_trait"
-version = "1.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
-
-[[package]]
-name = "str_inflector"
-version = "0.12.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ec0b848d5a7695b33ad1be00f84a3c079fe85c9278a325ff9159e6c99cef4ef7"
-dependencies = [
- "lazy_static",
- "regex",
-]
-
-[[package]]
-name = "strip-ansi-escapes"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2a8f8038e7e7969abb3f1b7c2a811225e9296da208539e0f79c5251d6cac0025"
-dependencies = [
- "vte",
-]
-
-[[package]]
-name = "strsim"
-version = "0.11.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
-
-[[package]]
-name = "subtle"
-version = "2.6.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
-
-[[package]]
-name = "syn"
-version = "1.0.109"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
-dependencies = [
- "proc-macro2",
- "quote",
- "unicode-ident",
-]
-
-[[package]]
-name = "syn"
-version = "2.0.101"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf"
-dependencies = [
- "proc-macro2",
- "quote",
- "unicode-ident",
-]
-
-[[package]]
-name = "sync_wrapper"
-version = "1.0.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263"
-dependencies = [
- "futures-core",
-]
-
-[[package]]
-name = "synstructure"
-version = "0.13.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.101",
-]
-
-[[package]]
-name = "tap"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
-
-[[package]]
-name = "tar"
-version = "0.4.44"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a"
-dependencies = [
- "filetime",
- "libc",
- "xattr",
-]
-
-[[package]]
-name = "tempfile"
-version = "3.23.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16"
-dependencies = [
- "fastrand",
- "getrandom 0.3.3",
- "once_cell",
- "rustix",
- "windows-sys 0.61.2",
-]
-
-[[package]]
-name = "terminal_size"
-version = "0.4.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "45c6481c4829e4cc63825e62c49186a34538b7b2750b73b266581ffb612fb5ed"
-dependencies = [
- "rustix",
- "windows-sys 0.59.0",
-]
-
-[[package]]
-name = "termtree"
-version = "0.5.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683"
-
-[[package]]
-name = "thiserror"
-version = "1.0.69"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
-dependencies = [
- "thiserror-impl 1.0.69",
-]
-
-[[package]]
-name = "thiserror"
-version = "2.0.17"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8"
-dependencies = [
- "thiserror-impl 2.0.17",
-]
-
-[[package]]
-name = "thiserror-impl"
-version = "1.0.69"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.101",
-]
-
-[[package]]
-name = "thiserror-impl"
-version = "2.0.17"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.101",
-]
-
-[[package]]
-name = "thread_local"
-version = "1.1.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c"
-dependencies = [
- "cfg-if",
- "once_cell",
-]
-
-[[package]]
-name = "threadpool"
-version = "1.8.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa"
-dependencies = [
- "num_cpus",
-]
-
-[[package]]
-name = "time"
-version = "0.3.44"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d"
-dependencies = [
- "deranged",
- "itoa",
- "libc",
- "num-conv",
- "num_threads",
- "powerfmt",
- "serde",
- "time-core",
- "time-macros",
-]
-
-[[package]]
-name = "time-core"
-version = "0.1.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b"
-
-[[package]]
-name = "time-macros"
-version = "0.2.24"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3"
-dependencies = [
- "num-conv",
- "time-core",
-]
-
-[[package]]
-name = "timeago"
-version = "0.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05030782ebd7d1295cce15a98c8805de6e70776c95f8e3468f84f7f198824f49"
-dependencies = [
- "chrono",
- "isolang",
-]
-
-[[package]]
-name = "tinystr"
-version = "0.8.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b"
-dependencies = [
- "displaydoc",
- "zerovec",
-]
-
-[[package]]
-name = "tinyvec"
-version = "1.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09b3661f17e86524eccd4371ab0429194e0d7c008abb45f7a7495b1719463c71"
-dependencies = [
- "tinyvec_macros",
-]
-
-[[package]]
-name = "tinyvec_macros"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
-
-[[package]]
-name = "tokio"
-version = "1.48.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408"
-dependencies = [
- "bytes",
- "libc",
- "mio",
- "parking_lot",
- "pin-project-lite",
- "signal-hook-registry",
- "socket2 0.6.1",
- "tokio-macros",
- "windows-sys 0.61.2",
-]
-
-[[package]]
-name = "tokio-macros"
-version = "2.6.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.101",
-]
-
-[[package]]
-name = "tokio-rustls"
-version = "0.24.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081"
-dependencies = [
- "rustls 0.21.12",
- "tokio",
-]
-
-[[package]]
-name = "tokio-rustls"
-version = "0.26.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b"
-dependencies = [
- "rustls 0.23.27",
- "tokio",
-]
-
-[[package]]
-name = "tokio-stream"
-version = "0.1.17"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047"
-dependencies = [
- "futures-core",
- "pin-project-lite",
- "tokio",
-]
-
-[[package]]
-name = "tokio-test"
-version = "0.4.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2468baabc3311435b55dd935f702f42cd1b8abb7e754fb7dfb16bd36aa88f9f7"
-dependencies = [
- "async-stream",
- "bytes",
- "futures-core",
- "tokio",
- "tokio-stream",
-]
-
-[[package]]
-name = "tokio-tungstenite"
-version = "0.27.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "489a59b6730eda1b0171fcfda8b121f4bee2b35cba8645ca35c5f7ba3eb736c1"
-dependencies = [
- "futures-util",
- "log",
- "tokio",
- "tungstenite 0.27.0",
-]
-
-[[package]]
-name = "tokio-tungstenite"
-version = "0.28.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857"
-dependencies = [
- "futures-util",
- "log",
- "tokio",
- "tungstenite 0.28.0",
-]
-
-[[package]]
-name = "tokio-util"
-version = "0.7.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "66a539a9ad6d5d281510d5bd368c973d636c02dbf8a67300bfb6b950696ad7df"
-dependencies = [
- "bytes",
- "futures-core",
- "futures-sink",
- "pin-project-lite",
- "tokio",
-]
-
-[[package]]
-name = "toml"
-version = "0.9.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f0dc8b1fb61449e27716ec0e1bdf0f6b8f3e8f6b05391e8497b8b6d7804ea6d8"
-dependencies = [
- "serde_core",
- "serde_spanned",
- "toml_datetime 0.7.3",
- "toml_parser",
- "winnow",
-]
-
-[[package]]
-name = "toml_datetime"
-version = "0.6.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3da5db5a963e24bc68be8b17b6fa82814bb22ee8660f192bb182771d498f09a3"
-
-[[package]]
-name = "toml_datetime"
-version = "0.7.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533"
-dependencies = [
- "serde_core",
-]
-
-[[package]]
-name = "toml_edit"
-version = "0.22.26"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "310068873db2c5b3e7659d2cc35d21855dbafa50d1ce336397c666e3cb08137e"
-dependencies = [
- "indexmap",
- "toml_datetime 0.6.9",
- "winnow",
-]
-
-[[package]]
-name = "toml_parser"
-version = "1.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e"
-dependencies = [
- "winnow",
-]
-
-[[package]]
-name = "tonic"
-version = "0.14.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb7613188ce9f7df5bfe185db26c5814347d110db17920415cf2fbcad85e7203"
-dependencies = [
- "async-trait",
- "axum",
- "base64 0.22.1",
- "bytes",
- "h2 0.4.10",
- "http 1.3.1",
- "http-body 1.0.1",
- "http-body-util",
- "hyper 1.6.0",
- "hyper-timeout",
- "hyper-util",
- "percent-encoding",
- "pin-project",
- "socket2 0.6.1",
- "sync_wrapper",
- "tokio",
- "tokio-stream",
- "tower",
- "tower-layer",
- "tower-service",
- "tracing",
-]
-
-[[package]]
-name = "tonic-build"
-version = "0.14.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c40aaccc9f9eccf2cd82ebc111adc13030d23e887244bc9cfa5d1d636049de3"
-dependencies = [
- "prettyplease",
- "proc-macro2",
- "quote",
- "syn 2.0.101",
-]
-
-[[package]]
-name = "tonic-prost"
-version = "0.14.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "66bd50ad6ce1252d87ef024b3d64fe4c3cf54a86fb9ef4c631fdd0ded7aeaa67"
-dependencies = [
- "bytes",
- "prost",
- "tonic",
-]
-
-[[package]]
-name = "tonic-prost-build"
-version = "0.14.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4a16cba4043dc3ff43fcb3f96b4c5c154c64cbd18ca8dce2ab2c6a451d058a2"
-dependencies = [
- "prettyplease",
- "proc-macro2",
- "prost-build",
- "prost-types",
- "quote",
- "syn 2.0.101",
- "tempfile",
- "tonic-build",
-]
-
-[[package]]
-name = "tonic-reflection"
-version = "0.14.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34da53e8387581d66db16ff01f98a70b426b091fdf76856e289d5c1bd386ed7b"
-dependencies = [
- "prost",
- "prost-types",
- "tokio",
- "tokio-stream",
- "tonic",
- "tonic-prost",
-]
-
-[[package]]
-name = "tower"
-version = "0.5.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9"
-dependencies = [
- "futures-core",
- "futures-util",
- "indexmap",
- "pin-project-lite",
- "slab",
- "sync_wrapper",
- "tokio",
- "tokio-util",
- "tower-layer",
- "tower-service",
- "tracing",
-]
-
-[[package]]
-name = "tower-layer"
-version = "0.3.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"
-
-[[package]]
-name = "tower-service"
-version = "0.3.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
-
-[[package]]
-name = "tracing"
-version = "0.1.41"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
-dependencies = [
- "log",
- "pin-project-lite",
- "tracing-attributes",
- "tracing-core",
-]
-
-[[package]]
-name = "tracing-appender"
-version = "0.2.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf"
-dependencies = [
- "crossbeam-channel",
- "thiserror 1.0.69",
- "time",
- "tracing-subscriber",
-]
-
-[[package]]
-name = "tracing-attributes"
-version = "0.1.28"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.101",
-]
-
-[[package]]
-name = "tracing-core"
-version = "0.1.33"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c"
-dependencies = [
- "once_cell",
- "valuable",
-]
-
-[[package]]
-name = "tracing-log"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
-dependencies = [
- "log",
- "once_cell",
- "tracing-core",
-]
-
-[[package]]
-name = "tracing-serde"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1"
-dependencies = [
- "serde",
- "tracing-core",
-]
-
-[[package]]
-name = "tracing-subscriber"
-version = "0.3.20"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5"
-dependencies = [
- "matchers",
- "nu-ansi-term",
- "once_cell",
- "regex-automata",
- "serde",
- "serde_json",
- "sharded-slab",
- "smallvec",
- "thread_local",
- "time",
- "tracing",
- "tracing-core",
- "tracing-log",
- "tracing-serde",
-]
-
-[[package]]
-name = "tracing-test"
-version = "0.2.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "557b891436fe0d5e0e363427fc7f217abf9ccd510d5136549847bdcbcd011d68"
-dependencies = [
- "tracing-core",
- "tracing-subscriber",
- "tracing-test-macro",
-]
-
-[[package]]
-name = "tracing-test-macro"
-version = "0.2.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "04659ddb06c87d233c566112c1c9c5b9e98256d9af50ec3bc9c8327f873a7568"
-dependencies = [
- "quote",
- "syn 2.0.101",
-]
-
-[[package]]
-name = "try-lock"
-version = "0.2.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
-
-[[package]]
-name = "try_match"
-version = "0.4.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b065c869a3f832418e279aa4c1d7088f9d5d323bde15a60a08e20c2cd4549082"
-dependencies = [
- "try_match_inner",
-]
-
-[[package]]
-name = "try_match_inner"
-version = "0.5.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b9c81686f7ab4065ccac3df7a910c4249f8c0f3fb70421d6ddec19b9311f63f9"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.101",
-]
-
-[[package]]
-name = "tungstenite"
-version = "0.27.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eadc29d668c91fcc564941132e17b28a7ceb2f3ebf0b9dae3e03fd7a6748eb0d"
-dependencies = [
- "bytes",
- "data-encoding",
- "http 1.3.1",
- "httparse",
- "log",
- "rand 0.9.1",
- "sha1",
- "thiserror 2.0.17",
- "utf-8",
-]
-
-[[package]]
-name = "tungstenite"
-version = "0.28.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442"
-dependencies = [
- "bytes",
- "data-encoding",
- "http 1.3.1",
- "httparse",
- "log",
- "rand 0.9.1",
- "sha1",
- "thiserror 2.0.17",
- "utf-8",
-]
-
-[[package]]
-name = "typenum"
-version = "1.18.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f"
-
-[[package]]
-name = "unicase"
-version = "2.8.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539"
-
-[[package]]
-name = "unicode-ident"
-version = "1.0.18"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
-
-[[package]]
-name = "unix-named-pipe"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ad653da8f36ac5825ba06642b5a3cce14a4e52c6a5fab4a8928d53f4426dae2"
-dependencies = [
- "errno 0.2.8",
- "libc",
-]
-
-[[package]]
-name = "untrusted"
-version = "0.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
-
-[[package]]
-name = "url"
-version = "2.5.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60"
-dependencies = [
- "form_urlencoded",
- "idna",
- "percent-encoding",
-]
-
-[[package]]
-name = "urlencoding"
-version = "2.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
-
-[[package]]
-name = "utf-8"
-version = "0.7.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
-
-[[package]]
-name = "utf8_iter"
-version = "1.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
-
-[[package]]
-name = "utf8parse"
-version = "0.2.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
-
-[[package]]
-name = "uuid"
-version = "1.18.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2"
-dependencies = [
- "getrandom 0.3.3",
- "js-sys",
- "wasm-bindgen",
-]
-
-[[package]]
-name = "valuable"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
-
-[[package]]
-name = "value-bag"
-version = "1.11.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "943ce29a8a743eb10d6082545d861b24f9d1b160b7d741e0f2cdf726bec909c5"
-
-[[package]]
-name = "vcpkg"
-version = "0.2.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
-
-[[package]]
-name = "version_check"
-version = "0.9.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
-
-[[package]]
-name = "vsimd"
-version = "0.8.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64"
-
-[[package]]
-name = "vte"
-version = "0.14.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "231fdcd7ef3037e8330d8e17e61011a2c244126acc0a982f4040ac3f9f0bc077"
-dependencies = [
- "memchr",
-]
-
-[[package]]
-name = "wait-timeout"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11"
-dependencies = [
- "libc",
-]
-
-[[package]]
-name = "walkdir"
-version = "2.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" -dependencies = [ - "same-file", - "winapi-util", -] - -[[package]] -name = "want" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" -dependencies = [ - "try-lock", -] - -[[package]] -name = "warp" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d06d9202adc1f15d709c4f4a2069be5428aa912cc025d6f268ac441ab066b0" -dependencies = [ - "bytes", - "futures-util", - "headers", - "http 1.3.1", - "http-body 1.0.1", - "http-body-util", - "hyper 1.6.0", - "hyper-util", - "log", - "mime", - "mime_guess", - "percent-encoding", - "pin-project", - "scoped-tls", - "serde", - "serde_json", - "serde_urlencoded", - "tokio", - "tokio-tungstenite 0.27.0", - "tokio-util", - "tower-service", - "tracing", -] - -[[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" - -[[package]] -name = "wasi" -version = "0.14.2+wasi-0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" -dependencies = [ - "wit-bindgen-rt", -] - -[[package]] -name = "wasite" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" - -[[package]] -name = "wasm-bindgen" -version = "0.2.105" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da95793dfc411fbbd93f5be7715b0578ec61fe87cb1a42b12eb625caa5c5ea60" -dependencies = [ - "cfg-if", - "once_cell", - "rustversion", - "wasm-bindgen-macro", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-futures" -version = "0.4.55" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "551f88106c6d5e7ccc7cd9a16f312dd3b5d36ea8b4954304657d5dfba115d4a0" -dependencies = [ - "cfg-if", - "js-sys", - "once_cell", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.105" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04264334509e04a7bf8690f2384ef5265f05143a4bff3889ab7a3269adab59c2" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.105" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "420bc339d9f322e562942d52e115d57e950d12d88983a14c79b86859ee6c7ebc" -dependencies = [ - "bumpalo", - "proc-macro2", - "quote", - "syn 2.0.101", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.105" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76f218a38c84bcb33c25ec7059b07847d465ce0e0a76b995e134a45adcb6af76" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "wasm-streams" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" -dependencies = [ - "futures-util", - "js-sys", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", -] - -[[package]] -name = "web-sys" -version = "0.3.82" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a1f95c0d03a47f4ae1f7a64643a6bb97465d9b740f0fa8f90ea33915c99a9a1" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "web-time" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "webpki-roots" -version = "0.26.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" -dependencies = [ - "webpki-roots 1.0.0", -] - -[[package]] -name = "webpki-roots" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2853738d1cc4f2da3a225c18ec6c3721abb31961096e9dbf5ab35fa88b19cfdb" -dependencies = [ - "rustls-pki-types", -] - -[[package]] -name = "whoami" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d" -dependencies = [ - "libredox", - "wasite", - "web-sys", -] - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-util" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" -dependencies = [ - "windows-sys 0.59.0", -] - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "windows-core" -version = "0.61.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" -dependencies = [ - "windows-implement", - "windows-interface", - "windows-link 0.1.1", - "windows-result", - "windows-strings 0.4.2", -] - -[[package]] -name = "windows-implement" -version = "0.60.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", -] - -[[package]] -name = "windows-interface" -version = "0.59.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", -] - -[[package]] -name = "windows-link" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" - -[[package]] -name = "windows-link" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" - -[[package]] -name = "windows-registry" -version = "0.4.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3" -dependencies = [ - "windows-result", - "windows-strings 0.3.1", - "windows-targets 0.53.5", -] - -[[package]] -name = "windows-result" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" -dependencies = [ - "windows-link 0.1.1", -] - -[[package]] -name = "windows-strings" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319" -dependencies = [ - "windows-link 0.1.1", -] - -[[package]] -name = "windows-strings" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" -dependencies = [ - "windows-link 0.1.1", -] - -[[package]] -name = "windows-sys" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-sys" -version = "0.59.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-sys" -version = "0.60.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" -dependencies = [ - "windows-targets 0.53.5", -] - -[[package]] -name = "windows-sys" -version = "0.61.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" -dependencies = [ - "windows-link 0.2.1", -] - -[[package]] -name = "windows-targets" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" -dependencies = [ - "windows_aarch64_gnullvm 0.52.6", - "windows_aarch64_msvc 0.52.6", - "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm 0.52.6", - "windows_i686_msvc 0.52.6", - "windows_x86_64_gnu 0.52.6", - "windows_x86_64_gnullvm 0.52.6", - "windows_x86_64_msvc 0.52.6", -] - -[[package]] -name = "windows-targets" -version = "0.53.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" -dependencies = [ - "windows-link 0.2.1", - "windows_aarch64_gnullvm 0.53.0", - "windows_aarch64_msvc 0.53.0", - "windows_i686_gnu 0.53.0", - "windows_i686_gnullvm 0.53.0", - "windows_i686_msvc 0.53.0", - "windows_x86_64_gnu 0.53.0", - "windows_x86_64_gnullvm 0.53.0", - "windows_x86_64_msvc 0.53.0", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" - -[[package]] -name = "windows_i686_gnu" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" - -[[package]] -name = "windows_i686_msvc" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" - -[[package]] -name = "winnow" -version = "0.7.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf" -dependencies = [ - "memchr", -] - -[[package]] -name = "wit-bindgen-rt" -version = "0.39.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" -dependencies = [ - "bitflags", -] - -[[package]] -name = "writeable" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" - -[[package]] -name = "wyz" -version = "0.5.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" -dependencies = [ - "tap", -] - -[[package]] -name = "xattr" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d65cbf2f12c15564212d48f4e3dfb87923d25d611f2aed18f4cb23f0413d89e" -dependencies = [ - "libc", - "rustix", -] - -[[package]] -name = "xmlparser" -version = "0.13.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66fee0b777b0f5ac1c69bb06d361268faafa61cd4682ae064a171c16c433e9e4" - -[[package]] -name = "yoke" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" -dependencies = [ - "serde", - "stable_deref_trait", - "yoke-derive", - "zerofrom", -] - -[[package]] -name = "yoke-derive" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", - "synstructure", -] - -[[package]] -name = "zerocopy" -version = "0.8.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1702d9583232ddb9174e01bb7c15a2ab8fb1bc6f227aa1233858c351a3ba0cb" -dependencies = [ - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.8.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28a6e20d751156648aa063f3800b706ee209a32c0b4d9f24be3d980b01be55ef" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", -] - -[[package]] -name = "zerofrom" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" -dependencies = [ - "zerofrom-derive", -] - -[[package]] -name = "zerofrom-derive" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", - "synstructure", -] - -[[package]] -name = "zeroize" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" - -[[package]] -name = "zerotrie" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" -dependencies = [ - "displaydoc", - "yoke", - "zerofrom", -] - -[[package]] -name = "zerovec" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a05eb080e015ba39cc9e23bbe5e7fb04d5fb040350f99f34e338d5fdd294428" -dependencies = [ - "yoke", - "zerofrom", - "zerovec-derive", -] - -[[package]] -name = "zerovec-derive" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", -] diff --git a/server/Cargo.toml b/server/Cargo.toml index b455c41c..2fd7118c 100644 --- a/server/Cargo.toml +++ b/server/Cargo.toml @@ -125,6 +125,7 @@ aspasia = "0.2.0" aws-config = { version = "1.5.0", default-features = false, features = ["rustls"] } aws-sdk-s3 = { version = "1.40.0", default-features = false, features = ["rustls"] } aws-types = 
"1.3.0" +http = "0.2" [dev-dependencies] assert_fs = "1.0.13" diff --git a/server/src/api_server/tests.rs b/server/src/api_server/tests.rs index d9191bd2..2a95d10b 100644 --- a/server/src/api_server/tests.rs +++ b/server/src/api_server/tests.rs @@ -1,5 +1,6 @@ #![allow(dead_code)] +use crate::storage::StorageBackend; use std::sync::Arc; use std::str::FromStr; use std::sync::atomic::AtomicBool; @@ -15,7 +16,6 @@ use crate::api_server::{parse_auth_headers, run_api_server_async, validate_org_h use crate::api_server::server_state::ServerState; use crate::database::models::{self}; use crate::database::tests::make_test_db; - use crate::api_server::test_utils::{ApiTestState, expect_msg, expect_no_msg, write, open_media_file, connect_client_ws}; use crate::grpc::db_models::proto_msg_type_to_event_name; diff --git a/server/src/storage.rs b/server/src/storage.rs index 2cdb453d..6204223b 100644 --- a/server/src/storage.rs +++ b/server/src/storage.rs @@ -4,11 +4,14 @@ use std::path::{Path, PathBuf}; use std::str::FromStr; use std::sync::Arc; -use anyhow::{bail, Context}; -use aws_sdk_s3::{config::Region, endpoint::Endpoint, primitives::ByteStream, Client}; -use aws_types::credentials::Credentials; +use anyhow::{anyhow, bail, Context}; +use aws_sdk_s3::{config::Region, config::endpoint::Endpoint, primitives::ByteStream, Client, config::endpoint::ResolveEndpoint}; +use aws_sdk_s3::config::auth::{ParamsBuilder}; +use aws_sdk_s3::config::Credentials; +use aws_sdk_s3::config::endpoint::{DefaultResolver, EndpointFuture, SharedEndpointResolver}; +use http::Uri; +use mime::Params; use tokio::runtime::Runtime; - /// Simple content type guessing for a handful of formats we serve. fn guess_content_type(path: &Path) -> &'static str { match path.extension().and_then(|e| e.to_str()).map(|s| s.to_ascii_lowercase()) { @@ -57,18 +60,22 @@ impl StorageBackend { let rt = Runtime::new().context("create tokio runtime for S3 client")?; let client = { let region = Region::new(region); - let credentials = Credentials::from_keys(access_key, secret_key, None); - let endpoint = Endpoint::immutable( - http::Uri::from_str(&endpoint).context("bad s3 endpoint uri")? - ); + let credentials = Credentials::new(access_key, secret_key, None, None, ""); + let url=match Uri::from_str(&endpoint){ + Ok(u) => u, + Err(e) => return Err(anyhow!("failed to create uri: {}", e)), + }; + + let resolver=DefaultResolver::new(); let cfg = rt.block_on(async { let base = aws_config::defaults(aws_config::BehaviorVersion::latest()) .region(region) + .endpoint_url(endpoint) .credentials_provider(credentials) .load() .await; aws_sdk_s3::config::Builder::from(&base) - .endpoint_resolver(endpoint) + .endpoint_resolver(resolver) .force_path_style(true) .build() }); From 714e84cd13948b3001a681a4ed651d145f7ad6e7 Mon Sep 17 00:00:00 2001 From: Mike Solar Date: Sun, 30 Nov 2025 15:31:13 +0800 Subject: [PATCH 03/10] Add file upload and transcode progress bar. 
---
 README.md                                     |  31 +-
 client/src/__tests__/lib/NavBar.test.ts       |  22 +-
 client/src/lib/NavBar.svelte                  |  21 +-
 server/src/api_server/file_upload.rs          | 179 ++--
 server/src/api_server/mod.rs                  | 420 +++++++---
 server/src/api_server/server_state.rs         | 174 ++--
 server/src/api_server/test_utils.rs           | 135 ++-
 server/src/api_server/tests.rs                | 115 +--
 server/src/api_server/user_session.rs         | 158 ++--
 server/src/api_server/ws_handers.rs           | 769 +++++++++++++----
 server/src/database/basic_query.rs            |  66 +-
 server/src/database/custom_ops.rs             |  99 ++-
 server/src/database/db_backup.rs              |  55 +-
 server/src/database/error.rs                  |   2 +-
 server/src/database/migration_solver.rs       | 362 ++++++---
 server/src/database/mod.rs                    | 212 +++--
 server/src/database/models.rs                 |  58 +-
 server/src/database/schema.rs                 |   1 -
 server/src/database/tests.rs                  | 321 ++++++--
 server/src/grpc/caller.rs                     |  97 ++-
 server/src/grpc/db_models.rs                  | 374 ++++++---
 server/src/grpc/grpc_client.rs                |  96 ++-
 server/src/grpc/grpc_impl_helpers.rs          |  28 +-
 server/src/grpc/grpc_server.rs                | 371 ++++++---
 server/src/grpc/mod.rs                        |  83 +-
 server/src/lib.rs                             | 321 +++++---
 server/src/log.rs                             | 111 ++-
 server/src/main.rs                            | 128 +--
 server/src/storage.rs                         | 234 +++++-
 server/src/tests/integration_test.rs          | 389 +++++----
 server/src/video_pipeline/cleanup_rejected.rs |  24 +-
 server/src/video_pipeline/incoming_monitor.rs | 132 +--
 server/src/video_pipeline/metadata_reader.rs  | 194 +++--
 server/src/video_pipeline/mod.rs              | 429 ++++++----
 server/src/video_pipeline/script_processor.rs | 652 ++++++++++-----
 35 files changed, 4780 insertions(+), 2083 deletions(-)

diff --git a/README.md b/README.md
index 55262f0c..c603df6d 100644
--- a/README.md
+++ b/README.md
@@ -20,7 +20,7 @@ Clapshot is an open-source, self-hosted tool for collaborative video/media revie
 - **File Organization**: Hierarchical folder system with drag-and-drop, admin user management interface
 - **Media Processing**: FFmpeg transcoding with configurable quality, thumbnail generation
 - **Authentication**: Reverse proxy integration supporting OAuth, JWT, Kerberos, SAML, etc.
-- **Storage**: SQLite database with automatic migrations, file-based media storage
+- **Storage**: SQLite database with automatic migrations, local filesystem or S3-compatible object storage
 - **Extensibility**: Plugin system for custom workflows and integrations
 
 *For a comprehensive feature list, see [FEATURES.md](FEATURES.md).*
@@ -125,6 +125,49 @@ See [Upgrading Guide](doc/upgrading.md) for instructions on installing a new rel
 
 **Want to customize media processing?** See the [Transcoding and Thumbnailing Guide](doc/transcoding.md) for configuring hardware acceleration, custom encoders, and specialized processing workflows.
 
+### Object Storage (S3-compatible)
+
+Clapshot can upload processed media and thumbnails to an S3-compatible object store while still staging files locally under `data_dir/videos`.
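+
+For non-Docker installs, the same settings can go in `clapshot-server.conf`. A minimal sketch (key names as listed below; all values are placeholders for your own deployment):
+
+```ini
+storage-backend = s3
+s3-endpoint = https://s3.example.com
+s3-region = us-east-1
+s3-bucket = clapshot-media
+s3-access-key = YOUR_KEY
+s3-secret-key = YOUR_SECRET
+s3-prefix = videos
+s3-public-url = https://cdn.example.com/clapshot-media
+```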
+
+**Required settings** (CLI flags, `clapshot-server.conf`, or `CLAPSHOT_SERVER__*` env vars):
+- `storage-backend = s3`
+- `s3-endpoint = https://s3.example.com`
+- `s3-region = us-east-1`
+- `s3-bucket = clapshot-media`
+- `s3-access-key`, `s3-secret-key`
+
+**Optional settings:**
+- `s3-prefix` – path inside the bucket (default: `videos`)
+- `s3-public-url` – base URL used in playback links (defaults to `s3-endpoint/bucket`; set to your CDN/domain if different)
+
+**Docker env example:**
+```bash
+-e CLAPSHOT_SERVER__STORAGE_BACKEND=s3 \
+-e CLAPSHOT_SERVER__S3_ENDPOINT=https://s3.example.com \
+-e CLAPSHOT_SERVER__S3_REGION=us-east-1 \
+-e CLAPSHOT_SERVER__S3_BUCKET=clapshot-media \
+-e CLAPSHOT_SERVER__S3_ACCESS_KEY=YOUR_KEY \
+-e CLAPSHOT_SERVER__S3_SECRET_KEY=YOUR_SECRET \
+-e CLAPSHOT_SERVER__S3_PUBLIC_URL=https://cdn.example.com/clapshot-media
+```
+
+Ensure the bucket/prefix is publicly readable via `s3-public-url` for playback, and keep enough local disk for staging uploads under `data_dir/videos`.
+
+
 ## Architecture Overview
 
 Main components:
diff --git a/client/src/__tests__/lib/NavBar.test.ts b/client/src/__tests__/lib/NavBar.test.ts
index 6f38134e..d0248395 100644
--- a/client/src/__tests__/lib/NavBar.test.ts
+++ b/client/src/__tests__/lib/NavBar.test.ts
@@ -431,6 +431,26 @@ describe('NavBar.svelte', () => {
     expect(matchingReport?.progress).toBe(0.7);
   });
 
+  it('renders a progress bar for the active video', () => {
+    const report: MediaProgressReport = {
+      mediaFileId: 'video123',
+      msg: 'Uploading to storage...',
+      progress: 0.4,
+      received_ts: Date.now()
+    };
+    mediaFileId.set('video123');
+    curVideo.set(createMinimalMediaFile({
+      id: 'video123',
+      title: 'Test Video'
+    }));
+    latestProgressReports.set([report]);
+
+    const { container } = render(NavBar);
+    expect(screen.getByText('Uploading to storage...')).toBeInTheDocument();
+    const bars = container.querySelectorAll('.bg-amber-500');
+    expect(bars.length).toBeGreaterThan(0);
+  });
+
   it('should return undefined when no matching progress report', () => {
     const reports: MediaProgressReport[] = [
       {
@@ -523,4 +543,4 @@ describe('NavBar.svelte', () => {
       expect(() => render(NavBar)).not.toThrow();
     });
   });
-});
\ No newline at end of file
+});
diff --git a/client/src/lib/NavBar.svelte b/client/src/lib/NavBar.svelte
index 8a1bfa15..601fa702 100644
--- a/client/src/lib/NavBar.svelte
+++ b/client/src/lib/NavBar.svelte
@@ -20,13 +20,17 @@ let { onbasicauthlogout, onaddcomments }: Props = $props();
 
 let loggedOut = $state(false);
 
-// Watch for (transcoding) progress reports from server, and update progress bar if one matches this item.
+// Watch for (transcoding/upload) progress reports from server, and show a quick status bar for the current video.
 let videoProgressMsg: string | undefined = $state(undefined);
+let videoProgressVal: number | undefined = $state(undefined);
 
-onMount(async () => {
-    latestProgressReports.subscribe((reports: MediaProgressReport[]) => {
-        videoProgressMsg = reports.find((r: MediaProgressReport) => r.mediaFileId === $mediaFileId)?.msg;
+onMount(() => {
+    const unsubscribe = latestProgressReports.subscribe((reports: MediaProgressReport[]) => {
+        const match = reports.find((r: MediaProgressReport) => r.mediaFileId === $mediaFileId);
+        videoProgressMsg = match?.msg;
+        videoProgressVal = match?.progress;
     });
+    return () => unsubscribe();
 });
 
@@ -138,6 +142,17 @@ function addEDLComments(comments: Proto3.Comment[]) {
+
+    {#if videoProgressVal !== undefined}
+        <div class="flex flex-col justify-center mx-2 w-44">
+            <span class="text-xs text-gray-400 truncate">
+                {videoProgressMsg || 'Processing...'}
+            </span>
+            <div class="h-1 w-full rounded bg-gray-600">
+                <div class="h-1 rounded bg-amber-500" style="width: {Math.round(videoProgressVal * 100)}%"></div>
+            </div>
+        </div>
+ {/if} {$curVideo?.title} {#if videoProgressMsg} {videoProgressMsg} diff --git a/server/src/api_server/file_upload.rs b/server/src/api_server/file_upload.rs index 12522bbd..8dc1ea31 100644 --- a/server/src/api_server/file_upload.rs +++ b/server/src/api_server/file_upload.rs @@ -1,21 +1,20 @@ -use futures_util::stream::StreamExt; -use warp::http::HeaderMap; use futures::stream::TryStreamExt; +use futures_util::stream::StreamExt; use mpart_async::server::MultipartStream; use std::convert::Infallible; use std::path::{Path, PathBuf}; use std::sync::Arc; +use warp::http::HeaderMap; -use crate::video_pipeline::IncomingFile; use super::parse_auth_headers; use super::server_state::ServerState; -use super::user_session::{org_authz_with_default, AuthzTopic, AuthzError}; +use super::user_session::{org_authz_with_default, AuthzError, AuthzTopic}; +use crate::video_pipeline::IncomingFile; use crate::video_pipeline::TranscodePreference; use lib_clapshot_grpc::proto; use proto::org::authz_user_action_request as authz_req; - /// Warp filter for multipart/form-data file upload /// /// # Arguments @@ -31,67 +30,101 @@ pub async fn handle_multipart_upload( mime: mime::Mime, hdrs: HeaderMap, server: ServerState, - body: impl warp::Stream> + Unpin) - -> Result, Infallible> -{ - let (user_id, user_name, is_admin, mut cookies, filtered_headers, remote_error) = parse_auth_headers(&hdrs, &server.default_user, &server.org_http_headers_regex); + body: impl warp::Stream> + Unpin, +) -> Result, Infallible> { + let (user_id, user_name, is_admin, mut cookies, filtered_headers, remote_error) = + parse_auth_headers(&hdrs, &server.default_user, &server.org_http_headers_regex); // If X-Remote-Error is set, return error response if let Some(error_msg) = remote_error { return Ok(warp::reply::with_status( format!("Authentication Error: {}", error_msg), - warp::http::StatusCode::FORBIDDEN + warp::http::StatusCode::FORBIDDEN, )); } // Check from organizer if user is allowed to upload. // Allow by default if organizer is not configured or doesn't care. 
if let Some(uri) = &server.organizer_uri { - if server.organizer_has_connected.load(std::sync::atomic::Ordering::Relaxed) { + if server + .organizer_has_connected + .load(std::sync::atomic::Ordering::Relaxed) + { let organizer = match crate::grpc::grpc_client::connect(uri.clone()).await { Ok(c) => Arc::new(tokio::sync::Mutex::new(c)), Err(e) => { tracing::error!("Failed to connect to organizer: {}", e); - return Ok(warp::reply::with_status("Internal error: failed to connect to organizer".into(), warp::http::StatusCode::INTERNAL_SERVER_ERROR)); + return Ok(warp::reply::with_status( + "Internal error: failed to connect to organizer".into(), + warp::http::StatusCode::INTERNAL_SERVER_ERROR, + )); } }; let org_session = proto::org::UserSessionData { sid: "".to_string(), - user: Some(proto::UserInfo { id: user_id.clone(), name: user_name.clone() }), + user: Some(proto::UserInfo { + id: user_id.clone(), + name: user_name.clone(), + }), is_admin, cookies: cookies.clone(), http_headers: filtered_headers, }; - match org_authz_with_default(&org_session, "upload media file", true, &server, &Some(organizer), - true, AuthzTopic::Other(None, authz_req::other_op::Op::UploadMediaFile)).await { - Ok(_) => {}, + match org_authz_with_default( + &org_session, + "upload media file", + true, + &server, + &Some(organizer), + true, + AuthzTopic::Other(None, authz_req::other_op::Op::UploadMediaFile), + ) + .await + { + Ok(_) => {} Err(AuthzError::Denied) => { - return Ok(warp::reply::with_status("Permission denied".into(), warp::http::StatusCode::FORBIDDEN)); - }, + return Ok(warp::reply::with_status( + "Permission denied".into(), + warp::http::StatusCode::FORBIDDEN, + )); + } } } } // Determine transcoding preference from header - let transcode_preference = hdrs.get("x-clapshot-transcode") + let transcode_preference = hdrs + .get("x-clapshot-transcode") .and_then(|v| v.to_str().ok()) .map(|s| s.to_ascii_lowercase()) .map(|s| match s.as_str() { "true" | "1" | "yes" => TranscodePreference::Force, "false" | "0" | "no" => TranscodePreference::Skip, _ => TranscodePreference::Auto, - }).unwrap_or(TranscodePreference::Auto); - cookies.insert("transcode_preference".into(), format!("{:?}", transcode_preference)); + }) + .unwrap_or(TranscodePreference::Auto); + cookies.insert( + "transcode_preference".into(), + format!("{:?}", transcode_preference), + ); // Parse the multipart stream let boundary = mime.get_param("boundary").map(|v| v.to_string()); let boundary = match boundary { Some(b) => b, - None => return Ok(warp::reply::with_status("Missing boundary".into(), warp::http::StatusCode::BAD_REQUEST)), + None => { + return Ok(warp::reply::with_status( + "Missing boundary".into(), + warp::http::StatusCode::BAD_REQUEST, + )) + } }; - let mut stream = MultipartStream::new(boundary, body.map_ok(|mut buf| buf.copy_to_bytes(buf.remaining()))); + let mut stream = MultipartStream::new( + boundary, + body.map_ok(|mut buf| buf.copy_to_bytes(buf.remaining())), + ); let mut uploaded_file: PathBuf = PathBuf::new(); while let Ok(Some(mut field)) = stream.try_next().await { @@ -101,55 +134,79 @@ pub async fn handle_multipart_upload( Err(e) => { let msg = format!("Error getting filename: {}", e); tracing::error!(msg); - return Ok(warp::reply::with_status(msg, warp::http::StatusCode::BAD_REQUEST)); - }, - Ok(filename) => - { + return Ok(warp::reply::with_status( + msg, + warp::http::StatusCode::BAD_REQUEST, + )); + } + Ok(filename) => { let path = Path::new(&filename); if path.file_name() != Some(path.as_os_str()) { - return 
Ok(warp::reply::with_status("Filename must not contain path".into(), warp::http::StatusCode::BAD_REQUEST));
+                            return Ok(warp::reply::with_status(
+                                "Filename must not contain path".into(),
+                                warp::http::StatusCode::BAD_REQUEST,
+                            ));
                         }

                         // Make a unique upload dir
                         let uuid = uuid::Uuid::new_v4();
-                        let new_dir = async_std::path::PathBuf::from(&upload_dir).join(uuid.to_string());
-                        let dst = new_dir.join(path.file_name().unwrap());
+                        let new_dir =
+                            async_std::path::PathBuf::from(&upload_dir).join(uuid.to_string());
+                        let dst = new_dir.join(path.file_name().unwrap());

                         if dst.exists().await {
                             tracing::error!("Upload dst '{}' already exists, even though it was prefixed with uuid4. Bug??", dst.display());
-                            return Ok(warp::reply::with_status("Internal error: file already exists".into(), warp::http::StatusCode::INTERNAL_SERVER_ERROR));
+                            return Ok(warp::reply::with_status(
+                                "Internal error: file already exists".into(),
+                                warp::http::StatusCode::INTERNAL_SERVER_ERROR,
+                            ));
                         }
                         if let Err(e) = async_std::fs::create_dir_all(&new_dir).await {
                             tracing::error!("Failed to create upload dir: {}", e);
-                            return Ok(warp::reply::with_status("Internal error: failed to create upload dir".into(), warp::http::StatusCode::INTERNAL_SERVER_ERROR));
+                            return Ok(warp::reply::with_status(
+                                "Internal error: failed to create upload dir".into(),
+                                warp::http::StatusCode::INTERNAL_SERVER_ERROR,
+                            ));
                         }

                         // Create the file and stream the data into it
                         match async_std::fs::File::create(&dst).await {
                             Err(e) => {
-                                let msg = format!("Failed to create file '{}': {}", dst.display(), e);
+                                let msg =
+                                    format!("Failed to create file '{}': {}", dst.display(), e);
                                 tracing::error!(msg);
-                                return Ok(warp::reply::with_status(msg, warp::http::StatusCode::INTERNAL_SERVER_ERROR));
-                            },
-                            Ok(mut f) =>
-                            {
+                                return Ok(warp::reply::with_status(
+                                    msg,
+                                    warp::http::StatusCode::INTERNAL_SERVER_ERROR,
+                                ));
+                            }
+                            Ok(mut f) => {
                                 // Read and write in parallel
-                                let (buff_tx, mut buff_rx) = tokio::sync::mpsc::channel::<bytes::Bytes>(16);
+                                let (buff_tx, mut buff_rx) =
+                                    tokio::sync::mpsc::channel::<bytes::Bytes>(16);

                                 // Read chunks from HTTP
                                 let read_all_chunks = async move {
                                     while let Some(chunk) = field.next().await {
                                         match chunk {
-                                            Ok(data) => { buff_tx.send(data).await.unwrap(); },
-                                            Err(e) => { return Err(e.to_string()); }
-                                    }}; Ok(()) // buff_tx dropped
+                                            Ok(data) => {
+                                                buff_tx.send(data).await.unwrap();
+                                            }
+                                            Err(e) => {
+                                                return Err(e.to_string());
+                                            }
+                                        }
+                                    }
+                                    Ok(()) // buff_tx dropped
                                 };

                                 // Write chunks to the file
                                 let write_all_chunks = async move {
                                     while let Some(data) = buff_rx.recv().await {
-                                        futures_util::AsyncWriteExt::write_all(&mut f, &data).await
+                                        futures_util::AsyncWriteExt::write_all(&mut f, &data)
+                                            .await
                                             .map_err(|e| e.to_string())?;
-                                    }; Ok(())
+                                    }
+                                    Ok(())
                                 };

                                 // Run both tasks in parallel, cleanup on error
@@ -158,33 +215,49 @@ pub async fn handle_multipart_upload(
                                     tracing::error!("Upload failed: {}", e);
                                     // Remove the file & dir, since it's incomplete
                                     if let Err(e) = async_std::fs::remove_file(&dst).await {
-                                        tracing::warn!("Failed to remove incomplete upload file: {}", e);
-                                    } else if let Err(e) = async_std::fs::remove_dir(new_dir).await {
-                                        tracing::warn!("Failed to remove incomplete upload dir: {}", e);
+                                        tracing::warn!(
+                                            "Failed to remove incomplete upload file: {}",
+                                            e
+                                        );
+                                    } else if let Err(e) = async_std::fs::remove_dir(new_dir).await
+                                    {
+                                        tracing::warn!(
+                                            "Failed to remove incomplete upload dir: {}",
+                                            e
+                                        );
                                     }
-                                    return
Ok(warp::reply::with_status( + format!("Upload failed: {e}"), + warp::http::StatusCode::BAD_REQUEST, + )); } - tracing::info!(dst=dst.display().to_string(), "File uploaded."); + tracing::info!(dst = dst.display().to_string(), "File uploaded."); uploaded_file = dst.into(); } }; } } - }, + } fieldname => { tracing::info!("Skipping UNKNOWN multipart POST field '{fieldname}'"); - }, + } } } - if let Err(e) = upload_done.send(IncomingFile{ + if let Err(e) = upload_done.send(IncomingFile { file_path: uploaded_file, user_id, cookies, transcode_preference, }) { tracing::error!("Failed to send upload ok signal: {:?}", e); - return Ok(warp::reply::with_status("Internal error: failed to send upload ok signal".into(), warp::http::StatusCode::INTERNAL_SERVER_ERROR)); + return Ok(warp::reply::with_status( + "Internal error: failed to send upload ok signal".into(), + warp::http::StatusCode::INTERNAL_SERVER_ERROR, + )); } - Ok(warp::reply::with_status("Ok".into(), warp::http::StatusCode::OK)) + Ok(warp::reply::with_status( + "Ok".into(), + warp::http::StatusCode::OK, + )) } diff --git a/server/src/api_server/mod.rs b/server/src/api_server/mod.rs index d6b660a1..486e1675 100644 --- a/server/src/api_server/mod.rs +++ b/server/src/api_server/mod.rs @@ -3,23 +3,23 @@ //#![allow(unused_imports)] use async_std::task::block_on; -use lib_clapshot_grpc::GrpcBindAddr; +use core::panic; +use futures_util::stream::StreamExt; +use futures_util::SinkExt; use lib_clapshot_grpc::proto; use lib_clapshot_grpc::proto::org::OnStartUserSessionResponse; -use tracing::debug; -use warp::Filter; -use core::panic; +use lib_clapshot_grpc::GrpcBindAddr; +use parking_lot::RwLock; +use regex::Regex; use std::collections::HashMap; +use std::sync::atomic::Ordering::Relaxed; use std::sync::Arc; use std::time::Duration; use tokio::time::sleep; -use parking_lot::RwLock; -use futures_util::stream::StreamExt; -use futures_util::SinkExt; -use warp::ws::Message; +use tracing::debug; use warp::http::HeaderMap; -use regex::Regex; -use std::sync::atomic::Ordering::Relaxed; +use warp::ws::Message; +use warp::Filter; use anyhow::{anyhow, bail}; @@ -35,23 +35,23 @@ use ws_handers::msg_dispatch; #[cfg(test)] pub mod test_utils; +mod file_upload; #[cfg(test)] pub mod tests; -mod file_upload; -use file_upload::handle_multipart_upload; -use crate::api_server::user_session::AuthzTopic; +use self::user_session::UserSession; use crate::api_server::user_session::org_authz; +use crate::api_server::user_session::AuthzTopic; +use crate::api_server::ws_handers::SessionClose; use crate::client_cmd; -use crate::database::DbBasicQuery; use crate::database::models; +use crate::database::DbBasicQuery; use crate::grpc::db_models::proto_msg_type_to_event_name; use crate::grpc::grpc_client::OrganizerConnection; use crate::grpc::grpc_client::OrganizerURI; use crate::grpc::{grpc_server, make_media_file_popup_actions}; -use crate::api_server::ws_handers::SessionClose; use crate::video_pipeline::IncomingFile; use crate::PKG_VERSION; -use self::user_session::UserSession; +use file_upload::handle_multipart_upload; type Res = anyhow::Result; type WsMsgSender = tokio::sync::mpsc::UnboundedSender; @@ -71,7 +71,7 @@ pub enum SendTo<'a> { pub type UserMessageTopic = proto::user_message::Type; /// Message from other server modules to user(s) -#[derive (Clone, Debug, Default)] +#[derive(Clone, Debug, Default)] pub struct UserMessage { pub topic: UserMessageTopic, pub user_id: Option, @@ -92,23 +92,24 @@ fn abbrv(msg: &str) -> String { } } - /// User has connected to our WebSocket 
endpoint. /// This function will run (potentially forever) for each individual user that connects. async fn handle_ws_session( - ws: warp::ws::WebSocket, - sid: String, - user_id: String, - username: String, - is_admin: bool, - cookies: HashMap, - filtered_headers: HashMap, - server: ServerState) -{ + ws: warp::ws::WebSocket, + sid: String, + user_id: String, + username: String, + is_admin: bool, + cookies: HashMap, + filtered_headers: HashMap, + server: ServerState, +) { let (msgq_tx, mut msgq_rx) = tokio::sync::mpsc::unbounded_channel(); - let user = match server.db.conn().and_then(|mut conn| - models::User::get_or_create(&mut conn, &user_id, Some(&username))) + let user = match server + .db + .conn() + .and_then(|mut conn| models::User::get_or_create(&mut conn, &user_id, Some(&username))) { Ok(u) => u, Err(e) => { @@ -137,7 +138,7 @@ async fn handle_ws_session( is_admin, cookies, http_headers: filtered_headers, - } + }, }; let _user_session_guard = Some(server.register_user_session(&sid, &user_id, ses.clone())); @@ -150,24 +151,31 @@ async fn handle_ws_session( is_admin: is_admin, server_version: PKG_VERSION.to_string(), }), - SendTo::MsgSender(&ses.sender) + SendTo::MsgSender(&ses.sender), ) { tracing::error!(details=%e, "Error sending welcome message. Closing session."); return; } - async fn connect_organizer(uri: OrganizerURI, ses: &proto::org::UserSessionData) -> Res<(OrganizerConnection, OnStartUserSessionResponse)> { + async fn connect_organizer( + uri: OrganizerURI, + ses: &proto::org::UserSessionData, + ) -> Res<(OrganizerConnection, OnStartUserSessionResponse)> { let mut c = crate::grpc::grpc_client::connect(uri).await?; - let start_ses_req = proto::org::OnStartUserSessionRequest { ses: Some(ses.clone()) }; + let start_ses_req = proto::org::OnStartUserSessionRequest { + ses: Some(ses.clone()), + }; let res = match c.on_start_user_session(start_ses_req).await { Ok(res) => res.into_inner(), Err(e) => { if e.code() == tonic::Code::Unimplemented { - tracing::debug!("Organizer does not implement on_start_user_session. Ignoring.",); + tracing::debug!( + "Organizer does not implement on_start_user_session. Ignoring.", + ); OnStartUserSessionResponse::default() } else { - return Err(e.into()) + return Err(e.into()); } } }; @@ -176,8 +184,9 @@ async fn handle_ws_session( // Define default actions. Organizer may call DefineActions later to override these. if let Err(e) = server.emit_cmd( - client_cmd!(DefineActions, {actions: make_media_file_popup_actions()}), - SendTo::MsgSender(&ses.sender)) { + client_cmd!(DefineActions, {actions: make_media_file_popup_actions()}), + SendTo::MsgSender(&ses.sender), + ) { tracing::error!(details=%e, "Error sending define_actions to client. Closing session."); return; } @@ -187,30 +196,42 @@ async fn handle_ws_session( match connect_organizer(uri, &ses.org_session).await { Ok((c, _res)) => { ses.organizer = Some(tokio::sync::Mutex::new(c).into()); - let op = AuthzTopic::Other(None, proto::org::authz_user_action_request::other_op::Op::Login); - if org_authz(&ses.org_session, "login", true, &server, &ses.organizer, op).await == Some(false) { - tracing::info!("User '{}' not authorized to login. 
Closing session.", ses.user_id); - server.emit_cmd( - client_cmd!(Error, {msg: "Login permission denied.".into()}), - SendTo::MsgSender(&ses.sender)).ok(); + let op = AuthzTopic::Other( + None, + proto::org::authz_user_action_request::other_op::Op::Login, + ); + if org_authz(&ses.org_session, "login", true, &server, &ses.organizer, op).await + == Some(false) + { + tracing::info!( + "User '{}' not authorized to login. Closing session.", + ses.user_id + ); + server + .emit_cmd( + client_cmd!(Error, {msg: "Login permission denied.".into()}), + SendTo::MsgSender(&ses.sender), + ) + .ok(); return; } - }, + } Err(e) => { const MSG: &str = "Error connecting to Organizer. Closing session."; tracing::error!(details=%e, MSG); - server.emit_cmd( - client_cmd!(Error, {msg: MSG.into()}), - SendTo::MsgSender(&ses.sender)).ok(); + server + .emit_cmd( + client_cmd!(Error, {msg: MSG.into()}), + SendTo::MsgSender(&ses.sender), + ) + .ok(); return; } } }; - loop - { - tokio::select! - { + loop { + tokio::select! { // Termination flag set? Exit. _ = sleep(Duration::from_millis(100)) => { if server.terminate_flag.load(Relaxed) { @@ -339,8 +360,13 @@ async fn handle_ws_session( /// * `Ok(Regex)` - Compiled regex with case-insensitive matching /// * `Err(anyhow::Error)` - If pattern is invalid pub fn validate_org_http_headers_regex(pattern: &str) -> anyhow::Result { - Regex::new(&format!("(?i){}", pattern)) - .map_err(|e| anyhow!("Invalid org-http-headers regex pattern '{}': {}", pattern, e)) + Regex::new(&format!("(?i){}", pattern)).map_err(|e| { + anyhow!( + "Invalid org-http-headers regex pattern '{}': {}", + pattern, + e + ) + }) } /// Extract user id, name and clapshot_cookies from HTTP headers (set by nginx) @@ -356,44 +382,102 @@ pub fn validate_org_http_headers_regex(pattern: &str) -> anyhow::Result { /// * `filter_regex` - Regex to filter headers for organizer (case-insensitive) /// /// * Returns: (user_id: String, user_name: String, is_admin: bool, clapshot_cookies: HashMap, filtered_headers: HashMap, remote_error: Option) -fn parse_auth_headers(hdrs: &HeaderMap, default_user_id: &str, filter_regex: &Regex) -> (String, String, bool, HashMap, HashMap, Option) -{ +fn parse_auth_headers( + hdrs: &HeaderMap, + default_user_id: &str, + filter_regex: &Regex, +) -> ( + String, + String, + bool, + HashMap, + HashMap, + Option, +) { fn try_get_first_named_hdr(hdrs: &HeaderMap, names: T) -> Option - where T: IntoIterator { + where + T: IntoIterator, + { for n in names { if let Some(val) = hdrs.get(n).or(hdrs.get(n.to_lowercase())) { match val.to_str() { Ok(s) => return Some(s.into()), Err(e) => tracing::warn!(details=%e, "Error parsing header '{}'.", n), - }}} + } + } + } None } - let user_id = match try_get_first_named_hdr(&hdrs, vec!["X-Remote-User-Id", "X_Remote_User_Id", "HTTP_X_REMOTE_USER_ID"]) { + let user_id = match try_get_first_named_hdr( + &hdrs, + vec![ + "X-Remote-User-Id", + "X_Remote_User_Id", + "HTTP_X_REMOTE_USER_ID", + ], + ) { Some(id) => id, None => { - tracing::warn!("Missing X-Remote-User-Id in HTTP headers. Using '{}' instead.", default_user_id); + tracing::warn!( + "Missing X-Remote-User-Id in HTTP headers. 
Using '{}' instead.", + default_user_id + ); default_user_id.into() - }}; - let user_name = try_get_first_named_hdr(&hdrs, vec!["X-Remote-User-Name", "X_Remote_User_Name", "HTTP_X_REMOTE_USER_NAME"]) - .unwrap_or_else(|| user_id.clone()); - - let cookies_str = try_get_first_named_hdr(&hdrs, vec!["X-Clapshot-Cookies", "X_Clapshot_Cookies", "HTTP_X_CLAPSHOT_COOKIES"]) - .unwrap_or_else(|| "{}".into()); - - let is_admin: bool = try_get_first_named_hdr(&hdrs, vec!["X-Remote-User-Is-Admin", "X_Remote_User_Is_Admin", "HTTP_X_REMOTE_USER_IS_ADMIN"]) - .map(|s| s.to_lowercase() == "true" || s == "1").unwrap_or(user_id == "admin"); - - let remote_error = try_get_first_named_hdr(&hdrs, vec!["X-Remote-Error", "X_Remote_Error", "HTTP_X_REMOTE_ERROR"]); + } + }; + let user_name = try_get_first_named_hdr( + &hdrs, + vec![ + "X-Remote-User-Name", + "X_Remote_User_Name", + "HTTP_X_REMOTE_USER_NAME", + ], + ) + .unwrap_or_else(|| user_id.clone()); + + let cookies_str = try_get_first_named_hdr( + &hdrs, + vec![ + "X-Clapshot-Cookies", + "X_Clapshot_Cookies", + "HTTP_X_CLAPSHOT_COOKIES", + ], + ) + .unwrap_or_else(|| "{}".into()); + + let is_admin: bool = try_get_first_named_hdr( + &hdrs, + vec![ + "X-Remote-User-Is-Admin", + "X_Remote_User_Is_Admin", + "HTTP_X_REMOTE_USER_IS_ADMIN", + ], + ) + .map(|s| s.to_lowercase() == "true" || s == "1") + .unwrap_or(user_id == "admin"); + + let remote_error = try_get_first_named_hdr( + &hdrs, + vec!["X-Remote-Error", "X_Remote_Error", "HTTP_X_REMOTE_ERROR"], + ); let app_cookies = match cookies_str.parse::() { - Ok(c) => { - match c.as_object() { - Some(c) => c.iter().map(|(k, v)| (k.clone(), v.as_str().unwrap_or("").to_string())).collect(), - None => { - tracing::error!("'clapshot_cookies' was not a JSON dict, ignoring."); - HashMap::new() - } + Ok(c) => match c.as_object() { + Some(c) => c + .iter() + .map(|(k, v)| { + ( + k.clone(), + v.as_str() + .unwrap_or("") + .to_string(), + ) + }) + .collect(), + None => { + tracing::error!("'clapshot_cookies' was not a JSON dict, ignoring."); + HashMap::new() } }, Err(e) => { @@ -415,7 +499,14 @@ fn parse_auth_headers(hdrs: &HeaderMap, default_user_id: &str, filter_regex: &Re } } - (user_id, user_name, is_admin, app_cookies, filtered_headers, remote_error) + ( + user_id, + user_name, + is_admin, + app_cookies, + filtered_headers, + remote_error, + ) } /// Handle HTTP requests, read authentication headers and dispatch to WebSocket handler. 
@@ -426,8 +517,8 @@ async fn run_api_server_async( user_msg_rx: crossbeam_channel::Receiver, upload_results_tx: crossbeam_channel::Sender, grpc_server_bind: Option, - port: u16) -{ + port: u16, +) { let session_counter = Arc::new(RwLock::new(0u64)); let server_state_cln1 = server_state.clone(); let server_state_cln2 = server_state.clone(); @@ -444,9 +535,10 @@ async fn run_api_server_async( tracing::info!("Starting gRPC server for org->srv."); let server = server_state.clone(); let b = bind.clone(); - let hdl = tokio::spawn(async move { - grpc_server::run_org_to_srv_grpc_server(b, server).await - }); + let hdl = + tokio::spawn( + async move { grpc_server::run_org_to_srv_grpc_server(b, server).await }, + ); let server = server_state.clone(); let mut wait_time = Duration::from_millis(10); sleep(wait_time).await; @@ -456,28 +548,37 @@ async fn run_api_server_async( tracing::debug!("Waiting for org->srv connection..."); } wait_time = std::cmp::min(wait_time * 2, Duration::from_secs(4)); - if server.terminate_flag.load(Relaxed) { return; } + if server.terminate_flag.load(Relaxed) { + return; + } } if let Some(org_info) = server.organizer_info.lock().await.as_ref() { tracing::info!( org_name = &org_info.name, description = &org_info.description, - version = org_info.version.as_ref().map(|v| format!("{}.{}.{}", v.major, v.minor, v.patch)), - "org->srv connected, bidirectional gRPC established."); + version = org_info + .version + .as_ref() + .map(|v| format!("{}.{}.{}", v.major, v.minor, v.patch)), + "org->srv connected, bidirectional gRPC established." + ); } else { panic!("Organizer connected, but no info received. This is a bug in server code."); } Some(hdl) - }, + } None => None, }; - tracing::info!(port=port, "Starting websocket API."); + tracing::info!(port = port, "Starting websocket API."); - let rt_health = warp::path("api").and(warp::path("health")).map(|| "I'm alive!"); + let rt_health = warp::path("api") + .and(warp::path("health")) + .map(|| "I'm alive!"); let upload_dir = server_state.upload_dir.clone(); - let rt_upload = warp::path("api").and(warp::path("upload")) + let rt_upload = warp::path("api") + .and(warp::path("upload")) .and(warp::post()) .and(warp::any().map(move || upload_dir.clone())) .and(warp::any().map(move || upload_results_tx.clone())) @@ -487,17 +588,21 @@ async fn run_api_server_async( .and(warp::body::stream()) .and_then(handle_multipart_upload); - let rt_videos = warp::path("videos").and( - warp::fs::dir(server_state_cln1.media_files_dir.clone()) - .with(warp::log("videos"))); + let rt_videos = warp::path("videos") + .and(warp::fs::dir(server_state_cln1.media_files_dir.clone()).with(warp::log("videos"))); - let rt_api_ws = warp::path("api").and(warp::path("ws")) + let rt_api_ws = warp::path("api") + .and(warp::path("ws")) .and(warp::header::headers_cloned()) .and(warp::ws()) - .map (move|hdrs: HeaderMap, ws: warp::ws::Ws| { - + .map(move |hdrs: HeaderMap, ws: warp::ws::Ws| { // Get user ID and username (from reverse proxy) - let (user_id, user_name, is_admin, app_cookies, filtered_headers, remote_error) = parse_auth_headers(&hdrs, &server_state.default_user, &server_state.org_http_headers_regex); + let (user_id, user_name, is_admin, app_cookies, filtered_headers, remote_error) = + parse_auth_headers( + &hdrs, + &server_state.default_user, + &server_state.org_http_headers_regex, + ); // Increment session counter let sid = { @@ -512,9 +617,10 @@ async fn run_api_server_async( // Check for X-Remote-Error and send error message if present if let Some(error_msg) = 
remote_error { let err_msg = proto::client::server_to_client_cmd::Error { - msg: format!("Authentication Error: {}", error_msg) + msg: format!("Authentication Error: {}", error_msg), }; - let json_txt = serde_json::to_string(&err_msg).expect("Error serializing error message"); + let json_txt = + serde_json::to_string(&err_msg).expect("Error serializing error message"); if let Err(e) = ws.send(warp::ws::Message::text(json_txt)).await { tracing::error!("Failed to send error message: {}", e); } @@ -527,25 +633,49 @@ async fn run_api_server_async( // Diesel SQLite calls are blocking, so run a thread per user session // even though we're using async/await tokio::task::spawn_blocking(move || { - let _span = tracing::info_span!("ws_session", sid=%sid, user=%user_id).entered(); - block_on(handle_ws_session(ws, sid, user_id, user_name, is_admin, app_cookies, filtered_headers, server_state)); - }).await.unwrap_or_else(|e| { - tracing::error!(details=%e, "Error joining handle_ws_session thread."); }); + let _span = + tracing::info_span!("ws_session", sid=%sid, user=%user_id).entered(); + block_on(handle_ws_session( + ws, + sid, + user_id, + user_name, + is_admin, + app_cookies, + filtered_headers, + server_state, + )); + }) + .await + .unwrap_or_else(|e| { + tracing::error!(details=%e, "Error joining handle_ws_session thread."); + }); }) }); - let routes = rt_health.or(rt_api_ws).or(rt_upload).or(rt_videos) + let routes = rt_health + .or(rt_api_ws) + .or(rt_upload) + .or(rt_videos) .with(warp::log("api_server")); - - let mut cors_origins: Vec<&str> = cors_origins.iter() + let mut cors_origins: Vec<&str> = cors_origins + .iter() .map(|s| s.as_str()) .filter(|s| !s.is_empty()) .collect(); tracing::info!("Allowed CORS origins: {:?}", cors_origins); let cors_methods = ["GET", "POST", "HEAD", "OPTIONS"]; - let cors_headers = ["x-file-name", "x-clapshot-cookies", "content-type", "upgrade", "sec-websocket-protocol", "sec-websocket-version"]; + let cors_headers = [ + "x-file-name", + "x-clapshot-cookies", + "x-clapshot-transcode", + "content-type", + "upgrade", + "sec-websocket-protocol", + "sec-websocket-version", + ]; let routes = if cors_origins.contains(&"*") { tracing::warn!(concat!( @@ -554,17 +684,32 @@ async fn run_api_server_async( "Do NOT use '*' in production! ", "Instead, specify the allowed origin, such as 'https://clapshot.example.com'." )); - routes.with(warp::cors().allow_methods(cors_methods).allow_headers(cors_headers) - .allow_any_origin()).boxed() + routes + .with( + warp::cors() + .allow_methods(cors_methods) + .allow_headers(cors_headers) + .allow_any_origin(), + ) + .boxed() } else { if cors_origins.is_empty() { cors_origins.push(url_base.as_str()); - tracing::info!("No CORS origins specified. Using url_base for it: '{}'", url_base); + tracing::info!( + "No CORS origins specified. 
Using url_base for it: '{}'", + url_base + ); } else { tracing::info!("Using CORS origins: {:?}", cors_origins); } - routes.with(warp::cors().allow_methods(cors_methods).allow_headers(cors_headers) - .allow_origins(cors_origins)).boxed() + routes + .with( + warp::cors() + .allow_methods(cors_methods) + .allow_headers(cors_headers) + .allow_origins(cors_origins), + ) + .boxed() }; debug!("Binding Websocket API to {}:{}", bind_addr, port); @@ -585,12 +730,13 @@ async fn run_api_server_async( while let Ok(m) = user_msg_rx.try_recv() { let topic_str = proto_msg_type_to_event_name(m.topic); - let msg_insert = models::MessageInsert { + let msg_insert = models::MessageInsert { event_name: topic_str.into(), user_id: m.user_id.clone().unwrap_or("".into()), message: m.msg.clone(), details: m.details.clone().unwrap_or("".into()), - seen: false, comment_id: None, + seen: false, + comment_id: None, media_file_id: m.media_file_id.clone(), subtitle_id: m.subtitle_id.clone(), }; @@ -602,9 +748,12 @@ async fn run_api_server_async( if let Some(vid) = m.media_file_id { if let Err(_) = server_state.emit_cmd( client_cmd!(ShowMessages, { msgs: vec![proto_msg.clone()] }), - SendTo::MediaFileId(&vid) + SendTo::MediaFileId(&vid), ) { - tracing::error!(media_file=vid, "Failed to send notification to media file watchers."); + tracing::error!( + media_file = vid, + "Failed to send notification to media file watchers." + ); } }; @@ -614,12 +763,19 @@ async fn run_api_server_async( let mut user_was_online = false; match server_state.emit_cmd( client_cmd!(ShowMessages, { msgs: vec![proto_msg.clone()] }), - SendTo::UserId(&user_id)) - { - Ok(session_cnt) => { user_was_online = session_cnt>0 }, - Err(e) => tracing::error!(user=user_id, details=%e, "Failed to send user notification."), + SendTo::UserId(&user_id), + ) { + Ok(session_cnt) => user_was_online = session_cnt > 0, + Err(e) => { + tracing::error!(user=user_id, details=%e, "Failed to send user notification.") + } } - if !(matches!(m.topic, UserMessageTopic::Progress | UserMessageTopic::MediaFileAdded | UserMessageTopic::MediaFileUpdated)) { + if !(matches!( + m.topic, + UserMessageTopic::Progress + | UserMessageTopic::MediaFileAdded + | UserMessageTopic::MediaFileUpdated + )) { let msg = models::MessageInsert { seen: msg_insert.seen || user_was_online, ..msg_insert @@ -631,7 +787,7 @@ async fn run_api_server_async( } }; } - }; + } server_state.terminate_flag.store(true, Relaxed); }; @@ -645,7 +801,9 @@ async fn run_api_server_async( debug!("Waiting for gRPC server to exit..."); match tokio::try_join!(g) { Ok((Ok(_),)) => tracing::debug!("gRPC server for org->srv exited OK."), - Ok((Err(e),)) => tracing::error!(details=%e, "gRPC server for org->srv exited with error."), + Ok((Err(e),)) => { + tracing::error!(details=%e, "gRPC server for org->srv exited with error.") + } Err(e) => tracing::error!(details=%e, "gRPC server for org->srv panicked."), }; } @@ -653,7 +811,6 @@ async fn run_api_server_async( tracing::debug!("Exiting."); } - #[tokio::main] pub async fn run_forever( user_msg_rx: crossbeam_channel::Receiver, @@ -663,8 +820,8 @@ pub async fn run_forever( url_base: String, cors_origins: Vec, state: ServerState, - port: u16) -{ + port: u16, +) { assert!(!url_base.ends_with('/')); // Should have been stripped by caller let bind_addr = match bind_addr.parse::() { @@ -676,5 +833,14 @@ pub async fn run_forever( }; let _span = tracing::info_span!("API").entered(); - run_api_server_async(bind_addr, cors_origins, state, user_msg_rx, upload_res_tx, grpc_server_bind, 
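// Condensed sketch of the CORS policy assembled above (header list abridged):
// "*" opens any origin (development only), otherwise the explicit list is
// used, with url_base as the fallback when no origins are configured:
//
//     let cors = warp::cors()
//         .allow_methods(["GET", "POST", "HEAD", "OPTIONS"])
//         .allow_headers(["x-file-name", "x-clapshot-cookies", "content-type"]);
//     let routes = if cors_origins.contains(&"*") {
//         routes.with(cors.allow_any_origin()).boxed() // never in production
//     } else {
//         routes.with(cors.allow_origins(cors_origins)).boxed()
//     };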
port).await; + run_api_server_async( + bind_addr, + cors_origins, + state, + user_msg_rx, + upload_res_tx, + grpc_server_bind, + port, + ) + .await; } diff --git a/server/src/api_server/server_state.rs b/server/src/api_server/server_state.rs index 83faab33..8d641ed3 100644 --- a/server/src/api_server/server_state.rs +++ b/server/src/api_server/server_state.rs @@ -1,26 +1,30 @@ +use crate::storage::StorageBackend; +use lib_clapshot_grpc::proto::org::OrganizerInfo; +use parking_lot::{ + MappedRwLockReadGuard, MappedRwLockWriteGuard, RwLock, RwLockReadGuard, RwLockWriteGuard, +}; +use regex::Regex; use std::collections::HashMap; use std::path::{Path, PathBuf}; -use std::sync::Arc; -use lib_clapshot_grpc::proto::org::OrganizerInfo; -use parking_lot::{RwLock, MappedRwLockReadGuard, MappedRwLockWriteGuard, RwLockReadGuard, RwLockWriteGuard}; use std::sync::atomic::AtomicBool; -use regex::Regex; -use crate::storage::StorageBackend; +use std::sync::Arc; -use tokio::sync::Mutex; use anyhow::anyhow; +use tokio::sync::Mutex; -use base64::{Engine as _, engine::general_purpose as Base64GP}; +use base64::{engine::general_purpose as Base64GP, Engine as _}; use super::user_session::OpaqueGuard; -use super::{WsMsgSender, SenderList, SessionMap, SenderListMap, StringToStringMap, Res, UserSession, SendTo}; +use super::{ + Res, SendTo, SenderList, SenderListMap, SessionMap, StringToStringMap, UserSession, WsMsgSender, +}; use crate::client_cmd; -use crate::database::{DB, models, DbBasicQuery}; +use crate::database::{models, DbBasicQuery, DB}; use crate::grpc::grpc_client::OrganizerURI; use lib_clapshot_grpc::proto; /// Lists of all active connections and other server state vars -#[derive (Clone)] +#[derive(Clone)] pub struct ServerState { pub grpc_srv_listening_flag: Arc, pub terminate_flag: Arc, @@ -41,11 +45,10 @@ pub struct ServerState { pub organizer_uri: Option, pub organizer_has_connected: Arc, - pub organizer_info: Arc>> + pub organizer_info: Arc>>, } impl ServerState { - pub fn new( db: Arc, media_files_dir: &Path, @@ -56,8 +59,8 @@ impl ServerState { grpc_srv_listening_flag: Arc, default_user: String, terminate_flag: Arc, - org_http_headers_regex: Regex) -> ServerState - { + org_http_headers_regex: Regex, + ) -> ServerState { ServerState { db, media_files_dir: media_files_dir.to_path_buf(), @@ -90,7 +93,10 @@ impl ServerState { } } - pub fn get_session_write<'a>(&'a self, sid: &str) -> Option> { + pub fn get_session_write<'a>( + &'a self, + sid: &str, + ) -> Option> { let lock = self.sid_to_session.write(); if lock.contains_key(sid) { Some(RwLockWriteGuard::map(lock, |map| map.get_mut(sid).unwrap())) @@ -101,7 +107,8 @@ impl ServerState { /// Register a new sender (API connection) for a user_id. One user can have multiple connections. /// Returns a guard that will remove the sender when dropped. 
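// Hypothetical call-site sketch for the method below: the returned guard is
// held for the lifetime of the websocket session, and dropping it removes
// the sender again (see the Drop impls further down):
//
//     let _guard = state.register_user_session(&sid, &user_id, ses);
//     // ... pump websocket messages for this session ...
//     // _guard dropped here => sender and session are unregistered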
pub fn register_user_session(&self, sid: &str, user_id: &str, ses: UserSession) -> OpaqueGuard { - let guard1 = self.add_sender_to_maplist(user_id, ses.sender.clone(), &self.user_id_to_senders); + let guard1 = + self.add_sender_to_maplist(user_id, ses.sender.clone(), &self.user_id_to_senders); let guard2 = self.add_session_to_sid_map(sid, ses); Arc::new(Mutex::new((guard1, guard2))) } @@ -118,22 +125,36 @@ impl ServerState { } /// Send a client command to websocket of given recipient(s) - pub fn emit_cmd(&self, cmd: proto::client::server_to_client_cmd::Cmd, send_to: SendTo) -> Res - { + pub fn emit_cmd( + &self, + cmd: proto::client::server_to_client_cmd::Cmd, + send_to: SendTo, + ) -> Res { let cmd = proto::client::ServerToClientCmd { cmd: Some(cmd) }; let msg = serde_json::to_value(cmd)?; let msg = warp::ws::Message::text(msg.to_string()); match send_to { - SendTo::UserSession(sid) => { self.send_to_user_session(&sid, &msg) }, - SendTo::Collab(id) => { self.send_to_all_collab_users(&Some(id.into()), &msg) }, - SendTo::UserId(user_id) => { self.send_to_all_user_sessions(user_id, &msg) }, - SendTo::MediaFileId(media_file_id) => { self.send_to_all_media_file_sessions(media_file_id, &msg) }, - SendTo::MsgSender(sender) => { sender.send(msg)?; Ok(1u32) }, + SendTo::UserSession(sid) => self.send_to_user_session(&sid, &msg), + SendTo::Collab(id) => self.send_to_all_collab_users(&Some(id.into()), &msg), + SendTo::UserId(user_id) => self.send_to_all_user_sessions(user_id, &msg), + SendTo::MediaFileId(media_file_id) => { + self.send_to_all_media_file_sessions(media_file_id, &msg) + } + SendTo::MsgSender(sender) => { + sender.send(msg)?; + Ok(1u32) + } } } /// Send a user message to given recipients. - pub fn push_notify_message(&self, msg: &models::MessageInsert, send_to: SendTo, persist: bool, progress: Option) -> Res<()> { + pub fn push_notify_message( + &self, + msg: &models::MessageInsert, + send_to: SendTo, + persist: bool, + progress: Option, + ) -> Res<()> { let mut proto_msg = msg.to_proto3(); proto_msg.progress = progress; @@ -141,10 +162,14 @@ impl ServerState { let send_res = self.emit_cmd(cmd, send_to); if let Ok(sent_count) = send_res { if persist { - models::Message::insert(&mut self.db.conn()?, &models::MessageInsert { - seen: msg.seen || sent_count > 0, - ..msg.clone() - }).map_err(|e| anyhow!("Failed to persist msg: {}", e))?; + models::Message::insert( + &mut self.db.conn()?, + &models::MessageInsert { + seen: msg.seen || sent_count > 0, + ..msg.clone() + }, + ) + .map_err(|e| anyhow!("Failed to persist msg: {}", e))?; } }; send_res.map(|_| ()) @@ -158,20 +183,26 @@ impl ServerState { let map = self.user_id_to_senders.read(); for sender in map.get(user_id).unwrap_or(&vec![]).iter() { sender.send(msg.clone())?; - total_sent += 1; }; + total_sent += 1; + } Ok(total_sent) } /// Send a message to all sessions that are collaboratively viewing a media file. /// Bails out with error if any of the senders fail. /// Returns the number of messages sent. 
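// Usage sketch (hypothetical caller): emit_cmd serializes the command once,
// then fans it out according to the SendTo variant and reports how many
// sessions received it:
//
//     let n = server.emit_cmd(
//         client_cmd!(ShowMessages, { msgs: vec![msg] }),
//         SendTo::MediaFileId(&media_file_id),
//     )?;
//     tracing::debug!(sent = n, "notified current viewers");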
- pub fn send_to_all_collab_users(&self, collab_id: &Option, msg: &super::Message) -> Res { + pub fn send_to_all_collab_users( + &self, + collab_id: &Option, + msg: &super::Message, + ) -> Res { let mut total_sent = 0u32; if let Some(collab_id) = collab_id { let map = self.collab_id_to_senders.read(); for sender in map.get(collab_id).unwrap_or(&vec![]).iter() { sender.send(msg.clone())?; - total_sent += 1; }; + total_sent += 1; + } } Ok(total_sent) } @@ -181,8 +212,14 @@ impl ServerState { /// Returns a guard that will remove the sender when dropped. pub fn link_session_to_media_file(&self, session_id: &str, media_file_id: &str) -> Res<()> { let mut map = self.sid_to_session.write(); - let ses = map.get_mut(session_id).ok_or_else(|| anyhow!("Session {} not found", session_id))?; - let grd: OpaqueGuard = self.add_sender_to_maplist(media_file_id, ses.sender.clone(), &self.media_file_id_to_senders); + let ses = map + .get_mut(session_id) + .ok_or_else(|| anyhow!("Session {} not found", session_id))?; + let grd: OpaqueGuard = self.add_sender_to_maplist( + media_file_id, + ses.sender.clone(), + &self.media_file_id_to_senders, + ); ses.media_session_guard = Some(grd); Ok(()) } @@ -196,10 +233,19 @@ impl ServerState { pub fn sender_is_collab_participant(&self, collab_id: &str, sender: &WsMsgSender) -> bool { let senders = self.collab_id_to_senders.read(); - senders.get(collab_id).unwrap_or(&vec![]).iter().any(|s| s.same_channel(sender)) + senders + .get(collab_id) + .unwrap_or(&vec![]) + .iter() + .any(|s| s.same_channel(sender)) } - pub fn link_session_to_collab(&self, collab_id: &str, media_file_id: &str, sender: WsMsgSender) -> Res { + pub fn link_session_to_collab( + &self, + collab_id: &str, + media_file_id: &str, + sender: WsMsgSender, + ) -> Res { // GC collab media file map. (This might not be the optimal way to do this but at least it // will keep it from growing indefinitely.) self.garbage_collect_collab_media_file_map(); @@ -217,43 +263,69 @@ impl ServerState { /// Send a message to all sessions that are viewing a media file. /// Bails out with error if any of the senders fail. /// Returns the number of messages sent. - pub fn send_to_all_media_file_sessions(&self, media_file_id: &str, msg: &super::Message) -> Res { + pub fn send_to_all_media_file_sessions( + &self, + media_file_id: &str, + msg: &super::Message, + ) -> Res { let mut total_sent = 0u32; let map = self.media_file_id_to_senders.read(); for sender in map.get(media_file_id).unwrap_or(&vec![]).iter() { sender.send(msg.clone())?; - total_sent += 1; }; + total_sent += 1; + } Ok(total_sent) } // Common implementations for the above add functions. 
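// The shared pattern below, distilled (illustrative types, not this patch's
// exact code): insertion returns an RAII guard whose Drop removes the entry,
// so map membership always tracks the session's real lifetime:
//
//     struct Unregister { map: SenderListMap, key: String }
//     impl Drop for Unregister {
//         fn drop(&mut self) { self.map.write().remove(&self.key); }
//     }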
- fn add_sender_to_maplist(&self, key: &str, sender: WsMsgSender, maplist: &SenderListMap) -> OpaqueGuard { + fn add_sender_to_maplist( + &self, + key: &str, + sender: WsMsgSender, + maplist: &SenderListMap, + ) -> OpaqueGuard { let mut list = maplist.write(); let senders = list.entry(key.to_string()).or_insert(Vec::new()); senders.push(sender.clone()); - struct Guard { maplist: SenderListMap, sender: WsMsgSender, key: String } + struct Guard { + maplist: SenderListMap, + sender: WsMsgSender, + key: String, + } impl Drop for Guard { fn drop(&mut self) { let mut list = self.maplist.write(); let senders = list.entry(self.key.to_string()).or_insert(Vec::new()); senders.retain(|s| !self.sender.same_channel(&s)); - if senders.len() == 0 { list.remove(&self.key); } + if senders.len() == 0 { + list.remove(&self.key); + } } } - Arc::new(Mutex::new(Guard { maplist: maplist.clone(), sender: sender.clone(), key: key.to_string() })) + Arc::new(Mutex::new(Guard { + maplist: maplist.clone(), + sender: sender.clone(), + key: key.to_string(), + })) } fn add_session_to_sid_map(&self, sid: &str, ses: UserSession) -> OpaqueGuard { self.sid_to_session.write().insert(sid.into(), ses); - struct Guard { map: SessionMap, sid: String } + struct Guard { + map: SessionMap, + sid: String, + } impl Drop for Guard { fn drop(&mut self) { self.map.write().remove(&self.sid); } } - Arc::new(Mutex::new(Guard { map: self.sid_to_session.clone(), sid: sid.to_string() })) + Arc::new(Mutex::new(Guard { + map: self.sid_to_session.clone(), + sid: sid.to_string(), + })) } /// Reads the drawing data from disk and encodes it into a data URI, updating the comment's drawing field @@ -262,10 +334,17 @@ impl ServerState { if drawing != "" { // If drawing is present, read it from disk and encode it into a data URI. if !drawing.starts_with("data:") { - let path = self.media_files_dir.join(&c.media_file_id).join("drawings").join(&drawing); + let path = self + .media_files_dir + .join(&c.media_file_id) + .join("drawings") + .join(&drawing); if path.exists() { let data = tokio::fs::read(path).await?; - *drawing = format!("data:image/webp;base64,{}", Base64GP::STANDARD_NO_PAD.encode(&data)); + *drawing = format!( + "data:image/webp;base64,{}", + Base64GP::STANDARD_NO_PAD.encode(&data) + ); } else { tracing::warn!("Drawing file not found for comment: {}", c.id); c.comment += " [DRAWING NOT FOUND]"; @@ -273,7 +352,10 @@ impl ServerState { } else { // If drawing is already a data URI, just use it as is. // This shouldn't happen anymore, but it's here just in case. - tracing::warn!("Comment '{}' has data URI drawing stored in DB. Should be on disk.", c.id); + tracing::warn!( + "Comment '{}' has data URI drawing stored in DB. 
Should be on disk.", + c.id + ); } } }; diff --git a/server/src/api_server/test_utils.rs b/server/src/api_server/test_utils.rs index d4307dfe..a8555be3 100644 --- a/server/src/api_server/test_utils.rs +++ b/server/src/api_server/test_utils.rs @@ -2,29 +2,30 @@ //#![allow(unused_variables)] //#![allow(unused_imports)] -use std::sync::Arc; -use std::sync::atomic::AtomicBool; -use std::time::Duration; use futures_util::stream::StreamExt; use futures_util::SinkExt; use lib_clapshot_grpc::proto; use std::path::PathBuf; +use std::sync::atomic::AtomicBool; +use std::sync::Arc; +use std::time::Duration; use tokio_tungstenite::tungstenite::Message; -use crate::video_pipeline::IncomingFile; -use crate::api_server::{UserMessage}; -use crate::database::{DB, models}; +use crate::api_server::UserMessage; +use crate::database::{models, DB}; use crate::storage::StorageBackend; - - +use crate::video_pipeline::IncomingFile; #[macro_export] macro_rules! send_server_cmd { ($ws:expr, $cmd_name:ident, $options:expr) => {{ let cmd = proto::client::ClientToServerCmd { - cmd: Some(proto::client::client_to_server_cmd::Cmd::$cmd_name($options)), + cmd: Some(proto::client::client_to_server_cmd::Cmd::$cmd_name( + $options, + )), }; - let json_cmd = serde_json::to_string(&cmd).expect("Failed to serialize ClientToServerCmd to JSON"); + let json_cmd = + serde_json::to_string(&cmd).expect("Failed to serialize ClientToServerCmd to JSON"); crate::api_server::test_utils::write(&mut $ws, &json_cmd).await; }}; } @@ -43,16 +44,21 @@ pub(crate) struct ApiTestState { pub(crate) ws_url: String, } -pub(crate) type WsClient = tokio_tungstenite::WebSocketStream>; +pub(crate) type WsClient = + tokio_tungstenite::WebSocketStream>; pub(crate) async fn read(ws: &mut WsClient) -> Option { - let res = match async_std::future::timeout( - std::time::Duration::from_secs_f32(0.25), ws.next()).await { - Ok(Some(m)) => Some(m.expect("Failed to read server message")).map(|m| m.to_string()), - _ => None, + let res = match async_std::future::timeout(std::time::Duration::from_secs_f32(0.25), ws.next()) + .await + { + Ok(Some(m)) => Some(m.expect("Failed to read server message")).map(|m| m.to_string()), + _ => None, }; let res_str = res.as_ref().map(|s| s.as_str()).unwrap_or(""); - if let Some(Some(res_json)) = res.as_ref().map(|s| serde_json::from_str::(s).ok()) { + if let Some(Some(res_json)) = res + .as_ref() + .map(|s| serde_json::from_str::(s).ok()) + { println!("<--- [Client got]: {:#}", res_json); } else { println!("<--- [Client got]: {:#}", res_str); @@ -65,17 +71,33 @@ pub(crate) async fn expect_msg(ws: &mut WsClient) -> String { } pub(crate) async fn expect_parsed(ws: &mut WsClient) -> T - where T: serde::de::DeserializeOwned +where + T: serde::de::DeserializeOwned, { let msg = expect_msg(ws).await; - serde_json::from_str::(&msg).expect(format!("Failed to parse type '{}' message from JSON", std::any::type_name::()).as_str()) + serde_json::from_str::(&msg).expect( + format!( + "Failed to parse type '{}' message from JSON", + std::any::type_name::() + ) + .as_str(), + ) } pub(crate) async fn try_get_parsed(ws: &mut WsClient) -> Option - where T: serde::de::DeserializeOwned +where + T: serde::de::DeserializeOwned, { if let Some(msg) = read(ws).await { - Some(serde_json::from_str::(&msg).expect(format!("Failed to parse type '{}' message from JSON", std::any::type_name::()).as_str())) + Some( + serde_json::from_str::(&msg).expect( + format!( + "Failed to parse type '{}' message from JSON", + std::any::type_name::() + ) + .as_str(), + ), + ) } 
else { None } @@ -85,15 +107,21 @@ pub(crate) async fn try_get_parsed(ws: &mut WsClient) -> Option macro_rules! expect_client_cmd { ($ws:expr, $variant:ident) => {{ println!("Client expecting command '{}'...", stringify!($variant)); - match crate::api_server::test_utils::expect_parsed::($ws).await.cmd { + match crate::api_server::test_utils::expect_parsed::($ws) + .await + .cmd + { Some(lib_clapshot_grpc::proto::client::server_to_client_cmd::Cmd::$variant(v)) => { println!("...got '{}' ok.", stringify!($variant)); println!(". . ."); v - }, - _ => panic!("Client expected command '{}' BUT GOT SOMETHING ELSE.", stringify!($variant)), + } + _ => panic!( + "Client expected command '{}' BUT GOT SOMETHING ELSE.", + stringify!($variant) + ), } - }} + }}; } /* @@ -114,12 +142,15 @@ pub(crate) async fn read_cmd_data(ws: &mut WsClient) -> Option<(serde_json::Valu } */ -pub (crate) async fn wait_for_thumbnails(ws: &mut WsClient) { +pub(crate) async fn wait_for_thumbnails(ws: &mut WsClient) { println!("Waiting for thumbnail generation..."); let mut thumb_done = false; for _ in 0..12 { - match crate::api_server::test_utils::try_get_parsed::(ws).await - .map(|c| c.cmd).flatten() { + match crate::api_server::test_utils::try_get_parsed::(ws) + .await + .map(|c| c.cmd) + .flatten() + { Some(proto::client::server_to_client_cmd::Cmd::ShowMessages(m)) => { if m.msgs[0].r#type == proto::user_message::Type::MediaFileUpdated as i32 { thumb_done = true; @@ -127,14 +158,14 @@ pub (crate) async fn wait_for_thumbnails(ws: &mut WsClient) { } else { println!(" (... got some other message: {:?})", m.msgs[0]); } - }, + } None => { // Wait for file to be processed tokio::time::sleep(Duration::from_secs_f32(0.2)).await; - }, + } _ => panic!("Unexpected message while waitig for thumbnail generation"), } - }; + } if !thumb_done { panic!("... thumbnail generation TIMED OUT"); } @@ -143,28 +174,37 @@ pub (crate) async fn wait_for_thumbnails(ws: &mut WsClient) { } pub(crate) async fn expect_no_msg(ws: &mut WsClient) { - assert!(read(ws).await.is_none(), "Got unexpected message from server"); + assert!( + read(ws).await.is_none(), + "Got unexpected message from server" + ); } pub(crate) async fn write(ws: &mut WsClient, msg: &str) { println!("---> [Client sending]: {:#}", msg); - ws.send(Message::text(msg)).await.expect("Failed to send WS message"); + ws.send(Message::text(msg)) + .await + .expect("Failed to send WS message"); } pub(crate) async fn connect_client_ws(ws_url: &str, user_id: &str) -> WsClient { - use tokio_tungstenite::tungstenite::http; use tokio_tungstenite::connect_async; + use tokio_tungstenite::tungstenite::http; let request = http::Request::builder() .uri(ws_url) .header("Host", "127.0.0.1") .header("HTTP_X_REMOTE_USER_ID", user_id) - .header("HTTP_X_REMOTE_USER_NAME", format!("Username for {}", user_id)) + .header( + "HTTP_X_REMOTE_USER_NAME", + format!("Username for {}", user_id), + ) .header("Connection", "Upgrade") .header("Upgrade", "websocket") .header("Sec-WebSocket-Version", "13") .header("Sec-WebSocket-Key", "1234567890") - .body(()).unwrap(); + .body(()) + .unwrap(); let (mut ws, _) = connect_async(request).await.unwrap(); @@ -232,29 +272,38 @@ macro_rules! 
api_test { /// /// # Returns /// * OpenMediaFile message from the server -pub(crate) async fn open_media_file(ws: &mut WsClient, vid: &str) -> proto::client::server_to_client_cmd::OpenMediaFile -{ +pub(crate) async fn open_media_file( + ws: &mut WsClient, + vid: &str, +) -> proto::client::server_to_client_cmd::OpenMediaFile { println!("--------- TEST: open_media_file '{}'...", vid); use lib_clapshot_grpc::proto::client::client_to_server_cmd as cmd_enum; - send_server_cmd!(*ws, OpenMediaFile, cmd_enum::OpenMediaFile{ media_file_id: vid.into() }); + send_server_cmd!( + *ws, + OpenMediaFile, + cmd_enum::OpenMediaFile { + media_file_id: vid.into() + } + ); let ov = expect_client_cmd!(ws, OpenMediaFile); while let Some(msg) = read(ws).await { - let cmd: proto::client::ServerToClientCmd = serde_json::from_str(&msg).expect("Failed to parse ServerToClientCmd from JSON"); + let cmd: proto::client::ServerToClientCmd = + serde_json::from_str(&msg).expect("Failed to parse ServerToClientCmd from JSON"); match cmd.cmd { // Make sure the comments are for the media file we opened Some(proto::client::server_to_client_cmd::Cmd::AddComments(m)) => { assert!(m.comments.iter().all(|c| c.media_file_id == vid)); - }, + } // Thumbnail generation can take a while, so ignore it if it happens to be in the queue Some(proto::client::server_to_client_cmd::Cmd::ShowMessages(m)) => { assert!(m.msgs.iter().any(|m| m.message.contains("thumbnail"))); - }, - None => {}, + } + None => {} _ => panic!("Unexpected message from server: {}", msg), } - }; + } ov } diff --git a/server/src/api_server/tests.rs b/server/src/api_server/tests.rs index 2a95d10b..531887ee 100644 --- a/server/src/api_server/tests.rs +++ b/server/src/api_server/tests.rs @@ -1,34 +1,41 @@ #![allow(dead_code)] use crate::storage::StorageBackend; -use std::sync::Arc; +use lib_clapshot_grpc::proto; use std::str::FromStr; use std::sync::atomic::AtomicBool; -use lib_clapshot_grpc::proto; -use tracing_test::traced_test; use std::sync::atomic::Ordering::Relaxed; +use std::sync::Arc; +use tracing_test::traced_test; use reqwest::{multipart, Client}; -use crate::database::DbBasicQuery; -use crate::database::error::DBError; -use crate::api_server::{parse_auth_headers, run_api_server_async, validate_org_http_headers_regex, UserMessage, UserMessageTopic}; use crate::api_server::server_state::ServerState; +use crate::api_server::test_utils::{ + connect_client_ws, expect_msg, expect_no_msg, open_media_file, write, ApiTestState, +}; +use crate::api_server::{ + parse_auth_headers, run_api_server_async, validate_org_http_headers_regex, UserMessage, + UserMessageTopic, +}; +use crate::database::error::DBError; use crate::database::models::{self}; use crate::database::tests::make_test_db; -use crate::api_server::test_utils::{ApiTestState, expect_msg, expect_no_msg, write, open_media_file, connect_client_ws}; +use crate::database::DbBasicQuery; use crate::grpc::db_models::proto_msg_type_to_event_name; use warp::http::{HeaderMap, HeaderValue}; -use lib_clapshot_grpc::proto::client::client_to_server_cmd::{AddComment, DelComment, DelMediaFile, EditComment, ListMyMessages, OpenNavigationPage, OpenMediaFile, RenameMediaFile}; +use lib_clapshot_grpc::proto::client::client_to_server_cmd::{ + AddComment, DelComment, DelMediaFile, EditComment, ListMyMessages, OpenMediaFile, + OpenNavigationPage, RenameMediaFile, +}; use std::convert::TryFrom; // --------------------------------------------------------------------------------------------- #[traced_test] -async fn test_echo() -{ +async fn 
test_echo() { api_test! {[ws, _ts] write(&mut ws, r#"{"cmd":"echo","data":"hello"}"#).await; assert_eq!(expect_msg(&mut ws).await, "Echo: hello"); @@ -37,8 +44,7 @@ async fn test_echo() #[tokio::test] #[traced_test] -async fn test_api_push_msg() -{ +async fn test_api_push_msg() { api_test! {[ws, ts] let mut umsg = UserMessage { msg: "test_msg".into(), @@ -61,11 +67,9 @@ async fn test_api_push_msg() } } - #[tokio::test] #[traced_test] -async fn test_api_navigation_page() -{ +async fn test_api_navigation_page() { api_test! {[ws, ts] send_server_cmd!(ws, OpenNavigationPage, OpenNavigationPage{..Default::default()}); let sp = expect_client_cmd!(&mut ws, ShowPage); @@ -78,11 +82,9 @@ async fn test_api_navigation_page() } } - #[tokio::test] #[traced_test] -async fn test_api_del_video() -{ +async fn test_api_del_video() { api_test! {[ws, ts] let conn = &mut ts.db.conn().unwrap(); @@ -124,11 +126,9 @@ async fn test_api_del_video() } } - #[tokio::test] #[traced_test] -async fn test_api_open_media_file() -{ +async fn test_api_open_media_file() { api_test! {[ws, ts] for vid in &ts.media_files { let v = open_media_file(&mut ws, &vid.id).await.media_file.unwrap(); @@ -152,16 +152,17 @@ async fn test_api_open_media_file() #[tokio::test] #[traced_test] -async fn test_api_open_bad_media_file() -{ +async fn test_api_open_bad_media_file() { api_test! {[ws, ts] - send_server_cmd!(ws, OpenMediaFile, OpenMediaFile{media_file_id: "non-existent".into()}); - expect_user_msg(&mut ws, proto::user_message::Type::Error).await; - } + send_server_cmd!(ws, OpenMediaFile, OpenMediaFile{media_file_id: "non-existent".into()}); + expect_user_msg(&mut ws, proto::user_message::Type::Error).await; + } } -pub async fn expect_user_msg(ws: &mut crate::api_server::test_utils::WsClient, evt_type: proto::user_message::Type ) -> proto::UserMessage -{ +pub async fn expect_user_msg( + ws: &mut crate::api_server::test_utils::WsClient, + evt_type: proto::user_message::Type, +) -> proto::UserMessage { println!(" --expect_user_msg of type {:?} ....", evt_type); let cmd = expect_client_cmd!(ws, ShowMessages); assert_eq!(cmd.msgs.len(), 1); @@ -171,8 +172,7 @@ pub async fn expect_user_msg(ws: &mut crate::api_server::test_utils::WsClient, e #[tokio::test] #[traced_test] -async fn test_api_rename_media_file() -{ +async fn test_api_rename_media_file() { api_test! {[ws, ts] let media_file = &ts.media_files[0]; open_media_file(&mut ws, &media_file.id).await; @@ -196,12 +196,9 @@ async fn test_api_rename_media_file() } } - - #[tokio::test] #[traced_test] -async fn test_api_add_plain_comment() -{ +async fn test_api_add_plain_comment() { api_test! {[ws, ts] let media = &ts.media_files[0]; send_server_cmd!(ws, AddComment, AddComment{media_file_id: media.id.clone(), comment: "Test comment".into(), ..Default::default()}); @@ -234,11 +231,9 @@ async fn test_api_add_plain_comment() } } - #[tokio::test] #[traced_test] -async fn test_api_comment_other_users_video() -{ +async fn test_api_comment_other_users_video() { api_test! {[ws, ts] let other_users_vid = &ts.media_files[1]; assert_ne!(other_users_vid.user_id, ts.media_files[0].user_id); @@ -255,8 +250,7 @@ async fn test_api_comment_other_users_video() #[tokio::test] #[traced_test] -async fn test_api_edit_comment() -{ +async fn test_api_edit_comment() { api_test! 
{[ws, ts] let media = &ts.media_files[0]; let com = &ts.comments[0]; @@ -293,11 +287,9 @@ async fn test_api_edit_comment() } } - #[tokio::test] #[traced_test] -async fn test_api_del_comment() -{ +async fn test_api_del_comment() { // Summary of comment thread used in this test: // // media_file[0]: @@ -340,11 +332,9 @@ async fn test_api_del_comment() } } - #[tokio::test] #[traced_test] -async fn test_api_list_my_messages() -{ +async fn test_api_list_my_messages() { api_test! {[ws, ts] send_server_cmd!(ws, ListMyMessages, ListMyMessages{}); @@ -378,11 +368,9 @@ async fn test_api_list_my_messages() } } - #[tokio::test] #[traced_test] -async fn test_multipart_upload() -{ +async fn test_multipart_upload() { api_test! {[_ws, ts] // Upload file let file_body = "Testfile 1234"; @@ -406,7 +394,6 @@ async fn test_multipart_upload() } } - #[test] fn test_validate_org_http_headers_regex() { // Test valid patterns @@ -428,7 +415,10 @@ fn test_header_filtering() { headers.insert("X-Remote-User-Id", HeaderValue::from_static("testuser")); headers.insert("X-Remote-User-Name", HeaderValue::from_static("Test User")); headers.insert("X-Remote-User-Can-Upload", HeaderValue::from_static("true")); - headers.insert("X_REMOTE_USER_GROUPS", HeaderValue::from_static("admins,users")); + headers.insert( + "X_REMOTE_USER_GROUPS", + HeaderValue::from_static("admins,users"), + ); headers.insert("Authorization", HeaderValue::from_static("Bearer token123")); headers.insert("Content-Type", HeaderValue::from_static("application/json")); @@ -442,10 +432,22 @@ fn test_header_filtering() { // Verify header filtering (HeaderMap converts names to lowercase) assert_eq!(filtered_headers.len(), 4); // 4 X-Remote headers - assert_eq!(filtered_headers.get("x-remote-user-id"), Some(&"testuser".to_string())); - assert_eq!(filtered_headers.get("x-remote-user-name"), Some(&"Test User".to_string())); - assert_eq!(filtered_headers.get("x-remote-user-can-upload"), Some(&"true".to_string())); - assert_eq!(filtered_headers.get("x_remote_user_groups"), Some(&"admins,users".to_string())); + assert_eq!( + filtered_headers.get("x-remote-user-id"), + Some(&"testuser".to_string()) + ); + assert_eq!( + filtered_headers.get("x-remote-user-name"), + Some(&"Test User".to_string()) + ); + assert_eq!( + filtered_headers.get("x-remote-user-can-upload"), + Some(&"true".to_string()) + ); + assert_eq!( + filtered_headers.get("x_remote_user_groups"), + Some(&"admins,users".to_string()) + ); // Verify non-matching headers are filtered out (also lowercase) assert!(!filtered_headers.contains_key("authorization")); @@ -475,7 +477,10 @@ fn test_remote_error_header() { // Test with X-Remote-Error header let mut headers = HeaderMap::new(); - headers.insert("X-Remote-Error", HeaderValue::from_static("Access denied by IDP")); + headers.insert( + "X-Remote-Error", + HeaderValue::from_static("Access denied by IDP"), + ); headers.insert("X-Remote-User-Id", HeaderValue::from_static("testuser")); let (user_id, _user_name, _is_admin, _cookies, filtered_headers, remote_error) = diff --git a/server/src/api_server/user_session.rs b/server/src/api_server/user_session.rs index a90c3615..dfe808c1 100644 --- a/server/src/api_server/user_session.rs +++ b/server/src/api_server/user_session.rs @@ -1,7 +1,11 @@ +use crate::{ + client_cmd, + database::models::{self, Comment, MediaFile}, + grpc::grpc_client::OrganizerConnection, +}; use std::sync::Arc; -use crate::{database::models::{self, MediaFile, Comment}, grpc::grpc_client::OrganizerConnection, client_cmd}; -use super::{WsMsgSender, 
server_state::ServerState, SendTo}; +use super::{server_state::ServerState, SendTo, WsMsgSender}; use lib_clapshot_grpc::proto; use tracing::{debug, error}; @@ -10,7 +14,7 @@ type Res = anyhow::Result; pub enum Topic<'a> { MediaFile(&'a str), Comment(i32), - None + None, } #[macro_export] @@ -55,23 +59,31 @@ macro_rules! send_user_ok( ($user_id:expr, $server:expr, $topic:expr, $msg:expr) => { send_user_ok!($user_id, $server, $topic, $msg, String::new(), false); }; ); -#[derive (Debug, Clone)] +#[derive(Debug, Clone)] pub enum AuthzTopic<'a> { - MediaFile(&'a MediaFile, proto::org::authz_user_action_request::media_file_op::Op), - Comment(&'a Comment, proto::org::authz_user_action_request::comment_op::Op), - Other(Option<&'a str>, proto::org::authz_user_action_request::other_op::Op) + MediaFile( + &'a MediaFile, + proto::org::authz_user_action_request::media_file_op::Op, + ), + Comment( + &'a Comment, + proto::org::authz_user_action_request::comment_op::Op, + ), + Other( + Option<&'a str>, + proto::org::authz_user_action_request::other_op::Op, + ), } -#[derive (thiserror::Error, Debug)] +#[derive(thiserror::Error, Debug)] pub enum AuthzError { #[error("Permission denied")] Denied, } - pub type OpaqueGuard = Arc>; -#[derive (Clone)] +#[derive(Clone)] pub struct UserSession { pub sid: String, pub sender: WsMsgSender, @@ -89,16 +101,25 @@ pub struct UserSession { } impl UserSession { - - pub async fn emit_new_comment(&self, server: &ServerState, mut c: models::Comment, send_to: SendTo<'_>) -> Res<()> { + pub async fn emit_new_comment( + &self, + server: &ServerState, + mut c: models::Comment, + send_to: SendTo<'_>, + ) -> Res<()> { server.fetch_drawing_data_into_comment(&mut c).await?; let cmd = client_cmd!(AddComments, {comments: vec![c.to_proto3()]}); server.emit_cmd(cmd, send_to).map(|_| ()) } } - -fn try_send_error<'a>(user_id: &str, server: &ServerState, msg: String, details: Option, op: &AuthzTopic<'a>) -> anyhow::Result<()> { +fn try_send_error<'a>( + user_id: &str, + server: &ServerState, + msg: String, + details: Option, + op: &AuthzTopic<'a>, +) -> anyhow::Result<()> { let topic = match op { AuthzTopic::MediaFile(v, _op) => Topic::MediaFile(&v.id), AuthzTopic::Comment(c, _op) => Topic::Comment(c.id), @@ -112,8 +133,6 @@ fn try_send_error<'a>(user_id: &str, server: &ServerState, msg: String, details: Ok(()) } - - /// Check from Organizer if the user is allowed to perform given action. 
/// /// Some(true) = allowed @@ -130,8 +149,7 @@ pub async fn org_authz<'a>( server: &ServerState, organizer: &Option>>, op: AuthzTopic<'a>, -) -> Option -{ +) -> Option { let user_id = match &session.user { Some(ui) => ui.id.clone(), None => { @@ -142,60 +160,77 @@ pub async fn org_authz<'a>( let org = match &organizer { Some(org) => org, - None => { return None; } + None => { + return None; + } }; tracing::debug!(op=?op, user=user_id, desc, "Checking authz from Organizer"); use proto::org::authz_user_action_request as authz_op; let pop = match op { - AuthzTopic::MediaFile(v, op) => authz_op::Op::MediaFileOp( - authz_op::MediaFileOp { - op: op.into(), - media_file: Some(v.to_proto3(&server.media_base_url, vec![])) }), // omit subtitles for authz check - AuthzTopic::Comment(c, op) => authz_op::Op::CommentOp( - authz_op::CommentOp { - op: op.into(), - comment: Some(c.to_proto3()) }), - AuthzTopic::Other(subj, op) => authz_op::Op::OtherOp( - authz_op::OtherOp { - op: op.into(), - subject: subj.map(|s| s.into()) }), + AuthzTopic::MediaFile(v, op) => authz_op::Op::MediaFileOp(authz_op::MediaFileOp { + op: op.into(), + media_file: Some(v.to_proto3(&server.media_base_url, vec![])), + }), // omit subtitles for authz check + AuthzTopic::Comment(c, op) => authz_op::Op::CommentOp(authz_op::CommentOp { + op: op.into(), + comment: Some(c.to_proto3()), + }), + AuthzTopic::Other(subj, op) => authz_op::Op::OtherOp(authz_op::OtherOp { + op: op.into(), + subject: subj.map(|s| s.into()), + }), + }; + let req = proto::org::AuthzUserActionRequest { + ses: Some(session.clone()), + op: Some(pop), }; - let req = proto::org::AuthzUserActionRequest { ses: Some(session.clone()), op: Some(pop) }; let res = org.lock().await.authz_user_action(req).await; match res { Err(e) => { if e.code() == tonic::Code::Unimplemented { - tracing::debug!(desc, user=user_id, "Organizer doesn't support authz"); + tracing::debug!(desc, user = user_id, "Organizer doesn't support authz"); None } else if e.code() == tonic::Code::Aborted { tracing::warn!(desc, user=user_id, "Organizer gRPC.ABORTED authz request. Unsupported behavior for authz_user_action. 
Denying by default."); Some(false) } else { error!(desc, user=&user_id, err=?e, "Error while authorizing user action"); - try_send_error(&user_id, &server, format!("Internal error in authz: {}", desc), None, &op).ok(); + try_send_error( + &user_id, + &server, + format!("Internal error in authz: {}", desc), + None, + &op, + ) + .ok(); Some(false) } - }, - Ok(res) => { - match res.get_ref().is_authorized { - Some(false) => { - let msg = res.get_ref().message.clone().map(|s| s).unwrap_or_else(|| "Permission denied".to_string()); - let details = res.get_ref().details.clone(); - if msg_on_deny { try_send_error(&user_id, &server, msg, details, &op).ok(); } - debug!(desc, user=user_id, "Organizer: Permission denied"); - Some(false) - }, - Some(true) => { - debug!(desc, user=user_id, "Organizer: Authorized OK"); - Some(true) - }, - None => { - debug!(desc, user=user_id, "Organizer: don't care, use defaults"); - None + } + Ok(res) => match res.get_ref().is_authorized { + Some(false) => { + let msg = res + .get_ref() + .message + .clone() + .map(|s| s) + .unwrap_or_else(|| "Permission denied".to_string()); + let details = res.get_ref().details.clone(); + if msg_on_deny { + try_send_error(&user_id, &server, msg, details, &op).ok(); } + debug!(desc, user = user_id, "Organizer: Permission denied"); + Some(false) } - } + Some(true) => { + debug!(desc, user = user_id, "Organizer: Authorized OK"); + Some(true) + } + None => { + debug!(desc, user = user_id, "Organizer: don't care, use defaults"); + None + } + }, } } @@ -209,12 +244,25 @@ pub async fn org_authz_with_default<'a>( op: AuthzTopic<'a>, ) -> Result<(), AuthzError> { if let Some(res) = org_authz(session, desc, msg_on_deny, server, organizer, op.clone()).await { - if res { Ok(()) } else { Err(AuthzError::Denied) } + if res { + Ok(()) + } else { + Err(AuthzError::Denied) + } } else { - if default { Ok(()) } else { + if default { + Ok(()) + } else { if msg_on_deny { if let Some(ui) = &session.user { - try_send_error(&ui.id, &server, format!("Permission denied: {}", desc), Some(format!("{:?}", &op)), &op).ok(); + try_send_error( + &ui.id, + &server, + format!("Permission denied: {}", desc), + Some(format!("{:?}", &op)), + &op, + ) + .ok(); } else { tracing::error!(desc, "No user ID in session. 
Couldn't send deny message from org_authz_with_default"); } diff --git a/server/src/api_server/ws_handers.rs b/server/src/api_server/ws_handers.rs index 99fff452..096566cb 100644 --- a/server/src/api_server/ws_handers.rs +++ b/server/src/api_server/ws_handers.rs @@ -2,13 +2,17 @@ #![allow(unused_variables)] #![allow(unused_imports)] +use lib_clapshot_grpc::proto::client::client_to_server_cmd::{ + AddSubtitle, CollabReport, DelComment, DelMediaFile, DelSubtitle, EditComment, + EditSubtitleInfo, JoinCollab, LeaveCollab, OpenMediaFile, OpenNavigationPage, RenameMediaFile, + ReorderItems, +}; +use lib_clapshot_grpc::proto::client::ClientToServerCmd; +use parking_lot::RwLock; use std::collections::HashMap; use std::path::{Path, PathBuf}; -use std::sync::Arc; use std::str::FromStr; -use lib_clapshot_grpc::proto::client::ClientToServerCmd; -use lib_clapshot_grpc::proto::client::client_to_server_cmd::{AddSubtitle, CollabReport, DelComment, DelMediaFile, DelSubtitle, EditComment, EditSubtitleInfo, JoinCollab, LeaveCollab, OpenMediaFile, OpenNavigationPage, RenameMediaFile, ReorderItems}; -use parking_lot::RwLock; +use std::sync::Arc; type WsMsg = warp::ws::Message; type Res = anyhow::Result; @@ -16,43 +20,58 @@ type MsgSender = tokio::sync::mpsc::UnboundedSender; type SenderList = Vec; type SenderListMap = Arc>>; -use serde_json::json; use anyhow::{anyhow, bail, Context}; +use serde_json::json; -use inflector::Inflector; -use data_url::{DataUrl, mime}; -use sha2::{Sha256, Digest}; +use data_url::{mime, DataUrl}; use hex; +use inflector::Inflector; +use sha2::{Digest, Sha256}; -use super::user_session::{self, AuthzTopic, org_authz_with_default}; +use super::user_session::{self, org_authz_with_default, AuthzTopic}; use super::UserSession; use crate::api_server::server_state::ServerState; use crate::api_server::user_session::Topic; use crate::database::error::DBError; -use crate::database::{models, DBPaging, DbBasicQuery, DbQueryByMediaFile, DbQueryByUser, DbUpdate, DB}; -use crate::{client_cmd, optional_str_to_i32_or_tonic_error, send_user_error, send_user_ok, str_to_i32_or_tonic_error}; +use crate::database::{ + models, DBPaging, DbBasicQuery, DbQueryByMediaFile, DbQueryByUser, DbUpdate, DB, +}; +use crate::{ + client_cmd, optional_str_to_i32_or_tonic_error, send_user_error, send_user_ok, + str_to_i32_or_tonic_error, +}; use lib_clapshot_grpc::proto; use proto::org::authz_user_action_request as authz_req; - /// Get media file by ID from DB, or send user error. /// Return None if media file not found and error was sent, or Some(MediaFile) if found. -async fn get_media_file_or_send_error(media_file_id: Option<&str>, ses: &Option<&mut UserSession>, server: &ServerState) -> Res> { +async fn get_media_file_or_send_error( + media_file_id: Option<&str>, + ses: &Option<&mut UserSession>, + server: &ServerState, +) -> Res> { let media_file_id = media_file_id.ok_or(anyhow!("media file id missing"))?; match models::MediaFile::get(&mut server.db.conn()?, &media_file_id.into()) { Err(DBError::NotFound()) => { if let Some(ses) = ses { - send_user_error!(ses.user_id, server, Topic::MediaFile(media_file_id), "No such media file."); + send_user_error!( + ses.user_id, + server, + Topic::MediaFile(media_file_id), + "No such media file." 
+ ); }; Ok(None) } - Err(e) => { bail!(e); } - Ok(v) => { Ok(Some(v)) } + Err(e) => { + bail!(e); + } + Ok(v) => Ok(Some(v)), } } @@ -61,9 +80,21 @@ async fn get_media_file_or_send_error(media_file_id: Option<&str>, ses: &Option< // --------------------------------------------------------------------- /// Send user a navigation page to browse the files / folders they have (and/or something else, if Organizer handles it). -pub async fn msg_open_navigation_page(data: &OpenNavigationPage , ses: &mut UserSession, server: &ServerState) -> Res<()> { - org_authz_with_default(&ses.org_session, "list media files", true, server, - &ses.organizer, true, AuthzTopic::Other(None, authz_req::other_op::Op::ViewHome)).await?; +pub async fn msg_open_navigation_page( + data: &OpenNavigationPage, + ses: &mut UserSession, + server: &ServerState, +) -> Res<()> { + org_authz_with_default( + &ses.org_session, + "list media files", + true, + server, + &ses.organizer, + true, + AuthzTopic::Other(None, authz_req::other_op::Op::ViewHome), + ) + .await?; // Try to delegate request to Organizer. if let Some(org) = &ses.organizer { @@ -76,13 +107,15 @@ pub async fn msg_open_navigation_page(data: &OpenNavigationPage , ses: &mut User if e.code() == tonic::Code::Unimplemented { tracing::debug!("Organizer doesn't implement navigate_page(). Using default."); } else if e.code() == tonic::Code::Aborted { - tracing::debug!("Ignoring org.navigate_page() result because it GrpcStatus.ABORTED."); + tracing::debug!( + "Ignoring org.navigate_page() result because it GrpcStatus.ABORTED." + ); return Ok(()); } else { tracing::error!(err=?e, "Error in organizer navigate_page() call"); anyhow::bail!("{}: {}", e.code(), e.message()); } - }, + } Ok(res) => { let res = res.into_inner(); server.emit_cmd( @@ -91,7 +124,8 @@ pub async fn msg_open_navigation_page(data: &OpenNavigationPage , ses: &mut User page_id: res.page_id.clone(), page_title: res.page_title, }), - super::SendTo::UserSession(&ses.sid))?; + super::SendTo::UserSession(&ses.sid), + )?; return Ok(()); } } @@ -100,13 +134,25 @@ pub async fn msg_open_navigation_page(data: &OpenNavigationPage , ses: &mut User // Organizer didn't handle this, so return a default listing. let mut media_files: Vec = Vec::new(); - for m in models::MediaFile::get_by_user(&mut server.db.conn()?, &ses.user_id, DBPaging::default())? { - let subs = models::Subtitle::get_by_media_file(&mut server.db.conn()?, &m.id, DBPaging::default())?; + for m in + models::MediaFile::get_by_user(&mut server.db.conn()?, &ses.user_id, DBPaging::default())? + { + let subs = models::Subtitle::get_by_media_file( + &mut server.db.conn()?, + &m.id, + DBPaging::default(), + )?; media_files.push(m.to_proto3(&server.media_base_url, subs)); } - let h_txt = if media_files.is_empty() { "
You have no media yet." } else { "All your media files" };
- let heading = proto::PageItem{ item: Some(proto::page_item::Item::Html(h_txt.into()))};
+ let h_txt = if media_files.is_empty() {
+ "You have no media yet."
+ } else {
+ "All your media files
" + }; + let heading = proto::PageItem { + item: Some(proto::page_item::Item::Html(h_txt.into())), + }; let listing = crate::grpc::folder_listing_for_media_files(&media_files); let page = vec![heading, listing]; @@ -116,23 +162,38 @@ pub async fn msg_open_navigation_page(data: &OpenNavigationPage , ses: &mut User Ok(()) } - /// User opens a media file. /// Send them the media info and all comments related to it. /// Register the session as a viewer of the file (media_file_session_guard). -pub async fn msg_open_media_file(data: &OpenMediaFile, ses: &mut UserSession, server: &ServerState) -> Res<()> { - if let Some(v) = get_media_file_or_send_error(Some(&data.media_file_id), &Some(ses), server).await? { - org_authz_with_default(&ses.org_session, - "open media file", true, server, &ses.organizer, - true, AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::View)).await?; +pub async fn msg_open_media_file( + data: &OpenMediaFile, + ses: &mut UserSession, + server: &ServerState, +) -> Res<()> { + if let Some(v) = + get_media_file_or_send_error(Some(&data.media_file_id), &Some(ses), server).await? + { + org_authz_with_default( + &ses.org_session, + "open media file", + true, + server, + &ses.organizer, + true, + AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::View), + ) + .await?; send_open_media_file_cmd(server, &ses.sid, &v.id).await?; ses.cur_media_file_id = Some(v.id); } Ok(()) } - -pub async fn send_open_media_file_cmd(server: &ServerState, session_id: &str, media_file_id: &str) -> Res<()> { +pub async fn send_open_media_file_cmd( + server: &ServerState, + session_id: &str, + media_file_id: &str, +) -> Res<()> { server.link_session_to_media_file(session_id, media_file_id)?; let conn = &mut server.db.conn()?; let v_db = models::MediaFile::get(conn, &media_file_id.into())?; @@ -143,7 +204,8 @@ pub async fn send_open_media_file_cmd(server: &ServerState, session_id: &str, me } server.emit_cmd( client_cmd!(OpenMediaFile, {media_file: Some(v)}), - super::SendTo::UserSession(session_id))?; + super::SendTo::UserSession(session_id), + )?; let mut cmts = vec![]; for mut c in models::Comment::get_by_media_file(conn, media_file_id, DBPaging::default())? { server.fetch_drawing_data_into_comment(&mut c).await?; @@ -151,31 +213,51 @@ pub async fn send_open_media_file_cmd(server: &ServerState, session_id: &str, me } server.emit_cmd( client_cmd!(AddComments, {comments: cmts}), - super::SendTo::UserSession(session_id))?; + super::SendTo::UserSession(session_id), + )?; Ok(()) } - -pub async fn del_media_file_and_cleanup(media_file_id: &str, ses: Option<&mut UserSession>, server: &ServerState) -> Res<()> { - tracing::info!(media_file_id=media_file_id, user_id=ses.as_ref().map(|u|u.user_id.clone()), "Trashing media file."); +pub async fn del_media_file_and_cleanup( + media_file_id: &str, + ses: Option<&mut UserSession>, + server: &ServerState, +) -> Res<()> { + tracing::info!( + media_file_id = media_file_id, + user_id = ses.as_ref().map(|u| u.user_id.clone()), + "Trashing media file." + ); if let Some(v) = get_media_file_or_send_error(Some(media_file_id), &ses, server).await? 
{ - // Check authorization against user session, if provided if let Some(ses) = &ses { let default_perm = ses.user_id == (&v).user_id || ses.is_admin; - org_authz_with_default(&ses.org_session, "delete media file", true, server, &ses.organizer, - default_perm, AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::Delete)).await?; + org_authz_with_default( + &ses.org_session, + "delete media file", + true, + server, + &ses.organizer, + default_perm, + AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::Delete), + ) + .await?; } models::MediaFile::delete(&mut server.db.conn()?, &v.id)?; - let mut details = format!("Added by '{}' on {}. Filename was {}.", + let mut details = format!( + "Added by '{}' on {}. Filename was {}.", v.user_id.clone(), v.added_time, - v.orig_filename.clone().unwrap_or_default()); + v.orig_filename.clone().unwrap_or_default() + ); fn backup_media_file_db_row(server: &ServerState, v: &models::MediaFile) -> Res<()> { - let backup_file = server.media_files_dir.join(v.id.clone()).join("db_backup.json"); + let backup_file = server + .media_files_dir + .join(v.id.clone()) + .join("db_backup.json"); if backup_file.exists() { std::fs::remove_file(&backup_file)?; } @@ -184,14 +266,17 @@ pub async fn del_media_file_and_cleanup(media_file_id: &str, ses: Option<&mut Us Ok(()) } - fn move_media_file_to_trash(server: &ServerState, media_file_id: &str) -> Res<()> - { + fn move_media_file_to_trash(server: &ServerState, media_file_id: &str) -> Res<()> { let media_file_dir = server.media_files_dir.join(media_file_id); let trash_dir = server.media_files_dir.join("trash"); if !trash_dir.exists() { std::fs::create_dir(&trash_dir)?; } - let hash_and_datetime = format!("{}_{}", media_file_id, chrono::Utc::now().format("%Y%m%d-%H%M%S")); + let hash_and_datetime = format!( + "{}_{}", + media_file_id, + chrono::Utc::now().format("%Y%m%d-%H%M%S") + ); let media_file_trash_dir = trash_dir.join(hash_and_datetime); std::fs::rename(&media_file_dir, &media_file_trash_dir)?; Ok(()) @@ -201,7 +286,6 @@ pub async fn del_media_file_and_cleanup(media_file_id: &str, ses: Option<&mut Us if let Err(e) = backup_media_file_db_row(server, &v) { details.push_str(&format!(" WARNING: DB row backup failed: {:?}.", e)); cleanup_errors = true; - } if let Err(e) = move_media_file_to_trash(server, &v.id) { details.push_str(&format!(" WARNING: Move to trash failed: {:?}.", e)); @@ -210,71 +294,124 @@ pub async fn del_media_file_and_cleanup(media_file_id: &str, ses: Option<&mut Us if let Some(ses) = ses { let media_type_str = v.media_type.unwrap_or("file".to_string()).to_title_case(); - send_user_ok!(&ses.user_id, &server, Topic::MediaFile(&v.id), - if !cleanup_errors { format!("{} deleted.", media_type_str) } else { format!("{} deleted, but cleanup had errors.", media_type_str) }, - details, true); + send_user_ok!( + &ses.user_id, + &server, + Topic::MediaFile(&v.id), + if !cleanup_errors { + format!("{} deleted.", media_type_str) + } else { + format!("{} deleted, but cleanup had errors.", media_type_str) + }, + details, + true + ); } } Ok(()) } - -pub async fn msg_del_media_file(data: &DelMediaFile, ses: &mut UserSession, server: &ServerState) -> Res<()> { +pub async fn msg_del_media_file( + data: &DelMediaFile, + ses: &mut UserSession, + server: &ServerState, +) -> Res<()> { del_media_file_and_cleanup(&data.media_file_id, Some(ses), server).await } - -pub async fn msg_rename_media_file(data: &RenameMediaFile, ses: &mut UserSession, server: &ServerState) -> Res<()> { - if let Some(v) = 
get_media_file_or_send_error(Some(&data.media_file_id), &Some(ses), server).await? { +pub async fn msg_rename_media_file( + data: &RenameMediaFile, + ses: &mut UserSession, + server: &ServerState, +) -> Res<()> { + if let Some(v) = + get_media_file_or_send_error(Some(&data.media_file_id), &Some(ses), server).await? + { let default_perm = ses.user_id == (&v).user_id || ses.is_admin; - org_authz_with_default(&ses.org_session, "rename media file", true, server, &ses.organizer, - default_perm, AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::Rename)).await?; + org_authz_with_default( + &ses.org_session, + "rename media file", + true, + server, + &ses.organizer, + default_perm, + AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::Rename), + ) + .await?; let new_name = data.new_name.trim(); if new_name.is_empty() || !new_name.chars().any(|c| c.is_alphanumeric()) { - send_user_error!(&ses.user_id, server, Topic::MediaFile(&v.id), "Invalid file name (must have letters/numbers)"); + send_user_error!( + &ses.user_id, + server, + Topic::MediaFile(&v.id), + "Invalid file name (must have letters/numbers)" + ); return Ok(()); } if new_name.len() > 160 { - send_user_error!(&ses.user_id, server, Topic::MediaFile(&v.id), "Name too long (max 160)"); + send_user_error!( + &ses.user_id, + server, + Topic::MediaFile(&v.id), + "Name too long (max 160)" + ); return Ok(()); } models::MediaFile::rename(&mut server.db.conn()?, &v.id, new_name)?; let media_type_str = v.media_type.unwrap_or("file".to_string()).to_title_case(); - send_user_ok!(&ses.user_id, server, Topic::MediaFile(&v.id), format!("{} renamed.", media_type_str), - format!("New name: '{}'", new_name), true); + send_user_ok!( + &ses.user_id, + server, + Topic::MediaFile(&v.id), + format!("{} renamed.", media_type_str), + format!("New name: '{}'", new_name), + true + ); } Ok(()) } - -pub async fn msg_add_comment(data: &proto::client::client_to_server_cmd::AddComment, ses: &mut UserSession, server: &ServerState) -> Res<()> { - - let media_file_id = match get_media_file_or_send_error(Some(&data.media_file_id), &Some(ses), server).await? { - Some(v) => { - let default_perm = true; // anyone can comment on any media file - org_authz_with_default(&ses.org_session, "comment media file", true, server, &ses.organizer, - default_perm, AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::Comment)).await?; - v.id - }, - None => return Ok(()), - }; +pub async fn msg_add_comment( + data: &proto::client::client_to_server_cmd::AddComment, + ses: &mut UserSession, + server: &ServerState, +) -> Res<()> { + let media_file_id = + match get_media_file_or_send_error(Some(&data.media_file_id), &Some(ses), server).await? 
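// The authorization idiom repeated across these handlers, condensed: compute
// a local default (owner or admin), then let the Organizer override it; a
// denial surfaces as Err(AuthzError::Denied) and aborts the handler:
//
//     let default_perm = ses.user_id == v.user_id || ses.is_admin;
//     org_authz_with_default(&ses.org_session, "rename media file", true,
//         server, &ses.organizer, default_perm,
//         AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::Rename),
//     ).await?;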
{ + Some(v) => { + let default_perm = true; // anyone can comment on any media file + org_authz_with_default( + &ses.org_session, + "comment media file", + true, + server, + &ses.organizer, + default_perm, + AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::Comment), + ) + .await?; + v.id + } + None => return Ok(()), + }; // Parse drawing data if present and write to file let mut drwn = data.drawing.clone(); if let Some(d) = &drwn { if d.starts_with("data:") { - // Convert data URI to bytes let img_uri = DataUrl::process(&d).map_err(|e| anyhow!("Invalid drawing data URI"))?; if img_uri.mime_type().type_ != "image" || img_uri.mime_type().subtype != "webp" { bail!("Invalid mimetype in drawing: {:?}", img_uri.mime_type()) } - let img_data = img_uri.decode_to_vec().map_err(|e| anyhow!("Failed to decode drawing data URI: {:?}", e))?; + let img_data = img_uri + .decode_to_vec() + .map_err(|e| anyhow!("Failed to decode drawing data URI: {:?}", e))?; // Make up a filename - fn sha256hex( data: &[u8] ) -> String { + fn sha256hex(data: &[u8]) -> String { let mut hasher = Sha256::new(); hasher.update(data); let result = hasher.finalize(); @@ -284,11 +421,16 @@ pub async fn msg_add_comment(data: &proto::client::client_to_server_cmd::AddComm let fname = format!("{}.webp", short_csum); // Write to file - let drawing_path = server.media_files_dir.join(&media_file_id).join("drawings").join(&fname); + let drawing_path = server + .media_files_dir + .join(&media_file_id) + .join("drawings") + .join(&fname); std::fs::create_dir_all(drawing_path.parent().unwrap()) .map_err(|e| anyhow!("Failed to create drawings dir: {:?}", e))?; - async_std::fs::write(drawing_path, img_data.0).await.map_err( - |e| anyhow!("Failed to write drawing file: {:?}", e))?; + async_std::fs::write(drawing_path, img_data.0) + .await + .map_err(|e| anyhow!("Failed to write drawing file: {:?}", e))?; // Replace data URI with filename drwn = Some(fname); @@ -304,93 +446,166 @@ pub async fn msg_add_comment(data: &proto::client::client_to_server_cmd::AddComm timecode: data.timecode.clone(), drawing: drwn.clone(), subtitle_id: optional_str_to_i32_or_tonic_error!(data.subtitle_id)?, - subtitle_filename_ifnull: None + subtitle_filename_ifnull: None, }; let c = models::Comment::insert(&mut server.db.conn()?, &c) .map_err(|e| anyhow!("Failed to add comment: {:?}", e))?; // Send to all clients watching this media file - ses.emit_new_comment(server, c, super::SendTo::MediaFileId(&media_file_id)).await?; + ses.emit_new_comment(server, c, super::SendTo::MediaFileId(&media_file_id)) + .await?; Ok(()) } - -pub async fn msg_edit_comment(data: &EditComment, ses: &mut UserSession, server: &ServerState) -> Res<()> { +pub async fn msg_edit_comment( + data: &EditComment, + ses: &mut UserSession, + server: &ServerState, +) -> Res<()> { let id = i32::from_str(&data.comment_id)?; let conn = &mut server.db.conn()?; match models::Comment::get(conn, &id) { Ok(old) => { let default_perm = Some(&ses.user_id) == old.user_id.as_ref() || ses.is_admin; - org_authz_with_default(&ses.org_session, "edit comment", true, server, &ses.organizer, - default_perm, AuthzTopic::Comment(&old, authz_req::comment_op::Op::Edit)).await?; + org_authz_with_default( + &ses.org_session, + "edit comment", + true, + server, + &ses.organizer, + default_perm, + AuthzTopic::Comment(&old, authz_req::comment_op::Op::Edit), + ) + .await?; let vid = &old.media_file_id; models::Comment::edit(conn, id, &data.new_comment)?; server.emit_cmd( client_cmd!(DelComment, {comment_id: id.to_string()}), - 
super::SendTo::MediaFileId(&vid))?; + super::SendTo::MediaFileId(&vid), + )?; let c = models::Comment::get(conn, &id)?; - ses.emit_new_comment(server, c, super::SendTo::MediaFileId(&vid)).await?; + ses.emit_new_comment(server, c, super::SendTo::MediaFileId(&vid)) + .await?; } Err(DBError::NotFound()) => { - send_user_error!(&ses.user_id, server, Topic::None, "Failed to edit comment.", "No such comment. Cannot edit.", true); + send_user_error!( + &ses.user_id, + server, + Topic::None, + "Failed to edit comment.", + "No such comment. Cannot edit.", + true + ); + } + Err(e) => { + bail!(e); } - Err(e) => { bail!(e); } } Ok(()) } - -pub async fn msg_del_comment(data: &DelComment, ses: &mut UserSession, server: &ServerState) -> Res<()> { +pub async fn msg_del_comment( + data: &DelComment, + ses: &mut UserSession, + server: &ServerState, +) -> Res<()> { let id = i32::from_str(&data.comment_id)?; let conn = &mut server.db.conn()?; match models::Comment::get(conn, &id) { Ok(cmt) => { let default_perm = Some(&ses.user_id) == cmt.user_id.as_ref() || ses.is_admin; - org_authz_with_default(&ses.org_session, "delete comment", true, server, &ses.organizer, - default_perm, AuthzTopic::Comment(&cmt, authz_req::comment_op::Op::Delete)).await?; + org_authz_with_default( + &ses.org_session, + "delete comment", + true, + server, + &ses.organizer, + default_perm, + AuthzTopic::Comment(&cmt, authz_req::comment_op::Op::Delete), + ) + .await?; let vid = cmt.media_file_id; if Some(&ses.user_id) != cmt.user_id.as_ref() && !ses.is_admin { - send_user_error!(&ses.user_id, server, Topic::MediaFile(&vid), "Failed to delete comment.", "You can only delete your own comments", true); + send_user_error!( + &ses.user_id, + server, + Topic::MediaFile(&vid), + "Failed to delete comment.", + "You can only delete your own comments", + true + ); return Ok(()); } let all_comm = models::Comment::get_by_media_file(conn, &vid, DBPaging::default())?; - if all_comm.iter().any(|c| c.parent_id.map(|i| i.to_string()) == Some(id.to_string())) { - send_user_error!(&ses.user_id, server, Topic::MediaFile(&vid), "Failed to delete comment.", "Comment has replies. Cannot delete.", true); + if all_comm + .iter() + .any(|c| c.parent_id.map(|i| i.to_string()) == Some(id.to_string())) + { + send_user_error!( + &ses.user_id, + server, + Topic::MediaFile(&vid), + "Failed to delete comment.", + "Comment has replies. Cannot delete.", + true + ); return Ok(()); } models::Comment::delete(conn, &id)?; server.emit_cmd( client_cmd!(DelComment, {comment_id: id.to_string()}), - super::SendTo::MediaFileId(&vid))?; + super::SendTo::MediaFileId(&vid), + )?; } Err(DBError::NotFound()) => { - send_user_error!(&ses.user_id, server, Topic::None, "Failed to delete comment.", "No such comment. Cannot delete.", true); + send_user_error!( + &ses.user_id, + server, + Topic::None, + "Failed to delete comment.", + "No such comment. Cannot delete.", + true + ); + } + Err(e) => { + bail!(e); } - Err(e) => { bail!(e); } } Ok(()) } - -pub async fn msg_add_subtitle(data: &AddSubtitle, ses: &mut UserSession, server: &ServerState) -> Res<()> { - let mf = match get_media_file_or_send_error(Some(&data.media_file_id), &Some(ses), server).await? 
{ - Some(v) => { - let default_perm = ses.user_id == (&v).user_id || ses.is_admin; - org_authz_with_default(&ses.org_session, "add subtitle", true, server, &ses.organizer, - default_perm, AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::Edit)).await?; - v - }, - None => return Ok(()), - }; +pub async fn msg_add_subtitle( + data: &AddSubtitle, + ses: &mut UserSession, + server: &ServerState, +) -> Res<()> { + let mf = + match get_media_file_or_send_error(Some(&data.media_file_id), &Some(ses), server).await? { + Some(v) => { + let default_perm = ses.user_id == (&v).user_id || ses.is_admin; + org_authz_with_default( + &ses.org_session, + "add subtitle", + true, + server, + &ses.organizer, + default_perm, + AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::Edit), + ) + .await?; + v + } + None => return Ok(()), + }; let language_code = { // Guess language from filename (e.g. "en" from "video.en.srt") let lang = data.file_name.split('.').rev().nth(1).unwrap_or_default(); - if (lang.len()==2 || lang.len()==3) && lang.chars().all(|c| c.is_ascii_lowercase()) { + if (lang.len() == 2 || lang.len() == 3) && lang.chars().all(|c| c.is_ascii_lowercase()) { lang.to_string() } else { "en".to_string() @@ -398,7 +613,9 @@ pub async fn msg_add_subtitle(data: &AddSubtitle, ses: &mut UserSession, server: }; let media_dir = server.media_files_dir.join(&mf.id); - if !media_dir.exists() { bail!("Media file dir not found: {:?}", media_dir); } + if !media_dir.exists() { + bail!("Media file dir not found: {:?}", media_dir); + } let subs_dir = media_dir.join("subs"); let orig_subs_dir = subs_dir.join("orig"); @@ -406,41 +623,75 @@ pub async fn msg_add_subtitle(data: &AddSubtitle, ses: &mut UserSession, server: bail!("Failed to create orig subs dir"); } - let orig_fn_clean: PathBuf = Path::new(&data.file_name).file_name().context("Bad filename")?.into(); + let orig_fn_clean: PathBuf = Path::new(&data.file_name) + .file_name() + .context("Bad filename")? + .into(); let orig_sub_file = orig_subs_dir.join(&orig_fn_clean); tracing::debug!("Writing orig subtitle file to: {:?}", orig_sub_file); if orig_sub_file.exists() { - send_user_error!(&ses.user_id, server, Topic::MediaFile(&mf.id), "Failed to add subtitle.", format!("Subtitle file already exists: '{:?}'", &orig_fn_clean), true); + send_user_error!( + &ses.user_id, + server, + Topic::MediaFile(&mf.id), + "Failed to add subtitle.", + format!("Subtitle file already exists: '{:?}'", &orig_fn_clean), + true + ); return Ok(()); } let file_contents = { use base64::{engine::general_purpose::STANDARD, Engine as _}; - STANDARD.decode(&data.contents_base64).context("Failed to base64 decode subtitle file")? + STANDARD + .decode(&data.contents_base64) + .context("Failed to base64 decode subtitle file")? 
};
-    tokio::fs::write(&orig_sub_file, file_contents).await.context("Failed to write orig subtitle file")?;
+    tokio::fs::write(&orig_sub_file, file_contents)
+        .await
+        .context("Failed to write orig subtitle file")?;
     server.storage.upload_if_exists(&orig_sub_file);

     // Convert to WebVTT if needed
-    let playback_filename ={
+    let playback_filename = {
         use aspasia::{AssSubtitle, SubRipSubtitle, Subtitle, TimedSubtitleFile, WebVttSubtitle};

-        let vtt_path = subs_dir.join(&orig_fn_clean.with_extension("vtt").file_name().context("Bad filename")?);
+        let vtt_path = subs_dir.join(
+            &orig_fn_clean
+                .with_extension("vtt")
+                .file_name()
+                .context("Bad filename")?,
+        );
         if vtt_path.exists() {
-            send_user_error!(&ses.user_id, server, Topic::MediaFile(&mf.id), "Failed to add subtitle.", format!("WebVTT file already exists: '{:?}'", &vtt_path.file_name().context("Bad filename")?), true);
+            send_user_error!(
+                &ses.user_id,
+                server,
+                Topic::MediaFile(&mf.id),
+                "Failed to add subtitle.",
+                format!(
+                    "WebVTT file already exists: '{:?}'",
+                    &vtt_path.file_name().context("Bad filename")?
+                ),
+                true
+            );
             return Ok(());
         }

         match TimedSubtitleFile::new(&orig_sub_file) {
             Ok(TimedSubtitleFile::WebVtt(sub)) => {
-                tracing::debug!("Subtitle file is already WebVTT, not converting: {:?}", &orig_sub_file);
+                tracing::debug!(
+                    "Subtitle file is already WebVTT, not converting: {:?}",
+                    &orig_sub_file
+                );
                 None
-            },
+            }
             Ok(sub) => {
                 tracing::debug!("Converting subtitle file to WebVTT: {:?}", &orig_sub_file);
-                WebVttSubtitle::from(sub).export(&vtt_path).context("Failed to convert to WebVTT")?;
+                WebVttSubtitle::from(sub)
+                    .export(&vtt_path)
+                    .context("Failed to convert to WebVTT")?;

                 // Workaround for: https://github.com/ylysyym/aspasia/issues/1
                 fn temp_workaround_aspasia_webvtt_bug(vtt_file: &Path) -> std::io::Result<()> {
@@ -451,7 +702,9 @@ pub async fn msg_add_subtitle(data: &AddSubtitle, ses: &mut UserSession, server:
                     let mut lines: Vec<String> = Vec::new();
                     for line in reader.lines() {
                         let mut line = line?;
-                        if line.contains("-->") { line = line.replace(",", "."); }
+                        if line.contains("-->") {
+                            line = line.replace(",", ".");
+                        }
                         lines.push(line);
                     }
                     fs::write(vtt_file, lines.join("\n"))
@@ -459,21 +712,32 @@ pub async fn msg_add_subtitle(data: &AddSubtitle, ses: &mut UserSession, server:
                 temp_workaround_aspasia_webvtt_bug(&vtt_path)?;
                 server.storage.upload_if_exists(&vtt_path);

-                Some(vtt_path.file_name().context("Bad filename")?.to_str().context("Bad filename")?.to_string())
-            },
+                Some(
+                    vtt_path
+                        .file_name()
+                        .context("Bad filename")?
+                        .to_str()
+                        .context("Bad filename")?
+ .to_string(), + ) + } Err(e) => return Err(anyhow!("Failed to parse subtitle file: {:?}", e)), } }; let conn = &mut server.db.conn()?; - let new_sub = models::Subtitle::insert(conn, &models::SubtitleInsert { - media_file_id: mf.id.clone(), - orig_filename: orig_fn_clean.to_string_lossy().into(), - title: orig_fn_clean.to_string_lossy().into(), - language_code, - filename: playback_filename, - time_offset: 0.0, - }) .map_err(|e| anyhow!("Failed to add subtitle: {:?}", e))?; + let new_sub = models::Subtitle::insert( + conn, + &models::SubtitleInsert { + media_file_id: mf.id.clone(), + orig_filename: orig_fn_clean.to_string_lossy().into(), + title: orig_fn_clean.to_string_lossy().into(), + language_code, + filename: playback_filename, + time_offset: 0.0, + }, + ) + .map_err(|e| anyhow!("Failed to add subtitle: {:?}", e))?; let all_subs = models::Subtitle::get_by_media_file(conn, &mf.id, DBPaging::default())?; if all_subs.len() == 1 { @@ -485,28 +749,46 @@ pub async fn msg_add_subtitle(data: &AddSubtitle, ses: &mut UserSession, server: Ok(()) } - -pub async fn msg_edit_subtitle_info(data: &EditSubtitleInfo, ses: &mut UserSession, server: &ServerState) -> Res<()> { +pub async fn msg_edit_subtitle_info( + data: &EditSubtitleInfo, + ses: &mut UserSession, + server: &ServerState, +) -> Res<()> { let id = str_to_i32_or_tonic_error!(data.id)?; let conn = &mut server.db.conn()?; - let mut sub = models::Subtitle::get(conn, &id).map_err(|e| anyhow!("Failed to get subtitle: {:?}", e))?; - let mf = models::MediaFile::get(conn, &sub.media_file_id).map_err(|e| anyhow!("Failed to get media file: {:?}", e))?; + let mut sub = + models::Subtitle::get(conn, &id).map_err(|e| anyhow!("Failed to get subtitle: {:?}", e))?; + let mf = models::MediaFile::get(conn, &sub.media_file_id) + .map_err(|e| anyhow!("Failed to get media file: {:?}", e))?; let default_perm = ses.user_id == mf.user_id || ses.is_admin; - org_authz_with_default(&ses.org_session, "edit subtitle", true, server, &ses.organizer, - default_perm, AuthzTopic::MediaFile(&mf, authz_req::media_file_op::Op::Edit)).await?; + org_authz_with_default( + &ses.org_session, + "edit subtitle", + true, + server, + &ses.organizer, + default_perm, + AuthzTopic::MediaFile(&mf, authz_req::media_file_op::Op::Edit), + ) + .await?; // Update subtitle in DB sub.title = data.title.clone().unwrap_or(sub.title.clone()); - sub.language_code = data.language_code.clone().unwrap_or(sub.language_code.clone()); + sub.language_code = data + .language_code + .clone() + .unwrap_or(sub.language_code.clone()); sub.time_offset = data.time_offset.clone().unwrap_or(sub.time_offset); - models::Subtitle::update_many(conn, &[sub]) .map_err(|e| anyhow!("Failed to update subtitle: {:?}", e))?; + models::Subtitle::update_many(conn, &[sub]) + .map_err(|e| anyhow!("Failed to update subtitle: {:?}", e))?; // Set/unset default subtitle for media file if requested if let Some(is_default) = data.is_default { let new_val = if is_default { Some(id) } else { None }; - if is_default || mf.default_subtitle_id == Some(id) { // only set null if this subtitle was previously the default + if is_default || mf.default_subtitle_id == Some(id) { + // only set null if this subtitle was previously the default models::MediaFile::set_default_subtitle(conn, &mf.id, new_val) .map_err(|e| anyhow!("Failed to set default subtitle: {:?}", e))?; } @@ -516,60 +798,105 @@ pub async fn msg_edit_subtitle_info(data: &EditSubtitleInfo, ses: &mut UserSessi Ok(()) } -pub async fn msg_del_subtitle(data: &DelSubtitle, ses: &mut 
UserSession, server: &ServerState) -> Res<()> { +pub async fn msg_del_subtitle( + data: &DelSubtitle, + ses: &mut UserSession, + server: &ServerState, +) -> Res<()> { let id = str_to_i32_or_tonic_error!(data.id)?; let conn = &mut server.db.conn()?; - let sub = models::Subtitle::get(conn, &id).map_err(|e| anyhow!("Failed to get subtitle: {:?}", e))?; - let mf = models::MediaFile::get(conn, &sub.media_file_id).map_err(|e| anyhow!("Failed to get media file: {:?}", e))?; + let sub = + models::Subtitle::get(conn, &id).map_err(|e| anyhow!("Failed to get subtitle: {:?}", e))?; + let mf = models::MediaFile::get(conn, &sub.media_file_id) + .map_err(|e| anyhow!("Failed to get media file: {:?}", e))?; let default_perm = ses.user_id == mf.user_id || ses.is_admin; - org_authz_with_default(&ses.org_session, "delete subtitle", true, server, &ses.organizer, - default_perm, AuthzTopic::MediaFile(&mf, authz_req::media_file_op::Op::Edit)).await?; + org_authz_with_default( + &ses.org_session, + "delete subtitle", + true, + server, + &ses.organizer, + default_perm, + AuthzTopic::MediaFile(&mf, authz_req::media_file_op::Op::Edit), + ) + .await?; let subs_dir = server.media_files_dir.join(&mf.id).join("subs"); tracing::debug!(orig_file=?sub.orig_filename, vtt_file=?sub.filename, "Deleting subtitle files"); let orig_path = subs_dir.join("orig").join(&sub.orig_filename); - if orig_path.exists() { std::fs::remove_file(&orig_path).context("Failed to delete orig subtitle file")?; } + if orig_path.exists() { + std::fs::remove_file(&orig_path).context("Failed to delete orig subtitle file")?; + } if let Some(vtt) = sub.filename { let vtt_path = subs_dir.join(&vtt); - if vtt_path.exists() { std::fs::remove_file(&vtt_path).context("Failed to delete vtt subtitle file")?; } + if vtt_path.exists() { + std::fs::remove_file(&vtt_path).context("Failed to delete vtt subtitle file")?; + } } - models::Subtitle::delete(conn, &id).map_err(|e| anyhow!("Failed to delete subtitle: {:?}", e))?; + models::Subtitle::delete(conn, &id) + .map_err(|e| anyhow!("Failed to delete subtitle: {:?}", e))?; send_open_media_file_cmd(server, &ses.sid, &mf.id).await?; Ok(()) } -pub async fn msg_list_my_messages(data: &proto::client::client_to_server_cmd::ListMyMessages, ses: &mut UserSession, server: &ServerState) -> Res<()> { +pub async fn msg_list_my_messages( + data: &proto::client::client_to_server_cmd::ListMyMessages, + ses: &mut UserSession, + server: &ServerState, +) -> Res<()> { let conn = &mut server.db.conn()?; let msgs = models::Message::get_by_user(conn, &ses.user_id, DBPaging::default())?; server.emit_cmd( client_cmd!(ShowMessages, { msgs: (&msgs).into_iter().map(|m| m.to_proto3()).collect() }), - super::SendTo::UserSession(&ses.sid) + super::SendTo::UserSession(&ses.sid), )?; for m in msgs { - if !m.seen { models::Message::set_seen(conn, m.id, true)?; } + if !m.seen { + models::Message::set_seen(conn, m.id, true)?; + } } Ok(()) } - -pub async fn msg_join_collab(data: &JoinCollab, ses: &mut UserSession, server: &ServerState) -> Res<()> { +pub async fn msg_join_collab( + data: &JoinCollab, + ses: &mut UserSession, + server: &ServerState, +) -> Res<()> { if let Some(collab_id) = ses.cur_collab_id.clone() { if server.sender_is_collab_participant(collab_id.as_str(), &ses.sender) { - tracing::debug!("{} is already in collab {}. Ignoring double join.", ses.user_name, collab_id); + tracing::debug!( + "{} is already in collab {}. 
Ignoring double join.", + ses.user_name, + collab_id + ); return Ok(()); } } ses.collab_session_guard = None; ses.cur_collab_id = None; - if let Some(v) = get_media_file_or_send_error(Some(&data.media_file_id), &Some(ses), server).await? { - org_authz_with_default(&ses.org_session, "join collab", true, server, &ses.organizer, - true, AuthzTopic::Other(Some(&data.collab_id), authz_req::other_op::Op::JoinCollabSession)).await?; + if let Some(v) = + get_media_file_or_send_error(Some(&data.media_file_id), &Some(ses), server).await? + { + org_authz_with_default( + &ses.org_session, + "join collab", + true, + server, + &ses.organizer, + true, + AuthzTopic::Other( + Some(&data.collab_id), + authz_req::other_op::Op::JoinCollabSession, + ), + ) + .await?; match server.link_session_to_collab(&data.collab_id, &v.id, ses.sender.clone()) { Ok(csg) => { @@ -583,19 +910,27 @@ pub async fn msg_join_collab(data: &JoinCollab, ses: &mut UserSession, server: & ..Default::default() }] }), - super::SendTo::Collab(&data.collab_id) + super::SendTo::Collab(&data.collab_id), )?; } Err(e) => { - send_user_error!(&ses.user_id, server, Topic::MediaFile(&v.id), format!("Failed to join collab session: {}", e)); + send_user_error!( + &ses.user_id, + server, + Topic::MediaFile(&v.id), + format!("Failed to join collab session: {}", e) + ); } } } Ok(()) } - -pub async fn msg_leave_collab(data: &LeaveCollab, ses: &mut UserSession, server: &ServerState) -> Res<()> { +pub async fn msg_leave_collab( + data: &LeaveCollab, + ses: &mut UserSession, + server: &ServerState, +) -> Res<()> { if let Some(collab_id) = &ses.cur_collab_id { server.emit_cmd( client_cmd!(ShowMessages, { msgs: vec![ @@ -605,7 +940,7 @@ pub async fn msg_leave_collab(data: &LeaveCollab, ses: &mut UserSession, server: ..Default::default() }] }), - super::SendTo::Collab(&collab_id) + super::SendTo::Collab(&collab_id), )?; ses.collab_session_guard = None; ses.cur_collab_id = None; @@ -613,8 +948,11 @@ pub async fn msg_leave_collab(data: &LeaveCollab, ses: &mut UserSession, server: Ok(()) } - -pub async fn msg_collab_report(data: &CollabReport, ses: &mut UserSession, server: &ServerState) -> Res<()> { +pub async fn msg_collab_report( + data: &CollabReport, + ses: &mut UserSession, + server: &ServerState, +) -> Res<()> { if let Some(collab_id) = &ses.cur_collab_id { let ce = client_cmd!(CollabEvent, { paused: data.paused, @@ -624,15 +962,25 @@ pub async fn msg_collab_report(data: &CollabReport, ses: &mut UserSession, serve drawing: data.drawing.clone(), subtitle_id: data.subtitle_id.clone(), }); - server.emit_cmd(ce, super::SendTo::Collab(collab_id)).map(|_| ()) + server + .emit_cmd(ce, super::SendTo::Collab(collab_id)) + .map(|_| ()) } else { - send_user_error!(&ses.user_id, server, Topic::None, "Report rejected: no active collab session."); + send_user_error!( + &ses.user_id, + server, + Topic::None, + "Report rejected: no active collab session." 
+ ); return Ok(()); } } - -pub async fn msg_move_to_folder(data: &proto::client::client_to_server_cmd::MoveToFolder, ses: &mut UserSession, server: &ServerState) -> Res<()> { +pub async fn msg_move_to_folder( + data: &proto::client::client_to_server_cmd::MoveToFolder, + ses: &mut UserSession, + server: &ServerState, +) -> Res<()> { if let Some(org) = &ses.organizer { let req = proto::org::MoveToFolderRequest { ses: Some(ses.org_session.clone()), @@ -644,17 +992,25 @@ pub async fn msg_move_to_folder(data: &proto::client::client_to_server_cmd::Move if e.code() == tonic::Code::Unimplemented { tracing::debug!("Organizer doesn't implement move_to_folder(). Ignoring."); } else if e.code() == tonic::Code::Aborted { - tracing::debug!("Ignoring org.move_to_folder() result because it GrpcStatus.ABORTED."); + tracing::debug!( + "Ignoring org.move_to_folder() result because it GrpcStatus.ABORTED." + ); } else { tracing::error!(err=?e, "Error in organizer move_to_folder() call"); anyhow::bail!("Organizer error: {:?}", e); } } - } else { send_user_error!(&ses.user_id, server, Topic::None, "No organizer session."); } + } else { + send_user_error!(&ses.user_id, server, Topic::None, "No organizer session."); + } Ok(()) } -pub async fn msg_reorder_items(data: &ReorderItems, ses: &mut UserSession, server: &ServerState) -> Res<()> { +pub async fn msg_reorder_items( + data: &ReorderItems, + ses: &mut UserSession, + server: &ServerState, +) -> Res<()> { if let Some(org) = &ses.organizer { let req = proto::org::ReorderItemsRequest { ses: Some(ses.org_session.clone()), @@ -665,41 +1021,50 @@ pub async fn msg_reorder_items(data: &ReorderItems, ses: &mut UserSession, serve if e.code() == tonic::Code::Unimplemented { tracing::debug!("Organizer doesn't implement reorder_items(). Ignoring."); } else if e.code() == tonic::Code::Aborted { - tracing::debug!("Ignoring org.reorder_items() result because it GrpcStatus.ABORTED."); + tracing::debug!( + "Ignoring org.reorder_items() result because it GrpcStatus.ABORTED." + ); } else { tracing::error!(err=?e, "Error in organizer reorder_items() call"); anyhow::bail!("Organizer error: {:?}", e); } } - } else { send_user_error!(&ses.user_id, server, Topic::None, "No organizer session."); } + } else { + send_user_error!(&ses.user_id, server, Topic::None, "No organizer session."); + } Ok(()) } - -pub async fn msg_organizer_cmd(data: &proto::client::client_to_server_cmd::OrganizerCmd, ses: &mut UserSession, server: &ServerState) -> Res<()> { +pub async fn msg_organizer_cmd( + data: &proto::client::client_to_server_cmd::OrganizerCmd, + ses: &mut UserSession, + server: &ServerState, +) -> Res<()> { if let Some(org) = &ses.organizer { let req = proto::org::CmdFromClientRequest { ses: Some(ses.org_session.clone()), cmd: data.cmd.clone(), - args: data.args.clone() + args: data.args.clone(), }; match org.lock().await.cmd_from_client(req).await { Err(e) => { if e.code() == tonic::Code::Aborted { - tracing::debug!("Ignoring org.cmd_from_client() result because it GrpcStatus.ABORTED."); + tracing::debug!( + "Ignoring org.cmd_from_client() result because it GrpcStatus.ABORTED." + ); } else { tracing::error!(err=?e, "Error in organizer cmd_from_client() call"); anyhow::bail!("Organizer error: {:?}", e); } - }, - Ok(res) => { return Ok(()); } + } + Ok(res) => { + return Ok(()); + } } } Ok(()) } - - #[derive(thiserror::Error, Debug)] pub enum SessionClose { #[error("User logout")] @@ -708,11 +1073,20 @@ pub enum SessionClose { /// Dispatch a message from client to appropriate handler. 
/// Return true if the session should be kept open, or false if it should be closed.
-pub async fn msg_dispatch(req: &ClientToServerCmd, ses: &mut UserSession, server: &ServerState) -> Res<bool> {
+pub async fn msg_dispatch(
+    req: &ClientToServerCmd,
+    ses: &mut UserSession,
+    server: &ServerState,
+) -> Res<bool> {
     use proto::client::client_to_server_cmd::Cmd;
     let res = match req.cmd.as_ref() {
         None => {
-            send_user_error!(&ses.user_id, server, Topic::None, format!("Missing command from client: {:?}", req));
+            send_user_error!(
+                &ses.user_id,
+                server,
+                Topic::None,
+                format!("Missing command from client: {:?}", req)
+            );
             Ok(())
         }
         Some(cmd) => match cmd {
@@ -736,17 +1110,30 @@ pub async fn msg_dispatch(req: &ClientToServerCmd, ses: &mut UserSession, server
             Cmd::Logout(_) => {
                 tracing::info!("logout from client: user={}", ses.user_id);
                 return Err(SessionClose::Logout.into());
-            },
+            }
         },
     };

     if let Err(e) = res {
         // Ignore authz errors, they are already logged
         if let None = e.downcast_ref::() {
-            let cmd_str = req.cmd.as_ref().map(|c| format!("{:?}", c)).unwrap_or_default();
+            let cmd_str = req
+                .cmd
+                .as_ref()
+                .map(|c| format!("{:?}", c))
+                .unwrap_or_default();
             tracing::warn!("[{}] '{cmd_str}' failed: {}", ses.sid, e);
             // Assume name is regex '^[a-zA-Z0-9_]+' of cmd_str
-            let cmd_name = regex::Regex::new(r"^[a-zA-Z0-9_]+").unwrap().find(&cmd_str).map(|m| m.as_str()).unwrap_or(cmd_str.as_str());
-            send_user_error!(&ses.user_id, server, Topic::None, format!("Cmd '{cmd_name}' failed: {e}"));
+            let cmd_name = regex::Regex::new(r"^[a-zA-Z0-9_]+")
+                .unwrap()
+                .find(&cmd_str)
+                .map(|m| m.as_str())
+                .unwrap_or(cmd_str.as_str());
+            send_user_error!(
+                &ses.user_id,
+                server,
+                Topic::None,
+                format!("Cmd '{cmd_name}' failed: {e}")
+            );
         }
     }
     Ok(true)
diff --git a/server/src/database/basic_query.rs b/server/src/database/basic_query.rs
index c9b4f039..8f23875a 100644
--- a/server/src/database/basic_query.rs
+++ b/server/src/database/basic_query.rs
@@ -1,32 +1,37 @@
 #[macro_export]
 macro_rules! implement_basic_query_traits {
     ($model:ty, $insert_model:ty, $table:ident, $pk_type:ty, $order_by:expr) => {
-
         impl DbBasicQuery<$pk_type, $insert_model> for $model {
-
            /// Insert a new object into the database.
            fn insert(conn: &mut PooledConnection, item: &$insert_model) -> DBResult<Self> {
                use schema::$table::dsl::*;
-                to_db_res(retry_if_db_locked!(diesel::insert_into($table).values(item).get_result(conn)))
+                to_db_res(retry_if_db_locked!(diesel::insert_into($table)
+                    .values(item)
+                    .get_result(conn)))
            }

            /// Insert multiple objects into the database.
-            fn insert_many(conn: &mut PooledConnection, items: &[$insert_model]) -> DBResult<Vec<Self>> {
+            fn insert_many(
+                conn: &mut PooledConnection,
+                items: &[$insert_model],
+            ) -> DBResult<Vec<Self>> {
                items.iter().map(|i| Self::insert(conn, i)).collect()
            }

            /// Get a single object by its primary key.
-            fn get(conn: &mut PooledConnection, pk: &$pk_type) -> DBResult<Self>
-            {
+            fn get(conn: &mut PooledConnection, pk: &$pk_type) -> DBResult<Self> {
                use schema::$table::dsl::*;
-                to_db_res(retry_if_db_locked!({ $table.filter(id.eq(pk)).first::<$model>(conn) }))
+                to_db_res(retry_if_db_locked!({
+                    $table.filter(id.eq(pk)).first::<$model>(conn)
+                }))
            }

            /// Get multiple objects by their primary keys.
-            fn get_many(conn: &mut PooledConnection, ids: &[$pk_type]) -> DBResult<Vec<Self>>
-            {
+            fn get_many(conn: &mut PooledConnection, ids: &[$pk_type]) -> DBResult<Vec<Self>> {
                use schema::$table::dsl::*;
-                to_db_res(retry_if_db_locked!({ $table.filter(id.eq_any(ids)).load::<$model>(conn) }))
+                to_db_res(retry_if_db_locked!({
+                    $table.filter(id.eq_any(ids)).load::<$model>(conn)
+                }))
            }

            /// Get all nodes of type Self, with no filtering, paginated.
@@ -43,52 +48,57 @@
            }

            /// Delete a single object from the database.
-            fn delete(conn: &mut PooledConnection, pk: &$pk_type) -> DBResult<bool>
-            {
+            fn delete(conn: &mut PooledConnection, pk: &$pk_type) -> DBResult<bool> {
                use schema::$table::dsl::*;
                to_db_res(retry_if_db_locked!({
-                    diesel::delete($table.filter(id.eq(pk))).execute(conn).map(|n_rows| n_rows>0)
+                    diesel::delete($table.filter(id.eq(pk)))
+                        .execute(conn)
+                        .map(|n_rows| n_rows > 0)
                }))
            }

            /// Delete multiple objects from the database.
            /// Returns the number of objects deleted.
-            fn delete_many(conn: &mut PooledConnection, ids: &[$pk_type]) -> DBResult<usize>
-            {
+            fn delete_many(conn: &mut PooledConnection, ids: &[$pk_type]) -> DBResult<usize> {
                use schema::$table::dsl::*;
                to_db_res(retry_if_db_locked!({
                    diesel::delete($table.filter(id.eq_any(ids))).execute(conn)
                }))
            }
        }
-    }
+    };
}

#[macro_export]
macro_rules! implement_update_traits {
    ($model:ty, $table:ident, $pk_type:ty) => {
-
        impl DbUpdate<$pk_type> for $model {
            /// Update objects, replaces the entire object except for the primary key.
            fn update_many(conn: &mut PooledConnection, items: &[Self]) -> DBResult<Vec<Self>> {
                use schema::$table::dsl::*;
                let mut res: Vec<Self> = Vec::with_capacity(items.len());
                for it in items {
-                    res.push(retry_if_db_locked!(diesel::update($table.filter(id.eq(&it.id))).set(it).get_result(conn))?);
+                    res.push(retry_if_db_locked!(diesel::update(
+                        $table.filter(id.eq(&it.id))
+                    )
+                    .set(it)
+                    .get_result(conn))?);
                }
                Ok(res)
            }
        }
-    }
+    };
}

#[macro_export]
macro_rules! implement_query_by_user_traits {
    ($model:ty, $table:ident, $user_field:ident, $order_by:expr) => {
-
        impl DbQueryByUser for $model {
-
-            fn get_by_user(conn: &mut PooledConnection, uid: &str, pg: DBPaging) -> DBResult<Vec<Self>> {
+            fn get_by_user(
+                conn: &mut PooledConnection,
+                uid: &str,
+                pg: DBPaging,
+            ) -> DBResult<Vec<Self>> {
                use schema::$table::dsl::*;
                to_db_res(retry_if_db_locked!({
                    $table
@@ -101,16 +111,18 @@
                }))
            }
        }
-    }
+    };
}

#[macro_export]
macro_rules! implement_query_by_media_file_traits {
    ($model:ty, $table:ident, $media_col:ident, $order_by:expr) => {
-
        impl DbQueryByMediaFile for $model {
-
-            fn get_by_media_file(conn: &mut PooledConnection, vid: &str, pg: DBPaging) -> DBResult<Vec<Self>> {
+            fn get_by_media_file(
+                conn: &mut PooledConnection,
+                vid: &str,
+                pg: DBPaging,
+            ) -> DBResult<Vec<Self>> {
                use schema::$table::dsl::*;
                to_db_res(retry_if_db_locked!({
                    $table
@@ -123,5 +135,5 @@ macro_rules!
implement_query_by_media_file_traits {
                }))
            }
        }
-    }
+    };
}
diff --git a/server/src/database/custom_ops.rs b/server/src/database/custom_ops.rs
index 25147173..3a4fc230 100644
--- a/server/src/database/custom_ops.rs
+++ b/server/src/database/custom_ops.rs
@@ -1,15 +1,17 @@
+use crate::{
+    database::{models, schema, to_db_res, DBResult, EmptyDBResult},
+    retry_if_db_locked,
+};
 use anyhow::Context;
-use diesel::prelude::*;
 use chrono::offset::Local;
-use crate::{database::{models, schema, to_db_res, DBResult, EmptyDBResult}, retry_if_db_locked};
+use diesel::prelude::*;

 use super::{error::DBError, DbBasicQuery, PooledConnection};

 // ------------------- Model-specific custom operations -------------------

 impl models::User {
-    pub fn set_name(conn: &mut PooledConnection, uid: &str, new_name: &str) -> EmptyDBResult
-    {
+    pub fn set_name(conn: &mut PooledConnection, uid: &str, new_name: &str) -> EmptyDBResult {
        use schema::users::dsl::*;
        retry_if_db_locked!({
            diesel::update(users.filter(id.eq(uid)))
@@ -25,16 +27,20 @@ impl models::User {
    /// * `conn` - Database connection
    /// * `user_id` - ID of the user
    /// * `username` - Name of the user, if you want to update it. If None, and user is being created, the name will be set to the user_id.
-    pub fn get_or_create(conn: &mut PooledConnection, user_id: &str, username: Option<&str>) -> DBResult<Self>
-    {
+    pub fn get_or_create(
+        conn: &mut PooledConnection,
+        user_id: &str,
+        username: Option<&str>,
+    ) -> DBResult<Self> {
        match models::User::get(conn, &user_id.to_string()) {
            Ok(u) => {
                // Update name if needed
                if let Some(username) = username {
-                    models::User::set_name(conn, &u.id, &username).context("Failed to update user name")?;
+                    models::User::set_name(conn, &u.id, &username)
+                        .context("Failed to update user name")?;
                }
                models::User::get(conn, &u.id)
-            },
+            }
            Err(DBError::NotFound()) => {
                // User not found, create a new user
                let new_user = models::UserInsert {
@@ -42,22 +48,19 @@ impl models::User {
                    name: username.unwrap_or(user_id).to_string(),
                };
                models::User::insert(conn, &new_user)
-            },
-            Err(e) => { Err(e) }
+            }
+            Err(e) => Err(e),
        }
    }
}

-
impl models::MediaFile {
-
    /// Set the recompressed flag for a media file.
///
    /// # Arguments
    /// * `db` - Database
    /// * `vid` - Id of the media file
-    pub fn set_recompressed(conn: &mut PooledConnection, vid: &str) -> EmptyDBResult
-    {
+    pub fn set_recompressed(conn: &mut PooledConnection, vid: &str) -> EmptyDBResult {
        use schema::media_files::dsl::*;
        retry_if_db_locked!({
            diesel::update(media_files.filter(id.eq(vid)))
@@ -73,8 +76,11 @@ impl models::MediaFile {
    /// * `db` - Database
    /// * `vid` - Id of the media file
    /// * `sid` - Id of the subtitle
-    pub fn set_default_subtitle(conn: &mut PooledConnection, vid: &str, sid: Option<i32>) -> EmptyDBResult
-    {
+    pub fn set_default_subtitle(
+        conn: &mut PooledConnection,
+        vid: &str,
+        sid: Option<i32>,
+    ) -> EmptyDBResult {
        use schema::media_files::dsl::*;
        retry_if_db_locked!({
            diesel::update(media_files.filter(id.eq(vid)))
@@ -91,12 +97,19 @@ impl models::MediaFile {
    /// * `vid` - Id of the media file
    /// * `cols` - Width of the thumbnail sheet
    /// * `rows` - Height of the thumbnail sheet
-    pub fn set_thumb_sheet_dimensions(conn: &mut PooledConnection, vid: &str, cols: u32, rows: u32) -> EmptyDBResult
-    {
+    pub fn set_thumb_sheet_dimensions(
+        conn: &mut PooledConnection,
+        vid: &str,
+        cols: u32,
+        rows: u32,
+    ) -> EmptyDBResult {
        use schema::media_files::dsl::*;
        retry_if_db_locked!({
            diesel::update(media_files.filter(id.eq(vid)))
-                .set((thumb_sheet_cols.eq(cols as i32), thumb_sheet_rows.eq(rows as i32)))
+                .set((
+                    thumb_sheet_cols.eq(cols as i32),
+                    thumb_sheet_rows.eq(rows as i32),
+                ))
                .execute(conn)
        })?;
        Ok(())
@@ -108,8 +121,7 @@ impl models::MediaFile {
    /// * `db` - Database
    /// * `vid` - Id of the media file
    /// * `new_value` - New value of the flag
-    pub fn set_has_thumb(conn: &mut PooledConnection, vid: &str, new_value: bool) -> EmptyDBResult
-    {
+    pub fn set_has_thumb(conn: &mut PooledConnection, vid: &str, new_value: bool) -> EmptyDBResult {
        use schema::media_files::dsl::*;
        retry_if_db_locked!({
            diesel::update(media_files.filter(id.eq(vid)))
@@ -125,8 +137,7 @@ impl models::MediaFile {
    /// # Arguments
    /// * `db` - Database
    /// * `vid` - Id of the media file
-    pub fn set_thumbs_done(conn: &mut PooledConnection, vid: &str) -> EmptyDBResult
-    {
+    pub fn set_thumbs_done(conn: &mut PooledConnection, vid: &str) -> EmptyDBResult {
        use schema::media_files::dsl::*;
        retry_if_db_locked!({
            diesel::update(media_files.filter(id.eq(vid)))
@@ -136,7 +147,6 @@ impl models::MediaFile {
        Ok(())
    }

-
    /// Rename a media file (title).
    ///
    /// # Arguments
@@ -148,8 +158,7 @@ impl models::MediaFile {
    /// * `EmptyResult`
    /// * `Err(NotFound)` - MediaFile not found
    /// * `Err(Other)` - Other error
-    pub fn rename(conn: &mut PooledConnection, vid: &str, new_name: &str) -> EmptyDBResult
-    {
+    pub fn rename(conn: &mut PooledConnection, vid: &str, new_name: &str) -> EmptyDBResult {
        use schema::media_files::dsl::*;
        retry_if_db_locked!({
            diesel::update(media_files.filter(id.eq(vid)))
@@ -163,17 +172,21 @@ impl models::MediaFile {
    ///
    /// # Returns
    /// * `Vec<MediaFile>` - List of MediaFile objects
-    pub fn get_all_with_missing_thumbnails(conn: &mut PooledConnection) -> DBResult<Vec<MediaFile>>
-    {
+    pub fn get_all_with_missing_thumbnails(
+        conn: &mut PooledConnection,
+    ) -> DBResult<Vec<MediaFile>> {
        use models::*;
        use schema::media_files::dsl::*;
-        to_db_res(retry_if_db_locked!({ media_files.filter(thumbs_done.is_null()).order_by(added_time.desc()).load::<MediaFile>(conn) }))
+        to_db_res(retry_if_db_locked!({
+            media_files
+                .filter(thumbs_done.is_null())
+                .order_by(added_time.desc())
+                .load::<MediaFile>(conn)
+        }))
    }
}

-
impl models::Comment {
-
    /// Edit a comment (change text).
///
    /// # Arguments
@@ -182,19 +195,18 @@ impl models::Comment {
    ///
    /// # Returns
    /// * `Res<bool>` - True if comment was edited, false if it was not found
-    pub fn edit(conn: &mut PooledConnection, comment_id: i32, new_comment: &str) -> DBResult<bool>
-    {
+    pub fn edit(conn: &mut PooledConnection, comment_id: i32, new_comment: &str) -> DBResult<bool> {
        use schema::comments::dsl::*;
        to_db_res(retry_if_db_locked!({
            diesel::update(comments.filter(id.eq(comment_id)))
-                .set((comment.eq(new_comment), edited.eq(diesel::dsl::now))).execute(conn).map(|x| x > 0)
+                .set((comment.eq(new_comment), edited.eq(diesel::dsl::now)))
+                .execute(conn)
+                .map(|x| x > 0)
        }))
    }
}

-
impl models::Message {
-
    /// Set the seen status of a message.
    ///
    /// # Arguments
@@ -204,12 +216,13 @@ impl models::Message {
    ///
    /// # Returns
    /// * `Res<bool>` - True if message was found and updated, false if it was not found
-    pub fn set_seen(conn: &mut PooledConnection, msg_id: i32, new_status: bool) -> DBResult<bool>
-    {
+    pub fn set_seen(conn: &mut PooledConnection, msg_id: i32, new_status: bool) -> DBResult<bool> {
        use schema::messages::dsl::*;
        to_db_res(retry_if_db_locked!({
            diesel::update(messages.filter(id.eq(msg_id)))
-                .set(seen.eq(new_status)).execute(conn).map(|x| x > 0)
+                .set(seen.eq(new_status))
+                .execute(conn)
+                .map(|x| x > 0)
        }))
    }

@@ -221,11 +234,13 @@ impl models::Message {
    ///
    /// # Returns
    /// * `Res<Vec<Message>>` - List of messages
-    pub fn get_by_comment(conn: &mut PooledConnection, cid: i32) -> DBResult<Vec<Message>>
-    {
+    pub fn get_by_comment(conn: &mut PooledConnection, cid: i32) -> DBResult<Vec<Message>> {
        use schema::messages::dsl::*;
        to_db_res(retry_if_db_locked!({
-            messages.filter(comment_id.eq(cid)).order(created.desc()).load::<Message>(conn)
+            messages
+                .filter(comment_id.eq(cid))
+                .order(created.desc())
+                .load::<Message>(conn)
        }))
    }
}
diff --git a/server/src/database/db_backup.rs b/server/src/database/db_backup.rs
index 0c326578..d237f09f 100644
--- a/server/src/database/db_backup.rs
+++ b/server/src/database/db_backup.rs
@@ -1,17 +1,17 @@
 use std::{fs::File, path::PathBuf};

+use anyhow::bail;
 use anyhow::Context;
 use flate2::{write::GzEncoder, Compression};
-use anyhow::bail;
-

 /// Backup the SQLite database to a tar.gz file.
 /// This is done before migrations.
-pub fn backup_sqlite_database( db_file: std::path::PathBuf ) -> anyhow::Result<Option<PathBuf>> {
+pub fn backup_sqlite_database(db_file: std::path::PathBuf) -> anyhow::Result<Option<PathBuf>> {
     if db_file.exists() {
         // Make a tar.gz backup
         let now = chrono::Local::now();
-        let backup_path = db_file.with_extension(format!("backup-{}.tar.gz", now.format("%Y-%m-%dT%H_%M_%S")));
+        let backup_path =
+            db_file.with_extension(format!("backup-{}.tar.gz", now.format("%Y-%m-%dT%H_%M_%S")));
         tracing::info!(file=%db_file.display(), backup=%backup_path.display(), "Backing up database before migration.");

         let backup_file = File::create(&backup_path).context("Error creating DB backup file")?;
@@ -21,25 +21,38 @@ pub fn backup_sqlite_database( db_file: std::path::PathBuf ) -> anyhow::Result<
-pub fn restore_sqlite_database( db_file: std::path::PathBuf, backup_path: std::path::PathBuf ) -> anyhow::Result<()> {
+pub fn restore_sqlite_database(
+    db_file: std::path::PathBuf,
+    backup_path: std::path::PathBuf,
+) -> anyhow::Result<()> {
     if db_file.exists() {
         let _span = tracing::info_span!("restore_sqlite_database").entered();
         tracing::info!(file=%db_file.display(), backup=%backup_path.display(), "Restoring.");
@@ -48,18 +61,30 @@ pub fn restore_sqlite_database( db_file: std::path::PathBuf, backup_path: std::p
         let gzip_reader = flate2::read::GzDecoder::new(backup_file);
         let mut tar = tar::Archive::new(gzip_reader);

-        let db_file_prefix = db_file.file_name().context("DB file has no filename")?.to_string_lossy();
+        let db_file_prefix = db_file
+            .file_name()
+            .context("DB file has no filename")?
+            .to_string_lossy();
         let suffices = ["", "-wal", "-shm"];

         //tar.unpack(db_file.parent().unwrap()).context("Error unpacking DB backup")?;
         for entry in tar.entries().context("Error reading tar archive")? {
             let mut entry = entry.context("Error reading tar entry")?;
-            let path = entry.path().context("Error getting tar entry path")?.to_path_buf();
+            let path = entry
+                .path()
+                .context("Error getting tar entry path")?
+                .to_path_buf();
             let path_str = path.to_string_lossy();

-            let acceptable_names: Vec<String> = suffices.iter().map(|suffix| format!("{}{}", db_file_prefix, suffix)).collect();
+            let acceptable_names: Vec<String> = suffices
+                .iter()
+                .map(|suffix| format!("{}{}", db_file_prefix, suffix))
+                .collect();
             if acceptable_names.iter().any(|p| path_str.eq(p)) {
-                let dst_file = db_file.parent().expect("DB file had no parent").join(path.file_name().expect("Tar entry has no filename"));
+                let dst_file = db_file
+                    .parent()
+                    .expect("DB file had no parent")
+                    .join(path.file_name().expect("Tar entry has no filename"));
                 tracing::debug!(file=?path_str, "Unpacking file from tar.");
                 entry.unpack(dst_file).context("Error unpacking file")?;
             } else {
diff --git a/server/src/database/error.rs b/server/src/database/error.rs
index 0454cdad..00d22656 100644
--- a/server/src/database/error.rs
+++ b/server/src/database/error.rs
@@ -1,5 +1,5 @@
-use thiserror;
 use anyhow;
+use thiserror;

 #[derive(thiserror::Error, Debug)]
 pub enum DBError {
diff --git a/server/src/database/migration_solver.rs b/server/src/database/migration_solver.rs
index 708603a9..e88fd869 100644
--- a/server/src/database/migration_solver.rs
+++ b/server/src/database/migration_solver.rs
@@ -1,20 +1,22 @@
-use std::collections::{HashMap, HashSet};
 use lib_clapshot_grpc::proto::org::Migration;
+use std::collections::{HashMap, HashSet};

 pub struct MigrationGraphModule {
-    pub name: String,                   // Unique name (id) of this module
-    pub cur_version: Option<String>,    // Current migration version this module is at
-    pub migrations: Vec<Migration>,     // Available (alternative) migrations for this module
+    pub name: String,                // Unique name (id) of this module
+    pub cur_version: Option<String>, // Current migration version this module is at
+    pub migrations: Vec<Migration>,  // Available (alternative) migrations for this module
 }

 /// For a given set of modules and their migrations, find a valid path of migrations that
 /// upgrades all modules to their latest version.
 ///
 /// Returns `None` if no solution was found.
-pub fn solve_migration_graph(modules: Vec<&MigrationGraphModule>) -> anyhow::Result<Option<Vec<Migration>>> {
+pub fn solve_migration_graph(
+    modules: Vec<&MigrationGraphModule>,
+) -> anyhow::Result<Option<Vec<Migration>>> {
     assert!(!modules.is_empty());

-    let mut cur_module_versions = HashMap::new();       // name -> version
+    let mut cur_module_versions = HashMap::new(); // name -> version
     let mut target_module_versions = HashMap::new();

     // Initialize the current and max versions for each module
@@ -22,17 +24,30 @@
         if let Some(cur_version) = &module.cur_version {
             cur_module_versions.insert(module.name.as_str(), cur_version.as_str());
         }
-        if let Some(max_version) = module.migrations.iter().max_by_key(|m| &m.version).map(|m| m.version.as_str()) {
+        if let Some(max_version) = module
+            .migrations
+            .iter()
+            .max_by_key(|m| &m.version)
+            .map(|m| m.version.as_str())
+        {
             target_module_versions.insert(module.name.as_str(), max_version);
         }
     }

     // List all migrations that advance the current version of some module as a tuple (module_name, migration)
-    let mut all_migrations: Vec<(&str, &Migration)> = modules.iter()
-        .flat_map(|module| module.migrations.iter()
-            .filter(|mig| module.cur_version.is_none() || mig.version.as_str() > module.cur_version.as_ref().unwrap().as_str())
-            .map(|mig| (module.name.as_str(), mig))
-        ).collect();
+    let mut all_migrations: Vec<(&str, &Migration)> = modules
+        .iter()
+        .flat_map(|module| {
+            module
+                .migrations
+                .iter()
+                .filter(|mig| {
+                    module.cur_version.is_none()
+                        || mig.version.as_str() > module.cur_version.as_ref().unwrap().as_str()
+                })
+                .map(|mig| (module.name.as_str(), mig))
+        })
+        .collect();

     // Check that uuids are unique
     let mut uuids = HashSet::new();
@@ -42,29 +57,37 @@
         }
     }

-    all_migrations.sort_by(|a, b| a.1.version.cmp(&b.1.version));   // Oldest versions first
+    all_migrations.sort_by(|a, b| a.1.version.cmp(&b.1.version)); // Oldest versions first

     let mut solution = None;
-    depth_first_search(&all_migrations, &target_module_versions, cur_module_versions.clone(), HashSet::new(), vec![], &mut solution);
-
-    Ok(solution.map(|path| { path.into_iter().cloned().collect() }))
+    depth_first_search(
+        &all_migrations,
+        &target_module_versions,
+        cur_module_versions.clone(),
+        HashSet::new(),
+        vec![],
+        &mut solution,
+    );
+
+    Ok(solution.map(|path| path.into_iter().cloned().collect()))
 }

-
 /// Recursive depth-first search for shortest path of migrations
 /// that upgrades all modules to their target version.
fn depth_first_search<'a>(
-    all_migrations: &'a Vec<(&'a str, &'a Migration)>,      // (module_name, migration)
-    max_module_versions: &'a HashMap<&'a str, &'a str>,     // module_name -> target version
-    cur_module_versions: HashMap<&'a str, &'a str>,         // module_name -> current version
-    visited: HashSet<&'a str>,                              // Set of visited migration UUIDs
-    cur_path: Vec<&'a Migration>,                           // Current path of migrations (in reverse order)
-    best_path: &mut Option<Vec<&'a Migration>>,             // Best path found so far (in reverse order)
+    all_migrations: &'a Vec<(&'a str, &'a Migration)>, // (module_name, migration)
+    max_module_versions: &'a HashMap<&'a str, &'a str>, // module_name -> target version
+    cur_module_versions: HashMap<&'a str, &'a str>,    // module_name -> current version
+    visited: HashSet<&'a str>,                         // Set of visited migration UUIDs
+    cur_path: Vec<&'a Migration>,                      // Current path of migrations (in reverse order)
+    best_path: &mut Option<Vec<&'a Migration>>,        // Best path found so far (in reverse order)
 ) {
     // Solution found? (all modules are at their target version)
     if max_module_versions.iter().all(|(mod_name, max_ver)| {
-        cur_module_versions.get(mod_name).map_or(false, |cur_ver| cur_ver == max_ver) })
-    {
+        cur_module_versions
+            .get(mod_name)
+            .map_or(false, |cur_ver| cur_ver == max_ver)
+    }) {
         if best_path.is_none() || cur_path.len() < best_path.as_ref().unwrap().len() {
             *best_path = Some(cur_path.iter().cloned().collect());
         }
@@ -104,13 +127,18 @@ fn depth_first_search<'a>(
             let mut new_visited = visited.clone();
             new_visited.insert(mig.uuid.as_str());

-            depth_first_search(all_migrations, max_module_versions, new_cur_module_versions, new_visited, new_path, best_path);
+            depth_first_search(
+                all_migrations,
+                max_module_versions,
+                new_cur_module_versions,
+                new_visited,
+                new_path,
+                best_path,
+            );
         }
     }
 }

-
-
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -118,30 +146,56 @@ mod tests {

     macro_rules! migmod {
         ($name:expr, $cur_ver:expr, $migs:expr) => {
-            MigrationGraphModule { name: $name.to_string(), cur_version: $cur_ver.map(|s: &str| s.to_string()), migrations: $migs }
+            MigrationGraphModule {
+                name: $name.to_string(),
+                cur_version: $cur_ver.map(|s: &str| s.to_string()),
+                migrations: $migs,
+            }
         };
     }

     macro_rules! mig {
         ($uuid:expr, $ver:expr, $deps:expr) => {
-            Migration { uuid: $uuid.to_string(), version: $ver.to_string(), dependencies: $deps, description: "dummy-desc".to_string() }
+            Migration {
+                uuid: $uuid.to_string(),
+                version: $ver.to_string(),
+                dependencies: $deps,
+                description: "dummy-desc".to_string(),
+            }
         };
     }

     macro_rules!
dep {
         ($name:expr, $min:expr, $max:expr) => {
-            Dependency { name: $name.to_string(), min_ver: $min.map(|s: &str| s.to_string()), max_ver: $max.map(|s: &str| s.to_string()) }
+            Dependency {
+                name: $name.to_string(),
+                min_ver: $min.map(|s: &str| s.to_string()),
+                max_ver: $max.map(|s: &str| s.to_string()),
+            }
         };
     }

     fn compare_results(result: &Option<Vec<Migration>>, expected: Option<Vec<&str>>) {
         match expected {
             None => {
-                assert!(result.is_none(), "Expected None, got: {:?}",
-                    result.clone().unwrap().iter().map(|m| m.uuid.as_str()).collect::<Vec<_>>());
+                assert!(
+                    result.is_none(),
+                    "Expected None, got: {:?}",
+                    result
+                        .clone()
+                        .unwrap()
+                        .iter()
+                        .map(|m| m.uuid.as_str())
+                        .collect::<Vec<_>>()
+                );
             }
             Some(expected) => {
                 let result = result.as_ref().unwrap();
-                let eq = result.iter().zip(expected.iter()).all(|(a, b)| &a.uuid == b);
-                assert!(eq, "Expected:\n{:?}\nGot:\n{:?}",
+                let eq = result
+                    .iter()
+                    .zip(expected.iter())
+                    .all(|(a, b)| &a.uuid == b);
+                assert!(
+                    eq,
+                    "Expected:\n{:?}\nGot:\n{:?}",
                     expected.iter().map(|m| m).collect::<Vec<_>>(),
                     result.iter().map(|m| m.uuid.as_str()).collect::<Vec<_>>()
                 );
@@ -158,94 +212,153 @@ mod tests {

     #[test]
     fn test_msolv_trivial_from_empty() {
-        let mod_server = migmod!("server", None, vec![
-            mig!("uuid1", "1", vec![]),
-            mig!("uuid2", "2", vec![dep!("server", Some("1"), Some("1"))]),
-            mig!("uuid3", "3", vec![dep!("server", Some("2"), Some("2"))]),
-        ]);
-        let correct = vec![ "uuid1", "uuid2", "uuid3" ];
+        let mod_server = migmod!(
+            "server",
+            None,
+            vec![
+                mig!("uuid1", "1", vec![]),
+                mig!("uuid2", "2", vec![dep!("server", Some("1"), Some("1"))]),
+                mig!("uuid3", "3", vec![dep!("server", Some("2"), Some("2"))]),
+            ]
+        );
+        let correct = vec!["uuid1", "uuid2", "uuid3"];
         solve_and_compare(vec![&mod_server], Some(correct));
     }

     #[test]
     fn test_msolv_shortcut() {
-        let mod_server = migmod!("server", Some("1"), vec![
-            mig!("uuid1", "1", vec![]),
-            mig!("uuid2", "2", vec![dep!("server", Some("1"), Some("1"))]),
-            mig!("uuid3", "3", vec![dep!("server", Some("2"), Some("2"))]),
-            mig!("uuid4", "4", vec![dep!("server", Some("1"), Some("3"))]),
-        ]);
-        let correct = vec![ "uuid4" ];
+        let mod_server = migmod!(
+            "server",
+            Some("1"),
+            vec![
+                mig!("uuid1", "1", vec![]),
+                mig!("uuid2", "2", vec![dep!("server", Some("1"), Some("1"))]),
+                mig!("uuid3", "3", vec![dep!("server", Some("2"), Some("2"))]),
+                mig!("uuid4", "4", vec![dep!("server", Some("1"), Some("3"))]),
+            ]
+        );
+        let correct = vec!["uuid4"];
         solve_and_compare(vec![&mod_server], Some(correct));
     }

     #[test]
     fn test_msolv_two_modules_indep() {
-        let mod_server = migmod!("server", None, vec![
-            mig!("S1", "1", vec![]),
-            mig!("S2", "2", vec![dep!("server", Some("1"), Some("1"))]),
-            mig!("S3", "3", vec![dep!("server", Some("2"), Some("2"))]),
-        ]);
-        let mod_org = migmod!("org", Some("0"), vec![
-            mig!("G1", "1", vec![dep!("org", None, Some("0"))]),
-            mig!("G2", "2", vec![dep!("org", Some("1"), Some("1"))]),
-            mig!("G3", "3", vec![dep!("org", Some("2"), Some("2"))]),
-        ]);
-        let correct = vec![ "S1", "G1", "S2", "G2", "S3", "G3" ];
+        let mod_server = migmod!(
+            "server",
+            None,
+            vec![
+                mig!("S1", "1", vec![]),
+                mig!("S2", "2", vec![dep!("server", Some("1"), Some("1"))]),
+                mig!("S3", "3", vec![dep!("server", Some("2"), Some("2"))]),
+            ]
+        );
+        let mod_org = migmod!(
+            "org",
+            Some("0"),
+            vec![
+                mig!("G1", "1", vec![dep!("org", None, Some("0"))]),
+                mig!("G2", "2", vec![dep!("org", Some("1"), Some("1"))]),
+                mig!("G3", "3", vec![dep!("org", Some("2"), Some("2"))]),
+            ]
+        );
+        let correct = vec!["S1", "G1", "S2", "G2",
"S3", "G3"]; solve_and_compare(vec![&mod_server, &mod_org], Some(correct)); } #[test] fn test_msolv_two_modules_dep() { - let mod_server = migmod!("server", None, vec![ - mig!("S1", "1", vec![]), - mig!("S2", "2", vec![dep!("server", Some("1"), Some("1"))]), - mig!("S3", "3", vec![dep!("server", Some("2"), Some("2"))]), - ]); - let mod_org = migmod!("org", Some("0"), vec![ - mig!("G1", "1", vec![ - dep!("org", Some("0"), Some("0")), - dep!("server", None, Some("1"))]), - mig!("G2", "2", vec![ - dep!("org", Some("1"), Some("1")), - dep!("server", Some("2"), Some("2"))]), - mig!("G3", "3", vec![ - dep!("org", Some("2"), Some("2")), - dep!("server", Some("2"), Some("2"))]), - ]); + let mod_server = migmod!( + "server", + None, + vec![ + mig!("S1", "1", vec![]), + mig!("S2", "2", vec![dep!("server", Some("1"), Some("1"))]), + mig!("S3", "3", vec![dep!("server", Some("2"), Some("2"))]), + ] + ); + let mod_org = migmod!( + "org", + Some("0"), + vec![ + mig!( + "G1", + "1", + vec![ + dep!("org", Some("0"), Some("0")), + dep!("server", None, Some("1")) + ] + ), + mig!( + "G2", + "2", + vec![ + dep!("org", Some("1"), Some("1")), + dep!("server", Some("2"), Some("2")) + ] + ), + mig!( + "G3", + "3", + vec![ + dep!("org", Some("2"), Some("2")), + dep!("server", Some("2"), Some("2")) + ] + ), + ] + ); let correct = vec!["S1", "G1", "S2", "G2", "G3", "S3"]; solve_and_compare(vec![&mod_server, &mod_org], Some(correct)); } #[test] fn test_msolv_one_module_nonsolvable() { - let mod_server = migmod!("server", None, vec![ - mig!("S1", "1", vec![]), - mig!("S2", "2", vec![dep!("server", Some("1"), Some("1"))]), - // missing migration to version 3 - mig!("S4", "4", vec![dep!("server", Some("3"), Some("3"))]), - ]); + let mod_server = migmod!( + "server", + None, + vec![ + mig!("S1", "1", vec![]), + mig!("S2", "2", vec![dep!("server", Some("1"), Some("1"))]), + // missing migration to version 3 + mig!("S4", "4", vec![dep!("server", Some("3"), Some("3"))]), + ] + ); solve_and_compare(vec![&mod_server], None); } - #[test] fn test_msolv_two_modules_nonsolvable() { - let mod_server = migmod!("server", None, vec![ - mig!("S1", "1", vec![]), - mig!("S2", "2", vec![dep!("server", Some("1"), Some("1"))]), - mig!("S3", "3", vec![dep!("server", Some("2"), Some("2"))]), - ]); - let mod_org = migmod!("org", Some("0"), vec![ - mig!("G1", "1", vec![ - dep!("org", Some("0"), Some("0")), - dep!("server", Some("1"), Some("1"))]), - mig!("G2", "2", vec![ - dep!("org", Some("2"), Some("2"))]), - mig!("G3", "3", vec![ - dep!("org", Some("2"), Some("2")), - dep!("server", Some("1"), Some("1"))]), - ]); + let mod_server = migmod!( + "server", + None, + vec![ + mig!("S1", "1", vec![]), + mig!("S2", "2", vec![dep!("server", Some("1"), Some("1"))]), + mig!("S3", "3", vec![dep!("server", Some("2"), Some("2"))]), + ] + ); + let mod_org = migmod!( + "org", + Some("0"), + vec![ + mig!( + "G1", + "1", + vec![ + dep!("org", Some("0"), Some("0")), + dep!("server", Some("1"), Some("1")) + ] + ), + mig!("G2", "2", vec![dep!("org", Some("2"), Some("2"))]), + mig!( + "G3", + "3", + vec![ + dep!("org", Some("2"), Some("2")), + dep!("server", Some("1"), Some("1")) + ] + ), + ] + ); solve_and_compare(vec![&mod_server, &mod_org], None); } @@ -258,14 +371,53 @@ mod tests { Migration: '2024-05-22-163000_add_media_type' of module 'clapshot.server' depends on: '[Dependency { name: "clapshot.server", min_ver: Some("2024-05-13-093800_add_users_table"), max_ver: Some("2024-05-13-093800_add_users_table") }]') Migration: '2024-05-30-202000_add_missing_users' 
of module 'clapshot.server' depends on: '[Dependency { name: "clapshot.server", min_ver: Some("2024-05-22-163000_add_media_type"), max_ver: Some("2024-05-22-163000_add_media_type") }]')
        */
-        let mod_server = migmod!("clapshot.server", Some("1"), vec![
-            mig!("2023-04-18-190209_change_video_primkey", "2023-04-18-190209_change_video_primkey", vec![dep!("clapshot.server", None, Some(""))]),
-            mig!("2023-04-18-190300_add_cascade_rules", "2023-04-18-190300_add_cascade_rules", vec![dep!("clapshot.server", Some("2023-04-18-190209_change_video_primkey"), Some("2023-04-18-190209_change_video_primkey"))]),
-            mig!("2024-05-13-093800_add_users_table", "2024-05-13-093800_add_users_table", vec![dep!("clapshot.server", Some("2023-04-18-190300_add_cascade_rules"), Some("2023-04-18-190300_add_cascade_rules"))]),
-            mig!("2024-05-22-163000_add_media_type", "2024-05-22-163000_add_media_type", vec![dep!("clapshot.server", Some("2024-05-13-093800_add_users_table"), Some("2024-05-13-093800_add_users_table"))]),
-            mig!("2024-05-30-202000_add_missing_users", "2024-05-30-202000_add_missing_users", vec![dep!("clapshot.server", Some("2024-05-22-163000_add_media_type"), Some("2024-05-22-163000_add_media_type"))]),
-        ]);
+        let mod_server = migmod!(
+            "clapshot.server",
+            Some("1"),
+            vec![
+                mig!(
+                    "2023-04-18-190209_change_video_primkey",
+                    "2023-04-18-190209_change_video_primkey",
+                    vec![dep!("clapshot.server", None, Some(""))]
+                ),
+                mig!(
+                    "2023-04-18-190300_add_cascade_rules",
+                    "2023-04-18-190300_add_cascade_rules",
+                    vec![dep!(
+                        "clapshot.server",
+                        Some("2023-04-18-190209_change_video_primkey"),
+                        Some("2023-04-18-190209_change_video_primkey")
+                    )]
+                ),
+                mig!(
+                    "2024-05-13-093800_add_users_table",
+                    "2024-05-13-093800_add_users_table",
+                    vec![dep!(
+                        "clapshot.server",
+                        Some("2023-04-18-190300_add_cascade_rules"),
+                        Some("2023-04-18-190300_add_cascade_rules")
+                    )]
+                ),
+                mig!(
+                    "2024-05-22-163000_add_media_type",
+                    "2024-05-22-163000_add_media_type",
+                    vec![dep!(
+                        "clapshot.server",
+                        Some("2024-05-13-093800_add_users_table"),
+                        Some("2024-05-13-093800_add_users_table")
+                    )]
+                ),
+                mig!(
+                    "2024-05-30-202000_add_missing_users",
+                    "2024-05-30-202000_add_missing_users",
+                    vec![dep!(
+                        "clapshot.server",
+                        Some("2024-05-22-163000_add_media_type"),
+                        Some("2024-05-22-163000_add_media_type")
+                    )]
+                ),
+            ]
+        );
         solve_and_compare(vec![&mod_server], None);
     }
-
}
diff --git a/server/src/database/mod.rs b/server/src/database/mod.rs
index 865de769..406ced6e 100644
--- a/server/src/database/mod.rs
+++ b/server/src/database/mod.rs
@@ -1,19 +1,19 @@
+use anyhow::{anyhow, Context};
 use diesel::migration::Migration;
 use diesel::prelude::*;
 use diesel::r2d2::ConnectionManager;
 use diesel::SqliteConnection;
-use anyhow::{Context, anyhow};
 use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};

 use std::path::Path;
 use std::sync::atomic::AtomicBool;

-pub mod schema;
-pub mod models;
+pub mod db_backup;
 pub mod error;
 pub mod migration_solver;
-pub mod db_backup;
+pub mod models;
+pub mod schema;

 #[cfg(test)]
 pub mod tests;
@@ -27,7 +27,6 @@
 pub type Pool = r2d2::Pool<ConnectionManager<SqliteConnection>>;

 pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations");

-
 #[macro_export]
 macro_rules! retry_if_db_locked {
     ($op:expr) => {
@@ -40,7 +39,12 @@ macro_rules!
retry_if_db_locked {
                } else {
                    let err_msg = res.as_ref().err().unwrap().to_string();
                    if (attempt <= 8) && err_msg.to_lowercase().contains("locked") {
-                        tracing::debug!("DB: '{}, retrying in 100ms (attempt {}/{})", err_msg, attempt, 8);
+                        tracing::debug!(
+                            "DB: '{}, retrying in 100ms (attempt {}/{})",
+                            err_msg,
+                            attempt,
+                            8
+                        );
                        std::thread::sleep(std::time::Duration::from_millis(100));
                        attempt += 1;
                        continue;
@@ -50,7 +54,7 @@ macro_rules! retry_if_db_locked {
                }
            }
        })()
-    }
+    };
}

/// Convert a diesel result to a DBResult, turning empty result
@@ -69,13 +73,14 @@
pub struct DB {
    broken_for_test: AtomicBool,
}

-
impl DB {
-
    /// Connect to SQLite database with an URL (use this for memory databases)
    pub fn open_db_url(db_url: &str) -> DBResult<DB> {
        let manager = ConnectionManager::<SqliteConnection>::new(db_url);
-        let pool = Pool::builder().max_size(16).build(manager).context("Failed to build DB pool")?;
+        let pool = Pool::builder()
+            .max_size(16)
+            .build(manager)
+            .context("Failed to build DB pool")?;
        Ok(DB {
            pool,
            broken_for_test: AtomicBool::new(false),
@@ -83,38 +88,64 @@ impl DB {
    }

    /// Connect to SQLite database with a file path
-    pub fn open_db_file( db_file: &Path ) -> DBResult<DB> {
-        let db_url = format!("sqlite://{}", db_file.to_str().ok_or(anyhow!("Invalid DB file path"))
-            .context("Failed to connect DB file")?);
+    pub fn open_db_file(db_file: &Path) -> DBResult<DB> {
+        let db_url = format!(
+            "sqlite://{}",
+            db_file
+                .to_str()
+                .ok_or(anyhow!("Invalid DB file path"))
+                .context("Failed to connect DB file")?
+        );
        let res = DB::open_db_url(&db_url);
        res
    }

    /// Get a connection from the pool
    pub fn conn(&self) -> DBResult<PooledConnection> {
-        if self.broken_for_test.load(std::sync::atomic::Ordering::Relaxed) {
+        if self
+            .broken_for_test
+            .load(std::sync::atomic::Ordering::Relaxed)
+        {
            let bad_manager = ConnectionManager::<SqliteConnection>::new("sqlite:///dev/urandom");
-            let bad_pool = Pool::builder().build(bad_manager).context("TEST ERROR: Failed to build 'broken' DB pool")?;
-            return bad_pool.get().map_err(|e| anyhow!("TEST ERROR: Failed to get connection from 'broken' pool: {:?}", e).into());
+            let bad_pool = Pool::builder()
+                .build(bad_manager)
+                .context("TEST ERROR: Failed to build 'broken' DB pool")?;
+            return bad_pool.get().map_err(|e| {
+                anyhow!(
+                    "TEST ERROR: Failed to get connection from 'broken' pool: {:?}",
+                    e
+                )
+                .into()
+            });
        }
-        let mut conn = self.pool.get().context("Failed to get connection from pool")?;
-        diesel::sql_query(r#"
+        let mut conn = self
+            .pool
+            .get()
+            .context("Failed to get connection from pool")?;
+        diesel::sql_query(
+            r#"
            PRAGMA foreign_keys = ON;
            PRAGMA journal_mode = WAL;
            PRAGMA wal_autocheckpoint = 1000;
            PRAGMA wal_checkpoint(TRUNCATE);
            PRAGMA synchronous = NORMAL;
            PRAGMA busy_timeout = 15000;
-        "#).execute(&mut conn).context("Failed to set DB pragmas")?;
+        "#,
+        )
+        .execute(&mut conn)
+        .context("Failed to set DB pragmas")?;
        Ok(conn)
    }

    /// Return list of any pending (migration_name, version) tuples
    pub fn pending_server_migrations(&self) -> DBResult<Vec<(String, String)>> {
-        Ok(MigrationHarness::pending_migrations(&mut self.conn()?, MIGRATIONS)
-            .map_err(|e| anyhow!("Failed to get migrations: {:?}", e))?
-            .iter().map(|m| (format!("{}", m.name()), format!("{}", m.name().version())))
-            .collect())
+        Ok(
+            MigrationHarness::pending_migrations(&mut self.conn()?, MIGRATIONS)
+                .map_err(|e| anyhow!("Failed to get migrations: {:?}", e))?
+ .iter() + .map(|m| (format!("{}", m.name()), format!("{}", m.name().version()))) + .collect(), + ) } /// Return name of the latest applied migration @@ -127,18 +158,25 @@ impl DB { } /// Run a named migration - pub fn apply_server_migration(&self, conn: &mut SqliteConnection, migration_name: &str) -> EmptyDBResult { - + pub fn apply_server_migration( + &self, + conn: &mut SqliteConnection, + migration_name: &str, + ) -> EmptyDBResult { let pending = MigrationHarness::pending_migrations(conn, MIGRATIONS) .map_err(|e| anyhow!("Failed to get migrations: {:?}", e))?; - let migration = pending.iter().find(|m| m.name().to_string() == migration_name) + let migration = pending + .iter() + .find(|m| m.name().to_string() == migration_name) .ok_or_else(|| anyhow!("Migration not found: {}", migration_name))?; - let _span = tracing::info_span!("apply_server_migration", + let _span = tracing::info_span!( + "apply_server_migration", name = migration.name().to_string(), new_ver = migration.name().version().to_string(), - ).entered(); + ) + .entered(); tracing::debug!("PRAGMA foreign_keys = OFF;"); diesel::sql_query("PRAGMA foreign_keys = OFF;").execute(conn)?; @@ -147,7 +185,8 @@ impl DB { diesel::sql_query("PRAGMA legacy_alter_table=ON;").execute(conn)?; let res: EmptyDBResult = conn.transaction(|conn| { - sqlite_check_foreign_key_status(conn, false).context("Pragma failed to disable foreign keys")?; + sqlite_check_foreign_key_status(conn, false) + .context("Pragma failed to disable foreign keys")?; tracing::info!("Applying..."); MigrationHarness::run_migration(conn, &**migration) @@ -159,18 +198,19 @@ impl DB { res.and_then(|_| { tracing::debug!("PRAGMA foreign_keys = ON;"); diesel::sql_query("PRAGMA foreign_keys = ON;").execute(conn)?; - sqlite_check_foreign_key_status(conn, true).context("Pragma failed to re-enable foreign keys")?; + sqlite_check_foreign_key_status(conn, true) + .context("Pragma failed to re-enable foreign keys")?; Ok(()) }) } /// "Corrupt" the connection for testing so that subsequent queries fail pub fn break_db(&self) { - self.broken_for_test.store(true, std::sync::atomic::Ordering::Relaxed); + self.broken_for_test + .store(true, std::sync::atomic::Ordering::Relaxed); } } - #[derive(QueryableByName, Debug)] struct ForeignKeyEnforcement { #[diesel(sql_type = diesel::sql_types::Integer)] @@ -178,12 +218,17 @@ struct ForeignKeyEnforcement { value: i32, } -pub fn sqlite_check_foreign_key_status(conn: &mut SqliteConnection, should_be_on: bool) -> EmptyDBResult { +pub fn sqlite_check_foreign_key_status( + conn: &mut SqliteConnection, + should_be_on: bool, +) -> EmptyDBResult { let fk_status: Vec = diesel::sql_query("PRAGMA foreign_keys;") .load(conn) .map_err(|e| anyhow!("Failed to check foreign key setting: {:?}", e))?; - if fk_status.is_empty() { return Err(anyhow!("Failed to check foreign key setting"))?; } + if fk_status.is_empty() { + return Err(anyhow!("Failed to check foreign key setting"))?; + } if should_be_on && fk_status.iter().any(|fk| fk.value != 1) { return Err(anyhow!("Assertion failed: SQLite foreign_keys != ON").into()); @@ -193,11 +238,8 @@ pub fn sqlite_check_foreign_key_status(conn: &mut SqliteConnection, should_be_on Ok(()) } - - /// Check for foreign key violations in the database pub fn sqlite_foreign_key_check(conn: &mut SqliteConnection, log_as_errors: bool) -> EmptyDBResult { - #[derive(QueryableByName, Debug)] struct ForeignKeyCheck { #[diesel(sql_type = diesel::sql_types::Text)] @@ -215,7 +257,8 @@ pub fn sqlite_foreign_key_check(conn: &mut SqliteConnection, 
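// Sketch (illustrative, not from the patch) of the migration flow these two
// methods support, mirroring how the test fixture drives them: enumerate the
// pending migrations, then apply each one by name. apply_server_migration(),
// shown above, toggles PRAGMA foreign_keys off, runs the migration inside a
// transaction, and re-enables the pragma afterwards.
fn migrate_to_latest(db: &crate::database::DB) -> crate::database::EmptyDBResult {
    let mut conn = db.conn()?;
    for (name, version) in db.pending_server_migrations()? {
        tracing::info!("Applying pending migration {} (version {})", name, version);
        db.apply_server_migration(&mut conn, &name)?;
    }
    Ok(())
}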
log_as_errors: bool } let violations: Vec = diesel::sql_query("PRAGMA foreign_key_check;") - .load(conn).map_err(|e| anyhow!("Failed to check foreign key violations: {:?}", e))?; + .load(conn) + .map_err(|e| anyhow!("Failed to check foreign key violations: {:?}", e))?; if violations.is_empty() { Ok(()) } else { @@ -249,14 +292,17 @@ impl DBPaging { impl Default for DBPaging { fn default() -> Self { - Self { page_num: 0, page_size: unsafe { std::num::NonZeroU32::new_unchecked(u32::MAX) } } + Self { + page_num: 0, + page_size: unsafe { std::num::NonZeroU32::new_unchecked(u32::MAX) }, + } } } - pub trait DbBasicQuery: Sized - where P: std::str::FromStr + Send + Sync + Clone, - I: Send + Sync, +where + P: std::str::FromStr + Send + Sync + Clone, + I: Send + Sync, { /// Insert a new object into the database. fn insert(conn: &mut PooledConnection, item: &I) -> DBResult; @@ -282,19 +328,56 @@ pub trait DbBasicQuery: Sized } pub trait DbUpdate
: Sized - where P: std::str::FromStr + Send + Sync + Clone, +where + P: std::str::FromStr + Send + Sync + Clone, { /// Update objects, replaces the entire object except for the primary key. fn update_many(conn: &mut PooledConnection, items: &[Self]) -> DBResult>; } mod basic_query; -crate::implement_basic_query_traits!(models::User, models::UserInsert, users, String, created.desc()); -crate::implement_basic_query_traits!(models::MediaType, models::MediaType, media_types, String, id.desc()); -crate::implement_basic_query_traits!(models::MediaFile, models::MediaFileInsert, media_files, String, added_time.desc()); -crate::implement_basic_query_traits!(models::Comment, models::CommentInsert, comments, i32, created.desc()); -crate::implement_basic_query_traits!(models::Message, models::MessageInsert, messages, i32, created.desc()); -crate::implement_basic_query_traits!(models::Subtitle, models::SubtitleInsert, subtitles, i32, added_time.desc()); +crate::implement_basic_query_traits!( + models::User, + models::UserInsert, + users, + String, + created.desc() +); +crate::implement_basic_query_traits!( + models::MediaType, + models::MediaType, + media_types, + String, + id.desc() +); +crate::implement_basic_query_traits!( + models::MediaFile, + models::MediaFileInsert, + media_files, + String, + added_time.desc() +); +crate::implement_basic_query_traits!( + models::Comment, + models::CommentInsert, + comments, + i32, + created.desc() +); +crate::implement_basic_query_traits!( + models::Message, + models::MessageInsert, + messages, + i32, + created.desc() +); +crate::implement_basic_query_traits!( + models::Subtitle, + models::SubtitleInsert, + subtitles, + i32, + added_time.desc() +); crate::implement_update_traits!(models::User, users, String); crate::implement_update_traits!(models::MediaFile, media_files, String); @@ -302,8 +385,6 @@ crate::implement_update_traits!(models::Comment, comments, i32); crate::implement_update_traits!(models::Message, messages, i32); crate::implement_update_traits!(models::Subtitle, subtitles, i32); - - pub trait DbQueryByUser: Sized { /// Get all objects of type Self that belong to given user. fn get_by_user(conn: &mut PooledConnection, uid: &str, pg: DBPaging) -> DBResult>; @@ -313,12 +394,29 @@ crate::implement_query_by_user_traits!(models::MediaFile, media_files, user_id, crate::implement_query_by_user_traits!(models::Comment, comments, user_id, created.desc()); crate::implement_query_by_user_traits!(models::Message, messages, user_id, created.desc()); - - pub trait DbQueryByMediaFile: Sized { /// Get all objects of type Self that are linked to given media file. 
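// Illustrative sketch of what the implement_* macro invocations below wire
// up: models::Comment gets DbBasicQuery (insert, get, get_all, delete, ...)
// plus DbQueryByMediaFile, so a per-media-file listing becomes a one-liner.
// The wrapper function name is an assumption for illustration.
fn list_comments_for(
    conn: &mut PooledConnection,
    media_file_id: &str,
) -> DBResult<Vec<models::Comment>> {
    // DBPaging::default() returns everything on a single page.
    models::Comment::get_by_media_file(conn, media_file_id, DBPaging::default())
}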
- fn get_by_media_file(conn: &mut PooledConnection, vid: &str, pg: DBPaging) -> DBResult>; + fn get_by_media_file( + conn: &mut PooledConnection, + vid: &str, + pg: DBPaging, + ) -> DBResult>; } -crate::implement_query_by_media_file_traits!(models::Comment, comments, media_file_id, created.desc()); -crate::implement_query_by_media_file_traits!(models::Message, messages, media_file_id, created.desc()); -crate::implement_query_by_media_file_traits!(models::Subtitle, subtitles, media_file_id, added_time.desc()); +crate::implement_query_by_media_file_traits!( + models::Comment, + comments, + media_file_id, + created.desc() +); +crate::implement_query_by_media_file_traits!( + models::Message, + messages, + media_file_id, + created.desc() +); +crate::implement_query_by_media_file_traits!( + models::Subtitle, + subtitles, + media_file_id, + added_time.desc() +); diff --git a/server/src/database/models.rs b/server/src/database/models.rs index 4fe67b0f..6507f176 100644 --- a/server/src/database/models.rs +++ b/server/src/database/models.rs @@ -1,12 +1,14 @@ -use diesel::{prelude::*, QueryId}; -use serde::{Deserialize, Serialize}; use super::schema::*; use chrono; use chrono::naive::serde::{ts_seconds, ts_seconds_option}; use chrono::TimeZone; +use diesel::{prelude::*, QueryId}; +use serde::{Deserialize, Serialize}; use timeago; -#[derive(Serialize, Deserialize, Debug, Default, Queryable, Selectable, Identifiable, AsChangeset, Clone)] +#[derive( + Serialize, Deserialize, Debug, Default, Queryable, Selectable, Identifiable, AsChangeset, Clone, +)] #[diesel(treat_none_as_null = true)] pub struct User { pub id: String, @@ -23,7 +25,9 @@ pub struct UserInsert { pub name: String, } -#[derive(Serialize, Deserialize, Debug, Queryable, Selectable, Insertable, Identifiable, QueryId, Clone)] +#[derive( + Serialize, Deserialize, Debug, Queryable, Selectable, Insertable, Identifiable, QueryId, Clone, +)] #[diesel(treat_none_as_null = true)] #[diesel(table_name = media_types)] #[diesel(primary_key(id))] @@ -31,8 +35,9 @@ pub struct MediaType { pub id: String, } - -#[derive(Serialize, Deserialize, Debug, Queryable, Selectable, Identifiable, QueryId, AsChangeset, Clone)] +#[derive( + Serialize, Deserialize, Debug, Queryable, Selectable, Identifiable, QueryId, AsChangeset, Clone, +)] #[diesel(treat_none_as_null = true)] #[diesel(table_name = media_files)] #[diesel(primary_key(id))] @@ -81,7 +86,18 @@ pub struct MediaFileInsert { // ------------------------------------------------------- -#[derive(Serialize, Deserialize, Debug, Default, Queryable, Selectable, Identifiable, Associations, AsChangeset, Clone)] +#[derive( + Serialize, + Deserialize, + Debug, + Default, + Queryable, + Selectable, + Identifiable, + Associations, + AsChangeset, + Clone, +)] #[diesel(belongs_to(MediaFile, foreign_key = media_file_id))] #[diesel(treat_none_as_null = true)] pub struct Subtitle { @@ -112,10 +128,20 @@ pub struct SubtitleInsert { // ------------------------------------------------------- -#[derive(Serialize, Deserialize, Debug, Associations, Queryable, Selectable, Identifiable, QueryId, AsChangeset, Clone)] +#[derive( + Serialize, + Deserialize, + Debug, + Associations, + Queryable, + Selectable, + Identifiable, + QueryId, + AsChangeset, + Clone, +)] #[diesel(belongs_to(User, foreign_key = user_id))] #[diesel(belongs_to(MediaFile, foreign_key = media_file_id))] - #[diesel(treat_none_as_null = true)] pub struct Comment { pub id: i32, @@ -151,12 +177,22 @@ pub struct CommentInsert { pub drawing: Option, pub subtitle_id: Option, 
pub subtitle_filename_ifnull: Option, - } // ------------------------------------------------------- -#[derive(Serialize, Deserialize, Debug, Default, Queryable, Selectable, Identifiable, Associations, AsChangeset, Clone)] +#[derive( + Serialize, + Deserialize, + Debug, + Default, + Queryable, + Selectable, + Identifiable, + Associations, + AsChangeset, + Clone, +)] #[diesel(belongs_to(User, foreign_key = user_id))] #[diesel(belongs_to(MediaFile, foreign_key = media_file_id))] #[diesel(belongs_to(Comment, foreign_key = comment_id))] diff --git a/server/src/database/schema.rs b/server/src/database/schema.rs index 93f2c0d4..03a9a64b 100644 --- a/server/src/database/schema.rs +++ b/server/src/database/schema.rs @@ -84,7 +84,6 @@ diesel::table! { } diesel::joinable!(comments -> subtitles (subtitle_id)); - diesel::allow_tables_to_appear_in_same_query!( users, comments, diff --git a/server/src/database/tests.rs b/server/src/database/tests.rs index 37993331..beda0c95 100644 --- a/server/src/database/tests.rs +++ b/server/src/database/tests.rs @@ -1,8 +1,9 @@ -use tracing_test::traced_test; use crate::database::*; +use tracing_test::traced_test; -use models::{User, MediaType, MediaFile, MediaFileInsert, Message, MessageInsert, Comment, CommentInsert}; - +use models::{ + Comment, CommentInsert, MediaFile, MediaFileInsert, MediaType, Message, MessageInsert, User, +}; fn _dump_db(conn: &mut PooledConnection) { println!("================ dump_db ================"); @@ -10,22 +11,31 @@ fn _dump_db(conn: &mut PooledConnection) { conn.transaction(|conn| { let media_types = MediaType::get_all(conn, DBPaging::default()).unwrap(); println!("----- Media types -----"); - for v in media_types { println!("----\n{:#?}", v);} + for v in media_types { + println!("----\n{:#?}", v); + } let media_files = MediaFile::get_all(conn, DBPaging::default()).unwrap(); println!("----- Media files -----"); - for v in media_files { println!("----\n{:#?}", v);} + for v in media_files { + println!("----\n{:#?}", v); + } let comments = Comment::get_all(conn, DBPaging::default()).unwrap(); println!("----- Comments -----"); - for c in comments { println!("----\n{:#?}", c);} + for c in comments { + println!("----\n{:#?}", c); + } let messages = Message::get_all(conn, DBPaging::default()).unwrap(); println!("----- Messages -----"); - for m in messages { println!("----\n{:#?}", m);} + for m in messages { + println!("----\n{:#?}", m); + } DBResult::Ok(()) - }).unwrap(); + }) + .unwrap(); println!("========================================="); } @@ -46,29 +56,34 @@ fn _dump_db(conn: &mut PooledConnection) { /// /// /// ``` -pub fn make_test_db() -> (std::sync::Arc, assert_fs::TempDir, Vec, Vec) -{ +pub fn make_test_db() -> ( + std::sync::Arc, + assert_fs::TempDir, + Vec, + Vec, +) { println!("--- make_test_db"); let data_dir = assert_fs::TempDir::new().unwrap(); std::fs::create_dir(&data_dir.path().join("incoming")).ok(); - let db = std::sync::Arc::new(DB::open_db_file(data_dir.join("clapshot.sqlite").as_path()).unwrap()); + let db = + std::sync::Arc::new(DB::open_db_file(data_dir.join("clapshot.sqlite").as_path()).unwrap()); let conn = &mut db.conn().unwrap(); for (m, _ver) in db.pending_server_migrations().unwrap() { db.apply_server_migration(conn, &m).unwrap(); } - _dump_db(conn); // Uncomment to debug database contents + _dump_db(conn); // Uncomment to debug database contents // Make some videos let hashes = vec!["B1DE0", "11111", "22222", "B1DE3", "B1DE4"]; let mkvid = |i: usize| { - let user_id = format!("user.num{}", 1 + i % 2); 
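// Sketch of how the tests below typically consume this fixture (the test
// name is hypothetical): make_test_db() returns the DB handle, the TempDir
// guard (kept bound so the files outlive the test body), the five seeded
// media files, and seven comments (five top-level, two replies).
#[test]
fn example_fixture_usage() -> anyhow::Result<()> {
    let (db, _data_dir, media_files, comments) = make_test_db();
    let conn = &mut db.conn()?;
    assert_eq!(media_files.len(), 5);
    assert_eq!(comments.len(), 7);
    assert_eq!(MediaFile::get_all(conn, DBPaging::default())?.len(), 5);
    Ok(())
}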
let username = format!("User Number{}", 1 + i % 2); - let user = User::get_or_create(conn, &user_id, Some(&username)).expect("Failed to create user"); + let user = + User::get_or_create(conn, &user_id, Some(&username)).expect("Failed to create user"); let v = MediaFileInsert { id: hashes[i].to_string(), @@ -108,7 +123,8 @@ pub fn make_test_db() -> (std::sync::Arc, assert_fs::TempDir, Vec let c = Comment::insert(conn, &c).expect("Failed to insert comment"); let dp = data_dir.join("videos").join(vid).join("drawings"); std::fs::create_dir_all(&dp).expect("Failed to create drawing directory"); - std::fs::write(dp.join(&c.drawing.clone().unwrap()), "IMAGE_DATA").expect("Failed to write drawing"); + std::fs::write(dp.join(&c.drawing.clone().unwrap()), "IMAGE_DATA") + .expect("Failed to write drawing"); c }; let mut comments = (0..5) @@ -138,7 +154,6 @@ pub fn make_test_db() -> (std::sync::Arc, assert_fs::TempDir, Vec (db, data_dir, videos, comments) } - #[test] #[traced_test] fn test_pagination() -> anyhow::Result<()> { @@ -146,7 +161,13 @@ fn test_pagination() -> anyhow::Result<()> { let conn = &mut db.conn()?; // Test pagination of comments - let mut res = Comment::get_all(conn, DBPaging { page_num: 0, page_size: 3.try_into()? })?; + let mut res = Comment::get_all( + conn, + DBPaging { + page_num: 0, + page_size: 3.try_into()?, + }, + )?; println!("---- page 0, 3"); println!("res: {:#?}", res); @@ -155,7 +176,13 @@ fn test_pagination() -> anyhow::Result<()> { assert_eq!(res[1].id, comments[1].id); assert_eq!(res[2].id, comments[2].id); - res = Comment::get_all(conn, DBPaging { page_num: 1, page_size: 3.try_into()? })?; + res = Comment::get_all( + conn, + DBPaging { + page_num: 1, + page_size: 3.try_into()?, + }, + )?; println!("---- page 1, 3"); println!("res: {:#?}", res); assert_eq!(res.len(), 3); @@ -163,7 +190,13 @@ fn test_pagination() -> anyhow::Result<()> { assert_eq!(res[1].id, comments[4].id); assert_eq!(res[2].id, comments[5].id); - res = Comment::get_all(conn, DBPaging { page_num: 2, page_size: 3.try_into()? 
})?; + res = Comment::get_all( + conn, + DBPaging { + page_num: 2, + page_size: 3.try_into()?, + }, + )?; println!("---- page 2, 3"); println!("res: {:#?}", res); assert_eq!(res.len(), 2); @@ -173,20 +206,21 @@ fn test_pagination() -> anyhow::Result<()> { Ok(()) } - // ---------------------------------------------------------------------------- - #[test] #[traced_test] -fn test_fixture_state() -> anyhow::Result<()> -{ +fn test_fixture_state() -> anyhow::Result<()> { let (db, _data_dir, videos, comments) = make_test_db(); let conn = &mut db.conn()?; // First 5 comments have no parent, last 2 have parent_id=1 - for i in 0..5 { assert!(comments[i].parent_id.is_none()); } - for i in 5..5 + 2 { assert_eq!(comments[i].parent_id, Some(comments[0].id)); } + for i in 0..5 { + assert!(comments[i].parent_id.is_none()); + } + for i in 5..5 + 2 { + assert_eq!(comments[i].parent_id, Some(comments[0].id)); + } // Video #0 has 3 comments, video #1 has 2, video #2 has 1 assert_eq!(comments[0].media_file_id, comments[3].media_file_id); @@ -201,14 +235,17 @@ fn test_fixture_state() -> anyhow::Result<()> for v in videos.iter() { assert_eq!(MediaFile::get(conn, &v.id)?.id, v.id); let comments = Comment::get_by_media_file(conn, &v.id, DBPaging::default())?; - assert_eq!(comments.len(), match v.id.as_str() { - "B1DE0" => 5, - "11111" => 2, - "22222" => 1, - "B1DE3" => 0, - "B1DE4" => 0, - _ => panic!("Unexpected media file id"), - }); + assert_eq!( + comments.len(), + match v.id.as_str() { + "B1DE0" => 5, + "11111" => 2, + "22222" => 1, + "B1DE3" => 0, + "B1DE4" => 0, + _ => panic!("Unexpected media file id"), + } + ); } for c in comments.iter() { assert_eq!(models::Comment::get(conn, &c.id)?.id, c.id); @@ -216,32 +253,55 @@ fn test_fixture_state() -> anyhow::Result<()> } // Check that we can get videos by user - assert_eq!(models::MediaFile::get_by_user(conn, "user.num1", DBPaging::default())?.len(), 3); - assert_eq!(models::MediaFile::get_by_user(conn, "user.num2", DBPaging::default())?.len(), 2); + assert_eq!( + models::MediaFile::get_by_user(conn, "user.num1", DBPaging::default())?.len(), + 3 + ); + assert_eq!( + models::MediaFile::get_by_user(conn, "user.num2", DBPaging::default())?.len(), + 2 + ); Ok(()) } - #[test] #[traced_test] fn test_comment_delete() -> anyhow::Result<()> { let (db, _data_dir, _vid, com) = make_test_db(); let conn = &mut db.conn()?; - assert_eq!(Comment::get_by_media_file(conn, &com[1].media_file_id, DBPaging::default())?.len(), 2, "Media files should have 2 comments before deletion"); + assert_eq!( + Comment::get_by_media_file(conn, &com[1].media_file_id, DBPaging::default())?.len(), + 2, + "Media files should have 2 comments before deletion" + ); // Delete comment #2 and check that it was deleted, and nothing else models::Comment::delete(&mut db.conn()?, &com[1].id)?; for c in com.iter() { if c.id == com[1].id { - assert!(matches!(models::Comment::get(conn, &c.id).unwrap_err() , DBError::NotFound()), "Comment should be deleted"); + assert!( + matches!( + models::Comment::get(conn, &c.id).unwrap_err(), + DBError::NotFound() + ), + "Comment should be deleted" + ); } else { - assert_eq!(models::Comment::get(conn, &c.id)?.id, c.id, "Deletion removed wrong comment(s)"); + assert_eq!( + models::Comment::get(conn, &c.id)?.id, + c.id, + "Deletion removed wrong comment(s)" + ); } } // Check that media file still has 1 comment - assert_eq!(Comment::get_by_media_file(conn, &com[1].media_file_id, DBPaging::default())?.len(), 1, "Media file should have 1 comment left"); + assert_eq!( + 
Comment::get_by_media_file(conn, &com[1].media_file_id, DBPaging::default())?.len(), + 1, + "Media file should have 1 comment left" + ); // Delete last, add a new one and check for ID reuse models::Comment::delete(&mut db.conn()?, &com[6].id)?; @@ -257,7 +317,10 @@ fn test_comment_delete() -> anyhow::Result<()> { subtitle_filename_ifnull: None, }; let new_id = models::Comment::insert(conn, &c)?.id; - assert_ne!(new_id, com[6].id, "Comment ID was re-used after deletion. This would mix up comment threads in the UI."); + assert_ne!( + new_id, com[6].id, + "Comment ID was re-used after deletion. This would mix up comment threads in the UI." + ); Ok(()) } @@ -282,7 +345,6 @@ fn test_rename_video() -> anyhow::Result<()> { Ok(()) } - #[test] #[traced_test] fn test_user_messages() -> anyhow::Result<()> { @@ -331,15 +393,21 @@ fn test_user_messages() -> anyhow::Result<()> { let a = serde_json::to_value(Message::get(conn, &new_msg.id)?.to_proto3())?; let b = serde_json::to_value(new_msg.to_proto3())?; - assert_eq!(a,b); + assert_eq!(a, b); assert!(!Message::get(conn, &new_msg.id)?.seen); new_msgs.push(new_msg); } // Correctly count messages - assert_eq!(Message::get_by_user(conn, "user.num1", DBPaging::default())?.len(), 2); - assert_eq!(Message::get_by_user(conn, "user.num2", DBPaging::default())?.len(), 1); + assert_eq!( + Message::get_by_user(conn, "user.num1", DBPaging::default())?.len(), + 2 + ); + assert_eq!( + Message::get_by_user(conn, "user.num2", DBPaging::default())?.len(), + 1 + ); // Mark message #2 as seen Message::set_seen(conn, new_msgs[1].id, true)?; @@ -348,8 +416,14 @@ fn test_user_messages() -> anyhow::Result<()> { // Delete & recount Message::delete(conn, &new_msgs[2].id)?; Message::delete(conn, &new_msgs[0].id)?; - assert_eq!(Message::get_by_user(conn, "user.num1", DBPaging::default())?.len(), 1); - assert_eq!(Message::get_by_user(conn, "user.num2", DBPaging::default())?.len(), 0); + assert_eq!( + Message::get_by_user(conn, "user.num1", DBPaging::default())?.len(), + 1 + ); + assert_eq!( + Message::get_by_user(conn, "user.num2", DBPaging::default())?.len(), + 0 + ); Ok(()) } @@ -360,15 +434,25 @@ fn test_transaction_rollback() -> anyhow::Result<()> { let (db, _data_dir, vid, _com) = make_test_db(); let conn = &mut db.conn()?; - assert_eq!(MediaFile::get_all(conn, DBPaging::default()).unwrap().len(), vid.len()); + assert_eq!( + MediaFile::get_all(conn, DBPaging::default()).unwrap().len(), + vid.len() + ); conn.transaction::<(), _, _>(|conn| { MediaFile::delete(conn, &vid[0].id).unwrap(); - assert_eq!(MediaFile::get_all(conn, DBPaging::default()).unwrap().len(), vid.len()-1); + assert_eq!( + MediaFile::get_all(conn, DBPaging::default()).unwrap().len(), + vid.len() - 1 + ); Err(diesel::result::Error::RollbackTransaction) - }).ok(); + }) + .ok(); - assert_eq!(MediaFile::get_all(conn, DBPaging::default()).unwrap().len(), vid.len()); + assert_eq!( + MediaFile::get_all(conn, DBPaging::default()).unwrap().len(), + vid.len() + ); Ok(()) } @@ -378,13 +462,23 @@ fn test_transaction_commit() -> anyhow::Result<()> { let (db, _data_dir, vid, _com) = make_test_db(); let conn = &mut db.conn()?; - assert_eq!(MediaFile::get_all(conn, DBPaging::default()).unwrap().len(), vid.len()); + assert_eq!( + MediaFile::get_all(conn, DBPaging::default()).unwrap().len(), + vid.len() + ); conn.transaction::<(), _, _>(|conn| { MediaFile::delete(conn, &vid[0].id).unwrap(); - assert_eq!(MediaFile::get_all(conn, DBPaging::default()).unwrap().len(), vid.len()-1); + assert_eq!( + MediaFile::get_all(conn, 
DBPaging::default()).unwrap().len(), + vid.len() - 1 + ); DBResult::Ok(()) - }).unwrap(); - assert_eq!(MediaFile::get_all(conn, DBPaging::default()).unwrap().len(), vid.len()-1); + }) + .unwrap(); + assert_eq!( + MediaFile::get_all(conn, DBPaging::default()).unwrap().len(), + vid.len() - 1 + ); Ok(()) } @@ -427,11 +521,17 @@ fn test_subtitle_add_update_delete() -> anyhow::Result<()> { }; let c = models::Comment::insert(conn, &c)?; assert_eq!(models::Comment::get(conn, &c.id)?.subtitle_id, Some(s.id)); - assert_eq!(models::Comment::get(conn, &c.id)?.subtitle_filename_ifnull, None); + assert_eq!( + models::Comment::get(conn, &c.id)?.subtitle_filename_ifnull, + None + ); // Delete subtitle models::Subtitle::delete(conn, &s.id)?; - assert!(matches!(models::Subtitle::get(conn, &s.id).unwrap_err(), DBError::NotFound())); + assert!(matches!( + models::Subtitle::get(conn, &s.id).unwrap_err(), + DBError::NotFound() + )); // Check that comment still exists, and that subtitle_filename_ifnull is set let c = models::Comment::get(conn, &c.id)?; @@ -441,14 +541,16 @@ fn test_subtitle_add_update_delete() -> anyhow::Result<()> { Ok(()) } - #[test] #[traced_test] fn test_migrate_existing_v056_db() -> anyhow::Result<()> { let data_dir = assert_fs::TempDir::new().unwrap(); let db_file = data_dir.path().join("clapshot.sqlite"); - std::fs::copy("src/tests/assets/databases/clapshot-migration-test-1_v056.sqlite", &db_file) - .expect("Failed to copy test DB for migration test"); + std::fs::copy( + "src/tests/assets/databases/clapshot-migration-test-1_v056.sqlite", + &db_file, + ) + .expect("Failed to copy test DB for migration test"); let db = DB::open_db_file(&db_file).unwrap(); let conn = &mut db.conn()?; @@ -459,28 +561,93 @@ fn test_migrate_existing_v056_db() -> anyhow::Result<()> { // Check that the database has (some of) the expected contents (still after migrations) let media_files = MediaFile::get_all(conn, DBPaging::default())?; assert_eq!(media_files.len(), 9); - assert_eq!(media_files.iter().filter(|v| v.user_id == "uid-4f9c36a6").count(), 2); - assert_eq!(media_files.iter().filter(|v| v.user_id == "uid-9e25df03").count(), 2); - assert_eq!(media_files.iter().filter(|v| v.user_id == "uid-d20ec3a4").count(), 5); + assert_eq!( + media_files + .iter() + .filter(|v| v.user_id == "uid-4f9c36a6") + .count(), + 2 + ); + assert_eq!( + media_files + .iter() + .filter(|v| v.user_id == "uid-9e25df03") + .count(), + 2 + ); + assert_eq!( + media_files + .iter() + .filter(|v| v.user_id == "uid-d20ec3a4") + .count(), + 5 + ); let comments = Comment::get_all(conn, DBPaging::default())?; assert_eq!(comments.len(), 41); - assert_eq!(comments.iter().filter(|c| c.user_id == Some("uid-9e25df03".into())).count(), 7); - assert_eq!(comments.iter().filter(|c| c.user_id == Some("uid-4f9c36a6".into())).count(), 4); - assert_eq!(comments.iter().filter(|c| c.user_id == Some("uid-addcb300".into())).count(), 5); - assert_eq!(comments.iter().filter(|c| c.user_id == Some("uid-d20ec3a4".into())).count(), 25); - assert_eq!(comments.iter().filter(|c| c.media_file_id == "77d7fe01").count(), 14); - assert_eq!(comments.iter().filter(|c| c.media_file_id == "338fb82c").count(), 2); + assert_eq!( + comments + .iter() + .filter(|c| c.user_id == Some("uid-9e25df03".into())) + .count(), + 7 + ); + assert_eq!( + comments + .iter() + .filter(|c| c.user_id == Some("uid-4f9c36a6".into())) + .count(), + 4 + ); + assert_eq!( + comments + .iter() + .filter(|c| c.user_id == Some("uid-addcb300".into())) + .count(), + 5 + ); + assert_eq!( + comments + 
.iter() + .filter(|c| c.user_id == Some("uid-d20ec3a4".into())) + .count(), + 25 + ); + assert_eq!( + comments + .iter() + .filter(|c| c.media_file_id == "77d7fe01") + .count(), + 14 + ); + assert_eq!( + comments + .iter() + .filter(|c| c.media_file_id == "338fb82c") + .count(), + 2 + ); let messages = Message::get_all(conn, DBPaging::default())?; assert_eq!(messages.len(), 36); - assert_eq!(messages.iter().filter(|m| m.user_id == "uid-d20ec3a4").count(), 17); - assert_eq!(messages.iter().filter(|m| m.media_file_id == Some("338fb82c".into())).count(), 3); + assert_eq!( + messages + .iter() + .filter(|m| m.user_id == "uid-d20ec3a4") + .count(), + 17 + ); + assert_eq!( + messages + .iter() + .filter(|m| m.media_file_id == Some("338fb82c".into())) + .count(), + 3 + ); Ok(()) } - #[test] #[traced_test] fn test_backup_restore() { @@ -497,12 +664,18 @@ fn test_backup_restore() { for c in comments.iter() { models::Comment::delete(conn, &c.id).expect("Failed to delete comment"); } - assert_eq!(models::Comment::get_all(conn, DBPaging::default()).unwrap().len(), 0); // Make sure they are gone + assert_eq!( + models::Comment::get_all(conn, DBPaging::default()) + .unwrap() + .len(), + 0 + ); // Make sure they are gone } // Close DB and restore drop(db); - db_backup::restore_sqlite_database(db_file.clone(), backup_file.clone()).expect("Failed to restore database"); + db_backup::restore_sqlite_database(db_file.clone(), backup_file.clone()) + .expect("Failed to restore database"); // Check that comments are back { diff --git a/server/src/grpc/caller.rs b/server/src/grpc/caller.rs index 58ff357c..2ecb799c 100644 --- a/server/src/grpc/caller.rs +++ b/server/src/grpc/caller.rs @@ -1,71 +1,98 @@ -use std::path::Path; use lib_clapshot_grpc::GrpcBindAddr; +use std::path::Path; -use crate::grpc::grpc_client::{connect, OrganizerConnection}; use super::grpc_client::OrganizerURI; use super::proto; +use crate::grpc::grpc_client::{connect, OrganizerConnection}; pub struct OrganizerCaller { uri: OrganizerURI, } impl OrganizerCaller { - pub fn new(uri: &OrganizerURI ) -> Self { + pub fn new(uri: &OrganizerURI) -> Self { OrganizerCaller { uri: uri.clone() } } - pub fn blocking_handshake_organizer(&self, data_dir: &Path, server_url: &str, db_file: &Path, backchannel: &GrpcBindAddr) - -> anyhow::Result<()> - { - async fn async_call_handshake(conn: &mut OrganizerConnection, backchannel: &GrpcBindAddr, data_dir: &Path, server_url: &str, db_file: &Path) - -> anyhow::Result<()> - { + pub fn blocking_handshake_organizer( + &self, + data_dir: &Path, + server_url: &str, + db_file: &Path, + backchannel: &GrpcBindAddr, + ) -> anyhow::Result<()> { + async fn async_call_handshake( + conn: &mut OrganizerConnection, + backchannel: &GrpcBindAddr, + data_dir: &Path, + server_url: &str, + db_file: &Path, + ) -> anyhow::Result<()> { let v = semver::Version::parse(crate::PKG_VERSION)?; - use lib_clapshot_grpc::proto::org as org; + use lib_clapshot_grpc::proto::org; let req = proto::org::ServerInfo { storage: Some(org::server_info::Storage { storage: Some(org::server_info::storage::Storage::LocalFs( org::server_info::storage::LocalFilesystem { - base_dir: data_dir.to_string_lossy().into() - }))}), + base_dir: data_dir.to_string_lossy().into(), + }, + )), + }), backchannel: Some(org::server_info::GrpcEndpoint { - endpoint: Some( - match backchannel { - GrpcBindAddr::Tcp(addr) => - org::server_info::grpc_endpoint::Endpoint::Tcp( - org::server_info::grpc_endpoint::Tcp { - host: addr.ip().to_string(), - port: addr.port() as u32, - }), - 
GrpcBindAddr::Unix(path) => + endpoint: Some(match backchannel { + GrpcBindAddr::Tcp(addr) => org::server_info::grpc_endpoint::Endpoint::Tcp( + org::server_info::grpc_endpoint::Tcp { + host: addr.ip().to_string(), + port: addr.port() as u32, + }, + ), + GrpcBindAddr::Unix(path) => { org::server_info::grpc_endpoint::Endpoint::Unix( org::server_info::grpc_endpoint::Unix { - path: path.to_string_lossy().into(), - }), - }) + path: path.to_string_lossy().into(), + }, + ) + } }), + }), url_base: server_url.into(), db: Some(org::Database { r#type: org::database::DatabaseType::Sqlite.into(), - endpoint: db_file.canonicalize()?.to_str().ok_or( - anyhow::anyhow!("Sqlite path is not valid UTF-8"))?.into() - }), - version: Some(proto::org::SemanticVersionNumber { major: v.major, minor: v.minor, patch: v.patch }), + endpoint: db_file + .canonicalize()? + .to_str() + .ok_or(anyhow::anyhow!("Sqlite path is not valid UTF-8"))? + .into(), + }), + version: Some(proto::org::SemanticVersionNumber { + major: v.major, + minor: v.minor, + patch: v.patch, + }), }; conn.handshake(req).await?; Ok(()) } const MAX_TRIES: usize = 5; - for retry in 1..(MAX_TRIES+1) { + for retry in 1..(MAX_TRIES + 1) { match self.tokio_connect() { Ok((rt, mut conn)) => { tracing::info!("Connected to organizer (on attempt {retry}). Doing handshake."); - return rt.block_on(async_call_handshake(&mut conn, backchannel, data_dir, server_url, db_file)); - }, + return rt.block_on(async_call_handshake( + &mut conn, + backchannel, + data_dir, + server_url, + db_file, + )); + } Err(e) => { - tracing::warn!("Connecting organizer failed (attempt {retry}/{MAX_TRIES}: {}", e); + tracing::warn!( + "Connecting organizer failed (attempt {retry}/{MAX_TRIES}: {}", + e + ); std::thread::sleep(std::time::Duration::from_secs_f32(0.5)); } } @@ -73,12 +100,12 @@ impl OrganizerCaller { anyhow::bail!("Connecting organizer failed after {MAX_TRIES} attempts"); } - /// Helper for code that's not already async pub fn tokio_connect(&self) -> anyhow::Result<(tokio::runtime::Runtime, OrganizerConnection)> { - let rt = tokio::runtime::Builder::new_current_thread().enable_all().build()?; + let rt = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build()?; let client = rt.block_on(connect(self.uri.clone()))?; Ok((rt, client)) } - } diff --git a/server/src/grpc/db_models.rs b/server/src/grpc/db_models.rs index 02be7939..67978568 100644 --- a/server/src/grpc/db_models.rs +++ b/server/src/grpc/db_models.rs @@ -1,17 +1,19 @@ -use lib_clapshot_grpc::proto; -use crate::database::{error::{DBError, DBResult}, DBPaging, DbQueryByMediaFile, PooledConnection}; use crate::database::models; +use crate::database::{ + error::{DBError, DBResult}, + DBPaging, DbQueryByMediaFile, PooledConnection, +}; +use lib_clapshot_grpc::proto; use super::{datetime_to_proto3, proto3_to_datetime}; - pub fn proto_msg_type_to_event_name(t: proto::user_message::Type) -> &'static str { match t { proto::user_message::Type::Ok => "ok", proto::user_message::Type::Error => "error", proto::user_message::Type::Progress => "progress", proto::user_message::Type::MediaFileUpdated => "media_file_updated", - proto::user_message::Type::MediaFileAdded => "media_file_added" + proto::user_message::Type::MediaFileAdded => "media_file_added", } } @@ -26,35 +28,72 @@ pub fn msg_event_name_to_proto_msg_type(t: &str) -> proto::user_message::Type { } } - // ============================ MediaFile ============================ -impl models::MediaFile -{ - pub fn from_proto3(v: &proto::MediaFile) -> DBResult - { 
+impl models::MediaFile { + pub fn from_proto3(v: &proto::MediaFile) -> DBResult { Ok(Self { id: v.id.clone(), user_id: v.user_id.clone(), media_type: Some(v.media_type.clone()), - added_time: v.added_time.as_ref().map(|t| proto3_to_datetime(t)).flatten().ok_or(DBError::Other(anyhow::anyhow!("Bad added_time")))?, - recompression_done: v.processing_metadata.as_ref().map(|m| m.recompression_done.as_ref().map(|x| proto3_to_datetime(x))).flatten().flatten(), - thumbs_done: v.processing_metadata.as_ref().map(|m| m.thumbs_done.as_ref().map(|x| proto3_to_datetime(x))).flatten().flatten(), + added_time: v + .added_time + .as_ref() + .map(|t| proto3_to_datetime(t)) + .flatten() + .ok_or(DBError::Other(anyhow::anyhow!("Bad added_time")))?, + recompression_done: v + .processing_metadata + .as_ref() + .map(|m| m.recompression_done.as_ref().map(|x| proto3_to_datetime(x))) + .flatten() + .flatten(), + thumbs_done: v + .processing_metadata + .as_ref() + .map(|m| m.thumbs_done.as_ref().map(|x| proto3_to_datetime(x))) + .flatten() + .flatten(), has_thumbnail: v.preview_data.as_ref().map(|d| d.thumb_url.is_some()), - thumb_sheet_cols: v.preview_data.as_ref().map(|d| d.thumb_sheet.as_ref().map(|x| x.cols as i32)).flatten(), - thumb_sheet_rows: v.preview_data.as_ref().map(|d| d.thumb_sheet.as_ref().map(|x| x.rows as i32)).flatten(), - orig_filename: v.processing_metadata.as_ref().map(|m| m.orig_filename.clone()), + thumb_sheet_cols: v + .preview_data + .as_ref() + .map(|d| d.thumb_sheet.as_ref().map(|x| x.cols as i32)) + .flatten(), + thumb_sheet_rows: v + .preview_data + .as_ref() + .map(|d| d.thumb_sheet.as_ref().map(|x| x.rows as i32)) + .flatten(), + orig_filename: v + .processing_metadata + .as_ref() + .map(|m| m.orig_filename.clone()), title: v.title.clone(), total_frames: v.duration.as_ref().map(|d| d.total_frames as i32), duration: v.duration.as_ref().map(|d| d.duration as f32), fps: v.duration.as_ref().map(|d| d.fps.clone()), - raw_metadata_all: v.processing_metadata.as_ref().map(|m| m.ffprobe_metadata_all.clone()).flatten(), - default_subtitle_id: v.default_subtitle_id.as_ref().map(|id| id.parse().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid default_subtitle_id")))).transpose()?, + raw_metadata_all: v + .processing_metadata + .as_ref() + .map(|m| m.ffprobe_metadata_all.clone()) + .flatten(), + default_subtitle_id: v + .default_subtitle_id + .as_ref() + .map(|id| { + id.parse() + .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid default_subtitle_id"))) + }) + .transpose()?, }) } - pub fn to_proto3(&self, media_base_url: &str, subtitles: Vec) -> proto::MediaFile - { + pub fn to_proto3( + &self, + media_base_url: &str, + subtitles: Vec, + ) -> proto::MediaFile { let duration = match (self.duration, self.total_frames, &self.fps) { (Some(dur), Some(total_frames), Some(fps)) => Some(proto::MediaFileDuration { duration: dur as f64, @@ -63,42 +102,63 @@ impl models::MediaFile }), _ => None, }; - let processing_metadata = match (&self.orig_filename, &self.recompression_done, &self.raw_metadata_all.clone()) { - (Some(orig_filename), recompression_done, ffprobe_metadata_all) => Some(proto::MediaFileProcessingMetadata { - orig_filename: orig_filename.clone(), - recompression_done: recompression_done.map(|t| datetime_to_proto3(&t)), - thumbs_done: self.thumbs_done.map(|t| datetime_to_proto3(&t)), - ffprobe_metadata_all: ffprobe_metadata_all.clone(), - }), + let processing_metadata = match ( + &self.orig_filename, + &self.recompression_done, + &self.raw_metadata_all.clone(), + ) { + 
(Some(orig_filename), recompression_done, ffprobe_metadata_all) => { + Some(proto::MediaFileProcessingMetadata { + orig_filename: orig_filename.clone(), + recompression_done: recompression_done.map(|t| datetime_to_proto3(&t)), + thumbs_done: self.thumbs_done.map(|t| datetime_to_proto3(&t)), + ffprobe_metadata_all: ffprobe_metadata_all.clone(), + }) + } _ => None, }; // Make preview data (thumb sheet and/or thumb url) let thumb_url = if matches!(self.has_thumbnail, Some(true)) { - Some(format!("{}/thumbs/thumb.webp", format!("{}/{}", media_base_url, &self.id))) - } else { None }; + Some(format!( + "{}/thumbs/thumb.webp", + format!("{}/{}", media_base_url, &self.id) + )) + } else { + None + }; let thumb_sheet = match (self.thumb_sheet_cols, self.thumb_sheet_rows) { (Some(cols), Some(rows)) => Some(proto::media_file_preview_data::ThumbSheet { - url: format!("{}/thumbs/sheet-{}x{}.webp", format!("{}/{}", media_base_url, &self.id), cols, rows), + url: format!( + "{}/thumbs/sheet-{}x{}.webp", + format!("{}/{}", media_base_url, &self.id), + cols, + rows + ), rows: rows as u32, cols: cols as u32, }), - _ => None + _ => None, }; let preview_data = if thumb_url.is_some() || thumb_sheet.is_some() { - Some(proto::MediaFilePreviewData { thumb_url, thumb_sheet }) - } else { None }; + Some(proto::MediaFilePreviewData { + thumb_url, + thumb_sheet, + }) + } else { + None + }; // Use transcoded or orig video? let orig_uri = match &self.orig_filename { Some(f) => Some(format!("orig/{}", urlencoding::encode(f))), - None => None + None => None, }; let playback_uri = match self.recompression_done { Some(_) => Some("video.mp4".into()), - None => orig_uri.clone() + None => orig_uri.clone(), }; proto::MediaFile { @@ -110,10 +170,14 @@ impl models::MediaFile added_time: Some(datetime_to_proto3(&self.added_time)), preview_data, processing_metadata, - subtitles: subtitles.into_iter().map(|s| s.to_proto3(media_base_url)).collect(), + subtitles: subtitles + .into_iter() + .map(|s| s.to_proto3(media_base_url)) + .collect(), default_subtitle_id: self.default_subtitle_id.map(|id| id.to_string()), - playback_url: playback_uri.map(|uri| format!("{}/{}/{}", media_base_url, &self.id, uri)), - orig_url: orig_uri.map(|uri| format!("{}/{}/{}", media_base_url, &self.id, uri)) + playback_url: playback_uri + .map(|uri| format!("{}/{}/{}", media_base_url, &self.id, uri)), + orig_url: orig_uri.map(|uri| format!("{}/{}/{}", media_base_url, &self.id, uri)), } } @@ -122,51 +186,85 @@ impl models::MediaFile } } -impl models::MediaFileInsert -{ - pub fn from_proto3(v: &proto::MediaFile) -> DBResult - { +impl models::MediaFileInsert { + pub fn from_proto3(v: &proto::MediaFile) -> DBResult { Ok(Self { id: v.id.clone(), user_id: v.user_id.clone(), media_type: Some(v.media_type.clone()), - recompression_done: v.processing_metadata.as_ref().map(|m| m.recompression_done.as_ref().map(|x| proto3_to_datetime(x))).flatten().flatten(), - thumbs_done: v.processing_metadata.as_ref().map(|m| m.thumbs_done.as_ref().map(|x| proto3_to_datetime(x))).flatten().flatten(), + recompression_done: v + .processing_metadata + .as_ref() + .map(|m| m.recompression_done.as_ref().map(|x| proto3_to_datetime(x))) + .flatten() + .flatten(), + thumbs_done: v + .processing_metadata + .as_ref() + .map(|m| m.thumbs_done.as_ref().map(|x| proto3_to_datetime(x))) + .flatten() + .flatten(), has_thumbnail: v.preview_data.as_ref().map(|d| d.thumb_url.is_some()), - thumb_sheet_cols: v.preview_data.as_ref().map(|d| d.thumb_sheet.as_ref().map(|x| x.cols as i32)).flatten(), - 
thumb_sheet_rows: v.preview_data.as_ref().map(|d| d.thumb_sheet.as_ref().map(|x| x.rows as i32)).flatten(), - orig_filename: v.processing_metadata.as_ref().map(|m| m.orig_filename.clone()), + thumb_sheet_cols: v + .preview_data + .as_ref() + .map(|d| d.thumb_sheet.as_ref().map(|x| x.cols as i32)) + .flatten(), + thumb_sheet_rows: v + .preview_data + .as_ref() + .map(|d| d.thumb_sheet.as_ref().map(|x| x.rows as i32)) + .flatten(), + orig_filename: v + .processing_metadata + .as_ref() + .map(|m| m.orig_filename.clone()), title: v.title.clone(), total_frames: v.duration.as_ref().map(|d| d.total_frames as i32), duration: v.duration.as_ref().map(|d| d.duration as f32), fps: v.duration.as_ref().map(|d| d.fps.clone()), - raw_metadata_all: v.processing_metadata.as_ref().map(|m| m.ffprobe_metadata_all.clone()).flatten(), - default_subtitle_id: v.default_subtitle_id.as_ref().map(|id| id.parse().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid default_subtitle_id")))).transpose()?, + raw_metadata_all: v + .processing_metadata + .as_ref() + .map(|m| m.ffprobe_metadata_all.clone()) + .flatten(), + default_subtitle_id: v + .default_subtitle_id + .as_ref() + .map(|id| { + id.parse() + .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid default_subtitle_id"))) + }) + .transpose()?, }) } } // ============================ Subtitles ============================ -impl models::Subtitle -{ - pub fn from_proto3(v: &proto::Subtitle) -> DBResult - { - let added_time = v.added_time.as_ref().ok_or(anyhow::anyhow!("Missing added_time timestamp"))?; +impl models::Subtitle { + pub fn from_proto3(v: &proto::Subtitle) -> DBResult { + let added_time = v + .added_time + .as_ref() + .ok_or(anyhow::anyhow!("Missing added_time timestamp"))?; Ok(Self { - id: v.id.parse().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid subtitle ID")))?, + id: v + .id + .parse() + .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid subtitle ID")))?, media_file_id: v.media_file_id.clone(), title: v.title.clone(), language_code: v.language_code.clone(), filename: v.playback_url.split('/').last().map(|s| s.to_string()), orig_filename: v.orig_filename.clone(), - added_time: proto3_to_datetime(added_time).ok_or(anyhow::anyhow!("Invalid 'added_time' timestamp"))?, + added_time: proto3_to_datetime(added_time) + .ok_or(anyhow::anyhow!("Invalid 'added_time' timestamp"))?, time_offset: v.time_offset, }) } - pub fn to_proto3(&self, media_base_url: &str) -> proto::Subtitle - { + pub fn to_proto3(&self, media_base_url: &str) -> proto::Subtitle { let base = format!("{}/{}", media_base_url, &self.media_file_id); let orig_url = format!("{}/subs/orig/{}", base, &self.orig_filename); let playback_url = match &self.filename { @@ -187,11 +285,8 @@ impl models::Subtitle } } - -impl models::SubtitleInsert -{ - pub fn from_proto3(s: &proto::Subtitle) -> DBResult - { +impl models::SubtitleInsert { + pub fn from_proto3(s: &proto::Subtitle) -> DBResult { if s.id != String::default() { return Err(DBError::Other(anyhow::anyhow!("Subtitle ID must be empty for conversion to SubtitleInsert, which doesn't have 'id' field"))); } @@ -206,33 +301,46 @@ impl models::SubtitleInsert } } - // ============================ Comment ============================ -impl models::Comment -{ - pub fn from_proto3(c: &proto::Comment) -> DBResult - { +impl models::Comment { + pub fn from_proto3(c: &proto::Comment) -> DBResult { //let user = v.user.as_ref().ok_or(anyhow::anyhow!("Missing user"))?; - let created = c.created.as_ref().ok_or(anyhow::anyhow!("Missing created timestamp"))?; + 
let created = c + .created + .as_ref() + .ok_or(anyhow::anyhow!("Missing created timestamp"))?; Ok(Self { - id: c.id.parse().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid comment ID")))?, + id: c + .id + .parse() + .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid comment ID")))?, media_file_id: c.media_file_id.clone(), user_id: c.user_id.clone(), username_ifnull: c.username_ifnull.clone(), comment: c.comment.clone(), timecode: c.timecode.clone(), - parent_id: c.parent_id.as_ref().map(|id| id.parse()).transpose().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid parent ID")))?, - created: proto3_to_datetime(created).ok_or(anyhow::anyhow!("Invalid 'created' timestamp"))?, + parent_id: c + .parent_id + .as_ref() + .map(|id| id.parse()) + .transpose() + .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid parent ID")))?, + created: proto3_to_datetime(created) + .ok_or(anyhow::anyhow!("Invalid 'created' timestamp"))?, edited: c.edited.as_ref().map(|t| proto3_to_datetime(t)).flatten(), drawing: c.drawing.clone(), - subtitle_id: c.subtitle_id.as_ref().map(|id| id.parse()).transpose().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid subtitle ID")))?, + subtitle_id: c + .subtitle_id + .as_ref() + .map(|id| id.parse()) + .transpose() + .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid subtitle ID")))?, subtitle_filename_ifnull: c.subtitle_filename_ifnull.clone(), }) } - pub fn to_proto3(&self) -> proto::Comment - { + pub fn to_proto3(&self) -> proto::Comment { let created_timestamp = Some(datetime_to_proto3(&self.created)); let edited_timestamp = self.edited.map(|edited| datetime_to_proto3(&edited)); @@ -253,10 +361,8 @@ impl models::Comment } } -impl models::CommentInsert -{ - pub fn from_proto3(c: &proto::Comment) -> DBResult - { +impl models::CommentInsert { + pub fn from_proto3(c: &proto::Comment) -> DBResult { if c.id != String::default() { return Err(DBError::Other(anyhow::anyhow!("Comment ID must be empty for conversion to CommentInsert, which doesn't have 'id' field"))); } @@ -266,9 +372,19 @@ impl models::CommentInsert username_ifnull: c.username_ifnull.clone(), comment: c.comment.clone(), timecode: c.timecode.clone(), - parent_id: c.parent_id.as_ref().map(|id| id.parse()).transpose().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid parent ID")))?, + parent_id: c + .parent_id + .as_ref() + .map(|id| id.parse()) + .transpose() + .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid parent ID")))?, drawing: c.drawing.clone(), - subtitle_id: c.subtitle_id.as_ref().map(|id| id.parse()).transpose().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid subtitle ID")))?, + subtitle_id: c + .subtitle_id + .as_ref() + .map(|id| id.parse()) + .transpose() + .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid subtitle ID")))?, subtitle_filename_ifnull: c.subtitle_filename_ifnull.clone(), }) } @@ -276,40 +392,69 @@ impl models::CommentInsert // ============================ Message ============================ -impl models::Message -{ - pub fn from_proto3(v: &proto::UserMessage) -> DBResult - { - let created = v.created.as_ref().ok_or(anyhow::anyhow!("Missing created timestamp"))?; +impl models::Message { + pub fn from_proto3(v: &proto::UserMessage) -> DBResult { + let created = v + .created + .as_ref() + .ok_or(anyhow::anyhow!("Missing created timestamp"))?; let user_id = v.user_id.as_ref().ok_or(anyhow::anyhow!("Missing user"))?; let id = v.id.as_ref().ok_or(anyhow::anyhow!("Missing message ID"))?; Ok(Self { - id: id.parse().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid 
message ID")))?, + id: id + .parse() + .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid message ID")))?, event_name: proto_msg_type_to_event_name(v.r#type()).to_string(), user_id: user_id.clone(), media_file_id: v.refs.as_ref().map(|r| r.media_file_id.clone()).flatten(), - comment_id: v.refs.as_ref().map(|r| r.comment_id.as_ref().map(|id| id.parse()).transpose().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid comment ID")))).transpose()?.flatten(), - subtitle_id: v.refs.as_ref().map(|r| r.comment_id.as_ref().map(|id| id.parse()).transpose().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid subtitle ID")))).transpose()?.flatten(), + comment_id: v + .refs + .as_ref() + .map(|r| { + r.comment_id + .as_ref() + .map(|id| id.parse()) + .transpose() + .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid comment ID"))) + }) + .transpose()? + .flatten(), + subtitle_id: v + .refs + .as_ref() + .map(|r| { + r.comment_id + .as_ref() + .map(|id| id.parse()) + .transpose() + .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid subtitle ID"))) + }) + .transpose()? + .flatten(), message: v.message.clone(), details: v.details.clone().unwrap_or_default(), - created: proto3_to_datetime(created).ok_or(anyhow::anyhow!("Invalid 'created' timestamp"))?, + created: proto3_to_datetime(created) + .ok_or(anyhow::anyhow!("Invalid 'created' timestamp"))?, seen: v.seen, }) } - pub fn to_proto3(&self) -> proto::UserMessage - { + pub fn to_proto3(&self) -> proto::UserMessage { proto::UserMessage { id: Some(self.id.to_string()), r#type: msg_event_name_to_proto_msg_type(&self.event_name.as_str()).into(), user_id: Some(self.user_id.clone()), - refs:Some(proto::user_message::Refs { + refs: Some(proto::user_message::Refs { media_file_id: self.media_file_id.clone(), comment_id: self.comment_id.map(|id| id.to_string()), subtitle_id: self.subtitle_id.map(|id| id.to_string()), }), message: self.message.clone(), - details: if self.details.is_empty() { None } else { Some(self.details.clone()) }, + details: if self.details.is_empty() { + None + } else { + Some(self.details.clone()) + }, created: Some(datetime_to_proto3(&self.created)), seen: self.seen, progress: None, @@ -317,10 +462,8 @@ impl models::Message } } -impl models::MessageInsert -{ - pub fn from_proto3(v: &proto::UserMessage) -> DBResult - { +impl models::MessageInsert { + pub fn from_proto3(v: &proto::UserMessage) -> DBResult { if v.id.is_some() { return Err(DBError::Other(anyhow::anyhow!("Message ID must be empty for conversion to MessageInsert, which doesn't have 'id' field"))); } @@ -330,27 +473,52 @@ impl models::MessageInsert event_name: proto_msg_type_to_event_name(v.r#type()).to_string(), user_id: user_id.clone(), media_file_id: v.refs.as_ref().map(|r| r.media_file_id.clone()).flatten(), - comment_id: v.refs.as_ref().map(|r| r.comment_id.as_ref().map(|id| id.parse()).transpose().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid comment ID")))).transpose()?.flatten(), - subtitle_id: v.refs.as_ref().map(|r| r.comment_id.as_ref().map(|id| id.parse()).transpose().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid subtitle ID")))).transpose()?.flatten(), + comment_id: v + .refs + .as_ref() + .map(|r| { + r.comment_id + .as_ref() + .map(|id| id.parse()) + .transpose() + .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid comment ID"))) + }) + .transpose()? 
+ .flatten(), + subtitle_id: v + .refs + .as_ref() + .map(|r| { + r.comment_id + .as_ref() + .map(|id| id.parse()) + .transpose() + .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid subtitle ID"))) + }) + .transpose()? + .flatten(), message: v.message.clone(), details: v.details.clone().unwrap_or_default(), seen: v.seen, }) } - pub fn to_proto3(&self) -> proto::UserMessage - { + pub fn to_proto3(&self) -> proto::UserMessage { proto::UserMessage { id: None, r#type: msg_event_name_to_proto_msg_type(&self.event_name.as_str()).into(), user_id: Some(self.user_id.clone()), - refs:Some(proto::user_message::Refs { + refs: Some(proto::user_message::Refs { media_file_id: self.media_file_id.clone(), comment_id: self.comment_id.map(|id| id.to_string()), subtitle_id: self.subtitle_id.map(|id| id.to_string()), }), message: self.message.clone(), - details: if self.details.is_empty() { None } else { Some(self.details.clone()) }, + details: if self.details.is_empty() { + None + } else { + Some(self.details.clone()) + }, created: None, seen: self.seen, progress: None, diff --git a/server/src/grpc/grpc_client.rs b/server/src/grpc/grpc_client.rs index 6504370d..e31afad1 100644 --- a/server/src/grpc/grpc_client.rs +++ b/server/src/grpc/grpc_client.rs @@ -1,13 +1,12 @@ use std::path::{Path, PathBuf}; -use lib_clapshot_grpc::{unix_socket, subprocess::spawn_shell, subprocess::ProcHandle}; use lib_clapshot_grpc::proto::org::organizer_inbound_client::OrganizerInboundClient; +use lib_clapshot_grpc::{subprocess::spawn_shell, subprocess::ProcHandle, unix_socket}; -use anyhow::{Context, bail}; -use tonic::transport::{Endpoint, Channel}; +use anyhow::{bail, Context}; +use tonic::transport::{Channel, Endpoint}; use tracing::info_span; - pub type OrganizerConnection = OrganizerInboundClient; #[derive(Debug, Clone)] @@ -18,19 +17,17 @@ pub enum OrganizerURI { /// Connect to a gRPC server, either via a Unix socket or HTTP(S). /// Plain path string means Unix socket, "http://..." or "https://..." means HTTP(S). -pub async fn connect(uri: OrganizerURI) -> anyhow::Result -{ +pub async fn connect(uri: OrganizerURI) -> anyhow::Result { let channel = match uri { - OrganizerURI::UnixSocket(path) => - { + OrganizerURI::UnixSocket(path) => { unix_socket::wait_for(&path, 5.0).await?; // For tonic 0.13.1, create a custom connector that wraps UnixStream #[cfg(unix)] { + use hyper_util::rt::TokioIo; use tokio::net::UnixStream; use tower::service_fn; - use hyper_util::rt::TokioIo; - + let path_clone = path.clone(); Endpoint::try_from("http://[::]:50051")? .connect_timeout(std::time::Duration::from_secs(8)) @@ -48,26 +45,25 @@ pub async fn connect(uri: OrganizerURI) -> anyhow::Result { anyhow::bail!("Unix sockets are not supported on this platform") } - }, - OrganizerURI::Http(uri) => - { - Channel::from_shared(uri.to_string()).context("Failed to parse organizer HTTP URI")? - .connect_timeout(std::time::Duration::from_secs(8)) - .connect().await.context("HTTP Channel::connect failed")? - }, + } + OrganizerURI::Http(uri) => Channel::from_shared(uri.to_string()) + .context("Failed to parse organizer HTTP URI")? 
+ .connect_timeout(std::time::Duration::from_secs(8)) + .connect() + .await + .context("HTTP Channel::connect failed")?, }; Ok(OrganizerInboundClient::new(channel)) } /// Parse Organizer plugin arguments and spawn it if necessary pub fn prepare_organizer( - org_uri: &Option, - cmd: &Option, - level: tracing::Level, - json: bool, - data_dir: &Path) - -> anyhow::Result<(Option, Option)> -{ + org_uri: &Option, + cmd: &Option, + level: tracing::Level, + json: bool, + data_dir: &Path, +) -> anyhow::Result<(Option, Option)> { assert!(tracing::Level::TRACE > tracing::Level::DEBUG); let debug = level >= tracing::Level::DEBUG; @@ -80,39 +76,51 @@ pub fn prepare_organizer( Some((pcol, _)) => bail!("Unsupported gRPC protocol: {}", pcol), }), }; - let org_hdl = - if let Some(cmd) = cmd { - // Use a temp sock if none was given - if org_uri.is_none() { - let unix_sock = data_dir - .canonicalize().context("Expanding data dir")? - .join("grpc-srv-to-org.sock"); - org_uri = Some(OrganizerURI::UnixSocket(unix_sock)); - }; - Some(spawn_organizer(&cmd.as_str(), org_uri.clone().unwrap(), debug, json)?) - } else { None }; + let org_hdl = if let Some(cmd) = cmd { + // Use a temp sock if none was given + if org_uri.is_none() { + let unix_sock = data_dir + .canonicalize() + .context("Expanding data dir")? + .join("grpc-srv-to-org.sock"); + org_uri = Some(OrganizerURI::UnixSocket(unix_sock)); + }; + Some(spawn_organizer( + &cmd.as_str(), + org_uri.clone().unwrap(), + debug, + json, + )?) + } else { + None + }; Ok((org_uri, org_hdl)) } /// Spawn organizer gRPC server as a subprocess. /// Dropping the returned handle will signal/kill the subprocess. -fn spawn_organizer(cmd: &str, uri: OrganizerURI, debug: bool, json: bool) - -> anyhow::Result -{ +fn spawn_organizer( + cmd: &str, + uri: OrganizerURI, + debug: bool, + json: bool, +) -> anyhow::Result { assert!(cmd != "", "Empty organizer command"); let mut cmd = match uri { OrganizerURI::UnixSocket(path) => { unix_socket::delete_old(&path)?; format!("{} {}", cmd, path.display()) - }, - OrganizerURI::Http(_) => { - cmd.into() - }, + } + OrganizerURI::Http(_) => cmd.into(), }; - if debug { cmd += " --debug"; } - if json { cmd += " --json"; } + if debug { + cmd += " --debug"; + } + if json { + cmd += " --json"; + } spawn_shell(&cmd, "organizer", info_span!("ORG")) } diff --git a/server/src/grpc/grpc_impl_helpers.rs b/server/src/grpc/grpc_impl_helpers.rs index 78a33616..bac40a71 100644 --- a/server/src/grpc/grpc_impl_helpers.rs +++ b/server/src/grpc/grpc_impl_helpers.rs @@ -1,21 +1,31 @@ +use crate::database::error::DBError; use std::num::NonZeroU32; use tonic::Status; -use crate::database::error::DBError; use lib_clapshot_grpc::proto::org; - -pub (crate) fn rpc_expect_field<'a, T> (fld: &'a Option, name: &'a str) -> tonic::Result<&'a T, Status> { +pub(crate) fn rpc_expect_field<'a, T>( + fld: &'a Option, + name: &'a str, +) -> tonic::Result<&'a T, Status> { match fld { Some(f) => Ok(f), - None => return Err(Status::invalid_argument(format!("Missing '{}' field", name))), + None => { + return Err(Status::invalid_argument(format!( + "Missing '{}' field", + name + ))) + } } } /// Emulate paging by taking a slice of the vector for database /// queries that don't support it. 
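// Sketch of the skip/take semantics paged_vec() implements (values here are
// illustrative): with page_num = 1 and page_size = 10, offset() is 10 and
// limit() is 10, so the call yields elements 10..20 of the input vector.
fn second_page(ids: Vec<i32>) -> Vec<i32> {
    let paging = crate::database::DBPaging {
        page_num: 1,
        page_size: std::num::NonZeroU32::new(10).unwrap(),
    };
    paged_vec(ids, paging)
}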
-pub (crate) fn paged_vec(v: Vec, p: crate::database::DBPaging) -> Vec { - v.into_iter().skip(p.offset() as usize).take(p.limit() as usize).collect() +pub(crate) fn paged_vec(v: Vec, p: crate::database::DBPaging) -> Vec { + v.into_iter() + .skip(p.offset() as usize) + .take(p.limit() as usize) + .collect() } /// Convert GRPC paging object to (type-safe) DB counterpart. @@ -32,7 +42,7 @@ impl TryInto for Option<&org::DbPaging> { page_num: p.page_num.into(), page_size, }) - }, + } None => Ok(crate::database::DBPaging::default()), } } @@ -43,7 +53,9 @@ impl From for Status { fn from(e: DBError) -> Self { match e { DBError::NotFound() => Status::not_found("DB item not found (on Server)"), - DBError::BackendError(e) => Status::internal(format!("DB backend error (on Server): {}", e)), + DBError::BackendError(e) => { + Status::internal(format!("DB backend error (on Server): {}", e)) + } DBError::Other(e) => Status::internal(format!("DB error (on Server): {}", e)), } } diff --git a/server/src/grpc/grpc_server.rs b/server/src/grpc/grpc_server.rs index 68bab841..8e1a886b 100644 --- a/server/src/grpc/grpc_server.rs +++ b/server/src/grpc/grpc_server.rs @@ -1,12 +1,21 @@ -use std::{path::Path, sync::atomic::Ordering::Relaxed}; +use crate::database::models; +use crate::grpc::db_models::proto_msg_type_to_event_name; +use crate::{ + api_server::{server_state::ServerState, ws_handers::del_media_file_and_cleanup, SendTo}, + client_cmd, + database::{DbBasicQuery, DbQueryByMediaFile, DbQueryByUser, DbUpdate}, + grpc::grpc_impl_helpers::{paged_vec, rpc_expect_field}, + optional_str_to_i32_or_tonic_error, str_to_i32_or_tonic_error, +}; use anyhow::Context; +use std::{path::Path, sync::atomic::Ordering::Relaxed}; use tonic::{Request, Response, Status}; -use crate::{api_server::{server_state::ServerState, ws_handers::del_media_file_and_cleanup, SendTo}, client_cmd, database::{DbBasicQuery, DbQueryByMediaFile, DbQueryByUser, DbUpdate}, grpc::grpc_impl_helpers::{paged_vec, rpc_expect_field}, optional_str_to_i32_or_tonic_error, str_to_i32_or_tonic_error}; -use crate::grpc::db_models::proto_msg_type_to_event_name; -use crate::database::models; -use lib_clapshot_grpc::{proto::{self}, run_organizer_outbound_grpc_server, GrpcBindAddr, RpcResult}; use lib_clapshot_grpc::proto::org; +use lib_clapshot_grpc::{ + proto::{self}, + run_organizer_outbound_grpc_server, GrpcBindAddr, RpcResult, +}; pub struct OrganizerOutboundImpl { server: ServerState, @@ -15,48 +24,67 @@ pub struct OrganizerOutboundImpl { // Implement RCP methods for Organizer -> Server #[tonic::async_trait] -impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl -{ - async fn handshake(&self, req: tonic::Request) -> RpcResult - { +impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl { + async fn handshake(&self, req: tonic::Request) -> RpcResult { tracing::debug!("org->srv handshake received"); - self.server.organizer_info.lock().await.replace(req.into_inner()); + self.server + .organizer_info + .lock() + .await + .replace(req.into_inner()); self.server.organizer_has_connected.store(true, Relaxed); Ok(Response::new(proto::Empty {})) } - async fn client_define_actions(&self, req: Request) -> RpcResult - { + async fn client_define_actions( + &self, + req: Request, + ) -> RpcResult { let req = req.into_inner(); - to_rpc_empty(self.server.emit_cmd(client_cmd!(DefineActions, {actions: req.actions}), SendTo::UserSession(&req.sid))) + to_rpc_empty(self.server.emit_cmd( + client_cmd!(DefineActions, {actions: 
req.actions}), + SendTo::UserSession(&req.sid), + )) } - async fn client_show_page(&self, req: Request) -> RpcResult - { + async fn client_show_page( + &self, + req: Request, + ) -> RpcResult { let req = req.into_inner(); - to_rpc_empty(self.server.emit_cmd(client_cmd!(ShowPage, { - page_items: req.page_items, - page_id: req.page_id, - page_title: req.page_title, - }), SendTo::UserSession(&req.sid))) + to_rpc_empty(self.server.emit_cmd( + client_cmd!(ShowPage, { + page_items: req.page_items, + page_id: req.page_id, + page_title: req.page_title, + }), + SendTo::UserSession(&req.sid), + )) } /// Send a message to one or more user sessions. - async fn client_show_user_message(&self, req: Request) -> RpcResult - { - use org::client_show_user_message_request::Recipient; + async fn client_show_user_message( + &self, + req: Request, + ) -> RpcResult { use crate::api_server::SendTo; + use org::client_show_user_message_request::Recipient; let req = req.into_inner(); - let msg_in = req.msg.map_or_else(|| return Err(Status::invalid_argument("No message specified")), Ok)?; - let recipient = req.recipient.ok_or_else(|| Status::invalid_argument("No recipient specified"))?; + let msg_in = req.msg.map_or_else( + || return Err(Status::invalid_argument("No message specified")), + Ok, + )?; + let recipient = req + .recipient + .ok_or_else(|| Status::invalid_argument("No recipient specified"))?; let (media_file_id, comment_id, subtitle_id) = match &msg_in.refs { Some(refs) => ( refs.media_file_id.clone(), optional_str_to_i32_or_tonic_error!(refs.comment_id)?, - optional_str_to_i32_or_tonic_error!(refs.subtitle_id)? + optional_str_to_i32_or_tonic_error!(refs.subtitle_id)?, ), None => (None, None, None), }; @@ -72,7 +100,8 @@ impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl message: msg_in.message.clone(), details: msg_in.details.clone().unwrap_or_default(), }; - self.server.push_notify_message(&msg, to, persist, msg_in.progress) + self.server + .push_notify_message(&msg, to, persist, msg_in.progress) }; let res = match recipient { @@ -82,30 +111,48 @@ impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl } else { Err(anyhow::anyhow!("Session not found")) } - }, - Recipient::UserTemp(username) => { send_msg(&username, SendTo::UserId(&username), false) }, - Recipient::UserPersist(username) => { send_msg(&username, SendTo::UserId(&username), true) }, - Recipient::MediaFileId(id) => { send_msg(&id, SendTo::MediaFileId(&id), false) }, - Recipient::CollabSession(csi) => { send_msg(&csi, SendTo::Collab(&csi), false) }, + } + Recipient::UserTemp(username) => send_msg(&username, SendTo::UserId(&username), false), + Recipient::UserPersist(username) => { + send_msg(&username, SendTo::UserId(&username), true) + } + Recipient::MediaFileId(id) => send_msg(&id, SendTo::MediaFileId(&id), false), + Recipient::CollabSession(csi) => send_msg(&csi, SendTo::Collab(&csi), false), }; to_rpc_empty(res) } - async fn client_open_media_file(&self, req: Request) -> RpcResult - { + async fn client_open_media_file( + &self, + req: Request, + ) -> RpcResult { let req = req.into_inner(); - to_rpc_empty(crate::api_server::ws_handers::send_open_media_file_cmd(&self.server, &req.sid, &req.id).await) + to_rpc_empty( + crate::api_server::ws_handers::send_open_media_file_cmd( + &self.server, + &req.sid, + &req.id, + ) + .await, + ) } - async fn client_set_cookies(&self, req: Request) -> RpcResult - { + async fn client_set_cookies( + &self, + req: Request, + ) -> RpcResult { let req = 
req.into_inner(); - to_rpc_empty(self.server.emit_cmd(client_cmd!(SetCookies, {cookies: req.cookies, expire_time: req.expire_time}), SendTo::UserSession(&req.sid))) + to_rpc_empty(self.server.emit_cmd( + client_cmd!(SetCookies, {cookies: req.cookies, expire_time: req.expire_time}), + SendTo::UserSession(&req.sid), + )) } - async fn delete_media_file(&self, req: Request) -> RpcResult - { + async fn delete_media_file( + &self, + req: Request, + ) -> RpcResult { let req = req.into_inner(); to_rpc_empty(del_media_file_and_cleanup(req.id.as_str(), None, &self.server).await) } @@ -116,22 +163,25 @@ impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl // (These aggregate a lot of filtering and paging functionality into a relatively // few RPC calls, so there's quite a bit of matching and dense logic here.) - - async fn db_get_media_files(&self, req: Request) -> RpcResult - { + async fn db_get_media_files( + &self, + req: Request, + ) -> RpcResult { use org::db_get_media_files_request::Filter; let req = req.into_inner(); let db = self.server.db.clone(); let pg = req.paging.as_ref().try_into()?; let conn = &mut db.conn()?; let items = match rpc_expect_field(&req.filter, "filter")? { - Filter::All(_) => { models::MediaFile::get_all(conn, pg)? }, - Filter::Ids(ids) => { paged_vec(models::MediaFile::get_many(conn, &ids.ids)?, pg) }, - Filter::UserId(user_id) => { models::MediaFile::get_by_user(conn, &user_id, pg)? }, + Filter::All(_) => models::MediaFile::get_all(conn, pg)?, + Filter::Ids(ids) => paged_vec(models::MediaFile::get_many(conn, &ids.ids)?, pg), + Filter::UserId(user_id) => models::MediaFile::get_by_user(conn, &user_id, pg)?, }; let mut proto_items = Vec::with_capacity(items.len()); - for mf in items { proto_items.push(mf.to_proto3(&self.server.media_base_url, mf.get_subtitles(conn)?)); } + for mf in items { + proto_items.push(mf.to_proto3(&self.server.media_base_url, mf.get_subtitles(conn)?)); + } Ok(Response::new(org::DbMediaFileList { items: proto_items, @@ -139,9 +189,10 @@ impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl })) } - - async fn db_get_comments(&self, req: Request) -> RpcResult - { + async fn db_get_comments( + &self, + req: Request, + ) -> RpcResult { use org::db_get_comments_request::Filter; let req = req.into_inner(); let db = self.server.db.clone(); @@ -149,13 +200,19 @@ impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl let conn = &mut db.conn()?; let items = match rpc_expect_field(&req.filter, "filter")? { - Filter::All(_) => { models::Comment::get_all(conn, pg)? }, + Filter::All(_) => models::Comment::get_all(conn, pg)?, Filter::Ids(ids) => { - let ids = ids.ids.iter().map(|comment_id| str_to_i32_or_tonic_error!(comment_id)).collect::, _>>()?; + let ids = ids + .ids + .iter() + .map(|comment_id| str_to_i32_or_tonic_error!(comment_id)) + .collect::, _>>()?; paged_vec(models::Comment::get_many(conn, &ids)?, pg) - }, - Filter::UserId(user_id) => { models::Comment::get_by_user(conn, user_id, pg)? }, - Filter::MediaFileId(media_file_id) => { models::Comment::get_by_media_file(conn, media_file_id, pg)? }, + } + Filter::UserId(user_id) => models::Comment::get_by_user(conn, user_id, pg)?, + Filter::MediaFileId(media_file_id) => { + models::Comment::get_by_media_file(conn, media_file_id, pg)? 
+ } }; Ok(Response::new(org::DbCommentList { items: items.into_iter().map(|c| c.to_proto3()).collect(), @@ -163,23 +220,32 @@ impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl })) } - - async fn db_get_user_messages(&self, req: Request) -> RpcResult - { + async fn db_get_user_messages( + &self, + req: Request, + ) -> RpcResult { use org::db_get_user_messages_request::Filter; let req = req.into_inner(); let db = self.server.db.clone(); let pg = req.paging.as_ref().try_into()?; let conn = &mut db.conn()?; let items = match rpc_expect_field(&req.filter, "filter")? { - Filter::All(_) => { models::Message::get_all(conn, pg)? }, + Filter::All(_) => models::Message::get_all(conn, pg)?, Filter::Ids(ids) => { - let ids = ids.ids.iter().map(|message_id| str_to_i32_or_tonic_error!(message_id)).collect::, _>>()?; + let ids = ids + .ids + .iter() + .map(|message_id| str_to_i32_or_tonic_error!(message_id)) + .collect::, _>>()?; paged_vec(models::Message::get_many(conn, ids.as_slice())?, pg) - }, - Filter::UserId(user_id) => { models::Message::get_by_user(conn, user_id, pg)? }, - Filter::MediaFileId(media_file_id) => { models::Message::get_by_media_file(conn, media_file_id, pg)? }, - Filter::CommentId(comment_id) => { models::Message::get_by_comment(conn, str_to_i32_or_tonic_error!(comment_id)?)? }, + } + Filter::UserId(user_id) => models::Message::get_by_user(conn, user_id, pg)?, + Filter::MediaFileId(media_file_id) => { + models::Message::get_by_media_file(conn, media_file_id, pg)? + } + Filter::CommentId(comment_id) => { + models::Message::get_by_comment(conn, str_to_i32_or_tonic_error!(comment_id)?)? + } }; Ok(Response::new(org::DbUserMessageList { items: items.into_iter().map(|m| m.to_proto3()).collect(), @@ -187,83 +253,121 @@ impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl })) } - - async fn db_upsert(&self, req: Request) -> RpcResult - { + async fn db_upsert( + &self, + req: Request, + ) -> RpcResult { let req = req.into_inner(); macro_rules! 
upsert_type { - ([$db:expr, $input_items:expr, $model:ty, $ins_model:ty, $id_missing:expr, $to_proto:expr]) => { - { - let inserts = $input_items.iter().filter(|it| $id_missing(it)) - .map(|it| <$ins_model>::from_proto3(it)) - .collect::, _>>()?; - - let updates = $input_items.iter().filter(|it| !$id_missing(it)) - .map(|it| <$model>::from_proto3(it)) - .collect::, _>>()?; - - // Perform database operations - let ins_res = <$model>::insert_many($db, &inserts)?; - let upd_res = <$model>::update_many($db, &updates)?; - - if ins_res.len() + upd_res.len() != $input_items.len() { - return Err(Status::internal("Database upsert returned unexpected number of results")); - } - - // Combine the results in the original order - let mut ins_iter = ins_res.into_iter(); - let mut upd_iter = upd_res.into_iter(); - let res_comb_orig_order = $input_items.iter().map(|it| { + ([$db:expr, $input_items:expr, $model:ty, $ins_model:ty, $id_missing:expr, $to_proto:expr]) => {{ + let inserts = $input_items + .iter() + .filter(|it| $id_missing(it)) + .map(|it| <$ins_model>::from_proto3(it)) + .collect::, _>>()?; + + let updates = $input_items + .iter() + .filter(|it| !$id_missing(it)) + .map(|it| <$model>::from_proto3(it)) + .collect::, _>>()?; + + // Perform database operations + let ins_res = <$model>::insert_many($db, &inserts)?; + let upd_res = <$model>::update_many($db, &updates)?; + + if ins_res.len() + upd_res.len() != $input_items.len() { + return Err(Status::internal( + "Database upsert returned unexpected number of results", + )); + } + + // Combine the results in the original order + let mut ins_iter = ins_res.into_iter(); + let mut upd_iter = upd_res.into_iter(); + let res_comb_orig_order = $input_items + .iter() + .map(|it| { if $id_missing(it) { ins_iter.next().expect("Insert result missing") } else { upd_iter.next().expect("Update result missing") } - }).collect::>(); - - // Convert back to proto3 - res_comb_orig_order.iter().map(|it| $to_proto(it)).collect::, tonic::Status>>() - } - } + }) + .collect::>(); + + // Convert back to proto3 + res_comb_orig_order + .iter() + .map(|it| $to_proto(it)) + .collect::, tonic::Status>>() + }}; } let conn = &mut self.server.db.conn()?; Ok(Response::new(org::DbUpsertResponse { media_files: upsert_type!([ - conn, req.media_files, models::MediaFile, models::MediaFileInsert, + conn, + req.media_files, + models::MediaFile, + models::MediaFileInsert, |it: &proto::MediaFile| it.id.is_empty(), - |it: &models::MediaFile| Ok(it.to_proto3(self.server.media_base_url.as_str(), it.get_subtitles(conn)?))])?, + |it: &models::MediaFile| Ok( + it.to_proto3(self.server.media_base_url.as_str(), it.get_subtitles(conn)?) 
+ ) + ])?, comments: upsert_type!([ - conn, req.comments, models::Comment, models::CommentInsert, + conn, + req.comments, + models::Comment, + models::CommentInsert, |it: &proto::Comment| it.id.is_empty(), - |it: &models::Comment| Ok(it.to_proto3())])?, + |it: &models::Comment| Ok(it.to_proto3()) + ])?, user_messages: upsert_type!([ - conn, req.user_messages, models::Message, models::MessageInsert, + conn, + req.user_messages, + models::Message, + models::MessageInsert, |it: &proto::UserMessage| it.id.is_none(), - |it: &models::Message| Ok(it.to_proto3())])?, + |it: &models::Message| Ok(it.to_proto3()) + ])?, subtitles: upsert_type!([ - conn, req.subtitles, models::Subtitle, models::SubtitleInsert, + conn, + req.subtitles, + models::Subtitle, + models::SubtitleInsert, |it: &proto::Subtitle| it.id.is_empty(), - |it: &models::Subtitle| Ok(it.to_proto3(self.server.media_base_url.as_str()))])?, + |it: &models::Subtitle| Ok(it.to_proto3(self.server.media_base_url.as_str())) + ])?, })) } - async fn db_delete(&self, req: Request) -> RpcResult - { + async fn db_delete( + &self, + req: Request, + ) -> RpcResult { let req = req.into_inner(); macro_rules! delete_type { - ([$db:expr, $input_ids:expr, $id_type:ty, $model:ty]) => { - { - use std::str::FromStr; - let ids = $input_ids.iter().map(|s| <$id_type>::from_str(&s) + ([$db:expr, $input_ids:expr, $id_type:ty, $model:ty]) => {{ + use std::str::FromStr; + let ids = $input_ids + .iter() + .map(|s| { + <$id_type>::from_str(&s) .map_err(|e| Status::invalid_argument(format!("Invalid ID: {}", e))) - ).collect::, _>>()?; - <$model>::delete_many($db, ids.as_slice())? as u32 - } - } + }) + .collect::, _>>()?; + <$model>::delete_many($db, ids.as_slice())? as u32 + }}; } let conn = &mut self.server.db.conn()?; Ok(Response::new(org::DbDeleteResponse { - media_files_deleted: delete_type!([conn, req.media_file_ids, String, models::MediaFile]), + media_files_deleted: delete_type!([ + conn, + req.media_file_ids, + String, + models::MediaFile + ]), subtitles_deleted: delete_type!([conn, req.subtitle_ids, i32, models::Subtitle]), comments_deleted: delete_type!([conn, req.comment_ids, i32, models::Comment]), user_messages_deleted: delete_type!([conn, req.user_message_ids, i32, models::Message]), @@ -271,9 +375,9 @@ impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl } } - fn to_rpc_empty(res: Result) -> RpcResult - where E: std::fmt::Display, +where + E: std::fmt::Display, { match res { Ok(_) => Ok(Response::new(proto::Empty {})), @@ -281,24 +385,37 @@ fn to_rpc_empty(res: Result) -> RpcResult } } - -pub async fn run_org_to_srv_grpc_server(bind: GrpcBindAddr, server: ServerState) -> anyhow::Result<()> -{ +pub async fn run_org_to_srv_grpc_server( + bind: GrpcBindAddr, + server: ServerState, +) -> anyhow::Result<()> { let span = tracing::info_span!("gRPC server for org->srv"); let terminate_flag = server.terminate_flag.clone(); let server_listening_flag = server.grpc_srv_listening_flag.clone(); - let service = org::organizer_outbound_server::OrganizerOutboundServer::new(OrganizerOutboundImpl { server }); + let service = + org::organizer_outbound_server::OrganizerOutboundServer::new(OrganizerOutboundImpl { + server, + }); - run_organizer_outbound_grpc_server(bind, service, span, server_listening_flag, terminate_flag).await + run_organizer_outbound_grpc_server(bind, service, span, server_listening_flag, terminate_flag) + .await } -pub fn make_grpc_server_bind(tcp: &Option, data_dir: &Path) -> anyhow::Result -{ +pub fn make_grpc_server_bind( + 
tcp: &Option, + data_dir: &Path, +) -> anyhow::Result { match tcp { - None => Ok(GrpcBindAddr::Unix(data_dir - .canonicalize().context("Expanding data dir")? - .join("grpc-org-to-srv.sock").into())), - Some(s) => Ok(GrpcBindAddr::Tcp(s.parse().context("Parsing TCP listen address")?)), + None => Ok(GrpcBindAddr::Unix( + data_dir + .canonicalize() + .context("Expanding data dir")? + .join("grpc-org-to-srv.sock") + .into(), + )), + Some(s) => Ok(GrpcBindAddr::Tcp( + s.parse().context("Parsing TCP listen address")?, + )), } } diff --git a/server/src/grpc/mod.rs b/server/src/grpc/mod.rs index 349c79f1..f791ead9 100644 --- a/server/src/grpc/mod.rs +++ b/server/src/grpc/mod.rs @@ -1,12 +1,11 @@ -pub mod grpc_client; pub mod caller; -pub mod grpc_server; -pub mod grpc_impl_helpers; pub mod db_models; +pub mod grpc_client; +pub mod grpc_impl_helpers; +pub mod grpc_server; -use std::collections::HashMap; use lib_clapshot_grpc::proto; - +use std::collections::HashMap; // Helper macro to simplify creation of ServerToClientCmd messages. // Prost/Tonic syntax is a bit verbose. @@ -23,15 +22,33 @@ macro_rules! client_cmd { // Proto3 objects use string for many IDs that are integers in DB. Helper to convert them. #[macro_export] macro_rules! str_to_i32_or_tonic_error { - ($r:expr) => { $r.parse::().map_err(|e| tonic::Status::invalid_argument(format!("Could not parse {} as int: {}", stringify!($r), e))) }; + ($r:expr) => { + $r.parse::().map_err(|e| { + tonic::Status::invalid_argument(format!( + "Could not parse {} as int: {}", + stringify!($r), + e + )) + }) + }; } #[macro_export] macro_rules! optional_str_to_i32_or_tonic_error { - ($r:expr) => { $r.as_ref().map(|v| v.parse::().map_err(|e| tonic::Status::invalid_argument(format!("Could not parse {} as int: {}", stringify!($r), e)))).transpose() }; + ($r:expr) => { + $r.as_ref() + .map(|v| { + v.parse::().map_err(|e| { + tonic::Status::invalid_argument(format!( + "Could not parse {} as int: {}", + stringify!($r), + e + )) + }) + }) + .transpose() + }; } - - /// Convert database time to protobuf3 pub fn datetime_to_proto3(dt: &chrono::NaiveDateTime) -> pbjson_types::Timestamp { pbjson_types::Timestamp { @@ -44,7 +61,7 @@ pub fn proto3_to_datetime(ts: &pbjson_types::Timestamp) -> Option HashMap { +pub(crate) fn make_media_file_popup_actions() -> HashMap { HashMap::from([ ("popup_builtin_rename".into(), make_builtin_rename_action()), ("popup_builtin_trash".into(), make_builting_trash_action()), @@ -52,12 +69,14 @@ pub (crate) fn make_media_file_popup_actions() -> HashMap proto::ActionDef { - proto::ActionDef { + proto::ActionDef { ui_props: Some(proto::ActionUiProps { label: Some(format!("Rename")), icon: Some(proto::Icon { src: Some(proto::icon::Src::FaClass(proto::icon::FaClass { - classes: "fa fa-edit".into(), color: None, })), + classes: "fa fa-edit".into(), + color: None, + })), ..Default::default() }), key_shortcut: Some("F2".into()), @@ -79,8 +98,10 @@ if (new_name && new_name != old_name) { alert("Unknown item type in rename action. 
Please report this bug."); } } - "#.trim().into() - }) + "# + .trim() + .into(), + }), } } @@ -129,19 +150,28 @@ if (confirm(msg)) { } } - - /// Convert a list of database MediaFiles to a protobuf3 PageItem (FolderListing) -pub (crate) fn folder_listing_for_media_files(media_files: &[proto::MediaFile]) -> proto::PageItem { - let media_files: Vec = media_files.iter().map(|v| { +pub(crate) fn folder_listing_for_media_files(media_files: &[proto::MediaFile]) -> proto::PageItem { + let media_files: Vec = media_files + .iter() + .map(|v| { proto::page_item::folder_listing::Item { - item: Some(proto::page_item::folder_listing::item::Item::MediaFile(v.clone())), + item: Some(proto::page_item::folder_listing::item::Item::MediaFile( + v.clone(), + )), open_action: Some(proto::ScriptCall { lang: proto::script_call::Lang::Javascript.into(), - code: format!("clapshot.openMediaFile(\"{}\")", v.id).into() + code: format!("clapshot.openMediaFile(\"{}\")", v.id).into(), }), popup_actions: vec!["popup_builtin_rename".into(), "popup_builtin_trash".into()], - vis: if v.preview_data.as_ref().and_then(|pv| pv.thumb_url.as_ref()).is_some() { None } else { + vis: if v + .preview_data + .as_ref() + .and_then(|pv| pv.thumb_url.as_ref()) + .is_some() + { + None + } else { // If no thumbnail, show an icon based on media type instead Some(proto::page_item::folder_listing::item::Visualization { icon: Some(proto::Icon { @@ -151,14 +181,18 @@ pub (crate) fn folder_listing_for_media_files(media_files: &[proto::MediaFile]) "image" => "fas fa-image", "video" => "fas fa-video", _ => "fa fa-circle-question", - }.into(), color: None, })), + } + .into(), + color: None, + })), ..Default::default() }), ..Default::default() }) }, } - }).collect(); + }) + .collect(); proto::PageItem { item: Some(proto::page_item::Item::FolderListing( @@ -167,6 +201,7 @@ pub (crate) fn folder_listing_for_media_files(media_files: &[proto::MediaFile]) allow_reordering: false, allow_upload: true, ..Default::default() - })), + }, + )), } } diff --git a/server/src/lib.rs b/server/src/lib.rs index 8f4e5d03..a5efecee 100644 --- a/server/src/lib.rs +++ b/server/src/lib.rs @@ -1,24 +1,41 @@ -use std::{collections::HashMap, path::PathBuf, sync::{atomic::{AtomicBool, Ordering}, Arc}, thread::{self, JoinHandle}}; - +use std::{ + collections::HashMap, + path::PathBuf, + sync::{ + atomic::{AtomicBool, Ordering}, + Arc, + }, + thread::{self, JoinHandle}, +}; + +use crate::{ + api_server::server_state::ServerState, + grpc::{caller::OrganizerCaller, grpc_client::OrganizerURI}, +}; use anyhow::Context; -use database::{db_backup::{backup_sqlite_database, restore_sqlite_database}, migration_solver::MigrationGraphModule, sqlite_foreign_key_check, DB}; -use lib_clapshot_grpc::{proto::org::{self, Migration}, GrpcBindAddr}; -use crate::{api_server::server_state::ServerState, grpc::{caller::OrganizerCaller, grpc_client::OrganizerURI}}; +use database::{ + db_backup::{backup_sqlite_database, restore_sqlite_database}, + migration_solver::MigrationGraphModule, + sqlite_foreign_key_check, DB, +}; +use lib_clapshot_grpc::{ + proto::org::{self, Migration}, + GrpcBindAddr, +}; use anyhow::bail; -pub mod video_pipeline; pub mod api_server; pub mod database; -pub mod tests; pub mod grpc; pub mod storage; +pub mod tests; +pub mod video_pipeline; pub const PKG_VERSION: &'static str = env!("CARGO_PKG_VERSION"); pub const PKG_NAME: &'static str = env!("CARGO_PKG_NAME"); -const SERVER_MODULE_NAME: &str = "clapshot.server"; // Name for migrations solver - +const SERVER_MODULE_NAME: &str = 
"clapshot.server"; // Name for migrations solver pub struct ClapshotInit { terminate_flag: Arc, @@ -27,7 +44,6 @@ pub struct ClapshotInit { } impl ClapshotInit { - /// Initialize clapshot and spawn all worker threads. pub fn init_and_spawn_workers( data_dir: std::path::PathBuf, @@ -48,12 +64,11 @@ impl ClapshotInit { thumbnail_script: String, org_http_headers_regex: regex::Regex, storage: crate::storage::StorageBackend, - terminate_flag: Arc) - -> anyhow::Result - { + terminate_flag: Arc, + ) -> anyhow::Result { + use crossbeam_channel::unbounded; use signal_hook::consts::TERM_SIGNALS; - use signal_hook::flag; - use crossbeam_channel::unbounded; // Work queue + use signal_hook::flag; // Work queue let _span = tracing::info_span!("INIT").entered(); @@ -82,7 +97,8 @@ impl ClapshotInit { let (user_msg_tx, user_msg_rx) = unbounded::(); let (upload_tx, upload_rx) = unbounded::(); let api_thread = Some({ - let server = ServerState::new( db.clone(), + let server = ServerState::new( + db.clone(), &data_dir.join("videos"), &data_dir.join("upload"), &url_base, @@ -91,10 +107,26 @@ impl ClapshotInit { grpc_srv_listening_flag.clone(), default_user, terminate_flag.clone(), - org_http_headers_regex); - let grpc_srv = if (&organizer_uri).is_some() { Some(grpc_server_bind.clone()) } else { None }; + org_http_headers_regex, + ); + let grpc_srv = if (&organizer_uri).is_some() { + Some(grpc_server_bind.clone()) + } else { + None + }; let ub = url_base.clone(); - thread::spawn(move || { api_server::run_forever(user_msg_rx, grpc_srv, upload_tx, bind_api.to_string(), ub, cors_origins, server, port) }) + thread::spawn(move || { + api_server::run_forever( + user_msg_rx, + grpc_srv, + upload_tx, + bind_api.to_string(), + ub, + cors_origins, + server, + port, + ) + }) }); // Handshake Organizer if configured @@ -111,7 +143,12 @@ impl ClapshotInit { // Ok, organizer should be able to connect back to us now, so handshake let org = OrganizerCaller::new(&ouri); tracing::info!("Connecting gRPC srv->org..."); - org.blocking_handshake_organizer(&data_dir, &url_base, &db_file, &grpc_server_bind)?; + org.blocking_handshake_organizer( + &data_dir, + &url_base, + &db_file, + &grpc_server_bind, + )?; tracing::debug!("srv->org handshake done."); } None => { @@ -126,18 +163,34 @@ impl ClapshotInit { let ths = thumbnail_script.clone(); let vpp_thread = Some({ let db = db.clone(); - thread::spawn(move || { video_pipeline::run_forever( - db, tf.clone(), dd, storage.clone(), user_msg_tx, poll_interval, resubmit_delay, target_bitrate, upload_rx, n_workers, ingest_username_from, ts, ths)}) + thread::spawn(move || { + video_pipeline::run_forever( + db, + tf.clone(), + dd, + storage.clone(), + user_msg_tx, + poll_interval, + resubmit_delay, + target_bitrate, + upload_rx, + n_workers, + ingest_username_from, + ts, + ths, + ) + }) }); - - Ok(ClapshotInit {terminate_flag, api_thread, vpp_thread}) + Ok(ClapshotInit { + terminate_flag, + api_thread, + vpp_thread, + }) } - /// Block until the terminate flag is set - pub fn wait_for_termination(&mut self) -> anyhow::Result<()> - { + pub fn wait_for_termination(&mut self) -> anyhow::Result<()> { // Loop forever, abort on SIGINT/SIGTERM or if child threads die while !self.terminate_flag.load(Ordering::Relaxed) { thread::sleep(std::time::Duration::from_secs(1)); @@ -147,22 +200,26 @@ impl ClapshotInit { } tracing::info!("Got kill signal. 
Cleaning up."); - self.vpp_thread.take().and_then(|t| t.join().ok()).expect("VPP thread failed"); - self.api_thread.take().and_then(|t| t.join().ok()).expect("API thread failed"); + self.vpp_thread + .take() + .and_then(|t| t.join().ok()) + .expect("VPP thread failed"); + self.api_thread + .take() + .and_then(|t| t.join().ok()) + .expect("API thread failed"); Ok(()) } } - - /// Find migrations from server and organizer, solve their dependencies, and apply them. /// Backup before starting, and restore if foreign key checks fail after applying the migrations. -fn migrate_db( db_file: &PathBuf, org_uri: &Option) -> anyhow::Result<()> -{ +fn migrate_db(db_file: &PathBuf, org_uri: &Option) -> anyhow::Result<()> { use lib_clapshot_grpc::proto::org::CheckMigrationsRequest; let _span = tracing::info_span!("migrate_db").entered(); - let db: Arc = Arc::new(database::DB::open_db_file(&db_file).context("Error opening DB file")?); + let db: Arc = + Arc::new(database::DB::open_db_file(&db_file).context("Error opening DB file")?); let cur_server_migration = db.latest_applied_server_migration_name()?; let pending_server_migrations = db.pending_server_migrations()?; @@ -174,44 +231,57 @@ fn migrate_db( db_file: &PathBuf, org_uri: &Option) -> anyhow::Res // Represent server migrations for solver let server_module = { let mut prev_ver: Option = cur_server_migration.clone(); - let server_migs = pending_server_migrations.iter().map(|(m_name, m_version)| { - let mig = Migration { - uuid: m_name.clone(), - version: m_version.clone(), - dependencies: vec![lib_clapshot_grpc::proto::org::migration::Dependency { - name: SERVER_MODULE_NAME.to_string(), - min_ver: prev_ver.clone(), - max_ver: prev_ver.clone(), - }], - description: "".to_string(), - }; - prev_ver = Some(m_version.clone()); - mig - }).collect::>(); - - tracing::debug!("Clapshot server has {} pending migrations.", server_migs.len()); + let server_migs = pending_server_migrations + .iter() + .map(|(m_name, m_version)| { + let mig = Migration { + uuid: m_name.clone(), + version: m_version.clone(), + dependencies: vec![lib_clapshot_grpc::proto::org::migration::Dependency { + name: SERVER_MODULE_NAME.to_string(), + min_ver: prev_ver.clone(), + max_ver: prev_ver.clone(), + }], + description: "".to_string(), + }; + prev_ver = Some(m_version.clone()); + mig + }) + .collect::>(); + + tracing::debug!( + "Clapshot server has {} pending migrations.", + server_migs.len() + ); MigrationGraphModule { name: SERVER_MODULE_NAME.to_string(), cur_version: cur_server_migration.clone(), - migrations: server_migs + migrations: server_migs, } }; - let mut migration_modules: Vec = vec![ server_module ]; + let mut migration_modules: Vec = vec![server_module]; let org_db_info = Some(org::Database { r#type: org::database::DatabaseType::Sqlite.into(), - endpoint: db_file.canonicalize()?.to_str().ok_or( - anyhow::anyhow!("Sqlite path is not valid UTF-8"))?.into() + endpoint: db_file + .canonicalize()? + .to_str() + .ok_or(anyhow::anyhow!("Sqlite path is not valid UTF-8"))? 
+ .into(), }); // Add Organizer and its migrations, if available if let Some(uri) = org_uri { let caller = OrganizerCaller::new(uri); - let (rt, mut org_conn) = caller.tokio_connect().context("Error connecting to Organizer")?; + let (rt, mut org_conn) = caller + .tokio_connect() + .context("Error connecting to Organizer")?; tracing::debug!("Calling check_migrations on Organizer."); - match rt.block_on(org_conn.check_migrations(CheckMigrationsRequest { db: org_db_info.clone() })) { + match rt.block_on(org_conn.check_migrations(CheckMigrationsRequest { + db: org_db_info.clone(), + })) { Ok(cm_res) => { let migrations = cm_res.get_ref().pending_migrations.clone(); tracing::debug!("Organizer has {} pending migrations.", migrations.len()); @@ -221,37 +291,59 @@ fn migrate_db( db_file: &PathBuf, org_uri: &Option) -> anyhow::Res migrations, }); } - Err(e) => { - match e.code() { - tonic::Code::NotFound => { tracing::info!("No pending migrations from Organizer."); }, - tonic::Code::Unimplemented => { tracing::info!("Organizer does not implement migrations. Ignoring."); }, - _ => { anyhow::bail!("Error checking Organizer migrations: {:?}", e); } + Err(e) => match e.code() { + tonic::Code::NotFound => { + tracing::info!("No pending migrations from Organizer."); } - } + tonic::Code::Unimplemented => { + tracing::info!("Organizer does not implement migrations. Ignoring."); + } + _ => { + anyhow::bail!("Error checking Organizer migrations: {:?}", e); + } + }, } }; - match migration_modules.iter().map(|m| m.migrations.len()).sum::() { + match migration_modules + .iter() + .map(|m| m.migrations.len()) + .sum::() + { 0 => { tracing::info!("No pending migrations."); return Ok(()); - }, - n => { tracing::debug!("Total {} migrations to consider. Solving dependencies.", n); } + } + n => { + tracing::debug!("Total {} migrations to consider. Solving dependencies.", n); + } } // Solve migration order - let migration_order = database::migration_solver::solve_migration_graph(migration_modules.iter().collect())?; + let migration_order = + database::migration_solver::solve_migration_graph(migration_modules.iter().collect())?; match migration_order { None => { - tracing::error!("Failed to solve migration dependencies. List of considered migrations:"); + tracing::error!( + "Failed to solve migration dependencies. 
List of considered migrations:" + ); for m in migration_modules { - tracing::error!("Module: '{}': current version: '{:?}'", &m.name, &m.cur_version); + tracing::error!( + "Module: '{}': current version: '{:?}'", + &m.name, + &m.cur_version + ); for mig in &m.migrations { - tracing::error!(" - '{}', brings version to '{}' depends on: '{:?}')", mig.uuid, mig.version, mig.dependencies); + tracing::error!( + " - '{}', brings version to '{}' depends on: '{:?}')", + mig.uuid, + mig.version, + mig.dependencies + ); } } bail!("Cannot proceed with migrations due to unsolvable dependencies."); - }, + } // Solver returned a list of migrations to apply Some(order) => { if order.is_empty() { @@ -260,25 +352,38 @@ fn migrate_db( db_file: &PathBuf, org_uri: &Option) -> anyhow::Res } tracing::info!("Migration plan created."); - drop(db); // Close before backup + drop(db); // Close before backup let db_backup_file = backup_sqlite_database(db_file.into())?; - let db: Arc = Arc::new(database::DB::open_db_file(&db_file).context("Error opening DB file")?); - match apply_migrations(&migration_modules, &order, &db, db_file, org_uri,org_db_info.clone()) - .and_then(|_| { db.conn().context("Error opening DB connection after migrations") }) - .and_then(|mut conn| { sqlite_foreign_key_check(&mut conn, true).context("Foreign key checks failed after migrations") }) - { + let db: Arc = + Arc::new(database::DB::open_db_file(&db_file).context("Error opening DB file")?); + match apply_migrations( + &migration_modules, + &order, + &db, + db_file, + org_uri, + org_db_info.clone(), + ) + .and_then(|_| { + db.conn() + .context("Error opening DB connection after migrations") + }) + .and_then(|mut conn| { + sqlite_foreign_key_check(&mut conn, true) + .context("Foreign key checks failed after migrations") + }) { Ok(_) => { tracing::info!("Migrations applied successfully. Foreign keys checked Ok."); return Ok(()); - }, + } Err(e) => { - drop (db); // Close before restore + drop(db); // Close before restore tracing::error!(error=%e, "Migration failure. Restoring DB from the backup."); match db_backup_file { None => { tracing::warn_span!("No backup file found. Skipping restore. This usually means DB was missing before migrations. If that's the case, delete the dangling DB before trying again."); - }, + } Some(db_backup_file) => { restore_sqlite_database(db_file.into(), db_backup_file) .context("Error restoring DB after failed migrations")?; @@ -289,13 +394,12 @@ fn migrate_db( db_file: &PathBuf, org_uri: &Option) -> anyhow::Res } } } - }, + } } Ok(()) } - /// Execute given migration plan. 
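 /// Organizer-owned migrations are applied by calling the Organizer over gRPC;
 /// any failure aborts the remaining plan so the caller can restore the backup.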
fn apply_migrations( migration_modules: &Vec, @@ -303,14 +407,18 @@ fn apply_migrations( db: &Arc, db_file: &PathBuf, org_uri: &Option, - org_db_info: Option -) -> Result<(), anyhow::Error> -{ + org_db_info: Option, +) -> Result<(), anyhow::Error> { use lib_clapshot_grpc::proto::org::ApplyMigrationRequest; - let uuid_to_mod: HashMap = migration_modules.iter().flat_map(|m| { - m.migrations.iter().map(|mig| (mig.uuid.clone(), m.name.clone())) - }).collect(); + let uuid_to_mod: HashMap = migration_modules + .iter() + .flat_map(|m| { + m.migrations + .iter() + .map(|mig| (mig.uuid.clone(), m.name.clone())) + }) + .collect(); for mig in plan { match uuid_to_mod.get(mig.uuid.as_str()) { @@ -324,30 +432,48 @@ fn apply_migrations( } // Organizer Some(module_name) => { - let _span = tracing::info_span!("apply org migration", name=mig.uuid, new_ver=mig.version, org=module_name).entered(); + let _span = tracing::info_span!( + "apply org migration", + name = mig.uuid, + new_ver = mig.version, + org = module_name + ) + .entered(); tracing::info!("Applying on Organizer..."); if let Some(uri) = org_uri { - let (rt, mut org_conn) = OrganizerCaller::new(uri).tokio_connect() + let (rt, mut org_conn) = OrganizerCaller::new(uri) + .tokio_connect() .context("Error connecting to organizer for migrations")?; rt.block_on(org_conn.apply_migration(ApplyMigrationRequest { db: org_db_info.clone(), - uuid: mig.uuid.clone() - })).map_err(|e| anyhow::anyhow!("Error applying organizer migration '{}': {:?}", &mig.uuid, e))?; + uuid: mig.uuid.clone(), + })) + .map_err(|e| { + anyhow::anyhow!( + "Error applying organizer migration '{}': {:?}", + &mig.uuid, + e + ) + })?; } else { - bail!("Organizer migration '{}' found but no organizer URI to connect to.", &mig.uuid); + bail!( + "Organizer migration '{}' found but no organizer URI to connect to.", + &mig.uuid + ); } - }, + } None => { - bail!("Migration '{}' not found in modules. This should not happen.", mig.uuid); + bail!( + "Migration '{}' not found in modules. This should not happen.", + mig.uuid + ); } } } Ok(()) } - - pub fn run_clapshot( data_dir: std::path::PathBuf, migrate: bool, @@ -368,7 +494,6 @@ pub fn run_clapshot( org_http_headers_regex: regex::Regex, storage: crate::storage::StorageBackend, ) -> anyhow::Result<()> { - let terminate_flag = Arc::new(AtomicBool::new(false)); // Initialize clapshot @@ -391,7 +516,7 @@ pub fn run_clapshot( thumbnail_script, org_http_headers_regex, storage, - terminate_flag.clone() + terminate_flag.clone(), )?; // Wait until termination diff --git a/server/src/log.rs b/server/src/log.rs index e080792b..10130817 100644 --- a/server/src/log.rs +++ b/server/src/log.rs @@ -1,3 +1,4 @@ +use signal_hook::{consts::SIGUSR1, iterator::Signals}; use std::{ fs::OpenOptions, io::{self, stdout, Write}, @@ -5,9 +6,8 @@ use std::{ sync::{Arc, Mutex}, thread, }; -use signal_hook::{consts::SIGUSR1, iterator::Signals}; use tracing::subscriber::set_global_default; -use tracing_subscriber::{fmt, EnvFilter, fmt::time::OffsetTime}; +use tracing_subscriber::{fmt, fmt::time::OffsetTime, EnvFilter}; /// Custom logger with the ability to write to a file or stdout. /// It supports transparent file reopen on SIGUSR1 (for `logrotate`), @@ -17,14 +17,18 @@ pub struct ClapshotLogger { pub _guard: tracing_appender::non_blocking::WorkerGuard, } -impl ClapshotLogger -{ +impl ClapshotLogger { /// Create a new Logger instance. /// - `time_offset`: Time offset for the log timestamps. /// - `level`: Tracing level to log. 
/// - `log_file`: Path to the log file or "-" for stdout. /// - `json_log`: Enable or disable JSON formatted logging. - pub fn new(time_offset: time::UtcOffset, level: tracing::Level, log_file: &str, json_log: bool) -> anyhow::Result { + pub fn new( + time_offset: time::UtcOffset, + level: tracing::Level, + log_file: &str, + json_log: bool, + ) -> anyhow::Result { let log_writer = Arc::new(Mutex::new(None)); let log_to_stdout = log_file.is_empty() || log_file == "-"; @@ -39,7 +43,8 @@ impl ClapshotLogger let log_writer_cloned = log_writer.clone(); thread::spawn(move || { for _ in signals.forever() { - let mut log_writer = log_writer_cloned.lock().expect("Failed to lock log writer"); + let mut log_writer = + log_writer_cloned.lock().expect("Failed to lock log writer"); if let Some(file) = log_writer.as_mut() { file.sync_and_reopen().expect("Failed to reopen log file"); } @@ -50,12 +55,17 @@ impl ClapshotLogger if std::env::var_os("RUST_LOG").is_none() { std::env::set_var( - "RUST_LOG", match level { + "RUST_LOG", + match level { tracing::Level::ERROR => "error", tracing::Level::WARN => "warn", tracing::Level::INFO => "info,clapshot_server=info", - tracing::Level::DEBUG => "debug,clapshot_server=debug,h2=info,hyper::proto::h1=info", - tracing::Level::TRACE => "trace,clapshot_server=trace,h2=debug,hyper::proto::h1=debug,async_io=debug", + tracing::Level::DEBUG => { + "debug,clapshot_server=debug,h2=info,hyper::proto::h1=info" + } + tracing::Level::TRACE => { + "trace,clapshot_server=trace,h2=debug,hyper::proto::h1=debug,async_io=debug" + } }, ); } @@ -67,13 +77,11 @@ impl ClapshotLogger (true, _) => "[year]-[month]-[day]T[hour]:[minute]:[second].[subsecond digits:4][offset_hour sign:mandatory]:[offset_minute]", }; - let time_format = time::format_description::parse( - if json_log { - "[unix_timestamp].[subsecond digits:4]]" - } else { - iso_fmt - }, - ) + let time_format = time::format_description::parse(if json_log { + "[unix_timestamp].[subsecond digits:4]]" + } else { + iso_fmt + }) .expect("invalid time format"); let timer = OffsetTime::new(time_offset, time_format); @@ -95,11 +103,13 @@ impl ClapshotLogger } .expect("tracing::subscriber::set_global_default failed"); - Ok(ClapshotLogger { _log_writer: log_writer, _guard: guard }) + Ok(ClapshotLogger { + _log_writer: log_writer, + _guard: guard, + }) } } - /// ReopenableFileWriter provides functionality to write to a file /// that can be reopened, allowing for log rotation without losing log entries. pub struct ReopenableFileWriter { @@ -114,7 +124,11 @@ impl ReopenableFileWriter { } fn open_file(path: &PathBuf) -> io::Result { - OpenOptions::new().create(true).write(true).append(true).open(path) + OpenOptions::new() + .create(true) + .write(true) + .append(true) + .open(path) } /// Sync the current log file to disk and reopen it under a new file descriptor. 
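    /// Intended to be triggered from the SIGUSR1 handler, after `logrotate`
    /// (or the rotation test below) has moved the old log file aside.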
@@ -130,40 +144,52 @@ impl ReopenableFileWriter { impl Clone for ReopenableFileWriter { fn clone(&self) -> Self { - Self { file: self.file.clone(), path: self.path.clone() } + Self { + file: self.file.clone(), + path: self.path.clone(), + } } } impl Write for ReopenableFileWriter { fn write(&mut self, buf: &[u8]) -> io::Result { let mut file_lock = self.file.lock().unwrap(); - if let Some(file) = file_lock.as_mut() { file.write(buf) } else { Ok(0) } + if let Some(file) = file_lock.as_mut() { + file.write(buf) + } else { + Ok(0) + } } fn flush(&mut self) -> io::Result<()> { let mut file_lock = self.file.lock().unwrap(); - if let Some(file) = file_lock.as_mut() { file.flush() } else { Ok(()) } + if let Some(file) = file_lock.as_mut() { + file.flush() + } else { + Ok(()) + } } } - #[test] fn test_log_rotation_on_sigusr1() { - use std::{ - fs::File, - io::Read, - sync::Arc, - thread, - time::Duration, - }; use assert_fs::TempDir; + use std::{fs::File, io::Read, sync::Arc, thread, time::Duration}; let log_dir = TempDir::new().expect("Failed to create temp dir"); let log_file = log_dir.path().join("test_log.log"); let log_file_backup = log_dir.path().join("test_log_backup.log"); let time_offset = time::UtcOffset::from_whole_seconds(0).unwrap(); - let logger = Arc::new(ClapshotLogger::new(time_offset, tracing::Level::DEBUG, log_file.to_str().unwrap(), false).expect("Failed to setup logger")); + let logger = Arc::new( + ClapshotLogger::new( + time_offset, + tracing::Level::DEBUG, + log_file.to_str().unwrap(), + false, + ) + .expect("Failed to setup logger"), + ); tracing::info!("Logging before rotation"); @@ -186,12 +212,25 @@ fn test_log_rotation_on_sigusr1() { let mut old_log_content = String::new(); let mut old_log_file = File::open(&log_file_backup).expect("Failed to open old log file"); - old_log_file.read_to_string(&mut old_log_content).expect("Failed to read old log file"); - assert!(old_log_content.contains("Logging before rotation"), "Old log file does not contain the expected log entry"); - assert!(!old_log_content.contains("Logging after rotation"), "Old log file contains the second log entry"); + old_log_file + .read_to_string(&mut old_log_content) + .expect("Failed to read old log file"); + assert!( + old_log_content.contains("Logging before rotation"), + "Old log file does not contain the expected log entry" + ); + assert!( + !old_log_content.contains("Logging after rotation"), + "Old log file contains the second log entry" + ); let mut new_log_content = String::new(); let mut new_log_file = File::open(&log_file).expect("Failed to open new log file"); - new_log_file.read_to_string(&mut new_log_content).expect("Failed to read new log file"); - assert!(new_log_content.contains("Logging after rotation"), "New log file does not contain the expected log entry"); + new_log_file + .read_to_string(&mut new_log_content) + .expect("Failed to read new log file"); + assert!( + new_log_content.contains("Logging after rotation"), + "New log file does not contain the expected log entry" + ); } diff --git a/server/src/main.rs b/server/src/main.rs index 5354a5d0..a832660a 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -3,12 +3,15 @@ use clap::Parser; use clapshot_server::{ api_server::validate_org_http_headers_regex, grpc::{grpc_client::prepare_organizer, grpc_server::make_grpc_server_bind}, - run_clapshot, storage::StorageBackend, PKG_NAME, PKG_VERSION, + run_clapshot, + storage::StorageBackend, video_pipeline::IngestUsernameFrom, + PKG_NAME, PKG_VERSION, }; -use std::{path::PathBuf, 
sync::Arc, str::FromStr}; -use tracing::error; +use http::Uri; use indoc::indoc; +use std::{path::PathBuf, str::FromStr, sync::Arc}; +use tracing::error; mod log; @@ -29,17 +32,16 @@ mod log; )] struct Args { /// Directory for database, /incoming, /videos and /rejected - #[arg(short='D', long, required=true, value_name="DIR" )] + #[arg(short = 'D', long, required = true, value_name = "DIR")] data_dir: PathBuf, /// Base URL of the API server, e.g. `https://clapshot.example.com`. /// This depends on your proxy server, and is usually different from `--host` and `--port`. - #[arg(short='U', long, required=true, value_name="URL")] + #[arg(short = 'U', long, required = true, value_name = "URL")] url_base: String, - /// TCP port to listen on - #[arg(short='p', long, default_value_t = 8095)] + #[arg(short = 'p', long, default_value_t = 8095)] port: u16, /// Host to listen on @@ -48,31 +50,28 @@ struct Args { /// Allowed CORS Origins, separated by commas. /// Defaults to the value of `url_base`. - #[arg(long, value_name="ORIGINS")] + #[arg(long, value_name = "ORIGINS")] cors: Option, - /// Polling interval for incoming folder - #[arg(short='P', long, default_value_t = 3.0, value_name="SECONDS")] + #[arg(short = 'P', long, default_value_t = 3.0, value_name = "SECONDS")] poll: f32, /// Max number of workers for media file processing /// (0 = number of CPU cores) - #[arg(short, long, default_value_t = 0, value_name="NUM")] + #[arg(short, long, default_value_t = 0, value_name = "NUM")] workers: usize, /// Target (max) bitrate for transcoding, in Mbps - #[arg(short, long, default_value_t = 2.5, value_name="MBITS")] + #[arg(short, long, default_value_t = 2.5, value_name = "MBITS")] bitrate: f32, - /// Migrate database to latest version. Makes an automatic backup. #[arg(long)] migrate: bool, - /// Log to file instead of stdout - #[arg(short, long, value_name="FILE")] + #[arg(short, long, value_name = "FILE")] log: Option, /// Set debug level by repeating (-d = debug, -dd = trace) @@ -80,84 +79,89 @@ struct Args { debug: u8, // Enable debug mode (same as -v) - /// Log in JSON format #[arg(short, long)] json: bool, - /// Use this user id if auth headers are not found. /// Mainly useful for debugging. - #[arg(long, default_value="anonymous", value_name="USER")] + #[arg(long, default_value = "anonymous", value_name = "USER")] default_user: String, /// How to determine username for files in incoming/ folder. /// 'file-owner' uses filesystem ownership, 'folder-name' uses first subfolder name. - #[arg(long, default_value="file-owner", value_name="METHOD")] + #[arg(long, default_value = "file-owner", value_name = "METHOD")] ingest_username_from: String, - /// Shell command to start Organizer plugin. /// The command should block until SIGTERM, and log to stdout/stderr without timestamps. /// Unless --org-uri is a HTTP(S) URI, the command will get a Unix socket path as an argument when Clapshot server calls it. - #[arg(long, value_name="CMD")] - org_cmd: Option, // TODO: turn into a Vec to allow multiple plugins + #[arg(long, value_name = "CMD")] + org_cmd: Option, // TODO: turn into a Vec to allow multiple plugins /// Custom endpoint for srv->org connections. /// E.g. `/path/to/plugin.sock` or `http://[::1]:50051` /// If `--org-cmd` is given, this defaults to a temp .sock in datadir. - #[arg(long, value_name="URI")] + #[arg(long, value_name = "URI")] org_in_uri: Option, /// Listen in TCP address port for org->srv connections. /// Default is to use a Unix socket in datadir. E.g. 
`[::1]:50052` - #[arg(long, value_name="BIND")] + #[arg(long, value_name = "BIND")] org_out_tcp: Option, /// Path to custom transcoding script - #[arg(long, value_name="SCRIPT", default_value="scripts/clapshot-transcode")] + #[arg( + long, + value_name = "SCRIPT", + default_value = "scripts/clapshot-transcode" + )] transcode_script: String, /// Path to custom thumbnailing script - #[arg(long, value_name="SCRIPT", default_value="scripts/clapshot-thumbnail")] + #[arg( + long, + value_name = "SCRIPT", + default_value = "scripts/clapshot-thumbnail" + )] thumbnail_script: String, /// Regular expression to filter HTTP headers passed to Organizer. /// Only headers matching this pattern will be included in UserSessionData. /// Case-insensitive matching. Default is disabled for security. - #[arg(long, value_name="REGEX", default_value="^$")] + #[arg(long, value_name = "REGEX", default_value = "^$")] org_http_headers: String, /// Storage backend (local or s3-compatible object storage) - #[arg(long, value_name="BACKEND", default_value="local")] + #[arg(long, value_name = "BACKEND", default_value = "local")] storage_backend: String, /// S3-compatible endpoint base URL, e.g. https://s3.example.com - #[arg(long, value_name="URL")] + #[arg(long, value_name = "URL")] s3_endpoint: Option, /// S3 region (required for S3 backend) - #[arg(long, value_name="REGION")] + #[arg(long, value_name = "REGION")] s3_region: Option, /// S3 bucket (required for S3 backend) - #[arg(long, value_name="BUCKET")] + #[arg(long, value_name = "BUCKET")] s3_bucket: Option, /// S3 access key (required for S3 backend) - #[arg(long, value_name="KEY")] + #[arg(long, value_name = "KEY")] s3_access_key: Option, /// S3 secret key (required for S3 backend) - #[arg(long, value_name="SECRET")] + #[arg(long, value_name = "SECRET")] s3_secret_key: Option, /// Path/prefix inside the bucket where media files are stored - #[arg(long, value_name="PREFIX", default_value="videos")] + #[arg(long, value_name = "PREFIX", default_value = "videos")] s3_prefix: String, /// Public base URL for accessing the bucket/prefix (used for playback URLs) - #[arg(long, value_name="URL")] + #[arg(long, value_name = "URL")] s3_public_url: Option, } @@ -202,7 +206,8 @@ fn main() -> anyhow::Result<()> { &args.data_dir, )?; - let cors_origins: Vec = args.cors + let cors_origins: Vec = args + .cors .map(|s| s.split(',').map(|s| s.trim().to_string()).collect()) .unwrap_or_default(); @@ -214,13 +219,37 @@ fn main() -> anyhow::Result<()> { let storage = match args.storage_backend.as_str() { "local" => StorageBackend::local(args.data_dir.join("videos"), &url_base), "s3" => { - let endpoint = args.s3_endpoint.clone().ok_or_else(|| anyhow::anyhow!("--s3-endpoint is required for S3 backend"))?; - let region = args.s3_region.clone().ok_or_else(|| anyhow::anyhow!("--s3-region is required for S3 backend"))?; - let bucket = args.s3_bucket.clone().ok_or_else(|| anyhow::anyhow!("--s3-bucket is required for S3 backend"))?; - let access_key = args.s3_access_key.clone().ok_or_else(|| anyhow::anyhow!("--s3-access-key is required for S3 backend"))?; - let secret_key = args.s3_secret_key.clone().ok_or_else(|| anyhow::anyhow!("--s3-secret-key is required for S3 backend"))?; - - let public_base_url = args.s3_public_url.clone() + let endpoint = args + .s3_endpoint + .clone() + .ok_or_else(|| anyhow::anyhow!("--s3-endpoint is required for S3 backend"))?; + let region = args + .s3_region + .clone() + .ok_or_else(|| anyhow::anyhow!("--s3-region is required for S3 backend"))?; + let bucket = 
args + .s3_bucket + .clone() + .ok_or_else(|| anyhow::anyhow!("--s3-bucket is required for S3 backend"))?; + let access_key = args + .s3_access_key + .clone() + .ok_or_else(|| anyhow::anyhow!("--s3-access-key is required for S3 backend"))?; + let secret_key = args + .s3_secret_key + .clone() + .ok_or_else(|| anyhow::anyhow!("--s3-secret-key is required for S3 backend"))?; + + let public_base_url = args + .s3_public_url + .clone() + .or_else(|| { + Uri::from_str(&endpoint).ok().and_then(|uri| { + let scheme = uri.scheme_str()?; + let authority = uri.authority()?; + Some(format!("{scheme}://{bucket}.{}", authority)) + }) + }) .unwrap_or_else(|| format!("{}/{}", endpoint.trim_end_matches('/'), bucket)); StorageBackend::s3( @@ -233,8 +262,11 @@ fn main() -> anyhow::Result<()> { args.s3_prefix.clone(), public_base_url, )? - }, - other => bail!("Unknown storage backend '{}'. Valid options: local, s3", other), + } + other => bail!( + "Unknown storage backend '{}'. Valid options: local, s3", + other + ), }; // Run the server (blocking) @@ -247,7 +279,11 @@ fn main() -> anyhow::Result<()> { args.port, org_uri, grpc_server_bind, - if args.workers == 0 { num_cpus::get() } else { args.workers }, + if args.workers == 0 { + num_cpus::get() + } else { + args.workers + }, target_bitrate, default_user, args.poll, diff --git a/server/src/storage.rs b/server/src/storage.rs index 6204223b..bc7f8b80 100644 --- a/server/src/storage.rs +++ b/server/src/storage.rs @@ -1,20 +1,40 @@ -use std::fs::File; -use std::io::Read; use std::path::{Path, PathBuf}; use std::str::FromStr; use std::sync::Arc; -use anyhow::{anyhow, bail, Context}; -use aws_sdk_s3::{config::Region, config::endpoint::Endpoint, primitives::ByteStream, Client, config::endpoint::ResolveEndpoint}; -use aws_sdk_s3::config::auth::{ParamsBuilder}; +use anyhow::{anyhow, Context}; +use aws_sdk_s3::config::endpoint::DefaultResolver; use aws_sdk_s3::config::Credentials; -use aws_sdk_s3::config::endpoint::{DefaultResolver, EndpointFuture, SharedEndpointResolver}; +use aws_sdk_s3::types::{CompletedMultipartUpload, CompletedPart}; +use aws_sdk_s3::{ + config::AsyncSleep, config::Region, config::SharedAsyncSleep, config::Sleep, + primitives::ByteStream, Client, +}; use http::Uri; -use mime::Params; +use tokio::fs; +use tokio::io::AsyncReadExt; use tokio::runtime::Runtime; +use tracing; + +pub type ProgressCallback = Arc; + +const MULTIPART_MIN_SIZE: u64 = 5 * 1024 * 1024; +const MULTIPART_CHUNK_SIZE: usize = 8 * 1024 * 1024; +#[derive(Debug)] +pub struct ForeverSleep; + +impl AsyncSleep for ForeverSleep { + fn sleep(&self, _duration: std::time::Duration) -> Sleep { + Sleep::new(std::future::pending()) + } +} /// Simple content type guessing for a handful of formats we serve. 
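 /// The result is used as the Content-Type when uploading media to object storage.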
fn guess_content_type(path: &Path) -> &'static str { - match path.extension().and_then(|e| e.to_str()).map(|s| s.to_ascii_lowercase()) { + match path + .extension() + .and_then(|e| e.to_str()) + .map(|s| s.to_ascii_lowercase()) + { Some(ext) if ext == "mp4" => "video/mp4", Some(ext) if ext == "mkv" => "video/x-matroska", Some(ext) if ext == "webm" => "video/webm", @@ -55,18 +75,22 @@ impl StorageBackend { prefix: String, public_base_url: String, ) -> anyhow::Result { - let media_base_url = format!("{}/{}", public_base_url.trim_end_matches('/'), prefix.trim_end_matches('/')); + let media_base_url = format!( + "{}/{}", + public_base_url.trim_end_matches('/'), + prefix.trim_end_matches('/') + ); let rt = Runtime::new().context("create tokio runtime for S3 client")?; let client = { let region = Region::new(region); let credentials = Credentials::new(access_key, secret_key, None, None, ""); - let url=match Uri::from_str(&endpoint){ + let _endpoint_uri = match Uri::from_str(&endpoint) { Ok(u) => u, Err(e) => return Err(anyhow!("failed to create uri: {}", e)), }; - let resolver=DefaultResolver::new(); + let resolver = DefaultResolver::new(); let cfg = rt.block_on(async { let base = aws_config::defaults(aws_config::BehaviorVersion::latest()) .region(region) @@ -76,7 +100,7 @@ impl StorageBackend { .await; aws_sdk_s3::config::Builder::from(&base) .endpoint_resolver(resolver) - .force_path_style(true) + .sleep_impl(SharedAsyncSleep::new(ForeverSleep)) .build() }); Client::from_conf(cfg) @@ -112,10 +136,7 @@ impl StorageBackend { /// Upload a file that lives under the media root. No-op for LocalFS. pub fn upload_local_path(&self, abs_path: &Path) -> anyhow::Result<()> { - match self { - StorageBackend::LocalFs(_) => Ok(()), - StorageBackend::S3(backend) => backend.upload(abs_path), - } + self.upload_with_progress(abs_path, None) } /// Upload file if it exists and log an error instead of bailing. @@ -132,9 +153,35 @@ impl StorageBackend { } } + /// Upload a file when object storage is enabled, and propagate failures. + pub fn upload_required(&self, abs_path: &Path) -> anyhow::Result<()> { + if !self.needs_remote_upload() { + return Ok(()); + } + self.upload_with_progress(abs_path, None) + } + + /// Upload a file, optionally reporting progress (0.0 - 1.0) while streaming to object storage. 
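+    /// For the local backend this is effectively a no-op: the callback, if any,
+    /// is invoked once with 1.0 and the call returns Ok.
+    ///
+    /// Minimal usage sketch, where `storage` and `abs_path` stand in for a
+    /// configured backend and a path under the media root, and the logging
+    /// callback is purely illustrative:
+    ///
+    /// ```ignore
+    /// let cb: ProgressCallback = Arc::new(|p| tracing::debug!("upload: {:.0}%", p * 100.0));
+    /// storage.upload_with_progress(&abs_path, Some(cb))?;
+    /// ```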
+    pub fn upload_with_progress(
+        &self,
+        abs_path: &Path,
+        progress: Option<ProgressCallback>,
+    ) -> anyhow::Result<()> {
+        match self {
+            StorageBackend::LocalFs(_) => {
+                if let Some(cb) = progress {
+                    cb(1.0);
+                }
+                Ok(())
+            }
+            StorageBackend::S3(backend) => backend.upload_with_progress(abs_path, progress),
+        }
+    }
+
     fn key_for_path(&self, abs_path: &Path) -> anyhow::Result<String> {
         let root = self.media_root();
-        let rel = abs_path.strip_prefix(root)
+        let rel = abs_path
+            .strip_prefix(root)
             .with_context(|| format!("Path '{:?}' not under media root '{:?}'", abs_path, root))?;
         let rel = rel.to_string_lossy().replace('\\', "/");
         let prefix = match self {
@@ -169,25 +216,152 @@ pub struct ObjectStorageBackend {
 }

 impl ObjectStorageBackend {
-    fn upload(&self, abs_path: &Path) -> anyhow::Result<()> {
+    fn upload_with_progress(
+        &self,
+        abs_path: &Path,
+        progress: Option<ProgressCallback>,
+    ) -> anyhow::Result<()> {
         let key = StorageBackend::S3(self.clone()).key_for_path(abs_path)?;
         let ct = guess_content_type(abs_path);

-        let mut file = File::open(abs_path).with_context(|| format!("Open file {:?}", abs_path))?;
-        let mut buffer = Vec::new();
-        file.read_to_end(&mut buffer)?;
-
-        self.rt.block_on(async {
-            let stream = ByteStream::from(buffer);
-            self.client
-                .put_object()
-                .bucket(&self.bucket)
+        let bucket = self.bucket.clone();
+        let client = self.client.clone();
+        let path = abs_path.to_path_buf();
+
+        self.rt.block_on(async move {
+            let mut file = fs::File::open(&path)
+                .await
+                .with_context(|| format!("Open file {:?}", path))?;
+            let meta = file.metadata().await?;
+            let total_len = meta.len();
+
+            if total_len == 0 {
+                if let Some(cb) = progress.as_ref() {
+                    cb(1.0);
+                }
+                client
+                    .put_object()
+                    .bucket(&bucket)
+                    .key(&key)
+                    .body(ByteStream::from(Vec::new()))
+                    .content_type(ct)
+                    .send()
+                    .await
+                    .context("upload empty object to storage")?;
+                return Ok(());
+            }
+
+            if total_len <= MULTIPART_MIN_SIZE {
+                let mut buffer = Vec::with_capacity(total_len as usize);
+                file.read_to_end(&mut buffer).await?;
+
+                client
+                    .put_object()
+                    .bucket(&bucket)
+                    .key(&key)
+                    .body(ByteStream::from(buffer))
+                    .content_type(ct)
+                    .send()
+                    .await
+                    .context("upload small object to storage")?;
+
+                if let Some(cb) = progress {
+                    cb(1.0);
+                }
+                return Ok(());
+            }
+
+            let upload = client
+                .create_multipart_upload()
+                .bucket(&bucket)
                 .key(&key)
-                .body(stream)
                 .content_type(ct)
                 .send()
                 .await
-        })
-        .context("upload to object storage")?;
+                .context("initiate multipart upload")?;
+
+            let upload_id = upload
+                .upload_id()
+                .ok_or(anyhow!("Missing upload id from multipart upload"))?
+                .to_string();
+
+            let mut parts = Vec::new();
+            let mut buf = vec![0u8; MULTIPART_CHUNK_SIZE];
+            let mut part_number = 1;
+            let mut uploaded: u64 = 0;
+
+            loop {
+                let bytes_read = file.read(&mut buf).await?;
+                if bytes_read == 0 {
+                    break;
+                }
+
+                let body = ByteStream::from(buf[..bytes_read].to_vec());
+                let res = client
+                    .upload_part()
+                    .bucket(&bucket)
+                    .key(&key)
+                    .upload_id(&upload_id)
+                    .part_number(part_number)
+                    .body(body)
+                    .send()
+                    .await
+                    .with_context(|| format!("upload part {part_number}"))?;

+                let etag = res
+                    .e_tag()
+                    .ok_or(anyhow!("Missing etag for uploaded part {part_number}"))?
+                    .to_string();
+
+                parts.push(
+                    CompletedPart::builder()
+                        .e_tag(etag)
+                        .part_number(part_number)
+                        .build(),
+                );
+
+                uploaded += bytes_read as u64;
+                if let Some(cb) = progress.as_ref() {
+                    cb((uploaded as f32 / total_len as f32).clamp(0.0, 1.0));
+                }
+
+                part_number += 1;
+            }
+
+            let multipart = CompletedMultipartUpload::builder()
+                .set_parts(Some(parts))
+                .build();
+
+            if let Err(e) = client
+                .complete_multipart_upload()
+                .bucket(&bucket)
+                .key(&key)
+                .upload_id(&upload_id)
+                .multipart_upload(multipart)
+                .send()
+                .await
+            {
+                tracing::error!(
+                    details=%e,
+                    upload_id=%upload_id,
+                    key=%key,
+                    "Completing multipart upload failed, aborting"
+                );
+                // Best-effort abort; ignore abort error to bubble the original failure.
+                let _ = client
+                    .abort_multipart_upload()
+                    .bucket(&bucket)
+                    .key(&key)
+                    .upload_id(upload_id)
+                    .send()
+                    .await;
+                return Err(anyhow!("complete multipart upload: {e}"));
+            }
+
+            if let Some(cb) = progress {
+                cb(1.0);
+            }
+            Ok::<(), anyhow::Error>(())
+        })?;

         Ok(())
     }
diff --git a/server/src/tests/integration_test.rs b/server/src/tests/integration_test.rs
index 0233cdbc..fdb3fbe0 100644
--- a/server/src/tests/integration_test.rs
+++ b/server/src/tests/integration_test.rs
@@ -3,12 +3,11 @@
 #![allow(unused_imports)]

 #[cfg(test)]
-mod integration_test
-{
+mod integration_test {
     use std::collections::HashMap;
     use std::sync::atomic::AtomicBool;
-    use std::sync::{Mutex, Arc};
-    use std::{error, any};
+    use std::sync::{Arc, Mutex};
+    use std::{any, error};
     use std::{path::PathBuf, str::FromStr};
     use std::{thread, time::Duration};
@@ -20,33 +19,31 @@ mod integration_test
     use rust_decimal::prelude::*;

     use crossbeam_channel;
-    use crossbeam_channel::{Receiver, RecvTimeoutError, unbounded, select};
+    use crossbeam_channel::{select, unbounded, Receiver, RecvTimeoutError};

     use crate::api_server::tests::expect_user_msg;
     use crate::api_server::validate_org_http_headers_regex;
     use crate::storage::StorageBackend;
+    use crate::api_server::test_utils::{connect_client_ws, open_media_file, write};
     use crate::database::schema::media_files::{thumb_sheet_cols, thumb_sheet_rows};
-    use crate::{expect_client_cmd, send_server_cmd};
     use crate::grpc::grpc_client::prepare_organizer;
     use crate::video_pipeline::{metadata_reader, IncomingFile, IngestUsernameFrom};
-    use crate::api_server::test_utils::{connect_client_ws, open_media_file, write};
-    use lib_clapshot_grpc::{GrpcBindAddr, proto};
-    use lib_clapshot_grpc::proto::client::ServerToClientCmd;
+    use crate::{expect_client_cmd, send_server_cmd};
     use lib_clapshot_grpc::proto::client::server_to_client_cmd as s2c;
+    use lib_clapshot_grpc::proto::client::ServerToClientCmd;
+    use lib_clapshot_grpc::{proto, GrpcBindAddr};

-    use tracing;
-    use tracing::{error, info, warn, instrument};
-    use tracing_test::traced_test;
     use serial_test::serial;
     use std::io::Write;
-
+    use tracing;
+    use tracing::{error, info, instrument, warn};
+    use tracing_test::traced_test;

     #[test]
     #[serial]
     #[traced_test]
-    fn test_integ_metadata_reader_ok() -> anyhow::Result<()>
-    {
+    fn test_integ_metadata_reader_ok() -> anyhow::Result<()> {
         let data_dir = assert_fs::TempDir::new()?;
         data_dir.copy_from("src/tests/assets/", &["*.mov"])?;
@@ -58,12 +55,17 @@ mod integration_test
         let (arg_sender, arg_recvr) = unbounded::<IncomingFile>();
         let (res_sender, res_recvr) = unbounded::<metadata_reader::MetadataResult>();
         let th = thread::spawn(move || {
-                metadata_reader::run_forever(arg_recvr, res_sender, 4);
-            });
+            metadata_reader::run_forever(arg_recvr, res_sender, 4);
+        });

         // Send request to metadata reader
         let args =
IncomingFile { - file_path: PathBuf::from_str(data_dir.join("NASA_Red_Lettuce_excerpt.mov").to_str().unwrap())?, + file_path: PathBuf::from_str( + data_dir + .join("NASA_Red_Lettuce_excerpt.mov") + .to_str() + .unwrap(), + )?, user_id: "nobody".to_string(), cookies: HashMap::new(), transcode_preference: crate::video_pipeline::TranscodePreference::Auto, @@ -88,19 +90,28 @@ mod integration_test Ok(()) } - /// Query API health endpoint until it returns 200 OK or timeout fn wait_for_healthy(url_base: &str) -> bool { const MAX_RETRIES: usize = 10; let mut interval_ms: u64 = 10; let url = format!("{}/api/health", url_base); for i in 1..=MAX_RETRIES { - if i > 1 { thread::sleep(Duration::from_millis(interval_ms)); } + if i > 1 { + thread::sleep(Duration::from_millis(interval_ms)); + } interval_ms = std::cmp::min(interval_ms * 2, 1000); let resp_result = reqwest::blocking::get(&url); if let Ok(resp) = resp_result { - if resp.status() == 200 { return true; } - else { tracing::debug!("wait_for_healthy got status {} from /api/health. Try {}/{}.", resp.status(), i, MAX_RETRIES) } + if resp.status() == 200 { + return true; + } else { + tracing::debug!( + "wait_for_healthy got status {} from /api/health. Try {}/{}.", + resp.status(), + i, + MAX_RETRIES + ) + } } } false @@ -164,8 +175,7 @@ mod integration_test #[test] #[serial] #[traced_test] - fn test_video_ingest_no_transcode() -> anyhow::Result<()> - { + fn test_video_ingest_no_transcode() -> anyhow::Result<()> { cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 2500_000, None, None, IngestUsernameFrom::FileOwner] // Copy test file to incoming dir let mp4_file = "60fps-example.mp4"; @@ -221,8 +231,7 @@ mod integration_test #[test] #[serial] #[traced_test] - fn test_video_try_ingest_corrupted_video() -> anyhow::Result<()> - { + fn test_video_try_ingest_corrupted_video() -> anyhow::Result<()> { cs_main_test! 
{[ws, data_dir, incoming_dir, _org_conn, 500_000, None, None, IngestUsernameFrom::FileOwner]

            tracing::info!("WRITING CORRUPTED VIDEO");
@@ -244,7 +253,6 @@ mod integration_test
         Ok(())
     }

-
     // --- Transcoding tests ---

     pub struct WaitForReportResults {
@@ -262,56 +270,79 @@ mod integration_test
         expect_transcode: bool,
         expect_thumbnail: bool,
         expect_thumbsheet: bool,
-        check_file_outputs: Option<(PathBuf, String)>) -> WaitForReportResults
-    {
+        check_file_outputs: Option<(PathBuf, String)>,
+    ) -> WaitForReportResults {
         let mut res = WaitForReportResults {
-            transcode_complete: false, thumbs_complete: false,
-            got_progress_report: false, got_transcode_report: false, got_thumbnail_report: false,
-            ts_cols: String::new(), ts_rows: String::new(),
+            transcode_complete: false,
+            thumbs_complete: false,
+            got_progress_report: false,
+            got_transcode_report: false,
+            got_thumbnail_report: false,
+            ts_cols: String::new(),
+            ts_rows: String::new(),
         };

         const WAIT_AFTER_REPORTS_TIMEOUT_SECS: u32 = 5;

         // Wait for file to be processed
         thread::sleep(Duration::from_secs_f32(0.5));
-        let msg = expect_user_msg(&mut ws, proto::user_message::Type::MediaFileAdded).await; // notification to client (with upload folder info etc)
+        let msg = expect_user_msg(&mut ws, proto::user_message::Type::MediaFileAdded).await; // notification to client (with upload folder info etc)
         let vid = msg.refs.unwrap().media_file_id.unwrap();

         thread::sleep(Duration::from_secs_f32(0.5));
-        let msg = expect_user_msg(&mut ws, proto::user_message::Type::Ok).await; // notification to user (in text)
+        let msg = expect_user_msg(&mut ws, proto::user_message::Type::Ok).await; // notification to user (in text)
         let vid2 = msg.refs.unwrap().media_file_id.unwrap();
         assert_eq!(vid, vid2);
         assert!(vid.len() > 0);

         if expect_transcode {
-            assert!(msg.details.unwrap().to_ascii_lowercase().contains("transcod"));
+            assert!(msg
+                .details
+                .unwrap()
+                .to_ascii_lowercase()
+                .contains("transcod"));
         }

-        for _ in 0..(60*2*10)
-        {
+        for _ in 0..(60 * 2 * 10) {
            // Wait until server sends media updated messages about
            // transcoding and thumbnail generation being done
            // before we try to open and check metadata.
            let mut still_waiting = true;
            if still_waiting {
-                match crate::api_server::test_utils::try_get_parsed::<ServerToClientCmd>(&mut ws).await.map(|c| c.cmd).flatten() {
+                match crate::api_server::test_utils::try_get_parsed::<ServerToClientCmd>(&mut ws)
+                    .await
+                    .map(|c| c.cmd)
+                    .flatten()
+                {
                     Some(s2c::Cmd::ShowMessages(m)) => {
                         // Got progress report?
-                        res.got_progress_report |= m.msgs.iter().any(|msg| msg.r#type == proto::user_message::Type::Progress as i32);
+                        res.got_progress_report |= m
+                            .msgs
+                            .iter()
+                            .any(|msg| msg.r#type == proto::user_message::Type::Progress as i32);

                         assert!(!m.msgs.iter().any(|msg| msg.r#type == proto::user_message::Type::Error as i32), "Got ERROR type message while waiting for transcode/thumbnail completion");

-                        if m.msgs.iter().any(|msg| msg.r#type == proto::user_message::Type::MediaFileUpdated as i32) {
+                        if m.msgs.iter().any(|msg| {
+                            msg.r#type == proto::user_message::Type::MediaFileUpdated as i32
+                        }) {
                             // Got transcoding update message?
-                            if m.msgs.iter().any(|msg| msg.clone().message.to_ascii_lowercase().contains("transcod")) {
+                            if m.msgs.iter().any(|msg| {
+                                msg.clone()
+                                    .message
+                                    .to_ascii_lowercase()
+                                    .contains("transcod")
+                            }) {
                                 res.got_transcode_report = true;
                             }
                             // Got thumbnail update message?
-                            else if m.msgs.iter().any(|msg| msg.clone().message.to_ascii_lowercase().contains("thumb")) {
+                            else if m.msgs.iter().any(|msg| {
+                                msg.clone().message.to_ascii_lowercase().contains("thumb")
+                            }) {
                                 res.got_thumbnail_report = true;
                             }
                         }
-                    },
+                    }
                     _ => (),
                 };
@@ -328,8 +359,16 @@ mod integration_test
                 if still_waiting {
                     thread::sleep(Duration::from_millis(100));
                 } else {
-                    println!("...waiting done, expected reports received. Doing OpenNavigationPage ...");
-                    send_server_cmd!(ws, OpenNavigationPage, OpenNavigationPage {..Default::default()});
+                    println!(
+                        "...waiting done, expected reports received. Doing OpenNavigationPage ..."
+                    );
+                    send_server_cmd!(
+                        ws,
+                        OpenNavigationPage,
+                        OpenNavigationPage {
+                            ..Default::default()
+                        }
+                    );
                     break;
                 }
             }
@@ -338,26 +377,37 @@ mod integration_test
         let reports_received_at = std::time::Instant::now();

         // Wait for page with media file to be shown
-        'waitloop: for _ in 0..80
-        {
-            if reports_received_at.elapsed().as_millis() > (WAIT_AFTER_REPORTS_TIMEOUT_SECS*1000).into() {
+        'waitloop: for _ in 0..80 {
+            if reports_received_at.elapsed().as_millis()
+                > (WAIT_AFTER_REPORTS_TIMEOUT_SECS * 1000).into()
+            {
                 panic!("Timeout checking API messages after transcode/thumbnail completion");
             }

-            match crate::api_server::test_utils::expect_parsed::<ServerToClientCmd>(&mut ws).await.cmd {
-
+            match crate::api_server::test_utils::expect_parsed::<ServerToClientCmd>(&mut ws)
+                .await
+                .cmd
+            {
                 Some(s2c::Cmd::ShowMessages(m)) => {
                     tracing::info!("Got ShowMessages (while waiting for ShowPage). Ignoring.");
-                    res.got_progress_report |= m.msgs.iter().any(|msg| msg.r#type == proto::user_message::Type::Progress as i32);
-                    assert!(!m.msgs.iter().any(|msg| msg.r#type == proto::user_message::Type::Error as i32), "Got ERROR type message while waiting for ShowPage");
-                },
+                    res.got_progress_report |= m
+                        .msgs
+                        .iter()
+                        .any(|msg| msg.r#type == proto::user_message::Type::Progress as i32);
+                    assert!(
+                        !m.msgs
+                            .iter()
+                            .any(|msg| msg.r#type == proto::user_message::Type::Error as i32),
+                        "Got ERROR type message while waiting for ShowPage"
+                    );
+                }

                 Some(s2c::Cmd::ShowPage(p)) => {
                     let pitems = p.page_items;
-                    assert!(pitems.len() == 1+1);
+                    assert!(pitems.len() == 1 + 1);

                     match &pitems[0].item {
-                        Some(proto::page_item::Item::Html(_)) => {},
+                        Some(proto::page_item::Item::Html(_)) => {}
                         _ => panic!("Expected HTML for page item 0"),
                     };
@@ -390,7 +440,10 @@ mod integration_test

                     if let Some(pd) = v.preview_data {
                         if let Some(thumb_url) = pd.thumb_url {
-                            assert!(pm.thumbs_done.is_some(), "thumbs_done not set in processing metadata but got thumb_url");
+                            assert!(
+                                pm.thumbs_done.is_some(),
+                                "thumbs_done not set in processing metadata but got thumb_url"
+                            );
                             res.thumbs_complete = true;
                         }
                         if let Some(thumb_sheet) = pd.thumb_sheet {
@@ -401,26 +454,34 @@ mod integration_test
                         }
                     }

-                    if (expect_thumbnail == res.thumbs_complete) && (expect_transcode == res.transcode_complete) {
+                    if (expect_thumbnail == res.thumbs_complete)
+                        && (expect_transcode == res.transcode_complete)
+                    {
                         break 'waitloop;
                     } else {
                         tracing::info!("Not done yet: transcode_complete = {} (expected: {}), thumbs_complete = {} (expected: {})...",
                             res.transcode_complete, expect_transcode, res.thumbs_complete, expect_thumbnail);
-                    }
-                },
+                    }
+                }

                 something_else => {
-                    tracing::info!("Got UNEXPECTED (not necessarily a bug) message: {:?}", something_else);
-                },
+                    tracing::info!(
+                        "Got UNEXPECTED (not necessarily a bug) message: {:?}",
+                        something_else
+                    );
+                }
             }
             thread::sleep(Duration::from_secs_f32(0.1));
         }

-        tracing::info!("Transcode complete: {} (expeted: {}), thumbs complete: {} (expected: {})",
-            res.transcode_complete, expect_transcode,
-            res.thumbs_complete, expect_thumbnail);
+        tracing::info!(
+            "Transcode complete: {} (expected: {}), thumbs complete: {} (expected: {})",
+            res.transcode_complete,
+            expect_transcode,
+            res.thumbs_complete,
+            expect_thumbnail
+        );

         if let Some((data_dir, input_filename)) = check_file_outputs {
             let vid_dir = data_dir.join("videos").join(vid);
@@ -438,7 +499,9 @@ mod integration_test
             if expect_thumbsheet {
                 assert!(u32::from_str(&res.ts_cols).ok().unwrap() > 0);
                 assert!(u32::from_str(&res.ts_rows).ok().unwrap() > 0);
-                assert!(thumb_dir.join(format!("sheet-{}x{}.webp", res.ts_cols, res.ts_rows)).is_file());
+                assert!(thumb_dir
+                    .join(format!("sheet-{}x{}.webp", res.ts_cols, res.ts_rows))
+                    .is_file());
             }
             if expect_thumbnail || expect_thumbsheet {
                 assert!(thumb_dir.join("thumbnail.log").is_file());
@@ -448,30 +511,29 @@ mod integration_test
         res
     }

-
-    async fn wait_for_any_client_msg(mut ws: &mut crate::api_server::test_utils::WsClient)
-    {
-        for _ in 0..(60*2*10)
-        {
-            match crate::api_server::test_utils::try_get_parsed::<ServerToClientCmd>(&mut ws).await.map(|c| c.cmd).flatten() {
+    async fn wait_for_any_client_msg(mut ws: &mut crate::api_server::test_utils::WsClient) {
+        for _ in 0..(60 * 2 * 10) {
+            match crate::api_server::test_utils::try_get_parsed::<ServerToClientCmd>(&mut ws)
+                .await
+                .map(|c| c.cmd)
+                .flatten()
+            {
                 Some(x) => {
                     tracing::info!("Got message: {:?}", x);
                     return;
-                },
+                }
                 None => {
                     thread::sleep(Duration::from_millis(50));
-                },
+                }
             };
         }
    }

-
    #[test]
    #[serial]
    #[traced_test]
    #[cfg(feature = "include_slow_tests")]
-    fn test_video_mov_ingest_and_transcode() -> anyhow::Result<()>
-    {
+    fn test_video_mov_ingest_and_transcode() -> anyhow::Result<()> {
        cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 500_000, None, None, IngestUsernameFrom::FileOwner]
            // Copy test file to incoming dir
            let mov_file = "NASA_Red_Lettuce_excerpt.mov";
@@ -488,13 +550,11 @@ mod integration_test
        Ok(())
    }

-
    #[test]
    #[serial]
    #[traced_test]
    #[cfg(feature = "include_slow_tests")]
-    fn test_video_12bit_dnxhr_alpha_ingest_and_transcode() -> anyhow::Result<()>
-    {
+    fn test_video_12bit_dnxhr_alpha_ingest_and_transcode() -> anyhow::Result<()> {
        cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 500_000, None, None, IngestUsernameFrom::FileOwner]
            // Copy test file to incoming dir
            let mov_file = "alpha-test_dnxhr-444-12bit-dnxhr.mov";
@@ -510,13 +570,11 @@ mod integration_test
        Ok(())
    }

-
    #[test]
    #[serial]
    #[traced_test]
    #[cfg(feature = "include_slow_tests")]
-    fn test_audio_ingest_and_transcode() -> anyhow::Result<()>
-    {
+    fn test_audio_ingest_and_transcode() -> anyhow::Result<()> {
        cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 500_000, None, None, IngestUsernameFrom::FileOwner]
            // Copy test file to incoming dir
            let audio_file_name = "drunkards-special-short-mono.wav";
@@ -546,8 +604,7 @@ mod integration_test
    #[serial]
    #[traced_test]
    #[cfg(feature = "include_slow_tests")]
-    fn test_mp3_full_integration() -> anyhow::Result<()>
-    {
+    fn test_mp3_full_integration() -> anyhow::Result<()> {
        cs_main_test!
{[ws, data_dir, incoming_dir, _org_conn, 500_000, None, None, IngestUsernameFrom::FileOwner] // Copy the MP3 file to incoming dir and test full integration let audio_file_name = "Apollo11_countdown.mp3"; @@ -579,11 +636,10 @@ mod integration_test #[test] #[serial] #[traced_test] - fn test_mp3_metadata_detection() -> anyhow::Result<()> - { - use crossbeam_channel; + fn test_mp3_metadata_detection() -> anyhow::Result<()> { use crate::video_pipeline::metadata_reader; use crate::video_pipeline::IncomingFile; + use crossbeam_channel; use std::collections::HashMap; // Test that MP3 files are correctly detected as Audio @@ -613,11 +669,21 @@ mod integration_test match result { metadata_reader::MetadataResult::Ok(metadata) => { // After the fix, MP3 files should be correctly detected as Audio - assert_eq!(format!("{:?}", metadata.media_type), "Audio", "MP3 file should be detected as Audio"); + assert_eq!( + format!("{:?}", metadata.media_type), + "Audio", + "MP3 file should be detected as Audio" + ); // Duration should be reasonable for the test file (~25 seconds) - assert!(metadata.duration > rust_decimal::Decimal::from(20), "Duration should be > 20 seconds"); - assert!(metadata.duration < rust_decimal::Decimal::from(30), "Duration should be < 30 seconds"); + assert!( + metadata.duration > rust_decimal::Decimal::from(20), + "Duration should be > 20 seconds" + ); + assert!( + metadata.duration < rust_decimal::Decimal::from(30), + "Duration should be < 30 seconds" + ); } metadata_reader::MetadataResult::Err(e) => { panic!("Metadata reading failed: {:?}", e); @@ -632,12 +698,10 @@ mod integration_test Ok(()) } - #[test] #[serial] #[traced_test] - fn test_image_ingest_and_transcode() -> anyhow::Result<()> - { + fn test_image_ingest_and_transcode() -> anyhow::Result<()> { cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 500_000, None, None, IngestUsernameFrom::FileOwner] let image_file_name = "NASA-48410_PIA25967_-_MAV_Test.jpeg"; data_dir.copy_from("src/tests/assets/", &[image_file_name]).unwrap(); @@ -648,19 +712,19 @@ mod integration_test Ok(()) } - - #[test] #[serial] #[traced_test] - fn test_existing_v056_migrate_and_image_ingest() -> anyhow::Result<()> - { + fn test_existing_v056_migrate_and_image_ingest() -> anyhow::Result<()> { let (_db, temp_dir, _videos, _comments) = crate::database::tests::make_test_db(); // Overwrite the test DB with one from assets dir, for migration testing on existing DB let db_file = temp_dir.path().join("clapshot.sqlite"); - std::fs::copy("src/tests/assets/databases/clapshot-migration-test-1_v056.sqlite", &db_file) - .expect("Failed to copy test DB for migration test"); + std::fs::copy( + "src/tests/assets/databases/clapshot-migration-test-1_v056.sqlite", + &db_file, + ) + .expect("Failed to copy test DB for migration test"); cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 500_000, None, Some(temp_dir), IngestUsernameFrom::FileOwner] let image_file_name = "NASA-48410_PIA25967_-_MAV_Test.jpeg"; @@ -674,21 +738,23 @@ mod integration_test #[test] #[serial] #[traced_test] - fn test_organizer_existing_v056_migrate() -> anyhow::Result<()> - { + fn test_organizer_existing_v056_migrate() -> anyhow::Result<()> { // This supplements the other v056_migrate test, by testing with Organizer too. 
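+        // Runs only when TEST_ORG_CMD is set to an organizer launch command; skipped otherwise.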
- match std::env::var("TEST_ORG_CMD").ok() - { + match std::env::var("TEST_ORG_CMD").ok() { Some(org_cmd) => { let (_db, temp_dir, _videos, _comments) = crate::database::tests::make_test_db(); // Overwrite the test DB with one from assets dir, for migration testing on existing DB let db_file = temp_dir.path().join("clapshot.sqlite"); - std::fs::copy("src/tests/assets/databases/clapshot-migration-test-1_v056.sqlite", &db_file).expect("Failed to copy test DB for migration test"); + std::fs::copy( + "src/tests/assets/databases/clapshot-migration-test-1_v056.sqlite", + &db_file, + ) + .expect("Failed to copy test DB for migration test"); cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 500_000, Some(org_cmd), Some(temp_dir), IngestUsernameFrom::FileOwner] // If we get any client messages, Organizer migration was successful and API server was started wait_for_any_client_msg(&mut ws).await; } - }, + } None => { tracing::info!("Organizer cmd not specified, skipping organizer test"); } @@ -699,20 +765,22 @@ mod integration_test #[test] #[serial] #[traced_test] - fn test_organizer_existing_v061_migrate() -> anyhow::Result<()> - { - match std::env::var("TEST_ORG_CMD").ok() - { + fn test_organizer_existing_v061_migrate() -> anyhow::Result<()> { + match std::env::var("TEST_ORG_CMD").ok() { Some(org_cmd) => { let (_db, temp_dir, _videos, _comments) = crate::database::tests::make_test_db(); // Overwrite the test DB with one from assets dir, for migration testing on existing DB let db_file = temp_dir.path().join("clapshot.sqlite"); - std::fs::copy("src/tests/assets/databases/clapshot-migration-test-2_v061.sqlite", &db_file).expect("Failed to copy test DB for migration test"); + std::fs::copy( + "src/tests/assets/databases/clapshot-migration-test-2_v061.sqlite", + &db_file, + ) + .expect("Failed to copy test DB for migration test"); cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 500_000, Some(org_cmd), Some(temp_dir), IngestUsernameFrom::FileOwner] // If we get any client messages, Organizer migration was successful and API server was started wait_for_any_client_msg(&mut ws).await; } - }, + } None => { tracing::info!("Organizer cmd not specified, skipping organizer test"); } @@ -720,26 +788,23 @@ mod integration_test Ok(()) } - - - #[test] #[serial] #[traced_test] - fn test_organizer_run_organizer_tests() -> anyhow::Result<()> - { + fn test_organizer_run_organizer_tests() -> anyhow::Result<()> { // Environment variable TEST_ORG_CMD can be used to specify a command // to start organizer. If not specified, the test will be skipped. - match std::env::var("TEST_ORG_CMD").ok() - { + match std::env::var("TEST_ORG_CMD").ok() { Some(cmd) => { - // `cargo test` captures stdout/stderr, so we can't list the test to console, // put them in a log file instead. Open & truncate here, so it's empty if // listing fails. 
-                    let log_path = std::env::var("TEST_ORG_LOG").unwrap_or("organizer_tests.log".into());
+                    let log_path =
+                        std::env::var("TEST_ORG_LOG").unwrap_or("organizer_tests.log".into());
                     let log = Arc::new(Mutex::new(std::io::BufWriter::new(
-                        std::fs::File::create(&log_path).expect(format!("Failed to create log file '{}'", &log_path).as_str()))));
+                        std::fs::File::create(&log_path)
+                            .expect(format!("Failed to create log file '{}'", &log_path).as_str()),
+                    )));

                     fn write_log(writer: &Arc<Mutex<std::io::BufWriter<std::fs::File>>>, s: &str) {
                         let mut writer = writer.lock().unwrap();
@@ -748,7 +813,8 @@ mod integration_test
                         println!("{}", s);
                     }

-                    let test_results: Arc<Mutex<Vec<(String, proto::org::RunTestResponse)>>> = Arc::new(Mutex::new(Vec::new()));
+                    let test_results: Arc<Mutex<Vec<(String, proto::org::RunTestResponse)>>> =
+                        Arc::new(Mutex::new(Vec::new()));

                     // Connect to organizer and list its test names
                     write_log(&log, " Retrieving organizer tests...");
@@ -777,25 +843,42 @@ mod integration_test
                     println!("\n\n^^^ (that was just a call listing organizer tests, now running them...) ^^^");

                     // Call gRPC run_test() for each test name. Store results in test_results.
-                    let mut test_names: Vec<String> = test_names.lock().unwrap().iter().map(|s| s.clone()).collect();
+                    let mut test_names: Vec<String> = test_names
+                        .lock()
+                        .unwrap()
+                        .iter()
+                        .map(|s| s.clone())
+                        .collect();

                     // Check for TEST_ORG_FILTER environment variable to filter tests
-                    if let Some(filter) = std::env::var("TEST_ORG_FILTER").ok().filter(|s| !s.is_empty()) {
-                        write_log(&log, format!(" Filtering tests with pattern: '{}'", filter).as_str());
+                    if let Some(filter) = std::env::var("TEST_ORG_FILTER")
+                        .ok()
+                        .filter(|s| !s.is_empty())
+                    {
+                        write_log(
+                            &log,
+                            format!(" Filtering tests with pattern: '{}'", filter).as_str(),
+                        );
                         test_names.retain(|name| name.contains(&filter));
                         if test_names.is_empty() {
-                            write_log(&log, format!(" No tests match filter '{}'", filter).as_str());
+                            write_log(
+                                &log,
+                                format!(" No tests match filter '{}'", filter).as_str(),
+                            );
                             panic!("No organizer tests match the filter '{}'", filter);
                         }
                     }

-                    write_log(&log, format!(" Running {} organizer tests", test_names.len()).as_str());
+                    write_log(
+                        &log,
+                        format!(" Running {} organizer tests", test_names.len()).as_str(),
+                    );

-                    for (i, test_name) in test_names.iter().enumerate()
-                    {
+                    for (i, test_name) in test_names.iter().enumerate() {
                         println!("\n\n\n------------ Running organizer test {}/{}: '{}'... ------------\n\n\n", i+1, test_names.len()+1, test_name);

-                        let (_db, temp_dir, _videos, _comments) = crate::database::tests::make_test_db();
+                        let (_db, temp_dir, _videos, _comments) =
+                            crate::database::tests::make_test_db();
                         let test_results = test_results.clone();
                         let log = log.clone();
@@ -827,24 +910,44 @@ mod integration_test

                     // Write test results to log file and print to console, mimicking cargo test output
                     let test_results = test_results.lock().unwrap();
-                    for (test_name, res) in test_results.iter()
-                    {
+                    for (test_name, res) in test_results.iter() {
                         if let Some(err) = &res.error {
                             write_log(&log, format!("\n\n").as_str());
-                            write_log(&log, format!("==================== FAILED ORG TEST: '{}' ====================", test_name).as_str());
+                            write_log(
+                                &log,
+                                format!(
+                                    "==================== FAILED ORG TEST: '{}' ====================",
+                                    test_name
+                                )
+                                .as_str(),
+                            );
                             write_log(&log, format!("(NOTE!
For Clapshot Server -captured logs, see the cargo test output for integration_test::test_organizer!)").as_str()); - write_log(&log, format!("\n---------------- RunTestResponse.output ----------------").as_str()); + write_log( + &log, + format!("\n---------------- RunTestResponse.output ----------------") + .as_str(), + ); write_log(&log, format!("{}", res.output).as_str()); - write_log(&log, format!("\n---------------- RunTestResponse.error ----------------").as_str()); + write_log( + &log, + format!("\n---------------- RunTestResponse.error ----------------") + .as_str(), + ); write_log(&log, format!("{}", err).as_str()); write_log(&log, format!("\n\n").as_str()); } } if test_results.iter().any(|(_, res)| res.error.is_some()) { - write_log(&log, format!("### Some organizer tests failed ###").as_str()); - panic!("Some organizer tests failed, output also logged into '{}'", log_path); + write_log( + &log, + format!("### Some organizer tests failed ###").as_str(), + ); + panic!( + "Some organizer tests failed, output also logged into '{}'", + log_path + ); } - }, + } None => { tracing::info!("Organizer cmd not specified, skipping organizer test"); } @@ -855,8 +958,7 @@ mod integration_test #[test] #[serial] #[traced_test] - fn test_ingest_username_from_file_owner() -> anyhow::Result<()> - { + fn test_ingest_username_from_file_owner() -> anyhow::Result<()> { cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 2500_000, None, None, IngestUsernameFrom::FileOwner] // Copy test file to incoming dir (owned by current user) let mp4_file = "60fps-example.mp4"; @@ -886,8 +988,7 @@ mod integration_test #[test] #[serial] #[traced_test] - fn test_ingest_username_from_folder_name() -> anyhow::Result<()> - { + fn test_ingest_username_from_folder_name() -> anyhow::Result<()> { cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 2500_000, None, None, IngestUsernameFrom::FolderName, Some("test_folder_user".to_string())] // Create user folder structure with specific test username let current_user = whoami::username(); @@ -923,8 +1024,7 @@ mod integration_test #[test] #[serial] #[traced_test] - fn test_ingest_username_from_folder_name_nested() -> anyhow::Result<()> - { + fn test_ingest_username_from_folder_name_nested() -> anyhow::Result<()> { cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 2500_000, None, None, IngestUsernameFrom::FolderName, Some("test_nested_user".to_string())] // Create folder structure with specific test username let current_user = whoami::username(); @@ -956,5 +1056,4 @@ mod integration_test } Ok(()) } - } diff --git a/server/src/video_pipeline/cleanup_rejected.rs b/server/src/video_pipeline/cleanup_rejected.rs index b5ec76e3..21da5558 100644 --- a/server/src/video_pipeline/cleanup_rejected.rs +++ b/server/src/video_pipeline/cleanup_rejected.rs @@ -1,18 +1,24 @@ -use std::path::Path; use anyhow::{anyhow, bail}; +use std::path::Path; use tracing; - /// Clean up after a processing error. Attempts to preserve the original file /// by moving it under the rejected directory. Then deletes any dangling files that were /// created during the failed ingestion. 
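+/// A duplicate filename is filed under rejected/<media_file_id>/ (or a random UUID when no id is known).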
-pub fn clean_up_rejected_file(data_dir: &Path, src_file: &Path, media_file_id: Option<String>) -> anyhow::Result<()>
-{
+pub fn clean_up_rejected_file(
+    data_dir: &Path,
+    src_file: &Path,
+    media_file_id: Option<String>,
+) -> anyhow::Result<()> {
     // Create rejected directory if it doesn't exist
     let rejected_dir = data_dir.join("rejected");
-    if !rejected_dir.exists() { std::fs::create_dir(&rejected_dir)?; };
+    if !rejected_dir.exists() {
+        std::fs::create_dir(&rejected_dir)?;
+    };

-    let src_file_name = src_file.file_name().ok_or(anyhow!("Invalid filename {:?}", src_file))?;
+    let src_file_name = src_file
+        .file_name()
+        .ok_or(anyhow!("Invalid filename {:?}", src_file))?;
     let move_to = rejected_dir.join(src_file_name);
     if !move_to.exists() {
         // Move the original file to the root of rejected directory
@@ -22,9 +28,11 @@ pub fn clean_up_rejected_file(data_dir: &Path, src_file: &Path, media_file_id: Option<String>
         // Use media file id if available, otherwise an UUID4.
         let extra_dir = match &media_file_id {
             Some(id) => rejected_dir.join(id),
-            None => rejected_dir.join( uuid::Uuid::new_v4().to_string() ),
+            None => rejected_dir.join(uuid::Uuid::new_v4().to_string()),
+        };
+        if !extra_dir.exists() {
+            std::fs::create_dir(&extra_dir)?;
         };
-        if !extra_dir.exists() { std::fs::create_dir(&extra_dir)?; };

         let move_to = extra_dir.join(src_file_name);
         if !move_to.exists() {
diff --git a/server/src/video_pipeline/incoming_monitor.rs b/server/src/video_pipeline/incoming_monitor.rs
index 823a55bf..6d4b7fe8 100644
--- a/server/src/video_pipeline/incoming_monitor.rs
+++ b/server/src/video_pipeline/incoming_monitor.rs
@@ -2,20 +2,20 @@
 #![allow(unused_variables)]
 #![allow(unused_imports)]

+use anyhow::anyhow;
+use async_std::net::Incoming;
+use crossbeam_channel::{Receiver, RecvTimeoutError, Sender};
+use file_owner::PathExt;
+use path_absolutize::*;
 use std::borrow::Cow;
 use std::collections::HashMap;
 use std::os;
-use std::time::Duration;
 use std::path::{Path, PathBuf};
-use file_owner::PathExt;
-use async_std::net::Incoming;
-use crossbeam_channel::{Sender, Receiver, RecvTimeoutError};
-use path_absolutize::*;
+use std::time::Duration;
 use tracing;
-use anyhow::anyhow;

-use crate::video_pipeline::metadata_reader;
 use super::{cleanup_rejected::clean_up_rejected_file, IngestUsernameFrom};
+use crate::video_pipeline::metadata_reader;

 pub enum Void {}

@@ -26,13 +26,20 @@ pub fn run_forever(
     resubmit_delay: f32,
     incoming_sender: Sender<super::IncomingFile>,
     exit_evt: Receiver<Void>,
-    ingest_username_from: IngestUsernameFrom) -> anyhow::Result<()>
-{
+    ingest_username_from: IngestUsernameFrom,
+) -> anyhow::Result<()> {
     let _span = tracing::info_span!("INCOMING").entered();
-    tracing::debug!(dir=data_dir.to_str(), poll_interval=poll_interval, resubmit_delay=resubmit_delay, "Starting.");
+    tracing::debug!(
+        dir = data_dir.to_str(),
+        poll_interval = poll_interval,
+        resubmit_delay = resubmit_delay,
+        "Starting."
+    );

-    let mut last_tested_size: std::collections::HashMap<PathBuf, u64> = std::collections::HashMap::new();
-    let mut submission_time: std::collections::HashMap<PathBuf, std::time::Instant> = std::collections::HashMap::new();
+    let mut last_tested_size: std::collections::HashMap<PathBuf, u64> =
+        std::collections::HashMap::new();
+    let mut submission_time: std::collections::HashMap<PathBuf, std::time::Instant> =
+        std::collections::HashMap::new();

     loop {
         // Remove expired submissions
@@ -40,13 +47,14 @@ pub fn run_forever(
         submission_time.retain(|_, t| now.duration_since(t.clone()).as_secs_f32() < resubmit_delay);

         match exit_evt.recv_timeout(Duration::from_secs_f32(poll_interval)) {
-            Err(RecvTimeoutError::Disconnected) => { break; }
+            Err(RecvTimeoutError::Disconnected) => {
+                break;
+            }
             _ => {}
         }

         //tracing::trace!("Polling dir.");
         match incoming_dir.read_dir() {
             Ok(entries) => {
-
                 // Collect files from incoming directory and one level of subdirectories
                 let mut names_and_sizes = Vec::new();
                 for entry in entries {
@@ -63,7 +71,10 @@ pub fn run_forever(
                             if let Ok(subentry) = subentry {
                                 if let Ok(sub_metadata) = subentry.metadata() {
                                     if sub_metadata.is_file() {
-                                        names_and_sizes.push((subentry.path(), sub_metadata.len()));
+                                        names_and_sizes.push((
+                                            subentry.path(),
+                                            sub_metadata.len(),
+                                        ));
                                     }
                                 }
                             }
@@ -75,38 +86,60 @@ pub fn run_forever(
                 }

                 fn get_file_owner_name(path: &Path) -> anyhow::Result<String> {
-                    path.owner()?.name()?.ok_or(anyhow!("Unnamed OS user for file {:?}", path))
+                    path.owner()?
+                        .name()?
+                        .ok_or(anyhow!("Unnamed OS user for file {:?}", path))
                 }

-                fn get_username_from_folder(path: &Path, incoming_dir: &Path) -> anyhow::Result<String> {
-                    let relative_path = path.strip_prefix(incoming_dir)
-                        .map_err(|e| anyhow!("File {:?} is not within incoming directory {:?}: {}", path, incoming_dir, e))?;
-
-                    let first_component = relative_path.components().next()
-                        .ok_or(anyhow!("File {:?} has no parent directory components", relative_path))?;
-
+                fn get_username_from_folder(
+                    path: &Path,
+                    incoming_dir: &Path,
+                ) -> anyhow::Result<String> {
+                    let relative_path = path.strip_prefix(incoming_dir).map_err(|e| {
+                        anyhow!(
+                            "File {:?} is not within incoming directory {:?}: {}",
+                            path,
+                            incoming_dir,
+                            e
+                        )
+                    })?;
+
+                    let first_component = relative_path.components().next().ok_or(anyhow!(
+                        "File {:?} has no parent directory components",
+                        relative_path
+                    ))?;
+
                     match first_component {
-                        std::path::Component::Normal(username) => {
-                            username.to_str()
-                                .ok_or(anyhow!("Username directory name is not valid UTF-8: {:?}", username))
-                                .map(|s| s.to_string())
-                        },
-                        _ => Err(anyhow!("Invalid directory structure for file {:?}", relative_path))
+                        std::path::Component::Normal(username) => username
+                            .to_str()
+                            .ok_or(anyhow!(
+                                "Username directory name is not valid UTF-8: {:?}",
+                                username
+                            ))
+                            .map(|s| s.to_string()),
+                        _ => Err(anyhow!(
+                            "Invalid directory structure for file {:?}",
+                            relative_path
+                        )),
                     }
                 }

                 for (path, sz) in names_and_sizes {
-                    let _span = tracing::debug_span!("Considering file.", path=path.to_str()).entered();
+                    let _span =
+                        tracing::debug_span!("Considering file.", path = path.to_str()).entered();

                     if !submission_time.contains_key(&path) {
                         // Check if file is still being written to
-                        if sz > 1 && sz != 4096 { // 4096 = size of an empty file on ext4
+                        if sz > 1 && sz != 4096 {
+                            // 4096 = size of an empty file on ext4
                             if &sz == last_tested_size.get(&path).unwrap_or(&0) {
                                 let username_result = match ingest_username_from {
                                     IngestUsernameFrom::FileOwner => get_file_owner_name(&path),
-                                    IngestUsernameFrom::FolderName => get_username_from_folder(&path, &incoming_dir),
+                                    IngestUsernameFrom::FolderName => {
+                                        get_username_from_folder(&path, &incoming_dir)
+                                    }
                                 };
-
+
                                 match username_result {
                                     Err(e) => {
                                         tracing::error!(details=%e, "Cannot ingest. Failed to get username for file.");
@@ -117,30 +150,33 @@ pub fn run_forever(
                                     }
                                     Ok(username) => {
                                         tracing::info!("Submitting for processing.");
-                                        submission_time.insert(path.clone(), std::time::Instant::now());
-                                        if let Err(e) = incoming_sender.send(
-                                            super::IncomingFile {
-                                                file_path: path.clone(),
-                                                user_id: username,
-                                                cookies: HashMap::new(),
-                                                transcode_preference: super::TranscodePreference::Auto,
-                                            }) {
+                                        submission_time
+                                            .insert(path.clone(), std::time::Instant::now());
+                                        if let Err(e) = incoming_sender.send(super::IncomingFile {
+                                            file_path: path.clone(),
+                                            user_id: username,
+                                            cookies: HashMap::new(),
+                                            transcode_preference: super::TranscodePreference::Auto,
+                                        }) {
                                             tracing::error!(details=%e, "Failed to send incoming file to processing queue.");
                                         }
-                                    },
+                                    }
                                 };
                             } else {
                                 tracing::debug!("File '{:?}' apparently still being written to. Skipping for now...", path);
                                 last_tested_size.insert(path, sz);
-                }}}}
-            },
+                            }
+                        }
+                    }
+                }
+            }
             Err(e) => {
                 // Directory listing failed. Cannot continue monitoring.
                 tracing::error!(details=%e, "Error monitoring dir {:?} - aborting.",
-                    match incoming_dir.absolutize() {
-                        Ok(Cow::Owned(p)) => p,    // Got absolute path
-                        _ => incoming_dir.clone(),  // Some error happened, use original
-                    });
+                    match incoming_dir.absolutize() {
+                        Ok(Cow::Owned(p)) => p,    // Got absolute path
+                        _ => incoming_dir.clone(), // Some error happened, use original
+                    });
                 break;
             }
         }
diff --git a/server/src/video_pipeline/metadata_reader.rs b/server/src/video_pipeline/metadata_reader.rs
index 72c917a7..b3c030c8 100644
--- a/server/src/video_pipeline/metadata_reader.rs
+++ b/server/src/video_pipeline/metadata_reader.rs
@@ -1,14 +1,14 @@
-use std::{collections::HashMap, process::Command};
+use super::{DetailedMsg, IncomingFile};
+use crossbeam_channel::{Receiver, RecvError, Sender};
+use rust_decimal::prelude::*;
+use serde_json;
+use std::path::PathBuf;
+use std::str::FromStr;
+use std::sync::atomic::AtomicBool;
 use std::sync::atomic::Ordering;
+use std::{collections::HashMap, process::Command};
 use threadpool::ThreadPool;
-use std::path::PathBuf;
-use serde_json;
-use crossbeam_channel::{Sender, Receiver, RecvError};
 use tracing;
-use rust_decimal::prelude::*;
-use std::sync::atomic::AtomicBool;
-use std::str::FromStr;
-use super::{IncomingFile, DetailedMsg};

 #[derive(Debug, Clone)]
 pub enum MediaType {
@@ -37,7 +37,6 @@ impl FromStr for MediaType {
     }
 }

-
 #[derive(Debug, Clone)]
 pub struct Metadata {
     pub src_file: PathBuf,
@@ -49,7 +48,7 @@ pub struct Metadata {
     pub fps: Decimal,
     pub bitrate: u32,
     pub metadata_all: String,
-    pub upload_cookies: HashMap<String, String>,    // Cookies from the upload, not read from the file
+    pub upload_cookies: HashMap<String, String>, // Cookies from the upload, not read from the file
     pub transcode_preference: super::TranscodePreference,
 }

 pub type MetadataResult = Result<Metadata, DetailedMsg>;

 ///
 /// # Arguments
 /// * `file_path` - Path to the file to be analyzed
-fn run_mediainfo( file: &PathBuf ) -> Result<serde_json::Value, String>
-{
+fn run_mediainfo(file: &PathBuf) -> Result<serde_json::Value, String> {
     // Link to source file to a temporary file to avoid problems with
     // special characters in the path with mediainfo
     let uuid = uuid::Uuid::new_v4();
     let file_dir = file.parent().ok_or("Failed to get parent directory")?;
     let temp_dir = file_dir.join(uuid.to_string());
-
+
     // Preserve original file extension to help mediainfo detect format correctly
-    let extension = file.extension()
+    let extension = file
+        .extension()
         .map(|ext| format!(".{}", ext.to_string_lossy()))
         .unwrap_or_default();
     let link_path = temp_dir.join(format!("tempname{}", extension));
@@ -95,25 +94,30 @@ fn run_mediainfo(file: &PathBuf) -> Result<serde_json::Value, String> {
         }
     }

-    match mediainfo_res
-    {
+    match mediainfo_res {
         Ok(output) => {
             if output.status.success() {
                 {
-                    let json_res = String::from_utf8(output.stdout)
-                        .map_err(|e| e.to_string())?;
+                    let json_res = String::from_utf8(output.stdout).map_err(|e| e.to_string())?;
                     serde_json::from_str(&json_res)
-                }.map_err(|e| format!("Error parsing mediainfo JSON: {:?}", e))
+                }
+                .map_err(|e| format!("Error parsing mediainfo JSON: {:?}", e))
             } else {
-                tracing::error!("Mediainfo stdout: {}", String::from_utf8_lossy(&output.stdout));
-                tracing::error!("Mediainfo stderr: {}", String::from_utf8_lossy(&output.stderr));
-                Err( format!("Mediainfo exited with error: {}",
-                    String::from_utf8_lossy(&output.stderr)))
+                tracing::error!(
+                    "Mediainfo stdout: {}",
+                    String::from_utf8_lossy(&output.stdout)
+                );
+                tracing::error!(
+                    "Mediainfo stderr: {}",
+                    String::from_utf8_lossy(&output.stderr)
+                );
+                Err(format!(
+                    "Mediainfo exited with error: {}",
+                    String::from_utf8_lossy(&output.stderr)
+                ))
             }
-        },
-        Err(e) => {
-            Err(format!("Failed to execute mediainfo: {}", e)) }
+        }
+        Err(e) => Err(format!("Failed to execute mediainfo: {}", e)),
     }
 }

@@ -125,58 +129,92 @@ fn run_mediainfo(file: &PathBuf) -> Result<serde_json::Value, String> {
 /// * `json` - Mediainfo JSON output
 /// * `args` - Metadata request arguments
 /// * `get_file_size` - Closure to get the file size (only called if bitrate is not available and we need to calculate it)
-fn extract_variables<F>(json: serde_json::Value, args: &IncomingFile, get_file_size: F) -> Result<Metadata, String>
-    where F: FnOnce() -> Result<u64, String>
+fn extract_variables<F>(
+    json: serde_json::Value,
+    args: &IncomingFile,
+    get_file_size: F,
+) -> Result<Metadata, String>
+where
+    F: FnOnce() -> Result<u64, String>,
 {
-    let tracks = json["media"]["track"].as_array().ok_or("No media tracks found")?;
+    let tracks = json["media"]["track"]
+        .as_array()
+        .ok_or("No media tracks found")?;

     // Video file
     if let Some(video_track) = tracks.iter().find(|t| t["@type"] == "Video") {
-
         // Bitrate is tricky. It might be in "BitRate" or "BitRate_Nominal". If it's not in either, we'll estimate it.
-        let duration = Decimal::from_str(video_track["Duration"].as_str().ok_or("Duration not found")?).map_err(|_| "Invalid duration")?;
+        let duration = Decimal::from_str(
+            video_track["Duration"]
+                .as_str()
+                .ok_or("Duration not found")?,
+        )
+        .map_err(|_| "Invalid duration")?;

         let bitrate = {
-            let bitrate_str = video_track["BitRate"].as_str()
+            let bitrate_str = video_track["BitRate"]
+                .as_str()
                 .or(video_track["BitRate_Nominal"].as_str());
             match bitrate_str {
-                Some(bit_rate_str) => bit_rate_str.parse().map_err(|_| format!("Invalid bitrate: {}", bit_rate_str))?,
+                Some(bit_rate_str) => bit_rate_str
+                    .parse()
+                    .map_err(|_| format!("Invalid bitrate: {}", bit_rate_str))?,
                 None => {
                     let duration = duration.to_f32().ok_or("Invalid duration")?;
                     ((get_file_size()? as f32) * 8.0 / duration) as u32
-                }}};
+                }
+            }
+        };

         Ok(Metadata {
             src_file: args.file_path.clone(),
             user_id: args.user_id.clone(),
-            total_frames: video_track["FrameCount"].as_str().ok_or("FrameCount not found")?.parse().map_err(|_| "Invalid frame count".to_string())?,
+            total_frames: video_track["FrameCount"]
+                .as_str()
+                .ok_or("FrameCount not found")?
+                .parse()
+                .map_err(|_| "Invalid frame count".to_string())?,
             duration,
             media_type: MediaType::Video,
-            orig_codec: video_track["Format"].as_str().ok_or("No codec found")?.to_string(),
-            fps: Decimal::from_str(video_track["FrameRate"].as_str().ok_or("FPS not found")?).map_err(|_| "Invalid FPS".to_string())?,
+            orig_codec: video_track["Format"]
+                .as_str()
+                .ok_or("No codec found")?
+                .to_string(),
+            fps: Decimal::from_str(video_track["FrameRate"].as_str().ok_or("FPS not found")?)
+                .map_err(|_| "Invalid FPS".to_string())?,
             bitrate,
             metadata_all: json.to_string(),
             upload_cookies: args.cookies.clone(),
             transcode_preference: args.transcode_preference,
         })
     }
-
     // Audio file
     else if let Some(audio_track) = tracks.iter().find(|t| t["@type"] == "Audio") {
         Ok(Metadata {
             src_file: args.file_path.clone(),
             user_id: args.user_id.clone(),
             total_frames: 0,
-            duration: Decimal::from_str(audio_track["Duration"].as_str().ok_or("Duration not found")?).map_err(|_| "Invalid duration".to_string())?,
+            duration: Decimal::from_str(
+                audio_track["Duration"]
+                    .as_str()
+                    .ok_or("Duration not found")?,
+            )
+            .map_err(|_| "Invalid duration".to_string())?,
             media_type: MediaType::Audio,
-            orig_codec: audio_track["Format"].as_str().ok_or("No codec found")?.to_string(),
+            orig_codec: audio_track["Format"]
+                .as_str()
+                .ok_or("No codec found")?
+                .to_string(),
             fps: Decimal::from_u8(0).unwrap(),
-            bitrate: audio_track["BitRate"].as_str().ok_or("Bitrate not found")?.parse().map_err(|_| "Invalid bitrate".to_string())?,
+            bitrate: audio_track["BitRate"]
+                .as_str()
+                .ok_or("Bitrate not found")?
+                .parse()
+                .map_err(|_| "Invalid bitrate".to_string())?,
             metadata_all: json.to_string(),
             upload_cookies: args.cookies.clone(),
             transcode_preference: args.transcode_preference,
         })
     }
-
     // Image file
     else if let Some(image_track) = tracks.iter().find(|t| t["@type"] == "Image") {
         Ok(Metadata {
@@ -185,7 +223,10 @@ fn extract_variables<F>(json: serde_json::Value, args: &IncomingFile, get_file_s
             total_frames: 1,
             duration: Decimal::from_u8(0).unwrap(),
             media_type: MediaType::Image,
-            orig_codec: image_track["Format"].as_str().ok_or("No codec found")?.to_string(),
+            orig_codec: image_track["Format"]
+                .as_str()
+                .ok_or("No codec found")?
+                .to_string(),
             fps: Decimal::from_u8(0).unwrap(),
             bitrate: 0,
             metadata_all: json.to_string(),
@@ -198,10 +239,15 @@ fn extract_variables<F>(json: serde_json::Value, args: &IncomingFile, get_file_s
 }

 /// Run mediainfo and extract the metadata
-fn read_metadata_from_file(args: &IncomingFile) -> Result<Metadata, String>
-{
+fn read_metadata_from_file(args: &IncomingFile) -> Result<Metadata, String> {
     let json = run_mediainfo(&args.file_path)?;
-    extract_variables(json, args, || Ok(args.file_path.metadata().map_err(|e| format!("Failed to get file size: {:?}", e))?.len()))
+    extract_variables(json, args, || {
+        Ok(args
+            .file_path
+            .metadata()
+            .map_err(|e| format!("Failed to get file size: {:?}", e))?
+            .len())
+    })
 }

 /// Listens to inq for new files to scan for metadata with Mediainfo shell command.
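+/// Results (or DetailedMsg errors) are sent to outq; workers shut down if the output channel closes.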
@@ -213,13 +259,12 @@ fn read_metadata_from_file(args: &IncomingFile) -> Result<Metadata, String>
 /// * `inq` - channel to receive new files to process
 /// * `outq` - channel to send results to
 /// * `n_workers` - number of threads to use for processing
-pub fn run_forever(inq: Receiver<IncomingFile>, outq: Sender<MetadataResult>, n_workers: usize)
-{
+pub fn run_forever(inq: Receiver<IncomingFile>, outq: Sender<MetadataResult>, n_workers: usize) {
     let span = tracing::info_span!("MD").entered();
     tracing::debug!(n_workers = n_workers, "Starting.");

     let pool = ThreadPool::new(n_workers);
-    let pool_is_healthy =  std::sync::Arc::new(AtomicBool::new(true));
+    let pool_is_healthy = std::sync::Arc::new(AtomicBool::new(true));

     while pool_is_healthy.load(Ordering::Relaxed) {
         match inq.recv() {
@@ -230,20 +275,20 @@ pub fn run_forever(inq: Receiver<IncomingFile>, outq: Sender<MetadataResult>, n_
                 let span = span.clone();
                 pool.execute(move || {
                     span.in_scope(|| {
-                        if let Err(e) = outq.send(
-                            read_metadata_from_file(&args).map_err(|e| {
-                                DetailedMsg {
-                                    msg: "Metadata read failed".to_string(),
-                                    details: e,
-                                    src_file: args.file_path.clone(),
-                                    user_id: args.user_id.clone() }}))
+                        if let Err(e) =
+                            outq.send(read_metadata_from_file(&args).map_err(|e| DetailedMsg {
+                                msg: "Metadata read failed".to_string(),
+                                details: e,
+                                src_file: args.file_path.clone(),
+                                user_id: args.user_id.clone(),
+                            }))
                         {
                             tracing::error!(details=%e, "Result send failed! Aborting.");
                             pool_is_healthy.store(false, Ordering::Relaxed);
                         }
                     })
                 });
-            },
+            }
             Err(RecvError) => {
                 tracing::info!("Incoming queue closed.");
                 break;
@@ -254,21 +299,31 @@ pub fn run_forever(inq: Receiver<IncomingFile>, outq: Sender<MetadataResult>, n_
     tracing::debug!("Exiting.");
 }

-
 // Unit tests =====================================================================================

 #[cfg(test)]
-fn test_fixture(has_bitrate: bool, has_fps: bool) -> (IncomingFile, serde_json::Value)
-{
-    let bitrate = if has_bitrate { r#", "BitRate": "1000""# } else { "" };
-    let fps = if has_fps { r#", "FrameRate": "30""# } else { "" };
+fn test_fixture(has_bitrate: bool, has_fps: bool) -> (IncomingFile, serde_json::Value) {
+    let bitrate = if has_bitrate {
+        r#", "BitRate": "1000""#
+    } else {
+        ""
+    };
+    let fps = if has_fps {
+        r#", "FrameRate": "30""#
+    } else {
+        ""
+    };

-    let json = serde_json::from_str(&format!(r#"{{
+    let json = serde_json::from_str(&format!(
+        r#"{{
         "media": {{ "track": [ {{
             "@type": "Video",
             "FrameCount": "100",
             "Duration": "5.0",
             "Format": "H264"
             {}{}
-        }} ] }} }}"#, bitrate, fps)).unwrap();
+        }} ] }} }}"#,
+        bitrate, fps
+    ))
+    .unwrap();

     let args = IncomingFile {
         file_path: PathBuf::from("test.mp4"),
@@ -281,8 +336,7 @@ fn test_fixture(has_bitrate: bool, has_fps: bool) -> (IncomingFile, serde_json::
 }

 #[test]
-fn test_extract_variables_ok()
-{
+fn test_extract_variables_ok() {
     let (args, json) = test_fixture(true, true);
     let metadata = extract_variables(json, &args, || Ok(1000)).unwrap();
     assert_eq!(metadata.total_frames, 100);
@@ -293,16 +347,14 @@ fn test_extract_variables_ok()
 }

 #[test]
-fn test_extract_variables_missing_bitrate()
-{
+fn test_extract_variables_missing_bitrate() {
     let (args, json) = test_fixture(false, true);
     let metadata = extract_variables(json, &args, || Ok(1000)).unwrap();
-    assert_eq!(metadata.bitrate, 1000*8/5);
+    assert_eq!(metadata.bitrate, 1000 * 8 / 5);
 }

 #[test]
-fn test_extract_variables_fail_missing_fps()
-{
+fn test_extract_variables_fail_missing_fps() {
     let (args, json) = test_fixture(true, false);
     let metadata = extract_variables(json, &args, || Ok(1000));
     assert!(metadata.is_err());
diff --git a/server/src/video_pipeline/mod.rs
b/server/src/video_pipeline/mod.rs
index 9b3f4030..436ab46c 100644
--- a/server/src/video_pipeline/mod.rs
+++ b/server/src/video_pipeline/mod.rs
@@ -6,21 +6,21 @@

 use std::collections::HashMap;
 use std::io::Read;
+use std::path::{Path, PathBuf};
 use std::str::FromStr;
-use std::sync::Arc;
 use std::sync::atomic::AtomicBool;
+use std::sync::Arc;
 use std::thread;
-use std::path::{PathBuf, Path};

 use crossbeam_channel;
-use crossbeam_channel::{Receiver, unbounded, select};
+use crossbeam_channel::{select, unbounded, Receiver};
 use rust_decimal::prelude::{FromPrimitive, ToPrimitive};
 use rust_decimal::Decimal;
 use tracing;
-use anyhow::{anyhow, Context, bail};
-use sha2::{Sha256, Digest};
+use anyhow::{anyhow, bail, Context};
 use hex;
+use sha2::{Digest, Sha256};

 pub mod incoming_monitor;
 pub mod metadata_reader;
@@ -28,13 +28,13 @@ pub mod metadata_reader;
 mod cleanup_rejected;
 mod script_processor;

-use metadata_reader::MetadataResult;
 use crate::api_server::{UserMessage, UserMessageTopic};
 use crate::database::error::DBError;
+use crate::database::{models, DbBasicQuery, DB};
+use crate::storage::StorageBackend;
 use crate::video_pipeline::metadata_reader::MediaType;
 use cleanup_rejected::clean_up_rejected_file;
-use crate::database::{DB, models, DbBasicQuery};
-use crate::storage::StorageBackend;
+use metadata_reader::MetadataResult;

 #[derive(Debug, Clone)]
 pub enum IngestUsernameFrom {
@@ -49,7 +49,10 @@ impl std::str::FromStr for IngestUsernameFrom {
         match s {
             "file-owner" => Ok(IngestUsernameFrom::FileOwner),
             "folder-name" => Ok(IngestUsernameFrom::FolderName),
-            _ => Err(format!("Invalid value '{}', must be 'file-owner' or 'folder-name'", s)),
+            _ => Err(format!(
+                "Invalid value '{}', must be 'file-owner' or 'folder-name'",
+                s
+            )),
         }
     }
 }
@@ -66,12 +69,11 @@ pub enum TranscodePreference {
     Skip,
 }

-
-#[derive (Clone, Debug)]
+#[derive(Clone, Debug)]
 pub struct IncomingFile {
     pub file_path: PathBuf,
     pub user_id: String,
-    pub cookies: HashMap<String, String>,    // Cookies from client, if this was an HTTP upload
+    pub cookies: HashMap<String, String>, // Cookies from client, if this was an HTTP upload
     pub transcode_preference: TranscodePreference,
 }

@@ -83,13 +85,61 @@ pub struct DetailedMsg {
     pub user_id: String,
 }

+fn send_progress_update(
+    user_msg_tx: &crossbeam_channel::Sender<UserMessage>,
+    user_id: &str,
+    media_file_id: &str,
+    msg: &str,
+    progress: f32,
+) {
+    let _ = user_msg_tx.send(UserMessage {
+        topic: UserMessageTopic::Progress,
+        msg: msg.to_string(),
+        details: None,
+        user_id: Some(user_id.to_string()),
+        media_file_id: Some(media_file_id.to_string()),
+        subtitle_id: None,
+        progress: Some(progress.clamp(0.0, 1.0)),
+    });
+}
+
+fn upload_to_storage_with_progress(
+    storage: &StorageBackend,
+    abs_path: &Path,
+    user_msg_tx: &crossbeam_channel::Sender<UserMessage>,
+    user_id: &str,
+    media_file_id: &str,
+    label: &str,
+) -> anyhow::Result<()> {
+    if !storage.needs_remote_upload() {
+        return storage.upload_local_path(abs_path);
+    }
+
+    let tx = user_msg_tx.clone();
+    let uid = user_id.to_string();
+    let mid = media_file_id.to_string();
+    let label = label.to_string();
+    let callback = Arc::new(move |ratio: f32| {
+        send_progress_update(&tx, &uid, &mid, &label, ratio);
+    });
+
+    // Kick off the bar right away so the client shows it while we stream to S3.
+    callback(0.0);
+    storage.upload_with_progress(abs_path, Some(callback))
+}

 /// Calculate hash identifier (media_file_id) for the submitted files,
 /// based on filename, user_id, size and sample of the file contents.
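+/// Only the first 32 KiB of content is hashed, so the id stays cheap to compute even for large files.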
-fn calc_media_file_id(file_path: &PathBuf, user_id: &str, upload_cookies: HashMap<String, String>) -> anyhow::Result<String> {
+fn calc_media_file_id(
+    file_path: &PathBuf,
+    user_id: &str,
+    upload_cookies: HashMap<String, String>,
+) -> anyhow::Result<String> {
     let mut file_hash = Sha256::new();

-    let fname = file_path.file_name()
-        .ok_or(anyhow!("Bad filename: {:?}", file_path))?.to_str()
+    let fname = file_path
+        .file_name()
+        .ok_or(anyhow!("Bad filename: {:?}", file_path))?
+        .to_str()
         .ok_or(anyhow!("Bad filename encoding {:?}", file_path))?;
     file_hash.update(fname.as_bytes());
@@ -109,7 +159,7 @@ fn calc_media_file_id(file_path: &PathBuf, user_id: &str, upload_cookies: HashMa

     // Read max 32k of contents
     let file = std::fs::File::open(file_path)?;
-    let mut buf = Vec::with_capacity(32*1024);
+    let mut buf = Vec::with_capacity(32 * 1024);
     file.take(32768u64).read_to_end(&mut buf)?;
     file_hash.update(&buf);
@@ -122,26 +172,28 @@ fn calc_media_file_id(file_path: &PathBuf, user_id: &str, upload_cookies: HashMa
 /// Move the file to the appropriate directory, and update the database.
 /// See if the file is a duplicate, and submit it for transcoding if necessary.
 fn ingest_media_file(
-        media_id: &str,
-        md: &metadata_reader::Metadata,
-        data_dir: &Path,
-        media_files_dir: &Path,
-        storage: &StorageBackend,
-        target_bitrate: u32,
-        db: &DB,
-        user_msg_tx: &crossbeam_channel::Sender<UserMessage>,
-        cmpr_tx: &crossbeam_channel::Sender<script_processor::CmprInput>)
-    -> anyhow::Result<bool>
-{
+    media_id: &str,
+    md: &metadata_reader::Metadata,
+    data_dir: &Path,
+    media_files_dir: &Path,
+    storage: &StorageBackend,
+    target_bitrate: u32,
+    db: &DB,
+    user_msg_tx: &crossbeam_channel::Sender<UserMessage>,
+    cmpr_tx: &crossbeam_channel::Sender<script_processor::CmprInput>,
+) -> anyhow::Result<bool> {
     let _span = tracing::info_span!("INGEST_MEDIA",
         media_id = %media_id,
         user=md.user_id,
-        filename=%md.src_file.file_name().unwrap_or_default().to_string_lossy()).entered();
+        filename=%md.src_file.file_name().unwrap_or_default().to_string_lossy())
+    .entered();

     tracing::info!("Ingesting file.");

     let src = PathBuf::from(&md.src_file);
-    if !src.is_file() { bail!("Source file not found: {:?}", src) }
+    if !src.is_file() {
+        bail!("Source file not found: {:?}", src)
+    }

     let dir_for_media_file = media_files_dir.join(&media_id);
     tracing::debug!("Media dir = {:?}", dir_for_media_file);
@@ -154,23 +206,27 @@ fn ingest_media_file(
             let new_owner = &md.user_id;
             if &v.user_id == new_owner {
                 tracing::info!("User already has this media file.");
-                user_msg_tx.send(UserMessage {
-                    topic: UserMessageTopic::Ok,
-                    msg: "Media file already exists".to_string(),
-                    user_id: Some(new_owner.clone()),
-                    media_file_id: None,  // Don't pass media file id here, otherwise the pre-existing media would be deleted!
-                    ..Default::default()
-                }).ok();
-
-                clean_up_rejected_file(&data_dir, &src, Some(media_id.into())).unwrap_or_else(|e| {
-                    tracing::error!(details=?e, "Cleanup failed.");
-                });
+                user_msg_tx
+                    .send(UserMessage {
+                        topic: UserMessageTopic::Ok,
+                        msg: "Media file already exists".to_string(),
+                        user_id: Some(new_owner.clone()),
+                        media_file_id: None, // Don't pass media file id here, otherwise the pre-existing media would be deleted!
+                        ..Default::default()
+                    })
+                    .ok();
+
+                clean_up_rejected_file(&data_dir, &src, Some(media_id.into())).unwrap_or_else(
+                    |e| {
+                        tracing::error!(details=?e, "Cleanup failed.");
+                    },
+                );
                 return Ok(false);
             } else {
                 bail!("Hash collision?!? Media '{media_id}' already owned by another user '{new_owner}'.")
             }
-        },
+        }
         Err(DBError::NotFound()) => {
             // File exists, but not in DB. Remove files and reprocess.
tracing::info!("Dir for '{media_id}' exists, but not in DB. Deleting old dir and reprocessing."); @@ -193,52 +249,101 @@ fn ingest_media_file( tracing::debug!("Moving '{}' to '{}'", src.display(), src_moved.display()); std::fs::rename(&src, &src_moved)?; - if !src_moved.exists() { bail!("Failed to move {:?} file to orig/", src_moved) } - - storage.upload_if_exists(&src_moved); + if !src_moved.exists() { + bail!("Failed to move {:?} file to orig/", src_moved) + } - let orig_filename = src.file_name().ok_or(anyhow!("Bad filename: {:?}", src))?.to_string_lossy().into_owned(); + let orig_filename = src + .file_name() + .ok_or(anyhow!("Bad filename: {:?}", src))? + .to_string_lossy() + .into_owned(); // Add to DB tracing::debug!("Adding media file to DB."); - models::MediaFile::insert(&mut db.conn()?, &models::MediaFileInsert { - id: media_id.to_string(), - user_id: md.user_id.clone(), - media_type: Some(md.media_type.as_ref().into()), - recompression_done: None, - thumbs_done: None, - has_thumbnail: None, - thumb_sheet_cols: None, - thumb_sheet_rows: None, - orig_filename: Some(orig_filename.clone()), - title: Some(orig_filename), - total_frames: Some(md.total_frames as i32), - duration: md.duration.to_f32(), - fps: Some(md.fps.to_string()), - raw_metadata_all: Some(md.metadata_all.clone()), - default_subtitle_id: None, - })?; + models::MediaFile::insert( + &mut db.conn()?, + &models::MediaFileInsert { + id: media_id.to_string(), + user_id: md.user_id.clone(), + media_type: Some(md.media_type.as_ref().into()), + recompression_done: None, + thumbs_done: None, + has_thumbnail: None, + thumb_sheet_cols: None, + thumb_sheet_rows: None, + orig_filename: Some(orig_filename.clone()), + title: Some(orig_filename), + total_frames: Some(md.total_frames as i32), + duration: md.duration.to_f32(), + fps: Some(md.fps.to_string()), + raw_metadata_all: Some(md.metadata_all.clone()), + default_subtitle_id: None, + }, + )?; + upload_to_storage_with_progress( + storage, + &src_moved, + user_msg_tx, + &md.user_id, + media_id, + "Uploading to storage", + )?; // Check if it needs recompressing - fn auto_transcoding_need(md: &metadata_reader::Metadata, target_max_bitrate: u32) -> Option<(String, u32)> { + fn auto_transcoding_need( + md: &metadata_reader::Metadata, + target_max_bitrate: u32, + ) -> Option<(String, u32)> { match md.media_type { - metadata_reader::MediaType::Audio => Some(("client cannot playback audio only".to_string(), target_max_bitrate)), - metadata_reader::MediaType::Image => Some(("client cannot 'playback' still images".to_string(), target_max_bitrate)), + metadata_reader::MediaType::Audio => Some(( + "client cannot playback audio only".to_string(), + target_max_bitrate, + )), + metadata_reader::MediaType::Image => Some(( + "client cannot 'playback' still images".to_string(), + target_max_bitrate, + )), metadata_reader::MediaType::Video => { - let new_bitrate = std::cmp::max(md.bitrate/2, std::cmp::min(md.bitrate, target_max_bitrate)); - let ext = md.src_file.extension().unwrap_or(std::ffi::OsStr::new("")).to_string_lossy().to_lowercase(); + let new_bitrate = std::cmp::max( + md.bitrate / 2, + std::cmp::min(md.bitrate, target_max_bitrate), + ); + let ext = md + .src_file + .extension() + .unwrap_or(std::ffi::OsStr::new("")) + .to_string_lossy() + .to_lowercase(); { - let bitrate_fine = (new_bitrate >= md.bitrate || (md.bitrate as f32) <= 1.2 * (target_max_bitrate as f32)); - let codec_fine = ["h264", "avc", "hevc", "h265"].contains(&md.orig_codec.to_lowercase().as_str()); + let bitrate_fine = 
(new_bitrate >= md.bitrate + || (md.bitrate as f32) <= 1.2 * (target_max_bitrate as f32)); + let codec_fine = ["h264", "avc", "hevc", "h265"] + .contains(&md.orig_codec.to_lowercase().as_str()); let container_fine = ["mp4", "mkv"].contains(&ext.as_str()); - if !container_fine { Some(format!("container '{}' not supported", md.src_file.extension().unwrap_or_default().to_string_lossy())) } - else if !codec_fine { Some(format!("codec '{}' not supported", md.orig_codec)) } - else if !bitrate_fine { Some(format!("bitrate is too high: old {} > new {}", md.bitrate, new_bitrate)) } - else { None } - }.map(|reason| (reason, new_bitrate)) - }, + if !container_fine { + Some(format!( + "container '{}' not supported", + md.src_file + .extension() + .unwrap_or_default() + .to_string_lossy() + )) + } else if !codec_fine { + Some(format!("codec '{}' not supported", md.orig_codec)) + } else if !bitrate_fine { + Some(format!( + "bitrate is too high: old {} > new {}", + md.bitrate, new_bitrate + )) + } else { + None + } + } + .map(|reason| (reason, new_bitrate)) + } } } @@ -251,7 +356,9 @@ fn ingest_media_file( }; let requested_transcode = match md.transcode_preference { - TranscodePreference::Force => Some(("user requested transcoding".to_string(), target_bitrate)), + TranscodePreference::Force => { + Some(("user requested transcoding".to_string(), target_bitrate)) + } TranscodePreference::Skip => None, TranscodePreference::Auto => auto_transcoding_need(md, target_bitrate), }; @@ -259,13 +366,16 @@ fn ingest_media_file( let transcode_req = match requested_transcode { Some((reason, new_bitrate)) => { let video_dst_prefix = format!("transcoded_br{}_{}", new_bitrate, uuid::Uuid::new_v4()); - cmpr_tx.send(script_processor::CmprInput::Transcode { - video_dst_dir: dir_for_media_file.clone(), - video_dst_prefix, - video_bitrate: new_bitrate, - src: src.clone() - }).map(|_| (true, reason)).context("Error sending file to transcoding") - }, + cmpr_tx + .send(script_processor::CmprInput::Transcode { + video_dst_dir: dir_for_media_file.clone(), + video_dst_prefix, + video_bitrate: new_bitrate, + src: src.clone(), + }) + .map(|_| (true, reason)) + .context("Error sending file to transcoding") + } None => { tracing::info!("Media OK already, not transcoding."); Ok((false, "".to_string())) @@ -285,18 +395,20 @@ fn ingest_media_file( media_type: md.media_type.clone(), path: src_moved.clone(), duration: md.duration, - } + }, }) { tracing::error!(details=?e, "Failed to send file to thumbnailing"); if let Err(e) = user_msg_tx.send(UserMessage { - topic: UserMessageTopic::Error, - msg: "Thumbnailing failed.".to_string(), - details: Some(format!("Error sending file to thumbnailing: {}", e)), - user_id: Some(md.user_id.clone()), - media_file_id: Some(media_id.to_string()), - subtitle_id: None, - progress: None - }) { tracing::error!(details=?e, "Failed to send user message") }; + topic: UserMessageTopic::Error, + msg: "Thumbnailing failed.".to_string(), + details: Some(format!("Error sending file to thumbnailing: {}", e)), + user_id: Some(md.user_id.clone()), + media_file_id: Some(media_id.to_string()), + subtitle_id: None, + progress: None, + }) { + tracing::error!(details=?e, "Failed to send user message") + }; }; }; @@ -307,25 +419,36 @@ fn ingest_media_file( user_msg_tx.send(UserMessage { topic: UserMessageTopic::MediaFileAdded, msg: String::new(), - details: Some(serde_json::to_string(&md.upload_cookies).map_err(|e| anyhow!("Error serializing cookies: {}", e))?), + details: Some( + serde_json::to_string(&md.upload_cookies) + 
.map_err(|e| anyhow!("Error serializing cookies: {}", e))?,
+        ),
         user_id: Some(md.user_id.clone()),
         media_file_id: Some(media_id.to_string()),
         subtitle_id: None,
         progress: None,
     })?;
 
     // Tell user in text also
-    tracing::debug!(transcode=do_transcode, reason=reason, "Media added to DB. Transcode");
+    tracing::debug!(
+        transcode = do_transcode,
+        reason = reason,
+        "Media added to DB. Transcode"
+    );
     user_msg_tx.send(UserMessage {
         topic: UserMessageTopic::Ok,
-        msg: "Media added.".to_string() + if do_transcode {" Transcoding..."} else {""},
-        details: if do_transcode { Some(format!("Transcoding because {reason}")) } else { None },
+        msg: "Media added.".to_string() + if do_transcode { " Transcoding..." } else { "" },
+        details: if do_transcode {
+            Some(format!("Transcoding because {reason}"))
+        } else {
+            None
+        },
         user_id: Some(md.user_id.clone()),
         media_file_id: Some(media_id.to_string()),
         subtitle_id: None,
         progress: if do_transcode { Some(0.0) } else { None },
     })?;
     Ok(do_transcode)
-        },
+        }
         Err(e) => {
             tracing::error!(details=?e, "Media added to DB, but failed to send to transcoding.");
             user_msg_tx.send(UserMessage {
@@ -342,9 +465,6 @@ fn ingest_media_file(
     }
 }
 
-
-
-
 pub fn run_forever(
     db: Arc<DB>,
     terminate_flag: Arc<AtomicBool>,
@@ -358,8 +478,8 @@ pub fn run_forever(
     n_workers: usize,
     ingest_username_from: IngestUsernameFrom,
     transcode_script: String,
-    thumbnail_script: String)
-{
+    thumbnail_script: String,
+) {
     tracing::debug!("Starting media file processing pipeline.");
 
     // Create folder for processed media files
@@ -371,14 +491,14 @@ pub fn run_forever(
 
     // Thread for metadata reader
     let (_md_thread, from_md, to_md) = {
-        let (arg_sender, arg_recvr) = unbounded::<IncomingFile>();
-        let (res_sender, res_recvr) = unbounded::<MetadataResult>();
-
-        let th = thread::spawn(move || {
-            metadata_reader::run_forever(arg_recvr, res_sender, 4);
-        });
-        (th, res_recvr, arg_sender)
-    };
+        let (arg_sender, arg_recvr) = unbounded::<IncomingFile>();
+        let (res_sender, res_recvr) = unbounded::<MetadataResult>();
+
+        let th = thread::spawn(move || {
+            metadata_reader::run_forever(arg_recvr, res_sender, 4);
+        });
+        (th, res_recvr, arg_sender)
+    };
 
     // Thread for incoming folder scanner
     let (mon_thread, from_mon, mon_exit) = {
@@ -387,15 +507,18 @@ pub fn run_forever(
         let data_dir = data_dir.clone();
 
         let th = thread::spawn(move || {
-            if let Err(e) = incoming_monitor::run_forever(
-                data_dir.clone(),
-                (data_dir.join("incoming") ).clone(),
-                poll_interval, resubmit_delay,
-                incoming_sender,
-                exit_recvr,
-                ingest_username_from) {
-                    tracing::error!(details=?e, "Error from incoming monitor.");
-                }});
+            if let Err(e) = incoming_monitor::run_forever(
+                data_dir.clone(),
+                (data_dir.join("incoming")).clone(),
+                poll_interval,
+                resubmit_delay,
+                incoming_sender,
+                exit_recvr,
+                ingest_username_from,
+            ) {
+                tracing::error!(details=?e, "Error from incoming monitor.");
+            }
+        });
         (th, incoming_recvr, exit_sender)
     };
 
@@ -406,29 +529,46 @@ pub fn run_forever(
         let transcode_script_clone = transcode_script.clone();
         let thumbnail_script_clone = thumbnail_script.clone();
         thread::spawn(move || {
-            script_processor::run_forever(cmpr_in_rx, cmpr_out_tx, cmpr_prog_tx, n_workers, transcode_script_clone, thumbnail_script_clone);
+            script_processor::run_forever(
+                cmpr_in_rx,
+                cmpr_out_tx,
+                cmpr_prog_tx,
+                n_workers,
+                transcode_script_clone,
+                thumbnail_script_clone,
+            );
         });
 
     // Migration from older version: find a media file that is missing thumbnail sheet
-    fn legacy_thumbnail_next_media_file(db: &DB, videos_dir: &PathBuf, cmpr_in: &mut crossbeam_channel::Sender<script_processor::CmprInput>) -> Option<String> {
-
-        let candidates = db.conn()
+    fn legacy_thumbnail_next_media_file(
+        db: &DB,
+        videos_dir: &PathBuf,
+        cmpr_in: &mut crossbeam_channel::Sender<script_processor::CmprInput>,
+    ) -> Option<String> {
+        let candidates = db
+            .conn()
             .and_then(|mut conn| models::MediaFile::get_all_with_missing_thumbnails(&mut conn))
-            .map_err(|e| { tracing::error!(details=?e, "DB: Failed to get media files without thumbnails."); }).ok()?;
+            .map_err(|e| {
+                tracing::error!(details=?e, "DB: Failed to get media files without thumbnails.");
+            })
+            .ok()?;
 
         if let Some(v) = candidates.first() {
             tracing::info!(id=%v.id, "Found legacy media file that needs thumbnailing.");
 
             let media_file_path = if v.recompression_done.is_some() {
-                Some(videos_dir.join(&v.id).join("video.mp4"))
-            } else {
-                match v.orig_filename {
-                    Some(ref orig_filename) => Some(videos_dir.join(&v.id).join("orig").join(orig_filename)),
-                    None => {
-                        tracing::error!(media_file_id=%v.id, "Legacy thumbnailing failed. Original filename missing and not recompressed.");
-                        None
-                    }}
-            };
+                Some(videos_dir.join(&v.id).join("video.mp4"))
+            } else {
+                match v.orig_filename {
+                    Some(ref orig_filename) => {
+                        Some(videos_dir.join(&v.id).join("orig").join(orig_filename))
+                    }
+                    None => {
+                        tracing::error!(media_file_id=%v.id, "Legacy thumbnailing failed. Original filename missing and not recompressed.");
+                        None
+                    }
+                }
+            };
 
             match media_file_path {
                 Some(file_path) => {
@@ -444,23 +584,24 @@ pub fn run_forever(
                             media_file_id: v.id.clone(),
                             media_type,
                             path: file_path,
-                            duration: Decimal::from_f32(v.duration.unwrap_or(0.0)).unwrap_or_default(),
+                            duration: Decimal::from_f32(v.duration.unwrap_or(0.0))
+                                .unwrap_or_default(),
                         },
                     };
                     cmpr_in.send(req).unwrap_or_else(|e| {
                         tracing::error!(details=?e, "Error sending legacy thumbnailing request to compressor.");
                     });
                     return Some(v.id.clone());
-                },
+                }
                 _ => {
                     tracing::error!(media_file_id=%v.id, "Legacy thumbnailing failed.
User ID or orig filename missing."); - }, + } } } None } - let mut legacy_media_file_now_thumnailing = legacy_thumbnail_next_media_file(&db, &media_files_dir, &mut cmpr_in_tx.clone()); - + let mut legacy_media_file_now_thumnailing = + legacy_thumbnail_next_media_file(&db, &media_files_dir, &mut cmpr_in_tx.clone()); let _span = tracing::info_span!("PIPELINE").entered(); loop { @@ -617,7 +758,17 @@ pub fn run_forever( tracing::error!(details=%e, "Failed to create symlink {:?} -> {:?}", symlink_path, video_dst); return false; } - storage.upload_if_exists(&symlink_path); + if let Err(e) = upload_to_storage_with_progress( + &storage, + &symlink_path, + &utx, + &user_id, + &vid, + "Uploading transcoded video", + ) { + tracing::error!(details=%e, "Failed to upload transcoded file to object storage"); + return false; + } if let Err(e) = db.conn().and_then(|mut conn| models::MediaFile::set_recompressed(&mut conn, &vid)) { tracing::error!(details=%e, "Error marking media file as recompressed in DB"); @@ -742,7 +893,7 @@ pub fn run_forever( drop(mon_exit); terminate_flag.store(true, std::sync::atomic::Ordering::Relaxed); match mon_thread.join() { - Ok(_) => {}, + Ok(_) => {} Err(e) => { tracing::error!("Error waiting for monitor thread to exit: {:?}", e); } diff --git a/server/src/video_pipeline/script_processor.rs b/server/src/video_pipeline/script_processor.rs index a8c0ecab..e42b117a 100644 --- a/server/src/video_pipeline/script_processor.rs +++ b/server/src/video_pipeline/script_processor.rs @@ -1,12 +1,12 @@ -use std::{process::Command, io::BufRead, collections::HashMap}; -use std::path::PathBuf; -use crossbeam_channel::{Sender, Receiver}; -use rust_decimal::Decimal; +use chrono; +use crossbeam_channel::{Receiver, Sender}; use rust_decimal::prelude::ToPrimitive; -use tracing; -use threadpool::ThreadPool; +use rust_decimal::Decimal; use std::fs; -use chrono; +use std::path::PathBuf; +use std::{collections::HashMap, io::BufRead, process::Command}; +use threadpool::ThreadPool; +use tracing; use super::metadata_reader::MediaType; use super::DetailedMsg; @@ -17,7 +17,7 @@ pub type ProgressSender = crossbeam_channel::Sender<(String, String, String, Opt #[derive(Debug, Clone)] pub enum CmprInput { Transcode { - video_dst_dir: PathBuf, // Directory where script should output + video_dst_dir: PathBuf, // Directory where script should output video_dst_prefix: String, // Filename prefix (script decides extension) video_bitrate: u32, src: CmprInputSource, @@ -27,7 +27,7 @@ pub enum CmprInput { thumb_sheet_dims: (u32, u32), thumb_size: (u32, u32), src: CmprInputSource, - } + }, } #[derive(Debug, Clone)] @@ -43,16 +43,20 @@ pub struct CmprInputSource { #[derive(Debug, Clone)] pub enum CmprOutput { TranscodeSuccess { - video_dst: PathBuf, // Final output file path (determined by script) - logs: CmprLogs + video_dst: PathBuf, // Final output file path (determined by script) + logs: CmprLogs, }, ThumbsSuccess { thumb_dir: Option, thumb_sheet_dims: Option<(u32, u32)>, - logs: CmprLogs + logs: CmprLogs, + }, + TranscodeFailure { + logs: CmprLogs, + }, + ThumbsFailure { + logs: CmprLogs, }, - TranscodeFailure { logs: CmprLogs }, - ThumbsFailure { logs: CmprLogs } } #[derive(Debug, Clone)] @@ -64,7 +68,6 @@ pub struct CmprLogs { pub dmsg: DetailedMsg, } - /// Validate and sanitize values passed to scripts via environment variables fn validate_env_value(key: &str, value: &str) -> Result { match key { @@ -75,22 +78,25 @@ fn validate_env_value(key: &str, value: &str) -> Result { } else { Err(format!("Invalid bitrate 
format: {}", value)) } - }, + } "CLAPSHOT_MEDIA_TYPE" => { // Media type should be one of known values match value { "video" | "audio" | "image" => Ok(value.to_string()), - _ => Err(format!("Invalid media type: {}", value)) + _ => Err(format!("Invalid media type: {}", value)), } - }, + } "CLAPSHOT_USER_ID" | "CLAPSHOT_MEDIA_ID" => { // User/media IDs should be alphanumeric + basic chars only - if value.chars().all(|c| c.is_alphanumeric() || c == '_' || c == '-' || c == '.') { + if value + .chars() + .all(|c| c.is_alphanumeric() || c == '_' || c == '-' || c == '.') + { Ok(value.to_string()) } else { Err(format!("Invalid {} format: {}", key, value)) } - }, + } "CLAPSHOT_DURATION" => { // Duration should be numeric (can have decimal point) if value.chars().all(|c| c.is_numeric() || c == '.') { @@ -98,31 +104,51 @@ fn validate_env_value(key: &str, value: &str) -> Result { } else { Err(format!("Invalid duration format: {}", value)) } - }, + } "CLAPSHOT_THUMB_SIZE" | "CLAPSHOT_SHEET_DIMS" => { // Dimensions should be in format "NxN" where N is numeric - if value.matches('x').count() == 1 && - value.split('x').all(|part| part.chars().all(|c| c.is_numeric()) && !part.is_empty()) { + if value.matches('x').count() == 1 + && value + .split('x') + .all(|part| part.chars().all(|c| c.is_numeric()) && !part.is_empty()) + { Ok(value.to_string()) } else { - Err(format!("Invalid dimension format (expected NxN): {}", value)) + Err(format!( + "Invalid dimension format (expected NxN): {}", + value + )) } - }, + } "CLAPSHOT_OUTPUT_PREFIX" => { // Output prefix should be alphanumeric + basic chars only (no path separators) - if value.chars().all(|c| c.is_alphanumeric() || c == '_' || c == '-' || c == '.') { + if value + .chars() + .all(|c| c.is_alphanumeric() || c == '_' || c == '-' || c == '.') + { Ok(value.to_string()) } else { Err(format!("Invalid output prefix format: {}", value)) } - }, + } _ => { // For file paths, allow basic path chars but reject dangerous sequences - if value.contains("..") || value.contains(";") || value.contains("|") || value.contains("&") || - value.contains("`") || value.contains("$") || value.contains("'") || value.contains("\"") || - value.contains("\\") || value.contains("\n") || value.contains("\r") { + if value.contains("..") + || value.contains(";") + || value.contains("|") + || value.contains("&") + || value.contains("`") + || value.contains("$") + || value.contains("'") + || value.contains("\"") + || value.contains("\\") + || value.contains("\n") + || value.contains("\r") + { Err(format!("Potentially unsafe value for {}: {}", key, value)) - } else if !value.chars().all(|c| c.is_alphanumeric() || c == '/' || c == '_' || c == '-' || c == '.' || c == ' ') { + } else if !value.chars().all(|c| { + c.is_alphanumeric() || c == '/' || c == '_' || c == '-' || c == '.' 
|| c == ' ' + }) { Err(format!("Invalid characters in {}: {}", key, value)) } else { Ok(value.to_string()) @@ -133,10 +159,12 @@ fn validate_env_value(key: &str, value: &str) -> Result { /// Create a sanitized symlink in the orig directory for script access fn create_sanitized_symlink(src_path: &PathBuf) -> Result { - let orig_dir = src_path.parent() + let orig_dir = src_path + .parent() .ok_or("Source file has no parent directory")?; - let extension = src_path.extension() + let extension = src_path + .extension() .and_then(|e| e.to_str()) .unwrap_or("bin"); @@ -149,7 +177,10 @@ fn create_sanitized_symlink(src_path: &PathBuf) -> Result { // Check if source file exists if !src_path.exists() { - return Err(format!("Source file does not exist: {}", src_path.display())); + return Err(format!( + "Source file does not exist: {}", + src_path.display() + )); } // Always create a fresh symlink (remove any existing one first) @@ -160,12 +191,15 @@ fn create_sanitized_symlink(src_path: &PathBuf) -> Result { } // Use relative path for symlink target (just the filename) since both files are in the same directory - let src_filename = src_path.file_name() - .ok_or("Source file has no filename")?; + let src_filename = src_path.file_name().ok_or("Source file has no filename")?; if let Err(e) = std::os::unix::fs::symlink(src_filename, &sanitized_path) { - return Err(format!("Failed to create sanitized symlink from {} to {}: {}", - src_filename.to_string_lossy(), sanitized_path.display(), e)); + return Err(format!( + "Failed to create sanitized symlink from {} to {}: {}", + src_filename.to_string_lossy(), + sanitized_path.display(), + e + )); } tracing::debug!(src=?src_path, symlink=?sanitized_path, "Created sanitized symlink"); @@ -173,32 +207,55 @@ fn create_sanitized_symlink(src_path: &PathBuf) -> Result { } /// Set up environment variables for script execution -fn setup_script_environment(src: &CmprInputSource, input_file: &PathBuf, output_dir: &PathBuf, - output_prefix: &str, target_bitrate: u32, progress_pipe: &Option) - -> Result, String> { +fn setup_script_environment( + src: &CmprInputSource, + input_file: &PathBuf, + output_dir: &PathBuf, + output_prefix: &str, + target_bitrate: u32, + progress_pipe: &Option, +) -> Result, String> { let mut env_vars = HashMap::new(); // Validate and set environment variables - env_vars.insert("CLAPSHOT_INPUT_FILE".to_string(), - validate_env_value("CLAPSHOT_INPUT_FILE", &input_file.to_string_lossy())?); - env_vars.insert("CLAPSHOT_OUTPUT_DIR".to_string(), - validate_env_value("CLAPSHOT_OUTPUT_DIR", &output_dir.to_string_lossy())?); - env_vars.insert("CLAPSHOT_OUTPUT_PREFIX".to_string(), - validate_env_value("CLAPSHOT_OUTPUT_PREFIX", output_prefix)?); - env_vars.insert("CLAPSHOT_MEDIA_TYPE".to_string(), - validate_env_value("CLAPSHOT_MEDIA_TYPE", src.media_type.as_ref())?); - env_vars.insert("CLAPSHOT_TARGET_BITRATE".to_string(), - validate_env_value("CLAPSHOT_TARGET_BITRATE", &target_bitrate.to_string())?); - env_vars.insert("CLAPSHOT_USER_ID".to_string(), - validate_env_value("CLAPSHOT_USER_ID", &src.user_id)?); - env_vars.insert("CLAPSHOT_MEDIA_ID".to_string(), - validate_env_value("CLAPSHOT_MEDIA_ID", &src.media_file_id)?); - env_vars.insert("CLAPSHOT_DURATION".to_string(), - validate_env_value("CLAPSHOT_DURATION", &src.duration.to_string())?); + env_vars.insert( + "CLAPSHOT_INPUT_FILE".to_string(), + validate_env_value("CLAPSHOT_INPUT_FILE", &input_file.to_string_lossy())?, + ); + env_vars.insert( + "CLAPSHOT_OUTPUT_DIR".to_string(), + 
validate_env_value("CLAPSHOT_OUTPUT_DIR", &output_dir.to_string_lossy())?, + ); + env_vars.insert( + "CLAPSHOT_OUTPUT_PREFIX".to_string(), + validate_env_value("CLAPSHOT_OUTPUT_PREFIX", output_prefix)?, + ); + env_vars.insert( + "CLAPSHOT_MEDIA_TYPE".to_string(), + validate_env_value("CLAPSHOT_MEDIA_TYPE", src.media_type.as_ref())?, + ); + env_vars.insert( + "CLAPSHOT_TARGET_BITRATE".to_string(), + validate_env_value("CLAPSHOT_TARGET_BITRATE", &target_bitrate.to_string())?, + ); + env_vars.insert( + "CLAPSHOT_USER_ID".to_string(), + validate_env_value("CLAPSHOT_USER_ID", &src.user_id)?, + ); + env_vars.insert( + "CLAPSHOT_MEDIA_ID".to_string(), + validate_env_value("CLAPSHOT_MEDIA_ID", &src.media_file_id)?, + ); + env_vars.insert( + "CLAPSHOT_DURATION".to_string(), + validate_env_value("CLAPSHOT_DURATION", &src.duration.to_string())?, + ); if let Some(pipe) = progress_pipe { - env_vars.insert("CLAPSHOT_PROGRESS_PIPE".to_string(), - validate_env_value("CLAPSHOT_PROGRESS_PIPE", pipe)?); + env_vars.insert( + "CLAPSHOT_PROGRESS_PIPE".to_string(), + validate_env_value("CLAPSHOT_PROGRESS_PIPE", pipe)?, + ); } Ok(env_vars) @@ -206,8 +263,8 @@ fn setup_script_environment(src: &CmprInputSource, input_file: &PathBuf, output_ /// Find the actual output file created by the script fn find_script_output(output_dir: &PathBuf, output_prefix: &str) -> Result { - let entries = fs::read_dir(output_dir) - .map_err(|e| format!("Failed to read output directory: {}", e))?; + let entries = + fs::read_dir(output_dir).map_err(|e| format!("Failed to read output directory: {}", e))?; let mut candidates = Vec::new(); for entry in entries { @@ -230,15 +287,26 @@ fn find_script_output(output_dir: &PathBuf, output_prefix: &str) -> Result Err(format!("No valid output files found with prefix: {}", output_prefix)), + 0 => Err(format!( + "No valid output files found with prefix: {}", + output_prefix + )), 1 => Ok(candidates.into_iter().next().unwrap()), - _ => Err(format!("Multiple valid output files found with prefix {}: {:?}", output_prefix, candidates)) + _ => Err(format!( + "Multiple valid output files found with prefix {}: {:?}", + output_prefix, candidates + )), } } -fn err2cout(msg_txt: &str, err: E, args: &CmprInput, sanitized_symlink: Option<&PathBuf>) -> CmprOutput { +fn err2cout( + msg_txt: &str, + err: E, + args: &CmprInput, + sanitized_symlink: Option<&PathBuf>, +) -> CmprOutput { let details_str = format!("{:?}", err); - tracing::error!(details=&details_str, "err2cout: {}", msg_txt); + tracing::error!(details = &details_str, "err2cout: {}", msg_txt); // Clean up sanitized symlink if provided if let Some(symlink_path) = sanitized_symlink { @@ -260,45 +328,62 @@ fn err2cout(msg_txt: &str, err: E, args: &CmprInput, sanitiz msg: msg_txt.to_string(), details: details_str, src_file: src.path.clone(), - user_id: src.user_id.clone() - } + user_id: src.user_id.clone(), + }, }; match args { - CmprInput::Transcode { .. } => { CmprOutput::TranscodeFailure { logs } }, - CmprInput::Thumbs { .. } => { CmprOutput::ThumbsFailure { logs } } + CmprInput::Transcode { .. } => CmprOutput::TranscodeFailure { logs }, + CmprInput::Thumbs { .. 
} => CmprOutput::ThumbsFailure { logs }, } } /// Run transcoding script and return the output -fn run_transcode_script(src: &CmprInputSource, output_dir: PathBuf, output_prefix: String, - video_bitrate: u32, progress: ProgressSender, script_path: &str) -> CmprOutput { +fn run_transcode_script( + src: &CmprInputSource, + output_dir: PathBuf, + output_prefix: String, + video_bitrate: u32, + progress: ProgressSender, + script_path: &str, +) -> CmprOutput { let _span = tracing::info_span!("run_transcode_script", media_file = %src.media_file_id, user = %src.user_id, - thread = ?std::thread::current().id()).entered(); + thread = ?std::thread::current().id()) + .entered(); // Create sanitized symlink for script access let sanitized_input = match create_sanitized_symlink(&src.path) { Ok(path) => path, - Err(e) => return err2cout("Failed to create sanitized symlink", e, - &CmprInput::Transcode { - video_dst_dir: output_dir, - video_dst_prefix: output_prefix, - video_bitrate, - src: src.clone() - }, None) + Err(e) => { + return err2cout( + "Failed to create sanitized symlink", + e, + &CmprInput::Transcode { + video_dst_dir: output_dir, + video_dst_prefix: output_prefix, + video_bitrate, + src: src.clone(), + }, + None, + ) + } }; // Create transcoded/ subdirectory for script to work in let script_work_dir = output_dir.join("transcoded"); if let Err(e) = fs::create_dir_all(&script_work_dir) { - return err2cout("Failed to create script work directory", e, - &CmprInput::Transcode { - video_dst_dir: output_dir, - video_dst_prefix: output_prefix, - video_bitrate, - src: src.clone() - }, Some(&sanitized_input)); + return err2cout( + "Failed to create script work directory", + e, + &CmprInput::Transcode { + video_dst_dir: output_dir, + video_dst_prefix: output_prefix, + video_bitrate, + src: src.clone(), + }, + Some(&sanitized_input), + ); } // Set up progress pipe in a temporary directory (not user-writable space) @@ -310,10 +395,16 @@ fn run_transcode_script(src: &CmprInputSource, output_dir: PathBuf, output_prefi Some(fname) => unix_named_pipe::create(&fname, None) .map(|_| fname.to_string()) .map_err(|e| e.to_string()) - .map_or_else(|e| { - tracing::warn!(details=e, "Won't track script progress; failed to create pipe file."); - None - }, |f| Some(f)) + .map_or_else( + |e| { + tracing::warn!( + details = e, + "Won't track script progress; failed to create pipe file." + ); + None + }, + |f| Some(f), + ), }; let progress_terminate = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false)); @@ -332,7 +423,8 @@ fn run_transcode_script(src: &CmprInputSource, output_dir: PathBuf, output_prefi Some(pfn) => { std::thread::spawn(move || { let _span = tracing::info_span!("script_progress", - thread = ?std::thread::current().id()).entered(); + thread = ?std::thread::current().id()) + .entered(); let f = match unix_named_pipe::open_read(&pfn) { Ok(f) => f, @@ -357,7 +449,7 @@ fn run_transcode_script(src: &CmprInputSource, output_dir: PathBuf, output_prefi } else { std::thread::sleep(std::time::Duration::from_millis(250)); } - }, + } None => { tracing::debug!("Progress pipe EOF. 
Sleeping..."); std::thread::sleep(std::time::Duration::from_millis(250)); @@ -374,35 +466,43 @@ fn run_transcode_script(src: &CmprInputSource, output_dir: PathBuf, output_prefi "end" => { msg = Some("Transcoding done.".to_string()); done_ratio = Some(1.0); - }, + } "continue" => { msg = Some("Transcoding...".to_string()); // Calculate progress percentage if we have both current time and duration if let Some(time_us) = current_time_us { if total_duration_us > 0 { - let progress_pct = (time_us as f32 / total_duration_us as f32).min(1.0).max(0.0); + let progress_pct = (time_us as f32 + / total_duration_us as f32) + .min(1.0) + .max(0.0); done_ratio = Some(progress_pct); } } - }, + } _ => { msg = Some("Transcoding...".to_string()); } } - }, + } "out_time_us" => { // Parse current position in microseconds if let Ok(time_us) = val.parse::() { current_time_us = Some(time_us); } - }, + } _ => {} // Ignore other keys } } // Send progress message (if any) if let Some(msg) = msg.take() { - if let Err(e) = progress.send((vid.clone(), user_id.clone(), msg, done_ratio.clone())) { + if let Err(e) = progress.send(( + vid.clone(), + user_id.clone(), + msg, + done_ratio.clone(), + )) { tracing::debug!(details=%e, "Failed to send script progress message. Ending progress tracking."); return; } @@ -417,16 +517,28 @@ fn run_transcode_script(src: &CmprInputSource, output_dir: PathBuf, output_prefi }; // Set up environment variables for script - let env_vars = match setup_script_environment(src, &sanitized_input, &script_work_dir, &output_prefix, - video_bitrate, &ppipe_fname) { + let env_vars = match setup_script_environment( + src, + &sanitized_input, + &script_work_dir, + &output_prefix, + video_bitrate, + &ppipe_fname, + ) { Ok(vars) => vars, - Err(e) => return err2cout("Failed to set up script environment", e, - &CmprInput::Transcode { - video_dst_dir: output_dir, - video_dst_prefix: output_prefix, - video_bitrate, - src: src.clone() - }, Some(&sanitized_input)) + Err(e) => { + return err2cout( + "Failed to set up script environment", + e, + &CmprInput::Transcode { + video_dst_dir: output_dir, + video_dst_prefix: output_prefix, + video_bitrate, + src: src.clone(), + }, + Some(&sanitized_input), + ) + } }; // Run the transcoding script @@ -438,7 +550,8 @@ fn run_transcode_script(src: &CmprInputSource, output_dir: PathBuf, output_prefi let script_thread = { std::thread::spawn(move || { let _span = tracing::info_span!("transcode_script", - thread = ?std::thread::current().id()).entered(); + thread = ?std::thread::current().id()) + .entered(); let mut cmd = Command::new(&script_path_owned); @@ -484,10 +597,16 @@ fn run_transcode_script(src: &CmprInputSource, output_dir: PathBuf, output_prefi tracing::error!(file=?log_file, details=%e, "Failed to write transcoding log file"); } - (if res.status.success() {None} else {Some("Script exited with error".to_string())}, + ( + if res.status.success() { + None + } else { + Some("Script exited with error".to_string()) + }, format!("Log written to: {}", log_file.display()), - "".to_string() ) - }, + "".to_string(), + ) + } Err(e) => { tracing::error!(details=%e, "Script exec failed"); @@ -514,7 +633,11 @@ fn run_transcode_script(src: &CmprInputSource, output_dir: PathBuf, output_prefi tracing::error!(file=?log_file, details=%write_err, "Failed to write transcoding error log"); } - (Some(e.to_string()), format!("Log written to: {}", log_file.display()), "".into()) + ( + Some(e.to_string()), + format!("Log written to: {}", log_file.display()), + "".into(), + ) } } }) @@ 
-524,7 +647,11 @@ fn run_transcode_script(src: &CmprInputSource, output_dir: PathBuf, output_prefi tracing::debug!("Waiting for transcoding script to complete..."); let (err_msg, stdout, stderr) = script_thread.join().unwrap_or_else(|e| { tracing::error!(details=?e, "Script thread panicked."); - (Some("Script thread panicked".to_string()), "".into(), format!("{:?}", e)) + ( + Some("Script thread panicked".to_string()), + "".into(), + format!("{:?}", e), + ) }); tracing::debug!("Terminating script progress thread."); @@ -561,9 +688,9 @@ fn run_transcode_script(src: &CmprInputSource, output_dir: PathBuf, output_prefi msg: "Failed to get script output filename".to_string(), details: "Script output has invalid filename".to_string(), src_file: src.path.clone(), - user_id: src.user_id.clone() - } - } + user_id: src.user_id.clone(), + }, + }, }; } }; @@ -591,15 +718,15 @@ fn run_transcode_script(src: &CmprInputSource, output_dir: PathBuf, output_prefi msg: "Failed to move script output to final location".to_string(), details: format!("Error moving file: {}", e), src_file: src.path.clone(), - user_id: src.user_id.clone() - } - } + user_id: src.user_id.clone(), + }, + }, }; } tracing::debug!(from=?script_output_path, to=?final_output_path, "Moved script output to final location"); final_output_path - }, + } Err(e) => { tracing::error!(details=%e, "Script completed but output validation failed"); // Clean up progress pipe if it exists @@ -618,13 +745,13 @@ fn run_transcode_script(src: &CmprInputSource, output_dir: PathBuf, output_prefi msg: "Script output validation failed".to_string(), details: e, src_file: src.path.clone(), - user_id: src.user_id.clone() - } - } + user_id: src.user_id.clone(), + }, + }, }; } }, - Some(_) => PathBuf::new() // Error case, path doesn't matter + Some(_) => PathBuf::new(), // Error case, path doesn't matter }; let logs = CmprLogs { @@ -633,11 +760,16 @@ fn run_transcode_script(src: &CmprInputSource, output_dir: PathBuf, output_prefi stdout, _stderr: stderr, dmsg: DetailedMsg { - msg: if err_msg.is_some() { "Transcoding failed" } else { "Transcoding complete" }.to_string(), + msg: if err_msg.is_some() { + "Transcoding failed" + } else { + "Transcoding complete" + } + .to_string(), details: format!("Error in script: {:?}", err_msg.clone().unwrap_or_default()), src_file: src.path.clone(), - user_id: src.user_id.clone() - } + user_id: src.user_id.clone(), + }, }; // Clean up progress pipe and sanitized symlink @@ -660,40 +792,56 @@ fn run_transcode_script(src: &CmprInputSource, output_dir: PathBuf, output_prefi match err_msg { Some(_) => CmprOutput::TranscodeFailure { logs }, - None => CmprOutput::TranscodeSuccess { video_dst, logs } + None => CmprOutput::TranscodeSuccess { video_dst, logs }, } } /// Run thumbnailing script -fn run_thumbnail_script(thumb_dir: PathBuf, thumb_size: (u32,u32), thumb_sheet_dims: (u32, u32), - src: CmprInputSource, script_path: &str) -> CmprOutput { +fn run_thumbnail_script( + thumb_dir: PathBuf, + thumb_size: (u32, u32), + thumb_sheet_dims: (u32, u32), + src: CmprInputSource, + script_path: &str, +) -> CmprOutput { let _span = tracing::info_span!("run_thumbnail_script", media_file = %src.media_file_id, user = %src.user_id, - thread = ?std::thread::current().id()).entered(); + thread = ?std::thread::current().id()) + .entered(); // Create sanitized symlink for script access let sanitized_input = match create_sanitized_symlink(&src.path) { Ok(path) => path, - Err(e) => return err2cout("Failed to create sanitized symlink", e, - &CmprInput::Thumbs 
{ - thumb_dir: thumb_dir.clone(), - thumb_sheet_dims, - thumb_size, - src: src.clone() - }, None) + Err(e) => { + return err2cout( + "Failed to create sanitized symlink", + e, + &CmprInput::Thumbs { + thumb_dir: thumb_dir.clone(), + thumb_sheet_dims, + thumb_size, + src: src.clone(), + }, + None, + ) + } }; // Create isolated script work directory for thumbnailing let script_work_dir = thumb_dir.join("transcoded"); if let Err(e) = fs::create_dir_all(&script_work_dir) { - return err2cout("Failed to create script work directory", e.to_string(), - &CmprInput::Thumbs { - thumb_dir: thumb_dir.clone(), - thumb_sheet_dims, - thumb_size, - src: src.clone() - }, Some(&sanitized_input)); + return err2cout( + "Failed to create script work directory", + e.to_string(), + &CmprInput::Thumbs { + thumb_dir: thumb_dir.clone(), + thumb_sheet_dims, + thumb_size, + src: src.clone(), + }, + Some(&sanitized_input), + ); } // Set up environment variables for script @@ -701,29 +849,53 @@ fn run_thumbnail_script(thumb_dir: PathBuf, thumb_size: (u32,u32), thumb_sheet_d // Validate and set environment variables if let Err(e) = (|| -> Result<(), String> { - env_vars.insert("CLAPSHOT_INPUT_FILE".to_string(), - validate_env_value("CLAPSHOT_INPUT_FILE", &sanitized_input.to_string_lossy())?); - env_vars.insert("CLAPSHOT_OUTPUT_DIR".to_string(), - validate_env_value("CLAPSHOT_OUTPUT_DIR", &script_work_dir.to_string_lossy())?); - env_vars.insert("CLAPSHOT_MEDIA_TYPE".to_string(), - validate_env_value("CLAPSHOT_MEDIA_TYPE", src.media_type.as_ref())?); - env_vars.insert("CLAPSHOT_USER_ID".to_string(), - validate_env_value("CLAPSHOT_USER_ID", &src.user_id)?); - env_vars.insert("CLAPSHOT_MEDIA_ID".to_string(), - validate_env_value("CLAPSHOT_MEDIA_ID", &src.media_file_id)?); - env_vars.insert("CLAPSHOT_THUMB_SIZE".to_string(), - validate_env_value("CLAPSHOT_THUMB_SIZE", &format!("{}x{}", thumb_size.0, thumb_size.1))?); - env_vars.insert("CLAPSHOT_SHEET_DIMS".to_string(), - validate_env_value("CLAPSHOT_SHEET_DIMS", &format!("{}x{}", thumb_sheet_dims.0, thumb_sheet_dims.1))?); + env_vars.insert( + "CLAPSHOT_INPUT_FILE".to_string(), + validate_env_value("CLAPSHOT_INPUT_FILE", &sanitized_input.to_string_lossy())?, + ); + env_vars.insert( + "CLAPSHOT_OUTPUT_DIR".to_string(), + validate_env_value("CLAPSHOT_OUTPUT_DIR", &script_work_dir.to_string_lossy())?, + ); + env_vars.insert( + "CLAPSHOT_MEDIA_TYPE".to_string(), + validate_env_value("CLAPSHOT_MEDIA_TYPE", src.media_type.as_ref())?, + ); + env_vars.insert( + "CLAPSHOT_USER_ID".to_string(), + validate_env_value("CLAPSHOT_USER_ID", &src.user_id)?, + ); + env_vars.insert( + "CLAPSHOT_MEDIA_ID".to_string(), + validate_env_value("CLAPSHOT_MEDIA_ID", &src.media_file_id)?, + ); + env_vars.insert( + "CLAPSHOT_THUMB_SIZE".to_string(), + validate_env_value( + "CLAPSHOT_THUMB_SIZE", + &format!("{}x{}", thumb_size.0, thumb_size.1), + )?, + ); + env_vars.insert( + "CLAPSHOT_SHEET_DIMS".to_string(), + validate_env_value( + "CLAPSHOT_SHEET_DIMS", + &format!("{}x{}", thumb_sheet_dims.0, thumb_sheet_dims.1), + )?, + ); Ok(()) })() { - return err2cout("Failed to set up script environment", e, - &CmprInput::Thumbs { - thumb_dir: thumb_dir.clone(), - thumb_sheet_dims, - thumb_size, - src: src.clone() - }, Some(&sanitized_input)); + return err2cout( + "Failed to set up script environment", + e, + &CmprInput::Thumbs { + thumb_dir: thumb_dir.clone(), + thumb_sheet_dims, + thumb_size, + src: src.clone(), + }, + Some(&sanitized_input), + ); } // Run the thumbnailing script @@ -735,7 +907,8 @@ fn 
run_thumbnail_script(thumb_dir: PathBuf, thumb_size: (u32,u32), thumb_sheet_d let script_thread = { std::thread::spawn(move || { let _span = tracing::info_span!("thumbnail_script", - thread = ?std::thread::current().id()).entered(); + thread = ?std::thread::current().id()) + .entered(); let mut cmd = Command::new(&script_path_owned); @@ -781,10 +954,16 @@ fn run_thumbnail_script(thumb_dir: PathBuf, thumb_size: (u32,u32), thumb_sheet_d tracing::error!(file=?log_file, details=%e, "Failed to write thumbnailing log file"); } - (if res.status.success() {None} else {Some("Script exited with error".to_string())}, + ( + if res.status.success() { + None + } else { + Some("Script exited with error".to_string()) + }, format!("Log written to: {}", log_file.display()), - "".to_string() ) - }, + "".to_string(), + ) + } Err(e) => { tracing::error!(details=%e, "Script exec failed"); @@ -811,7 +990,11 @@ fn run_thumbnail_script(thumb_dir: PathBuf, thumb_size: (u32,u32), thumb_sheet_d tracing::error!(file=?log_file, details=%write_err, "Failed to write thumbnailing error log"); } - (Some(e.to_string()), format!("Log written to: {}", log_file.display()), "".into()) + ( + Some(e.to_string()), + format!("Log written to: {}", log_file.display()), + "".into(), + ) } } }) @@ -821,7 +1004,11 @@ fn run_thumbnail_script(thumb_dir: PathBuf, thumb_size: (u32,u32), thumb_sheet_d tracing::debug!("Waiting for thumbnailing script to complete..."); let (err_msg, stdout, stderr) = script_thread.join().unwrap_or_else(|e| { tracing::error!(details=?e, "Script thread panicked."); - (Some("Script thread panicked".to_string()), "".into(), format!("{:?}", e)) + ( + Some("Script thread panicked".to_string()), + "".into(), + format!("{:?}", e), + ) }); let logs = CmprLogs { @@ -830,11 +1017,16 @@ fn run_thumbnail_script(thumb_dir: PathBuf, thumb_size: (u32,u32), thumb_sheet_d stdout, _stderr: stderr, dmsg: DetailedMsg { - msg: if err_msg.is_some() { "Thumbnailing failed" } else { "Thumbnailing complete" }.to_string(), + msg: if err_msg.is_some() { + "Thumbnailing failed" + } else { + "Thumbnailing complete" + } + .to_string(), details: format!("Error in script: {:?}", err_msg.clone().unwrap_or_default()), src_file: src.path.clone(), - user_id: src.user_id.clone() - } + user_id: src.user_id.clone(), + }, }; // Move thumbnail files from script work directory to main thumbnail directory @@ -851,7 +1043,9 @@ fn run_thumbnail_script(thumb_dir: PathBuf, thumb_size: (u32,u32), thumb_sheet_d if let Err(e) = fs::rename(&src_path, &dest_path) { tracing::warn!(details=%e, from=?src_path, to=?dest_path, "Failed to move thumbnail file to final location"); // Try copy + remove as fallback - if let Ok(()) = fs::copy(&src_path, &dest_path).and_then(|_| fs::remove_file(&src_path)) { + if let Ok(()) = + fs::copy(&src_path, &dest_path).and_then(|_| fs::remove_file(&src_path)) + { tracing::debug!(from=?src_path, to=?dest_path, "Successfully copied and removed thumbnail file"); } } else { @@ -878,28 +1072,39 @@ fn run_thumbnail_script(thumb_dir: PathBuf, thumb_size: (u32,u32), thumb_sheet_d Some(_) => CmprOutput::ThumbsFailure { logs }, None => { // Check if any thumbnail files were actually created - let has_thumbnails = thumb_dir.exists() && - fs::read_dir(&thumb_dir) - .map(|entries| entries.filter_map(|e| e.ok()).any(|entry| { - let path = entry.path(); - let is_thumb = path.is_file() && path.file_name() - .and_then(|name| name.to_str()) - .map(|s| { - // Only consider actual thumbnail files, not log files - s.ends_with(".webp") || - 
(s.starts_with("thumb") && s.ends_with(".webp")) || - (s.starts_with("sheet-") && s.ends_with(".webp")) - }) - .unwrap_or(false); - is_thumb - })) + let has_thumbnails = thumb_dir.exists() + && fs::read_dir(&thumb_dir) + .map(|entries| { + entries.filter_map(|e| e.ok()).any(|entry| { + let path = entry.path(); + let is_thumb = path.is_file() + && path + .file_name() + .and_then(|name| name.to_str()) + .map(|s| { + // Only consider actual thumbnail files, not log files + s.ends_with(".webp") + || (s.starts_with("thumb") && s.ends_with(".webp")) + || (s.starts_with("sheet-") && s.ends_with(".webp")) + }) + .unwrap_or(false); + is_thumb + }) + }) .unwrap_or(false); - CmprOutput::ThumbsSuccess { - thumb_dir: if has_thumbnails { Some(thumb_dir) } else { None }, - thumb_sheet_dims: if has_thumbnails { Some(thumb_sheet_dims) } else { None }, - logs + thumb_dir: if has_thumbnails { + Some(thumb_dir) + } else { + None + }, + thumb_sheet_dims: if has_thumbnails { + Some(thumb_sheet_dims) + } else { + None + }, + logs, } } } @@ -912,8 +1117,8 @@ pub fn run_forever( progress: ProgressSender, n_workers: usize, transcode_script: String, - thumbnail_script: String) -{ + thumbnail_script: String, +) { let _span = tracing::info_span!("SCRIPT_PROCESSOR").entered(); tracing::debug!(n_workers = n_workers, "Starting script processor."); @@ -926,12 +1131,12 @@ pub fn run_forever( tracing::info!(id=%src.media_file_id, r#type=?src.media_type, user=%src.user_id, file=%(src.path.file_name().unwrap_or_default().to_string_lossy()), "Media file transcode request (script)."); - }, + } CmprInput::Thumbs { src, .. } => { tracing::info!(id=%src.media_file_id, r#type=?src.media_type, user=%src.user_id, file=%(src.path.file_name().unwrap_or_default().to_string_lossy()), "Media file thumbnail request (script)."); - }, + } } tracing::debug!(details=?args, "Spawning script worker thread."); @@ -940,21 +1145,42 @@ pub fn run_forever( let transcode_script_path = transcode_script.clone(); let thumbnail_script_path = thumbnail_script.clone(); - pool.execute(move || { - match args { - CmprInput::Transcode { video_dst_dir, video_dst_prefix, video_bitrate, src } => { - if let Err(e) = outq.send(run_transcode_script(&src, video_dst_dir, video_dst_prefix, video_bitrate, prgr_sender, &transcode_script_path)) { - tracing::error!("Transcode result send failed! Aborting. -- {:?}", e); - } - }, - CmprInput::Thumbs { thumb_dir, thumb_sheet_dims, thumb_size, src } => { - if let Err(e) = outq.send(run_thumbnail_script(thumb_dir, thumb_size, thumb_sheet_dims, src, &thumbnail_script_path)) { - tracing::error!("Thumbnail result send failed! Aborting. -- {:?}", e); - } - }, + pool.execute(move || match args { + CmprInput::Transcode { + video_dst_dir, + video_dst_prefix, + video_bitrate, + src, + } => { + if let Err(e) = outq.send(run_transcode_script( + &src, + video_dst_dir, + video_dst_prefix, + video_bitrate, + prgr_sender, + &transcode_script_path, + )) { + tracing::error!("Transcode result send failed! Aborting. -- {:?}", e); + } + } + CmprInput::Thumbs { + thumb_dir, + thumb_sheet_dims, + thumb_size, + src, + } => { + if let Err(e) = outq.send(run_thumbnail_script( + thumb_dir, + thumb_size, + thumb_sheet_dims, + src, + &thumbnail_script_path, + )) { + tracing::error!("Thumbnail result send failed! Aborting. 
-- {:?}", e); + } } }); - }, + } Err(e) => { tracing::info!(details=%e, "Input queue closed."); break; @@ -963,4 +1189,4 @@ pub fn run_forever( } tracing::debug!("Exiting script processor."); -} \ No newline at end of file +} From aed6077bb66f7f1383cdfc5b56c61a4497c845da Mon Sep 17 00:00:00 2001 From: Mike Solar Date: Sun, 30 Nov 2025 17:01:04 +0800 Subject: [PATCH 04/10] File will appear automatically. --- .../lib/asset_browser/VideoTile.test.ts | 15 ++- client/src/lib/asset_browser/VideoTile.svelte | 5 +- server/src/video_pipeline/mod.rs | 100 +++++++++++++++++- 3 files changed, 117 insertions(+), 3 deletions(-) diff --git a/client/src/__tests__/lib/asset_browser/VideoTile.test.ts b/client/src/__tests__/lib/asset_browser/VideoTile.test.ts index f118173b..06776389 100644 --- a/client/src/__tests__/lib/asset_browser/VideoTile.test.ts +++ b/client/src/__tests__/lib/asset_browser/VideoTile.test.ts @@ -102,6 +102,19 @@ describe('VideoTile', () => { expect(container.querySelector('.flex-grow')).toBeInTheDocument(); }); + it('shows a default icon when no preview or visualization exists', () => { + const mediaFile = createMediaFile({ + id: 'no-preview', + title: 'No Preview Video', + previewData: undefined, + }); + + const { container } = render(VideoTile, { item: mediaFile }); + + const icon = container.querySelector('i.fa-video'); + expect(icon).toBeInTheDocument(); + }); + it('should render with visualization override when no preview data', () => { const mediaFile = createMediaFile({ id: 'test-video-3', @@ -354,4 +367,4 @@ describe('VideoTile', () => { expect(titleElement).toBeInTheDocument(); expect(titleElement.textContent).toBe(mediaFile.title); }); -}); \ No newline at end of file +}); diff --git a/client/src/lib/asset_browser/VideoTile.svelte b/client/src/lib/asset_browser/VideoTile.svelte index 9f204a84..6751e7d4 100644 --- a/client/src/lib/asset_browser/VideoTile.svelte +++ b/client/src/lib/asset_browser/VideoTile.svelte @@ -67,6 +67,10 @@ function fmt_date(d: Date | undefined) {

+                {:else}
+                    <i class="fa fa-video" />
{/if} @@ -119,4 +123,3 @@ function fmt_date(d: Date | undefined) { } - diff --git a/server/src/video_pipeline/mod.rs b/server/src/video_pipeline/mod.rs index 436ab46c..98f3f913 100644 --- a/server/src/video_pipeline/mod.rs +++ b/server/src/video_pipeline/mod.rs @@ -4,7 +4,7 @@ #![allow(unused_parens)] -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; use std::io::Read; use std::path::{Path, PathBuf}; use std::str::FromStr; @@ -128,6 +128,74 @@ fn upload_to_storage_with_progress( storage.upload_with_progress(abs_path, Some(callback)) } +fn cleanup_local_media_dir(videos_dir: &Path, media_id: &str) -> anyhow::Result<()> { + let media_dir = videos_dir.join(media_id); + if !media_dir.exists() { + return Ok(()); + } + + // Remove main video files/symlink in the media root + let video_exts = ["mp4", "mkv", "webm", "mov", "avi"]; + if let Ok(entries) = std::fs::read_dir(&media_dir) { + for entry in entries.flatten() { + let path = entry.path(); + if path.is_file() { + let remove = path + .extension() + .and_then(|e| e.to_str()) + .map(|ext| video_exts.contains(&ext.to_ascii_lowercase().as_str())) + .unwrap_or(false) + || path.file_name().and_then(|n| n.to_str()) == Some("video.mp4"); + + if remove { + if let Err(e) = std::fs::remove_file(&path) { + tracing::warn!(details=%e, file=?path, "Failed to remove local media file after upload"); + } + } + } + } + } + + for sub in ["orig", "thumbs"] { + let dir = media_dir.join(sub); + if dir.exists() { + if let Err(e) = std::fs::remove_dir_all(&dir) { + tracing::warn!(details=%e, dir=?dir, "Failed to remove local media directory after upload"); + } + } + } + + Ok(()) +} + +fn maybe_cleanup_local_media( + storage: &StorageBackend, + videos_dir: &Path, + media_id: &str, + transcode_pending: bool, + db: &DB, +) { + if !storage.needs_remote_upload() { + return; + } + + let ready = db + .conn() + .and_then(|mut conn| models::MediaFile::get(&mut conn, &media_id.to_string())) + .map(|mf| { + let thumbs_done = mf.thumbs_done.is_some(); + let transcode_done = mf.recompression_done.is_some() || !transcode_pending; + thumbs_done && transcode_done + }) + .unwrap_or(false); + + if ready { + if let Err(e) = cleanup_local_media_dir(videos_dir, media_id) { + tracing::warn!(details=%e, media_file_id=%media_id, "Failed to clean up local media after upload"); + } + } +} + /// Calculate hash identifier (media_file_id) for the submitted files, /// based on filename, user_id, size and sample of the file contents. fn calc_media_file_id( @@ -539,6 +607,8 @@ pub fn run_forever( ); }); + let mut transcode_pending: HashSet = HashSet::new(); + // Migration from older version: find a media file that is missing thumbnail sheet fn legacy_thumbnail_next_media_file( db: &DB, @@ -657,6 +727,13 @@ pub fn run_forever( } MetadataResult::Err(e) => (None, Err(e)) }; + if let (Some(vid), Ok(do_transcode)) = (&vid, &ing_res) { + if *do_transcode { + transcode_pending.insert(vid.clone()); + } else { + transcode_pending.remove(vid); + } + } // Relay errors, if any. // No need to send ok message here, variations of it are sent from ingest_media_file(). 
if let Err(e) = ing_res { @@ -730,6 +807,11 @@ pub fn run_forever( let videos_dir = media_files_dir.clone(); let vid = logs.media_file_id.clone(); let storage = storage.clone(); + let cleanup_vid = vid.clone(); + let cleanup_storage = storage.clone(); + let cleanup_videos_dir = videos_dir.clone(); + let cleanup_db = db.clone(); + transcode_pending.remove(&vid); tracing::info!(media_file=%vid, log_info=%logs.stdout, "Transcoding completed"); @@ -798,6 +880,14 @@ pub fn run_forever( subtitle_id: None, progress: Some(1.0) }).unwrap_or_else(|e| { tracing::error!(details=%e, "Error sending user message"); }); + + maybe_cleanup_local_media( + &cleanup_storage, + &cleanup_videos_dir, + &cleanup_vid, + transcode_pending.contains(&cleanup_vid), + &cleanup_db, + ); }, ThumbsSuccess { thumb_dir, thumb_sheet_dims, logs } => @@ -856,6 +946,14 @@ pub fn run_forever( tracing::error!(details=%e, "Error storing thumbs_done in DB"); } } + + maybe_cleanup_local_media( + &storage, + &videos_dir, + &vid, + transcode_pending.contains(&vid), + &db, + ); }, TranscodeFailure { logs, .. } | From 09f95315b74474d82280c27c54dc93eb8bc4d251 Mon Sep 17 00:00:00 2001 From: elonen Date: Sun, 30 Nov 2025 20:17:07 +0200 Subject: [PATCH 05/10] Separate PR #108 formatting from semantic changes for easier review This removes rustfmt formatting noise that was mixed with the S3 feature changes, making the PR diff focus on actual code changes. Changes: - Revert formatting-only changes - Restore Cargo.lock to version control (for reproducible builds) - Pin AWS SDK versions compatible with Rust 1.87 The S3 feature functionality is unchanged - this is purely a diff cleanup to make code review easier (~700 lines vs ~12,000). --- .gitignore | 2 +- server/src/api_server/file_upload.rs | 175 ++-- server/src/api_server/mod.rs | 420 +++------- server/src/api_server/server_state.rs | 172 +--- server/src/api_server/test_utils.rs | 135 +-- server/src/api_server/tests.rs | 115 ++- server/src/api_server/user_session.rs | 158 ++-- server/src/api_server/ws_handers.rs | 769 +++++------------- server/src/database/basic_query.rs | 66 +- server/src/database/custom_ops.rs | 99 +-- server/src/database/db_backup.rs | 55 +- server/src/database/error.rs | 2 +- server/src/database/migration_solver.rs | 362 +++------ server/src/database/mod.rs | 212 ++--- server/src/database/models.rs | 58 +- server/src/database/schema.rs | 1 + server/src/database/tests.rs | 321 ++------ server/src/grpc/caller.rs | 97 +-- server/src/grpc/db_models.rs | 376 +++------ server/src/grpc/grpc_client.rs | 96 +-- server/src/grpc/grpc_impl_helpers.rs | 28 +- server/src/grpc/grpc_server.rs | 371 +++------ server/src/grpc/mod.rs | 83 +- server/src/lib.rs | 323 +++----- server/src/log.rs | 111 +-- server/src/tests/integration_test.rs | 389 ++++----- server/src/video_pipeline/cleanup_rejected.rs | 24 +- server/src/video_pipeline/incoming_monitor.rs | 127 +-- server/src/video_pipeline/metadata_reader.rs | 194 ++--- server/src/video_pipeline/script_processor.rs | 652 +++++---------- 30 files changed, 1858 insertions(+), 4135 deletions(-) diff --git a/.gitignore b/.gitignore index d4c9eb82..8b9de520 100644 --- a/.gitignore +++ b/.gitignore @@ -2,4 +2,4 @@ dist_deb/ .DS_Store .claude .idea -server/Cargo.lock \ No newline at end of file +.local-tmp/ diff --git a/server/src/api_server/file_upload.rs b/server/src/api_server/file_upload.rs index 8dc1ea31..a70f5524 100644 --- a/server/src/api_server/file_upload.rs +++ b/server/src/api_server/file_upload.rs @@ -1,20 +1,21 @@ -use 
futures::stream::TryStreamExt;
 use futures_util::stream::StreamExt;
+use warp::http::HeaderMap;
+use futures::stream::TryStreamExt;
 use mpart_async::server::MultipartStream;
 use std::convert::Infallible;
 use std::path::{Path, PathBuf};
 use std::sync::Arc;
-use warp::http::HeaderMap;
 
-use super::parse_auth_headers;
-use super::server_state::ServerState;
-use super::user_session::{org_authz_with_default, AuthzError, AuthzTopic};
 use crate::video_pipeline::IncomingFile;
 use crate::video_pipeline::TranscodePreference;
+use super::parse_auth_headers;
+use super::server_state::ServerState;
+use super::user_session::{org_authz_with_default, AuthzTopic, AuthzError};
 
 use lib_clapshot_grpc::proto;
 use proto::org::authz_user_action_request as authz_req;
 
+
 /// Warp filter for multipart/form-data file upload
 ///
 /// # Arguments
@@ -30,66 +31,45 @@ pub async fn handle_multipart_upload(
     mime: mime::Mime,
     hdrs: HeaderMap,
     server: ServerState,
-    body: impl warp::Stream<Item = Result<impl warp::Buf, warp::Error>> + Unpin,
-) -> Result<impl warp::Reply, Infallible> {
-    let (user_id, user_name, is_admin, mut cookies, filtered_headers, remote_error) =
-        parse_auth_headers(&hdrs, &server.default_user, &server.org_http_headers_regex);
+    body: impl warp::Stream<Item = Result<impl warp::Buf, warp::Error>> + Unpin)
+    -> Result<impl warp::Reply, Infallible>
+{
+    let (user_id, user_name, is_admin, mut cookies, filtered_headers, remote_error) = parse_auth_headers(&hdrs, &server.default_user, &server.org_http_headers_regex);
 
     // If X-Remote-Error is set, return error response
     if let Some(error_msg) = remote_error {
         return Ok(warp::reply::with_status(
             format!("Authentication Error: {}", error_msg),
-            warp::http::StatusCode::FORBIDDEN,
+            warp::http::StatusCode::FORBIDDEN
         ));
     }
 
     // Check from organizer if user is allowed to upload.
     // Allow by default if organizer is not configured or doesn't care.
     if let Some(uri) = &server.organizer_uri {
-        if server
-            .organizer_has_connected
-            .load(std::sync::atomic::Ordering::Relaxed)
-        {
+        if server.organizer_has_connected.load(std::sync::atomic::Ordering::Relaxed) {
             let organizer = match crate::grpc::grpc_client::connect(uri.clone()).await {
                 Ok(c) => Arc::new(tokio::sync::Mutex::new(c)),
                 Err(e) => {
                     tracing::error!("Failed to connect to organizer: {}", e);
-                    return Ok(warp::reply::with_status(
-                        "Internal error: failed to connect to organizer".into(),
-                        warp::http::StatusCode::INTERNAL_SERVER_ERROR,
-                    ));
+                    return Ok(warp::reply::with_status("Internal error: failed to connect to organizer".into(), warp::http::StatusCode::INTERNAL_SERVER_ERROR));
                 }
             };
 
             let org_session = proto::org::UserSessionData {
                 sid: "".to_string(),
-                user: Some(proto::UserInfo {
-                    id: user_id.clone(),
-                    name: user_name.clone(),
-                }),
+                user: Some(proto::UserInfo { id: user_id.clone(), name: user_name.clone() }),
                 is_admin,
                 cookies: cookies.clone(),
                 http_headers: filtered_headers,
             };
 
-            match org_authz_with_default(
-                &org_session,
-                "upload media file",
-                true,
-                &server,
-                &Some(organizer),
-                true,
-                AuthzTopic::Other(None, authz_req::other_op::Op::UploadMediaFile),
-            )
-            .await
-            {
-                Ok(_) => {}
+            match org_authz_with_default(&org_session, "upload media file", true, &server, &Some(organizer),
+                true, AuthzTopic::Other(None, authz_req::other_op::Op::UploadMediaFile)).await {
+                Ok(_) => {},
                 Err(AuthzError::Denied) => {
-                    return Ok(warp::reply::with_status(
-                        "Permission denied".into(),
-                        warp::http::StatusCode::FORBIDDEN,
-                    ));
-                }
+                    return Ok(warp::reply::with_status("Permission denied".into(), warp::http::StatusCode::FORBIDDEN));
+                },
             }
         }
     }
@@ -114,17 +94,9 @@ pub async fn handle_multipart_upload(
     let boundary = mime.get_param("boundary").map(|v| v.to_string());
mime.get_param("boundary").map(|v| v.to_string()); let boundary = match boundary { Some(b) => b, - None => { - return Ok(warp::reply::with_status( - "Missing boundary".into(), - warp::http::StatusCode::BAD_REQUEST, - )) - } + None => return Ok(warp::reply::with_status("Missing boundary".into(), warp::http::StatusCode::BAD_REQUEST)), }; - let mut stream = MultipartStream::new( - boundary, - body.map_ok(|mut buf| buf.copy_to_bytes(buf.remaining())), - ); + let mut stream = MultipartStream::new(boundary, body.map_ok(|mut buf| buf.copy_to_bytes(buf.remaining()))); let mut uploaded_file: PathBuf = PathBuf::new(); while let Ok(Some(mut field)) = stream.try_next().await { @@ -134,79 +106,55 @@ pub async fn handle_multipart_upload( Err(e) => { let msg = format!("Error getting filename: {}", e); tracing::error!(msg); - return Ok(warp::reply::with_status( - msg, - warp::http::StatusCode::BAD_REQUEST, - )); - } - Ok(filename) => { + return Ok(warp::reply::with_status(msg, warp::http::StatusCode::BAD_REQUEST)); + }, + Ok(filename) => + { let path = Path::new(&filename); if path.file_name() != Some(path.as_os_str()) { - return Ok(warp::reply::with_status( - "Filename must not contain path".into(), - warp::http::StatusCode::BAD_REQUEST, - )); + return Ok(warp::reply::with_status("Filename must not contain path".into(), warp::http::StatusCode::BAD_REQUEST)); } // Make a unique upload dir let uuid = uuid::Uuid::new_v4(); - let new_dir = - async_std::path::PathBuf::from(&upload_dir).join(uuid.to_string()); - let dst = new_dir.join(path.file_name().unwrap()); + let new_dir = async_std::path::PathBuf::from(&upload_dir).join(uuid.to_string()); + let dst = new_dir.join(path.file_name().unwrap()); if dst.exists().await { tracing::error!("Upload dst '{}' already exists, even tough it was prefixed with uuid4. 
Bug??", dst.display()); - return Ok(warp::reply::with_status( - "Internal error: file already exists".into(), - warp::http::StatusCode::INTERNAL_SERVER_ERROR, - )); + return Ok(warp::reply::with_status("Internal error: file already exists".into(), warp::http::StatusCode::INTERNAL_SERVER_ERROR)); } if let Err(e) = async_std::fs::create_dir_all(&new_dir).await { tracing::error!("Failed to create upload dir: {}", e); - return Ok(warp::reply::with_status( - "Internal error: failed to create upload dir".into(), - warp::http::StatusCode::INTERNAL_SERVER_ERROR, - )); + return Ok(warp::reply::with_status("Internal error: failed to create upload dir".into(), warp::http::StatusCode::INTERNAL_SERVER_ERROR)); } // Create the file and stream the data into it match async_std::fs::File::create(&dst).await { Err(e) => { - let msg = - format!("Failed to create file '{}': {}", dst.display(), e); + let msg = format!("Failed to create file '{}': {}", dst.display(), e); tracing::error!(msg); - return Ok(warp::reply::with_status( - msg, - warp::http::StatusCode::INTERNAL_SERVER_ERROR, - )); - } - Ok(mut f) => { + return Ok(warp::reply::with_status(msg, warp::http::StatusCode::INTERNAL_SERVER_ERROR)); + }, + Ok(mut f) => + { // Read and write in parallel - let (buff_tx, mut buff_rx) = - tokio::sync::mpsc::channel::(16); + let (buff_tx, mut buff_rx) = tokio::sync::mpsc::channel::(16); // Read chunks from HTTP let read_all_chunks = async move { while let Some(chunk) = field.next().await { match chunk { - Ok(data) => { - buff_tx.send(data).await.unwrap(); - } - Err(e) => { - return Err(e.to_string()); - } - } - } - Ok(()) // buff_tx dropped + Ok(data) => { buff_tx.send(data).await.unwrap(); }, + Err(e) => { return Err(e.to_string()); } + }}; Ok(()) // buff_tx dropped }; // Write chunks to the file let write_all_chunks = async move { while let Some(data) = buff_rx.recv().await { - futures_util::AsyncWriteExt::write_all(&mut f, &data) - .await + futures_util::AsyncWriteExt::write_all(&mut f, &data).await .map_err(|e| e.to_string())?; - } - Ok(()) + }; Ok(()) }; // Run both tasks in parallel, cleanup on error @@ -215,49 +163,28 @@ pub async fn handle_multipart_upload( tracing::error!("Upload failed: {}", e); // Remove the file & dir, since it's incomplete if let Err(e) = async_std::fs::remove_file(&dst).await { - tracing::warn!( - "Failed to remove incomplete upload file: {}", - e - ); - } else if let Err(e) = async_std::fs::remove_dir(new_dir).await - { - tracing::warn!( - "Failed to remove incomplete upload dir: {}", - e - ); + tracing::warn!("Failed to remove incomplete upload file: {}", e); + } else if let Err(e) = async_std::fs::remove_dir(new_dir).await { + tracing::warn!("Failed to remove incomplete upload dir: {}", e); } - return Ok(warp::reply::with_status( - format!("Upload failed: {e}"), - warp::http::StatusCode::BAD_REQUEST, - )); + return Ok(warp::reply::with_status(format!("Upload failed: {e}"), warp::http::StatusCode::BAD_REQUEST)); } - tracing::info!(dst = dst.display().to_string(), "File uploaded."); + tracing::info!(dst=dst.display().to_string(), "File uploaded."); uploaded_file = dst.into(); } }; } } - } + }, fieldname => { tracing::info!("Skipping UNKNOWN multipart POST field '{fieldname}'"); - } + }, } } - if let Err(e) = upload_done.send(IncomingFile { - file_path: uploaded_file, - user_id, - cookies, - transcode_preference, - }) { + if let Err(e) = upload_done.send(IncomingFile{ file_path: uploaded_file, user_id, cookies, transcode_preference }) { tracing::error!("Failed to send upload ok 
signal: {:?}", e); - return Ok(warp::reply::with_status( - "Internal error: failed to send upload ok signal".into(), - warp::http::StatusCode::INTERNAL_SERVER_ERROR, - )); + return Ok(warp::reply::with_status("Internal error: failed to send upload ok signal".into(), warp::http::StatusCode::INTERNAL_SERVER_ERROR)); } - Ok(warp::reply::with_status( - "Ok".into(), - warp::http::StatusCode::OK, - )) + Ok(warp::reply::with_status("Ok".into(), warp::http::StatusCode::OK)) } diff --git a/server/src/api_server/mod.rs b/server/src/api_server/mod.rs index 486e1675..cd66ef5b 100644 --- a/server/src/api_server/mod.rs +++ b/server/src/api_server/mod.rs @@ -3,23 +3,23 @@ //#![allow(unused_imports)] use async_std::task::block_on; -use core::panic; -use futures_util::stream::StreamExt; -use futures_util::SinkExt; +use lib_clapshot_grpc::GrpcBindAddr; use lib_clapshot_grpc::proto; use lib_clapshot_grpc::proto::org::OnStartUserSessionResponse; -use lib_clapshot_grpc::GrpcBindAddr; -use parking_lot::RwLock; -use regex::Regex; +use tracing::debug; +use warp::Filter; +use core::panic; use std::collections::HashMap; -use std::sync::atomic::Ordering::Relaxed; use std::sync::Arc; use std::time::Duration; use tokio::time::sleep; -use tracing::debug; -use warp::http::HeaderMap; +use parking_lot::RwLock; +use futures_util::stream::StreamExt; +use futures_util::SinkExt; use warp::ws::Message; -use warp::Filter; +use warp::http::HeaderMap; +use regex::Regex; +use std::sync::atomic::Ordering::Relaxed; use anyhow::{anyhow, bail}; @@ -35,23 +35,23 @@ use ws_handers::msg_dispatch; #[cfg(test)] pub mod test_utils; -mod file_upload; #[cfg(test)] pub mod tests; -use self::user_session::UserSession; -use crate::api_server::user_session::org_authz; +mod file_upload; +use file_upload::handle_multipart_upload; use crate::api_server::user_session::AuthzTopic; -use crate::api_server::ws_handers::SessionClose; +use crate::api_server::user_session::org_authz; use crate::client_cmd; -use crate::database::models; use crate::database::DbBasicQuery; +use crate::database::models; use crate::grpc::db_models::proto_msg_type_to_event_name; use crate::grpc::grpc_client::OrganizerConnection; use crate::grpc::grpc_client::OrganizerURI; use crate::grpc::{grpc_server, make_media_file_popup_actions}; +use crate::api_server::ws_handers::SessionClose; use crate::video_pipeline::IncomingFile; use crate::PKG_VERSION; -use file_upload::handle_multipart_upload; +use self::user_session::UserSession; type Res = anyhow::Result; type WsMsgSender = tokio::sync::mpsc::UnboundedSender; @@ -71,7 +71,7 @@ pub enum SendTo<'a> { pub type UserMessageTopic = proto::user_message::Type; /// Message from other server modules to user(s) -#[derive(Clone, Debug, Default)] +#[derive (Clone, Debug, Default)] pub struct UserMessage { pub topic: UserMessageTopic, pub user_id: Option, @@ -92,24 +92,23 @@ fn abbrv(msg: &str) -> String { } } + /// User has connected to our WebSocket endpoint. /// This function will run (potentially forever) for each individual user that connects. 
async fn handle_ws_session( - ws: warp::ws::WebSocket, - sid: String, - user_id: String, - username: String, - is_admin: bool, - cookies: HashMap, - filtered_headers: HashMap, - server: ServerState, -) { + ws: warp::ws::WebSocket, + sid: String, + user_id: String, + username: String, + is_admin: bool, + cookies: HashMap, + filtered_headers: HashMap, + server: ServerState) +{ let (msgq_tx, mut msgq_rx) = tokio::sync::mpsc::unbounded_channel(); - let user = match server - .db - .conn() - .and_then(|mut conn| models::User::get_or_create(&mut conn, &user_id, Some(&username))) + let user = match server.db.conn().and_then(|mut conn| + models::User::get_or_create(&mut conn, &user_id, Some(&username))) { Ok(u) => u, Err(e) => { @@ -138,7 +137,7 @@ async fn handle_ws_session( is_admin, cookies, http_headers: filtered_headers, - }, + } }; let _user_session_guard = Some(server.register_user_session(&sid, &user_id, ses.clone())); @@ -151,31 +150,24 @@ async fn handle_ws_session( is_admin: is_admin, server_version: PKG_VERSION.to_string(), }), - SendTo::MsgSender(&ses.sender), + SendTo::MsgSender(&ses.sender) ) { tracing::error!(details=%e, "Error sending welcome message. Closing session."); return; } - async fn connect_organizer( - uri: OrganizerURI, - ses: &proto::org::UserSessionData, - ) -> Res<(OrganizerConnection, OnStartUserSessionResponse)> { + async fn connect_organizer(uri: OrganizerURI, ses: &proto::org::UserSessionData) -> Res<(OrganizerConnection, OnStartUserSessionResponse)> { let mut c = crate::grpc::grpc_client::connect(uri).await?; - let start_ses_req = proto::org::OnStartUserSessionRequest { - ses: Some(ses.clone()), - }; + let start_ses_req = proto::org::OnStartUserSessionRequest { ses: Some(ses.clone()) }; let res = match c.on_start_user_session(start_ses_req).await { Ok(res) => res.into_inner(), Err(e) => { if e.code() == tonic::Code::Unimplemented { - tracing::debug!( - "Organizer does not implement on_start_user_session. Ignoring.", - ); + tracing::debug!("Organizer does not implement on_start_user_session. Ignoring.",); OnStartUserSessionResponse::default() } else { - return Err(e.into()); + return Err(e.into()) } } }; @@ -184,9 +176,8 @@ async fn handle_ws_session( // Define default actions. Organizer may call DefineActions later to override these. if let Err(e) = server.emit_cmd( - client_cmd!(DefineActions, {actions: make_media_file_popup_actions()}), - SendTo::MsgSender(&ses.sender), - ) { + client_cmd!(DefineActions, {actions: make_media_file_popup_actions()}), + SendTo::MsgSender(&ses.sender)) { tracing::error!(details=%e, "Error sending define_actions to client. Closing session."); return; } @@ -196,42 +187,30 @@ async fn handle_ws_session( match connect_organizer(uri, &ses.org_session).await { Ok((c, _res)) => { ses.organizer = Some(tokio::sync::Mutex::new(c).into()); - let op = AuthzTopic::Other( - None, - proto::org::authz_user_action_request::other_op::Op::Login, - ); - if org_authz(&ses.org_session, "login", true, &server, &ses.organizer, op).await - == Some(false) - { - tracing::info!( - "User '{}' not authorized to login. Closing session.", - ses.user_id - ); - server - .emit_cmd( - client_cmd!(Error, {msg: "Login permission denied.".into()}), - SendTo::MsgSender(&ses.sender), - ) - .ok(); + let op = AuthzTopic::Other(None, proto::org::authz_user_action_request::other_op::Op::Login); + if org_authz(&ses.org_session, "login", true, &server, &ses.organizer, op).await == Some(false) { + tracing::info!("User '{}' not authorized to login. 
Closing session.", ses.user_id); + server.emit_cmd( + client_cmd!(Error, {msg: "Login permission denied.".into()}), + SendTo::MsgSender(&ses.sender)).ok(); return; } - } + }, Err(e) => { const MSG: &str = "Error connecting to Organizer. Closing session."; tracing::error!(details=%e, MSG); - server - .emit_cmd( - client_cmd!(Error, {msg: MSG.into()}), - SendTo::MsgSender(&ses.sender), - ) - .ok(); + server.emit_cmd( + client_cmd!(Error, {msg: MSG.into()}), + SendTo::MsgSender(&ses.sender)).ok(); return; } } }; - loop { - tokio::select! { + loop + { + tokio::select! + { // Termination flag set? Exit. _ = sleep(Duration::from_millis(100)) => { if server.terminate_flag.load(Relaxed) { @@ -360,13 +339,8 @@ async fn handle_ws_session( /// * `Ok(Regex)` - Compiled regex with case-insensitive matching /// * `Err(anyhow::Error)` - If pattern is invalid pub fn validate_org_http_headers_regex(pattern: &str) -> anyhow::Result { - Regex::new(&format!("(?i){}", pattern)).map_err(|e| { - anyhow!( - "Invalid org-http-headers regex pattern '{}': {}", - pattern, - e - ) - }) + Regex::new(&format!("(?i){}", pattern)) + .map_err(|e| anyhow!("Invalid org-http-headers regex pattern '{}': {}", pattern, e)) } /// Extract user id, name and clapshot_cookies from HTTP headers (set by nginx) @@ -382,102 +356,44 @@ pub fn validate_org_http_headers_regex(pattern: &str) -> anyhow::Result { /// * `filter_regex` - Regex to filter headers for organizer (case-insensitive) /// /// * Returns: (user_id: String, user_name: String, is_admin: bool, clapshot_cookies: HashMap, filtered_headers: HashMap, remote_error: Option) -fn parse_auth_headers( - hdrs: &HeaderMap, - default_user_id: &str, - filter_regex: &Regex, -) -> ( - String, - String, - bool, - HashMap, - HashMap, - Option, -) { +fn parse_auth_headers(hdrs: &HeaderMap, default_user_id: &str, filter_regex: &Regex) -> (String, String, bool, HashMap, HashMap, Option) +{ fn try_get_first_named_hdr(hdrs: &HeaderMap, names: T) -> Option - where - T: IntoIterator, - { + where T: IntoIterator { for n in names { if let Some(val) = hdrs.get(n).or(hdrs.get(n.to_lowercase())) { match val.to_str() { Ok(s) => return Some(s.into()), Err(e) => tracing::warn!(details=%e, "Error parsing header '{}'.", n), - } - } - } + }}} None } - let user_id = match try_get_first_named_hdr( - &hdrs, - vec![ - "X-Remote-User-Id", - "X_Remote_User_Id", - "HTTP_X_REMOTE_USER_ID", - ], - ) { + let user_id = match try_get_first_named_hdr(&hdrs, vec!["X-Remote-User-Id", "X_Remote_User_Id", "HTTP_X_REMOTE_USER_ID"]) { Some(id) => id, None => { - tracing::warn!( - "Missing X-Remote-User-Id in HTTP headers. Using '{}' instead.", - default_user_id - ); + tracing::warn!("Missing X-Remote-User-Id in HTTP headers. 
Using '{}' instead.", default_user_id); default_user_id.into() - } - }; - let user_name = try_get_first_named_hdr( - &hdrs, - vec![ - "X-Remote-User-Name", - "X_Remote_User_Name", - "HTTP_X_REMOTE_USER_NAME", - ], - ) - .unwrap_or_else(|| user_id.clone()); - - let cookies_str = try_get_first_named_hdr( - &hdrs, - vec![ - "X-Clapshot-Cookies", - "X_Clapshot_Cookies", - "HTTP_X_CLAPSHOT_COOKIES", - ], - ) - .unwrap_or_else(|| "{}".into()); - - let is_admin: bool = try_get_first_named_hdr( - &hdrs, - vec![ - "X-Remote-User-Is-Admin", - "X_Remote_User_Is_Admin", - "HTTP_X_REMOTE_USER_IS_ADMIN", - ], - ) - .map(|s| s.to_lowercase() == "true" || s == "1") - .unwrap_or(user_id == "admin"); - - let remote_error = try_get_first_named_hdr( - &hdrs, - vec!["X-Remote-Error", "X_Remote_Error", "HTTP_X_REMOTE_ERROR"], - ); + }}; + let user_name = try_get_first_named_hdr(&hdrs, vec!["X-Remote-User-Name", "X_Remote_User_Name", "HTTP_X_REMOTE_USER_NAME"]) + .unwrap_or_else(|| user_id.clone()); + + let cookies_str = try_get_first_named_hdr(&hdrs, vec!["X-Clapshot-Cookies", "X_Clapshot_Cookies", "HTTP_X_CLAPSHOT_COOKIES"]) + .unwrap_or_else(|| "{}".into()); + + let is_admin: bool = try_get_first_named_hdr(&hdrs, vec!["X-Remote-User-Is-Admin", "X_Remote_User_Is_Admin", "HTTP_X_REMOTE_USER_IS_ADMIN"]) + .map(|s| s.to_lowercase() == "true" || s == "1").unwrap_or(user_id == "admin"); + + let remote_error = try_get_first_named_hdr(&hdrs, vec!["X-Remote-Error", "X_Remote_Error", "HTTP_X_REMOTE_ERROR"]); let app_cookies = match cookies_str.parse::() { - Ok(c) => match c.as_object() { - Some(c) => c - .iter() - .map(|(k, v)| { - ( - k.clone(), - v.as_str() - .unwrap_or("") - .to_string(), - ) - }) - .collect(), - None => { - tracing::error!("'clapshot_cookies' was not a JSON dict, ignoring."); - HashMap::new() + Ok(c) => { + match c.as_object() { + Some(c) => c.iter().map(|(k, v)| (k.clone(), v.as_str().unwrap_or("").to_string())).collect(), + None => { + tracing::error!("'clapshot_cookies' was not a JSON dict, ignoring."); + HashMap::new() + } } }, Err(e) => { @@ -499,14 +415,7 @@ fn parse_auth_headers( } } - ( - user_id, - user_name, - is_admin, - app_cookies, - filtered_headers, - remote_error, - ) + (user_id, user_name, is_admin, app_cookies, filtered_headers, remote_error) } /// Handle HTTP requests, read authentication headers and dispatch to WebSocket handler. 
@@ -517,8 +426,8 @@ async fn run_api_server_async( user_msg_rx: crossbeam_channel::Receiver, upload_results_tx: crossbeam_channel::Sender, grpc_server_bind: Option, - port: u16, -) { + port: u16) +{ let session_counter = Arc::new(RwLock::new(0u64)); let server_state_cln1 = server_state.clone(); let server_state_cln2 = server_state.clone(); @@ -535,10 +444,9 @@ async fn run_api_server_async( tracing::info!("Starting gRPC server for org->srv."); let server = server_state.clone(); let b = bind.clone(); - let hdl = - tokio::spawn( - async move { grpc_server::run_org_to_srv_grpc_server(b, server).await }, - ); + let hdl = tokio::spawn(async move { + grpc_server::run_org_to_srv_grpc_server(b, server).await + }); let server = server_state.clone(); let mut wait_time = Duration::from_millis(10); sleep(wait_time).await; @@ -548,37 +456,28 @@ async fn run_api_server_async( tracing::debug!("Waiting for org->srv connection..."); } wait_time = std::cmp::min(wait_time * 2, Duration::from_secs(4)); - if server.terminate_flag.load(Relaxed) { - return; - } + if server.terminate_flag.load(Relaxed) { return; } } if let Some(org_info) = server.organizer_info.lock().await.as_ref() { tracing::info!( org_name = &org_info.name, description = &org_info.description, - version = org_info - .version - .as_ref() - .map(|v| format!("{}.{}.{}", v.major, v.minor, v.patch)), - "org->srv connected, bidirectional gRPC established." - ); + version = org_info.version.as_ref().map(|v| format!("{}.{}.{}", v.major, v.minor, v.patch)), + "org->srv connected, bidirectional gRPC established."); } else { panic!("Organizer connected, but no info received. This is a bug in server code."); } Some(hdl) - } + }, None => None, }; - tracing::info!(port = port, "Starting websocket API."); + tracing::info!(port=port, "Starting websocket API."); - let rt_health = warp::path("api") - .and(warp::path("health")) - .map(|| "I'm alive!"); + let rt_health = warp::path("api").and(warp::path("health")).map(|| "I'm alive!"); let upload_dir = server_state.upload_dir.clone(); - let rt_upload = warp::path("api") - .and(warp::path("upload")) + let rt_upload = warp::path("api").and(warp::path("upload")) .and(warp::post()) .and(warp::any().map(move || upload_dir.clone())) .and(warp::any().map(move || upload_results_tx.clone())) @@ -588,21 +487,17 @@ async fn run_api_server_async( .and(warp::body::stream()) .and_then(handle_multipart_upload); - let rt_videos = warp::path("videos") - .and(warp::fs::dir(server_state_cln1.media_files_dir.clone()).with(warp::log("videos"))); + let rt_videos = warp::path("videos").and( + warp::fs::dir(server_state_cln1.media_files_dir.clone()) + .with(warp::log("videos"))); - let rt_api_ws = warp::path("api") - .and(warp::path("ws")) + let rt_api_ws = warp::path("api").and(warp::path("ws")) .and(warp::header::headers_cloned()) .and(warp::ws()) - .map(move |hdrs: HeaderMap, ws: warp::ws::Ws| { + .map (move|hdrs: HeaderMap, ws: warp::ws::Ws| { + // Get user ID and username (from reverse proxy) - let (user_id, user_name, is_admin, app_cookies, filtered_headers, remote_error) = - parse_auth_headers( - &hdrs, - &server_state.default_user, - &server_state.org_http_headers_regex, - ); + let (user_id, user_name, is_admin, app_cookies, filtered_headers, remote_error) = parse_auth_headers(&hdrs, &server_state.default_user, &server_state.org_http_headers_regex); // Increment session counter let sid = { @@ -617,10 +512,9 @@ async fn run_api_server_async( // Check for X-Remote-Error and send error message if present if let Some(error_msg) = 
remote_error { let err_msg = proto::client::server_to_client_cmd::Error { - msg: format!("Authentication Error: {}", error_msg), + msg: format!("Authentication Error: {}", error_msg) }; - let json_txt = - serde_json::to_string(&err_msg).expect("Error serializing error message"); + let json_txt = serde_json::to_string(&err_msg).expect("Error serializing error message"); if let Err(e) = ws.send(warp::ws::Message::text(json_txt)).await { tracing::error!("Failed to send error message: {}", e); } @@ -633,49 +527,25 @@ async fn run_api_server_async( // Diesel SQLite calls are blocking, so run a thread per user session // even though we're using async/await tokio::task::spawn_blocking(move || { - let _span = - tracing::info_span!("ws_session", sid=%sid, user=%user_id).entered(); - block_on(handle_ws_session( - ws, - sid, - user_id, - user_name, - is_admin, - app_cookies, - filtered_headers, - server_state, - )); - }) - .await - .unwrap_or_else(|e| { - tracing::error!(details=%e, "Error joining handle_ws_session thread."); - }); + let _span = tracing::info_span!("ws_session", sid=%sid, user=%user_id).entered(); + block_on(handle_ws_session(ws, sid, user_id, user_name, is_admin, app_cookies, filtered_headers, server_state)); + }).await.unwrap_or_else(|e| { + tracing::error!(details=%e, "Error joining handle_ws_session thread."); }); }) }); - let routes = rt_health - .or(rt_api_ws) - .or(rt_upload) - .or(rt_videos) + let routes = rt_health.or(rt_api_ws).or(rt_upload).or(rt_videos) .with(warp::log("api_server")); - let mut cors_origins: Vec<&str> = cors_origins - .iter() + + let mut cors_origins: Vec<&str> = cors_origins.iter() .map(|s| s.as_str()) .filter(|s| !s.is_empty()) .collect(); tracing::info!("Allowed CORS origins: {:?}", cors_origins); let cors_methods = ["GET", "POST", "HEAD", "OPTIONS"]; - let cors_headers = [ - "x-file-name", - "x-clapshot-cookies", - "x-clapshot-transcode", - "content-type", - "upgrade", - "sec-websocket-protocol", - "sec-websocket-version", - ]; + let cors_headers = ["x-file-name", "x-clapshot-cookies", "x-clapshot-transcode", "content-type", "upgrade", "sec-websocket-protocol", "sec-websocket-version"]; let routes = if cors_origins.contains(&"*") { tracing::warn!(concat!( @@ -684,32 +554,17 @@ async fn run_api_server_async( "Do NOT use '*' in production! ", "Instead, specify the allowed origin, such as 'https://clapshot.example.com'." )); - routes - .with( - warp::cors() - .allow_methods(cors_methods) - .allow_headers(cors_headers) - .allow_any_origin(), - ) - .boxed() + routes.with(warp::cors().allow_methods(cors_methods).allow_headers(cors_headers) + .allow_any_origin()).boxed() } else { if cors_origins.is_empty() { cors_origins.push(url_base.as_str()); - tracing::info!( - "No CORS origins specified. Using url_base for it: '{}'", - url_base - ); + tracing::info!("No CORS origins specified. 
Using url_base for it: '{}'", url_base); } else { tracing::info!("Using CORS origins: {:?}", cors_origins); } - routes - .with( - warp::cors() - .allow_methods(cors_methods) - .allow_headers(cors_headers) - .allow_origins(cors_origins), - ) - .boxed() + routes.with(warp::cors().allow_methods(cors_methods).allow_headers(cors_headers) + .allow_origins(cors_origins)).boxed() }; debug!("Binding Websocket API to {}:{}", bind_addr, port); @@ -730,13 +585,12 @@ async fn run_api_server_async( while let Ok(m) = user_msg_rx.try_recv() { let topic_str = proto_msg_type_to_event_name(m.topic); - let msg_insert = models::MessageInsert { + let msg_insert = models::MessageInsert { event_name: topic_str.into(), user_id: m.user_id.clone().unwrap_or("".into()), message: m.msg.clone(), details: m.details.clone().unwrap_or("".into()), - seen: false, - comment_id: None, + seen: false, comment_id: None, media_file_id: m.media_file_id.clone(), subtitle_id: m.subtitle_id.clone(), }; @@ -748,12 +602,9 @@ async fn run_api_server_async( if let Some(vid) = m.media_file_id { if let Err(_) = server_state.emit_cmd( client_cmd!(ShowMessages, { msgs: vec![proto_msg.clone()] }), - SendTo::MediaFileId(&vid), + SendTo::MediaFileId(&vid) ) { - tracing::error!( - media_file = vid, - "Failed to send notification to media file watchers." - ); + tracing::error!(media_file=vid, "Failed to send notification to media file watchers."); } }; @@ -763,19 +614,12 @@ async fn run_api_server_async( let mut user_was_online = false; match server_state.emit_cmd( client_cmd!(ShowMessages, { msgs: vec![proto_msg.clone()] }), - SendTo::UserId(&user_id), - ) { - Ok(session_cnt) => user_was_online = session_cnt > 0, - Err(e) => { - tracing::error!(user=user_id, details=%e, "Failed to send user notification.") - } + SendTo::UserId(&user_id)) + { + Ok(session_cnt) => { user_was_online = session_cnt>0 }, + Err(e) => tracing::error!(user=user_id, details=%e, "Failed to send user notification."), } - if !(matches!( - m.topic, - UserMessageTopic::Progress - | UserMessageTopic::MediaFileAdded - | UserMessageTopic::MediaFileUpdated - )) { + if !(matches!(m.topic, UserMessageTopic::Progress | UserMessageTopic::MediaFileAdded | UserMessageTopic::MediaFileUpdated)) { let msg = models::MessageInsert { seen: msg_insert.seen || user_was_online, ..msg_insert @@ -787,7 +631,7 @@ async fn run_api_server_async( } }; } - } + }; server_state.terminate_flag.store(true, Relaxed); }; @@ -801,9 +645,7 @@ async fn run_api_server_async( debug!("Waiting for gRPC server to exit..."); match tokio::try_join!(g) { Ok((Ok(_),)) => tracing::debug!("gRPC server for org->srv exited OK."), - Ok((Err(e),)) => { - tracing::error!(details=%e, "gRPC server for org->srv exited with error.") - } + Ok((Err(e),)) => tracing::error!(details=%e, "gRPC server for org->srv exited with error."), Err(e) => tracing::error!(details=%e, "gRPC server for org->srv panicked."), }; } @@ -811,6 +653,7 @@ async fn run_api_server_async( tracing::debug!("Exiting."); } + #[tokio::main] pub async fn run_forever( user_msg_rx: crossbeam_channel::Receiver, @@ -820,8 +663,8 @@ pub async fn run_forever( url_base: String, cors_origins: Vec, state: ServerState, - port: u16, -) { + port: u16) +{ assert!(!url_base.ends_with('/')); // Should have been stripped by caller let bind_addr = match bind_addr.parse::() { @@ -833,14 +676,5 @@ pub async fn run_forever( }; let _span = tracing::info_span!("API").entered(); - run_api_server_async( - bind_addr, - cors_origins, - state, - user_msg_rx, - upload_res_tx, - 
grpc_server_bind, - port, - ) - .await; + run_api_server_async(bind_addr, cors_origins, state, user_msg_rx, upload_res_tx, grpc_server_bind, port).await; } diff --git a/server/src/api_server/server_state.rs b/server/src/api_server/server_state.rs index 8d641ed3..313a758f 100644 --- a/server/src/api_server/server_state.rs +++ b/server/src/api_server/server_state.rs @@ -1,30 +1,26 @@ use crate::storage::StorageBackend; -use lib_clapshot_grpc::proto::org::OrganizerInfo; -use parking_lot::{ - MappedRwLockReadGuard, MappedRwLockWriteGuard, RwLock, RwLockReadGuard, RwLockWriteGuard, -}; -use regex::Regex; use std::collections::HashMap; use std::path::{Path, PathBuf}; -use std::sync::atomic::AtomicBool; use std::sync::Arc; +use lib_clapshot_grpc::proto::org::OrganizerInfo; +use parking_lot::{RwLock, MappedRwLockReadGuard, MappedRwLockWriteGuard, RwLockReadGuard, RwLockWriteGuard}; +use std::sync::atomic::AtomicBool; +use regex::Regex; -use anyhow::anyhow; use tokio::sync::Mutex; +use anyhow::anyhow; -use base64::{engine::general_purpose as Base64GP, Engine as _}; +use base64::{Engine as _, engine::general_purpose as Base64GP}; use super::user_session::OpaqueGuard; -use super::{ - Res, SendTo, SenderList, SenderListMap, SessionMap, StringToStringMap, UserSession, WsMsgSender, -}; +use super::{WsMsgSender, SenderList, SessionMap, SenderListMap, StringToStringMap, Res, UserSession, SendTo}; use crate::client_cmd; -use crate::database::{models, DbBasicQuery, DB}; +use crate::database::{DB, models, DbBasicQuery}; use crate::grpc::grpc_client::OrganizerURI; use lib_clapshot_grpc::proto; /// Lists of all active connections and other server state vars -#[derive(Clone)] +#[derive (Clone)] pub struct ServerState { pub grpc_srv_listening_flag: Arc, pub terminate_flag: Arc, @@ -45,10 +41,11 @@ pub struct ServerState { pub organizer_uri: Option, pub organizer_has_connected: Arc, - pub organizer_info: Arc>>, + pub organizer_info: Arc>> } impl ServerState { + pub fn new( db: Arc, media_files_dir: &Path, @@ -59,8 +56,8 @@ impl ServerState { grpc_srv_listening_flag: Arc, default_user: String, terminate_flag: Arc, - org_http_headers_regex: Regex, - ) -> ServerState { + org_http_headers_regex: Regex) -> ServerState + { ServerState { db, media_files_dir: media_files_dir.to_path_buf(), @@ -93,10 +90,7 @@ impl ServerState { } } - pub fn get_session_write<'a>( - &'a self, - sid: &str, - ) -> Option> { + pub fn get_session_write<'a>(&'a self, sid: &str) -> Option> { let lock = self.sid_to_session.write(); if lock.contains_key(sid) { Some(RwLockWriteGuard::map(lock, |map| map.get_mut(sid).unwrap())) @@ -107,8 +101,7 @@ impl ServerState { /// Register a new sender (API connection) for a user_id. One user can have multiple connections. /// Returns a guard that will remove the sender when dropped. 
pub fn register_user_session(&self, sid: &str, user_id: &str, ses: UserSession) -> OpaqueGuard { - let guard1 = - self.add_sender_to_maplist(user_id, ses.sender.clone(), &self.user_id_to_senders); + let guard1 = self.add_sender_to_maplist(user_id, ses.sender.clone(), &self.user_id_to_senders); let guard2 = self.add_session_to_sid_map(sid, ses); Arc::new(Mutex::new((guard1, guard2))) } @@ -125,36 +118,22 @@ impl ServerState { } /// Send a client command to websocket of given recipient(s) - pub fn emit_cmd( - &self, - cmd: proto::client::server_to_client_cmd::Cmd, - send_to: SendTo, - ) -> Res { + pub fn emit_cmd(&self, cmd: proto::client::server_to_client_cmd::Cmd, send_to: SendTo) -> Res + { let cmd = proto::client::ServerToClientCmd { cmd: Some(cmd) }; let msg = serde_json::to_value(cmd)?; let msg = warp::ws::Message::text(msg.to_string()); match send_to { - SendTo::UserSession(sid) => self.send_to_user_session(&sid, &msg), - SendTo::Collab(id) => self.send_to_all_collab_users(&Some(id.into()), &msg), - SendTo::UserId(user_id) => self.send_to_all_user_sessions(user_id, &msg), - SendTo::MediaFileId(media_file_id) => { - self.send_to_all_media_file_sessions(media_file_id, &msg) - } - SendTo::MsgSender(sender) => { - sender.send(msg)?; - Ok(1u32) - } + SendTo::UserSession(sid) => { self.send_to_user_session(&sid, &msg) }, + SendTo::Collab(id) => { self.send_to_all_collab_users(&Some(id.into()), &msg) }, + SendTo::UserId(user_id) => { self.send_to_all_user_sessions(user_id, &msg) }, + SendTo::MediaFileId(media_file_id) => { self.send_to_all_media_file_sessions(media_file_id, &msg) }, + SendTo::MsgSender(sender) => { sender.send(msg)?; Ok(1u32) }, } } /// Send a user message to given recipients. - pub fn push_notify_message( - &self, - msg: &models::MessageInsert, - send_to: SendTo, - persist: bool, - progress: Option, - ) -> Res<()> { + pub fn push_notify_message(&self, msg: &models::MessageInsert, send_to: SendTo, persist: bool, progress: Option) -> Res<()> { let mut proto_msg = msg.to_proto3(); proto_msg.progress = progress; @@ -162,14 +141,10 @@ impl ServerState { let send_res = self.emit_cmd(cmd, send_to); if let Ok(sent_count) = send_res { if persist { - models::Message::insert( - &mut self.db.conn()?, - &models::MessageInsert { - seen: msg.seen || sent_count > 0, - ..msg.clone() - }, - ) - .map_err(|e| anyhow!("Failed to persist msg: {}", e))?; + models::Message::insert(&mut self.db.conn()?, &models::MessageInsert { + seen: msg.seen || sent_count > 0, + ..msg.clone() + }).map_err(|e| anyhow!("Failed to persist msg: {}", e))?; } }; send_res.map(|_| ()) @@ -183,26 +158,20 @@ impl ServerState { let map = self.user_id_to_senders.read(); for sender in map.get(user_id).unwrap_or(&vec![]).iter() { sender.send(msg.clone())?; - total_sent += 1; - } + total_sent += 1; }; Ok(total_sent) } /// Send a message to all sessions that are collaboratively viewing a media file. /// Bails out with error if any of the senders fail. /// Returns the number of messages sent. 
- pub fn send_to_all_collab_users( - &self, - collab_id: &Option, - msg: &super::Message, - ) -> Res { + pub fn send_to_all_collab_users(&self, collab_id: &Option, msg: &super::Message) -> Res { let mut total_sent = 0u32; if let Some(collab_id) = collab_id { let map = self.collab_id_to_senders.read(); for sender in map.get(collab_id).unwrap_or(&vec![]).iter() { sender.send(msg.clone())?; - total_sent += 1; - } + total_sent += 1; }; } Ok(total_sent) } @@ -212,14 +181,8 @@ impl ServerState { /// Returns a guard that will remove the sender when dropped. pub fn link_session_to_media_file(&self, session_id: &str, media_file_id: &str) -> Res<()> { let mut map = self.sid_to_session.write(); - let ses = map - .get_mut(session_id) - .ok_or_else(|| anyhow!("Session {} not found", session_id))?; - let grd: OpaqueGuard = self.add_sender_to_maplist( - media_file_id, - ses.sender.clone(), - &self.media_file_id_to_senders, - ); + let ses = map.get_mut(session_id).ok_or_else(|| anyhow!("Session {} not found", session_id))?; + let grd: OpaqueGuard = self.add_sender_to_maplist(media_file_id, ses.sender.clone(), &self.media_file_id_to_senders); ses.media_session_guard = Some(grd); Ok(()) } @@ -233,19 +196,10 @@ impl ServerState { pub fn sender_is_collab_participant(&self, collab_id: &str, sender: &WsMsgSender) -> bool { let senders = self.collab_id_to_senders.read(); - senders - .get(collab_id) - .unwrap_or(&vec![]) - .iter() - .any(|s| s.same_channel(sender)) + senders.get(collab_id).unwrap_or(&vec![]).iter().any(|s| s.same_channel(sender)) } - pub fn link_session_to_collab( - &self, - collab_id: &str, - media_file_id: &str, - sender: WsMsgSender, - ) -> Res { + pub fn link_session_to_collab(&self, collab_id: &str, media_file_id: &str, sender: WsMsgSender) -> Res { // GC collab media file map. (This might not be the optimal way to do this but at least it // will keep it from growing indefinitely.) self.garbage_collect_collab_media_file_map(); @@ -263,69 +217,43 @@ impl ServerState { /// Send a message to all sessions that are viewing a media file. /// Bails out with error if any of the senders fail. /// Returns the number of messages sent. - pub fn send_to_all_media_file_sessions( - &self, - media_file_id: &str, - msg: &super::Message, - ) -> Res { + pub fn send_to_all_media_file_sessions(&self, media_file_id: &str, msg: &super::Message) -> Res { let mut total_sent = 0u32; let map = self.media_file_id_to_senders.read(); for sender in map.get(media_file_id).unwrap_or(&vec![]).iter() { sender.send(msg.clone())?; - total_sent += 1; - } + total_sent += 1; }; Ok(total_sent) } // Common implementations for the above add functions. 
- fn add_sender_to_maplist( - &self, - key: &str, - sender: WsMsgSender, - maplist: &SenderListMap, - ) -> OpaqueGuard { + fn add_sender_to_maplist(&self, key: &str, sender: WsMsgSender, maplist: &SenderListMap) -> OpaqueGuard { let mut list = maplist.write(); let senders = list.entry(key.to_string()).or_insert(Vec::new()); senders.push(sender.clone()); - struct Guard { - maplist: SenderListMap, - sender: WsMsgSender, - key: String, - } + struct Guard { maplist: SenderListMap, sender: WsMsgSender, key: String } impl Drop for Guard { fn drop(&mut self) { let mut list = self.maplist.write(); let senders = list.entry(self.key.to_string()).or_insert(Vec::new()); senders.retain(|s| !self.sender.same_channel(&s)); - if senders.len() == 0 { - list.remove(&self.key); - } + if senders.len() == 0 { list.remove(&self.key); } } } - Arc::new(Mutex::new(Guard { - maplist: maplist.clone(), - sender: sender.clone(), - key: key.to_string(), - })) + Arc::new(Mutex::new(Guard { maplist: maplist.clone(), sender: sender.clone(), key: key.to_string() })) } fn add_session_to_sid_map(&self, sid: &str, ses: UserSession) -> OpaqueGuard { self.sid_to_session.write().insert(sid.into(), ses); - struct Guard { - map: SessionMap, - sid: String, - } + struct Guard { map: SessionMap, sid: String } impl Drop for Guard { fn drop(&mut self) { self.map.write().remove(&self.sid); } } - Arc::new(Mutex::new(Guard { - map: self.sid_to_session.clone(), - sid: sid.to_string(), - })) + Arc::new(Mutex::new(Guard { map: self.sid_to_session.clone(), sid: sid.to_string() })) } /// Reads the drawing data from disk and encodes it into a data URI, updating the comment's drawing field @@ -334,17 +262,10 @@ impl ServerState { if drawing != "" { // If drawing is present, read it from disk and encode it into a data URI. if !drawing.starts_with("data:") { - let path = self - .media_files_dir - .join(&c.media_file_id) - .join("drawings") - .join(&drawing); + let path = self.media_files_dir.join(&c.media_file_id).join("drawings").join(&drawing); if path.exists() { let data = tokio::fs::read(path).await?; - *drawing = format!( - "data:image/webp;base64,{}", - Base64GP::STANDARD_NO_PAD.encode(&data) - ); + *drawing = format!("data:image/webp;base64,{}", Base64GP::STANDARD_NO_PAD.encode(&data)); } else { tracing::warn!("Drawing file not found for comment: {}", c.id); c.comment += " [DRAWING NOT FOUND]"; @@ -352,10 +273,7 @@ impl ServerState { } else { // If drawing is already a data URI, just use it as is. // This shouldn't happen anymore, but it's here just in case. - tracing::warn!( - "Comment '{}' has data URI drawing stored in DB. Should be on disk.", - c.id - ); + tracing::warn!("Comment '{}' has data URI drawing stored in DB. 
Should be on disk.", c.id); } } }; diff --git a/server/src/api_server/test_utils.rs b/server/src/api_server/test_utils.rs index a8555be3..d4307dfe 100644 --- a/server/src/api_server/test_utils.rs +++ b/server/src/api_server/test_utils.rs @@ -2,30 +2,29 @@ //#![allow(unused_variables)] //#![allow(unused_imports)] +use std::sync::Arc; +use std::sync::atomic::AtomicBool; +use std::time::Duration; use futures_util::stream::StreamExt; use futures_util::SinkExt; use lib_clapshot_grpc::proto; use std::path::PathBuf; -use std::sync::atomic::AtomicBool; -use std::sync::Arc; -use std::time::Duration; use tokio_tungstenite::tungstenite::Message; -use crate::api_server::UserMessage; -use crate::database::{models, DB}; -use crate::storage::StorageBackend; use crate::video_pipeline::IncomingFile; +use crate::api_server::{UserMessage}; +use crate::database::{DB, models}; +use crate::storage::StorageBackend; + + #[macro_export] macro_rules! send_server_cmd { ($ws:expr, $cmd_name:ident, $options:expr) => {{ let cmd = proto::client::ClientToServerCmd { - cmd: Some(proto::client::client_to_server_cmd::Cmd::$cmd_name( - $options, - )), + cmd: Some(proto::client::client_to_server_cmd::Cmd::$cmd_name($options)), }; - let json_cmd = - serde_json::to_string(&cmd).expect("Failed to serialize ClientToServerCmd to JSON"); + let json_cmd = serde_json::to_string(&cmd).expect("Failed to serialize ClientToServerCmd to JSON"); crate::api_server::test_utils::write(&mut $ws, &json_cmd).await; }}; } @@ -44,21 +43,16 @@ pub(crate) struct ApiTestState { pub(crate) ws_url: String, } -pub(crate) type WsClient = - tokio_tungstenite::WebSocketStream>; +pub(crate) type WsClient = tokio_tungstenite::WebSocketStream>; pub(crate) async fn read(ws: &mut WsClient) -> Option { - let res = match async_std::future::timeout(std::time::Duration::from_secs_f32(0.25), ws.next()) - .await - { - Ok(Some(m)) => Some(m.expect("Failed to read server message")).map(|m| m.to_string()), - _ => None, + let res = match async_std::future::timeout( + std::time::Duration::from_secs_f32(0.25), ws.next()).await { + Ok(Some(m)) => Some(m.expect("Failed to read server message")).map(|m| m.to_string()), + _ => None, }; let res_str = res.as_ref().map(|s| s.as_str()).unwrap_or(""); - if let Some(Some(res_json)) = res - .as_ref() - .map(|s| serde_json::from_str::(s).ok()) - { + if let Some(Some(res_json)) = res.as_ref().map(|s| serde_json::from_str::(s).ok()) { println!("<--- [Client got]: {:#}", res_json); } else { println!("<--- [Client got]: {:#}", res_str); @@ -71,33 +65,17 @@ pub(crate) async fn expect_msg(ws: &mut WsClient) -> String { } pub(crate) async fn expect_parsed(ws: &mut WsClient) -> T -where - T: serde::de::DeserializeOwned, + where T: serde::de::DeserializeOwned { let msg = expect_msg(ws).await; - serde_json::from_str::(&msg).expect( - format!( - "Failed to parse type '{}' message from JSON", - std::any::type_name::() - ) - .as_str(), - ) + serde_json::from_str::(&msg).expect(format!("Failed to parse type '{}' message from JSON", std::any::type_name::()).as_str()) } pub(crate) async fn try_get_parsed(ws: &mut WsClient) -> Option -where - T: serde::de::DeserializeOwned, + where T: serde::de::DeserializeOwned { if let Some(msg) = read(ws).await { - Some( - serde_json::from_str::(&msg).expect( - format!( - "Failed to parse type '{}' message from JSON", - std::any::type_name::() - ) - .as_str(), - ), - ) + Some(serde_json::from_str::(&msg).expect(format!("Failed to parse type '{}' message from JSON", std::any::type_name::()).as_str())) } else { None 
     }
 }
 
@@ -107,21 +85,15 @@ where
 macro_rules! expect_client_cmd {
     ($ws:expr, $variant:ident) => {{
         println!("Client expecting command '{}'...", stringify!($variant));
-        match crate::api_server::test_utils::expect_parsed::<lib_clapshot_grpc::proto::client::ServerToClientCmd>($ws)
-            .await
-            .cmd
-        {
+        match crate::api_server::test_utils::expect_parsed::<lib_clapshot_grpc::proto::client::ServerToClientCmd>($ws).await.cmd {
             Some(lib_clapshot_grpc::proto::client::server_to_client_cmd::Cmd::$variant(v)) => {
                 println!("...got '{}' ok.", stringify!($variant));
                 println!(". . .");
                 v
-            }
-            _ => panic!(
-                "Client expected command '{}' BUT GOT SOMETHING ELSE.",
-                stringify!($variant)
-            ),
+            },
+            _ => panic!("Client expected command '{}' BUT GOT SOMETHING ELSE.", stringify!($variant)),
         }
-    }};
+    }}
 }
 
 /*
@@ -142,15 +114,12 @@ pub(crate) async fn read_cmd_data(ws: &mut WsClient) -> Option<(serde_json::Valu
 }
 */
 
-pub(crate) async fn wait_for_thumbnails(ws: &mut WsClient) {
+pub (crate) async fn wait_for_thumbnails(ws: &mut WsClient) {
     println!("Waiting for thumbnail generation...");
     let mut thumb_done = false;
     for _ in 0..12 {
-        match crate::api_server::test_utils::try_get_parsed::<proto::client::ServerToClientCmd>(ws)
-            .await
-            .map(|c| c.cmd)
-            .flatten()
-        {
+        match crate::api_server::test_utils::try_get_parsed::<proto::client::ServerToClientCmd>(ws).await
+            .map(|c| c.cmd).flatten() {
             Some(proto::client::server_to_client_cmd::Cmd::ShowMessages(m)) => {
                 if m.msgs[0].r#type == proto::user_message::Type::MediaFileUpdated as i32 {
                     thumb_done = true;
@@ -158,14 +127,14 @@ pub(crate) async fn wait_for_thumbnails(ws: &mut WsClient) {
                 } else {
                     println!(" (... got some other message: {:?})", m.msgs[0]);
                 }
-            }
+            },
             None => {
                 // Wait for file to be processed
                 tokio::time::sleep(Duration::from_secs_f32(0.2)).await;
-            }
+            },
             _ => panic!("Unexpected message while waiting for thumbnail generation"),
         }
-    }
+    };
     if !thumb_done {
         panic!("... thumbnail generation TIMED OUT");
     }
@@ -174,37 +143,28 @@ pub(crate) async fn wait_for_thumbnails(ws: &mut WsClient) {
 }
 
 pub(crate) async fn expect_no_msg(ws: &mut WsClient) {
-    assert!(
-        read(ws).await.is_none(),
-        "Got unexpected message from server"
-    );
+    assert!(read(ws).await.is_none(), "Got unexpected message from server");
 }
 
 pub(crate) async fn write(ws: &mut WsClient, msg: &str) {
     println!("---> [Client sending]: {:#}", msg);
-    ws.send(Message::text(msg))
-        .await
-        .expect("Failed to send WS message");
+    ws.send(Message::text(msg)).await.expect("Failed to send WS message");
 }
 
 pub(crate) async fn connect_client_ws(ws_url: &str, user_id: &str) -> WsClient {
-    use tokio_tungstenite::connect_async;
     use tokio_tungstenite::tungstenite::http;
+    use tokio_tungstenite::connect_async;
 
     let request = http::Request::builder()
         .uri(ws_url)
         .header("Host", "127.0.0.1")
         .header("HTTP_X_REMOTE_USER_ID", user_id)
-        .header(
-            "HTTP_X_REMOTE_USER_NAME",
-            format!("Username for {}", user_id),
-        )
+        .header("HTTP_X_REMOTE_USER_NAME", format!("Username for {}", user_id))
         .header("Connection", "Upgrade")
         .header("Upgrade", "websocket")
         .header("Sec-WebSocket-Version", "13")
         .header("Sec-WebSocket-Key", "1234567890")
-        .body(())
-        .unwrap();
+        .body(()).unwrap();
 
     let (mut ws, _) = connect_async(request).await.unwrap();
 
@@ -272,38 +232,29 @@ macro_rules! 
api_test { /// /// # Returns /// * OpenMediaFile message from the server -pub(crate) async fn open_media_file( - ws: &mut WsClient, - vid: &str, -) -> proto::client::server_to_client_cmd::OpenMediaFile { +pub(crate) async fn open_media_file(ws: &mut WsClient, vid: &str) -> proto::client::server_to_client_cmd::OpenMediaFile +{ println!("--------- TEST: open_media_file '{}'...", vid); use lib_clapshot_grpc::proto::client::client_to_server_cmd as cmd_enum; - send_server_cmd!( - *ws, - OpenMediaFile, - cmd_enum::OpenMediaFile { - media_file_id: vid.into() - } - ); + send_server_cmd!(*ws, OpenMediaFile, cmd_enum::OpenMediaFile{ media_file_id: vid.into() }); let ov = expect_client_cmd!(ws, OpenMediaFile); while let Some(msg) = read(ws).await { - let cmd: proto::client::ServerToClientCmd = - serde_json::from_str(&msg).expect("Failed to parse ServerToClientCmd from JSON"); + let cmd: proto::client::ServerToClientCmd = serde_json::from_str(&msg).expect("Failed to parse ServerToClientCmd from JSON"); match cmd.cmd { // Make sure the comments are for the media file we opened Some(proto::client::server_to_client_cmd::Cmd::AddComments(m)) => { assert!(m.comments.iter().all(|c| c.media_file_id == vid)); - } + }, // Thumbnail generation can take a while, so ignore it if it happens to be in the queue Some(proto::client::server_to_client_cmd::Cmd::ShowMessages(m)) => { assert!(m.msgs.iter().any(|m| m.message.contains("thumbnail"))); - } - None => {} + }, + None => {}, _ => panic!("Unexpected message from server: {}", msg), } - } + }; ov } diff --git a/server/src/api_server/tests.rs b/server/src/api_server/tests.rs index 531887ee..bfcccdb0 100644 --- a/server/src/api_server/tests.rs +++ b/server/src/api_server/tests.rs @@ -1,41 +1,34 @@ #![allow(dead_code)] use crate::storage::StorageBackend; -use lib_clapshot_grpc::proto; +use std::sync::Arc; use std::str::FromStr; use std::sync::atomic::AtomicBool; -use std::sync::atomic::Ordering::Relaxed; -use std::sync::Arc; +use lib_clapshot_grpc::proto; use tracing_test::traced_test; +use std::sync::atomic::Ordering::Relaxed; use reqwest::{multipart, Client}; -use crate::api_server::server_state::ServerState; -use crate::api_server::test_utils::{ - connect_client_ws, expect_msg, expect_no_msg, open_media_file, write, ApiTestState, -}; -use crate::api_server::{ - parse_auth_headers, run_api_server_async, validate_org_http_headers_regex, UserMessage, - UserMessageTopic, -}; +use crate::database::DbBasicQuery; use crate::database::error::DBError; +use crate::api_server::{parse_auth_headers, run_api_server_async, validate_org_http_headers_regex, UserMessage, UserMessageTopic}; +use crate::api_server::server_state::ServerState; +use crate::api_server::test_utils::{ApiTestState, expect_msg, expect_no_msg, write, open_media_file, connect_client_ws}; use crate::database::models::{self}; use crate::database::tests::make_test_db; -use crate::database::DbBasicQuery; use crate::grpc::db_models::proto_msg_type_to_event_name; use warp::http::{HeaderMap, HeaderValue}; -use lib_clapshot_grpc::proto::client::client_to_server_cmd::{ - AddComment, DelComment, DelMediaFile, EditComment, ListMyMessages, OpenMediaFile, - OpenNavigationPage, RenameMediaFile, -}; +use lib_clapshot_grpc::proto::client::client_to_server_cmd::{AddComment, DelComment, DelMediaFile, EditComment, ListMyMessages, OpenNavigationPage, OpenMediaFile, RenameMediaFile}; use std::convert::TryFrom; // --------------------------------------------------------------------------------------------- #[traced_test] -async fn 
test_echo() { +async fn test_echo() +{ api_test! {[ws, _ts] write(&mut ws, r#"{"cmd":"echo","data":"hello"}"#).await; assert_eq!(expect_msg(&mut ws).await, "Echo: hello"); @@ -44,7 +37,8 @@ async fn test_echo() { #[tokio::test] #[traced_test] -async fn test_api_push_msg() { +async fn test_api_push_msg() +{ api_test! {[ws, ts] let mut umsg = UserMessage { msg: "test_msg".into(), @@ -67,9 +61,11 @@ async fn test_api_push_msg() { } } + #[tokio::test] #[traced_test] -async fn test_api_navigation_page() { +async fn test_api_navigation_page() +{ api_test! {[ws, ts] send_server_cmd!(ws, OpenNavigationPage, OpenNavigationPage{..Default::default()}); let sp = expect_client_cmd!(&mut ws, ShowPage); @@ -82,9 +78,11 @@ async fn test_api_navigation_page() { } } + #[tokio::test] #[traced_test] -async fn test_api_del_video() { +async fn test_api_del_video() +{ api_test! {[ws, ts] let conn = &mut ts.db.conn().unwrap(); @@ -126,9 +124,11 @@ async fn test_api_del_video() { } } + #[tokio::test] #[traced_test] -async fn test_api_open_media_file() { +async fn test_api_open_media_file() +{ api_test! {[ws, ts] for vid in &ts.media_files { let v = open_media_file(&mut ws, &vid.id).await.media_file.unwrap(); @@ -152,17 +152,16 @@ async fn test_api_open_media_file() { #[tokio::test] #[traced_test] -async fn test_api_open_bad_media_file() { +async fn test_api_open_bad_media_file() +{ api_test! {[ws, ts] - send_server_cmd!(ws, OpenMediaFile, OpenMediaFile{media_file_id: "non-existent".into()}); - expect_user_msg(&mut ws, proto::user_message::Type::Error).await; - } + send_server_cmd!(ws, OpenMediaFile, OpenMediaFile{media_file_id: "non-existent".into()}); + expect_user_msg(&mut ws, proto::user_message::Type::Error).await; + } } -pub async fn expect_user_msg( - ws: &mut crate::api_server::test_utils::WsClient, - evt_type: proto::user_message::Type, -) -> proto::UserMessage { +pub async fn expect_user_msg(ws: &mut crate::api_server::test_utils::WsClient, evt_type: proto::user_message::Type ) -> proto::UserMessage +{ println!(" --expect_user_msg of type {:?} ....", evt_type); let cmd = expect_client_cmd!(ws, ShowMessages); assert_eq!(cmd.msgs.len(), 1); @@ -172,7 +171,8 @@ pub async fn expect_user_msg( #[tokio::test] #[traced_test] -async fn test_api_rename_media_file() { +async fn test_api_rename_media_file() +{ api_test! {[ws, ts] let media_file = &ts.media_files[0]; open_media_file(&mut ws, &media_file.id).await; @@ -196,9 +196,12 @@ async fn test_api_rename_media_file() { } } + + #[tokio::test] #[traced_test] -async fn test_api_add_plain_comment() { +async fn test_api_add_plain_comment() +{ api_test! {[ws, ts] let media = &ts.media_files[0]; send_server_cmd!(ws, AddComment, AddComment{media_file_id: media.id.clone(), comment: "Test comment".into(), ..Default::default()}); @@ -231,9 +234,11 @@ async fn test_api_add_plain_comment() { } } + #[tokio::test] #[traced_test] -async fn test_api_comment_other_users_video() { +async fn test_api_comment_other_users_video() +{ api_test! {[ws, ts] let other_users_vid = &ts.media_files[1]; assert_ne!(other_users_vid.user_id, ts.media_files[0].user_id); @@ -250,7 +255,8 @@ async fn test_api_comment_other_users_video() { #[tokio::test] #[traced_test] -async fn test_api_edit_comment() { +async fn test_api_edit_comment() +{ api_test! 
{[ws, ts] let media = &ts.media_files[0]; let com = &ts.comments[0]; @@ -287,9 +293,11 @@ async fn test_api_edit_comment() { } } + #[tokio::test] #[traced_test] -async fn test_api_del_comment() { +async fn test_api_del_comment() +{ // Summary of comment thread used in this test: // // media_file[0]: @@ -332,9 +340,11 @@ async fn test_api_del_comment() { } } + #[tokio::test] #[traced_test] -async fn test_api_list_my_messages() { +async fn test_api_list_my_messages() +{ api_test! {[ws, ts] send_server_cmd!(ws, ListMyMessages, ListMyMessages{}); @@ -368,9 +378,11 @@ async fn test_api_list_my_messages() { } } + #[tokio::test] #[traced_test] -async fn test_multipart_upload() { +async fn test_multipart_upload() +{ api_test! {[_ws, ts] // Upload file let file_body = "Testfile 1234"; @@ -394,6 +406,7 @@ async fn test_multipart_upload() { } } + #[test] fn test_validate_org_http_headers_regex() { // Test valid patterns @@ -415,10 +428,7 @@ fn test_header_filtering() { headers.insert("X-Remote-User-Id", HeaderValue::from_static("testuser")); headers.insert("X-Remote-User-Name", HeaderValue::from_static("Test User")); headers.insert("X-Remote-User-Can-Upload", HeaderValue::from_static("true")); - headers.insert( - "X_REMOTE_USER_GROUPS", - HeaderValue::from_static("admins,users"), - ); + headers.insert("X_REMOTE_USER_GROUPS", HeaderValue::from_static("admins,users")); headers.insert("Authorization", HeaderValue::from_static("Bearer token123")); headers.insert("Content-Type", HeaderValue::from_static("application/json")); @@ -432,22 +442,10 @@ fn test_header_filtering() { // Verify header filtering (HeaderMap converts names to lowercase) assert_eq!(filtered_headers.len(), 4); // 4 X-Remote headers - assert_eq!( - filtered_headers.get("x-remote-user-id"), - Some(&"testuser".to_string()) - ); - assert_eq!( - filtered_headers.get("x-remote-user-name"), - Some(&"Test User".to_string()) - ); - assert_eq!( - filtered_headers.get("x-remote-user-can-upload"), - Some(&"true".to_string()) - ); - assert_eq!( - filtered_headers.get("x_remote_user_groups"), - Some(&"admins,users".to_string()) - ); + assert_eq!(filtered_headers.get("x-remote-user-id"), Some(&"testuser".to_string())); + assert_eq!(filtered_headers.get("x-remote-user-name"), Some(&"Test User".to_string())); + assert_eq!(filtered_headers.get("x-remote-user-can-upload"), Some(&"true".to_string())); + assert_eq!(filtered_headers.get("x_remote_user_groups"), Some(&"admins,users".to_string())); // Verify non-matching headers are filtered out (also lowercase) assert!(!filtered_headers.contains_key("authorization")); @@ -477,10 +475,7 @@ fn test_remote_error_header() { // Test with X-Remote-Error header let mut headers = HeaderMap::new(); - headers.insert( - "X-Remote-Error", - HeaderValue::from_static("Access denied by IDP"), - ); + headers.insert("X-Remote-Error", HeaderValue::from_static("Access denied by IDP")); headers.insert("X-Remote-User-Id", HeaderValue::from_static("testuser")); let (user_id, _user_name, _is_admin, _cookies, filtered_headers, remote_error) = diff --git a/server/src/api_server/user_session.rs b/server/src/api_server/user_session.rs index dfe808c1..a90c3615 100644 --- a/server/src/api_server/user_session.rs +++ b/server/src/api_server/user_session.rs @@ -1,11 +1,7 @@ -use crate::{ - client_cmd, - database::models::{self, Comment, MediaFile}, - grpc::grpc_client::OrganizerConnection, -}; use std::sync::Arc; +use crate::{database::models::{self, MediaFile, Comment}, grpc::grpc_client::OrganizerConnection, client_cmd}; -use 
super::{server_state::ServerState, SendTo, WsMsgSender}; +use super::{WsMsgSender, server_state::ServerState, SendTo}; use lib_clapshot_grpc::proto; use tracing::{debug, error}; @@ -14,7 +10,7 @@ type Res = anyhow::Result; pub enum Topic<'a> { MediaFile(&'a str), Comment(i32), - None, + None } #[macro_export] @@ -59,31 +55,23 @@ macro_rules! send_user_ok( ($user_id:expr, $server:expr, $topic:expr, $msg:expr) => { send_user_ok!($user_id, $server, $topic, $msg, String::new(), false); }; ); -#[derive(Debug, Clone)] +#[derive (Debug, Clone)] pub enum AuthzTopic<'a> { - MediaFile( - &'a MediaFile, - proto::org::authz_user_action_request::media_file_op::Op, - ), - Comment( - &'a Comment, - proto::org::authz_user_action_request::comment_op::Op, - ), - Other( - Option<&'a str>, - proto::org::authz_user_action_request::other_op::Op, - ), + MediaFile(&'a MediaFile, proto::org::authz_user_action_request::media_file_op::Op), + Comment(&'a Comment, proto::org::authz_user_action_request::comment_op::Op), + Other(Option<&'a str>, proto::org::authz_user_action_request::other_op::Op) } -#[derive(thiserror::Error, Debug)] +#[derive (thiserror::Error, Debug)] pub enum AuthzError { #[error("Permission denied")] Denied, } + pub type OpaqueGuard = Arc>; -#[derive(Clone)] +#[derive (Clone)] pub struct UserSession { pub sid: String, pub sender: WsMsgSender, @@ -101,25 +89,16 @@ pub struct UserSession { } impl UserSession { - pub async fn emit_new_comment( - &self, - server: &ServerState, - mut c: models::Comment, - send_to: SendTo<'_>, - ) -> Res<()> { + + pub async fn emit_new_comment(&self, server: &ServerState, mut c: models::Comment, send_to: SendTo<'_>) -> Res<()> { server.fetch_drawing_data_into_comment(&mut c).await?; let cmd = client_cmd!(AddComments, {comments: vec![c.to_proto3()]}); server.emit_cmd(cmd, send_to).map(|_| ()) } } -fn try_send_error<'a>( - user_id: &str, - server: &ServerState, - msg: String, - details: Option, - op: &AuthzTopic<'a>, -) -> anyhow::Result<()> { + +fn try_send_error<'a>(user_id: &str, server: &ServerState, msg: String, details: Option, op: &AuthzTopic<'a>) -> anyhow::Result<()> { let topic = match op { AuthzTopic::MediaFile(v, _op) => Topic::MediaFile(&v.id), AuthzTopic::Comment(c, _op) => Topic::Comment(c.id), @@ -133,6 +112,8 @@ fn try_send_error<'a>( Ok(()) } + + /// Check from Organizer if the user is allowed to perform given action. 
///
/// Some(true) = allowed
@@ -149,7 +130,8 @@ pub async fn org_authz<'a>(
     server: &ServerState,
     organizer: &Option<Arc<Mutex<OrganizerConnection>>>,
     op: AuthzTopic<'a>,
-) -> Option<bool> {
+) -> Option<bool>
+{
     let user_id = match &session.user {
         Some(ui) => ui.id.clone(),
         None => {
@@ -160,77 +142,60 @@ pub async fn org_authz<'a>(
 
     let org = match &organizer {
         Some(org) => org,
-        None => {
-            return None;
-        }
+        None => { return None; }
     };
 
     tracing::debug!(op=?op, user=user_id, desc, "Checking authz from Organizer");
 
     use proto::org::authz_user_action_request as authz_op;
     let pop = match op {
-        AuthzTopic::MediaFile(v, op) => authz_op::Op::MediaFileOp(authz_op::MediaFileOp {
-            op: op.into(),
-            media_file: Some(v.to_proto3(&server.media_base_url, vec![])),
-        }), // omit subtitles for authz check
-        AuthzTopic::Comment(c, op) => authz_op::Op::CommentOp(authz_op::CommentOp {
-            op: op.into(),
-            comment: Some(c.to_proto3()),
-        }),
-        AuthzTopic::Other(subj, op) => authz_op::Op::OtherOp(authz_op::OtherOp {
-            op: op.into(),
-            subject: subj.map(|s| s.into()),
-        }),
-    };
-    let req = proto::org::AuthzUserActionRequest {
-        ses: Some(session.clone()),
-        op: Some(pop),
+        AuthzTopic::MediaFile(v, op) => authz_op::Op::MediaFileOp(
+            authz_op::MediaFileOp {
+                op: op.into(),
+                media_file: Some(v.to_proto3(&server.media_base_url, vec![])) }), // omit subtitles for authz check
+        AuthzTopic::Comment(c, op) => authz_op::Op::CommentOp(
+            authz_op::CommentOp {
+                op: op.into(),
+                comment: Some(c.to_proto3()) }),
+        AuthzTopic::Other(subj, op) => authz_op::Op::OtherOp(
+            authz_op::OtherOp {
+                op: op.into(),
+                subject: subj.map(|s| s.into()) }), };
+    let req = proto::org::AuthzUserActionRequest { ses: Some(session.clone()), op: Some(pop) };
 
     let res = org.lock().await.authz_user_action(req).await;
     match res {
         Err(e) => {
             if e.code() == tonic::Code::Unimplemented {
-                tracing::debug!(desc, user = user_id, "Organizer doesn't support authz");
+                tracing::debug!(desc, user=user_id, "Organizer doesn't support authz");
                 None
             } else if e.code() == tonic::Code::Aborted {
                 tracing::warn!(desc, user=user_id, "Organizer gRPC.ABORTED authz request. Unsupported behavior for authz_user_action. 
Denying by default."); Some(false) } else { error!(desc, user=&user_id, err=?e, "Error while authorizing user action"); - try_send_error( - &user_id, - &server, - format!("Internal error in authz: {}", desc), - None, - &op, - ) - .ok(); + try_send_error(&user_id, &server, format!("Internal error in authz: {}", desc), None, &op).ok(); Some(false) } - } - Ok(res) => match res.get_ref().is_authorized { - Some(false) => { - let msg = res - .get_ref() - .message - .clone() - .map(|s| s) - .unwrap_or_else(|| "Permission denied".to_string()); - let details = res.get_ref().details.clone(); - if msg_on_deny { - try_send_error(&user_id, &server, msg, details, &op).ok(); + }, + Ok(res) => { + match res.get_ref().is_authorized { + Some(false) => { + let msg = res.get_ref().message.clone().map(|s| s).unwrap_or_else(|| "Permission denied".to_string()); + let details = res.get_ref().details.clone(); + if msg_on_deny { try_send_error(&user_id, &server, msg, details, &op).ok(); } + debug!(desc, user=user_id, "Organizer: Permission denied"); + Some(false) + }, + Some(true) => { + debug!(desc, user=user_id, "Organizer: Authorized OK"); + Some(true) + }, + None => { + debug!(desc, user=user_id, "Organizer: don't care, use defaults"); + None } - debug!(desc, user = user_id, "Organizer: Permission denied"); - Some(false) - } - Some(true) => { - debug!(desc, user = user_id, "Organizer: Authorized OK"); - Some(true) - } - None => { - debug!(desc, user = user_id, "Organizer: don't care, use defaults"); - None } - }, + } } } @@ -244,25 +209,12 @@ pub async fn org_authz_with_default<'a>( op: AuthzTopic<'a>, ) -> Result<(), AuthzError> { if let Some(res) = org_authz(session, desc, msg_on_deny, server, organizer, op.clone()).await { - if res { - Ok(()) - } else { - Err(AuthzError::Denied) - } + if res { Ok(()) } else { Err(AuthzError::Denied) } } else { - if default { - Ok(()) - } else { + if default { Ok(()) } else { if msg_on_deny { if let Some(ui) = &session.user { - try_send_error( - &ui.id, - &server, - format!("Permission denied: {}", desc), - Some(format!("{:?}", &op)), - &op, - ) - .ok(); + try_send_error(&ui.id, &server, format!("Permission denied: {}", desc), Some(format!("{:?}", &op)), &op).ok(); } else { tracing::error!(desc, "No user ID in session. 
Couldn't send deny message from org_authz_with_default");
                 }
diff --git a/server/src/api_server/ws_handers.rs b/server/src/api_server/ws_handers.rs
index 096566cb..99fff452 100644
--- a/server/src/api_server/ws_handers.rs
+++ b/server/src/api_server/ws_handers.rs
@@ -2,17 +2,13 @@
 #![allow(unused_variables)]
 #![allow(unused_imports)]
 
-use lib_clapshot_grpc::proto::client::client_to_server_cmd::{
-    AddSubtitle, CollabReport, DelComment, DelMediaFile, DelSubtitle, EditComment,
-    EditSubtitleInfo, JoinCollab, LeaveCollab, OpenMediaFile, OpenNavigationPage, RenameMediaFile,
-    ReorderItems,
-};
-use lib_clapshot_grpc::proto::client::ClientToServerCmd;
-use parking_lot::RwLock;
 use std::collections::HashMap;
 use std::path::{Path, PathBuf};
-use std::str::FromStr;
 use std::sync::Arc;
+use std::str::FromStr;
+use lib_clapshot_grpc::proto::client::ClientToServerCmd;
+use lib_clapshot_grpc::proto::client::client_to_server_cmd::{AddSubtitle, CollabReport, DelComment, DelMediaFile, DelSubtitle, EditComment, EditSubtitleInfo, JoinCollab, LeaveCollab, OpenMediaFile, OpenNavigationPage, RenameMediaFile, ReorderItems};
+use parking_lot::RwLock;
 
 type WsMsg = warp::ws::Message;
 type Res<T> = anyhow::Result<T>;
@@ -20,58 +16,43 @@ type MsgSender = tokio::sync::mpsc::UnboundedSender<WsMsg>;
 type SenderList = Vec<MsgSender>;
 type SenderListMap = Arc<RwLock<HashMap<String, SenderList>>>;
 
-use anyhow::{anyhow, bail, Context};
 use serde_json::json;
+use anyhow::{anyhow, bail, Context};
 
-use data_url::{mime, DataUrl};
-use hex;
 use inflector::Inflector;
-use sha2::{Digest, Sha256};
+use data_url::{DataUrl, mime};
+use sha2::{Sha256, Digest};
+use hex;
 
-use super::user_session::{self, org_authz_with_default, AuthzTopic};
+use super::user_session::{self, AuthzTopic, org_authz_with_default};
 use super::UserSession;
 use crate::api_server::server_state::ServerState;
 use crate::api_server::user_session::Topic;
 use crate::database::error::DBError;
-use crate::database::{
-    models, DBPaging, DbBasicQuery, DbQueryByMediaFile, DbQueryByUser, DbUpdate, DB,
-};
-use crate::{
-    client_cmd, optional_str_to_i32_or_tonic_error, send_user_error, send_user_ok,
-    str_to_i32_or_tonic_error,
-};
+use crate::database::{models, DBPaging, DbBasicQuery, DbQueryByMediaFile, DbQueryByUser, DbUpdate, DB};
+use crate::{client_cmd, optional_str_to_i32_or_tonic_error, send_user_error, send_user_ok, str_to_i32_or_tonic_error};
 
 use lib_clapshot_grpc::proto;
 use proto::org::authz_user_action_request as authz_req;
 
+
 /// Get media file by ID from DB, or send user error.
 /// Return None if media file not found and error was sent, or Some(MediaFile) if found.
-async fn get_media_file_or_send_error(
-    media_file_id: Option<&str>,
-    ses: &Option<&mut UserSession>,
-    server: &ServerState,
-) -> Res<Option<models::MediaFile>> {
+async fn get_media_file_or_send_error(media_file_id: Option<&str>, ses: &Option<&mut UserSession>, server: &ServerState) -> Res<Option<models::MediaFile>> {
     let media_file_id = media_file_id.ok_or(anyhow!("media file id missing"))?;
     match models::MediaFile::get(&mut server.db.conn()?, &media_file_id.into()) {
         Err(DBError::NotFound()) => {
             if let Some(ses) = ses {
-                send_user_error!(
-                    ses.user_id,
-                    server,
-                    Topic::MediaFile(media_file_id),
-                    "No such media file."
-                );
+                send_user_error!(ses.user_id, server, Topic::MediaFile(media_file_id), "No such media file.");
             };
             Ok(None)
         }
-        Err(e) => {
-            bail!(e);
-        }
-        Ok(v) => Ok(Some(v)),
+        Err(e) => { bail!(e); }
+        Ok(v) => { Ok(Some(v)) }
     }
 }
 
@@ -80,21 +61,9 @@ async fn get_media_file_or_send_error(
 // ---------------------------------------------------------------------
 
 /// Send user a navigation page to browse the files / folders they have (and/or something else, if Organizer handles it).
-pub async fn msg_open_navigation_page(
-    data: &OpenNavigationPage,
-    ses: &mut UserSession,
-    server: &ServerState,
-) -> Res<()> {
-    org_authz_with_default(
-        &ses.org_session,
-        "list media files",
-        true,
-        server,
-        &ses.organizer,
-        true,
-        AuthzTopic::Other(None, authz_req::other_op::Op::ViewHome),
-    )
-    .await?;
+pub async fn msg_open_navigation_page(data: &OpenNavigationPage, ses: &mut UserSession, server: &ServerState) -> Res<()> {
+    org_authz_with_default(&ses.org_session, "list media files", true, server,
+        &ses.organizer, true, AuthzTopic::Other(None, authz_req::other_op::Op::ViewHome)).await?;
 
     // Try to delegate request to Organizer.
     if let Some(org) = &ses.organizer {
@@ -107,15 +76,13 @@ pub async fn msg_open_navigation_page(
             if e.code() == tonic::Code::Unimplemented {
                 tracing::debug!("Organizer doesn't implement navigate_page(). Using default.");
             } else if e.code() == tonic::Code::Aborted {
-                tracing::debug!(
-                    "Ignoring org.navigate_page() result because it GrpcStatus.ABORTED."
-                );
+                tracing::debug!("Ignoring org.navigate_page() result because it GrpcStatus.ABORTED.");
                 return Ok(());
             } else {
                 tracing::error!(err=?e, "Error in organizer navigate_page() call");
                 anyhow::bail!("{}: {}", e.code(), e.message());
             }
-        }
+        },
         Ok(res) => {
             let res = res.into_inner();
             server.emit_cmd(
@@ -124,8 +91,7 @@ pub async fn msg_open_navigation_page(
                     page_id: res.page_id.clone(),
                     page_title: res.page_title,
                 }),
-                super::SendTo::UserSession(&ses.sid),
-            )?;
+                super::SendTo::UserSession(&ses.sid))?;
             return Ok(());
         }
     }
@@ -134,25 +100,13 @@
 
     // Organizer didn't handle this, so return a default listing.
     let mut media_files: Vec<proto::MediaFile> = Vec::new();
-    for m in
-        models::MediaFile::get_by_user(&mut server.db.conn()?, &ses.user_id, DBPaging::default())?
-    {
-        let subs = models::Subtitle::get_by_media_file(
-            &mut server.db.conn()?,
-            &m.id,
-            DBPaging::default(),
-        )?;
+    for m in models::MediaFile::get_by_user(&mut server.db.conn()?, &ses.user_id, DBPaging::default())? {
+        let subs = models::Subtitle::get_by_media_file(&mut server.db.conn()?, &m.id, DBPaging::default())?;
         media_files.push(m.to_proto3(&server.media_base_url, subs));
     }
 
-    let h_txt = if media_files.is_empty() {
-        "You have no media yet."
-    } else {
-        "All your media files"
-    };
-    let heading = proto::PageItem {
-        item: Some(proto::page_item::Item::Html(h_txt.into())),
-    };
+    let h_txt = if media_files.is_empty() { "You have no media yet." } else { "All your media files" };
+    let heading = proto::PageItem{ item: Some(proto::page_item::Item::Html(h_txt.into()))};
 
     let listing = crate::grpc::folder_listing_for_media_files(&media_files);
     let page = vec![heading, listing];
@@ -162,38 +116,23 @@ pub async fn msg_open_navigation_page(
     Ok(())
 }
 
+
 /// User opens a media file.
 /// Send them the media info and all comments related to it.
 /// Register the session as a viewer of the file (media_file_session_guard).
-pub async fn msg_open_media_file(
-    data: &OpenMediaFile,
-    ses: &mut UserSession,
-    server: &ServerState,
-) -> Res<()> {
-    if let Some(v) =
-        get_media_file_or_send_error(Some(&data.media_file_id), &Some(ses), server).await?
-    {
-        org_authz_with_default(
-            &ses.org_session,
-            "open media file",
-            true,
-            server,
-            &ses.organizer,
-            true,
-            AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::View),
-        )
-        .await?;
+pub async fn msg_open_media_file(data: &OpenMediaFile, ses: &mut UserSession, server: &ServerState) -> Res<()> {
+    if let Some(v) = get_media_file_or_send_error(Some(&data.media_file_id), &Some(ses), server).await? {
+        org_authz_with_default(&ses.org_session,
+            "open media file", true, server, &ses.organizer,
+            true, AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::View)).await?;
         send_open_media_file_cmd(server, &ses.sid, &v.id).await?;
         ses.cur_media_file_id = Some(v.id);
     }
     Ok(())
 }
 
-pub async fn send_open_media_file_cmd(
-    server: &ServerState,
-    session_id: &str,
-    media_file_id: &str,
-) -> Res<()> {
+
+pub async fn send_open_media_file_cmd(server: &ServerState, session_id: &str, media_file_id: &str) -> Res<()> {
     server.link_session_to_media_file(session_id, media_file_id)?;
     let conn = &mut server.db.conn()?;
     let v_db = models::MediaFile::get(conn, &media_file_id.into())?;
@@ -204,8 +143,7 @@ pub async fn send_open_media_file_cmd(
     }
     server.emit_cmd(
         client_cmd!(OpenMediaFile, {media_file: Some(v)}),
-        super::SendTo::UserSession(session_id),
-    )?;
+        super::SendTo::UserSession(session_id))?;
     let mut cmts = vec![];
     for mut c in models::Comment::get_by_media_file(conn, media_file_id, DBPaging::default())? {
         server.fetch_drawing_data_into_comment(&mut c).await?;
@@ -213,51 +151,31 @@ pub async fn send_open_media_file_cmd(
     }
     server.emit_cmd(
         client_cmd!(AddComments, {comments: cmts}),
-        super::SendTo::UserSession(session_id),
-    )?;
+        super::SendTo::UserSession(session_id))?;
     Ok(())
 }
 
-pub async fn del_media_file_and_cleanup(
-    media_file_id: &str,
-    ses: Option<&mut UserSession>,
-    server: &ServerState,
-) -> Res<()> {
-    tracing::info!(
-        media_file_id = media_file_id,
-        user_id = ses.as_ref().map(|u| u.user_id.clone()),
-        "Trashing media file."
-    );
+
+pub async fn del_media_file_and_cleanup(media_file_id: &str, ses: Option<&mut UserSession>, server: &ServerState) -> Res<()> {
+    tracing::info!(media_file_id=media_file_id, user_id=ses.as_ref().map(|u|u.user_id.clone()), "Trashing media file.");
 
     if let Some(v) = get_media_file_or_send_error(Some(media_file_id), &ses, server).await?
{ + // Check authorization against user session, if provided if let Some(ses) = &ses { let default_perm = ses.user_id == (&v).user_id || ses.is_admin; - org_authz_with_default( - &ses.org_session, - "delete media file", - true, - server, - &ses.organizer, - default_perm, - AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::Delete), - ) - .await?; + org_authz_with_default(&ses.org_session, "delete media file", true, server, &ses.organizer, + default_perm, AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::Delete)).await?; } models::MediaFile::delete(&mut server.db.conn()?, &v.id)?; - let mut details = format!( - "Added by '{}' on {}. Filename was {}.", + let mut details = format!("Added by '{}' on {}. Filename was {}.", v.user_id.clone(), v.added_time, - v.orig_filename.clone().unwrap_or_default() - ); + v.orig_filename.clone().unwrap_or_default()); fn backup_media_file_db_row(server: &ServerState, v: &models::MediaFile) -> Res<()> { - let backup_file = server - .media_files_dir - .join(v.id.clone()) - .join("db_backup.json"); + let backup_file = server.media_files_dir.join(v.id.clone()).join("db_backup.json"); if backup_file.exists() { std::fs::remove_file(&backup_file)?; } @@ -266,17 +184,14 @@ pub async fn del_media_file_and_cleanup( Ok(()) } - fn move_media_file_to_trash(server: &ServerState, media_file_id: &str) -> Res<()> { + fn move_media_file_to_trash(server: &ServerState, media_file_id: &str) -> Res<()> + { let media_file_dir = server.media_files_dir.join(media_file_id); let trash_dir = server.media_files_dir.join("trash"); if !trash_dir.exists() { std::fs::create_dir(&trash_dir)?; } - let hash_and_datetime = format!( - "{}_{}", - media_file_id, - chrono::Utc::now().format("%Y%m%d-%H%M%S") - ); + let hash_and_datetime = format!("{}_{}", media_file_id, chrono::Utc::now().format("%Y%m%d-%H%M%S")); let media_file_trash_dir = trash_dir.join(hash_and_datetime); std::fs::rename(&media_file_dir, &media_file_trash_dir)?; Ok(()) @@ -286,6 +201,7 @@ pub async fn del_media_file_and_cleanup( if let Err(e) = backup_media_file_db_row(server, &v) { details.push_str(&format!(" WARNING: DB row backup failed: {:?}.", e)); cleanup_errors = true; + } if let Err(e) = move_media_file_to_trash(server, &v.id) { details.push_str(&format!(" WARNING: Move to trash failed: {:?}.", e)); @@ -294,124 +210,71 @@ pub async fn del_media_file_and_cleanup( if let Some(ses) = ses { let media_type_str = v.media_type.unwrap_or("file".to_string()).to_title_case(); - send_user_ok!( - &ses.user_id, - &server, - Topic::MediaFile(&v.id), - if !cleanup_errors { - format!("{} deleted.", media_type_str) - } else { - format!("{} deleted, but cleanup had errors.", media_type_str) - }, - details, - true - ); + send_user_ok!(&ses.user_id, &server, Topic::MediaFile(&v.id), + if !cleanup_errors { format!("{} deleted.", media_type_str) } else { format!("{} deleted, but cleanup had errors.", media_type_str) }, + details, true); } } Ok(()) } -pub async fn msg_del_media_file( - data: &DelMediaFile, - ses: &mut UserSession, - server: &ServerState, -) -> Res<()> { + +pub async fn msg_del_media_file(data: &DelMediaFile, ses: &mut UserSession, server: &ServerState) -> Res<()> { del_media_file_and_cleanup(&data.media_file_id, Some(ses), server).await } -pub async fn msg_rename_media_file( - data: &RenameMediaFile, - ses: &mut UserSession, - server: &ServerState, -) -> Res<()> { - if let Some(v) = - get_media_file_or_send_error(Some(&data.media_file_id), &Some(ses), server).await? 
- { + +pub async fn msg_rename_media_file(data: &RenameMediaFile, ses: &mut UserSession, server: &ServerState) -> Res<()> { + if let Some(v) = get_media_file_or_send_error(Some(&data.media_file_id), &Some(ses), server).await? { let default_perm = ses.user_id == (&v).user_id || ses.is_admin; - org_authz_with_default( - &ses.org_session, - "rename media file", - true, - server, - &ses.organizer, - default_perm, - AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::Rename), - ) - .await?; + org_authz_with_default(&ses.org_session, "rename media file", true, server, &ses.organizer, + default_perm, AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::Rename)).await?; let new_name = data.new_name.trim(); if new_name.is_empty() || !new_name.chars().any(|c| c.is_alphanumeric()) { - send_user_error!( - &ses.user_id, - server, - Topic::MediaFile(&v.id), - "Invalid file name (must have letters/numbers)" - ); + send_user_error!(&ses.user_id, server, Topic::MediaFile(&v.id), "Invalid file name (must have letters/numbers)"); return Ok(()); } if new_name.len() > 160 { - send_user_error!( - &ses.user_id, - server, - Topic::MediaFile(&v.id), - "Name too long (max 160)" - ); + send_user_error!(&ses.user_id, server, Topic::MediaFile(&v.id), "Name too long (max 160)"); return Ok(()); } models::MediaFile::rename(&mut server.db.conn()?, &v.id, new_name)?; let media_type_str = v.media_type.unwrap_or("file".to_string()).to_title_case(); - send_user_ok!( - &ses.user_id, - server, - Topic::MediaFile(&v.id), - format!("{} renamed.", media_type_str), - format!("New name: '{}'", new_name), - true - ); + send_user_ok!(&ses.user_id, server, Topic::MediaFile(&v.id), format!("{} renamed.", media_type_str), + format!("New name: '{}'", new_name), true); } Ok(()) } -pub async fn msg_add_comment( - data: &proto::client::client_to_server_cmd::AddComment, - ses: &mut UserSession, - server: &ServerState, -) -> Res<()> { - let media_file_id = - match get_media_file_or_send_error(Some(&data.media_file_id), &Some(ses), server).await? { - Some(v) => { - let default_perm = true; // anyone can comment on any media file - org_authz_with_default( - &ses.org_session, - "comment media file", - true, - server, - &ses.organizer, - default_perm, - AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::Comment), - ) - .await?; - v.id - } - None => return Ok(()), - }; + +pub async fn msg_add_comment(data: &proto::client::client_to_server_cmd::AddComment, ses: &mut UserSession, server: &ServerState) -> Res<()> { + + let media_file_id = match get_media_file_or_send_error(Some(&data.media_file_id), &Some(ses), server).await? 
{ + Some(v) => { + let default_perm = true; // anyone can comment on any media file + org_authz_with_default(&ses.org_session, "comment media file", true, server, &ses.organizer, + default_perm, AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::Comment)).await?; + v.id + }, + None => return Ok(()), + }; // Parse drawing data if present and write to file let mut drwn = data.drawing.clone(); if let Some(d) = &drwn { if d.starts_with("data:") { + // Convert data URI to bytes let img_uri = DataUrl::process(&d).map_err(|e| anyhow!("Invalid drawing data URI"))?; if img_uri.mime_type().type_ != "image" || img_uri.mime_type().subtype != "webp" { bail!("Invalid mimetype in drawing: {:?}", img_uri.mime_type()) } - let img_data = img_uri - .decode_to_vec() - .map_err(|e| anyhow!("Failed to decode drawing data URI: {:?}", e))?; + let img_data = img_uri.decode_to_vec().map_err(|e| anyhow!("Failed to decode drawing data URI: {:?}", e))?; // Make up a filename - fn sha256hex(data: &[u8]) -> String { + fn sha256hex( data: &[u8] ) -> String { let mut hasher = Sha256::new(); hasher.update(data); let result = hasher.finalize(); @@ -421,16 +284,11 @@ pub async fn msg_add_comment( let fname = format!("{}.webp", short_csum); // Write to file - let drawing_path = server - .media_files_dir - .join(&media_file_id) - .join("drawings") - .join(&fname); + let drawing_path = server.media_files_dir.join(&media_file_id).join("drawings").join(&fname); std::fs::create_dir_all(drawing_path.parent().unwrap()) .map_err(|e| anyhow!("Failed to create drawings dir: {:?}", e))?; - async_std::fs::write(drawing_path, img_data.0) - .await - .map_err(|e| anyhow!("Failed to write drawing file: {:?}", e))?; + async_std::fs::write(drawing_path, img_data.0).await.map_err( + |e| anyhow!("Failed to write drawing file: {:?}", e))?; // Replace data URI with filename drwn = Some(fname); @@ -446,166 +304,93 @@ pub async fn msg_add_comment( timecode: data.timecode.clone(), drawing: drwn.clone(), subtitle_id: optional_str_to_i32_or_tonic_error!(data.subtitle_id)?, - subtitle_filename_ifnull: None, + subtitle_filename_ifnull: None }; let c = models::Comment::insert(&mut server.db.conn()?, &c) .map_err(|e| anyhow!("Failed to add comment: {:?}", e))?; // Send to all clients watching this media file - ses.emit_new_comment(server, c, super::SendTo::MediaFileId(&media_file_id)) - .await?; + ses.emit_new_comment(server, c, super::SendTo::MediaFileId(&media_file_id)).await?; Ok(()) } -pub async fn msg_edit_comment( - data: &EditComment, - ses: &mut UserSession, - server: &ServerState, -) -> Res<()> { + +pub async fn msg_edit_comment(data: &EditComment, ses: &mut UserSession, server: &ServerState) -> Res<()> { let id = i32::from_str(&data.comment_id)?; let conn = &mut server.db.conn()?; match models::Comment::get(conn, &id) { Ok(old) => { let default_perm = Some(&ses.user_id) == old.user_id.as_ref() || ses.is_admin; - org_authz_with_default( - &ses.org_session, - "edit comment", - true, - server, - &ses.organizer, - default_perm, - AuthzTopic::Comment(&old, authz_req::comment_op::Op::Edit), - ) - .await?; + org_authz_with_default(&ses.org_session, "edit comment", true, server, &ses.organizer, + default_perm, AuthzTopic::Comment(&old, authz_req::comment_op::Op::Edit)).await?; let vid = &old.media_file_id; models::Comment::edit(conn, id, &data.new_comment)?; server.emit_cmd( client_cmd!(DelComment, {comment_id: id.to_string()}), - super::SendTo::MediaFileId(&vid), - )?; + super::SendTo::MediaFileId(&vid))?; let c = models::Comment::get(conn, &id)?; 
- ses.emit_new_comment(server, c, super::SendTo::MediaFileId(&vid)) - .await?; + ses.emit_new_comment(server, c, super::SendTo::MediaFileId(&vid)).await?; } Err(DBError::NotFound()) => { - send_user_error!( - &ses.user_id, - server, - Topic::None, - "Failed to edit comment.", - "No such comment. Cannot edit.", - true - ); - } - Err(e) => { - bail!(e); + send_user_error!(&ses.user_id, server, Topic::None, "Failed to edit comment.", "No such comment. Cannot edit.", true); } + Err(e) => { bail!(e); } } Ok(()) } -pub async fn msg_del_comment( - data: &DelComment, - ses: &mut UserSession, - server: &ServerState, -) -> Res<()> { + +pub async fn msg_del_comment(data: &DelComment, ses: &mut UserSession, server: &ServerState) -> Res<()> { let id = i32::from_str(&data.comment_id)?; let conn = &mut server.db.conn()?; match models::Comment::get(conn, &id) { Ok(cmt) => { let default_perm = Some(&ses.user_id) == cmt.user_id.as_ref() || ses.is_admin; - org_authz_with_default( - &ses.org_session, - "delete comment", - true, - server, - &ses.organizer, - default_perm, - AuthzTopic::Comment(&cmt, authz_req::comment_op::Op::Delete), - ) - .await?; + org_authz_with_default(&ses.org_session, "delete comment", true, server, &ses.organizer, + default_perm, AuthzTopic::Comment(&cmt, authz_req::comment_op::Op::Delete)).await?; let vid = cmt.media_file_id; if Some(&ses.user_id) != cmt.user_id.as_ref() && !ses.is_admin { - send_user_error!( - &ses.user_id, - server, - Topic::MediaFile(&vid), - "Failed to delete comment.", - "You can only delete your own comments", - true - ); + send_user_error!(&ses.user_id, server, Topic::MediaFile(&vid), "Failed to delete comment.", "You can only delete your own comments", true); return Ok(()); } let all_comm = models::Comment::get_by_media_file(conn, &vid, DBPaging::default())?; - if all_comm - .iter() - .any(|c| c.parent_id.map(|i| i.to_string()) == Some(id.to_string())) - { - send_user_error!( - &ses.user_id, - server, - Topic::MediaFile(&vid), - "Failed to delete comment.", - "Comment has replies. Cannot delete.", - true - ); + if all_comm.iter().any(|c| c.parent_id.map(|i| i.to_string()) == Some(id.to_string())) { + send_user_error!(&ses.user_id, server, Topic::MediaFile(&vid), "Failed to delete comment.", "Comment has replies. Cannot delete.", true); return Ok(()); } models::Comment::delete(conn, &id)?; server.emit_cmd( client_cmd!(DelComment, {comment_id: id.to_string()}), - super::SendTo::MediaFileId(&vid), - )?; + super::SendTo::MediaFileId(&vid))?; } Err(DBError::NotFound()) => { - send_user_error!( - &ses.user_id, - server, - Topic::None, - "Failed to delete comment.", - "No such comment. Cannot delete.", - true - ); - } - Err(e) => { - bail!(e); + send_user_error!(&ses.user_id, server, Topic::None, "Failed to delete comment.", "No such comment. Cannot delete.", true); } + Err(e) => { bail!(e); } } Ok(()) } -pub async fn msg_add_subtitle( - data: &AddSubtitle, - ses: &mut UserSession, - server: &ServerState, -) -> Res<()> { - let mf = - match get_media_file_or_send_error(Some(&data.media_file_id), &Some(ses), server).await? 
{ - Some(v) => { - let default_perm = ses.user_id == (&v).user_id || ses.is_admin; - org_authz_with_default( - &ses.org_session, - "add subtitle", - true, - server, - &ses.organizer, - default_perm, - AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::Edit), - ) - .await?; - v - } - None => return Ok(()), - }; + +pub async fn msg_add_subtitle(data: &AddSubtitle, ses: &mut UserSession, server: &ServerState) -> Res<()> { + let mf = match get_media_file_or_send_error(Some(&data.media_file_id), &Some(ses), server).await? { + Some(v) => { + let default_perm = ses.user_id == (&v).user_id || ses.is_admin; + org_authz_with_default(&ses.org_session, "add subtitle", true, server, &ses.organizer, + default_perm, AuthzTopic::MediaFile(&v, authz_req::media_file_op::Op::Edit)).await?; + v + }, + None => return Ok(()), + }; let language_code = { // Guess language from filename (e.g. "en" from "video.en.srt") let lang = data.file_name.split('.').rev().nth(1).unwrap_or_default(); - if (lang.len() == 2 || lang.len() == 3) && lang.chars().all(|c| c.is_ascii_lowercase()) { + if (lang.len()==2 || lang.len()==3) && lang.chars().all(|c| c.is_ascii_lowercase()) { lang.to_string() } else { "en".to_string() @@ -613,9 +398,7 @@ pub async fn msg_add_subtitle( }; let media_dir = server.media_files_dir.join(&mf.id); - if !media_dir.exists() { - bail!("Media file dir not found: {:?}", media_dir); - } + if !media_dir.exists() { bail!("Media file dir not found: {:?}", media_dir); } let subs_dir = media_dir.join("subs"); let orig_subs_dir = subs_dir.join("orig"); @@ -623,75 +406,41 @@ pub async fn msg_add_subtitle( bail!("Failed to create orig subs dir"); } - let orig_fn_clean: PathBuf = Path::new(&data.file_name) - .file_name() - .context("Bad filename")? - .into(); + let orig_fn_clean: PathBuf = Path::new(&data.file_name).file_name().context("Bad filename")?.into(); let orig_sub_file = orig_subs_dir.join(&orig_fn_clean); tracing::debug!("Writing orig subtitle file to: {:?}", orig_sub_file); if orig_sub_file.exists() { - send_user_error!( - &ses.user_id, - server, - Topic::MediaFile(&mf.id), - "Failed to add subtitle.", - format!("Subtitle file already exists: '{:?}'", &orig_fn_clean), - true - ); + send_user_error!(&ses.user_id, server, Topic::MediaFile(&mf.id), "Failed to add subtitle.", format!("Subtitle file already exists: '{:?}'", &orig_fn_clean), true); return Ok(()); } let file_contents = { use base64::{engine::general_purpose::STANDARD, Engine as _}; - STANDARD - .decode(&data.contents_base64) - .context("Failed to base64 decode subtitle file")? + STANDARD.decode(&data.contents_base64).context("Failed to base64 decode subtitle file")? 
};
-    tokio::fs::write(&orig_sub_file, file_contents)
-        .await
-        .context("Failed to write orig subtitle file")?;
+    tokio::fs::write(&orig_sub_file, file_contents).await.context("Failed to write orig subtitle file")?;
 
     server.storage.upload_if_exists(&orig_sub_file);
 
     // Convert to WebVTT if needed
-    let playback_filename = {
+    let playback_filename = {
         use aspasia::{AssSubtitle, SubRipSubtitle, Subtitle, TimedSubtitleFile, WebVttSubtitle};
-        let vtt_path = subs_dir.join(
-            &orig_fn_clean
-                .with_extension("vtt")
-                .file_name()
-                .context("Bad filename")?,
-        );
+        let vtt_path = subs_dir.join(&orig_fn_clean.with_extension("vtt").file_name().context("Bad filename")?);
 
         if vtt_path.exists() {
-            send_user_error!(
-                &ses.user_id,
-                server,
-                Topic::MediaFile(&mf.id),
-                "Failed to add subtitle.",
-                format!(
-                    "WebVTT file already exists: '{:?}'",
-                    &vtt_path.file_name().context("Bad filename")?
-                ),
-                true
-            );
+            send_user_error!(&ses.user_id, server, Topic::MediaFile(&mf.id), "Failed to add subtitle.", format!("WebVTT file already exists: '{:?}'", &vtt_path.file_name().context("Bad filename")?), true);
             return Ok(());
         }
 
         match TimedSubtitleFile::new(&orig_sub_file) {
             Ok(TimedSubtitleFile::WebVtt(sub)) => {
-                tracing::debug!(
-                    "Subtitle file is already WebVTT, not converting: {:?}",
-                    &orig_sub_file
-                );
+                tracing::debug!("Subtitle file is already WebVTT, not converting: {:?}", &orig_sub_file);
                 None
-            }
+            },
             Ok(sub) => {
                 tracing::debug!("Converting subtitle file to WebVTT: {:?}", &orig_sub_file);
-                WebVttSubtitle::from(sub)
-                    .export(&vtt_path)
-                    .context("Failed to convert to WebVTT")?;
+                WebVttSubtitle::from(sub).export(&vtt_path).context("Failed to convert to WebVTT")?;
 
                 // Workaround for: https://github.com/ylysyym/aspasia/issues/1
                 fn temp_workaround_aspasia_webvtt_bug(vtt_file: &Path) -> std::io::Result<()> {
@@ -702,9 +451,7 @@ pub async fn msg_add_subtitle(
                     let mut lines: Vec<String> = Vec::new();
                     for line in reader.lines() {
                         let mut line = line?;
-                        if line.contains("-->") {
-                            line = line.replace(",", ".");
-                        }
+                        if line.contains("-->") { line = line.replace(",", "."); }
                         lines.push(line);
                     }
                     fs::write(vtt_file, lines.join("\n"))
@@ -712,32 +459,21 @@ pub async fn msg_add_subtitle(
                 temp_workaround_aspasia_webvtt_bug(&vtt_path)?;
                 server.storage.upload_if_exists(&vtt_path);
 
-                Some(
-                    vtt_path
-                        .file_name()
-                        .context("Bad filename")?
-                        .to_str()
-                        .context("Bad filename")?
- .to_string(), - ) - } + Some(vtt_path.file_name().context("Bad filename")?.to_str().context("Bad filename")?.to_string()) + }, Err(e) => return Err(anyhow!("Failed to parse subtitle file: {:?}", e)), } }; let conn = &mut server.db.conn()?; - let new_sub = models::Subtitle::insert( - conn, - &models::SubtitleInsert { - media_file_id: mf.id.clone(), - orig_filename: orig_fn_clean.to_string_lossy().into(), - title: orig_fn_clean.to_string_lossy().into(), - language_code, - filename: playback_filename, - time_offset: 0.0, - }, - ) - .map_err(|e| anyhow!("Failed to add subtitle: {:?}", e))?; + let new_sub = models::Subtitle::insert(conn, &models::SubtitleInsert { + media_file_id: mf.id.clone(), + orig_filename: orig_fn_clean.to_string_lossy().into(), + title: orig_fn_clean.to_string_lossy().into(), + language_code, + filename: playback_filename, + time_offset: 0.0, + }) .map_err(|e| anyhow!("Failed to add subtitle: {:?}", e))?; let all_subs = models::Subtitle::get_by_media_file(conn, &mf.id, DBPaging::default())?; if all_subs.len() == 1 { @@ -749,46 +485,28 @@ pub async fn msg_add_subtitle( Ok(()) } -pub async fn msg_edit_subtitle_info( - data: &EditSubtitleInfo, - ses: &mut UserSession, - server: &ServerState, -) -> Res<()> { + +pub async fn msg_edit_subtitle_info(data: &EditSubtitleInfo, ses: &mut UserSession, server: &ServerState) -> Res<()> { let id = str_to_i32_or_tonic_error!(data.id)?; let conn = &mut server.db.conn()?; - let mut sub = - models::Subtitle::get(conn, &id).map_err(|e| anyhow!("Failed to get subtitle: {:?}", e))?; - let mf = models::MediaFile::get(conn, &sub.media_file_id) - .map_err(|e| anyhow!("Failed to get media file: {:?}", e))?; + let mut sub = models::Subtitle::get(conn, &id).map_err(|e| anyhow!("Failed to get subtitle: {:?}", e))?; + let mf = models::MediaFile::get(conn, &sub.media_file_id).map_err(|e| anyhow!("Failed to get media file: {:?}", e))?; let default_perm = ses.user_id == mf.user_id || ses.is_admin; - org_authz_with_default( - &ses.org_session, - "edit subtitle", - true, - server, - &ses.organizer, - default_perm, - AuthzTopic::MediaFile(&mf, authz_req::media_file_op::Op::Edit), - ) - .await?; + org_authz_with_default(&ses.org_session, "edit subtitle", true, server, &ses.organizer, + default_perm, AuthzTopic::MediaFile(&mf, authz_req::media_file_op::Op::Edit)).await?; // Update subtitle in DB sub.title = data.title.clone().unwrap_or(sub.title.clone()); - sub.language_code = data - .language_code - .clone() - .unwrap_or(sub.language_code.clone()); + sub.language_code = data.language_code.clone().unwrap_or(sub.language_code.clone()); sub.time_offset = data.time_offset.clone().unwrap_or(sub.time_offset); - models::Subtitle::update_many(conn, &[sub]) - .map_err(|e| anyhow!("Failed to update subtitle: {:?}", e))?; + models::Subtitle::update_many(conn, &[sub]) .map_err(|e| anyhow!("Failed to update subtitle: {:?}", e))?; // Set/unset default subtitle for media file if requested if let Some(is_default) = data.is_default { let new_val = if is_default { Some(id) } else { None }; - if is_default || mf.default_subtitle_id == Some(id) { - // only set null if this subtitle was previously the default + if is_default || mf.default_subtitle_id == Some(id) { // only set null if this subtitle was previously the default models::MediaFile::set_default_subtitle(conn, &mf.id, new_val) .map_err(|e| anyhow!("Failed to set default subtitle: {:?}", e))?; } @@ -798,105 +516,60 @@ pub async fn msg_edit_subtitle_info( Ok(()) } -pub async fn msg_del_subtitle( - data: &DelSubtitle, - 
ses: &mut UserSession, - server: &ServerState, -) -> Res<()> { +pub async fn msg_del_subtitle(data: &DelSubtitle, ses: &mut UserSession, server: &ServerState) -> Res<()> { let id = str_to_i32_or_tonic_error!(data.id)?; let conn = &mut server.db.conn()?; - let sub = - models::Subtitle::get(conn, &id).map_err(|e| anyhow!("Failed to get subtitle: {:?}", e))?; - let mf = models::MediaFile::get(conn, &sub.media_file_id) - .map_err(|e| anyhow!("Failed to get media file: {:?}", e))?; + let sub = models::Subtitle::get(conn, &id).map_err(|e| anyhow!("Failed to get subtitle: {:?}", e))?; + let mf = models::MediaFile::get(conn, &sub.media_file_id).map_err(|e| anyhow!("Failed to get media file: {:?}", e))?; let default_perm = ses.user_id == mf.user_id || ses.is_admin; - org_authz_with_default( - &ses.org_session, - "delete subtitle", - true, - server, - &ses.organizer, - default_perm, - AuthzTopic::MediaFile(&mf, authz_req::media_file_op::Op::Edit), - ) - .await?; + org_authz_with_default(&ses.org_session, "delete subtitle", true, server, &ses.organizer, + default_perm, AuthzTopic::MediaFile(&mf, authz_req::media_file_op::Op::Edit)).await?; let subs_dir = server.media_files_dir.join(&mf.id).join("subs"); tracing::debug!(orig_file=?sub.orig_filename, vtt_file=?sub.filename, "Deleting subtitle files"); let orig_path = subs_dir.join("orig").join(&sub.orig_filename); - if orig_path.exists() { - std::fs::remove_file(&orig_path).context("Failed to delete orig subtitle file")?; - } + if orig_path.exists() { std::fs::remove_file(&orig_path).context("Failed to delete orig subtitle file")?; } if let Some(vtt) = sub.filename { let vtt_path = subs_dir.join(&vtt); - if vtt_path.exists() { - std::fs::remove_file(&vtt_path).context("Failed to delete vtt subtitle file")?; - } + if vtt_path.exists() { std::fs::remove_file(&vtt_path).context("Failed to delete vtt subtitle file")?; } } - models::Subtitle::delete(conn, &id) - .map_err(|e| anyhow!("Failed to delete subtitle: {:?}", e))?; + models::Subtitle::delete(conn, &id).map_err(|e| anyhow!("Failed to delete subtitle: {:?}", e))?; send_open_media_file_cmd(server, &ses.sid, &mf.id).await?; Ok(()) } -pub async fn msg_list_my_messages( - data: &proto::client::client_to_server_cmd::ListMyMessages, - ses: &mut UserSession, - server: &ServerState, -) -> Res<()> { +pub async fn msg_list_my_messages(data: &proto::client::client_to_server_cmd::ListMyMessages, ses: &mut UserSession, server: &ServerState) -> Res<()> { let conn = &mut server.db.conn()?; let msgs = models::Message::get_by_user(conn, &ses.user_id, DBPaging::default())?; server.emit_cmd( client_cmd!(ShowMessages, { msgs: (&msgs).into_iter().map(|m| m.to_proto3()).collect() }), - super::SendTo::UserSession(&ses.sid), + super::SendTo::UserSession(&ses.sid) )?; for m in msgs { - if !m.seen { - models::Message::set_seen(conn, m.id, true)?; - } + if !m.seen { models::Message::set_seen(conn, m.id, true)?; } } Ok(()) } -pub async fn msg_join_collab( - data: &JoinCollab, - ses: &mut UserSession, - server: &ServerState, -) -> Res<()> { + +pub async fn msg_join_collab(data: &JoinCollab, ses: &mut UserSession, server: &ServerState) -> Res<()> { if let Some(collab_id) = ses.cur_collab_id.clone() { if server.sender_is_collab_participant(collab_id.as_str(), &ses.sender) { - tracing::debug!( - "{} is already in collab {}. Ignoring double join.", - ses.user_name, - collab_id - ); + tracing::debug!("{} is already in collab {}. 
Ignoring double join.", ses.user_name, collab_id); return Ok(()); } } ses.collab_session_guard = None; ses.cur_collab_id = None; - if let Some(v) = - get_media_file_or_send_error(Some(&data.media_file_id), &Some(ses), server).await? - { - org_authz_with_default( - &ses.org_session, - "join collab", - true, - server, - &ses.organizer, - true, - AuthzTopic::Other( - Some(&data.collab_id), - authz_req::other_op::Op::JoinCollabSession, - ), - ) - .await?; + if let Some(v) = get_media_file_or_send_error(Some(&data.media_file_id), &Some(ses), server).await? { + org_authz_with_default(&ses.org_session, "join collab", true, server, &ses.organizer, + true, AuthzTopic::Other(Some(&data.collab_id), authz_req::other_op::Op::JoinCollabSession)).await?; match server.link_session_to_collab(&data.collab_id, &v.id, ses.sender.clone()) { Ok(csg) => { @@ -910,27 +583,19 @@ pub async fn msg_join_collab( ..Default::default() }] }), - super::SendTo::Collab(&data.collab_id), + super::SendTo::Collab(&data.collab_id) )?; } Err(e) => { - send_user_error!( - &ses.user_id, - server, - Topic::MediaFile(&v.id), - format!("Failed to join collab session: {}", e) - ); + send_user_error!(&ses.user_id, server, Topic::MediaFile(&v.id), format!("Failed to join collab session: {}", e)); } } } Ok(()) } -pub async fn msg_leave_collab( - data: &LeaveCollab, - ses: &mut UserSession, - server: &ServerState, -) -> Res<()> { + +pub async fn msg_leave_collab(data: &LeaveCollab, ses: &mut UserSession, server: &ServerState) -> Res<()> { if let Some(collab_id) = &ses.cur_collab_id { server.emit_cmd( client_cmd!(ShowMessages, { msgs: vec![ @@ -940,7 +605,7 @@ pub async fn msg_leave_collab( ..Default::default() }] }), - super::SendTo::Collab(&collab_id), + super::SendTo::Collab(&collab_id) )?; ses.collab_session_guard = None; ses.cur_collab_id = None; @@ -948,11 +613,8 @@ pub async fn msg_leave_collab( Ok(()) } -pub async fn msg_collab_report( - data: &CollabReport, - ses: &mut UserSession, - server: &ServerState, -) -> Res<()> { + +pub async fn msg_collab_report(data: &CollabReport, ses: &mut UserSession, server: &ServerState) -> Res<()> { if let Some(collab_id) = &ses.cur_collab_id { let ce = client_cmd!(CollabEvent, { paused: data.paused, @@ -962,25 +624,15 @@ pub async fn msg_collab_report( drawing: data.drawing.clone(), subtitle_id: data.subtitle_id.clone(), }); - server - .emit_cmd(ce, super::SendTo::Collab(collab_id)) - .map(|_| ()) + server.emit_cmd(ce, super::SendTo::Collab(collab_id)).map(|_| ()) } else { - send_user_error!( - &ses.user_id, - server, - Topic::None, - "Report rejected: no active collab session." - ); + send_user_error!(&ses.user_id, server, Topic::None, "Report rejected: no active collab session."); return Ok(()); } } -pub async fn msg_move_to_folder( - data: &proto::client::client_to_server_cmd::MoveToFolder, - ses: &mut UserSession, - server: &ServerState, -) -> Res<()> { + +pub async fn msg_move_to_folder(data: &proto::client::client_to_server_cmd::MoveToFolder, ses: &mut UserSession, server: &ServerState) -> Res<()> { if let Some(org) = &ses.organizer { let req = proto::org::MoveToFolderRequest { ses: Some(ses.org_session.clone()), @@ -992,25 +644,17 @@ pub async fn msg_move_to_folder( if e.code() == tonic::Code::Unimplemented { tracing::debug!("Organizer doesn't implement move_to_folder(). Ignoring."); } else if e.code() == tonic::Code::Aborted { - tracing::debug!( - "Ignoring org.move_to_folder() result because it GrpcStatus.ABORTED." 
- ); + tracing::debug!("Ignoring org.move_to_folder() result because it GrpcStatus.ABORTED."); } else { tracing::error!(err=?e, "Error in organizer move_to_folder() call"); anyhow::bail!("Organizer error: {:?}", e); } } - } else { - send_user_error!(&ses.user_id, server, Topic::None, "No organizer session."); - } + } else { send_user_error!(&ses.user_id, server, Topic::None, "No organizer session."); } Ok(()) } -pub async fn msg_reorder_items( - data: &ReorderItems, - ses: &mut UserSession, - server: &ServerState, -) -> Res<()> { +pub async fn msg_reorder_items(data: &ReorderItems, ses: &mut UserSession, server: &ServerState) -> Res<()> { if let Some(org) = &ses.organizer { let req = proto::org::ReorderItemsRequest { ses: Some(ses.org_session.clone()), @@ -1021,50 +665,41 @@ pub async fn msg_reorder_items( if e.code() == tonic::Code::Unimplemented { tracing::debug!("Organizer doesn't implement reorder_items(). Ignoring."); } else if e.code() == tonic::Code::Aborted { - tracing::debug!( - "Ignoring org.reorder_items() result because it GrpcStatus.ABORTED." - ); + tracing::debug!("Ignoring org.reorder_items() result because it GrpcStatus.ABORTED."); } else { tracing::error!(err=?e, "Error in organizer reorder_items() call"); anyhow::bail!("Organizer error: {:?}", e); } } - } else { - send_user_error!(&ses.user_id, server, Topic::None, "No organizer session."); - } + } else { send_user_error!(&ses.user_id, server, Topic::None, "No organizer session."); } Ok(()) } -pub async fn msg_organizer_cmd( - data: &proto::client::client_to_server_cmd::OrganizerCmd, - ses: &mut UserSession, - server: &ServerState, -) -> Res<()> { + +pub async fn msg_organizer_cmd(data: &proto::client::client_to_server_cmd::OrganizerCmd, ses: &mut UserSession, server: &ServerState) -> Res<()> { if let Some(org) = &ses.organizer { let req = proto::org::CmdFromClientRequest { ses: Some(ses.org_session.clone()), cmd: data.cmd.clone(), - args: data.args.clone(), + args: data.args.clone() }; match org.lock().await.cmd_from_client(req).await { Err(e) => { if e.code() == tonic::Code::Aborted { - tracing::debug!( - "Ignoring org.cmd_from_client() result because it GrpcStatus.ABORTED." - ); + tracing::debug!("Ignoring org.cmd_from_client() result because it GrpcStatus.ABORTED."); } else { tracing::error!(err=?e, "Error in organizer cmd_from_client() call"); anyhow::bail!("Organizer error: {:?}", e); } - } - Ok(res) => { - return Ok(()); - } + }, + Ok(res) => { return Ok(()); } } } Ok(()) } + + #[derive(thiserror::Error, Debug)] pub enum SessionClose { #[error("User logout")] @@ -1073,20 +708,11 @@ pub enum SessionClose { /// Dispatch a message from client to appropriate handler. /// Return true if the session should be kept open, or false if it should be closed. 
-pub async fn msg_dispatch( - req: &ClientToServerCmd, - ses: &mut UserSession, - server: &ServerState, -) -> Res { +pub async fn msg_dispatch(req: &ClientToServerCmd, ses: &mut UserSession, server: &ServerState) -> Res { use proto::client::client_to_server_cmd::Cmd; let res = match req.cmd.as_ref() { None => { - send_user_error!( - &ses.user_id, - server, - Topic::None, - format!("Missing command from client: {:?}", req) - ); + send_user_error!(&ses.user_id, server, Topic::None, format!("Missing command from client: {:?}", req)); Ok(()) } Some(cmd) => match cmd { @@ -1110,30 +736,17 @@ pub async fn msg_dispatch( Cmd::Logout(_) => { tracing::info!("logout from client: user={}", ses.user_id); return Err(SessionClose::Logout.into()); - } + }, }, }; if let Err(e) = res { // Ignore authz errors, they are already logged if let None = e.downcast_ref::() { - let cmd_str = req - .cmd - .as_ref() - .map(|c| format!("{:?}", c)) - .unwrap_or_default(); + let cmd_str = req.cmd.as_ref().map(|c| format!("{:?}", c)).unwrap_or_default(); tracing::warn!("[{}] '{cmd_str}' failed: {}", ses.sid, e); // Assume name is regex '^[a-zA-Z0-9_]+' of cmd_str - let cmd_name = regex::Regex::new(r"^[a-zA-Z0-9_]+") - .unwrap() - .find(&cmd_str) - .map(|m| m.as_str()) - .unwrap_or(cmd_str.as_str()); - send_user_error!( - &ses.user_id, - server, - Topic::None, - format!("Cmd '{cmd_name}' failed: {e}") - ); + let cmd_name = regex::Regex::new(r"^[a-zA-Z0-9_]+").unwrap().find(&cmd_str).map(|m| m.as_str()).unwrap_or(cmd_str.as_str()); + send_user_error!(&ses.user_id, server, Topic::None, format!("Cmd '{cmd_name}' failed: {e}")); } } Ok(true) diff --git a/server/src/database/basic_query.rs b/server/src/database/basic_query.rs index 8f23875a..c9b4f039 100644 --- a/server/src/database/basic_query.rs +++ b/server/src/database/basic_query.rs @@ -1,37 +1,32 @@ #[macro_export] macro_rules! implement_basic_query_traits { ($model:ty, $insert_model:ty, $table:ident, $pk_type:ty, $order_by:expr) => { + impl DbBasicQuery<$pk_type, $insert_model> for $model { + /// Insert a new object into the database. fn insert(conn: &mut PooledConnection, item: &$insert_model) -> DBResult { use schema::$table::dsl::*; - to_db_res(retry_if_db_locked!(diesel::insert_into($table) - .values(item) - .get_result(conn))) + to_db_res(retry_if_db_locked!(diesel::insert_into($table).values(item).get_result(conn))) } /// Insert multiple objects into the database. - fn insert_many( - conn: &mut PooledConnection, - items: &[$insert_model], - ) -> DBResult> { + fn insert_many(conn: &mut PooledConnection, items: &[$insert_model]) -> DBResult> { items.iter().map(|i| Self::insert(conn, i)).collect() } /// Get a single object by its primary key. - fn get(conn: &mut PooledConnection, pk: &$pk_type) -> DBResult { + fn get(conn: &mut PooledConnection, pk: &$pk_type) -> DBResult + { use schema::$table::dsl::*; - to_db_res(retry_if_db_locked!({ - $table.filter(id.eq(pk)).first::<$model>(conn) - })) + to_db_res(retry_if_db_locked!({ $table.filter(id.eq(pk)).first::<$model>(conn) })) } /// Get multiple objects by their primary keys. - fn get_many(conn: &mut PooledConnection, ids: &[$pk_type]) -> DBResult> { + fn get_many(conn: &mut PooledConnection, ids: &[$pk_type]) -> DBResult> + { use schema::$table::dsl::*; - to_db_res(retry_if_db_locked!({ - $table.filter(id.eq_any(ids)).load::<$model>(conn) - })) + to_db_res(retry_if_db_locked!({ $table.filter(id.eq_any(ids)).load::<$model>(conn) })) } /// Get all nodes of type Self, with no filtering, paginated. 
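For orientation between these two hunks: the `DbBasicQuery` methods this macro generates are what the WebSocket handlers above call. Below is a minimal, illustrative call-site sketch, not part of the patch; it assumes `PooledConnection`, `DBResult` and `DbBasicQuery` are reachable via `crate::database` (custom_ops.rs below imports them from that module), `purge_media_row` is a hypothetical helper, and `MediaFile`'s primary key is a `String`, as in the ws_handers hunks:

    use crate::database::{models, DBResult, DbBasicQuery, PooledConnection};

    // Hypothetical helper: fetch a row by primary key, then delete it.
    // Both `get` and `delete` come from implement_basic_query_traits!.
    fn purge_media_row(conn: &mut PooledConnection, id: &str) -> DBResult<bool> {
        let mf = models::MediaFile::get(conn, &id.to_string())?; // Err(DBError::NotFound()) if no such row
        models::MediaFile::delete(conn, &mf.id)                  // Ok(true) if a row was deleted
    }

The generated `delete` maps diesel's affected-row count to a bool (see the next hunk), which is why the sketch can return `DBResult<bool>` directly.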
@@ -48,57 +43,52 @@ macro_rules! implement_basic_query_traits {
         }
 
         /// Delete a single object from the database.
-        fn delete(conn: &mut PooledConnection, pk: &$pk_type) -> DBResult<bool> {
+        fn delete(conn: &mut PooledConnection, pk: &$pk_type) -> DBResult<bool>
+        {
             use schema::$table::dsl::*;
             to_db_res(retry_if_db_locked!({
-                diesel::delete($table.filter(id.eq(pk)))
-                    .execute(conn)
-                    .map(|n_rows| n_rows > 0)
+                diesel::delete($table.filter(id.eq(pk))).execute(conn).map(|n_rows| n_rows>0)
             }))
         }
 
         /// Delete multiple objects from the database.
         /// Returns the number of objects deleted.
-        fn delete_many(conn: &mut PooledConnection, ids: &[$pk_type]) -> DBResult<usize> {
+        fn delete_many(conn: &mut PooledConnection, ids: &[$pk_type]) -> DBResult<usize>
+        {
             use schema::$table::dsl::*;
             to_db_res(retry_if_db_locked!({
                 diesel::delete($table.filter(id.eq_any(ids))).execute(conn)
             }))
         }
     }
-    };
+    }
 }
 
 #[macro_export]
 macro_rules! implement_update_traits {
     ($model:ty, $table:ident, $pk_type:ty) => {
+
         impl DbUpdate<$pk_type> for $model {
             /// Update objects, replaces the entire object except for the primary key.
             fn update_many(conn: &mut PooledConnection, items: &[Self]) -> DBResult<Vec<Self>> {
                 use schema::$table::dsl::*;
                 let mut res: Vec<Self> = Vec::with_capacity(items.len());
                 for it in items {
-                    res.push(retry_if_db_locked!(diesel::update(
-                        $table.filter(id.eq(&it.id))
-                    )
-                    .set(it)
-                    .get_result(conn))?);
+                    res.push(retry_if_db_locked!(diesel::update($table.filter(id.eq(&it.id))).set(it).get_result(conn))?);
                 }
                 Ok(res)
             }
         }
-    };
+    }
 }
 
 #[macro_export]
 macro_rules! implement_query_by_user_traits {
     ($model:ty, $table:ident, $user_field:ident, $order_by:expr) => {
+
         impl DbQueryByUser for $model {
-            fn get_by_user(
-                conn: &mut PooledConnection,
-                uid: &str,
-                pg: DBPaging,
-            ) -> DBResult<Vec<Self>> {
+
+            fn get_by_user(conn: &mut PooledConnection, uid: &str, pg: DBPaging) -> DBResult<Vec<Self>> {
                 use schema::$table::dsl::*;
                 to_db_res(retry_if_db_locked!({
                     $table
@@ -111,18 +101,16 @@ macro_rules! implement_query_by_user_traits {
                 }))
             }
         }
-    };
+    }
 }
 
 #[macro_export]
 macro_rules! implement_query_by_media_file_traits {
     ($model:ty, $table:ident, $media_col:ident, $order_by:expr) => {
+
         impl DbQueryByMediaFile for $model {
-            fn get_by_media_file(
-                conn: &mut PooledConnection,
-                vid: &str,
-                pg: DBPaging,
-            ) -> DBResult<Vec<Self>> {
+
+            fn get_by_media_file(conn: &mut PooledConnection, vid: &str, pg: DBPaging) -> DBResult<Vec<Self>> {
                 use schema::$table::dsl::*;
                 to_db_res(retry_if_db_locked!({
                     $table
@@ -135,5 +123,5 @@ macro_rules!
implement_query_by_media_file_traits { })) } } - }; + } } diff --git a/server/src/database/custom_ops.rs b/server/src/database/custom_ops.rs index 3a4fc230..25147173 100644 --- a/server/src/database/custom_ops.rs +++ b/server/src/database/custom_ops.rs @@ -1,17 +1,15 @@ -use crate::{ - database::{models, schema, to_db_res, DBResult, EmptyDBResult}, - retry_if_db_locked, -}; use anyhow::Context; -use chrono::offset::Local; use diesel::prelude::*; +use chrono::offset::Local; +use crate::{database::{models, schema, to_db_res, DBResult, EmptyDBResult}, retry_if_db_locked}; use super::{error::DBError, DbBasicQuery, PooledConnection}; // ------------------- Model-specific custom operations ------------------- impl models::User { - pub fn set_name(conn: &mut PooledConnection, uid: &str, new_name: &str) -> EmptyDBResult { + pub fn set_name(conn: &mut PooledConnection, uid: &str, new_name: &str) -> EmptyDBResult + { use schema::users::dsl::*; retry_if_db_locked!({ diesel::update(users.filter(id.eq(uid))) @@ -27,20 +25,16 @@ impl models::User { /// * `conn` - Database connection /// * `user_id` - ID of the user /// * `username` - Name of the user, if you want to update it. If None, and user is being created, the name will be set to the user_id. - pub fn get_or_create( - conn: &mut PooledConnection, - user_id: &str, - username: Option<&str>, - ) -> DBResult { + pub fn get_or_create(conn: &mut PooledConnection, user_id: &str, username: Option<&str>) -> DBResult + { match models::User::get(conn, &user_id.to_string()) { Ok(u) => { // Update name if needed if let Some(username) = username { - models::User::set_name(conn, &u.id, &username) - .context("Failed to update user name")?; + models::User::set_name(conn, &u.id, &username).context("Failed to update user name")?; } models::User::get(conn, &u.id) - } + }, Err(DBError::NotFound()) => { // User not found, create a new user let new_user = models::UserInsert { @@ -48,19 +42,22 @@ impl models::User { name: username.unwrap_or(user_id).to_string(), }; models::User::insert(conn, &new_user) - } - Err(e) => Err(e), + }, + Err(e) => { Err(e) } } } } + impl models::MediaFile { + /// Set the recompressed flag for a media file. 
     ///
     /// # Arguments
     /// * `db` - Database
     /// * `vid` - Id of the media file
-    pub fn set_recompressed(conn: &mut PooledConnection, vid: &str) -> EmptyDBResult {
+    pub fn set_recompressed(conn: &mut PooledConnection, vid: &str) -> EmptyDBResult
+    {
         use schema::media_files::dsl::*;
         retry_if_db_locked!({
             diesel::update(media_files.filter(id.eq(vid)))
@@ -76,11 +73,8 @@ impl models::MediaFile {
     /// * `db` - Database
     /// * `vid` - Id of the media file
     /// * `sid` - Id of the subtitle
-    pub fn set_default_subtitle(
-        conn: &mut PooledConnection,
-        vid: &str,
-        sid: Option<i32>,
-    ) -> EmptyDBResult {
+    pub fn set_default_subtitle(conn: &mut PooledConnection, vid: &str, sid: Option<i32>) -> EmptyDBResult
+    {
         use schema::media_files::dsl::*;
         retry_if_db_locked!({
             diesel::update(media_files.filter(id.eq(vid)))
@@ -97,19 +91,12 @@ impl models::MediaFile {
     /// * `vid` - Id of the media file
     /// * `cols` - Width of the thumbnail sheet
     /// * `rows` - Height of the thumbnail sheet
-    pub fn set_thumb_sheet_dimensions(
-        conn: &mut PooledConnection,
-        vid: &str,
-        cols: u32,
-        rows: u32,
-    ) -> EmptyDBResult {
+    pub fn set_thumb_sheet_dimensions(conn: &mut PooledConnection, vid: &str, cols: u32, rows: u32) -> EmptyDBResult
+    {
         use schema::media_files::dsl::*;
         retry_if_db_locked!({
             diesel::update(media_files.filter(id.eq(vid)))
-                .set((
-                    thumb_sheet_cols.eq(cols as i32),
-                    thumb_sheet_rows.eq(rows as i32),
-                ))
+                .set((thumb_sheet_cols.eq(cols as i32), thumb_sheet_rows.eq(rows as i32)))
                 .execute(conn)
         })?;
         Ok(())
@@ -121,7 +108,8 @@ impl models::MediaFile {
     /// * `db` - Database
     /// * `vid` - Id of the media file
     /// * `new_value` - New value of the flag
-    pub fn set_has_thumb(conn: &mut PooledConnection, vid: &str, new_value: bool) -> EmptyDBResult {
+    pub fn set_has_thumb(conn: &mut PooledConnection, vid: &str, new_value: bool) -> EmptyDBResult
+    {
         use schema::media_files::dsl::*;
         retry_if_db_locked!({
             diesel::update(media_files.filter(id.eq(vid)))
@@ -137,7 +125,8 @@ impl models::MediaFile {
     /// # Arguments
     /// * `db` - Database
     /// * `vid` - Id of the media file
-    pub fn set_thumbs_done(conn: &mut PooledConnection, vid: &str) -> EmptyDBResult {
+    pub fn set_thumbs_done(conn: &mut PooledConnection, vid: &str) -> EmptyDBResult
+    {
         use schema::media_files::dsl::*;
         retry_if_db_locked!({
             diesel::update(media_files.filter(id.eq(vid)))
@@ -147,6 +136,7 @@ impl models::MediaFile {
         Ok(())
     }
 
+
     /// Rename a media file (title).
     ///
     /// # Arguments
@@ -158,7 +148,8 @@ impl models::MediaFile {
     /// * `EmptyResult`
     /// * `Err(NotFound)` - MediaFile not found
     /// * `Err(Other)` - Other error
-    pub fn rename(conn: &mut PooledConnection, vid: &str, new_name: &str) -> EmptyDBResult {
+    pub fn rename(conn: &mut PooledConnection, vid: &str, new_name: &str) -> EmptyDBResult
+    {
         use schema::media_files::dsl::*;
         retry_if_db_locked!({
             diesel::update(media_files.filter(id.eq(vid)))
@@ -172,21 +163,17 @@ impl models::MediaFile {
     ///
     /// # Returns
     /// * `Vec<MediaFile>` - List of MediaFile objects
-    pub fn get_all_with_missing_thumbnails(
-        conn: &mut PooledConnection,
-    ) -> DBResult<Vec<models::MediaFile>> {
+    pub fn get_all_with_missing_thumbnails(conn: &mut PooledConnection) -> DBResult<Vec<models::MediaFile>>
+    {
         use models::*;
         use schema::media_files::dsl::*;
-        to_db_res(retry_if_db_locked!({
-            media_files
-                .filter(thumbs_done.is_null())
-                .order_by(added_time.desc())
-                .load::<MediaFile>(conn)
-        }))
+        to_db_res(retry_if_db_locked!({ media_files.filter(thumbs_done.is_null()).order_by(added_time.desc()).load::<MediaFile>(conn) }))
     }
 }
 
+
 impl models::Comment {
+
     /// Edit a comment (change text).
     ///
     /// # Arguments
@@ -195,18 +182,19 @@ impl models::Comment {
     ///
     /// # Returns
     /// * `Res<bool>` - True if comment was edited, false if it was not found
-    pub fn edit(conn: &mut PooledConnection, comment_id: i32, new_comment: &str) -> DBResult<bool> {
+    pub fn edit(conn: &mut PooledConnection, comment_id: i32, new_comment: &str) -> DBResult<bool>
+    {
         use schema::comments::dsl::*;
         to_db_res(retry_if_db_locked!({
             diesel::update(comments.filter(id.eq(comment_id)))
-                .set((comment.eq(new_comment), edited.eq(diesel::dsl::now)))
-                .execute(conn)
-                .map(|x| x > 0)
+                .set((comment.eq(new_comment), edited.eq(diesel::dsl::now))).execute(conn).map(|x| x > 0)
         }))
     }
 }
 
+
 impl models::Message {
+
     /// Set the seen status of a message.
     ///
     /// # Arguments
@@ -216,13 +204,12 @@ impl models::Message {
     ///
     /// # Returns
     /// * `Res<bool>` - True if message was found and updated, false if it was not found
-    pub fn set_seen(conn: &mut PooledConnection, msg_id: i32, new_status: bool) -> DBResult<bool> {
+    pub fn set_seen(conn: &mut PooledConnection, msg_id: i32, new_status: bool) -> DBResult<bool>
+    {
         use schema::messages::dsl::*;
         to_db_res(retry_if_db_locked!({
             diesel::update(messages.filter(id.eq(msg_id)))
-                .set(seen.eq(new_status))
-                .execute(conn)
-                .map(|x| x > 0)
+                .set(seen.eq(new_status)).execute(conn).map(|x| x > 0)
         }))
     }
 
@@ -234,13 +221,11 @@ impl models::Message {
     ///
     /// # Returns
     /// * `Res<Vec<Message>>` - List of messages
-    pub fn get_by_comment(conn: &mut PooledConnection, cid: i32) -> DBResult<Vec<models::Message>> {
+    pub fn get_by_comment(conn: &mut PooledConnection, cid: i32) -> DBResult<Vec<models::Message>>
+    {
         use schema::messages::dsl::*;
         to_db_res(retry_if_db_locked!({
-            messages
-                .filter(comment_id.eq(cid))
-                .order(created.desc())
-                .load::<models::Message>(conn)
+            messages.filter(comment_id.eq(cid)).order(created.desc()).load::<models::Message>(conn)
         }))
     }
 }
diff --git a/server/src/database/db_backup.rs b/server/src/database/db_backup.rs
index d237f09f..0c326578 100644
--- a/server/src/database/db_backup.rs
+++ b/server/src/database/db_backup.rs
@@ -1,17 +1,17 @@
 use std::{fs::File, path::PathBuf};
 
-use anyhow::bail;
 use anyhow::Context;
 use flate2::{write::GzEncoder, Compression};
+use anyhow::bail;
+
 
 /// Backup the SQLite database to a tar.gz file.
 /// This is done before migrations.
-pub fn backup_sqlite_database(db_file: std::path::PathBuf) -> anyhow::Result<Option<PathBuf>> {
+pub fn backup_sqlite_database( db_file: std::path::PathBuf ) -> anyhow::Result<Option<PathBuf>> {
     if db_file.exists() {
         // Make a tar.gz backup
         let now = chrono::Local::now();
-        let backup_path =
-            db_file.with_extension(format!("backup-{}.tar.gz", now.format("%Y-%m-%dT%H_%M_%S")));
+        let backup_path = db_file.with_extension(format!("backup-{}.tar.gz", now.format("%Y-%m-%dT%H_%M_%S")));
 
         tracing::info!(file=%db_file.display(), backup=%backup_path.display(), "Backing up database before migration.");
         let backup_file = File::create(&backup_path).context("Error creating DB backup file")?;
@@ -21,38 +21,25 @@ pub fn backup_sqlite_database( db_file: std::path::PathBuf ) -> anyhow::Result<Option<PathBuf>> {
-pub fn restore_sqlite_database(
-    db_file: std::path::PathBuf,
-    backup_path: std::path::PathBuf,
-) -> anyhow::Result<()> {
+
+pub fn restore_sqlite_database( db_file: std::path::PathBuf, backup_path: std::path::PathBuf ) -> anyhow::Result<()> {
     if db_file.exists() {
         let _span = tracing::info_span!("restore_sqlite_database").entered();
         tracing::info!(file=%db_file.display(), backup=%backup_path.display(), "Restoring.");
@@ -61,30 +48,18 @@
         let gzip_reader = flate2::read::GzDecoder::new(backup_file);
         let mut tar = tar::Archive::new(gzip_reader);
 
-        let db_file_prefix = db_file
-            .file_name()
-            .context("DB file has no filename")?
-            .to_string_lossy();
+        let db_file_prefix = db_file.file_name().context("DB file has no filename")?.to_string_lossy();
         let suffices = ["", "-wal", "-shm"];
 
         //tar.unpack(db_file.parent().unwrap()).context("Error unpacking DB backup")?;
         for entry in tar.entries().context("Error reading tar archive")? {
             let mut entry = entry.context("Error reading tar entry")?;
-            let path = entry
-                .path()
-                .context("Error getting tar entry path")?
-                .to_path_buf();
+            let path = entry.path().context("Error getting tar entry path")?.to_path_buf();
             let path_str = path.to_string_lossy();
-            let acceptable_names: Vec<String> = suffices
-                .iter()
-                .map(|suffix| format!("{}{}", db_file_prefix, suffix))
-                .collect();
+            let acceptable_names: Vec<String> = suffices.iter().map(|suffix| format!("{}{}", db_file_prefix, suffix)).collect();
             if acceptable_names.iter().any(|p| path_str.eq(p)) {
-                let dst_file = db_file
-                    .parent()
-                    .expect("DB file had no parent")
-                    .join(path.file_name().expect("Tar entry has no filename"));
+                let dst_file = db_file.parent().expect("DB file had no parent").join(path.file_name().expect("Tar entry has no filename"));
                 tracing::debug!(file=?path_str, "Unpacking file from tar.");
                 entry.unpack(dst_file).context("Error unpacking file")?;
             } else {
diff --git a/server/src/database/error.rs b/server/src/database/error.rs
index 00d22656..0454cdad 100644
--- a/server/src/database/error.rs
+++ b/server/src/database/error.rs
@@ -1,5 +1,5 @@
-use anyhow;
 use thiserror;
+use anyhow;
 
 #[derive(thiserror::Error, Debug)]
 pub enum DBError {
diff --git a/server/src/database/migration_solver.rs b/server/src/database/migration_solver.rs
index e88fd869..708603a9 100644
--- a/server/src/database/migration_solver.rs
+++ b/server/src/database/migration_solver.rs
@@ -1,22 +1,20 @@
-use lib_clapshot_grpc::proto::org::Migration;
 use std::collections::{HashMap, HashSet};
+use lib_clapshot_grpc::proto::org::Migration;
 
 pub struct MigrationGraphModule {
-    pub name: String,                // Unique name (id) of this module
-    pub cur_version: Option<String>, // Current migration version this modules is at
-    pub migrations: Vec<Migration>,  // Available (alternative) migrations for this module
+    pub name: String,                   // Unique name (id) of this module
+    pub cur_version: Option<String>,    // Current migration version this modules is at
+    pub migrations: Vec<Migration>,     // Available (alternative) migrations for this module
 }
 
 /// For a given set of modules and their migrations, find a valid path of migrations that
 /// upgrades all modules to their latest version.
 ///
 /// Returns `None` if no solution was found.
-pub fn solve_migration_graph(
-    modules: Vec<&MigrationGraphModule>,
-) -> anyhow::Result<Option<Vec<Migration>>> {
+pub fn solve_migration_graph(modules: Vec<&MigrationGraphModule>) -> anyhow::Result<Option<Vec<Migration>>> {
     assert!(!modules.is_empty());
 
-    let mut cur_module_versions = HashMap::new(); // name -> version
+    let mut cur_module_versions = HashMap::new();       // name -> version
     let mut target_module_versions = HashMap::new();
 
     // Initialize the current and max versions for each module
@@ -24,30 +22,17 @@ pub fn solve_migration_graph(
         if let Some(cur_version) = &module.cur_version {
             cur_module_versions.insert(module.name.as_str(), cur_version.as_str());
         }
-        if let Some(max_version) = module
-            .migrations
-            .iter()
-            .max_by_key(|m| &m.version)
-            .map(|m| m.version.as_str())
-        {
+        if let Some(max_version) = module.migrations.iter().max_by_key(|m| &m.version).map(|m| m.version.as_str()) {
             target_module_versions.insert(module.name.as_str(), max_version);
         }
     }
 
     // List all migrations that advance the current version of some module as a tuple (module_name, migration)
-    let mut all_migrations: Vec<(&str, &Migration)> = modules
-        .iter()
-        .flat_map(|module| {
-            module
-                .migrations
-                .iter()
-                .filter(|mig| {
-                    module.cur_version.is_none()
-                        || mig.version.as_str() > module.cur_version.as_ref().unwrap().as_str()
-                })
-                .map(|mig| (module.name.as_str(), mig))
-        })
-        .collect();
+    let mut all_migrations: Vec<(&str, &Migration)> = modules.iter()
+        .flat_map(|module| module.migrations.iter()
+            .filter(|mig| module.cur_version.is_none() || mig.version.as_str() > module.cur_version.as_ref().unwrap().as_str())
+            .map(|mig| (module.name.as_str(), mig))
+        ).collect();
 
     // Check that uuids are unique
     let mut uuids = HashSet::new();
@@ -57,37 +42,29 @@ pub fn solve_migration_graph(
         }
     }
 
-    all_migrations.sort_by(|a, b| a.1.version.cmp(&b.1.version)); // Oldest versions first
+    all_migrations.sort_by(|a, b| a.1.version.cmp(&b.1.version));   // Oldest versions first
 
     let mut solution = None;
-    depth_first_search(
-        &all_migrations,
-        &target_module_versions,
-        cur_module_versions.clone(),
-        HashSet::new(),
-        vec![],
-        &mut solution,
-    );
-
-    Ok(solution.map(|path| path.into_iter().cloned().collect()))
+    depth_first_search(&all_migrations, &target_module_versions, cur_module_versions.clone(), HashSet::new(), vec![], &mut solution);
+
+    Ok(solution.map(|path| { path.into_iter().cloned().collect() }))
 }
 
+
 /// Recursive depth-first search for shortest path of migrations
 /// that upgrades all modules to their target version.
 fn depth_first_search<'a>(
-    all_migrations: &'a Vec<(&'a str, &'a Migration)>, // (module_name, migration)
-    max_module_versions: &'a HashMap<&'a str, &'a str>, // module_name -> target version
-    cur_module_versions: HashMap<&'a str, &'a str>,    // module_name -> current version
-    visited: HashSet<&'a str>,                         // Set of visited migration UUIDs
-    cur_path: Vec<&'a Migration>,                      // Current path of migrations (in reverse order)
-    best_path: &mut Option<Vec<&'a Migration>>,        // Best path found so far (in reverse order)
+    all_migrations: &'a Vec<(&'a str, &'a Migration)>,      // (module_name, migration)
+    max_module_versions: &'a HashMap<&'a str, &'a str>,     // module_name -> target version
+    cur_module_versions: HashMap<&'a str, &'a str>,         // module_name -> current version
+    visited: HashSet<&'a str>,                              // Set of visited migration UUIDs
+    cur_path: Vec<&'a Migration>,                           // Current path of migrations (in reverse order)
+    best_path: &mut Option<Vec<&'a Migration>>,             // Best path found so far (in reverse order)
 ) {
     // Solution found? (all modules are at their target version)
     if max_module_versions.iter().all(|(mod_name, max_ver)| {
-        cur_module_versions
-            .get(mod_name)
-            .map_or(false, |cur_ver| cur_ver == max_ver)
-    }) {
+        cur_module_versions.get(mod_name).map_or(false, |cur_ver| cur_ver == max_ver) })
+    {
         if best_path.is_none() || cur_path.len() < best_path.as_ref().unwrap().len() {
             *best_path = Some(cur_path.iter().cloned().collect());
         }
@@ -127,18 +104,13 @@ fn depth_first_search<'a>(
             let mut new_visited = visited.clone();
             new_visited.insert(mig.uuid.as_str());
 
-            depth_first_search(
-                all_migrations,
-                max_module_versions,
-                new_cur_module_versions,
-                new_visited,
-                new_path,
-                best_path,
-            );
+            depth_first_search(all_migrations, max_module_versions, new_cur_module_versions, new_visited, new_path, best_path);
         }
     }
 }
 
+
+
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -146,56 +118,30 @@ mod tests {
 
     macro_rules! migmod {
         ($name:expr, $cur_ver:expr, $migs:expr) => {
-            MigrationGraphModule {
-                name: $name.to_string(),
-                cur_version: $cur_ver.map(|s: &str| s.to_string()),
-                migrations: $migs,
-            }
+            MigrationGraphModule { name: $name.to_string(), cur_version: $cur_ver.map(|s: &str| s.to_string()), migrations: $migs }
         };
     }
 
     macro_rules! mig {
         ($uuid:expr, $ver:expr, $deps:expr) => {
-            Migration {
-                uuid: $uuid.to_string(),
-                version: $ver.to_string(),
-                dependencies: $deps,
-                description: "dummy-desc".to_string(),
-            }
+            Migration { uuid: $uuid.to_string(), version: $ver.to_string(), dependencies: $deps, description: "dummy-desc".to_string() }
         };
     }
 
     macro_rules! dep {
         ($name:expr, $min:expr, $max:expr) => {
-            Dependency {
-                name: $name.to_string(),
-                min_ver: $min.map(|s: &str| s.to_string()),
-                max_ver: $max.map(|s: &str| s.to_string()),
-            }
+            Dependency { name: $name.to_string(), min_ver: $min.map(|s: &str| s.to_string()), max_ver: $max.map(|s: &str| s.to_string()) }
         };
     }
 
     fn compare_results(result: &Option<Vec<Migration>>, expected: Option<Vec<&str>>) {
         match expected {
             None => {
-                assert!(
-                    result.is_none(),
-                    "Expected None, got: {:?}",
-                    result
-                        .clone()
-                        .unwrap()
-                        .iter()
-                        .map(|m| m.uuid.as_str())
-                        .collect::<Vec<&str>>()
-                );
+                assert!(result.is_none(), "Expected None, got: {:?}",
+                    result.clone().unwrap().iter().map(|m| m.uuid.as_str()).collect::<Vec<&str>>());
             }
             Some(expected) => {
                 let result = result.as_ref().unwrap();
-                let eq = result
-                    .iter()
-                    .zip(expected.iter())
-                    .all(|(a, b)| &a.uuid == b);
-                assert!(
-                    eq,
-                    "Expected:\n{:?}\nGot:\n{:?}",
+                let eq = result.iter().zip(expected.iter()).all(|(a, b)| &a.uuid == b);
+                assert!(eq, "Expected:\n{:?}\nGot:\n{:?}",
                     expected.iter().map(|m| m).collect::<Vec<_>>(),
                     result.iter().map(|m| m.uuid.as_str()).collect::<Vec<&str>>()
                );
@@ -212,153 +158,94 @@ mod tests {
 
     #[test]
     fn test_msolv_trivial_from_empty() {
-        let mod_server = migmod!(
-            "server",
-            None,
-            vec![
-                mig!("uuid1", "1", vec![]),
-                mig!("uuid2", "2", vec![dep!("server", Some("1"), Some("1"))]),
-                mig!("uuid3", "3", vec![dep!("server", Some("2"), Some("2"))]),
-            ]
-        );
-        let correct = vec!["uuid1", "uuid2", "uuid3"];
+        let mod_server = migmod!("server", None, vec![
+            mig!("uuid1", "1", vec![]),
+            mig!("uuid2", "2", vec![dep!("server", Some("1"), Some("1"))]),
+            mig!("uuid3", "3", vec![dep!("server", Some("2"), Some("2"))]),
+        ]);
+        let correct = vec![ "uuid1", "uuid2", "uuid3" ];
         solve_and_compare(vec![&mod_server], Some(correct));
     }
 
     #[test]
     fn test_msolv_shortcut() {
-        let mod_server = migmod!(
-            "server",
-            Some("1"),
-            vec![
-                mig!("uuid1", "1", vec![]),
-                mig!("uuid2", "2", vec![dep!("server", Some("1"), Some("1"))]),
-                mig!("uuid3", "3", vec![dep!("server", Some("2"), Some("2"))]),
Some("2"), Some("2"))]), - mig!("uuid4", "4", vec![dep!("server", Some("1"), Some("3"))]), - ] - ); - let correct = vec!["uuid4"]; + let mod_server = migmod!("server", Some("1"), vec![ + mig!("uuid1", "1", vec![]), + mig!("uuid2", "2", vec![dep!("server", Some("1"), Some("1"))]), + mig!("uuid3", "3", vec![dep!("server", Some("2"), Some("2"))]), + mig!("uuid4", "4", vec![dep!("server", Some("1"), Some("3"))]), + ]); + let correct = vec![ "uuid4" ]; solve_and_compare(vec![&mod_server], Some(correct)); } #[test] fn test_msolv_two_modules_indep() { - let mod_server = migmod!( - "server", - None, - vec![ - mig!("S1", "1", vec![]), - mig!("S2", "2", vec![dep!("server", Some("1"), Some("1"))]), - mig!("S3", "3", vec![dep!("server", Some("2"), Some("2"))]), - ] - ); - let mod_org = migmod!( - "org", - Some("0"), - vec![ - mig!("G1", "1", vec![dep!("org", None, Some("0"))]), - mig!("G2", "2", vec![dep!("org", Some("1"), Some("1"))]), - mig!("G3", "3", vec![dep!("org", Some("2"), Some("2"))]), - ] - ); - let correct = vec!["S1", "G1", "S2", "G2", "S3", "G3"]; + let mod_server = migmod!("server", None, vec![ + mig!("S1", "1", vec![]), + mig!("S2", "2", vec![dep!("server", Some("1"), Some("1"))]), + mig!("S3", "3", vec![dep!("server", Some("2"), Some("2"))]), + ]); + let mod_org = migmod!("org", Some("0"), vec![ + mig!("G1", "1", vec![dep!("org", None, Some("0"))]), + mig!("G2", "2", vec![dep!("org", Some("1"), Some("1"))]), + mig!("G3", "3", vec![dep!("org", Some("2"), Some("2"))]), + ]); + let correct = vec![ "S1", "G1", "S2", "G2", "S3", "G3" ]; solve_and_compare(vec![&mod_server, &mod_org], Some(correct)); } #[test] fn test_msolv_two_modules_dep() { - let mod_server = migmod!( - "server", - None, - vec![ - mig!("S1", "1", vec![]), - mig!("S2", "2", vec![dep!("server", Some("1"), Some("1"))]), - mig!("S3", "3", vec![dep!("server", Some("2"), Some("2"))]), - ] - ); - let mod_org = migmod!( - "org", - Some("0"), - vec![ - mig!( - "G1", - "1", - vec![ - dep!("org", Some("0"), Some("0")), - dep!("server", None, Some("1")) - ] - ), - mig!( - "G2", - "2", - vec![ - dep!("org", Some("1"), Some("1")), - dep!("server", Some("2"), Some("2")) - ] - ), - mig!( - "G3", - "3", - vec![ - dep!("org", Some("2"), Some("2")), - dep!("server", Some("2"), Some("2")) - ] - ), - ] - ); + let mod_server = migmod!("server", None, vec![ + mig!("S1", "1", vec![]), + mig!("S2", "2", vec![dep!("server", Some("1"), Some("1"))]), + mig!("S3", "3", vec![dep!("server", Some("2"), Some("2"))]), + ]); + let mod_org = migmod!("org", Some("0"), vec![ + mig!("G1", "1", vec![ + dep!("org", Some("0"), Some("0")), + dep!("server", None, Some("1"))]), + mig!("G2", "2", vec![ + dep!("org", Some("1"), Some("1")), + dep!("server", Some("2"), Some("2"))]), + mig!("G3", "3", vec![ + dep!("org", Some("2"), Some("2")), + dep!("server", Some("2"), Some("2"))]), + ]); let correct = vec!["S1", "G1", "S2", "G2", "G3", "S3"]; solve_and_compare(vec![&mod_server, &mod_org], Some(correct)); } #[test] fn test_msolv_one_module_nonsolvable() { - let mod_server = migmod!( - "server", - None, - vec![ - mig!("S1", "1", vec![]), - mig!("S2", "2", vec![dep!("server", Some("1"), Some("1"))]), - // missing migration to version 3 - mig!("S4", "4", vec![dep!("server", Some("3"), Some("3"))]), - ] - ); + let mod_server = migmod!("server", None, vec![ + mig!("S1", "1", vec![]), + mig!("S2", "2", vec![dep!("server", Some("1"), Some("1"))]), + // missing migration to version 3 + mig!("S4", "4", vec![dep!("server", Some("3"), Some("3"))]), + ]); 
solve_and_compare(vec![&mod_server], None); } + #[test] fn test_msolv_two_modules_nonsolvable() { - let mod_server = migmod!( - "server", - None, - vec![ - mig!("S1", "1", vec![]), - mig!("S2", "2", vec![dep!("server", Some("1"), Some("1"))]), - mig!("S3", "3", vec![dep!("server", Some("2"), Some("2"))]), - ] - ); - let mod_org = migmod!( - "org", - Some("0"), - vec![ - mig!( - "G1", - "1", - vec![ - dep!("org", Some("0"), Some("0")), - dep!("server", Some("1"), Some("1")) - ] - ), - mig!("G2", "2", vec![dep!("org", Some("2"), Some("2"))]), - mig!( - "G3", - "3", - vec![ - dep!("org", Some("2"), Some("2")), - dep!("server", Some("1"), Some("1")) - ] - ), - ] - ); + let mod_server = migmod!("server", None, vec![ + mig!("S1", "1", vec![]), + mig!("S2", "2", vec![dep!("server", Some("1"), Some("1"))]), + mig!("S3", "3", vec![dep!("server", Some("2"), Some("2"))]), + ]); + let mod_org = migmod!("org", Some("0"), vec![ + mig!("G1", "1", vec![ + dep!("org", Some("0"), Some("0")), + dep!("server", Some("1"), Some("1"))]), + mig!("G2", "2", vec![ + dep!("org", Some("2"), Some("2"))]), + mig!("G3", "3", vec![ + dep!("org", Some("2"), Some("2")), + dep!("server", Some("1"), Some("1"))]), + ]); solve_and_compare(vec![&mod_server, &mod_org], None); } @@ -371,53 +258,14 @@ mod tests { Migration: '2024-05-22-163000_add_media_type' of module 'clapshot.server' depends on: '[Dependency { name: "clapshot.server", min_ver: Some("2024-05-13-093800_add_users_table"), max_ver: Some("2024-05-13-093800_add_users_table") }]') Migration: '2024-05-30-202000_add_missing_users' of module 'clapshot.server' depends on: '[Dependency { name: "clapshot.server", min_ver: Some("2024-05-22-163000_add_media_type"), max_ver: Some("2024-05-22-163000_add_media_type") }]') */ - let mod_server = migmod!( - "clapshot.server", - Some("1"), - vec![ - mig!( - "2023-04-18-190209_change_video_primkey", - "2023-04-18-190209_change_video_primkey", - vec![dep!("clapshot.server", None, Some(""))] - ), - mig!( - "2023-04-18-190300_add_cascade_rules", - "2023-04-18-190300_add_cascade_rules", - vec![dep!( - "clapshot.server", - Some("2023-04-18-190209_change_video_primkey"), - Some("2023-04-18-190209_change_video_primkey") - )] - ), - mig!( - "2024-05-13-093800_add_users_table", - "2024-05-13-093800_add_users_table", - vec![dep!( - "clapshot.server", - Some("2023-04-18-190300_add_cascade_rules"), - Some("2023-04-18-190300_add_cascade_rules") - )] - ), - mig!( - "2024-05-22-163000_add_media_type", - "2024-05-22-163000_add_media_type", - vec![dep!( - "clapshot.server", - Some("2024-05-13-093800_add_users_table"), - Some("2024-05-13-093800_add_users_table") - )] - ), - mig!( - "2024-05-30-202000_add_missing_users", - "2024-05-30-202000_add_missing_users", - vec![dep!( - "clapshot.server", - Some("2024-05-22-163000_add_media_type"), - Some("2024-05-22-163000_add_media_type") - )] - ), - ] - ); + let mod_server = migmod!("clapshot.server", Some("1"), vec![ + mig!("2023-04-18-190209_change_video_primkey", "2023-04-18-190209_change_video_primkey", vec![dep!("clapshot.server", None, Some(""))]), + mig!("2023-04-18-190300_add_cascade_rules", "2023-04-18-190300_add_cascade_rules", vec![dep!("clapshot.server", Some("2023-04-18-190209_change_video_primkey"), Some("2023-04-18-190209_change_video_primkey"))]), + mig!("2024-05-13-093800_add_users_table", "2024-05-13-093800_add_users_table", vec![dep!("clapshot.server", Some("2023-04-18-190300_add_cascade_rules"), Some("2023-04-18-190300_add_cascade_rules"))]), + mig!("2024-05-22-163000_add_media_type", 
"2024-05-22-163000_add_media_type", vec![dep!("clapshot.server", Some("2024-05-13-093800_add_users_table"), Some("2024-05-13-093800_add_users_table"))]), + mig!("2024-05-30-202000_add_missing_users", "2024-05-30-202000_add_missing_users", vec![dep!("clapshot.server", Some("2024-05-22-163000_add_media_type"), Some("2024-05-22-163000_add_media_type"))]), + ]); solve_and_compare(vec![&mod_server], None); } + } diff --git a/server/src/database/mod.rs b/server/src/database/mod.rs index 406ced6e..865de769 100644 --- a/server/src/database/mod.rs +++ b/server/src/database/mod.rs @@ -1,19 +1,19 @@ -use anyhow::{anyhow, Context}; use diesel::migration::Migration; use diesel::prelude::*; use diesel::r2d2::ConnectionManager; use diesel::SqliteConnection; +use anyhow::{Context, anyhow}; use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness}; use std::path::Path; use std::sync::atomic::AtomicBool; -pub mod db_backup; +pub mod schema; +pub mod models; pub mod error; pub mod migration_solver; -pub mod models; -pub mod schema; +pub mod db_backup; #[cfg(test)] pub mod tests; @@ -27,6 +27,7 @@ pub type Pool = r2d2::Pool>; pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations"); + #[macro_export] macro_rules! retry_if_db_locked { ($op:expr) => { @@ -39,12 +40,7 @@ macro_rules! retry_if_db_locked { } else { let err_msg = res.as_ref().err().unwrap().to_string(); if (attempt <= 8) && err_msg.to_lowercase().contains("locked") { - tracing::debug!( - "DB: '{}, retrying in 100ms (attempt {}/{})", - err_msg, - attempt, - 8 - ); + tracing::debug!("DB: '{}, retrying in 100ms (attempt {}/{})", err_msg, attempt, 8); std::thread::sleep(std::time::Duration::from_millis(100)); attempt += 1; continue; @@ -54,7 +50,7 @@ macro_rules! retry_if_db_locked { } } })() - }; + } } /// Convert a diesel result to a DBResult, turning empty result @@ -73,14 +69,13 @@ pub struct DB { broken_for_test: AtomicBool, } + impl DB { + /// Connect to SQLite database with an URL (use this for memory databases) pub fn open_db_url(db_url: &str) -> DBResult { let manager = ConnectionManager::::new(db_url); - let pool = Pool::builder() - .max_size(16) - .build(manager) - .context("Failed to build DB pool")?; + let pool = Pool::builder().max_size(16).build(manager).context("Failed to build DB pool")?; Ok(DB { pool, broken_for_test: AtomicBool::new(false), @@ -88,64 +83,38 @@ impl DB { } /// Connect to SQLite database with a file path - pub fn open_db_file(db_file: &Path) -> DBResult { - let db_url = format!( - "sqlite://{}", - db_file - .to_str() - .ok_or(anyhow!("Invalid DB file path")) - .context("Failed to connect DB file")? 
-        );
+    pub fn open_db_file( db_file: &Path ) -> DBResult<DB> {
+        let db_url = format!("sqlite://{}", db_file.to_str().ok_or(anyhow!("Invalid DB file path"))
+            .context("Failed to connect DB file")?);
         let res = DB::open_db_url(&db_url);
         res
     }
 
     /// Get a connection from the pool
     pub fn conn(&self) -> DBResult<PooledConnection> {
-        if self
-            .broken_for_test
-            .load(std::sync::atomic::Ordering::Relaxed)
-        {
+        if self.broken_for_test.load(std::sync::atomic::Ordering::Relaxed) {
             let bad_manager = ConnectionManager::<SqliteConnection>::new("sqlite:///dev/urandom");
-            let bad_pool = Pool::builder()
-                .build(bad_manager)
-                .context("TEST ERROR: Failed to build 'broken' DB pool")?;
-            return bad_pool.get().map_err(|e| {
-                anyhow!(
-                    "TEST ERROR: Failed to get connection from 'broken' pool: {:?}",
-                    e
-                )
-                .into()
-            });
+            let bad_pool = Pool::builder().build(bad_manager).context("TEST ERROR: Failed to build 'broken' DB pool")?;
+            return bad_pool.get().map_err(|e| anyhow!("TEST ERROR: Failed to get connection from 'broken' pool: {:?}", e).into());
         }
-        let mut conn = self
-            .pool
-            .get()
-            .context("Failed to get connection from pool")?;
-        diesel::sql_query(
-            r#"
+        let mut conn = self.pool.get().context("Failed to get connection from pool")?;
+        diesel::sql_query(r#"
             PRAGMA foreign_keys = ON;
             PRAGMA journal_mode = WAL;
             PRAGMA wal_autocheckpoint = 1000;
             PRAGMA wal_checkpoint(TRUNCATE);
             PRAGMA synchronous = NORMAL;
             PRAGMA busy_timeout = 15000;
-            "#,
-        )
-        .execute(&mut conn)
-        .context("Failed to set DB pragmas")?;
+            "#).execute(&mut conn).context("Failed to set DB pragmas")?;
         Ok(conn)
     }
 
     /// Return list of any pending (migration_name, version) tuples
     pub fn pending_server_migrations(&self) -> DBResult<Vec<(String, String)>> {
-        Ok(
-            MigrationHarness::pending_migrations(&mut self.conn()?, MIGRATIONS)
-                .map_err(|e| anyhow!("Failed to get migrations: {:?}", e))?
-                .iter()
-                .map(|m| (format!("{}", m.name()), format!("{}", m.name().version())))
-                .collect(),
-        )
+        Ok(MigrationHarness::pending_migrations(&mut self.conn()?, MIGRATIONS)
+            .map_err(|e| anyhow!("Failed to get migrations: {:?}", e))?
+            .iter().map(|m| (format!("{}", m.name()), format!("{}", m.name().version())))
+            .collect())
     }
 
     /// Return name of the latest applied migration
@@ -158,25 +127,18 @@ impl DB {
     }
 
     /// Run a named migration
-    pub fn apply_server_migration(
-        &self,
-        conn: &mut SqliteConnection,
-        migration_name: &str,
-    ) -> EmptyDBResult {
+    pub fn apply_server_migration(&self, conn: &mut SqliteConnection, migration_name: &str) -> EmptyDBResult {
+
         let pending = MigrationHarness::pending_migrations(conn, MIGRATIONS)
             .map_err(|e| anyhow!("Failed to get migrations: {:?}", e))?;
-        let migration = pending
-            .iter()
-            .find(|m| m.name().to_string() == migration_name)
+        let migration = pending.iter().find(|m| m.name().to_string() == migration_name)
             .ok_or_else(|| anyhow!("Migration not found: {}", migration_name))?;
 
-        let _span = tracing::info_span!(
-            "apply_server_migration",
+        let _span = tracing::info_span!("apply_server_migration",
             name = migration.name().to_string(),
             new_ver = migration.name().version().to_string(),
-        )
-        .entered();
+        ).entered();
 
         tracing::debug!("PRAGMA foreign_keys = OFF;");
         diesel::sql_query("PRAGMA foreign_keys = OFF;").execute(conn)?;
@@ -185,8 +147,7 @@ impl DB {
         diesel::sql_query("PRAGMA legacy_alter_table=ON;").execute(conn)?;
 
         let res: EmptyDBResult = conn.transaction(|conn| {
-            sqlite_check_foreign_key_status(conn, false)
-                .context("Pragma failed to disable foreign keys")?;
+            sqlite_check_foreign_key_status(conn, false).context("Pragma failed to disable foreign keys")?;
 
             tracing::info!("Applying...");
             MigrationHarness::run_migration(conn, &**migration)
@@ -198,19 +159,18 @@ impl DB {
         res.and_then(|_| {
             tracing::debug!("PRAGMA foreign_keys = ON;");
             diesel::sql_query("PRAGMA foreign_keys = ON;").execute(conn)?;
-            sqlite_check_foreign_key_status(conn, true)
-                .context("Pragma failed to re-enable foreign keys")?;
+            sqlite_check_foreign_key_status(conn, true).context("Pragma failed to re-enable foreign keys")?;
             Ok(())
         })
     }
 
     /// "Corrupt" the connection for testing so that subsequent queries fail
     pub fn break_db(&self) {
-        self.broken_for_test
-            .store(true, std::sync::atomic::Ordering::Relaxed);
+        self.broken_for_test.store(true, std::sync::atomic::Ordering::Relaxed);
    }
 }
 
+
 #[derive(QueryableByName, Debug)]
 struct ForeignKeyEnforcement {
     #[diesel(sql_type = diesel::sql_types::Integer)]
@@ -218,17 +178,12 @@ struct ForeignKeyEnforcement {
     value: i32,
 }
 
-pub fn sqlite_check_foreign_key_status(
-    conn: &mut SqliteConnection,
-    should_be_on: bool,
-) -> EmptyDBResult {
+pub fn sqlite_check_foreign_key_status(conn: &mut SqliteConnection, should_be_on: bool) -> EmptyDBResult {
     let fk_status: Vec<ForeignKeyEnforcement> = diesel::sql_query("PRAGMA foreign_keys;")
         .load(conn)
         .map_err(|e| anyhow!("Failed to check foreign key setting: {:?}", e))?;
 
-    if fk_status.is_empty() {
-        return Err(anyhow!("Failed to check foreign key setting"))?;
-    }
+    if fk_status.is_empty() { return Err(anyhow!("Failed to check foreign key setting"))?; }
 
     if should_be_on && fk_status.iter().any(|fk| fk.value != 1) {
         return Err(anyhow!("Assertion failed: SQLite foreign_keys != ON").into());
@@ -238,8 +193,11 @@ pub fn sqlite_check_foreign_key_status(
     Ok(())
 }
 
+
+
 /// Check for foreign key violations in the database
 pub fn sqlite_foreign_key_check(conn: &mut SqliteConnection, log_as_errors: bool) -> EmptyDBResult {
+
     #[derive(QueryableByName, Debug)]
     struct ForeignKeyCheck {
         #[diesel(sql_type = diesel::sql_types::Text)]
...
     }
 
     let violations: Vec<ForeignKeyCheck> = diesel::sql_query("PRAGMA foreign_key_check;")
diesel::sql_query("PRAGMA foreign_key_check;") - .load(conn) - .map_err(|e| anyhow!("Failed to check foreign key violations: {:?}", e))?; + .load(conn).map_err(|e| anyhow!("Failed to check foreign key violations: {:?}", e))?; if violations.is_empty() { Ok(()) } else { @@ -292,17 +249,14 @@ impl DBPaging { impl Default for DBPaging { fn default() -> Self { - Self { - page_num: 0, - page_size: unsafe { std::num::NonZeroU32::new_unchecked(u32::MAX) }, - } + Self { page_num: 0, page_size: unsafe { std::num::NonZeroU32::new_unchecked(u32::MAX) } } } } + pub trait DbBasicQuery: Sized -where - P: std::str::FromStr + Send + Sync + Clone, - I: Send + Sync, + where P: std::str::FromStr + Send + Sync + Clone, + I: Send + Sync, { /// Insert a new object into the database. fn insert(conn: &mut PooledConnection, item: &I) -> DBResult; @@ -328,56 +282,19 @@ where } pub trait DbUpdate

: Sized -where - P: std::str::FromStr + Send + Sync + Clone, + where P: std::str::FromStr + Send + Sync + Clone, { /// Update objects, replaces the entire object except for the primary key. fn update_many(conn: &mut PooledConnection, items: &[Self]) -> DBResult>; } mod basic_query; -crate::implement_basic_query_traits!( - models::User, - models::UserInsert, - users, - String, - created.desc() -); -crate::implement_basic_query_traits!( - models::MediaType, - models::MediaType, - media_types, - String, - id.desc() -); -crate::implement_basic_query_traits!( - models::MediaFile, - models::MediaFileInsert, - media_files, - String, - added_time.desc() -); -crate::implement_basic_query_traits!( - models::Comment, - models::CommentInsert, - comments, - i32, - created.desc() -); -crate::implement_basic_query_traits!( - models::Message, - models::MessageInsert, - messages, - i32, - created.desc() -); -crate::implement_basic_query_traits!( - models::Subtitle, - models::SubtitleInsert, - subtitles, - i32, - added_time.desc() -); +crate::implement_basic_query_traits!(models::User, models::UserInsert, users, String, created.desc()); +crate::implement_basic_query_traits!(models::MediaType, models::MediaType, media_types, String, id.desc()); +crate::implement_basic_query_traits!(models::MediaFile, models::MediaFileInsert, media_files, String, added_time.desc()); +crate::implement_basic_query_traits!(models::Comment, models::CommentInsert, comments, i32, created.desc()); +crate::implement_basic_query_traits!(models::Message, models::MessageInsert, messages, i32, created.desc()); +crate::implement_basic_query_traits!(models::Subtitle, models::SubtitleInsert, subtitles, i32, added_time.desc()); crate::implement_update_traits!(models::User, users, String); crate::implement_update_traits!(models::MediaFile, media_files, String); @@ -385,6 +302,8 @@ crate::implement_update_traits!(models::Comment, comments, i32); crate::implement_update_traits!(models::Message, messages, i32); crate::implement_update_traits!(models::Subtitle, subtitles, i32); + + pub trait DbQueryByUser: Sized { /// Get all objects of type Self that belong to given user. fn get_by_user(conn: &mut PooledConnection, uid: &str, pg: DBPaging) -> DBResult>; @@ -394,29 +313,12 @@ crate::implement_query_by_user_traits!(models::MediaFile, media_files, user_id, crate::implement_query_by_user_traits!(models::Comment, comments, user_id, created.desc()); crate::implement_query_by_user_traits!(models::Message, messages, user_id, created.desc()); + + pub trait DbQueryByMediaFile: Sized { /// Get all objects of type Self that are linked to given media file. 
- fn get_by_media_file( - conn: &mut PooledConnection, - vid: &str, - pg: DBPaging, - ) -> DBResult>; + fn get_by_media_file(conn: &mut PooledConnection, vid: &str, pg: DBPaging) -> DBResult>; } -crate::implement_query_by_media_file_traits!( - models::Comment, - comments, - media_file_id, - created.desc() -); -crate::implement_query_by_media_file_traits!( - models::Message, - messages, - media_file_id, - created.desc() -); -crate::implement_query_by_media_file_traits!( - models::Subtitle, - subtitles, - media_file_id, - added_time.desc() -); +crate::implement_query_by_media_file_traits!(models::Comment, comments, media_file_id, created.desc()); +crate::implement_query_by_media_file_traits!(models::Message, messages, media_file_id, created.desc()); +crate::implement_query_by_media_file_traits!(models::Subtitle, subtitles, media_file_id, added_time.desc()); diff --git a/server/src/database/models.rs b/server/src/database/models.rs index 6507f176..4fe67b0f 100644 --- a/server/src/database/models.rs +++ b/server/src/database/models.rs @@ -1,14 +1,12 @@ +use diesel::{prelude::*, QueryId}; +use serde::{Deserialize, Serialize}; use super::schema::*; use chrono; use chrono::naive::serde::{ts_seconds, ts_seconds_option}; use chrono::TimeZone; -use diesel::{prelude::*, QueryId}; -use serde::{Deserialize, Serialize}; use timeago; -#[derive( - Serialize, Deserialize, Debug, Default, Queryable, Selectable, Identifiable, AsChangeset, Clone, -)] +#[derive(Serialize, Deserialize, Debug, Default, Queryable, Selectable, Identifiable, AsChangeset, Clone)] #[diesel(treat_none_as_null = true)] pub struct User { pub id: String, @@ -25,9 +23,7 @@ pub struct UserInsert { pub name: String, } -#[derive( - Serialize, Deserialize, Debug, Queryable, Selectable, Insertable, Identifiable, QueryId, Clone, -)] +#[derive(Serialize, Deserialize, Debug, Queryable, Selectable, Insertable, Identifiable, QueryId, Clone)] #[diesel(treat_none_as_null = true)] #[diesel(table_name = media_types)] #[diesel(primary_key(id))] @@ -35,9 +31,8 @@ pub struct MediaType { pub id: String, } -#[derive( - Serialize, Deserialize, Debug, Queryable, Selectable, Identifiable, QueryId, AsChangeset, Clone, -)] + +#[derive(Serialize, Deserialize, Debug, Queryable, Selectable, Identifiable, QueryId, AsChangeset, Clone)] #[diesel(treat_none_as_null = true)] #[diesel(table_name = media_files)] #[diesel(primary_key(id))] @@ -86,18 +81,7 @@ pub struct MediaFileInsert { // ------------------------------------------------------- -#[derive( - Serialize, - Deserialize, - Debug, - Default, - Queryable, - Selectable, - Identifiable, - Associations, - AsChangeset, - Clone, -)] +#[derive(Serialize, Deserialize, Debug, Default, Queryable, Selectable, Identifiable, Associations, AsChangeset, Clone)] #[diesel(belongs_to(MediaFile, foreign_key = media_file_id))] #[diesel(treat_none_as_null = true)] pub struct Subtitle { @@ -128,20 +112,10 @@ pub struct SubtitleInsert { // ------------------------------------------------------- -#[derive( - Serialize, - Deserialize, - Debug, - Associations, - Queryable, - Selectable, - Identifiable, - QueryId, - AsChangeset, - Clone, -)] +#[derive(Serialize, Deserialize, Debug, Associations, Queryable, Selectable, Identifiable, QueryId, AsChangeset, Clone)] #[diesel(belongs_to(User, foreign_key = user_id))] #[diesel(belongs_to(MediaFile, foreign_key = media_file_id))] + #[diesel(treat_none_as_null = true)] pub struct Comment { pub id: i32, @@ -177,22 +151,12 @@ pub struct CommentInsert { pub drawing: Option, pub subtitle_id: Option, 
     pub subtitle_filename_ifnull: Option<String>,
+
 }
 
 // -------------------------------------------------------
 
-#[derive(
-    Serialize,
-    Deserialize,
-    Debug,
-    Default,
-    Queryable,
-    Selectable,
-    Identifiable,
-    Associations,
-    AsChangeset,
-    Clone,
-)]
+#[derive(Serialize, Deserialize, Debug, Default, Queryable, Selectable, Identifiable, Associations, AsChangeset, Clone)]
 #[diesel(belongs_to(User, foreign_key = user_id))]
 #[diesel(belongs_to(MediaFile, foreign_key = media_file_id))]
 #[diesel(belongs_to(Comment, foreign_key = comment_id))]
diff --git a/server/src/database/schema.rs b/server/src/database/schema.rs
index 03a9a64b..93f2c0d4 100644
--- a/server/src/database/schema.rs
+++ b/server/src/database/schema.rs
@@ -84,6 +84,7 @@ diesel::table! {
 }
 
 diesel::joinable!(comments -> subtitles (subtitle_id));
+
 diesel::allow_tables_to_appear_in_same_query!(
     users,
     comments,
diff --git a/server/src/database/tests.rs b/server/src/database/tests.rs
index beda0c95..37993331 100644
--- a/server/src/database/tests.rs
+++ b/server/src/database/tests.rs
@@ -1,9 +1,8 @@
-use crate::database::*;
 use tracing_test::traced_test;
+use crate::database::*;
+
+use models::{User, MediaType, MediaFile, MediaFileInsert, Message, MessageInsert, Comment, CommentInsert};
 
-use models::{
-    Comment, CommentInsert, MediaFile, MediaFileInsert, MediaType, Message, MessageInsert, User,
-};
 
 fn _dump_db(conn: &mut PooledConnection) {
     println!("================ dump_db ================");
@@ -11,31 +10,22 @@ fn _dump_db(conn: &mut PooledConnection) {
     conn.transaction(|conn| {
         let media_types = MediaType::get_all(conn, DBPaging::default()).unwrap();
         println!("----- Media types -----");
-        for v in media_types {
-            println!("----\n{:#?}", v);
-        }
+        for v in media_types { println!("----\n{:#?}", v);}
 
         let media_files = MediaFile::get_all(conn, DBPaging::default()).unwrap();
         println!("----- Media files -----");
-        for v in media_files {
-            println!("----\n{:#?}", v);
-        }
+        for v in media_files { println!("----\n{:#?}", v);}
 
         let comments = Comment::get_all(conn, DBPaging::default()).unwrap();
         println!("----- Comments -----");
-        for c in comments {
-            println!("----\n{:#?}", c);
-        }
+        for c in comments { println!("----\n{:#?}", c);}
 
         let messages = Message::get_all(conn, DBPaging::default()).unwrap();
         println!("----- Messages -----");
-        for m in messages {
-            println!("----\n{:#?}", m);
-        }
+        for m in messages { println!("----\n{:#?}", m);}
 
         DBResult::Ok(())
-    })
-    .unwrap();
+    }).unwrap();
     println!("=========================================");
 }
 
@@ -56,34 +46,29 @@ fn _dump_db(conn: &mut PooledConnection) {
 ///
 ///
 /// ```
-pub fn make_test_db() -> (
-    std::sync::Arc<DB>,
-    assert_fs::TempDir,
-    Vec<MediaFile>,
-    Vec<Comment>,
-) {
+pub fn make_test_db() -> (std::sync::Arc<DB>, assert_fs::TempDir, Vec<MediaFile>, Vec<Comment>)
+{
     println!("--- make_test_db");
 
     let data_dir = assert_fs::TempDir::new().unwrap();
     std::fs::create_dir(&data_dir.path().join("incoming")).ok();
 
-    let db =
-        std::sync::Arc::new(DB::open_db_file(data_dir.join("clapshot.sqlite").as_path()).unwrap());
+    let db = std::sync::Arc::new(DB::open_db_file(data_dir.join("clapshot.sqlite").as_path()).unwrap());
     let conn = &mut db.conn().unwrap();
 
     for (m, _ver) in db.pending_server_migrations().unwrap() {
         db.apply_server_migration(conn, &m).unwrap();
     }
 
-    _dump_db(conn); // Uncomment to debug database contents
+    _dump_db(conn);  // Uncomment to debug database contents
 
     // Make some videos
     let hashes = vec!["B1DE0", "11111", "22222", "B1DE3", "B1DE4"];
     let mkvid = |i: usize| {
+
         let user_id = format!("user.num{}", 1 + i % 2);
         let username = format!("User Number{}", 1 + i % 2);
-        let user =
-            User::get_or_create(conn, &user_id, Some(&username)).expect("Failed to create user");
+        let user = User::get_or_create(conn, &user_id, Some(&username)).expect("Failed to create user");
 
         let v = MediaFileInsert {
             id: hashes[i].to_string(),
@@ -123,8 +108,7 @@ pub fn make_test_db() -> (
         let c = Comment::insert(conn, &c).expect("Failed to insert comment");
         let dp = data_dir.join("videos").join(vid).join("drawings");
         std::fs::create_dir_all(&dp).expect("Failed to create drawing directory");
-        std::fs::write(dp.join(&c.drawing.clone().unwrap()), "IMAGE_DATA")
-            .expect("Failed to write drawing");
+        std::fs::write(dp.join(&c.drawing.clone().unwrap()), "IMAGE_DATA").expect("Failed to write drawing");
         c
     };
     let mut comments = (0..5)
@@ -154,6 +138,7 @@ pub fn make_test_db() -> (
     (db, data_dir, videos, comments)
 }
 
+
 #[test]
 #[traced_test]
 fn test_pagination() -> anyhow::Result<()> {
     let (db, _data_dir, _videos, comments) = make_test_db();
     let conn = &mut db.conn()?;
 
     // Test pagination of comments
-    let mut res = Comment::get_all(
-        conn,
-        DBPaging {
-            page_num: 0,
-            page_size: 3.try_into()?,
-        },
-    )?;
+    let mut res = Comment::get_all(conn, DBPaging { page_num: 0, page_size: 3.try_into()? })?;
     println!("---- page 0, 3");
     println!("res: {:#?}", res);
     assert_eq!(res[1].id, comments[1].id);
     assert_eq!(res[2].id, comments[2].id);
 
-    res = Comment::get_all(
-        conn,
-        DBPaging {
-            page_num: 1,
-            page_size: 3.try_into()?,
-        },
-    )?;
+    res = Comment::get_all(conn, DBPaging { page_num: 1, page_size: 3.try_into()? })?;
     println!("---- page 1, 3");
     println!("res: {:#?}", res);
     assert_eq!(res.len(), 3);
     assert_eq!(res[0].id, comments[3].id);
     assert_eq!(res[1].id, comments[4].id);
     assert_eq!(res[2].id, comments[5].id);
 
-    res = Comment::get_all(
-        conn,
-        DBPaging {
-            page_num: 2,
-            page_size: 3.try_into()?,
-        },
-    )?;
+    res = Comment::get_all(conn, DBPaging { page_num: 2, page_size: 3.try_into()? })?;
     println!("---- page 2, 3");
     println!("res: {:#?}", res);
     assert_eq!(res.len(), 2);
@@ -206,21 +173,20 @@ fn test_pagination() -> anyhow::Result<()> {
     Ok(())
 }
 
+
 // ----------------------------------------------------------------------------
 
+
 #[test]
 #[traced_test]
-fn test_fixture_state() -> anyhow::Result<()> {
+fn test_fixture_state() -> anyhow::Result<()>
+{
     let (db, _data_dir, videos, comments) = make_test_db();
     let conn = &mut db.conn()?;
 
     // First 5 comments have no parent, last 2 have parent_id=1
-    for i in 0..5 {
-        assert!(comments[i].parent_id.is_none());
-    }
-    for i in 5..5 + 2 {
-        assert_eq!(comments[i].parent_id, Some(comments[0].id));
-    }
+    for i in 0..5 { assert!(comments[i].parent_id.is_none()); }
+    for i in 5..5 + 2 { assert_eq!(comments[i].parent_id, Some(comments[0].id)); }
 
     // Video #0 has 3 comments, video #1 has 2, video #2 has 1
     assert_eq!(comments[0].media_file_id, comments[3].media_file_id);
@@ -235,17 +201,14 @@ fn test_fixture_state() -> anyhow::Result<()> {
     for v in videos.iter() {
         assert_eq!(MediaFile::get(conn, &v.id)?.id, v.id);
         let comments = Comment::get_by_media_file(conn, &v.id, DBPaging::default())?;
-        assert_eq!(
-            comments.len(),
-            match v.id.as_str() {
-                "B1DE0" => 5,
-                "11111" => 2,
-                "22222" => 1,
-                "B1DE3" => 0,
-                "B1DE4" => 0,
-                _ => panic!("Unexpected media file id"),
-            }
-        );
+        assert_eq!(comments.len(), match v.id.as_str() {
+            "B1DE0" => 5,
+            "11111" => 2,
+            "22222" => 1,
+            "B1DE3" => 0,
+            "B1DE4" => 0,
+            _ => panic!("Unexpected media file id"),
+        });
     }
     for c in comments.iter() {
         assert_eq!(models::Comment::get(conn, &c.id)?.id, c.id);
@@ -253,55 +216,32 @@ fn test_fixture_state() -> anyhow::Result<()> {
     }
 
     // Check that we can get videos by user
-    assert_eq!(
-        models::MediaFile::get_by_user(conn, "user.num1", DBPaging::default())?.len(),
-        3
-    );
-    assert_eq!(
-        models::MediaFile::get_by_user(conn, "user.num2", DBPaging::default())?.len(),
-        2
-    );
+    assert_eq!(models::MediaFile::get_by_user(conn, "user.num1", DBPaging::default())?.len(), 3);
+    assert_eq!(models::MediaFile::get_by_user(conn, "user.num2", DBPaging::default())?.len(), 2);
 
     Ok(())
 }
 
+
 #[test]
 #[traced_test]
 fn test_comment_delete() -> anyhow::Result<()> {
     let (db, _data_dir, _vid, com) = make_test_db();
     let conn = &mut db.conn()?;
 
-    assert_eq!(
-        Comment::get_by_media_file(conn, &com[1].media_file_id, DBPaging::default())?.len(),
-        2,
-        "Media files should have 2 comments before deletion"
-    );
+    assert_eq!(Comment::get_by_media_file(conn, &com[1].media_file_id, DBPaging::default())?.len(), 2, "Media files should have 2 comments before deletion");
 
     // Delete comment #2 and check that it was deleted, and nothing else
     models::Comment::delete(&mut db.conn()?, &com[1].id)?;
     for c in com.iter() {
         if c.id == com[1].id {
-            assert!(
-                matches!(
-                    models::Comment::get(conn, &c.id).unwrap_err(),
-                    DBError::NotFound()
-                ),
-                "Comment should be deleted"
-            );
+            assert!(matches!(models::Comment::get(conn, &c.id).unwrap_err() , DBError::NotFound()), "Comment should be deleted");
         } else {
-            assert_eq!(
-                models::Comment::get(conn, &c.id)?.id,
-                c.id,
-                "Deletion removed wrong comment(s)"
-            );
+            assert_eq!(models::Comment::get(conn, &c.id)?.id, c.id, "Deletion removed wrong comment(s)");
         }
     }
 
     // Check that media file still has 1 comment
-    assert_eq!(
-        Comment::get_by_media_file(conn, &com[1].media_file_id, DBPaging::default())?.len(),
-        1,
-        "Media file should have 1 comment left"
-    );
+    assert_eq!(Comment::get_by_media_file(conn, &com[1].media_file_id, DBPaging::default())?.len(), 1, "Media file should have 1 comment left");
 
     // Delete last, add a new one and check for ID reuse
     models::Comment::delete(&mut db.conn()?, &com[6].id)?;
@@ -317,10 +257,7 @@ fn test_comment_delete() -> anyhow::Result<()> {
         subtitle_filename_ifnull: None,
     };
     let new_id = models::Comment::insert(conn, &c)?.id;
-    assert_ne!(
-        new_id, com[6].id,
-        "Comment ID was re-used after deletion. This would mix up comment threads in the UI."
-    );
+    assert_ne!(new_id, com[6].id, "Comment ID was re-used after deletion. This would mix up comment threads in the UI.");
 
     Ok(())
 }
@@ -345,6 +282,7 @@ fn test_rename_video() -> anyhow::Result<()> {
     Ok(())
 }
 
+
 #[test]
 #[traced_test]
 fn test_user_messages() -> anyhow::Result<()> {
@@ -393,21 +331,15 @@ fn test_user_messages() -> anyhow::Result<()> {
 
         let a = serde_json::to_value(Message::get(conn, &new_msg.id)?.to_proto3())?;
         let b = serde_json::to_value(new_msg.to_proto3())?;
-        assert_eq!(a, b);
+        assert_eq!(a,b);
 
         assert!(!Message::get(conn, &new_msg.id)?.seen);
         new_msgs.push(new_msg);
     }
 
     // Correctly count messages
-    assert_eq!(
-        Message::get_by_user(conn, "user.num1", DBPaging::default())?.len(),
-        2
-    );
-    assert_eq!(
-        Message::get_by_user(conn, "user.num2", DBPaging::default())?.len(),
-        1
-    );
+    assert_eq!(Message::get_by_user(conn, "user.num1", DBPaging::default())?.len(), 2);
+    assert_eq!(Message::get_by_user(conn, "user.num2", DBPaging::default())?.len(), 1);
 
     // Mark message #2 as seen
     Message::set_seen(conn, new_msgs[1].id, true)?;
@@ -416,14 +348,8 @@ fn test_user_messages() -> anyhow::Result<()> {
     // Delete & recount
     Message::delete(conn, &new_msgs[2].id)?;
     Message::delete(conn, &new_msgs[0].id)?;
-    assert_eq!(
-        Message::get_by_user(conn, "user.num1", DBPaging::default())?.len(),
-        1
-    );
-    assert_eq!(
-        Message::get_by_user(conn, "user.num2", DBPaging::default())?.len(),
-        0
-    );
+    assert_eq!(Message::get_by_user(conn, "user.num1", DBPaging::default())?.len(), 1);
+    assert_eq!(Message::get_by_user(conn, "user.num2", DBPaging::default())?.len(), 0);
 
     Ok(())
 }
@@ -434,25 +360,15 @@ fn test_transaction_rollback() -> anyhow::Result<()> {
     let (db, _data_dir, vid, _com) = make_test_db();
     let conn = &mut db.conn()?;
 
-    assert_eq!(
-        MediaFile::get_all(conn, DBPaging::default()).unwrap().len(),
-        vid.len()
-    );
+    assert_eq!(MediaFile::get_all(conn, DBPaging::default()).unwrap().len(), vid.len());
 
     conn.transaction::<(), _, _>(|conn| {
         MediaFile::delete(conn, &vid[0].id).unwrap();
-        assert_eq!(
-            MediaFile::get_all(conn, DBPaging::default()).unwrap().len(),
-            vid.len() - 1
-        );
+        assert_eq!(MediaFile::get_all(conn, DBPaging::default()).unwrap().len(), vid.len()-1);
         Err(diesel::result::Error::RollbackTransaction)
-    })
-    .ok();
+    }).ok();
 
-    assert_eq!(
-        MediaFile::get_all(conn, DBPaging::default()).unwrap().len(),
-        vid.len()
-    );
+    assert_eq!(MediaFile::get_all(conn, DBPaging::default()).unwrap().len(), vid.len());
     Ok(())
 }
 
@@ -462,23 +378,13 @@ fn test_transaction_commit() -> anyhow::Result<()> {
     let (db, _data_dir, vid, _com) = make_test_db();
     let conn = &mut db.conn()?;
 
-    assert_eq!(
-        MediaFile::get_all(conn, DBPaging::default()).unwrap().len(),
-        vid.len()
-    );
+    assert_eq!(MediaFile::get_all(conn, DBPaging::default()).unwrap().len(), vid.len());
 
     conn.transaction::<(), _, _>(|conn| {
         MediaFile::delete(conn, &vid[0].id).unwrap();
-        assert_eq!(
-            MediaFile::get_all(conn, DBPaging::default()).unwrap().len(),
-            vid.len() - 1
-        );
+        assert_eq!(MediaFile::get_all(conn, DBPaging::default()).unwrap().len(), vid.len()-1);
         DBResult::Ok(())
-    })
-    .unwrap();
-    assert_eq!(
-        MediaFile::get_all(conn, DBPaging::default()).unwrap().len(),
-        vid.len() - 1
-    );
+    }).unwrap();
+    assert_eq!(MediaFile::get_all(conn, DBPaging::default()).unwrap().len(), vid.len()-1);
 
     Ok(())
 }
@@ -521,17 +427,11 @@ fn test_subtitle_add_update_delete() -> anyhow::Result<()> {
     };
     let c = models::Comment::insert(conn, &c)?;
     assert_eq!(models::Comment::get(conn, &c.id)?.subtitle_id, Some(s.id));
-    assert_eq!(
-        models::Comment::get(conn, &c.id)?.subtitle_filename_ifnull,
-        None
-    );
+    assert_eq!(models::Comment::get(conn, &c.id)?.subtitle_filename_ifnull, None);
 
     // Delete subtitle
     models::Subtitle::delete(conn, &s.id)?;
-    assert!(matches!(
-        models::Subtitle::get(conn, &s.id).unwrap_err(),
-        DBError::NotFound()
-    ));
+    assert!(matches!(models::Subtitle::get(conn, &s.id).unwrap_err(), DBError::NotFound()));
 
     // Check that comment still exists, and that subtitle_filename_ifnull is set
     let c = models::Comment::get(conn, &c.id)?;
@@ -541,16 +441,14 @@ fn test_subtitle_add_update_delete() -> anyhow::Result<()> {
     Ok(())
 }
 
+
 #[test]
 #[traced_test]
 fn test_migrate_existing_v056_db() -> anyhow::Result<()> {
     let data_dir = assert_fs::TempDir::new().unwrap();
     let db_file = data_dir.path().join("clapshot.sqlite");
-    std::fs::copy(
-        "src/tests/assets/databases/clapshot-migration-test-1_v056.sqlite",
-        &db_file,
-    )
-    .expect("Failed to copy test DB for migration test");
+    std::fs::copy("src/tests/assets/databases/clapshot-migration-test-1_v056.sqlite", &db_file)
+        .expect("Failed to copy test DB for migration test");
 
     let db = DB::open_db_file(&db_file).unwrap();
     let conn = &mut db.conn()?;
@@ -561,93 +459,28 @@ fn test_migrate_existing_v056_db() -> anyhow::Result<()> {
 
     // Check that the database has (some of) the expected contents (still after migrations)
     let media_files = MediaFile::get_all(conn, DBPaging::default())?;
     assert_eq!(media_files.len(), 9);
-    assert_eq!(
-        media_files
-            .iter()
-            .filter(|v| v.user_id == "uid-4f9c36a6")
-            .count(),
-        2
-    );
-    assert_eq!(
-        media_files
-            .iter()
-            .filter(|v| v.user_id == "uid-9e25df03")
-            .count(),
-        2
-    );
-    assert_eq!(
-        media_files
-            .iter()
-            .filter(|v| v.user_id == "uid-d20ec3a4")
-            .count(),
-        5
-    );
+    assert_eq!(media_files.iter().filter(|v| v.user_id == "uid-4f9c36a6").count(), 2);
+    assert_eq!(media_files.iter().filter(|v| v.user_id == "uid-9e25df03").count(), 2);
+    assert_eq!(media_files.iter().filter(|v| v.user_id == "uid-d20ec3a4").count(), 5);
 
     let comments = Comment::get_all(conn, DBPaging::default())?;
     assert_eq!(comments.len(), 41);
-    assert_eq!(
-        comments
-            .iter()
-            .filter(|c| c.user_id == Some("uid-9e25df03".into()))
-            .count(),
-        7
-    );
-    assert_eq!(
-        comments
-            .iter()
-            .filter(|c| c.user_id == Some("uid-4f9c36a6".into()))
-            .count(),
-        4
-    );
-    assert_eq!(
-        comments
-            .iter()
-            .filter(|c| c.user_id == Some("uid-addcb300".into()))
-            .count(),
-        5
-    );
-    assert_eq!(
-        comments
-            .iter()
-            .filter(|c| c.user_id == Some("uid-d20ec3a4".into()))
-            .count(),
-        25
-    );
-    assert_eq!(
-        comments
-            .iter()
-            .filter(|c| c.media_file_id == "77d7fe01")
-            .count(),
-        14
-    );
-    assert_eq!(
-        comments
-            .iter()
-            .filter(|c| c.media_file_id == "338fb82c")
-            .count(),
-        2
-    );
+    assert_eq!(comments.iter().filter(|c| c.user_id == Some("uid-9e25df03".into())).count(), 7);
+    assert_eq!(comments.iter().filter(|c| c.user_id == Some("uid-4f9c36a6".into())).count(), 4);
+    assert_eq!(comments.iter().filter(|c| c.user_id == Some("uid-addcb300".into())).count(), 5);
+    assert_eq!(comments.iter().filter(|c| c.user_id == Some("uid-d20ec3a4".into())).count(), 25);
+    assert_eq!(comments.iter().filter(|c| c.media_file_id == "77d7fe01").count(), 14);
+    assert_eq!(comments.iter().filter(|c| c.media_file_id == "338fb82c").count(), 2);
 
     let messages = Message::get_all(conn, DBPaging::default())?;
     assert_eq!(messages.len(), 36);
-    assert_eq!(
-        messages
-            .iter()
-            .filter(|m| m.user_id == "uid-d20ec3a4")
-            .count(),
-        17
-    );
-    assert_eq!(
-        messages
-            .iter()
-            .filter(|m| m.media_file_id == Some("338fb82c".into()))
-            .count(),
-        3
-    );
+    assert_eq!(messages.iter().filter(|m| m.user_id == "uid-d20ec3a4").count(), 17);
+    assert_eq!(messages.iter().filter(|m| m.media_file_id == Some("338fb82c".into())).count(), 3);
 
     Ok(())
 }
 
+
 #[test]
 #[traced_test]
 fn test_backup_restore() {
@@ -664,18 +497,12 @@ fn test_backup_restore() {
         for c in comments.iter() {
             models::Comment::delete(conn, &c.id).expect("Failed to delete comment");
         }
-        assert_eq!(
-            models::Comment::get_all(conn, DBPaging::default())
-                .unwrap()
-                .len(),
-            0
-        ); // Make sure they are gone
+        assert_eq!(models::Comment::get_all(conn, DBPaging::default()).unwrap().len(), 0);  // Make sure they are gone
     }
 
     // Close DB and restore
     drop(db);
-    db_backup::restore_sqlite_database(db_file.clone(), backup_file.clone())
-        .expect("Failed to restore database");
+    db_backup::restore_sqlite_database(db_file.clone(), backup_file.clone()).expect("Failed to restore database");
 
     // Check that comments are back
     {
diff --git a/server/src/grpc/caller.rs b/server/src/grpc/caller.rs
index 2ecb799c..58ff357c 100644
--- a/server/src/grpc/caller.rs
+++ b/server/src/grpc/caller.rs
@@ -1,98 +1,71 @@
-use lib_clapshot_grpc::GrpcBindAddr;
 use std::path::Path;
+use lib_clapshot_grpc::GrpcBindAddr;
 
+use crate::grpc::grpc_client::{connect, OrganizerConnection};
 use super::grpc_client::OrganizerURI;
 use super::proto;
-use crate::grpc::grpc_client::{connect, OrganizerConnection};
 
 pub struct OrganizerCaller {
     uri: OrganizerURI,
 }
 
 impl OrganizerCaller {
-    pub fn new(uri: &OrganizerURI) -> Self {
+    pub fn new(uri: &OrganizerURI ) -> Self {
         OrganizerCaller { uri: uri.clone() }
     }
 
-    pub fn blocking_handshake_organizer(
-        &self,
-        data_dir: &Path,
-        server_url: &str,
-        db_file: &Path,
-        backchannel: &GrpcBindAddr,
-    ) -> anyhow::Result<()> {
-        async fn async_call_handshake(
-            conn: &mut OrganizerConnection,
-            backchannel: &GrpcBindAddr,
-            data_dir: &Path,
-            server_url: &str,
-            db_file: &Path,
-        ) -> anyhow::Result<()> {
+    pub fn blocking_handshake_organizer(&self, data_dir: &Path, server_url: &str, db_file: &Path, backchannel: &GrpcBindAddr)
+        -> anyhow::Result<()>
+    {
+        async fn async_call_handshake(conn: &mut OrganizerConnection, backchannel: &GrpcBindAddr, data_dir: &Path, server_url: &str, db_file: &Path)
+            -> anyhow::Result<()>
+        {
             let v = semver::Version::parse(crate::PKG_VERSION)?;
-            use lib_clapshot_grpc::proto::org;
+            use lib_clapshot_grpc::proto::org as org;
 
             let req = proto::org::ServerInfo {
                 storage: Some(org::server_info::Storage {
                     storage: Some(org::server_info::storage::Storage::LocalFs(
                         org::server_info::storage::LocalFilesystem {
-                            base_dir: data_dir.to_string_lossy().into(),
-                        },
-                    )),
-                }),
+                            base_dir: data_dir.to_string_lossy().into()
+                        }))}),
                 backchannel: Some(org::server_info::GrpcEndpoint {
-                    endpoint: Some(match backchannel {
-                        GrpcBindAddr::Tcp(addr) => org::server_info::grpc_endpoint::Endpoint::Tcp(
-                            org::server_info::grpc_endpoint::Tcp {
-                                host: addr.ip().to_string(),
-                                port: addr.port() as u32,
-                            },
-                        ),
-                        GrpcBindAddr::Unix(path) => {
+                    endpoint: Some(
+                        match backchannel {
+                            GrpcBindAddr::Tcp(addr) =>
org::server_info::grpc_endpoint::Endpoint::Tcp( + org::server_info::grpc_endpoint::Tcp { + host: addr.ip().to_string(), + port: addr.port() as u32, + }), + GrpcBindAddr::Unix(path) => org::server_info::grpc_endpoint::Endpoint::Unix( org::server_info::grpc_endpoint::Unix { - path: path.to_string_lossy().into(), - }, - ) - } + path: path.to_string_lossy().into(), + }), + }) }), - }), url_base: server_url.into(), db: Some(org::Database { r#type: org::database::DatabaseType::Sqlite.into(), - endpoint: db_file - .canonicalize()? - .to_str() - .ok_or(anyhow::anyhow!("Sqlite path is not valid UTF-8"))? - .into(), - }), - version: Some(proto::org::SemanticVersionNumber { - major: v.major, - minor: v.minor, - patch: v.patch, - }), + endpoint: db_file.canonicalize()?.to_str().ok_or( + anyhow::anyhow!("Sqlite path is not valid UTF-8"))?.into() + }), + version: Some(proto::org::SemanticVersionNumber { major: v.major, minor: v.minor, patch: v.patch }), }; conn.handshake(req).await?; Ok(()) } const MAX_TRIES: usize = 5; - for retry in 1..(MAX_TRIES + 1) { + for retry in 1..(MAX_TRIES+1) { match self.tokio_connect() { Ok((rt, mut conn)) => { tracing::info!("Connected to organizer (on attempt {retry}). Doing handshake."); - return rt.block_on(async_call_handshake( - &mut conn, - backchannel, - data_dir, - server_url, - db_file, - )); - } + return rt.block_on(async_call_handshake(&mut conn, backchannel, data_dir, server_url, db_file)); + }, Err(e) => { - tracing::warn!( - "Connecting organizer failed (attempt {retry}/{MAX_TRIES}: {}", - e - ); + tracing::warn!("Connecting organizer failed (attempt {retry}/{MAX_TRIES}: {}", e); std::thread::sleep(std::time::Duration::from_secs_f32(0.5)); } } @@ -100,12 +73,12 @@ impl OrganizerCaller { anyhow::bail!("Connecting organizer failed after {MAX_TRIES} attempts"); } + /// Helper for code that's not already async pub fn tokio_connect(&self) -> anyhow::Result<(tokio::runtime::Runtime, OrganizerConnection)> { - let rt = tokio::runtime::Builder::new_current_thread() - .enable_all() - .build()?; + let rt = tokio::runtime::Builder::new_current_thread().enable_all().build()?; let client = rt.block_on(connect(self.uri.clone()))?; Ok((rt, client)) } + } diff --git a/server/src/grpc/db_models.rs b/server/src/grpc/db_models.rs index 67978568..30882286 100644 --- a/server/src/grpc/db_models.rs +++ b/server/src/grpc/db_models.rs @@ -1,19 +1,17 @@ -use crate::database::models; -use crate::database::{ - error::{DBError, DBResult}, - DBPaging, DbQueryByMediaFile, PooledConnection, -}; use lib_clapshot_grpc::proto; +use crate::database::{error::{DBError, DBResult}, DBPaging, DbQueryByMediaFile, PooledConnection}; +use crate::database::models; use super::{datetime_to_proto3, proto3_to_datetime}; + pub fn proto_msg_type_to_event_name(t: proto::user_message::Type) -> &'static str { match t { proto::user_message::Type::Ok => "ok", proto::user_message::Type::Error => "error", proto::user_message::Type::Progress => "progress", proto::user_message::Type::MediaFileUpdated => "media_file_updated", - proto::user_message::Type::MediaFileAdded => "media_file_added", + proto::user_message::Type::MediaFileAdded => "media_file_added" } } @@ -28,72 +26,35 @@ pub fn msg_event_name_to_proto_msg_type(t: &str) -> proto::user_message::Type { } } + // ============================ MediaFile ============================ -impl models::MediaFile { - pub fn from_proto3(v: &proto::MediaFile) -> DBResult { +impl models::MediaFile +{ + pub fn from_proto3(v: &proto::MediaFile) -> DBResult + { Ok(Self { id: 
v.id.clone(), user_id: v.user_id.clone(), media_type: Some(v.media_type.clone()), - added_time: v - .added_time - .as_ref() - .map(|t| proto3_to_datetime(t)) - .flatten() - .ok_or(DBError::Other(anyhow::anyhow!("Bad added_time")))?, - recompression_done: v - .processing_metadata - .as_ref() - .map(|m| m.recompression_done.as_ref().map(|x| proto3_to_datetime(x))) - .flatten() - .flatten(), - thumbs_done: v - .processing_metadata - .as_ref() - .map(|m| m.thumbs_done.as_ref().map(|x| proto3_to_datetime(x))) - .flatten() - .flatten(), + added_time: v.added_time.as_ref().map(|t| proto3_to_datetime(t)).flatten().ok_or(DBError::Other(anyhow::anyhow!("Bad added_time")))?, + recompression_done: v.processing_metadata.as_ref().map(|m| m.recompression_done.as_ref().map(|x| proto3_to_datetime(x))).flatten().flatten(), + thumbs_done: v.processing_metadata.as_ref().map(|m| m.thumbs_done.as_ref().map(|x| proto3_to_datetime(x))).flatten().flatten(), has_thumbnail: v.preview_data.as_ref().map(|d| d.thumb_url.is_some()), - thumb_sheet_cols: v - .preview_data - .as_ref() - .map(|d| d.thumb_sheet.as_ref().map(|x| x.cols as i32)) - .flatten(), - thumb_sheet_rows: v - .preview_data - .as_ref() - .map(|d| d.thumb_sheet.as_ref().map(|x| x.rows as i32)) - .flatten(), - orig_filename: v - .processing_metadata - .as_ref() - .map(|m| m.orig_filename.clone()), + thumb_sheet_cols: v.preview_data.as_ref().map(|d| d.thumb_sheet.as_ref().map(|x| x.cols as i32)).flatten(), + thumb_sheet_rows: v.preview_data.as_ref().map(|d| d.thumb_sheet.as_ref().map(|x| x.rows as i32)).flatten(), + orig_filename: v.processing_metadata.as_ref().map(|m| m.orig_filename.clone()), title: v.title.clone(), total_frames: v.duration.as_ref().map(|d| d.total_frames as i32), duration: v.duration.as_ref().map(|d| d.duration as f32), fps: v.duration.as_ref().map(|d| d.fps.clone()), - raw_metadata_all: v - .processing_metadata - .as_ref() - .map(|m| m.ffprobe_metadata_all.clone()) - .flatten(), - default_subtitle_id: v - .default_subtitle_id - .as_ref() - .map(|id| { - id.parse() - .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid default_subtitle_id"))) - }) - .transpose()?, + raw_metadata_all: v.processing_metadata.as_ref().map(|m| m.ffprobe_metadata_all.clone()).flatten(), + default_subtitle_id: v.default_subtitle_id.as_ref().map(|id| id.parse().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid default_subtitle_id")))).transpose()?, }) } - pub fn to_proto3( - &self, - media_base_url: &str, - subtitles: Vec, - ) -> proto::MediaFile { + pub fn to_proto3(&self, media_base_url: &str, subtitles: Vec) -> proto::MediaFile + { let duration = match (self.duration, self.total_frames, &self.fps) { (Some(dur), Some(total_frames), Some(fps)) => Some(proto::MediaFileDuration { duration: dur as f64, @@ -102,63 +63,42 @@ impl models::MediaFile { }), _ => None, }; - let processing_metadata = match ( - &self.orig_filename, - &self.recompression_done, - &self.raw_metadata_all.clone(), - ) { - (Some(orig_filename), recompression_done, ffprobe_metadata_all) => { - Some(proto::MediaFileProcessingMetadata { - orig_filename: orig_filename.clone(), - recompression_done: recompression_done.map(|t| datetime_to_proto3(&t)), - thumbs_done: self.thumbs_done.map(|t| datetime_to_proto3(&t)), - ffprobe_metadata_all: ffprobe_metadata_all.clone(), - }) - } + let processing_metadata = match (&self.orig_filename, &self.recompression_done, &self.raw_metadata_all.clone()) { + (Some(orig_filename), recompression_done, ffprobe_metadata_all) => Some(proto::MediaFileProcessingMetadata { 
+ orig_filename: orig_filename.clone(), + recompression_done: recompression_done.map(|t| datetime_to_proto3(&t)), + thumbs_done: self.thumbs_done.map(|t| datetime_to_proto3(&t)), + ffprobe_metadata_all: ffprobe_metadata_all.clone(), + }), _ => None, }; // Make preview data (thumb sheet and/or thumb url) let thumb_url = if matches!(self.has_thumbnail, Some(true)) { - Some(format!( - "{}/thumbs/thumb.webp", - format!("{}/{}", media_base_url, &self.id) - )) - } else { - None - }; + Some(format!("{}/thumbs/thumb.webp", format!("{}/{}", media_base_url, &self.id))) + } else { None }; let thumb_sheet = match (self.thumb_sheet_cols, self.thumb_sheet_rows) { (Some(cols), Some(rows)) => Some(proto::media_file_preview_data::ThumbSheet { - url: format!( - "{}/thumbs/sheet-{}x{}.webp", - format!("{}/{}", media_base_url, &self.id), - cols, - rows - ), + url: format!("{}/thumbs/sheet-{}x{}.webp", format!("{}/{}", media_base_url, &self.id), cols, rows), rows: rows as u32, cols: cols as u32, }), - _ => None, + _ => None }; let preview_data = if thumb_url.is_some() || thumb_sheet.is_some() { - Some(proto::MediaFilePreviewData { - thumb_url, - thumb_sheet, - }) - } else { - None - }; + Some(proto::MediaFilePreviewData { thumb_url, thumb_sheet }) + } else { None }; // Use transcoded or orig video? let orig_uri = match &self.orig_filename { Some(f) => Some(format!("orig/{}", urlencoding::encode(f))), - None => None, + None => None }; let playback_uri = match self.recompression_done { Some(_) => Some("video.mp4".into()), - None => orig_uri.clone(), + None => orig_uri.clone() }; proto::MediaFile { @@ -170,14 +110,10 @@ impl models::MediaFile { added_time: Some(datetime_to_proto3(&self.added_time)), preview_data, processing_metadata, - subtitles: subtitles - .into_iter() - .map(|s| s.to_proto3(media_base_url)) - .collect(), + subtitles: subtitles.into_iter().map(|s| s.to_proto3(media_base_url)).collect(), default_subtitle_id: self.default_subtitle_id.map(|id| id.to_string()), - playback_url: playback_uri - .map(|uri| format!("{}/{}/{}", media_base_url, &self.id, uri)), - orig_url: orig_uri.map(|uri| format!("{}/{}/{}", media_base_url, &self.id, uri)), + playback_url: playback_uri.map(|uri| format!("{}/{}/{}", media_base_url, &self.id, uri)), + orig_url: orig_uri.map(|uri| format!("{}/{}/{}", media_base_url, &self.id, uri)) } } @@ -186,90 +122,56 @@ impl models::MediaFile { } } -impl models::MediaFileInsert { - pub fn from_proto3(v: &proto::MediaFile) -> DBResult<Self> { +impl models::MediaFileInsert +{ + pub fn from_proto3(v: &proto::MediaFile) -> DBResult<Self> + { Ok(Self { id: v.id.clone(), user_id: v.user_id.clone(), media_type: Some(v.media_type.clone()), - recompression_done: v - .processing_metadata - .as_ref() - .map(|m| m.recompression_done.as_ref().map(|x| proto3_to_datetime(x))) - .flatten() - .flatten(), - thumbs_done: v - .processing_metadata - .as_ref() - .map(|m| m.thumbs_done.as_ref().map(|x| proto3_to_datetime(x))) - .flatten() - .flatten(), + recompression_done: v.processing_metadata.as_ref().map(|m| m.recompression_done.as_ref().map(|x| proto3_to_datetime(x))).flatten().flatten(), + thumbs_done: v.processing_metadata.as_ref().map(|m| m.thumbs_done.as_ref().map(|x| proto3_to_datetime(x))).flatten().flatten(), has_thumbnail: v.preview_data.as_ref().map(|d| d.thumb_url.is_some()), - thumb_sheet_cols: v - .preview_data - .as_ref() - .map(|d| d.thumb_sheet.as_ref().map(|x| x.cols as i32)) - .flatten(), - thumb_sheet_rows: v - .preview_data - .as_ref() - .map(|d| d.thumb_sheet.as_ref().map(|x| x.rows as i32)) -
.flatten(), - orig_filename: v - .processing_metadata - .as_ref() - .map(|m| m.orig_filename.clone()), + thumb_sheet_cols: v.preview_data.as_ref().map(|d| d.thumb_sheet.as_ref().map(|x| x.cols as i32)).flatten(), + thumb_sheet_rows: v.preview_data.as_ref().map(|d| d.thumb_sheet.as_ref().map(|x| x.rows as i32)).flatten(), + orig_filename: v.processing_metadata.as_ref().map(|m| m.orig_filename.clone()), title: v.title.clone(), total_frames: v.duration.as_ref().map(|d| d.total_frames as i32), duration: v.duration.as_ref().map(|d| d.duration as f32), fps: v.duration.as_ref().map(|d| d.fps.clone()), - raw_metadata_all: v - .processing_metadata - .as_ref() - .map(|m| m.ffprobe_metadata_all.clone()) - .flatten(), - default_subtitle_id: v - .default_subtitle_id - .as_ref() - .map(|id| { - id.parse() - .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid default_subtitle_id"))) - }) - .transpose()?, + raw_metadata_all: v.processing_metadata.as_ref().map(|m| m.ffprobe_metadata_all.clone()).flatten(), + default_subtitle_id: v.default_subtitle_id.as_ref().map(|id| id.parse().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid default_subtitle_id")))).transpose()?, }) } } // ============================ Subtitles ============================ -impl models::Subtitle { - pub fn from_proto3(v: &proto::Subtitle) -> DBResult<Self> { - let added_time = v - .added_time - .as_ref() - .ok_or(anyhow::anyhow!("Missing added_time timestamp"))?; +impl models::Subtitle +{ + pub fn from_proto3(v: &proto::Subtitle) -> DBResult<Self> + { + let added_time = v.added_time.as_ref().ok_or(anyhow::anyhow!("Missing added_time timestamp"))?; Ok(Self { - id: v - .id - .parse() - .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid subtitle ID")))?, + id: v.id.parse().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid subtitle ID")))?, media_file_id: v.media_file_id.clone(), title: v.title.clone(), language_code: v.language_code.clone(), filename: v.playback_url.split('/').last().map(|s| s.to_string()), orig_filename: v.orig_filename.clone(), - added_time: proto3_to_datetime(added_time) - .ok_or(anyhow::anyhow!("Invalid 'added_time' timestamp"))?, + added_time: proto3_to_datetime(added_time).ok_or(anyhow::anyhow!("Invalid 'added_time' timestamp"))?, time_offset: v.time_offset, }) } - pub fn to_proto3(&self, media_base_url: &str) -> proto::Subtitle { + pub fn to_proto3(&self, media_base_url: &str) -> proto::Subtitle + { let base = format!("{}/{}", media_base_url, &self.media_file_id); let orig_url = format!("{}/subs/orig/{}", base, &self.orig_filename); let playback_url = match &self.filename { Some(f) => format!("{}/subs/{}", base, f), - None => orig_url.clone(), + None => orig_url.clone() }; proto::Subtitle { id: self.id.to_string(), @@ -285,8 +187,11 @@ impl models::Subtitle { } } -impl models::SubtitleInsert { - pub fn from_proto3(s: &proto::Subtitle) -> DBResult<Self> { + +impl models::SubtitleInsert +{ + pub fn from_proto3(s: &proto::Subtitle) -> DBResult<Self> + { if s.id != String::default() { return Err(DBError::Other(anyhow::anyhow!("Subtitle ID must be empty for conversion to SubtitleInsert, which doesn't have 'id' field"))); } @@ -301,46 +206,33 @@ impl models::SubtitleInsert { } } + // ============================ Comment ============================ -impl models::Comment { - pub fn from_proto3(c: &proto::Comment) -> DBResult<Self> { +impl models::Comment +{ + pub fn from_proto3(c: &proto::Comment) -> DBResult<Self> + { //let user = v.user.as_ref().ok_or(anyhow::anyhow!("Missing user"))?; - let created = c - .created - .as_ref() -
.ok_or(anyhow::anyhow!("Missing created timestamp"))?; + let created = c.created.as_ref().ok_or(anyhow::anyhow!("Missing created timestamp"))?; Ok(Self { - id: c - .id - .parse() - .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid comment ID")))?, + id: c.id.parse().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid comment ID")))?, media_file_id: c.media_file_id.clone(), user_id: c.user_id.clone(), username_ifnull: c.username_ifnull.clone(), comment: c.comment.clone(), timecode: c.timecode.clone(), - parent_id: c - .parent_id - .as_ref() - .map(|id| id.parse()) - .transpose() - .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid parent ID")))?, - created: proto3_to_datetime(created) - .ok_or(anyhow::anyhow!("Invalid 'created' timestamp"))?, + parent_id: c.parent_id.as_ref().map(|id| id.parse()).transpose().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid parent ID")))?, + created: proto3_to_datetime(created).ok_or(anyhow::anyhow!("Invalid 'created' timestamp"))?, edited: c.edited.as_ref().map(|t| proto3_to_datetime(t)).flatten(), drawing: c.drawing.clone(), - subtitle_id: c - .subtitle_id - .as_ref() - .map(|id| id.parse()) - .transpose() - .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid subtitle ID")))?, + subtitle_id: c.subtitle_id.as_ref().map(|id| id.parse()).transpose().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid subtitle ID")))?, subtitle_filename_ifnull: c.subtitle_filename_ifnull.clone(), }) } - pub fn to_proto3(&self) -> proto::Comment { + pub fn to_proto3(&self) -> proto::Comment + { let created_timestamp = Some(datetime_to_proto3(&self.created)); let edited_timestamp = self.edited.map(|edited| datetime_to_proto3(&edited)); @@ -361,8 +253,10 @@ impl models::Comment { } } -impl models::CommentInsert { - pub fn from_proto3(c: &proto::Comment) -> DBResult { +impl models::CommentInsert +{ + pub fn from_proto3(c: &proto::Comment) -> DBResult + { if c.id != String::default() { return Err(DBError::Other(anyhow::anyhow!("Comment ID must be empty for conversion to CommentInsert, which doesn't have 'id' field"))); } @@ -372,19 +266,9 @@ impl models::CommentInsert { username_ifnull: c.username_ifnull.clone(), comment: c.comment.clone(), timecode: c.timecode.clone(), - parent_id: c - .parent_id - .as_ref() - .map(|id| id.parse()) - .transpose() - .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid parent ID")))?, + parent_id: c.parent_id.as_ref().map(|id| id.parse()).transpose().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid parent ID")))?, drawing: c.drawing.clone(), - subtitle_id: c - .subtitle_id - .as_ref() - .map(|id| id.parse()) - .transpose() - .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid subtitle ID")))?, + subtitle_id: c.subtitle_id.as_ref().map(|id| id.parse()).transpose().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid subtitle ID")))?, subtitle_filename_ifnull: c.subtitle_filename_ifnull.clone(), }) } @@ -392,69 +276,40 @@ impl models::CommentInsert { // ============================ Message ============================ -impl models::Message { - pub fn from_proto3(v: &proto::UserMessage) -> DBResult { - let created = v - .created - .as_ref() - .ok_or(anyhow::anyhow!("Missing created timestamp"))?; +impl models::Message +{ + pub fn from_proto3(v: &proto::UserMessage) -> DBResult + { + let created = v.created.as_ref().ok_or(anyhow::anyhow!("Missing created timestamp"))?; let user_id = v.user_id.as_ref().ok_or(anyhow::anyhow!("Missing user"))?; let id = v.id.as_ref().ok_or(anyhow::anyhow!("Missing message ID"))?; Ok(Self { - id: id - .parse() - 
.map_err(|_| DBError::Other(anyhow::anyhow!("Invalid message ID")))?, + id: id.parse().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid message ID")))?, event_name: proto_msg_type_to_event_name(v.r#type()).to_string(), user_id: user_id.clone(), media_file_id: v.refs.as_ref().map(|r| r.media_file_id.clone()).flatten(), - comment_id: v - .refs - .as_ref() - .map(|r| { - r.comment_id - .as_ref() - .map(|id| id.parse()) - .transpose() - .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid comment ID"))) - }) - .transpose()? - .flatten(), - subtitle_id: v - .refs - .as_ref() - .map(|r| { - r.comment_id - .as_ref() - .map(|id| id.parse()) - .transpose() - .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid subtitle ID"))) - }) - .transpose()? - .flatten(), + comment_id: v.refs.as_ref().map(|r| r.comment_id.as_ref().map(|id| id.parse()).transpose().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid comment ID")))).transpose()?.flatten(), + subtitle_id: v.refs.as_ref().map(|r| r.comment_id.as_ref().map(|id| id.parse()).transpose().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid subtitle ID")))).transpose()?.flatten(), message: v.message.clone(), details: v.details.clone().unwrap_or_default(), - created: proto3_to_datetime(created) - .ok_or(anyhow::anyhow!("Invalid 'created' timestamp"))?, + created: proto3_to_datetime(created).ok_or(anyhow::anyhow!("Invalid 'created' timestamp"))?, seen: v.seen, }) } - pub fn to_proto3(&self) -> proto::UserMessage { + pub fn to_proto3(&self) -> proto::UserMessage + { proto::UserMessage { id: Some(self.id.to_string()), r#type: msg_event_name_to_proto_msg_type(&self.event_name.as_str()).into(), user_id: Some(self.user_id.clone()), - refs: Some(proto::user_message::Refs { + refs:Some(proto::user_message::Refs { media_file_id: self.media_file_id.clone(), comment_id: self.comment_id.map(|id| id.to_string()), subtitle_id: self.subtitle_id.map(|id| id.to_string()), }), message: self.message.clone(), - details: if self.details.is_empty() { - None - } else { - Some(self.details.clone()) - }, + details: if self.details.is_empty() { None } else { Some(self.details.clone()) }, created: Some(datetime_to_proto3(&self.created)), seen: self.seen, progress: None, @@ -462,8 +317,10 @@ impl models::Message { } } -impl models::MessageInsert { - pub fn from_proto3(v: &proto::UserMessage) -> DBResult { +impl models::MessageInsert +{ + pub fn from_proto3(v: &proto::UserMessage) -> DBResult + { if v.id.is_some() { return Err(DBError::Other(anyhow::anyhow!("Message ID must be empty for conversion to MessageInsert, which doesn't have 'id' field"))); } @@ -473,52 +330,27 @@ impl models::MessageInsert { event_name: proto_msg_type_to_event_name(v.r#type()).to_string(), user_id: user_id.clone(), media_file_id: v.refs.as_ref().map(|r| r.media_file_id.clone()).flatten(), - comment_id: v - .refs - .as_ref() - .map(|r| { - r.comment_id - .as_ref() - .map(|id| id.parse()) - .transpose() - .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid comment ID"))) - }) - .transpose()? - .flatten(), - subtitle_id: v - .refs - .as_ref() - .map(|r| { - r.comment_id - .as_ref() - .map(|id| id.parse()) - .transpose() - .map_err(|_| DBError::Other(anyhow::anyhow!("Invalid subtitle ID"))) - }) - .transpose()? 
- .flatten(), + comment_id: v.refs.as_ref().map(|r| r.comment_id.as_ref().map(|id| id.parse()).transpose().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid comment ID")))).transpose()?.flatten(), + subtitle_id: v.refs.as_ref().map(|r| r.subtitle_id.as_ref().map(|id| id.parse()).transpose().map_err(|_| DBError::Other(anyhow::anyhow!("Invalid subtitle ID")))).transpose()?.flatten(), message: v.message.clone(), details: v.details.clone().unwrap_or_default(), seen: v.seen, }) } - pub fn to_proto3(&self) -> proto::UserMessage { + pub fn to_proto3(&self) -> proto::UserMessage + { proto::UserMessage { id: None, r#type: msg_event_name_to_proto_msg_type(&self.event_name.as_str()).into(), user_id: Some(self.user_id.clone()), - refs: Some(proto::user_message::Refs { + refs:Some(proto::user_message::Refs { media_file_id: self.media_file_id.clone(), comment_id: self.comment_id.map(|id| id.to_string()), subtitle_id: self.subtitle_id.map(|id| id.to_string()), }), message: self.message.clone(), - details: if self.details.is_empty() { - None - } else { - Some(self.details.clone()) - }, + details: if self.details.is_empty() { None } else { Some(self.details.clone()) }, created: None, seen: self.seen, progress: None, diff --git a/server/src/grpc/grpc_client.rs index e31afad1..6504370d 100644 --- a/server/src/grpc/grpc_client.rs +++ b/server/src/grpc/grpc_client.rs @@ -1,12 +1,13 @@ use std::path::{Path, PathBuf}; +use lib_clapshot_grpc::{unix_socket, subprocess::spawn_shell, subprocess::ProcHandle}; use lib_clapshot_grpc::proto::org::organizer_inbound_client::OrganizerInboundClient; -use lib_clapshot_grpc::{subprocess::spawn_shell, subprocess::ProcHandle, unix_socket}; -use anyhow::{bail, Context}; -use tonic::transport::{Channel, Endpoint}; +use anyhow::{Context, bail}; +use tonic::transport::{Endpoint, Channel}; use tracing::info_span; + pub type OrganizerConnection = OrganizerInboundClient<Channel>; #[derive(Debug, Clone)] pub enum OrganizerURI { @@ -17,17 +18,19 @@ pub enum OrganizerURI { /// Connect to a gRPC server, either via a Unix socket or HTTP(S). /// Plain path string means Unix socket, "http://..." or "https://..." means HTTP(S). -pub async fn connect(uri: OrganizerURI) -> anyhow::Result<OrganizerConnection> { +pub async fn connect(uri: OrganizerURI) -> anyhow::Result<OrganizerConnection> +{ let channel = match uri { - OrganizerURI::UnixSocket(path) => { + OrganizerURI::UnixSocket(path) => + { unix_socket::wait_for(&path, 5.0).await?; // For tonic 0.13.1, create a custom connector that wraps UnixStream #[cfg(unix)] { - use hyper_util::rt::TokioIo; use tokio::net::UnixStream; use tower::service_fn; - + use hyper_util::rt::TokioIo; + let path_clone = path.clone(); Endpoint::try_from("http://[::]:50051")? .connect_timeout(std::time::Duration::from_secs(8)) @@ -45,25 +48,26 @@ pub async fn connect(uri: OrganizerURI) -> anyhow::Result<OrganizerConnection> { { anyhow::bail!("Unix sockets are not supported on this platform") } - } - OrganizerURI::Http(uri) => Channel::from_shared(uri.to_string()) - .context("Failed to parse organizer HTTP URI")? - .connect_timeout(std::time::Duration::from_secs(8)) - .connect() - .await - .context("HTTP Channel::connect failed")?, + }, + OrganizerURI::Http(uri) => + { + Channel::from_shared(uri.to_string()).context("Failed to parse organizer HTTP URI")? + .connect_timeout(std::time::Duration::from_secs(8)) + .connect().await.context("HTTP Channel::connect failed")?
+ }, }; Ok(OrganizerInboundClient::new(channel)) } /// Parse Organizer plugin arguments and spawn it if necessary pub fn prepare_organizer( - org_uri: &Option<String>, - cmd: &Option<String>, - level: tracing::Level, - json: bool, - data_dir: &Path, -) -> anyhow::Result<(Option<OrganizerURI>, Option<ProcHandle>)> { + org_uri: &Option<String>, + cmd: &Option<String>, + level: tracing::Level, + json: bool, + data_dir: &Path) + -> anyhow::Result<(Option<OrganizerURI>, Option<ProcHandle>)> +{ assert!(tracing::Level::TRACE > tracing::Level::DEBUG); let debug = level >= tracing::Level::DEBUG; @@ -76,51 +80,39 @@ pub fn prepare_organizer( Some((pcol, _)) => bail!("Unsupported gRPC protocol: {}", pcol), }), }; - let org_hdl = if let Some(cmd) = cmd { - // Use a temp sock if none was given - if org_uri.is_none() { - let unix_sock = data_dir - .canonicalize() - .context("Expanding data dir")? - .join("grpc-srv-to-org.sock"); - org_uri = Some(OrganizerURI::UnixSocket(unix_sock)); - }; - Some(spawn_organizer( - &cmd.as_str(), - org_uri.clone().unwrap(), - debug, - json, - )?) - } else { - None - }; + let org_hdl = + if let Some(cmd) = cmd { + // Use a temp sock if none was given + if org_uri.is_none() { + let unix_sock = data_dir + .canonicalize().context("Expanding data dir")? + .join("grpc-srv-to-org.sock"); + org_uri = Some(OrganizerURI::UnixSocket(unix_sock)); + }; + Some(spawn_organizer(&cmd.as_str(), org_uri.clone().unwrap(), debug, json)?) + } else { None }; Ok((org_uri, org_hdl)) } /// Spawn organizer gRPC server as a subprocess. /// Dropping the returned handle will signal/kill the subprocess. -fn spawn_organizer( - cmd: &str, - uri: OrganizerURI, - debug: bool, - json: bool, -) -> anyhow::Result<ProcHandle> { +fn spawn_organizer(cmd: &str, uri: OrganizerURI, debug: bool, json: bool) + -> anyhow::Result<ProcHandle> +{ assert!(cmd != "", "Empty organizer command"); let mut cmd = match uri { OrganizerURI::UnixSocket(path) => { unix_socket::delete_old(&path)?; format!("{} {}", cmd, path.display()) - } - OrganizerURI::Http(_) => cmd.into(), + }, + OrganizerURI::Http(_) => { + cmd.into() + }, }; - if debug { - cmd += " --debug"; - } - if json { - cmd += " --json"; - } + if debug { cmd += " --debug"; } + if json { cmd += " --json"; } spawn_shell(&cmd, "organizer", info_span!("ORG")) } diff --git a/server/src/grpc/grpc_impl_helpers.rs index bac40a71..78a33616 100644 --- a/server/src/grpc/grpc_impl_helpers.rs +++ b/server/src/grpc/grpc_impl_helpers.rs @@ -1,31 +1,21 @@ -use crate::database::error::DBError; use std::num::NonZeroU32; use tonic::Status; +use crate::database::error::DBError; use lib_clapshot_grpc::proto::org; -pub(crate) fn rpc_expect_field<'a, T>( - fld: &'a Option<T>, - name: &'a str, -) -> tonic::Result<&'a T, Status> { + +pub (crate) fn rpc_expect_field<'a, T> (fld: &'a Option<T>, name: &'a str) -> tonic::Result<&'a T, Status> { match fld { Some(f) => Ok(f), - None => { - return Err(Status::invalid_argument(format!( - "Missing '{}' field", - name - ))) - } + None => return Err(Status::invalid_argument(format!("Missing '{}' field", name))), } } /// Emulate paging by taking a slice of the vector for database /// queries that don't support it. -pub(crate) fn paged_vec<T>(v: Vec<T>, p: crate::database::DBPaging) -> Vec<T> { - v.into_iter() - .skip(p.offset() as usize) - .take(p.limit() as usize) - .collect() +pub (crate) fn paged_vec<T>(v: Vec<T>, p: crate::database::DBPaging) -> Vec<T> { + v.into_iter().skip(p.offset() as usize).take(p.limit() as usize).collect() } /// Convert GRPC paging object to (type-safe) DB counterpart.
@@ -42,7 +32,7 @@ impl TryInto<crate::database::DBPaging> for Option<&org::DbPaging> { page_num: p.page_num.into(), page_size, }) - } + }, None => Ok(crate::database::DBPaging::default()), } } @@ -53,9 +43,7 @@ impl From<DBError> for Status { fn from(e: DBError) -> Self { match e { DBError::NotFound() => Status::not_found("DB item not found (on Server)"), - DBError::BackendError(e) => { - Status::internal(format!("DB backend error (on Server): {}", e)) - } + DBError::BackendError(e) => Status::internal(format!("DB backend error (on Server): {}", e)), DBError::Other(e) => Status::internal(format!("DB error (on Server): {}", e)), } } diff --git a/server/src/grpc/grpc_server.rs index 8e1a886b..68bab841 100644 --- a/server/src/grpc/grpc_server.rs +++ b/server/src/grpc/grpc_server.rs @@ -1,21 +1,12 @@ -use crate::database::models; -use crate::grpc::db_models::proto_msg_type_to_event_name; -use crate::{ - api_server::{server_state::ServerState, ws_handers::del_media_file_and_cleanup, SendTo}, - client_cmd, - database::{DbBasicQuery, DbQueryByMediaFile, DbQueryByUser, DbUpdate}, - grpc::grpc_impl_helpers::{paged_vec, rpc_expect_field}, - optional_str_to_i32_or_tonic_error, str_to_i32_or_tonic_error, -}; -use anyhow::Context; use std::{path::Path, sync::atomic::Ordering::Relaxed}; +use anyhow::Context; use tonic::{Request, Response, Status}; +use crate::{api_server::{server_state::ServerState, ws_handers::del_media_file_and_cleanup, SendTo}, client_cmd, database::{DbBasicQuery, DbQueryByMediaFile, DbQueryByUser, DbUpdate}, grpc::grpc_impl_helpers::{paged_vec, rpc_expect_field}, optional_str_to_i32_or_tonic_error, str_to_i32_or_tonic_error}; +use crate::grpc::db_models::proto_msg_type_to_event_name; +use crate::database::models; +use lib_clapshot_grpc::{proto::{self}, run_organizer_outbound_grpc_server, GrpcBindAddr, RpcResult}; use lib_clapshot_grpc::proto::org; -use lib_clapshot_grpc::{ - proto::{self}, - run_organizer_outbound_grpc_server, GrpcBindAddr, RpcResult, -}; pub struct OrganizerOutboundImpl { server: ServerState, } @@ -24,67 +15,48 @@ pub struct OrganizerOutboundImpl { // Implement RPC methods for Organizer -> Server #[tonic::async_trait] -impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl { - async fn handshake(&self, req: tonic::Request<org::OrganizerInfo>) -> RpcResult<proto::Empty> { +impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl +{ + async fn handshake(&self, req: tonic::Request<org::OrganizerInfo>) -> RpcResult<proto::Empty> + { tracing::debug!("org->srv handshake received"); - self.server - .organizer_info - .lock() - .await - .replace(req.into_inner()); + self.server.organizer_info.lock().await.replace(req.into_inner()); self.server.organizer_has_connected.store(true, Relaxed); Ok(Response::new(proto::Empty {})) } - async fn client_define_actions( - &self, - req: Request<org::ClientDefineActionsRequest>, - ) -> RpcResult<proto::Empty> { + async fn client_define_actions(&self, req: Request<org::ClientDefineActionsRequest>) -> RpcResult<proto::Empty> + { let req = req.into_inner(); - to_rpc_empty(self.server.emit_cmd( - client_cmd!(DefineActions, {actions: req.actions}), - SendTo::UserSession(&req.sid), - )) + to_rpc_empty(self.server.emit_cmd(client_cmd!(DefineActions, {actions: req.actions}), SendTo::UserSession(&req.sid))) } - async fn client_show_page( - &self, - req: Request<org::ClientShowPageRequest>, - ) -> RpcResult<proto::Empty> { + async fn client_show_page(&self, req: Request<org::ClientShowPageRequest>) -> RpcResult<proto::Empty> + { let req = req.into_inner(); - to_rpc_empty(self.server.emit_cmd( - client_cmd!(ShowPage, { - page_items: req.page_items, - page_id: req.page_id, - page_title: req.page_title, - }), - SendTo::UserSession(&req.sid), - ))
+ to_rpc_empty(self.server.emit_cmd(client_cmd!(ShowPage, { + page_items: req.page_items, + page_id: req.page_id, + page_title: req.page_title, + }), SendTo::UserSession(&req.sid))) } /// Send a message to one or more user sessions. - async fn client_show_user_message( - &self, - req: Request<org::ClientShowUserMessageRequest>, - ) -> RpcResult<proto::Empty> { - use crate::api_server::SendTo; + async fn client_show_user_message(&self, req: Request<org::ClientShowUserMessageRequest>) -> RpcResult<proto::Empty> + { use org::client_show_user_message_request::Recipient; + use crate::api_server::SendTo; let req = req.into_inner(); - let msg_in = req.msg.map_or_else( - || return Err(Status::invalid_argument("No message specified")), - Ok, - )?; - let recipient = req - .recipient - .ok_or_else(|| Status::invalid_argument("No recipient specified"))?; + let msg_in = req.msg.map_or_else(|| return Err(Status::invalid_argument("No message specified")), Ok)?; + let recipient = req.recipient.ok_or_else(|| Status::invalid_argument("No recipient specified"))?; let (media_file_id, comment_id, subtitle_id) = match &msg_in.refs { Some(refs) => ( refs.media_file_id.clone(), optional_str_to_i32_or_tonic_error!(refs.comment_id)?, - optional_str_to_i32_or_tonic_error!(refs.subtitle_id)?, + optional_str_to_i32_or_tonic_error!(refs.subtitle_id)? ), None => (None, None, None), }; @@ -100,8 +72,7 @@ impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl message: msg_in.message.clone(), details: msg_in.details.clone().unwrap_or_default(), }; - self.server - .push_notify_message(&msg, to, persist, msg_in.progress) + self.server.push_notify_message(&msg, to, persist, msg_in.progress) }; let res = match recipient { @@ -111,48 +82,30 @@ impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl } else { Err(anyhow::anyhow!("Session not found")) } - } - Recipient::UserTemp(username) => send_msg(&username, SendTo::UserId(&username), false), - Recipient::UserPersist(username) => { - send_msg(&username, SendTo::UserId(&username), true) - } - Recipient::MediaFileId(id) => send_msg(&id, SendTo::MediaFileId(&id), false), - Recipient::CollabSession(csi) => send_msg(&csi, SendTo::Collab(&csi), false), + }, + Recipient::UserTemp(username) => { send_msg(&username, SendTo::UserId(&username), false) }, + Recipient::UserPersist(username) => { send_msg(&username, SendTo::UserId(&username), true) }, + Recipient::MediaFileId(id) => { send_msg(&id, SendTo::MediaFileId(&id), false) }, + Recipient::CollabSession(csi) => { send_msg(&csi, SendTo::Collab(&csi), false) }, }; to_rpc_empty(res) } - async fn client_open_media_file( - &self, - req: Request<org::ClientOpenMediaFileRequest>, - ) -> RpcResult<proto::Empty> { + async fn client_open_media_file(&self, req: Request<org::ClientOpenMediaFileRequest>) -> RpcResult<proto::Empty> + { let req = req.into_inner(); - to_rpc_empty( - crate::api_server::ws_handers::send_open_media_file_cmd( - &self.server, - &req.sid, - &req.id, - ) - .await, - ) + to_rpc_empty(crate::api_server::ws_handers::send_open_media_file_cmd(&self.server, &req.sid, &req.id).await) } - async fn client_set_cookies( - &self, - req: Request<org::ClientSetCookiesRequest>, - ) -> RpcResult<proto::Empty> { + async fn client_set_cookies(&self, req: Request<org::ClientSetCookiesRequest>) -> RpcResult<proto::Empty> + { let req = req.into_inner(); - to_rpc_empty(self.server.emit_cmd( - client_cmd!(SetCookies, {cookies: req.cookies, expire_time: req.expire_time}), - SendTo::UserSession(&req.sid), - )) + to_rpc_empty(self.server.emit_cmd(client_cmd!(SetCookies, {cookies: req.cookies, expire_time: req.expire_time}), SendTo::UserSession(&req.sid))) } - async fn delete_media_file( - &self, - req: Request<org::DeleteMediaFileRequest>, - ) -> RpcResult<proto::Empty> { + async fn delete_media_file(&self, req: 
Request<org::DeleteMediaFileRequest>) -> RpcResult<proto::Empty> + { let req = req.into_inner(); to_rpc_empty(del_media_file_and_cleanup(req.id.as_str(), None, &self.server).await) } @@ -163,25 +116,22 @@ impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl // (These aggregate a lot of filtering and paging functionality into relatively // few RPC calls, so there's quite a bit of matching and dense logic here.) - async fn db_get_media_files( - &self, - req: Request<org::DbGetMediaFilesRequest>, - ) -> RpcResult<org::DbMediaFileList> { + + async fn db_get_media_files(&self, req: Request<org::DbGetMediaFilesRequest>) -> RpcResult<org::DbMediaFileList> + { use org::db_get_media_files_request::Filter; let req = req.into_inner(); let db = self.server.db.clone(); let pg = req.paging.as_ref().try_into()?; let conn = &mut db.conn()?; let items = match rpc_expect_field(&req.filter, "filter")? { - Filter::All(_) => models::MediaFile::get_all(conn, pg)?, - Filter::Ids(ids) => paged_vec(models::MediaFile::get_many(conn, &ids.ids)?, pg), - Filter::UserId(user_id) => models::MediaFile::get_by_user(conn, &user_id, pg)?, + Filter::All(_) => { models::MediaFile::get_all(conn, pg)? }, + Filter::Ids(ids) => { paged_vec(models::MediaFile::get_many(conn, &ids.ids)?, pg) }, + Filter::UserId(user_id) => { models::MediaFile::get_by_user(conn, &user_id, pg)? }, }; let mut proto_items = Vec::with_capacity(items.len()); - for mf in items { - proto_items.push(mf.to_proto3(&self.server.media_base_url, mf.get_subtitles(conn)?)); - } + for mf in items { proto_items.push(mf.to_proto3(&self.server.media_base_url, mf.get_subtitles(conn)?)); } Ok(Response::new(org::DbMediaFileList { items: proto_items, @@ -189,10 +139,9 @@ impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl })) } - async fn db_get_comments( - &self, - req: Request<org::DbGetCommentsRequest>, - ) -> RpcResult<org::DbCommentList> { + + async fn db_get_comments(&self, req: Request<org::DbGetCommentsRequest>) -> RpcResult<org::DbCommentList> + { use org::db_get_comments_request::Filter; let req = req.into_inner(); let db = self.server.db.clone(); @@ -200,19 +149,13 @@ impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl let conn = &mut db.conn()?; let items = match rpc_expect_field(&req.filter, "filter")? { - Filter::All(_) => models::Comment::get_all(conn, pg)?, + Filter::All(_) => { models::Comment::get_all(conn, pg)? }, Filter::Ids(ids) => { - let ids = ids - .ids - .iter() - .map(|comment_id| str_to_i32_or_tonic_error!(comment_id)) - .collect::<Result<Vec<_>, _>>()?; + let ids = ids.ids.iter().map(|comment_id| str_to_i32_or_tonic_error!(comment_id)).collect::<Result<Vec<_>, _>>()?; paged_vec(models::Comment::get_many(conn, &ids)?, pg) - } - Filter::UserId(user_id) => models::Comment::get_by_user(conn, user_id, pg)?, - Filter::MediaFileId(media_file_id) => { - models::Comment::get_by_media_file(conn, media_file_id, pg)? - } + }, + Filter::UserId(user_id) => { models::Comment::get_by_user(conn, user_id, pg)? }, + Filter::MediaFileId(media_file_id) => { models::Comment::get_by_media_file(conn, media_file_id, pg)? }, }; Ok(Response::new(org::DbCommentList { items: items.into_iter().map(|c| c.to_proto3()).collect(), @@ -220,32 +163,23 @@ impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl })) } - async fn db_get_user_messages( - &self, - req: Request<org::DbGetUserMessagesRequest>, - ) -> RpcResult<org::DbUserMessageList> { + + async fn db_get_user_messages(&self, req: Request<org::DbGetUserMessagesRequest>) -> RpcResult<org::DbUserMessageList> + { use org::db_get_user_messages_request::Filter; let req = req.into_inner(); let db = self.server.db.clone(); let pg = req.paging.as_ref().try_into()?; let conn = &mut db.conn()?; let items = match rpc_expect_field(&req.filter, "filter")?
{ - Filter::All(_) => models::Message::get_all(conn, pg)?, + Filter::All(_) => { models::Message::get_all(conn, pg)? }, Filter::Ids(ids) => { - let ids = ids - .ids - .iter() - .map(|message_id| str_to_i32_or_tonic_error!(message_id)) - .collect::<Result<Vec<_>, _>>()?; + let ids = ids.ids.iter().map(|message_id| str_to_i32_or_tonic_error!(message_id)).collect::<Result<Vec<_>, _>>()?; paged_vec(models::Message::get_many(conn, ids.as_slice())?, pg) - } - Filter::UserId(user_id) => models::Message::get_by_user(conn, user_id, pg)?, - Filter::MediaFileId(media_file_id) => { - models::Message::get_by_media_file(conn, media_file_id, pg)? - } - Filter::CommentId(comment_id) => { - models::Message::get_by_comment(conn, str_to_i32_or_tonic_error!(comment_id)?)? - } + }, + Filter::UserId(user_id) => { models::Message::get_by_user(conn, user_id, pg)? }, + Filter::MediaFileId(media_file_id) => { models::Message::get_by_media_file(conn, media_file_id, pg)? }, + Filter::CommentId(comment_id) => { models::Message::get_by_comment(conn, str_to_i32_or_tonic_error!(comment_id)?)? }, }; Ok(Response::new(org::DbUserMessageList { items: items.into_iter().map(|m| m.to_proto3()).collect(), @@ -253,121 +187,83 @@ impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl })) } - async fn db_upsert( - &self, - req: Request<org::DbUpsertRequest>, - ) -> RpcResult<org::DbUpsertResponse> { + + async fn db_upsert(&self, req: Request<org::DbUpsertRequest>) -> RpcResult<org::DbUpsertResponse> + { let req = req.into_inner(); macro_rules! upsert_type { - ([$db:expr, $input_items:expr, $model:ty, $ins_model:ty, $id_missing:expr, $to_proto:expr]) => {{ - let inserts = $input_items - .iter() - .filter(|it| $id_missing(it)) - .map(|it| <$ins_model>::from_proto3(it)) - .collect::<Result<Vec<_>, _>>()?; - - let updates = $input_items - .iter() - .filter(|it| !$id_missing(it)) - .map(|it| <$model>::from_proto3(it)) - .collect::<Result<Vec<_>, _>>()?; - - // Perform database operations - let ins_res = <$model>::insert_many($db, &inserts)?; - let upd_res = <$model>::update_many($db, &updates)?; - - if ins_res.len() + upd_res.len() != $input_items.len() { - return Err(Status::internal( - "Database upsert returned unexpected number of results", - )); - } - - // Combine the results in the original order - let mut ins_iter = ins_res.into_iter(); - let mut upd_iter = upd_res.into_iter(); - let res_comb_orig_order = $input_items - .iter() - .map(|it| { + ([$db:expr, $input_items:expr, $model:ty, $ins_model:ty, $id_missing:expr, $to_proto:expr]) => { + { + let inserts = $input_items.iter().filter(|it| $id_missing(it)) + .map(|it| <$ins_model>::from_proto3(it)) + .collect::<Result<Vec<_>, _>>()?; + + let updates = $input_items.iter().filter(|it| !$id_missing(it)) + .map(|it| <$model>::from_proto3(it)) + .collect::<Result<Vec<_>, _>>()?; + + // Perform database operations + let ins_res = <$model>::insert_many($db, &inserts)?; + let upd_res = <$model>::update_many($db, &updates)?; + + if ins_res.len() + upd_res.len() != $input_items.len() { + return Err(Status::internal("Database upsert returned unexpected number of results")); + } + + // Combine the results in the original order + let mut ins_iter = ins_res.into_iter(); + let mut upd_iter = upd_res.into_iter(); + let res_comb_orig_order = $input_items.iter().map(|it| { if $id_missing(it) { ins_iter.next().expect("Insert result missing") } else { upd_iter.next().expect("Update result missing") } - }) - .collect::<Vec<_>>(); - - // Convert back to proto3 - res_comb_orig_order - .iter() - .map(|it| $to_proto(it)) - .collect::<Result<Vec<_>, tonic::Status>>() - }}; + }).collect::<Vec<_>>(); + + // Convert back to proto3 + res_comb_orig_order.iter().map(|it| 
$to_proto(it)).collect::<Result<Vec<_>, tonic::Status>>() + } + } } let conn = &mut self.server.db.conn()?; Ok(Response::new(org::DbUpsertResponse { media_files: upsert_type!([ - conn, - req.media_files, - models::MediaFile, - models::MediaFileInsert, + conn, req.media_files, models::MediaFile, models::MediaFileInsert, |it: &proto::MediaFile| it.id.is_empty(), - |it: &models::MediaFile| Ok( - it.to_proto3(self.server.media_base_url.as_str(), it.get_subtitles(conn)?) - ) - ])?, + |it: &models::MediaFile| Ok(it.to_proto3(self.server.media_base_url.as_str(), it.get_subtitles(conn)?))])?, comments: upsert_type!([ - conn, - req.comments, - models::Comment, - models::CommentInsert, + conn, req.comments, models::Comment, models::CommentInsert, |it: &proto::Comment| it.id.is_empty(), - |it: &models::Comment| Ok(it.to_proto3()) - ])?, + |it: &models::Comment| Ok(it.to_proto3())])?, user_messages: upsert_type!([ - conn, - req.user_messages, - models::Message, - models::MessageInsert, + conn, req.user_messages, models::Message, models::MessageInsert, |it: &proto::UserMessage| it.id.is_none(), - |it: &models::Message| Ok(it.to_proto3()) - ])?, + |it: &models::Message| Ok(it.to_proto3())])?, subtitles: upsert_type!([ - conn, - req.subtitles, - models::Subtitle, - models::SubtitleInsert, + conn, req.subtitles, models::Subtitle, models::SubtitleInsert, |it: &proto::Subtitle| it.id.is_empty(), - |it: &models::Subtitle| Ok(it.to_proto3(self.server.media_base_url.as_str())) - ])?, + |it: &models::Subtitle| Ok(it.to_proto3(self.server.media_base_url.as_str()))])?, })) } - async fn db_delete( - &self, - req: Request<org::DbDeleteRequest>, - ) -> RpcResult<org::DbDeleteResponse> { + async fn db_delete(&self, req: Request<org::DbDeleteRequest>) -> RpcResult<org::DbDeleteResponse> + { let req = req.into_inner(); macro_rules! delete_type { - ([$db:expr, $input_ids:expr, $id_type:ty, $model:ty]) => {{ - use std::str::FromStr; - let ids = $input_ids - .iter() - .map(|s| { - <$id_type>::from_str(&s) + ([$db:expr, $input_ids:expr, $id_type:ty, $model:ty]) => { + { + use std::str::FromStr; + let ids = $input_ids.iter().map(|s| <$id_type>::from_str(&s) .map_err(|e| Status::invalid_argument(format!("Invalid ID: {}", e))) - }) - .collect::<Result<Vec<_>, _>>()?; - <$model>::delete_many($db, ids.as_slice())? as u32 - }}; + ).collect::<Result<Vec<_>, _>>()?; + <$model>::delete_many($db, ids.as_slice())? 
as u32 + } + } } let conn = &mut self.server.db.conn()?; Ok(Response::new(org::DbDeleteResponse { - media_files_deleted: delete_type!([ - conn, - req.media_file_ids, - String, - models::MediaFile - ]), + media_files_deleted: delete_type!([conn, req.media_file_ids, String, models::MediaFile]), subtitles_deleted: delete_type!([conn, req.subtitle_ids, i32, models::Subtitle]), comments_deleted: delete_type!([conn, req.comment_ids, i32, models::Comment]), user_messages_deleted: delete_type!([conn, req.user_message_ids, i32, models::Message]), @@ -375,9 +271,9 @@ impl org::organizer_outbound_server::OrganizerOutbound for OrganizerOutboundImpl } } + fn to_rpc_empty<T, E>(res: Result<T, E>) -> RpcResult<proto::Empty> -where - E: std::fmt::Display, + where E: std::fmt::Display, { match res { Ok(_) => Ok(Response::new(proto::Empty {})), @@ -385,37 +281,24 @@ where } } -pub async fn run_org_to_srv_grpc_server( - bind: GrpcBindAddr, - server: ServerState, -) -> anyhow::Result<()> { + +pub async fn run_org_to_srv_grpc_server(bind: GrpcBindAddr, server: ServerState) -> anyhow::Result<()> +{ let span = tracing::info_span!("gRPC server for org->srv"); let terminate_flag = server.terminate_flag.clone(); let server_listening_flag = server.grpc_srv_listening_flag.clone(); - let service = - org::organizer_outbound_server::OrganizerOutboundServer::new(OrganizerOutboundImpl { - server, - }); + let service = org::organizer_outbound_server::OrganizerOutboundServer::new(OrganizerOutboundImpl { server }); - run_organizer_outbound_grpc_server(bind, service, span, server_listening_flag, terminate_flag) - .await + run_organizer_outbound_grpc_server(bind, service, span, server_listening_flag, terminate_flag).await } -pub fn make_grpc_server_bind( - tcp: &Option<String>, - data_dir: &Path, -) -> anyhow::Result<GrpcBindAddr> { +pub fn make_grpc_server_bind(tcp: &Option<String>, data_dir: &Path) -> anyhow::Result<GrpcBindAddr> +{ match tcp { - None => Ok(GrpcBindAddr::Unix( - data_dir - .canonicalize() - .context("Expanding data dir")? - .join("grpc-org-to-srv.sock") - .into(), - )), - Some(s) => Ok(GrpcBindAddr::Tcp( - s.parse().context("Parsing TCP listen address")?, - )), + None => Ok(GrpcBindAddr::Unix(data_dir + .canonicalize().context("Expanding data dir")? + .join("grpc-org-to-srv.sock").into())), + Some(s) => Ok(GrpcBindAddr::Tcp(s.parse().context("Parsing TCP listen address")?)), } } diff --git a/server/src/grpc/mod.rs index f791ead9..349c79f1 100644 --- a/server/src/grpc/mod.rs +++ b/server/src/grpc/mod.rs @@ -1,11 +1,12 @@ -pub mod caller; -pub mod db_models; pub mod grpc_client; -pub mod grpc_impl_helpers; +pub mod caller; pub mod grpc_server; +pub mod grpc_impl_helpers; +pub mod db_models; -use lib_clapshot_grpc::proto; use std::collections::HashMap; +use lib_clapshot_grpc::proto; + // Helper macro to simplify creation of ServerToClientCmd messages. // Prost/Tonic syntax is a bit verbose. @@ -22,33 +23,15 @@ macro_rules! client_cmd { // Proto3 objects use string for many IDs that are integers in DB. Helper to convert them. #[macro_export] macro_rules! str_to_i32_or_tonic_error { - ($r:expr) => { - $r.parse::<i32>().map_err(|e| { - tonic::Status::invalid_argument(format!( - "Could not parse {} as int: {}", - stringify!($r), - e - )) - }) - }; + ($r:expr) => { $r.parse::<i32>().map_err(|e| tonic::Status::invalid_argument(format!("Could not parse {} as int: {}", stringify!($r), e))) }; } #[macro_export] macro_rules! 
optional_str_to_i32_or_tonic_error { - ($r:expr) => { - $r.as_ref() - .map(|v| { - v.parse::<i32>().map_err(|e| { - tonic::Status::invalid_argument(format!( - "Could not parse {} as int: {}", - stringify!($r), - e - )) - }) - }) - .transpose() - }; + ($r:expr) => { $r.as_ref().map(|v| v.parse::<i32>().map_err(|e| tonic::Status::invalid_argument(format!("Could not parse {} as int: {}", stringify!($r), e)))).transpose() }; } + + /// Convert database time to protobuf3 pub fn datetime_to_proto3(dt: &chrono::NaiveDateTime) -> pbjson_types::Timestamp { pbjson_types::Timestamp { @@ -61,7 +44,7 @@ pub fn proto3_to_datetime(ts: &pbjson_types::Timestamp) -> Option<chrono::NaiveDateTime> -pub(crate) fn make_media_file_popup_actions() -> HashMap<String, proto::ActionDef> { +pub (crate) fn make_media_file_popup_actions() -> HashMap<String, proto::ActionDef> { HashMap::from([ ("popup_builtin_rename".into(), make_builtin_rename_action()), ("popup_builtin_trash".into(), make_builting_trash_action()), @@ -69,14 +52,12 @@ pub(crate) fn make_media_file_popup_actions() -> HashMap<String, proto::ActionDef> fn make_builtin_rename_action() -> proto::ActionDef { - proto::ActionDef { + proto::ActionDef { ui_props: Some(proto::ActionUiProps { label: Some(format!("Rename")), icon: Some(proto::Icon { src: Some(proto::icon::Src::FaClass(proto::icon::FaClass { - classes: "fa fa-edit".into(), - color: None, - })), + classes: "fa fa-edit".into(), color: None, })), ..Default::default() }), key_shortcut: Some("F2".into()), @@ -98,10 +79,8 @@ if (new_name && new_name != old_name) { alert("Unknown item type in rename action. Please report this bug."); } } - "# - .trim() - .into(), - }), + "#.trim().into() + }) } } @@ -150,28 +129,19 @@ if (confirm(msg)) { } } + + /// Convert a list of database MediaFiles to a protobuf3 PageItem (FolderListing) -pub(crate) fn folder_listing_for_media_files(media_files: &[proto::MediaFile]) -> proto::PageItem { - let media_files: Vec<proto::page_item::folder_listing::Item> = media_files - .iter() - .map(|v| { +pub (crate) fn folder_listing_for_media_files(media_files: &[proto::MediaFile]) -> proto::PageItem { + let media_files: Vec<proto::page_item::folder_listing::Item> = media_files.iter().map(|v| { proto::page_item::folder_listing::Item { - item: Some(proto::page_item::folder_listing::item::Item::MediaFile( - v.clone(), - )), + item: Some(proto::page_item::folder_listing::item::Item::MediaFile(v.clone())), open_action: Some(proto::ScriptCall { lang: proto::script_call::Lang::Javascript.into(), - code: format!("clapshot.openMediaFile(\"{}\")", v.id).into(), + code: format!("clapshot.openMediaFile(\"{}\")", v.id).into() }), popup_actions: vec!["popup_builtin_rename".into(), "popup_builtin_trash".into()], - vis: if v - .preview_data - .as_ref() - .and_then(|pv| pv.thumb_url.as_ref()) - .is_some() - { - None - } else { + vis: if v.preview_data.as_ref().and_then(|pv| pv.thumb_url.as_ref()).is_some() { None } else { // If no thumbnail, show an icon based on media type instead Some(proto::page_item::folder_listing::item::Visualization { icon: Some(proto::Icon { @@ -181,18 +151,14 @@ pub(crate) fn folder_listing_for_media_files(media_files: &[proto::MediaFile]) - "image" => "fas fa-image", "video" => "fas fa-video", _ => "fa fa-circle-question", - } - .into(), - color: None, - })), + }.into(), color: None, })), ..Default::default() }), ..Default::default() }) }, } - }) - .collect(); + }).collect(); proto::PageItem { item: Some(proto::page_item::Item::FolderListing( @@ -201,7 +167,6 @@ pub(crate) fn folder_listing_for_media_files(media_files: &[proto::MediaFile]) - allow_reordering: false, allow_upload: true, ..Default::default() - }, - )), + })), } } diff --git a/server/src/lib.rs index a5efecee..6983d247 100644 --- a/server/src/lib.rs +++ 
b/server/src/lib.rs @@ -1,41 +1,24 @@ -use std::{ - collections::HashMap, - path::PathBuf, - sync::{ - atomic::{AtomicBool, Ordering}, - Arc, - }, - thread::{self, JoinHandle}, -}; - -use crate::{ - api_server::server_state::ServerState, - grpc::{caller::OrganizerCaller, grpc_client::OrganizerURI}, -}; +use std::{collections::HashMap, path::PathBuf, sync::{atomic::{AtomicBool, Ordering}, Arc}, thread::{self, JoinHandle}}; + use anyhow::Context; -use database::{ - db_backup::{backup_sqlite_database, restore_sqlite_database}, - migration_solver::MigrationGraphModule, - sqlite_foreign_key_check, DB, -}; -use lib_clapshot_grpc::{ - proto::org::{self, Migration}, - GrpcBindAddr, -}; +use database::{db_backup::{backup_sqlite_database, restore_sqlite_database}, migration_solver::MigrationGraphModule, sqlite_foreign_key_check, DB}; +use lib_clapshot_grpc::{proto::org::{self, Migration}, GrpcBindAddr}; +use crate::{api_server::server_state::ServerState, grpc::{caller::OrganizerCaller, grpc_client::OrganizerURI}}; use anyhow::bail; +pub mod video_pipeline; pub mod api_server; pub mod database; +pub mod tests; pub mod grpc; pub mod storage; -pub mod tests; -pub mod video_pipeline; pub const PKG_VERSION: &'static str = env!("CARGO_PKG_VERSION"); pub const PKG_NAME: &'static str = env!("CARGO_PKG_NAME"); -const SERVER_MODULE_NAME: &str = "clapshot.server"; // Name for migrations solver +const SERVER_MODULE_NAME: &str = "clapshot.server"; // Name for migrations solver + pub struct ClapshotInit { terminate_flag: Arc<AtomicBool>, @@ -44,6 +27,7 @@ pub struct ClapshotInit { } impl ClapshotInit { + /// Initialize clapshot and spawn all worker threads. pub fn init_and_spawn_workers( data_dir: std::path::PathBuf, @@ -64,11 +48,12 @@ impl ClapshotInit { thumbnail_script: String, org_http_headers_regex: regex::Regex, storage: crate::storage::StorageBackend, - terminate_flag: Arc<AtomicBool>, - ) -> anyhow::Result<Self> { - use crossbeam_channel::unbounded; + terminate_flag: Arc<AtomicBool>) + -> anyhow::Result<Self> + { use signal_hook::consts::TERM_SIGNALS; - use signal_hook::flag; // Work queue + use signal_hook::flag; + use crossbeam_channel::unbounded; // Work queue let _span = tracing::info_span!("INIT").entered(); @@ -97,8 +82,7 @@ impl ClapshotInit { let (user_msg_tx, user_msg_rx) = unbounded::(); let (upload_tx, upload_rx) = unbounded::(); let api_thread = Some({ - let server = ServerState::new( - db.clone(), + let server = ServerState::new( db.clone(), &data_dir.join("videos"), &data_dir.join("upload"), &url_base, @@ -107,26 +91,10 @@ impl ClapshotInit { grpc_srv_listening_flag.clone(), default_user, terminate_flag.clone(), - org_http_headers_regex, - ); + org_http_headers_regex); - let grpc_srv = if (&organizer_uri).is_some() { - Some(grpc_server_bind.clone()) - } else { - None - }; + let grpc_srv = if (&organizer_uri).is_some() { Some(grpc_server_bind.clone()) } else { None }; let ub = url_base.clone(); - thread::spawn(move || { - api_server::run_forever( - user_msg_rx, - grpc_srv, - upload_tx, - bind_api.to_string(), - ub, - cors_origins, - server, - port, - ) - }) + thread::spawn(move || { api_server::run_forever(user_msg_rx, grpc_srv, upload_tx, bind_api.to_string(), ub, cors_origins, server, port) }) }); // Handshake Organizer if configured @@ -143,12 +111,7 @@ impl ClapshotInit { // Ok, organizer should be able to connect back to us now, so handshake let org = OrganizerCaller::new(&ouri); tracing::info!("Connecting gRPC srv->org..."); - org.blocking_handshake_organizer( - &data_dir, - &url_base, - &db_file, - &grpc_server_bind, - )?; + 
org.blocking_handshake_organizer(&data_dir, &url_base, &db_file, &grpc_server_bind)?; tracing::debug!("srv->org handshake done."); } None => { @@ -163,34 +126,18 @@ impl ClapshotInit { let ths = thumbnail_script.clone(); let vpp_thread = Some({ let db = db.clone(); - thread::spawn(move || { - video_pipeline::run_forever( - db, - tf.clone(), - dd, - storage.clone(), - user_msg_tx, - poll_interval, - resubmit_delay, - target_bitrate, - upload_rx, - n_workers, - ingest_username_from, - ts, - ths, - ) - }) + thread::spawn(move || { video_pipeline::run_forever( + db, tf.clone(), dd, storage.clone(), user_msg_tx, poll_interval, resubmit_delay, target_bitrate, upload_rx, n_workers, ingest_username_from, ts, ths)}) }); - Ok(ClapshotInit { - terminate_flag, - api_thread, - vpp_thread, - }) + + Ok(ClapshotInit {terminate_flag, api_thread, vpp_thread}) } + /// Block until the terminate flag is set - pub fn wait_for_termination(&mut self) -> anyhow::Result<()> { + pub fn wait_for_termination(&mut self) -> anyhow::Result<()> + { // Loop forever, abort on SIGINT/SIGTERM or if child threads die while !self.terminate_flag.load(Ordering::Relaxed) { thread::sleep(std::time::Duration::from_secs(1)); @@ -200,26 +147,22 @@ impl ClapshotInit { } tracing::info!("Got kill signal. Cleaning up."); - self.vpp_thread - .take() - .and_then(|t| t.join().ok()) - .expect("VPP thread failed"); - self.api_thread - .take() - .and_then(|t| t.join().ok()) - .expect("API thread failed"); + self.vpp_thread.take().and_then(|t| t.join().ok()).expect("VPP thread failed"); + self.api_thread.take().and_then(|t| t.join().ok()).expect("API thread failed"); Ok(()) } } + + /// Find migrations from server and organizer, solve their dependencies, and apply them. /// Backup before starting, and restore if foreign key checks fail after applying the migrations. 
-fn migrate_db(db_file: &PathBuf, org_uri: &Option<OrganizerURI>) -> anyhow::Result<()> { +fn migrate_db( db_file: &PathBuf, org_uri: &Option<OrganizerURI>) -> anyhow::Result<()> +{ use lib_clapshot_grpc::proto::org::CheckMigrationsRequest; let _span = tracing::info_span!("migrate_db").entered(); - let db: Arc<DB> = - Arc::new(database::DB::open_db_file(&db_file).context("Error opening DB file")?); + let db: Arc<DB> = Arc::new(database::DB::open_db_file(&db_file).context("Error opening DB file")?); let cur_server_migration = db.latest_applied_server_migration_name()?; let pending_server_migrations = db.pending_server_migrations()?; @@ -231,57 +174,44 @@ fn migrate_db(db_file: &PathBuf, org_uri: &Option<OrganizerURI>) -> anyhow::Resu // Represent server migrations for solver let server_module = { let mut prev_ver: Option<String> = cur_server_migration.clone(); - let server_migs = pending_server_migrations - .iter() - .map(|(m_name, m_version)| { - let mig = Migration { - uuid: m_name.clone(), - version: m_version.clone(), - dependencies: vec![lib_clapshot_grpc::proto::org::migration::Dependency { - name: SERVER_MODULE_NAME.to_string(), - min_ver: prev_ver.clone(), - max_ver: prev_ver.clone(), - }], - description: "".to_string(), - }; - prev_ver = Some(m_version.clone()); - mig - }) - .collect::<Vec<_>>(); - - tracing::debug!( - "Clapshot server has {} pending migrations.", - server_migs.len() - ); + let server_migs = pending_server_migrations.iter().map(|(m_name, m_version)| { + let mig = Migration { + uuid: m_name.clone(), + version: m_version.clone(), + dependencies: vec![lib_clapshot_grpc::proto::org::migration::Dependency { + name: SERVER_MODULE_NAME.to_string(), + min_ver: prev_ver.clone(), + max_ver: prev_ver.clone(), + }], + description: "".to_string(), + }; + prev_ver = Some(m_version.clone()); + mig + }).collect::<Vec<_>>(); + + tracing::debug!("Clapshot server has {} pending migrations.", server_migs.len()); MigrationGraphModule { name: SERVER_MODULE_NAME.to_string(), cur_version: cur_server_migration.clone(), - migrations: server_migs, + migrations: server_migs } }; - let mut migration_modules: Vec<MigrationGraphModule> = vec![server_module]; + let mut migration_modules: Vec<MigrationGraphModule> = vec![ server_module ]; let org_db_info = Some(org::Database { r#type: org::database::DatabaseType::Sqlite.into(), - endpoint: db_file - .canonicalize()? - .to_str() - .ok_or(anyhow::anyhow!("Sqlite path is not valid UTF-8"))? - .into(), + endpoint: db_file.canonicalize()?.to_str().ok_or( + anyhow::anyhow!("Sqlite path is not valid UTF-8"))?.into() }); // Add Organizer and its migrations, if available if let Some(uri) = org_uri { let caller = OrganizerCaller::new(uri); - let (rt, mut org_conn) = caller - .tokio_connect() - .context("Error connecting to Organizer")?; + let (rt, mut org_conn) = caller.tokio_connect().context("Error connecting to Organizer")?; tracing::debug!("Calling check_migrations on Organizer."); - match rt.block_on(org_conn.check_migrations(CheckMigrationsRequest { - db: org_db_info.clone(), - })) { + match rt.block_on(org_conn.check_migrations(CheckMigrationsRequest { db: org_db_info.clone() })) { Ok(cm_res) => { let migrations = cm_res.get_ref().pending_migrations.clone(); tracing::debug!("Organizer has {} pending migrations.", migrations.len()); @@ -291,59 +221,37 @@ fn migrate_db(db_file: &PathBuf, org_uri: &Option<OrganizerURI>) -> anyhow::Resu migrations, }); } - Err(e) => match e.code() { - tonic::Code::NotFound => { - tracing::info!("No pending migrations from Organizer."); - } - tonic::Code::Unimplemented => { - tracing::info!("Organizer does not implement migrations. 
Ignoring."); + Err(e) => { + match e.code() { + tonic::Code::NotFound => { tracing::info!("No pending migrations from Organizer."); }, + tonic::Code::Unimplemented => { tracing::info!("Organizer does not implement migrations. Ignoring."); }, + _ => { anyhow::bail!("Error checking Organizer migrations: {:?}", e); } } - _ => { - anyhow::bail!("Error checking Organizer migrations: {:?}", e); - } - }, + } } }; - match migration_modules - .iter() - .map(|m| m.migrations.len()) - .sum::() - { + match migration_modules.iter().map(|m| m.migrations.len()).sum::() { 0 => { tracing::info!("No pending migrations."); return Ok(()); - } - n => { - tracing::debug!("Total {} migrations to consider. Solving dependencies.", n); - } + }, + n => { tracing::debug!("Total {} migrations to consider. Solving dependencies.", n); } } // Solve migration order - let migration_order = - database::migration_solver::solve_migration_graph(migration_modules.iter().collect())?; + let migration_order = database::migration_solver::solve_migration_graph(migration_modules.iter().collect())?; match migration_order { None => { - tracing::error!( - "Failed to solve migration dependencies. List of considered migrations:" - ); + tracing::error!("Failed to solve migration dependencies. List of considered migrations:"); for m in migration_modules { - tracing::error!( - "Module: '{}': current version: '{:?}'", - &m.name, - &m.cur_version - ); + tracing::error!("Module: '{}': current version: '{:?}'", &m.name, &m.cur_version); for mig in &m.migrations { - tracing::error!( - " - '{}', brings version to '{}' depends on: '{:?}')", - mig.uuid, - mig.version, - mig.dependencies - ); + tracing::error!(" - '{}', brings version to '{}' depends on: '{:?}')", mig.uuid, mig.version, mig.dependencies); } } bail!("Cannot proceed with migrations due to unsolvable dependencies."); - } + }, // Solver returned a list of migrations to apply Some(order) => { if order.is_empty() { @@ -352,38 +260,25 @@ fn migrate_db(db_file: &PathBuf, org_uri: &Option) -> anyhow::Resu } tracing::info!("Migration plan created."); - drop(db); // Close before backup + drop(db); // Close before backup let db_backup_file = backup_sqlite_database(db_file.into())?; - let db: Arc = - Arc::new(database::DB::open_db_file(&db_file).context("Error opening DB file")?); - match apply_migrations( - &migration_modules, - &order, - &db, - db_file, - org_uri, - org_db_info.clone(), - ) - .and_then(|_| { - db.conn() - .context("Error opening DB connection after migrations") - }) - .and_then(|mut conn| { - sqlite_foreign_key_check(&mut conn, true) - .context("Foreign key checks failed after migrations") - }) { + let db: Arc = Arc::new(database::DB::open_db_file(&db_file).context("Error opening DB file")?); + match apply_migrations(&migration_modules, &order, &db, db_file, org_uri,org_db_info.clone()) + .and_then(|_| { db.conn().context("Error opening DB connection after migrations") }) + .and_then(|mut conn| { sqlite_foreign_key_check(&mut conn, true).context("Foreign key checks failed after migrations") }) + { Ok(_) => { tracing::info!("Migrations applied successfully. Foreign keys checked Ok."); return Ok(()); - } + }, Err(e) => { - drop(db); // Close before restore + drop (db); // Close before restore tracing::error!(error=%e, "Migration failure. Restoring DB from the backup."); match db_backup_file { None => { tracing::warn_span!("No backup file found. Skipping restore. This usually means DB was missing before migrations. 
If that's the case, delete the dangling DB before trying again."); - } + }, Some(db_backup_file) => { restore_sqlite_database(db_file.into(), db_backup_file) .context("Error restoring DB after failed migrations")?; @@ -394,12 +289,13 @@ fn migrate_db(db_file: &PathBuf, org_uri: &Option<OrganizerURI>) -> anyhow::Resu } } } - } + }, } Ok(()) } + /// Execute given migration plan. fn apply_migrations( migration_modules: &Vec<MigrationGraphModule>, @@ -407,18 +303,14 @@ fn apply_migrations( db: &Arc<DB>, db_file: &PathBuf, org_uri: &Option<OrganizerURI>, - org_db_info: Option<org::Database>, -) -> Result<(), anyhow::Error> { + org_db_info: Option<org::Database> +) -> Result<(), anyhow::Error> +{ use lib_clapshot_grpc::proto::org::ApplyMigrationRequest; - let uuid_to_mod: HashMap<String, String> = migration_modules - .iter() - .flat_map(|m| { - m.migrations - .iter() - .map(|mig| (mig.uuid.clone(), m.name.clone())) - }) - .collect(); + let uuid_to_mod: HashMap<String, String> = migration_modules.iter().flat_map(|m| { + m.migrations.iter().map(|mig| (mig.uuid.clone(), m.name.clone())) + }).collect(); for mig in plan { match uuid_to_mod.get(mig.uuid.as_str()) { @@ -432,48 +324,30 @@ fn apply_migrations( } // Organizer Some(module_name) => { - let _span = tracing::info_span!( - "apply org migration", - name = mig.uuid, - new_ver = mig.version, - org = module_name - ) - .entered(); + let _span = tracing::info_span!("apply org migration", name=mig.uuid, new_ver=mig.version, org=module_name).entered(); tracing::info!("Applying on Organizer..."); if let Some(uri) = org_uri { - let (rt, mut org_conn) = OrganizerCaller::new(uri) - .tokio_connect() + let (rt, mut org_conn) = OrganizerCaller::new(uri).tokio_connect() .context("Error connecting to organizer for migrations")?; rt.block_on(org_conn.apply_migration(ApplyMigrationRequest { db: org_db_info.clone(), - uuid: mig.uuid.clone(), - })) - .map_err(|e| { - anyhow::anyhow!( - "Error applying organizer migration '{}': {:?}", - &mig.uuid, - e - ) - })?; + uuid: mig.uuid.clone() + })).map_err(|e| anyhow::anyhow!("Error applying organizer migration '{}': {:?}", &mig.uuid, e))?; } else { - bail!( - "Organizer migration '{}' found but no organizer URI to connect to.", - &mig.uuid - ); + bail!("Organizer migration '{}' found but no organizer URI to connect to.", &mig.uuid); } - } + }, None => { - bail!( - "Migration '{}' not found in modules. This should not happen.", - mig.uuid - ); + bail!("Migration '{}' not found in modules. 
This should not happen.", mig.uuid); } } } Ok(()) } + + pub fn run_clapshot( data_dir: std::path::PathBuf, migrate: bool, @@ -494,6 +368,7 @@ pub fn run_clapshot( org_http_headers_regex: regex::Regex, storage: crate::storage::StorageBackend, ) -> anyhow::Result<()> { + let terminate_flag = Arc::new(AtomicBool::new(false)); // Initialize clapshot @@ -516,9 +391,9 @@ pub fn run_clapshot( thumbnail_script, org_http_headers_regex, storage, - terminate_flag.clone(), + terminate_flag.clone() )?; // Wait until termination clapshot.wait_for_termination() -} +} \ No newline at end of file diff --git a/server/src/log.rs b/server/src/log.rs index 10130817..e080792b 100644 --- a/server/src/log.rs +++ b/server/src/log.rs @@ -1,4 +1,3 @@ -use signal_hook::{consts::SIGUSR1, iterator::Signals}; use std::{ fs::OpenOptions, io::{self, stdout, Write}, @@ -6,8 +5,9 @@ use std::{ sync::{Arc, Mutex}, thread, }; +use signal_hook::{consts::SIGUSR1, iterator::Signals}; use tracing::subscriber::set_global_default; -use tracing_subscriber::{fmt, fmt::time::OffsetTime, EnvFilter}; +use tracing_subscriber::{fmt, EnvFilter, fmt::time::OffsetTime}; /// Custom logger with the ability to write to a file or stdout. /// It supports transparent file reopen on SIGUSR1 (for `logrotate`), @@ -17,18 +17,14 @@ pub struct ClapshotLogger { pub _guard: tracing_appender::non_blocking::WorkerGuard, } -impl ClapshotLogger { +impl ClapshotLogger +{ /// Create a new Logger instance. /// - `time_offset`: Time offset for the log timestamps. /// - `level`: Tracing level to log. /// - `log_file`: Path to the log file or "-" for stdout. /// - `json_log`: Enable or disable JSON formatted logging. - pub fn new( - time_offset: time::UtcOffset, - level: tracing::Level, - log_file: &str, - json_log: bool, - ) -> anyhow::Result { + pub fn new(time_offset: time::UtcOffset, level: tracing::Level, log_file: &str, json_log: bool) -> anyhow::Result { let log_writer = Arc::new(Mutex::new(None)); let log_to_stdout = log_file.is_empty() || log_file == "-"; @@ -43,8 +39,7 @@ impl ClapshotLogger { let log_writer_cloned = log_writer.clone(); thread::spawn(move || { for _ in signals.forever() { - let mut log_writer = - log_writer_cloned.lock().expect("Failed to lock log writer"); + let mut log_writer = log_writer_cloned.lock().expect("Failed to lock log writer"); if let Some(file) = log_writer.as_mut() { file.sync_and_reopen().expect("Failed to reopen log file"); } @@ -55,17 +50,12 @@ impl ClapshotLogger { if std::env::var_os("RUST_LOG").is_none() { std::env::set_var( - "RUST_LOG", - match level { + "RUST_LOG", match level { tracing::Level::ERROR => "error", tracing::Level::WARN => "warn", tracing::Level::INFO => "info,clapshot_server=info", - tracing::Level::DEBUG => { - "debug,clapshot_server=debug,h2=info,hyper::proto::h1=info" - } - tracing::Level::TRACE => { - "trace,clapshot_server=trace,h2=debug,hyper::proto::h1=debug,async_io=debug" - } + tracing::Level::DEBUG => "debug,clapshot_server=debug,h2=info,hyper::proto::h1=info", + tracing::Level::TRACE => "trace,clapshot_server=trace,h2=debug,hyper::proto::h1=debug,async_io=debug", }, ); } @@ -77,11 +67,13 @@ impl ClapshotLogger { (true, _) => "[year]-[month]-[day]T[hour]:[minute]:[second].[subsecond digits:4][offset_hour sign:mandatory]:[offset_minute]", }; - let time_format = time::format_description::parse(if json_log { - "[unix_timestamp].[subsecond digits:4]]" - } else { - iso_fmt - }) + let time_format = time::format_description::parse( + if json_log { + "[unix_timestamp].[subsecond digits:4]]" + } 
else { + iso_fmt + }, + ) .expect("invalid time format"); let timer = OffsetTime::new(time_offset, time_format); @@ -103,13 +95,11 @@ impl ClapshotLogger { } .expect("tracing::subscriber::set_global_default failed"); - Ok(ClapshotLogger { - _log_writer: log_writer, - _guard: guard, - }) + Ok(ClapshotLogger { _log_writer: log_writer, _guard: guard }) } } + /// ReopenableFileWriter provides functionality to write to a file /// that can be reopened, allowing for log rotation without losing log entries. pub struct ReopenableFileWriter { @@ -124,11 +114,7 @@ impl ReopenableFileWriter { } fn open_file(path: &PathBuf) -> io::Result { - OpenOptions::new() - .create(true) - .write(true) - .append(true) - .open(path) + OpenOptions::new().create(true).write(true).append(true).open(path) } /// Sync the current log file to disk and reopen it under a new file descriptor. @@ -144,52 +130,40 @@ impl ReopenableFileWriter { impl Clone for ReopenableFileWriter { fn clone(&self) -> Self { - Self { - file: self.file.clone(), - path: self.path.clone(), - } + Self { file: self.file.clone(), path: self.path.clone() } } } impl Write for ReopenableFileWriter { fn write(&mut self, buf: &[u8]) -> io::Result { let mut file_lock = self.file.lock().unwrap(); - if let Some(file) = file_lock.as_mut() { - file.write(buf) - } else { - Ok(0) - } + if let Some(file) = file_lock.as_mut() { file.write(buf) } else { Ok(0) } } fn flush(&mut self) -> io::Result<()> { let mut file_lock = self.file.lock().unwrap(); - if let Some(file) = file_lock.as_mut() { - file.flush() - } else { - Ok(()) - } + if let Some(file) = file_lock.as_mut() { file.flush() } else { Ok(()) } } } + #[test] fn test_log_rotation_on_sigusr1() { + use std::{ + fs::File, + io::Read, + sync::Arc, + thread, + time::Duration, + }; use assert_fs::TempDir; - use std::{fs::File, io::Read, sync::Arc, thread, time::Duration}; let log_dir = TempDir::new().expect("Failed to create temp dir"); let log_file = log_dir.path().join("test_log.log"); let log_file_backup = log_dir.path().join("test_log_backup.log"); let time_offset = time::UtcOffset::from_whole_seconds(0).unwrap(); - let logger = Arc::new( - ClapshotLogger::new( - time_offset, - tracing::Level::DEBUG, - log_file.to_str().unwrap(), - false, - ) - .expect("Failed to setup logger"), - ); + let logger = Arc::new(ClapshotLogger::new(time_offset, tracing::Level::DEBUG, log_file.to_str().unwrap(), false).expect("Failed to setup logger")); tracing::info!("Logging before rotation"); @@ -212,25 +186,12 @@ fn test_log_rotation_on_sigusr1() { let mut old_log_content = String::new(); let mut old_log_file = File::open(&log_file_backup).expect("Failed to open old log file"); - old_log_file - .read_to_string(&mut old_log_content) - .expect("Failed to read old log file"); - assert!( - old_log_content.contains("Logging before rotation"), - "Old log file does not contain the expected log entry" - ); - assert!( - !old_log_content.contains("Logging after rotation"), - "Old log file contains the second log entry" - ); + old_log_file.read_to_string(&mut old_log_content).expect("Failed to read old log file"); + assert!(old_log_content.contains("Logging before rotation"), "Old log file does not contain the expected log entry"); + assert!(!old_log_content.contains("Logging after rotation"), "Old log file contains the second log entry"); let mut new_log_content = String::new(); let mut new_log_file = File::open(&log_file).expect("Failed to open new log file"); - new_log_file - .read_to_string(&mut new_log_content) - .expect("Failed to read 
new log file"); - assert!( - new_log_content.contains("Logging after rotation"), - "New log file does not contain the expected log entry" - ); + new_log_file.read_to_string(&mut new_log_content).expect("Failed to read new log file"); + assert!(new_log_content.contains("Logging after rotation"), "New log file does not contain the expected log entry"); } diff --git a/server/src/tests/integration_test.rs b/server/src/tests/integration_test.rs index fdb3fbe0..0233cdbc 100644 --- a/server/src/tests/integration_test.rs +++ b/server/src/tests/integration_test.rs @@ -3,11 +3,12 @@ #![allow(unused_imports)] #[cfg(test)] -mod integration_test { +mod integration_test +{ use std::collections::HashMap; use std::sync::atomic::AtomicBool; - use std::sync::{Arc, Mutex}; - use std::{any, error}; + use std::sync::{Mutex, Arc}; + use std::{error, any}; use std::{path::PathBuf, str::FromStr}; use std::{thread, time::Duration}; @@ -19,31 +20,33 @@ mod integration_test { use rust_decimal::prelude::*; use crossbeam_channel; - use crossbeam_channel::{select, unbounded, Receiver, RecvTimeoutError}; + use crossbeam_channel::{Receiver, RecvTimeoutError, unbounded, select}; use crate::api_server::tests::expect_user_msg; use crate::api_server::validate_org_http_headers_regex; use crate::storage::StorageBackend; - use crate::api_server::test_utils::{connect_client_ws, open_media_file, write}; use crate::database::schema::media_files::{thumb_sheet_cols, thumb_sheet_rows}; + use crate::{expect_client_cmd, send_server_cmd}; use crate::grpc::grpc_client::prepare_organizer; use crate::video_pipeline::{metadata_reader, IncomingFile, IngestUsernameFrom}; - use crate::{expect_client_cmd, send_server_cmd}; - use lib_clapshot_grpc::proto::client::server_to_client_cmd as s2c; + use crate::api_server::test_utils::{connect_client_ws, open_media_file, write}; + use lib_clapshot_grpc::{GrpcBindAddr, proto}; use lib_clapshot_grpc::proto::client::ServerToClientCmd; - use lib_clapshot_grpc::{proto, GrpcBindAddr}; + use lib_clapshot_grpc::proto::client::server_to_client_cmd as s2c; - use serial_test::serial; - use std::io::Write; use tracing; - use tracing::{error, info, instrument, warn}; + use tracing::{error, info, warn, instrument}; use tracing_test::traced_test; + use serial_test::serial; + use std::io::Write; + #[test] #[serial] #[traced_test] - fn test_integ_metadata_reader_ok() -> anyhow::Result<()> { + fn test_integ_metadata_reader_ok() -> anyhow::Result<()> + { let data_dir = assert_fs::TempDir::new()?; data_dir.copy_from("src/tests/assets/", &["*.mov"])?; @@ -55,17 +58,12 @@ mod integration_test { let (arg_sender, arg_recvr) = unbounded::(); let (res_sender, res_recvr) = unbounded::(); let th = thread::spawn(move || { - metadata_reader::run_forever(arg_recvr, res_sender, 4); - }); + metadata_reader::run_forever(arg_recvr, res_sender, 4); + }); // Send request to metadata reader let args = IncomingFile { - file_path: PathBuf::from_str( - data_dir - .join("NASA_Red_Lettuce_excerpt.mov") - .to_str() - .unwrap(), - )?, + file_path: PathBuf::from_str(data_dir.join("NASA_Red_Lettuce_excerpt.mov").to_str().unwrap())?, user_id: "nobody".to_string(), cookies: HashMap::new(), transcode_preference: crate::video_pipeline::TranscodePreference::Auto, @@ -90,28 +88,19 @@ mod integration_test { Ok(()) } + /// Query API health endpoint until it returns 200 OK or timeout fn wait_for_healthy(url_base: &str) -> bool { const MAX_RETRIES: usize = 10; let mut interval_ms: u64 = 10; let url = format!("{}/api/health", url_base); for i in 
1..=MAX_RETRIES { - if i > 1 { - thread::sleep(Duration::from_millis(interval_ms)); - } + if i > 1 { thread::sleep(Duration::from_millis(interval_ms)); } interval_ms = std::cmp::min(interval_ms * 2, 1000); let resp_result = reqwest::blocking::get(&url); if let Ok(resp) = resp_result { - if resp.status() == 200 { - return true; - } else { - tracing::debug!( - "wait_for_healthy got status {} from /api/health. Try {}/{}.", - resp.status(), - i, - MAX_RETRIES - ) - } + if resp.status() == 200 { return true; } + else { tracing::debug!("wait_for_healthy got status {} from /api/health. Try {}/{}.", resp.status(), i, MAX_RETRIES) } } } false @@ -175,7 +164,8 @@ mod integration_test { #[test] #[serial] #[traced_test] - fn test_video_ingest_no_transcode() -> anyhow::Result<()> { + fn test_video_ingest_no_transcode() -> anyhow::Result<()> + { cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 2500_000, None, None, IngestUsernameFrom::FileOwner] // Copy test file to incoming dir let mp4_file = "60fps-example.mp4"; @@ -231,7 +221,8 @@ mod integration_test { #[test] #[serial] #[traced_test] - fn test_video_try_ingest_corrupted_video() -> anyhow::Result<()> { + fn test_video_try_ingest_corrupted_video() -> anyhow::Result<()> + { cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 500_000, None, None, IngestUsernameFrom::FileOwner] tracing::info!("WRITING CORRUPTED VIDEO"); @@ -253,6 +244,7 @@ mod integration_test { Ok(()) } + // --- Transcoding tests --- pub struct WaitForReportResults { @@ -270,79 +262,56 @@ mod integration_test { expect_transcode: bool, expect_thumbnail: bool, expect_thumbsheet: bool, - check_file_outputs: Option<(PathBuf, String)>, - ) -> WaitForReportResults { + check_file_outputs: Option<(PathBuf, String)>) -> WaitForReportResults + { let mut res = WaitForReportResults { - transcode_complete: false, - thumbs_complete: false, - got_progress_report: false, - got_transcode_report: false, - got_thumbnail_report: false, - ts_cols: String::new(), - ts_rows: String::new(), + transcode_complete: false, thumbs_complete: false, + got_progress_report: false, got_transcode_report: false, got_thumbnail_report: false, + ts_cols: String::new(), ts_rows: String::new(), }; const WAIT_AFTER_REPORTS_TIMEOUT_SECS: u32 = 5; // Wait for file to be processed thread::sleep(Duration::from_secs_f32(0.5)); - let msg = expect_user_msg(&mut ws, proto::user_message::Type::MediaFileAdded).await; // notification to client (with upload folder info etc) + let msg = expect_user_msg(&mut ws, proto::user_message::Type::MediaFileAdded).await; // notification to client (with upload folder info etc) let vid = msg.refs.unwrap().media_file_id.unwrap(); thread::sleep(Duration::from_secs_f32(0.5)); - let msg = expect_user_msg(&mut ws, proto::user_message::Type::Ok).await; // notification to user (in text) + let msg = expect_user_msg(&mut ws, proto::user_message::Type::Ok).await; // notification to user (in text) let vid2 = msg.refs.unwrap().media_file_id.unwrap(); assert_eq!(vid, vid2); assert!(vid.len() > 0); if expect_transcode { - assert!(msg - .details - .unwrap() - .to_ascii_lowercase() - .contains("transcod")); + assert!(msg.details.unwrap().to_ascii_lowercase().contains("transcod")); } - for _ in 0..(60 * 2 * 10) { + for _ in 0..(60*2*10) + { // Wait until server sends media updated messages about // transcoding and thumbnail generation being done // before we try to open and check metadata. 
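// A minimal illustrative sketch (hypothetical helper; the real loop below tracks the
// same conditions inline via `still_waiting`): with ~100 ms sleeps, the 60*2*10
// iteration bound gives roughly a two-minute budget, and polling should continue
// until every report kind this test expects has been observed at least once.
fn all_expected_reports_seen(res: &WaitForReportResults, expect_transcode: bool, expect_thumbnail: bool) -> bool {
    res.got_progress_report
        && (!expect_transcode || res.got_transcode_report)
        && (!expect_thumbnail || res.got_thumbnail_report)
}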
let mut still_waiting = true; if still_waiting { - match crate::api_server::test_utils::try_get_parsed::(&mut ws) - .await - .map(|c| c.cmd) - .flatten() - { + match crate::api_server::test_utils::try_get_parsed::(&mut ws).await.map(|c| c.cmd).flatten() { Some(s2c::Cmd::ShowMessages(m)) => { // Got progress report? - res.got_progress_report |= m - .msgs - .iter() - .any(|msg| msg.r#type == proto::user_message::Type::Progress as i32); + res.got_progress_report |= m.msgs.iter().any(|msg| msg.r#type == proto::user_message::Type::Progress as i32); assert!(!m.msgs.iter().any(|msg| msg.r#type == proto::user_message::Type::Error as i32), "Got ERROR type message while waiting for transcode/thumbnail completion"); - if m.msgs.iter().any(|msg| { - msg.r#type == proto::user_message::Type::MediaFileUpdated as i32 - }) { + if m.msgs.iter().any(|msg| msg.r#type == proto::user_message::Type::MediaFileUpdated as i32) { // Got transcoding update message? - if m.msgs.iter().any(|msg| { - msg.clone() - .message - .to_ascii_lowercase() - .contains("transcod") - }) { + if m.msgs.iter().any(|msg| msg.clone().message.to_ascii_lowercase().contains("transcod")) { res.got_transcode_report = true; } // Got thumbnail update message? - else if m.msgs.iter().any(|msg| { - msg.clone().message.to_ascii_lowercase().contains("thumb") - }) { + else if m.msgs.iter().any(|msg| msg.clone().message.to_ascii_lowercase().contains("thumb")) { res.got_thumbnail_report = true; } } - } + }, _ => (), }; @@ -359,16 +328,8 @@ mod integration_test { if still_waiting { thread::sleep(Duration::from_millis(100)); } else { - println!( - "...waiting done, expected reports received. Doing OpenNavigationPage ..." - ); - send_server_cmd!( - ws, - OpenNavigationPage, - OpenNavigationPage { - ..Default::default() - } - ); + println!("...waiting done, expected reports received. Doing OpenNavigationPage ..."); + send_server_cmd!(ws, OpenNavigationPage, OpenNavigationPage {..Default::default()}); break; } } @@ -377,37 +338,26 @@ mod integration_test { let reports_received_at = std::time::Instant::now(); // Wait for page with media file to be shown - 'waitloop: for _ in 0..80 { - if reports_received_at.elapsed().as_millis() - > (WAIT_AFTER_REPORTS_TIMEOUT_SECS * 1000).into() - { + 'waitloop: for _ in 0..80 + { + if reports_received_at.elapsed().as_millis() > (WAIT_AFTER_REPORTS_TIMEOUT_SECS*1000).into() { panic!("Timeout checking API messages after transcode/thumbnail completion"); } - match crate::api_server::test_utils::expect_parsed::(&mut ws) - .await - .cmd - { + match crate::api_server::test_utils::expect_parsed::(&mut ws).await.cmd { + Some(s2c::Cmd::ShowMessages(m)) => { tracing::info!("Got ShowMessages (while waiting for ShowPage. 
Ignoring."); - res.got_progress_report |= m - .msgs - .iter() - .any(|msg| msg.r#type == proto::user_message::Type::Progress as i32); - assert!( - !m.msgs - .iter() - .any(|msg| msg.r#type == proto::user_message::Type::Error as i32), - "Got ERROR type message while waiting for ShowPage" - ); - } + res.got_progress_report |= m.msgs.iter().any(|msg| msg.r#type == proto::user_message::Type::Progress as i32); + assert!(!m.msgs.iter().any(|msg| msg.r#type == proto::user_message::Type::Error as i32), "Got ERROR type message while waiting for ShowPage"); + }, Some(s2c::Cmd::ShowPage(p)) => { let pitems = p.page_items; - assert!(pitems.len() == 1 + 1); + assert!(pitems.len() == 1+1); match &pitems[0].item { - Some(proto::page_item::Item::Html(_)) => {} + Some(proto::page_item::Item::Html(_)) => {}, _ => panic!("Expected HTML for page item 0"), }; @@ -440,10 +390,7 @@ mod integration_test { if let Some(pd) = v.preview_data { if let Some(thumb_url) = pd.thumb_url { - assert!( - pm.thumbs_done.is_some(), - "thumbs_done not set in processing metadata but got thumb_url" - ); + assert!(pm.thumbs_done.is_some(), "thumbs_done not set in processing metadata but got thumb_url"); res.thumbs_complete = true; } if let Some(thumb_sheet) = pd.thumb_sheet { @@ -454,34 +401,26 @@ mod integration_test { } } - if (expect_thumbnail == res.thumbs_complete) - && (expect_transcode == res.transcode_complete) - { + if (expect_thumbnail == res.thumbs_complete) && (expect_transcode == res.transcode_complete) { break 'waitloop; } else { tracing::info!("Not done yet: transcode_complete = {} (expected: {}), thumbs_complete = {} (expected: {})...", res.transcode_complete, expect_transcode, res.thumbs_complete, expect_thumbnail); + } - } + }, something_else => { - tracing::info!( - "Got UNEXPECTED (not necessarily a bug) message: {:?}", - something_else - ); - } + tracing::info!("Got UNEXPECTED (not necessarily a bug) message: {:?}", something_else); + }, } thread::sleep(Duration::from_secs_f32(0.1)); } - tracing::info!( - "Transcode complete: {} (expeted: {}), thumbs complete: {} (expected: {})", - res.transcode_complete, - expect_transcode, - res.thumbs_complete, - expect_thumbnail - ); + tracing::info!("Transcode complete: {} (expeted: {}), thumbs complete: {} (expected: {})", + res.transcode_complete, expect_transcode, + res.thumbs_complete, expect_thumbnail); if let Some((data_dir, input_filename)) = check_file_outputs { let vid_dir = data_dir.join("videos").join(vid); @@ -499,9 +438,7 @@ mod integration_test { if expect_thumbsheet { assert!(u32::from_str(&res.ts_cols).ok().unwrap() > 0); assert!(u32::from_str(&res.ts_rows).ok().unwrap() > 0); - assert!(thumb_dir - .join(format!("sheet-{}x{}.webp", res.ts_cols, res.ts_rows)) - .is_file()); + assert!(thumb_dir.join(format!("sheet-{}x{}.webp", res.ts_cols, res.ts_rows)).is_file()); } if expect_thumbnail || expect_thumbsheet { assert!(thumb_dir.join("thumbnail.log").is_file()); @@ -511,29 +448,30 @@ mod integration_test { res } - async fn wait_for_any_client_msg(mut ws: &mut crate::api_server::test_utils::WsClient) { - for _ in 0..(60 * 2 * 10) { - match crate::api_server::test_utils::try_get_parsed::(&mut ws) - .await - .map(|c| c.cmd) - .flatten() - { + + async fn wait_for_any_client_msg(mut ws: &mut crate::api_server::test_utils::WsClient) + { + for _ in 0..(60*2*10) + { + match crate::api_server::test_utils::try_get_parsed::(&mut ws).await.map(|c| c.cmd).flatten() { Some(x) => { tracing::info!("Got message: {:?}", x); return; - } + }, None => { 
thread::sleep(Duration::from_millis(50)); - } + }, }; } } + #[test] #[serial] #[traced_test] #[cfg(feature = "include_slow_tests")] - fn test_video_mov_ingest_and_transcode() -> anyhow::Result<()> { + fn test_video_mov_ingest_and_transcode() -> anyhow::Result<()> + { cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 500_000, None, None, IngestUsernameFrom::FileOwner] // Copy test file to incoming dir let mov_file = "NASA_Red_Lettuce_excerpt.mov"; @@ -550,11 +488,13 @@ mod integration_test { Ok(()) } + #[test] #[serial] #[traced_test] #[cfg(feature = "include_slow_tests")] - fn test_video_12bit_dnxhr_alpha_ingest_and_transcode() -> anyhow::Result<()> { + fn test_video_12bit_dnxhr_alpha_ingest_and_transcode() -> anyhow::Result<()> + { cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 500_000, None, None, IngestUsernameFrom::FileOwner] // Copy test file to incoming dir let mov_file = "alpha-test_dnxhr-444-12bit-dnxhr.mov"; @@ -570,11 +510,13 @@ mod integration_test { Ok(()) } + #[test] #[serial] #[traced_test] #[cfg(feature = "include_slow_tests")] - fn test_audio_ingest_and_transcode() -> anyhow::Result<()> { + fn test_audio_ingest_and_transcode() -> anyhow::Result<()> + { cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 500_000, None, None, IngestUsernameFrom::FileOwner] // Copy test file to incoming dir let audio_file_name = "drunkards-special-short-mono.wav"; @@ -604,7 +546,8 @@ mod integration_test { #[serial] #[traced_test] #[cfg(feature = "include_slow_tests")] - fn test_mp3_full_integration() -> anyhow::Result<()> { + fn test_mp3_full_integration() -> anyhow::Result<()> + { cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 500_000, None, None, IngestUsernameFrom::FileOwner] // Copy the MP3 file to incoming dir and test full integration let audio_file_name = "Apollo11_countdown.mp3"; @@ -636,10 +579,11 @@ mod integration_test { #[test] #[serial] #[traced_test] - fn test_mp3_metadata_detection() -> anyhow::Result<()> { + fn test_mp3_metadata_detection() -> anyhow::Result<()> + { + use crossbeam_channel; use crate::video_pipeline::metadata_reader; use crate::video_pipeline::IncomingFile; - use crossbeam_channel; use std::collections::HashMap; // Test that MP3 files are correctly detected as Audio @@ -669,21 +613,11 @@ mod integration_test { match result { metadata_reader::MetadataResult::Ok(metadata) => { // After the fix, MP3 files should be correctly detected as Audio - assert_eq!( - format!("{:?}", metadata.media_type), - "Audio", - "MP3 file should be detected as Audio" - ); + assert_eq!(format!("{:?}", metadata.media_type), "Audio", "MP3 file should be detected as Audio"); // Duration should be reasonable for the test file (~25 seconds) - assert!( - metadata.duration > rust_decimal::Decimal::from(20), - "Duration should be > 20 seconds" - ); - assert!( - metadata.duration < rust_decimal::Decimal::from(30), - "Duration should be < 30 seconds" - ); + assert!(metadata.duration > rust_decimal::Decimal::from(20), "Duration should be > 20 seconds"); + assert!(metadata.duration < rust_decimal::Decimal::from(30), "Duration should be < 30 seconds"); } metadata_reader::MetadataResult::Err(e) => { panic!("Metadata reading failed: {:?}", e); @@ -698,10 +632,12 @@ mod integration_test { Ok(()) } + #[test] #[serial] #[traced_test] - fn test_image_ingest_and_transcode() -> anyhow::Result<()> { + fn test_image_ingest_and_transcode() -> anyhow::Result<()> + { cs_main_test! 
{[ws, data_dir, incoming_dir, _org_conn, 500_000, None, None, IngestUsernameFrom::FileOwner] let image_file_name = "NASA-48410_PIA25967_-_MAV_Test.jpeg"; data_dir.copy_from("src/tests/assets/", &[image_file_name]).unwrap(); @@ -712,19 +648,19 @@ mod integration_test { Ok(()) } + + #[test] #[serial] #[traced_test] - fn test_existing_v056_migrate_and_image_ingest() -> anyhow::Result<()> { + fn test_existing_v056_migrate_and_image_ingest() -> anyhow::Result<()> + { let (_db, temp_dir, _videos, _comments) = crate::database::tests::make_test_db(); // Overwrite the test DB with one from assets dir, for migration testing on existing DB let db_file = temp_dir.path().join("clapshot.sqlite"); - std::fs::copy( - "src/tests/assets/databases/clapshot-migration-test-1_v056.sqlite", - &db_file, - ) - .expect("Failed to copy test DB for migration test"); + std::fs::copy("src/tests/assets/databases/clapshot-migration-test-1_v056.sqlite", &db_file) + .expect("Failed to copy test DB for migration test"); cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 500_000, None, Some(temp_dir), IngestUsernameFrom::FileOwner] let image_file_name = "NASA-48410_PIA25967_-_MAV_Test.jpeg"; @@ -738,23 +674,21 @@ mod integration_test { #[test] #[serial] #[traced_test] - fn test_organizer_existing_v056_migrate() -> anyhow::Result<()> { + fn test_organizer_existing_v056_migrate() -> anyhow::Result<()> + { // This supplements the other v056_migrate test, by testing with Organizer too. - match std::env::var("TEST_ORG_CMD").ok() { + match std::env::var("TEST_ORG_CMD").ok() + { Some(org_cmd) => { let (_db, temp_dir, _videos, _comments) = crate::database::tests::make_test_db(); // Overwrite the test DB with one from assets dir, for migration testing on existing DB let db_file = temp_dir.path().join("clapshot.sqlite"); - std::fs::copy( - "src/tests/assets/databases/clapshot-migration-test-1_v056.sqlite", - &db_file, - ) - .expect("Failed to copy test DB for migration test"); + std::fs::copy("src/tests/assets/databases/clapshot-migration-test-1_v056.sqlite", &db_file).expect("Failed to copy test DB for migration test"); cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 500_000, Some(org_cmd), Some(temp_dir), IngestUsernameFrom::FileOwner] // If we get any client messages, Organizer migration was successful and API server was started wait_for_any_client_msg(&mut ws).await; } - } + }, None => { tracing::info!("Organizer cmd not specified, skipping organizer test"); } @@ -765,22 +699,20 @@ mod integration_test { #[test] #[serial] #[traced_test] - fn test_organizer_existing_v061_migrate() -> anyhow::Result<()> { - match std::env::var("TEST_ORG_CMD").ok() { + fn test_organizer_existing_v061_migrate() -> anyhow::Result<()> + { + match std::env::var("TEST_ORG_CMD").ok() + { Some(org_cmd) => { let (_db, temp_dir, _videos, _comments) = crate::database::tests::make_test_db(); // Overwrite the test DB with one from assets dir, for migration testing on existing DB let db_file = temp_dir.path().join("clapshot.sqlite"); - std::fs::copy( - "src/tests/assets/databases/clapshot-migration-test-2_v061.sqlite", - &db_file, - ) - .expect("Failed to copy test DB for migration test"); + std::fs::copy("src/tests/assets/databases/clapshot-migration-test-2_v061.sqlite", &db_file).expect("Failed to copy test DB for migration test"); cs_main_test! 
{[ws, data_dir, incoming_dir, _org_conn, 500_000, Some(org_cmd), Some(temp_dir), IngestUsernameFrom::FileOwner] // If we get any client messages, Organizer migration was successful and API server was started wait_for_any_client_msg(&mut ws).await; } - } + }, None => { tracing::info!("Organizer cmd not specified, skipping organizer test"); } @@ -788,23 +720,26 @@ mod integration_test { Ok(()) } + + + #[test] #[serial] #[traced_test] - fn test_organizer_run_organizer_tests() -> anyhow::Result<()> { + fn test_organizer_run_organizer_tests() -> anyhow::Result<()> + { // Environment variable TEST_ORG_CMD can be used to specify a command // to start organizer. If not specified, the test will be skipped. - match std::env::var("TEST_ORG_CMD").ok() { + match std::env::var("TEST_ORG_CMD").ok() + { Some(cmd) => { + // `cargo test` captures stdout/stderr, so we can't list the test to console, // put them in a log file instead. Open & truncate here, so it's empty if // listing fails. - let log_path = - std::env::var("TEST_ORG_LOG").unwrap_or("organizer_tests.log".into()); + let log_path = std::env::var("TEST_ORG_LOG").unwrap_or("organizer_tests.log".into()); let log = Arc::new(Mutex::new(std::io::BufWriter::new( - std::fs::File::create(&log_path) - .expect(format!("Failed to create log file '{}'", &log_path).as_str()), - ))); + std::fs::File::create(&log_path).expect(format!("Failed to create log file '{}'", &log_path).as_str())))); fn write_log(writer: &Arc>, s: &str) { let mut writer = writer.lock().unwrap(); @@ -813,8 +748,7 @@ mod integration_test { println!("{}", s); } - let test_results: Arc>> = - Arc::new(Mutex::new(Vec::new())); + let test_results: Arc>> = Arc::new(Mutex::new(Vec::new())); // Connect to organizer and list its test names write_log(&log, " Retrieving organizer tests..."); @@ -843,42 +777,25 @@ mod integration_test { println!("\n\n^^^ (that was just a call listing organizer tests, now running them...) ^^^"); // Call gRPC run_test() for each test name. Store results in test_results. - let mut test_names: Vec = test_names - .lock() - .unwrap() - .iter() - .map(|s| s.clone()) - .collect(); + let mut test_names: Vec = test_names.lock().unwrap().iter().map(|s| s.clone()).collect(); // Check for TEST_ORG_FILTER environment variable to filter tests - if let Some(filter) = std::env::var("TEST_ORG_FILTER") - .ok() - .filter(|s| !s.is_empty()) - { - write_log( - &log, - format!(" Filtering tests with pattern: '{}'", filter).as_str(), - ); + if let Some(filter) = std::env::var("TEST_ORG_FILTER").ok().filter(|s| !s.is_empty()) { + write_log(&log, format!(" Filtering tests with pattern: '{}'", filter).as_str()); test_names.retain(|name| name.contains(&filter)); if test_names.is_empty() { - write_log( - &log, - format!(" No tests match filter '{}'", filter).as_str(), - ); + write_log(&log, format!(" No tests match filter '{}'", filter).as_str()); panic!("No organizer tests match the filter '{}'", filter); } } - write_log( - &log, - format!(" Running {} organizer tests", test_names.len()).as_str(), - ); + write_log(&log, format!(" Running {} organizer tests", test_names.len()).as_str()); - for (i, test_name) in test_names.iter().enumerate() { + for (i, test_name) in test_names.iter().enumerate() + { println!("\n\n\n------------ Running organizer test {}/{}: '{}'... 
------------\n\n\n", i+1, test_names.len(), test_name); - let (_db, temp_dir, _videos, _comments) = - crate::database::tests::make_test_db(); + let (_db, temp_dir, _videos, _comments) = crate::database::tests::make_test_db(); let test_results = test_results.clone(); let log = log.clone(); @@ -910,44 +827,24 @@ mod integration_test { // Write test results to log file and print to console, mimicking cargo test output let test_results = test_results.lock().unwrap(); - for (test_name, res) in test_results.iter() { + for (test_name, res) in test_results.iter() + { if let Some(err) = &res.error { write_log(&log, format!("\n\n").as_str()); - write_log( - &log, - format!( - "==================== FAILED ORG TEST: '{}' ====================", - test_name - ) - .as_str(), - ); + write_log(&log, format!("==================== FAILED ORG TEST: '{}' ====================", test_name).as_str()); write_log(&log, format!("(NOTE! For Clapshot Server -captured logs, see the cargo test output for integration_test::test_organizer!)").as_str()); - write_log( - &log, - format!("\n---------------- RunTestResponse.output ----------------") - .as_str(), - ); + write_log(&log, format!("\n---------------- RunTestResponse.output ----------------").as_str()); write_log(&log, format!("{}", res.output).as_str()); - write_log( - &log, - format!("\n---------------- RunTestResponse.error ----------------") - .as_str(), - ); + write_log(&log, format!("\n---------------- RunTestResponse.error ----------------").as_str()); write_log(&log, format!("{}", err).as_str()); write_log(&log, format!("\n\n").as_str()); } } if test_results.iter().any(|(_, res)| res.error.is_some()) { - write_log( - &log, - format!("### Some organizer tests failed ###").as_str(), - ); - panic!( - "Some organizer tests failed, output also logged into '{}'", - log_path - ); + write_log(&log, format!("### Some organizer tests failed ###").as_str()); + panic!("Some organizer tests failed, output also logged into '{}'", log_path); } - } + }, None => { tracing::info!("Organizer cmd not specified, skipping organizer test"); } @@ -958,7 +855,8 @@ mod integration_test { #[test] #[serial] #[traced_test] - fn test_ingest_username_from_file_owner() -> anyhow::Result<()> { + fn test_ingest_username_from_file_owner() -> anyhow::Result<()> + { cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 2500_000, None, None, IngestUsernameFrom::FileOwner] // Copy test file to incoming dir (owned by current user) let mp4_file = "60fps-example.mp4"; @@ -988,7 +886,8 @@ mod integration_test { #[test] #[serial] #[traced_test] - fn test_ingest_username_from_folder_name() -> anyhow::Result<()> { + fn test_ingest_username_from_folder_name() -> anyhow::Result<()> + { cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 2500_000, None, None, IngestUsernameFrom::FolderName, Some("test_folder_user".to_string())] // Create user folder structure with specific test username let current_user = whoami::username(); @@ -1024,7 +923,8 @@ mod integration_test { #[test] #[serial] #[traced_test] - fn test_ingest_username_from_folder_name_nested() -> anyhow::Result<()> { + fn test_ingest_username_from_folder_name_nested() -> anyhow::Result<()> + { cs_main_test! 
{[ws, data_dir, incoming_dir, _org_conn, 2500_000, None, None, IngestUsernameFrom::FolderName, Some("test_nested_user".to_string())] // Create folder structure with specific test username let current_user = whoami::username(); @@ -1056,4 +956,5 @@ mod integration_test { } Ok(()) } + } diff --git a/server/src/video_pipeline/cleanup_rejected.rs b/server/src/video_pipeline/cleanup_rejected.rs index 21da5558..b5ec76e3 100644 --- a/server/src/video_pipeline/cleanup_rejected.rs +++ b/server/src/video_pipeline/cleanup_rejected.rs @@ -1,24 +1,18 @@ -use anyhow::{anyhow, bail}; use std::path::Path; +use anyhow::{anyhow, bail}; use tracing; + /// Clean up after a processing error. Attempts to preserve the original file /// by moving it under the rejected directory. Then deletes any dangling files that were /// created during the failed ingestion. -pub fn clean_up_rejected_file( - data_dir: &Path, - src_file: &Path, - media_file_id: Option, -) -> anyhow::Result<()> { +pub fn clean_up_rejected_file(data_dir: &Path, src_file: &Path, media_file_id: Option) -> anyhow::Result<()> +{ // Create rejected directory if it doesn't exist let rejected_dir = data_dir.join("rejected"); - if !rejected_dir.exists() { - std::fs::create_dir(&rejected_dir)?; - }; + if !rejected_dir.exists() { std::fs::create_dir(&rejected_dir)?; }; - let src_file_name = src_file - .file_name() - .ok_or(anyhow!("Invalid filename {:?}", src_file))?; + let src_file_name = src_file.file_name().ok_or(anyhow!("Invalid filename {:?}", src_file))?; let move_to = rejected_dir.join(src_file_name); if !move_to.exists() { // Move the original file to the root of rejected directory @@ -28,11 +22,9 @@ pub fn clean_up_rejected_file( // Use media file id if available, otherwise an UUID4. let extra_dir = match &media_file_id { Some(id) => rejected_dir.join(id), - None => rejected_dir.join(uuid::Uuid::new_v4().to_string()), - }; - if !extra_dir.exists() { - std::fs::create_dir(&extra_dir)?; + None => rejected_dir.join( uuid::Uuid::new_v4().to_string() ), }; + if !extra_dir.exists() { std::fs::create_dir(&extra_dir)?; }; let move_to = extra_dir.join(src_file_name); if !move_to.exists() { diff --git a/server/src/video_pipeline/incoming_monitor.rs b/server/src/video_pipeline/incoming_monitor.rs index 6d4b7fe8..186af68a 100644 --- a/server/src/video_pipeline/incoming_monitor.rs +++ b/server/src/video_pipeline/incoming_monitor.rs @@ -2,20 +2,20 @@ #![allow(unused_variables)] #![allow(unused_imports)] -use anyhow::anyhow; -use async_std::net::Incoming; -use crossbeam_channel::{Receiver, RecvTimeoutError, Sender}; -use file_owner::PathExt; -use path_absolutize::*; use std::borrow::Cow; use std::collections::HashMap; use std::os; -use std::path::{Path, PathBuf}; use std::time::Duration; +use std::path::{Path, PathBuf}; +use file_owner::PathExt; +use async_std::net::Incoming; +use crossbeam_channel::{Sender, Receiver, RecvTimeoutError}; +use path_absolutize::*; use tracing; +use anyhow::anyhow; -use super::{cleanup_rejected::clean_up_rejected_file, IngestUsernameFrom}; use crate::video_pipeline::metadata_reader; +use super::{cleanup_rejected::clean_up_rejected_file, IngestUsernameFrom}; pub enum Void {} @@ -26,20 +26,13 @@ pub fn run_forever( resubmit_delay: f32, incoming_sender: Sender, exit_evt: Receiver, - ingest_username_from: IngestUsernameFrom, -) -> anyhow::Result<()> { + ingest_username_from: IngestUsernameFrom) -> anyhow::Result<()> +{ let _span = tracing::info_span!("INCOMING").entered(); - tracing::debug!( - dir = data_dir.to_str(), - poll_interval = 
poll_interval, - resubmit_delay = resubmit_delay, - "Starting." - ); + tracing::debug!(dir=data_dir.to_str(), poll_interval=poll_interval, resubmit_delay=resubmit_delay, "Starting."); - let mut last_tested_size: std::collections::HashMap = - std::collections::HashMap::new(); - let mut submission_time: std::collections::HashMap = - std::collections::HashMap::new(); + let mut last_tested_size: std::collections::HashMap = std::collections::HashMap::new(); + let mut submission_time: std::collections::HashMap = std::collections::HashMap::new(); loop { // Remove expired submissions @@ -47,14 +40,13 @@ pub fn run_forever( submission_time.retain(|_, t| now.duration_since(t.clone()).as_secs_f32() < resubmit_delay); match exit_evt.recv_timeout(Duration::from_secs_f32(poll_interval)) { - Err(RecvTimeoutError::Disconnected) => { - break; - } + Err(RecvTimeoutError::Disconnected) => { break; } _ => {} } //tracing::trace!("Polling dir."); match incoming_dir.read_dir() { Ok(entries) => { + // Collect files from incoming directory and one level of subdirectories let mut names_and_sizes = Vec::new(); for entry in entries { @@ -71,10 +63,7 @@ pub fn run_forever( if let Ok(subentry) = subentry { if let Ok(sub_metadata) = subentry.metadata() { if sub_metadata.is_file() { - names_and_sizes.push(( - subentry.path(), - sub_metadata.len(), - )); + names_and_sizes.push((subentry.path(), sub_metadata.len())); } } } @@ -86,60 +75,38 @@ pub fn run_forever( } fn get_file_owner_name(path: &Path) -> anyhow::Result { - path.owner()? - .name()? - .ok_or(anyhow!("Unnamed OS user for file {:?}", path)) + path.owner()?.name()?.ok_or(anyhow!("Unnamed OS user for file {:?}", path)) } - fn get_username_from_folder( - path: &Path, - incoming_dir: &Path, - ) -> anyhow::Result { - let relative_path = path.strip_prefix(incoming_dir).map_err(|e| { - anyhow!( - "File {:?} is not within incoming directory {:?}: {}", - path, - incoming_dir, - e - ) - })?; - - let first_component = relative_path.components().next().ok_or(anyhow!( - "File {:?} has no parent directory components", - relative_path - ))?; - + fn get_username_from_folder(path: &Path, incoming_dir: &Path) -> anyhow::Result { + let relative_path = path.strip_prefix(incoming_dir) + .map_err(|e| anyhow!("File {:?} is not within incoming directory {:?}: {}", path, incoming_dir, e))?; + + let first_component = relative_path.components().next() + .ok_or(anyhow!("File {:?} has no parent directory components", relative_path))?; + match first_component { - std::path::Component::Normal(username) => username - .to_str() - .ok_or(anyhow!( - "Username directory name is not valid UTF-8: {:?}", - username - )) - .map(|s| s.to_string()), - _ => Err(anyhow!( - "Invalid directory structure for file {:?}", - relative_path - )), + std::path::Component::Normal(username) => { + username.to_str() + .ok_or(anyhow!("Username directory name is not valid UTF-8: {:?}", username)) + .map(|s| s.to_string()) + }, + _ => Err(anyhow!("Invalid directory structure for file {:?}", relative_path)) } } for (path, sz) in names_and_sizes { - let _span = - tracing::debug_span!("Considering file.", path = path.to_str()).entered(); + let _span = tracing::debug_span!("Considering file.", path=path.to_str()).entered(); if !submission_time.contains_key(&path) { // Check if file is still being written to - if sz > 1 && sz != 4096 { - // 4096 = size of an empty file on ext4 + if sz > 1 && sz != 4096 { // 4096 = size of an empty file on ext4 if &sz == last_tested_size.get(&path).unwrap_or(&0) { let username_result = match 
ingest_username_from { IngestUsernameFrom::FileOwner => get_file_owner_name(&path), - IngestUsernameFrom::FolderName => { - get_username_from_folder(&path, &incoming_dir) - } + IngestUsernameFrom::FolderName => get_username_from_folder(&path, &incoming_dir), }; - + match username_result { Err(e) => { tracing::error!(details=%e, "Cannot ingest. Failed to get username for file."); @@ -150,33 +117,25 @@ pub fn run_forever( } Ok(username) => { tracing::info!("Submitting for processing."); - submission_time - .insert(path.clone(), std::time::Instant::now()); - if let Err(e) = incoming_sender.send(super::IncomingFile { - file_path: path.clone(), - user_id: username, - cookies: HashMap::new(), - transcode_preference: super::TranscodePreference::Auto, - }) { + submission_time.insert(path.clone(), std::time::Instant::now()); + if let Err(e) = incoming_sender.send( + super::IncomingFile {file_path: path.clone(), user_id: username, cookies: HashMap::new(), transcode_preference: super::TranscodePreference::Auto}) { tracing::error!(details=%e, "Failed to send incoming file to processing queue."); } - } + }, }; } else { tracing::debug!("File '{:?}' apparently still being written to. Skipping for now...", path); last_tested_size.insert(path, sz); - } - } - } - } - } + }}}} + }, Err(e) => { // Directory listing failed. Cannot continue monitoring. tracing::error!(details=%e, "Error monitoring dir {:?} - aborting.", - match incoming_dir.absolutize() { - Ok(Cow::Owned(p)) => p, // Got absolute path - _ => incoming_dir.clone(), // Some error happened, use original - }); + match incoming_dir.absolutize() { + Ok(Cow::Owned(p)) => p, // Got absolute path + _ => incoming_dir.clone(), // Some error happened, use original + }); break; } } diff --git a/server/src/video_pipeline/metadata_reader.rs b/server/src/video_pipeline/metadata_reader.rs index b3c030c8..6c299a52 100644 --- a/server/src/video_pipeline/metadata_reader.rs +++ b/server/src/video_pipeline/metadata_reader.rs @@ -1,14 +1,14 @@ -use super::{DetailedMsg, IncomingFile}; -use crossbeam_channel::{Receiver, RecvError, Sender}; -use rust_decimal::prelude::*; -use serde_json; -use std::path::PathBuf; -use std::str::FromStr; -use std::sync::atomic::AtomicBool; -use std::sync::atomic::Ordering; use std::{collections::HashMap, process::Command}; +use std::sync::atomic::Ordering; use threadpool::ThreadPool; +use std::path::PathBuf; +use serde_json; +use crossbeam_channel::{Sender, Receiver, RecvError}; use tracing; +use rust_decimal::prelude::*; +use std::sync::atomic::AtomicBool; +use std::str::FromStr; +use super::{IncomingFile, DetailedMsg}; #[derive(Debug, Clone)] pub enum MediaType { @@ -37,6 +37,7 @@ impl FromStr for MediaType { } } + #[derive(Debug, Clone)] pub struct Metadata { pub src_file: PathBuf, @@ -48,7 +49,7 @@ pub struct Metadata { pub fps: Decimal, pub bitrate: u32, pub metadata_all: String, - pub upload_cookies: HashMap, // Cookies from the upload, not read from the file + pub upload_cookies: HashMap, // Cookies from the upload, not read from the file pub transcode_preference: super::TranscodePreference, } @@ -58,16 +59,16 @@ pub type MetadataResult = Result; /// /// # Arguments /// * `file_path` - Path to the file to be analyzed -fn run_mediainfo(file: &PathBuf) -> Result { +fn run_mediainfo( file: &PathBuf ) -> Result +{ // Link to source file to a temporary file to avoid problems with // special characters in the path with mediainfo let uuid = uuid::Uuid::new_v4(); let file_dir = file.parent().ok_or("Failed to get parent directory")?; let 
temp_dir = file_dir.join(uuid.to_string()); - + // Preserve original file extension to help mediainfo detect format correctly - let extension = file - .extension() + let extension = file.extension() .map(|ext| format!(".{}", ext.to_string_lossy())) .unwrap_or_default(); let link_path = temp_dir.join(format!("tempname{}", extension)); @@ -94,30 +95,25 @@ fn run_mediainfo(file: &PathBuf) -> Result { } } - match mediainfo_res { + match mediainfo_res + { Ok(output) => { if output.status.success() { { - let json_res = String::from_utf8(output.stdout).map_err(|e| e.to_string())?; + let json_res = String::from_utf8(output.stdout) + .map_err(|e| e.to_string())?; serde_json::from_str(&json_res) - } - .map_err(|e| format!("Error parsing mediainfo JSON: {:?}", e)) + }.map_err(|e| format!("Error parsing mediainfo JSON: {:?}", e)) } else { - tracing::error!( - "Mediainfo stdout: {}", - String::from_utf8_lossy(&output.stdout) - ); - tracing::error!( - "Mediainfo stderr: {}", - String::from_utf8_lossy(&output.stderr) - ); - Err(format!( - "Mediainfo exited with error: {}", - String::from_utf8_lossy(&output.stderr) - )) + tracing::error!("Mediainfo stdout: {}", String::from_utf8_lossy(&output.stdout)); + tracing::error!("Mediainfo stderr: {}", String::from_utf8_lossy(&output.stderr)); + Err( format!("Mediainfo exited with error: {}", + String::from_utf8_lossy(&output.stderr))) } + }, + Err(e) => { + Err(format!("Failed to execute mediainfo: {}", e)) } - Err(e) => Err(format!("Failed to execute mediainfo: {}", e)), } } @@ -129,92 +125,58 @@ fn run_mediainfo(file: &PathBuf) -> Result { /// * `json` - Mediainfo JSON output /// * `args` - Metadata request arguments /// * `get_file_size` - Closure to get the file size (only called if bitrate is not available and we need to calculate it) -fn extract_variables( - json: serde_json::Value, - args: &IncomingFile, - get_file_size: F, -) -> Result -where - F: FnOnce() -> Result, +fn extract_variables(json: serde_json::Value, args: &IncomingFile, get_file_size: F) -> Result + where F: FnOnce() -> Result { - let tracks = json["media"]["track"] - .as_array() - .ok_or("No media tracks found")?; + let tracks = json["media"]["track"].as_array().ok_or("No media tracks found")?; // Video file if let Some(video_track) = tracks.iter().find(|t| t["@type"] == "Video") { + // Bitrate is tricky. It might be in "BitRate" or "BitRate_Nominal". If it's not in either, we'll estimate it. - let duration = Decimal::from_str( - video_track["Duration"] - .as_str() - .ok_or("Duration not found")?, - ) - .map_err(|_| "Invalid duration")?; + let duration = Decimal::from_str(video_track["Duration"].as_str().ok_or("Duration not found")?).map_err(|_| "Invalid duration")?; let bitrate = { - let bitrate_str = video_track["BitRate"] - .as_str() + let bitrate_str = video_track["BitRate"].as_str() .or(video_track["BitRate_Nominal"].as_str()); match bitrate_str { - Some(bit_rate_str) => bit_rate_str - .parse() - .map_err(|_| format!("Invalid bitrate: {}", bit_rate_str))?, + Some(bit_rate_str) => bit_rate_str.parse().map_err(|_| format!("Invalid bitrate: {}", bit_rate_str))?, None => { let duration = duration.to_f32().ok_or("Invalid duration")?; ((get_file_size()? as f32) * 8.0 / duration) as u32 - } - } - }; + }}}; Ok(Metadata { src_file: args.file_path.clone(), user_id: args.user_id.clone(), - total_frames: video_track["FrameCount"] - .as_str() - .ok_or("FrameCount not found")? 
- .parse() - .map_err(|_| "Invalid frame count".to_string())?, + total_frames: video_track["FrameCount"].as_str().ok_or("FrameCount not found")?.parse().map_err(|_| "Invalid frame count".to_string())?, duration, media_type: MediaType::Video, - orig_codec: video_track["Format"] - .as_str() - .ok_or("No codec found")? - .to_string(), - fps: Decimal::from_str(video_track["FrameRate"].as_str().ok_or("FPS not found")?) - .map_err(|_| "Invalid FPS".to_string())?, + orig_codec: video_track["Format"].as_str().ok_or("No codec found")?.to_string(), + fps: Decimal::from_str(video_track["FrameRate"].as_str().ok_or("FPS not found")?).map_err(|_| "Invalid FPS".to_string())?, bitrate, metadata_all: json.to_string(), upload_cookies: args.cookies.clone(), transcode_preference: args.transcode_preference, }) } + // Audio file else if let Some(audio_track) = tracks.iter().find(|t| t["@type"] == "Audio") { Ok(Metadata { src_file: args.file_path.clone(), user_id: args.user_id.clone(), total_frames: 0, - duration: Decimal::from_str( - audio_track["Duration"] - .as_str() - .ok_or("Duration not found")?, - ) - .map_err(|_| "Invalid duration".to_string())?, + duration: Decimal::from_str(audio_track["Duration"].as_str().ok_or("Duration not found")?).map_err(|_| "Invalid duration".to_string())?, media_type: MediaType::Audio, - orig_codec: audio_track["Format"] - .as_str() - .ok_or("No codec found")? - .to_string(), + orig_codec: audio_track["Format"].as_str().ok_or("No codec found")?.to_string(), fps: Decimal::from_u8(0).unwrap(), - bitrate: audio_track["BitRate"] - .as_str() - .ok_or("Bitrate not found")? - .parse() - .map_err(|_| "Invalid bitrate".to_string())?, + bitrate: audio_track["BitRate"].as_str().ok_or("Bitrate not found")?.parse().map_err(|_| "Invalid bitrate".to_string())?, metadata_all: json.to_string(), upload_cookies: args.cookies.clone(), transcode_preference: args.transcode_preference, }) } + // Image file else if let Some(image_track) = tracks.iter().find(|t| t["@type"] == "Image") { Ok(Metadata { @@ -223,10 +185,7 @@ where total_frames: 1, duration: Decimal::from_u8(0).unwrap(), media_type: MediaType::Image, - orig_codec: image_track["Format"] - .as_str() - .ok_or("No codec found")? - .to_string(), + orig_codec: image_track["Format"].as_str().ok_or("No codec found")?.to_string(), fps: Decimal::from_u8(0).unwrap(), bitrate: 0, metadata_all: json.to_string(), @@ -239,15 +198,10 @@ where } /// Run mediainfo and extract the metadata -fn read_metadata_from_file(args: &IncomingFile) -> Result { +fn read_metadata_from_file(args: &IncomingFile) -> Result +{ let json = run_mediainfo(&args.file_path)?; - extract_variables(json, args, || { - Ok(args - .file_path - .metadata() - .map_err(|e| format!("Failed to get file size: {:?}", e))? - .len()) - }) + extract_variables(json, args, || Ok(args.file_path.metadata().map_err(|e| format!("Failed to get file size: {:?}", e))?.len())) } /// Listens to inq for new files to scan for metadata with Mediainfo shell command. 
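A worked example helps here: when mediainfo reports neither "BitRate" nor "BitRate_Nominal", extract_variables() above falls back to estimating the bitrate from file size and playback duration. A minimal sketch of that arithmetic (the helper name is hypothetical; the real code inlines the expression):

fn estimate_bitrate(file_size_bytes: u64, duration_secs: f32) -> u32 {
    // total bits in the file, spread over the clip's duration in seconds
    ((file_size_bytes as f32) * 8.0 / duration_secs) as u32
}

For a 1000-byte file lasting 5.0 s this yields 1600 bps, which is exactly what test_extract_variables_missing_bitrate below asserts as 1000*8/5.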
@@ -259,12 +213,13 @@ fn read_metadata_from_file(args: &IncomingFile) -> Result { /// * `inq` - channel to receive new files to process /// * `outq` - channel to send results to /// * `n_workers` - number of threads to use for processing -pub fn run_forever(inq: Receiver, outq: Sender, n_workers: usize) { +pub fn run_forever(inq: Receiver, outq: Sender, n_workers: usize) +{ let span = tracing::info_span!("MD").entered(); tracing::debug!(n_workers = n_workers, "Starting."); let pool = ThreadPool::new(n_workers); - let pool_is_healthy = std::sync::Arc::new(AtomicBool::new(true)); + let pool_is_healthy = std::sync::Arc::new(AtomicBool::new(true)); while pool_is_healthy.load(Ordering::Relaxed) { match inq.recv() { @@ -275,20 +230,20 @@ pub fn run_forever(inq: Receiver, outq: Sender, n_ let span = span.clone(); pool.execute(move || { span.in_scope(|| { - if let Err(e) = - outq.send(read_metadata_from_file(&args).map_err(|e| DetailedMsg { - msg: "Metadata read failed".to_string(), - details: e, - src_file: args.file_path.clone(), - user_id: args.user_id.clone(), - })) + if let Err(e) = outq.send( + read_metadata_from_file(&args).map_err(|e| { + DetailedMsg { + msg: "Metadata read failed".to_string(), + details: e, + src_file: args.file_path.clone(), + user_id: args.user_id.clone() }})) { tracing::error!(details=%e, "Result send failed! Aborting."); pool_is_healthy.store(false, Ordering::Relaxed); } }) }); - } + }, Err(RecvError) => { tracing::info!("Incoming queue closed."); break; @@ -299,31 +254,21 @@ pub fn run_forever(inq: Receiver, outq: Sender, n_ tracing::debug!("Exiting."); } + // Unit tests ===================================================================================== #[cfg(test)] -fn test_fixture(has_bitrate: bool, has_fps: bool) -> (IncomingFile, serde_json::Value) { - let bitrate = if has_bitrate { - r#", "BitRate": "1000""# - } else { - "" - }; - let fps = if has_fps { - r#", "FrameRate": "30""# - } else { - "" - }; +fn test_fixture(has_bitrate: bool, has_fps: bool) -> (IncomingFile, serde_json::Value) +{ + let bitrate = if has_bitrate { r#", "BitRate": "1000""# } else { "" }; + let fps = if has_fps { r#", "FrameRate": "30""# } else { "" }; - let json = serde_json::from_str(&format!( - r#"{{ + let json = serde_json::from_str(&format!(r#"{{ "media": {{ "track": [ {{ "@type": "Video", "FrameCount": "100", "Duration": "5.0", "Format": "H264" {}{} - }} ] }} }}"#, - bitrate, fps - )) - .unwrap(); + }} ] }} }}"#, bitrate, fps)).unwrap(); let args = IncomingFile { file_path: PathBuf::from("test.mp4"), @@ -336,7 +281,8 @@ fn test_fixture(has_bitrate: bool, has_fps: bool) -> (IncomingFile, serde_json:: } #[test] -fn test_extract_variables_ok() { +fn test_extract_variables_ok() +{ let (args, json) = test_fixture(true, true); let metadata = extract_variables(json, &args, || Ok(1000)).unwrap(); assert_eq!(metadata.total_frames, 100); @@ -347,14 +293,16 @@ fn test_extract_variables_ok() { } #[test] -fn test_extract_variables_missing_bitrate() { +fn test_extract_variables_missing_bitrate() +{ let (args, json) = test_fixture(false, true); let metadata = extract_variables(json, &args, || Ok(1000)).unwrap(); - assert_eq!(metadata.bitrate, 1000 * 8 / 5); + assert_eq!(metadata.bitrate, 1000*8/5); } #[test] -fn test_extract_variables_fail_missing_fps() { +fn test_extract_variables_fail_missing_fps() +{ let (args, json) = test_fixture(true, false); let metadata = extract_variables(json, &args, || Ok(1000)); assert!(metadata.is_err()); diff --git a/server/src/video_pipeline/script_processor.rs 
b/server/src/video_pipeline/script_processor.rs index e42b117a..a8c0ecab 100644 --- a/server/src/video_pipeline/script_processor.rs +++ b/server/src/video_pipeline/script_processor.rs @@ -1,12 +1,12 @@ -use chrono; -use crossbeam_channel::{Receiver, Sender}; -use rust_decimal::prelude::ToPrimitive; -use rust_decimal::Decimal; -use std::fs; +use std::{process::Command, io::BufRead, collections::HashMap}; use std::path::PathBuf; -use std::{collections::HashMap, io::BufRead, process::Command}; -use threadpool::ThreadPool; +use crossbeam_channel::{Sender, Receiver}; +use rust_decimal::Decimal; +use rust_decimal::prelude::ToPrimitive; use tracing; +use threadpool::ThreadPool; +use std::fs; +use chrono; use super::metadata_reader::MediaType; use super::DetailedMsg; @@ -17,7 +17,7 @@ pub type ProgressSender = crossbeam_channel::Sender<(String, String, String, Opt #[derive(Debug, Clone)] pub enum CmprInput { Transcode { - video_dst_dir: PathBuf, // Directory where script should output + video_dst_dir: PathBuf, // Directory where script should output video_dst_prefix: String, // Filename prefix (script decides extension) video_bitrate: u32, src: CmprInputSource, @@ -27,7 +27,7 @@ pub enum CmprInput { thumb_sheet_dims: (u32, u32), thumb_size: (u32, u32), src: CmprInputSource, - }, + } } #[derive(Debug, Clone)] @@ -43,20 +43,16 @@ pub struct CmprInputSource { #[derive(Debug, Clone)] pub enum CmprOutput { TranscodeSuccess { - video_dst: PathBuf, // Final output file path (determined by script) - logs: CmprLogs, + video_dst: PathBuf, // Final output file path (determined by script) + logs: CmprLogs }, ThumbsSuccess { thumb_dir: Option, thumb_sheet_dims: Option<(u32, u32)>, - logs: CmprLogs, - }, - TranscodeFailure { - logs: CmprLogs, - }, - ThumbsFailure { - logs: CmprLogs, + logs: CmprLogs }, + TranscodeFailure { logs: CmprLogs }, + ThumbsFailure { logs: CmprLogs } } #[derive(Debug, Clone)] @@ -68,6 +64,7 @@ pub struct CmprLogs { pub dmsg: DetailedMsg, } + /// Validate and sanitize values passed to scripts via environment variables fn validate_env_value(key: &str, value: &str) -> Result { match key { @@ -78,25 +75,22 @@ fn validate_env_value(key: &str, value: &str) -> Result { } else { Err(format!("Invalid bitrate format: {}", value)) } - } + }, "CLAPSHOT_MEDIA_TYPE" => { // Media type should be one of known values match value { "video" | "audio" | "image" => Ok(value.to_string()), - _ => Err(format!("Invalid media type: {}", value)), + _ => Err(format!("Invalid media type: {}", value)) } - } + }, "CLAPSHOT_USER_ID" | "CLAPSHOT_MEDIA_ID" => { // User/media IDs should be alphanumeric + basic chars only - if value - .chars() - .all(|c| c.is_alphanumeric() || c == '_' || c == '-' || c == '.') - { + if value.chars().all(|c| c.is_alphanumeric() || c == '_' || c == '-' || c == '.') { Ok(value.to_string()) } else { Err(format!("Invalid {} format: {}", key, value)) } - } + }, "CLAPSHOT_DURATION" => { // Duration should be numeric (can have decimal point) if value.chars().all(|c| c.is_numeric() || c == '.') { @@ -104,51 +98,31 @@ fn validate_env_value(key: &str, value: &str) -> Result { } else { Err(format!("Invalid duration format: {}", value)) } - } + }, "CLAPSHOT_THUMB_SIZE" | "CLAPSHOT_SHEET_DIMS" => { // Dimensions should be in format "NxN" where N is numeric - if value.matches('x').count() == 1 - && value - .split('x') - .all(|part| part.chars().all(|c| c.is_numeric()) && !part.is_empty()) - { + if value.matches('x').count() == 1 && + value.split('x').all(|part| part.chars().all(|c| c.is_numeric()) && 
!part.is_empty()) { Ok(value.to_string()) } else { - Err(format!( - "Invalid dimension format (expected NxN): {}", - value - )) + Err(format!("Invalid dimension format (expected NxN): {}", value)) } - } + }, "CLAPSHOT_OUTPUT_PREFIX" => { // Output prefix should be alphanumeric + basic chars only (no path separators) - if value - .chars() - .all(|c| c.is_alphanumeric() || c == '_' || c == '-' || c == '.') - { + if value.chars().all(|c| c.is_alphanumeric() || c == '_' || c == '-' || c == '.') { Ok(value.to_string()) } else { Err(format!("Invalid output prefix format: {}", value)) } - } + }, _ => { // For file paths, allow basic path chars but reject dangerous sequences - if value.contains("..") - || value.contains(";") - || value.contains("|") - || value.contains("&") - || value.contains("`") - || value.contains("$") - || value.contains("'") - || value.contains("\"") - || value.contains("\\") - || value.contains("\n") - || value.contains("\r") - { + if value.contains("..") || value.contains(";") || value.contains("|") || value.contains("&") || + value.contains("`") || value.contains("$") || value.contains("'") || value.contains("\"") || + value.contains("\\") || value.contains("\n") || value.contains("\r") { Err(format!("Potentially unsafe value for {}: {}", key, value)) - } else if !value.chars().all(|c| { - c.is_alphanumeric() || c == '/' || c == '_' || c == '-' || c == '.' || c == ' ' - }) { + } else if !value.chars().all(|c| c.is_alphanumeric() || c == '/' || c == '_' || c == '-' || c == '.' || c == ' ') { Err(format!("Invalid characters in {}: {}", key, value)) } else { Ok(value.to_string()) @@ -159,12 +133,10 @@ fn validate_env_value(key: &str, value: &str) -> Result { /// Create a sanitized symlink in the orig directory for script access fn create_sanitized_symlink(src_path: &PathBuf) -> Result { - let orig_dir = src_path - .parent() + let orig_dir = src_path.parent() .ok_or("Source file has no parent directory")?; - let extension = src_path - .extension() + let extension = src_path.extension() .and_then(|e| e.to_str()) .unwrap_or("bin"); @@ -177,10 +149,7 @@ fn create_sanitized_symlink(src_path: &PathBuf) -> Result { // Check if source file exists if !src_path.exists() { - return Err(format!( - "Source file does not exist: {}", - src_path.display() - )); + return Err(format!("Source file does not exist: {}", src_path.display())); } // Always create a fresh symlink (remove any existing one first) @@ -191,15 +160,12 @@ fn create_sanitized_symlink(src_path: &PathBuf) -> Result { } // Use relative path for symlink target (just the filename) since both files are in the same directory - let src_filename = src_path.file_name().ok_or("Source file has no filename")?; + let src_filename = src_path.file_name() + .ok_or("Source file has no filename")?; if let Err(e) = std::os::unix::fs::symlink(src_filename, &sanitized_path) { - return Err(format!( - "Failed to create sanitized symlink from {} to {}: {}", - src_filename.to_string_lossy(), - sanitized_path.display(), - e - )); + return Err(format!("Failed to create sanitized symlink from {} to {}: {}", + src_filename.to_string_lossy(), sanitized_path.display(), e)); } tracing::debug!(src=?src_path, symlink=?sanitized_path, "Created sanitized symlink"); @@ -207,55 +173,32 @@ fn create_sanitized_symlink(src_path: &PathBuf) -> Result { } /// Set up environment variables for script execution -fn setup_script_environment( - src: &CmprInputSource, - input_file: &PathBuf, - output_dir: &PathBuf, - output_prefix: &str, - target_bitrate: u32, - 
progress_pipe: &Option, -) -> Result, String> { +fn setup_script_environment(src: &CmprInputSource, input_file: &PathBuf, output_dir: &PathBuf, + output_prefix: &str, target_bitrate: u32, progress_pipe: &Option) + -> Result, String> { let mut env_vars = HashMap::new(); // Validate and set environment variables - env_vars.insert( - "CLAPSHOT_INPUT_FILE".to_string(), - validate_env_value("CLAPSHOT_INPUT_FILE", &input_file.to_string_lossy())?, - ); - env_vars.insert( - "CLAPSHOT_OUTPUT_DIR".to_string(), - validate_env_value("CLAPSHOT_OUTPUT_DIR", &output_dir.to_string_lossy())?, - ); - env_vars.insert( - "CLAPSHOT_OUTPUT_PREFIX".to_string(), - validate_env_value("CLAPSHOT_OUTPUT_PREFIX", output_prefix)?, - ); - env_vars.insert( - "CLAPSHOT_MEDIA_TYPE".to_string(), - validate_env_value("CLAPSHOT_MEDIA_TYPE", src.media_type.as_ref())?, - ); - env_vars.insert( - "CLAPSHOT_TARGET_BITRATE".to_string(), - validate_env_value("CLAPSHOT_TARGET_BITRATE", &target_bitrate.to_string())?, - ); - env_vars.insert( - "CLAPSHOT_USER_ID".to_string(), - validate_env_value("CLAPSHOT_USER_ID", &src.user_id)?, - ); - env_vars.insert( - "CLAPSHOT_MEDIA_ID".to_string(), - validate_env_value("CLAPSHOT_MEDIA_ID", &src.media_file_id)?, - ); - env_vars.insert( - "CLAPSHOT_DURATION".to_string(), - validate_env_value("CLAPSHOT_DURATION", &src.duration.to_string())?, - ); + env_vars.insert("CLAPSHOT_INPUT_FILE".to_string(), + validate_env_value("CLAPSHOT_INPUT_FILE", &input_file.to_string_lossy())?); + env_vars.insert("CLAPSHOT_OUTPUT_DIR".to_string(), + validate_env_value("CLAPSHOT_OUTPUT_DIR", &output_dir.to_string_lossy())?); + env_vars.insert("CLAPSHOT_OUTPUT_PREFIX".to_string(), + validate_env_value("CLAPSHOT_OUTPUT_PREFIX", output_prefix)?); + env_vars.insert("CLAPSHOT_MEDIA_TYPE".to_string(), + validate_env_value("CLAPSHOT_MEDIA_TYPE", src.media_type.as_ref())?); + env_vars.insert("CLAPSHOT_TARGET_BITRATE".to_string(), + validate_env_value("CLAPSHOT_TARGET_BITRATE", &target_bitrate.to_string())?); + env_vars.insert("CLAPSHOT_USER_ID".to_string(), + validate_env_value("CLAPSHOT_USER_ID", &src.user_id)?); + env_vars.insert("CLAPSHOT_MEDIA_ID".to_string(), + validate_env_value("CLAPSHOT_MEDIA_ID", &src.media_file_id)?); + env_vars.insert("CLAPSHOT_DURATION".to_string(), + validate_env_value("CLAPSHOT_DURATION", &src.duration.to_string())?); if let Some(pipe) = progress_pipe { - env_vars.insert( - "CLAPSHOT_PROGRESS_PIPE".to_string(), - validate_env_value("CLAPSHOT_PROGRESS_PIPE", pipe)?, - ); + env_vars.insert("CLAPSHOT_PROGRESS_PIPE".to_string(), + validate_env_value("CLAPSHOT_PROGRESS_PIPE", pipe)?); } Ok(env_vars) @@ -263,8 +206,8 @@ fn setup_script_environment( /// Find the actual output file created by the script fn find_script_output(output_dir: &PathBuf, output_prefix: &str) -> Result { - let entries = - fs::read_dir(output_dir).map_err(|e| format!("Failed to read output directory: {}", e))?; + let entries = fs::read_dir(output_dir) + .map_err(|e| format!("Failed to read output directory: {}", e))?; let mut candidates = Vec::new(); for entry in entries { @@ -287,26 +230,15 @@ fn find_script_output(output_dir: &PathBuf, output_prefix: &str) -> Result Err(format!( - "No valid output files found with prefix: {}", - output_prefix - )), + 0 => Err(format!("No valid output files found with prefix: {}", output_prefix)), 1 => Ok(candidates.into_iter().next().unwrap()), - _ => Err(format!( - "Multiple valid output files found with prefix {}: {:?}", - output_prefix, candidates - )), + _ => Err(format!("Multiple valid 
output files found with prefix {}: {:?}", output_prefix, candidates)) } } -fn err2cout( - msg_txt: &str, - err: E, - args: &CmprInput, - sanitized_symlink: Option<&PathBuf>, -) -> CmprOutput { +fn err2cout(msg_txt: &str, err: E, args: &CmprInput, sanitized_symlink: Option<&PathBuf>) -> CmprOutput { let details_str = format!("{:?}", err); - tracing::error!(details = &details_str, "err2cout: {}", msg_txt); + tracing::error!(details=&details_str, "err2cout: {}", msg_txt); // Clean up sanitized symlink if provided if let Some(symlink_path) = sanitized_symlink { @@ -328,62 +260,45 @@ fn err2cout( msg: msg_txt.to_string(), details: details_str, src_file: src.path.clone(), - user_id: src.user_id.clone(), - }, + user_id: src.user_id.clone() + } }; match args { - CmprInput::Transcode { .. } => CmprOutput::TranscodeFailure { logs }, - CmprInput::Thumbs { .. } => CmprOutput::ThumbsFailure { logs }, + CmprInput::Transcode { .. } => { CmprOutput::TranscodeFailure { logs } }, + CmprInput::Thumbs { .. } => { CmprOutput::ThumbsFailure { logs } } } } /// Run transcoding script and return the output -fn run_transcode_script( - src: &CmprInputSource, - output_dir: PathBuf, - output_prefix: String, - video_bitrate: u32, - progress: ProgressSender, - script_path: &str, -) -> CmprOutput { +fn run_transcode_script(src: &CmprInputSource, output_dir: PathBuf, output_prefix: String, + video_bitrate: u32, progress: ProgressSender, script_path: &str) -> CmprOutput { let _span = tracing::info_span!("run_transcode_script", media_file = %src.media_file_id, user = %src.user_id, - thread = ?std::thread::current().id()) - .entered(); + thread = ?std::thread::current().id()).entered(); // Create sanitized symlink for script access let sanitized_input = match create_sanitized_symlink(&src.path) { Ok(path) => path, - Err(e) => { - return err2cout( - "Failed to create sanitized symlink", - e, - &CmprInput::Transcode { - video_dst_dir: output_dir, - video_dst_prefix: output_prefix, - video_bitrate, - src: src.clone(), - }, - None, - ) - } + Err(e) => return err2cout("Failed to create sanitized symlink", e, + &CmprInput::Transcode { + video_dst_dir: output_dir, + video_dst_prefix: output_prefix, + video_bitrate, + src: src.clone() + }, None) }; // Create transcoded/ subdirectory for script to work in let script_work_dir = output_dir.join("transcoded"); if let Err(e) = fs::create_dir_all(&script_work_dir) { - return err2cout( - "Failed to create script work directory", - e, - &CmprInput::Transcode { - video_dst_dir: output_dir, - video_dst_prefix: output_prefix, - video_bitrate, - src: src.clone(), - }, - Some(&sanitized_input), - ); + return err2cout("Failed to create script work directory", e, + &CmprInput::Transcode { + video_dst_dir: output_dir, + video_dst_prefix: output_prefix, + video_bitrate, + src: src.clone() + }, Some(&sanitized_input)); } // Set up progress pipe in a temporary directory (not user-writable space) @@ -395,16 +310,10 @@ fn run_transcode_script( Some(fname) => unix_named_pipe::create(&fname, None) .map(|_| fname.to_string()) .map_err(|e| e.to_string()) - .map_or_else( - |e| { - tracing::warn!( - details = e, - "Won't track script progress; failed to create pipe file." 
- ); - None - }, - |f| Some(f), - ), + .map_or_else(|e| { + tracing::warn!(details=e, "Won't track script progress; failed to create pipe file."); + None + }, |f| Some(f)) }; let progress_terminate = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false)); @@ -423,8 +332,7 @@ fn run_transcode_script( Some(pfn) => { std::thread::spawn(move || { let _span = tracing::info_span!("script_progress", - thread = ?std::thread::current().id()) - .entered(); + thread = ?std::thread::current().id()).entered(); let f = match unix_named_pipe::open_read(&pfn) { Ok(f) => f, @@ -449,7 +357,7 @@ fn run_transcode_script( } else { std::thread::sleep(std::time::Duration::from_millis(250)); } - } + }, None => { tracing::debug!("Progress pipe EOF. Sleeping..."); std::thread::sleep(std::time::Duration::from_millis(250)); @@ -466,43 +374,35 @@ fn run_transcode_script( "end" => { msg = Some("Transcoding done.".to_string()); done_ratio = Some(1.0); - } + }, "continue" => { msg = Some("Transcoding...".to_string()); // Calculate progress percentage if we have both current time and duration if let Some(time_us) = current_time_us { if total_duration_us > 0 { - let progress_pct = (time_us as f32 - / total_duration_us as f32) - .min(1.0) - .max(0.0); + let progress_pct = (time_us as f32 / total_duration_us as f32).min(1.0).max(0.0); done_ratio = Some(progress_pct); } } - } + }, _ => { msg = Some("Transcoding...".to_string()); } } - } + }, "out_time_us" => { // Parse current position in microseconds if let Ok(time_us) = val.parse::() { current_time_us = Some(time_us); } - } + }, _ => {} // Ignore other keys } } // Send progress message (if any) if let Some(msg) = msg.take() { - if let Err(e) = progress.send(( - vid.clone(), - user_id.clone(), - msg, - done_ratio.clone(), - )) { + if let Err(e) = progress.send((vid.clone(), user_id.clone(), msg, done_ratio.clone())) { tracing::debug!(details=%e, "Failed to send script progress message. 
Ending progress tracking."); return; } @@ -517,28 +417,16 @@ fn run_transcode_script( }; // Set up environment variables for script - let env_vars = match setup_script_environment( - src, - &sanitized_input, - &script_work_dir, - &output_prefix, - video_bitrate, - &ppipe_fname, - ) { + let env_vars = match setup_script_environment(src, &sanitized_input, &script_work_dir, &output_prefix, + video_bitrate, &ppipe_fname) { Ok(vars) => vars, - Err(e) => { - return err2cout( - "Failed to set up script environment", - e, - &CmprInput::Transcode { - video_dst_dir: output_dir, - video_dst_prefix: output_prefix, - video_bitrate, - src: src.clone(), - }, - Some(&sanitized_input), - ) - } + Err(e) => return err2cout("Failed to set up script environment", e, + &CmprInput::Transcode { + video_dst_dir: output_dir, + video_dst_prefix: output_prefix, + video_bitrate, + src: src.clone() + }, Some(&sanitized_input)) }; // Run the transcoding script @@ -550,8 +438,7 @@ fn run_transcode_script( let script_thread = { std::thread::spawn(move || { let _span = tracing::info_span!("transcode_script", - thread = ?std::thread::current().id()) - .entered(); + thread = ?std::thread::current().id()).entered(); let mut cmd = Command::new(&script_path_owned); @@ -597,16 +484,10 @@ fn run_transcode_script( tracing::error!(file=?log_file, details=%e, "Failed to write transcoding log file"); } - ( - if res.status.success() { - None - } else { - Some("Script exited with error".to_string()) - }, + (if res.status.success() {None} else {Some("Script exited with error".to_string())}, format!("Log written to: {}", log_file.display()), - "".to_string(), - ) - } + "".to_string() ) + }, Err(e) => { tracing::error!(details=%e, "Script exec failed"); @@ -633,11 +514,7 @@ fn run_transcode_script( tracing::error!(file=?log_file, details=%write_err, "Failed to write transcoding error log"); } - ( - Some(e.to_string()), - format!("Log written to: {}", log_file.display()), - "".into(), - ) + (Some(e.to_string()), format!("Log written to: {}", log_file.display()), "".into()) } } }) @@ -647,11 +524,7 @@ fn run_transcode_script( tracing::debug!("Waiting for transcoding script to complete..."); let (err_msg, stdout, stderr) = script_thread.join().unwrap_or_else(|e| { tracing::error!(details=?e, "Script thread panicked."); - ( - Some("Script thread panicked".to_string()), - "".into(), - format!("{:?}", e), - ) + (Some("Script thread panicked".to_string()), "".into(), format!("{:?}", e)) }); tracing::debug!("Terminating script progress thread."); @@ -688,9 +561,9 @@ fn run_transcode_script( msg: "Failed to get script output filename".to_string(), details: "Script output has invalid filename".to_string(), src_file: src.path.clone(), - user_id: src.user_id.clone(), - }, - }, + user_id: src.user_id.clone() + } + } }; } }; @@ -718,15 +591,15 @@ fn run_transcode_script( msg: "Failed to move script output to final location".to_string(), details: format!("Error moving file: {}", e), src_file: src.path.clone(), - user_id: src.user_id.clone(), - }, - }, + user_id: src.user_id.clone() + } + } }; } tracing::debug!(from=?script_output_path, to=?final_output_path, "Moved script output to final location"); final_output_path - } + }, Err(e) => { tracing::error!(details=%e, "Script completed but output validation failed"); // Clean up progress pipe if it exists @@ -745,13 +618,13 @@ fn run_transcode_script( msg: "Script output validation failed".to_string(), details: e, src_file: src.path.clone(), - user_id: src.user_id.clone(), - }, - }, + user_id: 
src.user_id.clone() + } + } }; } }, - Some(_) => PathBuf::new(), // Error case, path doesn't matter + Some(_) => PathBuf::new() // Error case, path doesn't matter }; let logs = CmprLogs { @@ -760,16 +633,11 @@ fn run_transcode_script( stdout, _stderr: stderr, dmsg: DetailedMsg { - msg: if err_msg.is_some() { - "Transcoding failed" - } else { - "Transcoding complete" - } - .to_string(), + msg: if err_msg.is_some() { "Transcoding failed" } else { "Transcoding complete" }.to_string(), details: format!("Error in script: {:?}", err_msg.clone().unwrap_or_default()), src_file: src.path.clone(), - user_id: src.user_id.clone(), - }, + user_id: src.user_id.clone() + } }; // Clean up progress pipe and sanitized symlink @@ -792,56 +660,40 @@ fn run_transcode_script( match err_msg { Some(_) => CmprOutput::TranscodeFailure { logs }, - None => CmprOutput::TranscodeSuccess { video_dst, logs }, + None => CmprOutput::TranscodeSuccess { video_dst, logs } } } /// Run thumbnailing script -fn run_thumbnail_script( - thumb_dir: PathBuf, - thumb_size: (u32, u32), - thumb_sheet_dims: (u32, u32), - src: CmprInputSource, - script_path: &str, -) -> CmprOutput { +fn run_thumbnail_script(thumb_dir: PathBuf, thumb_size: (u32,u32), thumb_sheet_dims: (u32, u32), + src: CmprInputSource, script_path: &str) -> CmprOutput { let _span = tracing::info_span!("run_thumbnail_script", media_file = %src.media_file_id, user = %src.user_id, - thread = ?std::thread::current().id()) - .entered(); + thread = ?std::thread::current().id()).entered(); // Create sanitized symlink for script access let sanitized_input = match create_sanitized_symlink(&src.path) { Ok(path) => path, - Err(e) => { - return err2cout( - "Failed to create sanitized symlink", - e, - &CmprInput::Thumbs { - thumb_dir: thumb_dir.clone(), - thumb_sheet_dims, - thumb_size, - src: src.clone(), - }, - None, - ) - } + Err(e) => return err2cout("Failed to create sanitized symlink", e, + &CmprInput::Thumbs { + thumb_dir: thumb_dir.clone(), + thumb_sheet_dims, + thumb_size, + src: src.clone() + }, None) }; // Create isolated script work directory for thumbnailing let script_work_dir = thumb_dir.join("transcoded"); if let Err(e) = fs::create_dir_all(&script_work_dir) { - return err2cout( - "Failed to create script work directory", - e.to_string(), - &CmprInput::Thumbs { - thumb_dir: thumb_dir.clone(), - thumb_sheet_dims, - thumb_size, - src: src.clone(), - }, - Some(&sanitized_input), - ); + return err2cout("Failed to create script work directory", e.to_string(), + &CmprInput::Thumbs { + thumb_dir: thumb_dir.clone(), + thumb_sheet_dims, + thumb_size, + src: src.clone() + }, Some(&sanitized_input)); } // Set up environment variables for script @@ -849,53 +701,29 @@ fn run_thumbnail_script( // Validate and set environment variables if let Err(e) = (|| -> Result<(), String> { - env_vars.insert( - "CLAPSHOT_INPUT_FILE".to_string(), - validate_env_value("CLAPSHOT_INPUT_FILE", &sanitized_input.to_string_lossy())?, - ); - env_vars.insert( - "CLAPSHOT_OUTPUT_DIR".to_string(), - validate_env_value("CLAPSHOT_OUTPUT_DIR", &script_work_dir.to_string_lossy())?, - ); - env_vars.insert( - "CLAPSHOT_MEDIA_TYPE".to_string(), - validate_env_value("CLAPSHOT_MEDIA_TYPE", src.media_type.as_ref())?, - ); - env_vars.insert( - "CLAPSHOT_USER_ID".to_string(), - validate_env_value("CLAPSHOT_USER_ID", &src.user_id)?, - ); - env_vars.insert( - "CLAPSHOT_MEDIA_ID".to_string(), - validate_env_value("CLAPSHOT_MEDIA_ID", &src.media_file_id)?, - ); - env_vars.insert( - "CLAPSHOT_THUMB_SIZE".to_string(), - 
validate_env_value( - "CLAPSHOT_THUMB_SIZE", - &format!("{}x{}", thumb_size.0, thumb_size.1), - )?, - ); - env_vars.insert( - "CLAPSHOT_SHEET_DIMS".to_string(), - validate_env_value( - "CLAPSHOT_SHEET_DIMS", - &format!("{}x{}", thumb_sheet_dims.0, thumb_sheet_dims.1), - )?, - ); + env_vars.insert("CLAPSHOT_INPUT_FILE".to_string(), + validate_env_value("CLAPSHOT_INPUT_FILE", &sanitized_input.to_string_lossy())?); + env_vars.insert("CLAPSHOT_OUTPUT_DIR".to_string(), + validate_env_value("CLAPSHOT_OUTPUT_DIR", &script_work_dir.to_string_lossy())?); + env_vars.insert("CLAPSHOT_MEDIA_TYPE".to_string(), + validate_env_value("CLAPSHOT_MEDIA_TYPE", src.media_type.as_ref())?); + env_vars.insert("CLAPSHOT_USER_ID".to_string(), + validate_env_value("CLAPSHOT_USER_ID", &src.user_id)?); + env_vars.insert("CLAPSHOT_MEDIA_ID".to_string(), + validate_env_value("CLAPSHOT_MEDIA_ID", &src.media_file_id)?); + env_vars.insert("CLAPSHOT_THUMB_SIZE".to_string(), + validate_env_value("CLAPSHOT_THUMB_SIZE", &format!("{}x{}", thumb_size.0, thumb_size.1))?); + env_vars.insert("CLAPSHOT_SHEET_DIMS".to_string(), + validate_env_value("CLAPSHOT_SHEET_DIMS", &format!("{}x{}", thumb_sheet_dims.0, thumb_sheet_dims.1))?); Ok(()) })() { - return err2cout( - "Failed to set up script environment", - e, - &CmprInput::Thumbs { - thumb_dir: thumb_dir.clone(), - thumb_sheet_dims, - thumb_size, - src: src.clone(), - }, - Some(&sanitized_input), - ); + return err2cout("Failed to set up script environment", e, + &CmprInput::Thumbs { + thumb_dir: thumb_dir.clone(), + thumb_sheet_dims, + thumb_size, + src: src.clone() + }, Some(&sanitized_input)); } // Run the thumbnailing script @@ -907,8 +735,7 @@ fn run_thumbnail_script( let script_thread = { std::thread::spawn(move || { let _span = tracing::info_span!("thumbnail_script", - thread = ?std::thread::current().id()) - .entered(); + thread = ?std::thread::current().id()).entered(); let mut cmd = Command::new(&script_path_owned); @@ -954,16 +781,10 @@ fn run_thumbnail_script( tracing::error!(file=?log_file, details=%e, "Failed to write thumbnailing log file"); } - ( - if res.status.success() { - None - } else { - Some("Script exited with error".to_string()) - }, + (if res.status.success() {None} else {Some("Script exited with error".to_string())}, format!("Log written to: {}", log_file.display()), - "".to_string(), - ) - } + "".to_string() ) + }, Err(e) => { tracing::error!(details=%e, "Script exec failed"); @@ -990,11 +811,7 @@ fn run_thumbnail_script( tracing::error!(file=?log_file, details=%write_err, "Failed to write thumbnailing error log"); } - ( - Some(e.to_string()), - format!("Log written to: {}", log_file.display()), - "".into(), - ) + (Some(e.to_string()), format!("Log written to: {}", log_file.display()), "".into()) } } }) @@ -1004,11 +821,7 @@ fn run_thumbnail_script( tracing::debug!("Waiting for thumbnailing script to complete..."); let (err_msg, stdout, stderr) = script_thread.join().unwrap_or_else(|e| { tracing::error!(details=?e, "Script thread panicked."); - ( - Some("Script thread panicked".to_string()), - "".into(), - format!("{:?}", e), - ) + (Some("Script thread panicked".to_string()), "".into(), format!("{:?}", e)) }); let logs = CmprLogs { @@ -1017,16 +830,11 @@ fn run_thumbnail_script( stdout, _stderr: stderr, dmsg: DetailedMsg { - msg: if err_msg.is_some() { - "Thumbnailing failed" - } else { - "Thumbnailing complete" - } - .to_string(), + msg: if err_msg.is_some() { "Thumbnailing failed" } else { "Thumbnailing complete" }.to_string(), details: format!("Error in 
script: {:?}", err_msg.clone().unwrap_or_default()), src_file: src.path.clone(), - user_id: src.user_id.clone(), - }, + user_id: src.user_id.clone() + } }; // Move thumbnail files from script work directory to main thumbnail directory @@ -1043,9 +851,7 @@ fn run_thumbnail_script( if let Err(e) = fs::rename(&src_path, &dest_path) { tracing::warn!(details=%e, from=?src_path, to=?dest_path, "Failed to move thumbnail file to final location"); // Try copy + remove as fallback - if let Ok(()) = - fs::copy(&src_path, &dest_path).and_then(|_| fs::remove_file(&src_path)) - { + if let Ok(()) = fs::copy(&src_path, &dest_path).and_then(|_| fs::remove_file(&src_path)) { tracing::debug!(from=?src_path, to=?dest_path, "Successfully copied and removed thumbnail file"); } } else { @@ -1072,39 +878,28 @@ fn run_thumbnail_script( Some(_) => CmprOutput::ThumbsFailure { logs }, None => { // Check if any thumbnail files were actually created - let has_thumbnails = thumb_dir.exists() - && fs::read_dir(&thumb_dir) - .map(|entries| { - entries.filter_map(|e| e.ok()).any(|entry| { - let path = entry.path(); - let is_thumb = path.is_file() - && path - .file_name() - .and_then(|name| name.to_str()) - .map(|s| { - // Only consider actual thumbnail files, not log files - s.ends_with(".webp") - || (s.starts_with("thumb") && s.ends_with(".webp")) - || (s.starts_with("sheet-") && s.ends_with(".webp")) - }) - .unwrap_or(false); - is_thumb - }) - }) + let has_thumbnails = thumb_dir.exists() && + fs::read_dir(&thumb_dir) + .map(|entries| entries.filter_map(|e| e.ok()).any(|entry| { + let path = entry.path(); + let is_thumb = path.is_file() && path.file_name() + .and_then(|name| name.to_str()) + .map(|s| { + // Only consider actual thumbnail files, not log files + s.ends_with(".webp") || + (s.starts_with("thumb") && s.ends_with(".webp")) || + (s.starts_with("sheet-") && s.ends_with(".webp")) + }) + .unwrap_or(false); + is_thumb + })) .unwrap_or(false); + CmprOutput::ThumbsSuccess { - thumb_dir: if has_thumbnails { - Some(thumb_dir) - } else { - None - }, - thumb_sheet_dims: if has_thumbnails { - Some(thumb_sheet_dims) - } else { - None - }, - logs, + thumb_dir: if has_thumbnails { Some(thumb_dir) } else { None }, + thumb_sheet_dims: if has_thumbnails { Some(thumb_sheet_dims) } else { None }, + logs } } } @@ -1117,8 +912,8 @@ pub fn run_forever( progress: ProgressSender, n_workers: usize, transcode_script: String, - thumbnail_script: String, -) { + thumbnail_script: String) +{ let _span = tracing::info_span!("SCRIPT_PROCESSOR").entered(); tracing::debug!(n_workers = n_workers, "Starting script processor."); @@ -1131,12 +926,12 @@ pub fn run_forever( tracing::info!(id=%src.media_file_id, r#type=?src.media_type, user=%src.user_id, file=%(src.path.file_name().unwrap_or_default().to_string_lossy()), "Media file transcode request (script)."); - } + }, CmprInput::Thumbs { src, .. 
} => { tracing::info!(id=%src.media_file_id, r#type=?src.media_type, user=%src.user_id, file=%(src.path.file_name().unwrap_or_default().to_string_lossy()), "Media file thumbnail request (script)."); - } + }, } tracing::debug!(details=?args, "Spawning script worker thread."); @@ -1145,42 +940,21 @@ pub fn run_forever( let transcode_script_path = transcode_script.clone(); let thumbnail_script_path = thumbnail_script.clone(); - pool.execute(move || match args { - CmprInput::Transcode { - video_dst_dir, - video_dst_prefix, - video_bitrate, - src, - } => { - if let Err(e) = outq.send(run_transcode_script( - &src, - video_dst_dir, - video_dst_prefix, - video_bitrate, - prgr_sender, - &transcode_script_path, - )) { - tracing::error!("Transcode result send failed! Aborting. -- {:?}", e); - } - } - CmprInput::Thumbs { - thumb_dir, - thumb_sheet_dims, - thumb_size, - src, - } => { - if let Err(e) = outq.send(run_thumbnail_script( - thumb_dir, - thumb_size, - thumb_sheet_dims, - src, - &thumbnail_script_path, - )) { - tracing::error!("Thumbnail result send failed! Aborting. -- {:?}", e); - } + pool.execute(move || { + match args { + CmprInput::Transcode { video_dst_dir, video_dst_prefix, video_bitrate, src } => { + if let Err(e) = outq.send(run_transcode_script(&src, video_dst_dir, video_dst_prefix, video_bitrate, prgr_sender, &transcode_script_path)) { + tracing::error!("Transcode result send failed! Aborting. -- {:?}", e); + } + }, + CmprInput::Thumbs { thumb_dir, thumb_sheet_dims, thumb_size, src } => { + if let Err(e) = outq.send(run_thumbnail_script(thumb_dir, thumb_size, thumb_sheet_dims, src, &thumbnail_script_path)) { + tracing::error!("Thumbnail result send failed! Aborting. -- {:?}", e); + } + }, } }); - } + }, Err(e) => { tracing::info!(details=%e, "Input queue closed."); break; @@ -1189,4 +963,4 @@ pub fn run_forever( } tracing::debug!("Exiting script processor."); -} +} \ No newline at end of file From 14345074f5bab0ed4cf5680d71ba2272c703c92e Mon Sep 17 00:00:00 2001 From: elonen Date: Sun, 30 Nov 2025 21:13:40 +0200 Subject: [PATCH 06/10] Further reduce formatting-only changes from main.rs --- server/src/main.rs | 112 ++++++++++++++++++--------------------------- 1 file changed, 45 insertions(+), 67 deletions(-) diff --git a/server/src/main.rs b/server/src/main.rs index a832660a..8d5e6f5a 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -3,15 +3,14 @@ use clap::Parser; use clapshot_server::{ api_server::validate_org_http_headers_regex, grpc::{grpc_client::prepare_organizer, grpc_server::make_grpc_server_bind}, - run_clapshot, + run_clapshot, PKG_NAME, PKG_VERSION, storage::StorageBackend, video_pipeline::IngestUsernameFrom, - PKG_NAME, PKG_VERSION, }; use http::Uri; -use indoc::indoc; -use std::{path::PathBuf, str::FromStr, sync::Arc}; +use std::{path::PathBuf, sync::Arc, str::FromStr}; use tracing::error; +use indoc::indoc; mod log; @@ -32,16 +31,17 @@ mod log; )] struct Args { /// Directory for database, /incoming, /videos and /rejected - #[arg(short = 'D', long, required = true, value_name = "DIR")] + #[arg(short='D', long, required=true, value_name="DIR" )] data_dir: PathBuf, /// Base URL of the API server, e.g. `https://clapshot.example.com`. /// This depends on your proxy server, and is usually different from `--host` and `--port`. 
- #[arg(short = 'U', long, required = true, value_name = "URL")] + #[arg(short='U', long, required=true, value_name="URL")] url_base: String, + /// TCP port to listen on - #[arg(short = 'p', long, default_value_t = 8095)] + #[arg(short='p', long, default_value_t = 8095)] port: u16, /// Host to listen on @@ -50,28 +50,31 @@ struct Args { /// Allowed CORS Origins, separated by commas. /// Defaults to the value of `url_base`. - #[arg(long, value_name = "ORIGINS")] + #[arg(long, value_name="ORIGINS")] cors: Option, + /// Polling interval for incoming folder - #[arg(short = 'P', long, default_value_t = 3.0, value_name = "SECONDS")] + #[arg(short='P', long, default_value_t = 3.0, value_name="SECONDS")] poll: f32, /// Max number of workers for media file processing /// (0 = number of CPU cores) - #[arg(short, long, default_value_t = 0, value_name = "NUM")] + #[arg(short, long, default_value_t = 0, value_name="NUM")] workers: usize, /// Target (max) bitrate for transcoding, in Mbps - #[arg(short, long, default_value_t = 2.5, value_name = "MBITS")] + #[arg(short, long, default_value_t = 2.5, value_name="MBITS")] bitrate: f32, + /// Migrate database to latest version. Makes an automatic backup. #[arg(long)] migrate: bool, + /// Log to file instead of stdout - #[arg(short, long, value_name = "FILE")] + #[arg(short, long, value_name="FILE")] log: Option, /// Set debug level by repeating (-d = debug, -dd = trace) @@ -79,89 +82,84 @@ struct Args { debug: u8, // Enable debug mode (same as -v) + /// Log in JSON format #[arg(short, long)] json: bool, + /// Use this user id if auth headers are not found. /// Mainly useful for debugging. - #[arg(long, default_value = "anonymous", value_name = "USER")] + #[arg(long, default_value="anonymous", value_name="USER")] default_user: String, /// How to determine username for files in incoming/ folder. /// 'file-owner' uses filesystem ownership, 'folder-name' uses first subfolder name. - #[arg(long, default_value = "file-owner", value_name = "METHOD")] + #[arg(long, default_value="file-owner", value_name="METHOD")] ingest_username_from: String, + /// Shell command to start Organizer plugin. /// The command should block until SIGTERM, and log to stdout/stderr without timestamps. /// Unless --org-uri is a HTTP(S) URI, the command will get a Unix socket path as an argument when Clapshot server calls it. - #[arg(long, value_name = "CMD")] - org_cmd: Option, // TODO: turn into a Vec to allow multiple plugins + #[arg(long, value_name="CMD")] + org_cmd: Option, // TODO: turn into a Vec to allow multiple plugins /// Custom endpoint for srv->org connections. /// E.g. `/path/to/plugin.sock` or `http://[::1]:50051` /// If `--org-cmd` is given, this defaults to a temp .sock in datadir. - #[arg(long, value_name = "URI")] + #[arg(long, value_name="URI")] org_in_uri: Option, /// Listen in TCP address port for org->srv connections. /// Default is to use a Unix socket in datadir. E.g. 
`[::1]:50052` - #[arg(long, value_name = "BIND")] + #[arg(long, value_name="BIND")] org_out_tcp: Option, /// Path to custom transcoding script - #[arg( - long, - value_name = "SCRIPT", - default_value = "scripts/clapshot-transcode" - )] + #[arg(long, value_name="SCRIPT", default_value="scripts/clapshot-transcode")] transcode_script: String, /// Path to custom thumbnailing script - #[arg( - long, - value_name = "SCRIPT", - default_value = "scripts/clapshot-thumbnail" - )] + #[arg(long, value_name="SCRIPT", default_value="scripts/clapshot-thumbnail")] thumbnail_script: String, /// Regular expression to filter HTTP headers passed to Organizer. /// Only headers matching this pattern will be included in UserSessionData. /// Case-insensitive matching. Default is disabled for security. - #[arg(long, value_name = "REGEX", default_value = "^$")] + #[arg(long, value_name="REGEX", default_value="^$")] org_http_headers: String, /// Storage backend (local or s3-compatible object storage) - #[arg(long, value_name = "BACKEND", default_value = "local")] + #[arg(long, value_name="BACKEND", default_value="local")] storage_backend: String, /// S3-compatible endpoint base URL, e.g. https://s3.example.com - #[arg(long, value_name = "URL")] + #[arg(long, value_name="URL")] s3_endpoint: Option, /// S3 region (required for S3 backend) - #[arg(long, value_name = "REGION")] + #[arg(long, value_name="REGION")] s3_region: Option, /// S3 bucket (required for S3 backend) - #[arg(long, value_name = "BUCKET")] + #[arg(long, value_name="BUCKET")] s3_bucket: Option, /// S3 access key (required for S3 backend) - #[arg(long, value_name = "KEY")] + #[arg(long, value_name="KEY")] s3_access_key: Option, /// S3 secret key (required for S3 backend) - #[arg(long, value_name = "SECRET")] + #[arg(long, value_name="SECRET")] s3_secret_key: Option, /// Path/prefix inside the bucket where media files are stored - #[arg(long, value_name = "PREFIX", default_value = "videos")] + #[arg(long, value_name="PREFIX", default_value="videos")] s3_prefix: String, /// Public base URL for accessing the bucket/prefix (used for playback URLs) - #[arg(long, value_name = "URL")] + #[arg(long, value_name="URL")] s3_public_url: Option, } @@ -206,8 +204,7 @@ fn main() -> anyhow::Result<()> { &args.data_dir, )?; - let cors_origins: Vec = args - .cors + let cors_origins: Vec = args.cors .map(|s| s.split(',').map(|s| s.trim().to_string()).collect()) .unwrap_or_default(); @@ -219,30 +216,18 @@ fn main() -> anyhow::Result<()> { let storage = match args.storage_backend.as_str() { "local" => StorageBackend::local(args.data_dir.join("videos"), &url_base), "s3" => { - let endpoint = args - .s3_endpoint - .clone() + let endpoint = args.s3_endpoint.clone() .ok_or_else(|| anyhow::anyhow!("--s3-endpoint is required for S3 backend"))?; - let region = args - .s3_region - .clone() + let region = args.s3_region.clone() .ok_or_else(|| anyhow::anyhow!("--s3-region is required for S3 backend"))?; - let bucket = args - .s3_bucket - .clone() + let bucket = args.s3_bucket.clone() .ok_or_else(|| anyhow::anyhow!("--s3-bucket is required for S3 backend"))?; - let access_key = args - .s3_access_key - .clone() + let access_key = args.s3_access_key.clone() .ok_or_else(|| anyhow::anyhow!("--s3-access-key is required for S3 backend"))?; - let secret_key = args - .s3_secret_key - .clone() + let secret_key = args.s3_secret_key.clone() .ok_or_else(|| anyhow::anyhow!("--s3-secret-key is required for S3 backend"))?; - let public_base_url = args - .s3_public_url - .clone() + let 
public_base_url = args.s3_public_url.clone() .or_else(|| { Uri::from_str(&endpoint).ok().and_then(|uri| { let scheme = uri.scheme_str()?; @@ -263,10 +248,7 @@ fn main() -> anyhow::Result<()> { public_base_url, )? } - other => bail!( - "Unknown storage backend '{}'. Valid options: local, s3", - other - ), + other => bail!("Unknown storage backend '{}'. Valid options: local, s3", other), }; // Run the server (blocking) @@ -279,11 +261,7 @@ fn main() -> anyhow::Result<()> { args.port, org_uri, grpc_server_bind, - if args.workers == 0 { - num_cpus::get() - } else { - args.workers - }, + if args.workers == 0 { num_cpus::get() } else { args.workers }, target_bitrate, default_user, args.poll, @@ -298,4 +276,4 @@ fn main() -> anyhow::Result<()> { } Ok(()) -} +} \ No newline at end of file From 536896865779b8c9a6ed93c7a8fbe1e6c0ce1f41 Mon Sep 17 00:00:00 2001 From: elonen Date: Mon, 1 Dec 2025 03:08:31 +0200 Subject: [PATCH 07/10] Fix S3 multipart upload short reads --- server/src/storage.rs | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/server/src/storage.rs b/server/src/storage.rs index bc7f8b80..a392e542 100644 --- a/server/src/storage.rs +++ b/server/src/storage.rs @@ -290,12 +290,25 @@ impl ObjectStorageBackend { let mut uploaded: u64 = 0; loop { - let bytes_read = file.read(&mut buf).await?; - if bytes_read == 0 { - break; + // Read a complete chunk (read() may return short reads with async I/O) + // S3 requires all parts except the last to be >= 5MB + let mut chunk_size = 0; + loop { + let bytes_read = file.read(&mut buf[chunk_size..]).await?; + if bytes_read == 0 { + break; // EOF + } + chunk_size += bytes_read; + if chunk_size >= MULTIPART_CHUNK_SIZE { + break; // Full chunk + } } - let body = ByteStream::from(buf[..bytes_read].to_vec()); + if chunk_size == 0 { + break; // No more data + } + + let body = ByteStream::from(buf[..chunk_size].to_vec()); let res = client .upload_part() .bucket(&bucket) @@ -319,7 +332,7 @@ impl ObjectStorageBackend { .build(), ); - uploaded += bytes_read as u64; + uploaded += chunk_size as u64; if let Some(cb) = progress.as_ref() { cb((uploaded as f32 / total_len as f32).clamp(0.0, 1.0)); } From b87ca6b43c353a52dfcaf1d6c1986efdf9f48774 Mon Sep 17 00:00:00 2001 From: elonen Date: Mon, 1 Dec 2025 00:14:58 +0200 Subject: [PATCH 08/10] Replace CLI arg passed S3 auth with S3 SDK's built-in auth cascade. Add some integration testing with MinIO. 
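Credentials are no longer passed as --s3-access-key/--s3-secret-key; the SDK's
default chain (env vars, ~/.aws/credentials, container/instance IAM roles)
resolves them. A minimal sketch of the resulting client construction, assuming
the aws-config and aws-sdk-s3 crates exactly as used in the storage.rs hunk
below (the endpoint value is illustrative):

    use aws_sdk_s3::Client;

    async fn make_s3_client(endpoint: Option<&str>) -> Client {
        // Default credential chain: AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY,
        // then ~/.aws/credentials, then container/instance IAM roles.
        let mut loader = aws_config::defaults(aws_config::BehaviorVersion::latest());
        if let Some(ep) = endpoint {
            loader = loader.endpoint_url(ep); // e.g. MinIO at http://127.0.0.1:9000
        }
        let sdk_config = loader.load().await;
        let s3_config = aws_sdk_s3::config::Builder::from(&sdk_config)
            .force_path_style(endpoint.is_some()) // MinIO expects path-style buckets
            .build();
        Client::from_conf(s3_config)
    }

Path-style addressing is forced only when a custom endpoint is given, since
MinIO and similar services do not resolve virtual-hosted bucket hostnames by
default.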
--- Dockerfile.server | 11 +- README.md | 10 +- server/Cargo.toml | 6 +- server/src/api_server/test_utils.rs | 1 - server/src/main.rs | 48 ++--- server/src/storage.rs | 71 +++----- server/src/tests/integration_test.rs | 263 +++++++++++++++++++++++++++ 7 files changed, 325 insertions(+), 85 deletions(-) diff --git a/Dockerfile.server b/Dockerfile.server index b00d5d80..0fed561b 100644 --- a/Dockerfile.server +++ b/Dockerfile.server @@ -1,4 +1,4 @@ -FROM rust:1.87-slim-bookworm AS rust-bookworm-slim +FROM rust:1.88-slim-bookworm AS rust-bookworm-slim # ---------------------------------- @@ -46,6 +46,15 @@ RUN apt-get -qy install libsqlite3-dev >/dev/null RUN apt-get -qy install protobuf-compiler >/dev/null RUN apt-get -qy install python3 python3.11 python3.11-venv >/dev/null +# Install MinIO for S3 integration tests +RUN apt-get -qy install curl >/dev/null +RUN ARCH=$(dpkg --print-architecture) && \ + if [ "$ARCH" = "amd64" ]; then MINIO_ARCH="amd64"; \ + elif [ "$ARCH" = "arm64" ]; then MINIO_ARCH="arm64"; \ + else echo "Unsupported architecture: $ARCH" && exit 1; fi && \ + curl -fsSL https://dl.min.io/server/minio/release/linux-${MINIO_ARCH}/minio -o /usr/local/bin/minio && \ + chmod +x /usr/local/bin/minio + # Switch to regular user RUN mkdir -p /app RUN chown -R ${UID}:${GID} /app diff --git a/README.md b/README.md index c603df6d..ac7fb847 100644 --- a/README.md +++ b/README.md @@ -134,7 +134,11 @@ Clapshot can upload processed media and thumbnails to an S3-compatible object st - `s3-endpoint = https://s3.example.com` - `s3-region = us-east-1` - `s3-bucket = clapshot-media` -- `s3-access-key`, `s3-secret-key` + +**Authentication** uses the standard AWS SDK credential chain (in order of precedence): +1. Environment variables: `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY` +2. AWS credentials file (`~/.aws/credentials`) +3. 
IAM instance roles (for EC2/ECS deployments) **Optional settings:** - `s3-prefix` – path inside the bucket (default: `videos`) @@ -146,8 +150,8 @@ Clapshot can upload processed media and thumbnails to an S3-compatible object st -e CLAPSHOT_SERVER__S3_ENDPOINT=https://s3.example.com \ -e CLAPSHOT_SERVER__S3_REGION=us-east-1 \ -e CLAPSHOT_SERVER__S3_BUCKET=clapshot-media \ --e CLAPSHOT_SERVER__S3_ACCESS_KEY=YOUR_KEY \ --e CLAPSHOT_SERVER__S3_SECRET_KEY=YOUR_SECRET \ +-e AWS_ACCESS_KEY_ID=YOUR_KEY \ +-e AWS_SECRET_ACCESS_KEY=YOUR_SECRET \ -e CLAPSHOT_SERVER__S3_PUBLIC_URL=https://cdn.example.com/clapshot-media ``` diff --git a/server/Cargo.toml b/server/Cargo.toml index 2fd7118c..981abd91 100644 --- a/server/Cargo.toml +++ b/server/Cargo.toml @@ -122,9 +122,9 @@ indoc = "2.0.5" Inflector = "0.11.4" serial_test = "3.1.1" aspasia = "0.2.0" -aws-config = { version = "1.5.0", default-features = false, features = ["rustls"] } -aws-sdk-s3 = { version = "1.40.0", default-features = false, features = ["rustls"] } -aws-types = "1.3.0" +aws-config = { version = "1.8", default-features = false, features = ["rustls", "rt-tokio"] } +aws-sdk-s3 = { version = "1.115", default-features = false, features = ["rustls"] } +aws-types = "1.3" http = "0.2" [dev-dependencies] diff --git a/server/src/api_server/test_utils.rs b/server/src/api_server/test_utils.rs index d4307dfe..202180a6 100644 --- a/server/src/api_server/test_utils.rs +++ b/server/src/api_server/test_utils.rs @@ -14,7 +14,6 @@ use tokio_tungstenite::tungstenite::Message; use crate::video_pipeline::IncomingFile; use crate::api_server::{UserMessage}; use crate::database::{DB, models}; -use crate::storage::StorageBackend; diff --git a/server/src/main.rs b/server/src/main.rs index 8d5e6f5a..c3518f8c 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -7,8 +7,7 @@ use clapshot_server::{ storage::StorageBackend, video_pipeline::IngestUsernameFrom, }; -use http::Uri; -use std::{path::PathBuf, sync::Arc, str::FromStr}; +use std::{path::PathBuf, str::FromStr, sync::Arc}; use tracing::error; use indoc::indoc; @@ -134,31 +133,21 @@ struct Args { #[arg(long, value_name="BACKEND", default_value="local")] storage_backend: String, - /// S3-compatible endpoint base URL, e.g. https://s3.example.com + /// S3-compatible endpoint URL (only needed for non-AWS S3, e.g. MinIO). + /// For AWS S3, leave unset and configure AWS_REGION instead. #[arg(long, value_name="URL")] s3_endpoint: Option, - /// S3 region (required for S3 backend) - #[arg(long, value_name="REGION")] - s3_region: Option, - /// S3 bucket (required for S3 backend) #[arg(long, value_name="BUCKET")] s3_bucket: Option, - /// S3 access key (required for S3 backend) - #[arg(long, value_name="KEY")] - s3_access_key: Option, - - /// S3 secret key (required for S3 backend) - #[arg(long, value_name="SECRET")] - s3_secret_key: Option, - /// Path/prefix inside the bucket where media files are stored #[arg(long, value_name="PREFIX", default_value="videos")] s3_prefix: String, - /// Public base URL for accessing the bucket/prefix (used for playback URLs) + /// Public base URL for accessing the bucket/prefix (used for playback URLs). + /// If not set, defaults to endpoint/bucket or virtual-hosted style URL. 
#[arg(long, value_name="URL")] s3_public_url: Option, } @@ -216,34 +205,25 @@ fn main() -> anyhow::Result<()> { let storage = match args.storage_backend.as_str() { "local" => StorageBackend::local(args.data_dir.join("videos"), &url_base), "s3" => { - let endpoint = args.s3_endpoint.clone() - .ok_or_else(|| anyhow::anyhow!("--s3-endpoint is required for S3 backend"))?; - let region = args.s3_region.clone() - .ok_or_else(|| anyhow::anyhow!("--s3-region is required for S3 backend"))?; let bucket = args.s3_bucket.clone() .ok_or_else(|| anyhow::anyhow!("--s3-bucket is required for S3 backend"))?; - let access_key = args.s3_access_key.clone() - .ok_or_else(|| anyhow::anyhow!("--s3-access-key is required for S3 backend"))?; - let secret_key = args.s3_secret_key.clone() - .ok_or_else(|| anyhow::anyhow!("--s3-secret-key is required for S3 backend"))?; + // Compute public URL for playback. If not specified, derive from endpoint/bucket. + // For custom endpoints (MinIO etc), default to path-style URLs. let public_base_url = args.s3_public_url.clone() .or_else(|| { - Uri::from_str(&endpoint).ok().and_then(|uri| { - let scheme = uri.scheme_str()?; - let authority = uri.authority()?; - Some(format!("{scheme}://{bucket}.{}", authority)) - }) + args.s3_endpoint.as_ref().map(|ep| + format!("{}/{}", ep.trim_end_matches('/'), &bucket) + ) }) - .unwrap_or_else(|| format!("{}/{}", endpoint.trim_end_matches('/'), bucket)); + .ok_or_else(|| anyhow::anyhow!( + "--s3-public-url is required when --s3-endpoint is not set (AWS S3 requires explicit public URL)" + ))?; StorageBackend::s3( args.data_dir.join("videos"), bucket, - region, - access_key, - secret_key, - endpoint, + args.s3_endpoint.clone(), args.s3_prefix.clone(), public_base_url, )? diff --git a/server/src/storage.rs b/server/src/storage.rs index a392e542..d25443b0 100644 --- a/server/src/storage.rs +++ b/server/src/storage.rs @@ -1,16 +1,9 @@ use std::path::{Path, PathBuf}; -use std::str::FromStr; use std::sync::Arc; use anyhow::{anyhow, Context}; -use aws_sdk_s3::config::endpoint::DefaultResolver; -use aws_sdk_s3::config::Credentials; use aws_sdk_s3::types::{CompletedMultipartUpload, CompletedPart}; -use aws_sdk_s3::{ - config::AsyncSleep, config::Region, config::SharedAsyncSleep, config::Sleep, - primitives::ByteStream, Client, -}; -use http::Uri; +use aws_sdk_s3::{primitives::ByteStream, Client}; use tokio::fs; use tokio::io::AsyncReadExt; use tokio::runtime::Runtime; @@ -20,14 +13,6 @@ pub type ProgressCallback = Arc; const MULTIPART_MIN_SIZE: u64 = 5 * 1024 * 1024; const MULTIPART_CHUNK_SIZE: usize = 8 * 1024 * 1024; -#[derive(Debug)] -pub struct ForeverSleep; - -impl AsyncSleep for ForeverSleep { - fn sleep(&self, _duration: std::time::Duration) -> Sleep { - Sleep::new(std::future::pending()) - } -} /// Simple content type guessing for a handful of formats we serve. fn guess_content_type(path: &Path) -> &'static str { match path @@ -65,13 +50,21 @@ impl StorageBackend { }) } + /// Create an S3 storage backend using the AWS SDK default credential chain. + /// + /// Credentials are resolved automatically in this order: + /// 1. Environment variables: AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY + /// 2. Shared credentials file: ~/.aws/credentials + /// 3. AWS config file: ~/.aws/config (with profiles) + /// 4. ECS container credentials + /// 5. EC2 instance metadata (IAM role) + /// + /// For MinIO or other S3-compatible storage, set endpoint to the service URL. + /// For AWS S3, leave endpoint as None and set AWS_REGION environment variable. 
pub fn s3( media_root: PathBuf, bucket: String, - region: String, - access_key: String, - secret_key: String, - endpoint: String, + endpoint: Option, prefix: String, public_base_url: String, ) -> anyhow::Result { @@ -82,29 +75,21 @@ impl StorageBackend { ); let rt = Runtime::new().context("create tokio runtime for S3 client")?; - let client = { - let region = Region::new(region); - let credentials = Credentials::new(access_key, secret_key, None, None, ""); - let _endpoint_uri = match Uri::from_str(&endpoint) { - Ok(u) => u, - Err(e) => return Err(anyhow!("failed to create uri: {}", e)), - }; - - let resolver = DefaultResolver::new(); - let cfg = rt.block_on(async { - let base = aws_config::defaults(aws_config::BehaviorVersion::latest()) - .region(region) - .endpoint_url(endpoint) - .credentials_provider(credentials) - .load() - .await; - aws_sdk_s3::config::Builder::from(&base) - .endpoint_resolver(resolver) - .sleep_impl(SharedAsyncSleep::new(ForeverSleep)) - .build() - }); - Client::from_conf(cfg) - }; + let client = rt.block_on(async { + let mut config_loader = aws_config::defaults(aws_config::BehaviorVersion::latest()); + + // Only override endpoint for non-AWS S3 (MinIO, etc.) + if let Some(ref ep) = endpoint { + config_loader = config_loader.endpoint_url(ep); + } + + let sdk_config = config_loader.load().await; + let s3_config = aws_sdk_s3::config::Builder::from(&sdk_config) + // Force path-style for MinIO compatibility + .force_path_style(endpoint.is_some()) + .build(); + Client::from_conf(s3_config) + }); Ok(StorageBackend::S3(ObjectStorageBackend { media_root, diff --git a/server/src/tests/integration_test.rs b/server/src/tests/integration_test.rs index 0233cdbc..32dd8fde 100644 --- a/server/src/tests/integration_test.rs +++ b/server/src/tests/integration_test.rs @@ -957,4 +957,267 @@ mod integration_test Ok(()) } + // ==================== S3/MinIO Integration Tests ==================== + + const TEST_BUCKET: &str = "clapshot-test"; + + /// Helper to manage a temporary MinIO instance for testing. + /// Spawns MinIO on a free port with a temp data directory. + /// Automatically cleans up when dropped. + struct TempMinIO { + process: std::process::Child, + endpoint: String, + port: u16, + _data_dir: assert_fs::TempDir, + } + + impl TempMinIO { + /// Start a new MinIO instance. Returns None if `minio` is not in PATH. 
+ fn start() -> Option { + // Check if minio binary is available + if std::process::Command::new("minio") + .arg("--version") + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status() + .is_err() + { + tracing::warn!("MinIO binary not found in PATH - skipping S3 tests"); + return None; + } + + let port = portpicker::pick_unused_port().expect("No TCP ports free"); + let console_port = portpicker::pick_unused_port().expect("No TCP ports free for console"); + let data_dir = assert_fs::TempDir::new().expect("Failed to create temp dir for MinIO"); + + tracing::info!("Starting MinIO on port {} with data dir {:?}", port, data_dir.path()); + + let process = std::process::Command::new("minio") + .arg("server") + .arg(data_dir.path()) + .arg("--address") + .arg(format!("127.0.0.1:{}", port)) + .arg("--console-address") + .arg(format!("127.0.0.1:{}", console_port)) + .env("MINIO_ROOT_USER", "minioadmin") + .env("MINIO_ROOT_PASSWORD", "minioadmin") + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .spawn() + .expect("Failed to start MinIO"); + + let endpoint = format!("http://127.0.0.1:{}", port); + + // Wait for MinIO to be ready + let start = std::time::Instant::now(); + let timeout = Duration::from_secs(10); + loop { + if start.elapsed() > timeout { + tracing::error!("MinIO failed to start within timeout"); + return None; + } + if reqwest::blocking::get(format!("{}/minio/health/live", endpoint)) + .map(|r| r.status().is_success()) + .unwrap_or(false) + { + tracing::info!("MinIO is ready on {}", endpoint); + break; + } + thread::sleep(Duration::from_millis(100)); + } + + Some(TempMinIO { + process, + endpoint, + port, + _data_dir: data_dir, + }) + } + + /// Create an S3 client for this MinIO instance (for test verification) + fn s3_client(&self) -> aws_sdk_s3::Client { + use aws_sdk_s3::config::Region; + + let rt = tokio::runtime::Runtime::new().unwrap(); + let endpoint = self.endpoint.clone(); + rt.block_on(async move { + let config = aws_config::defaults(aws_config::BehaviorVersion::latest()) + .endpoint_url(&endpoint) + .region(Region::new("us-east-1")) + .load() + .await; + let s3_config = aws_sdk_s3::config::Builder::from(&config) + .force_path_style(true) + .build(); + aws_sdk_s3::Client::from_conf(s3_config) + }) + } + + /// Create the test bucket + fn create_bucket(&self) -> anyhow::Result<()> { + let client = self.s3_client(); + let rt = tokio::runtime::Runtime::new()?; + rt.block_on(async { + client + .create_bucket() + .bucket(TEST_BUCKET) + .send() + .await + .map_err(|e| anyhow::anyhow!("Failed to create bucket: {}", e))?; + Ok(()) + }) + } + + /// Create a StorageBackend for this MinIO instance + fn storage_backend(&self, media_root: PathBuf, prefix: &str) -> anyhow::Result { + StorageBackend::s3( + media_root, + TEST_BUCKET.to_string(), + Some(self.endpoint.clone()), + prefix.to_string(), + format!("{}/{}", self.endpoint, TEST_BUCKET), + ) + } + } + + impl Drop for TempMinIO { + fn drop(&mut self) { + tracing::info!("Stopping MinIO on port {}", self.port); + let _ = self.process.kill(); + let _ = self.process.wait(); + } + } + + #[test] + #[serial] + #[traced_test] + fn test_s3_storage_backend_upload() -> anyhow::Result<()> { + // Set credentials for AWS SDK (used by both our code and test verification) + std::env::set_var("AWS_ACCESS_KEY_ID", "minioadmin"); + std::env::set_var("AWS_SECRET_ACCESS_KEY", "minioadmin"); + std::env::set_var("AWS_REGION", "us-east-1"); + + let minio = match TempMinIO::start() { + Some(m) => m, + 
None => return Ok(()), // Skip test if MinIO not available + }; + minio.create_bucket()?; + + let data_dir = assert_fs::TempDir::new()?; + let media_root = data_dir.path().join("videos"); + std::fs::create_dir_all(&media_root)?; + + let test_prefix = format!("test-{}", uuid::Uuid::new_v4()); + let storage = minio.storage_backend(media_root.clone(), &test_prefix)?; + + // Create a test file under media_root + let test_media_id = "test-media-123"; + let media_dir = media_root.join(test_media_id); + std::fs::create_dir_all(&media_dir)?; + let test_file = media_dir.join("test-video.mp4"); + std::fs::write(&test_file, b"fake video content for testing")?; + + // Upload the file + storage.upload_local_path(&test_file)?; + + // Verify the file was uploaded + let client = minio.s3_client(); + let rt = tokio::runtime::Runtime::new()?; + let exists = rt.block_on(async { + let key = format!("{}/{}/test-video.mp4", test_prefix, test_media_id); + client + .head_object() + .bucket(TEST_BUCKET) + .key(&key) + .send() + .await + .is_ok() + }); + + assert!(exists, "Uploaded file should exist in S3"); + + // Verify media_base_url is correct + let expected_url = format!("{}/{}/{}", minio.endpoint, TEST_BUCKET, test_prefix); + assert_eq!(storage.media_base_url(), expected_url); + + Ok(()) + } + + #[test] + #[serial] + #[traced_test] + fn test_s3_storage_progress_callback() -> anyhow::Result<()> { + std::env::set_var("AWS_ACCESS_KEY_ID", "minioadmin"); + std::env::set_var("AWS_SECRET_ACCESS_KEY", "minioadmin"); + std::env::set_var("AWS_REGION", "us-east-1"); + + let minio = match TempMinIO::start() { + Some(m) => m, + None => return Ok(()), + }; + minio.create_bucket()?; + + let data_dir = assert_fs::TempDir::new()?; + let media_root = data_dir.path().join("videos"); + std::fs::create_dir_all(&media_root)?; + + let test_prefix = format!("test-progress-{}", uuid::Uuid::new_v4()); + let storage = minio.storage_backend(media_root.clone(), &test_prefix)?; + + // Create a small test file (below multipart threshold) + let test_media_id = "test-progress-media"; + let media_dir = media_root.join(test_media_id); + std::fs::create_dir_all(&media_dir)?; + let test_file = media_dir.join("test-video.mp4"); + std::fs::write(&test_file, vec![0u8; 1024 * 1024])?; // 1MB + + // Track progress + let progress_values = Arc::new(std::sync::Mutex::new(Vec::new())); + let pv = progress_values.clone(); + let progress_cb: crate::storage::ProgressCallback = Arc::new(move |p| { + pv.lock().unwrap().push(p); + }); + + // Upload with progress tracking + storage.upload_with_progress(&test_file, Some(progress_cb))?; + + // Verify progress was reported and reached 1.0 + let progress = progress_values.lock().unwrap(); + assert!(!progress.is_empty(), "Progress should have been reported"); + assert!( + progress.last().map(|&p| (p - 1.0).abs() < 0.001).unwrap_or(false), + "Final progress should be ~1.0" + ); + + // Verify file exists in S3 + let client = minio.s3_client(); + let rt = tokio::runtime::Runtime::new()?; + let exists = rt.block_on(async { + let key = format!("{}/{}/test-video.mp4", test_prefix, test_media_id); + client + .head_object() + .bucket(TEST_BUCKET) + .key(&key) + .send() + .await + .is_ok() + }); + + assert!(exists, "Uploaded file should exist in S3"); + + Ok(()) + } + + #[test] + fn test_s3_storage_needs_remote_upload() -> anyhow::Result<()> { + let data_dir = assert_fs::TempDir::new()?; + let media_root = data_dir.path().join("videos"); + + // Local storage should not need remote upload + let local_storage = 
StorageBackend::local(media_root.clone(), "http://localhost:8080"); + assert!(!local_storage.needs_remote_upload()); + + Ok(()) + } + } From 536ad7e8e130900a6a85545634ca6178425afeea Mon Sep 17 00:00:00 2001 From: elonen Date: Mon, 1 Dec 2025 03:45:58 +0200 Subject: [PATCH 09/10] Add S3 E2E test and macro support for custom storage --- server/src/api_server/tests.rs | 28 +++- server/src/tests/integration_test.rs | 235 +++++++++++++++++---------- 2 files changed, 169 insertions(+), 94 deletions(-) diff --git a/server/src/api_server/tests.rs b/server/src/api_server/tests.rs index bfcccdb0..5d06ade7 100644 --- a/server/src/api_server/tests.rs +++ b/server/src/api_server/tests.rs @@ -163,10 +163,30 @@ async fn test_api_open_bad_media_file() pub async fn expect_user_msg(ws: &mut crate::api_server::test_utils::WsClient, evt_type: proto::user_message::Type ) -> proto::UserMessage { println!(" --expect_user_msg of type {:?} ....", evt_type); - let cmd = expect_client_cmd!(ws, ShowMessages); - assert_eq!(cmd.msgs.len(), 1); - assert_eq!(cmd.msgs[0].r#type, evt_type as i32); - cmd.msgs[0].clone() + // Loop to skip PROGRESS messages when waiting for other message types + loop { + let cmd = expect_client_cmd!(ws, ShowMessages); + // Filter out PROGRESS messages if we're not looking for them + let non_progress_msgs: Vec<_> = cmd.msgs.iter() + .filter(|m| { + if evt_type != proto::user_message::Type::Progress { + m.r#type != proto::user_message::Type::Progress as i32 + } else { + true + } + }) + .collect(); + + if non_progress_msgs.is_empty() { + // Only got PROGRESS messages, keep waiting + println!(" (skipping PROGRESS message, waiting for {:?})", evt_type); + continue; + } + + assert_eq!(non_progress_msgs.len(), 1, "Expected 1 message of type {:?}, got {} non-progress messages", evt_type, non_progress_msgs.len()); + assert_eq!(non_progress_msgs[0].r#type, evt_type as i32, "Expected message type {:?}, got type {}", evt_type, non_progress_msgs[0].r#type); + return non_progress_msgs[0].clone(); + } } #[tokio::test] diff --git a/server/src/tests/integration_test.rs b/server/src/tests/integration_test.rs index 32dd8fde..933507fa 100644 --- a/server/src/tests/integration_test.rs +++ b/server/src/tests/integration_test.rs @@ -107,10 +107,23 @@ mod integration_test } macro_rules! 
cs_main_test { + // 8-param variant: default storage, no ws_user_override ([$ws:ident, $data_dir:ident, $incoming_dir:ident, $org_conn:ident, $bitrate:expr, $org_cmd:expr, $custom_assertfs:expr, $ingest_username_from:expr] $($body:tt)*) => { - cs_main_test!([$ws, $data_dir, $incoming_dir, $org_conn, $bitrate, $org_cmd, $custom_assertfs, $ingest_username_from, None] $($body)*) + cs_main_test!(@impl [$ws, $data_dir, $incoming_dir, $org_conn, $bitrate, $org_cmd, $custom_assertfs, $ingest_username_from, None, + |media_root: std::path::PathBuf, url_base: &str| crate::storage::StorageBackend::local(media_root, url_base)] $($body)*) }; + // 9-param variant: default storage, with ws_user_override ([$ws:ident, $data_dir:ident, $incoming_dir:ident, $org_conn:ident, $bitrate:expr, $org_cmd:expr, $custom_assertfs:expr, $ingest_username_from:expr, $ws_user_override:expr] $($body:tt)*) => { + cs_main_test!(@impl [$ws, $data_dir, $incoming_dir, $org_conn, $bitrate, $org_cmd, $custom_assertfs, $ingest_username_from, $ws_user_override, + |media_root: std::path::PathBuf, url_base: &str| crate::storage::StorageBackend::local(media_root, url_base)] $($body)*) + }; + // 10-param variant: custom storage factory (receives media_root only, url_base captured by caller) + ([$ws:ident, $data_dir:ident, $incoming_dir:ident, $org_conn:ident, $bitrate:expr, $org_cmd:expr, $custom_assertfs:expr, $ingest_username_from:expr, $ws_user_override:expr, $storage_factory:expr] $($body:tt)*) => { + cs_main_test!(@impl [$ws, $data_dir, $incoming_dir, $org_conn, $bitrate, $org_cmd, $custom_assertfs, $ingest_username_from, $ws_user_override, + |media_root: std::path::PathBuf, _url_base: &str| { let f = $storage_factory; f(media_root) }] $($body)*) + }; + // Single implementation - storage_factory takes (media_root, url_base) + (@impl [$ws:ident, $data_dir:ident, $incoming_dir:ident, $org_conn:ident, $bitrate:expr, $org_cmd:expr, $custom_assertfs:expr, $ingest_username_from:expr, $ws_user_override:expr, $storage_factory:expr] $($body:tt)*) => { { let $data_dir = $custom_assertfs.unwrap_or(assert_fs::TempDir::new().unwrap()); let $incoming_dir = $data_dir.join("incoming"); @@ -133,7 +146,8 @@ mod integration_test let data_dir = $data_dir.path().to_path_buf(); let url_base = url_base.clone(); let org_uri = org_uri.clone(); - let storage = crate::storage::StorageBackend::local(data_dir.join("videos"), &url_base); + let media_root = data_dir.join("videos"); + let storage = { let f = $storage_factory; f(media_root, &url_base) }; let tf = terminate_flag.clone(); thread::spawn(move || { let mut clapshot = crate::ClapshotInit::init_and_spawn_workers(data_dir, true, url_base, vec![], "127.0.0.1".into(), port, org_uri.clone(), grpc_server_bind, 4, target_bitrate, poll_interval, "anonymous".to_string(), poll_interval*5.0, $ingest_username_from, "scripts/clapshot-transcode".to_string(), "scripts/clapshot-thumbnail".to_string(), regex, storage, tf)?; @@ -158,7 +172,7 @@ mod integration_test tracing::info!("Waiting for run_clapshot() to terminate..."); let _ = th.join().unwrap(); } - } + }; } #[test] @@ -248,6 +262,7 @@ mod integration_test // --- Transcoding tests --- pub struct WaitForReportResults { + pub media_id: String, pub transcode_complete: bool, pub thumbs_complete: bool, pub got_progress_report: bool, @@ -265,6 +280,7 @@ mod integration_test check_file_outputs: Option<(PathBuf, String)>) -> WaitForReportResults { let mut res = WaitForReportResults { + media_id: String::new(), transcode_complete: false, thumbs_complete: false, 
got_progress_report: false,
             got_transcode_report: false,
             got_thumbnail_report: false,
             ts_cols: String::new(),
             ts_rows: String::new(),
@@ -276,6 +292,7 @@ mod integration_test
         thread::sleep(Duration::from_secs_f32(0.5));
         let msg = expect_user_msg(&mut ws, proto::user_message::Type::MediaFileAdded).await;   // notification to client (with upload folder info etc)
         let vid = msg.refs.unwrap().media_file_id.unwrap();
+        res.media_id = vid.clone();
 
         thread::sleep(Duration::from_secs_f32(0.5));
         let msg = expect_user_msg(&mut ws, proto::user_message::Type::Ok).await;  // notification to user (in text)
@@ -1078,6 +1095,62 @@ mod integration_test
                 format!("{}/{}", self.endpoint, TEST_BUCKET),
             )
         }
+
+        /// Check if an object exists in S3
+        fn object_exists(&self, key: &str) -> bool {
+            let client = self.s3_client();
+            let rt = tokio::runtime::Runtime::new().unwrap();
+            rt.block_on(async {
+                client
+                    .head_object()
+                    .bucket(TEST_BUCKET)
+                    .key(key)
+                    .send()
+                    .await
+                    .is_ok()
+            })
+        }
+
+        /// List all objects under a prefix (blocking version for non-async tests)
+        fn list_objects(&self, prefix: &str) -> Vec<String> {
+            let client = self.s3_client();
+            let rt = tokio::runtime::Runtime::new().unwrap();
+            rt.block_on(Self::list_objects_async(&client, prefix))
+        }
+
+        /// List all objects under a prefix (async version for use inside async contexts)
+        async fn list_objects_async(client: &aws_sdk_s3::Client, prefix: &str) -> Vec<String> {
+            client
+                .list_objects_v2()
+                .bucket(TEST_BUCKET)
+                .prefix(prefix)
+                .send()
+                .await
+                .map(|r| r.contents().iter().filter_map(|o| o.key().map(String::from)).collect())
+                .unwrap_or_default()
+        }
+
+        /// Set up AWS env vars for SDK credential chain
+        fn setup_env_vars() {
+            std::env::set_var("AWS_ACCESS_KEY_ID", "minioadmin");
+            std::env::set_var("AWS_SECRET_ACCESS_KEY", "minioadmin");
+            std::env::set_var("AWS_REGION", "us-east-1");
+        }
+
+        /// Set up a complete test environment. Returns (storage, data_dir, prefix).
+        fn setup_test(&self) -> anyhow::Result<(StorageBackend, assert_fs::TempDir, String)> {
+            TempMinIO::setup_env_vars();
+            self.create_bucket()?;
+
+            let data_dir = assert_fs::TempDir::new()?;
+            let media_root = data_dir.path().join("videos");
+            std::fs::create_dir_all(&media_root)?;
+
+            let prefix = format!("test-{}", uuid::Uuid::new_v4());
+            let storage = self.storage_backend(media_root, &prefix)?;
+
+            Ok((storage, data_dir, prefix))
+        }
     }
 
     impl Drop for TempMinIO {
@@ -1088,122 +1161,104 @@ mod integration_test
         }
     }
 
+    /// Tests S3 upload for both small files (simple PUT) and large files (multipart).
+    /// Also verifies progress callback and media_base_url.
#[test] #[serial] #[traced_test] - fn test_s3_storage_backend_upload() -> anyhow::Result<()> { - // Set credentials for AWS SDK (used by both our code and test verification) - std::env::set_var("AWS_ACCESS_KEY_ID", "minioadmin"); - std::env::set_var("AWS_SECRET_ACCESS_KEY", "minioadmin"); - std::env::set_var("AWS_REGION", "us-east-1"); - + fn test_s3_storage_upload() -> anyhow::Result<()> { let minio = match TempMinIO::start() { Some(m) => m, - None => return Ok(()), // Skip test if MinIO not available + None => return Ok(()), // Skip if MinIO not available }; - minio.create_bucket()?; - - let data_dir = assert_fs::TempDir::new()?; + let (storage, data_dir, prefix) = minio.setup_test()?; let media_root = data_dir.path().join("videos"); - std::fs::create_dir_all(&media_root)?; - - let test_prefix = format!("test-{}", uuid::Uuid::new_v4()); - let storage = minio.storage_backend(media_root.clone(), &test_prefix)?; - - // Create a test file under media_root - let test_media_id = "test-media-123"; - let media_dir = media_root.join(test_media_id); - std::fs::create_dir_all(&media_dir)?; - let test_file = media_dir.join("test-video.mp4"); - std::fs::write(&test_file, b"fake video content for testing")?; - - // Upload the file - storage.upload_local_path(&test_file)?; - - // Verify the file was uploaded - let client = minio.s3_client(); - let rt = tokio::runtime::Runtime::new()?; - let exists = rt.block_on(async { - let key = format!("{}/{}/test-video.mp4", test_prefix, test_media_id); - client - .head_object() - .bucket(TEST_BUCKET) - .key(&key) - .send() - .await - .is_ok() - }); - - assert!(exists, "Uploaded file should exist in S3"); // Verify media_base_url is correct - let expected_url = format!("{}/{}/{}", minio.endpoint, TEST_BUCKET, test_prefix); + let expected_url = format!("{}/{}/{}", minio.endpoint, TEST_BUCKET, prefix); assert_eq!(storage.media_base_url(), expected_url); + // Test 1: Small file upload (simple PUT, below 5MB threshold) + let small_dir = media_root.join("small-file"); + std::fs::create_dir_all(&small_dir)?; + let small_file = small_dir.join("small.mp4"); + std::fs::write(&small_file, b"small test content")?; + storage.upload_local_path(&small_file)?; + assert!(minio.object_exists(&format!("{}/small-file/small.mp4", prefix)), + "Small file should exist in S3"); + + // Test 2: Large file upload (multipart, 10MB > 5MB threshold) with progress + let large_dir = media_root.join("large-file"); + std::fs::create_dir_all(&large_dir)?; + let large_file = large_dir.join("large.mp4"); + std::fs::write(&large_file, vec![0u8; 10 * 1024 * 1024])?; + + let progress_values = Arc::new(std::sync::Mutex::new(Vec::new())); + let pv = progress_values.clone(); + let progress_cb: crate::storage::ProgressCallback = Arc::new(move |p| { + pv.lock().unwrap().push(p); + }); + storage.upload_with_progress(&large_file, Some(progress_cb))?; + + let progress = progress_values.lock().unwrap(); + assert!(!progress.is_empty(), "Progress should have been reported"); + assert!((progress.last().unwrap() - 1.0).abs() < 0.001, "Final progress should be ~1.0"); + assert!(minio.object_exists(&format!("{}/large-file/large.mp4", prefix)), + "Large file should exist in S3"); + Ok(()) } + /// Full E2E test: ingest video → transcode → upload to S3 #[test] #[serial] #[traced_test] - fn test_s3_storage_progress_callback() -> anyhow::Result<()> { - std::env::set_var("AWS_ACCESS_KEY_ID", "minioadmin"); - std::env::set_var("AWS_SECRET_ACCESS_KEY", "minioadmin"); - std::env::set_var("AWS_REGION", "us-east-1"); - + 
#[cfg(feature = "include_slow_tests")] + fn test_s3_video_ingest_transcode_upload() -> anyhow::Result<()> { + TempMinIO::setup_env_vars(); let minio = match TempMinIO::start() { Some(m) => m, None => return Ok(()), }; minio.create_bucket()?; - let data_dir = assert_fs::TempDir::new()?; - let media_root = data_dir.path().join("videos"); - std::fs::create_dir_all(&media_root)?; + let test_prefix = format!("test-e2e-{}", uuid::Uuid::new_v4()); + let minio_endpoint = minio.endpoint.clone(); + let prefix_clone = test_prefix.clone(); - let test_prefix = format!("test-progress-{}", uuid::Uuid::new_v4()); - let storage = minio.storage_backend(media_root.clone(), &test_prefix)?; + // Get S3 client before entering async context (avoids nested runtime) + let s3_client = minio.s3_client(); - // Create a small test file (below multipart threshold) - let test_media_id = "test-progress-media"; - let media_dir = media_root.join(test_media_id); - std::fs::create_dir_all(&media_dir)?; - let test_file = media_dir.join("test-video.mp4"); - std::fs::write(&test_file, vec![0u8; 1024 * 1024])?; // 1MB + let storage_factory = move |media_root: PathBuf| -> StorageBackend { + StorageBackend::s3( + media_root, TEST_BUCKET.to_string(), Some(minio_endpoint.clone()), + prefix_clone.clone(), format!("{}/{}", minio_endpoint, TEST_BUCKET), + ).expect("Failed to create S3 storage backend") + }; - // Track progress - let progress_values = Arc::new(std::sync::Mutex::new(Vec::new())); - let pv = progress_values.clone(); - let progress_cb: crate::storage::ProgressCallback = Arc::new(move |p| { - pv.lock().unwrap().push(p); - }); + cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 500_000, None, None, IngestUsernameFrom::FileOwner, None, storage_factory] + // Ingest test video + let video_file_name = "NASA_Red_Lettuce_excerpt.mov"; + data_dir.copy_from("src/tests/assets/", &[video_file_name])?; + std::fs::rename(data_dir.join(video_file_name), incoming_dir.join(video_file_name))?; - // Upload with progress tracking - storage.upload_with_progress(&test_file, Some(progress_cb))?; + // Wait for processing (transcode + thumbnails) + let wait_res = wait_for_reports(&mut ws, true, true, true, None).await; + assert!(wait_res.transcode_complete, "Transcode did not complete"); + assert!(wait_res.thumbs_complete, "Thumbnails did not complete"); - // Verify progress was reported and reached 1.0 - let progress = progress_values.lock().unwrap(); - assert!(!progress.is_empty(), "Progress should have been reported"); - assert!( - progress.last().map(|&p| (p - 1.0).abs() < 0.001).unwrap_or(false), - "Final progress should be ~1.0" - ); - - // Verify file exists in S3 - let client = minio.s3_client(); - let rt = tokio::runtime::Runtime::new()?; - let exists = rt.block_on(async { - let key = format!("{}/{}/test-video.mp4", test_prefix, test_media_id); - client - .head_object() - .bucket(TEST_BUCKET) - .key(&key) - .send() - .await - .is_ok() - }); + // Give S3 upload time to finish + thread::sleep(Duration::from_secs(2)); + + // Verify files in S3 (use async version to avoid nested runtime) + let objects = TempMinIO::list_objects_async(&s3_client, &format!("{}/{}/", test_prefix, wait_res.media_id)).await; + tracing::info!("S3 objects: {:?}", objects); - assert!(exists, "Uploaded file should exist in S3"); + assert!(objects.iter().any(|k| k.contains("video.mp4")), "Transcoded video missing: {:?}", objects); + assert!(objects.iter().any(|k| k.contains("/thumbs/")), "Thumbnails missing: {:?}", objects); + + Ok::<_, anyhow::Error>(()) + } 
Ok(())
    }


From 9a27e25f24a760242f6ad63be223249971d76140 Mon Sep 17 00:00:00 2001
From: elonen
Date: Mon, 1 Dec 2025 11:35:14 +0200
Subject: [PATCH 10/10] Fix S3 storage runtime drop panic during server
 shutdown (=> dirty exits, flaky integration tests)

Don't persist the tokio runtime in ObjectStorageBackend. Instead, create
temporary runtimes for client init and each upload operation. This avoids
"cannot drop runtime in async context" panics when the storage is dropped
inside api_server's tokio runtime during shutdown.

Trade-off: slightly less efficient (no connection pooling), but safe.
A proper fix would make uploads truly async.
---
 server/src/storage.rs                | 47 +++++++++++++++++-----------
 server/src/tests/integration_test.rs |  6 ++--
 2 files changed, 30 insertions(+), 23 deletions(-)

diff --git a/server/src/storage.rs b/server/src/storage.rs
index d25443b0..7cda258b 100644
--- a/server/src/storage.rs
+++ b/server/src/storage.rs
@@ -6,7 +6,6 @@ use aws_sdk_s3::types::{CompletedMultipartUpload, CompletedPart};
 use aws_sdk_s3::{primitives::ByteStream, Client};
 use tokio::fs;
 use tokio::io::AsyncReadExt;
-use tokio::runtime::Runtime;
 use tracing;
 
 pub type ProgressCallback = Arc<dyn Fn(f32) + Send + Sync>;
@@ -74,22 +73,30 @@ impl StorageBackend {
             prefix.trim_end_matches('/')
         );
 
-        let rt = Runtime::new().context("create tokio runtime for S3 client")?;
-        let client = rt.block_on(async {
-            let mut config_loader = aws_config::defaults(aws_config::BehaviorVersion::latest());
-
-            // Only override endpoint for non-AWS S3 (MinIO, etc.)
-            if let Some(ref ep) = endpoint {
-                config_loader = config_loader.endpoint_url(ep);
-            }
+        // Create a temporary runtime just for client initialization.
+        // The client survives after the runtime is dropped.
+        // We don't persist the runtime to avoid "cannot drop runtime in async context" panics
+        // when the storage is dropped inside another tokio runtime (e.g., during server shutdown).
+        let client = {
+            let rt = tokio::runtime::Runtime::new().context("create tokio runtime for S3 client init")?;
+            let client = rt.block_on(async {
+                let mut config_loader = aws_config::defaults(aws_config::BehaviorVersion::latest());
+
+                // Only override endpoint for non-AWS S3 (MinIO, etc.)
+                if let Some(ref ep) = endpoint {
+                    config_loader = config_loader.endpoint_url(ep);
+                }
 
-            let sdk_config = config_loader.load().await;
-            let s3_config = aws_sdk_s3::config::Builder::from(&sdk_config)
-                // Force path-style for MinIO compatibility
-                .force_path_style(endpoint.is_some())
-                .build();
-            Client::from_conf(s3_config)
-        });
+                let sdk_config = config_loader.load().await;
+                let s3_config = aws_sdk_s3::config::Builder::from(&sdk_config)
+                    // Force path-style for MinIO compatibility
+                    .force_path_style(endpoint.is_some())
+                    .build();
+                Client::from_conf(s3_config)
+            });
+            // rt is dropped here, but client survives
+            client
+        };
 
         Ok(StorageBackend::S3(ObjectStorageBackend {
             media_root,
@@ -97,7 +104,6 @@
             media_base_url,
             client: Arc::new(client),
             bucket,
-            rt: Arc::new(rt),
         }))
     }
 
@@ -197,7 +203,6 @@ pub struct ObjectStorageBackend {
     pub media_base_url: String,
     pub bucket: String,
     pub client: Arc<Client>,
-    pub rt: Arc<Runtime>,
 }
 
 impl ObjectStorageBackend {
@@ -212,7 +217,11 @@ impl ObjectStorageBackend {
         let client = self.client.clone();
         let path = abs_path.to_path_buf();
 
-        self.rt.block_on(async move {
+        // Create a fresh runtime for each upload to avoid "cannot drop runtime in async context" panics.
+ // This is slightly less efficient than reusing a runtime, but much safer when the storage + // is held by code that runs inside another tokio runtime (like the api_server). + let rt = tokio::runtime::Runtime::new().context("create tokio runtime for S3 upload")?; + rt.block_on(async move { let mut file = fs::File::open(&path) .await .with_context(|| format!("Open file {:?}", path))?; diff --git a/server/src/tests/integration_test.rs b/server/src/tests/integration_test.rs index 933507fa..ea75c3bb 100644 --- a/server/src/tests/integration_test.rs +++ b/server/src/tests/integration_test.rs @@ -1239,8 +1239,8 @@ mod integration_test cs_main_test! {[ws, data_dir, incoming_dir, _org_conn, 500_000, None, None, IngestUsernameFrom::FileOwner, None, storage_factory] // Ingest test video let video_file_name = "NASA_Red_Lettuce_excerpt.mov"; - data_dir.copy_from("src/tests/assets/", &[video_file_name])?; - std::fs::rename(data_dir.join(video_file_name), incoming_dir.join(video_file_name))?; + data_dir.copy_from("src/tests/assets/", &[video_file_name]).unwrap(); + std::fs::rename(data_dir.join(video_file_name), incoming_dir.join(video_file_name)).unwrap(); // Wait for processing (transcode + thumbnails) let wait_res = wait_for_reports(&mut ws, true, true, true, None).await; @@ -1256,8 +1256,6 @@ mod integration_test assert!(objects.iter().any(|k| k.contains("video.mp4")), "Transcoded video missing: {:?}", objects); assert!(objects.iter().any(|k| k.contains("/thumbs/")), "Thumbnails missing: {:?}", objects); - - Ok::<_, anyhow::Error>(()) } Ok(())
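
Note on the "proper fix" mentioned in PATCH 10/10's message: making the upload
path truly async would remove the per-call Runtime::new() entirely, since
callers inside the api_server's runtime could simply .await the upload. Below
is a minimal sketch of that direction. The method name upload_local_path_async
and the media_root/prefix field accesses are assumptions (the struct's full
field list is cut off in the hunk above), and multipart handling for files
above the 5 MB threshold is omitted for brevity:

use anyhow::Context;                      // already imported in storage.rs
use aws_sdk_s3::primitives::ByteStream;   // already imported in storage.rs

impl ObjectStorageBackend {
    /// Hypothetical async upload: no Runtime::new(), no block_on. It runs on
    /// whatever tokio runtime the caller already has, so dropping the backend
    /// inside api_server's runtime cannot panic, and the S3 client's
    /// connection pool is reused across uploads.
    pub async fn upload_local_path_async(&self, abs_path: &std::path::Path) -> anyhow::Result<()> {
        // Derive the S3 key the same way the blocking code does: the path
        // relative to media_root, placed under the configured key prefix.
        let rel = abs_path.strip_prefix(&self.media_root)
            .context("upload path not under media_root")?;
        let key = format!("{}/{}", self.prefix, rel.to_string_lossy());

        // Simplified: plain PUT only. The real backend switches to a
        // multipart upload above its 5 MB threshold.
        let body = ByteStream::from_path(abs_path).await
            .with_context(|| format!("Open file {:?}", abs_path))?;

        self.client
            .put_object()
            .bucket(&self.bucket)
            .key(&key)
            .body(body)
            .send()
            .await
            .with_context(|| format!("PUT s3://{}/{}", self.bucket, key))?;
        Ok(())
    }
}

The call sites in the video pipeline would then have to become async (or hold
a runtime handle themselves), which is presumably why this patch settles for
the temporary-runtime compromise instead.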