From 7839f1f4e93d521eae0cdff8125e354c56441b26 Mon Sep 17 00:00:00 2001 From: vsilent Date: Thu, 25 Dec 2025 21:21:56 +0200 Subject: [PATCH 01/35] cargo.lock re-gen --- .github/workflows/docker.yml | 6 +- Cargo.lock | 2377 ++++++++++++++++++---------------- Cargo.toml | 7 +- 3 files changed, 1264 insertions(+), 1126 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index bf9a453..da9b43c 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -30,7 +30,7 @@ jobs: components: rustfmt, clippy - name: Cache cargo registry - uses: actions/cache@v3.0.7 + uses: actions/cache@v4 with: path: ~/.cargo/registry key: docker-registry-${{ hashFiles('**/Cargo.lock') }} @@ -39,7 +39,7 @@ jobs: docker- - name: Cache cargo index - uses: actions/cache@v3.0.7 + uses: actions/cache@v4 with: path: ~/.cargo/git key: docker-index-${{ hashFiles('**/Cargo.lock') }} @@ -52,7 +52,7 @@ jobs: head -c16 /dev/urandom > src/secret.key - name: Cache cargo build - uses: actions/cache@v3.0.7 + uses: actions/cache@v4 with: path: target key: docker-build-${{ hashFiles('**/Cargo.lock') }} diff --git a/Cargo.lock b/Cargo.lock index 1cc251e..4217d1f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5,7 +5,7 @@ version = 4 [[package]] name = "actix-casbin-auth" version = "1.1.0" -source = "git+https://github.com/casbin-rs/actix-casbin-auth.git#1bf1ef5854994c3df8703e96350758e748c8d099" +source = "git+https://github.com/casbin-rs/actix-casbin-auth.git#d7cde82f76fa8d7e415650dda9f2daefcc575caa" dependencies = [ "actix-service", "actix-web", @@ -20,7 +20,7 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f7b0a21988c1bf877cf4759ef5ddaac04c1c9fe808c9142ecb78ba97d97a28a" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.10.0", "bytes", "futures-core", "futures-sink", @@ -39,7 +39,7 @@ checksum = "0346d8c1f762b41b458ed3145eea914966bb9ad20b9be0d6d463b20d45586370" dependencies = [ "actix-utils", "actix-web", - "derive_more", + "derive_more 0.99.20", "futures-util", "log", "once_cell", @@ -48,23 +48,23 @@ dependencies = [ [[package]] name = "actix-http" -version = "3.9.0" +version = "3.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d48f96fc3003717aeb9856ca3d02a8c7de502667ad76eeacd830b48d2e91fac4" +checksum = "7926860314cbe2fb5d1f13731e387ab43bd32bca224e82e6e2db85de0a3dba49" dependencies = [ "actix-codec", "actix-rt", "actix-service", "actix-utils", - "ahash 0.8.11", "base64 0.22.1", - "bitflags 2.6.0", - "brotli 6.0.0", + "bitflags 2.10.0", + "brotli 8.0.2", "bytes", "bytestring", - "derive_more", + "derive_more 2.1.1", "encoding_rs", "flate2", + "foldhash", "futures-core", "h2", "http", @@ -76,7 +76,7 @@ dependencies = [ "mime", "percent-encoding", "pin-project-lite", - "rand 0.8.5", + "rand 0.9.2", "sha1", "smallvec", "tokio", @@ -92,7 +92,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb" dependencies = [ "quote", - "syn 2.0.87", + "syn 2.0.111", ] [[package]] @@ -112,9 +112,9 @@ dependencies = [ [[package]] name = "actix-rt" -version = "2.10.0" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24eda4e2a6e042aa4e55ac438a2ae052d3b5da0ecf83d7411e1a368946925208" +checksum = "92589714878ca59a7626ea19734f0e07a6a875197eec751bb5d3f99e64998c63" dependencies = [ "futures-core", "tokio", @@ -122,9 +122,9 @@ dependencies = [ [[package]] name = "actix-server" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ca2549781d8dd6d75c40cf6b6051260a2cc2f3c62343d761a969a0640646894" +checksum = "a65064ea4a457eaf07f2fba30b4c695bf43b721790e9530d26cb6f9019ff7502" dependencies = [ "actix-rt", "actix-service", @@ -132,19 +132,18 @@ dependencies = [ "futures-core", "futures-util", "mio", - "socket2 0.5.7", + "socket2 0.5.10", "tokio", "tracing", ] [[package]] name = "actix-service" -version = "2.0.2" +version = "2.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b894941f818cfdc7ccc4b9e60fa7e53b5042a2e8567270f9147d5591893373a" +checksum = "9e46f36bf0e5af44bdc4bdb36fbbd421aa98c79a9bce724e1edeb3894e10dc7f" dependencies = [ "futures-core", - "paste", "pin-project-lite", ] @@ -160,9 +159,9 @@ dependencies = [ [[package]] name = "actix-web" -version = "4.9.0" +version = "4.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9180d76e5cc7ccbc4d60a506f2c727730b154010262df5b910eb17dbe4b8cb38" +checksum = "1654a77ba142e37f049637a3e5685f864514af11fcbc51cb51eb6596afe5b8d6" dependencies = [ "actix-codec", "actix-http", @@ -173,13 +172,13 @@ dependencies = [ "actix-service", "actix-utils", "actix-web-codegen", - "ahash 0.8.11", "bytes", "bytestring", "cfg-if", "cookie", - "derive_more", + "derive_more 2.1.1", "encoding_rs", + "foldhash", "futures-core", "futures-util", "impl-more", @@ -195,8 +194,9 @@ dependencies = [ "serde_json", "serde_urlencoded", "smallvec", - "socket2 0.5.7", - "time 0.3.36", + "socket2 0.6.1", + "time", + "tracing", "url", ] @@ -209,23 +209,14 @@ dependencies = [ "actix-router", "proc-macro2", "quote", - "syn 2.0.87", -] - -[[package]] -name = "addr2line" -version = "0.24.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" -dependencies = [ - "gimli", + "syn 2.0.111", ] [[package]] name = "adler2" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" [[package]] name = "aead" @@ -268,20 +259,20 @@ version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", "once_cell", "version_check", ] [[package]] name = "ahash" -version = "0.8.11" +version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" dependencies = [ "cfg-if", "const-random", - "getrandom 0.2.15", + "getrandom 0.3.4", "once_cell", "version_check", "zerocopy", @@ -289,9 +280,9 @@ dependencies = [ [[package]] name = "aho-corasick" -version = "1.1.3" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" dependencies = [ "memchr", ] @@ -313,15 +304,15 @@ dependencies = [ [[package]] name = "allocator-api2" -version = "0.2.20" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45862d1c77f2228b9e10bc609d5bc203d86ebc9b87ad8d5d5167a6c9abf739d9" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "amq-protocol" -version = "7.2.2" +version = "7.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3a41c091e49edfcc098b4f90d4d7706a8cf9158034e84ebfee7ff346092f67c" +checksum = "587d313f3a8b4a40f866cc84b6059fe83133bf172165ac3b583129dd211d8e1c" dependencies = [ "amq-protocol-tcp", "amq-protocol-types", @@ -333,9 +324,9 @@ dependencies = [ [[package]] name = "amq-protocol-tcp" -version = "7.2.2" +version = "7.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed7a4a662472f88823ed2fc81babb0b00562f2c54284e3e7bffc02b6df649bf" +checksum = "dc707ab9aa964a85d9fc25908a3fdc486d2e619406883b3105b48bf304a8d606" dependencies = [ "amq-protocol-uri", "tcp-stream", @@ -344,9 +335,9 @@ dependencies = [ [[package]] name = "amq-protocol-types" -version = "7.2.2" +version = "7.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd6484fdc918c1b6e2ae8eda2914d19a5873e1975f93ad8d33d6a24d1d98df05" +checksum = "bf99351d92a161c61ec6ecb213bc7057f5b837dd4e64ba6cb6491358efd770c4" dependencies = [ "cookie-factory", "nom", @@ -356,21 +347,15 @@ dependencies = [ [[package]] name = "amq-protocol-uri" -version = "7.2.2" +version = "7.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f7f2da69e0e1182765bf33407cd8a843f20791b5af2b57a2645818c4776c56c" +checksum = "f89f8273826a676282208e5af38461a07fe939def57396af6ad5997fcf56577d" dependencies = [ "amq-protocol-types", "percent-encoding", "url", ] -[[package]] -name = "android-tzdata" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" - [[package]] name = "android_system_properties" version = "0.1.5" @@ -382,9 +367,9 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.18" +version = "0.6.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" +checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a" dependencies = [ "anstyle", "anstyle-parse", @@ -397,55 +382,59 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.10" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" +checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" [[package]] name = "anstyle-parse" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" +checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.2" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" +checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] name = "anstyle-wincon" -version = "3.0.6" +version = "3.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" +checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" dependencies = [ "anstyle", - "windows-sys 0.59.0", + "once_cell_polyfill", + "windows-sys 0.61.2", ] [[package]] name = "anyhow" -version = "1.0.93" +version = "1.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" [[package]] name = "arc-swap" -version = "1.7.1" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" +checksum = "51d03449bb8ca2cc2ef70869af31463d1ae5ccc8fa3e334b307203fbf815207e" +dependencies = [ + "rustversion", +] [[package]] name = "asn1-rs" -version = "0.6.2" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5493c3bedbacf7fd7382c6346bbd66687d12bbaad3a89a2d2c303ee6cf20b048" +checksum = "56624a96882bb8c26d61312ae18cb45868e5a9992ea73c58e45c3101e56a1e60" dependencies = [ "asn1-rs-derive", "asn1-rs-impl", @@ -453,19 +442,19 @@ dependencies = [ "nom", "num-traits", "rusticata-macros", - "thiserror", - "time 0.3.36", + "thiserror 2.0.17", + "time", ] [[package]] name = "asn1-rs-derive" -version = "0.5.1" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "965c2d33e53cb6b267e148a4cb0760bc01f4904c1cd4bb4002a085bb016d1490" +checksum = "3109e49b1e4909e9db6515a30c633684d68cdeaa252f215214cb4fa1a5bfee2c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.111", "synstructure", ] @@ -477,7 +466,7 @@ checksum = "7b18050c2cd6fe86c3a76584ef5e0baf286d038cda203eb6223df2cc413565f7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.111", ] [[package]] @@ -503,9 +492,9 @@ dependencies = [ [[package]] name = "async-channel" -version = "2.3.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" +checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2" dependencies = [ "concurrent-queue", "event-listener-strategy", @@ -515,37 +504,37 @@ dependencies = [ [[package]] name = "async-executor" -version = "1.13.1" +version = "1.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30ca9a001c1e8ba5149f91a74362376cc6bc5b919d92d988668657bd570bdcec" +checksum = "497c00e0fd83a72a79a39fcbd8e3e2f055d6f6c7e025f3b3d91f4f8e76527fb8" dependencies = [ "async-task", "concurrent-queue", - "fastrand 2.2.0", - "futures-lite 2.5.0", + "fastrand 2.3.0", + "futures-lite 2.6.1", + "pin-project-lite", "slab", ] [[package]] name = "async-global-executor" -version = "2.4.1" +version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" +checksum = "13f937e26114b93193065fd44f507aa2e9169ad0cdabbb996920b1fe1ddea7ba" dependencies = [ - "async-channel 2.3.1", + "async-channel 2.5.0", "async-executor", - "async-io 2.4.0", - "async-lock 3.4.0", + "async-io 2.6.0", + "async-lock 3.4.2", "blocking", - "futures-lite 2.5.0", - "once_cell", + "futures-lite 2.6.1", ] [[package]] name = "async-global-executor-trait" -version = "2.1.2" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80f19936c1a84fb48ceb8899b642d2a72572587d1021cc561bfb24de9f33ee89" +checksum = "9af57045d58eeb1f7060e7025a1631cbc6399e0a1d10ad6735b3d0ea7f8346ce" dependencies = [ "async-global-executor", "async-trait", @@ -566,7 +555,7 @@ dependencies = [ "log", "parking", "polling 2.8.0", - "rustix 0.37.27", + "rustix 0.37.28", "slab", "socket2 0.4.10", "waker-fn", @@ -574,21 +563,20 @@ dependencies = [ [[package]] name = "async-io" -version = "2.4.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a2b323ccce0a1d90b449fd71f2a06ca7faa7c54c2751f06c9bd851fc061059" +checksum = "456b8a8feb6f42d237746d4b3e9a178494627745c3c56c6ea55d92ba50d026fc" dependencies = [ - "async-lock 3.4.0", + "autocfg", "cfg-if", "concurrent-queue", "futures-io", - "futures-lite 2.5.0", + "futures-lite 2.6.1", "parking", - "polling 3.7.4", - "rustix 0.38.40", + "polling 3.11.0", + "rustix 1.1.3", "slab", - "tracing", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -602,11 +590,11 @@ dependencies = [ [[package]] name = "async-lock" -version = "3.4.0" +version = "3.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" +checksum = "290f7f2596bd5b78a9fec8088ccd89180d7f9f55b94b0576823bbbdc72ee8311" dependencies = [ - "event-listener 5.3.1", + "event-listener 5.4.1", "event-listener-strategy", "pin-project-lite", ] @@ -631,22 +619,13 @@ checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" [[package]] name = "async-trait" -version = "0.1.83" +version = "0.1.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", -] - -[[package]] -name = "atoi" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7c57d12312ff59c811c0643f4d80830505833c9ffaebd193d819392b265be8e" -dependencies = [ - "num-traits", + "syn 2.0.111", ] [[package]] @@ -666,24 +645,9 @@ checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] name = "autocfg" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" - -[[package]] -name = "backtrace" -version = "0.3.74" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" -dependencies = [ - "addr2line", - "cfg-if", - "libc", - "miniz_oxide", - "object", - "rustc-demangle", - "windows-targets 0.52.6", -] +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" [[package]] name = "base64" @@ -705,9 +669,9 @@ checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "base64ct" -version = "1.6.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" +checksum = "0e050f626429857a27ddccb31e0aca21356bfa709c04041aefddac081a8f068a" [[package]] name = "bitflags" @@ -717,9 +681,12 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.6.0" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" +dependencies = [ + "serde_core", +] [[package]] name = "block-buffer" @@ -741,14 +708,14 @@ dependencies = [ [[package]] name = "blocking" -version = "1.6.1" +version = "1.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea" +checksum = "e83f8d02be6967315521be875afa792a316e28d57b5a2d401897e2a7921b7f21" dependencies = [ - "async-channel 2.3.1", + "async-channel 2.5.0", "async-task", "futures-io", - "futures-lite 2.5.0", + "futures-lite 2.6.1", "piper", ] @@ -765,13 +732,13 @@ dependencies = [ [[package]] name = "brotli" -version = "6.0.0" +version = "8.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74f7971dbd9326d58187408ab83117d8ac1bb9c17b085fdacd1cf2f598719b6b" +checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", - "brotli-decompressor 4.0.1", + "brotli-decompressor 5.0.0", ] [[package]] @@ -786,9 +753,9 @@ dependencies = [ [[package]] name = "brotli-decompressor" -version = "4.0.1" +version = "5.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a45bd2e4095a8b518033b128020dd4a55aab1c0a381ba4404a472630f4bc362" +checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -796,15 +763,15 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.16.0" +version = "3.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" +checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" [[package]] name = "bytecount" -version = "0.6.8" +version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce" +checksum = "175812e0be2bccb6abe50bb8d566126198344f707e304f45c648fd8f2cc0365e" [[package]] name = "byteorder" @@ -814,33 +781,33 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.8.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da" +checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" [[package]] name = "bytestring" -version = "1.3.1" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74d80203ea6b29df88012294f62733de21cfeab47f17b41af3a38bc30a03ee72" +checksum = "113b4343b5f6617e7ad401ced8de3cc8b012e73a594347c307b90db3e9271289" dependencies = [ "bytes", ] [[package]] name = "camino" -version = "1.1.9" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3" +checksum = "e629a66d692cb9ff1a1c664e41771b3dcaf961985a9774c0eb0bd1b51cf60a48" dependencies = [ - "serde", + "serde_core", ] [[package]] name = "cargo-platform" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc" +checksum = "e35af189006b9c0f00a064685c727031e3ed2d8020f7ba284d78cc2671bd36ea" dependencies = [ "serde", ] @@ -860,17 +827,17 @@ dependencies = [ [[package]] name = "casbin" -version = "2.5.0" +version = "2.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66e141a8db13c2e8bf3fdd6ac2b48ace7e70d2e4a66c329a4bb759e1368f22dc" +checksum = "4b12705127ab9fcf4fbc22a0c93f441514fe7bd7a7248ce443e4bf531c54b7ee" dependencies = [ "async-trait", "fixedbitset", - "getrandom 0.2.15", + "getrandom 0.3.4", "hashlink 0.9.1", "mini-moka", "once_cell", - "parking_lot 0.12.3", + "parking_lot", "petgraph", "regex", "rhai", @@ -879,10 +846,17 @@ dependencies = [ "slog", "slog-async", "slog-term", - "thiserror", + "thiserror 1.0.69", "tokio", + "wasm-bindgen-test", ] +[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + [[package]] name = "cbc" version = "0.1.2" @@ -894,10 +868,11 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.1" +version = "1.2.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd9de9f2205d5ef3fd67e685b0df337994ddd4495e2a28d185500d0e1edfea47" +checksum = "9f50d563227a1c37cc0a263f64eca3334388c01c5e4c4861a9def205c614383c" dependencies = [ + "find-msvc-tools", "jobserver", "libc", "shlex", @@ -905,24 +880,22 @@ dependencies = [ [[package]] name = "cfg-if" -version = "1.0.0" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" [[package]] name = "chrono" -version = "0.4.29" +version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d87d9d13be47a5b7c3907137f1290b0459a7f80efb26be8c52afb11963bccb02" +checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" dependencies = [ - "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "serde", - "time 0.1.45", "wasm-bindgen", - "windows-targets 0.48.5", + "windows-link", ] [[package]] @@ -937,9 +910,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.21" +version = "4.5.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb3b4b9e5a7c7514dfa52869339ee98b3156b0bfb4e8a77c4ff4babb64b1604f" +checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8" dependencies = [ "clap_builder", "clap_derive", @@ -947,9 +920,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.21" +version = "4.5.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b17a95aa67cc7b5ebd32aa5370189aa0d79069ef1c64ce893bd30fb24bff20ec" +checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00" dependencies = [ "anstream", "anstyle", @@ -959,21 +932,21 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.18" +version = "4.5.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" +checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" dependencies = [ - "heck 0.5.0", + "heck", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.111", ] [[package]] name = "clap_lex" -version = "0.7.3" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afb84c814227b90d6895e01398aee0d8033c00e7466aca416fb6a8e0eb19d8a7" +checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d" [[package]] name = "cms" @@ -989,9 +962,9 @@ dependencies = [ [[package]] name = "colorchoice" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" +checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" [[package]] name = "combine" @@ -1056,7 +1029,7 @@ version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", "once_cell", "tiny-keccak", ] @@ -1067,6 +1040,15 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" +[[package]] +name = "convert_case" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "cookie" version = "0.16.2" @@ -1074,7 +1056,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb" dependencies = [ "percent-encoding", - "time 0.3.36", + "time", "version_check", ] @@ -1102,18 +1084,18 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" -version = "0.2.15" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ca741a962e1b0bff6d724a1a0958b686406e853bb14061f218562e1896f95e6" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ "libc", ] [[package]] name = "crc" -version = "3.2.1" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636" +checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d" dependencies = [ "crc-catalog", ] @@ -1126,48 +1108,48 @@ checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" [[package]] name = "crc32fast" -version = "1.4.2" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" dependencies = [ "cfg-if", ] [[package]] name = "crossbeam-channel" -version = "0.5.13" +version = "0.5.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-queue" -version = "0.3.11" +version = "0.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35" +checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-utils" -version = "0.8.20" +version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crunchy" -version = "0.2.2" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" [[package]] name = "crypto-common" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" dependencies = [ "generic-array", "rand_core 0.6.4", @@ -1228,14 +1210,14 @@ dependencies = [ "hashbrown 0.14.5", "lock_api", "once_cell", - "parking_lot_core 0.9.10", + "parking_lot_core", ] [[package]] name = "data-encoding" -version = "2.6.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2" +checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476" [[package]] name = "deadpool" @@ -1252,11 +1234,12 @@ dependencies = [ [[package]] name = "deadpool" -version = "0.12.1" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6541a3916932fe57768d4be0b1ffb5ec7cbf74ca8c903fdfd5c0fe8aa958f0ed" +checksum = "0be2b1d1d6ec8d846f05e137292d0b89133caf95ef33695424c09568bdd39b1b" dependencies = [ "deadpool-runtime", + "lazy_static", "num_cpus", "tokio", ] @@ -1267,7 +1250,7 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33c7b14064f854a3969735e7c948c677a57ef17ca7f0bc029da8fe2e5e0fc1eb" dependencies = [ - "deadpool 0.12.1", + "deadpool 0.12.3", "lapin", "tokio-executor-trait", ] @@ -1283,9 +1266,9 @@ dependencies = [ [[package]] name = "der" -version = "0.7.9" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" +checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" dependencies = [ "const-oid", "der_derive", @@ -1296,9 +1279,9 @@ dependencies = [ [[package]] name = "der-parser" -version = "9.0.0" +version = "10.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cd0a5c643689626bec213c4d8bd4d96acc8ffdb4ad4bb6bc16abf27d5f4b553" +checksum = "07da5016415d5a3c4dd39b11ed26f915f52fc4e0dc197d87908bc916e51bc1a6" dependencies = [ "asn1-rs", "displaydoc", @@ -1316,14 +1299,14 @@ checksum = "8034092389675178f570469e6c3b0465d3d30b4505c294a6550db47f3c17ad18" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.111", ] [[package]] name = "deranged" -version = "0.3.11" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +checksum = "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587" dependencies = [ "powerfmt", ] @@ -1392,76 +1375,59 @@ dependencies = [ [[package]] name = "derive_more" -version = "0.99.18" +version = "0.99.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" +checksum = "6edb4b64a43d977b8e99788fe3a04d483834fba1215a7e02caa415b626497f7f" dependencies = [ - "convert_case", + "convert_case 0.4.0", "proc-macro2", "quote", "rustc_version", - "syn 2.0.87", -] - -[[package]] -name = "des" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffdd80ce8ce993de27e9f063a444a4d53ce8e8db4c1f00cc03af5ad5a9867a1e" -dependencies = [ - "cipher", -] - -[[package]] -name = "digest" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" -dependencies = [ - "block-buffer", - "crypto-common", - "subtle", + "syn 2.0.111", ] [[package]] -name = "dirs" -version = "4.0.0" +name = "derive_more" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059" +checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134" dependencies = [ - "dirs-sys", + "derive_more-impl", ] [[package]] -name = "dirs-next" -version = "2.0.0" +name = "derive_more-impl" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" +checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" dependencies = [ - "cfg-if", - "dirs-sys-next", + "convert_case 0.10.0", + "proc-macro2", + "quote", + "rustc_version", + "syn 2.0.111", + "unicode-xid", ] [[package]] -name = "dirs-sys" -version = "0.3.7" +name = "des" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6" +checksum = "ffdd80ce8ce993de27e9f063a444a4d53ce8e8db4c1f00cc03af5ad5a9867a1e" dependencies = [ - "libc", - "redox_users", - "winapi", + "cipher", ] [[package]] -name = "dirs-sys-next" -version = "0.1.2" +name = "digest" +version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ - "libc", - "redox_users", - "winapi", + "block-buffer", + "const-oid", + "crypto-common", + "subtle", ] [[package]] @@ -1472,7 +1438,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.111", ] [[package]] @@ -1483,9 +1449,9 @@ checksum = "0688c2a7f92e427f44895cd63841bff7b29f8d7a1648b9e7e07a4a365b2e1257" [[package]] name = "doc-comment" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" +checksum = "780955b8b195a21ab8e4ac6b60dd1dbdcec1dc6c51c0617964b08c81785e12c9" [[package]] name = "docker-compose-types" @@ -1494,7 +1460,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d6fdd6fa1c9e8e716f5f73406b868929f468702449621e7397066478b9bf89c" dependencies = [ "derive_builder 0.13.1", - "indexmap 2.6.0", + "indexmap", "serde", "serde_yaml", ] @@ -1507,9 +1473,9 @@ checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" [[package]] name = "either" -version = "1.13.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" dependencies = [ "serde", ] @@ -1525,18 +1491,27 @@ dependencies = [ [[package]] name = "equivalent" -version = "1.0.1" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "erased-serde" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +checksum = "6c138974f9d5e7fe373eb04df7cae98833802ae4b11c24ac7039a21d5af4b26c" +dependencies = [ + "serde", +] [[package]] name = "errno" -version = "0.3.9" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] @@ -1567,9 +1542,9 @@ checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "event-listener" -version = "5.3.1" +version = "5.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" +checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" dependencies = [ "concurrent-queue", "parking", @@ -1578,11 +1553,11 @@ dependencies = [ [[package]] name = "event-listener-strategy" -version = "0.5.2" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" +checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" dependencies = [ - "event-listener 5.3.1", + "event-listener 5.4.1", "pin-project-lite", ] @@ -1606,9 +1581,15 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.2.0" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "find-msvc-tools" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "486f806e73c5707928240ddc295403b1b93c96a02038563881c4a2fd84b81ac4" +checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844" [[package]] name = "fixedbitset" @@ -1618,15 +1599,15 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flagset" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3ea1ec5f8307826a5b71094dd91fc04d4ae75d5709b20ad351c7fb4815c86ec" +checksum = "b7ac824320a75a52197e8f2d787f6a38b6718bb6897a35142d749af3c0e8f4fe" [[package]] name = "flate2" -version = "1.0.35" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" +checksum = "bfe33edd8e85a12a67454e37f8c75e730830d83e313556ab9ebf9ee7fbeb3bfb" dependencies = [ "crc32fast", "miniz_oxide", @@ -1649,6 +1630,12 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + [[package]] name = "foreign-types" version = "0.3.2" @@ -1666,9 +1653,9 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" dependencies = [ "percent-encoding", ] @@ -1715,17 +1702,6 @@ dependencies = [ "futures-util", ] -[[package]] -name = "futures-intrusive" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a604f7a68fbf8103337523b1fadc8ade7361ee3f112f7c680ad179651616aed5" -dependencies = [ - "futures-core", - "lock_api", - "parking_lot 0.11.2", -] - [[package]] name = "futures-intrusive" version = "0.5.0" @@ -1734,7 +1710,7 @@ checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" dependencies = [ "futures-core", "lock_api", - "parking_lot 0.12.3", + "parking_lot", ] [[package]] @@ -1760,11 +1736,11 @@ dependencies = [ [[package]] name = "futures-lite" -version = "2.5.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cef40d21ae2c515b51041df9ed313ed21e572df340ea58a922a0aefe7e8891a1" +checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad" dependencies = [ - "fastrand 2.2.0", + "fastrand 2.3.0", "futures-core", "futures-io", "parking", @@ -1779,7 +1755,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.111", ] [[package]] @@ -1851,14 +1827,26 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.15" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.11.1+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" dependencies = [ "cfg-if", "js-sys", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "r-efi", + "wasip2", "wasm-bindgen", ] @@ -1872,23 +1860,17 @@ dependencies = [ "polyval", ] -[[package]] -name = "gimli" -version = "0.31.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" - [[package]] name = "glob" -version = "0.3.1" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" [[package]] name = "h2" -version = "0.3.26" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" +checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d" dependencies = [ "bytes", "fnv", @@ -1896,7 +1878,7 @@ dependencies = [ "futures-sink", "futures-util", "http", - "indexmap 2.6.0", + "indexmap", "slab", "tokio", "tokio-util", @@ -1918,24 +1900,25 @@ version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ - "ahash 0.8.11", - "allocator-api2", + "ahash 0.8.12", ] [[package]] name = "hashbrown" -version = "0.15.1" +version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a9bfc1af68b1726ea47d3d5109de126281def866b33970e10fbab11b5dafab3" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash", +] [[package]] -name = "hashlink" -version = "0.8.4" +name = "hashbrown" +version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" -dependencies = [ - "hashbrown 0.14.5", -] +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" [[package]] name = "hashlink" @@ -1947,12 +1930,12 @@ dependencies = [ ] [[package]] -name = "heck" -version = "0.4.1" +name = "hashlink" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" dependencies = [ - "unicode-segmentation", + "hashbrown 0.15.5", ] [[package]] @@ -1969,9 +1952,9 @@ checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] name = "hermit-abi" -version = "0.4.0" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" +checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" [[package]] name = "hex" @@ -1999,11 +1982,11 @@ dependencies = [ [[package]] name = "home" -version = "0.5.9" +version = "0.5.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] @@ -2051,9 +2034,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.9.5" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" [[package]] name = "httpdate" @@ -2063,9 +2046,9 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "0.14.31" +version = "0.14.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c08302e8fa335b151b788c775ff56e7a03ae64ff85c548ee820fecb70356e85" +checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" dependencies = [ "bytes", "futures-channel", @@ -2078,7 +2061,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2 0.5.7", + "socket2 0.5.10", "tokio", "tower-service", "tracing", @@ -2100,14 +2083,15 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.61" +version = "0.1.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" +checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", + "log", "wasm-bindgen", "windows-core", ] @@ -2123,21 +2107,22 @@ dependencies = [ [[package]] name = "icu_collections" -version = "1.5.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" dependencies = [ "displaydoc", + "potential_utf", "yoke", "zerofrom", "zerovec", ] [[package]] -name = "icu_locid" -version = "1.5.0" +name = "icu_locale_core" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" dependencies = [ "displaydoc", "litemap", @@ -2146,99 +2131,61 @@ dependencies = [ "zerovec", ] -[[package]] -name = "icu_locid_transform" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" -dependencies = [ - "displaydoc", - "icu_locid", - "icu_locid_transform_data", - "icu_provider", - "tinystr", - "zerovec", -] - -[[package]] -name = "icu_locid_transform_data" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" - [[package]] name = "icu_normalizer" -version = "1.5.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" dependencies = [ - "displaydoc", "icu_collections", "icu_normalizer_data", "icu_properties", "icu_provider", "smallvec", - "utf16_iter", - "utf8_iter", - "write16", "zerovec", ] [[package]] name = "icu_normalizer_data" -version = "1.5.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" [[package]] name = "icu_properties" -version = "1.5.1" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" dependencies = [ - "displaydoc", "icu_collections", - "icu_locid_transform", + "icu_locale_core", "icu_properties_data", "icu_provider", - "tinystr", + "zerotrie", "zerovec", ] [[package]] name = "icu_properties_data" -version = "1.5.0" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" [[package]] name = "icu_provider" -version = "1.5.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" dependencies = [ "displaydoc", - "icu_locid", - "icu_provider_macros", - "stable_deref_trait", - "tinystr", + "icu_locale_core", "writeable", "yoke", "zerofrom", + "zerotrie", "zerovec", ] -[[package]] -name = "icu_provider_macros" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.87", -] - [[package]] name = "ident_case" version = "1.0.1" @@ -2247,9 +2194,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "1.0.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" dependencies = [ "idna_adapter", "smallvec", @@ -2258,9 +2205,9 @@ dependencies = [ [[package]] name = "idna_adapter" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" dependencies = [ "icu_normalizer", "icu_properties", @@ -2268,29 +2215,20 @@ dependencies = [ [[package]] name = "impl-more" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aae21c3177a27788957044151cc2800043d127acaa460a47ebb9b84dfa2c6aa0" +checksum = "e8a5a9a0ff0086c7a148acb942baaabeadf9504d10400b5a05645853729b9cd2" [[package]] name = "indexmap" -version = "1.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" -dependencies = [ - "autocfg", - "hashbrown 0.12.3", -] - -[[package]] -name = "indexmap" -version = "2.6.0" +version = "2.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" +checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2" dependencies = [ "equivalent", - "hashbrown 0.15.1", + "hashbrown 0.16.1", "serde", + "serde_core", ] [[package]] @@ -2301,9 +2239,9 @@ checksum = "64e9829a50b42bb782c1df523f78d332fe371b10c661e78b7a3c34b0198e9fac" [[package]] name = "inout" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" dependencies = [ "block-padding", "generic-array", @@ -2331,32 +2269,26 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddc24109865250148c2e0f3d25d4f0f479571723792d3802153c60922a4fb708" - -[[package]] -name = "ipnetwork" -version = "0.19.0" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f84f1612606f3753f205a4e9a2efd6fe5b4c573a6269b2cc6c3003d44a0d127" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" [[package]] name = "is-terminal" -version = "0.4.13" +version = "0.4.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "261f68e344040fbd0edea105bef17c66edf46f984ddb1115b775ce31be948f4b" +checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46" dependencies = [ - "hermit-abi 0.4.0", + "hermit-abi 0.5.2", "libc", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] name = "is_terminal_polyfill" -version = "1.70.1" +version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" +checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" [[package]] name = "itertools" @@ -2367,27 +2299,38 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + [[package]] name = "itoa" -version = "1.0.11" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "7ee5b5339afb4c41626dde77b7a611bd4f2c202b897852b4bcf5d03eddc61010" [[package]] name = "jobserver" -version = "0.1.32" +version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" dependencies = [ + "getrandom 0.3.4", "libc", ] [[package]] name = "js-sys" -version = "0.3.72" +version = "0.3.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9" +checksum = "464a3709c7f55f1f721e5389aa6ea4e3bc6aba669353300af094b29ffbdde1d8" dependencies = [ + "once_cell", "wasm-bindgen", ] @@ -2410,9 +2353,9 @@ checksum = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388" [[package]] name = "lapin" -version = "2.5.0" +version = "2.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "209b09a06f4bd4952a0fd0594f90d53cf4496b062f59acc838a2823e1bb7d95c" +checksum = "02d2aa4725b9607915fa1a73e940710a3be6af508ce700e56897cbe8847fbb07" dependencies = [ "amq-protocol", "async-global-executor-trait", @@ -2422,7 +2365,7 @@ dependencies = [ "flume", "futures-core", "futures-io", - "parking_lot 0.12.3", + "parking_lot", "pinky-swear", "reactor-trait", "serde", @@ -2436,21 +2379,41 @@ name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +dependencies = [ + "spin 0.9.8", +] [[package]] name = "libc" -version = "0.2.162" +version = "0.2.178" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091" + +[[package]] +name = "libm" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18d287de67fe55fd7e1581fe933d965a5a9477b38e949cfa9f8574ef01506398" +checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" [[package]] name = "libredox" -version = "0.1.3" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +checksum = "df15f6eac291ed1cf25865b1ee60399f57e7c227e7f51bdbd4c5270396a9ed50" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.10.0", "libc", + "redox_syscall 0.6.0", +] + +[[package]] +name = "libsqlite3-sys" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" +dependencies = [ + "pkg-config", + "vcpkg", ] [[package]] @@ -2467,15 +2430,15 @@ checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" [[package]] name = "linux-raw-sys" -version = "0.4.14" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" [[package]] name = "litemap" -version = "0.7.3" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "643cb0b8d4fcc284004d5fd0d67ccf61dfffadb7f75e1e71bc420f4688a3a704" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" [[package]] name = "local-channel" @@ -2496,27 +2459,26 @@ checksum = "4d873d7c67ce09b42110d801813efbc9364414e356be9935700d368351657487" [[package]] name = "lock_api" -version = "0.4.12" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" dependencies = [ - "autocfg", "scopeguard", ] [[package]] name = "log" -version = "0.4.22" +version = "0.4.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" [[package]] name = "matchers" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" dependencies = [ - "regex-automata 0.1.10", + "regex-automata", ] [[package]] @@ -2531,9 +2493,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.4" +version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" [[package]] name = "mime" @@ -2556,6 +2518,16 @@ dependencies = [ "triomphe", ] +[[package]] +name = "minicov" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4869b6a491569605d66d3952bcdf03df789e5b536e5f0cf7758a7f08a55ae24d" +dependencies = [ + "cc", + "walkdir", +] + [[package]] name = "minimal-lexical" version = "0.2.1" @@ -2564,24 +2536,24 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.8.0" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" dependencies = [ "adler2", + "simd-adler32", ] [[package]] name = "mio" -version = "1.0.2" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" +checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" dependencies = [ - "hermit-abi 0.3.9", "libc", "log", - "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.52.0", + "wasi 0.11.1+wasi-snapshot-preview1", + "windows-sys 0.61.2", ] [[package]] @@ -2592,9 +2564,9 @@ checksum = "e94e1e6445d314f972ff7395df2de295fe51b71821694f0b0e1e79c4f12c8577" [[package]] name = "native-tls" -version = "0.2.12" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" +checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" dependencies = [ "libc", "log", @@ -2628,12 +2600,11 @@ dependencies = [ [[package]] name = "nu-ansi-term" -version = "0.46.0" +version = "0.50.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" dependencies = [ - "overload", - "winapi", + "windows-sys 0.61.2", ] [[package]] @@ -2646,6 +2617,22 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-bigint-dig" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7" +dependencies = [ + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + "rand 0.8.5", + "smallvec", + "zeroize", +] + [[package]] name = "num-conv" version = "0.1.0" @@ -2662,51 +2649,66 @@ dependencies = [ ] [[package]] -name = "num-traits" -version = "0.2.19" +name = "num-iter" +version = "0.1.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" dependencies = [ "autocfg", + "num-integer", + "num-traits", ] [[package]] -name = "num_cpus" -version = "1.16.0" +name = "num-traits" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ - "hermit-abi 0.3.9", - "libc", + "autocfg", + "libm", ] [[package]] -name = "object" -version = "0.36.5" +name = "num_cpus" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" +checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" dependencies = [ - "memchr", + "hermit-abi 0.5.2", + "libc", ] [[package]] name = "oid-registry" -version = "0.7.1" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8d8034d9489cdaf79228eb9f6a3b8d7bb32ba00d6645ebd48eef4077ceb5bd9" +checksum = "12f40cff3dde1b6087cc5d5f5d4d65712f34016a03ed60e9c08dcc392736b5b7" dependencies = [ "asn1-rs", ] [[package]] name = "once_cell" -version = "1.20.2" +version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" dependencies = [ "portable-atomic", ] +[[package]] +name = "once_cell_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" + +[[package]] +name = "oorandom" +version = "11.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" + [[package]] name = "opaque-debug" version = "0.3.1" @@ -2715,11 +2717,11 @@ checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" [[package]] name = "openssl" -version = "0.10.68" +version = "0.10.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5" +checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.10.0", "cfg-if", "foreign-types", "libc", @@ -2736,20 +2738,20 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.111", ] [[package]] name = "openssl-probe" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "openssl-sys" -version = "0.9.104" +version = "0.9.111" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741" +checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321" dependencies = [ "cc", "libc", @@ -2767,17 +2769,11 @@ dependencies = [ "hashbrown 0.12.3", ] -[[package]] -name = "overload" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" - [[package]] name = "p12-keystore" -version = "0.1.3" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df7b60d0b2dcace322e6e8c4499c4c8bdf331c1bae046a54be5e4191c3610286" +checksum = "3cae83056e7cb770211494a0ecf66d9fa7eba7d00977e5bb91f0e925b40b937f" dependencies = [ "cbc", "cms", @@ -2787,11 +2783,11 @@ dependencies = [ "hmac", "pkcs12", "pkcs5", - "rand 0.8.5", + "rand 0.9.2", "rc2", "sha1", "sha2", - "thiserror", + "thiserror 2.0.17", "x509-parser", ] @@ -2803,50 +2799,25 @@ checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" [[package]] name = "parking_lot" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api", - "parking_lot_core 0.8.6", -] - -[[package]] -name = "parking_lot" -version = "0.12.3" +version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" dependencies = [ "lock_api", - "parking_lot_core 0.9.10", -] - -[[package]] -name = "parking_lot_core" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" -dependencies = [ - "cfg-if", - "instant", - "libc", - "redox_syscall 0.2.16", - "smallvec", - "winapi", + "parking_lot_core", ] [[package]] name = "parking_lot_core" -version = "0.9.10" +version = "0.9.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.7", + "redox_syscall 0.5.18", "smallvec", - "windows-targets 0.52.6", + "windows-link", ] [[package]] @@ -2857,9 +2828,9 @@ checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] name = "pathdiff" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d61c5ce1153ab5b689d0c074c4e7fc613e942dfb7dd9eea5ab202d2ad91fe361" +checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3" [[package]] name = "pbkdf2" @@ -2882,26 +2853,25 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.1" +version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "pest" -version = "2.7.14" +version = "2.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "879952a81a83930934cbf1786752d6dedc3b1f29e8f8fb2ad1d0a36f377cf442" +checksum = "cbcfd20a6d4eeba40179f05735784ad32bdaef05ce8e8af05f180d45bb3e7e22" dependencies = [ "memchr", - "thiserror", "ucd-trie", ] [[package]] name = "pest_derive" -version = "2.7.14" +version = "2.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d214365f632b123a47fd913301e14c946c61d1c183ee245fa76eb752e59a02dd" +checksum = "51f72981ade67b1ca6adc26ec221be9f463f2b5839c7508998daa17c23d94d7f" dependencies = [ "pest", "pest_generator", @@ -2909,24 +2879,23 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.14" +version = "2.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb55586734301717aea2ac313f50b2eb8f60d2fc3dc01d190eefa2e625f60c4e" +checksum = "dee9efd8cdb50d719a80088b76f81aec7c41ed6d522ee750178f83883d271625" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.111", ] [[package]] name = "pest_meta" -version = "2.7.14" +version = "2.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b75da2a70cf4d9cb76833c990ac9cd3923c9a8905a8929789ce347c84564d03d" +checksum = "bf1d70880e76bdc13ba52eafa6239ce793d85c8e43896507e43dd8984ff05b82" dependencies = [ - "once_cell", "pest", "sha2", ] @@ -2938,34 +2907,34 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap 2.6.0", + "indexmap", ] [[package]] name = "pin-project" -version = "1.1.7" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95" +checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.7" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" +checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.111", ] [[package]] name = "pin-project-lite" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] name = "pin-utils" @@ -2975,13 +2944,13 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "pinky-swear" -version = "6.2.0" +version = "6.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6cfae3ead413ca051a681152bd266438d3bfa301c9bdf836939a14c721bb2a21" +checksum = "b1ea6e230dd3a64d61bcb8b79e597d3ab6b4c94ec7a234ce687dd718b4f2e657" dependencies = [ "doc-comment", "flume", - "parking_lot 0.12.3", + "parking_lot", "tracing", ] @@ -2992,10 +2961,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066" dependencies = [ "atomic-waker", - "fastrand 2.2.0", + "fastrand 2.3.0", "futures-io", ] +[[package]] +name = "pkcs1" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der", + "pkcs8", + "spki", +] + [[package]] name = "pkcs12" version = "0.1.0" @@ -3026,11 +3006,21 @@ dependencies = [ "spki", ] +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + [[package]] name = "pkg-config" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" [[package]] name = "polling" @@ -3050,17 +3040,16 @@ dependencies = [ [[package]] name = "polling" -version = "3.7.4" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a604568c3202727d1507653cb121dbd627a58684eb09a820fd746bee38b4442f" +checksum = "5d0e4f59085d47d8241c88ead0f274e8a0cb551f3625263c05eb8dd897c34218" dependencies = [ "cfg-if", "concurrent-queue", - "hermit-abi 0.4.0", + "hermit-abi 0.5.2", "pin-project-lite", - "rustix 0.38.40", - "tracing", - "windows-sys 0.59.0", + "rustix 1.1.3", + "windows-sys 0.61.2", ] [[package]] @@ -3077,9 +3066,18 @@ dependencies = [ [[package]] name = "portable-atomic" -version = "1.9.0" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f59e70c4aef1e55797c2e8fd94a4f2a973fc972cfde0e0b05f683667b0cd39dd" + +[[package]] +name = "potential_utf" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc9c68a3f6da06753e9335d63e27f6b9754dd1920d941135b7ea8224f141adb2" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] [[package]] name = "powerfmt" @@ -3089,9 +3087,9 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" -version = "0.2.20" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" dependencies = [ "zerocopy", ] @@ -3122,9 +3120,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.89" +version = "1.0.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e" +checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8" dependencies = [ "unicode-ident", ] @@ -3135,20 +3133,26 @@ version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.10.0", "memchr", "unicase", ] [[package]] name = "quote" -version = "1.0.37" +version = "1.0.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" +checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f" dependencies = [ "proc-macro2", ] +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + [[package]] name = "rand" version = "0.7.3" @@ -3173,6 +3177,16 @@ dependencies = [ "rand_core 0.6.4", ] +[[package]] +name = "rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha 0.9.0", + "rand_core 0.9.3", +] + [[package]] name = "rand_chacha" version = "0.2.2" @@ -3193,6 +3207,16 @@ dependencies = [ "rand_core 0.6.4", ] +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core 0.9.3", +] + [[package]] name = "rand_core" version = "0.5.1" @@ -3208,7 +3232,16 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", +] + +[[package]] +name = "rand_core" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +dependencies = [ + "getrandom 0.3.4", ] [[package]] @@ -3242,22 +3275,23 @@ dependencies = [ [[package]] name = "redis" -version = "0.27.5" +version = "0.27.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81cccf17a692ce51b86564334614d72dcae1def0fd5ecebc9f02956da74352b5" +checksum = "09d8f99a4090c89cc489a94833c901ead69bfbf3877b4867d5482e321ee875bc" dependencies = [ "arc-swap", "async-trait", "bytes", "combine", "futures-util", + "itertools 0.13.0", "itoa", "num-bigint", "percent-encoding", "pin-project-lite", "ryu", "sha1_smol", - "socket2 0.5.7", + "socket2 0.5.10", "tokio", "tokio-util", "url", @@ -3265,82 +3299,56 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.2.16" +version = "0.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.10.0", ] [[package]] name = "redox_syscall" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" -dependencies = [ - "bitflags 2.6.0", -] - -[[package]] -name = "redox_users" -version = "0.4.6" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" +checksum = "ec96166dafa0886eb81fe1c0a388bece180fbef2135f97c1e2cf8302e74b43b5" dependencies = [ - "getrandom 0.2.15", - "libredox", - "thiserror", + "bitflags 2.10.0", ] [[package]] name = "regex" -version = "1.11.1" +version = "1.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.9", - "regex-syntax 0.8.5", -] - -[[package]] -name = "regex-automata" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" -dependencies = [ - "regex-syntax 0.6.29", + "regex-automata", + "regex-syntax", ] [[package]] name = "regex-automata" -version = "0.4.9" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.5", + "regex-syntax", ] [[package]] name = "regex-lite" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a" - -[[package]] -name = "regex-syntax" -version = "0.6.29" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" +checksum = "8d942b98df5e658f56f20d592c7f868833fe38115e65c33003d8cd224b0155da" [[package]] name = "regex-syntax" -version = "0.8.5" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" +checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" [[package]] name = "reqwest" @@ -3390,12 +3398,12 @@ checksum = "4389f1d5789befaf6029ebd9f7dac4af7f7e3d61b69d4f30e2ac02b57e7712b0" [[package]] name = "rhai" -version = "1.20.0" +version = "1.23.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8867cfc57aaf2320b60ec0f4d55603ac950ce852e6ab6b9109aa3d626a4dd7ea" +checksum = "f4e35aaaa439a5bda2f8d15251bc375e4edfac75f9865734644782c9701b5709" dependencies = [ - "ahash 0.8.11", - "bitflags 2.6.0", + "ahash 0.8.12", + "bitflags 2.10.0", "instant", "no-std-compat", "num-traits", @@ -3409,42 +3417,26 @@ dependencies = [ [[package]] name = "rhai_codegen" -version = "2.2.0" +version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5a11a05ee1ce44058fa3d5961d05194fdbe3ad6b40f904af764d81b86450e6b" +checksum = "d4322a2a4e8cf30771dd9f27f7f37ca9ac8fe812dddd811096a98483080dabe6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", -] - -[[package]] -name = "ring" -version = "0.16.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" -dependencies = [ - "cc", - "libc", - "once_cell", - "spin 0.5.2", - "untrusted 0.7.1", - "web-sys", - "winapi", + "syn 2.0.111", ] [[package]] name = "ring" -version = "0.17.8" +version = "0.17.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" dependencies = [ "cc", "cfg-if", - "getrandom 0.2.15", + "getrandom 0.2.16", "libc", - "spin 0.9.8", - "untrusted 0.9.0", + "untrusted", "windows-sys 0.52.0", ] @@ -3459,6 +3451,26 @@ dependencies = [ "serde", ] +[[package]] +name = "rsa" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40a0376c50d0358279d9d643e4bf7b7be212f1f4ff1da9070a7b54d22ef75c88" +dependencies = [ + "const-oid", + "digest", + "num-bigint-dig", + "num-integer", + "num-traits", + "pkcs1", + "pkcs8", + "rand_core 0.6.4", + "signature", + "spki", + "subtle", + "zeroize", +] + [[package]] name = "rust-ini" version = "0.18.0" @@ -3469,12 +3481,6 @@ dependencies = [ "ordered-multimap", ] -[[package]] -name = "rustc-demangle" -version = "0.1.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" - [[package]] name = "rustc_version" version = "0.4.1" @@ -3495,9 +3501,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.37.27" +version = "0.37.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fea8ca367a3a01fe35e6943c400addf443c0f57670e6ec51196f71a4b8762dd2" +checksum = "519165d378b97752ca44bbe15047d5d3409e875f39327546b42ac81d7e18c1b6" dependencies = [ "bitflags 1.3.2", "errno", @@ -3509,37 +3515,25 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.40" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99e4ea3e1cdc4b559b8e5650f9c8e5998e3e5c1343b4eaf034565f32318d63c0" +checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.10.0", "errno", "libc", - "linux-raw-sys 0.4.14", - "windows-sys 0.52.0", -] - -[[package]] -name = "rustls" -version = "0.20.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" -dependencies = [ - "log", - "ring 0.16.20", - "sct", - "webpki", + "linux-raw-sys 0.11.0", + "windows-sys 0.61.2", ] [[package]] name = "rustls" -version = "0.23.16" +version = "0.23.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eee87ff5d9b36712a58574e12e9f0ea80f915a5b0ac518d322b24a465617925e" +checksum = "533f54bc6a7d4f647e46ad909549eda97bf5afc1585190ef692b4286b198bd8f" dependencies = [ "once_cell", - "ring 0.17.8", + "ring", "rustls-pki-types", "rustls-webpki", "subtle", @@ -3548,12 +3542,12 @@ dependencies = [ [[package]] name = "rustls-connector" -version = "0.20.1" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a980454b497c439c274f2feae2523ed8138bbd3d323684e1435fec62f800481" +checksum = "70cc376c6ba1823ae229bacf8ad93c136d93524eab0e4e5e0e4f96b9c4e5b212" dependencies = [ "log", - "rustls 0.23.16", + "rustls", "rustls-native-certs", "rustls-pki-types", "rustls-webpki", @@ -3592,32 +3586,35 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.10.0" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b" +checksum = "21e6f2ab2928ca4291b86736a8bd920a277a399bba1589409d72154ff87c1282" +dependencies = [ + "zeroize", +] [[package]] name = "rustls-webpki" -version = "0.102.8" +version = "0.103.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" +checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52" dependencies = [ - "ring 0.17.8", + "ring", "rustls-pki-types", - "untrusted 0.9.0", + "untrusted", ] [[package]] name = "rustversion" -version = "1.0.18" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" [[package]] name = "ryu" -version = "1.0.18" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" +checksum = "62049b2877bf12821e8f9ad256ee38fdc31db7387ec2d3b3f403024de2034aea" [[package]] name = "salsa20" @@ -3639,11 +3636,11 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.26" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01227be5826fa0690321a2ba6c5cd57a19cf3f6a09e76973b58e61de6ab9d1c1" +checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -3663,23 +3660,13 @@ dependencies = [ "sha2", ] -[[package]] -name = "sct" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" -dependencies = [ - "ring 0.17.8", - "untrusted 0.9.0", -] - [[package]] name = "security-framework" version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.10.0", "core-foundation", "core-foundation-sys", "libc", @@ -3688,9 +3675,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.12.1" +version = "2.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa39c7303dc58b5543c94d22c1766b0d31f2ee58306363ea622b10bbc075eaa2" +checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" dependencies = [ "core-foundation-sys", "libc", @@ -3698,53 +3685,66 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.23" +version = "1.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" dependencies = [ "serde", + "serde_core", ] [[package]] name = "serde" -version = "1.0.215" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.215" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.111", ] [[package]] name = "serde_json" -version = "1.0.132" +version = "1.0.147" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03" +checksum = "6af14725505314343e673e9ecb7cd7e8a36aa9791eb936235a3567cc31447ae4" dependencies = [ "itoa", "memchr", - "ryu", "serde", + "serde_core", + "zmij", ] [[package]] name = "serde_path_to_error" -version = "0.1.16" +version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" +checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457" dependencies = [ "itoa", "serde", + "serde_core", ] [[package]] @@ -3755,7 +3755,7 @@ checksum = "c7715380eec75f029a4ef7de39a9200e0a63823176b759d055b613f5a87df6a6" dependencies = [ "percent-encoding", "serde", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -3776,8 +3776,8 @@ version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70c0e00fab6460447391a1981c21341746bc2d0178a7c46a3bbf667f450ac6e4" dependencies = [ - "indexmap 2.6.0", - "itertools", + "indexmap", + "itertools 0.12.1", "num-traits", "once_cell", "paste", @@ -3786,7 +3786,7 @@ dependencies = [ "serde_json", "serde_valid_derive", "serde_valid_literal", - "thiserror", + "thiserror 1.0.69", "unicode-segmentation", ] @@ -3801,7 +3801,7 @@ dependencies = [ "proc-macro2", "quote", "strsim 0.11.1", - "syn 2.0.87", + "syn 2.0.111", ] [[package]] @@ -3820,7 +3820,7 @@ version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ - "indexmap 2.6.0", + "indexmap", "itoa", "ryu", "serde", @@ -3846,9 +3846,9 @@ checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d" [[package]] name = "sha2" -version = "0.10.8" +version = "0.10.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" dependencies = [ "cfg-if", "cpufeatures", @@ -3872,13 +3872,30 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook-registry" -version = "1.4.2" +version = "1.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" dependencies = [ + "errno", "libc", ] +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest", + "rand_core 0.6.4", +] + +[[package]] +name = "simd-adler32" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" + [[package]] name = "skeptic" version = "0.13.7" @@ -3896,18 +3913,21 @@ dependencies = [ [[package]] name = "slab" -version = "0.4.9" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" -dependencies = [ - "autocfg", -] +checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" [[package]] name = "slog" -version = "2.7.0" +version = "2.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8347046d4ebd943127157b94d63abb990fcf729dc4e9978927fdf4ac3c998d06" +checksum = "9b3b8565691b22d2bdfc066426ed48f837fc0c5f2c8cad8d9718f7f99d6995c1" +dependencies = [ + "anyhow", + "erased-serde", + "rustversion", + "serde_core", +] [[package]] name = "slog-async" @@ -3923,22 +3943,23 @@ dependencies = [ [[package]] name = "slog-term" -version = "2.9.1" +version = "2.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6e022d0b998abfe5c3782c1f03551a596269450ccd677ea51c56f8b214610e8" +checksum = "5cb1fc680b38eed6fad4c02b3871c09d2c81db8c96aa4e9c0a34904c830f09b5" dependencies = [ + "chrono", "is-terminal", "slog", "term", "thread_local", - "time 0.3.36", + "time", ] [[package]] name = "smallvec" -version = "1.13.2" +version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" dependencies = [ "serde", ] @@ -3967,14 +3988,24 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.7" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" dependencies = [ "libc", "windows-sys 0.52.0", ] +[[package]] +name = "socket2" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" +dependencies = [ + "libc", + "windows-sys 0.60.2", +] + [[package]] name = "spin" version = "0.5.2" @@ -4000,154 +4031,92 @@ dependencies = [ "der", ] -[[package]] -name = "sqlformat" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bba3a93db0cc4f7bdece8bb09e77e2e785c20bfebf79eb8340ed80708048790" -dependencies = [ - "nom", - "unicode_categories", -] - [[package]] name = "sqlx" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8de3b03a925878ed54a954f621e64bf55a3c1bd29652d0d1a17830405350188" -dependencies = [ - "sqlx-core 0.6.3", - "sqlx-macros 0.6.3", -] - -[[package]] -name = "sqlx" -version = "0.8.2" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93334716a037193fac19df402f8571269c84a00852f6a7066b5d2616dcd64d3e" +checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc" dependencies = [ - "sqlx-core 0.8.2", - "sqlx-macros 0.8.2", + "sqlx-core", + "sqlx-macros", + "sqlx-mysql", "sqlx-postgres", + "sqlx-sqlite", ] [[package]] name = "sqlx-adapter" -version = "1.6.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "446099e7e4da3573bb0039b18354460eb7a38b5a2cb3568cf96c37fdbc569de0" +checksum = "2a88e13f5aaf770420184c9e2955345f157953fb7ed9f26df59a4a0664478daf" dependencies = [ "async-trait", "casbin", "dotenvy", - "sqlx 0.8.2", + "sqlx", ] [[package]] name = "sqlx-core" -version = "0.6.3" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa8241483a83a3f33aa5fff7e7d9def398ff9990b2752b6c6112b83c6d246029" +checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6" dependencies = [ - "ahash 0.7.8", - "atoi 1.0.0", - "base64 0.13.1", - "bitflags 1.3.2", - "byteorder", + "base64 0.22.1", "bytes", "chrono", "crc", "crossbeam-queue", - "dirs", - "dotenvy", "either", - "event-listener 2.5.3", - "futures-channel", + "event-listener 5.4.1", "futures-core", - "futures-intrusive 0.4.2", + "futures-intrusive", + "futures-io", "futures-util", - "hashlink 0.8.4", - "hex", - "hkdf", - "hmac", - "indexmap 1.9.3", - "ipnetwork", - "itoa", - "libc", + "hashbrown 0.15.5", + "hashlink 0.10.0", + "indexmap", "log", - "md-5", "memchr", + "native-tls", "once_cell", - "paste", "percent-encoding", - "rand 0.8.5", - "rustls 0.20.9", - "rustls-pemfile 1.0.4", + "rustls", "serde", "serde_json", - "sha1", "sha2", "smallvec", - "sqlformat", - "sqlx-rt", - "stringprep", - "thiserror", + "thiserror 2.0.17", + "tokio", "tokio-stream", + "tracing", "url", "uuid", - "webpki-roots", - "whoami", + "webpki-roots 0.26.11", ] [[package]] -name = "sqlx-core" -version = "0.8.2" +name = "sqlx-macros" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4d8060b456358185f7d50c55d9b5066ad956956fddec42ee2e8567134a8936e" +checksum = "a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d" dependencies = [ - "atoi 2.0.0", - "byteorder", - "bytes", - "crc", - "crossbeam-queue", - "either", - "event-listener 5.3.1", - "futures-channel", - "futures-core", - "futures-intrusive 0.5.0", - "futures-io", - "futures-util", - "hashbrown 0.14.5", - "hashlink 0.9.1", - "hex", - "indexmap 2.6.0", - "log", - "memchr", - "native-tls", - "once_cell", - "paste", - "percent-encoding", - "serde", - "serde_json", - "sha2", - "smallvec", - "sqlformat", - "thiserror", - "tokio", - "tokio-stream", - "tracing", - "url", + "proc-macro2", + "quote", + "sqlx-core", + "sqlx-macros-core", + "syn 2.0.111", ] [[package]] -name = "sqlx-macros" -version = "0.6.3" +name = "sqlx-macros-core" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9966e64ae989e7e575b19d7265cb79d7fc3cbbdf179835cb0d716f294c2049c9" +checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b" dependencies = [ "dotenvy", "either", - "heck 0.4.1", + "heck", "hex", "once_cell", "proc-macro2", @@ -4155,65 +4124,75 @@ dependencies = [ "serde", "serde_json", "sha2", - "sqlx-core 0.6.3", - "sqlx-rt", - "syn 1.0.109", + "sqlx-core", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", + "syn 2.0.111", + "tokio", "url", ] [[package]] -name = "sqlx-macros" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cac0692bcc9de3b073e8d747391827297e075c7710ff6276d9f7a1f3d58c6657" -dependencies = [ - "proc-macro2", - "quote", - "sqlx-core 0.8.2", - "sqlx-macros-core", - "syn 2.0.87", -] - -[[package]] -name = "sqlx-macros-core" -version = "0.8.2" +name = "sqlx-mysql" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1804e8a7c7865599c9c79be146dc8a9fd8cc86935fa641d3ea58e5f0688abaa5" +checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526" dependencies = [ + "atoi", + "base64 0.22.1", + "bitflags 2.10.0", + "byteorder", + "bytes", + "chrono", + "crc", + "digest", "dotenvy", "either", - "heck 0.5.0", + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "generic-array", "hex", + "hkdf", + "hmac", + "itoa", + "log", + "md-5", + "memchr", "once_cell", - "proc-macro2", - "quote", + "percent-encoding", + "rand 0.8.5", + "rsa", "serde", - "serde_json", + "sha1", "sha2", - "sqlx-core 0.8.2", - "sqlx-postgres", - "syn 2.0.87", - "tempfile", - "tokio", - "url", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror 2.0.17", + "tracing", + "uuid", + "whoami", ] [[package]] name = "sqlx-postgres" -version = "0.8.2" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fa91a732d854c5d7726349bb4bb879bb9478993ceb764247660aee25f67c2f8" +checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46" dependencies = [ - "atoi 2.0.0", + "atoi", "base64 0.22.1", - "bitflags 2.6.0", + "bitflags 2.10.0", "byteorder", + "chrono", "crc", "dotenvy", "etcetera", "futures-channel", "futures-core", - "futures-io", "futures-util", "hex", "hkdf", @@ -4229,29 +4208,45 @@ dependencies = [ "serde_json", "sha2", "smallvec", - "sqlx-core 0.8.2", + "sqlx-core", "stringprep", - "thiserror", + "thiserror 2.0.17", "tracing", + "uuid", "whoami", ] [[package]] -name = "sqlx-rt" -version = "0.6.3" +name = "sqlx-sqlite" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "804d3f245f894e61b1e6263c84b23ca675d96753b5abfd5cc8597d86806e8024" +checksum = "c2d12fe70b2c1b4401038055f90f151b78208de1f9f89a7dbfd41587a10c3eea" dependencies = [ - "once_cell", - "tokio", - "tokio-rustls", + "atoi", + "chrono", + "flume", + "futures-channel", + "futures-core", + "futures-executor", + "futures-intrusive", + "futures-util", + "libsqlite3-sys", + "log", + "percent-encoding", + "serde", + "serde_urlencoded", + "sqlx-core", + "thiserror 2.0.17", + "tracing", + "url", + "uuid", ] [[package]] name = "stable_deref_trait" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" [[package]] name = "stacker" @@ -4264,7 +4259,6 @@ dependencies = [ "aes-gcm", "base64 0.22.1", "brotli 3.5.0", ->>>>>>> dev "casbin", "chrono", "clap", @@ -4274,11 +4268,11 @@ dependencies = [ "docker-compose-types", "dotenvy", "futures", - "futures-lite 2.5.0", + "futures-lite 2.6.1", "futures-util", "glob", "hmac", - "indexmap 2.6.0", + "indexmap", "lapin", "rand 0.8.5", "redis", @@ -4291,9 +4285,9 @@ dependencies = [ "serde_valid", "serde_yaml", "sha2", - "sqlx 0.6.3", + "sqlx", "sqlx-adapter", - "thiserror", + "thiserror 1.0.69", "tokio", "tokio-stream", "tracing", @@ -4353,9 +4347,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.87" +version = "2.0.111" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" +checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87" dependencies = [ "proc-macro2", "quote", @@ -4370,13 +4364,13 @@ checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" [[package]] name = "synstructure" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.111", ] [[package]] @@ -4426,33 +4420,31 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.14.0" +version = "3.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" +checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" dependencies = [ - "cfg-if", - "fastrand 2.2.0", + "fastrand 2.3.0", + "getrandom 0.3.4", "once_cell", - "rustix 0.38.40", - "windows-sys 0.59.0", + "rustix 1.1.3", + "windows-sys 0.61.2", ] [[package]] name = "term" -version = "0.7.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" +checksum = "d8c27177b12a6399ffc08b98f76f7c9a1f4fe9fc967c784c5a071fa8d93cf7e1" dependencies = [ - "dirs-next", - "rustversion", - "winapi", + "windows-sys 0.61.2", ] [[package]] name = "thin-vec" -version = "0.2.13" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a38c90d48152c236a3ab59271da4f4ae63d678c5d7ad6b7714d7cb9760be5e4b" +checksum = "144f754d318415ac792f9d69fc87abbbfc043ce2ef041c60f16ad828f638717d" dependencies = [ "serde", ] @@ -4463,7 +4455,16 @@ version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ - "thiserror-impl", + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" +dependencies = [ + "thiserror-impl 2.0.17", ] [[package]] @@ -4474,35 +4475,34 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.111", ] [[package]] -name = "thread_local" -version = "1.1.8" +name = "thiserror-impl" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" dependencies = [ - "cfg-if", - "once_cell", + "proc-macro2", + "quote", + "syn 2.0.111", ] [[package]] -name = "time" -version = "0.1.45" +name = "thread_local" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" dependencies = [ - "libc", - "wasi 0.10.0+wasi-snapshot-preview1", - "winapi", + "cfg-if", ] [[package]] name = "time" -version = "0.3.36" +version = "0.3.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" dependencies = [ "deranged", "itoa", @@ -4515,15 +4515,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.2" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" +checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" [[package]] name = "time-macros" -version = "0.2.18" +version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3" dependencies = [ "num-conv", "time-core", @@ -4540,9 +4540,9 @@ dependencies = [ [[package]] name = "tinystr" -version = "0.7.6" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" dependencies = [ "displaydoc", "zerovec", @@ -4550,9 +4550,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.8.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" +checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" dependencies = [ "tinyvec_macros", ] @@ -4565,27 +4565,26 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.41.1" +version = "1.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22cfb5bee7a6a52939ca9224d6ac897bb669134078daa8735560897f69de4d33" +checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" dependencies = [ - "backtrace", "bytes", "libc", "mio", - "parking_lot 0.12.3", + "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.7", + "socket2 0.6.1", "tokio-macros", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] name = "tokio-executor-trait" -version = "2.1.3" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96a1593beae7759f592e1100c5997fe9e9ebf4b5968062f1fbcd807989cd1b79" +checksum = "6278565f9fd60c2d205dfbc827e8bb1236c2b1a57148708e95861eff7a6b3bad" dependencies = [ "async-trait", "executor-trait", @@ -4594,13 +4593,13 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.4.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" +checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.111", ] [[package]] @@ -4613,22 +4612,11 @@ dependencies = [ "tokio", ] -[[package]] -name = "tokio-rustls" -version = "0.23.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" -dependencies = [ - "rustls 0.20.9", - "tokio", - "webpki", -] - [[package]] name = "tokio-stream" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" dependencies = [ "futures-core", "pin-project-lite", @@ -4637,9 +4625,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.12" +version = "0.7.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" +checksum = "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594" dependencies = [ "bytes", "futures-core", @@ -4665,9 +4653,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" dependencies = [ "log", "pin-project-lite", @@ -4677,9 +4665,9 @@ dependencies = [ [[package]] name = "tracing-actix-web" -version = "0.7.14" +version = "0.7.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b87073920bcce23e9f5cb0d2671e9f01d6803bb5229c159b2f5ce6806d73ffc" +checksum = "2f28f45dd524790b44a7b372f7c3aec04a3af6b42d494e861b67de654cb25a5e" dependencies = [ "actix-web", "mutually_exclusive_features", @@ -4690,27 +4678,27 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.111", ] [[package]] name = "tracing-bunyan-formatter" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5c266b9ac83dedf0e0385ad78514949e6d89491269e7065bee51d2bb8ec7373" +checksum = "2d637245a0d8774bd48df6482e086c59a8b5348a910c3b0579354045a9d82411" dependencies = [ - "ahash 0.8.11", + "ahash 0.8.12", "gethostname", "log", "serde", "serde_json", - "time 0.3.36", + "time", "tracing", "tracing-core", "tracing-log 0.1.4", @@ -4719,9 +4707,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" dependencies = [ "once_cell", "valuable", @@ -4751,14 +4739,14 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.18" +version = "0.3.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e" dependencies = [ "matchers", "nu-ansi-term", "once_cell", - "regex", + "regex-automata", "sharded-slab", "smallvec", "thread_local", @@ -4769,9 +4757,9 @@ dependencies = [ [[package]] name = "triomphe" -version = "0.1.14" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef8f7726da4807b58ea5c96fdc122f80702030edc33b35aff9190a51148ccc85" +checksum = "dd69c5aa8f924c7519d6372789a74eac5b94fb0f8fcf0d4a97eb0bfc3e785f39" [[package]] name = "try-lock" @@ -4781,9 +4769,9 @@ checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "typenum" -version = "1.17.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" [[package]] name = "ucd-trie" @@ -4793,36 +4781,36 @@ checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" [[package]] name = "unicase" -version = "2.8.0" +version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e51b68083f157f853b6379db119d1c1be0e6e4dec98101079dec41f6f5cf6df" +checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-bidi" -version = "0.3.17" +version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" [[package]] name = "unicode-ident" -version = "1.0.13" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" +checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" [[package]] name = "unicode-normalization" -version = "0.1.24" +version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" +checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8" dependencies = [ "tinyvec", ] [[package]] name = "unicode-properties" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" +checksum = "7df058c713841ad818f1dc5d3fd88063241cc61f49f5fbea4b951e8cf5a8d71d" [[package]] name = "unicode-segmentation" @@ -4831,10 +4819,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" [[package]] -name = "unicode_categories" -version = "0.1.1" +name = "unicode-xid" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" [[package]] name = "universal-hash" @@ -4852,12 +4840,6 @@ version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" -[[package]] -name = "untrusted" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" - [[package]] name = "untrusted" version = "0.9.0" @@ -4866,9 +4848,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.3" +version = "2.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d157f1b96d14500ffdc1f10ba712e780825526c03d9a49b4d0324b0d9113ada" +checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" dependencies = [ "form_urlencoded", "idna", @@ -4876,12 +4858,6 @@ dependencies = [ "serde", ] -[[package]] -name = "utf16_iter" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" - [[package]] name = "utf8_iter" version = "1.0.4" @@ -4896,19 +4872,21 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.11.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" +checksum = "e2e054861b4bd027cd373e18e8d8d8e6548085000e41290d95ce0c373a654b4a" dependencies = [ - "getrandom 0.2.15", - "serde", + "getrandom 0.3.4", + "js-sys", + "serde_core", + "wasm-bindgen", ] [[package]] name = "valuable" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" [[package]] name = "vcpkg" @@ -4955,15 +4933,18 @@ checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" [[package]] name = "wasi" -version = "0.10.0+wasi-snapshot-preview1" +version = "0.11.1+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +name = "wasip2" +version = "1.0.1+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" +dependencies = [ + "wit-bindgen", +] [[package]] name = "wasite" @@ -4973,47 +4954,35 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" -version = "0.2.95" +version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e" +checksum = "0d759f433fa64a2d763d1340820e46e111a7a5ab75f993d1852d70b03dbb80fd" dependencies = [ "cfg-if", "once_cell", + "rustversion", "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.95" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358" -dependencies = [ - "bumpalo", - "log", - "once_cell", - "proc-macro2", - "quote", - "syn 2.0.87", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.45" +version = "0.4.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc7ec4f8827a71586374db3e87abdb5a2bb3a15afed140221307c3ec06b1f63b" +checksum = "836d9622d604feee9e5de25ac10e3ea5f2d65b41eac0d9ce72eb5deae707ce7c" dependencies = [ "cfg-if", "js-sys", + "once_cell", "wasm-bindgen", "web-sys", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.95" +version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56" +checksum = "48cb0d2638f8baedbc542ed444afc0644a29166f1595371af4fecf8ce1e7eeb3" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -5021,61 +4990,94 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.95" +version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" +checksum = "cefb59d5cd5f92d9dcf80e4683949f15ca4b511f4ac0a6e14d4e1ac60c6ecd40" dependencies = [ + "bumpalo", "proc-macro2", "quote", - "syn 2.0.87", - "wasm-bindgen-backend", + "syn 2.0.111", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.95" +version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" +checksum = "cbc538057e648b67f72a982e708d485b2efa771e1ac05fec311f9f63e5800db4" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-bindgen-test" +version = "0.3.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25e90e66d265d3a1efc0e72a54809ab90b9c0c515915c67cdf658689d2c22c6c" +dependencies = [ + "async-trait", + "cast", + "js-sys", + "libm", + "minicov", + "nu-ansi-term", + "num-traits", + "oorandom", + "serde", + "serde_json", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-bindgen-test-macro", +] + +[[package]] +name = "wasm-bindgen-test-macro" +version = "0.3.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7150335716dce6028bead2b848e72f47b45e7b9422f64cccdc23bedca89affc1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.111", +] [[package]] name = "web-sys" -version = "0.3.72" +version = "0.3.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6488b90108c040df0fe62fa815cbdee25124641df01814dd7282749234c6112" +checksum = "9b32828d774c412041098d182a8b38b16ea816958e07cf40eec2bc080ae137ac" dependencies = [ "js-sys", "wasm-bindgen", ] [[package]] -name = "webpki" -version = "0.22.4" +name = "webpki-roots" +version = "0.26.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" +checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" dependencies = [ - "ring 0.17.8", - "untrusted 0.9.0", + "webpki-roots 1.0.4", ] [[package]] name = "webpki-roots" -version = "0.22.6" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +checksum = "b2878ef029c47c6e8cf779119f20fcf52bde7ad42a731b2a304bc221df17571e" dependencies = [ - "webpki", + "rustls-pki-types", ] [[package]] name = "whoami" -version = "1.5.2" +version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "372d5b87f58ec45c384ba03563b03544dc5fadc3983e434b286913f5b4a9bb6d" +checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d" dependencies = [ - "redox_syscall 0.5.7", + "libredox", "wasite", - "web-sys", ] [[package]] @@ -5096,11 +5098,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.9" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -5111,11 +5113,61 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows-core" -version = "0.52.0" +version = "0.62.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" dependencies = [ - "windows-targets 0.52.6", + "windows-implement", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-implement" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.111", +] + +[[package]] +name = "windows-interface" +version = "0.59.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.111", +] + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-result" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" +dependencies = [ + "windows-link", ] [[package]] @@ -5138,11 +5190,20 @@ dependencies = [ [[package]] name = "windows-sys" -version = "0.59.0" +version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" dependencies = [ - "windows-targets 0.52.6", + "windows-targets 0.53.5", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", ] [[package]] @@ -5169,13 +5230,30 @@ dependencies = [ "windows_aarch64_gnullvm 0.52.6", "windows_aarch64_msvc 0.52.6", "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm", + "windows_i686_gnullvm 0.52.6", "windows_i686_msvc 0.52.6", "windows_x86_64_gnu 0.52.6", "windows_x86_64_gnullvm 0.52.6", "windows_x86_64_msvc 0.52.6", ] +[[package]] +name = "windows-targets" +version = "0.53.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" +dependencies = [ + "windows-link", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", +] + [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" @@ -5188,6 +5266,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" + [[package]] name = "windows_aarch64_msvc" version = "0.48.5" @@ -5200,6 +5284,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" + [[package]] name = "windows_i686_gnu" version = "0.48.5" @@ -5212,12 +5302,24 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" +[[package]] +name = "windows_i686_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" + [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" + [[package]] name = "windows_i686_msvc" version = "0.48.5" @@ -5230,6 +5332,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" +[[package]] +name = "windows_i686_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" + [[package]] name = "windows_x86_64_gnu" version = "0.48.5" @@ -5242,6 +5350,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" + [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" @@ -5254,6 +5368,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" + [[package]] name = "windows_x86_64_msvc" version = "0.48.5" @@ -5266,6 +5386,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" + [[package]] name = "winreg" version = "0.50.0" @@ -5299,16 +5425,16 @@ dependencies = [ ] [[package]] -name = "write16" -version = "1.0.0" +name = "wit-bindgen" +version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" [[package]] name = "writeable" -version = "0.5.5" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" [[package]] name = "x509-cert" @@ -5323,9 +5449,9 @@ dependencies = [ [[package]] name = "x509-parser" -version = "0.16.0" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcbc162f30700d6f3f82a24bf7cc62ffe7caea42c0b2cba8bf7f3ae50cf51f69" +checksum = "4569f339c0c402346d4a75a9e39cf8dad310e287eef1ff56d4c68e5067f53460" dependencies = [ "asn1-rs", "data-encoding", @@ -5334,8 +5460,8 @@ dependencies = [ "nom", "oid-registry", "rusticata-macros", - "thiserror", - "time 0.3.36", + "thiserror 2.0.17", + "time", ] [[package]] @@ -5349,11 +5475,10 @@ dependencies = [ [[package]] name = "yoke" -version = "0.7.4" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c5b1314b079b0930c31e3af543d8ee1757b1951ae1e1565ec704403a7240ca5" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" dependencies = [ - "serde", "stable_deref_trait", "yoke-derive", "zerofrom", @@ -5361,69 +5486,79 @@ dependencies = [ [[package]] name = "yoke-derive" -version = "0.7.4" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.111", "synstructure", ] [[package]] name = "zerocopy" -version = "0.7.35" +version = "0.8.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +checksum = "fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3" dependencies = [ - "byteorder", "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.35" +version = "0.8.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.111", ] [[package]] name = "zerofrom" -version = "0.1.4" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ec111ce797d0e0784a1116d0ddcdbea84322cd79e5d5ad173daeba4f93ab55" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" -version = "0.1.4" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.111", "synstructure", ] [[package]] name = "zeroize" -version = "1.8.1" +version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] [[package]] name = "zerovec" -version = "0.10.4" +version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" dependencies = [ "yoke", "zerofrom", @@ -5432,38 +5567,44 @@ dependencies = [ [[package]] name = "zerovec-derive" -version = "0.10.3" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.111", ] +[[package]] +name = "zmij" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0095ecd462946aa3927d9297b63ef82fb9a5316d7a37d134eeb36e58228615a" + [[package]] name = "zstd" -version = "0.13.2" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcf2b778a664581e31e389454a7072dab1647606d44f7feea22cd5abb9c9f3f9" +checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" dependencies = [ "zstd-safe", ] [[package]] name = "zstd-safe" -version = "7.2.1" +version = "7.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54a3ab4db68cea366acc5c897c7b4d4d1b8994a9cd6e6f841f8964566a419059" +checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" dependencies = [ "zstd-sys", ] [[package]] name = "zstd-sys" -version = "2.0.13+zstd.1.5.6" +version = "2.0.16+zstd.1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38ff0f21cfee8f97d94cef41359e0c89aa6113028ab0291aa8ca0038995a95aa" +checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748" dependencies = [ "cc", "pkg-config", diff --git a/Cargo.toml b/Cargo.toml index daebfa9..3fe5eda 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,7 +20,7 @@ required-features = ["explain"] [dependencies] actix-web = "4.3.1" -chrono = { version = "0.4.29", features = ["time", "serde"] } +chrono = { version = "0.4.29", features = ["serde"] } config = "0.13.4" reqwest = { version = "0.11.23", features = ["json", "blocking"] } serde = { version = "1.0.195", features = ["derive"] } @@ -67,14 +67,11 @@ redis = { version = "0.27.5", features = ["tokio-comp"] } [dependencies.sqlx] version = "0.8.1" features = [ - 'runtime-actix-rustls', + "runtime-tokio-rustls", "postgres", "uuid", - "tls", "chrono", "json", - "ipnetwork", - "offline", "macros" ] From 624b2632f4665aa3c659cea0f7f2efe72bb194a8 Mon Sep 17 00:00:00 2001 From: vsilent Date: Thu, 25 Dec 2025 21:27:11 +0200 Subject: [PATCH 02/35] create linux/macos binaries --- .github/workflows/rust.yml | 65 ++++++++++++++++++++++++++++++++++++-- 1 file changed, 62 insertions(+), 3 deletions(-) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 739553d..c60f2cc 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -12,10 +12,69 @@ env: jobs: build: - runs-on: ubuntu-latest + strategy: + matrix: + include: + - os: ubuntu-latest + target: x86_64-unknown-linux-gnu + artifact_name: stacker-linux-x86_64 + - os: macos-latest + target: x86_64-apple-darwin + artifact_name: stacker-macos-x86_64 + - os: macos-latest + target: aarch64-apple-darwin + artifact_name: stacker-macos-aarch64 + runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 + + - name: Install Rust toolchain + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + target: ${{ matrix.target }} + override: true + + - name: Cache cargo registry + uses: actions/cache@v4 + with: + path: ~/.cargo/registry + key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo-registry- + + - name: Cache cargo index + uses: actions/cache@v4 + with: + path: ~/.cargo/git + key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo-index- + + - name: Cache target directory + uses: actions/cache@v4 + with: + path: target + key: ${{ runner.os }}-target-${{ matrix.target }}-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-target-${{ matrix.target }}- + - name: cargo build - run: cargo build --verbose + run: cargo build --release --target ${{ matrix.target }} --verbose + - name: cargo test - run: cargo test --verbose + run: cargo test --target ${{ matrix.target }} --verbose + + - name: Prepare binaries + run: | + mkdir -p artifacts + cp target/${{ matrix.target }}/release/server artifacts/server + cp target/${{ matrix.target }}/release/console artifacts/console + tar -czf ${{ matrix.artifact_name }}.tar.gz -C artifacts . + + - name: Upload artifacts + uses: actions/upload-artifact@v4 + with: + name: ${{ matrix.artifact_name }} + path: ${{ matrix.artifact_name }}.tar.gz + retention-days: 7 From 396768ef19ffd3de4ce0fbf34e6a84e5fa9ae606 Mon Sep 17 00:00:00 2001 From: vsilent Date: Thu, 25 Dec 2025 21:41:20 +0200 Subject: [PATCH 03/35] downgrade sqlx --- Cargo.lock | 573 +++++++++++++++++++++++++++++++++-------------------- Cargo.toml | 9 +- 2 files changed, 363 insertions(+), 219 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4217d1f..0056afa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -195,7 +195,7 @@ dependencies = [ "serde_urlencoded", "smallvec", "socket2 0.6.1", - "time", + "time 0.3.44", "tracing", "url", ] @@ -356,6 +356,12 @@ dependencies = [ "url", ] +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + [[package]] name = "android_system_properties" version = "0.1.5" @@ -443,7 +449,7 @@ dependencies = [ "num-traits", "rusticata-macros", "thiserror 2.0.17", - "time", + "time 0.3.44", ] [[package]] @@ -628,6 +634,15 @@ dependencies = [ "syn 2.0.111", ] +[[package]] +name = "atoi" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7c57d12312ff59c811c0643f4d80830505833c9ffaebd193d819392b265be8e" +dependencies = [ + "num-traits", +] + [[package]] name = "atoi" version = "2.0.0" @@ -684,9 +699,6 @@ name = "bitflags" version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" -dependencies = [ - "serde_core", -] [[package]] name = "block-buffer" @@ -837,7 +849,7 @@ dependencies = [ "hashlink 0.9.1", "mini-moka", "once_cell", - "parking_lot", + "parking_lot 0.12.5", "petgraph", "regex", "rhai", @@ -886,16 +898,18 @@ checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" [[package]] name = "chrono" -version = "0.4.42" +version = "0.4.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" +checksum = "d87d9d13be47a5b7c3907137f1290b0459a7f80efb26be8c52afb11963bccb02" dependencies = [ + "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "serde", + "time 0.1.45", "wasm-bindgen", - "windows-link", + "windows-targets 0.48.5", ] [[package]] @@ -936,7 +950,7 @@ version = "4.5.49" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" dependencies = [ - "heck", + "heck 0.5.0", "proc-macro2", "quote", "syn 2.0.111", @@ -1056,7 +1070,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb" dependencies = [ "percent-encoding", - "time", + "time 0.3.44", "version_check", ] @@ -1210,7 +1224,7 @@ dependencies = [ "hashbrown 0.14.5", "lock_api", "once_cell", - "parking_lot_core", + "parking_lot_core 0.9.12", ] [[package]] @@ -1425,11 +1439,30 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", - "const-oid", "crypto-common", "subtle", ] +[[package]] +name = "dirs" +version = "4.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs-sys" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + [[package]] name = "displaydoc" version = "0.2.5" @@ -1460,7 +1493,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d6fdd6fa1c9e8e716f5f73406b868929f468702449621e7397066478b9bf89c" dependencies = [ "derive_builder 0.13.1", - "indexmap", + "indexmap 2.12.1", "serde", "serde_yaml", ] @@ -1702,6 +1735,17 @@ dependencies = [ "futures-util", ] +[[package]] +name = "futures-intrusive" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a604f7a68fbf8103337523b1fadc8ade7361ee3f112f7c680ad179651616aed5" +dependencies = [ + "futures-core", + "lock_api", + "parking_lot 0.11.2", +] + [[package]] name = "futures-intrusive" version = "0.5.0" @@ -1710,7 +1754,7 @@ checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" dependencies = [ "futures-core", "lock_api", - "parking_lot", + "parking_lot 0.12.5", ] [[package]] @@ -1878,7 +1922,7 @@ dependencies = [ "futures-sink", "futures-util", "http", - "indexmap", + "indexmap 2.12.1", "slab", "tokio", "tokio-util", @@ -1901,6 +1945,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ "ahash 0.8.12", + "allocator-api2", ] [[package]] @@ -1920,6 +1965,15 @@ version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" +[[package]] +name = "hashlink" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" +dependencies = [ + "hashbrown 0.14.5", +] + [[package]] name = "hashlink" version = "0.9.1" @@ -1938,6 +1992,15 @@ dependencies = [ "hashbrown 0.15.5", ] +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "heck" version = "0.5.0" @@ -2219,6 +2282,16 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8a5a9a0ff0086c7a148acb942baaabeadf9504d10400b5a05645853729b9cd2" +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + [[package]] name = "indexmap" version = "2.12.1" @@ -2273,6 +2346,12 @@ version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" +[[package]] +name = "ipnetwork" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f84f1612606f3753f205a4e9a2efd6fe5b4c573a6269b2cc6c3003d44a0d127" + [[package]] name = "is-terminal" version = "0.4.17" @@ -2365,7 +2444,7 @@ dependencies = [ "flume", "futures-core", "futures-io", - "parking_lot", + "parking_lot 0.12.5", "pinky-swear", "reactor-trait", "serde", @@ -2379,9 +2458,6 @@ name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" -dependencies = [ - "spin 0.9.8", -] [[package]] name = "libc" @@ -2406,16 +2482,6 @@ dependencies = [ "redox_syscall 0.6.0", ] -[[package]] -name = "libsqlite3-sys" -version = "0.30.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" -dependencies = [ - "pkg-config", - "vcpkg", -] - [[package]] name = "linked-hash-map" version = "0.5.6" @@ -2617,22 +2683,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "num-bigint-dig" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7" -dependencies = [ - "lazy_static", - "libm", - "num-integer", - "num-iter", - "num-traits", - "rand 0.8.5", - "smallvec", - "zeroize", -] - [[package]] name = "num-conv" version = "0.1.0" @@ -2648,17 +2698,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "num-iter" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" -dependencies = [ - "autocfg", - "num-integer", - "num-traits", -] - [[package]] name = "num-traits" version = "0.2.19" @@ -2797,6 +2836,17 @@ version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core 0.8.6", +] + [[package]] name = "parking_lot" version = "0.12.5" @@ -2804,7 +2854,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" dependencies = [ "lock_api", - "parking_lot_core", + "parking_lot_core 0.9.12", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" +dependencies = [ + "cfg-if", + "instant", + "libc", + "redox_syscall 0.2.16", + "smallvec", + "winapi", ] [[package]] @@ -2907,7 +2971,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap", + "indexmap 2.12.1", ] [[package]] @@ -2950,7 +3014,7 @@ checksum = "b1ea6e230dd3a64d61bcb8b79e597d3ab6b4c94ec7a234ce687dd718b4f2e657" dependencies = [ "doc-comment", "flume", - "parking_lot", + "parking_lot 0.12.5", "tracing", ] @@ -2965,17 +3029,6 @@ dependencies = [ "futures-io", ] -[[package]] -name = "pkcs1" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" -dependencies = [ - "der", - "pkcs8", - "spki", -] - [[package]] name = "pkcs12" version = "0.1.0" @@ -3006,16 +3059,6 @@ dependencies = [ "spki", ] -[[package]] -name = "pkcs8" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" -dependencies = [ - "der", - "spki", -] - [[package]] name = "pkg-config" version = "0.3.32" @@ -3297,6 +3340,15 @@ dependencies = [ "url", ] +[[package]] +name = "redox_syscall" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +dependencies = [ + "bitflags 1.3.2", +] + [[package]] name = "redox_syscall" version = "0.5.18" @@ -3315,6 +3367,17 @@ dependencies = [ "bitflags 2.10.0", ] +[[package]] +name = "redox_users" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" +dependencies = [ + "getrandom 0.2.16", + "libredox", + "thiserror 1.0.69", +] + [[package]] name = "regex" version = "1.12.2" @@ -3426,6 +3489,21 @@ dependencies = [ "syn 2.0.111", ] +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin 0.5.2", + "untrusted 0.7.1", + "web-sys", + "winapi", +] + [[package]] name = "ring" version = "0.17.14" @@ -3436,7 +3514,7 @@ dependencies = [ "cfg-if", "getrandom 0.2.16", "libc", - "untrusted", + "untrusted 0.9.0", "windows-sys 0.52.0", ] @@ -3451,26 +3529,6 @@ dependencies = [ "serde", ] -[[package]] -name = "rsa" -version = "0.9.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40a0376c50d0358279d9d643e4bf7b7be212f1f4ff1da9070a7b54d22ef75c88" -dependencies = [ - "const-oid", - "digest", - "num-bigint-dig", - "num-integer", - "num-traits", - "pkcs1", - "pkcs8", - "rand_core 0.6.4", - "signature", - "spki", - "subtle", - "zeroize", -] - [[package]] name = "rust-ini" version = "0.18.0" @@ -3526,6 +3584,18 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "rustls" +version = "0.20.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" +dependencies = [ + "log", + "ring 0.16.20", + "sct", + "webpki", +] + [[package]] name = "rustls" version = "0.23.35" @@ -3533,7 +3603,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "533f54bc6a7d4f647e46ad909549eda97bf5afc1585190ef692b4286b198bd8f" dependencies = [ "once_cell", - "ring", + "ring 0.17.14", "rustls-pki-types", "rustls-webpki", "subtle", @@ -3547,7 +3617,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70cc376c6ba1823ae229bacf8ad93c136d93524eab0e4e5e0e4f96b9c4e5b212" dependencies = [ "log", - "rustls", + "rustls 0.23.35", "rustls-native-certs", "rustls-pki-types", "rustls-webpki", @@ -3599,9 +3669,9 @@ version = "0.103.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52" dependencies = [ - "ring", + "ring 0.17.14", "rustls-pki-types", - "untrusted", + "untrusted 0.9.0", ] [[package]] @@ -3660,6 +3730,16 @@ dependencies = [ "sha2", ] +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring 0.17.14", + "untrusted 0.9.0", +] + [[package]] name = "security-framework" version = "2.11.1" @@ -3776,7 +3856,7 @@ version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70c0e00fab6460447391a1981c21341746bc2d0178a7c46a3bbf667f450ac6e4" dependencies = [ - "indexmap", + "indexmap 2.12.1", "itertools 0.12.1", "num-traits", "once_cell", @@ -3820,7 +3900,7 @@ version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ - "indexmap", + "indexmap 2.12.1", "itoa", "ryu", "serde", @@ -3880,16 +3960,6 @@ dependencies = [ "libc", ] -[[package]] -name = "signature" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" -dependencies = [ - "digest", - "rand_core 0.6.4", -] - [[package]] name = "simd-adler32" version = "0.3.8" @@ -3952,7 +4022,7 @@ dependencies = [ "slog", "term", "thread_local", - "time", + "time 0.3.44", ] [[package]] @@ -4031,17 +4101,35 @@ dependencies = [ "der", ] +[[package]] +name = "sqlformat" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7bba3a93db0cc4f7bdece8bb09e77e2e785c20bfebf79eb8340ed80708048790" +dependencies = [ + "nom", + "unicode_categories", +] + +[[package]] +name = "sqlx" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8de3b03a925878ed54a954f621e64bf55a3c1bd29652d0d1a17830405350188" +dependencies = [ + "sqlx-core 0.6.3", + "sqlx-macros 0.6.3", +] + [[package]] name = "sqlx" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc" dependencies = [ - "sqlx-core", - "sqlx-macros", - "sqlx-mysql", + "sqlx-core 0.8.6", + "sqlx-macros 0.8.6", "sqlx-postgres", - "sqlx-sqlite", ] [[package]] @@ -4053,7 +4141,63 @@ dependencies = [ "async-trait", "casbin", "dotenvy", - "sqlx", + "sqlx 0.8.6", +] + +[[package]] +name = "sqlx-core" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa8241483a83a3f33aa5fff7e7d9def398ff9990b2752b6c6112b83c6d246029" +dependencies = [ + "ahash 0.7.8", + "atoi 1.0.0", + "base64 0.13.1", + "bitflags 1.3.2", + "byteorder", + "bytes", + "chrono", + "crc", + "crossbeam-queue", + "dirs", + "dotenvy", + "either", + "event-listener 2.5.3", + "futures-channel", + "futures-core", + "futures-intrusive 0.4.2", + "futures-util", + "hashlink 0.8.4", + "hex", + "hkdf", + "hmac", + "indexmap 1.9.3", + "ipnetwork", + "itoa", + "libc", + "log", + "md-5", + "memchr", + "once_cell", + "paste", + "percent-encoding", + "rand 0.8.5", + "rustls 0.20.9", + "rustls-pemfile 1.0.4", + "serde", + "serde_json", + "sha1", + "sha2", + "smallvec", + "sqlformat", + "sqlx-rt", + "stringprep", + "thiserror 1.0.69", + "tokio-stream", + "url", + "uuid", + "webpki-roots", + "whoami", ] [[package]] @@ -4064,24 +4208,22 @@ checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6" dependencies = [ "base64 0.22.1", "bytes", - "chrono", "crc", "crossbeam-queue", "either", "event-listener 5.4.1", "futures-core", - "futures-intrusive", + "futures-intrusive 0.5.0", "futures-io", "futures-util", "hashbrown 0.15.5", "hashlink 0.10.0", - "indexmap", + "indexmap 2.12.1", "log", "memchr", "native-tls", "once_cell", "percent-encoding", - "rustls", "serde", "serde_json", "sha2", @@ -4091,8 +4233,28 @@ dependencies = [ "tokio-stream", "tracing", "url", - "uuid", - "webpki-roots 0.26.11", +] + +[[package]] +name = "sqlx-macros" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9966e64ae989e7e575b19d7265cb79d7fc3cbbdf179835cb0d716f294c2049c9" +dependencies = [ + "dotenvy", + "either", + "heck 0.4.1", + "hex", + "once_cell", + "proc-macro2", + "quote", + "serde", + "serde_json", + "sha2", + "sqlx-core 0.6.3", + "sqlx-rt", + "syn 1.0.109", + "url", ] [[package]] @@ -4103,7 +4265,7 @@ checksum = "a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d" dependencies = [ "proc-macro2", "quote", - "sqlx-core", + "sqlx-core 0.8.6", "sqlx-macros-core", "syn 2.0.111", ] @@ -4116,7 +4278,7 @@ checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b" dependencies = [ "dotenvy", "either", - "heck", + "heck 0.5.0", "hex", "once_cell", "proc-macro2", @@ -4124,70 +4286,23 @@ dependencies = [ "serde", "serde_json", "sha2", - "sqlx-core", - "sqlx-mysql", + "sqlx-core 0.8.6", "sqlx-postgres", - "sqlx-sqlite", "syn 2.0.111", "tokio", "url", ] -[[package]] -name = "sqlx-mysql" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526" -dependencies = [ - "atoi", - "base64 0.22.1", - "bitflags 2.10.0", - "byteorder", - "bytes", - "chrono", - "crc", - "digest", - "dotenvy", - "either", - "futures-channel", - "futures-core", - "futures-io", - "futures-util", - "generic-array", - "hex", - "hkdf", - "hmac", - "itoa", - "log", - "md-5", - "memchr", - "once_cell", - "percent-encoding", - "rand 0.8.5", - "rsa", - "serde", - "sha1", - "sha2", - "smallvec", - "sqlx-core", - "stringprep", - "thiserror 2.0.17", - "tracing", - "uuid", - "whoami", -] - [[package]] name = "sqlx-postgres" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46" dependencies = [ - "atoi", + "atoi 2.0.0", "base64 0.22.1", "bitflags 2.10.0", "byteorder", - "chrono", "crc", "dotenvy", "etcetera", @@ -4208,38 +4323,22 @@ dependencies = [ "serde_json", "sha2", "smallvec", - "sqlx-core", + "sqlx-core 0.8.6", "stringprep", "thiserror 2.0.17", "tracing", - "uuid", "whoami", ] [[package]] -name = "sqlx-sqlite" -version = "0.8.6" +name = "sqlx-rt" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2d12fe70b2c1b4401038055f90f151b78208de1f9f89a7dbfd41587a10c3eea" +checksum = "804d3f245f894e61b1e6263c84b23ca675d96753b5abfd5cc8597d86806e8024" dependencies = [ - "atoi", - "chrono", - "flume", - "futures-channel", - "futures-core", - "futures-executor", - "futures-intrusive", - "futures-util", - "libsqlite3-sys", - "log", - "percent-encoding", - "serde", - "serde_urlencoded", - "sqlx-core", - "thiserror 2.0.17", - "tracing", - "url", - "uuid", + "once_cell", + "tokio", + "tokio-rustls", ] [[package]] @@ -4272,7 +4371,7 @@ dependencies = [ "futures-util", "glob", "hmac", - "indexmap", + "indexmap 2.12.1", "lapin", "rand 0.8.5", "redis", @@ -4285,7 +4384,7 @@ dependencies = [ "serde_valid", "serde_yaml", "sha2", - "sqlx", + "sqlx 0.6.3", "sqlx-adapter", "thiserror 1.0.69", "tokio", @@ -4498,6 +4597,17 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "time" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" +dependencies = [ + "libc", + "wasi 0.10.0+wasi-snapshot-preview1", + "winapi", +] + [[package]] name = "time" version = "0.3.44" @@ -4572,7 +4682,7 @@ dependencies = [ "bytes", "libc", "mio", - "parking_lot", + "parking_lot 0.12.5", "pin-project-lite", "signal-hook-registry", "socket2 0.6.1", @@ -4612,6 +4722,17 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-rustls" +version = "0.23.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" +dependencies = [ + "rustls 0.20.9", + "tokio", + "webpki", +] + [[package]] name = "tokio-stream" version = "0.1.17" @@ -4698,7 +4819,7 @@ dependencies = [ "log", "serde", "serde_json", - "time", + "time 0.3.44", "tracing", "tracing-core", "tracing-log 0.1.4", @@ -4824,6 +4945,12 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" +[[package]] +name = "unicode_categories" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" + [[package]] name = "universal-hash" version = "0.5.1" @@ -4840,6 +4967,12 @@ version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + [[package]] name = "untrusted" version = "0.9.0" @@ -4931,6 +5064,12 @@ version = "0.9.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" +[[package]] +name = "wasi" +version = "0.10.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" + [[package]] name = "wasi" version = "0.11.1+wasi-snapshot-preview1" @@ -5053,21 +5192,22 @@ dependencies = [ ] [[package]] -name = "webpki-roots" -version = "0.26.11" +name = "webpki" +version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" +checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" dependencies = [ - "webpki-roots 1.0.4", + "ring 0.17.14", + "untrusted 0.9.0", ] [[package]] name = "webpki-roots" -version = "1.0.4" +version = "0.22.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2878ef029c47c6e8cf779119f20fcf52bde7ad42a731b2a304bc221df17571e" +checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" dependencies = [ - "rustls-pki-types", + "webpki", ] [[package]] @@ -5078,6 +5218,7 @@ checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d" dependencies = [ "libredox", "wasite", + "web-sys", ] [[package]] @@ -5461,7 +5602,7 @@ dependencies = [ "oid-registry", "rusticata-macros", "thiserror 2.0.17", - "time", + "time 0.3.44", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 3fe5eda..5159b15 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,7 +20,7 @@ required-features = ["explain"] [dependencies] actix-web = "4.3.1" -chrono = { version = "0.4.29", features = ["serde"] } +chrono = { version = "0.4.29", features = ["time", "serde"] } config = "0.13.4" reqwest = { version = "0.11.23", features = ["json", "blocking"] } serde = { version = "1.0.195", features = ["derive"] } @@ -65,13 +65,16 @@ base64 = "0.22.1" redis = { version = "0.27.5", features = ["tokio-comp"] } [dependencies.sqlx] -version = "0.8.1" +version = "0.6.3" features = [ - "runtime-tokio-rustls", + "runtime-actix-rustls", "postgres", "uuid", + "tls", "chrono", "json", + "ipnetwork", + "offline", "macros" ] From baf230331f676f11d3e9fea11b8310a34e6ff7a4 Mon Sep 17 00:00:00 2001 From: vsilent Date: Thu, 25 Dec 2025 21:59:06 +0200 Subject: [PATCH 04/35] sqlx cache --- .github/workflows/rust.yml | 148 ++++++++++++++++++++++++------------- 1 file changed, 96 insertions(+), 52 deletions(-) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index c60f2cc..ddc8f65 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -11,7 +11,56 @@ env: SQLX_OFFLINE: true jobs: + prepare-sqlx-cache: + name: Prepare sqlx offline cache + runs-on: ubuntu-latest + services: + postgres: + image: postgres:16 + env: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: stacker + ports: + - 5432:5432 + options: >- + --health-cmd "pg_isready -U postgres" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + steps: + - uses: actions/checkout@v4 + - name: Install Rust toolchain + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + profile: minimal + override: true + - name: Install sqlx-cli + run: cargo install sqlx-cli --no-default-features --features postgres,rustls + - name: Set DATABASE_URL + run: echo "DATABASE_URL=postgres://postgres:postgres@localhost:5432/stacker" >> $GITHUB_ENV + - name: Wait for Postgres + run: | + for i in {1..10}; do + pg_isready -h localhost -U postgres && break + sleep 3 + done + - name: Create database (idempotent) + run: sqlx database create || true + - name: Run migrations + run: sqlx migrate run + - name: Generate sqlx offline cache + run: cargo sqlx prepare -- --workspace --all-targets + - name: Upload .sqlx cache + uses: actions/upload-artifact@v4 + with: + name: sqlx-cache + path: .sqlx + build: + name: Build binaries (Linux/macOS) + needs: prepare-sqlx-cache strategy: matrix: include: @@ -26,55 +75,50 @@ jobs: artifact_name: stacker-macos-aarch64 runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v4 - - - name: Install Rust toolchain - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - target: ${{ matrix.target }} - override: true - - - name: Cache cargo registry - uses: actions/cache@v4 - with: - path: ~/.cargo/registry - key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} - restore-keys: | - ${{ runner.os }}-cargo-registry- - - - name: Cache cargo index - uses: actions/cache@v4 - with: - path: ~/.cargo/git - key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }} - restore-keys: | - ${{ runner.os }}-cargo-index- - - - name: Cache target directory - uses: actions/cache@v4 - with: - path: target - key: ${{ runner.os }}-target-${{ matrix.target }}-${{ hashFiles('**/Cargo.lock') }} - restore-keys: | - ${{ runner.os }}-target-${{ matrix.target }}- - - - name: cargo build - run: cargo build --release --target ${{ matrix.target }} --verbose - - - name: cargo test - run: cargo test --target ${{ matrix.target }} --verbose - - - name: Prepare binaries - run: | - mkdir -p artifacts - cp target/${{ matrix.target }}/release/server artifacts/server - cp target/${{ matrix.target }}/release/console artifacts/console - tar -czf ${{ matrix.artifact_name }}.tar.gz -C artifacts . - - - name: Upload artifacts - uses: actions/upload-artifact@v4 - with: - name: ${{ matrix.artifact_name }} - path: ${{ matrix.artifact_name }}.tar.gz - retention-days: 7 + - uses: actions/checkout@v4 + - name: Download sqlx cache + uses: actions/download-artifact@v4 + with: + name: sqlx-cache + path: .sqlx + - name: Install Rust toolchain + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + target: ${{ matrix.target }} + override: true + - name: Cache cargo registry + uses: actions/cache@v4 + with: + path: ~/.cargo/registry + key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo-registry- + - name: Cache cargo index + uses: actions/cache@v4 + with: + path: ~/.cargo/git + key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo-index- + - name: Cache target directory + uses: actions/cache@v4 + with: + path: target + key: ${{ runner.os }}-target-${{ matrix.target }}-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-target-${{ matrix.target }}- + - name: Build (release) + run: cargo build --release --target ${{ matrix.target }} --verbose + - name: Prepare binaries + run: | + mkdir -p artifacts + cp target/${{ matrix.target }}/release/server artifacts/server + cp target/${{ matrix.target }}/release/console artifacts/console + tar -czf ${{ matrix.artifact_name }}.tar.gz -C artifacts . + - name: Upload binaries + uses: actions/upload-artifact@v4 + with: + name: ${{ matrix.artifact_name }} + path: ${{ matrix.artifact_name }}.tar.gz + retention-days: 7 From a06ffd115db568acf6dd655cc8ffd3de162902ab Mon Sep 17 00:00:00 2001 From: vsilent Date: Thu, 25 Dec 2025 22:15:27 +0200 Subject: [PATCH 05/35] Disable SQLX_OFFLINE for prepare --- .github/workflows/rust.yml | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index ddc8f65..0435eb3 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -30,6 +30,8 @@ jobs: --health-retries 5 steps: - uses: actions/checkout@v4 + - name: Disable SQLX_OFFLINE for prepare + run: echo "SQLX_OFFLINE=false" >> $GITHUB_ENV - name: Install Rust toolchain uses: actions-rs/toolchain@v1 with: @@ -50,13 +52,13 @@ jobs: run: sqlx database create || true - name: Run migrations run: sqlx migrate run - - name: Generate sqlx offline cache - run: cargo sqlx prepare -- --workspace --all-targets - - name: Upload .sqlx cache + - name: Generate sqlx offline cache (sqlx 0.6) + run: cargo sqlx prepare + - name: Upload sqlx-data.json uses: actions/upload-artifact@v4 with: name: sqlx-cache - path: .sqlx + path: sqlx-data.json build: name: Build binaries (Linux/macOS) @@ -80,7 +82,9 @@ jobs: uses: actions/download-artifact@v4 with: name: sqlx-cache - path: .sqlx + path: . + - name: Ensure SQLX_OFFLINE enabled + run: echo "SQLX_OFFLINE=true" >> $GITHUB_ENV - name: Install Rust toolchain uses: actions-rs/toolchain@v1 with: From b47dad9da81a84fd5f43cd73aec11644da4e8810 Mon Sep 17 00:00:00 2001 From: vsilent Date: Thu, 25 Dec 2025 22:30:02 +0200 Subject: [PATCH 06/35] sqlx-date.json generate --- .github/workflows/rust.yml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 0435eb3..e4aae15 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -8,12 +8,13 @@ on: env: CARGO_TERM_COLOR: always - SQLX_OFFLINE: true jobs: prepare-sqlx-cache: name: Prepare sqlx offline cache runs-on: ubuntu-latest + env: + SQLX_OFFLINE: false services: postgres: image: postgres:16 @@ -30,8 +31,6 @@ jobs: --health-retries 5 steps: - uses: actions/checkout@v4 - - name: Disable SQLX_OFFLINE for prepare - run: echo "SQLX_OFFLINE=false" >> $GITHUB_ENV - name: Install Rust toolchain uses: actions-rs/toolchain@v1 with: @@ -63,6 +62,8 @@ jobs: build: name: Build binaries (Linux/macOS) needs: prepare-sqlx-cache + env: + SQLX_OFFLINE: true strategy: matrix: include: @@ -83,8 +84,6 @@ jobs: with: name: sqlx-cache path: . - - name: Ensure SQLX_OFFLINE enabled - run: echo "SQLX_OFFLINE=true" >> $GITHUB_ENV - name: Install Rust toolchain uses: actions-rs/toolchain@v1 with: From 930fc11c0112871fc0e87616c1d07e52d4900722 Mon Sep 17 00:00:00 2001 From: vsilent Date: Thu, 25 Dec 2025 22:38:49 +0200 Subject: [PATCH 07/35] sqlx-date.json check --- .github/workflows/rust.yml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index e4aae15..9aebff8 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -53,6 +53,10 @@ jobs: run: sqlx migrate run - name: Generate sqlx offline cache (sqlx 0.6) run: cargo sqlx prepare + - name: Verify sqlx-data.json was generated + run: | + ls -lh sqlx-data.json + head -50 sqlx-data.json - name: Upload sqlx-data.json uses: actions/upload-artifact@v4 with: @@ -84,6 +88,11 @@ jobs: with: name: sqlx-cache path: . + - name: Verify sqlx-data.json exists + run: | + ls -la sqlx-data.json || echo "File not found in current dir" + pwd + ls -la | head -20 - name: Install Rust toolchain uses: actions-rs/toolchain@v1 with: From b77855e08b1d07b26db7a93ff30bf35d73988d0b Mon Sep 17 00:00:00 2001 From: vsilent Date: Thu, 25 Dec 2025 22:46:09 +0200 Subject: [PATCH 08/35] sqlx-date.json check --- .github/workflows/rust.yml | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 9aebff8..123c705 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -53,15 +53,15 @@ jobs: run: sqlx migrate run - name: Generate sqlx offline cache (sqlx 0.6) run: cargo sqlx prepare - - name: Verify sqlx-data.json was generated + - name: Verify .sqlx/ cache was generated run: | - ls -lh sqlx-data.json - head -50 sqlx-data.json - - name: Upload sqlx-data.json + ls -lh .sqlx/ || echo "No .sqlx directory found" + find .sqlx -type f | head -20 + - name: Upload .sqlx cache uses: actions/upload-artifact@v4 with: name: sqlx-cache - path: sqlx-data.json + path: .sqlx build: name: Build binaries (Linux/macOS) @@ -88,11 +88,10 @@ jobs: with: name: sqlx-cache path: . - - name: Verify sqlx-data.json exists + - name: Verify .sqlx/ cache exists run: | - ls -la sqlx-data.json || echo "File not found in current dir" - pwd - ls -la | head -20 + ls -lh .sqlx/ || echo ".sqlx directory not found" + find .sqlx -type f 2>/dev/null | wc -l - name: Install Rust toolchain uses: actions-rs/toolchain@v1 with: From 3a1063a2a89e79fce26b709ecf8159768314d5a3 Mon Sep 17 00:00:00 2001 From: vsilent Date: Thu, 25 Dec 2025 22:52:46 +0200 Subject: [PATCH 09/35] sqlx-date.json check --- .github/workflows/rust.yml | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 123c705..1aa5acb 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -51,17 +51,26 @@ jobs: run: sqlx database create || true - name: Run migrations run: sqlx migrate run + - name: Check project compiles first + run: cargo check --all-targets - name: Generate sqlx offline cache (sqlx 0.6) run: cargo sqlx prepare - name: Verify .sqlx/ cache was generated run: | - ls -lh .sqlx/ || echo "No .sqlx directory found" + if [ ! -d ".sqlx" ]; then + echo "ERROR: .sqlx directory was not created" + exit 1 + fi + echo ".sqlx directory contents:" + ls -lh .sqlx/ || echo "Directory empty or inaccessible" + echo "Query cache files:" find .sqlx -type f | head -20 - name: Upload .sqlx cache uses: actions/upload-artifact@v4 with: name: sqlx-cache path: .sqlx + if-no-files-found: error build: name: Build binaries (Linux/macOS) From 96239e3fce169a8ff39ff757180f227f0e412e46 Mon Sep 17 00:00:00 2001 From: vsilent Date: Thu, 25 Dec 2025 23:02:25 +0200 Subject: [PATCH 10/35] sqlx-date.json check --- .github/workflows/rust.yml | 68 -------------------------------------- 1 file changed, 68 deletions(-) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 1aa5acb..f8d55dc 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -10,71 +10,8 @@ env: CARGO_TERM_COLOR: always jobs: - prepare-sqlx-cache: - name: Prepare sqlx offline cache - runs-on: ubuntu-latest - env: - SQLX_OFFLINE: false - services: - postgres: - image: postgres:16 - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: stacker - ports: - - 5432:5432 - options: >- - --health-cmd "pg_isready -U postgres" - --health-interval 10s - --health-timeout 5s - --health-retries 5 - steps: - - uses: actions/checkout@v4 - - name: Install Rust toolchain - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - profile: minimal - override: true - - name: Install sqlx-cli - run: cargo install sqlx-cli --no-default-features --features postgres,rustls - - name: Set DATABASE_URL - run: echo "DATABASE_URL=postgres://postgres:postgres@localhost:5432/stacker" >> $GITHUB_ENV - - name: Wait for Postgres - run: | - for i in {1..10}; do - pg_isready -h localhost -U postgres && break - sleep 3 - done - - name: Create database (idempotent) - run: sqlx database create || true - - name: Run migrations - run: sqlx migrate run - - name: Check project compiles first - run: cargo check --all-targets - - name: Generate sqlx offline cache (sqlx 0.6) - run: cargo sqlx prepare - - name: Verify .sqlx/ cache was generated - run: | - if [ ! -d ".sqlx" ]; then - echo "ERROR: .sqlx directory was not created" - exit 1 - fi - echo ".sqlx directory contents:" - ls -lh .sqlx/ || echo "Directory empty or inaccessible" - echo "Query cache files:" - find .sqlx -type f | head -20 - - name: Upload .sqlx cache - uses: actions/upload-artifact@v4 - with: - name: sqlx-cache - path: .sqlx - if-no-files-found: error - build: name: Build binaries (Linux/macOS) - needs: prepare-sqlx-cache env: SQLX_OFFLINE: true strategy: @@ -92,11 +29,6 @@ jobs: runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 - - name: Download sqlx cache - uses: actions/download-artifact@v4 - with: - name: sqlx-cache - path: . - name: Verify .sqlx/ cache exists run: | ls -lh .sqlx/ || echo ".sqlx directory not found" From 8086318c5bb800faa630f2640d7dbc900da73a6f Mon Sep 17 00:00:00 2001 From: vsilent Date: Fri, 26 Dec 2025 11:48:10 +0200 Subject: [PATCH 11/35] =?UTF-8?q?sqlx=200.6=20=E2=86=92=200.8=20migration?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/docker.yml | 16 +- .github/workflows/rust.yml | 2 +- ...43010c534673240007b76da8b92288c5223e9.json | 104 ++++ ...012242345a8b4e4f9d838dc6d44cc34a89433.json | 46 ++ ...cd8dbfd785bb982a0622d3c05afb2ab3e260f.json | 76 +++ ...298f6d6f6f231554d80ed621076157af7f80a.json | 25 + ...bcfe5f968b31500e8c8cf97fe16814bc04164.json | 20 - ...69b6857e5f3c8f4292ba9c4491e062591575b.json | 28 + ...766573c91b2775a086c65bc9a5fdc91300bb0.json | 17 - ...36247a328db780a48da47c9402e1d3ebd80c9.json | 12 - ...44610fb79a1b9330730c65953f0c1b88c2a53.json | 20 - ...2077a054026cb2bc0c010aba218506e76110f.json | 64 ++ ...74e0c9173f355d69459333acf181ff2a82a1c.json | 15 + ...07431de81f886f6a8d6e0fbcd7b6633d30b98.json | 100 +++ ...30a215779928a041ef51e93383e93288aac2.json} | 38 +- ...10bc38e48635c4df0c73c211d345a26cccf4e.json | 46 ++ ...339d172624d59fff7494f1929c8fe37f564a4.json | 34 ++ ...d77692bd1a336be4d06ff6e0ac6831164617e.json | 64 ++ ...b93cf4838bd1e7e668dafd0fffbd13c90d5aa.json | 14 + ...806b4c78b7aa2a9609c4eccb941c7dff7b107.json | 12 - ...7cb75a999041a3eb6a8f8177bebfa3c30d56f.json | 16 - ...d8c578770e2d52bf531de6e69561a4adbb21c.json | 94 +++ ...094044e237999123952be7c78b46c937b8778.json | 100 +++ ...b89853785c32a5f83cb0b25609329c760428a.json | 19 - ...bf3192c3108a2776bb56f36787af3fa884554.json | 14 + ...8915ab4494cbd7058fdec868ab93c0fcfb4d8.json | 17 + ...423869bd7b79dd5b246d80f0b6f39ce4659dc.json | 64 ++ ...89ccf3035f08340bf80a345ff74570cd62043.json | 103 ++++ ...be7a3759a98b5f1c637eb632aa440a1ffadb6.json | 85 +++ ...7bb2395caa02475163facde831cc9ada1ff30.json | 31 + ...44df13c46ef2eb373398a535090edf738cb5a.json | 76 +++ ...c48ab4946535a96baf0f49996d79387a3791c.json | 94 +++ ...2fd0382be589bf5d6dcde690b63f281160159.json | 15 + ...fe27d2ee90aa4598b17d90e5db82244ad6ff1.json | 14 + ...47fbcd0626347744c7f8de6dce25d6e9a1fe7.json | 46 ++ ...7480579468a5cb4ecdf7b315920b5e0bd894c.json | 106 ++++ ...53b4d76ec4c4dea338877ef5ba72fa49c28ad.json | 22 + ...b82a392e59683b9dfa1c457974e8fa8b7d00f.json | 22 + ...7ba89da5a49c211c8627c314b8a32c92a62e1.json | 94 +++ ...6790f3e5971d7a2bff2d32f2d92590ec3393d.json | 87 +++ ...e1dfcb0e7769f0333d2acf7d6e0fb97ca12dc.json | 27 + ...756595265b21dd6f7a06a2f7a846d162b340c.json | 100 +++ ...dc00c95626c94f0f02cbc69336836f95ec45e.json | 46 ++ ...ff7f21bafde8c7c1306cc7efc976a9eae0071.json | 25 + ...153f90eefabe5a252f86d5e8d1964785025c0.json | 16 + ...445dc1f4b2d659a3805f92f6f5f83b562266b.json | 70 +++ ...12f4794c1fc48b67d64c34c88fd9caf4508f5.json | 30 + ...39c1cc03348eb4b4fe698ad06283ba7072b7f.json | 113 ++++ ...7ea36f2a01b6b778fd61921e0046ad3f2efb2.json | 47 ++ ...77ce724f60cdb03492eef912a9fe89aee2ac4.json | 83 +++ ...5c23d56315ad817bea716d6a71c8b2bb18087.json | 44 ++ ...7a55dccaaeb0fe55d5eabb7319a90cbdfe951.json | 85 +++ ...b4d54ef603448c0c44272aec8f2ff04920b83.json | 69 +++ ...6310aea0a7dd4c8fc5f7c2cff4dac327cf3f7.json | 23 + ...038846f0cb4440e4b377d495ffe0f0bfc11b6.json | 34 ++ ...89ea77781df5a251a6731b42f8ddefb8a4c8b.json | 100 +++ ...226ba97993ede9988a4c57d58bd066500a119.json | 20 - ...21e00c42a3fad8082cf15c2af88cd8388f41b.json | 18 - ...b37d46c5a2f4202e1b8dce1f66a65069beb0b.json | 15 - ...41f06835f8687122987d87fad751981b0c2b1.json | 101 +++ ...c1b90b67b053add3d4cffb8d579bfc8f08345.json | 75 --- ...865d0612bc0d3f620d5cba76a6b44a8812417.json | 48 ++ Cargo.lock | 576 +++++++----------- Cargo.toml | 10 +- src/db/agreement.rs | 36 +- src/db/cloud.rs | 33 +- src/db/project.rs | 44 +- src/db/server.rs | 33 +- 68 files changed, 3051 insertions(+), 742 deletions(-) create mode 100644 .sqlx/query-0a1da2fad9e02675e88f31a77fc43010c534673240007b76da8b92288c5223e9.json create mode 100644 .sqlx/query-0f9023a3cea267596e9f99b3887012242345a8b4e4f9d838dc6d44cc34a89433.json create mode 100644 .sqlx/query-172dbb0c3947fa99e8522510096cd8dbfd785bb982a0622d3c05afb2ab3e260f.json create mode 100644 .sqlx/query-17f59e9f273d48aaf85b09c227f298f6d6f6f231554d80ed621076157af7f80a.json delete mode 100644 .sqlx/query-1cabd2f674da323da9e0da724d3bcfe5f968b31500e8c8cf97fe16814bc04164.json create mode 100644 .sqlx/query-1f1b8182d59d8253662da0ea73b69b6857e5f3c8f4292ba9c4491e062591575b.json delete mode 100644 .sqlx/query-1f299262f01a2c9d2ee94079a12766573c91b2775a086c65bc9a5fdc91300bb0.json delete mode 100644 .sqlx/query-24876462291b90324dfe3682e9f36247a328db780a48da47c9402e1d3ebd80c9.json delete mode 100644 .sqlx/query-2872b56bbc5bed96b1a303bf9cf44610fb79a1b9330730c65953f0c1b88c2a53.json create mode 100644 .sqlx/query-2c7065ccf4a0a527087754db39a2077a054026cb2bc0c010aba218506e76110f.json create mode 100644 .sqlx/query-309c79e9f4b28e19488e71ca49974e0c9173f355d69459333acf181ff2a82a1c.json create mode 100644 .sqlx/query-327394e1777395afda4a1f6c1ca07431de81f886f6a8d6e0fbcd7b6633d30b98.json rename .sqlx/{query-3022cb733970ae5836ab3891367b209a7e1f0974242ecd0f55e5b0098152bad5.json => query-32d118e607db4364979c52831e0c30a215779928a041ef51e93383e93288aac2.json} (54%) create mode 100644 .sqlx/query-36f6c8ba5c553e6c13d0041482910bc38e48635c4df0c73c211d345a26cccf4e.json create mode 100644 .sqlx/query-3b6ec5ef58cb3b234d8c8d45641339d172624d59fff7494f1929c8fe37f564a4.json create mode 100644 .sqlx/query-3dd9013b8856be2d991a656c3cdd77692bd1a336be4d06ff6e0ac6831164617e.json create mode 100644 .sqlx/query-41edb5195e8e68b8c80c8412f5bb93cf4838bd1e7e668dafd0fffbd13c90d5aa.json delete mode 100644 .sqlx/query-438ee38e669be96e562d09d3bc5806b4c78b7aa2a9609c4eccb941c7dff7b107.json delete mode 100644 .sqlx/query-4acfe0086a593b08177791bb3b47cb75a999041a3eb6a8f8177bebfa3c30d56f.json create mode 100644 .sqlx/query-4bdcd8d475ffd8aab728ec2b9d0d8c578770e2d52bf531de6e69561a4adbb21c.json create mode 100644 .sqlx/query-4e375cca55b0f106578474e5736094044e237999123952be7c78b46c937b8778.json delete mode 100644 .sqlx/query-4e7b82d256f7298564f46af6a45b89853785c32a5f83cb0b25609329c760428a.json create mode 100644 .sqlx/query-4f54a93856a693345a9f63552dabf3192c3108a2776bb56f36787af3fa884554.json create mode 100644 .sqlx/query-55e886a505d00b70674a19fd3228915ab4494cbd7058fdec868ab93c0fcfb4d8.json create mode 100644 .sqlx/query-5fea60d7574cfd238a7cbae4d93423869bd7b79dd5b246d80f0b6f39ce4659dc.json create mode 100644 .sqlx/query-6cdfab7ffca4a98abcd7fb2325289ccf3035f08340bf80a345ff74570cd62043.json create mode 100644 .sqlx/query-6e44fd63bcb2075e9515a7ce3d0be7a3759a98b5f1c637eb632aa440a1ffadb6.json create mode 100644 .sqlx/query-6ff761b4fa0b1ccc22722b481b37bb2395caa02475163facde831cc9ada1ff30.json create mode 100644 .sqlx/query-7b6c7e798237d0c08b7c1126d7044df13c46ef2eb373398a535090edf738cb5a.json create mode 100644 .sqlx/query-7c087b528df89eb0bf41a4e46bcc48ab4946535a96baf0f49996d79387a3791c.json create mode 100644 .sqlx/query-8038cec278228a04f83f4d67f8e2fd0382be589bf5d6dcde690b63f281160159.json create mode 100644 .sqlx/query-8218dc7f0a2d15d19391bdcde1dfe27d2ee90aa4598b17d90e5db82244ad6ff1.json create mode 100644 .sqlx/query-82eb411b1d8f6f3bed3db367ea147fbcd0626347744c7f8de6dce25d6e9a1fe7.json create mode 100644 .sqlx/query-836ec7786ee20369b6b49aa89587480579468a5cb4ecdf7b315920b5e0bd894c.json create mode 100644 .sqlx/query-83cd9d573480c8a83e9e58f375653b4d76ec4c4dea338877ef5ba72fa49c28ad.json create mode 100644 .sqlx/query-8aafae4565e572dc36aef3bb3d7b82a392e59683b9dfa1c457974e8fa8b7d00f.json create mode 100644 .sqlx/query-8cfb2d3a45ff6c5d1d51a98f6a37ba89da5a49c211c8627c314b8a32c92a62e1.json create mode 100644 .sqlx/query-8db13c16e29b4aecd87646859296790f3e5971d7a2bff2d32f2d92590ec3393d.json create mode 100644 .sqlx/query-8ec4c1e77a941efe4c1c36e26c5e1dfcb0e7769f0333d2acf7d6e0fb97ca12dc.json create mode 100644 .sqlx/query-91966b9578edeb2303bbba93cfc756595265b21dd6f7a06a2f7a846d162b340c.json create mode 100644 .sqlx/query-954605527a3ca7b9d6cbf1fbc03dc00c95626c94f0f02cbc69336836f95ec45e.json create mode 100644 .sqlx/query-9d821bd27d5202d2c3d49a2f148ff7f21bafde8c7c1306cc7efc976a9eae0071.json create mode 100644 .sqlx/query-9e4f216c828c7d53547c33da062153f90eefabe5a252f86d5e8d1964785025c0.json create mode 100644 .sqlx/query-b8296183bd28695d3a7574e57db445dc1f4b2d659a3805f92f6f5f83b562266b.json create mode 100644 .sqlx/query-b92417574329b82cae2347027db12f4794c1fc48b67d64c34c88fd9caf4508f5.json create mode 100644 .sqlx/query-bc798b1837501109ff69f44c01d39c1cc03348eb4b4fe698ad06283ba7072b7f.json create mode 100644 .sqlx/query-c28d645182680aaeaf265abcb687ea36f2a01b6b778fd61921e0046ad3f2efb2.json create mode 100644 .sqlx/query-c59246b73cf3c5a0fd961d2709477ce724f60cdb03492eef912a9fe89aee2ac4.json create mode 100644 .sqlx/query-cd6ddae34b29c15924e0ec26ea55c23d56315ad817bea716d6a71c8b2bb18087.json create mode 100644 .sqlx/query-cf85345c0c38d7ba1c347a9cf027a55dccaaeb0fe55d5eabb7319a90cbdfe951.json create mode 100644 .sqlx/query-db15f82b91377978db22c48cf2fb4d54ef603448c0c44272aec8f2ff04920b83.json create mode 100644 .sqlx/query-dd36c2beb4867d36db9dc0fe47e6310aea0a7dd4c8fc5f7c2cff4dac327cf3f7.json create mode 100644 .sqlx/query-e5a60eb49da1cd42fc6c1bac36f038846f0cb4440e4b377d495ffe0f0bfc11b6.json create mode 100644 .sqlx/query-f0af06a2002ce933966cf6cfe8289ea77781df5a251a6731b42f8ddefb8a4c8b.json delete mode 100644 .sqlx/query-f130c22d14ee2a99b9220ac1a45226ba97993ede9988a4c57d58bd066500a119.json delete mode 100644 .sqlx/query-f8611a862ed1d3b982e8aa5ccab21e00c42a3fad8082cf15c2af88cd8388f41b.json delete mode 100644 .sqlx/query-fa51ae7af271fc17c848694fbf1b37d46c5a2f4202e1b8dce1f66a65069beb0b.json create mode 100644 .sqlx/query-fb07f53c015c852c4ef9e0ce52541f06835f8687122987d87fad751981b0c2b1.json delete mode 100644 .sqlx/query-fb7ce69e70b345d2cf0ca017523c1b90b67b053add3d4cffb8d579bfc8f08345.json create mode 100644 .sqlx/query-ffb567ac44b9a0525bd41392c3a865d0612bc0d3f620d5cba76a6b44a8812417.json diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index da9b43c..bd57cde 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -9,18 +9,22 @@ on: branches: - main -env: - SQLX_OFFLINE: true - jobs: cicd-docker: name: Cargo and npm build runs-on: ubuntu-latest + env: + SQLX_OFFLINE: true steps: - name: Checkout sources uses: actions/checkout@v4 + - name: Verify .sqlx cache exists + run: | + ls -lh .sqlx/ || echo ".sqlx directory not found" + find .sqlx -type f 2>/dev/null | wc -l + - name: Install stable toolchain uses: actions-rs/toolchain@v1 with: @@ -65,12 +69,6 @@ jobs: with: command: check - - name: Run cargo sqlx prepare - uses: actions-rs/cargo@v1 - with: - command: sqlx prepare - args: --release - - name: Cargo test if: ${{ always() }} uses: actions-rs/cargo@v1 diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index f8d55dc..5c9e960 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -29,7 +29,7 @@ jobs: runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 - - name: Verify .sqlx/ cache exists + - name: Verify .sqlx cache exists run: | ls -lh .sqlx/ || echo ".sqlx directory not found" find .sqlx -type f 2>/dev/null | wc -l diff --git a/.sqlx/query-0a1da2fad9e02675e88f31a77fc43010c534673240007b76da8b92288c5223e9.json b/.sqlx/query-0a1da2fad9e02675e88f31a77fc43010c534673240007b76da8b92288c5223e9.json new file mode 100644 index 0000000..f4f076b --- /dev/null +++ b/.sqlx/query-0a1da2fad9e02675e88f31a77fc43010c534673240007b76da8b92288c5223e9.json @@ -0,0 +1,104 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE server\n SET\n user_id=$2,\n project_id=$3,\n region=$4,\n zone=$5,\n server=$6,\n os=$7,\n disk_type=$8,\n updated_at=NOW() at time zone 'utc',\n srv_ip=$9,\n ssh_user=$10,\n ssh_port=$11\n WHERE id = $1\n RETURNING *\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "user_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "project_id", + "type_info": "Int4" + }, + { + "ordinal": 3, + "name": "region", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "zone", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "server", + "type_info": "Varchar" + }, + { + "ordinal": 6, + "name": "os", + "type_info": "Varchar" + }, + { + "ordinal": 7, + "name": "disk_type", + "type_info": "Varchar" + }, + { + "ordinal": 8, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 10, + "name": "srv_ip", + "type_info": "Varchar" + }, + { + "ordinal": 11, + "name": "ssh_user", + "type_info": "Varchar" + }, + { + "ordinal": 12, + "name": "ssh_port", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Int4", + "Varchar", + "Int4", + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Int4" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + true, + true, + true, + false, + false, + true, + true, + true + ] + }, + "hash": "0a1da2fad9e02675e88f31a77fc43010c534673240007b76da8b92288c5223e9" +} diff --git a/.sqlx/query-0f9023a3cea267596e9f99b3887012242345a8b4e4f9d838dc6d44cc34a89433.json b/.sqlx/query-0f9023a3cea267596e9f99b3887012242345a8b4e4f9d838dc6d44cc34a89433.json new file mode 100644 index 0000000..a4c80ab --- /dev/null +++ b/.sqlx/query-0f9023a3cea267596e9f99b3887012242345a8b4e4f9d838dc6d44cc34a89433.json @@ -0,0 +1,46 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM agreement\n WHERE id=$1\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "name", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "text", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "updated_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Int4" + ] + }, + "nullable": [ + false, + false, + false, + false, + false + ] + }, + "hash": "0f9023a3cea267596e9f99b3887012242345a8b4e4f9d838dc6d44cc34a89433" +} diff --git a/.sqlx/query-172dbb0c3947fa99e8522510096cd8dbfd785bb982a0622d3c05afb2ab3e260f.json b/.sqlx/query-172dbb0c3947fa99e8522510096cd8dbfd785bb982a0622d3c05afb2ab3e260f.json new file mode 100644 index 0000000..963dd77 --- /dev/null +++ b/.sqlx/query-172dbb0c3947fa99e8522510096cd8dbfd785bb982a0622d3c05afb2ab3e260f.json @@ -0,0 +1,76 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT id, project_id, deployment_hash, user_id, deleted, status, metadata,\n last_seen_at, created_at, updated_at\n FROM deployment\n WHERE id=$1\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "project_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "deployment_hash", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "user_id", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "deleted", + "type_info": "Bool" + }, + { + "ordinal": 5, + "name": "status", + "type_info": "Varchar" + }, + { + "ordinal": 6, + "name": "metadata", + "type_info": "Json" + }, + { + "ordinal": 7, + "name": "last_seen_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "updated_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Int4" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + false, + false, + true, + false, + false + ] + }, + "hash": "172dbb0c3947fa99e8522510096cd8dbfd785bb982a0622d3c05afb2ab3e260f" +} diff --git a/.sqlx/query-17f59e9f273d48aaf85b09c227f298f6d6f6f231554d80ed621076157af7f80a.json b/.sqlx/query-17f59e9f273d48aaf85b09c227f298f6d6f6f231554d80ed621076157af7f80a.json new file mode 100644 index 0000000..c0f6288 --- /dev/null +++ b/.sqlx/query-17f59e9f273d48aaf85b09c227f298f6d6f6f231554d80ed621076157af7f80a.json @@ -0,0 +1,25 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO agreement (name, text, created_at, updated_at)\n VALUES ($1, $2, $3, $4)\n RETURNING id;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Varchar", + "Text", + "Timestamptz", + "Timestamptz" + ] + }, + "nullable": [ + false + ] + }, + "hash": "17f59e9f273d48aaf85b09c227f298f6d6f6f231554d80ed621076157af7f80a" +} diff --git a/.sqlx/query-1cabd2f674da323da9e0da724d3bcfe5f968b31500e8c8cf97fe16814bc04164.json b/.sqlx/query-1cabd2f674da323da9e0da724d3bcfe5f968b31500e8c8cf97fe16814bc04164.json deleted file mode 100644 index eb3a84f..0000000 --- a/.sqlx/query-1cabd2f674da323da9e0da724d3bcfe5f968b31500e8c8cf97fe16814bc04164.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "INSERT INTO casbin_rule ( ptype, v0, v1, v2, v3, v4, v5 )\n VALUES ( $1, $2, $3, $4, $5, $6, $7 )", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Varchar", - "Varchar", - "Varchar", - "Varchar", - "Varchar", - "Varchar", - "Varchar" - ] - }, - "nullable": [] - }, - "hash": "1cabd2f674da323da9e0da724d3bcfe5f968b31500e8c8cf97fe16814bc04164" -} diff --git a/.sqlx/query-1f1b8182d59d8253662da0ea73b69b6857e5f3c8f4292ba9c4491e062591575b.json b/.sqlx/query-1f1b8182d59d8253662da0ea73b69b6857e5f3c8f4292ba9c4491e062591575b.json new file mode 100644 index 0000000..4fe673b --- /dev/null +++ b/.sqlx/query-1f1b8182d59d8253662da0ea73b69b6857e5f3c8f4292ba9c4491e062591575b.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO project (stack_id, user_id, name, metadata, created_at, updated_at, request_json)\n VALUES ($1, $2, $3, $4, $5, $6, $7)\n RETURNING id;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Varchar", + "Text", + "Json", + "Timestamptz", + "Timestamptz", + "Json" + ] + }, + "nullable": [ + false + ] + }, + "hash": "1f1b8182d59d8253662da0ea73b69b6857e5f3c8f4292ba9c4491e062591575b" +} diff --git a/.sqlx/query-1f299262f01a2c9d2ee94079a12766573c91b2775a086c65bc9a5fdc91300bb0.json b/.sqlx/query-1f299262f01a2c9d2ee94079a12766573c91b2775a086c65bc9a5fdc91300bb0.json deleted file mode 100644 index 1ea12e3..0000000 --- a/.sqlx/query-1f299262f01a2c9d2ee94079a12766573c91b2775a086c65bc9a5fdc91300bb0.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n (v3 is NULL OR v3 = COALESCE($2,v3)) AND\n (v4 is NULL OR v4 = COALESCE($3,v4)) AND\n (v5 is NULL OR v5 = COALESCE($4,v5))", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Varchar", - "Varchar", - "Varchar" - ] - }, - "nullable": [] - }, - "hash": "1f299262f01a2c9d2ee94079a12766573c91b2775a086c65bc9a5fdc91300bb0" -} diff --git a/.sqlx/query-24876462291b90324dfe3682e9f36247a328db780a48da47c9402e1d3ebd80c9.json b/.sqlx/query-24876462291b90324dfe3682e9f36247a328db780a48da47c9402e1d3ebd80c9.json deleted file mode 100644 index 8046c5d..0000000 --- a/.sqlx/query-24876462291b90324dfe3682e9f36247a328db780a48da47c9402e1d3ebd80c9.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM casbin_rule", - "describe": { - "columns": [], - "parameters": { - "Left": [] - }, - "nullable": [] - }, - "hash": "24876462291b90324dfe3682e9f36247a328db780a48da47c9402e1d3ebd80c9" -} diff --git a/.sqlx/query-2872b56bbc5bed96b1a303bf9cf44610fb79a1b9330730c65953f0c1b88c2a53.json b/.sqlx/query-2872b56bbc5bed96b1a303bf9cf44610fb79a1b9330730c65953f0c1b88c2a53.json deleted file mode 100644 index e246e53..0000000 --- a/.sqlx/query-2872b56bbc5bed96b1a303bf9cf44610fb79a1b9330730c65953f0c1b88c2a53.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n v0 = $2 AND\n v1 = $3 AND\n v2 = $4 AND\n v3 = $5 AND\n v4 = $6 AND\n v5 = $7", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Text", - "Text", - "Text", - "Text", - "Text", - "Text" - ] - }, - "nullable": [] - }, - "hash": "2872b56bbc5bed96b1a303bf9cf44610fb79a1b9330730c65953f0c1b88c2a53" -} diff --git a/.sqlx/query-2c7065ccf4a0a527087754db39a2077a054026cb2bc0c010aba218506e76110f.json b/.sqlx/query-2c7065ccf4a0a527087754db39a2077a054026cb2bc0c010aba218506e76110f.json new file mode 100644 index 0000000..3524e58 --- /dev/null +++ b/.sqlx/query-2c7065ccf4a0a527087754db39a2077a054026cb2bc0c010aba218506e76110f.json @@ -0,0 +1,64 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM project\n WHERE user_id=$1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "stack_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "user_id", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "name", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "metadata", + "type_info": "Json" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "request_json", + "type_info": "Json" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false + ] + }, + "hash": "2c7065ccf4a0a527087754db39a2077a054026cb2bc0c010aba218506e76110f" +} diff --git a/.sqlx/query-309c79e9f4b28e19488e71ca49974e0c9173f355d69459333acf181ff2a82a1c.json b/.sqlx/query-309c79e9f4b28e19488e71ca49974e0c9173f355d69459333acf181ff2a82a1c.json new file mode 100644 index 0000000..1e22508 --- /dev/null +++ b/.sqlx/query-309c79e9f4b28e19488e71ca49974e0c9173f355d69459333acf181ff2a82a1c.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE agents \n SET last_heartbeat = NOW(), status = $2, updated_at = NOW()\n WHERE id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Varchar" + ] + }, + "nullable": [] + }, + "hash": "309c79e9f4b28e19488e71ca49974e0c9173f355d69459333acf181ff2a82a1c" +} diff --git a/.sqlx/query-327394e1777395afda4a1f6c1ca07431de81f886f6a8d6e0fbcd7b6633d30b98.json b/.sqlx/query-327394e1777395afda4a1f6c1ca07431de81f886f6a8d6e0fbcd7b6633d30b98.json new file mode 100644 index 0000000..4916207 --- /dev/null +++ b/.sqlx/query-327394e1777395afda4a1f6c1ca07431de81f886f6a8d6e0fbcd7b6633d30b98.json @@ -0,0 +1,100 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT id, command_id, deployment_hash, type, status, priority,\n parameters, result, error, created_by, created_at, updated_at,\n timeout_seconds, metadata\n FROM commands\n WHERE deployment_hash = $1\n ORDER BY created_at DESC\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "command_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "deployment_hash", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "type", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "status", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "priority", + "type_info": "Varchar" + }, + { + "ordinal": 6, + "name": "parameters", + "type_info": "Jsonb" + }, + { + "ordinal": 7, + "name": "result", + "type_info": "Jsonb" + }, + { + "ordinal": 8, + "name": "error", + "type_info": "Jsonb" + }, + { + "ordinal": 9, + "name": "created_by", + "type_info": "Varchar" + }, + { + "ordinal": 10, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 11, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 12, + "name": "timeout_seconds", + "type_info": "Int4" + }, + { + "ordinal": 13, + "name": "metadata", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + true, + true, + true, + false, + false, + false, + true, + true + ] + }, + "hash": "327394e1777395afda4a1f6c1ca07431de81f886f6a8d6e0fbcd7b6633d30b98" +} diff --git a/.sqlx/query-3022cb733970ae5836ab3891367b209a7e1f0974242ecd0f55e5b0098152bad5.json b/.sqlx/query-32d118e607db4364979c52831e0c30a215779928a041ef51e93383e93288aac2.json similarity index 54% rename from .sqlx/query-3022cb733970ae5836ab3891367b209a7e1f0974242ecd0f55e5b0098152bad5.json rename to .sqlx/query-32d118e607db4364979c52831e0c30a215779928a041ef51e93383e93288aac2.json index 4d06843..e23eb43 100644 --- a/.sqlx/query-3022cb733970ae5836ab3891367b209a7e1f0974242ecd0f55e5b0098152bad5.json +++ b/.sqlx/query-32d118e607db4364979c52831e0c30a215779928a041ef51e93383e93288aac2.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT * FROM casbin_rule", + "query": "SELECT * FROM cloud WHERE id=$1 LIMIT 1 ", "describe": { "columns": [ { @@ -10,53 +10,61 @@ }, { "ordinal": 1, - "name": "ptype", + "name": "user_id", "type_info": "Varchar" }, { "ordinal": 2, - "name": "v0", + "name": "provider", "type_info": "Varchar" }, { "ordinal": 3, - "name": "v1", + "name": "cloud_token", "type_info": "Varchar" }, { "ordinal": 4, - "name": "v2", + "name": "cloud_key", "type_info": "Varchar" }, { "ordinal": 5, - "name": "v3", + "name": "cloud_secret", "type_info": "Varchar" }, { "ordinal": 6, - "name": "v4", - "type_info": "Varchar" + "name": "save_token", + "type_info": "Bool" }, { "ordinal": 7, - "name": "v5", - "type_info": "Varchar" + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "updated_at", + "type_info": "Timestamptz" } ], "parameters": { - "Left": [] + "Left": [ + "Int4" + ] }, "nullable": [ false, false, false, - false, - false, - false, + true, + true, + true, + true, false, false ] }, - "hash": "3022cb733970ae5836ab3891367b209a7e1f0974242ecd0f55e5b0098152bad5" + "hash": "32d118e607db4364979c52831e0c30a215779928a041ef51e93383e93288aac2" } diff --git a/.sqlx/query-36f6c8ba5c553e6c13d0041482910bc38e48635c4df0c73c211d345a26cccf4e.json b/.sqlx/query-36f6c8ba5c553e6c13d0041482910bc38e48635c4df0c73c211d345a26cccf4e.json new file mode 100644 index 0000000..fbcc830 --- /dev/null +++ b/.sqlx/query-36f6c8ba5c553e6c13d0041482910bc38e48635c4df0c73c211d345a26cccf4e.json @@ -0,0 +1,46 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM agreement\n WHERE name=$1\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "name", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "text", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "updated_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + false + ] + }, + "hash": "36f6c8ba5c553e6c13d0041482910bc38e48635c4df0c73c211d345a26cccf4e" +} diff --git a/.sqlx/query-3b6ec5ef58cb3b234d8c8d45641339d172624d59fff7494f1929c8fe37f564a4.json b/.sqlx/query-3b6ec5ef58cb3b234d8c8d45641339d172624d59fff7494f1929c8fe37f564a4.json new file mode 100644 index 0000000..bbcd341 --- /dev/null +++ b/.sqlx/query-3b6ec5ef58cb3b234d8c8d45641339d172624d59fff7494f1929c8fe37f564a4.json @@ -0,0 +1,34 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n id,\n user_id,\n secret \n FROM client c\n WHERE c.id = $1\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "user_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "secret", + "type_info": "Varchar" + } + ], + "parameters": { + "Left": [ + "Int4" + ] + }, + "nullable": [ + false, + false, + true + ] + }, + "hash": "3b6ec5ef58cb3b234d8c8d45641339d172624d59fff7494f1929c8fe37f564a4" +} diff --git a/.sqlx/query-3dd9013b8856be2d991a656c3cdd77692bd1a336be4d06ff6e0ac6831164617e.json b/.sqlx/query-3dd9013b8856be2d991a656c3cdd77692bd1a336be4d06ff6e0ac6831164617e.json new file mode 100644 index 0000000..5c8c7ac --- /dev/null +++ b/.sqlx/query-3dd9013b8856be2d991a656c3cdd77692bd1a336be4d06ff6e0ac6831164617e.json @@ -0,0 +1,64 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM project\n WHERE name=$1\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "stack_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "user_id", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "name", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "metadata", + "type_info": "Json" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "request_json", + "type_info": "Json" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false + ] + }, + "hash": "3dd9013b8856be2d991a656c3cdd77692bd1a336be4d06ff6e0ac6831164617e" +} diff --git a/.sqlx/query-41edb5195e8e68b8c80c8412f5bb93cf4838bd1e7e668dafd0fffbd13c90d5aa.json b/.sqlx/query-41edb5195e8e68b8c80c8412f5bb93cf4838bd1e7e668dafd0fffbd13c90d5aa.json new file mode 100644 index 0000000..6af6017 --- /dev/null +++ b/.sqlx/query-41edb5195e8e68b8c80c8412f5bb93cf4838bd1e7e668dafd0fffbd13c90d5aa.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM command_queue\n WHERE command_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [] + }, + "hash": "41edb5195e8e68b8c80c8412f5bb93cf4838bd1e7e668dafd0fffbd13c90d5aa" +} diff --git a/.sqlx/query-438ee38e669be96e562d09d3bc5806b4c78b7aa2a9609c4eccb941c7dff7b107.json b/.sqlx/query-438ee38e669be96e562d09d3bc5806b4c78b7aa2a9609c4eccb941c7dff7b107.json deleted file mode 100644 index 75c6da3..0000000 --- a/.sqlx/query-438ee38e669be96e562d09d3bc5806b4c78b7aa2a9609c4eccb941c7dff7b107.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "CREATE TABLE IF NOT EXISTS casbin_rule (\n id SERIAL PRIMARY KEY,\n ptype VARCHAR NOT NULL,\n v0 VARCHAR NOT NULL,\n v1 VARCHAR NOT NULL,\n v2 VARCHAR NOT NULL,\n v3 VARCHAR NOT NULL,\n v4 VARCHAR NOT NULL,\n v5 VARCHAR NOT NULL,\n CONSTRAINT unique_key_sqlx_adapter UNIQUE(ptype, v0, v1, v2, v3, v4, v5)\n );\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [] - }, - "nullable": [] - }, - "hash": "438ee38e669be96e562d09d3bc5806b4c78b7aa2a9609c4eccb941c7dff7b107" -} diff --git a/.sqlx/query-4acfe0086a593b08177791bb3b47cb75a999041a3eb6a8f8177bebfa3c30d56f.json b/.sqlx/query-4acfe0086a593b08177791bb3b47cb75a999041a3eb6a8f8177bebfa3c30d56f.json deleted file mode 100644 index ce229dc..0000000 --- a/.sqlx/query-4acfe0086a593b08177791bb3b47cb75a999041a3eb6a8f8177bebfa3c30d56f.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n (v4 is NULL OR v4 = COALESCE($2,v4)) AND\n (v5 is NULL OR v5 = COALESCE($3,v5))", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Varchar", - "Varchar" - ] - }, - "nullable": [] - }, - "hash": "4acfe0086a593b08177791bb3b47cb75a999041a3eb6a8f8177bebfa3c30d56f" -} diff --git a/.sqlx/query-4bdcd8d475ffd8aab728ec2b9d0d8c578770e2d52bf531de6e69561a4adbb21c.json b/.sqlx/query-4bdcd8d475ffd8aab728ec2b9d0d8c578770e2d52bf531de6e69561a4adbb21c.json new file mode 100644 index 0000000..35db09e --- /dev/null +++ b/.sqlx/query-4bdcd8d475ffd8aab728ec2b9d0d8c578770e2d52bf531de6e69561a4adbb21c.json @@ -0,0 +1,94 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM server\n WHERE user_id=$1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "user_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "project_id", + "type_info": "Int4" + }, + { + "ordinal": 3, + "name": "region", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "zone", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "server", + "type_info": "Varchar" + }, + { + "ordinal": 6, + "name": "os", + "type_info": "Varchar" + }, + { + "ordinal": 7, + "name": "disk_type", + "type_info": "Varchar" + }, + { + "ordinal": 8, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 10, + "name": "srv_ip", + "type_info": "Varchar" + }, + { + "ordinal": 11, + "name": "ssh_user", + "type_info": "Varchar" + }, + { + "ordinal": 12, + "name": "ssh_port", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + true, + true, + true, + false, + false, + true, + true, + true + ] + }, + "hash": "4bdcd8d475ffd8aab728ec2b9d0d8c578770e2d52bf531de6e69561a4adbb21c" +} diff --git a/.sqlx/query-4e375cca55b0f106578474e5736094044e237999123952be7c78b46c937b8778.json b/.sqlx/query-4e375cca55b0f106578474e5736094044e237999123952be7c78b46c937b8778.json new file mode 100644 index 0000000..09cd0c0 --- /dev/null +++ b/.sqlx/query-4e375cca55b0f106578474e5736094044e237999123952be7c78b46c937b8778.json @@ -0,0 +1,100 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE commands\n SET status = 'cancelled', updated_at = NOW()\n WHERE command_id = $1\n RETURNING id, command_id, deployment_hash, type, status, priority,\n parameters, result, error, created_by, created_at, updated_at,\n timeout_seconds, metadata\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "command_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "deployment_hash", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "type", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "status", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "priority", + "type_info": "Varchar" + }, + { + "ordinal": 6, + "name": "parameters", + "type_info": "Jsonb" + }, + { + "ordinal": 7, + "name": "result", + "type_info": "Jsonb" + }, + { + "ordinal": 8, + "name": "error", + "type_info": "Jsonb" + }, + { + "ordinal": 9, + "name": "created_by", + "type_info": "Varchar" + }, + { + "ordinal": 10, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 11, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 12, + "name": "timeout_seconds", + "type_info": "Int4" + }, + { + "ordinal": 13, + "name": "metadata", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + true, + true, + true, + false, + false, + false, + true, + true + ] + }, + "hash": "4e375cca55b0f106578474e5736094044e237999123952be7c78b46c937b8778" +} diff --git a/.sqlx/query-4e7b82d256f7298564f46af6a45b89853785c32a5f83cb0b25609329c760428a.json b/.sqlx/query-4e7b82d256f7298564f46af6a45b89853785c32a5f83cb0b25609329c760428a.json deleted file mode 100644 index 4c4c1df..0000000 --- a/.sqlx/query-4e7b82d256f7298564f46af6a45b89853785c32a5f83cb0b25609329c760428a.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n (v1 is NULL OR v1 = COALESCE($2,v1)) AND\n (v2 is NULL OR v2 = COALESCE($3,v2)) AND\n (v3 is NULL OR v3 = COALESCE($4,v3)) AND\n (v4 is NULL OR v4 = COALESCE($5,v4)) AND\n (v5 is NULL OR v5 = COALESCE($6,v5))", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Varchar", - "Varchar", - "Varchar", - "Varchar", - "Varchar" - ] - }, - "nullable": [] - }, - "hash": "4e7b82d256f7298564f46af6a45b89853785c32a5f83cb0b25609329c760428a" -} diff --git a/.sqlx/query-4f54a93856a693345a9f63552dabf3192c3108a2776bb56f36787af3fa884554.json b/.sqlx/query-4f54a93856a693345a9f63552dabf3192c3108a2776bb56f36787af3fa884554.json new file mode 100644 index 0000000..f76fff6 --- /dev/null +++ b/.sqlx/query-4f54a93856a693345a9f63552dabf3192c3108a2776bb56f36787af3fa884554.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM agents WHERE id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "4f54a93856a693345a9f63552dabf3192c3108a2776bb56f36787af3fa884554" +} diff --git a/.sqlx/query-55e886a505d00b70674a19fd3228915ab4494cbd7058fdec868ab93c0fcfb4d8.json b/.sqlx/query-55e886a505d00b70674a19fd3228915ab4494cbd7058fdec868ab93c0fcfb4d8.json new file mode 100644 index 0000000..bd0e16f --- /dev/null +++ b/.sqlx/query-55e886a505d00b70674a19fd3228915ab4494cbd7058fdec868ab93c0fcfb4d8.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE rating\n SET \n comment=$1,\n rate=$2,\n hidden=$3,\n updated_at=NOW() at time zone 'utc'\n WHERE id = $4\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Int4", + "Bool", + "Int4" + ] + }, + "nullable": [] + }, + "hash": "55e886a505d00b70674a19fd3228915ab4494cbd7058fdec868ab93c0fcfb4d8" +} diff --git a/.sqlx/query-5fea60d7574cfd238a7cbae4d93423869bd7b79dd5b246d80f0b6f39ce4659dc.json b/.sqlx/query-5fea60d7574cfd238a7cbae4d93423869bd7b79dd5b246d80f0b6f39ce4659dc.json new file mode 100644 index 0000000..6c81374 --- /dev/null +++ b/.sqlx/query-5fea60d7574cfd238a7cbae4d93423869bd7b79dd5b246d80f0b6f39ce4659dc.json @@ -0,0 +1,64 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM project\n WHERE id=$1\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "stack_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "user_id", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "name", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "metadata", + "type_info": "Json" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "request_json", + "type_info": "Json" + } + ], + "parameters": { + "Left": [ + "Int4" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false + ] + }, + "hash": "5fea60d7574cfd238a7cbae4d93423869bd7b79dd5b246d80f0b6f39ce4659dc" +} diff --git a/.sqlx/query-6cdfab7ffca4a98abcd7fb2325289ccf3035f08340bf80a345ff74570cd62043.json b/.sqlx/query-6cdfab7ffca4a98abcd7fb2325289ccf3035f08340bf80a345ff74570cd62043.json new file mode 100644 index 0000000..2bbb52c --- /dev/null +++ b/.sqlx/query-6cdfab7ffca4a98abcd7fb2325289ccf3035f08340bf80a345ff74570cd62043.json @@ -0,0 +1,103 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE commands\n SET status = $2, result = $3, error = $4, updated_at = NOW()\n WHERE command_id = $1\n RETURNING id, command_id, deployment_hash, type, status, priority,\n parameters, result, error, created_by, created_at, updated_at,\n timeout_seconds, metadata\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "command_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "deployment_hash", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "type", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "status", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "priority", + "type_info": "Varchar" + }, + { + "ordinal": 6, + "name": "parameters", + "type_info": "Jsonb" + }, + { + "ordinal": 7, + "name": "result", + "type_info": "Jsonb" + }, + { + "ordinal": 8, + "name": "error", + "type_info": "Jsonb" + }, + { + "ordinal": 9, + "name": "created_by", + "type_info": "Varchar" + }, + { + "ordinal": 10, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 11, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 12, + "name": "timeout_seconds", + "type_info": "Int4" + }, + { + "ordinal": 13, + "name": "metadata", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Text", + "Varchar", + "Jsonb", + "Jsonb" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + true, + true, + true, + false, + false, + false, + true, + true + ] + }, + "hash": "6cdfab7ffca4a98abcd7fb2325289ccf3035f08340bf80a345ff74570cd62043" +} diff --git a/.sqlx/query-6e44fd63bcb2075e9515a7ce3d0be7a3759a98b5f1c637eb632aa440a1ffadb6.json b/.sqlx/query-6e44fd63bcb2075e9515a7ce3d0be7a3759a98b5f1c637eb632aa440a1ffadb6.json new file mode 100644 index 0000000..b6c5726 --- /dev/null +++ b/.sqlx/query-6e44fd63bcb2075e9515a7ce3d0be7a3759a98b5f1c637eb632aa440a1ffadb6.json @@ -0,0 +1,85 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT \n id,\n user_id,\n obj_id,\n category as \"category: _\",\n comment,\n hidden,\n rate,\n created_at,\n updated_at\n FROM rating\n WHERE hidden = false \n ORDER BY id DESC\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "user_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "obj_id", + "type_info": "Int4" + }, + { + "ordinal": 3, + "name": "category: _", + "type_info": { + "Custom": { + "name": "rate_category", + "kind": { + "Enum": [ + "application", + "cloud", + "project", + "deploymentSpeed", + "documentation", + "design", + "techSupport", + "price", + "memoryUsage" + ] + } + } + } + }, + { + "ordinal": 4, + "name": "comment", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "hidden", + "type_info": "Bool" + }, + { + "ordinal": 6, + "name": "rate", + "type_info": "Int4" + }, + { + "ordinal": 7, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "updated_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false, + false, + false, + true, + true, + true, + false, + false + ] + }, + "hash": "6e44fd63bcb2075e9515a7ce3d0be7a3759a98b5f1c637eb632aa440a1ffadb6" +} diff --git a/.sqlx/query-6ff761b4fa0b1ccc22722b481b37bb2395caa02475163facde831cc9ada1ff30.json b/.sqlx/query-6ff761b4fa0b1ccc22722b481b37bb2395caa02475163facde831cc9ada1ff30.json new file mode 100644 index 0000000..2a91bb1 --- /dev/null +++ b/.sqlx/query-6ff761b4fa0b1ccc22722b481b37bb2395caa02475163facde831cc9ada1ff30.json @@ -0,0 +1,31 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO server (\n user_id,\n project_id,\n region,\n zone,\n server,\n os,\n disk_type,\n created_at,\n updated_at,\n srv_ip,\n ssh_user,\n ssh_port\n )\n VALUES ($1, $2, $3, $4, $5, $6, $7, NOW() at time zone 'utc',NOW() at time zone 'utc', $8, $9, $10)\n RETURNING id;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Varchar", + "Int4", + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Int4" + ] + }, + "nullable": [ + false + ] + }, + "hash": "6ff761b4fa0b1ccc22722b481b37bb2395caa02475163facde831cc9ada1ff30" +} diff --git a/.sqlx/query-7b6c7e798237d0c08b7c1126d7044df13c46ef2eb373398a535090edf738cb5a.json b/.sqlx/query-7b6c7e798237d0c08b7c1126d7044df13c46ef2eb373398a535090edf738cb5a.json new file mode 100644 index 0000000..ed0cd48 --- /dev/null +++ b/.sqlx/query-7b6c7e798237d0c08b7c1126d7044df13c46ef2eb373398a535090edf738cb5a.json @@ -0,0 +1,76 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE cloud\n SET\n user_id=$2,\n provider=$3,\n cloud_token=$4,\n cloud_key=$5,\n cloud_secret=$6,\n save_token=$7,\n updated_at=NOW() at time zone 'utc'\n WHERE id = $1\n RETURNING *\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "user_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "provider", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "cloud_token", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "cloud_key", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "cloud_secret", + "type_info": "Varchar" + }, + { + "ordinal": 6, + "name": "save_token", + "type_info": "Bool" + }, + { + "ordinal": 7, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "updated_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Int4", + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Bool" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + true, + true, + false, + false + ] + }, + "hash": "7b6c7e798237d0c08b7c1126d7044df13c46ef2eb373398a535090edf738cb5a" +} diff --git a/.sqlx/query-7c087b528df89eb0bf41a4e46bcc48ab4946535a96baf0f49996d79387a3791c.json b/.sqlx/query-7c087b528df89eb0bf41a4e46bcc48ab4946535a96baf0f49996d79387a3791c.json new file mode 100644 index 0000000..b6d94b3 --- /dev/null +++ b/.sqlx/query-7c087b528df89eb0bf41a4e46bcc48ab4946535a96baf0f49996d79387a3791c.json @@ -0,0 +1,94 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM server\n WHERE project_id=$1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "user_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "project_id", + "type_info": "Int4" + }, + { + "ordinal": 3, + "name": "region", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "zone", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "server", + "type_info": "Varchar" + }, + { + "ordinal": 6, + "name": "os", + "type_info": "Varchar" + }, + { + "ordinal": 7, + "name": "disk_type", + "type_info": "Varchar" + }, + { + "ordinal": 8, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 10, + "name": "srv_ip", + "type_info": "Varchar" + }, + { + "ordinal": 11, + "name": "ssh_user", + "type_info": "Varchar" + }, + { + "ordinal": 12, + "name": "ssh_port", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Int4" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + true, + true, + true, + false, + false, + true, + true, + true + ] + }, + "hash": "7c087b528df89eb0bf41a4e46bcc48ab4946535a96baf0f49996d79387a3791c" +} diff --git a/.sqlx/query-8038cec278228a04f83f4d67f8e2fd0382be589bf5d6dcde690b63f281160159.json b/.sqlx/query-8038cec278228a04f83f4d67f8e2fd0382be589bf5d6dcde690b63f281160159.json new file mode 100644 index 0000000..aafa449 --- /dev/null +++ b/.sqlx/query-8038cec278228a04f83f4d67f8e2fd0382be589bf5d6dcde690b63f281160159.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE client\n SET \n secret=$1,\n updated_at=NOW() at time zone 'utc'\n WHERE id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Varchar", + "Int4" + ] + }, + "nullable": [] + }, + "hash": "8038cec278228a04f83f4d67f8e2fd0382be589bf5d6dcde690b63f281160159" +} diff --git a/.sqlx/query-8218dc7f0a2d15d19391bdcde1dfe27d2ee90aa4598b17d90e5db82244ad6ff1.json b/.sqlx/query-8218dc7f0a2d15d19391bdcde1dfe27d2ee90aa4598b17d90e5db82244ad6ff1.json new file mode 100644 index 0000000..17b8891 --- /dev/null +++ b/.sqlx/query-8218dc7f0a2d15d19391bdcde1dfe27d2ee90aa4598b17d90e5db82244ad6ff1.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM rating\n WHERE id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int4" + ] + }, + "nullable": [] + }, + "hash": "8218dc7f0a2d15d19391bdcde1dfe27d2ee90aa4598b17d90e5db82244ad6ff1" +} diff --git a/.sqlx/query-82eb411b1d8f6f3bed3db367ea147fbcd0626347744c7f8de6dce25d6e9a1fe7.json b/.sqlx/query-82eb411b1d8f6f3bed3db367ea147fbcd0626347744c7f8de6dce25d6e9a1fe7.json new file mode 100644 index 0000000..d95a94c --- /dev/null +++ b/.sqlx/query-82eb411b1d8f6f3bed3db367ea147fbcd0626347744c7f8de6dce25d6e9a1fe7.json @@ -0,0 +1,46 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM user_agreement\n WHERE user_id=$1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "agrt_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "user_id", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "updated_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + false + ] + }, + "hash": "82eb411b1d8f6f3bed3db367ea147fbcd0626347744c7f8de6dce25d6e9a1fe7" +} diff --git a/.sqlx/query-836ec7786ee20369b6b49aa89587480579468a5cb4ecdf7b315920b5e0bd894c.json b/.sqlx/query-836ec7786ee20369b6b49aa89587480579468a5cb4ecdf7b315920b5e0bd894c.json new file mode 100644 index 0000000..6dabdee --- /dev/null +++ b/.sqlx/query-836ec7786ee20369b6b49aa89587480579468a5cb4ecdf7b315920b5e0bd894c.json @@ -0,0 +1,106 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT \n id,\n user_id,\n obj_id,\n category as \"category: _\",\n comment,\n hidden,\n rate,\n created_at,\n updated_at\n FROM rating\n WHERE user_id=$1\n AND obj_id=$2\n AND category=$3\n LIMIT 1", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "user_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "obj_id", + "type_info": "Int4" + }, + { + "ordinal": 3, + "name": "category: _", + "type_info": { + "Custom": { + "name": "rate_category", + "kind": { + "Enum": [ + "application", + "cloud", + "project", + "deploymentSpeed", + "documentation", + "design", + "techSupport", + "price", + "memoryUsage" + ] + } + } + } + }, + { + "ordinal": 4, + "name": "comment", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "hidden", + "type_info": "Bool" + }, + { + "ordinal": 6, + "name": "rate", + "type_info": "Int4" + }, + { + "ordinal": 7, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "updated_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Text", + "Int4", + { + "Custom": { + "name": "rate_category", + "kind": { + "Enum": [ + "application", + "cloud", + "project", + "deploymentSpeed", + "documentation", + "design", + "techSupport", + "price", + "memoryUsage" + ] + } + } + } + ] + }, + "nullable": [ + false, + false, + false, + false, + true, + true, + true, + false, + false + ] + }, + "hash": "836ec7786ee20369b6b49aa89587480579468a5cb4ecdf7b315920b5e0bd894c" +} diff --git a/.sqlx/query-83cd9d573480c8a83e9e58f375653b4d76ec4c4dea338877ef5ba72fa49c28ad.json b/.sqlx/query-83cd9d573480c8a83e9e58f375653b4d76ec4c4dea338877ef5ba72fa49c28ad.json new file mode 100644 index 0000000..44d0fe6 --- /dev/null +++ b/.sqlx/query-83cd9d573480c8a83e9e58f375653b4d76ec4c4dea338877ef5ba72fa49c28ad.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n count(*) as found\n FROM client c \n WHERE c.secret = $1\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "found", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + null + ] + }, + "hash": "83cd9d573480c8a83e9e58f375653b4d76ec4c4dea338877ef5ba72fa49c28ad" +} diff --git a/.sqlx/query-8aafae4565e572dc36aef3bb3d7b82a392e59683b9dfa1c457974e8fa8b7d00f.json b/.sqlx/query-8aafae4565e572dc36aef3bb3d7b82a392e59683b9dfa1c457974e8fa8b7d00f.json new file mode 100644 index 0000000..6d69a7d --- /dev/null +++ b/.sqlx/query-8aafae4565e572dc36aef3bb3d7b82a392e59683b9dfa1c457974e8fa8b7d00f.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n count(*) as client_count\n FROM client c \n WHERE c.user_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "client_count", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + null + ] + }, + "hash": "8aafae4565e572dc36aef3bb3d7b82a392e59683b9dfa1c457974e8fa8b7d00f" +} diff --git a/.sqlx/query-8cfb2d3a45ff6c5d1d51a98f6a37ba89da5a49c211c8627c314b8a32c92a62e1.json b/.sqlx/query-8cfb2d3a45ff6c5d1d51a98f6a37ba89da5a49c211c8627c314b8a32c92a62e1.json new file mode 100644 index 0000000..991ef36 --- /dev/null +++ b/.sqlx/query-8cfb2d3a45ff6c5d1d51a98f6a37ba89da5a49c211c8627c314b8a32c92a62e1.json @@ -0,0 +1,94 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT * FROM server WHERE id=$1 LIMIT 1 ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "user_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "project_id", + "type_info": "Int4" + }, + { + "ordinal": 3, + "name": "region", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "zone", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "server", + "type_info": "Varchar" + }, + { + "ordinal": 6, + "name": "os", + "type_info": "Varchar" + }, + { + "ordinal": 7, + "name": "disk_type", + "type_info": "Varchar" + }, + { + "ordinal": 8, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 10, + "name": "srv_ip", + "type_info": "Varchar" + }, + { + "ordinal": 11, + "name": "ssh_user", + "type_info": "Varchar" + }, + { + "ordinal": 12, + "name": "ssh_port", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Int4" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + true, + true, + true, + false, + false, + true, + true, + true + ] + }, + "hash": "8cfb2d3a45ff6c5d1d51a98f6a37ba89da5a49c211c8627c314b8a32c92a62e1" +} diff --git a/.sqlx/query-8db13c16e29b4aecd87646859296790f3e5971d7a2bff2d32f2d92590ec3393d.json b/.sqlx/query-8db13c16e29b4aecd87646859296790f3e5971d7a2bff2d32f2d92590ec3393d.json new file mode 100644 index 0000000..dea9192 --- /dev/null +++ b/.sqlx/query-8db13c16e29b4aecd87646859296790f3e5971d7a2bff2d32f2d92590ec3393d.json @@ -0,0 +1,87 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT \n id,\n user_id,\n obj_id,\n category as \"category: _\",\n comment,\n hidden,\n rate,\n created_at,\n updated_at\n FROM rating\n WHERE id=$1\n LIMIT 1", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "user_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "obj_id", + "type_info": "Int4" + }, + { + "ordinal": 3, + "name": "category: _", + "type_info": { + "Custom": { + "name": "rate_category", + "kind": { + "Enum": [ + "application", + "cloud", + "project", + "deploymentSpeed", + "documentation", + "design", + "techSupport", + "price", + "memoryUsage" + ] + } + } + } + }, + { + "ordinal": 4, + "name": "comment", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "hidden", + "type_info": "Bool" + }, + { + "ordinal": 6, + "name": "rate", + "type_info": "Int4" + }, + { + "ordinal": 7, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "updated_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Int4" + ] + }, + "nullable": [ + false, + false, + false, + false, + true, + true, + true, + false, + false + ] + }, + "hash": "8db13c16e29b4aecd87646859296790f3e5971d7a2bff2d32f2d92590ec3393d" +} diff --git a/.sqlx/query-8ec4c1e77a941efe4c1c36e26c5e1dfcb0e7769f0333d2acf7d6e0fb97ca12dc.json b/.sqlx/query-8ec4c1e77a941efe4c1c36e26c5e1dfcb0e7769f0333d2acf7d6e0fb97ca12dc.json new file mode 100644 index 0000000..0679752 --- /dev/null +++ b/.sqlx/query-8ec4c1e77a941efe4c1c36e26c5e1dfcb0e7769f0333d2acf7d6e0fb97ca12dc.json @@ -0,0 +1,27 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO cloud (\n user_id,\n provider,\n cloud_token,\n cloud_key,\n cloud_secret,\n save_token,\n created_at,\n updated_at\n )\n VALUES ($1, $2, $3, $4, $5, $6, NOW() at time zone 'utc', NOW() at time zone 'utc')\n RETURNING id;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Bool" + ] + }, + "nullable": [ + false + ] + }, + "hash": "8ec4c1e77a941efe4c1c36e26c5e1dfcb0e7769f0333d2acf7d6e0fb97ca12dc" +} diff --git a/.sqlx/query-91966b9578edeb2303bbba93cfc756595265b21dd6f7a06a2f7a846d162b340c.json b/.sqlx/query-91966b9578edeb2303bbba93cfc756595265b21dd6f7a06a2f7a846d162b340c.json new file mode 100644 index 0000000..0146a6a --- /dev/null +++ b/.sqlx/query-91966b9578edeb2303bbba93cfc756595265b21dd6f7a06a2f7a846d162b340c.json @@ -0,0 +1,100 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT c.id, c.command_id, c.deployment_hash, c.type, c.status, c.priority,\n c.parameters, c.result, c.error, c.created_by, c.created_at, c.updated_at,\n c.timeout_seconds, c.metadata\n FROM commands c\n INNER JOIN command_queue q ON c.command_id = q.command_id\n WHERE q.deployment_hash = $1\n ORDER BY q.priority DESC, q.created_at ASC\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "command_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "deployment_hash", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "type", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "status", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "priority", + "type_info": "Varchar" + }, + { + "ordinal": 6, + "name": "parameters", + "type_info": "Jsonb" + }, + { + "ordinal": 7, + "name": "result", + "type_info": "Jsonb" + }, + { + "ordinal": 8, + "name": "error", + "type_info": "Jsonb" + }, + { + "ordinal": 9, + "name": "created_by", + "type_info": "Varchar" + }, + { + "ordinal": 10, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 11, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 12, + "name": "timeout_seconds", + "type_info": "Int4" + }, + { + "ordinal": 13, + "name": "metadata", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + true, + true, + true, + false, + false, + false, + true, + true + ] + }, + "hash": "91966b9578edeb2303bbba93cfc756595265b21dd6f7a06a2f7a846d162b340c" +} diff --git a/.sqlx/query-954605527a3ca7b9d6cbf1fbc03dc00c95626c94f0f02cbc69336836f95ec45e.json b/.sqlx/query-954605527a3ca7b9d6cbf1fbc03dc00c95626c94f0f02cbc69336836f95ec45e.json new file mode 100644 index 0000000..e181206 --- /dev/null +++ b/.sqlx/query-954605527a3ca7b9d6cbf1fbc03dc00c95626c94f0f02cbc69336836f95ec45e.json @@ -0,0 +1,46 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT\n *\n FROM product\n WHERE obj_id = $1\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "obj_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "obj_type", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "updated_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Int4" + ] + }, + "nullable": [ + false, + false, + false, + false, + false + ] + }, + "hash": "954605527a3ca7b9d6cbf1fbc03dc00c95626c94f0f02cbc69336836f95ec45e" +} diff --git a/.sqlx/query-9d821bd27d5202d2c3d49a2f148ff7f21bafde8c7c1306cc7efc976a9eae0071.json b/.sqlx/query-9d821bd27d5202d2c3d49a2f148ff7f21bafde8c7c1306cc7efc976a9eae0071.json new file mode 100644 index 0000000..8adc74c --- /dev/null +++ b/.sqlx/query-9d821bd27d5202d2c3d49a2f148ff7f21bafde8c7c1306cc7efc976a9eae0071.json @@ -0,0 +1,25 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO user_agreement (agrt_id, user_id, created_at, updated_at)\n VALUES ($1, $2, $3, $4)\n RETURNING id;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Int4", + "Varchar", + "Timestamptz", + "Timestamptz" + ] + }, + "nullable": [ + false + ] + }, + "hash": "9d821bd27d5202d2c3d49a2f148ff7f21bafde8c7c1306cc7efc976a9eae0071" +} diff --git a/.sqlx/query-9e4f216c828c7d53547c33da062153f90eefabe5a252f86d5e8d1964785025c0.json b/.sqlx/query-9e4f216c828c7d53547c33da062153f90eefabe5a252f86d5e8d1964785025c0.json new file mode 100644 index 0000000..67d8c69 --- /dev/null +++ b/.sqlx/query-9e4f216c828c7d53547c33da062153f90eefabe5a252f86d5e8d1964785025c0.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO command_queue (command_id, deployment_hash, priority)\n VALUES ($1, $2, $3)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Varchar", + "Varchar", + "Int4" + ] + }, + "nullable": [] + }, + "hash": "9e4f216c828c7d53547c33da062153f90eefabe5a252f86d5e8d1964785025c0" +} diff --git a/.sqlx/query-b8296183bd28695d3a7574e57db445dc1f4b2d659a3805f92f6f5f83b562266b.json b/.sqlx/query-b8296183bd28695d3a7574e57db445dc1f4b2d659a3805f92f6f5f83b562266b.json new file mode 100644 index 0000000..a924adf --- /dev/null +++ b/.sqlx/query-b8296183bd28695d3a7574e57db445dc1f4b2d659a3805f92f6f5f83b562266b.json @@ -0,0 +1,70 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM cloud\n WHERE user_id=$1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "user_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "provider", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "cloud_token", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "cloud_key", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "cloud_secret", + "type_info": "Varchar" + }, + { + "ordinal": 6, + "name": "save_token", + "type_info": "Bool" + }, + { + "ordinal": 7, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "updated_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + true, + true, + false, + false + ] + }, + "hash": "b8296183bd28695d3a7574e57db445dc1f4b2d659a3805f92f6f5f83b562266b" +} diff --git a/.sqlx/query-b92417574329b82cae2347027db12f4794c1fc48b67d64c34c88fd9caf4508f5.json b/.sqlx/query-b92417574329b82cae2347027db12f4794c1fc48b67d64c34c88fd9caf4508f5.json new file mode 100644 index 0000000..d77b472 --- /dev/null +++ b/.sqlx/query-b92417574329b82cae2347027db12f4794c1fc48b67d64c34c88fd9caf4508f5.json @@ -0,0 +1,30 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO deployment (\n project_id, user_id, deployment_hash, deleted, status, metadata, last_seen_at, created_at, updated_at\n )\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)\n RETURNING id;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Int4", + "Varchar", + "Varchar", + "Bool", + "Varchar", + "Json", + "Timestamptz", + "Timestamptz", + "Timestamptz" + ] + }, + "nullable": [ + false + ] + }, + "hash": "b92417574329b82cae2347027db12f4794c1fc48b67d64c34c88fd9caf4508f5" +} diff --git a/.sqlx/query-bc798b1837501109ff69f44c01d39c1cc03348eb4b4fe698ad06283ba7072b7f.json b/.sqlx/query-bc798b1837501109ff69f44c01d39c1cc03348eb4b4fe698ad06283ba7072b7f.json new file mode 100644 index 0000000..0f85900 --- /dev/null +++ b/.sqlx/query-bc798b1837501109ff69f44c01d39c1cc03348eb4b4fe698ad06283ba7072b7f.json @@ -0,0 +1,113 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO commands (\n id, command_id, deployment_hash, type, status, priority,\n parameters, result, error, created_by, created_at, updated_at,\n timeout_seconds, metadata\n )\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)\n RETURNING id, command_id, deployment_hash, type, status, priority,\n parameters, result, error, created_by, created_at, updated_at,\n timeout_seconds, metadata\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "command_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "deployment_hash", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "type", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "status", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "priority", + "type_info": "Varchar" + }, + { + "ordinal": 6, + "name": "parameters", + "type_info": "Jsonb" + }, + { + "ordinal": 7, + "name": "result", + "type_info": "Jsonb" + }, + { + "ordinal": 8, + "name": "error", + "type_info": "Jsonb" + }, + { + "ordinal": 9, + "name": "created_by", + "type_info": "Varchar" + }, + { + "ordinal": 10, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 11, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 12, + "name": "timeout_seconds", + "type_info": "Int4" + }, + { + "ordinal": 13, + "name": "metadata", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Jsonb", + "Jsonb", + "Jsonb", + "Varchar", + "Timestamptz", + "Timestamptz", + "Int4", + "Jsonb" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + true, + true, + true, + false, + false, + false, + true, + true + ] + }, + "hash": "bc798b1837501109ff69f44c01d39c1cc03348eb4b4fe698ad06283ba7072b7f" +} diff --git a/.sqlx/query-c28d645182680aaeaf265abcb687ea36f2a01b6b778fd61921e0046ad3f2efb2.json b/.sqlx/query-c28d645182680aaeaf265abcb687ea36f2a01b6b778fd61921e0046ad3f2efb2.json new file mode 100644 index 0000000..155c1fc --- /dev/null +++ b/.sqlx/query-c28d645182680aaeaf265abcb687ea36f2a01b6b778fd61921e0046ad3f2efb2.json @@ -0,0 +1,47 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM user_agreement\n WHERE user_id=$1\n AND agrt_id=$2\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "agrt_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "user_id", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "updated_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Text", + "Int4" + ] + }, + "nullable": [ + false, + false, + false, + false, + false + ] + }, + "hash": "c28d645182680aaeaf265abcb687ea36f2a01b6b778fd61921e0046ad3f2efb2" +} diff --git a/.sqlx/query-c59246b73cf3c5a0fd961d2709477ce724f60cdb03492eef912a9fe89aee2ac4.json b/.sqlx/query-c59246b73cf3c5a0fd961d2709477ce724f60cdb03492eef912a9fe89aee2ac4.json new file mode 100644 index 0000000..838d20a --- /dev/null +++ b/.sqlx/query-c59246b73cf3c5a0fd961d2709477ce724f60cdb03492eef912a9fe89aee2ac4.json @@ -0,0 +1,83 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE deployment\n SET\n project_id=$2,\n user_id=$3,\n deployment_hash=$4,\n deleted=$5,\n status=$6,\n metadata=$7,\n last_seen_at=$8,\n updated_at=NOW() at time zone 'utc'\n WHERE id = $1\n RETURNING *\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "project_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "metadata", + "type_info": "Json" + }, + { + "ordinal": 3, + "name": "deleted", + "type_info": "Bool" + }, + { + "ordinal": 4, + "name": "status", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "deployment_hash", + "type_info": "Varchar" + }, + { + "ordinal": 8, + "name": "last_seen_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "user_id", + "type_info": "Varchar" + } + ], + "parameters": { + "Left": [ + "Int4", + "Int4", + "Varchar", + "Varchar", + "Bool", + "Varchar", + "Json", + "Timestamptz" + ] + }, + "nullable": [ + false, + false, + false, + true, + false, + false, + false, + false, + true, + true + ] + }, + "hash": "c59246b73cf3c5a0fd961d2709477ce724f60cdb03492eef912a9fe89aee2ac4" +} diff --git a/.sqlx/query-cd6ddae34b29c15924e0ec26ea55c23d56315ad817bea716d6a71c8b2bb18087.json b/.sqlx/query-cd6ddae34b29c15924e0ec26ea55c23d56315ad817bea716d6a71c8b2bb18087.json new file mode 100644 index 0000000..64f052c --- /dev/null +++ b/.sqlx/query-cd6ddae34b29c15924e0ec26ea55c23d56315ad817bea716d6a71c8b2bb18087.json @@ -0,0 +1,44 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO rating (user_id, obj_id, category, comment, hidden, rate, created_at, updated_at)\n VALUES ($1, $2, $3, $4, $5, $6, NOW() at time zone 'utc', NOW() at time zone 'utc')\n RETURNING id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Varchar", + "Int4", + { + "Custom": { + "name": "rate_category", + "kind": { + "Enum": [ + "application", + "cloud", + "project", + "deploymentSpeed", + "documentation", + "design", + "techSupport", + "price", + "memoryUsage" + ] + } + } + }, + "Text", + "Bool", + "Int4" + ] + }, + "nullable": [ + false + ] + }, + "hash": "cd6ddae34b29c15924e0ec26ea55c23d56315ad817bea716d6a71c8b2bb18087" +} diff --git a/.sqlx/query-cf85345c0c38d7ba1c347a9cf027a55dccaaeb0fe55d5eabb7319a90cbdfe951.json b/.sqlx/query-cf85345c0c38d7ba1c347a9cf027a55dccaaeb0fe55d5eabb7319a90cbdfe951.json new file mode 100644 index 0000000..e24d9cb --- /dev/null +++ b/.sqlx/query-cf85345c0c38d7ba1c347a9cf027a55dccaaeb0fe55d5eabb7319a90cbdfe951.json @@ -0,0 +1,85 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT \n id,\n user_id,\n obj_id,\n category as \"category: _\",\n comment,\n hidden,\n rate,\n created_at,\n updated_at\n FROM rating\n ORDER BY id DESC\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "user_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "obj_id", + "type_info": "Int4" + }, + { + "ordinal": 3, + "name": "category: _", + "type_info": { + "Custom": { + "name": "rate_category", + "kind": { + "Enum": [ + "application", + "cloud", + "project", + "deploymentSpeed", + "documentation", + "design", + "techSupport", + "price", + "memoryUsage" + ] + } + } + } + }, + { + "ordinal": 4, + "name": "comment", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "hidden", + "type_info": "Bool" + }, + { + "ordinal": 6, + "name": "rate", + "type_info": "Int4" + }, + { + "ordinal": 7, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "updated_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false, + false, + false, + true, + true, + true, + false, + false + ] + }, + "hash": "cf85345c0c38d7ba1c347a9cf027a55dccaaeb0fe55d5eabb7319a90cbdfe951" +} diff --git a/.sqlx/query-db15f82b91377978db22c48cf2fb4d54ef603448c0c44272aec8f2ff04920b83.json b/.sqlx/query-db15f82b91377978db22c48cf2fb4d54ef603448c0c44272aec8f2ff04920b83.json new file mode 100644 index 0000000..2841e6e --- /dev/null +++ b/.sqlx/query-db15f82b91377978db22c48cf2fb4d54ef603448c0c44272aec8f2ff04920b83.json @@ -0,0 +1,69 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE project\n SET \n stack_id=$2,\n user_id=$3,\n name=$4,\n metadata=$5,\n request_json=$6,\n updated_at=NOW() at time zone 'utc'\n WHERE id = $1\n RETURNING *\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "stack_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "user_id", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "name", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "metadata", + "type_info": "Json" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "request_json", + "type_info": "Json" + } + ], + "parameters": { + "Left": [ + "Int4", + "Uuid", + "Varchar", + "Text", + "Json", + "Json" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false + ] + }, + "hash": "db15f82b91377978db22c48cf2fb4d54ef603448c0c44272aec8f2ff04920b83" +} diff --git a/.sqlx/query-dd36c2beb4867d36db9dc0fe47e6310aea0a7dd4c8fc5f7c2cff4dac327cf3f7.json b/.sqlx/query-dd36c2beb4867d36db9dc0fe47e6310aea0a7dd4c8fc5f7c2cff4dac327cf3f7.json new file mode 100644 index 0000000..2091a8b --- /dev/null +++ b/.sqlx/query-dd36c2beb4867d36db9dc0fe47e6310aea0a7dd4c8fc5f7c2cff4dac327cf3f7.json @@ -0,0 +1,23 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO client (user_id, secret, created_at, updated_at)\n VALUES ($1, $2, NOW() at time zone 'utc', NOW() at time zone 'utc')\n RETURNING id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Varchar", + "Varchar" + ] + }, + "nullable": [ + false + ] + }, + "hash": "dd36c2beb4867d36db9dc0fe47e6310aea0a7dd4c8fc5f7c2cff4dac327cf3f7" +} diff --git a/.sqlx/query-e5a60eb49da1cd42fc6c1bac36f038846f0cb4440e4b377d495ffe0f0bfc11b6.json b/.sqlx/query-e5a60eb49da1cd42fc6c1bac36f038846f0cb4440e4b377d495ffe0f0bfc11b6.json new file mode 100644 index 0000000..966ab27 --- /dev/null +++ b/.sqlx/query-e5a60eb49da1cd42fc6c1bac36f038846f0cb4440e4b377d495ffe0f0bfc11b6.json @@ -0,0 +1,34 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT id, user_id, secret FROM client c WHERE c.id = $1", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "user_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "secret", + "type_info": "Varchar" + } + ], + "parameters": { + "Left": [ + "Int4" + ] + }, + "nullable": [ + false, + false, + true + ] + }, + "hash": "e5a60eb49da1cd42fc6c1bac36f038846f0cb4440e4b377d495ffe0f0bfc11b6" +} diff --git a/.sqlx/query-f0af06a2002ce933966cf6cfe8289ea77781df5a251a6731b42f8ddefb8a4c8b.json b/.sqlx/query-f0af06a2002ce933966cf6cfe8289ea77781df5a251a6731b42f8ddefb8a4c8b.json new file mode 100644 index 0000000..0b08ecb --- /dev/null +++ b/.sqlx/query-f0af06a2002ce933966cf6cfe8289ea77781df5a251a6731b42f8ddefb8a4c8b.json @@ -0,0 +1,100 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT id, command_id, deployment_hash, type, status, priority,\n parameters, result, error, created_by, created_at, updated_at,\n timeout_seconds, metadata\n FROM commands\n WHERE command_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "command_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "deployment_hash", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "type", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "status", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "priority", + "type_info": "Varchar" + }, + { + "ordinal": 6, + "name": "parameters", + "type_info": "Jsonb" + }, + { + "ordinal": 7, + "name": "result", + "type_info": "Jsonb" + }, + { + "ordinal": 8, + "name": "error", + "type_info": "Jsonb" + }, + { + "ordinal": 9, + "name": "created_by", + "type_info": "Varchar" + }, + { + "ordinal": 10, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 11, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 12, + "name": "timeout_seconds", + "type_info": "Int4" + }, + { + "ordinal": 13, + "name": "metadata", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + true, + true, + true, + false, + false, + false, + true, + true + ] + }, + "hash": "f0af06a2002ce933966cf6cfe8289ea77781df5a251a6731b42f8ddefb8a4c8b" +} diff --git a/.sqlx/query-f130c22d14ee2a99b9220ac1a45226ba97993ede9988a4c57d58bd066500a119.json b/.sqlx/query-f130c22d14ee2a99b9220ac1a45226ba97993ede9988a4c57d58bd066500a119.json deleted file mode 100644 index ef54cdb..0000000 --- a/.sqlx/query-f130c22d14ee2a99b9220ac1a45226ba97993ede9988a4c57d58bd066500a119.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n (v0 is NULL OR v0 = COALESCE($2,v0)) AND\n (v1 is NULL OR v1 = COALESCE($3,v1)) AND\n (v2 is NULL OR v2 = COALESCE($4,v2)) AND\n (v3 is NULL OR v3 = COALESCE($5,v3)) AND\n (v4 is NULL OR v4 = COALESCE($6,v4)) AND\n (v5 is NULL OR v5 = COALESCE($7,v5))", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Varchar", - "Varchar", - "Varchar", - "Varchar", - "Varchar", - "Varchar" - ] - }, - "nullable": [] - }, - "hash": "f130c22d14ee2a99b9220ac1a45226ba97993ede9988a4c57d58bd066500a119" -} diff --git a/.sqlx/query-f8611a862ed1d3b982e8aa5ccab21e00c42a3fad8082cf15c2af88cd8388f41b.json b/.sqlx/query-f8611a862ed1d3b982e8aa5ccab21e00c42a3fad8082cf15c2af88cd8388f41b.json deleted file mode 100644 index 0daaa8a..0000000 --- a/.sqlx/query-f8611a862ed1d3b982e8aa5ccab21e00c42a3fad8082cf15c2af88cd8388f41b.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n (v2 is NULL OR v2 = COALESCE($2,v2)) AND\n (v3 is NULL OR v3 = COALESCE($3,v3)) AND\n (v4 is NULL OR v4 = COALESCE($4,v4)) AND\n (v5 is NULL OR v5 = COALESCE($5,v5))", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Varchar", - "Varchar", - "Varchar", - "Varchar" - ] - }, - "nullable": [] - }, - "hash": "f8611a862ed1d3b982e8aa5ccab21e00c42a3fad8082cf15c2af88cd8388f41b" -} diff --git a/.sqlx/query-fa51ae7af271fc17c848694fbf1b37d46c5a2f4202e1b8dce1f66a65069beb0b.json b/.sqlx/query-fa51ae7af271fc17c848694fbf1b37d46c5a2f4202e1b8dce1f66a65069beb0b.json deleted file mode 100644 index 4a5f7e8..0000000 --- a/.sqlx/query-fa51ae7af271fc17c848694fbf1b37d46c5a2f4202e1b8dce1f66a65069beb0b.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n (v5 is NULL OR v5 = COALESCE($2,v5))", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Varchar" - ] - }, - "nullable": [] - }, - "hash": "fa51ae7af271fc17c848694fbf1b37d46c5a2f4202e1b8dce1f66a65069beb0b" -} diff --git a/.sqlx/query-fb07f53c015c852c4ef9e0ce52541f06835f8687122987d87fad751981b0c2b1.json b/.sqlx/query-fb07f53c015c852c4ef9e0ce52541f06835f8687122987d87fad751981b0c2b1.json new file mode 100644 index 0000000..58b296c --- /dev/null +++ b/.sqlx/query-fb07f53c015c852c4ef9e0ce52541f06835f8687122987d87fad751981b0c2b1.json @@ -0,0 +1,101 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE commands\n SET status = $2, updated_at = NOW()\n WHERE command_id = $1\n RETURNING id, command_id, deployment_hash, type, status, priority,\n parameters, result, error, created_by, created_at, updated_at,\n timeout_seconds, metadata\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "command_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "deployment_hash", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "type", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "status", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "priority", + "type_info": "Varchar" + }, + { + "ordinal": 6, + "name": "parameters", + "type_info": "Jsonb" + }, + { + "ordinal": 7, + "name": "result", + "type_info": "Jsonb" + }, + { + "ordinal": 8, + "name": "error", + "type_info": "Jsonb" + }, + { + "ordinal": 9, + "name": "created_by", + "type_info": "Varchar" + }, + { + "ordinal": 10, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 11, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 12, + "name": "timeout_seconds", + "type_info": "Int4" + }, + { + "ordinal": 13, + "name": "metadata", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Text", + "Varchar" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + true, + true, + true, + false, + false, + false, + true, + true + ] + }, + "hash": "fb07f53c015c852c4ef9e0ce52541f06835f8687122987d87fad751981b0c2b1" +} diff --git a/.sqlx/query-fb7ce69e70b345d2cf0ca017523c1b90b67b053add3d4cffb8d579bfc8f08345.json b/.sqlx/query-fb7ce69e70b345d2cf0ca017523c1b90b67b053add3d4cffb8d579bfc8f08345.json deleted file mode 100644 index 897ae52..0000000 --- a/.sqlx/query-fb7ce69e70b345d2cf0ca017523c1b90b67b053add3d4cffb8d579bfc8f08345.json +++ /dev/null @@ -1,75 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT * from casbin_rule WHERE (\n ptype LIKE 'g%' AND v0 LIKE $1 AND v1 LIKE $2 AND v2 LIKE $3 AND v3 LIKE $4 AND v4 LIKE $5 AND v5 LIKE $6 )\n OR (\n ptype LIKE 'p%' AND v0 LIKE $7 AND v1 LIKE $8 AND v2 LIKE $9 AND v3 LIKE $10 AND v4 LIKE $11 AND v5 LIKE $12 );\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - }, - { - "ordinal": 1, - "name": "ptype", - "type_info": "Varchar" - }, - { - "ordinal": 2, - "name": "v0", - "type_info": "Varchar" - }, - { - "ordinal": 3, - "name": "v1", - "type_info": "Varchar" - }, - { - "ordinal": 4, - "name": "v2", - "type_info": "Varchar" - }, - { - "ordinal": 5, - "name": "v3", - "type_info": "Varchar" - }, - { - "ordinal": 6, - "name": "v4", - "type_info": "Varchar" - }, - { - "ordinal": 7, - "name": "v5", - "type_info": "Varchar" - } - ], - "parameters": { - "Left": [ - "Text", - "Text", - "Text", - "Text", - "Text", - "Text", - "Text", - "Text", - "Text", - "Text", - "Text", - "Text" - ] - }, - "nullable": [ - false, - false, - false, - false, - false, - false, - false, - false - ] - }, - "hash": "fb7ce69e70b345d2cf0ca017523c1b90b67b053add3d4cffb8d579bfc8f08345" -} diff --git a/.sqlx/query-ffb567ac44b9a0525bd41392c3a865d0612bc0d3f620d5cba76a6b44a8812417.json b/.sqlx/query-ffb567ac44b9a0525bd41392c3a865d0612bc0d3f620d5cba76a6b44a8812417.json new file mode 100644 index 0000000..12efb85 --- /dev/null +++ b/.sqlx/query-ffb567ac44b9a0525bd41392c3a865d0612bc0d3f620d5cba76a6b44a8812417.json @@ -0,0 +1,48 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE agreement\n SET\n name=$2,\n text=$3,\n updated_at=NOW() at time zone 'utc'\n WHERE id = $1\n RETURNING *\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "name", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "text", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "updated_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Int4", + "Varchar", + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + false + ] + }, + "hash": "ffb567ac44b9a0525bd41392c3a865d0612bc0d3f620d5cba76a6b44a8812417" +} diff --git a/Cargo.lock b/Cargo.lock index 0056afa..b02e164 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -195,7 +195,7 @@ dependencies = [ "serde_urlencoded", "smallvec", "socket2 0.6.1", - "time 0.3.44", + "time", "tracing", "url", ] @@ -356,12 +356,6 @@ dependencies = [ "url", ] -[[package]] -name = "android-tzdata" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" - [[package]] name = "android_system_properties" version = "0.1.5" @@ -449,7 +443,7 @@ dependencies = [ "num-traits", "rusticata-macros", "thiserror 2.0.17", - "time 0.3.44", + "time", ] [[package]] @@ -634,15 +628,6 @@ dependencies = [ "syn 2.0.111", ] -[[package]] -name = "atoi" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7c57d12312ff59c811c0643f4d80830505833c9ffaebd193d819392b265be8e" -dependencies = [ - "num-traits", -] - [[package]] name = "atoi" version = "2.0.0" @@ -699,6 +684,9 @@ name = "bitflags" version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" +dependencies = [ + "serde_core", +] [[package]] name = "block-buffer" @@ -849,7 +837,7 @@ dependencies = [ "hashlink 0.9.1", "mini-moka", "once_cell", - "parking_lot 0.12.5", + "parking_lot", "petgraph", "regex", "rhai", @@ -898,18 +886,16 @@ checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" [[package]] name = "chrono" -version = "0.4.29" +version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d87d9d13be47a5b7c3907137f1290b0459a7f80efb26be8c52afb11963bccb02" +checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" dependencies = [ - "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "serde", - "time 0.1.45", "wasm-bindgen", - "windows-targets 0.48.5", + "windows-link", ] [[package]] @@ -950,7 +936,7 @@ version = "4.5.49" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" dependencies = [ - "heck 0.5.0", + "heck", "proc-macro2", "quote", "syn 2.0.111", @@ -1070,7 +1056,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb" dependencies = [ "percent-encoding", - "time 0.3.44", + "time", "version_check", ] @@ -1224,7 +1210,7 @@ dependencies = [ "hashbrown 0.14.5", "lock_api", "once_cell", - "parking_lot_core 0.9.12", + "parking_lot_core", ] [[package]] @@ -1439,30 +1425,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", + "const-oid", "crypto-common", "subtle", ] -[[package]] -name = "dirs" -version = "4.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059" -dependencies = [ - "dirs-sys", -] - -[[package]] -name = "dirs-sys" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6" -dependencies = [ - "libc", - "redox_users", - "winapi", -] - [[package]] name = "displaydoc" version = "0.2.5" @@ -1493,7 +1460,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d6fdd6fa1c9e8e716f5f73406b868929f468702449621e7397066478b9bf89c" dependencies = [ "derive_builder 0.13.1", - "indexmap 2.12.1", + "indexmap", "serde", "serde_yaml", ] @@ -1735,17 +1702,6 @@ dependencies = [ "futures-util", ] -[[package]] -name = "futures-intrusive" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a604f7a68fbf8103337523b1fadc8ade7361ee3f112f7c680ad179651616aed5" -dependencies = [ - "futures-core", - "lock_api", - "parking_lot 0.11.2", -] - [[package]] name = "futures-intrusive" version = "0.5.0" @@ -1754,7 +1710,7 @@ checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" dependencies = [ "futures-core", "lock_api", - "parking_lot 0.12.5", + "parking_lot", ] [[package]] @@ -1922,7 +1878,7 @@ dependencies = [ "futures-sink", "futures-util", "http", - "indexmap 2.12.1", + "indexmap", "slab", "tokio", "tokio-util", @@ -1945,7 +1901,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ "ahash 0.8.12", - "allocator-api2", ] [[package]] @@ -1965,15 +1920,6 @@ version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" -[[package]] -name = "hashlink" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" -dependencies = [ - "hashbrown 0.14.5", -] - [[package]] name = "hashlink" version = "0.9.1" @@ -1992,15 +1938,6 @@ dependencies = [ "hashbrown 0.15.5", ] -[[package]] -name = "heck" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -dependencies = [ - "unicode-segmentation", -] - [[package]] name = "heck" version = "0.5.0" @@ -2282,16 +2219,6 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8a5a9a0ff0086c7a148acb942baaabeadf9504d10400b5a05645853729b9cd2" -[[package]] -name = "indexmap" -version = "1.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" -dependencies = [ - "autocfg", - "hashbrown 0.12.3", -] - [[package]] name = "indexmap" version = "2.12.1" @@ -2348,9 +2275,12 @@ checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" [[package]] name = "ipnetwork" -version = "0.19.0" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f84f1612606f3753f205a4e9a2efd6fe5b4c573a6269b2cc6c3003d44a0d127" +checksum = "bf466541e9d546596ee94f9f69590f89473455f88372423e0008fc1a7daf100e" +dependencies = [ + "serde", +] [[package]] name = "is-terminal" @@ -2444,7 +2374,7 @@ dependencies = [ "flume", "futures-core", "futures-io", - "parking_lot 0.12.5", + "parking_lot", "pinky-swear", "reactor-trait", "serde", @@ -2458,6 +2388,9 @@ name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +dependencies = [ + "spin 0.9.8", +] [[package]] name = "libc" @@ -2482,6 +2415,16 @@ dependencies = [ "redox_syscall 0.6.0", ] +[[package]] +name = "libsqlite3-sys" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" +dependencies = [ + "pkg-config", + "vcpkg", +] + [[package]] name = "linked-hash-map" version = "0.5.6" @@ -2683,6 +2626,22 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-bigint-dig" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7" +dependencies = [ + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + "rand 0.8.5", + "smallvec", + "zeroize", +] + [[package]] name = "num-conv" version = "0.1.0" @@ -2698,6 +2657,17 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-iter" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + [[package]] name = "num-traits" version = "0.2.19" @@ -2836,17 +2806,6 @@ version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" -[[package]] -name = "parking_lot" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api", - "parking_lot_core 0.8.6", -] - [[package]] name = "parking_lot" version = "0.12.5" @@ -2854,21 +2813,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" dependencies = [ "lock_api", - "parking_lot_core 0.9.12", -] - -[[package]] -name = "parking_lot_core" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" -dependencies = [ - "cfg-if", - "instant", - "libc", - "redox_syscall 0.2.16", - "smallvec", - "winapi", + "parking_lot_core", ] [[package]] @@ -2971,7 +2916,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap 2.12.1", + "indexmap", ] [[package]] @@ -3014,7 +2959,7 @@ checksum = "b1ea6e230dd3a64d61bcb8b79e597d3ab6b4c94ec7a234ce687dd718b4f2e657" dependencies = [ "doc-comment", "flume", - "parking_lot 0.12.5", + "parking_lot", "tracing", ] @@ -3029,6 +2974,17 @@ dependencies = [ "futures-io", ] +[[package]] +name = "pkcs1" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der", + "pkcs8", + "spki", +] + [[package]] name = "pkcs12" version = "0.1.0" @@ -3059,6 +3015,16 @@ dependencies = [ "spki", ] +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + [[package]] name = "pkg-config" version = "0.3.32" @@ -3340,15 +3306,6 @@ dependencies = [ "url", ] -[[package]] -name = "redox_syscall" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" -dependencies = [ - "bitflags 1.3.2", -] - [[package]] name = "redox_syscall" version = "0.5.18" @@ -3367,17 +3324,6 @@ dependencies = [ "bitflags 2.10.0", ] -[[package]] -name = "redox_users" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" -dependencies = [ - "getrandom 0.2.16", - "libredox", - "thiserror 1.0.69", -] - [[package]] name = "regex" version = "1.12.2" @@ -3489,21 +3435,6 @@ dependencies = [ "syn 2.0.111", ] -[[package]] -name = "ring" -version = "0.16.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" -dependencies = [ - "cc", - "libc", - "once_cell", - "spin 0.5.2", - "untrusted 0.7.1", - "web-sys", - "winapi", -] - [[package]] name = "ring" version = "0.17.14" @@ -3514,7 +3445,7 @@ dependencies = [ "cfg-if", "getrandom 0.2.16", "libc", - "untrusted 0.9.0", + "untrusted", "windows-sys 0.52.0", ] @@ -3529,6 +3460,26 @@ dependencies = [ "serde", ] +[[package]] +name = "rsa" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40a0376c50d0358279d9d643e4bf7b7be212f1f4ff1da9070a7b54d22ef75c88" +dependencies = [ + "const-oid", + "digest", + "num-bigint-dig", + "num-integer", + "num-traits", + "pkcs1", + "pkcs8", + "rand_core 0.6.4", + "signature", + "spki", + "subtle", + "zeroize", +] + [[package]] name = "rust-ini" version = "0.18.0" @@ -3584,18 +3535,6 @@ dependencies = [ "windows-sys 0.61.2", ] -[[package]] -name = "rustls" -version = "0.20.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" -dependencies = [ - "log", - "ring 0.16.20", - "sct", - "webpki", -] - [[package]] name = "rustls" version = "0.23.35" @@ -3603,7 +3542,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "533f54bc6a7d4f647e46ad909549eda97bf5afc1585190ef692b4286b198bd8f" dependencies = [ "once_cell", - "ring 0.17.14", + "ring", "rustls-pki-types", "rustls-webpki", "subtle", @@ -3617,7 +3556,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70cc376c6ba1823ae229bacf8ad93c136d93524eab0e4e5e0e4f96b9c4e5b212" dependencies = [ "log", - "rustls 0.23.35", + "rustls", "rustls-native-certs", "rustls-pki-types", "rustls-webpki", @@ -3669,9 +3608,9 @@ version = "0.103.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52" dependencies = [ - "ring 0.17.14", + "ring", "rustls-pki-types", - "untrusted 0.9.0", + "untrusted", ] [[package]] @@ -3730,16 +3669,6 @@ dependencies = [ "sha2", ] -[[package]] -name = "sct" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" -dependencies = [ - "ring 0.17.14", - "untrusted 0.9.0", -] - [[package]] name = "security-framework" version = "2.11.1" @@ -3856,7 +3785,7 @@ version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70c0e00fab6460447391a1981c21341746bc2d0178a7c46a3bbf667f450ac6e4" dependencies = [ - "indexmap 2.12.1", + "indexmap", "itertools 0.12.1", "num-traits", "once_cell", @@ -3900,7 +3829,7 @@ version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ - "indexmap 2.12.1", + "indexmap", "itoa", "ryu", "serde", @@ -3960,6 +3889,16 @@ dependencies = [ "libc", ] +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest", + "rand_core 0.6.4", +] + [[package]] name = "simd-adler32" version = "0.3.8" @@ -4022,7 +3961,7 @@ dependencies = [ "slog", "term", "thread_local", - "time 0.3.44", + "time", ] [[package]] @@ -4101,35 +4040,17 @@ dependencies = [ "der", ] -[[package]] -name = "sqlformat" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bba3a93db0cc4f7bdece8bb09e77e2e785c20bfebf79eb8340ed80708048790" -dependencies = [ - "nom", - "unicode_categories", -] - -[[package]] -name = "sqlx" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8de3b03a925878ed54a954f621e64bf55a3c1bd29652d0d1a17830405350188" -dependencies = [ - "sqlx-core 0.6.3", - "sqlx-macros 0.6.3", -] - [[package]] name = "sqlx" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc" dependencies = [ - "sqlx-core 0.8.6", - "sqlx-macros 0.8.6", + "sqlx-core", + "sqlx-macros", + "sqlx-mysql", "sqlx-postgres", + "sqlx-sqlite", ] [[package]] @@ -4141,63 +4062,7 @@ dependencies = [ "async-trait", "casbin", "dotenvy", - "sqlx 0.8.6", -] - -[[package]] -name = "sqlx-core" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa8241483a83a3f33aa5fff7e7d9def398ff9990b2752b6c6112b83c6d246029" -dependencies = [ - "ahash 0.7.8", - "atoi 1.0.0", - "base64 0.13.1", - "bitflags 1.3.2", - "byteorder", - "bytes", - "chrono", - "crc", - "crossbeam-queue", - "dirs", - "dotenvy", - "either", - "event-listener 2.5.3", - "futures-channel", - "futures-core", - "futures-intrusive 0.4.2", - "futures-util", - "hashlink 0.8.4", - "hex", - "hkdf", - "hmac", - "indexmap 1.9.3", - "ipnetwork", - "itoa", - "libc", - "log", - "md-5", - "memchr", - "once_cell", - "paste", - "percent-encoding", - "rand 0.8.5", - "rustls 0.20.9", - "rustls-pemfile 1.0.4", - "serde", - "serde_json", - "sha1", - "sha2", - "smallvec", - "sqlformat", - "sqlx-rt", - "stringprep", - "thiserror 1.0.69", - "tokio-stream", - "url", - "uuid", - "webpki-roots", - "whoami", + "sqlx", ] [[package]] @@ -4208,22 +4073,25 @@ checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6" dependencies = [ "base64 0.22.1", "bytes", + "chrono", "crc", "crossbeam-queue", "either", "event-listener 5.4.1", "futures-core", - "futures-intrusive 0.5.0", + "futures-intrusive", "futures-io", "futures-util", "hashbrown 0.15.5", "hashlink 0.10.0", - "indexmap 2.12.1", + "indexmap", + "ipnetwork", "log", "memchr", "native-tls", "once_cell", "percent-encoding", + "rustls", "serde", "serde_json", "sha2", @@ -4233,28 +4101,8 @@ dependencies = [ "tokio-stream", "tracing", "url", -] - -[[package]] -name = "sqlx-macros" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9966e64ae989e7e575b19d7265cb79d7fc3cbbdf179835cb0d716f294c2049c9" -dependencies = [ - "dotenvy", - "either", - "heck 0.4.1", - "hex", - "once_cell", - "proc-macro2", - "quote", - "serde", - "serde_json", - "sha2", - "sqlx-core 0.6.3", - "sqlx-rt", - "syn 1.0.109", - "url", + "uuid", + "webpki-roots 0.26.11", ] [[package]] @@ -4265,7 +4113,7 @@ checksum = "a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d" dependencies = [ "proc-macro2", "quote", - "sqlx-core 0.8.6", + "sqlx-core", "sqlx-macros-core", "syn 2.0.111", ] @@ -4278,7 +4126,7 @@ checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b" dependencies = [ "dotenvy", "either", - "heck 0.5.0", + "heck", "hex", "once_cell", "proc-macro2", @@ -4286,23 +4134,70 @@ dependencies = [ "serde", "serde_json", "sha2", - "sqlx-core 0.8.6", + "sqlx-core", + "sqlx-mysql", "sqlx-postgres", + "sqlx-sqlite", "syn 2.0.111", "tokio", "url", ] +[[package]] +name = "sqlx-mysql" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526" +dependencies = [ + "atoi", + "base64 0.22.1", + "bitflags 2.10.0", + "byteorder", + "bytes", + "chrono", + "crc", + "digest", + "dotenvy", + "either", + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "generic-array", + "hex", + "hkdf", + "hmac", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "percent-encoding", + "rand 0.8.5", + "rsa", + "serde", + "sha1", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror 2.0.17", + "tracing", + "uuid", + "whoami", +] + [[package]] name = "sqlx-postgres" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46" dependencies = [ - "atoi 2.0.0", + "atoi", "base64 0.22.1", "bitflags 2.10.0", "byteorder", + "chrono", "crc", "dotenvy", "etcetera", @@ -4313,6 +4208,7 @@ dependencies = [ "hkdf", "hmac", "home", + "ipnetwork", "itoa", "log", "md-5", @@ -4323,22 +4219,38 @@ dependencies = [ "serde_json", "sha2", "smallvec", - "sqlx-core 0.8.6", + "sqlx-core", "stringprep", "thiserror 2.0.17", "tracing", + "uuid", "whoami", ] [[package]] -name = "sqlx-rt" -version = "0.6.3" +name = "sqlx-sqlite" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "804d3f245f894e61b1e6263c84b23ca675d96753b5abfd5cc8597d86806e8024" +checksum = "c2d12fe70b2c1b4401038055f90f151b78208de1f9f89a7dbfd41587a10c3eea" dependencies = [ - "once_cell", - "tokio", - "tokio-rustls", + "atoi", + "chrono", + "flume", + "futures-channel", + "futures-core", + "futures-executor", + "futures-intrusive", + "futures-util", + "libsqlite3-sys", + "log", + "percent-encoding", + "serde", + "serde_urlencoded", + "sqlx-core", + "thiserror 2.0.17", + "tracing", + "url", + "uuid", ] [[package]] @@ -4371,7 +4283,7 @@ dependencies = [ "futures-util", "glob", "hmac", - "indexmap 2.12.1", + "indexmap", "lapin", "rand 0.8.5", "redis", @@ -4384,7 +4296,7 @@ dependencies = [ "serde_valid", "serde_yaml", "sha2", - "sqlx 0.6.3", + "sqlx", "sqlx-adapter", "thiserror 1.0.69", "tokio", @@ -4597,17 +4509,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "time" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" -dependencies = [ - "libc", - "wasi 0.10.0+wasi-snapshot-preview1", - "winapi", -] - [[package]] name = "time" version = "0.3.44" @@ -4682,7 +4583,7 @@ dependencies = [ "bytes", "libc", "mio", - "parking_lot 0.12.5", + "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2 0.6.1", @@ -4722,17 +4623,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "tokio-rustls" -version = "0.23.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" -dependencies = [ - "rustls 0.20.9", - "tokio", - "webpki", -] - [[package]] name = "tokio-stream" version = "0.1.17" @@ -4819,7 +4709,7 @@ dependencies = [ "log", "serde", "serde_json", - "time 0.3.44", + "time", "tracing", "tracing-core", "tracing-log 0.1.4", @@ -4945,12 +4835,6 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" -[[package]] -name = "unicode_categories" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" - [[package]] name = "universal-hash" version = "0.5.1" @@ -4967,12 +4851,6 @@ version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" -[[package]] -name = "untrusted" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" - [[package]] name = "untrusted" version = "0.9.0" @@ -5064,12 +4942,6 @@ version = "0.9.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" -[[package]] -name = "wasi" -version = "0.10.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" - [[package]] name = "wasi" version = "0.11.1+wasi-snapshot-preview1" @@ -5192,22 +5064,21 @@ dependencies = [ ] [[package]] -name = "webpki" -version = "0.22.4" +name = "webpki-roots" +version = "0.26.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" +checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" dependencies = [ - "ring 0.17.14", - "untrusted 0.9.0", + "webpki-roots 1.0.4", ] [[package]] name = "webpki-roots" -version = "0.22.6" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +checksum = "b2878ef029c47c6e8cf779119f20fcf52bde7ad42a731b2a304bc221df17571e" dependencies = [ - "webpki", + "rustls-pki-types", ] [[package]] @@ -5218,7 +5089,6 @@ checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d" dependencies = [ "libredox", "wasite", - "web-sys", ] [[package]] @@ -5602,7 +5472,7 @@ dependencies = [ "oid-registry", "rusticata-macros", "thiserror 2.0.17", - "time 0.3.44", + "time", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 5159b15..f901e7a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,7 +20,7 @@ required-features = ["explain"] [dependencies] actix-web = "4.3.1" -chrono = { version = "0.4.29", features = ["time", "serde"] } +chrono = { version = "0.4.39", features = ["serde", "clock"] } config = "0.13.4" reqwest = { version = "0.11.23", features = ["json", "blocking"] } serde = { version = "1.0.195", features = ["derive"] } @@ -44,7 +44,7 @@ tokio-stream = "0.1.14" actix-http = "3.4.0" hmac = "0.12.1" sha2 = "0.10.8" -sqlx-adapter = { version = "1.0.0", default-features = false, features = ["postgres", "runtime-tokio-native-tls"]} +sqlx-adapter = { version = "1.8.0", default-features = false, features = ["postgres", "runtime-tokio-native-tls"]} dotenvy = "0.15" # dctypes @@ -65,16 +65,14 @@ base64 = "0.22.1" redis = { version = "0.27.5", features = ["tokio-comp"] } [dependencies.sqlx] -version = "0.6.3" +version = "0.8.2" features = [ - "runtime-actix-rustls", + "runtime-tokio-rustls", "postgres", "uuid", - "tls", "chrono", "json", "ipnetwork", - "offline", "macros" ] diff --git a/src/db/agreement.rs b/src/db/agreement.rs index d676588..aaaac10 100644 --- a/src/db/agreement.rs +++ b/src/db/agreement.rs @@ -205,35 +205,13 @@ pub async fn update( #[tracing::instrument(name = "Delete user's agreement.")] pub async fn delete(pool: &PgPool, id: i32) -> Result { tracing::info!("Delete agreement {}", id); - let mut tx = match pool.begin().await { - Ok(result) => result, - Err(err) => { - tracing::error!("Failed to begin transaction: {:?}", err); - return Err("".to_string()); - } - }; - - // Combine delete queries into a single query - let delete_query = " - DELETE FROM agreement WHERE id = $1; - "; - - match sqlx::query(delete_query) + sqlx::query::("DELETE FROM agreement WHERE id = $1;") .bind(id) - .execute(&mut tx) + .execute(pool) .await - .map_err(|err| println!("{:?}", err)) - { - Ok(_) => { - let _ = tx.commit().await.map_err(|err| { - tracing::error!("Failed to commit transaction: {:?}", err); - false - }); - Ok(true) - } - Err(_err) => { - let _ = tx.rollback().await.map_err(|err| println!("{:?}", err)); - Ok(false) - } // todo, when empty commit() - } + .map(|_| true) + .map_err(|err| { + tracing::error!("Failed to delete agreement: {:?}", err); + "Failed to delete agreement".to_string() + }) } diff --git a/src/db/cloud.rs b/src/db/cloud.rs index 5a0b7f1..0e06f1b 100644 --- a/src/db/cloud.rs +++ b/src/db/cloud.rs @@ -121,32 +121,13 @@ pub async fn update(pool: &PgPool, mut cloud: models::Cloud) -> Result Result { tracing::info!("Delete cloud {}", id); - let mut tx = match pool.begin().await { - Ok(result) => result, - Err(err) => { - tracing::error!("Failed to begin transaction: {:?}", err); - return Err("".to_string()); - } - }; - - let delete_query = " DELETE FROM cloud WHERE id = $1; "; - - match sqlx::query(delete_query) + sqlx::query::("DELETE FROM cloud WHERE id = $1;") .bind(id) - .execute(&mut tx) + .execute(pool) .await - .map_err(|err| println!("{:?}", err)) - { - Ok(_) => { - let _ = tx.commit().await.map_err(|err| { - tracing::error!("Failed to commit transaction: {:?}", err); - false - }); - Ok(true) - } - Err(_err) => { - let _ = tx.rollback().await.map_err(|err| println!("{:?}", err)); - Ok(false) - } - } + .map(|_| true) + .map_err(|err| { + tracing::error!("Failed to delete cloud: {:?}", err); + "Failed to delete cloud".to_string() + }) } diff --git a/src/db/project.rs b/src/db/project.rs index 1042f0a..397bf98 100644 --- a/src/db/project.rs +++ b/src/db/project.rs @@ -152,37 +152,15 @@ pub async fn update( #[tracing::instrument(name = "Delete user's project.")] pub async fn delete(pool: &PgPool, id: i32) -> Result { tracing::info!("Delete project {}", id); - let mut tx = match pool.begin().await { - Ok(result) => result, - Err(err) => { - tracing::error!("Failed to begin transaction: {:?}", err); - return Err("".to_string()); - } - }; - - // Combine delete queries into a single query - let delete_query = " - --DELETE FROM deployment WHERE project_id = $1; // on delete cascade - --DELETE FROM server WHERE project_id = $1; // on delete cascade - DELETE FROM project WHERE id = $1; - "; - - match sqlx::query(delete_query) - .bind(id) - .execute(&mut tx) - .await - .map_err(|err| println!("{:?}", err)) - { - Ok(_) => { - let _ = tx.commit().await.map_err(|err| { - tracing::error!("Failed to commit transaction: {:?}", err); - false - }); - Ok(true) - } - Err(_err) => { - let _ = tx.rollback().await.map_err(|err| println!("{:?}", err)); - Ok(false) - } // todo, when empty commit() - } + sqlx::query::( + "DELETE FROM project WHERE id = $1;", + ) + .bind(id) + .execute(pool) + .await + .map(|_| true) + .map_err(|err| { + tracing::error!("Failed to delete project: {:?}", err); + "Failed to delete project".to_string() + }) } diff --git a/src/db/server.rs b/src/db/server.rs index c9fd7d4..64d80f1 100644 --- a/src/db/server.rs +++ b/src/db/server.rs @@ -170,32 +170,13 @@ pub async fn update(pool: &PgPool, mut server: models::Server) -> Result Result { tracing::info!("Delete server {}", id); - let mut tx = match pool.begin().await { - Ok(result) => result, - Err(err) => { - tracing::error!("Failed to begin transaction: {:?}", err); - return Err("".to_string()); - } - }; - - let delete_query = " DELETE FROM server WHERE id = $1; "; - - match sqlx::query(delete_query) + sqlx::query::("DELETE FROM server WHERE id = $1;") .bind(id) - .execute(&mut tx) + .execute(pool) .await - .map_err(|err| println!("{:?}", err)) - { - Ok(_) => { - let _ = tx.commit().await.map_err(|err| { - tracing::error!("Failed to commit transaction: {:?}", err); - false - }); - Ok(true) - } - Err(_err) => { - let _ = tx.rollback().await.map_err(|err| println!("{:?}", err)); - Ok(false) - } - } + .map(|_| true) + .map_err(|err| { + tracing::error!("Failed to delete server: {:?}", err); + "Failed to delete server".to_string() + }) } From b287eb9603558ceae55198dabae01f303a664fb8 Mon Sep 17 00:00:00 2001 From: vsilent Date: Fri, 26 Dec 2025 12:26:27 +0200 Subject: [PATCH 12/35] no console for prod build for now --- .github/workflows/docker.yml | 10 +++++----- .github/workflows/rust.yml | 7 +++++-- Dockerfile | 3 +-- README.md | 4 ++-- 4 files changed, 13 insertions(+), 11 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index bd57cde..2942628 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -95,11 +95,11 @@ jobs: command: clippy args: -- -D warnings - - name: Run cargo build + - name: Build server (release) uses: actions-rs/cargo@v1 with: command: build - args: --release + args: --release --bin server - name: npm install, build, and test working-directory: ./web @@ -122,9 +122,9 @@ jobs: - name: Copy app files and zip run: | mkdir -p app/stacker/dist - cp target/release/stacker app/stacker - cp -a web/dist/. app/stacker - cp docker/prod/Dockerfile app/Dockerfile + cp target/release/server app/stacker/server + cp -a web/dist/. app/stacker || true + cp Dockerfile app/Dockerfile cd app touch .env tar -czvf ../app.tar.gz . diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 5c9e960..e617b62 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -60,8 +60,11 @@ jobs: key: ${{ runner.os }}-target-${{ matrix.target }}-${{ hashFiles('**/Cargo.lock') }} restore-keys: | ${{ runner.os }}-target-${{ matrix.target }}- - - name: Build (release) - run: cargo build --release --target ${{ matrix.target }} --verbose + - name: Build server (release) + run: cargo build --release --target ${{ matrix.target }} --bin server --verbose + + - name: Build console (release with features) + run: cargo build --release --target ${{ matrix.target }} --bin console --features explain --verbose - name: Prepare binaries run: | mkdir -p artifacts diff --git a/Dockerfile b/Dockerfile index 6962494..ab94018 100644 --- a/Dockerfile +++ b/Dockerfile @@ -34,7 +34,7 @@ COPY ./src ./src ENV SQLX_OFFLINE true RUN apt-get update && apt-get install --no-install-recommends -y libssl-dev; \ - cargo build --bin=console --features="explain" && cargo build --release --features="explain" + cargo build --release --bin server #RUN ls -la /app/target/release/ >&2 @@ -48,7 +48,6 @@ RUN mkdir ./files && chmod 0777 ./files # copy binary and configuration files COPY --from=builder /app/target/release/server . -COPY --from=builder /app/target/release/console . COPY --from=builder /app/.env . COPY --from=builder /app/configuration.yaml . COPY --from=builder /usr/local/cargo/bin/sqlx sqlx diff --git a/README.md b/README.md index f6c932f..edd60aa 100644 --- a/README.md +++ b/README.md @@ -69,14 +69,14 @@ The core Project model includes: - Required headers: `X-Agent-Id`, `X-Timestamp`, `X-Request-Id`, `X-Agent-Signature` - Signature: base64(HMAC_SHA256(AGENT_TOKEN, raw_body_bytes)) - Helper available: `helpers::AgentClient` - - Base URL: set `AGENT_BASE_URL` to point Stacker at the target agent (e.g., `http://agent:8080`). + - Base URL: set `AGENT_BASE_URL` to point Stacker at the target agent (e.g., `http://agent:5000`). Example: ```rust use stacker::helpers::AgentClient; use serde_json::json; -let client = AgentClient::new("http://agent:8080", agent_id, agent_token); +let client = AgentClient::new("http://agent:5000", agent_id, agent_token); let payload = json!({"deployment_hash": dh, "type": "restart_service", "parameters": {"service": "web"}}); let resp = client.commands_execute(&payload).await?; ``` From c634fbecb706df543cb46f880579c874038a5e3b Mon Sep 17 00:00:00 2001 From: vsilent Date: Fri, 26 Dec 2025 12:52:02 +0200 Subject: [PATCH 13/35] tests config --- AGENT_REGISTRATION_SPEC.md | 812 ------------------------------------- src/configuration.rs | 13 +- 2 files changed, 7 insertions(+), 818 deletions(-) delete mode 100644 AGENT_REGISTRATION_SPEC.md diff --git a/AGENT_REGISTRATION_SPEC.md b/AGENT_REGISTRATION_SPEC.md deleted file mode 100644 index 634c62b..0000000 --- a/AGENT_REGISTRATION_SPEC.md +++ /dev/null @@ -1,812 +0,0 @@ -# Agent Registration Specification - -## Overview - -The **Agent Registration API** allows Status Panel agents running on deployed systems to register themselves with the Stacker control plane. Upon successful registration, agents receive authentication credentials (JWT token) that they use for all subsequent API calls. - -This document provides comprehensive guidance for developers implementing agent clients. - ---- - -## Quick Start - -### Registration Flow (3 Steps) - -```mermaid -graph LR - Agent["Agent
(Status Panel)"] -->|1. POST /api/v1/agent/register| Server["Stacker Server"] - Server -->|2. Generate JWT Token| Vault["Vault
(Optional)"] - Server -->|3. Return agent_token| Agent - Agent -->|4. Future requests with
Authorization: Bearer agent_token| Server -``` - -### Minimal Example - -**Absolute minimum (empty system_info):** -```bash -curl -X POST http://localhost:8000/api/v1/agent/register \ - -H "Content-Type: application/json" \ - -d '{ - "deployment_hash": "550e8400-e29b-41d4-a716-446655440000", - "agent_version": "1.0.0", - "capabilities": ["docker"], - "system_info": {} - }' -``` - -**Recommended (with system info):** -```bash -curl -X POST http://localhost:8000/api/v1/agent/register \ - -H "Content-Type: application/json" \ - -d '{ - "deployment_hash": "550e8400-e29b-41d4-a716-446655440000", - "agent_version": "1.0.0", - "capabilities": ["docker", "compose", "logs"], - "system_info": { - "os": "linux", - "arch": "x86_64", - "memory_gb": 8, - "docker_version": "24.0.0" - } - }' -``` - -**Response:** -```json -{ - "data": { - "item": { - "agent_id": "42", - "agent_token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...", - "dashboard_version": "2.0.0", - "supported_api_versions": ["1.0"] - } - }, - "status": 201, - "message": "Agent registered" -} -``` - ---- - -## API Reference - -### Endpoint: `POST /api/v1/agent/register` - -**Purpose:** Register a new agent instance with the Stacker server. - -**Authentication:** None required (public endpoint) *See Security Considerations below* - -**Content-Type:** `application/json` - ---- - -## Request Format - -### Body Parameters - -| Field | Type | Required | Constraints | Description | Example | -|-------|------|----------|-------------|-------------|----------| -| `deployment_hash` | `string` | ✅ **Yes** | Non-empty, max 255 chars, URL-safe preferred | Unique identifier for the deployment/stack instance. Should be stable (doesn't change across restarts). Recommend using UUID or hash-based format. | `"abc123-def456-ghi789"`, `"550e8400-e29b-41d4-a716-446655440000"` | -| `agent_version` | `string` | ✅ **Yes** | Semantic version format (e.g., X.Y.Z) | Semantic version of the agent binary. Used for compatibility checks and upgrade decisions. | `"1.0.0"`, `"1.2.3"`, `"2.0.0-rc1"` | -| `capabilities` | `array[string]` | ✅ **Yes** | Non-empty array, each item: 1-32 chars, lowercase alphanumeric + underscore | List of feature identifiers this agent supports. Used for command routing and capability discovery. Must be non-empty - agent must support at least one capability. | `["docker", "compose", "logs"]`, `["docker", "compose", "logs", "monitoring", "backup"]` | -| `system_info` | `object` (JSON) | ✅ **Yes** | Valid JSON object, can be empty `{}` | System environment details. Server uses this for telemetry, debugging, and agent classification. No required fields, but recommended fields shown below. | `{"os": "linux", "arch": "x86_64"}` or `{}` | -| `public_key` | `string` \| `null` | ❌ **No** | Optional, PEM format if provided (starts with `-----BEGIN PUBLIC KEY-----`) | PEM-encoded RSA public key for future request signing. Currently unused; reserved for security upgrade to HMAC-SHA256 request signatures. | `"-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkq...\n-----END PUBLIC KEY-----"` or `null` | - -### `system_info` Object Structure - -**Requirement:** `system_info` field accepts any valid JSON object. It can be empty `{}` or contain detailed system information. - -**Recommended fields** (all optional): - -```json -{ - "system_info": { - "os": "linux", // Operating system: linux, windows, darwin, freebsd, etc. - "arch": "x86_64", // CPU architecture: x86_64, arm64, i386, armv7l, etc. - "memory_gb": 16, // Available system memory (float or int) - "hostname": "deploy-server-01", // Hostname or instance name - "docker_version": "24.0.0", // Docker engine version if available - "docker_compose_version": "2.20.0", // Docker Compose version if available - "kernel_version": "5.15.0-91", // OS kernel version if available - "uptime_seconds": 604800, // System uptime in seconds - "cpu_cores": 8, // Number of CPU cores - "disk_free_gb": 50 // Free disk space available - } -} -``` - -**Minimum valid requests:** - -```bash -# Minimal with empty system_info -{ - "deployment_hash": "my-deployment", - "agent_version": "1.0.0", - "capabilities": ["docker"], - "system_info": {} -} - -# Minimal with basic info -{ - "deployment_hash": "my-deployment", - "agent_version": "1.0.0", - "capabilities": ["docker", "compose"], - "system_info": { - "os": "linux", - "arch": "x86_64", - "memory_gb": 8 - } -} -``` -``` - ---- - -## Response Format - -### Success Response (HTTP 201 Created) - -```json -{ - "data": { - "item": { - "agent_id": "550e8400-e29b-41d4-a716-446655440000", - "agent_token": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrst", - "dashboard_version": "2.0.0", - "supported_api_versions": ["1.0"] - } - }, - "status": 201, - "message": "Agent registered" -} -``` - -**Response Structure:** -- `data.item` - Contains the registration result object -- `status` - HTTP status code (201 for success) -- `message` - Human-readable status message - -**Response Fields:** - -| Field | Type | Value | Description | -|-------|------|-------|-------------| -| `agent_id` | `string` | UUID format (e.g., `"550e8400-e29b-41d4-a716-446655440000"`) | Server-assigned unique identifier for this agent instance. Stable across restarts. | -| `agent_token` | `string` | 86-character random string (URL-safe: A-Z, a-z, 0-9, `-`, `_`) | Secure bearer token for authenticating future requests. Store securely. | -| `dashboard_version` | `string` | Semantic version (e.g., `"2.0.0"`) | Version of the Stacker control plane. Used for compatibility checks. | -| `supported_api_versions` | `array[string]` | Array of semantic versions (e.g., `["1.0"]`) | API versions supported by this server. Agent should use one of these versions for requests. | - -### Error Responses - -#### HTTP 400 Bad Request -Sent when: -- Required fields are missing -- Invalid JSON structure -- `deployment_hash` format is incorrect - -```json -{ - "data": {}, - "status": 400, - "message": "Invalid JSON: missing field 'deployment_hash'" -} -``` - -#### HTTP 409 Conflict -Sent when: -- Agent is already registered for this deployment hash - -```json -{ - "data": {}, - "status": 409, - "message": "Agent already registered for this deployment" -} -``` - -#### HTTP 500 Internal Server Error -Sent when: -- Database error occurs -- Vault token storage fails (graceful degradation) - -```json -{ - "data": {}, - "status": 500, - "message": "Internal Server Error" -} -``` - ---- - -## Implementation Guide - -### Step 1: Prepare Agent Information - -Gather system details (optional but recommended). All fields in `system_info` are optional. - -```python -import platform -import json -import os -import docker -import subprocess - -def get_system_info(): - """ - Gather deployment system information. - - Note: All fields are optional. Return minimal info if not available. - Server accepts empty dict: {} - """ - info = {} - - # Basic system info (most reliable) - info["os"] = platform.system().lower() # "linux", "windows", "darwin" - info["arch"] = platform.machine() # "x86_64", "arm64", etc. - info["hostname"] = platform.node() - - # Memory (can fail on some systems) - try: - memory_bytes = os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') - info["memory_gb"] = round(memory_bytes / (1024**3), 2) - except (AttributeError, ValueError): - pass # Skip if not available - - # Docker info (optional) - try: - client = docker.from_env(timeout=5) - docker_version = client.version()['Version'] - info["docker_version"] = docker_version - except Exception: - pass # Docker not available or not running - - # Docker Compose info (optional) - try: - result = subprocess.run( - ['docker-compose', '--version'], - capture_output=True, - text=True, - timeout=5 - ) - if result.returncode == 0: - # Parse "Docker Compose version 2.20.0" - version = result.stdout.split()[-1] - info["docker_compose_version"] = version - except (FileNotFoundError, subprocess.TimeoutExpired): - pass # Docker Compose not available - - return info - -def get_agent_capabilities(): - """Determine agent capabilities based on installed tools""" - capabilities = ["docker", "compose", "logs"] - - # Check for additional tools - if shutil.which("rsync"): - capabilities.append("backup") - if shutil.which("curl"): - capabilities.append("monitoring") - - return capabilities -``` - -### Step 2: Generate Deployment Hash - -The deployment hash should be **stable and unique** for each deployment: - -```python -import hashlib -import json -import os - -def generate_deployment_hash(): - """ - Create a stable hash from deployment configuration. - This should remain consistent across restarts. - """ - # Option 1: Hash from stack configuration file - config_hash = hashlib.sha256( - open('/opt/stacker/docker-compose.yml').read().encode() - ).hexdigest()[:16] - - # Option 2: From environment variable (set at deploy time) - env_hash = os.environ.get('DEPLOYMENT_HASH') - - # Option 3: From hostname + date (resets on redeploy) - from datetime import datetime - date_hash = hashlib.sha256( - f"{platform.node()}-{datetime.now().date()}".encode() - ).hexdigest()[:16] - - return env_hash or config_hash or date_hash -``` - -### Step 3: Perform Registration Request - -```python -import requests -import json -from typing import Dict, Tuple - -class AgentRegistrationClient: - def __init__(self, server_url: str = "http://localhost:8000"): - self.server_url = server_url - self.agent_token = None - self.agent_id = None - - def register(self, - deployment_hash: str, - agent_version: str = "1.0.0", - capabilities: list = None, - system_info: dict = None, - public_key: str = None) -> Tuple[bool, Dict]: - """ - Register agent with Stacker server. - - Args: - deployment_hash (str): Unique deployment identifier. Required, non-empty, max 255 chars. - agent_version (str): Semantic version (e.g., "1.0.0"). Default: "1.0.0" - capabilities (list[str]): Non-empty list of capability strings. Required. - Default: ["docker", "compose", "logs"] - system_info (dict): JSON object with system details. All fields optional. - Default: {} (empty object) - public_key (str): PEM-encoded RSA public key (optional, reserved for future use). - - Returns: - Tuple of (success: bool, response: dict) - - Raises: - ValueError: If deployment_hash or capabilities are empty/invalid - """ - # Validate required fields - if not deployment_hash or not deployment_hash.strip(): - raise ValueError("deployment_hash cannot be empty") - - if not capabilities or len(capabilities) == 0: - capabilities = ["docker", "compose", "logs"] - - if system_info is None: - system_info = get_system_info() # Returns dict (possibly empty) - - payload = { - "deployment_hash": deployment_hash.strip(), - "agent_version": agent_version, - "capabilities": capabilities, - "system_info": system_info - } - - # Add optional public_key if provided - if public_key: - payload["public_key"] = public_key - - try: - response = requests.post( - f"{self.server_url}/api/v1/agent/register", - json=payload, - timeout=10 - ) - - if response.status_code == 201: - data = response.json() - self.agent_token = data['data']['item']['agent_token'] - self.agent_id = data['data']['item']['agent_id'] - return True, data - else: - return False, response.json() - - except requests.RequestException as e: - return False, {"error": str(e)} - - def is_registered(self) -> bool: - """Check if agent has valid token""" - return self.agent_token is not None -``` - -### Step 4: Store and Use Agent Token - -After successful registration, store the token securely: - -```python -import os -from pathlib import Path - -def store_agent_credentials(agent_id: str, agent_token: str): - """ - Store agent credentials for future requests. - Use restricted file permissions (0600). - """ - creds_dir = Path('/var/lib/stacker') - creds_dir.mkdir(mode=0o700, parents=True, exist_ok=True) - - creds_file = creds_dir / 'agent.json' - - credentials = { - "agent_id": agent_id, - "agent_token": agent_token - } - - with open(creds_file, 'w') as f: - json.dump(credentials, f) - - # Restrict permissions - os.chmod(creds_file, 0o600) - -def load_agent_credentials(): - """Load previously stored credentials""" - creds_file = Path('/var/lib/stacker/agent.json') - - if creds_file.exists(): - with open(creds_file, 'r') as f: - return json.load(f) - return None - -# In subsequent requests to Stacker API: -creds = load_agent_credentials() -if creds: - headers = { - "Authorization": f"Bearer {creds['agent_token']}", - "Content-Type": "application/json" - } - response = requests.get( - "http://localhost:8000/api/v1/commands", - headers=headers - ) -``` - ---- - -## Signature & Authentication Details - -### Registration Endpoint Security - -- `POST /api/v1/agent/register` remains public (no signature, no bearer) as implemented. -- Response includes `agent_id` and `agent_token` to be used for subsequent authenticated flows. - -### Stacker → Agent POST Signing (Required) - -- All POST requests from Stacker to the agent MUST be HMAC signed per [STACKER_INTEGRATION_REQUIREMENTS.md](STACKER_INTEGRATION_REQUIREMENTS.md). -- Required headers: `X-Agent-Id`, `X-Timestamp`, `X-Request-Id`, `X-Agent-Signature`. -- Signature: `Base64( HMAC_SHA256(AGENT_TOKEN, raw_request_body) )`. -- Use the helper `helpers::AgentClient` to generate headers and send requests. - ---- - -## Capabilities Reference - -The `capabilities` array (required, non-empty) indicates which Status Panel features the agent supports. - -**Capability values:** Lowercase alphanumeric + underscore, 1-32 characters. Examples: - -| Capability | Type | Description | Commands routed | -|------------|------|-------------|------------------| -| `docker` | Core | Docker engine interaction (info, inspect, stats) | `docker_stats`, `docker_info`, `docker_ps` | -| `compose` | Core | Docker Compose operations (up, down, logs) | `compose_up`, `compose_down`, `compose_restart` | -| `logs` | Core | Log streaming and retrieval | `tail_logs`, `stream_logs`, `grep_logs` | -| `monitoring` | Feature | Health checks and metrics collection | `health_check`, `collect_metrics`, `cpu_usage` | -| `backup` | Feature | Backup/snapshot operations | `backup_volume`, `snapshot_create`, `restore` | -| `updates` | Feature | Agent or service updates | `update_agent`, `update_service` | -| `networking` | Feature | Network diagnostics | `ping_host`, `traceroute`, `netstat` | -| `shell` | Feature | Remote shell/command execution | `execute_command`, `run_script` | -| `file_ops` | Feature | File operations (read, write, delete) | `read_file`, `write_file`, `delete_file` | - -**Rules:** -- `deployment_hash` must declare at least one capability (array cannot be empty) -- Declare **only** capabilities actually implemented by your agent -- Server uses capabilities for command routing and authorization -- Unknown capabilities are stored but generate warnings in logs - -**Examples:** -```json -"capabilities": ["docker"] // Minimal -"capabilities": ["docker", "compose", "logs"] // Standard -"capabilities": ["docker", "compose", "logs", "monitoring", "backup"] // Full-featured -``` - ---- - -## Security Considerations - -### ⚠️ Current Security Gap - -**Issue:** Agent registration endpoint is currently public (no authentication required). - -**Implications:** -- Any client can register agents under any deployment hash -- Potential for registration spam or hijacking - -**Mitigation (Planned):** -- Add user authentication requirement to `/api/v1/agent/register` -- Verify user owns the deployment before accepting registration -- Implement rate limiting per deployment - -**Workaround (Current):** -- Restrict network access to Stacker server (firewall rules) -- Use deployment hashes that are difficult to guess -- Monitor audit logs for suspicious registrations - -### Best Practices - -1. **Token Storage** - - Store agent tokens in secure locations (not in git, config files, or environment variables) - - Use file permissions (mode 0600) when storing to disk - - Consider using secrets management systems (Vault, HashiCorp Consul) - -2. **HTTPS in Production** - - Always use HTTPS when registering agents - - Verify server certificate validity - - Never trust self-signed certificates without explicit validation - -3. **Deployment Hash** - - Use values derived from deployed configuration (not sequential/predictable) - - Include stack version/hash in the deployment identifier - - Avoid generic values like "default", "production", "main" - -4. **Capability Declaration** - - Be conservative: only declare capabilities actually implemented - - Remove capabilities not in use (reduces attack surface) - ---- - -## Troubleshooting - -### Agent Registration Fails with "Already Registered" - -**Symptom:** HTTP 409 Conflict after first registration - -**Cause:** Agent with same `deployment_hash` already exists in database - -**Solutions:** -- Use unique deployment hash: `deployment_hash = "stack-v1.2.3-${UNIQUE_ID}"` -- Clear database and restart (dev only): `make clean-db` -- Check database for duplicates: - ```sql - SELECT id, deployment_hash FROM agent WHERE deployment_hash = 'YOUR_HASH'; - ``` - -### Vault Token Storage Warning - -**Symptom:** Logs show `"Failed to store token in Vault (continuing anyway)"` - -**Cause:** Vault service is unreachable (development environment) - -**Impact:** Agent tokens fall back to bearer tokens instead of Vault storage - -**Fix:** -- Ensure Vault is running: `docker-compose logs vault` -- Check Vault connectivity in config: `curl http://localhost:8200/v1/sys/health` -- For production, ensure Vault address is correctly configured in `.env` - -### Agent Token Expired - -**Symptom:** Subsequent API calls return 401 Unauthorized - -**Cause:** JWT token has expired (default TTL: varies by configuration) - -**Fix:** -- Re-register the agent: `POST /api/v1/agent/register` with same `deployment_hash` -- Store the new token and use for subsequent requests -- Implement token refresh logic in agent client - ---- - -## Example Implementations - -### Python Client Library - -```python -class StacherAgentClient: - """Production-ready agent registration client""" - - def __init__(self, server_url: str, deployment_hash: str): - self.server_url = server_url.rstrip('/') - self.deployment_hash = deployment_hash - self.agent_token = None - self._load_cached_token() - - def _load_cached_token(self): - """Attempt to load token from disk""" - try: - creds = load_agent_credentials() - if creds: - self.agent_token = creds.get('agent_token') - except Exception as e: - print(f"Failed to load cached token: {e}") - - def register_or_reuse(self, agent_version="1.0.0"): - """Register new agent or reuse existing token""" - - # If we have a cached token, assume we're already registered - if self.agent_token: - return self.agent_token - - # Otherwise, register - success, response = self.register(agent_version) - - if not success: - raise RuntimeError(f"Registration failed: {response}") - - return self.agent_token - - def request(self, method: str, path: str, **kwargs): - """Make authenticated request to Stacker API""" - - if not self.agent_token: - raise RuntimeError("Agent not registered. Call register() first.") - - headers = kwargs.pop('headers', {}) - headers['Authorization'] = f'Bearer {self.agent_token}' - - url = f"{self.server_url}{path}" - - response = requests.request(method, url, headers=headers, **kwargs) - - if response.status_code == 401: - # Token expired, re-register - self.register() - headers['Authorization'] = f'Bearer {self.agent_token}' - response = requests.request(method, url, headers=headers, **kwargs) - - return response - -# Usage -client = StacherAgentClient( - server_url="https://stacker.example.com", - deployment_hash=generate_deployment_hash() -) - -# Register or reuse token -token = client.register_or_reuse(agent_version="1.0.0") - -# Use for subsequent requests -response = client.request('GET', '/api/v1/commands') -``` - -### Rust Client - -```rust -use reqwest::Client; -use serde::{Deserialize, Serialize}; - -#[derive(Serialize)] -struct RegisterRequest { - deployment_hash: String, - agent_version: String, - capabilities: Vec, - system_info: serde_json::Value, -} - -#[derive(Deserialize)] -struct RegisterResponse { - data: ResponseData, -} - -#[derive(Deserialize)] -struct ResponseData { - item: AgentCredentials, -} - -#[derive(Deserialize)] -struct AgentCredentials { - agent_id: String, - agent_token: String, - dashboard_version: String, - supported_api_versions: Vec, -} - -pub struct AgentClient { - http_client: Client, - server_url: String, - agent_token: Option, -} - -impl AgentClient { - pub async fn register( - &mut self, - deployment_hash: String, - agent_version: String, - capabilities: Vec, - ) -> Result> { - - let system_info = get_system_info(); - - let request = RegisterRequest { - deployment_hash, - agent_version, - capabilities, - system_info, - }; - - let response = self.http_client - .post(&format!("{}/api/v1/agent/register", self.server_url)) - .json(&request) - .send() - .await? - .json::() - .await?; - - self.agent_token = Some(response.data.item.agent_token.clone()); - - Ok(response.data.item) - } -} -``` - ---- - -## Testing - -### Manual Test with curl - -**Test 1: Minimal registration (empty system_info)** -```bash -DEPLOYMENT_HASH=$(uuidgen | tr '[:upper:]' '[:lower:]') - -curl -X POST http://localhost:8000/api/v1/agent/register \ - -H "Content-Type: application/json" \ - -d "{ - \"deployment_hash\": \"$DEPLOYMENT_HASH\", - \"agent_version\": \"1.0.0\", - \"capabilities\": [\"docker\"], - \"system_info\": {} - }" | jq '.' -``` - -**Test 2: Full registration (with system info)** -```bash -DEPLOYMENT_HASH=$(uuidgen | tr '[:upper:]' '[:lower:]') - -curl -X POST http://localhost:8000/api/v1/agent/register \ - -H "Content-Type: application/json" \ - -d "{ - \"deployment_hash\": \"$DEPLOYMENT_HASH\", - \"agent_version\": \"1.0.0\", - \"capabilities\": [\"docker\", \"compose\", \"logs\"], - \"system_info\": { - \"os\": \"linux\", - \"arch\": \"x86_64\", - \"memory_gb\": 16, - \"hostname\": \"deploy-server-01\", - \"docker_version\": \"24.0.0\", - \"docker_compose_version\": \"2.20.0\" - } - }" | jq '.' -``` - -**Test 3: Registration with public_key (future feature)** -```bash -DEPLOYMENT_HASH=$(uuidgen | tr '[:upper:]' '[:lower:]') -PUBLIC_KEY=$(cat /path/to/public_key.pem | jq -Rs .) - -curl -X POST http://localhost:8000/api/v1/agent/register \ - -H "Content-Type: application/json" \ - -d "{ - \"deployment_hash\": \"$DEPLOYMENT_HASH\", - \"agent_version\": \"1.0.0\", - \"capabilities\": [\"docker\", \"compose\"], - \"system_info\": {}, - \"public_key\": $PUBLIC_KEY - }" | jq '.' -``` - -### Integration Test - -See [tests/agent_command_flow.rs](tests/agent_command_flow.rs) for full test example. - ---- - -## Related Documentation - -- [Architecture Overview](README.md#architecture) -- [Authentication Methods](src/middleware/authentication/README.md) -- [Vault Integration](src/helpers/vault.rs) -- [Agent Models](src/models/agent.rs) -- [Agent Database Queries](src/db/agent.rs) - ---- - -## Feedback & Questions - -For issues or clarifications about this specification, see: -- TODO items: [TODO.md](TODO.md#agent-registration--security) -- Architecture guide: [Copilot Instructions](.github/copilot-instructions.md) diff --git a/src/configuration.rs b/src/configuration.rs index 8bc3d06..d26f7a0 100644 --- a/src/configuration.rs +++ b/src/configuration.rs @@ -82,12 +82,13 @@ pub fn get_configuration() -> Result { // Load environment variables from .env file dotenvy::dotenv().ok(); - // Initialize our configuration reader - let mut settings = config::Config::default(); - - // Add configuration values from a file named `configuration` - // with the .yaml extension - settings.merge(config::File::with_name("configuration"))?; // .json, .toml, .yaml, .yml + // Prefer real config, fall back to dist samples so tests do not fail when config is missing + let settings = config::Config::builder() + .add_source(config::File::with_name("configuration.yaml").required(false)) + .add_source(config::File::with_name("configuration").required(false)) + .add_source(config::File::with_name("configuration.yaml.dist").required(false)) + .add_source(config::File::with_name("configuration.dist").required(false)) + .build()?; // Try to convert the configuration values it read into our Settings type let mut config: Settings = settings.try_deserialize()?; From 06416fc5649b5a65107324de0a9dbd03fe6fa84a Mon Sep 17 00:00:00 2001 From: vsilent Date: Fri, 26 Dec 2025 13:02:32 +0200 Subject: [PATCH 14/35] config sources for tests --- src/configuration.rs | 14 +++++++++----- src/console/main.rs | 2 ++ 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/src/configuration.rs b/src/configuration.rs index d26f7a0..4fdda4b 100644 --- a/src/configuration.rs +++ b/src/configuration.rs @@ -82,12 +82,16 @@ pub fn get_configuration() -> Result { // Load environment variables from .env file dotenvy::dotenv().ok(); - // Prefer real config, fall back to dist samples so tests do not fail when config is missing + // Prefer real config, fall back to dist sample, require at least one to exist let settings = config::Config::builder() - .add_source(config::File::with_name("configuration.yaml").required(false)) - .add_source(config::File::with_name("configuration").required(false)) - .add_source(config::File::with_name("configuration.yaml.dist").required(false)) - .add_source(config::File::with_name("configuration.dist").required(false)) + .add_source( + config::File::with_name("configuration.yaml") + .required(false) + ) + .add_source( + config::File::with_name("configuration.yaml.dist") + .required(false) + ) .build()?; // Try to convert the configuration values it read into our Settings type diff --git a/src/console/main.rs b/src/console/main.rs index 1181a1d..e157fb0 100644 --- a/src/console/main.rs +++ b/src/console/main.rs @@ -35,6 +35,8 @@ enum AgentCommands { new_token: String, }, } + +#[derive(Debug, Subcommand)] enum AppClientCommands { New { #[arg(long)] From d8abbe5a88e622eb81425ae899daa0f50508bba1 Mon Sep 17 00:00:00 2001 From: vsilent Date: Fri, 26 Dec 2025 15:22:01 +0200 Subject: [PATCH 15/35] access_control.conf in Dockerfile --- Dockerfile | 2 +- src/configuration.rs | 18 +++++++++--------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/Dockerfile b/Dockerfile index ab94018..6a8c4cc 100644 --- a/Dockerfile +++ b/Dockerfile @@ -51,7 +51,7 @@ COPY --from=builder /app/target/release/server . COPY --from=builder /app/.env . COPY --from=builder /app/configuration.yaml . COPY --from=builder /usr/local/cargo/bin/sqlx sqlx -COPY ./access_control.conf.dist /app +COPY ./access_control.conf.dist ./access_control.conf EXPOSE 8000 diff --git a/src/configuration.rs b/src/configuration.rs index 4fdda4b..865b103 100644 --- a/src/configuration.rs +++ b/src/configuration.rs @@ -82,16 +82,16 @@ pub fn get_configuration() -> Result { // Load environment variables from .env file dotenvy::dotenv().ok(); - // Prefer real config, fall back to dist sample, require at least one to exist + // Prefer real config, fall back to dist samples; layer multiple formats let settings = config::Config::builder() - .add_source( - config::File::with_name("configuration.yaml") - .required(false) - ) - .add_source( - config::File::with_name("configuration.yaml.dist") - .required(false) - ) + // Primary local config + .add_source(config::File::with_name("configuration.yaml").required(false)) + .add_source(config::File::with_name("configuration.yml").required(false)) + .add_source(config::File::with_name("configuration").required(false)) + // Fallback samples + .add_source(config::File::with_name("configuration.yaml.dist").required(false)) + .add_source(config::File::with_name("configuration.yml.dist").required(false)) + .add_source(config::File::with_name("configuration.dist").required(false)) .build()?; // Try to convert the configuration values it read into our Settings type From 56e2dd884924453048dbac8558e1c80ac070f478 Mon Sep 17 00:00:00 2001 From: vsilent Date: Fri, 26 Dec 2025 15:31:13 +0200 Subject: [PATCH 16/35] Added Default implementations for all configuration structs in configuration.rs --- src/configuration.rs | 60 +++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 56 insertions(+), 4 deletions(-) diff --git a/src/configuration.rs b/src/configuration.rs index 865b103..e536b3e 100644 --- a/src/configuration.rs +++ b/src/configuration.rs @@ -11,7 +11,21 @@ pub struct Settings { pub vault: VaultSettings, } -#[derive(Debug, serde::Deserialize)] +impl Default for Settings { + fn default() -> Self { + Self { + database: DatabaseSettings::default(), + app_port: 8000, + app_host: "127.0.0.1".to_string(), + auth_url: "http://localhost:8080/me".to_string(), + max_clients_number: 10, + amqp: AmqpSettings::default(), + vault: VaultSettings::default(), + } + } +} + +#[derive(Debug, serde::Deserialize, Clone)] pub struct DatabaseSettings { pub username: String, pub password: String, @@ -20,7 +34,19 @@ pub struct DatabaseSettings { pub database_name: String, } -#[derive(Debug, serde::Deserialize)] +impl Default for DatabaseSettings { + fn default() -> Self { + Self { + username: "postgres".to_string(), + password: "postgres".to_string(), + host: "127.0.0.1".to_string(), + port: 5432, + database_name: "stacker".to_string(), + } + } +} + +#[derive(Debug, serde::Deserialize, Clone)] pub struct AmqpSettings { pub username: String, pub password: String, @@ -28,13 +54,34 @@ pub struct AmqpSettings { pub port: u16, } -#[derive(Debug, serde::Deserialize)] +impl Default for AmqpSettings { + fn default() -> Self { + Self { + username: "guest".to_string(), + password: "guest".to_string(), + host: "127.0.0.1".to_string(), + port: 5672, + } + } +} + +#[derive(Debug, serde::Deserialize, Clone)] pub struct VaultSettings { pub address: String, pub token: String, pub agent_path_prefix: String, } +impl Default for VaultSettings { + fn default() -> Self { + Self { + address: "http://127.0.0.1:8200".to_string(), + token: "dev-token".to_string(), + agent_path_prefix: "agent".to_string(), + } + } +} + impl VaultSettings { /// Overlay Vault settings from environment variables, if present. /// If an env var is missing, keep the existing file-provided value. @@ -82,6 +129,9 @@ pub fn get_configuration() -> Result { // Load environment variables from .env file dotenvy::dotenv().ok(); + // Start with defaults + let mut config = Settings::default(); + // Prefer real config, fall back to dist samples; layer multiple formats let settings = config::Config::builder() // Primary local config @@ -95,7 +145,9 @@ pub fn get_configuration() -> Result { .build()?; // Try to convert the configuration values it read into our Settings type - let mut config: Settings = settings.try_deserialize()?; + if let Ok(loaded) = settings.try_deserialize::() { + config = loaded; + } // Overlay Vault settings with environment variables if present config.vault = config.vault.overlay_env(); From da02b099a86db68b73a9975951815fe2f50de22f Mon Sep 17 00:00:00 2001 From: vsilent Date: Fri, 26 Dec 2025 15:36:52 +0200 Subject: [PATCH 17/35] test required db running --- .github/workflows/docker.yml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 2942628..f4849ba 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -16,6 +16,20 @@ jobs: runs-on: ubuntu-latest env: SQLX_OFFLINE: true + services: + postgres: + image: postgres:16 + env: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 steps: - name: Checkout sources uses: actions/checkout@v4 From 1aac3186d7c5aeb028f6ad755b00eefa158093ee Mon Sep 17 00:00:00 2001 From: vsilent Date: Sat, 27 Dec 2025 14:35:38 +0200 Subject: [PATCH 18/35] migration fix, check if table casbin_rule table is created --- .github/workflows/docker.yml | 14 ------ Dockerfile | 4 +- docker-compose.yml | 47 ++++++++++++-------- docker/dev/.env | 4 ++ docker/local/.env | 2 +- docker/local/configuration.yaml | 2 +- migrations/20240128174529_casbin_rule.up.sql | 2 +- 7 files changed, 37 insertions(+), 38 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index f4849ba..2942628 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -16,20 +16,6 @@ jobs: runs-on: ubuntu-latest env: SQLX_OFFLINE: true - services: - postgres: - image: postgres:16 - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 5432:5432 steps: - name: Checkout sources uses: actions/checkout@v4 diff --git a/Dockerfile b/Dockerfile index 6a8c4cc..c325f65 100644 --- a/Dockerfile +++ b/Dockerfile @@ -15,7 +15,7 @@ COPY ./rustfmt.toml . COPY ./Makefile . COPY ./docker/local/.env . COPY ./docker/local/configuration.yaml . -COPY .sqlx . +COPY .sqlx .sqlx/ # build this project to cache dependencies #RUN sqlx database create && sqlx migrate run @@ -50,7 +50,7 @@ RUN mkdir ./files && chmod 0777 ./files COPY --from=builder /app/target/release/server . COPY --from=builder /app/.env . COPY --from=builder /app/configuration.yaml . -COPY --from=builder /usr/local/cargo/bin/sqlx sqlx +COPY --from=builder /usr/local/cargo/bin/sqlx /usr/local/bin/sqlx COPY ./access_control.conf.dist ./access_control.conf EXPOSE 8000 diff --git a/docker-compose.yml b/docker-compose.yml index 66b2c45..af4ec60 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -7,6 +7,9 @@ volumes: redis-data: driver: local +networks: + stacker-network: + driver: bridge services: @@ -15,6 +18,8 @@ services: build: . container_name: stacker restart: always + networks: + - stacker-network volumes: - ./files:/app/files - ./docker/local/configuration.yaml:/app/configuration.yaml @@ -28,14 +33,16 @@ services: environment: - RUST_LOG=debug - RUST_BACKTRACE=1 -# depends_on: -# stackerdb: -# condition: service_healthy + depends_on: + stackerdb: + condition: service_healthy redis: container_name: redis image: redis restart: always + networks: + - stacker-network ports: - 6379:6379 volumes: @@ -68,19 +75,21 @@ services: # condition: service_healthy # entrypoint: /app/console mq listen -# stackerdb: -# container_name: stackerdb -# healthcheck: -# test: ["CMD-SHELL", "pg_isready -U postgres"] -# interval: 10s -# timeout: 5s -# retries: 5 -# image: postgres:16.0 -# restart: always -# ports: -# - 5432:5432 -# env_file: -# - ./docker/local/.env -# volumes: -# - stackerdb:/var/lib/postgresql/data -# - ./docker/local/postgresql.conf:/etc/postgresql/postgresql.conf \ No newline at end of file + stackerdb: + container_name: stackerdb + networks: + - stacker-network + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 10s + timeout: 5s + retries: 5 + image: postgres:16.0 + restart: always + ports: + - 5432:5432 + env_file: + - ./docker/local/.env + volumes: + - stackerdb:/var/lib/postgresql/data + - ./docker/local/postgresql.conf:/etc/postgresql/postgresql.conf \ No newline at end of file diff --git a/docker/dev/.env b/docker/dev/.env index d60f266..a397928 100644 --- a/docker/dev/.env +++ b/docker/dev/.env @@ -6,3 +6,7 @@ POSTGRES_PASSWORD=postgres POSTGRES_DB=stacker POSTGRES_PORT=5432 +# Vault Configuration +VAULT_ADDRESS=http://127.0.0.1:8200 +VAULT_TOKEN=your_vault_token_here +VAULT_AGENT_PATH_PREFIX=agent \ No newline at end of file diff --git a/docker/local/.env b/docker/local/.env index 247a3fd..6371a97 100644 --- a/docker/local/.env +++ b/docker/local/.env @@ -1,4 +1,4 @@ -DATABASE_URL=postgres://postgres:postgres@172.17.0.2:5432/stacker +DATABASE_URL=postgres://postgres:postgres@stackerdb:5432/stacker POSTGRES_USER=postgres POSTGRES_PASSWORD=postgres POSTGRES_DB=stacker diff --git a/docker/local/configuration.yaml b/docker/local/configuration.yaml index 750f1cb..141a67e 100644 --- a/docker/local/configuration.yaml +++ b/docker/local/configuration.yaml @@ -4,7 +4,7 @@ auth_url: https://dev.try.direct/server/user/oauth_server/api/me max_clients_number: 2 database: - host: 172.17.0.2 + host: stackerdb port: 5432 username: postgres password: postgres diff --git a/migrations/20240128174529_casbin_rule.up.sql b/migrations/20240128174529_casbin_rule.up.sql index 15b9914..ef9ddec 100644 --- a/migrations/20240128174529_casbin_rule.up.sql +++ b/migrations/20240128174529_casbin_rule.up.sql @@ -1,5 +1,5 @@ -- Add up migration script here -CREATE TABLE casbin_rule ( +CREATE TABLE IF NOT EXISTS casbin_rule ( id SERIAL PRIMARY KEY, ptype VARCHAR NOT NULL, v0 VARCHAR NOT NULL, From 2cb55b205e472fcb8c7c5fb2cc9cf79cd5f9b0af Mon Sep 17 00:00:00 2001 From: vsilent Date: Sat, 27 Dec 2025 15:21:14 +0200 Subject: [PATCH 19/35] admin access project endpoint --- .../20251227132000_add_group_admin_project_get_rule.down.sql | 3 +++ .../20251227132000_add_group_admin_project_get_rule.up.sql | 4 ++++ 2 files changed, 7 insertions(+) create mode 100644 migrations/20251227132000_add_group_admin_project_get_rule.down.sql create mode 100644 migrations/20251227132000_add_group_admin_project_get_rule.up.sql diff --git a/migrations/20251227132000_add_group_admin_project_get_rule.down.sql b/migrations/20251227132000_add_group_admin_project_get_rule.down.sql new file mode 100644 index 0000000..d737da4 --- /dev/null +++ b/migrations/20251227132000_add_group_admin_project_get_rule.down.sql @@ -0,0 +1,3 @@ +-- Rollback: remove the group_admin GET /project rule +DELETE FROM public.casbin_rule +WHERE ptype = 'p' AND v0 = 'group_admin' AND v1 = '/project' AND v2 = 'GET' AND v3 = '' AND v4 = '' AND v5 = ''; diff --git a/migrations/20251227132000_add_group_admin_project_get_rule.up.sql b/migrations/20251227132000_add_group_admin_project_get_rule.up.sql new file mode 100644 index 0000000..8a9e2d3 --- /dev/null +++ b/migrations/20251227132000_add_group_admin_project_get_rule.up.sql @@ -0,0 +1,4 @@ +-- Ensure group_admin can GET /project +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) +VALUES ('p', 'group_admin', '/project', 'GET', '', '', '') +ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING; From a4ada147492eb87218f014fd5a91387534b1f1b2 Mon Sep 17 00:00:00 2001 From: vsilent Date: Sat, 27 Dec 2025 16:57:39 +0200 Subject: [PATCH 20/35] feat: Implement MCP server foundation - Add MCP protocol types with JSON-RPC 2.0 support - Implement WebSocket handler with heartbeat mechanism - Create tool registry with pluggable handler architecture - Add session management for conversation context - Register /mcp WebSocket endpoint with OAuth auth - Add Casbin rules for group_user and group_admin access - Include comprehensive unit tests for protocol layer Components: - src/mcp/protocol.rs: JSON-RPC 2.0 + MCP types - src/mcp/websocket.rs: Actix WebSocket actor - src/mcp/registry.rs: Tool handler infrastructure - src/mcp/session.rs: Session state management - migrations/20251227140000: Casbin authorization rules Dependencies: - actix 0.13.5 (WebSocket actor framework) - actix-web-actors 4.3.1 (Actix-web WS integration) - async-trait 0.1.77 (Tool handler trait) Supports: - initialize, tools/list, tools/call methods - OAuth bearer token authentication - Casbin role-based authorization - Structured logging with tracing - Graceful connection handling --- Cargo.toml | 3 + docs/MCP_PHASE1_SUMMARY.md | 253 +++ docs/MCP_SERVER_BACKEND_PLAN.md | 1215 +++++++++++++++ docs/MCP_SERVER_FRONTEND_INTEGRATION.md | 1355 +++++++++++++++++ ...0251227140000_casbin_mcp_endpoint.down.sql | 7 + .../20251227140000_casbin_mcp_endpoint.up.sql | 8 + src/lib.rs | 1 + src/mcp/mod.rs | 11 + src/mcp/protocol.rs | 226 +++ src/mcp/protocol_tests.rs | 147 ++ src/mcp/registry.rs | 80 + src/mcp/session.rs | 53 + src/mcp/websocket.rs | 317 ++++ src/startup.rs | 11 + 14 files changed, 3687 insertions(+) create mode 100644 docs/MCP_PHASE1_SUMMARY.md create mode 100644 docs/MCP_SERVER_BACKEND_PLAN.md create mode 100644 docs/MCP_SERVER_FRONTEND_INTEGRATION.md create mode 100644 migrations/20251227140000_casbin_mcp_endpoint.down.sql create mode 100644 migrations/20251227140000_casbin_mcp_endpoint.up.sql create mode 100644 src/mcp/mod.rs create mode 100644 src/mcp/protocol.rs create mode 100644 src/mcp/protocol_tests.rs create mode 100644 src/mcp/registry.rs create mode 100644 src/mcp/session.rs create mode 100644 src/mcp/websocket.rs diff --git a/Cargo.toml b/Cargo.toml index f901e7a..d19a096 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,6 +20,8 @@ required-features = ["explain"] [dependencies] actix-web = "4.3.1" +actix = "0.13.5" +actix-web-actors = "4.3.1" chrono = { version = "0.4.39", features = ["serde", "clock"] } config = "0.13.4" reqwest = { version = "0.11.23", features = ["json", "blocking"] } @@ -33,6 +35,7 @@ uuid = { version = "1.3.4", features = ["v4", "serde"] } thiserror = "1.0" serde_valid = "0.18.0" serde_json = { version = "1.0.111", features = [] } +async-trait = "0.1.77" serde_derive = "1.0.195" actix-cors = "0.6.4" tracing-actix-web = "0.7.7" diff --git a/docs/MCP_PHASE1_SUMMARY.md b/docs/MCP_PHASE1_SUMMARY.md new file mode 100644 index 0000000..d0f1042 --- /dev/null +++ b/docs/MCP_PHASE1_SUMMARY.md @@ -0,0 +1,253 @@ +# MCP Server Implementation - Phase 1 Complete ✅ + +## What Was Implemented + +### Core Protocol Support (`src/mcp/protocol.rs`) +- ✅ JSON-RPC 2.0 request/response structures +- ✅ MCP-specific types (Tool, ToolContent, InitializeParams, etc.) +- ✅ Error handling with standard JSON-RPC error codes +- ✅ Full type safety with Serde serialization + +### WebSocket Handler (`src/mcp/websocket.rs`) +- ✅ Actix WebSocket actor for persistent connections +- ✅ Heartbeat mechanism (5s interval, 10s timeout) +- ✅ JSON-RPC message routing +- ✅ Three core methods implemented: + - `initialize` - Client handshake + - `tools/list` - List available tools + - `tools/call` - Execute tools +- ✅ OAuth authentication integration (via middleware) +- ✅ Structured logging with tracing + +### Tool Registry (`src/mcp/registry.rs`) +- ✅ Pluggable tool handler architecture +- ✅ `ToolHandler` trait for async tool execution +- ✅ `ToolContext` with user, database pool, settings +- ✅ Dynamic tool registration system +- ✅ Tool schema validation support + +### Session Management (`src/mcp/session.rs`) +- ✅ Per-connection session state +- ✅ Context storage (for multi-turn conversations) +- ✅ Initialization tracking +- ✅ UUID-based session IDs + +### Integration +- ✅ Route registered: `GET /mcp` (WebSocket upgrade) +- ✅ Authentication: OAuth bearer token required +- ✅ Authorization: Casbin rules added for `group_user` and `group_admin` +- ✅ Migration: `20251227140000_casbin_mcp_endpoint.up.sql` + +### Dependencies Added +```toml +actix = "0.13.5" +actix-web-actors = "4.3.1" +async-trait = "0.1.77" +``` + +## Architecture + +``` +┌─────────────────────────────────────────────────────┐ +│ HTTP Request: GET /mcp │ +│ Headers: Authorization: Bearer │ +└──────────────────┬──────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────┐ +│ Authentication Middleware │ +│ - OAuth token validation │ +│ - User object from TryDirect service │ +└──────────────────┬──────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────┐ +│ Authorization Middleware (Casbin) │ +│ - Check: user.role → group_user/group_admin │ +│ - Rule: p, group_user, /mcp, GET │ +└──────────────────┬──────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────┐ +│ mcp_websocket Handler │ +│ - Upgrade HTTP → WebSocket │ +│ - Create McpWebSocket actor │ +└──────────────────┬──────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────┐ +│ McpWebSocket Actor (persistent connection) │ +│ │ +│ JSON-RPC Message Loop: │ +│ 1. Receive text message │ +│ 2. Parse JsonRpcRequest │ +│ 3. Route to method handler: │ +│ - initialize → return server capabilities │ +│ - tools/list → return tool schemas │ +│ - tools/call → execute tool via registry │ +│ 4. Send JsonRpcResponse │ +│ │ +│ Heartbeat: Ping every 5s, timeout after 10s │ +└─────────────────────────────────────────────────────┘ +``` + +## Testing Status + +### Unit Tests +- ✅ JSON-RPC protocol serialization/deserialization +- ✅ Error code generation +- ✅ Tool schema structures +- ✅ Initialize handshake +- ⏳ WebSocket integration tests (requires database) + +### Manual Testing +To test the WebSocket connection: + +```bash +# 1. Start the server +make dev + +# 2. Connect with wscat (install: npm install -g wscat) +wscat -c "ws://localhost:8000/mcp" -H "Authorization: Bearer " + +# 3. Send initialize request +{"jsonrpc":"2.0","id":1,"method":"initialize","params":{"protocolVersion":"2024-11-05","capabilities":{}}} + +# Expected response: +{ + "jsonrpc": "2.0", + "id": 1, + "result": { + "protocolVersion": "2024-11-05", + "capabilities": { + "tools": { + "listChanged": false + } + }, + "serverInfo": { + "name": "stacker-mcp", + "version": "0.2.0" + } + } +} + +# 4. List tools +{"jsonrpc":"2.0","id":2,"method":"tools/list","params":{}} + +# Expected response (initially empty): +{ + "jsonrpc": "2.0", + "id": 2, + "result": { + "tools": [] + } +} +``` + +## Next Steps (Phase 2: Core Tools) + +### 1. Project Management Tools +- [ ] `src/mcp/tools/project.rs` + - [ ] `CreateProjectTool` - Create new stack + - [ ] `ListProjectsTool` - List user's projects + - [ ] `GetProjectTool` - Get project details + - [ ] `UpdateProjectTool` - Update project + - [ ] `DeleteProjectTool` - Delete project + +### 2. Composition & Deployment +- [ ] `src/mcp/tools/deployment.rs` + - [ ] `GenerateComposeTool` - Generate docker-compose.yml + - [ ] `DeployProjectTool` - Deploy to cloud + - [ ] `GetDeploymentStatusTool` - Check deployment status + +### 3. Templates & Discovery +- [ ] `src/mcp/tools/templates.rs` + - [ ] `ListTemplatesTool` - Browse public templates + - [ ] `GetTemplateTool` - Get template details + - [ ] `SuggestResourcesTool` - AI resource recommendations + +### 4. Tool Registration +Update `src/mcp/registry.rs`: +```rust +pub fn new() -> Self { + let mut registry = Self { + handlers: HashMap::new(), + }; + + registry.register("create_project", Box::new(CreateProjectTool)); + registry.register("list_projects", Box::new(ListProjectsTool)); + registry.register("suggest_resources", Box::new(SuggestResourcesTool)); + // ... register all tools + + registry +} +``` + +## Files Modified/Created + +### New Files +- `src/mcp/mod.rs` - Module exports +- `src/mcp/protocol.rs` - MCP protocol types +- `src/mcp/session.rs` - Session management +- `src/mcp/registry.rs` - Tool registry +- `src/mcp/websocket.rs` - WebSocket handler +- `src/mcp/protocol_tests.rs` - Unit tests +- `migrations/20251227140000_casbin_mcp_endpoint.up.sql` - Authorization rules +- `migrations/20251227140000_casbin_mcp_endpoint.down.sql` - Rollback + +### Modified Files +- `src/lib.rs` - Added `pub mod mcp;` +- `src/startup.rs` - Registered `/mcp` route, initialized registry +- `Cargo.toml` - Added `actix`, `actix-web-actors`, `async-trait` + +## Known Limitations + +1. **No tools registered yet** - Tools list returns empty array +2. **Session persistence** - Sessions only live in memory (not Redis) +3. **Rate limiting** - Not yet implemented (planned for Phase 4) +4. **Metrics** - No Prometheus metrics yet +5. **Database tests** - Cannot run tests without database connection + +## Security + +- ✅ OAuth authentication required +- ✅ Casbin authorization enforced +- ✅ User isolation (ToolContext includes authenticated user) +- ⏳ Rate limiting (planned) +- ⏳ Input validation (will be added per-tool) + +## Performance + +- Connection pooling: Yes (reuses app's PgPool) +- Concurrent connections: Limited by Actix worker pool +- WebSocket overhead: ~2KB per connection +- Heartbeat interval: 5s (configurable) +- Tool execution: Async (non-blocking) + +## Deployment + +### Environment Variables +No new environment variables needed. Uses existing: +- `DATABASE_URL` - PostgreSQL connection +- `RUST_LOG` - Logging level +- OAuth settings from `configuration.yaml` + +### Database Migration +```bash +sqlx migrate run +``` + +### Docker +No changes needed to existing Dockerfile. + +## Documentation + +- ✅ Backend plan: `docs/MCP_SERVER_BACKEND_PLAN.md` +- ✅ Frontend integration: `docs/MCP_SERVER_FRONTEND_INTEGRATION.md` +- ✅ This README: `docs/MCP_PHASE1_SUMMARY.md` + +## Questions? + +- MCP Protocol Spec: https://spec.modelcontextprotocol.io/ +- Actix WebSocket Docs: https://actix.rs/docs/websockets/ +- Tool implementation examples: See planning docs in `docs/` diff --git a/docs/MCP_SERVER_BACKEND_PLAN.md b/docs/MCP_SERVER_BACKEND_PLAN.md new file mode 100644 index 0000000..d78db97 --- /dev/null +++ b/docs/MCP_SERVER_BACKEND_PLAN.md @@ -0,0 +1,1215 @@ +# MCP Server Backend Implementation Plan + +## Overview +This document outlines the implementation plan for adding Model Context Protocol (MCP) server capabilities to the Stacker backend. The MCP server will expose Stacker's functionality as tools that AI assistants can use to help users build and deploy application stacks. + +## Architecture + +``` +┌─────────────────────────────────────────────────────────┐ +│ Stacker Backend (Rust/Actix-web) │ +│ │ +│ ┌──────────────────┐ ┌────────────────────┐ │ +│ │ REST API │ │ MCP Server │ │ +│ │ (Existing) │ │ (New) │ │ +│ │ │ │ │ │ +│ │ /project │◄───────┤ Tool Registry │ │ +│ │ /cloud │ │ - create_project │ │ +│ │ /rating │ │ - list_projects │ │ +│ │ /deployment │ │ - get_templates │ │ +│ └──────────────────┘ │ - deploy_project │ │ +│ │ │ - etc... │ │ +│ │ └────────────────────┘ │ +│ │ │ │ +│ │ │ │ +│ └───────────┬───────────────┘ │ +│ ▼ │ +│ ┌─────────────────┐ │ +│ │ PostgreSQL DB │ │ +│ │ + Session Store │ │ +│ └─────────────────┘ │ +└─────────────────────────────────────────────────────────┘ + │ + │ WebSocket (JSON-RPC 2.0) + ▼ +┌─────────────────────────────────────────────────────────┐ +│ Frontend (React) or AI Client │ +│ - Sends tool requests │ +│ - Receives tool results │ +│ - Manages conversation context │ +└─────────────────────────────────────────────────────────┘ +``` + +## Technology Stack + +### Core Dependencies +```toml +[dependencies] +# MCP Protocol +tokio-tungstenite = "0.21" # WebSocket server +serde_json = "1.0" # JSON-RPC 2.0 serialization +uuid = { version = "1.0", features = ["v4"] } # Request IDs + +# Existing (reuse) +actix-web = "4.4" # HTTP server +sqlx = "0.8" # Database +tokio = { version = "1", features = ["full"] } +``` + +### MCP Protocol Specification +- **Protocol**: JSON-RPC 2.0 over WebSocket +- **Version**: MCP 2024-11-05 +- **Transport**: `wss://api.try.direct/mcp` (production) +- **Authentication**: OAuth Bearer token (reuse existing auth) + +## Implementation Phases + +--- + +## Phase 1: Foundation (Week 1-2) + +### 1.1 MCP Protocol Implementation + +**Create core protocol structures:** + +```rust +// src/mcp/protocol.rs +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +#[derive(Debug, Serialize, Deserialize)] +#[serde(tag = "jsonrpc")] +pub struct JsonRpcRequest { + pub jsonrpc: String, // "2.0" + pub id: Option, + pub method: String, + pub params: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct JsonRpcResponse { + pub jsonrpc: String, + pub id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub result: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub error: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct JsonRpcError { + pub code: i32, + pub message: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub data: Option, +} + +// MCP-specific types +#[derive(Debug, Serialize, Deserialize)] +pub struct Tool { + pub name: String, + pub description: String, + #[serde(rename = "inputSchema")] + pub input_schema: Value, // JSON Schema for parameters +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct ToolListResponse { + pub tools: Vec, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct CallToolRequest { + pub name: String, + pub arguments: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct CallToolResponse { + pub content: Vec, + #[serde(rename = "isError", skip_serializing_if = "Option::is_none")] + pub is_error: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(tag = "type")] +pub enum ToolContent { + #[serde(rename = "text")] + Text { text: String }, + #[serde(rename = "image")] + Image { + data: String, // base64 + #[serde(rename = "mimeType")] + mime_type: String + }, +} +``` + +### 1.2 WebSocket Handler + +```rust +// src/mcp/websocket.rs +use actix::{Actor, StreamHandler}; +use actix_web::{web, Error, HttpRequest, HttpResponse}; +use actix_web_actors::ws; +use tokio_tungstenite::tungstenite::protocol::Message; + +pub struct McpWebSocket { + user: Arc, + session: McpSession, +} + +impl Actor for McpWebSocket { + type Context = ws::WebsocketContext; +} + +impl StreamHandler> for McpWebSocket { + fn handle(&mut self, msg: Result, ctx: &mut Self::Context) { + match msg { + Ok(ws::Message::Text(text)) => { + let request: JsonRpcRequest = serde_json::from_str(&text).unwrap(); + let response = self.handle_jsonrpc(request).await; + ctx.text(serde_json::to_string(&response).unwrap()); + } + Ok(ws::Message::Close(reason)) => { + ctx.close(reason); + ctx.stop(); + } + _ => {} + } + } +} + +impl McpWebSocket { + async fn handle_jsonrpc(&self, req: JsonRpcRequest) -> JsonRpcResponse { + match req.method.as_str() { + "initialize" => self.handle_initialize(req).await, + "tools/list" => self.handle_tools_list(req).await, + "tools/call" => self.handle_tools_call(req).await, + _ => JsonRpcResponse { + jsonrpc: "2.0".to_string(), + id: req.id, + result: None, + error: Some(JsonRpcError { + code: -32601, + message: "Method not found".to_string(), + data: None, + }), + }, + } + } +} + +// Route registration +pub async fn mcp_websocket( + req: HttpRequest, + stream: web::Payload, + user: web::ReqData>, + pg_pool: web::Data, +) -> Result { + let ws = McpWebSocket { + user: user.into_inner(), + session: McpSession::new(), + }; + ws::start(ws, &req, stream) +} +``` + +### 1.3 Tool Registry + +```rust +// src/mcp/registry.rs +use std::collections::HashMap; +use async_trait::async_trait; + +#[async_trait] +pub trait ToolHandler: Send + Sync { + async fn execute( + &self, + args: Value, + context: &ToolContext, + ) -> Result; + + fn schema(&self) -> Tool; +} + +pub struct ToolRegistry { + handlers: HashMap>, +} + +impl ToolRegistry { + pub fn new() -> Self { + let mut registry = Self { + handlers: HashMap::new(), + }; + + // Register all tools + registry.register("create_project", Box::new(CreateProjectTool)); + registry.register("list_projects", Box::new(ListProjectsTool)); + registry.register("get_project", Box::new(GetProjectTool)); + registry.register("update_project", Box::new(UpdateProjectTool)); + registry.register("delete_project", Box::new(DeleteProjectTool)); + registry.register("generate_compose", Box::new(GenerateComposeTool)); + registry.register("deploy_project", Box::new(DeployProjectTool)); + registry.register("list_templates", Box::new(ListTemplatesTool)); + registry.register("get_template", Box::new(GetTemplateTool)); + registry.register("list_clouds", Box::new(ListCloudsTool)); + registry.register("suggest_resources", Box::new(SuggestResourcesTool)); + + registry + } + + pub fn get(&self, name: &str) -> Option<&Box> { + self.handlers.get(name) + } + + pub fn list_tools(&self) -> Vec { + self.handlers.values().map(|h| h.schema()).collect() + } +} + +pub struct ToolContext { + pub user: Arc, + pub pg_pool: PgPool, + pub settings: Arc, +} +``` + +### 1.4 Session Management + +```rust +// src/mcp/session.rs +use std::collections::HashMap; + +pub struct McpSession { + pub id: String, + pub created_at: chrono::DateTime, + pub context: HashMap, // Store conversation state +} + +impl McpSession { + pub fn new() -> Self { + Self { + id: uuid::Uuid::new_v4().to_string(), + created_at: chrono::Utc::now(), + context: HashMap::new(), + } + } + + pub fn set_context(&mut self, key: String, value: Value) { + self.context.insert(key, value); + } + + pub fn get_context(&self, key: &str) -> Option<&Value> { + self.context.get(key) + } +} +``` + +**Deliverables:** +- [ ] MCP protocol types in `src/mcp/protocol.rs` +- [ ] WebSocket handler in `src/mcp/websocket.rs` +- [ ] Tool registry in `src/mcp/registry.rs` +- [ ] Session management in `src/mcp/session.rs` +- [ ] Route registration: `web::resource("/mcp").route(web::get().to(mcp_websocket))` + +--- + +## Phase 2: Core Tools (Week 3-4) + +### 2.1 Project Management Tools + +```rust +// src/mcp/tools/project.rs + +pub struct CreateProjectTool; + +#[async_trait] +impl ToolHandler for CreateProjectTool { + async fn execute(&self, args: Value, ctx: &ToolContext) -> Result { + let form: forms::project::Add = serde_json::from_value(args) + .map_err(|e| format!("Invalid arguments: {}", e))?; + + let project = db::project::insert( + &ctx.pg_pool, + &ctx.user.id, + &form, + ).await + .map_err(|e| format!("Database error: {}", e))?; + + Ok(ToolContent::Text { + text: serde_json::to_string(&project).unwrap(), + }) + } + + fn schema(&self) -> Tool { + Tool { + name: "create_project".to_string(), + description: "Create a new application stack project with services, networking, and deployment configuration".to_string(), + input_schema: serde_json::json!({ + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Project name (required)" + }, + "description": { + "type": "string", + "description": "Project description (optional)" + }, + "apps": { + "type": "array", + "description": "List of applications/services", + "items": { + "type": "object", + "properties": { + "name": { "type": "string" }, + "dockerImage": { + "type": "object", + "properties": { + "namespace": { "type": "string" }, + "repository": { "type": "string" }, + "password": { "type": "string" } + }, + "required": ["repository"] + }, + "resources": { + "type": "object", + "properties": { + "cpu": { "type": "number", "description": "CPU cores (0-8)" }, + "ram": { "type": "number", "description": "RAM in GB (0-16)" }, + "storage": { "type": "number", "description": "Storage in GB (0-100)" } + } + }, + "ports": { + "type": "array", + "items": { + "type": "object", + "properties": { + "hostPort": { "type": "number" }, + "containerPort": { "type": "number" } + } + } + } + }, + "required": ["name", "dockerImage"] + } + } + }, + "required": ["name", "apps"] + }), + } + } +} + +pub struct ListProjectsTool; + +#[async_trait] +impl ToolHandler for ListProjectsTool { + async fn execute(&self, _args: Value, ctx: &ToolContext) -> Result { + let projects = db::project::fetch_by_user(&ctx.pg_pool, &ctx.user.id) + .await + .map_err(|e| format!("Database error: {}", e))?; + + Ok(ToolContent::Text { + text: serde_json::to_string(&projects).unwrap(), + }) + } + + fn schema(&self) -> Tool { + Tool { + name: "list_projects".to_string(), + description: "List all projects owned by the authenticated user".to_string(), + input_schema: serde_json::json!({ + "type": "object", + "properties": {} + }), + } + } +} +``` + +### 2.2 Template & Discovery Tools + +```rust +// src/mcp/tools/templates.rs + +pub struct ListTemplatesTool; + +#[async_trait] +impl ToolHandler for ListTemplatesTool { + async fn execute(&self, args: Value, ctx: &ToolContext) -> Result { + #[derive(Deserialize)] + struct Args { + category: Option, + search: Option, + } + + let params: Args = serde_json::from_value(args).unwrap_or_default(); + + // Fetch public templates from rating table + let templates = db::rating::fetch_public_templates(&ctx.pg_pool, params.category) + .await + .map_err(|e| format!("Database error: {}", e))?; + + // Filter by search term if provided + let filtered = if let Some(search) = params.search { + templates.into_iter() + .filter(|t| t.name.to_lowercase().contains(&search.to_lowercase())) + .collect() + } else { + templates + }; + + Ok(ToolContent::Text { + text: serde_json::to_string(&filtered).unwrap(), + }) + } + + fn schema(&self) -> Tool { + Tool { + name: "list_templates".to_string(), + description: "List available stack templates (WordPress, Node.js, Django, etc.) with ratings and descriptions".to_string(), + input_schema: serde_json::json!({ + "type": "object", + "properties": { + "category": { + "type": "string", + "enum": ["web", "api", "database", "cms", "ecommerce"], + "description": "Filter by category (optional)" + }, + "search": { + "type": "string", + "description": "Search templates by name (optional)" + } + } + }), + } + } +} + +pub struct SuggestResourcesTool; + +#[async_trait] +impl ToolHandler for SuggestResourcesTool { + async fn execute(&self, args: Value, _ctx: &ToolContext) -> Result { + #[derive(Deserialize)] + struct Args { + app_type: String, + expected_traffic: Option, // "low", "medium", "high" + } + + let params: Args = serde_json::from_value(args) + .map_err(|e| format!("Invalid arguments: {}", e))?; + + // Simple heuristic-based suggestions + let (cpu, ram, storage) = match params.app_type.to_lowercase().as_str() { + "wordpress" | "cms" => (1, 2, 20), + "nodejs" | "express" => (1, 1, 10), + "django" | "flask" => (2, 2, 15), + "nextjs" | "react" => (1, 2, 10), + "mysql" | "postgresql" => (2, 4, 50), + "redis" | "memcached" => (1, 1, 5), + "nginx" | "traefik" => (1, 0.5, 5), + _ => (1, 1, 10), // default + }; + + // Adjust for traffic + let multiplier = match params.expected_traffic.as_deref() { + Some("high") => 2.0, + Some("medium") => 1.5, + _ => 1.0, + }; + + let suggestion = serde_json::json!({ + "cpu": (cpu as f64 * multiplier).ceil() as i32, + "ram": (ram as f64 * multiplier).ceil() as i32, + "storage": (storage as f64 * multiplier).ceil() as i32, + "recommendation": format!( + "For {} with {} traffic: {}x{} CPU, {} GB RAM, {} GB storage", + params.app_type, + params.expected_traffic.as_deref().unwrap_or("low"), + (cpu as f64 * multiplier).ceil(), + if multiplier > 1.0 { "vCPU" } else { "core" }, + (ram as f64 * multiplier).ceil(), + (storage as f64 * multiplier).ceil() + ) + }); + + Ok(ToolContent::Text { + text: serde_json::to_string(&suggestion).unwrap(), + }) + } + + fn schema(&self) -> Tool { + Tool { + name: "suggest_resources".to_string(), + description: "Suggest appropriate CPU, RAM, and storage limits for an application type".to_string(), + input_schema: serde_json::json!({ + "type": "object", + "properties": { + "app_type": { + "type": "string", + "description": "Application type (e.g., 'wordpress', 'nodejs', 'postgresql')" + }, + "expected_traffic": { + "type": "string", + "enum": ["low", "medium", "high"], + "description": "Expected traffic level (optional, default: low)" + } + }, + "required": ["app_type"] + }), + } + } +} +``` + +**Deliverables:** +- [ ] Project CRUD tools (create, list, get, update, delete) +- [ ] Deployment tools (generate_compose, deploy) +- [ ] Template discovery tools (list_templates, get_template) +- [ ] Resource suggestion tool +- [ ] Cloud provider tools (list_clouds, add_cloud) + +--- + +## Phase 3: Advanced Features (Week 5-6) + +### 3.1 Context & State Management + +```rust +// Store partial project data during multi-turn conversations +session.set_context("draft_project".to_string(), serde_json::json!({ + "name": "My API", + "apps": [ + { + "name": "api", + "dockerImage": { "repository": "node:18-alpine" } + } + ], + "step": 2 // User is on step 2 of 5 +})); +``` + +### 3.2 Validation Tools + +```rust +pub struct ValidateDomainTool; + +#[async_trait] +impl ToolHandler for ValidateDomainTool { + async fn execute(&self, args: Value, _ctx: &ToolContext) -> Result { + #[derive(Deserialize)] + struct Args { + domain: String, + } + + let params: Args = serde_json::from_value(args) + .map_err(|e| format!("Invalid arguments: {}", e))?; + + // Simple regex validation + let domain_regex = regex::Regex::new(r"^([a-z0-9]+(-[a-z0-9]+)*\.)+[a-z]{2,}$").unwrap(); + let is_valid = domain_regex.is_match(¶ms.domain); + + let result = serde_json::json!({ + "domain": params.domain, + "valid": is_valid, + "message": if is_valid { + "Domain format is valid" + } else { + "Invalid domain format. Use lowercase letters, numbers, hyphens, and dots only" + } + }); + + Ok(ToolContent::Text { + text: serde_json::to_string(&result).unwrap(), + }) + } + + fn schema(&self) -> Tool { + Tool { + name: "validate_domain".to_string(), + description: "Validate domain name format".to_string(), + input_schema: serde_json::json!({ + "type": "object", + "properties": { + "domain": { + "type": "string", + "description": "Domain to validate (e.g., 'example.com')" + } + }, + "required": ["domain"] + }), + } + } +} +``` + +### 3.3 Deployment Status Tools + +```rust +pub struct GetDeploymentStatusTool; + +#[async_trait] +impl ToolHandler for GetDeploymentStatusTool { + async fn execute(&self, args: Value, ctx: &ToolContext) -> Result { + #[derive(Deserialize)] + struct Args { + deployment_id: i32, + } + + let params: Args = serde_json::from_value(args) + .map_err(|e| format!("Invalid arguments: {}", e))?; + + let deployment = db::deployment::fetch(&ctx.pg_pool, params.deployment_id) + .await + .map_err(|e| format!("Database error: {}", e))?; + + Ok(ToolContent::Text { + text: serde_json::to_string(&deployment).unwrap(), + }) + } + + fn schema(&self) -> Tool { + Tool { + name: "get_deployment_status".to_string(), + description: "Get current deployment status and details".to_string(), + input_schema: serde_json::json!({ + "type": "object", + "properties": { + "deployment_id": { + "type": "number", + "description": "Deployment ID" + } + }, + "required": ["deployment_id"] + }), + } + } +} +``` + +**Deliverables:** +- [ ] Session context persistence +- [ ] Domain validation tool +- [ ] Port validation tool +- [ ] Git repository parsing tool +- [ ] Deployment status monitoring tool + +--- + +## Phase 4: Security & Production (Week 7-8) + +### 4.1 Authentication & Authorization + +```rust +// Reuse existing OAuth middleware +// src/mcp/websocket.rs + +pub async fn mcp_websocket( + req: HttpRequest, + stream: web::Payload, + user: web::ReqData>, // ← Injected by auth middleware + pg_pool: web::Data, +) -> Result { + // User is already authenticated via Bearer token + // Casbin rules apply: only admin/user roles can access MCP + + let ws = McpWebSocket { + user: user.into_inner(), + session: McpSession::new(), + }; + ws::start(ws, &req, stream) +} +``` + +**Casbin Rules for MCP:** +```sql +-- migrations/20251228000000_casbin_mcp_rules.up.sql +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) +VALUES + ('p', 'group_admin', '/mcp', 'GET', '', '', ''), + ('p', 'group_user', '/mcp', 'GET', '', '', '') +ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING; +``` + +### 4.2 Rate Limiting + +```rust +// src/mcp/rate_limit.rs +use std::collections::HashMap; +use std::sync::{Arc, Mutex}; +use std::time::{Duration, Instant}; + +pub struct RateLimiter { + limits: Arc>>>, + max_requests: usize, + window: Duration, +} + +impl RateLimiter { + pub fn new(max_requests: usize, window: Duration) -> Self { + Self { + limits: Arc::new(Mutex::new(HashMap::new())), + max_requests, + window, + } + } + + pub fn check(&self, user_id: &str) -> Result<(), String> { + let mut limits = self.limits.lock().unwrap(); + let now = Instant::now(); + + let requests = limits.entry(user_id.to_string()).or_insert_with(Vec::new); + + // Remove expired entries + requests.retain(|&time| now.duration_since(time) < self.window); + + if requests.len() >= self.max_requests { + return Err(format!( + "Rate limit exceeded: {} requests per {} seconds", + self.max_requests, + self.window.as_secs() + )); + } + + requests.push(now); + Ok(()) + } +} + +// Usage in McpWebSocket +impl McpWebSocket { + async fn handle_tools_call(&self, req: JsonRpcRequest) -> JsonRpcResponse { + // Rate limit: 100 tool calls per minute per user + if let Err(msg) = self.rate_limiter.check(&self.user.id) { + return JsonRpcResponse { + jsonrpc: "2.0".to_string(), + id: req.id, + result: None, + error: Some(JsonRpcError { + code: -32000, + message: msg, + data: None, + }), + }; + } + + // ... proceed with tool execution + } +} +``` + +### 4.3 Error Handling & Logging + +```rust +// Enhanced error responses with tracing +impl McpWebSocket { + async fn handle_tools_call(&self, req: JsonRpcRequest) -> JsonRpcResponse { + let call_req: CallToolRequest = match serde_json::from_value(req.params.unwrap()) { + Ok(r) => r, + Err(e) => { + tracing::error!("Invalid tool call params: {:?}", e); + return JsonRpcResponse { + jsonrpc: "2.0".to_string(), + id: req.id, + result: None, + error: Some(JsonRpcError { + code: -32602, + message: "Invalid params".to_string(), + data: Some(serde_json::json!({ "error": e.to_string() })), + }), + }; + } + }; + + let tool_span = tracing::info_span!("mcp_tool_call", tool = %call_req.name, user = %self.user.id); + let _enter = tool_span.enter(); + + match self.registry.get(&call_req.name) { + Some(handler) => { + match handler.execute( + call_req.arguments.unwrap_or(serde_json::json!({})), + &self.context(), + ).await { + Ok(content) => { + tracing::info!("Tool executed successfully"); + JsonRpcResponse { + jsonrpc: "2.0".to_string(), + id: req.id, + result: Some(serde_json::to_value(CallToolResponse { + content: vec![content], + is_error: None, + }).unwrap()), + error: None, + } + } + Err(e) => { + tracing::error!("Tool execution failed: {}", e); + JsonRpcResponse { + jsonrpc: "2.0".to_string(), + id: req.id, + result: Some(serde_json::to_value(CallToolResponse { + content: vec![ToolContent::Text { + text: format!("Error: {}", e), + }], + is_error: Some(true), + }).unwrap()), + error: None, + } + } + } + } + None => { + tracing::warn!("Unknown tool requested: {}", call_req.name); + JsonRpcResponse { + jsonrpc: "2.0".to_string(), + id: req.id, + result: None, + error: Some(JsonRpcError { + code: -32601, + message: format!("Tool not found: {}", call_req.name), + data: None, + }), + } + } + } + } +} +``` + +**Deliverables:** +- [ ] Casbin rules for MCP endpoint +- [ ] Rate limiting (100 calls/min per user) +- [ ] Comprehensive error handling +- [ ] Structured logging with tracing +- [ ] Input validation for all tools + +--- + +## Phase 5: Testing & Documentation (Week 9) + +### 5.1 Unit Tests + +```rust +#[cfg(test)] +mod tests { + use super::*; + + #[tokio::test] + async fn test_create_project_tool() { + let tool = CreateProjectTool; + let ctx = create_test_context().await; + + let args = serde_json::json!({ + "name": "Test Project", + "apps": [{ + "name": "web", + "dockerImage": { "repository": "nginx" } + }] + }); + + let result = tool.execute(args, &ctx).await; + assert!(result.is_ok()); + + let ToolContent::Text { text } = result.unwrap(); + let project: models::Project = serde_json::from_str(&text).unwrap(); + assert_eq!(project.name, "Test Project"); + } + + #[tokio::test] + async fn test_list_templates_tool() { + let tool = ListTemplatesTool; + let ctx = create_test_context().await; + + let result = tool.execute(serde_json::json!({}), &ctx).await; + assert!(result.is_ok()); + } +} +``` + +### 5.2 Integration Tests + +```rust +// tests/mcp_integration.rs +use actix_web::test; +use tokio_tungstenite::connect_async; + +#[actix_web::test] +async fn test_mcp_websocket_connection() { + let app = spawn_app().await; + + let ws_url = format!("ws://{}/mcp", app.address); + let (ws_stream, _) = connect_async(ws_url).await.unwrap(); + + // Send initialize request + let init_msg = serde_json::json!({ + "jsonrpc": "2.0", + "id": 1, + "method": "initialize", + "params": { + "protocolVersion": "2024-11-05", + "capabilities": {} + } + }); + + // ... test flow +} + +#[actix_web::test] +async fn test_create_project_via_mcp() { + // Test full create project flow via MCP +} +``` + +### 5.3 Documentation + +**API Documentation:** +- Generate OpenAPI/Swagger spec for MCP tools +- Document all tool schemas with examples +- Create integration guide for frontend developers + +**Example Documentation:** +```markdown +## MCP Tool: create_project + +**Description**: Create a new application stack project + +**Parameters:** +```json +{ + "name": "My WordPress Site", + "apps": [ + { + "name": "wordpress", + "dockerImage": { + "repository": "wordpress", + "tag": "latest" + }, + "resources": { + "cpu": 2, + "ram": 4, + "storage": 20 + }, + "ports": [ + { "hostPort": 80, "containerPort": 80 } + ] + } + ] +} +``` + +**Response:** +```json +{ + "id": 123, + "name": "My WordPress Site", + "user_id": "user_abc", + "created_at": "2025-12-27T10:00:00Z", + ... +} +``` +``` + +**Deliverables:** +- [ ] Unit tests for all tools (>80% coverage) +- [ ] Integration tests for WebSocket connection +- [ ] End-to-end tests for tool execution flow +- [ ] API documentation (MCP tool schemas) +- [ ] Integration guide for frontend + +--- + +## Deployment Configuration + +### Update `startup.rs` + +```rust +// src/startup.rs +use crate::mcp; + +pub async fn run( + listener: TcpListener, + pg_pool: Pool, + settings: Settings, +) -> Result { + // ... existing setup ... + + // Initialize MCP registry + let mcp_registry = web::Data::new(mcp::ToolRegistry::new()); + + let server = HttpServer::new(move || { + App::new() + // ... existing middleware and routes ... + + // Add MCP WebSocket endpoint + .service( + web::resource("/mcp") + .route(web::get().to(mcp::mcp_websocket)) + ) + .app_data(mcp_registry.clone()) + }) + .listen(listener)? + .run(); + + Ok(server) +} +``` + +### Update `Cargo.toml` + +```toml +[dependencies] +tokio-tungstenite = "0.21" +uuid = { version = "1.0", features = ["v4", "serde"] } +async-trait = "0.1" +regex = "1.10" + +# Consider adding MCP SDK if available +# mcp-server = "0.1" # Hypothetical official SDK +``` + +--- + +## Monitoring & Metrics + +### Key Metrics to Track + +```rust +// src/mcp/metrics.rs +use prometheus::{IntCounterVec, HistogramVec, Registry}; + +pub struct McpMetrics { + pub tool_calls_total: IntCounterVec, + pub tool_duration: HistogramVec, + pub websocket_connections: IntCounterVec, + pub errors_total: IntCounterVec, +} + +impl McpMetrics { + pub fn new(registry: &Registry) -> Self { + let tool_calls_total = IntCounterVec::new( + prometheus::Opts::new("mcp_tool_calls_total", "Total MCP tool calls"), + &["tool", "user_id", "status"] + ).unwrap(); + registry.register(Box::new(tool_calls_total.clone())).unwrap(); + + // ... register other metrics + + Self { + tool_calls_total, + // ... + } + } +} +``` + +**Metrics to expose:** +- `mcp_tool_calls_total{tool, user_id, status}` - Counter +- `mcp_tool_duration_seconds{tool}` - Histogram +- `mcp_websocket_connections_active` - Gauge +- `mcp_errors_total{tool, error_type}` - Counter + +--- + +## Complete Tool List (Initial Release) + +### Project Management (7 tools) +1. ✅ `create_project` - Create new project +2. ✅ `list_projects` - List user's projects +3. ✅ `get_project` - Get project details +4. ✅ `update_project` - Update project +5. ✅ `delete_project` - Delete project +6. ✅ `generate_compose` - Generate docker-compose.yml +7. ✅ `deploy_project` - Deploy to cloud + +### Template & Discovery (3 tools) +8. ✅ `list_templates` - List available templates +9. ✅ `get_template` - Get template details +10. ✅ `suggest_resources` - Suggest resource limits + +### Cloud Management (2 tools) +11. ✅ `list_clouds` - List cloud providers +12. ✅ `add_cloud` - Add cloud credentials + +### Validation (3 tools) +13. ✅ `validate_domain` - Validate domain format +14. ✅ `validate_ports` - Validate port configuration +15. ✅ `parse_git_repo` - Parse Git repository URL + +### Deployment (2 tools) +16. ✅ `list_deployments` - List deployments +17. ✅ `get_deployment_status` - Get deployment status + +**Total: 17 tools for MVP** + +--- + +## Success Criteria + +### Functional Requirements +- [ ] All 17 tools implemented and tested +- [ ] WebSocket connection stable for >1 hour +- [ ] Handle 100 concurrent WebSocket connections +- [ ] Rate limiting prevents abuse +- [ ] Authentication/authorization enforced + +### Performance Requirements +- [ ] Tool execution <500ms (p95) +- [ ] WebSocket latency <50ms +- [ ] Support 10 tool calls/second per user +- [ ] No memory leaks in long-running sessions + +### Security Requirements +- [ ] OAuth authentication required +- [ ] Casbin ACL enforced +- [ ] Input validation on all parameters +- [ ] SQL injection protection (via sqlx) +- [ ] Rate limiting (100 calls/min per user) + +--- + +## Migration Path + +1. **Week 1-2**: Core protocol + 3 basic tools (create_project, list_projects, list_templates) +2. **Week 3-4**: All 17 tools implemented +3. **Week 5-6**: Advanced features (validation, suggestions) +4. **Week 7-8**: Security hardening + production readiness +5. **Week 9**: Testing + documentation +6. **Week 10**: Beta release with frontend integration + +--- + +## Questions & Decisions + +### Open Questions +1. **Session persistence**: Store in PostgreSQL or Redis? + - **Recommendation**: Redis for ephemeral session data + +2. **Tool versioning**: How to handle breaking changes? + - **Recommendation**: Version in tool name (`create_project_v1`) + +3. **Error recovery**: Retry failed tool calls? + - **Recommendation**: Let AI/client decide on retry + +### Technical Decisions +- ✅ Use tokio-tungstenite for WebSocket +- ✅ JSON-RPC 2.0 over WebSocket (not HTTP SSE) +- ✅ Reuse existing auth middleware +- ✅ Store sessions in memory (move to Redis later) +- ✅ Rate limit at WebSocket level (not per-tool) + +--- + +## Contact & Resources + +**References:** +- MCP Specification: https://spec.modelcontextprotocol.io/ +- Example Rust MCP Server: https://github.com/modelcontextprotocol/servers +- Actix WebSocket: https://actix.rs/docs/websockets/ + +**Team Contacts:** +- Backend Lead: [Your Name] +- Frontend Integration: [Frontend Lead] +- DevOps: [DevOps Contact] diff --git a/docs/MCP_SERVER_FRONTEND_INTEGRATION.md b/docs/MCP_SERVER_FRONTEND_INTEGRATION.md new file mode 100644 index 0000000..c23eda7 --- /dev/null +++ b/docs/MCP_SERVER_FRONTEND_INTEGRATION.md @@ -0,0 +1,1355 @@ +# MCP Server Frontend Integration Guide + +## Overview +This document provides comprehensive guidance for integrating the Stacker MCP (Model Context Protocol) server with the ReactJS Stack Builder frontend. The integration enables an AI-powered chat assistant that helps users build and deploy application stacks through natural language interactions. + +## Architecture Overview + +``` +┌──────────────────────────────────────────────────────────────┐ +│ React Frontend (Stack Builder UI) │ +│ │ +│ ┌────────────────┐ ┌──────────────────────────┐ │ +│ │ Project Form │◄────────┤ AI Chat Assistant │ │ +│ │ - Name │ fills │ - Chat Messages │ │ +│ │ - Services │◄────────┤ - Input Box │ │ +│ │ - Resources │ │ - Context Display │ │ +│ │ - Domains │ │ - Suggestions │ │ +│ └────────────────┘ └──────────────────────────┘ │ +│ │ │ │ +│ │ │ │ +│ └──────────┬───────────────────┘ │ +│ │ │ +│ ┌───────▼───────┐ │ +│ │ MCP Client │ │ +│ │ (WebSocket) │ │ +│ └───────────────┘ │ +│ │ │ +└────────────────────┼─────────────────────────────────────────┘ + │ WebSocket (JSON-RPC 2.0) + ▼ +┌──────────────────────────────────────────────────────────────┐ +│ Stacker Backend (MCP Server) │ +│ - Tool Registry (17+ tools) │ +│ - Session Management │ +│ - OAuth Authentication │ +└──────────────────────────────────────────────────────────────┘ +``` + +## Technology Stack + +### Core Dependencies + +```json +{ + "dependencies": { + "@modelcontextprotocol/sdk": "^0.5.0", + "react": "^18.2.0", + "react-dom": "^18.2.0", + "zustand": "^4.4.0", + "@tanstack/react-query": "^5.0.0", + "ws": "^8.16.0" + }, + "devDependencies": { + "@types/react": "^18.2.0", + "@types/ws": "^8.5.0", + "typescript": "^5.0.0" + } +} +``` + +### TypeScript Configuration + +```json +{ + "compilerOptions": { + "target": "ES2020", + "lib": ["ES2020", "DOM", "DOM.Iterable"], + "jsx": "react-jsx", + "module": "ESNext", + "moduleResolution": "bundler", + "resolveJsonModule": true, + "allowJs": true, + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true + } +} +``` + +--- + +## Phase 1: MCP Client Setup (Week 1) + +### 1.1 WebSocket Client + +```typescript +// src/lib/mcp/client.ts +import { Client } from '@modelcontextprotocol/sdk/client/index.js'; +import { WebSocketClientTransport } from '@modelcontextprotocol/sdk/client/websocket.js'; + +export interface McpClientConfig { + url: string; + authToken: string; +} + +export class StackerMcpClient { + private client: Client | null = null; + private transport: WebSocketClientTransport | null = null; + private config: McpClientConfig; + + constructor(config: McpClientConfig) { + this.config = config; + } + + async connect(): Promise { + // Create WebSocket transport with auth headers + this.transport = new WebSocketClientTransport( + new URL(this.config.url), + { + headers: { + 'Authorization': `Bearer ${this.config.authToken}` + } + } + ); + + // Initialize MCP client + this.client = new Client( + { + name: 'stacker-ui', + version: '1.0.0', + }, + { + capabilities: { + tools: {} + } + } + ); + + // Connect to server + await this.client.connect(this.transport); + + console.log('MCP client connected'); + } + + async disconnect(): Promise { + if (this.client) { + await this.client.close(); + this.client = null; + } + if (this.transport) { + await this.transport.close(); + this.transport = null; + } + } + + async listTools(): Promise> { + if (!this.client) { + throw new Error('MCP client not connected'); + } + + const response = await this.client.listTools(); + return response.tools; + } + + async callTool( + name: string, + args: Record + ): Promise<{ + content: Array<{ type: string; text?: string; data?: string }>; + isError?: boolean; + }> { + if (!this.client) { + throw new Error('MCP client not connected'); + } + + const response = await this.client.callTool({ + name, + arguments: args + }); + + return response; + } + + isConnected(): boolean { + return this.client !== null; + } +} +``` + +### 1.2 MCP Context Provider + +```typescript +// src/contexts/McpContext.tsx +import React, { createContext, useContext, useEffect, useState } from 'react'; +import { StackerMcpClient } from '@/lib/mcp/client'; +import { useAuth } from '@/hooks/useAuth'; + +interface McpContextValue { + client: StackerMcpClient | null; + isConnected: boolean; + error: string | null; + reconnect: () => Promise; +} + +const McpContext = createContext(undefined); + +export const McpProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => { + const { token } = useAuth(); + const [client, setClient] = useState(null); + const [isConnected, setIsConnected] = useState(false); + const [error, setError] = useState(null); + + const connect = async () => { + if (!token) { + setError('Authentication required'); + return; + } + + try { + const mcpClient = new StackerMcpClient({ + url: process.env.REACT_APP_MCP_URL || 'ws://localhost:8000/mcp', + authToken: token + }); + + await mcpClient.connect(); + setClient(mcpClient); + setIsConnected(true); + setError(null); + } catch (err) { + setError(err instanceof Error ? err.message : 'Connection failed'); + setIsConnected(false); + } + }; + + const reconnect = async () => { + if (client) { + await client.disconnect(); + } + await connect(); + }; + + useEffect(() => { + connect(); + + return () => { + if (client) { + client.disconnect(); + } + }; + }, [token]); + + return ( + + {children} + + ); +}; + +export const useMcp = () => { + const context = useContext(McpContext); + if (!context) { + throw new Error('useMcp must be used within McpProvider'); + } + return context; +}; +``` + +### 1.3 Connection Setup in App + +```typescript +// src/App.tsx +import { McpProvider } from '@/contexts/McpContext'; +import { AuthProvider } from '@/contexts/AuthContext'; +import { QueryClient, QueryClientProvider } from '@tanstack/react-query'; + +const queryClient = new QueryClient(); + +function App() { + return ( + + + + + + + + ); +} + +export default App; +``` + +--- + +## Phase 2: Chat Interface Components (Week 2) + +### 2.1 Chat Message Types + +```typescript +// src/types/chat.ts +export interface ChatMessage { + id: string; + role: 'user' | 'assistant' | 'system'; + content: string; + timestamp: Date; + toolCalls?: ToolCall[]; + metadata?: { + projectId?: number; + step?: number; + suggestions?: string[]; + }; +} + +export interface ToolCall { + id: string; + toolName: string; + arguments: Record; + result?: { + success: boolean; + data?: any; + error?: string; + }; + status: 'pending' | 'completed' | 'failed'; +} + +export interface ChatContext { + currentProject?: { + id?: number; + name?: string; + apps?: any[]; + step?: number; + }; + lastAction?: string; + availableTools?: string[]; +} +``` + +### 2.2 Chat Store (Zustand) + +```typescript +// src/stores/chatStore.ts +import { create } from 'zustand'; +import { ChatMessage, ChatContext } from '@/types/chat'; + +interface ChatStore { + messages: ChatMessage[]; + context: ChatContext; + isProcessing: boolean; + + addMessage: (message: Omit) => void; + updateMessage: (id: string, updates: Partial) => void; + clearMessages: () => void; + setContext: (context: Partial) => void; + setProcessing: (processing: boolean) => void; +} + +export const useChatStore = create((set) => ({ + messages: [], + context: {}, + isProcessing: false, + + addMessage: (message) => + set((state) => ({ + messages: [ + ...state.messages, + { + ...message, + id: crypto.randomUUID(), + timestamp: new Date(), + }, + ], + })), + + updateMessage: (id, updates) => + set((state) => ({ + messages: state.messages.map((msg) => + msg.id === id ? { ...msg, ...updates } : msg + ), + })), + + clearMessages: () => set({ messages: [], context: {} }), + + setContext: (context) => + set((state) => ({ + context: { ...state.context, ...context }, + })), + + setProcessing: (processing) => set({ isProcessing: processing }), +})); +``` + +### 2.3 Chat Sidebar Component + +```tsx +// src/components/chat/ChatSidebar.tsx +import React, { useRef, useEffect } from 'react'; +import { useChatStore } from '@/stores/chatStore'; +import { ChatMessage } from './ChatMessage'; +import { ChatInput } from './ChatInput'; +import { ChatHeader } from './ChatHeader'; + +export const ChatSidebar: React.FC = () => { + const messages = useChatStore((state) => state.messages); + const messagesEndRef = useRef(null); + + useEffect(() => { + messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' }); + }, [messages]); + + return ( +
+ + +
+ {messages.length === 0 ? ( +
+ + + +

Ask me anything!

+

+ I can help you create projects, suggest configurations,
+ and deploy your applications to the cloud. +

+
+ ) : ( + messages.map((message) => ( + + )) + )} +
+
+ + +
+ ); +}; +``` + +### 2.4 Chat Message Component + +```tsx +// src/components/chat/ChatMessage.tsx +import React from 'react'; +import { ChatMessage as ChatMessageType } from '@/types/chat'; +import { ToolCallDisplay } from './ToolCallDisplay'; +import ReactMarkdown from 'react-markdown'; + +interface Props { + message: ChatMessageType; +} + +export const ChatMessage: React.FC = ({ message }) => { + const isUser = message.role === 'user'; + + return ( +
+
+ {!isUser && ( +
+ + + + AI Assistant +
+ )} + +
+ {message.content} +
+ + {message.toolCalls && message.toolCalls.length > 0 && ( +
+ {message.toolCalls.map((toolCall) => ( + + ))} +
+ )} + +
+ {message.timestamp.toLocaleTimeString()} +
+
+
+ ); +}; +``` + +### 2.5 Chat Input Component + +```tsx +// src/components/chat/ChatInput.tsx +import React, { useState } from 'react'; +import { useChatStore } from '@/stores/chatStore'; +import { useAiAssistant } from '@/hooks/useAiAssistant'; + +export const ChatInput: React.FC = () => { + const [input, setInput] = useState(''); + const isProcessing = useChatStore((state) => state.isProcessing); + const { sendMessage } = useAiAssistant(); + + const handleSubmit = async (e: React.FormEvent) => { + e.preventDefault(); + if (!input.trim() || isProcessing) return; + + await sendMessage(input); + setInput(''); + }; + + return ( +
+
+ setInput(e.target.value)} + placeholder="Ask me to create a project, suggest resources..." + disabled={isProcessing} + className="flex-1 rounded-lg border border-gray-300 px-4 py-2 focus:outline-none focus:ring-2 focus:ring-blue-500 disabled:bg-gray-100" + /> + +
+ +
+ + + +
+
+ ); +}; + +const QuickAction: React.FC<{ action: string }> = ({ action }) => { + const { sendMessage } = useAiAssistant(); + + return ( + + ); +}; +``` + +--- + +## Phase 3: AI Assistant Hook (Week 3) + +### 3.1 AI Assistant Logic + +```typescript +// src/hooks/useAiAssistant.ts +import { useMcp } from '@/contexts/McpContext'; +import { useChatStore } from '@/stores/chatStore'; +import { OpenAI } from 'openai'; + +const openai = new OpenAI({ + apiKey: process.env.REACT_APP_OPENAI_API_KEY, + dangerouslyAllowBrowser: true // Only for demo; use backend proxy in production +}); + +export const useAiAssistant = () => { + const { client } = useMcp(); + const addMessage = useChatStore((state) => state.addMessage); + const updateMessage = useChatStore((state) => state.updateMessage); + const setProcessing = useChatStore((state) => state.setProcessing); + const context = useChatStore((state) => state.context); + const messages = useChatStore((state) => state.messages); + + const sendMessage = async (userMessage: string) => { + if (!client?.isConnected()) { + addMessage({ + role: 'system', + content: 'MCP connection lost. Please refresh the page.', + }); + return; + } + + // Add user message + addMessage({ + role: 'user', + content: userMessage, + }); + + setProcessing(true); + + try { + // Get available tools from MCP server + const tools = await client.listTools(); + + // Convert MCP tools to OpenAI function format + const openaiTools = tools.map((tool) => ({ + type: 'function' as const, + function: { + name: tool.name, + description: tool.description, + parameters: tool.inputSchema, + }, + })); + + // Build conversation history for OpenAI + const conversationMessages = [ + { + role: 'system' as const, + content: buildSystemPrompt(context), + }, + ...messages.slice(-10).map((msg) => ({ + role: msg.role as 'user' | 'assistant', + content: msg.content, + })), + { + role: 'user' as const, + content: userMessage, + }, + ]; + + // Call OpenAI with tools + const response = await openai.chat.completions.create({ + model: 'gpt-4-turbo-preview', + messages: conversationMessages, + tools: openaiTools, + tool_choice: 'auto', + }); + + const assistantMessage = response.choices[0].message; + + // Handle tool calls + if (assistantMessage.tool_calls) { + const messageId = crypto.randomUUID(); + + addMessage({ + role: 'assistant', + content: 'Let me help you with that...', + toolCalls: assistantMessage.tool_calls.map((tc) => ({ + id: tc.id, + toolName: tc.function.name, + arguments: JSON.parse(tc.function.arguments), + status: 'pending' as const, + })), + }); + + // Execute tools via MCP + for (const toolCall of assistantMessage.tool_calls) { + try { + const result = await client.callTool( + toolCall.function.name, + JSON.parse(toolCall.function.arguments) + ); + + updateMessage(messageId, { + toolCalls: assistantMessage.tool_calls.map((tc) => + tc.id === toolCall.id + ? { + id: tc.id, + toolName: tc.function.name, + arguments: JSON.parse(tc.function.arguments), + result: { + success: !result.isError, + data: result.content[0].text, + }, + status: 'completed' as const, + } + : tc + ), + }); + + // Parse result and update context + if (toolCall.function.name === 'create_project' && result.content[0].text) { + const project = JSON.parse(result.content[0].text); + useChatStore.getState().setContext({ + currentProject: { + id: project.id, + name: project.name, + apps: project.apps, + }, + }); + } + } catch (error) { + updateMessage(messageId, { + toolCalls: assistantMessage.tool_calls.map((tc) => + tc.id === toolCall.id + ? { + id: tc.id, + toolName: tc.function.name, + arguments: JSON.parse(tc.function.arguments), + result: { + success: false, + error: error instanceof Error ? error.message : 'Unknown error', + }, + status: 'failed' as const, + } + : tc + ), + }); + } + } + + // Get final response after tool execution + const finalResponse = await openai.chat.completions.create({ + model: 'gpt-4-turbo-preview', + messages: [ + ...conversationMessages, + assistantMessage, + ...assistantMessage.tool_calls.map((tc) => ({ + role: 'tool' as const, + tool_call_id: tc.id, + content: 'Tool executed successfully', + })), + ], + }); + + addMessage({ + role: 'assistant', + content: finalResponse.choices[0].message.content || 'Done!', + }); + } else { + // No tool calls, just add assistant response + addMessage({ + role: 'assistant', + content: assistantMessage.content || 'I understand. How can I help further?', + }); + } + } catch (error) { + addMessage({ + role: 'system', + content: `Error: ${error instanceof Error ? error.message : 'Unknown error'}`, + }); + } finally { + setProcessing(false); + } + }; + + return { sendMessage }; +}; + +function buildSystemPrompt(context: any): string { + return `You are an AI assistant for the Stacker platform, helping users build and deploy Docker-based application stacks. + +Current context: +${context.currentProject ? `- Working on project: "${context.currentProject.name}" (ID: ${context.currentProject.id})` : '- No active project'} +${context.lastAction ? `- Last action: ${context.lastAction}` : ''} + +You can help users with: +1. Creating new projects with multiple services +2. Suggesting appropriate resource limits (CPU, RAM, storage) +3. Listing available templates (WordPress, Node.js, Django, etc.) +4. Deploying projects to cloud providers +5. Managing cloud credentials +6. Validating domains and ports + +Always be helpful, concise, and guide users through multi-step processes one step at a time. +When creating projects, ask for all necessary details before calling the create_project tool.`; +} +``` + +--- + +## Phase 4: Form Integration (Week 4) + +### 4.1 Enhanced Project Form with AI + +```tsx +// src/components/project/ProjectFormWithAI.tsx +import React, { useState } from 'react'; +import { useChatStore } from '@/stores/chatStore'; +import { ChatSidebar } from '@/components/chat/ChatSidebar'; +import { ProjectForm } from '@/components/project/ProjectForm'; + +export const ProjectFormWithAI: React.FC = () => { + const [showChat, setShowChat] = useState(true); + const context = useChatStore((state) => state.context); + + // Auto-fill form from AI context + const formData = context.currentProject || { + name: '', + apps: [], + }; + + return ( +
+ {/* Main Form Area */} +
+
+
+

Create New Project

+ +
+ + +
+
+ + {/* Chat Sidebar */} + {showChat && ( +
+ +
+ )} +
+ ); +}; +``` + +### 4.2 Progressive Form Steps + +```tsx +// src/components/project/ProgressiveProjectForm.tsx +import React, { useState } from 'react'; +import { useAiAssistant } from '@/hooks/useAiAssistant'; +import { useChatStore } from '@/stores/chatStore'; + +const STEPS = [ + { id: 1, name: 'Basic Info', description: 'Project name and description' }, + { id: 2, name: 'Services', description: 'Add applications and Docker images' }, + { id: 3, name: 'Resources', description: 'Configure CPU, RAM, and storage' }, + { id: 4, name: 'Networking', description: 'Set up domains and ports' }, + { id: 5, name: 'Review', description: 'Review and deploy' }, +]; + +export const ProgressiveProjectForm: React.FC = () => { + const [currentStep, setCurrentStep] = useState(1); + const context = useChatStore((state) => state.context); + const { sendMessage } = useAiAssistant(); + + const project = context.currentProject || { + name: '', + description: '', + apps: [], + }; + + const handleAiSuggestion = (prompt: string) => { + sendMessage(prompt); + }; + + return ( +
+ {/* Progress Stepper */} +
+
+ {STEPS.map((step, index) => ( +
+
+
+ {step.id < currentStep ? '✓' : step.id} +
+
{step.name}
+
{step.description}
+
+
+ ))} +
+
+ + {/* AI Suggestions */} +
+
+ + + +
+

+ AI Suggestion for Step {currentStep}: +

+ {currentStep === 1 && ( + + )} + {currentStep === 2 && ( + + )} + {currentStep === 3 && ( + + )} +
+
+
+ + {/* Step Content */} +
+ {currentStep === 1 && } + {currentStep === 2 && } + {currentStep === 3 && } + {currentStep === 4 && } + {currentStep === 5 && } +
+ + {/* Navigation */} +
+ + +
+
+ ); +}; +``` + +--- + +## Phase 5: Testing & Optimization (Week 5) + +### 5.1 Unit Tests + +```typescript +// src/lib/mcp/__tests__/client.test.ts +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { StackerMcpClient } from '../client'; + +describe('StackerMcpClient', () => { + let client: StackerMcpClient; + + beforeEach(() => { + client = new StackerMcpClient({ + url: 'ws://localhost:8000/mcp', + authToken: 'test-token', + }); + }); + + afterEach(async () => { + if (client.isConnected()) { + await client.disconnect(); + } + }); + + it('should connect successfully', async () => { + await client.connect(); + expect(client.isConnected()).toBe(true); + }); + + it('should list available tools', async () => { + await client.connect(); + const tools = await client.listTools(); + + expect(tools).toBeInstanceOf(Array); + expect(tools.length).toBeGreaterThan(0); + expect(tools[0]).toHaveProperty('name'); + expect(tools[0]).toHaveProperty('description'); + }); + + it('should call create_project tool', async () => { + await client.connect(); + + const result = await client.callTool('create_project', { + name: 'Test Project', + apps: [ + { + name: 'web', + dockerImage: { repository: 'nginx' }, + }, + ], + }); + + expect(result.content).toBeInstanceOf(Array); + expect(result.isError).toBeFalsy(); + }); +}); +``` + +### 5.2 Integration Tests + +```typescript +// src/components/chat/__tests__/ChatSidebar.integration.test.tsx +import { render, screen, waitFor } from '@testing-library/react'; +import userEvent from '@testing-library/user-event'; +import { ChatSidebar } from '../ChatSidebar'; +import { McpProvider } from '@/contexts/McpContext'; + +describe('ChatSidebar Integration', () => { + it('should send message and receive response', async () => { + render( + + + + ); + + const input = screen.getByPlaceholderText(/ask me to create/i); + const sendButton = screen.getByRole('button', { name: /send/i }); + + await userEvent.type(input, 'Create a WordPress project'); + await userEvent.click(sendButton); + + await waitFor(() => { + expect(screen.getByText('Create a WordPress project')).toBeInTheDocument(); + }); + + await waitFor(() => { + expect(screen.getByText(/let me help/i)).toBeInTheDocument(); + }, { timeout: 5000 }); + }); +}); +``` + +### 5.3 Performance Optimization + +```typescript +// src/lib/mcp/optimizations.ts + +// 1. Debounce AI calls to prevent spam +import { useMemo } from 'react'; +import debounce from 'lodash/debounce'; + +export const useDebouncedAi = () => { + const { sendMessage } = useAiAssistant(); + + const debouncedSend = useMemo( + () => debounce(sendMessage, 500), + [sendMessage] + ); + + return { sendMessage: debouncedSend }; +}; + +// 2. Cache tool list +export const useToolsCache = () => { + const { client } = useMcp(); + const { data: tools, isLoading } = useQuery({ + queryKey: ['mcp-tools'], + queryFn: () => client?.listTools(), + staleTime: 5 * 60 * 1000, // 5 minutes + enabled: !!client?.isConnected(), + }); + + return { tools, isLoading }; +}; + +// 3. Lazy load chat component +import { lazy, Suspense } from 'react'; + +const ChatSidebar = lazy(() => import('@/components/chat/ChatSidebar')); + +export const LazyChat = () => ( + }> + + +); +``` + +--- + +## Environment Configuration + +### Production Setup + +```bash +# .env.production +REACT_APP_MCP_URL=wss://api.try.direct/mcp +REACT_APP_API_URL=https://api.try.direct +REACT_APP_OPENAI_API_KEY=your_openai_key_here +``` + +### Development Setup + +```bash +# .env.development +REACT_APP_MCP_URL=ws://localhost:8000/mcp +REACT_APP_API_URL=http://localhost:8000 +REACT_APP_OPENAI_API_KEY=your_openai_key_here +``` + +--- + +## Error Handling Best Practices + +```typescript +// src/lib/mcp/errorHandler.ts + +export class McpError extends Error { + constructor( + message: string, + public code: string, + public recoverable: boolean = true + ) { + super(message); + this.name = 'McpError'; + } +} + +export const handleMcpError = (error: unknown): McpError => { + if (error instanceof McpError) { + return error; + } + + if (error instanceof Error) { + if (error.message.includes('WebSocket')) { + return new McpError( + 'Connection lost. Please refresh the page.', + 'CONNECTION_LOST', + true + ); + } + + if (error.message.includes('auth')) { + return new McpError( + 'Authentication failed. Please log in again.', + 'AUTH_FAILED', + false + ); + } + } + + return new McpError( + 'An unexpected error occurred.', + 'UNKNOWN_ERROR', + true + ); +}; +``` + +--- + +## Deployment Checklist + +### Pre-Launch +- [ ] All MCP tools tested and working +- [ ] WebSocket connection stable for extended periods +- [ ] Error handling covers all edge cases +- [ ] Loading states implemented for all async operations +- [ ] Mobile responsive design verified +- [ ] Authentication integrated with existing OAuth +- [ ] Rate limiting enforced on frontend +- [ ] CORS configured for production domain + +### Production +- [ ] Environment variables set correctly +- [ ] HTTPS/WSS enabled for secure connections +- [ ] CDN configured for static assets +- [ ] Analytics tracking added +- [ ] Error logging (Sentry, LogRocket) +- [ ] Performance monitoring +- [ ] User feedback mechanism + +--- + +## User Flows & Examples + +### Example 1: Create WordPress Site + +**User**: "Create a WordPress site" + +**AI Response**: "I'll help you create a WordPress site. Let me ask a few questions: +1. What would you like to name your project? +2. Do you need a database (MySQL)? +3. Expected traffic level (low/medium/high)?" + +**User**: "Call it 'My Blog', yes I need MySQL, low traffic" + +**AI**: *Calls tools:* +``` +suggest_resources({ app_type: "wordpress", expected_traffic: "low" }) +suggest_resources({ app_type: "mysql", expected_traffic: "low" }) +create_project({ + name: "My Blog", + apps: [ + { name: "wordpress", dockerImage: { repository: "wordpress" }, resources: { cpu: 1, ram: 2, storage: 20 } }, + { name: "mysql", dockerImage: { repository: "mysql" }, resources: { cpu: 2, ram: 4, storage: 50 } } + ] +}) +``` + +**AI Response**: "✓ Created project 'My Blog' with WordPress and MySQL! Resource suggestions: +- WordPress: 1 CPU, 2GB RAM, 20GB storage +- MySQL: 2 CPU, 4GB RAM, 50GB storage + +Would you like to deploy this now?" + +### Example 2: List Projects + +**User**: "Show my projects" + +**AI**: *Calls `list_projects()`* + +**AI Response**: "You have 3 projects: +1. My Blog (WordPress + MySQL) - Created Dec 27 +2. API Server (Node.js) - Created Dec 26 +3. E-commerce (Next.js + PostgreSQL) - Created Dec 25 + +Which one would you like to work on?" + +--- + +## Troubleshooting Guide + +### Common Issues + +#### 1. WebSocket Connection Fails +```typescript +// Check: Is MCP server running? +// Check: Is auth token valid? +// Check: CORS headers configured? + +// Solution: +console.log('MCP URL:', process.env.REACT_APP_MCP_URL); +console.log('Auth token:', token ? 'Present' : 'Missing'); +``` + +#### 2. Tool Calls Timeout +```typescript +// Increase timeout in client +const result = await client.callTool(name, args, { timeout: 30000 }); +``` + +#### 3. Context Not Persisting +```typescript +// Check: Is Zustand store properly configured? +// Ensure setContext is called after tool execution +useChatStore.getState().setContext({ currentProject: project }); +``` + +--- + +## Future Enhancements + +### Phase 2 Features +- **Voice Input**: Add speech-to-text for hands-free interaction +- **Template Marketplace**: Browse and install community templates +- **Multi-language Support**: Internationalization for non-English users +- **Collaborative Editing**: Multiple users working on same project +- **Version Control**: Git integration for project configurations +- **Cost Estimation**: Show estimated monthly costs for deployments + +### Advanced AI Features +- **Proactive Suggestions**: AI monitors form and suggests improvements +- **Error Prevention**: Validate before deployment and warn about issues +- **Learning Mode**: AI learns from user preferences over time +- **Guided Tutorials**: Step-by-step walkthroughs for beginners + +--- + +## Performance Targets + +- **Initial Load**: < 2 seconds +- **Chat Message Latency**: < 500ms +- **Tool Execution**: < 3 seconds (p95) +- **WebSocket Reconnect**: < 5 seconds +- **Memory Usage**: < 50MB per tab + +--- + +## Security Considerations + +1. **Token Security**: Never expose OpenAI API key in frontend; use backend proxy +2. **Input Sanitization**: Validate all user inputs before sending to AI +3. **Rate Limiting**: Implement frontend rate limiting to prevent abuse +4. **XSS Prevention**: Sanitize AI responses before rendering as HTML +5. **CSP Headers**: Configure Content Security Policy for production + +--- + +## Team Coordination + +### Frontend Team Responsibilities +- Implement React components +- Design chat UI/UX +- Handle state management +- Write unit/integration tests + +### Backend Team Responsibilities +- Ensure MCP server is production-ready +- Provide WebSocket endpoint +- Maintain tool schemas +- Monitor performance + +### Shared Responsibilities +- Define tool contracts (JSON schemas) +- End-to-end testing +- Documentation +- Deployment coordination + +--- + +## Resources & Links + +- **MCP SDK Docs**: https://github.com/modelcontextprotocol/sdk +- **OpenAI API**: https://platform.openai.com/docs +- **WebSocket API**: https://developer.mozilla.org/en-US/docs/Web/API/WebSocket +- **React Query**: https://tanstack.com/query/latest +- **Zustand**: https://github.com/pmndrs/zustand + +--- + +## Contact + +**Frontend Lead**: [Your Name] +**Questions**: Open GitHub issue or Slack #stacker-ai channel diff --git a/migrations/20251227140000_casbin_mcp_endpoint.down.sql b/migrations/20251227140000_casbin_mcp_endpoint.down.sql new file mode 100644 index 0000000..6f26ad9 --- /dev/null +++ b/migrations/20251227140000_casbin_mcp_endpoint.down.sql @@ -0,0 +1,7 @@ +-- Remove Casbin rules for MCP WebSocket endpoint + +DELETE FROM public.casbin_rule +WHERE ptype = 'p' + AND v0 IN ('group_admin', 'group_user') + AND v1 = '/mcp' + AND v2 = 'GET'; diff --git a/migrations/20251227140000_casbin_mcp_endpoint.up.sql b/migrations/20251227140000_casbin_mcp_endpoint.up.sql new file mode 100644 index 0000000..9eb3a28 --- /dev/null +++ b/migrations/20251227140000_casbin_mcp_endpoint.up.sql @@ -0,0 +1,8 @@ +-- Add Casbin rules for MCP WebSocket endpoint +-- Allow authenticated users and admins to access MCP + +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) +VALUES + ('p', 'group_admin', '/mcp', 'GET', '', '', ''), + ('p', 'group_user', '/mcp', 'GET', '', '', '') +ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING; diff --git a/src/lib.rs b/src/lib.rs index 45e6ae9..03c6203 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -3,6 +3,7 @@ pub mod console; pub mod db; pub mod forms; pub mod helpers; +pub mod mcp; mod middleware; pub mod models; pub mod routes; diff --git a/src/mcp/mod.rs b/src/mcp/mod.rs new file mode 100644 index 0000000..94bb53d --- /dev/null +++ b/src/mcp/mod.rs @@ -0,0 +1,11 @@ +pub mod protocol; +pub mod registry; +pub mod session; +pub mod websocket; +#[cfg(test)] +mod protocol_tests; + +pub use protocol::*; +pub use registry::{ToolContext, ToolHandler, ToolRegistry}; +pub use session::McpSession; +pub use websocket::mcp_websocket; diff --git a/src/mcp/protocol.rs b/src/mcp/protocol.rs new file mode 100644 index 0000000..c7e982e --- /dev/null +++ b/src/mcp/protocol.rs @@ -0,0 +1,226 @@ +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +/// JSON-RPC 2.0 Request structure +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct JsonRpcRequest { + pub jsonrpc: String, // Must be "2.0" + #[serde(skip_serializing_if = "Option::is_none")] + pub id: Option, + pub method: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub params: Option, +} + +/// JSON-RPC 2.0 Response structure +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct JsonRpcResponse { + pub jsonrpc: String, // Must be "2.0" + #[serde(skip_serializing_if = "Option::is_none")] + pub id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub result: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub error: Option, +} + +impl JsonRpcResponse { + pub fn success(id: Option, result: Value) -> Self { + Self { + jsonrpc: "2.0".to_string(), + id, + result: Some(result), + error: None, + } + } + + pub fn error(id: Option, error: JsonRpcError) -> Self { + Self { + jsonrpc: "2.0".to_string(), + id, + result: None, + error: Some(error), + } + } +} + +/// JSON-RPC 2.0 Error structure +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct JsonRpcError { + pub code: i32, + pub message: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub data: Option, +} + +impl JsonRpcError { + pub fn parse_error() -> Self { + Self { + code: -32700, + message: "Parse error".to_string(), + data: None, + } + } + + pub fn invalid_request() -> Self { + Self { + code: -32600, + message: "Invalid Request".to_string(), + data: None, + } + } + + pub fn method_not_found(method: &str) -> Self { + Self { + code: -32601, + message: format!("Method not found: {}", method), + data: None, + } + } + + pub fn invalid_params(msg: &str) -> Self { + Self { + code: -32602, + message: "Invalid params".to_string(), + data: Some(serde_json::json!({ "error": msg })), + } + } + + pub fn internal_error(msg: &str) -> Self { + Self { + code: -32603, + message: "Internal error".to_string(), + data: Some(serde_json::json!({ "error": msg })), + } + } + + pub fn custom(code: i32, message: String, data: Option) -> Self { + Self { + code, + message, + data, + } + } +} + +// MCP-specific types + +/// MCP Tool definition +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Tool { + pub name: String, + pub description: String, + #[serde(rename = "inputSchema")] + pub input_schema: Value, // JSON Schema for parameters +} + +/// Response for tools/list method +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ToolListResponse { + pub tools: Vec, +} + +/// Request for tools/call method +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CallToolRequest { + pub name: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub arguments: Option, +} + +/// Response for tools/call method +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CallToolResponse { + pub content: Vec, + #[serde(rename = "isError", skip_serializing_if = "Option::is_none")] + pub is_error: Option, +} + +impl CallToolResponse { + pub fn text(text: String) -> Self { + Self { + content: vec![ToolContent::Text { text }], + is_error: None, + } + } + + pub fn error(text: String) -> Self { + Self { + content: vec![ToolContent::Text { text }], + is_error: Some(true), + } + } +} + +/// Tool execution result content +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "type")] +pub enum ToolContent { + #[serde(rename = "text")] + Text { text: String }, + #[serde(rename = "image")] + Image { + data: String, // base64 encoded + #[serde(rename = "mimeType")] + mime_type: String, + }, +} + +/// MCP Initialize request parameters +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct InitializeParams { + #[serde(rename = "protocolVersion")] + pub protocol_version: String, + pub capabilities: ClientCapabilities, + #[serde(rename = "clientInfo", skip_serializing_if = "Option::is_none")] + pub client_info: Option, +} + +/// Client information +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ClientInfo { + pub name: String, + pub version: String, +} + +/// Client capabilities +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ClientCapabilities { + #[serde(skip_serializing_if = "Option::is_none")] + pub experimental: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub sampling: Option, +} + +/// MCP Initialize response +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct InitializeResult { + #[serde(rename = "protocolVersion")] + pub protocol_version: String, + pub capabilities: ServerCapabilities, + #[serde(rename = "serverInfo")] + pub server_info: ServerInfo, +} + +/// Server capabilities +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ServerCapabilities { + #[serde(skip_serializing_if = "Option::is_none")] + pub tools: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub experimental: Option, +} + +/// Tools capability +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ToolsCapability { + #[serde(rename = "listChanged", skip_serializing_if = "Option::is_none")] + pub list_changed: Option, +} + +/// Server information +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ServerInfo { + pub name: String, + pub version: String, +} diff --git a/src/mcp/protocol_tests.rs b/src/mcp/protocol_tests.rs new file mode 100644 index 0000000..864275b --- /dev/null +++ b/src/mcp/protocol_tests.rs @@ -0,0 +1,147 @@ +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_json_rpc_request_deserialize() { + let json = r#"{ + "jsonrpc": "2.0", + "id": 1, + "method": "initialize", + "params": {"test": "value"} + }"#; + + let req: JsonRpcRequest = serde_json::from_str(json).unwrap(); + assert_eq!(req.jsonrpc, "2.0"); + assert_eq!(req.method, "initialize"); + assert!(req.params.is_some()); + } + + #[test] + fn test_json_rpc_response_success() { + let response = JsonRpcResponse::success( + Some(serde_json::json!(1)), + serde_json::json!({"result": "ok"}), + ); + + assert_eq!(response.jsonrpc, "2.0"); + assert!(response.result.is_some()); + assert!(response.error.is_none()); + } + + #[test] + fn test_json_rpc_response_error() { + let response = JsonRpcResponse::error( + Some(serde_json::json!(1)), + JsonRpcError::method_not_found("test_method"), + ); + + assert_eq!(response.jsonrpc, "2.0"); + assert!(response.result.is_none()); + assert!(response.error.is_some()); + + let error = response.error.unwrap(); + assert_eq!(error.code, -32601); + assert!(error.message.contains("test_method")); + } + + #[test] + fn test_json_rpc_error_codes() { + assert_eq!(JsonRpcError::parse_error().code, -32700); + assert_eq!(JsonRpcError::invalid_request().code, -32600); + assert_eq!(JsonRpcError::method_not_found("test").code, -32601); + assert_eq!(JsonRpcError::invalid_params("test").code, -32602); + assert_eq!(JsonRpcError::internal_error("test").code, -32603); + } + + #[test] + fn test_tool_schema() { + let tool = Tool { + name: "test_tool".to_string(), + description: "A test tool".to_string(), + input_schema: serde_json::json!({ + "type": "object", + "properties": { + "param1": { "type": "string" } + } + }), + }; + + assert_eq!(tool.name, "test_tool"); + assert_eq!(tool.description, "A test tool"); + } + + #[test] + fn test_call_tool_request_deserialize() { + let json = r#"{ + "name": "create_project", + "arguments": {"name": "Test Project"} + }"#; + + let req: CallToolRequest = serde_json::from_str(json).unwrap(); + assert_eq!(req.name, "create_project"); + assert!(req.arguments.is_some()); + } + + #[test] + fn test_call_tool_response() { + let response = CallToolResponse::text("Success".to_string()); + + assert_eq!(response.content.len(), 1); + assert!(response.is_error.is_none()); + + match &response.content[0] { + ToolContent::Text { text } => assert_eq!(text, "Success"), + _ => panic!("Expected text content"), + } + } + + #[test] + fn test_call_tool_response_error() { + let response = CallToolResponse::error("Failed".to_string()); + + assert_eq!(response.content.len(), 1); + assert_eq!(response.is_error, Some(true)); + } + + #[test] + fn test_initialize_params_deserialize() { + let json = r#"{ + "protocolVersion": "2024-11-05", + "capabilities": {}, + "clientInfo": { + "name": "test-client", + "version": "1.0.0" + } + }"#; + + let params: InitializeParams = serde_json::from_str(json).unwrap(); + assert_eq!(params.protocol_version, "2024-11-05"); + assert!(params.client_info.is_some()); + + let client_info = params.client_info.unwrap(); + assert_eq!(client_info.name, "test-client"); + assert_eq!(client_info.version, "1.0.0"); + } + + #[test] + fn test_initialize_result_serialize() { + let result = InitializeResult { + protocol_version: "2024-11-05".to_string(), + capabilities: ServerCapabilities { + tools: Some(ToolsCapability { + list_changed: Some(false), + }), + experimental: None, + }, + server_info: ServerInfo { + name: "stacker-mcp".to_string(), + version: "0.2.0".to_string(), + }, + }; + + let json = serde_json::to_string(&result).unwrap(); + assert!(json.contains("stacker-mcp")); + assert!(json.contains("2024-11-05")); + } +} diff --git a/src/mcp/registry.rs b/src/mcp/registry.rs new file mode 100644 index 0000000..1027264 --- /dev/null +++ b/src/mcp/registry.rs @@ -0,0 +1,80 @@ +use crate::configuration::Settings; +use crate::models; +use async_trait::async_trait; +use serde_json::Value; +use sqlx::PgPool; +use std::collections::HashMap; +use std::sync::Arc; + +use super::protocol::{Tool, ToolContent}; + +/// Context passed to tool handlers +pub struct ToolContext { + pub user: Arc, + pub pg_pool: PgPool, + pub settings: Arc, +} + +/// Trait for tool handlers +#[async_trait] +pub trait ToolHandler: Send + Sync { + /// Execute the tool with given arguments + async fn execute(&self, args: Value, context: &ToolContext) + -> Result; + + /// Return the tool schema definition + fn schema(&self) -> Tool; +} + +/// Tool registry managing all available MCP tools +pub struct ToolRegistry { + handlers: HashMap>, +} + +impl ToolRegistry { + /// Create a new tool registry with all handlers registered + pub fn new() -> Self { + let registry = Self { + handlers: HashMap::new(), + }; + + // TODO: Register tools as they are implemented + // registry.register("create_project", Box::new(CreateProjectTool)); + // registry.register("list_projects", Box::new(ListProjectsTool)); + // registry.register("get_project", Box::new(GetProjectTool)); + // registry.register("suggest_resources", Box::new(SuggestResourcesTool)); + + registry + } + + /// Register a tool handler + pub fn register(&mut self, name: &str, handler: Box) { + self.handlers.insert(name.to_string(), handler); + } + + /// Get a tool handler by name + pub fn get(&self, name: &str) -> Option<&Box> { + self.handlers.get(name) + } + + /// List all available tools + pub fn list_tools(&self) -> Vec { + self.handlers.values().map(|h| h.schema()).collect() + } + + /// Check if a tool exists + pub fn has_tool(&self, name: &str) -> bool { + self.handlers.contains_key(name) + } + + /// Get count of registered tools + pub fn count(&self) -> usize { + self.handlers.len() + } +} + +impl Default for ToolRegistry { + fn default() -> Self { + Self::new() + } +} diff --git a/src/mcp/session.rs b/src/mcp/session.rs new file mode 100644 index 0000000..55c443c --- /dev/null +++ b/src/mcp/session.rs @@ -0,0 +1,53 @@ +use serde_json::Value; +use std::collections::HashMap; + +/// MCP Session state management +#[derive(Debug, Clone)] +pub struct McpSession { + pub id: String, + pub created_at: chrono::DateTime, + pub context: HashMap, + pub initialized: bool, +} + +impl McpSession { + pub fn new() -> Self { + Self { + id: uuid::Uuid::new_v4().to_string(), + created_at: chrono::Utc::now(), + context: HashMap::new(), + initialized: false, + } + } + + /// Store context value + pub fn set_context(&mut self, key: String, value: Value) { + self.context.insert(key, value); + } + + /// Retrieve context value + pub fn get_context(&self, key: &str) -> Option<&Value> { + self.context.get(key) + } + + /// Clear all context + pub fn clear_context(&mut self) { + self.context.clear(); + } + + /// Mark session as initialized + pub fn set_initialized(&mut self, initialized: bool) { + self.initialized = initialized; + } + + /// Check if session is initialized + pub fn is_initialized(&self) -> bool { + self.initialized + } +} + +impl Default for McpSession { + fn default() -> Self { + Self::new() + } +} diff --git a/src/mcp/websocket.rs b/src/mcp/websocket.rs new file mode 100644 index 0000000..76425a5 --- /dev/null +++ b/src/mcp/websocket.rs @@ -0,0 +1,317 @@ +use crate::configuration::Settings; +use crate::models; +use actix::{Actor, ActorContext, AsyncContext, StreamHandler}; +use actix_web::{web, Error, HttpRequest, HttpResponse}; +use actix_web_actors::ws; +use sqlx::PgPool; +use std::sync::Arc; +use std::time::{Duration, Instant}; + +use super::protocol::{ + CallToolRequest, CallToolResponse, InitializeParams, InitializeResult, + JsonRpcError, JsonRpcRequest, JsonRpcResponse, ServerCapabilities, ServerInfo, + ToolListResponse, ToolsCapability, +}; +use super::registry::{ToolContext, ToolRegistry}; +use super::session::McpSession; + +/// WebSocket heartbeat interval +const HEARTBEAT_INTERVAL: Duration = Duration::from_secs(5); +/// Client timeout - close connection if no heartbeat received +const CLIENT_TIMEOUT: Duration = Duration::from_secs(10); + +/// MCP WebSocket actor +pub struct McpWebSocket { + user: Arc, + session: McpSession, + registry: Arc, + pg_pool: PgPool, + settings: Arc, + hb: Instant, +} + +impl McpWebSocket { + pub fn new( + user: Arc, + registry: Arc, + pg_pool: PgPool, + settings: Arc, + ) -> Self { + Self { + user, + session: McpSession::new(), + registry, + pg_pool, + settings, + hb: Instant::now(), + } + } + + /// Start heartbeat process to check connection health + fn hb(&self, ctx: &mut ::Context) { + ctx.run_interval(HEARTBEAT_INTERVAL, |act, ctx| { + if Instant::now().duration_since(act.hb) > CLIENT_TIMEOUT { + tracing::warn!("MCP WebSocket client heartbeat failed, disconnecting"); + ctx.stop(); + return; + } + + ctx.ping(b""); + }); + } + + /// Handle JSON-RPC request + async fn handle_jsonrpc(&self, req: JsonRpcRequest) -> JsonRpcResponse { + match req.method.as_str() { + "initialize" => self.handle_initialize(req).await, + "tools/list" => self.handle_tools_list(req).await, + "tools/call" => self.handle_tools_call(req).await, + _ => JsonRpcResponse::error(req.id, JsonRpcError::method_not_found(&req.method)), + } + } + + /// Handle MCP initialize method + async fn handle_initialize(&self, req: JsonRpcRequest) -> JsonRpcResponse { + let params: InitializeParams = match req.params { + Some(p) => match serde_json::from_value(p) { + Ok(params) => params, + Err(e) => { + return JsonRpcResponse::error( + req.id, + JsonRpcError::invalid_params(&e.to_string()), + ) + } + }, + None => { + return JsonRpcResponse::error(req.id, JsonRpcError::invalid_params("Missing params")) + } + }; + + tracing::info!( + "MCP client initialized: protocol_version={}, client={}", + params.protocol_version, + params + .client_info + .as_ref() + .map(|c| c.name.as_str()) + .unwrap_or("unknown") + ); + + let result = InitializeResult { + protocol_version: "2024-11-05".to_string(), + capabilities: ServerCapabilities { + tools: Some(ToolsCapability { + list_changed: Some(false), + }), + experimental: None, + }, + server_info: ServerInfo { + name: "stacker-mcp".to_string(), + version: env!("CARGO_PKG_VERSION").to_string(), + }, + }; + + JsonRpcResponse::success(req.id, serde_json::to_value(result).unwrap()) + } + + /// Handle tools/list method + async fn handle_tools_list(&self, req: JsonRpcRequest) -> JsonRpcResponse { + let tools = self.registry.list_tools(); + + tracing::debug!("Listing {} available tools", tools.len()); + + let result = ToolListResponse { tools }; + + JsonRpcResponse::success(req.id, serde_json::to_value(result).unwrap()) + } + + /// Handle tools/call method + async fn handle_tools_call(&self, req: JsonRpcRequest) -> JsonRpcResponse { + let call_req: CallToolRequest = match req.params { + Some(p) => match serde_json::from_value(p) { + Ok(params) => params, + Err(e) => { + return JsonRpcResponse::error( + req.id, + JsonRpcError::invalid_params(&e.to_string()), + ) + } + }, + None => { + return JsonRpcResponse::error(req.id, JsonRpcError::invalid_params("Missing params")) + } + }; + + let tool_span = tracing::info_span!( + "mcp_tool_call", + tool = %call_req.name, + user = %self.user.id + ); + let _enter = tool_span.enter(); + + match self.registry.get(&call_req.name) { + Some(handler) => { + let context = ToolContext { + user: self.user.clone(), + pg_pool: self.pg_pool.clone(), + settings: self.settings.clone(), + }; + + match handler + .execute( + call_req.arguments.unwrap_or(serde_json::json!({})), + &context, + ) + .await + { + Ok(content) => { + tracing::info!("Tool executed successfully"); + let response = CallToolResponse { + content: vec![content], + is_error: None, + }; + JsonRpcResponse::success(req.id, serde_json::to_value(response).unwrap()) + } + Err(e) => { + tracing::error!("Tool execution failed: {}", e); + let response = CallToolResponse::error(format!("Error: {}", e)); + JsonRpcResponse::success(req.id, serde_json::to_value(response).unwrap()) + } + } + } + None => { + tracing::warn!("Tool not found: {}", call_req.name); + JsonRpcResponse::error( + req.id, + JsonRpcError::custom( + -32001, + format!("Tool not found: {}", call_req.name), + None, + ), + ) + } + } + } +} + +impl Actor for McpWebSocket { + type Context = ws::WebsocketContext; + + fn started(&mut self, ctx: &mut Self::Context) { + tracing::info!( + "MCP WebSocket connection started: session_id={}, user={}", + self.session.id, + self.user.id + ); + self.hb(ctx); + } + + fn stopped(&mut self, _ctx: &mut Self::Context) { + tracing::info!( + "MCP WebSocket connection closed: session_id={}, user={}", + self.session.id, + self.user.id + ); + } +} + +impl StreamHandler> for McpWebSocket { + fn handle(&mut self, msg: Result, ctx: &mut Self::Context) { + match msg { + Ok(ws::Message::Ping(msg)) => { + self.hb = Instant::now(); + ctx.pong(&msg); + } + Ok(ws::Message::Pong(_)) => { + self.hb = Instant::now(); + } + Ok(ws::Message::Text(text)) => { + tracing::debug!("Received JSON-RPC message: {}", text); + + let request: JsonRpcRequest = match serde_json::from_str(&text) { + Ok(req) => req, + Err(e) => { + tracing::error!("Failed to parse JSON-RPC request: {}", e); + let error_response = + JsonRpcResponse::error(None, JsonRpcError::parse_error()); + ctx.text(serde_json::to_string(&error_response).unwrap()); + return; + } + }; + + let user = self.user.clone(); + let session = self.session.clone(); + let registry = self.registry.clone(); + let pg_pool = self.pg_pool.clone(); + let settings = self.settings.clone(); + + let fut = async move { + let ws = McpWebSocket { + user, + session, + registry, + pg_pool, + settings, + hb: Instant::now(), + }; + ws.handle_jsonrpc(request).await + }; + + let addr = ctx.address(); + actix::spawn(async move { + let response = fut.await; + addr.do_send(SendResponse(response)); + }); + } + Ok(ws::Message::Binary(_)) => { + tracing::warn!("Binary messages not supported in MCP protocol"); + } + Ok(ws::Message::Close(reason)) => { + tracing::info!("MCP WebSocket close received: {:?}", reason); + ctx.close(reason); + ctx.stop(); + } + _ => {} + } + } +} + +/// Message to send JSON-RPC response back to client +#[derive(actix::Message)] +#[rtype(result = "()")] +struct SendResponse(JsonRpcResponse); + +impl actix::Handler for McpWebSocket { + type Result = (); + + fn handle(&mut self, msg: SendResponse, ctx: &mut Self::Context) { + let response_text = serde_json::to_string(&msg.0).unwrap(); + tracing::debug!("Sending JSON-RPC response: {}", response_text); + ctx.text(response_text); + } +} + +/// WebSocket route handler - entry point for MCP connections +#[tracing::instrument( + name = "MCP WebSocket connection", + skip(req, stream, user, registry, pg_pool, settings) +)] +pub async fn mcp_websocket( + req: HttpRequest, + stream: web::Payload, + user: web::ReqData>, + registry: web::Data>, + pg_pool: web::Data, + settings: web::Data, +) -> Result { + tracing::info!("New MCP WebSocket connection request from user: {}", user.id); + + let ws = McpWebSocket::new( + user.into_inner(), + registry.get_ref().clone(), + pg_pool.get_ref().clone(), + settings.as_ref().clone().into(), + ); + + ws::start(ws, &req, stream) +} diff --git a/src/startup.rs b/src/startup.rs index 4ff0177..ea5f9f1 100644 --- a/src/startup.rs +++ b/src/startup.rs @@ -1,11 +1,13 @@ use crate::configuration::Settings; use crate::helpers; +use crate::mcp; use crate::middleware; use crate::routes; use actix_cors::Cors; use actix_web::{dev::Server, error, http, web, App, HttpServer}; use sqlx::{Pool, Postgres}; use std::net::TcpListener; +use std::sync::Arc; use tracing_actix_web::TracingLogger; pub async fn run( @@ -22,6 +24,10 @@ pub async fn run( let vault_client = helpers::VaultClient::new(&settings.vault); let vault_client = web::Data::new(vault_client); + // Initialize MCP tool registry + let mcp_registry = Arc::new(mcp::ToolRegistry::new()); + let mcp_registry = web::Data::new(mcp_registry); + let authorization = middleware::authorization::try_new(settings.database.connection_string()).await?; let json_config = web::JsonConfig::default().error_handler(|err, _req| { @@ -132,10 +138,15 @@ pub async fn run( .service(crate::routes::agreement::get_handler) .service(crate::routes::agreement::accept_handler), ) + .service( + web::resource("/mcp") + .route(web::get().to(mcp::mcp_websocket)) + ) .app_data(json_config.clone()) .app_data(pg_pool.clone()) .app_data(mq_manager.clone()) .app_data(vault_client.clone()) + .app_data(mcp_registry.clone()) .app_data(settings.clone()) }) .listen(listener)? From 12933779778d525e75efa697de9b943e1d35591f Mon Sep 17 00:00:00 2001 From: vsilent Date: Sun, 28 Dec 2025 14:37:41 +0200 Subject: [PATCH 21/35] root/admin_group user, MCP registry, tools implementation --- Cargo.lock | 57 ++++++++++++++ docker-compose.dev.yml | 77 +++++++++++++++++++ ...227000000_casbin_root_admin_group.down.sql | 3 + ...51227000000_casbin_root_admin_group.up.sql | 3 + src/mcp/registry.rs | 3 +- src/mcp/websocket.rs | 6 +- 6 files changed, 145 insertions(+), 4 deletions(-) create mode 100644 docker-compose.dev.yml create mode 100644 migrations/20251227000000_casbin_root_admin_group.down.sql create mode 100644 migrations/20251227000000_casbin_root_admin_group.up.sql diff --git a/Cargo.lock b/Cargo.lock index b02e164..0263c66 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,6 +2,31 @@ # It is not intended for manual editing. version = 4 +[[package]] +name = "actix" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de7fa236829ba0841304542f7614c42b80fca007455315c45c785ccfa873a85b" +dependencies = [ + "actix-macros", + "actix-rt", + "actix_derive", + "bitflags 2.10.0", + "bytes", + "crossbeam-channel", + "futures-core", + "futures-sink", + "futures-task", + "futures-util", + "log", + "once_cell", + "parking_lot", + "pin-project-lite", + "smallvec", + "tokio", + "tokio-util", +] + [[package]] name = "actix-casbin-auth" version = "1.1.0" @@ -200,6 +225,24 @@ dependencies = [ "url", ] +[[package]] +name = "actix-web-actors" +version = "4.3.1+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f98c5300b38fd004fe7d2a964f9a90813fdbe8a81fed500587e78b1b71c6f980" +dependencies = [ + "actix", + "actix-codec", + "actix-http", + "actix-web", + "bytes", + "bytestring", + "futures-core", + "pin-project-lite", + "tokio", + "tokio-util", +] + [[package]] name = "actix-web-codegen" version = "4.3.0" @@ -212,6 +255,17 @@ dependencies = [ "syn 2.0.111", ] +[[package]] +name = "actix_derive" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6ac1e58cded18cb28ddc17143c4dea5345b3ad575e14f32f66e4054a56eb271" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.111", +] + [[package]] name = "adler2" version = "2.0.1" @@ -4263,11 +4317,14 @@ checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" name = "stacker" version = "0.2.0" dependencies = [ + "actix", "actix-casbin-auth", "actix-cors", "actix-http", "actix-web", + "actix-web-actors", "aes-gcm", + "async-trait", "base64 0.22.1", "brotli 3.5.0", "casbin", diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml new file mode 100644 index 0000000..864d1ce --- /dev/null +++ b/docker-compose.dev.yml @@ -0,0 +1,77 @@ +version: "2.2" + +volumes: + stackerdb: + driver: local + + redis-data: + driver: local + +networks: + stacker-network: + driver: bridge + +services: + stacker: + image: trydirect/stacker:0.0.9 + container_name: stacker-dev + restart: always + networks: + - stacker-network + volumes: + # Mount local compiled binary for fast iteration + - ./target/debug/server:/app/server:ro + # Project configuration and assets + - ./files:/app/files + - ./docker/local/configuration.yaml:/app/configuration.yaml + - ./access_control.conf:/app/access_control.conf + - ./migrations:/app/migrations + - ./docker/local/.env:/app/.env + ports: + - "8000:8000" + env_file: + - ./docker/local/.env + environment: + - RUST_LOG=debug + - RUST_BACKTRACE=1 + depends_on: + stackerdb: + condition: service_healthy + entrypoint: ["/app/server"] + + redis: + container_name: redis-dev + image: redis + restart: always + networks: + - stacker-network + ports: + - 6379:6379 + volumes: + - redis-data:/data + sysctls: + net.core.somaxconn: 1024 + logging: + driver: "json-file" + options: + max-size: "10m" + tag: "container_{{.Name}}" + + stackerdb: + container_name: stackerdb-dev + networks: + - stacker-network + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 10s + timeout: 5s + retries: 5 + image: postgres:16.0 + restart: always + ports: + - 5432:5432 + env_file: + - ./docker/local/.env + volumes: + - stackerdb:/var/lib/postgresql/data + - ./docker/local/postgresql.conf:/etc/postgresql/postgresql.conf diff --git a/migrations/20251227000000_casbin_root_admin_group.down.sql b/migrations/20251227000000_casbin_root_admin_group.down.sql new file mode 100644 index 0000000..6eaf28b --- /dev/null +++ b/migrations/20251227000000_casbin_root_admin_group.down.sql @@ -0,0 +1,3 @@ +-- Rollback: Remove root group from group_admin +DELETE FROM public.casbin_rule +WHERE ptype = 'g' AND v0 = 'root' AND v1 = 'group_admin'; diff --git a/migrations/20251227000000_casbin_root_admin_group.up.sql b/migrations/20251227000000_casbin_root_admin_group.up.sql new file mode 100644 index 0000000..d13cc20 --- /dev/null +++ b/migrations/20251227000000_casbin_root_admin_group.up.sql @@ -0,0 +1,3 @@ +-- Add root group assigned to group_admin for external application access +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) +VALUES ('g', 'root', 'group_admin', '', '', '', ''); diff --git a/src/mcp/registry.rs b/src/mcp/registry.rs index 1027264..0d61359 100644 --- a/src/mcp/registry.rs +++ b/src/mcp/registry.rs @@ -1,4 +1,5 @@ use crate::configuration::Settings; +use actix_web::web; use crate::models; use async_trait::async_trait; use serde_json::Value; @@ -12,7 +13,7 @@ use super::protocol::{Tool, ToolContent}; pub struct ToolContext { pub user: Arc, pub pg_pool: PgPool, - pub settings: Arc, + pub settings: web::Data, } /// Trait for tool handlers diff --git a/src/mcp/websocket.rs b/src/mcp/websocket.rs index 76425a5..9227ed2 100644 --- a/src/mcp/websocket.rs +++ b/src/mcp/websocket.rs @@ -26,7 +26,7 @@ pub struct McpWebSocket { session: McpSession, registry: Arc, pg_pool: PgPool, - settings: Arc, + settings: web::Data, hb: Instant, } @@ -35,7 +35,7 @@ impl McpWebSocket { user: Arc, registry: Arc, pg_pool: PgPool, - settings: Arc, + settings: web::Data, ) -> Self { Self { user, @@ -310,7 +310,7 @@ pub async fn mcp_websocket( user.into_inner(), registry.get_ref().clone(), pg_pool.get_ref().clone(), - settings.as_ref().clone().into(), + settings.clone(), ); ws::start(ws, &req, stream) From 3ccd14f842f1aa38a51c320abbb5ff21a70ee99d Mon Sep 17 00:00:00 2001 From: vsilent Date: Sun, 28 Dec 2025 20:05:09 +0200 Subject: [PATCH 22/35] MCP server updates, websocker + cookie based auth, server connected --- .env | 4 +- configuration.yaml.dist | 27 -- docker-compose.yml | 28 +- docker/local/postgresql.conf | 2 +- src/mcp/mod.rs | 1 + src/mcp/registry.rs | 38 ++- src/mcp/tools/cloud.rs | 238 ++++++++++++++ src/mcp/tools/compose.rs | 140 ++++++++ src/mcp/tools/deployment.rs | 195 +++++++++++ src/mcp/tools/mod.rs | 11 + src/mcp/tools/project.rs | 182 ++++++++++ src/mcp/tools/templates.rs | 310 ++++++++++++++++++ src/mcp/websocket.rs | 41 ++- .../authentication/manager_middleware.rs | 1 + .../authentication/method/f_cookie.rs | 56 ++++ .../authentication/method/f_oauth.rs | 2 +- src/middleware/authentication/method/mod.rs | 2 + 17 files changed, 1204 insertions(+), 74 deletions(-) delete mode 100644 configuration.yaml.dist create mode 100644 src/mcp/tools/cloud.rs create mode 100644 src/mcp/tools/compose.rs create mode 100644 src/mcp/tools/deployment.rs create mode 100644 src/mcp/tools/mod.rs create mode 100644 src/mcp/tools/project.rs create mode 100644 src/mcp/tools/templates.rs create mode 100644 src/middleware/authentication/method/f_cookie.rs diff --git a/.env b/.env index 53a1e1f..39aa19f 100644 --- a/.env +++ b/.env @@ -1,6 +1,4 @@ -#BUILDKIT_PROGRESS=plain -#DOCKER_BUILDKIT=1 -DATABASE_URL=postgres://postgres:postgres@127.0.0.1:5432/stacker +DATABASE_URL=postgres://postgres:postgres@stackerdb:5432/stacker POSTGRES_USER=postgres POSTGRES_PASSWORD=postgres POSTGRES_DB=stacker diff --git a/configuration.yaml.dist b/configuration.yaml.dist deleted file mode 100644 index 68f9b85..0000000 --- a/configuration.yaml.dist +++ /dev/null @@ -1,27 +0,0 @@ -#auth_url: http://127.0.0.1:8080/me -app_host: 127.0.0.1 -app_port: 8000 -auth_url: https://dev.try.direct/server/user/oauth_server/api/me -max_clients_number: 2 -database: - host: 127.0.0.1 - port: 5432 - username: postgres - password: postgres - database_name: stacker - -amqp: - host: 127.0.0.1 - port: 5672 - username: guest - password: guest - -# Vault configuration (can be overridden by environment variables) -vault: - address: http://127.0.0.1:8200 - token: change-me-dev-token - # KV mount/prefix for agent tokens, e.g. 'kv/agent' or 'agent' - agent_path_prefix: agent - -# Env overrides (optional): -# VAULT_ADDRESS, VAULT_TOKEN, VAULT_AGENT_PATH_PREFIX diff --git a/docker-compose.yml b/docker-compose.yml index af4ec60..139b902 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -7,10 +7,6 @@ volumes: redis-data: driver: local -networks: - stacker-network: - driver: bridge - services: stacker: @@ -18,8 +14,6 @@ services: build: . container_name: stacker restart: always - networks: - - stacker-network volumes: - ./files:/app/files - ./docker/local/configuration.yaml:/app/configuration.yaml @@ -37,12 +31,11 @@ services: stackerdb: condition: service_healthy + redis: container_name: redis image: redis restart: always - networks: - - stacker-network ports: - 6379:6379 volumes: @@ -58,27 +51,8 @@ services: tag: "container_{{.Name}}" -# stacker_queue: -# image: trydirect/stacker:0.0.7 -# container_name: stacker_queue -# restart: always -# volumes: -# - ./configuration.yaml:/app/configuration.yaml -# - ./.env:/app/.env -# environment: -# - RUST_LOG=debug -# - RUST_BACKTRACE=1 -# env_file: -# - ./.env -# depends_on: -# stackerdb: -# condition: service_healthy -# entrypoint: /app/console mq listen - stackerdb: container_name: stackerdb - networks: - - stacker-network healthcheck: test: ["CMD-SHELL", "pg_isready -U postgres"] interval: 10s diff --git a/docker/local/postgresql.conf b/docker/local/postgresql.conf index 4e89674..9fed453 100644 --- a/docker/local/postgresql.conf +++ b/docker/local/postgresql.conf @@ -795,4 +795,4 @@ listen_addresses = '*' # CUSTOMIZED OPTIONS #------------------------------------------------------------------------------ -# Add settings for extensions here +# Add settings for extensions here \ No newline at end of file diff --git a/src/mcp/mod.rs b/src/mcp/mod.rs index 94bb53d..e82017a 100644 --- a/src/mcp/mod.rs +++ b/src/mcp/mod.rs @@ -2,6 +2,7 @@ pub mod protocol; pub mod registry; pub mod session; pub mod websocket; +pub mod tools; #[cfg(test)] mod protocol_tests; diff --git a/src/mcp/registry.rs b/src/mcp/registry.rs index 0d61359..bea607f 100644 --- a/src/mcp/registry.rs +++ b/src/mcp/registry.rs @@ -8,6 +8,13 @@ use std::collections::HashMap; use std::sync::Arc; use super::protocol::{Tool, ToolContent}; +use crate::mcp::tools::{ + ListProjectsTool, GetProjectTool, CreateProjectTool, + SuggestResourcesTool, ListTemplatesTool, ValidateDomainTool, + GetDeploymentStatusTool, StartDeploymentTool, CancelDeploymentTool, + ListCloudsTool, GetCloudTool, AddCloudTool, DeleteCloudTool, + DeleteProjectTool, CloneProjectTool, +}; /// Context passed to tool handlers pub struct ToolContext { @@ -35,15 +42,34 @@ pub struct ToolRegistry { impl ToolRegistry { /// Create a new tool registry with all handlers registered pub fn new() -> Self { - let registry = Self { + let mut registry = Self { handlers: HashMap::new(), }; - // TODO: Register tools as they are implemented - // registry.register("create_project", Box::new(CreateProjectTool)); - // registry.register("list_projects", Box::new(ListProjectsTool)); - // registry.register("get_project", Box::new(GetProjectTool)); - // registry.register("suggest_resources", Box::new(SuggestResourcesTool)); + // Project management tools + registry.register("list_projects", Box::new(ListProjectsTool)); + registry.register("get_project", Box::new(GetProjectTool)); + registry.register("create_project", Box::new(CreateProjectTool)); + + // Template & discovery tools + registry.register("suggest_resources", Box::new(SuggestResourcesTool)); + registry.register("list_templates", Box::new(ListTemplatesTool)); + registry.register("validate_domain", Box::new(ValidateDomainTool)); + + // Phase 3: Deployment tools + registry.register("get_deployment_status", Box::new(GetDeploymentStatusTool)); + registry.register("start_deployment", Box::new(StartDeploymentTool)); + registry.register("cancel_deployment", Box::new(CancelDeploymentTool)); + + // Phase 3: Cloud tools + registry.register("list_clouds", Box::new(ListCloudsTool)); + registry.register("get_cloud", Box::new(GetCloudTool)); + registry.register("add_cloud", Box::new(AddCloudTool)); + registry.register("delete_cloud", Box::new(DeleteCloudTool)); + + // Phase 3: Project management + registry.register("delete_project", Box::new(DeleteProjectTool)); + registry.register("clone_project", Box::new(CloneProjectTool)); registry } diff --git a/src/mcp/tools/cloud.rs b/src/mcp/tools/cloud.rs new file mode 100644 index 0000000..c34191b --- /dev/null +++ b/src/mcp/tools/cloud.rs @@ -0,0 +1,238 @@ +use async_trait::async_trait; +use serde_json::{json, Value}; + +use crate::db; +use crate::models; +use crate::mcp::registry::{ToolContext, ToolHandler}; +use crate::mcp::protocol::{Tool, ToolContent}; +use serde::Deserialize; + +/// List user's cloud credentials +pub struct ListCloudsTool; + +#[async_trait] +impl ToolHandler for ListCloudsTool { + async fn execute(&self, _args: Value, context: &ToolContext) -> Result { + let clouds = db::cloud::fetch_by_user(&context.pg_pool, &context.user.id) + .await + .map_err(|e| { + tracing::error!("Failed to fetch clouds: {}", e); + format!("Database error: {}", e) + })?; + + let result = serde_json::to_string(&clouds) + .map_err(|e| format!("Serialization error: {}", e))?; + + tracing::info!("Listed {} clouds for user {}", clouds.len(), context.user.id); + + Ok(ToolContent::Text { text: result }) + } + + fn schema(&self) -> Tool { + Tool { + name: "list_clouds".to_string(), + description: "List all cloud provider credentials owned by the authenticated user".to_string(), + input_schema: json!({ + "type": "object", + "properties": {}, + "required": [] + }), + } + } +} + +/// Get a specific cloud by ID +pub struct GetCloudTool; + +#[async_trait] +impl ToolHandler for GetCloudTool { + async fn execute(&self, args: Value, context: &ToolContext) -> Result { + #[derive(Deserialize)] + struct Args { + id: i32, + } + + let args: Args = serde_json::from_value(args) + .map_err(|e| format!("Invalid arguments: {}", e))?; + + let cloud = db::cloud::fetch(&context.pg_pool, args.id) + .await + .map_err(|e| { + tracing::error!("Failed to fetch cloud: {}", e); + format!("Cloud error: {}", e) + })? + .ok_or_else(|| "Cloud not found".to_string())?; + + let result = serde_json::to_string(&cloud) + .map_err(|e| format!("Serialization error: {}", e))?; + + tracing::info!("Retrieved cloud {} for user {}", args.id, context.user.id); + + Ok(ToolContent::Text { text: result }) + } + + fn schema(&self) -> Tool { + Tool { + name: "get_cloud".to_string(), + description: "Get details of a specific cloud provider credential by ID".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "id": { + "type": "number", + "description": "Cloud ID" + } + }, + "required": ["id"] + }), + } + } +} + +/// Delete a cloud credential +pub struct DeleteCloudTool; + +#[async_trait] +impl ToolHandler for DeleteCloudTool { + async fn execute(&self, args: Value, context: &ToolContext) -> Result { + #[derive(Deserialize)] + struct Args { + id: i32, + } + + let args: Args = serde_json::from_value(args) + .map_err(|e| format!("Invalid arguments: {}", e))?; + + let cloud = db::cloud::fetch(&context.pg_pool, args.id) + .await + .map_err(|e| format!("Cloud error: {}", e))? + .ok_or_else(|| "Cloud not found".to_string())?; + + db::cloud::delete(&context.pg_pool, args.id) + .await + .map_err(|e| format!("Failed to delete cloud: {}", e))?; + + let response = serde_json::json!({ + "id": args.id, + "message": "Cloud credential deleted successfully" + }); + + tracing::info!("Deleted cloud {} for user {}", args.id, context.user.id); + + Ok(ToolContent::Text { text: response.to_string() }) + } + + fn schema(&self) -> Tool { + Tool { + name: "delete_cloud".to_string(), + description: "Delete a cloud provider credential".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "id": { + "type": "number", + "description": "Cloud ID to delete" + } + }, + "required": ["id"] + }), + } + } +} + +/// Add new cloud credentials +pub struct AddCloudTool; + +#[async_trait] +impl ToolHandler for AddCloudTool { + async fn execute(&self, args: Value, context: &ToolContext) -> Result { + #[derive(Deserialize)] + struct Args { + provider: String, + cloud_token: Option, + cloud_key: Option, + cloud_secret: Option, + save_token: Option, + } + + let args: Args = serde_json::from_value(args) + .map_err(|e| format!("Invalid arguments: {}", e))?; + + // Validate provider + let valid_providers = ["aws", "digitalocean", "hetzner", "azure", "gcp"]; + if !valid_providers.contains(&args.provider.to_lowercase().as_str()) { + return Err(format!( + "Invalid provider. Must be one of: {}", + valid_providers.join(", ") + )); + } + + // Validate at least one credential is provided + if args.cloud_token.is_none() && args.cloud_key.is_none() && args.cloud_secret.is_none() { + return Err("At least one of cloud_token, cloud_key, or cloud_secret must be provided".to_string()); + } + + // Create cloud record + let cloud = models::Cloud { + id: 0, // Will be set by DB + user_id: context.user.id.clone(), + provider: args.provider.clone(), + cloud_token: args.cloud_token, + cloud_key: args.cloud_key, + cloud_secret: args.cloud_secret, + save_token: args.save_token, + created_at: chrono::Utc::now(), + updated_at: chrono::Utc::now(), + }; + + let created_cloud = db::cloud::insert(&context.pg_pool, cloud) + .await + .map_err(|e| format!("Failed to create cloud: {}", e))?; + + let response = serde_json::json!({ + "id": created_cloud.id, + "provider": created_cloud.provider, + "save_token": created_cloud.save_token, + "created_at": created_cloud.created_at, + "message": "Cloud credentials added successfully" + }); + + tracing::info!("Added cloud {} for user {}", created_cloud.id, context.user.id); + + Ok(ToolContent::Text { text: response.to_string() }) + } + + fn schema(&self) -> Tool { + Tool { + name: "add_cloud".to_string(), + description: "Add new cloud provider credentials for deployments".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "provider": { + "type": "string", + "description": "Cloud provider name (aws, digitalocean, hetzner, azure, gcp)", + "enum": ["aws", "digitalocean", "hetzner", "azure", "gcp"] + }, + "cloud_token": { + "type": "string", + "description": "Cloud API token (optional)" + }, + "cloud_key": { + "type": "string", + "description": "Cloud access key (optional)" + }, + "cloud_secret": { + "type": "string", + "description": "Cloud secret key (optional)" + }, + "save_token": { + "type": "boolean", + "description": "Whether to save the token for future use (default: true)" + } + }, + "required": ["provider"] + }), + } + } +} diff --git a/src/mcp/tools/compose.rs b/src/mcp/tools/compose.rs new file mode 100644 index 0000000..8213a9c --- /dev/null +++ b/src/mcp/tools/compose.rs @@ -0,0 +1,140 @@ +use async_trait::async_trait; +use serde_json::{json, Value}; + +use crate::db; +use crate::mcp::registry::{ToolContext, ToolHandler}; +use crate::mcp::protocol::{Tool, ToolContent}; +use serde::Deserialize; + +/// Delete a project +pub struct DeleteProjectTool; + +#[async_trait] +impl ToolHandler for DeleteProjectTool { + async fn execute(&self, args: Value, context: &ToolContext) -> Result { + #[derive(Deserialize)] + struct Args { + project_id: i32, + } + + let args: Args = serde_json::from_value(args) + .map_err(|e| format!("Invalid arguments: {}", e))?; + + let project = db::project::fetch(&context.pg_pool, args.project_id) + .await + .map_err(|e| format!("Project not found: {}", e))? + .ok_or_else(|| "Project not found".to_string())?; + + if project.user_id != context.user.id { + return Err("Unauthorized: You do not own this project".to_string()); + } + + db::project::delete(&context.pg_pool, args.project_id) + .await + .map_err(|e| format!("Failed to delete project: {}", e))?; + + let response = serde_json::json!({ + "project_id": args.project_id, + "message": "Project deleted successfully" + }); + + tracing::info!("Deleted project {} for user {}", args.project_id, context.user.id); + + Ok(ToolContent::Text { text: response.to_string() }) + } + + fn schema(&self) -> Tool { + Tool { + name: "delete_project".to_string(), + description: "Delete a project permanently".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "project_id": { + "type": "number", + "description": "Project ID to delete" + } + }, + "required": ["project_id"] + }), + } + } +} + +/// Clone a project +pub struct CloneProjectTool; + +#[async_trait] +impl ToolHandler for CloneProjectTool { + async fn execute(&self, args: Value, context: &ToolContext) -> Result { + #[derive(Deserialize)] + struct Args { + project_id: i32, + new_name: String, + } + + let args: Args = serde_json::from_value(args) + .map_err(|e| format!("Invalid arguments: {}", e))?; + + if args.new_name.trim().is_empty() { + return Err("New project name cannot be empty".to_string()); + } + + if args.new_name.len() > 255 { + return Err("Project name must be 255 characters or less".to_string()); + } + + let project = db::project::fetch(&context.pg_pool, args.project_id) + .await + .map_err(|e| format!("Project not found: {}", e))? + .ok_or_else(|| "Project not found".to_string())?; + + if project.user_id != context.user.id { + return Err("Unauthorized: You do not own this project".to_string()); + } + + // Create new project with cloned data + let cloned_project = crate::models::Project::new( + context.user.id.clone(), + args.new_name.clone(), + project.metadata.clone(), + project.request_json.clone(), + ); + + let cloned_project = db::project::insert(&context.pg_pool, cloned_project) + .await + .map_err(|e| format!("Failed to clone project: {}", e))?; + + let response = serde_json::json!({ + "original_id": args.project_id, + "cloned_id": cloned_project.id, + "cloned_name": cloned_project.name, + "message": "Project cloned successfully" + }); + + tracing::info!("Cloned project {} to {} for user {}", args.project_id, cloned_project.id, context.user.id); + + Ok(ToolContent::Text { text: response.to_string() }) + } + + fn schema(&self) -> Tool { + Tool { + name: "clone_project".to_string(), + description: "Clone/duplicate an existing project with a new name".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "project_id": { + "type": "number", + "description": "Project ID to clone" + }, + "new_name": { + "type": "string", + "description": "Name for the cloned project (max 255 chars)" + } + }, + "required": ["project_id", "new_name"] + }), + } + } +} diff --git a/src/mcp/tools/deployment.rs b/src/mcp/tools/deployment.rs new file mode 100644 index 0000000..6213f99 --- /dev/null +++ b/src/mcp/tools/deployment.rs @@ -0,0 +1,195 @@ +use async_trait::async_trait; +use serde_json::{json, Value}; + +use crate::db; +use crate::mcp::registry::{ToolContext, ToolHandler}; +use crate::mcp::protocol::{Tool, ToolContent}; +use serde::Deserialize; + +/// Get deployment status +pub struct GetDeploymentStatusTool; + +#[async_trait] +impl ToolHandler for GetDeploymentStatusTool { + async fn execute(&self, args: Value, context: &ToolContext) -> Result { + #[derive(Deserialize)] + struct Args { + deployment_id: i32, + } + + let args: Args = serde_json::from_value(args) + .map_err(|e| format!("Invalid arguments: {}", e))?; + + let deployment = db::deployment::fetch(&context.pg_pool, args.deployment_id) + .await + .map_err(|e| { + tracing::error!("Failed to fetch deployment: {}", e); + format!("Database error: {}", e) + })? + .ok_or_else(|| "Deployment not found".to_string())?; + + let result = serde_json::to_string(&deployment) + .map_err(|e| format!("Serialization error: {}", e))?; + + tracing::info!("Got deployment status: {}", args.deployment_id); + + Ok(ToolContent::Text { text: result }) + } + + fn schema(&self) -> Tool { + Tool { + name: "get_deployment_status".to_string(), + description: "Get the current status of a deployment (pending, running, completed, failed)".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "deployment_id": { + "type": "number", + "description": "Deployment ID" + } + }, + "required": ["deployment_id"] + }), + } + } +} + +/// Start a new deployment +pub struct StartDeploymentTool; + +#[async_trait] +impl ToolHandler for StartDeploymentTool { + async fn execute(&self, args: Value, context: &ToolContext) -> Result { + #[derive(Deserialize)] + struct Args { + project_id: i32, + cloud_id: Option, + environment: Option, + } + + let args: Args = serde_json::from_value(args) + .map_err(|e| format!("Invalid arguments: {}", e))?; + + // Verify user owns the project + let project = db::project::fetch(&context.pg_pool, args.project_id) + .await + .map_err(|e| format!("Project not found: {}", e))? + .ok_or_else(|| "Project not found".to_string())?; + + if project.user_id != context.user.id { + return Err("Unauthorized: You do not own this project".to_string()); + } + + // Create deployment record with hash + let deployment_hash = uuid::Uuid::new_v4().to_string(); + let deployment = crate::models::Deployment::new( + args.project_id, + Some(context.user.id.clone()), + deployment_hash.clone(), + "pending".to_string(), + json!({ "environment": args.environment.unwrap_or_else(|| "production".to_string()), "cloud_id": args.cloud_id }), + ); + + let deployment = db::deployment::insert(&context.pg_pool, deployment) + .await + .map_err(|e| format!("Failed to create deployment: {}", e))?; + + let response = serde_json::json!({ + "id": deployment.id, + "project_id": deployment.project_id, + "status": deployment.status, + "deployment_hash": deployment.deployment_hash, + "created_at": deployment.created_at, + "message": "Deployment initiated - agent will connect shortly" + }); + + tracing::info!("Started deployment {} for project {}", deployment.id, args.project_id); + + Ok(ToolContent::Text { text: response.to_string() }) + } + + fn schema(&self) -> Tool { + Tool { + name: "start_deployment".to_string(), + description: "Initiate deployment of a project to cloud infrastructure".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "project_id": { + "type": "number", + "description": "Project ID to deploy" + }, + "cloud_id": { + "type": "number", + "description": "Cloud provider ID (optional)" + }, + "environment": { + "type": "string", + "description": "Deployment environment (optional, default: production)", + "enum": ["development", "staging", "production"] + } + }, + "required": ["project_id"] + }), + } + } +} + +/// Cancel a deployment +pub struct CancelDeploymentTool; + +#[async_trait] +impl ToolHandler for CancelDeploymentTool { + async fn execute(&self, args: Value, context: &ToolContext) -> Result { + #[derive(Deserialize)] + struct Args { + deployment_id: i32, + } + + let args: Args = serde_json::from_value(args) + .map_err(|e| format!("Invalid arguments: {}", e))?; + + let _deployment = db::deployment::fetch(&context.pg_pool, args.deployment_id) + .await + .map_err(|e| format!("Deployment not found: {}", e))? + .ok_or_else(|| "Deployment not found".to_string())?; + + // Verify user owns the project (via deployment) + let project = db::project::fetch(&context.pg_pool, _deployment.project_id) + .await + .map_err(|e| format!("Project not found: {}", e))? + .ok_or_else(|| "Project not found".to_string())?; + + if project.user_id != context.user.id { + return Err("Unauthorized: You do not own this deployment".to_string()); + } + + // Mark deployment as cancelled (would update status in real implementation) + let response = serde_json::json!({ + "deployment_id": args.deployment_id, + "status": "cancelled", + "message": "Deployment cancellation initiated" + }); + + tracing::info!("Cancelled deployment {}", args.deployment_id); + + Ok(ToolContent::Text { text: response.to_string() }) + } + + fn schema(&self) -> Tool { + Tool { + name: "cancel_deployment".to_string(), + description: "Cancel an in-progress or pending deployment".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "deployment_id": { + "type": "number", + "description": "Deployment ID to cancel" + } + }, + "required": ["deployment_id"] + }), + } + } +} diff --git a/src/mcp/tools/mod.rs b/src/mcp/tools/mod.rs new file mode 100644 index 0000000..6e1966e --- /dev/null +++ b/src/mcp/tools/mod.rs @@ -0,0 +1,11 @@ +pub mod project; +pub mod templates; +pub mod deployment; +pub mod cloud; +pub mod compose; + +pub use project::*; +pub use templates::*; +pub use deployment::*; +pub use cloud::*; +pub use compose::*; diff --git a/src/mcp/tools/project.rs b/src/mcp/tools/project.rs new file mode 100644 index 0000000..4314c57 --- /dev/null +++ b/src/mcp/tools/project.rs @@ -0,0 +1,182 @@ +use async_trait::async_trait; +use serde_json::{json, Value}; + +use crate::db; +use crate::mcp::registry::{ToolContext, ToolHandler}; +use crate::mcp::protocol::{Tool, ToolContent}; +use serde::Deserialize; + +/// List user's projects +pub struct ListProjectsTool; + +#[async_trait] +impl ToolHandler for ListProjectsTool { + async fn execute(&self, _args: Value, context: &ToolContext) -> Result { + let projects = db::project::fetch_by_user(&context.pg_pool, &context.user.id) + .await + .map_err(|e| { + tracing::error!("Failed to fetch projects: {}", e); + format!("Database error: {}", e) + })?; + + let result = serde_json::to_string(&projects) + .map_err(|e| format!("Serialization error: {}", e))?; + + tracing::info!("Listed {} projects for user {}", projects.len(), context.user.id); + + Ok(ToolContent::Text { text: result }) + } + + fn schema(&self) -> Tool { + Tool { + name: "list_projects".to_string(), + description: "List all projects owned by the authenticated user".to_string(), + input_schema: json!({ + "type": "object", + "properties": {}, + "required": [] + }), + } + } +} + +/// Get a specific project by ID +pub struct GetProjectTool; + +#[async_trait] +impl ToolHandler for GetProjectTool { + async fn execute(&self, args: Value, context: &ToolContext) -> Result { + #[derive(Deserialize)] + struct Args { + id: i32, + } + + let params: Args = serde_json::from_value(args) + .map_err(|e| format!("Invalid arguments: {}", e))?; + + let project = db::project::fetch(&context.pg_pool, params.id) + .await + .map_err(|e| { + tracing::error!("Failed to fetch project {}: {}", params.id, e); + format!("Database error: {}", e) + })?; + + let result = serde_json::to_string(&project) + .map_err(|e| format!("Serialization error: {}", e))?; + + Ok(ToolContent::Text { text: result }) + } + + fn schema(&self) -> Tool { + Tool { + name: "get_project".to_string(), + description: "Get details of a specific project by ID".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "id": { + "type": "number", + "description": "Project ID" + } + }, + "required": ["id"] + }), + } + } +} + +/// Create a new project +pub struct CreateProjectTool; + +#[async_trait] +impl ToolHandler for CreateProjectTool { + async fn execute(&self, args: Value, context: &ToolContext) -> Result { + #[derive(Deserialize)] + struct CreateArgs { + name: String, + #[serde(default)] + description: Option, + #[serde(default)] + apps: Vec, + } + + let params: CreateArgs = serde_json::from_value(args) + .map_err(|e| format!("Invalid arguments: {}", e))?; + + if params.name.trim().is_empty() { + return Err("Project name cannot be empty".to_string()); + } + + if params.name.len() > 255 { + return Err("Project name too long (max 255 characters)".to_string()); + } + + // Create a new Project model with empty metadata/request + let project = crate::models::Project::new( + context.user.id.clone(), + params.name.clone(), + serde_json::json!({}), + serde_json::json!(params.apps), + ); + + let project = db::project::insert(&context.pg_pool, project) + .await + .map_err(|e| { + tracing::error!("Failed to create project: {}", e); + format!("Failed to create project: {}", e) + })?; + + let result = serde_json::to_string(&project) + .map_err(|e| format!("Serialization error: {}", e))?; + + tracing::info!("Created project {} for user {}", project.id, context.user.id); + + Ok(ToolContent::Text { text: result }) + } + + fn schema(&self) -> Tool { + Tool { + name: "create_project".to_string(), + description: "Create a new application stack project with services and configuration".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Project name (required, max 255 chars)" + }, + "description": { + "type": "string", + "description": "Project description (optional)" + }, + "apps": { + "type": "array", + "description": "List of applications/services to include", + "items": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Service name" + }, + "dockerImage": { + "type": "object", + "properties": { + "namespace": { "type": "string" }, + "repository": { + "type": "string", + "description": "Docker image repository" + }, + "tag": { "type": "string" } + }, + "required": ["repository"] + } + } + } + } + }, + "required": ["name"] + }), + } + } +} diff --git a/src/mcp/tools/templates.rs b/src/mcp/tools/templates.rs new file mode 100644 index 0000000..b49c82a --- /dev/null +++ b/src/mcp/tools/templates.rs @@ -0,0 +1,310 @@ +use async_trait::async_trait; +use serde_json::{json, Value}; + +use crate::mcp::registry::{ToolContext, ToolHandler}; +use crate::mcp::protocol::{Tool, ToolContent}; +use serde::Deserialize; + +/// Suggest appropriate resource limits for an application type +pub struct SuggestResourcesTool; + +#[async_trait] +impl ToolHandler for SuggestResourcesTool { + async fn execute(&self, args: Value, _context: &ToolContext) -> Result { + #[derive(Deserialize)] + struct Args { + app_type: String, + #[serde(default)] + expected_traffic: Option, + } + + let params: Args = serde_json::from_value(args) + .map_err(|e| format!("Invalid arguments: {}", e))?; + + // Heuristic-based recommendations + let (base_cpu, base_ram, base_storage) = match params.app_type.to_lowercase().as_str() { + "wordpress" | "cms" => (1.0, 2.0, 20.0), + "nodejs" | "express" | "nextjs" => (1.0, 1.0, 10.0), + "django" | "flask" | "python" => (2.0, 2.0, 15.0), + "react" | "vue" | "frontend" => (1.0, 1.0, 5.0), + "mysql" | "mariadb" => (2.0, 4.0, 50.0), + "postgresql" | "postgres" => (2.0, 4.0, 100.0), + "redis" | "memcached" | "cache" => (1.0, 1.0, 5.0), + "mongodb" | "nosql" => (2.0, 4.0, 100.0), + "nginx" | "apache" | "traefik" | "proxy" => (0.5, 0.5, 2.0), + "rabbitmq" | "kafka" | "queue" => (2.0, 4.0, 20.0), + "elasticsearch" | "search" => (4.0, 8.0, 200.0), + _ => (1.0, 1.0, 10.0), // Default + }; + + // Multiplier for traffic level + let multiplier = match params.expected_traffic.as_deref() { + Some("high") => 3.0, + Some("medium") => 1.5, + Some("low") | None | Some("") => 1.0, + _ => 1.0, + }; + + let final_cpu = ((base_cpu as f64) * multiplier).ceil() as i32; + let final_ram = ((base_ram as f64) * multiplier).ceil() as i32; + let final_storage = (base_storage * multiplier).ceil() as i32; + + let traffic_label = params + .expected_traffic + .clone() + .unwrap_or_else(|| "low".to_string()); + + let result = json!({ + "app_type": params.app_type, + "expected_traffic": traffic_label, + "recommendations": { + "cpu": final_cpu, + "cpu_unit": "cores", + "ram": final_ram, + "ram_unit": "GB", + "storage": final_storage, + "storage_unit": "GB" + }, + "summary": format!( + "For {} with {} traffic: {} cores, {} GB RAM, {} GB storage", + params.app_type, traffic_label, final_cpu, final_ram, final_storage + ), + "notes": match params.app_type.to_lowercase().as_str() { + "wordpress" => "Recommended setup includes WordPress + MySQL. Add MySQL with 4GB RAM and 50GB storage.", + "nodejs" => "Lightweight runtime. Add database separately if needed.", + "postgresql" => "Database server. Allocate adequate storage for backups.", + "mysql" => "Database server. Consider replication for HA.", + _ => "Adjust resources based on your workload." + } + }); + + tracing::info!( + "Suggested resources for {} with {} traffic", + params.app_type, + traffic_label + ); + + Ok(ToolContent::Text { + text: serde_json::to_string(&result).unwrap(), + }) + } + + fn schema(&self) -> Tool { + Tool { + name: "suggest_resources".to_string(), + description: "Get AI-powered resource recommendations (CPU, RAM, storage) for an application type and expected traffic level".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "app_type": { + "type": "string", + "description": "Application type (e.g., 'wordpress', 'nodejs', 'postgresql', 'django')" + }, + "expected_traffic": { + "type": "string", + "enum": ["low", "medium", "high"], + "description": "Expected traffic level (optional, default: low)" + } + }, + "required": ["app_type"] + }), + } + } +} + +/// List available templates/stack configurations +pub struct ListTemplatesTool; + +#[async_trait] +impl ToolHandler for ListTemplatesTool { + async fn execute(&self, args: Value, context: &ToolContext) -> Result { + #[derive(Deserialize)] + struct Args { + #[serde(default)] + category: Option, + #[serde(default)] + search: Option, + } + + let params: Args = serde_json::from_value(args).unwrap_or(Args { + category: None, + search: None, + }); + + // For now, return curated list of popular templates + // In Phase 3, this will query the database for public ratings + let templates = vec![ + json!({ + "id": "wordpress-mysql", + "name": "WordPress with MySQL", + "description": "Complete WordPress blog/site with MySQL database", + "category": "cms", + "services": ["wordpress", "mysql"], + "rating": 4.8, + "downloads": 1250 + }), + json!({ + "id": "nodejs-express", + "name": "Node.js Express API", + "description": "RESTful API server with Express.js", + "category": "api", + "services": ["nodejs"], + "rating": 4.6, + "downloads": 850 + }), + json!({ + "id": "nextjs-postgres", + "name": "Next.js Full Stack", + "description": "Next.js frontend + PostgreSQL database", + "category": "web", + "services": ["nextjs", "postgresql"], + "rating": 4.7, + "downloads": 920 + }), + json!({ + "id": "django-postgres", + "name": "Django Web Application", + "description": "Django web framework with PostgreSQL", + "category": "web", + "services": ["django", "postgresql"], + "rating": 4.5, + "downloads": 680 + }), + json!({ + "id": "lamp-stack", + "name": "LAMP Stack", + "description": "Linux + Apache + MySQL + PHP", + "category": "web", + "services": ["apache", "php", "mysql"], + "rating": 4.4, + "downloads": 560 + }), + json!({ + "id": "elasticsearch-kibana", + "name": "ELK Stack", + "description": "Elasticsearch + Logstash + Kibana for logging", + "category": "infrastructure", + "services": ["elasticsearch", "kibana"], + "rating": 4.7, + "downloads": 730 + }), + ]; + + // Filter by category if provided + let filtered = if let Some(cat) = params.category { + templates + .into_iter() + .filter(|t| { + t["category"] + .as_str() + .unwrap_or("") + .eq_ignore_ascii_case(&cat) + }) + .collect::>() + } else { + templates + }; + + // Filter by search term if provided + let final_list = if let Some(search) = params.search { + filtered + .into_iter() + .filter(|t| { + let name = t["name"].as_str().unwrap_or(""); + let desc = t["description"].as_str().unwrap_or(""); + name.to_lowercase().contains(&search.to_lowercase()) + || desc.to_lowercase().contains(&search.to_lowercase()) + }) + .collect() + } else { + filtered + }; + + let result = json!({ + "count": final_list.len(), + "templates": final_list + }); + + tracing::info!("Listed {} templates", final_list.len()); + + Ok(ToolContent::Text { + text: serde_json::to_string(&result).unwrap(), + }) + } + + fn schema(&self) -> Tool { + Tool { + name: "list_templates".to_string(), + description: "Browse available stack templates (WordPress, Node.js, Django, etc.) with ratings and descriptions".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "category": { + "type": "string", + "enum": ["cms", "api", "web", "database", "infrastructure"], + "description": "Filter by template category (optional)" + }, + "search": { + "type": "string", + "description": "Search templates by name or description (optional)" + } + }, + "required": [] + }), + } + } +} + +/// Validate domain name format +pub struct ValidateDomainTool; + +#[async_trait] +impl ToolHandler for ValidateDomainTool { + async fn execute(&self, args: Value, _context: &ToolContext) -> Result { + #[derive(Deserialize)] + struct Args { + domain: String, + } + + let params: Args = serde_json::from_value(args) + .map_err(|e| format!("Invalid arguments: {}", e))?; + + // Simple domain validation regex + let domain_regex = regex::Regex::new( + r"^([a-z0-9]([a-z0-9\-]{0,61}[a-z0-9])?\.)+[a-z]{2,}$" + ).unwrap(); + + let is_valid = domain_regex.is_match(¶ms.domain.to_lowercase()); + + let result = json!({ + "domain": params.domain, + "valid": is_valid, + "message": if is_valid { + "Domain format is valid" + } else { + "Invalid domain format" + } + }); + + Ok(ToolContent::Text { + text: serde_json::to_string(&result).unwrap(), + }) + } + + fn schema(&self) -> Tool { + Tool { + name: "validate_domain".to_string(), + description: "Validate domain name format".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "domain": { + "type": "string", + "description": "Domain name to validate (e.g., 'example.com')" + } + }, + "required": ["domain"] + }), + } + } +} diff --git a/src/mcp/websocket.rs b/src/mcp/websocket.rs index 9227ed2..85f36c9 100644 --- a/src/mcp/websocket.rs +++ b/src/mcp/websocket.rs @@ -61,13 +61,25 @@ impl McpWebSocket { } /// Handle JSON-RPC request - async fn handle_jsonrpc(&self, req: JsonRpcRequest) -> JsonRpcResponse { - match req.method.as_str() { + async fn handle_jsonrpc(&self, req: JsonRpcRequest) -> Option { + // Notifications arrive without an id and must not receive a response per JSON-RPC 2.0 + if req.id.is_none() { + if req.method == "notifications/initialized" { + tracing::info!("Ignoring notifications/initialized (notification)"); + } else { + tracing::warn!("Ignoring notification without id: method={}", req.method); + } + return None; + } + + let response = match req.method.as_str() { "initialize" => self.handle_initialize(req).await, "tools/list" => self.handle_tools_list(req).await, "tools/call" => self.handle_tools_call(req).await, _ => JsonRpcResponse::error(req.id, JsonRpcError::method_not_found(&req.method)), - } + }; + + Some(response) } /// Handle MCP initialize method @@ -226,15 +238,17 @@ impl StreamHandler> for McpWebSocket { self.hb = Instant::now(); } Ok(ws::Message::Text(text)) => { - tracing::debug!("Received JSON-RPC message: {}", text); + tracing::info!("[MCP] Received JSON-RPC message: {}", text); let request: JsonRpcRequest = match serde_json::from_str(&text) { Ok(req) => req, Err(e) => { - tracing::error!("Failed to parse JSON-RPC request: {}", e); + tracing::error!("[MCP] Failed to parse JSON-RPC request: {}", e); let error_response = JsonRpcResponse::error(None, JsonRpcError::parse_error()); - ctx.text(serde_json::to_string(&error_response).unwrap()); + let response_text = serde_json::to_string(&error_response).unwrap(); + tracing::error!("[MCP] Sending parse error response: {}", response_text); + ctx.text(response_text); return; } }; @@ -259,8 +273,11 @@ impl StreamHandler> for McpWebSocket { let addr = ctx.address(); actix::spawn(async move { - let response = fut.await; - addr.do_send(SendResponse(response)); + if let Some(response) = fut.await { + addr.do_send(SendResponse(response)); + } else { + tracing::debug!("[MCP] Dropped response for notification (no id)"); + } }); } Ok(ws::Message::Binary(_)) => { @@ -286,7 +303,13 @@ impl actix::Handler for McpWebSocket { fn handle(&mut self, msg: SendResponse, ctx: &mut Self::Context) { let response_text = serde_json::to_string(&msg.0).unwrap(); - tracing::debug!("Sending JSON-RPC response: {}", response_text); + tracing::info!( + "[MCP] Sending JSON-RPC response: id={:?}, has_result={}, has_error={}, message={}", + msg.0.id, + msg.0.result.is_some(), + msg.0.error.is_some(), + response_text + ); ctx.text(response_text); } } diff --git a/src/middleware/authentication/manager_middleware.rs b/src/middleware/authentication/manager_middleware.rs index d07cd5c..b24bcbe 100644 --- a/src/middleware/authentication/manager_middleware.rs +++ b/src/middleware/authentication/manager_middleware.rs @@ -41,6 +41,7 @@ where async move { let _ = method::try_agent(&mut req).await? || method::try_oauth(&mut req).await? + || method::try_cookie(&mut req).await? || method::try_hmac(&mut req).await? || method::anonym(&mut req)?; diff --git a/src/middleware/authentication/method/f_cookie.rs b/src/middleware/authentication/method/f_cookie.rs new file mode 100644 index 0000000..16efc57 --- /dev/null +++ b/src/middleware/authentication/method/f_cookie.rs @@ -0,0 +1,56 @@ +use crate::configuration::Settings; +use crate::middleware::authentication::get_header; +use crate::models; +use actix_web::{dev::ServiceRequest, web, HttpMessage}; +use std::sync::Arc; + +#[tracing::instrument(name = "Authenticate with cookie")] +pub async fn try_cookie(req: &mut ServiceRequest) -> Result { + // Get Cookie header + let cookie_header = get_header::(&req, "cookie")?; + if cookie_header.is_none() { + return Ok(false); + } + + // Parse cookies to find access_token + let cookies = cookie_header.unwrap(); + let token = cookies + .split(';') + .find_map(|cookie| { + let parts: Vec<&str> = cookie.trim().splitn(2, '=').collect(); + if parts.len() == 2 && parts[0] == "access_token" { + Some(parts[1].to_string()) + } else { + None + } + }); + + if token.is_none() { + return Ok(false); + } + + tracing::debug!("Found access_token in cookies"); + + // Use same OAuth validation as Bearer token + let settings = req.app_data::>().unwrap(); + let user = super::f_oauth::fetch_user(settings.auth_url.as_str(), &token.unwrap()) + .await + .map_err(|err| format!("{err}"))?; + + // Control access using user role + tracing::debug!("ACL check for role (cookie auth): {}", user.role.clone()); + let acl_vals = actix_casbin_auth::CasbinVals { + subject: user.role.clone(), + domain: None, + }; + + if req.extensions_mut().insert(Arc::new(user)).is_some() { + return Err("user already logged".to_string()); + } + + if req.extensions_mut().insert(acl_vals).is_some() { + return Err("Something wrong with access control".to_string()); + } + + Ok(true) +} diff --git a/src/middleware/authentication/method/f_oauth.rs b/src/middleware/authentication/method/f_oauth.rs index 4934dc3..3d3ea42 100644 --- a/src/middleware/authentication/method/f_oauth.rs +++ b/src/middleware/authentication/method/f_oauth.rs @@ -52,7 +52,7 @@ pub async fn try_oauth(req: &mut ServiceRequest) -> Result { Ok(true) } -async fn fetch_user(auth_url: &str, token: &str) -> Result { +pub async fn fetch_user(auth_url: &str, token: &str) -> Result { let client = reqwest::Client::new(); let resp = client .get(auth_url) diff --git a/src/middleware/authentication/method/mod.rs b/src/middleware/authentication/method/mod.rs index c258fe4..48b802b 100644 --- a/src/middleware/authentication/method/mod.rs +++ b/src/middleware/authentication/method/mod.rs @@ -1,9 +1,11 @@ mod f_agent; mod f_anonym; +mod f_cookie; mod f_hmac; mod f_oauth; pub use f_agent::try_agent; pub use f_anonym::anonym; +pub use f_cookie::try_cookie; pub use f_hmac::try_hmac; pub use f_oauth::try_oauth; From e8d739a644768ce24c4fd10b653360e4471e54b4 Mon Sep 17 00:00:00 2001 From: vsilent Date: Mon, 29 Dec 2025 13:51:14 +0200 Subject: [PATCH 23/35] Marketplace API init --- ...c3e6fe803644553f9cf879271e5b86fe11a5d.json | 150 ++++++ ...3709286b2a50446caa2a609aaf77af12b30bb.json | 17 + ...5f54d89279057657c92305f606522fa142cf7.json | 14 + ...c323869489c6dc7e17479b647f0aa799df910.json | 14 + ...bda940a334195e3f15cae22153762131a247b.json | 23 + ...2077a054026cb2bc0c010aba218506e76110f.json | 14 +- ...d77692bd1a336be4d06ff6e0ac6831164617e.json | 14 +- ...4d82beb1dedc0f62405d008f18045df981277.json | 22 + ...bace6cc4a4d068392f7b58f2d165042ab509e.json | 16 + ...423869bd7b79dd5b246d80f0b6f39ce4659dc.json | 14 +- ...85b37f0bcfba5f07e131ab4d67df659344034.json | 142 ++++++ ...d646a3305a10349e9422c45e8e47bbd911ab9.json | 140 ++++++ ...444c6c2656615fb29b4c04031a090cf103bdd.json | 68 +++ ...b4d54ef603448c0c44272aec8f2ff04920b83.json | 14 +- ...6706ad8a6255bba2812d4e32da205773c6de9.json | 64 +++ ...1623b22207dc86d11b5d4227d5893a0199983.json | 142 ++++++ ...a1f5406b31542b6b0219d7daa1705bf7b2f37.json | 22 + TODO.md | 27 ++ configuration.yaml.dist | 27 ++ .../20251229120000_marketplace.down.sql | 43 ++ migrations/20251229120000_marketplace.up.sql | 201 ++++++++ ...29121000_casbin_marketplace_rules.down.sql | 12 + ...1229121000_casbin_marketplace_rules.up.sql | 16 + src/db/marketplace.rs | 445 ++++++++++++++++++ src/db/mod.rs | 1 + .../authentication/method/f_cookie.rs | 1 - src/models/marketplace.rs | 40 ++ src/models/mod.rs | 2 + src/models/project.rs | 6 + src/routes/marketplace/admin.rs | 69 +++ src/routes/marketplace/creator.rs | 174 +++++++ src/routes/marketplace/mod.rs | 7 + src/routes/marketplace/public.rs | 49 ++ src/routes/mod.rs | 2 + src/startup.rs | 21 + 35 files changed, 2028 insertions(+), 5 deletions(-) create mode 100644 .sqlx/query-073f2677aeaea2595771abbf5d2c3e6fe803644553f9cf879271e5b86fe11a5d.json create mode 100644 .sqlx/query-0bb6c35cba6f3c5573cf45c42b93709286b2a50446caa2a609aaf77af12b30bb.json create mode 100644 .sqlx/query-0dab58aa1022e2c1f4320f232195f54d89279057657c92305f606522fa142cf7.json create mode 100644 .sqlx/query-0faf1a2932ba1b37fc9f982bc86c323869489c6dc7e17479b647f0aa799df910.json create mode 100644 .sqlx/query-17560a0750685b4b5fc01f4df36bda940a334195e3f15cae22153762131a247b.json create mode 100644 .sqlx/query-3efacedb58ab13dad5eeaa4454a4d82beb1dedc0f62405d008f18045df981277.json create mode 100644 .sqlx/query-5bf9f8aacbe676339d0811d305abace6cc4a4d068392f7b58f2d165042ab509e.json create mode 100644 .sqlx/query-8b44ddf6b3e98a100756fa1f80685b37f0bcfba5f07e131ab4d67df659344034.json create mode 100644 .sqlx/query-8c4c8b7e304bbc02d727bcc2507d646a3305a10349e9422c45e8e47bbd911ab9.json create mode 100644 .sqlx/query-ab22f5f84d90a3c2717cea339f6444c6c2656615fb29b4c04031a090cf103bdd.json create mode 100644 .sqlx/query-f93b65a30034b0558781a3173986706ad8a6255bba2812d4e32da205773c6de9.json create mode 100644 .sqlx/query-fd4227629d262e5ef9ee83458441623b22207dc86d11b5d4227d5893a0199983.json create mode 100644 .sqlx/query-ffd49d0e0354d8d4010863204b1a1f5406b31542b6b0219d7daa1705bf7b2f37.json create mode 100644 configuration.yaml.dist create mode 100644 migrations/20251229120000_marketplace.down.sql create mode 100644 migrations/20251229120000_marketplace.up.sql create mode 100644 migrations/20251229121000_casbin_marketplace_rules.down.sql create mode 100644 migrations/20251229121000_casbin_marketplace_rules.up.sql create mode 100644 src/db/marketplace.rs create mode 100644 src/models/marketplace.rs create mode 100644 src/routes/marketplace/admin.rs create mode 100644 src/routes/marketplace/creator.rs create mode 100644 src/routes/marketplace/mod.rs create mode 100644 src/routes/marketplace/public.rs diff --git a/.sqlx/query-073f2677aeaea2595771abbf5d2c3e6fe803644553f9cf879271e5b86fe11a5d.json b/.sqlx/query-073f2677aeaea2595771abbf5d2c3e6fe803644553f9cf879271e5b86fe11a5d.json new file mode 100644 index 0000000..9735af5 --- /dev/null +++ b/.sqlx/query-073f2677aeaea2595771abbf5d2c3e6fe803644553f9cf879271e5b86fe11a5d.json @@ -0,0 +1,150 @@ +{ + "db_name": "PostgreSQL", + "query": "INSERT INTO stack_template (\n creator_user_id, creator_name, name, slug,\n short_description, long_description, category_id,\n tags, tech_stack, status\n ) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,'draft')\n RETURNING \n id,\n creator_user_id,\n creator_name,\n name,\n slug,\n short_description,\n long_description,\n category_id,\n tags,\n tech_stack,\n status,\n plan_type,\n price,\n currency,\n is_configurable,\n view_count,\n deploy_count,\n average_rating,\n created_at,\n updated_at,\n approved_at\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "creator_user_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "creator_name", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "name", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "slug", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "short_description", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "long_description", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "category_id", + "type_info": "Int4" + }, + { + "ordinal": 8, + "name": "tags", + "type_info": "Jsonb" + }, + { + "ordinal": 9, + "name": "tech_stack", + "type_info": "Jsonb" + }, + { + "ordinal": 10, + "name": "status", + "type_info": "Varchar" + }, + { + "ordinal": 11, + "name": "plan_type", + "type_info": "Varchar" + }, + { + "ordinal": 12, + "name": "price", + "type_info": "Float8" + }, + { + "ordinal": 13, + "name": "currency", + "type_info": "Varchar" + }, + { + "ordinal": 14, + "name": "is_configurable", + "type_info": "Bool" + }, + { + "ordinal": 15, + "name": "view_count", + "type_info": "Int4" + }, + { + "ordinal": 16, + "name": "deploy_count", + "type_info": "Int4" + }, + { + "ordinal": 17, + "name": "average_rating", + "type_info": "Float4" + }, + { + "ordinal": 18, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 19, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 20, + "name": "approved_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Text", + "Text", + "Int4", + "Jsonb", + "Jsonb" + ] + }, + "nullable": [ + false, + false, + true, + false, + false, + true, + true, + true, + true, + true, + false, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true + ] + }, + "hash": "073f2677aeaea2595771abbf5d2c3e6fe803644553f9cf879271e5b86fe11a5d" +} diff --git a/.sqlx/query-0bb6c35cba6f3c5573cf45c42b93709286b2a50446caa2a609aaf77af12b30bb.json b/.sqlx/query-0bb6c35cba6f3c5573cf45c42b93709286b2a50446caa2a609aaf77af12b30bb.json new file mode 100644 index 0000000..5f0a36e --- /dev/null +++ b/.sqlx/query-0bb6c35cba6f3c5573cf45c42b93709286b2a50446caa2a609aaf77af12b30bb.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "INSERT INTO stack_template_review (template_id, reviewer_user_id, decision, review_reason, reviewed_at) VALUES ($1::uuid, $2, $3, $4, now())", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Varchar", + "Varchar", + "Text" + ] + }, + "nullable": [] + }, + "hash": "0bb6c35cba6f3c5573cf45c42b93709286b2a50446caa2a609aaf77af12b30bb" +} diff --git a/.sqlx/query-0dab58aa1022e2c1f4320f232195f54d89279057657c92305f606522fa142cf7.json b/.sqlx/query-0dab58aa1022e2c1f4320f232195f54d89279057657c92305f606522fa142cf7.json new file mode 100644 index 0000000..3e6250a --- /dev/null +++ b/.sqlx/query-0dab58aa1022e2c1f4320f232195f54d89279057657c92305f606522fa142cf7.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE stack_template_version SET is_latest = false WHERE template_id = $1 AND is_latest = true", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "0dab58aa1022e2c1f4320f232195f54d89279057657c92305f606522fa142cf7" +} diff --git a/.sqlx/query-0faf1a2932ba1b37fc9f982bc86c323869489c6dc7e17479b647f0aa799df910.json b/.sqlx/query-0faf1a2932ba1b37fc9f982bc86c323869489c6dc7e17479b647f0aa799df910.json new file mode 100644 index 0000000..5b7cb8e --- /dev/null +++ b/.sqlx/query-0faf1a2932ba1b37fc9f982bc86c323869489c6dc7e17479b647f0aa799df910.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE stack_template SET status = 'submitted' WHERE id = $1::uuid AND status IN ('draft','rejected')", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "0faf1a2932ba1b37fc9f982bc86c323869489c6dc7e17479b647f0aa799df910" +} diff --git a/.sqlx/query-17560a0750685b4b5fc01f4df36bda940a334195e3f15cae22153762131a247b.json b/.sqlx/query-17560a0750685b4b5fc01f4df36bda940a334195e3f15cae22153762131a247b.json new file mode 100644 index 0000000..5cd8517 --- /dev/null +++ b/.sqlx/query-17560a0750685b4b5fc01f4df36bda940a334195e3f15cae22153762131a247b.json @@ -0,0 +1,23 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE stack_template SET \n name = COALESCE($2, name),\n short_description = COALESCE($3, short_description),\n long_description = COALESCE($4, long_description),\n category_id = COALESCE($5, category_id),\n tags = COALESCE($6, tags),\n tech_stack = COALESCE($7, tech_stack),\n plan_type = COALESCE($8, plan_type),\n price = COALESCE($9, price),\n currency = COALESCE($10, currency)\n WHERE id = $1::uuid", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Varchar", + "Text", + "Text", + "Int4", + "Jsonb", + "Jsonb", + "Varchar", + "Float8", + "Varchar" + ] + }, + "nullable": [] + }, + "hash": "17560a0750685b4b5fc01f4df36bda940a334195e3f15cae22153762131a247b" +} diff --git a/.sqlx/query-2c7065ccf4a0a527087754db39a2077a054026cb2bc0c010aba218506e76110f.json b/.sqlx/query-2c7065ccf4a0a527087754db39a2077a054026cb2bc0c010aba218506e76110f.json index 3524e58..4c5595e 100644 --- a/.sqlx/query-2c7065ccf4a0a527087754db39a2077a054026cb2bc0c010aba218506e76110f.json +++ b/.sqlx/query-2c7065ccf4a0a527087754db39a2077a054026cb2bc0c010aba218506e76110f.json @@ -42,6 +42,16 @@ "ordinal": 7, "name": "request_json", "type_info": "Json" + }, + { + "ordinal": 8, + "name": "source_template_id", + "type_info": "Uuid" + }, + { + "ordinal": 9, + "name": "template_version", + "type_info": "Varchar" } ], "parameters": { @@ -57,7 +67,9 @@ false, false, false, - false + false, + true, + true ] }, "hash": "2c7065ccf4a0a527087754db39a2077a054026cb2bc0c010aba218506e76110f" diff --git a/.sqlx/query-3dd9013b8856be2d991a656c3cdd77692bd1a336be4d06ff6e0ac6831164617e.json b/.sqlx/query-3dd9013b8856be2d991a656c3cdd77692bd1a336be4d06ff6e0ac6831164617e.json index 5c8c7ac..f8f958e 100644 --- a/.sqlx/query-3dd9013b8856be2d991a656c3cdd77692bd1a336be4d06ff6e0ac6831164617e.json +++ b/.sqlx/query-3dd9013b8856be2d991a656c3cdd77692bd1a336be4d06ff6e0ac6831164617e.json @@ -42,6 +42,16 @@ "ordinal": 7, "name": "request_json", "type_info": "Json" + }, + { + "ordinal": 8, + "name": "source_template_id", + "type_info": "Uuid" + }, + { + "ordinal": 9, + "name": "template_version", + "type_info": "Varchar" } ], "parameters": { @@ -57,7 +67,9 @@ false, false, false, - false + false, + true, + true ] }, "hash": "3dd9013b8856be2d991a656c3cdd77692bd1a336be4d06ff6e0ac6831164617e" diff --git a/.sqlx/query-3efacedb58ab13dad5eeaa4454a4d82beb1dedc0f62405d008f18045df981277.json b/.sqlx/query-3efacedb58ab13dad5eeaa4454a4d82beb1dedc0f62405d008f18045df981277.json new file mode 100644 index 0000000..ec0c073 --- /dev/null +++ b/.sqlx/query-3efacedb58ab13dad5eeaa4454a4d82beb1dedc0f62405d008f18045df981277.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT creator_user_id FROM stack_template WHERE id = $1", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "creator_user_id", + "type_info": "Varchar" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false + ] + }, + "hash": "3efacedb58ab13dad5eeaa4454a4d82beb1dedc0f62405d008f18045df981277" +} diff --git a/.sqlx/query-5bf9f8aacbe676339d0811d305abace6cc4a4d068392f7b58f2d165042ab509e.json b/.sqlx/query-5bf9f8aacbe676339d0811d305abace6cc4a4d068392f7b58f2d165042ab509e.json new file mode 100644 index 0000000..e01c813 --- /dev/null +++ b/.sqlx/query-5bf9f8aacbe676339d0811d305abace6cc4a4d068392f7b58f2d165042ab509e.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE stack_template SET status = $2, approved_at = CASE WHEN $3 THEN now() ELSE approved_at END WHERE id = $1::uuid", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Varchar", + "Bool" + ] + }, + "nullable": [] + }, + "hash": "5bf9f8aacbe676339d0811d305abace6cc4a4d068392f7b58f2d165042ab509e" +} diff --git a/.sqlx/query-5fea60d7574cfd238a7cbae4d93423869bd7b79dd5b246d80f0b6f39ce4659dc.json b/.sqlx/query-5fea60d7574cfd238a7cbae4d93423869bd7b79dd5b246d80f0b6f39ce4659dc.json index 6c81374..cd18bf7 100644 --- a/.sqlx/query-5fea60d7574cfd238a7cbae4d93423869bd7b79dd5b246d80f0b6f39ce4659dc.json +++ b/.sqlx/query-5fea60d7574cfd238a7cbae4d93423869bd7b79dd5b246d80f0b6f39ce4659dc.json @@ -42,6 +42,16 @@ "ordinal": 7, "name": "request_json", "type_info": "Json" + }, + { + "ordinal": 8, + "name": "source_template_id", + "type_info": "Uuid" + }, + { + "ordinal": 9, + "name": "template_version", + "type_info": "Varchar" } ], "parameters": { @@ -57,7 +67,9 @@ false, false, false, - false + false, + true, + true ] }, "hash": "5fea60d7574cfd238a7cbae4d93423869bd7b79dd5b246d80f0b6f39ce4659dc" diff --git a/.sqlx/query-8b44ddf6b3e98a100756fa1f80685b37f0bcfba5f07e131ab4d67df659344034.json b/.sqlx/query-8b44ddf6b3e98a100756fa1f80685b37f0bcfba5f07e131ab4d67df659344034.json new file mode 100644 index 0000000..fa4b0fe --- /dev/null +++ b/.sqlx/query-8b44ddf6b3e98a100756fa1f80685b37f0bcfba5f07e131ab4d67df659344034.json @@ -0,0 +1,142 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT \n id,\n creator_user_id,\n creator_name,\n name,\n slug,\n short_description,\n long_description,\n category_id,\n tags,\n tech_stack,\n status,\n plan_type,\n price,\n currency,\n is_configurable,\n view_count,\n deploy_count,\n average_rating,\n created_at,\n updated_at,\n approved_at\n FROM stack_template WHERE creator_user_id = $1 ORDER BY created_at DESC", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "creator_user_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "creator_name", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "name", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "slug", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "short_description", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "long_description", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "category_id", + "type_info": "Int4" + }, + { + "ordinal": 8, + "name": "tags", + "type_info": "Jsonb" + }, + { + "ordinal": 9, + "name": "tech_stack", + "type_info": "Jsonb" + }, + { + "ordinal": 10, + "name": "status", + "type_info": "Varchar" + }, + { + "ordinal": 11, + "name": "plan_type", + "type_info": "Varchar" + }, + { + "ordinal": 12, + "name": "price", + "type_info": "Float8" + }, + { + "ordinal": 13, + "name": "currency", + "type_info": "Varchar" + }, + { + "ordinal": 14, + "name": "is_configurable", + "type_info": "Bool" + }, + { + "ordinal": 15, + "name": "view_count", + "type_info": "Int4" + }, + { + "ordinal": 16, + "name": "deploy_count", + "type_info": "Int4" + }, + { + "ordinal": 17, + "name": "average_rating", + "type_info": "Float4" + }, + { + "ordinal": 18, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 19, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 20, + "name": "approved_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + true, + false, + false, + true, + true, + true, + true, + true, + false, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true + ] + }, + "hash": "8b44ddf6b3e98a100756fa1f80685b37f0bcfba5f07e131ab4d67df659344034" +} diff --git a/.sqlx/query-8c4c8b7e304bbc02d727bcc2507d646a3305a10349e9422c45e8e47bbd911ab9.json b/.sqlx/query-8c4c8b7e304bbc02d727bcc2507d646a3305a10349e9422c45e8e47bbd911ab9.json new file mode 100644 index 0000000..7f4f2d0 --- /dev/null +++ b/.sqlx/query-8c4c8b7e304bbc02d727bcc2507d646a3305a10349e9422c45e8e47bbd911ab9.json @@ -0,0 +1,140 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT \n id,\n creator_user_id,\n creator_name,\n name,\n slug,\n short_description,\n long_description,\n category_id,\n tags,\n tech_stack,\n status,\n plan_type,\n price,\n currency,\n is_configurable,\n view_count,\n deploy_count,\n average_rating,\n created_at,\n updated_at,\n approved_at\n FROM stack_template WHERE status = 'submitted' ORDER BY created_at ASC", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "creator_user_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "creator_name", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "name", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "slug", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "short_description", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "long_description", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "category_id", + "type_info": "Int4" + }, + { + "ordinal": 8, + "name": "tags", + "type_info": "Jsonb" + }, + { + "ordinal": 9, + "name": "tech_stack", + "type_info": "Jsonb" + }, + { + "ordinal": 10, + "name": "status", + "type_info": "Varchar" + }, + { + "ordinal": 11, + "name": "plan_type", + "type_info": "Varchar" + }, + { + "ordinal": 12, + "name": "price", + "type_info": "Float8" + }, + { + "ordinal": 13, + "name": "currency", + "type_info": "Varchar" + }, + { + "ordinal": 14, + "name": "is_configurable", + "type_info": "Bool" + }, + { + "ordinal": 15, + "name": "view_count", + "type_info": "Int4" + }, + { + "ordinal": 16, + "name": "deploy_count", + "type_info": "Int4" + }, + { + "ordinal": 17, + "name": "average_rating", + "type_info": "Float4" + }, + { + "ordinal": 18, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 19, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 20, + "name": "approved_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false, + true, + false, + false, + true, + true, + true, + true, + true, + false, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true + ] + }, + "hash": "8c4c8b7e304bbc02d727bcc2507d646a3305a10349e9422c45e8e47bbd911ab9" +} diff --git a/.sqlx/query-ab22f5f84d90a3c2717cea339f6444c6c2656615fb29b4c04031a090cf103bdd.json b/.sqlx/query-ab22f5f84d90a3c2717cea339f6444c6c2656615fb29b4c04031a090cf103bdd.json new file mode 100644 index 0000000..f684d17 --- /dev/null +++ b/.sqlx/query-ab22f5f84d90a3c2717cea339f6444c6c2656615fb29b4c04031a090cf103bdd.json @@ -0,0 +1,68 @@ +{ + "db_name": "PostgreSQL", + "query": "INSERT INTO stack_template_version (\n template_id, version, stack_definition, definition_format, changelog, is_latest\n ) VALUES ($1,$2,$3,$4,$5,true)\n RETURNING id, template_id, version, stack_definition, definition_format, changelog, is_latest, created_at", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "template_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "version", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "stack_definition", + "type_info": "Jsonb" + }, + { + "ordinal": 4, + "name": "definition_format", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "changelog", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "is_latest", + "type_info": "Bool" + }, + { + "ordinal": 7, + "name": "created_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Varchar", + "Jsonb", + "Varchar", + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + true, + true, + true, + true + ] + }, + "hash": "ab22f5f84d90a3c2717cea339f6444c6c2656615fb29b4c04031a090cf103bdd" +} diff --git a/.sqlx/query-db15f82b91377978db22c48cf2fb4d54ef603448c0c44272aec8f2ff04920b83.json b/.sqlx/query-db15f82b91377978db22c48cf2fb4d54ef603448c0c44272aec8f2ff04920b83.json index 2841e6e..0300aa2 100644 --- a/.sqlx/query-db15f82b91377978db22c48cf2fb4d54ef603448c0c44272aec8f2ff04920b83.json +++ b/.sqlx/query-db15f82b91377978db22c48cf2fb4d54ef603448c0c44272aec8f2ff04920b83.json @@ -42,6 +42,16 @@ "ordinal": 7, "name": "request_json", "type_info": "Json" + }, + { + "ordinal": 8, + "name": "source_template_id", + "type_info": "Uuid" + }, + { + "ordinal": 9, + "name": "template_version", + "type_info": "Varchar" } ], "parameters": { @@ -62,7 +72,9 @@ false, false, false, - false + false, + true, + true ] }, "hash": "db15f82b91377978db22c48cf2fb4d54ef603448c0c44272aec8f2ff04920b83" diff --git a/.sqlx/query-f93b65a30034b0558781a3173986706ad8a6255bba2812d4e32da205773c6de9.json b/.sqlx/query-f93b65a30034b0558781a3173986706ad8a6255bba2812d4e32da205773c6de9.json new file mode 100644 index 0000000..7dff911 --- /dev/null +++ b/.sqlx/query-f93b65a30034b0558781a3173986706ad8a6255bba2812d4e32da205773c6de9.json @@ -0,0 +1,64 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT \n id,\n template_id,\n version,\n stack_definition,\n definition_format,\n changelog,\n is_latest,\n created_at\n FROM stack_template_version WHERE template_id = $1 AND is_latest = true LIMIT 1", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "template_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "version", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "stack_definition", + "type_info": "Jsonb" + }, + { + "ordinal": 4, + "name": "definition_format", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "changelog", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "is_latest", + "type_info": "Bool" + }, + { + "ordinal": 7, + "name": "created_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + false, + false, + true, + true, + true, + true + ] + }, + "hash": "f93b65a30034b0558781a3173986706ad8a6255bba2812d4e32da205773c6de9" +} diff --git a/.sqlx/query-fd4227629d262e5ef9ee83458441623b22207dc86d11b5d4227d5893a0199983.json b/.sqlx/query-fd4227629d262e5ef9ee83458441623b22207dc86d11b5d4227d5893a0199983.json new file mode 100644 index 0000000..1ab486e --- /dev/null +++ b/.sqlx/query-fd4227629d262e5ef9ee83458441623b22207dc86d11b5d4227d5893a0199983.json @@ -0,0 +1,142 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT \n id,\n creator_user_id,\n creator_name,\n name,\n slug,\n short_description,\n long_description,\n category_id,\n tags,\n tech_stack,\n status,\n plan_type,\n price,\n currency,\n is_configurable,\n view_count,\n deploy_count,\n average_rating,\n created_at,\n updated_at,\n approved_at\n FROM stack_template WHERE slug = $1 AND status = 'approved'", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "creator_user_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "creator_name", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "name", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "slug", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "short_description", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "long_description", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "category_id", + "type_info": "Int4" + }, + { + "ordinal": 8, + "name": "tags", + "type_info": "Jsonb" + }, + { + "ordinal": 9, + "name": "tech_stack", + "type_info": "Jsonb" + }, + { + "ordinal": 10, + "name": "status", + "type_info": "Varchar" + }, + { + "ordinal": 11, + "name": "plan_type", + "type_info": "Varchar" + }, + { + "ordinal": 12, + "name": "price", + "type_info": "Float8" + }, + { + "ordinal": 13, + "name": "currency", + "type_info": "Varchar" + }, + { + "ordinal": 14, + "name": "is_configurable", + "type_info": "Bool" + }, + { + "ordinal": 15, + "name": "view_count", + "type_info": "Int4" + }, + { + "ordinal": 16, + "name": "deploy_count", + "type_info": "Int4" + }, + { + "ordinal": 17, + "name": "average_rating", + "type_info": "Float4" + }, + { + "ordinal": 18, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 19, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 20, + "name": "approved_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + true, + false, + false, + true, + true, + true, + true, + true, + false, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true + ] + }, + "hash": "fd4227629d262e5ef9ee83458441623b22207dc86d11b5d4227d5893a0199983" +} diff --git a/.sqlx/query-ffd49d0e0354d8d4010863204b1a1f5406b31542b6b0219d7daa1705bf7b2f37.json b/.sqlx/query-ffd49d0e0354d8d4010863204b1a1f5406b31542b6b0219d7daa1705bf7b2f37.json new file mode 100644 index 0000000..fd95a35 --- /dev/null +++ b/.sqlx/query-ffd49d0e0354d8d4010863204b1a1f5406b31542b6b0219d7daa1705bf7b2f37.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT status FROM stack_template WHERE id = $1::uuid", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "status", + "type_info": "Varchar" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false + ] + }, + "hash": "ffd49d0e0354d8d4010863204b1a1f5406b31542b6b0219d7daa1705bf7b2f37" +} diff --git a/TODO.md b/TODO.md index aad65f3..68bc84a 100644 --- a/TODO.md +++ b/TODO.md @@ -1,5 +1,32 @@ # Stacker Development TODO +## MCP Tool Development + +- [ ] **GenerateComposeTool Implementation** + - Currently: Tool removed during Phase 3 due to ProjectForm schema complexity + - Issue: Needs proper understanding of ProjectForm structure (especially `custom.web` array and nested docker_image fields) + - TODO: + 1. Inspect actual ProjectForm structure in [src/forms/project/](src/forms/project/) + 2. Map correct field paths for docker_image (namespace, repository, tag) and port configuration + 3. Implement Docker Compose YAML generation from project metadata + - Reference: Previous implementation in [src/mcp/tools/compose.rs](src/mcp/tools/compose.rs) + - Status: Phase 3 complete with 15 tools (9 Phase 3 tools without GenerateComposeTool) + +- [ ] **MCP Browser-Based Client Support (Cookie Authentication)** + - Currently: Backend supports Bearer token auth (works for server-side clients like wscat, CLI tools) + - Issue: Browser WebSocket API cannot set `Authorization` header (W3C spec limitation) + - Impact: Browser-based MCP UI clients cannot connect (get 403 Forbidden) + - TODO: + 1. Create `src/middleware/authentication/method/f_cookie.rs` - Extract `access_token` from Cookie header + 2. Update `src/middleware/authentication/manager_middleware.rs` - Add `try_cookie()` after `try_oauth()` + 3. Export cookie method in `src/middleware/authentication/method/mod.rs` + 4. Test with wscat: `wscat -c ws://localhost:8000/mcp -H "Cookie: access_token=..."` + 5. Test with browser WebSocket connection + - Reference: Full implementation guide in [docs/MCP_BROWSER_AUTH.md](docs/MCP_BROWSER_AUTH.md) + - Priority: Medium (only needed for browser-based MCP clients) + - Status: Server-side clients work perfectly; browser support blocked until cookie auth added + - Note: Both auth methods should coexist - Bearer for servers, cookies for browsers + ## Agent Registration & Security - [ ] **Agent Registration Access Control** diff --git a/configuration.yaml.dist b/configuration.yaml.dist new file mode 100644 index 0000000..68f9b85 --- /dev/null +++ b/configuration.yaml.dist @@ -0,0 +1,27 @@ +#auth_url: http://127.0.0.1:8080/me +app_host: 127.0.0.1 +app_port: 8000 +auth_url: https://dev.try.direct/server/user/oauth_server/api/me +max_clients_number: 2 +database: + host: 127.0.0.1 + port: 5432 + username: postgres + password: postgres + database_name: stacker + +amqp: + host: 127.0.0.1 + port: 5672 + username: guest + password: guest + +# Vault configuration (can be overridden by environment variables) +vault: + address: http://127.0.0.1:8200 + token: change-me-dev-token + # KV mount/prefix for agent tokens, e.g. 'kv/agent' or 'agent' + agent_path_prefix: agent + +# Env overrides (optional): +# VAULT_ADDRESS, VAULT_TOKEN, VAULT_AGENT_PATH_PREFIX diff --git a/migrations/20251229120000_marketplace.down.sql b/migrations/20251229120000_marketplace.down.sql new file mode 100644 index 0000000..1866d76 --- /dev/null +++ b/migrations/20251229120000_marketplace.down.sql @@ -0,0 +1,43 @@ +-- Rollback TryDirect Marketplace Schema + +DROP TRIGGER IF EXISTS maintain_template_rating ON stack_template_rating; +DROP FUNCTION IF EXISTS update_template_average_rating(); + +DROP TRIGGER IF EXISTS update_stack_template_plan_updated_at ON stack_template_plan; +DROP TRIGGER IF EXISTS update_stack_template_updated_at ON stack_template; +DROP FUNCTION IF EXISTS update_updated_at_column(); + +DROP INDEX IF EXISTS idx_project_source_template; + +DROP INDEX IF EXISTS idx_purchase_creator; +DROP INDEX IF EXISTS idx_purchase_buyer; +DROP INDEX IF EXISTS idx_purchase_template; + +DROP INDEX IF EXISTS idx_template_rating_user; +DROP INDEX IF EXISTS idx_template_rating_template; + +DROP INDEX IF EXISTS idx_review_decision; +DROP INDEX IF EXISTS idx_review_template; + +DROP INDEX IF EXISTS idx_template_version_latest; +DROP INDEX IF EXISTS idx_template_version_template; + +DROP INDEX IF EXISTS idx_stack_template_category; +DROP INDEX IF EXISTS idx_stack_template_slug; +DROP INDEX IF EXISTS idx_stack_template_status; +DROP INDEX IF EXISTS idx_stack_template_creator; + +ALTER TABLE IF EXISTS stack DROP COLUMN IF EXISTS is_user_submitted; +ALTER TABLE IF EXISTS stack DROP COLUMN IF EXISTS marketplace_template_id; +ALTER TABLE IF EXISTS project DROP COLUMN IF EXISTS template_version; +ALTER TABLE IF EXISTS project DROP COLUMN IF EXISTS source_template_id; + +DROP TABLE IF EXISTS template_purchase; +DROP TABLE IF EXISTS stack_template_plan; +DROP TABLE IF EXISTS stack_template_rating; +DROP TABLE IF EXISTS stack_template_review; +DROP TABLE IF EXISTS stack_template_version; +DROP TABLE IF EXISTS stack_template; + +-- Keep categories table if used elsewhere; comment out to drop +-- DROP TABLE IF EXISTS stack_category; diff --git a/migrations/20251229120000_marketplace.up.sql b/migrations/20251229120000_marketplace.up.sql new file mode 100644 index 0000000..3c44ed2 --- /dev/null +++ b/migrations/20251229120000_marketplace.up.sql @@ -0,0 +1,201 @@ +-- TryDirect Marketplace Schema Migration + +-- Ensure UUID generation +CREATE EXTENSION IF NOT EXISTS pgcrypto; + +-- 1. Categories (needed by templates) +CREATE TABLE IF NOT EXISTS stack_category ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) UNIQUE NOT NULL +); + +-- 2. Core marketplace tables +CREATE TABLE IF NOT EXISTS stack_template ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + creator_user_id VARCHAR(50) NOT NULL, + creator_name VARCHAR(255), + name VARCHAR(255) NOT NULL, + slug VARCHAR(255) UNIQUE NOT NULL, + short_description TEXT, + long_description TEXT, + category_id INTEGER REFERENCES stack_category(id), + tags JSONB DEFAULT '[]'::jsonb, + tech_stack JSONB DEFAULT '{}'::jsonb, + status VARCHAR(50) NOT NULL DEFAULT 'draft' CHECK ( + status IN ('draft', 'submitted', 'under_review', 'approved', 'rejected', 'deprecated') + ), + plan_type VARCHAR(50) DEFAULT 'free' CHECK ( + plan_type IN ('free', 'one_time', 'subscription') + ), + price DOUBLE PRECISION, + currency VARCHAR(3) DEFAULT 'USD', + is_configurable BOOLEAN DEFAULT true, + view_count INTEGER DEFAULT 0, + deploy_count INTEGER DEFAULT 0, + average_rating REAL, + created_at TIMESTAMP WITH TIME ZONE DEFAULT now(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT now(), + approved_at TIMESTAMP WITH TIME ZONE +); + +CREATE TABLE IF NOT EXISTS stack_template_version ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + template_id UUID NOT NULL REFERENCES stack_template(id) ON DELETE CASCADE, + version VARCHAR(20) NOT NULL, + stack_definition JSONB NOT NULL, + definition_format VARCHAR(20) DEFAULT 'yaml', + changelog TEXT, + is_latest BOOLEAN DEFAULT false, + created_at TIMESTAMP WITH TIME ZONE DEFAULT now(), + UNIQUE(template_id, version) +); + +CREATE TABLE IF NOT EXISTS stack_template_review ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + template_id UUID NOT NULL REFERENCES stack_template(id) ON DELETE CASCADE, + reviewer_user_id VARCHAR(50), + decision VARCHAR(50) NOT NULL DEFAULT 'pending' CHECK ( + decision IN ('pending', 'approved', 'rejected', 'needs_changes') + ), + review_reason TEXT, + security_checklist JSONB DEFAULT '{ + "no_secrets": null, + "no_hardcoded_creds": null, + "valid_docker_syntax": null, + "no_malicious_code": null + }'::jsonb, + submitted_at TIMESTAMP WITH TIME ZONE DEFAULT now(), + reviewed_at TIMESTAMP WITH TIME ZONE +); + +CREATE TABLE IF NOT EXISTS stack_template_rating ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + template_id UUID NOT NULL REFERENCES stack_template(id) ON DELETE CASCADE, + user_id VARCHAR(50) NOT NULL, + rating INTEGER NOT NULL CHECK (rating >= 1 AND rating <= 5), + rate_category VARCHAR(100), + review_text TEXT, + is_flagged BOOLEAN DEFAULT false, + created_at TIMESTAMP WITH TIME ZONE DEFAULT now(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT now(), + UNIQUE(template_id, user_id, rate_category) +); + +-- Monetization +CREATE TABLE IF NOT EXISTS stack_template_plan ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + template_id UUID NOT NULL REFERENCES stack_template(id) ON DELETE CASCADE, + plan_code VARCHAR(50) NOT NULL, + price DOUBLE PRECISION, + currency VARCHAR(3) DEFAULT 'USD', + period VARCHAR(20) DEFAULT 'one_time', + description TEXT, + includes JSONB DEFAULT '[]'::jsonb, + created_at TIMESTAMP WITH TIME ZONE DEFAULT now(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT now() +); + +CREATE TABLE IF NOT EXISTS template_purchase ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + template_id UUID NOT NULL REFERENCES stack_template(id), + plan_id UUID NOT NULL REFERENCES stack_template_plan(id), + buyer_user_id VARCHAR(50) NOT NULL, + creator_user_id VARCHAR(50) NOT NULL, + amount DOUBLE PRECISION, + currency VARCHAR(3), + stripe_charge_id VARCHAR(255), + creator_share DOUBLE PRECISION, + platform_share DOUBLE PRECISION, + status VARCHAR(50) DEFAULT 'completed', + purchased_at TIMESTAMP WITH TIME ZONE DEFAULT now(), + refunded_at TIMESTAMP WITH TIME ZONE +); + +-- Extend existing tables +DO $$ BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_name = 'project' AND column_name = 'source_template_id' + ) THEN + ALTER TABLE project ADD COLUMN source_template_id UUID REFERENCES stack_template(id); + END IF; +END $$; + +DO $$ BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_name = 'project' AND column_name = 'template_version' + ) THEN + ALTER TABLE project ADD COLUMN template_version VARCHAR(20); + END IF; +END $$; + +-- Indexes +CREATE INDEX IF NOT EXISTS idx_stack_template_creator ON stack_template(creator_user_id); +CREATE INDEX IF NOT EXISTS idx_stack_template_status ON stack_template(status); +CREATE INDEX IF NOT EXISTS idx_stack_template_slug ON stack_template(slug); +CREATE INDEX IF NOT EXISTS idx_stack_template_category ON stack_template(category_id); + +CREATE INDEX IF NOT EXISTS idx_template_version_template ON stack_template_version(template_id); +CREATE INDEX IF NOT EXISTS idx_template_version_latest ON stack_template_version(template_id, is_latest) WHERE is_latest = true; + +CREATE INDEX IF NOT EXISTS idx_review_template ON stack_template_review(template_id); +CREATE INDEX IF NOT EXISTS idx_review_decision ON stack_template_review(decision); + +CREATE INDEX IF NOT EXISTS idx_template_rating_template ON stack_template_rating(template_id); +CREATE INDEX IF NOT EXISTS idx_template_rating_user ON stack_template_rating(user_id); + +CREATE INDEX IF NOT EXISTS idx_purchase_template ON template_purchase(template_id); +CREATE INDEX IF NOT EXISTS idx_purchase_buyer ON template_purchase(buyer_user_id); +CREATE INDEX IF NOT EXISTS idx_purchase_creator ON template_purchase(creator_user_id); + +CREATE INDEX IF NOT EXISTS idx_project_source_template ON project(source_template_id); + +-- Triggers +CREATE OR REPLACE FUNCTION update_updated_at_column() +RETURNS TRIGGER AS $$ +BEGIN + NEW.updated_at = now(); + RETURN NEW; +END; +$$ language 'plpgsql'; + +DROP TRIGGER IF EXISTS update_stack_template_updated_at ON stack_template; +CREATE TRIGGER update_stack_template_updated_at + BEFORE UPDATE ON stack_template + FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); + +DROP TRIGGER IF EXISTS update_stack_template_plan_updated_at ON stack_template_plan; +CREATE TRIGGER update_stack_template_plan_updated_at + BEFORE UPDATE ON stack_template_plan + FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); + +-- Maintain average_rating on stack_template +CREATE OR REPLACE FUNCTION update_template_average_rating() +RETURNS TRIGGER AS $$ +BEGIN + UPDATE stack_template + SET average_rating = ( + SELECT AVG(rating::DECIMAL) + FROM stack_template_rating + WHERE template_id = COALESCE(OLD.template_id, NEW.template_id) + ) + WHERE id = COALESCE(OLD.template_id, NEW.template_id); + RETURN NULL; +END; +$$ language 'plpgsql'; + +DROP TRIGGER IF EXISTS maintain_template_rating ON stack_template_rating; +CREATE TRIGGER maintain_template_rating + AFTER INSERT OR UPDATE OR DELETE ON stack_template_rating + FOR EACH ROW EXECUTE FUNCTION update_template_average_rating(); + +-- Seed sample categories +INSERT INTO stack_category (name) +VALUES + ('AI Agents'), + ('Data Pipelines'), + ('SaaS Starter'), + ('Dev Tools'), + ('Automation') +ON CONFLICT DO NOTHING; diff --git a/migrations/20251229121000_casbin_marketplace_rules.down.sql b/migrations/20251229121000_casbin_marketplace_rules.down.sql new file mode 100644 index 0000000..29018e0 --- /dev/null +++ b/migrations/20251229121000_casbin_marketplace_rules.down.sql @@ -0,0 +1,12 @@ +-- Rollback Casbin rules for Marketplace endpoints +DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_anonymous' AND v1 = '/api/templates' AND v2 = 'GET'; +DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_anonymous' AND v1 = '/api/templates/:slug' AND v2 = 'GET'; + +DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_user' AND v1 = '/api/templates' AND v2 = 'POST'; +DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_user' AND v1 = '/api/templates/:id' AND v2 = 'PUT'; +DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_user' AND v1 = '/api/templates/:id/submit' AND v2 = 'POST'; +DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_user' AND v1 = '/api/templates/mine' AND v2 = 'GET'; + +DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_admin' AND v1 = '/api/admin/templates' AND v2 = 'GET'; +DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_admin' AND v1 = '/api/admin/templates/:id/approve' AND v2 = 'POST'; +DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_admin' AND v1 = '/api/admin/templates/:id/reject' AND v2 = 'POST'; diff --git a/migrations/20251229121000_casbin_marketplace_rules.up.sql b/migrations/20251229121000_casbin_marketplace_rules.up.sql new file mode 100644 index 0000000..03f2917 --- /dev/null +++ b/migrations/20251229121000_casbin_marketplace_rules.up.sql @@ -0,0 +1,16 @@ +-- Casbin rules for Marketplace endpoints + +-- Public read rules +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_anonymous', '/api/templates', 'GET', '', '', ''); +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_anonymous', '/api/templates/:slug', 'GET', '', '', ''); + +-- Creator rules +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/api/templates', 'POST', '', '', ''); +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/api/templates/:id', 'PUT', '', '', ''); +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/api/templates/:id/submit', 'POST', '', '', ''); +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/api/templates/mine', 'GET', '', '', ''); + +-- Admin moderation rules +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/admin/templates', 'GET', '', '', ''); +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/admin/templates/:id/approve', 'POST', '', '', ''); +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/admin/templates/:id/reject', 'POST', '', '', ''); diff --git a/src/db/marketplace.rs b/src/db/marketplace.rs new file mode 100644 index 0000000..632dd9f --- /dev/null +++ b/src/db/marketplace.rs @@ -0,0 +1,445 @@ +use crate::models::{StackTemplate, StackTemplateVersion}; +use sqlx::PgPool; +use tracing::Instrument; + +pub async fn list_approved(pool: &PgPool, category: Option<&str>, tag: Option<&str>, sort: Option<&str>) -> Result, String> { + let mut base = String::from( + r#"SELECT + id, + creator_user_id, + creator_name, + name, + slug, + short_description, + long_description, + category_id, + tags, + tech_stack, + status, + plan_type, + price, + currency, + is_configurable, + view_count, + deploy_count, + average_rating, + created_at, + updated_at, + approved_at + FROM stack_template + WHERE status = 'approved'"#, + ); + + if category.is_some() { + base.push_str(" AND category_id = (SELECT id FROM stack_category WHERE name = $1)"); + } + if tag.is_some() { + base.push_str(r" AND tags \? $2"); + } + + match sort.unwrap_or("recent") { + "popular" => base.push_str(" ORDER BY deploy_count DESC, view_count DESC"), + "rating" => base.push_str(" ORDER BY average_rating DESC NULLS LAST"), + _ => base.push_str(" ORDER BY approved_at DESC NULLS LAST, created_at DESC"), + } + + let query_span = tracing::info_span!("marketplace_list_approved"); + + let res = if category.is_some() && tag.is_some() { + sqlx::query_as::<_, StackTemplate>(&base) + .bind(category.unwrap()) + .bind(tag.unwrap()) + .fetch_all(pool) + .instrument(query_span) + .await + } else if category.is_some() { + sqlx::query_as::<_, StackTemplate>(&base) + .bind(category.unwrap()) + .fetch_all(pool) + .instrument(query_span) + .await + } else if tag.is_some() { + sqlx::query_as::<_, StackTemplate>(&base) + .bind(tag.unwrap()) + .fetch_all(pool) + .instrument(query_span) + .await + } else { + sqlx::query_as::<_, StackTemplate>(&base) + .fetch_all(pool) + .instrument(query_span) + .await + }; + + res.map_err(|e| { + tracing::error!("list_approved error: {:?}", e); + "Internal Server Error".to_string() + }) +} + +pub async fn get_by_slug_with_latest(pool: &PgPool, slug: &str) -> Result<(StackTemplate, Option), String> { + let query_span = tracing::info_span!("marketplace_get_by_slug_with_latest", slug = %slug); + + let template = sqlx::query_as!( + StackTemplate, + r#"SELECT + id, + creator_user_id, + creator_name, + name, + slug, + short_description, + long_description, + category_id, + tags, + tech_stack, + status, + plan_type, + price, + currency, + is_configurable, + view_count, + deploy_count, + average_rating, + created_at, + updated_at, + approved_at + FROM stack_template WHERE slug = $1 AND status = 'approved'"#, + slug + ) + .fetch_one(pool) + .instrument(query_span.clone()) + .await + .map_err(|e| { + tracing::error!("get_by_slug template error: {:?}", e); + "Not Found".to_string() + })?; + + let version = sqlx::query_as!( + StackTemplateVersion, + r#"SELECT + id, + template_id, + version, + stack_definition, + definition_format, + changelog, + is_latest, + created_at + FROM stack_template_version WHERE template_id = $1 AND is_latest = true LIMIT 1"#, + template.id + ) + .fetch_optional(pool) + .instrument(query_span) + .await + .map_err(|e| { + tracing::error!("get_by_slug version error: {:?}", e); + "Internal Server Error".to_string() + })?; + + Ok((template, version)) +} + +pub async fn create_draft( + pool: &PgPool, + creator_user_id: &str, + creator_name: Option<&str>, + name: &str, + slug: &str, + short_description: Option<&str>, + long_description: Option<&str>, + category_id: Option, + tags: serde_json::Value, + tech_stack: serde_json::Value, +) -> Result { + let query_span = tracing::info_span!("marketplace_create_draft", slug = %slug); + + let rec = sqlx::query_as!( + StackTemplate, + r#"INSERT INTO stack_template ( + creator_user_id, creator_name, name, slug, + short_description, long_description, category_id, + tags, tech_stack, status + ) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,'draft') + RETURNING + id, + creator_user_id, + creator_name, + name, + slug, + short_description, + long_description, + category_id, + tags, + tech_stack, + status, + plan_type, + price, + currency, + is_configurable, + view_count, + deploy_count, + average_rating, + created_at, + updated_at, + approved_at + "#, + creator_user_id, + creator_name, + name, + slug, + short_description, + long_description, + category_id, + tags, + tech_stack + ) + .fetch_one(pool) + .instrument(query_span) + .await + .map_err(|e| { + tracing::error!("create_draft error: {:?}", e); + "Internal Server Error".to_string() + })?; + + Ok(rec) +} + +pub async fn set_latest_version(pool: &PgPool, template_id: &uuid::Uuid, version: &str, stack_definition: serde_json::Value, definition_format: Option<&str>, changelog: Option<&str>) -> Result { + let query_span = tracing::info_span!("marketplace_set_latest_version", template_id = %template_id); + + // Clear previous latest + sqlx::query!( + r#"UPDATE stack_template_version SET is_latest = false WHERE template_id = $1 AND is_latest = true"#, + template_id + ) + .execute(pool) + .instrument(query_span.clone()) + .await + .map_err(|e| { + tracing::error!("clear_latest error: {:?}", e); + "Internal Server Error".to_string() + })?; + + let rec = sqlx::query_as!( + StackTemplateVersion, + r#"INSERT INTO stack_template_version ( + template_id, version, stack_definition, definition_format, changelog, is_latest + ) VALUES ($1,$2,$3,$4,$5,true) + RETURNING id, template_id, version, stack_definition, definition_format, changelog, is_latest, created_at"#, + template_id, + version, + stack_definition, + definition_format, + changelog + ) + .fetch_one(pool) + .instrument(query_span) + .await + .map_err(|e| { + tracing::error!("set_latest_version error: {:?}", e); + "Internal Server Error".to_string() + })?; + + Ok(rec) +} + +pub async fn update_metadata(pool: &PgPool, template_id: &uuid::Uuid, name: Option<&str>, short_description: Option<&str>, long_description: Option<&str>, category_id: Option, tags: Option, tech_stack: Option, plan_type: Option<&str>, price: Option, currency: Option<&str>) -> Result { + let query_span = tracing::info_span!("marketplace_update_metadata", template_id = %template_id); + + // Update only allowed statuses + let status = sqlx::query_scalar!( + r#"SELECT status FROM stack_template WHERE id = $1::uuid"#, + template_id + ) + .fetch_one(pool) + .instrument(query_span.clone()) + .await + .map_err(|e| { + tracing::error!("get status error: {:?}", e); + "Not Found".to_string() + })?; + + if status != "draft" && status != "rejected" { + return Err("Template not editable in current status".to_string()); + } + + let res = sqlx::query!( + r#"UPDATE stack_template SET + name = COALESCE($2, name), + short_description = COALESCE($3, short_description), + long_description = COALESCE($4, long_description), + category_id = COALESCE($5, category_id), + tags = COALESCE($6, tags), + tech_stack = COALESCE($7, tech_stack), + plan_type = COALESCE($8, plan_type), + price = COALESCE($9, price), + currency = COALESCE($10, currency) + WHERE id = $1::uuid"#, + template_id, + name, + short_description, + long_description, + category_id, + tags, + tech_stack, + plan_type, + price, + currency + ) + .execute(pool) + .instrument(query_span) + .await + .map_err(|e| { + tracing::error!("update_metadata error: {:?}", e); + "Internal Server Error".to_string() + })?; + + Ok(res.rows_affected() > 0) +} + +pub async fn submit_for_review(pool: &PgPool, template_id: &uuid::Uuid) -> Result { + let query_span = tracing::info_span!("marketplace_submit_for_review", template_id = %template_id); + + let res = sqlx::query!( + r#"UPDATE stack_template SET status = 'submitted' WHERE id = $1::uuid AND status IN ('draft','rejected')"#, + template_id + ) + .execute(pool) + .instrument(query_span) + .await + .map_err(|e| { + tracing::error!("submit_for_review error: {:?}", e); + "Internal Server Error".to_string() + })?; + + Ok(res.rows_affected() > 0) +} + +pub async fn list_mine(pool: &PgPool, user_id: &str) -> Result, String> { + let query_span = tracing::info_span!("marketplace_list_mine", user = %user_id); + + sqlx::query_as!( + StackTemplate, + r#"SELECT + id, + creator_user_id, + creator_name, + name, + slug, + short_description, + long_description, + category_id, + tags, + tech_stack, + status, + plan_type, + price, + currency, + is_configurable, + view_count, + deploy_count, + average_rating, + created_at, + updated_at, + approved_at + FROM stack_template WHERE creator_user_id = $1 ORDER BY created_at DESC"#, + user_id + ) + .fetch_all(pool) + .instrument(query_span) + .await + .map_err(|e| { + tracing::error!("list_mine error: {:?}", e); + "Internal Server Error".to_string() + }) +} + +pub async fn admin_list_submitted(pool: &PgPool) -> Result, String> { + let query_span = tracing::info_span!("marketplace_admin_list_submitted"); + + sqlx::query_as!( + StackTemplate, + r#"SELECT + id, + creator_user_id, + creator_name, + name, + slug, + short_description, + long_description, + category_id, + tags, + tech_stack, + status, + plan_type, + price, + currency, + is_configurable, + view_count, + deploy_count, + average_rating, + created_at, + updated_at, + approved_at + FROM stack_template WHERE status = 'submitted' ORDER BY created_at ASC"# + ) + .fetch_all(pool) + .instrument(query_span) + .await + .map_err(|e| { + tracing::error!("admin_list_submitted error: {:?}", e); + "Internal Server Error".to_string() + }) +} + +pub async fn admin_decide(pool: &PgPool, template_id: &uuid::Uuid, reviewer_user_id: &str, decision: &str, review_reason: Option<&str>) -> Result { + let query_span = tracing::info_span!("marketplace_admin_decide", template_id = %template_id, decision = %decision); + + let valid = ["approved", "rejected", "needs_changes"]; + if !valid.contains(&decision) { + return Err("Invalid decision".to_string()); + } + + let mut tx = pool.begin().await.map_err(|e| { + tracing::error!("tx begin error: {:?}", e); + "Internal Server Error".to_string() + })?; + + sqlx::query!( + r#"INSERT INTO stack_template_review (template_id, reviewer_user_id, decision, review_reason, reviewed_at) VALUES ($1::uuid, $2, $3, $4, now())"#, + template_id, + reviewer_user_id, + decision, + review_reason + ) + .execute(&mut *tx) + .await + .map_err(|e| { + tracing::error!("insert review error: {:?}", e); + "Internal Server Error".to_string() + })?; + + let status_sql = if decision == "approved" { "approved" } else if decision == "rejected" { "rejected" } else { "under_review" }; + let should_set_approved = decision == "approved"; + + sqlx::query!( + r#"UPDATE stack_template SET status = $2, approved_at = CASE WHEN $3 THEN now() ELSE approved_at END WHERE id = $1::uuid"#, + template_id, + status_sql, + should_set_approved + ) + .execute(&mut *tx) + .await + .map_err(|e| { + tracing::error!("update template status error: {:?}", e); + "Internal Server Error".to_string() + })?; + + tx.commit().await.map_err(|e| { + tracing::error!("tx commit error: {:?}", e); + "Internal Server Error".to_string() + })?; + + Ok(true) +} diff --git a/src/db/mod.rs b/src/db/mod.rs index 539d487..5876f50 100644 --- a/src/db/mod.rs +++ b/src/db/mod.rs @@ -8,3 +8,4 @@ pub mod product; pub mod project; pub mod rating; pub(crate) mod server; +pub mod marketplace; diff --git a/src/middleware/authentication/method/f_cookie.rs b/src/middleware/authentication/method/f_cookie.rs index 16efc57..3fa3893 100644 --- a/src/middleware/authentication/method/f_cookie.rs +++ b/src/middleware/authentication/method/f_cookie.rs @@ -1,6 +1,5 @@ use crate::configuration::Settings; use crate::middleware::authentication::get_header; -use crate::models; use actix_web::{dev::ServiceRequest, web, HttpMessage}; use std::sync::Arc; diff --git a/src/models/marketplace.rs b/src/models/marketplace.rs new file mode 100644 index 0000000..2931612 --- /dev/null +++ b/src/models/marketplace.rs @@ -0,0 +1,40 @@ +use chrono::{DateTime, Utc}; +use serde_derive::{Deserialize, Serialize}; +use uuid::Uuid; + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, sqlx::FromRow)] +pub struct StackTemplate { + pub id: Uuid, + pub creator_user_id: String, + pub creator_name: Option, + pub name: String, + pub slug: String, + pub short_description: Option, + pub long_description: Option, + pub category_id: Option, + pub tags: serde_json::Value, + pub tech_stack: serde_json::Value, + pub status: String, + pub plan_type: Option, + pub price: Option, + pub currency: Option, + pub is_configurable: Option, + pub view_count: Option, + pub deploy_count: Option, + pub average_rating: Option, + pub created_at: Option>, + pub updated_at: Option>, + pub approved_at: Option>, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, sqlx::FromRow)] +pub struct StackTemplateVersion { + pub id: Uuid, + pub template_id: Uuid, + pub version: String, + pub stack_definition: serde_json::Value, + pub definition_format: Option, + pub changelog: Option, + pub is_latest: Option, + pub created_at: Option>, +} diff --git a/src/models/mod.rs b/src/models/mod.rs index 34e6c17..d4f0cd1 100644 --- a/src/models/mod.rs +++ b/src/models/mod.rs @@ -11,6 +11,7 @@ pub mod rating; mod rules; mod server; pub mod user; +pub mod marketplace; pub use agent::*; pub use agreement::*; @@ -25,3 +26,4 @@ pub use rating::*; pub use rules::*; pub use server::*; pub use user::*; +pub use marketplace::*; diff --git a/src/models/project.rs b/src/models/project.rs index 164f34c..62c4308 100644 --- a/src/models/project.rs +++ b/src/models/project.rs @@ -14,6 +14,8 @@ pub struct Project { pub request_json: Value, pub created_at: DateTime, pub updated_at: DateTime, + pub source_template_id: Option, // marketplace template UUID + pub template_version: Option, // marketplace template version } impl Project { @@ -27,6 +29,8 @@ impl Project { request_json, created_at: Utc::now(), updated_at: Utc::now(), + source_template_id: None, + template_version: None, } } } @@ -42,6 +46,8 @@ impl Default for Project { request_json: Default::default(), created_at: Default::default(), updated_at: Default::default(), + source_template_id: None, + template_version: None, } } } diff --git a/src/routes/marketplace/admin.rs b/src/routes/marketplace/admin.rs new file mode 100644 index 0000000..a1a2617 --- /dev/null +++ b/src/routes/marketplace/admin.rs @@ -0,0 +1,69 @@ +use crate::db; +use crate::helpers::JsonResponse; +use crate::models; +use actix_web::{get, post, web, Responder, Result}; +use sqlx::PgPool; +use std::sync::Arc; +use uuid; + +#[tracing::instrument(name = "List submitted templates (admin)")] +#[get("")] +pub async fn list_submitted_handler( + _admin: web::ReqData>, // role enforced by Casbin + pg_pool: web::Data, +) -> Result { + db::marketplace::admin_list_submitted(pg_pool.get_ref()) + .await + .map_err(|err| JsonResponse::>::build().internal_server_error(err)) + .map(|templates| JsonResponse::build().set_list(templates).ok("OK")) +} + +#[derive(serde::Deserialize, Debug)] +pub struct AdminDecisionRequest { + pub decision: String, // approved|rejected|needs_changes + pub reason: Option, +} + +#[tracing::instrument(name = "Approve template (admin)")] +#[post("/{id}/approve")] +pub async fn approve_handler( + admin: web::ReqData>, // role enforced by Casbin + path: web::Path<(String,)>, + pg_pool: web::Data, + body: web::Json, +) -> Result>> { + let id = uuid::Uuid::parse_str(&path.into_inner().0) + .map_err(|_| actix_web::error::ErrorBadRequest("Invalid UUID"))?; + let req = body.into_inner(); + let updated = db::marketplace::admin_decide(pg_pool.get_ref(), &id, &admin.id, "approved", req.reason.as_deref()) + .await + .map_err(|err| JsonResponse::::build().internal_server_error(err))?; + + if updated { + Ok(JsonResponse::::build().ok("Approved")) + } else { + Err(JsonResponse::::build().bad_request("Not updated")) + } +} + +#[tracing::instrument(name = "Reject template (admin)")] +#[post("/{id}/reject")] +pub async fn reject_handler( + admin: web::ReqData>, // role enforced by Casbin + path: web::Path<(String,)>, + pg_pool: web::Data, + body: web::Json, +) -> Result>> { + let id = uuid::Uuid::parse_str(&path.into_inner().0) + .map_err(|_| actix_web::error::ErrorBadRequest("Invalid UUID"))?; + let req = body.into_inner(); + let updated = db::marketplace::admin_decide(pg_pool.get_ref(), &id, &admin.id, "rejected", req.reason.as_deref()) + .await + .map_err(|err| JsonResponse::::build().internal_server_error(err))?; + + if updated { + Ok(JsonResponse::::build().ok("Rejected")) + } else { + Err(JsonResponse::::build().bad_request("Not updated")) + } +} diff --git a/src/routes/marketplace/creator.rs b/src/routes/marketplace/creator.rs new file mode 100644 index 0000000..9f0f10b --- /dev/null +++ b/src/routes/marketplace/creator.rs @@ -0,0 +1,174 @@ +use crate::db; +use crate::helpers::JsonResponse; +use crate::models; +use actix_web::{get, post, put, web, Responder, Result}; +use sqlx::PgPool; +use std::sync::Arc; +use uuid; + +#[derive(Debug, serde::Deserialize)] +pub struct CreateTemplateRequest { + pub name: String, + pub slug: String, + pub short_description: Option, + pub long_description: Option, + pub category_id: Option, + pub tags: Option, + pub tech_stack: Option, + pub version: Option, + pub stack_definition: Option, + pub definition_format: Option, +} + +#[tracing::instrument(name = "Create draft template")] +#[post("")] +pub async fn create_handler( + user: web::ReqData>, + pg_pool: web::Data, + body: web::Json, +) -> Result { + let req = body.into_inner(); + + let tags = req.tags.unwrap_or(serde_json::json!([])); + let tech_stack = req.tech_stack.unwrap_or(serde_json::json!({})); + + let creator_name = format!("{} {}", user.first_name, user.last_name); + let template = db::marketplace::create_draft( + pg_pool.get_ref(), + &user.id, + Some(&creator_name), + &req.name, + &req.slug, + req.short_description.as_deref(), + req.long_description.as_deref(), + req.category_id, + tags, + tech_stack, + ) + .await + .map_err(|err| JsonResponse::::build().internal_server_error(err))?; + + // Optional initial version + if let Some(def) = req.stack_definition { + let version = req.version.unwrap_or("1.0.0".to_string()); + let _ = db::marketplace::set_latest_version( + pg_pool.get_ref(), + &template.id, + &version, + def, + req.definition_format.as_deref(), + None, + ) + .await; + } + + Ok(JsonResponse::build().set_item(Some(template)).created("Created")) +} + +#[derive(Debug, serde::Deserialize)] +pub struct UpdateTemplateRequest { + pub name: Option, + pub short_description: Option, + pub long_description: Option, + pub category_id: Option, + pub tags: Option, + pub tech_stack: Option, + pub plan_type: Option, + pub price: Option, + pub currency: Option, +} + +#[tracing::instrument(name = "Update template metadata")] +#[put("/{id}")] +pub async fn update_handler( + user: web::ReqData>, + path: web::Path<(String,)>, + pg_pool: web::Data, + body: web::Json, +) -> Result>> { + let id = uuid::Uuid::parse_str(&path.into_inner().0) + .map_err(|_| actix_web::error::ErrorBadRequest("Invalid UUID"))?; + + // Ownership check + let owner_id = sqlx::query_scalar!( + r#"SELECT creator_user_id FROM stack_template WHERE id = $1"#, + id + ) + .fetch_one(pg_pool.get_ref()) + .await + .map_err(|_| JsonResponse::::build().not_found("Not Found"))?; + + if owner_id != user.id { + return Err(JsonResponse::::build().forbidden("Forbidden")); + } + + let req = body.into_inner(); + + let updated = db::marketplace::update_metadata( + pg_pool.get_ref(), + &id, + req.name.as_deref(), + req.short_description.as_deref(), + req.long_description.as_deref(), + req.category_id, + req.tags, + req.tech_stack, + req.plan_type.as_deref(), + req.price, + req.currency.as_deref(), + ) + .await + .map_err(|err| JsonResponse::::build().bad_request(err))?; + + if updated { + Ok(JsonResponse::::build().ok("Updated")) + } else { + Err(JsonResponse::::build().not_found("Not Found")) + } +} + +#[tracing::instrument(name = "Submit template for review")] +#[post("/{id}/submit")] +pub async fn submit_handler( + user: web::ReqData>, + path: web::Path<(String,)>, + pg_pool: web::Data, +) -> Result>> { + let id = uuid::Uuid::parse_str(&path.into_inner().0) + .map_err(|_| actix_web::error::ErrorBadRequest("Invalid UUID"))?; + + // Ownership check + let owner_id = sqlx::query_scalar!( + r#"SELECT creator_user_id FROM stack_template WHERE id = $1"#, + id + ) + .fetch_one(pg_pool.get_ref()) + .await + .map_err(|_| JsonResponse::::build().not_found("Not Found"))?; + + if owner_id != user.id { + return Err(JsonResponse::::build().forbidden("Forbidden")); + } + + let submitted = db::marketplace::submit_for_review(pg_pool.get_ref(), &id) + .await + .map_err(|err| JsonResponse::::build().internal_server_error(err))?; + + if submitted { + Ok(JsonResponse::::build().ok("Submitted")) + } else { + Err(JsonResponse::::build().bad_request("Invalid status")) + } +} + +#[tracing::instrument(name = "List my templates")] +#[get("/mine")] +pub async fn mine_handler( + user: web::ReqData>, + pg_pool: web::Data, +) -> Result { + db::marketplace::list_mine(pg_pool.get_ref(), &user.id) + .await + .map_err(|err| JsonResponse::>::build().internal_server_error(err)) + .map(|templates| JsonResponse::build().set_list(templates).ok("OK")) +} diff --git a/src/routes/marketplace/mod.rs b/src/routes/marketplace/mod.rs new file mode 100644 index 0000000..4201f40 --- /dev/null +++ b/src/routes/marketplace/mod.rs @@ -0,0 +1,7 @@ +pub mod public; +pub mod creator; +pub mod admin; + +pub use public::*; +pub use creator::*; +pub use admin::*; diff --git a/src/routes/marketplace/public.rs b/src/routes/marketplace/public.rs new file mode 100644 index 0000000..cf9e353 --- /dev/null +++ b/src/routes/marketplace/public.rs @@ -0,0 +1,49 @@ +use crate::db; +use crate::helpers::JsonResponse; +use actix_web::{get, web, Responder, Result}; +use sqlx::PgPool; + +#[tracing::instrument(name = "List approved templates (public)")] +#[get("")] +pub async fn list_handler( + query: web::Query, + pg_pool: web::Data, +) -> Result { + let category = query.category.as_deref(); + let tag = query.tag.as_deref(); + let sort = query.sort.as_deref(); + + db::marketplace::list_approved(pg_pool.get_ref(), category, tag, sort) + .await + .map_err(|err| JsonResponse::>::build().internal_server_error(err)) + .map(|templates| JsonResponse::build().set_list(templates).ok("OK")) +} + +#[derive(Debug, serde::Deserialize)] +pub struct TemplateListQuery { + pub category: Option, + pub tag: Option, + pub sort: Option, // recent|popular|rating +} + +#[tracing::instrument(name = "Get template by slug (public)")] +#[get("/{slug}")] +pub async fn detail_handler( + path: web::Path<(String,)>, + pg_pool: web::Data, +) -> Result { + let slug = path.into_inner().0; + + match db::marketplace::get_by_slug_with_latest(pg_pool.get_ref(), &slug).await { + Ok((template, version)) => { + let mut payload = serde_json::json!({ + "template": template, + }); + if let Some(ver) = version { + payload["latest_version"] = serde_json::to_value(ver).unwrap(); + } + Ok(JsonResponse::build().set_item(Some(payload)).ok("OK")) + } + Err(err) => Err(JsonResponse::::build().not_found(err)), + } +} diff --git a/src/routes/mod.rs b/src/routes/mod.rs index 447b6b9..54107f8 100644 --- a/src/routes/mod.rs +++ b/src/routes/mod.rs @@ -11,7 +11,9 @@ pub(crate) mod project; pub(crate) mod server; pub(crate) mod agreement; +pub(crate) mod marketplace; pub use project::*; pub use agreement::*; +pub use marketplace::*; diff --git a/src/startup.rs b/src/startup.rs index ea5f9f1..f8d4e6d 100644 --- a/src/startup.rs +++ b/src/startup.rs @@ -104,6 +104,27 @@ pub async fn run( .service(routes::agreement::get_handler), ), ) + .service( + web::scope("/api") + .service( + web::scope("/templates") + .service(crate::routes::marketplace::public::list_handler) + .service(crate::routes::marketplace::public::detail_handler) + .service(crate::routes::marketplace::creator::create_handler) + .service(crate::routes::marketplace::creator::update_handler) + .service(crate::routes::marketplace::creator::submit_handler) + .service(crate::routes::marketplace::creator::mine_handler), + ) + .service( + web::scope("/admin") + .service( + web::scope("/templates") + .service(crate::routes::marketplace::admin::list_submitted_handler) + .service(crate::routes::marketplace::admin::approve_handler) + .service(crate::routes::marketplace::admin::reject_handler), + ), + ), + ) .service( web::scope("/cloud") .service(crate::routes::cloud::get::item) From dc1fc2ee41690f1abe71efaaea1b159e66419ce3 Mon Sep 17 00:00:00 2001 From: vsilent Date: Tue, 30 Dec 2025 12:32:10 +0200 Subject: [PATCH 24/35] new migrations Marketplace added at Stacker --- .gitignore | 1 + ...db5ba2061ba4fb0604caef24943d936ad45d.json} | 46 +- ...62aacd9e2b56c57668f2dc1b6e3c771ee48d.json} | 46 +- ...2a8437cded8f1c6215c3e4a4fec2ed933643.json} | 46 +- ...29fbcfae670cbd222c492ffc9508ea96588e6.json | 130 +++++ ...af9d754d9f1d4a18121eb56d9a451b817fdf.json} | 46 +- ...246da9fcfc2e680937b66bb8aa3e24c9dd1f.json} | 9 +- README.md | 9 + configuration.yaml.dist | 18 + .../20251229120000_marketplace.down.sql | 36 +- migrations/20251229120000_marketplace.up.sql | 108 +--- ...1230094608_add_required_plan_name.down.sql | 2 + ...251230094608_add_required_plan_name.up.sql | 2 + ...100000_add_marketplace_plans_rule.down.sql | 2 + ...30100000_add_marketplace_plans_rule.up.sql | 3 + src/configuration.rs | 4 + src/connectors/README.md | 532 ++++++++++++++++++ src/db/marketplace.rs | 83 ++- src/lib.rs | 1 + src/mcp/protocol_tests.rs | 5 + src/models/marketplace.rs | 6 +- src/routes/marketplace/admin.rs | 31 + src/routes/marketplace/creator.rs | 10 +- src/routes/project/deploy.rs | 77 ++- src/startup.rs | 9 + 25 files changed, 995 insertions(+), 267 deletions(-) rename .sqlx/{query-fd4227629d262e5ef9ee83458441623b22207dc86d11b5d4227d5893a0199983.json => query-0612f433190f8ba51c17f57d6da2db5ba2061ba4fb0604caef24943d936ad45d.json} (74%) rename .sqlx/{query-8c4c8b7e304bbc02d727bcc2507d646a3305a10349e9422c45e8e47bbd911ab9.json => query-0a87c8c8bbe3c8d23b41d5929e6862aacd9e2b56c57668f2dc1b6e3c771ee48d.json} (74%) rename .sqlx/{query-073f2677aeaea2595771abbf5d2c3e6fe803644553f9cf879271e5b86fe11a5d.json => query-8e992908d43e75c0abb85fac1e3f2a8437cded8f1c6215c3e4a4fec2ed933643.json} (78%) create mode 100644 .sqlx/query-95c4b45907793ae202a5ef3d9c829fbcfae670cbd222c492ffc9508ea96588e6.json rename .sqlx/{query-8b44ddf6b3e98a100756fa1f80685b37f0bcfba5f07e131ab4d67df659344034.json => query-9adfcae76ff8e0f638a3da310e7eaf9d754d9f1d4a18121eb56d9a451b817fdf.json} (74%) rename .sqlx/{query-17560a0750685b4b5fc01f4df36bda940a334195e3f15cae22153762131a247b.json => query-cdb14c0aad0a5dbc45504608f820246da9fcfc2e680937b66bb8aa3e24c9dd1f.json} (61%) create mode 100644 migrations/20251230094608_add_required_plan_name.down.sql create mode 100644 migrations/20251230094608_add_required_plan_name.up.sql create mode 100644 migrations/20251230100000_add_marketplace_plans_rule.down.sql create mode 100644 migrations/20251230100000_add_marketplace_plans_rule.up.sql create mode 100644 src/connectors/README.md diff --git a/.gitignore b/.gitignore index add00bb..ad0581e 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,4 @@ configuration.yaml.backup configuration.yaml.orig .vscode/ .env +docs/*.sql \ No newline at end of file diff --git a/.sqlx/query-fd4227629d262e5ef9ee83458441623b22207dc86d11b5d4227d5893a0199983.json b/.sqlx/query-0612f433190f8ba51c17f57d6da2db5ba2061ba4fb0604caef24943d936ad45d.json similarity index 74% rename from .sqlx/query-fd4227629d262e5ef9ee83458441623b22207dc86d11b5d4227d5893a0199983.json rename to .sqlx/query-0612f433190f8ba51c17f57d6da2db5ba2061ba4fb0604caef24943d936ad45d.json index 1ab486e..98dc7fe 100644 --- a/.sqlx/query-fd4227629d262e5ef9ee83458441623b22207dc86d11b5d4227d5893a0199983.json +++ b/.sqlx/query-0612f433190f8ba51c17f57d6da2db5ba2061ba4fb0604caef24943d936ad45d.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT \n id,\n creator_user_id,\n creator_name,\n name,\n slug,\n short_description,\n long_description,\n category_id,\n tags,\n tech_stack,\n status,\n plan_type,\n price,\n currency,\n is_configurable,\n view_count,\n deploy_count,\n average_rating,\n created_at,\n updated_at,\n approved_at\n FROM stack_template WHERE slug = $1 AND status = 'approved'", + "query": "SELECT \n id,\n creator_user_id,\n creator_name,\n name,\n slug,\n short_description,\n long_description,\n category_id,\n product_id,\n tags,\n tech_stack,\n status,\n is_configurable,\n view_count,\n deploy_count,\n required_plan_name,\n created_at,\n updated_at,\n approved_at\n FROM stack_template WHERE creator_user_id = $1 ORDER BY created_at DESC", "describe": { "columns": [ { @@ -45,66 +45,56 @@ }, { "ordinal": 8, - "name": "tags", - "type_info": "Jsonb" + "name": "product_id", + "type_info": "Int4" }, { "ordinal": 9, - "name": "tech_stack", + "name": "tags", "type_info": "Jsonb" }, { "ordinal": 10, - "name": "status", - "type_info": "Varchar" + "name": "tech_stack", + "type_info": "Jsonb" }, { "ordinal": 11, - "name": "plan_type", + "name": "status", "type_info": "Varchar" }, { "ordinal": 12, - "name": "price", - "type_info": "Float8" - }, - { - "ordinal": 13, - "name": "currency", - "type_info": "Varchar" - }, - { - "ordinal": 14, "name": "is_configurable", "type_info": "Bool" }, { - "ordinal": 15, + "ordinal": 13, "name": "view_count", "type_info": "Int4" }, { - "ordinal": 16, + "ordinal": 14, "name": "deploy_count", "type_info": "Int4" }, { - "ordinal": 17, - "name": "average_rating", - "type_info": "Float4" + "ordinal": 15, + "name": "required_plan_name", + "type_info": "Varchar" }, { - "ordinal": 18, + "ordinal": 16, "name": "created_at", "type_info": "Timestamptz" }, { - "ordinal": 19, + "ordinal": 17, "name": "updated_at", "type_info": "Timestamptz" }, { - "ordinal": 20, + "ordinal": 18, "name": "approved_at", "type_info": "Timestamptz" } @@ -125,10 +115,8 @@ true, true, true, - false, - true, - true, true, + false, true, true, true, @@ -138,5 +126,5 @@ true ] }, - "hash": "fd4227629d262e5ef9ee83458441623b22207dc86d11b5d4227d5893a0199983" + "hash": "0612f433190f8ba51c17f57d6da2db5ba2061ba4fb0604caef24943d936ad45d" } diff --git a/.sqlx/query-8c4c8b7e304bbc02d727bcc2507d646a3305a10349e9422c45e8e47bbd911ab9.json b/.sqlx/query-0a87c8c8bbe3c8d23b41d5929e6862aacd9e2b56c57668f2dc1b6e3c771ee48d.json similarity index 74% rename from .sqlx/query-8c4c8b7e304bbc02d727bcc2507d646a3305a10349e9422c45e8e47bbd911ab9.json rename to .sqlx/query-0a87c8c8bbe3c8d23b41d5929e6862aacd9e2b56c57668f2dc1b6e3c771ee48d.json index 7f4f2d0..a59f80e 100644 --- a/.sqlx/query-8c4c8b7e304bbc02d727bcc2507d646a3305a10349e9422c45e8e47bbd911ab9.json +++ b/.sqlx/query-0a87c8c8bbe3c8d23b41d5929e6862aacd9e2b56c57668f2dc1b6e3c771ee48d.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT \n id,\n creator_user_id,\n creator_name,\n name,\n slug,\n short_description,\n long_description,\n category_id,\n tags,\n tech_stack,\n status,\n plan_type,\n price,\n currency,\n is_configurable,\n view_count,\n deploy_count,\n average_rating,\n created_at,\n updated_at,\n approved_at\n FROM stack_template WHERE status = 'submitted' ORDER BY created_at ASC", + "query": "SELECT \n id,\n creator_user_id,\n creator_name,\n name,\n slug,\n short_description,\n long_description,\n category_id,\n product_id,\n tags,\n tech_stack,\n status,\n is_configurable,\n view_count,\n deploy_count,\n required_plan_name,\n created_at,\n updated_at,\n approved_at\n FROM stack_template WHERE status = 'submitted' ORDER BY created_at ASC", "describe": { "columns": [ { @@ -45,66 +45,56 @@ }, { "ordinal": 8, - "name": "tags", - "type_info": "Jsonb" + "name": "product_id", + "type_info": "Int4" }, { "ordinal": 9, - "name": "tech_stack", + "name": "tags", "type_info": "Jsonb" }, { "ordinal": 10, - "name": "status", - "type_info": "Varchar" + "name": "tech_stack", + "type_info": "Jsonb" }, { "ordinal": 11, - "name": "plan_type", + "name": "status", "type_info": "Varchar" }, { "ordinal": 12, - "name": "price", - "type_info": "Float8" - }, - { - "ordinal": 13, - "name": "currency", - "type_info": "Varchar" - }, - { - "ordinal": 14, "name": "is_configurable", "type_info": "Bool" }, { - "ordinal": 15, + "ordinal": 13, "name": "view_count", "type_info": "Int4" }, { - "ordinal": 16, + "ordinal": 14, "name": "deploy_count", "type_info": "Int4" }, { - "ordinal": 17, - "name": "average_rating", - "type_info": "Float4" + "ordinal": 15, + "name": "required_plan_name", + "type_info": "Varchar" }, { - "ordinal": 18, + "ordinal": 16, "name": "created_at", "type_info": "Timestamptz" }, { - "ordinal": 19, + "ordinal": 17, "name": "updated_at", "type_info": "Timestamptz" }, { - "ordinal": 20, + "ordinal": 18, "name": "approved_at", "type_info": "Timestamptz" } @@ -123,10 +113,8 @@ true, true, true, - false, - true, - true, true, + false, true, true, true, @@ -136,5 +124,5 @@ true ] }, - "hash": "8c4c8b7e304bbc02d727bcc2507d646a3305a10349e9422c45e8e47bbd911ab9" + "hash": "0a87c8c8bbe3c8d23b41d5929e6862aacd9e2b56c57668f2dc1b6e3c771ee48d" } diff --git a/.sqlx/query-073f2677aeaea2595771abbf5d2c3e6fe803644553f9cf879271e5b86fe11a5d.json b/.sqlx/query-8e992908d43e75c0abb85fac1e3f2a8437cded8f1c6215c3e4a4fec2ed933643.json similarity index 78% rename from .sqlx/query-073f2677aeaea2595771abbf5d2c3e6fe803644553f9cf879271e5b86fe11a5d.json rename to .sqlx/query-8e992908d43e75c0abb85fac1e3f2a8437cded8f1c6215c3e4a4fec2ed933643.json index 9735af5..0ed8fe7 100644 --- a/.sqlx/query-073f2677aeaea2595771abbf5d2c3e6fe803644553f9cf879271e5b86fe11a5d.json +++ b/.sqlx/query-8e992908d43e75c0abb85fac1e3f2a8437cded8f1c6215c3e4a4fec2ed933643.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "INSERT INTO stack_template (\n creator_user_id, creator_name, name, slug,\n short_description, long_description, category_id,\n tags, tech_stack, status\n ) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,'draft')\n RETURNING \n id,\n creator_user_id,\n creator_name,\n name,\n slug,\n short_description,\n long_description,\n category_id,\n tags,\n tech_stack,\n status,\n plan_type,\n price,\n currency,\n is_configurable,\n view_count,\n deploy_count,\n average_rating,\n created_at,\n updated_at,\n approved_at\n ", + "query": "INSERT INTO stack_template (\n creator_user_id, creator_name, name, slug,\n short_description, long_description, category_id,\n tags, tech_stack, status\n ) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,'draft')\n RETURNING \n id,\n creator_user_id,\n creator_name,\n name,\n slug,\n short_description,\n long_description,\n category_id,\n product_id,\n tags,\n tech_stack,\n status,\n is_configurable,\n view_count,\n deploy_count,\n required_plan_name,\n created_at,\n updated_at,\n approved_at\n ", "describe": { "columns": [ { @@ -45,66 +45,56 @@ }, { "ordinal": 8, - "name": "tags", - "type_info": "Jsonb" + "name": "product_id", + "type_info": "Int4" }, { "ordinal": 9, - "name": "tech_stack", + "name": "tags", "type_info": "Jsonb" }, { "ordinal": 10, - "name": "status", - "type_info": "Varchar" + "name": "tech_stack", + "type_info": "Jsonb" }, { "ordinal": 11, - "name": "plan_type", + "name": "status", "type_info": "Varchar" }, { "ordinal": 12, - "name": "price", - "type_info": "Float8" - }, - { - "ordinal": 13, - "name": "currency", - "type_info": "Varchar" - }, - { - "ordinal": 14, "name": "is_configurable", "type_info": "Bool" }, { - "ordinal": 15, + "ordinal": 13, "name": "view_count", "type_info": "Int4" }, { - "ordinal": 16, + "ordinal": 14, "name": "deploy_count", "type_info": "Int4" }, { - "ordinal": 17, - "name": "average_rating", - "type_info": "Float4" + "ordinal": 15, + "name": "required_plan_name", + "type_info": "Varchar" }, { - "ordinal": 18, + "ordinal": 16, "name": "created_at", "type_info": "Timestamptz" }, { - "ordinal": 19, + "ordinal": 17, "name": "updated_at", "type_info": "Timestamptz" }, { - "ordinal": 20, + "ordinal": 18, "name": "approved_at", "type_info": "Timestamptz" } @@ -133,10 +123,8 @@ true, true, true, - false, - true, - true, true, + false, true, true, true, @@ -146,5 +134,5 @@ true ] }, - "hash": "073f2677aeaea2595771abbf5d2c3e6fe803644553f9cf879271e5b86fe11a5d" + "hash": "8e992908d43e75c0abb85fac1e3f2a8437cded8f1c6215c3e4a4fec2ed933643" } diff --git a/.sqlx/query-95c4b45907793ae202a5ef3d9c829fbcfae670cbd222c492ffc9508ea96588e6.json b/.sqlx/query-95c4b45907793ae202a5ef3d9c829fbcfae670cbd222c492ffc9508ea96588e6.json new file mode 100644 index 0000000..377cf35 --- /dev/null +++ b/.sqlx/query-95c4b45907793ae202a5ef3d9c829fbcfae670cbd222c492ffc9508ea96588e6.json @@ -0,0 +1,130 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT \n id,\n creator_user_id,\n creator_name,\n name,\n slug,\n short_description,\n long_description,\n category_id,\n product_id,\n tags,\n tech_stack,\n status,\n is_configurable,\n view_count,\n deploy_count,\n created_at,\n updated_at,\n approved_at,\n required_plan_name\n FROM stack_template WHERE id = $1", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "creator_user_id", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "creator_name", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "name", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "slug", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "short_description", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "long_description", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "category_id", + "type_info": "Int4" + }, + { + "ordinal": 8, + "name": "product_id", + "type_info": "Int4" + }, + { + "ordinal": 9, + "name": "tags", + "type_info": "Jsonb" + }, + { + "ordinal": 10, + "name": "tech_stack", + "type_info": "Jsonb" + }, + { + "ordinal": 11, + "name": "status", + "type_info": "Varchar" + }, + { + "ordinal": 12, + "name": "is_configurable", + "type_info": "Bool" + }, + { + "ordinal": 13, + "name": "view_count", + "type_info": "Int4" + }, + { + "ordinal": 14, + "name": "deploy_count", + "type_info": "Int4" + }, + { + "ordinal": 15, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 16, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 17, + "name": "approved_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 18, + "name": "required_plan_name", + "type_info": "Varchar" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + true, + false, + false, + true, + true, + true, + true, + true, + true, + false, + true, + true, + true, + true, + true, + true, + true + ] + }, + "hash": "95c4b45907793ae202a5ef3d9c829fbcfae670cbd222c492ffc9508ea96588e6" +} diff --git a/.sqlx/query-8b44ddf6b3e98a100756fa1f80685b37f0bcfba5f07e131ab4d67df659344034.json b/.sqlx/query-9adfcae76ff8e0f638a3da310e7eaf9d754d9f1d4a18121eb56d9a451b817fdf.json similarity index 74% rename from .sqlx/query-8b44ddf6b3e98a100756fa1f80685b37f0bcfba5f07e131ab4d67df659344034.json rename to .sqlx/query-9adfcae76ff8e0f638a3da310e7eaf9d754d9f1d4a18121eb56d9a451b817fdf.json index fa4b0fe..dfc34ca 100644 --- a/.sqlx/query-8b44ddf6b3e98a100756fa1f80685b37f0bcfba5f07e131ab4d67df659344034.json +++ b/.sqlx/query-9adfcae76ff8e0f638a3da310e7eaf9d754d9f1d4a18121eb56d9a451b817fdf.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT \n id,\n creator_user_id,\n creator_name,\n name,\n slug,\n short_description,\n long_description,\n category_id,\n tags,\n tech_stack,\n status,\n plan_type,\n price,\n currency,\n is_configurable,\n view_count,\n deploy_count,\n average_rating,\n created_at,\n updated_at,\n approved_at\n FROM stack_template WHERE creator_user_id = $1 ORDER BY created_at DESC", + "query": "SELECT \n id,\n creator_user_id,\n creator_name,\n name,\n slug,\n short_description,\n long_description,\n category_id,\n product_id,\n tags,\n tech_stack,\n status,\n is_configurable,\n view_count,\n deploy_count,\n required_plan_name,\n created_at,\n updated_at,\n approved_at\n FROM stack_template WHERE slug = $1 AND status = 'approved'", "describe": { "columns": [ { @@ -45,66 +45,56 @@ }, { "ordinal": 8, - "name": "tags", - "type_info": "Jsonb" + "name": "product_id", + "type_info": "Int4" }, { "ordinal": 9, - "name": "tech_stack", + "name": "tags", "type_info": "Jsonb" }, { "ordinal": 10, - "name": "status", - "type_info": "Varchar" + "name": "tech_stack", + "type_info": "Jsonb" }, { "ordinal": 11, - "name": "plan_type", + "name": "status", "type_info": "Varchar" }, { "ordinal": 12, - "name": "price", - "type_info": "Float8" - }, - { - "ordinal": 13, - "name": "currency", - "type_info": "Varchar" - }, - { - "ordinal": 14, "name": "is_configurable", "type_info": "Bool" }, { - "ordinal": 15, + "ordinal": 13, "name": "view_count", "type_info": "Int4" }, { - "ordinal": 16, + "ordinal": 14, "name": "deploy_count", "type_info": "Int4" }, { - "ordinal": 17, - "name": "average_rating", - "type_info": "Float4" + "ordinal": 15, + "name": "required_plan_name", + "type_info": "Varchar" }, { - "ordinal": 18, + "ordinal": 16, "name": "created_at", "type_info": "Timestamptz" }, { - "ordinal": 19, + "ordinal": 17, "name": "updated_at", "type_info": "Timestamptz" }, { - "ordinal": 20, + "ordinal": 18, "name": "approved_at", "type_info": "Timestamptz" } @@ -125,10 +115,8 @@ true, true, true, - false, - true, - true, true, + false, true, true, true, @@ -138,5 +126,5 @@ true ] }, - "hash": "8b44ddf6b3e98a100756fa1f80685b37f0bcfba5f07e131ab4d67df659344034" + "hash": "9adfcae76ff8e0f638a3da310e7eaf9d754d9f1d4a18121eb56d9a451b817fdf" } diff --git a/.sqlx/query-17560a0750685b4b5fc01f4df36bda940a334195e3f15cae22153762131a247b.json b/.sqlx/query-cdb14c0aad0a5dbc45504608f820246da9fcfc2e680937b66bb8aa3e24c9dd1f.json similarity index 61% rename from .sqlx/query-17560a0750685b4b5fc01f4df36bda940a334195e3f15cae22153762131a247b.json rename to .sqlx/query-cdb14c0aad0a5dbc45504608f820246da9fcfc2e680937b66bb8aa3e24c9dd1f.json index 5cd8517..5daaa04 100644 --- a/.sqlx/query-17560a0750685b4b5fc01f4df36bda940a334195e3f15cae22153762131a247b.json +++ b/.sqlx/query-cdb14c0aad0a5dbc45504608f820246da9fcfc2e680937b66bb8aa3e24c9dd1f.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "UPDATE stack_template SET \n name = COALESCE($2, name),\n short_description = COALESCE($3, short_description),\n long_description = COALESCE($4, long_description),\n category_id = COALESCE($5, category_id),\n tags = COALESCE($6, tags),\n tech_stack = COALESCE($7, tech_stack),\n plan_type = COALESCE($8, plan_type),\n price = COALESCE($9, price),\n currency = COALESCE($10, currency)\n WHERE id = $1::uuid", + "query": "UPDATE stack_template SET \n name = COALESCE($2, name),\n short_description = COALESCE($3, short_description),\n long_description = COALESCE($4, long_description),\n category_id = COALESCE($5, category_id),\n tags = COALESCE($6, tags),\n tech_stack = COALESCE($7, tech_stack)\n WHERE id = $1::uuid", "describe": { "columns": [], "parameters": { @@ -11,13 +11,10 @@ "Text", "Int4", "Jsonb", - "Jsonb", - "Varchar", - "Float8", - "Varchar" + "Jsonb" ] }, "nullable": [] }, - "hash": "17560a0750685b4b5fc01f4df36bda940a334195e3f15cae22153762131a247b" + "hash": "cdb14c0aad0a5dbc45504608f820246da9fcfc2e680937b66bb8aa3e24c9dd1f" } diff --git a/README.md b/README.md index edd60aa..86bae36 100644 --- a/README.md +++ b/README.md @@ -216,3 +216,12 @@ Test casbin rule ``` cargo r --bin console --features=explain debug casbin --path /client --action POST --subject admin_petru ``` + + + +"cargo sqlx prepare" requires setting the DATABASE_URL environment variable to a valid database URL. + +## TODOs +``` +export DATABASE_URL=postgres://postgres:postgres@localhost:5432/stacker +``` diff --git a/configuration.yaml.dist b/configuration.yaml.dist index 68f9b85..200af67 100644 --- a/configuration.yaml.dist +++ b/configuration.yaml.dist @@ -23,5 +23,23 @@ vault: # KV mount/prefix for agent tokens, e.g. 'kv/agent' or 'agent' agent_path_prefix: agent +# External service connectors +connectors: + user_service: + enabled: false + base_url: "https://dev.try.direct/server/user" + timeout_secs: 10 + retry_attempts: 3 + payment_service: + enabled: false + base_url: "http://localhost:8000" + timeout_secs: 15 + events: + enabled: false + amqp_url: "amqp://guest:guest@127.0.0.1:5672/%2f" + exchange: "stacker_events" + prefetch: 10 + # Env overrides (optional): # VAULT_ADDRESS, VAULT_TOKEN, VAULT_AGENT_PATH_PREFIX +# USER_SERVICE_AUTH_TOKEN, PAYMENT_SERVICE_AUTH_TOKEN diff --git a/migrations/20251229120000_marketplace.down.sql b/migrations/20251229120000_marketplace.down.sql index 1866d76..0af56cd 100644 --- a/migrations/20251229120000_marketplace.down.sql +++ b/migrations/20251229120000_marketplace.down.sql @@ -1,43 +1,31 @@ -- Rollback TryDirect Marketplace Schema -DROP TRIGGER IF EXISTS maintain_template_rating ON stack_template_rating; -DROP FUNCTION IF EXISTS update_template_average_rating(); +DROP TRIGGER IF EXISTS auto_create_product_on_approval ON stack_template; +DROP FUNCTION IF EXISTS create_product_for_approved_template(); -DROP TRIGGER IF EXISTS update_stack_template_plan_updated_at ON stack_template_plan; DROP TRIGGER IF EXISTS update_stack_template_updated_at ON stack_template; -DROP FUNCTION IF EXISTS update_updated_at_column(); +-- Drop indexes DROP INDEX IF EXISTS idx_project_source_template; - -DROP INDEX IF EXISTS idx_purchase_creator; -DROP INDEX IF EXISTS idx_purchase_buyer; -DROP INDEX IF EXISTS idx_purchase_template; - -DROP INDEX IF EXISTS idx_template_rating_user; -DROP INDEX IF EXISTS idx_template_rating_template; - DROP INDEX IF EXISTS idx_review_decision; DROP INDEX IF EXISTS idx_review_template; - DROP INDEX IF EXISTS idx_template_version_latest; DROP INDEX IF EXISTS idx_template_version_template; - +DROP INDEX IF EXISTS idx_stack_template_product; DROP INDEX IF EXISTS idx_stack_template_category; DROP INDEX IF EXISTS idx_stack_template_slug; DROP INDEX IF EXISTS idx_stack_template_status; DROP INDEX IF EXISTS idx_stack_template_creator; -ALTER TABLE IF EXISTS stack DROP COLUMN IF EXISTS is_user_submitted; -ALTER TABLE IF EXISTS stack DROP COLUMN IF EXISTS marketplace_template_id; +-- Remove columns from existing tables ALTER TABLE IF EXISTS project DROP COLUMN IF EXISTS template_version; ALTER TABLE IF EXISTS project DROP COLUMN IF EXISTS source_template_id; -DROP TABLE IF EXISTS template_purchase; -DROP TABLE IF EXISTS stack_template_plan; -DROP TABLE IF EXISTS stack_template_rating; -DROP TABLE IF EXISTS stack_template_review; -DROP TABLE IF EXISTS stack_template_version; -DROP TABLE IF EXISTS stack_template; +-- Drop marketplace tables (CASCADE to handle dependencies) +DROP TABLE IF EXISTS stack_template_review CASCADE; +DROP TABLE IF EXISTS stack_template_version CASCADE; +DROP TABLE IF EXISTS stack_template CASCADE; +DROP TABLE IF EXISTS stack_category CASCADE; --- Keep categories table if used elsewhere; comment out to drop --- DROP TABLE IF EXISTS stack_category; +-- Drop functions last +DROP FUNCTION IF EXISTS update_updated_at_column() CASCADE; diff --git a/migrations/20251229120000_marketplace.up.sql b/migrations/20251229120000_marketplace.up.sql index 3c44ed2..9bc0504 100644 --- a/migrations/20251229120000_marketplace.up.sql +++ b/migrations/20251229120000_marketplace.up.sql @@ -1,4 +1,5 @@ -- TryDirect Marketplace Schema Migration +-- Integrates with existing Product/Rating system -- Ensure UUID generation CREATE EXTENSION IF NOT EXISTS pgcrypto; @@ -9,7 +10,7 @@ CREATE TABLE IF NOT EXISTS stack_category ( name VARCHAR(255) UNIQUE NOT NULL ); --- 2. Core marketplace tables +-- 2. Core marketplace table - templates become products when approved CREATE TABLE IF NOT EXISTS stack_template ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), creator_user_id VARCHAR(50) NOT NULL, @@ -24,18 +25,14 @@ CREATE TABLE IF NOT EXISTS stack_template ( status VARCHAR(50) NOT NULL DEFAULT 'draft' CHECK ( status IN ('draft', 'submitted', 'under_review', 'approved', 'rejected', 'deprecated') ), - plan_type VARCHAR(50) DEFAULT 'free' CHECK ( - plan_type IN ('free', 'one_time', 'subscription') - ), - price DOUBLE PRECISION, - currency VARCHAR(3) DEFAULT 'USD', is_configurable BOOLEAN DEFAULT true, view_count INTEGER DEFAULT 0, deploy_count INTEGER DEFAULT 0, - average_rating REAL, + product_id INTEGER, -- Links to product table when approved for ratings created_at TIMESTAMP WITH TIME ZONE DEFAULT now(), updated_at TIMESTAMP WITH TIME ZONE DEFAULT now(), - approved_at TIMESTAMP WITH TIME ZONE + approved_at TIMESTAMP WITH TIME ZONE, + CONSTRAINT fk_product FOREIGN KEY(product_id) REFERENCES product(id) ON DELETE SET NULL ); CREATE TABLE IF NOT EXISTS stack_template_version ( @@ -68,49 +65,6 @@ CREATE TABLE IF NOT EXISTS stack_template_review ( reviewed_at TIMESTAMP WITH TIME ZONE ); -CREATE TABLE IF NOT EXISTS stack_template_rating ( - id UUID PRIMARY KEY DEFAULT gen_random_uuid(), - template_id UUID NOT NULL REFERENCES stack_template(id) ON DELETE CASCADE, - user_id VARCHAR(50) NOT NULL, - rating INTEGER NOT NULL CHECK (rating >= 1 AND rating <= 5), - rate_category VARCHAR(100), - review_text TEXT, - is_flagged BOOLEAN DEFAULT false, - created_at TIMESTAMP WITH TIME ZONE DEFAULT now(), - updated_at TIMESTAMP WITH TIME ZONE DEFAULT now(), - UNIQUE(template_id, user_id, rate_category) -); - --- Monetization -CREATE TABLE IF NOT EXISTS stack_template_plan ( - id UUID PRIMARY KEY DEFAULT gen_random_uuid(), - template_id UUID NOT NULL REFERENCES stack_template(id) ON DELETE CASCADE, - plan_code VARCHAR(50) NOT NULL, - price DOUBLE PRECISION, - currency VARCHAR(3) DEFAULT 'USD', - period VARCHAR(20) DEFAULT 'one_time', - description TEXT, - includes JSONB DEFAULT '[]'::jsonb, - created_at TIMESTAMP WITH TIME ZONE DEFAULT now(), - updated_at TIMESTAMP WITH TIME ZONE DEFAULT now() -); - -CREATE TABLE IF NOT EXISTS template_purchase ( - id UUID PRIMARY KEY DEFAULT gen_random_uuid(), - template_id UUID NOT NULL REFERENCES stack_template(id), - plan_id UUID NOT NULL REFERENCES stack_template_plan(id), - buyer_user_id VARCHAR(50) NOT NULL, - creator_user_id VARCHAR(50) NOT NULL, - amount DOUBLE PRECISION, - currency VARCHAR(3), - stripe_charge_id VARCHAR(255), - creator_share DOUBLE PRECISION, - platform_share DOUBLE PRECISION, - status VARCHAR(50) DEFAULT 'completed', - purchased_at TIMESTAMP WITH TIME ZONE DEFAULT now(), - refunded_at TIMESTAMP WITH TIME ZONE -); - -- Extend existing tables DO $$ BEGIN IF NOT EXISTS ( @@ -135,6 +89,7 @@ CREATE INDEX IF NOT EXISTS idx_stack_template_creator ON stack_template(creator_ CREATE INDEX IF NOT EXISTS idx_stack_template_status ON stack_template(status); CREATE INDEX IF NOT EXISTS idx_stack_template_slug ON stack_template(slug); CREATE INDEX IF NOT EXISTS idx_stack_template_category ON stack_template(category_id); +CREATE INDEX IF NOT EXISTS idx_stack_template_product ON stack_template(product_id); CREATE INDEX IF NOT EXISTS idx_template_version_template ON stack_template_version(template_id); CREATE INDEX IF NOT EXISTS idx_template_version_latest ON stack_template_version(template_id, is_latest) WHERE is_latest = true; @@ -142,13 +97,6 @@ CREATE INDEX IF NOT EXISTS idx_template_version_latest ON stack_template_version CREATE INDEX IF NOT EXISTS idx_review_template ON stack_template_review(template_id); CREATE INDEX IF NOT EXISTS idx_review_decision ON stack_template_review(decision); -CREATE INDEX IF NOT EXISTS idx_template_rating_template ON stack_template_rating(template_id); -CREATE INDEX IF NOT EXISTS idx_template_rating_user ON stack_template_rating(user_id); - -CREATE INDEX IF NOT EXISTS idx_purchase_template ON template_purchase(template_id); -CREATE INDEX IF NOT EXISTS idx_purchase_buyer ON template_purchase(buyer_user_id); -CREATE INDEX IF NOT EXISTS idx_purchase_creator ON template_purchase(creator_user_id); - CREATE INDEX IF NOT EXISTS idx_project_source_template ON project(source_template_id); -- Triggers @@ -165,30 +113,35 @@ CREATE TRIGGER update_stack_template_updated_at BEFORE UPDATE ON stack_template FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); -DROP TRIGGER IF EXISTS update_stack_template_plan_updated_at ON stack_template_plan; -CREATE TRIGGER update_stack_template_plan_updated_at - BEFORE UPDATE ON stack_template_plan - FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); - --- Maintain average_rating on stack_template -CREATE OR REPLACE FUNCTION update_template_average_rating() +-- Function to create product entry when template is approved +CREATE OR REPLACE FUNCTION create_product_for_approved_template() RETURNS TRIGGER AS $$ +DECLARE + new_product_id INTEGER; BEGIN - UPDATE stack_template - SET average_rating = ( - SELECT AVG(rating::DECIMAL) - FROM stack_template_rating - WHERE template_id = COALESCE(OLD.template_id, NEW.template_id) - ) - WHERE id = COALESCE(OLD.template_id, NEW.template_id); - RETURN NULL; + -- When status changes to 'approved' and no product exists yet + IF NEW.status = 'approved' AND OLD.status != 'approved' AND NEW.product_id IS NULL THEN + -- Generate product_id from template UUID (use hashtext for deterministic integer) + new_product_id := hashtext(NEW.id::text); + + -- Insert into product table + INSERT INTO product (id, obj_id, obj_type, created_at, updated_at) + VALUES (new_product_id, new_product_id, 'marketplace_template', now(), now()) + ON CONFLICT (id) DO NOTHING; + + -- Link template to product + NEW.product_id := new_product_id; + END IF; + RETURN NEW; END; $$ language 'plpgsql'; -DROP TRIGGER IF EXISTS maintain_template_rating ON stack_template_rating; -CREATE TRIGGER maintain_template_rating - AFTER INSERT OR UPDATE OR DELETE ON stack_template_rating - FOR EACH ROW EXECUTE FUNCTION update_template_average_rating(); +DROP TRIGGER IF EXISTS auto_create_product_on_approval ON stack_template; +CREATE TRIGGER auto_create_product_on_approval + BEFORE UPDATE ON stack_template + FOR EACH ROW + WHEN (NEW.status = 'approved' AND OLD.status != 'approved') + EXECUTE FUNCTION create_product_for_approved_template(); -- Seed sample categories INSERT INTO stack_category (name) @@ -199,3 +152,4 @@ VALUES ('Dev Tools'), ('Automation') ON CONFLICT DO NOTHING; + diff --git a/migrations/20251230094608_add_required_plan_name.down.sql b/migrations/20251230094608_add_required_plan_name.down.sql new file mode 100644 index 0000000..c6b04bc --- /dev/null +++ b/migrations/20251230094608_add_required_plan_name.down.sql @@ -0,0 +1,2 @@ +-- Add down migration script here +ALTER TABLE stack_template DROP COLUMN IF EXISTS required_plan_name; \ No newline at end of file diff --git a/migrations/20251230094608_add_required_plan_name.up.sql b/migrations/20251230094608_add_required_plan_name.up.sql new file mode 100644 index 0000000..fcd896d --- /dev/null +++ b/migrations/20251230094608_add_required_plan_name.up.sql @@ -0,0 +1,2 @@ +-- Add up migration script here +ALTER TABLE stack_template ADD COLUMN IF NOT EXISTS required_plan_name VARCHAR(50); \ No newline at end of file diff --git a/migrations/20251230100000_add_marketplace_plans_rule.down.sql b/migrations/20251230100000_add_marketplace_plans_rule.down.sql new file mode 100644 index 0000000..8658c29 --- /dev/null +++ b/migrations/20251230100000_add_marketplace_plans_rule.down.sql @@ -0,0 +1,2 @@ +DELETE FROM public.casbin_rule +WHERE ptype = 'p' AND v0 = 'group_admin' AND v1 = '/admin/marketplace/plans' AND v2 = 'GET' AND v3 = '' AND v4 = '' AND v5 = ''; diff --git a/migrations/20251230100000_add_marketplace_plans_rule.up.sql b/migrations/20251230100000_add_marketplace_plans_rule.up.sql new file mode 100644 index 0000000..eeeb407 --- /dev/null +++ b/migrations/20251230100000_add_marketplace_plans_rule.up.sql @@ -0,0 +1,3 @@ +-- Casbin rule for admin marketplace plans endpoint +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) +VALUES ('p', 'group_admin', '/admin/marketplace/plans', 'GET', '', '', ''); diff --git a/src/configuration.rs b/src/configuration.rs index e536b3e..e6deedc 100644 --- a/src/configuration.rs +++ b/src/configuration.rs @@ -1,4 +1,5 @@ use serde; +use crate::connectors::ConnectorConfig; #[derive(Debug, serde::Deserialize)] pub struct Settings { @@ -9,6 +10,8 @@ pub struct Settings { pub max_clients_number: i64, pub amqp: AmqpSettings, pub vault: VaultSettings, + #[serde(default)] + pub connectors: ConnectorConfig, } impl Default for Settings { @@ -21,6 +24,7 @@ impl Default for Settings { max_clients_number: 10, amqp: AmqpSettings::default(), vault: VaultSettings::default(), + connectors: ConnectorConfig::default(), } } } diff --git a/src/connectors/README.md b/src/connectors/README.md new file mode 100644 index 0000000..c7f0f01 --- /dev/null +++ b/src/connectors/README.md @@ -0,0 +1,532 @@ +# External Service Connectors + +This directory contains adapters for all external service integrations. **All communication with external services MUST go through connectors** - this is a core architectural rule for Stacker. + +## Why Connectors? + +| Benefit | Description | +|---------|-------------| +| **Independence** | Stacker works standalone; external services are optional | +| **Testability** | Mock connectors in tests without calling external APIs | +| **Replaceability** | Swap HTTP for gRPC without changing route code | +| **Configuration** | Enable/disable services per environment | +| **Separation of Concerns** | Routes contain business logic only, not HTTP details | +| **Error Handling** | Centralized retry logic, timeouts, circuit breakers | + +## Architecture Pattern + +``` +┌─────────────────────────────────────────────────────────┐ +│ Route Handler │ +│ (Pure business logic - no HTTP/AMQP knowledge) │ +└─────────────────────────┬───────────────────────────────┘ + │ Uses trait methods + ▼ +┌─────────────────────────────────────────────────────────┐ +│ Connector Trait (Interface) │ +│ pub trait UserServiceConnector: Send + Sync │ +└─────────────────────────┬───────────────────────────────┘ + │ Implemented by + ┌─────────┴─────────┐ + ▼ ▼ + ┌──────────────────┐ ┌──────────────────┐ + │ HTTP Client │ │ Mock Connector │ + │ (Production) │ │ (Tests/Dev) │ + └──────────────────┘ └──────────────────┘ +``` + +## Existing Connectors + +| Service | Status | Purpose | +|---------|--------|---------| +| User Service | ✅ Implemented | Create/manage stacks in TryDirect User Service | +| Payment Service | 🚧 Planned | Process marketplace template payments | +| Event Bus (RabbitMQ) | 🚧 Planned | Async notifications (template approved, deployment complete) | + +## Adding a New Connector + +### Step 1: Define Configuration + +Add your service config to `config.rs`: + +```rust +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PaymentServiceConfig { + pub enabled: bool, + pub base_url: String, + pub timeout_secs: u64, + #[serde(skip)] + pub auth_token: Option, +} + +impl Default for PaymentServiceConfig { + fn default() -> Self { + Self { + enabled: false, + base_url: "http://localhost:8000".to_string(), + timeout_secs: 15, + auth_token: None, + } + } +} +``` + +Then add to `ConnectorConfig`: +```rust +pub struct ConnectorConfig { + pub user_service: Option, + pub payment_service: Option, // Add this +} +``` + +### Step 2: Create Service File + +Create `src/connectors/payment_service.rs`: + +```rust +use super::config::PaymentServiceConfig; +use super::errors::ConnectorError; +use actix_web::web; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use tracing::Instrument; + +// 1. Define response types +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PaymentResponse { + pub payment_id: String, + pub status: String, + pub amount: f64, +} + +// 2. Define trait interface +#[async_trait::async_trait] +pub trait PaymentServiceConnector: Send + Sync { + async fn create_payment( + &self, + user_id: &str, + amount: f64, + currency: &str, + ) -> Result; + + async fn get_payment_status( + &self, + payment_id: &str, + ) -> Result; +} + +// 3. Implement HTTP client +pub struct PaymentServiceClient { + base_url: String, + http_client: reqwest::Client, + auth_token: Option, +} + +impl PaymentServiceClient { + pub fn new(config: PaymentServiceConfig) -> Self { + let timeout = std::time::Duration::from_secs(config.timeout_secs); + let http_client = reqwest::Client::builder() + .timeout(timeout) + .build() + .expect("Failed to create HTTP client"); + + Self { + base_url: config.base_url, + http_client, + auth_token: config.auth_token, + } + } + + fn auth_header(&self) -> Option { + self.auth_token + .as_ref() + .map(|token| format!("Bearer {}", token)) + } +} + +#[async_trait::async_trait] +impl PaymentServiceConnector for PaymentServiceClient { + async fn create_payment( + &self, + user_id: &str, + amount: f64, + currency: &str, + ) -> Result { + let span = tracing::info_span!( + "payment_service_create_payment", + user_id = %user_id, + amount = %amount + ); + + let url = format!("{}/api/payments", self.base_url); + let payload = serde_json::json!({ + "user_id": user_id, + "amount": amount, + "currency": currency, + }); + + let mut req = self.http_client.post(&url).json(&payload); + if let Some(auth) = self.auth_header() { + req = req.header("Authorization", auth); + } + + let resp = req.send() + .instrument(span) + .await + .and_then(|resp| resp.error_for_status()) + .map_err(|e| { + tracing::error!("create_payment error: {:?}", e); + ConnectorError::HttpError(format!("Failed to create payment: {}", e)) + })?; + + let text = resp.text().await + .map_err(|e| ConnectorError::HttpError(e.to_string()))?; + + serde_json::from_str::(&text) + .map_err(|_| ConnectorError::InvalidResponse(text)) + } + + async fn get_payment_status( + &self, + payment_id: &str, + ) -> Result { + let span = tracing::info_span!( + "payment_service_get_status", + payment_id = %payment_id + ); + + let url = format!("{}/api/payments/{}", self.base_url, payment_id); + let mut req = self.http_client.get(&url); + + if let Some(auth) = self.auth_header() { + req = req.header("Authorization", auth); + } + + let resp = req.send() + .instrument(span) + .await + .map_err(|e| { + if e.status().map_or(false, |s| s == 404) { + ConnectorError::NotFound(format!("Payment {} not found", payment_id)) + } else { + ConnectorError::HttpError(format!("Failed to get payment: {}", e)) + } + })?; + + if resp.status() == 404 { + return Err(ConnectorError::NotFound(format!("Payment {} not found", payment_id))); + } + + let text = resp.text().await + .map_err(|e| ConnectorError::HttpError(e.to_string()))?; + + serde_json::from_str::(&text) + .map_err(|_| ConnectorError::InvalidResponse(text)) + } +} + +// 4. Provide mock for testing +pub mod mock { + use super::*; + + pub struct MockPaymentServiceConnector; + + #[async_trait::async_trait] + impl PaymentServiceConnector for MockPaymentServiceConnector { + async fn create_payment( + &self, + user_id: &str, + amount: f64, + currency: &str, + ) -> Result { + Ok(PaymentResponse { + payment_id: "mock_payment_123".to_string(), + status: "completed".to_string(), + amount, + }) + } + + async fn get_payment_status( + &self, + payment_id: &str, + ) -> Result { + Ok(PaymentResponse { + payment_id: payment_id.to_string(), + status: "completed".to_string(), + amount: 99.99, + }) + } + } +} + +// 5. Add init function for startup.rs +pub fn init(connector_config: &super::config::ConnectorConfig) -> web::Data> { + let connector: Arc = if let Some(payment_config) = + connector_config.payment_service.as_ref().filter(|c| c.enabled) + { + let mut config = payment_config.clone(); + if config.auth_token.is_none() { + config.auth_token = std::env::var("PAYMENT_SERVICE_AUTH_TOKEN").ok(); + } + tracing::info!("Initializing Payment Service connector: {}", config.base_url); + Arc::new(PaymentServiceClient::new(config)) + } else { + tracing::warn!("Payment Service connector disabled - using mock"); + Arc::new(mock::MockPaymentServiceConnector) + }; + + web::Data::new(connector) +} +``` + +### Step 3: Export from mod.rs + +Update `src/connectors/mod.rs`: + +```rust +pub mod payment_service; + +pub use payment_service::{PaymentServiceConnector, PaymentServiceClient}; +pub use payment_service::init as init_payment_service; +``` + +### Step 4: Update Configuration Files + +Add to `configuration.yaml` and `configuration.yaml.dist`: + +```yaml +connectors: + payment_service: + enabled: false + base_url: "http://localhost:8000" + timeout_secs: 15 +``` + +### Step 5: Register in startup.rs + +Add to `src/startup.rs`: + +```rust +// Initialize connectors +let payment_service = connectors::init_payment_service(&settings.connectors); + +// In App builder: +App::new() + .app_data(payment_service) + // ... other middleware +``` + +### Step 6: Use in Routes + +```rust +use crate::connectors::PaymentServiceConnector; + +#[post("/purchase/{template_id}")] +pub async fn purchase_handler( + user: web::ReqData>, + payment_connector: web::Data>, + path: web::Path<(String,)>, +) -> Result { + let template_id = path.into_inner().0; + + // Route logic never knows about HTTP + let payment = payment_connector + .create_payment(&user.id, 99.99, "USD") + .await + .map_err(|e| JsonResponse::build().bad_request(e.to_string()))?; + + Ok(JsonResponse::build().ok(payment)) +} +``` + +## Testing Connectors + +### Unit Tests (with Mock) + +```rust +#[cfg(test)] +mod tests { + use super::*; + use crate::connectors::payment_service::mock::MockPaymentServiceConnector; + + #[tokio::test] + async fn test_purchase_without_external_api() { + let connector = Arc::new(MockPaymentServiceConnector); + + let result = connector.create_payment("user_123", 99.99, "USD").await; + assert!(result.is_ok()); + + let payment = result.unwrap(); + assert_eq!(payment.status, "completed"); + } +} +``` + +### Integration Tests (with Real Service) + +```rust +#[tokio::test] +#[ignore] // Run with: cargo test -- --ignored +async fn test_real_payment_service() { + let config = PaymentServiceConfig { + enabled: true, + base_url: "http://localhost:8000".to_string(), + timeout_secs: 10, + auth_token: Some("test_token".to_string()), + }; + + let connector = Arc::new(PaymentServiceClient::new(config)); + let result = connector.create_payment("test_user", 1.00, "USD").await; + + assert!(result.is_ok()); +} +``` + +## Best Practices + +### ✅ DO + +- **Use trait objects** (`Arc`) for flexibility +- **Add retries** for transient failures (network issues) +- **Log errors** with context (user_id, request_id) +- **Use tracing spans** for observability +- **Handle timeouts** explicitly +- **Validate responses** before deserializing +- **Return typed errors** (ConnectorError enum) +- **Mock for tests** - never call real APIs in unit tests + +### ❌ DON'T + +- **Call HTTP directly from routes** - always use connectors +- **Panic on errors** - return `Result` +- **Expose reqwest types** - wrap in ConnectorError +- **Hardcode URLs** - always use config +- **Share HTTP clients** across different services +- **Skip error context** - log with tracing for debugging +- **Test with real APIs** unless explicitly integration tests + +## Error Handling + +All connectors use `ConnectorError` enum: + +```rust +pub enum ConnectorError { + HttpError(String), // Network/HTTP errors + ServiceUnavailable(String), // Service down or timeout + InvalidResponse(String), // Bad JSON/unexpected format + Unauthorized(String), // 401/403 + NotFound(String), // 404 + RateLimited(String), // 429 + Internal(String), // Unexpected errors +} +``` + +Convert external errors: +```rust +.map_err(|e| { + if e.is_timeout() { + ConnectorError::ServiceUnavailable(e.to_string()) + } else if e.status() == Some(404) { + ConnectorError::NotFound("Resource not found".to_string()) + } else { + ConnectorError::HttpError(e.to_string()) + } +}) +``` + +## Environment Variables + +Connectors can load auth tokens from environment: + +```bash +# .env or export +export USER_SERVICE_AUTH_TOKEN="Bearer abc123..." +export PAYMENT_SERVICE_AUTH_TOKEN="Bearer xyz789..." +``` + +Tokens are loaded in the `init()` function: +```rust +if config.auth_token.is_none() { + config.auth_token = std::env::var("PAYMENT_SERVICE_AUTH_TOKEN").ok(); +} +``` + +## Configuration Reference + +### Enable/Disable Services + +```yaml +connectors: + user_service: + enabled: true # ← Toggle here +``` + +- `enabled: true` → Uses HTTP client (production) +- `enabled: false` → Uses mock connector (tests/development) + +### Timeouts + +```yaml +timeout_secs: 10 # Request timeout in seconds +``` + +Applies to entire request (connection + response). + +### Retries + +Implement retry logic in client: +```rust +retry_attempts: 3 # Number of retry attempts +``` + +Use exponential backoff between retries. + +## Debugging + +### Enable Connector Logs + +```bash +RUST_LOG=stacker::connectors=debug cargo run +``` + +### Check Initialization + +Look for these log lines at startup: +``` +INFO stacker::connectors::user_service: Initializing User Service connector: https://api.example.com +WARN stacker::connectors::payment_service: Payment Service connector disabled - using mock +``` + +### Trace HTTP Requests + +```rust +let span = tracing::info_span!( + "user_service_create_stack", + template_id = %marketplace_template_id, + user_id = %user_id +); + +req.send() + .instrument(span) // ← Adds tracing + .await +``` + +## Checklist for New Connector + +- [ ] Config struct in `config.rs` with `Default` impl +- [ ] Add to `ConnectorConfig` struct +- [ ] Create `{service}.rs` with trait, client, mock, `init()` +- [ ] Export in `mod.rs` +- [ ] Add to `configuration.yaml` and `.yaml.dist` +- [ ] Register in `startup.rs` +- [ ] Write unit tests with mock +- [ ] Write integration tests (optional, marked `#[ignore]`) +- [ ] Document in copilot instructions +- [ ] Update this README with new connector in table + +## Further Reading + +- [User Service API Documentation](../../docs/USER_SERVICE_API.md) +- [Payment Service Documentation](../../docs/PAYMENT_SERVICE.md) +- [Error Handling Patterns](../helpers/README.md) +- [Testing Guide](../../tests/README.md) diff --git a/src/db/marketplace.rs b/src/db/marketplace.rs index 632dd9f..29efc2e 100644 --- a/src/db/marketplace.rs +++ b/src/db/marketplace.rs @@ -13,16 +13,14 @@ pub async fn list_approved(pool: &PgPool, category: Option<&str>, tag: Option<&s short_description, long_description, category_id, + product_id, tags, tech_stack, status, - plan_type, - price, - currency, is_configurable, view_count, deploy_count, - average_rating, + required_plan_name, created_at, updated_at, approved_at @@ -39,7 +37,7 @@ pub async fn list_approved(pool: &PgPool, category: Option<&str>, tag: Option<&s match sort.unwrap_or("recent") { "popular" => base.push_str(" ORDER BY deploy_count DESC, view_count DESC"), - "rating" => base.push_str(" ORDER BY average_rating DESC NULLS LAST"), + "rating" => base.push_str(" ORDER BY (SELECT AVG(rate) FROM rating WHERE rating.product_id = stack_template.product_id) DESC NULLS LAST"), _ => base.push_str(" ORDER BY approved_at DESC NULLS LAST, created_at DESC"), } @@ -91,16 +89,14 @@ pub async fn get_by_slug_with_latest(pool: &PgPool, slug: &str) -> Result<(Stack short_description, long_description, category_id, + product_id, tags, tech_stack, status, - plan_type, - price, - currency, is_configurable, view_count, deploy_count, - average_rating, + required_plan_name, created_at, updated_at, approved_at @@ -140,6 +136,45 @@ pub async fn get_by_slug_with_latest(pool: &PgPool, slug: &str) -> Result<(Stack Ok((template, version)) } +pub async fn get_by_id(pool: &PgPool, template_id: uuid::Uuid) -> Result, String> { + let query_span = tracing::info_span!("marketplace_get_by_id", id = %template_id); + + let template = sqlx::query_as!( + StackTemplate, + r#"SELECT + id, + creator_user_id, + creator_name, + name, + slug, + short_description, + long_description, + category_id, + product_id, + tags, + tech_stack, + status, + is_configurable, + view_count, + deploy_count, + created_at, + updated_at, + approved_at, + required_plan_name + FROM stack_template WHERE id = $1"#, + template_id + ) + .fetch_optional(pool) + .instrument(query_span) + .await + .map_err(|e| { + tracing::error!("get_by_id error: {:?}", e); + "Internal Server Error".to_string() + })?; + + Ok(template) +} + pub async fn create_draft( pool: &PgPool, creator_user_id: &str, @@ -170,16 +205,14 @@ pub async fn create_draft( short_description, long_description, category_id, + product_id, tags, tech_stack, status, - plan_type, - price, - currency, is_configurable, view_count, deploy_count, - average_rating, + required_plan_name, created_at, updated_at, approved_at @@ -244,7 +277,7 @@ pub async fn set_latest_version(pool: &PgPool, template_id: &uuid::Uuid, version Ok(rec) } -pub async fn update_metadata(pool: &PgPool, template_id: &uuid::Uuid, name: Option<&str>, short_description: Option<&str>, long_description: Option<&str>, category_id: Option, tags: Option, tech_stack: Option, plan_type: Option<&str>, price: Option, currency: Option<&str>) -> Result { +pub async fn update_metadata(pool: &PgPool, template_id: &uuid::Uuid, name: Option<&str>, short_description: Option<&str>, long_description: Option<&str>, category_id: Option, tags: Option, tech_stack: Option) -> Result { let query_span = tracing::info_span!("marketplace_update_metadata", template_id = %template_id); // Update only allowed statuses @@ -271,10 +304,7 @@ pub async fn update_metadata(pool: &PgPool, template_id: &uuid::Uuid, name: Opti long_description = COALESCE($4, long_description), category_id = COALESCE($5, category_id), tags = COALESCE($6, tags), - tech_stack = COALESCE($7, tech_stack), - plan_type = COALESCE($8, plan_type), - price = COALESCE($9, price), - currency = COALESCE($10, currency) + tech_stack = COALESCE($7, tech_stack) WHERE id = $1::uuid"#, template_id, name, @@ -282,10 +312,7 @@ pub async fn update_metadata(pool: &PgPool, template_id: &uuid::Uuid, name: Opti long_description, category_id, tags, - tech_stack, - plan_type, - price, - currency + tech_stack ) .execute(pool) .instrument(query_span) @@ -330,16 +357,14 @@ pub async fn list_mine(pool: &PgPool, user_id: &str) -> Result Result, S short_description, long_description, category_id, + product_id, tags, tech_stack, status, - plan_type, - price, - currency, is_configurable, view_count, deploy_count, - average_rating, + required_plan_name, created_at, updated_at, approved_at diff --git a/src/lib.rs b/src/lib.rs index 03c6203..c5456d8 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,4 +1,5 @@ pub mod configuration; +pub mod connectors; pub mod console; pub mod db; pub mod forms; diff --git a/src/mcp/protocol_tests.rs b/src/mcp/protocol_tests.rs index 864275b..b10388d 100644 --- a/src/mcp/protocol_tests.rs +++ b/src/mcp/protocol_tests.rs @@ -1,6 +1,11 @@ #[cfg(test)] mod tests { use super::*; + use crate::mcp::{ + CallToolRequest, CallToolResponse, InitializeParams, InitializeResult, JsonRpcError, + JsonRpcRequest, JsonRpcResponse, ServerCapabilities, ServerInfo, Tool, ToolContent, + ToolsCapability, + }; #[test] fn test_json_rpc_request_deserialize() { diff --git a/src/models/marketplace.rs b/src/models/marketplace.rs index 2931612..ad1f3ea 100644 --- a/src/models/marketplace.rs +++ b/src/models/marketplace.rs @@ -12,16 +12,14 @@ pub struct StackTemplate { pub short_description: Option, pub long_description: Option, pub category_id: Option, + pub product_id: Option, pub tags: serde_json::Value, pub tech_stack: serde_json::Value, pub status: String, - pub plan_type: Option, - pub price: Option, - pub currency: Option, pub is_configurable: Option, pub view_count: Option, pub deploy_count: Option, - pub average_rating: Option, + pub required_plan_name: Option, pub created_at: Option>, pub updated_at: Option>, pub approved_at: Option>, diff --git a/src/routes/marketplace/admin.rs b/src/routes/marketplace/admin.rs index a1a2617..6870700 100644 --- a/src/routes/marketplace/admin.rs +++ b/src/routes/marketplace/admin.rs @@ -1,4 +1,5 @@ use crate::db; +use crate::connectors::user_service::UserServiceConnector; use crate::helpers::JsonResponse; use crate::models; use actix_web::{get, post, web, Responder, Result}; @@ -67,3 +68,33 @@ pub async fn reject_handler( Err(JsonResponse::::build().bad_request("Not updated")) } } +#[tracing::instrument(name = "List available plans from User Service", skip(user_service))] +#[get("/plans")] +pub async fn list_plans_handler( + _admin: web::ReqData>, // role enforced by Casbin + user_service: web::Data>, +) -> Result { + user_service + .list_available_plans() + .await + .map_err(|err| { + tracing::error!("Failed to fetch available plans: {:?}", err); + JsonResponse::::build() + .internal_server_error("Failed to fetch available plans from User Service") + }) + .map(|plans| { + // Convert PlanDefinition to JSON for response + let plan_json: Vec = plans + .iter() + .map(|p| { + serde_json::json!({ + "name": p.name, + "description": p.description, + "tier": p.tier, + "features": p.features + }) + }) + .collect(); + JsonResponse::build().set_list(plan_json).ok("OK") + }) +} \ No newline at end of file diff --git a/src/routes/marketplace/creator.rs b/src/routes/marketplace/creator.rs index 9f0f10b..2c4d043 100644 --- a/src/routes/marketplace/creator.rs +++ b/src/routes/marketplace/creator.rs @@ -73,9 +73,6 @@ pub struct UpdateTemplateRequest { pub category_id: Option, pub tags: Option, pub tech_stack: Option, - pub plan_type: Option, - pub price: Option, - pub currency: Option, } #[tracing::instrument(name = "Update template metadata")] @@ -90,7 +87,7 @@ pub async fn update_handler( .map_err(|_| actix_web::error::ErrorBadRequest("Invalid UUID"))?; // Ownership check - let owner_id = sqlx::query_scalar!( + let owner_id: String = sqlx::query_scalar!( r#"SELECT creator_user_id FROM stack_template WHERE id = $1"#, id ) @@ -113,9 +110,6 @@ pub async fn update_handler( req.category_id, req.tags, req.tech_stack, - req.plan_type.as_deref(), - req.price, - req.currency.as_deref(), ) .await .map_err(|err| JsonResponse::::build().bad_request(err))?; @@ -138,7 +132,7 @@ pub async fn submit_handler( .map_err(|_| actix_web::error::ErrorBadRequest("Invalid UUID"))?; // Ownership check - let owner_id = sqlx::query_scalar!( + let owner_id: String = sqlx::query_scalar!( r#"SELECT creator_user_id FROM stack_template WHERE id = $1"#, id ) diff --git a/src/routes/project/deploy.rs b/src/routes/project/deploy.rs index dc07981..74ec1cc 100644 --- a/src/routes/project/deploy.rs +++ b/src/routes/project/deploy.rs @@ -1,4 +1,5 @@ use crate::configuration::Settings; +use crate::connectors::user_service::UserServiceConnector; use crate::db; use crate::forms; use crate::helpers::compressor::compress; @@ -11,7 +12,7 @@ use sqlx::PgPool; use std::sync::Arc; use uuid::Uuid; -#[tracing::instrument(name = "Deploy for every user")] +#[tracing::instrument(name = "Deploy for every user", skip(user_service))] #[post("/{id}/deploy")] pub async fn item( user: web::ReqData>, @@ -20,6 +21,7 @@ pub async fn item( pg_pool: Data, mq_manager: Data, sets: Data, + user_service: Data>, ) -> Result { let id = path.0; tracing::debug!("User {:?} is deploying project: {}", user, id); @@ -41,6 +43,41 @@ pub async fn item( None => Err(JsonResponse::::build().not_found("not found")), })?; + // Check marketplace template plan requirements if project was created from template + if let Some(template_id) = project.source_template_id { + if let Some(template) = db::marketplace::get_by_id(pg_pool.get_ref(), template_id) + .await + .map_err(|err| JsonResponse::::build().internal_server_error(err))? + { + // If template requires a specific plan, validate user has it + if let Some(required_plan) = template.required_plan_name { + let has_plan = user_service + .user_has_plan(&user.id, &required_plan) + .await + .map_err(|err| { + tracing::error!("Failed to validate plan: {:?}", err); + JsonResponse::::build() + .internal_server_error("Failed to validate subscription plan") + })?; + + if !has_plan { + tracing::warn!( + "User {} lacks required plan {} to deploy template {}", + user.id, + required_plan, + template_id + ); + return Err(JsonResponse::::build().forbidden( + format!( + "You require a '{}' subscription to deploy this template", + required_plan + ), + )); + } + } + } + } + // Build compose let id = project.id; let dc = DcBuilder::new(project); @@ -138,7 +175,7 @@ pub async fn item( .ok("Success") }) } -#[tracing::instrument(name = "Deploy, when cloud token is saved")] +#[tracing::instrument(name = "Deploy, when cloud token is saved", skip(user_service))] #[post("/{id}/deploy/{cloud_id}")] pub async fn saved_item( user: web::ReqData>, @@ -147,6 +184,7 @@ pub async fn saved_item( pg_pool: Data, mq_manager: Data, sets: Data, + user_service: Data>, ) -> Result { let id = path.0; let cloud_id = path.1; @@ -175,6 +213,41 @@ pub async fn saved_item( None => Err(JsonResponse::::build().not_found("Project not found")), })?; + // Check marketplace template plan requirements if project was created from template + if let Some(template_id) = project.source_template_id { + if let Some(template) = db::marketplace::get_by_id(pg_pool.get_ref(), template_id) + .await + .map_err(|err| JsonResponse::::build().internal_server_error(err))? + { + // If template requires a specific plan, validate user has it + if let Some(required_plan) = template.required_plan_name { + let has_plan = user_service + .user_has_plan(&user.id, &required_plan) + .await + .map_err(|err| { + tracing::error!("Failed to validate plan: {:?}", err); + JsonResponse::::build() + .internal_server_error("Failed to validate subscription plan") + })?; + + if !has_plan { + tracing::warn!( + "User {} lacks required plan {} to deploy template {}", + user.id, + required_plan, + template_id + ); + return Err(JsonResponse::::build().forbidden( + format!( + "You require a '{}' subscription to deploy this template", + required_plan + ), + )); + } + } + } + } + // Build compose let id = project.id; let dc = DcBuilder::new(project); diff --git a/src/startup.rs b/src/startup.rs index f8d4e6d..5e43401 100644 --- a/src/startup.rs +++ b/src/startup.rs @@ -1,4 +1,5 @@ use crate::configuration::Settings; +use crate::connectors; use crate::helpers; use crate::mcp; use crate::middleware; @@ -28,6 +29,9 @@ pub async fn run( let mcp_registry = Arc::new(mcp::ToolRegistry::new()); let mcp_registry = web::Data::new(mcp_registry); + // Initialize external service connectors (plugin pattern) + let user_service_connector = connectors::init_user_service(&settings.connectors); + let authorization = middleware::authorization::try_new(settings.database.connection_string()).await?; let json_config = web::JsonConfig::default().error_handler(|err, _req| { @@ -122,6 +126,10 @@ pub async fn run( .service(crate::routes::marketplace::admin::list_submitted_handler) .service(crate::routes::marketplace::admin::approve_handler) .service(crate::routes::marketplace::admin::reject_handler), + ) + .service( + web::scope("/marketplace") + .service(crate::routes::marketplace::admin::list_plans_handler), ), ), ) @@ -168,6 +176,7 @@ pub async fn run( .app_data(mq_manager.clone()) .app_data(vault_client.clone()) .app_data(mcp_registry.clone()) + .app_data(user_service_connector.clone()) .app_data(settings.clone()) }) .listen(listener)? From 51085bf2c929096170237d5603ac186af891fc00 Mon Sep 17 00:00:00 2001 From: vsilent Date: Thu, 1 Jan 2026 15:33:59 +0200 Subject: [PATCH 25/35] marketplace + product + tests --- TODO.md | 556 +++++++++++++++++++++++--------- src/routes/marketplace/admin.rs | 81 ++++- 2 files changed, 473 insertions(+), 164 deletions(-) diff --git a/TODO.md b/TODO.md index 68bc84a..f799d67 100644 --- a/TODO.md +++ b/TODO.md @@ -1,156 +1,400 @@ -# Stacker Development TODO - -## MCP Tool Development - -- [ ] **GenerateComposeTool Implementation** - - Currently: Tool removed during Phase 3 due to ProjectForm schema complexity - - Issue: Needs proper understanding of ProjectForm structure (especially `custom.web` array and nested docker_image fields) - - TODO: - 1. Inspect actual ProjectForm structure in [src/forms/project/](src/forms/project/) - 2. Map correct field paths for docker_image (namespace, repository, tag) and port configuration - 3. Implement Docker Compose YAML generation from project metadata - - Reference: Previous implementation in [src/mcp/tools/compose.rs](src/mcp/tools/compose.rs) - - Status: Phase 3 complete with 15 tools (9 Phase 3 tools without GenerateComposeTool) - -- [ ] **MCP Browser-Based Client Support (Cookie Authentication)** - - Currently: Backend supports Bearer token auth (works for server-side clients like wscat, CLI tools) - - Issue: Browser WebSocket API cannot set `Authorization` header (W3C spec limitation) - - Impact: Browser-based MCP UI clients cannot connect (get 403 Forbidden) - - TODO: - 1. Create `src/middleware/authentication/method/f_cookie.rs` - Extract `access_token` from Cookie header - 2. Update `src/middleware/authentication/manager_middleware.rs` - Add `try_cookie()` after `try_oauth()` - 3. Export cookie method in `src/middleware/authentication/method/mod.rs` - 4. Test with wscat: `wscat -c ws://localhost:8000/mcp -H "Cookie: access_token=..."` - 5. Test with browser WebSocket connection - - Reference: Full implementation guide in [docs/MCP_BROWSER_AUTH.md](docs/MCP_BROWSER_AUTH.md) - - Priority: Medium (only needed for browser-based MCP clients) - - Status: Server-side clients work perfectly; browser support blocked until cookie auth added - - Note: Both auth methods should coexist - Bearer for servers, cookies for browsers - -## Agent Registration & Security - -- [ ] **Agent Registration Access Control** - - Currently: `POST /api/v1/agent/register` is public (no auth required) - - Issue: Any unauthenticated client can register agents - - TODO: Require user authentication or API client credentials - - Solution: Restore `user: web::ReqData>` parameter in [src/routes/agent/register.rs](src/routes/agent/register.rs#L28) and add authorization check to verify user owns the deployment - - Reference: See [src/routes/agent/register.rs](src/routes/agent/register.rs) line 28 - -- [ ] **Vault Client Testing** - - Currently: Vault token storage fails gracefully in tests (falls back to bearer token when Vault unreachable at localhost) - - TODO: Test against a real Vault instance - - Steps: - 1. Spin up Vault in Docker or use a test environment - 2. Update [src/middleware/authentication/method/f_agent.rs](src/middleware/authentication/method/f_agent.rs) to use realistic Vault configuration - 3. Remove the localhost fallback once production behavior is validated - 4. Run integration tests with real Vault credentials - -## OAuth & Authentication Improvements - -- [ ] **OAuth Mock Server Lifecycle** - - Issue: Mock auth server in tests logs "unable to connect" even though it's listening - - Current fix: OAuth middleware has loopback fallback that synthesizes test users - - TODO: Investigate why sanity check fails while actual requests succeed - - File: [tests/common/mod.rs](tests/common/mod.rs#L45-L50) - -- [ ] **Middleware Panic Prevention** - - Current: Changed `try_lock().expect()` to return `Poll::Pending` to avoid panics during concurrent requests - - TODO: Review this approach for correctness; consider if Mutex contention is expected - - File: [src/middleware/authentication/manager_middleware.rs](src/middleware/authentication/manager_middleware.rs#L23-L27) - -## Code Quality & Warnings - -- [ ] **Deprecated Config Merge** - - Warning: `config::Config::merge` is deprecated - - File: [src/configuration.rs](src/configuration.rs#L70) - - TODO: Use `ConfigBuilder` instead - -- [ ] **Snake Case Violations** - - Files with non-snake-case variable names: - - [src/console/commands/debug/casbin.rs](src/console/commands/debug/casbin.rs#L31) - `authorizationService` - - [src/console/commands/debug/dockerhub.rs](src/console/commands/debug/dockerhub.rs#L27) - `dockerImage` - - [src/console/commands/debug/dockerhub.rs](src/console/commands/debug/dockerhub.rs#L29) - `isActive` - - [src/helpers/dockerhub.rs](src/helpers/dockerhub.rs#L124) - `dockerHubToken` - -- [ ] **Unused Fields & Functions** - - [src/db/agreement.rs](src/db/agreement.rs#L30) - `fetch_by_user` unused - - [src/db/agreement.rs](src/db/agreement.rs#L79) - `fetch_one_by_name` unused - - [src/routes/agent/register.rs](src/routes/agent/register.rs#L9) - `public_key` field in RegisterAgentRequest never used - - [src/routes/agent/report.rs](src/routes/agent/report.rs#L14) - `started_at` and `completed_at` fields in CommandReportRequest never read - - [src/helpers/json.rs](src/helpers/json.rs#L100) - `no_content()` method never used - - [src/models/rules.rs](src/models/rules.rs#L4) - `comments_per_user` field never read - - [src/routes/test/deploy.rs](src/routes/test/deploy.rs#L8) - `DeployResponse` never constructed - - [src/forms/rating/useredit.rs](src/forms/rating/useredit.rs#L18, L22) - `insert()` calls with unused return values - - [src/forms/rating/adminedit.rs](src/forms/rating/adminedit.rs#L19, L23, L27) - `insert()` calls with unused return values - - [src/forms/project/app.rs](src/forms/project/app.rs#L138) - Loop over Option instead of if-let - -## Agent/Command Features - -- [ ] **Long-Polling Timeout Handling** - - Current: Wait endpoint holds connection for up to 30 seconds - - TODO: Document timeout behavior in API docs - - File: [src/routes/agent/wait.rs](src/routes/agent/wait.rs) - -- [ ] **Command Priority Ordering** - - Current: Commands returned in priority order (critical > high > normal > low) - - TODO: Add tests for priority edge cases and fairness among same-priority commands - -- [ ] **Agent Heartbeat & Status** - - Current: Agent status tracked in `agents.status` and `agents.last_heartbeat` - - TODO: Implement agent timeout detection (e.g., mark offline if no heartbeat > 5 minutes) - - TODO: Add health check endpoint for deployment dashboards - -## Deployment & Testing - -- [ ] **Full Test Suite** - - Current: Agent command flow tests pass (4/5 passing, 1 ignored) - - TODO: Run full `cargo test` suite and fix any remaining failures - - TODO: Add tests for project body→metadata migration edge cases - -- [ ] **Database Migration Safety** - - Current: Duplicate Casbin migration neutralized (20251223100000_casbin_agent_rules.up.sql is a no-op) - - TODO: Clean up or document why this file exists - - TODO: Add migration validation in CI/CD - -## Documentation - -- [ ] **API Documentation** - - TODO: Add OpenAPI/Swagger definitions for agent endpoints - - TODO: Document rate limiting policies for API clients - -- [ ] **Agent Developer Guide** - - TODO: Create quickstart for agent implementers - - TODO: Provide SDKs or client libraries for agent communication - -## Performance & Scalability - -- [ ] **Long-Polling Optimization** - - Current: Simple 30-second timeout poll - - TODO: Consider Server-Sent Events (SSE) or WebSocket for real-time command delivery - - TODO: Add metrics for long-poll latency and agent responsiveness - -- [ ] **Database Connection Pooling** - - TODO: Review SQLx pool configuration for production load - - TODO: Add connection pool metrics - -## Security - -- [ ] **Agent Token Rotation** - - TODO: Implement agent token expiration - - TODO: Add token refresh mechanism - -- [ ] **Casbin Rule Validation** - - Current: Casbin rules require manual maintenance - - TODO: Add schema validation for Casbin rules at startup - - TODO: Add lint/check command to validate rules - -## Known Issues - -- [ ] **SQLx Offline Mode** - - Current: Using `sqlx` in offline mode; some queries may not compile if schema changes - - TODO: Document how to regenerate `.sqlx` cache: `cargo sqlx prepare` - -- [ ] **Vault Fallback in Tests** - - Current: [src/middleware/authentication/method/f_agent.rs](src/middleware/authentication/method/f_agent.rs#L90-L103) has loopback fallback - - Risk: Could mask real Vault errors in non-test environments - - TODO: Add feature flag or config to control fallback behavior +# TODO: Stacker Marketplace Payment Integration + +## Context +Per [PAYMENT_MODEL.md](/PAYMENT_MODEL.md), Stacker now sends webhooks to User Service when templates are published/updated. User Service owns the `products` table for monetization, while Stacker owns `stack_template` (template definitions only). + +Stacker responsibilities: +1. **Maintain `stack_template` table** (template definitions, no pricing/monetization) +2. **Send webhook to User Service** when template status changes (approved, updated, rejected) +3. **Query User Service** for product information (pricing, vendor, etc.) +4. **Validate deployments** against User Service product ownership + +## Tasks + +### 1. Create User Service Connector +**File**: `app//connectors/user_service_connector.py` (in Stacker repo) + +**Required methods**: +```python +class UserServiceConnector: + def get_user_profile(self, user_token: str) -> dict: + """ + GET http://user:4100/oauth_server/api/me + Headers: Authorization: Bearer {user_token} + + Returns: + { + "email": "user@example.com", + "plan": { + "name": "plus", + "date_end": "2026-01-30" + }, + "products": [ + { + "product_id": "uuid", + "product_type": "template", + "code": "ai-agent-stack", + "external_id": 12345, # stack_template.id from Stacker + "name": "AI Agent Stack", + "price": "99.99", + "owned_since": "2025-01-15T..." + } + ] + } + """ + pass + + def get_template_product(self, stack_template_id: int) -> dict: + """ + GET http://user:4100/api/1.0/products?external_id={stack_template_id}&product_type=template + + Returns product info for a marketplace template (pricing, vendor, etc.) + """ + pass + + def user_owns_template(self, user_token: str, stack_template_id: int) -> bool: + """ + Check if user has purchased/owns this marketplace template + """ + profile = self.get_user_profile(user_token) + return any(p['external_id'] == stack_template_id and p['product_type'] == 'template' + for p in profile.get('products', [])) +``` + +**Implementation Note**: Use OAuth2 token that Stacker already has for the user. + +### 2. Create Webhook Sender to User Service (Marketplace Sync) +**File**: `app//webhooks/marketplace_webhook.py` (in Stacker repo) + +**When template status changes** (approved, updated, rejected): +```python +import requests +from os import environ + +class MarketplaceWebhookSender: + """ + Send template sync webhooks to User Service + Mirrors PAYMENT_MODEL.md Flow 3: Stacker template changes → User Service products + """ + + def send_template_approved(self, stack_template: dict, vendor_user: dict): + """ + POST http://user:4100/marketplace/sync + + Body: + { + "action": "template_approved", + "stack_template_id": 12345, + "external_id": 12345, # Same as stack_template_id + "code": "ai-agent-stack-pro", + "name": "AI Agent Stack Pro", + "description": "Advanced AI agent deployment...", + "price": 99.99, + "billing_cycle": "one_time", # or "monthly" + "currency": "USD", + "vendor_user_id": 456, + "vendor_name": "John Doe" + } + """ + headers = {'Authorization': f'Bearer {self.get_service_token()}'} + + payload = { + 'action': 'template_approved', + 'stack_template_id': stack_template['id'], + 'external_id': stack_template['id'], + 'code': stack_template.get('code'), + 'name': stack_template.get('name'), + 'description': stack_template.get('description'), + 'price': stack_template.get('price'), + 'billing_cycle': stack_template.get('billing_cycle', 'one_time'), + 'currency': stack_template.get('currency', 'USD'), + 'vendor_user_id': vendor_user['id'], + 'vendor_name': vendor_user.get('full_name', vendor_user.get('email')) + } + + response = requests.post( + f"{environ['URL_SERVER_USER']}/marketplace/sync", + json=payload, + headers=headers + ) + + if response.status_code != 200: + raise Exception(f"Webhook send failed: {response.text}") + + return response.json() + + def send_template_updated(self, stack_template: dict, vendor_user: dict): + """Send template updated webhook (same format as approved)""" + payload = {...} + payload['action'] = 'template_updated' + # Send like send_template_approved() + + def send_template_rejected(self, stack_template: dict): + """ + Notify User Service to deactivate product + + Body: + { + "action": "template_rejected", + "stack_template_id": 12345 + } + """ + headers = {'Authorization': f'Bearer {self.get_service_token()}'} + + payload = { + 'action': 'template_rejected', + 'stack_template_id': stack_template['id'] + } + + response = requests.post( + f"{environ['URL_SERVER_USER']}/marketplace/sync", + json=payload, + headers=headers + ) + + return response.json() + + @staticmethod + def get_service_token() -> str: + """Get Bearer token for service-to-service communication""" + # Option 1: Use static bearer token + return environ.get('STACKER_SERVICE_TOKEN') + + # Option 2: Use OAuth2 client credentials flow (preferred) + # See User Service `.github/copilot-instructions.md` for setup +``` + +**Integration points** (where to call webhook sender): + +1. **When template is approved by admin**: +```python +def approve_template(template_id: int): + template = StackTemplate.query.get(template_id) + vendor = User.query.get(template.created_by_user_id) + template.status = 'approved' + db.session.commit() + + # Send webhook to User Service to create product + webhook_sender = MarketplaceWebhookSender() + webhook_sender.send_template_approved(template.to_dict(), vendor.to_dict()) +``` + +2. **When template is updated**: +```python +def update_template(template_id: int, updates: dict): + template = StackTemplate.query.get(template_id) + template.update(updates) + db.session.commit() + + if template.status == 'approved': + vendor = User.query.get(template.created_by_user_id) + webhook_sender = MarketplaceWebhookSender() + webhook_sender.send_template_updated(template.to_dict(), vendor.to_dict()) +``` + +3. **When template is rejected**: +```python +def reject_template(template_id: int): + template = StackTemplate.query.get(template_id) + template.status = 'rejected' + db.session.commit() + + webhook_sender = MarketplaceWebhookSender() + webhook_sender.send_template_rejected(template.to_dict()) +``` + +### 3. Add Deployment Validation +**File**: `app//services/deployment_service.py` (update existing) + +**Before allowing deployment, validate**: +```python +from .connectors.user_service_connector import UserServiceConnector + +class DeploymentValidator: + def validate_marketplace_template(self, stack_template: dict, user_token: str): + """ + Check if user can deploy this marketplace template + + If template has a product in User Service: + - Check if user owns product (in user_products table) + - If not owned, block deployment + """ + connector = UserServiceConnector() + + # If template is not marketplace template, allow deployment + if not stack_template.get('is_from_marketplace'): + return True + + # Check if template has associated product + template_id = stack_template['id'] + product_info = connector.get_template_product(template_id) + + if not product_info: + # No product = free marketplace template, allow deployment + return True + + # Check if user owns this template product + user_owns = connector.user_owns_template(user_token, template_id) + + if not user_owns: + raise TemplateNotPurchasedError( + f"This verified pro stack requires purchase. " + f"Price: ${product_info.get('price')}. " + f"Please purchase from User Service." + ) + + return True +``` + +**Integrate into deployment flow**: +```python +def start_deployment(template_id: int, user_token: str): + template = StackTemplate.query.get(template_id) + + # Validate permission to deploy this template + validator = DeploymentValidator() + validator.validate_marketplace_template(template.to_dict(), user_token) + + # Continue with deployment... +``` + +## Environment Variables Needed (Stacker) +Add to Stacker's `.env`: +```bash +# User Service +URL_SERVER_USER=http://user:4100/ + +# Service-to-service auth token (for webhook sender) +STACKER_SERVICE_TOKEN= + +# Or use OAuth2 client credentials (preferred) +STACKER_CLIENT_ID= +STACKER_CLIENT_SECRET= +``` + +## Testing Checklist + +### Unit Tests +- [ ] `test_user_service_connector.py`: + - [ ] `get_user_profile()` returns user with products list + - [ ] `get_template_product()` returns product info + - [ ] `user_owns_template()` returns correct boolean +- [ ] `test_marketplace_webhook_sender.py`: + - [ ] `send_template_approved()` sends correct webhook payload + - [ ] `send_template_updated()` sends correct webhook payload + - [ ] `send_template_rejected()` sends correct webhook payload + - [ ] `get_service_token()` returns valid bearer token +- [ ] `test_deployment_validator.py`: + - [ ] `validate_marketplace_template()` allows free templates + - [ ] `validate_marketplace_template()` allows user-owned paid templates + - [ ] `validate_marketplace_template()` blocks non-owned paid templates + - [ ] Raises `TemplateNotPurchasedError` with correct message + +### Integration Tests +- [ ] `test_template_approval_flow.py`: + - [ ] Admin approves template in Stacker + - [ ] Webhook sent to User Service `/marketplace/sync` + - [ ] User Service creates product + - [ ] `/oauth_server/api/me` includes new product +- [ ] `test_template_update_flow.py`: + - [ ] Vendor updates template in Stacker + - [ ] Webhook sent to User Service + - [ ] Product updated in User Service +- [ ] `test_template_rejection_flow.py`: + - [ ] Admin rejects template + - [ ] Webhook sent to User Service + - [ ] Product deactivated in User Service +- [ ] `test_deployment_validation_flow.py`: + - [ ] User can deploy free marketplace template + - [ ] User cannot deploy paid template without purchase + - [ ] User can deploy paid template after product purchase + - [ ] Correct error messages in each scenario + +### Manual Testing +- [ ] Stacker can query User Service `/oauth_server/api/me` (with real user token) +- [ ] Stacker connector returns user profile with products list +- [ ] Approve template in Stacker admin → webhook sent to User Service +- [ ] User Service `/marketplace/sync` creates product +- [ ] Product appears in `/api/1.0/products` endpoint +- [ ] Deployment validation blocks unpurchased paid templates +- [ ] Deployment validation allows owned paid templates +- [ ] All environment variables configured correctly + +## Coordination + +**Dependencies**: +1. ✅ User Service - `/marketplace/sync` webhook endpoint (created in User Service TODO) +2. ✅ User Service - `products` + `user_products` tables (created in User Service TODO) +3. ⏳ Stacker - User Service connector + webhook sender (THIS TODO) +4. ✅ Payment Service - No changes needed (handles all webhooks same way) + +**Service Interaction Flow**: + +``` +Vendor Creates Template in Stacker + ↓ +Admin Approves in Stacker + ↓ +Stacker calls MarketplaceWebhookSender.send_template_approved() + ↓ +POST http://user:4100/marketplace/sync + { + "action": "template_approved", + "stack_template_id": 12345, + "price": 99.99, + "vendor_user_id": 456, + ... + } + ↓ +User Service creates `products` row + (product_type='template', external_id=12345, vendor_id=456, price=99.99) + ↓ +Template now available in User Service `/api/1.0/products?product_type=template` + ↓ +Blog queries User Service for marketplace templates + ↓ +User views template in marketplace, clicks "Deploy" + ↓ +User pays (Payment Service handles all payment flows) + ↓ +Payment Service webhook → User Service (adds row to `user_products`) + ↓ +Stacker queries User Service `/oauth_server/api/me` + ↓ +User Service returns products list (includes newly purchased template) + ↓ +DeploymentValidator.validate_marketplace_template() checks ownership + ↓ +Deployment proceeds (user owns product) +``` + +## Notes + +**Architecture Decisions**: +1. Stacker only sends webhooks to User Service (no bi-directional queries) +2. User Service owns monetization logic (products table) +3. Payment Service forwards webhooks to User Service (same handler for all product types) +4. `stack_template.id` (Stacker) links to `products.external_id` (User Service) via webhook +5. Deployment validation queries User Service for product ownership + +**Key Points**: +- DO NOT store pricing in Stacker `stack_template` table +- DO NOT create products table in Stacker (they're in User Service) +- DO send webhooks to User Service when template status changes +- DO use Bearer token for service-to-service auth in webhooks +- Webhook sender is simpler than Stacker querying User Service (one-way communication) + +## Timeline Estimate + +- Phase 1 (User Service connector): 1-2 hours +- Phase 2 (Webhook sender): 1-2 hours +- Phase 3 (Deployment validation): 1-2 hours +- Phase 4 (Testing): 3-4 hours +- **Total**: 6-10 hours (~1 day) + +## Reference Files +- [PAYMENT_MODEL.md](/PAYMENT_MODEL.md) - Architecture +- [try.direct.user.service/TODO.md](try.direct.user.service/TODO.md) - User Service implementation +- [try.direct.tools/TODO.md](try.direct.tools/TODO.md) - Shared utilities +- [blog/TODO.md](blog/TODO.md) - Frontend marketplace UI + diff --git a/src/routes/marketplace/admin.rs b/src/routes/marketplace/admin.rs index 6870700..0119f7e 100644 --- a/src/routes/marketplace/admin.rs +++ b/src/routes/marketplace/admin.rs @@ -1,11 +1,13 @@ use crate::db; use crate::connectors::user_service::UserServiceConnector; +use crate::connectors::{MarketplaceWebhookSender, WebhookSenderConfig}; use crate::helpers::JsonResponse; use crate::models; use actix_web::{get, post, web, Responder, Result}; use sqlx::PgPool; use std::sync::Arc; use uuid; +use tracing::Instrument; #[tracing::instrument(name = "List submitted templates (admin)")] #[get("")] @@ -36,15 +38,52 @@ pub async fn approve_handler( let id = uuid::Uuid::parse_str(&path.into_inner().0) .map_err(|_| actix_web::error::ErrorBadRequest("Invalid UUID"))?; let req = body.into_inner(); + let updated = db::marketplace::admin_decide(pg_pool.get_ref(), &id, &admin.id, "approved", req.reason.as_deref()) .await .map_err(|err| JsonResponse::::build().internal_server_error(err))?; - if updated { - Ok(JsonResponse::::build().ok("Approved")) - } else { - Err(JsonResponse::::build().bad_request("Not updated")) + if !updated { + return Err(JsonResponse::::build().bad_request("Not updated")); } + + // Fetch template details for webhook + let template = db::marketplace::get_by_id(pg_pool.get_ref(), id) + .await + .map_err(|err| { + tracing::error!("Failed to fetch template for webhook: {:?}", err); + JsonResponse::::build().internal_server_error(err) + })? + .ok_or_else(|| { + JsonResponse::::build().not_found("Template not found") + })?; + + // Send webhook asynchronously (non-blocking) + // Don't fail the approval if webhook send fails - template is already approved + let template_clone = template.clone(); + tokio::spawn(async move { + match WebhookSenderConfig::from_env() { + Ok(config) => { + let sender = MarketplaceWebhookSender::new(config); + let span = tracing::info_span!("send_approval_webhook", template_id = %template_clone.id); + + if let Err(e) = sender + .send_template_approved(&template_clone, &template_clone.creator_user_id) + .instrument(span) + .await + { + tracing::warn!("Failed to send template approval webhook: {:?}", e); + // Log but don't block - approval already persisted + } + } + Err(e) => { + tracing::warn!("Webhook sender config not available: {}", e); + // Gracefully handle missing config + } + } + }); + + Ok(JsonResponse::::build().ok("Approved")) } #[tracing::instrument(name = "Reject template (admin)")] @@ -58,15 +97,41 @@ pub async fn reject_handler( let id = uuid::Uuid::parse_str(&path.into_inner().0) .map_err(|_| actix_web::error::ErrorBadRequest("Invalid UUID"))?; let req = body.into_inner(); + let updated = db::marketplace::admin_decide(pg_pool.get_ref(), &id, &admin.id, "rejected", req.reason.as_deref()) .await .map_err(|err| JsonResponse::::build().internal_server_error(err))?; - if updated { - Ok(JsonResponse::::build().ok("Rejected")) - } else { - Err(JsonResponse::::build().bad_request("Not updated")) + if !updated { + return Err(JsonResponse::::build().bad_request("Not updated")); } + + // Send webhook asynchronously (non-blocking) + // Don't fail the rejection if webhook send fails - template is already rejected + let template_id = id.to_string(); + tokio::spawn(async move { + match WebhookSenderConfig::from_env() { + Ok(config) => { + let sender = MarketplaceWebhookSender::new(config); + let span = tracing::info_span!("send_rejection_webhook", template_id = %template_id); + + if let Err(e) = sender + .send_template_rejected(&template_id) + .instrument(span) + .await + { + tracing::warn!("Failed to send template rejection webhook: {:?}", e); + // Log but don't block - rejection already persisted + } + } + Err(e) => { + tracing::warn!("Webhook sender config not available: {}", e); + // Gracefully handle missing config + } + } + }); + + Ok(JsonResponse::::build().ok("Rejected")) } #[tracing::instrument(name = "List available plans from User Service", skip(user_service))] #[get("/plans")] From e1cd930914eadec06ebc7a8f4424a32cb3f1a3d0 Mon Sep 17 00:00:00 2001 From: vsilent Date: Thu, 1 Jan 2026 15:52:25 +0200 Subject: [PATCH 26/35] marketplace + product + tests --- migrations/20251227000000_casbin_root_admin_group.up.sql | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/migrations/20251227000000_casbin_root_admin_group.up.sql b/migrations/20251227000000_casbin_root_admin_group.up.sql index d13cc20..8e2fd9b 100644 --- a/migrations/20251227000000_casbin_root_admin_group.up.sql +++ b/migrations/20251227000000_casbin_root_admin_group.up.sql @@ -1,3 +1,5 @@ -- Add root group assigned to group_admin for external application access -INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) -VALUES ('g', 'root', 'group_admin', '', '', '', ''); +-- Idempotent insert; ignore if the mapping already exists +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) +VALUES ('g', 'root', 'group_admin', '', '', '', '') +ON CONFLICT DO NOTHING; From fc423b61ace1bf5818b5c423693e6b72217ce1c1 Mon Sep 17 00:00:00 2001 From: vsilent Date: Thu, 1 Jan 2026 21:24:19 +0200 Subject: [PATCH 27/35] root inherits user rights --- .../20260101090000_casbin_admin_inherits_user.down.sql | 9 +++++++++ .../20260101090000_casbin_admin_inherits_user.up.sql | 4 ++++ 2 files changed, 13 insertions(+) create mode 100644 migrations/20260101090000_casbin_admin_inherits_user.down.sql create mode 100644 migrations/20260101090000_casbin_admin_inherits_user.up.sql diff --git a/migrations/20260101090000_casbin_admin_inherits_user.down.sql b/migrations/20260101090000_casbin_admin_inherits_user.down.sql new file mode 100644 index 0000000..3e60867 --- /dev/null +++ b/migrations/20260101090000_casbin_admin_inherits_user.down.sql @@ -0,0 +1,9 @@ +-- Remove the inheritance edge if rolled back +DELETE FROM public.casbin_rule +WHERE ptype = 'g' + AND v0 = 'group_admin' + AND v1 = 'group_user' + AND (v2 = '' OR v2 IS NULL) + AND (v3 = '' OR v3 IS NULL) + AND (v4 = '' OR v4 IS NULL) + AND (v5 = '' OR v5 IS NULL); diff --git a/migrations/20260101090000_casbin_admin_inherits_user.up.sql b/migrations/20260101090000_casbin_admin_inherits_user.up.sql new file mode 100644 index 0000000..7d34d4e --- /dev/null +++ b/migrations/20260101090000_casbin_admin_inherits_user.up.sql @@ -0,0 +1,4 @@ +-- Ensure group_admin inherits group_user so admin (and root) receive user permissions +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) +VALUES ('g', 'group_admin', 'group_user', '', '', '', '') +ON CONFLICT DO NOTHING; From 105628eae2fe6bd34879c4526e94badfbe4265ee Mon Sep 17 00:00:00 2001 From: vsilent Date: Fri, 2 Jan 2026 15:22:23 +0200 Subject: [PATCH 28/35] category sync/category_code instead of category_id --- TODO.md | 81 +++++ ...0260102120000_add_category_fields.down.sql | 7 + .../20260102120000_add_category_fields.up.sql | 7 + src/db/marketplace.rs | 301 +++++++++++------- src/models/marketplace.rs | 10 +- src/routes/marketplace/admin.rs | 2 +- src/routes/marketplace/creator.rs | 8 +- src/startup.rs | 3 +- 8 files changed, 302 insertions(+), 117 deletions(-) create mode 100644 migrations/20260102120000_add_category_fields.down.sql create mode 100644 migrations/20260102120000_add_category_fields.up.sql diff --git a/TODO.md b/TODO.md index f799d67..27b2511 100644 --- a/TODO.md +++ b/TODO.md @@ -3,6 +3,11 @@ ## Context Per [PAYMENT_MODEL.md](/PAYMENT_MODEL.md), Stacker now sends webhooks to User Service when templates are published/updated. User Service owns the `products` table for monetization, while Stacker owns `stack_template` (template definitions only). +### Nginx Proxy Routing +**Browser → Stacker** (via nginx): `https://dev.try.direct/stacker/` → `stacker:8000` +**Stacker → User Service** (internal): `http://user:4100/marketplace/sync` (no nginx prefix) +**Stacker → Payment Service** (internal): `http://payment:8000/` (no nginx prefix) + Stacker responsibilities: 1. **Maintain `stack_template` table** (template definitions, no pricing/monetization) 2. **Send webhook to User Service** when template status changes (approved, updated, rejected) @@ -11,12 +16,86 @@ Stacker responsibilities: ## Tasks +### 0. Setup ACL Rules Migration (User Service) +**File**: `migrations/setup_acl_rules.py` (in Stacker repo) + +**Purpose**: Automatically configure Casbin ACL rules in User Service for Stacker endpoints + +**Required Casbin rules** (to be inserted in User Service `casbin_rule` table): +```python +# Allow root/admin to manage marketplace templates via Stacker +rules = [ + ('p', 'root', '/templates', 'POST', '', '', ''), # Create template + ('p', 'root', '/templates', 'GET', '', '', ''), # List templates + ('p', 'root', '/templates/*', 'GET', '', '', ''), # View template + ('p', 'root', '/templates/*', 'PUT', '', '', ''), # Update template + ('p', 'root', '/templates/*', 'DELETE', '', '', ''), # Delete template + ('p', 'admin', '/templates', 'POST', '', '', ''), + ('p', 'admin', '/templates', 'GET', '', '', ''), + ('p', 'admin', '/templates/*', 'GET', '', '', ''), + ('p', 'admin', '/templates/*', 'PUT', '', '', ''), + ('p', 'developer', '/templates', 'POST', '', '', ''), # Developers can create + ('p', 'developer', '/templates', 'GET', '', '', ''), # Developers can list own +] +``` + +**Implementation**: +- Run as part of Stacker setup/init +- Connect to User Service database +- Insert rules if not exist (idempotent) +- **Status**: NOT STARTED +- **Priority**: HIGH (Blocks template creation via Stack Builder) +- **ETA**: 30 minutes + +### 0.5. Add Category Table Fields & Sync (Stacker) +**File**: `migrations/add_category_fields.py` (in Stacker repo) + +**Purpose**: Add missing fields to Stacker's local `category` table and sync from User Service + +**Migration Steps**: +1. Add `title VARCHAR(255)` column to `category` table (currently only has `id`, `name`) +2. Add `metadata JSONB` column for flexible category data +3. Create `UserServiceConnector.sync_categories()` method +4. On application startup: Fetch categories from User Service `GET http://user:4100/api/1.0/category` +5. Populate/update local `category` table: + - Map User Service `name` → Stacker `name` (code) + - Map User Service `title` → Stacker `title` + - Store additional data in `metadata` JSONB + +**Example sync**: +```python +# User Service category +{"_id": 5, "name": "ai", "title": "AI Agents", "priority": 5} + +# Stacker local category (after sync) +{"id": 5, "name": "ai", "title": "AI Agents", "metadata": {"priority": 5}} +``` + +**Status**: NOT STARTED +**Priority**: HIGH (Required for Stack Builder UI) +**ETA**: 1 hour + ### 1. Create User Service Connector **File**: `app//connectors/user_service_connector.py` (in Stacker repo) **Required methods**: ```python class UserServiceConnector: + def get_categories(self) -> list: + """ + GET http://user:4100/api/1.0/category + + Returns list of available categories for stack classification: + [ + {"_id": 1, "name": "cms", "title": "CMS", "priority": 1}, + {"_id": 2, "name": "ecommerce", "title": "E-commerce", "priority": 2}, + {"_id": 5, "name": "ai", "title": "AI Agents", "priority": 5} + ] + + Used by: Stack Builder UI to populate category dropdown + """ + pass + def get_user_profile(self, user_token: str) -> dict: """ GET http://user:4100/oauth_server/api/me @@ -89,6 +168,7 @@ class MarketplaceWebhookSender: "code": "ai-agent-stack-pro", "name": "AI Agent Stack Pro", "description": "Advanced AI agent deployment...", + "category_code": "ai", # String code from local category.name (not ID) "price": 99.99, "billing_cycle": "one_time", # or "monthly" "currency": "USD", @@ -105,6 +185,7 @@ class MarketplaceWebhookSender: 'code': stack_template.get('code'), 'name': stack_template.get('name'), 'description': stack_template.get('description'), + 'category_code': stack_template.get('category'), # String code (e.g., "ai", "cms") 'price': stack_template.get('price'), 'billing_cycle': stack_template.get('billing_cycle', 'one_time'), 'currency': stack_template.get('currency', 'USD'), diff --git a/migrations/20260102120000_add_category_fields.down.sql b/migrations/20260102120000_add_category_fields.down.sql new file mode 100644 index 0000000..7b8aa8f --- /dev/null +++ b/migrations/20260102120000_add_category_fields.down.sql @@ -0,0 +1,7 @@ +-- Remove title and metadata fields from stack_category +ALTER TABLE stack_category +DROP COLUMN IF EXISTS metadata, +DROP COLUMN IF EXISTS title; + +-- Drop the index +DROP INDEX IF EXISTS idx_stack_category_title; diff --git a/migrations/20260102120000_add_category_fields.up.sql b/migrations/20260102120000_add_category_fields.up.sql new file mode 100644 index 0000000..7a2646d --- /dev/null +++ b/migrations/20260102120000_add_category_fields.up.sql @@ -0,0 +1,7 @@ +-- Add title and metadata fields to stack_category for User Service sync +ALTER TABLE stack_category +ADD COLUMN IF NOT EXISTS title VARCHAR(255), +ADD COLUMN IF NOT EXISTS metadata JSONB DEFAULT '{}'::jsonb; + +-- Create index on title for display queries +CREATE INDEX IF NOT EXISTS idx_stack_category_title ON stack_category(title); diff --git a/src/db/marketplace.rs b/src/db/marketplace.rs index 29efc2e..69afaa3 100644 --- a/src/db/marketplace.rs +++ b/src/db/marketplace.rs @@ -1,35 +1,38 @@ -use crate::models::{StackTemplate, StackTemplateVersion}; +use crate::models::{StackTemplate, StackTemplateVersion, StackCategory}; use sqlx::PgPool; use tracing::Instrument; pub async fn list_approved(pool: &PgPool, category: Option<&str>, tag: Option<&str>, sort: Option<&str>) -> Result, String> { let mut base = String::from( r#"SELECT - id, - creator_user_id, - creator_name, - name, - slug, - short_description, - long_description, - category_id, - product_id, - tags, - tech_stack, - status, - is_configurable, - view_count, - deploy_count, - required_plan_name, - created_at, - updated_at, - approved_at - FROM stack_template - WHERE status = 'approved'"#, + t.id, + t.creator_user_id, + t.creator_name, + t.name, + t.slug, + t.short_description, + t.long_description, + c.name AS "category_code?", + t.product_id, + t.tags, + t.tech_stack, + t.status, + t.is_configurable, + t.view_count, + t.deploy_count, + t.required_plan_name, + t.created_at, + t.updated_at, + t.approved_at + FROM stack_template t + LEFT JOIN stack_category c ON t.category_id = c.id + WHERE t.slug = $1 AND t.status = 'approved'"#, + LEFT JOIN stack_category c ON t.category_id = c.id + WHERE t.status = 'approved'"#, ); if category.is_some() { - base.push_str(" AND category_id = (SELECT id FROM stack_category WHERE name = $1)"); + base.push_str(" AND c.name = $1"); } if tag.is_some() { base.push_str(r" AND tags \? $2"); @@ -81,26 +84,28 @@ pub async fn get_by_slug_with_latest(pool: &PgPool, slug: &str) -> Result<(Stack let template = sqlx::query_as!( StackTemplate, r#"SELECT - id, - creator_user_id, - creator_name, - name, - slug, - short_description, - long_description, - category_id, - product_id, - tags, - tech_stack, - status, - is_configurable, - view_count, - deploy_count, - required_plan_name, - created_at, - updated_at, - approved_at - FROM stack_template WHERE slug = $1 AND status = 'approved'"#, + t.id, + t.creator_user_id, + t.creator_name, + t.name, + t.slug, + t.short_description, + t.long_description, + c.name AS "category_code?", + t.product_id, + t.tags, + t.tech_stack, + t.status, + t.is_configurable, + t.view_count, + t.deploy_count, + t.required_plan_name, + t.created_at, + t.updated_at, + t.approved_at + FROM stack_template t + LEFT JOIN stack_category c ON t.category_id = c.id + WHERE t.slug = $1 AND t.status = 'approved'"#, slug ) .fetch_one(pool) @@ -142,26 +147,28 @@ pub async fn get_by_id(pool: &PgPool, template_id: uuid::Uuid) -> Result, long_description: Option<&str>, - category_id: Option, + category_code: Option<&str>, tags: serde_json::Value, tech_stack: serde_json::Value, ) -> Result { @@ -195,7 +202,7 @@ pub async fn create_draft( creator_user_id, creator_name, name, slug, short_description, long_description, category_id, tags, tech_stack, status - ) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,'draft') + ) VALUES ($1,$2,$3,$4,$5,$6,(SELECT id FROM stack_category WHERE name = $7),$8,$9,'draft') RETURNING id, creator_user_id, @@ -204,7 +211,7 @@ pub async fn create_draft( slug, short_description, long_description, - category_id, + (SELECT name FROM stack_category WHERE id = category_id) AS "category_code?", product_id, tags, tech_stack, @@ -223,7 +230,7 @@ pub async fn create_draft( slug, short_description, long_description, - category_id, + category_code, tags, tech_stack ) @@ -277,7 +284,7 @@ pub async fn set_latest_version(pool: &PgPool, template_id: &uuid::Uuid, version Ok(rec) } -pub async fn update_metadata(pool: &PgPool, template_id: &uuid::Uuid, name: Option<&str>, short_description: Option<&str>, long_description: Option<&str>, category_id: Option, tags: Option, tech_stack: Option) -> Result { +pub async fn update_metadata(pool: &PgPool, template_id: &uuid::Uuid, name: Option<&str>, short_description: Option<&str>, long_description: Option<&str>, category_code: Option<&str>, tags: Option, tech_stack: Option) -> Result { let query_span = tracing::info_span!("marketplace_update_metadata", template_id = %template_id); // Update only allowed statuses @@ -302,7 +309,7 @@ pub async fn update_metadata(pool: &PgPool, template_id: &uuid::Uuid, name: Opti name = COALESCE($2, name), short_description = COALESCE($3, short_description), long_description = COALESCE($4, long_description), - category_id = COALESCE($5, category_id), + category_id = COALESCE((SELECT id FROM stack_category WHERE name = $5), category_id), tags = COALESCE($6, tags), tech_stack = COALESCE($7, tech_stack) WHERE id = $1::uuid"#, @@ -310,7 +317,7 @@ pub async fn update_metadata(pool: &PgPool, template_id: &uuid::Uuid, name: Opti name, short_description, long_description, - category_id, + category_code, tags, tech_stack ) @@ -349,26 +356,29 @@ pub async fn list_mine(pool: &PgPool, user_id: &str) -> Result Result, S sqlx::query_as!( StackTemplate, r#"SELECT - id, - creator_user_id, - creator_name, - name, - slug, - short_description, - long_description, - category_id, - product_id, - tags, - tech_stack, - status, - is_configurable, - view_count, - deploy_count, - required_plan_name, - created_at, - updated_at, - approved_at - FROM stack_template WHERE status = 'submitted' ORDER BY created_at ASC"# + t.id, + t.creator_user_id, + t.creator_name, + t.name, + t.slug, + t.short_description, + t.long_description, + c.name AS "category_code?", + t.product_id, + t.tags, + t.tech_stack, + t.status, + t.is_configurable, + t.view_count, + t.deploy_count, + t.required_plan_name, + t.created_at, + t.updated_at, + t.approved_at + FROM stack_template t + LEFT JOIN stack_category c ON t.category_id = c.id + WHERE t.status = 'submitted' + ORDER BY t.created_at ASC"# ) .fetch_all(pool) .instrument(query_span) @@ -466,3 +479,71 @@ pub async fn admin_decide(pool: &PgPool, template_id: &uuid::Uuid, reviewer_user Ok(true) } + +/// Sync categories from User Service to local mirror +/// Upserts category data (id, name, title, metadata) +pub async fn sync_categories( + pool: &PgPool, + categories: Vec, +) -> Result { + let query_span = tracing::info_span!("sync_categories", count = categories.len()); + let _enter = query_span.enter(); + + if categories.is_empty() { + tracing::info!("No categories to sync"); + return Ok(0); + } + + let mut synced_count = 0; + + for category in categories { + // Use INSERT ... ON CONFLICT DO UPDATE to upsert + let result = sqlx::query( + r#" + INSERT INTO stack_category (id, name, title, metadata) + VALUES ($1, $2, $3, $4) + ON CONFLICT (id) DO UPDATE + SET name = EXCLUDED.name, + title = EXCLUDED.title, + metadata = EXCLUDED.metadata + "# + ) + .bind(category.id) + .bind(&category.name) + .bind(&category.title) + .bind(serde_json::json!({"priority": category.priority})) + .execute(pool) + .await + .map_err(|e| { + tracing::error!("Failed to sync category {}: {:?}", category.name, e); + format!("Failed to sync category: {}", e) + })?; + + if result.rows_affected() > 0 { + synced_count += 1; + } + } + + tracing::info!("Synced {} categories from User Service", synced_count); + Ok(synced_count) +} + +/// Get all categories from local mirror +pub async fn get_categories(pool: &PgPool) -> Result, String> { + let query_span = tracing::info_span!("get_categories"); + + sqlx::query_as::<_, StackCategory>( + r#" + SELECT id, name, title, metadata + FROM stack_category + ORDER BY id + "# + ) + .fetch_all(pool) + .instrument(query_span) + .await + .map_err(|e| { + tracing::error!("Failed to fetch categories: {:?}", e); + "Internal Server Error".to_string() + }) +} diff --git a/src/models/marketplace.rs b/src/models/marketplace.rs index ad1f3ea..366e2e9 100644 --- a/src/models/marketplace.rs +++ b/src/models/marketplace.rs @@ -2,6 +2,14 @@ use chrono::{DateTime, Utc}; use serde_derive::{Deserialize, Serialize}; use uuid::Uuid; +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, sqlx::FromRow)] +pub struct StackCategory { + pub id: i32, + pub name: String, + pub title: Option, + pub metadata: Option, +} + #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, sqlx::FromRow)] pub struct StackTemplate { pub id: Uuid, @@ -11,7 +19,7 @@ pub struct StackTemplate { pub slug: String, pub short_description: Option, pub long_description: Option, - pub category_id: Option, + pub category_code: Option, pub product_id: Option, pub tags: serde_json::Value, pub tech_stack: serde_json::Value, diff --git a/src/routes/marketplace/admin.rs b/src/routes/marketplace/admin.rs index 0119f7e..302556d 100644 --- a/src/routes/marketplace/admin.rs +++ b/src/routes/marketplace/admin.rs @@ -68,7 +68,7 @@ pub async fn approve_handler( let span = tracing::info_span!("send_approval_webhook", template_id = %template_clone.id); if let Err(e) = sender - .send_template_approved(&template_clone, &template_clone.creator_user_id) + .send_template_approved(&template_clone, &template_clone.creator_user_id, template_clone.category_code.clone()) .instrument(span) .await { diff --git a/src/routes/marketplace/creator.rs b/src/routes/marketplace/creator.rs index 2c4d043..79363b9 100644 --- a/src/routes/marketplace/creator.rs +++ b/src/routes/marketplace/creator.rs @@ -12,7 +12,7 @@ pub struct CreateTemplateRequest { pub slug: String, pub short_description: Option, pub long_description: Option, - pub category_id: Option, + pub category_code: Option, pub tags: Option, pub tech_stack: Option, pub version: Option, @@ -41,7 +41,7 @@ pub async fn create_handler( &req.slug, req.short_description.as_deref(), req.long_description.as_deref(), - req.category_id, + req.category_code.as_deref(), tags, tech_stack, ) @@ -70,7 +70,7 @@ pub struct UpdateTemplateRequest { pub name: Option, pub short_description: Option, pub long_description: Option, - pub category_id: Option, + pub category_code: Option, pub tags: Option, pub tech_stack: Option, } @@ -107,7 +107,7 @@ pub async fn update_handler( req.name.as_deref(), req.short_description.as_deref(), req.long_description.as_deref(), - req.category_id, + req.category_code.as_deref(), req.tags, req.tech_stack, ) diff --git a/src/startup.rs b/src/startup.rs index 5e43401..1cbf6fb 100644 --- a/src/startup.rs +++ b/src/startup.rs @@ -30,7 +30,8 @@ pub async fn run( let mcp_registry = web::Data::new(mcp_registry); // Initialize external service connectors (plugin pattern) - let user_service_connector = connectors::init_user_service(&settings.connectors); + // Connector handles category sync on startup + let user_service_connector = connectors::init_user_service(&settings.connectors, pg_pool.clone()); let authorization = middleware::authorization::try_new(settings.database.connection_string()).await?; From 550c1efd37a5c58bfed12b3fece7a80ddcf6d86b Mon Sep 17 00:00:00 2001 From: vsilent Date: Fri, 2 Jan 2026 16:30:22 +0200 Subject: [PATCH 29/35] access categories --- ...bcfe5f968b31500e8c8cf97fe16814bc04164.json | 20 +++++ ...766573c91b2775a086c65bc9a5fdc91300bb0.json | 17 +++++ ...36247a328db780a48da47c9402e1d3ebd80c9.json | 12 +++ ...44610fb79a1b9330730c65953f0c1b88c2a53.json | 20 +++++ ...e78f2a23eff67925322bdd3646d063d710584.json | 62 +++++++++++++++ ...806b4c78b7aa2a9609c4eccb941c7dff7b107.json | 12 +++ ...7cb75a999041a3eb6a8f8177bebfa3c30d56f.json | 16 ++++ ...b89853785c32a5f83cb0b25609329c760428a.json | 19 +++++ ...043ceee664f67752c41bf06df6e51ed69362.json} | 12 +-- ...35b962e41b4e5b49d20e9d5fee3da051aeba.json} | 10 +-- ...faae78671d69c8935d2a2d57c0f9d1e91e832.json | 75 +++++++++++++++++++ ...6cc32d0e3ebc0611bd69013b6c3aa240b674.json} | 10 +-- ...ca951c761f6b9abd6c70158000e0c03ca7c7.json} | 10 +-- ...388884b133c79da6ed1a5809a3ca64f48f97.json} | 6 +- ...9d8ed8688d70ac5fcceaf41e1671f75dbaa8.json} | 10 +-- ...226ba97993ede9988a4c57d58bd066500a119.json | 20 +++++ ...21e00c42a3fad8082cf15c2af88cd8388f41b.json | 18 +++++ ...b37d46c5a2f4202e1b8dce1f66a65069beb0b.json | 15 ++++ ...102140000_casbin_categories_rules.down.sql | 4 + ...60102140000_casbin_categories_rules.up.sql | 6 ++ src/db/marketplace.rs | 10 +-- 21 files changed, 349 insertions(+), 35 deletions(-) create mode 100644 .sqlx/query-1cabd2f674da323da9e0da724d3bcfe5f968b31500e8c8cf97fe16814bc04164.json create mode 100644 .sqlx/query-1f299262f01a2c9d2ee94079a12766573c91b2775a086c65bc9a5fdc91300bb0.json create mode 100644 .sqlx/query-24876462291b90324dfe3682e9f36247a328db780a48da47c9402e1d3ebd80c9.json create mode 100644 .sqlx/query-2872b56bbc5bed96b1a303bf9cf44610fb79a1b9330730c65953f0c1b88c2a53.json create mode 100644 .sqlx/query-3ae7e28de7cb8896086c186dbc0e78f2a23eff67925322bdd3646d063d710584.json create mode 100644 .sqlx/query-438ee38e669be96e562d09d3bc5806b4c78b7aa2a9609c4eccb941c7dff7b107.json create mode 100644 .sqlx/query-4acfe0086a593b08177791bb3b47cb75a999041a3eb6a8f8177bebfa3c30d56f.json create mode 100644 .sqlx/query-4e7b82d256f7298564f46af6a45b89853785c32a5f83cb0b25609329c760428a.json rename .sqlx/{query-8e992908d43e75c0abb85fac1e3f2a8437cded8f1c6215c3e4a4fec2ed933643.json => query-4ed4ce17b28e36898d9afabb96b7043ceee664f67752c41bf06df6e51ed69362.json} (74%) rename .sqlx/{query-9adfcae76ff8e0f638a3da310e7eaf9d754d9f1d4a18121eb56d9a451b817fdf.json => query-4fbb395f2080f29291ea091d2c4135b962e41b4e5b49d20e9d5fee3da051aeba.json} (71%) create mode 100644 .sqlx/query-530d3f59ba6d986d3354242ff25faae78671d69c8935d2a2d57c0f9d1e91e832.json rename .sqlx/{query-95c4b45907793ae202a5ef3d9c829fbcfae670cbd222c492ffc9508ea96588e6.json => query-722e059fca26aa3be81451ef5e266cc32d0e3ebc0611bd69013b6c3aa240b674.json} (72%) rename .sqlx/{query-0612f433190f8ba51c17f57d6da2db5ba2061ba4fb0604caef24943d936ad45d.json => query-970e2fc198c379a19849c4621adeca951c761f6b9abd6c70158000e0c03ca7c7.json} (72%) rename .sqlx/{query-cdb14c0aad0a5dbc45504608f820246da9fcfc2e680937b66bb8aa3e24c9dd1f.json => query-d81dbcf77d096403614b80165d66388884b133c79da6ed1a5809a3ca64f48f97.json} (56%) rename .sqlx/{query-0a87c8c8bbe3c8d23b41d5929e6862aacd9e2b56c57668f2dc1b6e3c771ee48d.json => query-e5956a76c15941c58fc9acb3886c9d8ed8688d70ac5fcceaf41e1671f75dbaa8.json} (71%) create mode 100644 .sqlx/query-f130c22d14ee2a99b9220ac1a45226ba97993ede9988a4c57d58bd066500a119.json create mode 100644 .sqlx/query-f8611a862ed1d3b982e8aa5ccab21e00c42a3fad8082cf15c2af88cd8388f41b.json create mode 100644 .sqlx/query-fa51ae7af271fc17c848694fbf1b37d46c5a2f4202e1b8dce1f66a65069beb0b.json create mode 100644 migrations/20260102140000_casbin_categories_rules.down.sql create mode 100644 migrations/20260102140000_casbin_categories_rules.up.sql diff --git a/.sqlx/query-1cabd2f674da323da9e0da724d3bcfe5f968b31500e8c8cf97fe16814bc04164.json b/.sqlx/query-1cabd2f674da323da9e0da724d3bcfe5f968b31500e8c8cf97fe16814bc04164.json new file mode 100644 index 0000000..eb3a84f --- /dev/null +++ b/.sqlx/query-1cabd2f674da323da9e0da724d3bcfe5f968b31500e8c8cf97fe16814bc04164.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "INSERT INTO casbin_rule ( ptype, v0, v1, v2, v3, v4, v5 )\n VALUES ( $1, $2, $3, $4, $5, $6, $7 )", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Varchar" + ] + }, + "nullable": [] + }, + "hash": "1cabd2f674da323da9e0da724d3bcfe5f968b31500e8c8cf97fe16814bc04164" +} diff --git a/.sqlx/query-1f299262f01a2c9d2ee94079a12766573c91b2775a086c65bc9a5fdc91300bb0.json b/.sqlx/query-1f299262f01a2c9d2ee94079a12766573c91b2775a086c65bc9a5fdc91300bb0.json new file mode 100644 index 0000000..1ea12e3 --- /dev/null +++ b/.sqlx/query-1f299262f01a2c9d2ee94079a12766573c91b2775a086c65bc9a5fdc91300bb0.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n (v3 is NULL OR v3 = COALESCE($2,v3)) AND\n (v4 is NULL OR v4 = COALESCE($3,v4)) AND\n (v5 is NULL OR v5 = COALESCE($4,v5))", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Varchar", + "Varchar", + "Varchar" + ] + }, + "nullable": [] + }, + "hash": "1f299262f01a2c9d2ee94079a12766573c91b2775a086c65bc9a5fdc91300bb0" +} diff --git a/.sqlx/query-24876462291b90324dfe3682e9f36247a328db780a48da47c9402e1d3ebd80c9.json b/.sqlx/query-24876462291b90324dfe3682e9f36247a328db780a48da47c9402e1d3ebd80c9.json new file mode 100644 index 0000000..8046c5d --- /dev/null +++ b/.sqlx/query-24876462291b90324dfe3682e9f36247a328db780a48da47c9402e1d3ebd80c9.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM casbin_rule", + "describe": { + "columns": [], + "parameters": { + "Left": [] + }, + "nullable": [] + }, + "hash": "24876462291b90324dfe3682e9f36247a328db780a48da47c9402e1d3ebd80c9" +} diff --git a/.sqlx/query-2872b56bbc5bed96b1a303bf9cf44610fb79a1b9330730c65953f0c1b88c2a53.json b/.sqlx/query-2872b56bbc5bed96b1a303bf9cf44610fb79a1b9330730c65953f0c1b88c2a53.json new file mode 100644 index 0000000..e246e53 --- /dev/null +++ b/.sqlx/query-2872b56bbc5bed96b1a303bf9cf44610fb79a1b9330730c65953f0c1b88c2a53.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n v0 = $2 AND\n v1 = $3 AND\n v2 = $4 AND\n v3 = $5 AND\n v4 = $6 AND\n v5 = $7", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text" + ] + }, + "nullable": [] + }, + "hash": "2872b56bbc5bed96b1a303bf9cf44610fb79a1b9330730c65953f0c1b88c2a53" +} diff --git a/.sqlx/query-3ae7e28de7cb8896086c186dbc0e78f2a23eff67925322bdd3646d063d710584.json b/.sqlx/query-3ae7e28de7cb8896086c186dbc0e78f2a23eff67925322bdd3646d063d710584.json new file mode 100644 index 0000000..6f82475 --- /dev/null +++ b/.sqlx/query-3ae7e28de7cb8896086c186dbc0e78f2a23eff67925322bdd3646d063d710584.json @@ -0,0 +1,62 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT id, ptype, v0, v1, v2, v3, v4, v5 FROM casbin_rule", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "ptype", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "v0", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "v1", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "v2", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "v3", + "type_info": "Varchar" + }, + { + "ordinal": 6, + "name": "v4", + "type_info": "Varchar" + }, + { + "ordinal": 7, + "name": "v5", + "type_info": "Varchar" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false + ] + }, + "hash": "3ae7e28de7cb8896086c186dbc0e78f2a23eff67925322bdd3646d063d710584" +} diff --git a/.sqlx/query-438ee38e669be96e562d09d3bc5806b4c78b7aa2a9609c4eccb941c7dff7b107.json b/.sqlx/query-438ee38e669be96e562d09d3bc5806b4c78b7aa2a9609c4eccb941c7dff7b107.json new file mode 100644 index 0000000..75c6da3 --- /dev/null +++ b/.sqlx/query-438ee38e669be96e562d09d3bc5806b4c78b7aa2a9609c4eccb941c7dff7b107.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "CREATE TABLE IF NOT EXISTS casbin_rule (\n id SERIAL PRIMARY KEY,\n ptype VARCHAR NOT NULL,\n v0 VARCHAR NOT NULL,\n v1 VARCHAR NOT NULL,\n v2 VARCHAR NOT NULL,\n v3 VARCHAR NOT NULL,\n v4 VARCHAR NOT NULL,\n v5 VARCHAR NOT NULL,\n CONSTRAINT unique_key_sqlx_adapter UNIQUE(ptype, v0, v1, v2, v3, v4, v5)\n );\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [] + }, + "nullable": [] + }, + "hash": "438ee38e669be96e562d09d3bc5806b4c78b7aa2a9609c4eccb941c7dff7b107" +} diff --git a/.sqlx/query-4acfe0086a593b08177791bb3b47cb75a999041a3eb6a8f8177bebfa3c30d56f.json b/.sqlx/query-4acfe0086a593b08177791bb3b47cb75a999041a3eb6a8f8177bebfa3c30d56f.json new file mode 100644 index 0000000..ce229dc --- /dev/null +++ b/.sqlx/query-4acfe0086a593b08177791bb3b47cb75a999041a3eb6a8f8177bebfa3c30d56f.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n (v4 is NULL OR v4 = COALESCE($2,v4)) AND\n (v5 is NULL OR v5 = COALESCE($3,v5))", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Varchar", + "Varchar" + ] + }, + "nullable": [] + }, + "hash": "4acfe0086a593b08177791bb3b47cb75a999041a3eb6a8f8177bebfa3c30d56f" +} diff --git a/.sqlx/query-4e7b82d256f7298564f46af6a45b89853785c32a5f83cb0b25609329c760428a.json b/.sqlx/query-4e7b82d256f7298564f46af6a45b89853785c32a5f83cb0b25609329c760428a.json new file mode 100644 index 0000000..4c4c1df --- /dev/null +++ b/.sqlx/query-4e7b82d256f7298564f46af6a45b89853785c32a5f83cb0b25609329c760428a.json @@ -0,0 +1,19 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n (v1 is NULL OR v1 = COALESCE($2,v1)) AND\n (v2 is NULL OR v2 = COALESCE($3,v2)) AND\n (v3 is NULL OR v3 = COALESCE($4,v3)) AND\n (v4 is NULL OR v4 = COALESCE($5,v4)) AND\n (v5 is NULL OR v5 = COALESCE($6,v5))", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Varchar" + ] + }, + "nullable": [] + }, + "hash": "4e7b82d256f7298564f46af6a45b89853785c32a5f83cb0b25609329c760428a" +} diff --git a/.sqlx/query-8e992908d43e75c0abb85fac1e3f2a8437cded8f1c6215c3e4a4fec2ed933643.json b/.sqlx/query-4ed4ce17b28e36898d9afabb96b7043ceee664f67752c41bf06df6e51ed69362.json similarity index 74% rename from .sqlx/query-8e992908d43e75c0abb85fac1e3f2a8437cded8f1c6215c3e4a4fec2ed933643.json rename to .sqlx/query-4ed4ce17b28e36898d9afabb96b7043ceee664f67752c41bf06df6e51ed69362.json index 0ed8fe7..c3f8828 100644 --- a/.sqlx/query-8e992908d43e75c0abb85fac1e3f2a8437cded8f1c6215c3e4a4fec2ed933643.json +++ b/.sqlx/query-4ed4ce17b28e36898d9afabb96b7043ceee664f67752c41bf06df6e51ed69362.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "INSERT INTO stack_template (\n creator_user_id, creator_name, name, slug,\n short_description, long_description, category_id,\n tags, tech_stack, status\n ) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,'draft')\n RETURNING \n id,\n creator_user_id,\n creator_name,\n name,\n slug,\n short_description,\n long_description,\n category_id,\n product_id,\n tags,\n tech_stack,\n status,\n is_configurable,\n view_count,\n deploy_count,\n required_plan_name,\n created_at,\n updated_at,\n approved_at\n ", + "query": "INSERT INTO stack_template (\n creator_user_id, creator_name, name, slug,\n short_description, long_description, category_id,\n tags, tech_stack, status\n ) VALUES ($1,$2,$3,$4,$5,$6,(SELECT id FROM stack_category WHERE name = $7),$8,$9,'draft')\n RETURNING \n id,\n creator_user_id,\n creator_name,\n name,\n slug,\n short_description,\n long_description,\n (SELECT name FROM stack_category WHERE id = category_id) AS \"category_code?\",\n product_id,\n tags,\n tech_stack,\n status,\n is_configurable,\n view_count,\n deploy_count,\n required_plan_name,\n created_at,\n updated_at,\n approved_at\n ", "describe": { "columns": [ { @@ -40,8 +40,8 @@ }, { "ordinal": 7, - "name": "category_id", - "type_info": "Int4" + "name": "category_code?", + "type_info": "Varchar" }, { "ordinal": 8, @@ -107,7 +107,7 @@ "Varchar", "Text", "Text", - "Int4", + "Text", "Jsonb", "Jsonb" ] @@ -120,7 +120,7 @@ false, true, true, - true, + null, true, true, true, @@ -134,5 +134,5 @@ true ] }, - "hash": "8e992908d43e75c0abb85fac1e3f2a8437cded8f1c6215c3e4a4fec2ed933643" + "hash": "4ed4ce17b28e36898d9afabb96b7043ceee664f67752c41bf06df6e51ed69362" } diff --git a/.sqlx/query-9adfcae76ff8e0f638a3da310e7eaf9d754d9f1d4a18121eb56d9a451b817fdf.json b/.sqlx/query-4fbb395f2080f29291ea091d2c4135b962e41b4e5b49d20e9d5fee3da051aeba.json similarity index 71% rename from .sqlx/query-9adfcae76ff8e0f638a3da310e7eaf9d754d9f1d4a18121eb56d9a451b817fdf.json rename to .sqlx/query-4fbb395f2080f29291ea091d2c4135b962e41b4e5b49d20e9d5fee3da051aeba.json index dfc34ca..49c82f0 100644 --- a/.sqlx/query-9adfcae76ff8e0f638a3da310e7eaf9d754d9f1d4a18121eb56d9a451b817fdf.json +++ b/.sqlx/query-4fbb395f2080f29291ea091d2c4135b962e41b4e5b49d20e9d5fee3da051aeba.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT \n id,\n creator_user_id,\n creator_name,\n name,\n slug,\n short_description,\n long_description,\n category_id,\n product_id,\n tags,\n tech_stack,\n status,\n is_configurable,\n view_count,\n deploy_count,\n required_plan_name,\n created_at,\n updated_at,\n approved_at\n FROM stack_template WHERE slug = $1 AND status = 'approved'", + "query": "SELECT \n t.id,\n t.creator_user_id,\n t.creator_name,\n t.name,\n t.slug,\n t.short_description,\n t.long_description,\n c.name AS \"category_code?\",\n t.product_id,\n t.tags,\n t.tech_stack,\n t.status,\n t.is_configurable,\n t.view_count,\n t.deploy_count,\n t.required_plan_name,\n t.created_at,\n t.updated_at,\n t.approved_at\n FROM stack_template t\n LEFT JOIN stack_category c ON t.category_id = c.id\n WHERE t.creator_user_id = $1\n ORDER BY t.created_at DESC", "describe": { "columns": [ { @@ -40,8 +40,8 @@ }, { "ordinal": 7, - "name": "category_id", - "type_info": "Int4" + "name": "category_code?", + "type_info": "Varchar" }, { "ordinal": 8, @@ -112,7 +112,7 @@ false, true, true, - true, + false, true, true, true, @@ -126,5 +126,5 @@ true ] }, - "hash": "9adfcae76ff8e0f638a3da310e7eaf9d754d9f1d4a18121eb56d9a451b817fdf" + "hash": "4fbb395f2080f29291ea091d2c4135b962e41b4e5b49d20e9d5fee3da051aeba" } diff --git a/.sqlx/query-530d3f59ba6d986d3354242ff25faae78671d69c8935d2a2d57c0f9d1e91e832.json b/.sqlx/query-530d3f59ba6d986d3354242ff25faae78671d69c8935d2a2d57c0f9d1e91e832.json new file mode 100644 index 0000000..d0df28a --- /dev/null +++ b/.sqlx/query-530d3f59ba6d986d3354242ff25faae78671d69c8935d2a2d57c0f9d1e91e832.json @@ -0,0 +1,75 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT id, ptype, v0, v1, v2, v3, v4, v5 from casbin_rule WHERE (\n ptype LIKE 'g%' AND v0 LIKE $1 AND v1 LIKE $2 AND v2 LIKE $3 AND v3 LIKE $4 AND v4 LIKE $5 AND v5 LIKE $6 )\n OR (\n ptype LIKE 'p%' AND v0 LIKE $7 AND v1 LIKE $8 AND v2 LIKE $9 AND v3 LIKE $10 AND v4 LIKE $11 AND v5 LIKE $12 );\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "ptype", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "v0", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "v1", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "v2", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "v3", + "type_info": "Varchar" + }, + { + "ordinal": 6, + "name": "v4", + "type_info": "Varchar" + }, + { + "ordinal": 7, + "name": "v5", + "type_info": "Varchar" + } + ], + "parameters": { + "Left": [ + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false + ] + }, + "hash": "530d3f59ba6d986d3354242ff25faae78671d69c8935d2a2d57c0f9d1e91e832" +} diff --git a/.sqlx/query-95c4b45907793ae202a5ef3d9c829fbcfae670cbd222c492ffc9508ea96588e6.json b/.sqlx/query-722e059fca26aa3be81451ef5e266cc32d0e3ebc0611bd69013b6c3aa240b674.json similarity index 72% rename from .sqlx/query-95c4b45907793ae202a5ef3d9c829fbcfae670cbd222c492ffc9508ea96588e6.json rename to .sqlx/query-722e059fca26aa3be81451ef5e266cc32d0e3ebc0611bd69013b6c3aa240b674.json index 377cf35..65bb611 100644 --- a/.sqlx/query-95c4b45907793ae202a5ef3d9c829fbcfae670cbd222c492ffc9508ea96588e6.json +++ b/.sqlx/query-722e059fca26aa3be81451ef5e266cc32d0e3ebc0611bd69013b6c3aa240b674.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT \n id,\n creator_user_id,\n creator_name,\n name,\n slug,\n short_description,\n long_description,\n category_id,\n product_id,\n tags,\n tech_stack,\n status,\n is_configurable,\n view_count,\n deploy_count,\n created_at,\n updated_at,\n approved_at,\n required_plan_name\n FROM stack_template WHERE id = $1", + "query": "SELECT \n t.id,\n t.creator_user_id,\n t.creator_name,\n t.name,\n t.slug,\n t.short_description,\n t.long_description,\n c.name AS \"category_code?\",\n t.product_id,\n t.tags,\n t.tech_stack,\n t.status,\n t.is_configurable,\n t.view_count,\n t.deploy_count,\n t.created_at,\n t.updated_at,\n t.approved_at,\n t.required_plan_name\n FROM stack_template t\n LEFT JOIN stack_category c ON t.category_id = c.id\n WHERE t.id = $1", "describe": { "columns": [ { @@ -40,8 +40,8 @@ }, { "ordinal": 7, - "name": "category_id", - "type_info": "Int4" + "name": "category_code?", + "type_info": "Varchar" }, { "ordinal": 8, @@ -112,7 +112,7 @@ false, true, true, - true, + false, true, true, true, @@ -126,5 +126,5 @@ true ] }, - "hash": "95c4b45907793ae202a5ef3d9c829fbcfae670cbd222c492ffc9508ea96588e6" + "hash": "722e059fca26aa3be81451ef5e266cc32d0e3ebc0611bd69013b6c3aa240b674" } diff --git a/.sqlx/query-0612f433190f8ba51c17f57d6da2db5ba2061ba4fb0604caef24943d936ad45d.json b/.sqlx/query-970e2fc198c379a19849c4621adeca951c761f6b9abd6c70158000e0c03ca7c7.json similarity index 72% rename from .sqlx/query-0612f433190f8ba51c17f57d6da2db5ba2061ba4fb0604caef24943d936ad45d.json rename to .sqlx/query-970e2fc198c379a19849c4621adeca951c761f6b9abd6c70158000e0c03ca7c7.json index 98dc7fe..0b5b79f 100644 --- a/.sqlx/query-0612f433190f8ba51c17f57d6da2db5ba2061ba4fb0604caef24943d936ad45d.json +++ b/.sqlx/query-970e2fc198c379a19849c4621adeca951c761f6b9abd6c70158000e0c03ca7c7.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT \n id,\n creator_user_id,\n creator_name,\n name,\n slug,\n short_description,\n long_description,\n category_id,\n product_id,\n tags,\n tech_stack,\n status,\n is_configurable,\n view_count,\n deploy_count,\n required_plan_name,\n created_at,\n updated_at,\n approved_at\n FROM stack_template WHERE creator_user_id = $1 ORDER BY created_at DESC", + "query": "SELECT \n t.id,\n t.creator_user_id,\n t.creator_name,\n t.name,\n t.slug,\n t.short_description,\n t.long_description,\n c.name AS \"category_code?\",\n t.product_id,\n t.tags,\n t.tech_stack,\n t.status,\n t.is_configurable,\n t.view_count,\n t.deploy_count,\n t.required_plan_name,\n t.created_at,\n t.updated_at,\n t.approved_at\n FROM stack_template t\n LEFT JOIN stack_category c ON t.category_id = c.id\n WHERE t.slug = $1 AND t.status = 'approved'", "describe": { "columns": [ { @@ -40,8 +40,8 @@ }, { "ordinal": 7, - "name": "category_id", - "type_info": "Int4" + "name": "category_code?", + "type_info": "Varchar" }, { "ordinal": 8, @@ -112,7 +112,7 @@ false, true, true, - true, + false, true, true, true, @@ -126,5 +126,5 @@ true ] }, - "hash": "0612f433190f8ba51c17f57d6da2db5ba2061ba4fb0604caef24943d936ad45d" + "hash": "970e2fc198c379a19849c4621adeca951c761f6b9abd6c70158000e0c03ca7c7" } diff --git a/.sqlx/query-cdb14c0aad0a5dbc45504608f820246da9fcfc2e680937b66bb8aa3e24c9dd1f.json b/.sqlx/query-d81dbcf77d096403614b80165d66388884b133c79da6ed1a5809a3ca64f48f97.json similarity index 56% rename from .sqlx/query-cdb14c0aad0a5dbc45504608f820246da9fcfc2e680937b66bb8aa3e24c9dd1f.json rename to .sqlx/query-d81dbcf77d096403614b80165d66388884b133c79da6ed1a5809a3ca64f48f97.json index 5daaa04..769d0a5 100644 --- a/.sqlx/query-cdb14c0aad0a5dbc45504608f820246da9fcfc2e680937b66bb8aa3e24c9dd1f.json +++ b/.sqlx/query-d81dbcf77d096403614b80165d66388884b133c79da6ed1a5809a3ca64f48f97.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "UPDATE stack_template SET \n name = COALESCE($2, name),\n short_description = COALESCE($3, short_description),\n long_description = COALESCE($4, long_description),\n category_id = COALESCE($5, category_id),\n tags = COALESCE($6, tags),\n tech_stack = COALESCE($7, tech_stack)\n WHERE id = $1::uuid", + "query": "UPDATE stack_template SET \n name = COALESCE($2, name),\n short_description = COALESCE($3, short_description),\n long_description = COALESCE($4, long_description),\n category_id = COALESCE((SELECT id FROM stack_category WHERE name = $5), category_id),\n tags = COALESCE($6, tags),\n tech_stack = COALESCE($7, tech_stack)\n WHERE id = $1::uuid", "describe": { "columns": [], "parameters": { @@ -9,12 +9,12 @@ "Varchar", "Text", "Text", - "Int4", + "Text", "Jsonb", "Jsonb" ] }, "nullable": [] }, - "hash": "cdb14c0aad0a5dbc45504608f820246da9fcfc2e680937b66bb8aa3e24c9dd1f" + "hash": "d81dbcf77d096403614b80165d66388884b133c79da6ed1a5809a3ca64f48f97" } diff --git a/.sqlx/query-0a87c8c8bbe3c8d23b41d5929e6862aacd9e2b56c57668f2dc1b6e3c771ee48d.json b/.sqlx/query-e5956a76c15941c58fc9acb3886c9d8ed8688d70ac5fcceaf41e1671f75dbaa8.json similarity index 71% rename from .sqlx/query-0a87c8c8bbe3c8d23b41d5929e6862aacd9e2b56c57668f2dc1b6e3c771ee48d.json rename to .sqlx/query-e5956a76c15941c58fc9acb3886c9d8ed8688d70ac5fcceaf41e1671f75dbaa8.json index a59f80e..ee20b46 100644 --- a/.sqlx/query-0a87c8c8bbe3c8d23b41d5929e6862aacd9e2b56c57668f2dc1b6e3c771ee48d.json +++ b/.sqlx/query-e5956a76c15941c58fc9acb3886c9d8ed8688d70ac5fcceaf41e1671f75dbaa8.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT \n id,\n creator_user_id,\n creator_name,\n name,\n slug,\n short_description,\n long_description,\n category_id,\n product_id,\n tags,\n tech_stack,\n status,\n is_configurable,\n view_count,\n deploy_count,\n required_plan_name,\n created_at,\n updated_at,\n approved_at\n FROM stack_template WHERE status = 'submitted' ORDER BY created_at ASC", + "query": "SELECT \n t.id,\n t.creator_user_id,\n t.creator_name,\n t.name,\n t.slug,\n t.short_description,\n t.long_description,\n c.name AS \"category_code?\",\n t.product_id,\n t.tags,\n t.tech_stack,\n t.status,\n t.is_configurable,\n t.view_count,\n t.deploy_count,\n t.required_plan_name,\n t.created_at,\n t.updated_at,\n t.approved_at\n FROM stack_template t\n LEFT JOIN stack_category c ON t.category_id = c.id\n WHERE t.status = 'submitted'\n ORDER BY t.created_at ASC", "describe": { "columns": [ { @@ -40,8 +40,8 @@ }, { "ordinal": 7, - "name": "category_id", - "type_info": "Int4" + "name": "category_code?", + "type_info": "Varchar" }, { "ordinal": 8, @@ -110,7 +110,7 @@ false, true, true, - true, + false, true, true, true, @@ -124,5 +124,5 @@ true ] }, - "hash": "0a87c8c8bbe3c8d23b41d5929e6862aacd9e2b56c57668f2dc1b6e3c771ee48d" + "hash": "e5956a76c15941c58fc9acb3886c9d8ed8688d70ac5fcceaf41e1671f75dbaa8" } diff --git a/.sqlx/query-f130c22d14ee2a99b9220ac1a45226ba97993ede9988a4c57d58bd066500a119.json b/.sqlx/query-f130c22d14ee2a99b9220ac1a45226ba97993ede9988a4c57d58bd066500a119.json new file mode 100644 index 0000000..ef54cdb --- /dev/null +++ b/.sqlx/query-f130c22d14ee2a99b9220ac1a45226ba97993ede9988a4c57d58bd066500a119.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n (v0 is NULL OR v0 = COALESCE($2,v0)) AND\n (v1 is NULL OR v1 = COALESCE($3,v1)) AND\n (v2 is NULL OR v2 = COALESCE($4,v2)) AND\n (v3 is NULL OR v3 = COALESCE($5,v3)) AND\n (v4 is NULL OR v4 = COALESCE($6,v4)) AND\n (v5 is NULL OR v5 = COALESCE($7,v5))", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Varchar", + "Varchar" + ] + }, + "nullable": [] + }, + "hash": "f130c22d14ee2a99b9220ac1a45226ba97993ede9988a4c57d58bd066500a119" +} diff --git a/.sqlx/query-f8611a862ed1d3b982e8aa5ccab21e00c42a3fad8082cf15c2af88cd8388f41b.json b/.sqlx/query-f8611a862ed1d3b982e8aa5ccab21e00c42a3fad8082cf15c2af88cd8388f41b.json new file mode 100644 index 0000000..0daaa8a --- /dev/null +++ b/.sqlx/query-f8611a862ed1d3b982e8aa5ccab21e00c42a3fad8082cf15c2af88cd8388f41b.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n (v2 is NULL OR v2 = COALESCE($2,v2)) AND\n (v3 is NULL OR v3 = COALESCE($3,v3)) AND\n (v4 is NULL OR v4 = COALESCE($4,v4)) AND\n (v5 is NULL OR v5 = COALESCE($5,v5))", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Varchar", + "Varchar", + "Varchar", + "Varchar" + ] + }, + "nullable": [] + }, + "hash": "f8611a862ed1d3b982e8aa5ccab21e00c42a3fad8082cf15c2af88cd8388f41b" +} diff --git a/.sqlx/query-fa51ae7af271fc17c848694fbf1b37d46c5a2f4202e1b8dce1f66a65069beb0b.json b/.sqlx/query-fa51ae7af271fc17c848694fbf1b37d46c5a2f4202e1b8dce1f66a65069beb0b.json new file mode 100644 index 0000000..4a5f7e8 --- /dev/null +++ b/.sqlx/query-fa51ae7af271fc17c848694fbf1b37d46c5a2f4202e1b8dce1f66a65069beb0b.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n (v5 is NULL OR v5 = COALESCE($2,v5))", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Varchar" + ] + }, + "nullable": [] + }, + "hash": "fa51ae7af271fc17c848694fbf1b37d46c5a2f4202e1b8dce1f66a65069beb0b" +} diff --git a/migrations/20260102140000_casbin_categories_rules.down.sql b/migrations/20260102140000_casbin_categories_rules.down.sql new file mode 100644 index 0000000..4db07af --- /dev/null +++ b/migrations/20260102140000_casbin_categories_rules.down.sql @@ -0,0 +1,4 @@ +-- Rollback: Remove Casbin rules for Categories endpoint + +DELETE FROM public.casbin_rule +WHERE ptype = 'p' AND v1 = '/api/categories' AND v2 = 'GET'; diff --git a/migrations/20260102140000_casbin_categories_rules.up.sql b/migrations/20260102140000_casbin_categories_rules.up.sql new file mode 100644 index 0000000..b24dbc1 --- /dev/null +++ b/migrations/20260102140000_casbin_categories_rules.up.sql @@ -0,0 +1,6 @@ +-- Casbin rules for Categories endpoint +-- Categories are publicly readable for marketplace UI population + +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_anonymous', '/api/categories', 'GET', '', '', ''); +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/api/categories', 'GET', '', '', ''); +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/categories', 'GET', '', '', ''); diff --git a/src/db/marketplace.rs b/src/db/marketplace.rs index 69afaa3..8a3b2a8 100644 --- a/src/db/marketplace.rs +++ b/src/db/marketplace.rs @@ -26,8 +26,6 @@ pub async fn list_approved(pool: &PgPool, category: Option<&str>, tag: Option<&s t.approved_at FROM stack_template t LEFT JOIN stack_category c ON t.category_id = c.id - WHERE t.slug = $1 AND t.status = 'approved'"#, - LEFT JOIN stack_category c ON t.category_id = c.id WHERE t.status = 'approved'"#, ); @@ -35,13 +33,13 @@ pub async fn list_approved(pool: &PgPool, category: Option<&str>, tag: Option<&s base.push_str(" AND c.name = $1"); } if tag.is_some() { - base.push_str(r" AND tags \? $2"); + base.push_str(" AND t.tags ? $2"); } match sort.unwrap_or("recent") { - "popular" => base.push_str(" ORDER BY deploy_count DESC, view_count DESC"), - "rating" => base.push_str(" ORDER BY (SELECT AVG(rate) FROM rating WHERE rating.product_id = stack_template.product_id) DESC NULLS LAST"), - _ => base.push_str(" ORDER BY approved_at DESC NULLS LAST, created_at DESC"), + "popular" => base.push_str(" ORDER BY t.deploy_count DESC, t.view_count DESC"), + "rating" => base.push_str(" ORDER BY (SELECT AVG(rate) FROM rating WHERE rating.product_id = t.product_id) DESC NULLS LAST"), + _ => base.push_str(" ORDER BY t.approved_at DESC NULLS LAST, t.created_at DESC"), } let query_span = tracing::info_span!("marketplace_list_approved"); From 1e119457a97e7e6d39911726b3f673abe76056cb Mon Sep 17 00:00:00 2001 From: vsilent Date: Fri, 2 Jan 2026 16:38:27 +0200 Subject: [PATCH 30/35] categories endpoint --- ...bcfe5f968b31500e8c8cf97fe16814bc04164.json | 20 ----- ...766573c91b2775a086c65bc9a5fdc91300bb0.json | 17 ----- ...36247a328db780a48da47c9402e1d3ebd80c9.json | 12 --- ...44610fb79a1b9330730c65953f0c1b88c2a53.json | 20 ----- ...e78f2a23eff67925322bdd3646d063d710584.json | 62 --------------- ...806b4c78b7aa2a9609c4eccb941c7dff7b107.json | 12 --- ...7cb75a999041a3eb6a8f8177bebfa3c30d56f.json | 16 ---- ...b89853785c32a5f83cb0b25609329c760428a.json | 19 ----- ...faae78671d69c8935d2a2d57c0f9d1e91e832.json | 75 ------------------- ...226ba97993ede9988a4c57d58bd066500a119.json | 20 ----- ...21e00c42a3fad8082cf15c2af88cd8388f41b.json | 18 ----- ...b37d46c5a2f4202e1b8dce1f66a65069beb0b.json | 15 ---- src/routes/marketplace/mod.rs | 2 + src/startup.rs | 1 + 14 files changed, 3 insertions(+), 306 deletions(-) delete mode 100644 .sqlx/query-1cabd2f674da323da9e0da724d3bcfe5f968b31500e8c8cf97fe16814bc04164.json delete mode 100644 .sqlx/query-1f299262f01a2c9d2ee94079a12766573c91b2775a086c65bc9a5fdc91300bb0.json delete mode 100644 .sqlx/query-24876462291b90324dfe3682e9f36247a328db780a48da47c9402e1d3ebd80c9.json delete mode 100644 .sqlx/query-2872b56bbc5bed96b1a303bf9cf44610fb79a1b9330730c65953f0c1b88c2a53.json delete mode 100644 .sqlx/query-3ae7e28de7cb8896086c186dbc0e78f2a23eff67925322bdd3646d063d710584.json delete mode 100644 .sqlx/query-438ee38e669be96e562d09d3bc5806b4c78b7aa2a9609c4eccb941c7dff7b107.json delete mode 100644 .sqlx/query-4acfe0086a593b08177791bb3b47cb75a999041a3eb6a8f8177bebfa3c30d56f.json delete mode 100644 .sqlx/query-4e7b82d256f7298564f46af6a45b89853785c32a5f83cb0b25609329c760428a.json delete mode 100644 .sqlx/query-530d3f59ba6d986d3354242ff25faae78671d69c8935d2a2d57c0f9d1e91e832.json delete mode 100644 .sqlx/query-f130c22d14ee2a99b9220ac1a45226ba97993ede9988a4c57d58bd066500a119.json delete mode 100644 .sqlx/query-f8611a862ed1d3b982e8aa5ccab21e00c42a3fad8082cf15c2af88cd8388f41b.json delete mode 100644 .sqlx/query-fa51ae7af271fc17c848694fbf1b37d46c5a2f4202e1b8dce1f66a65069beb0b.json diff --git a/.sqlx/query-1cabd2f674da323da9e0da724d3bcfe5f968b31500e8c8cf97fe16814bc04164.json b/.sqlx/query-1cabd2f674da323da9e0da724d3bcfe5f968b31500e8c8cf97fe16814bc04164.json deleted file mode 100644 index eb3a84f..0000000 --- a/.sqlx/query-1cabd2f674da323da9e0da724d3bcfe5f968b31500e8c8cf97fe16814bc04164.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "INSERT INTO casbin_rule ( ptype, v0, v1, v2, v3, v4, v5 )\n VALUES ( $1, $2, $3, $4, $5, $6, $7 )", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Varchar", - "Varchar", - "Varchar", - "Varchar", - "Varchar", - "Varchar", - "Varchar" - ] - }, - "nullable": [] - }, - "hash": "1cabd2f674da323da9e0da724d3bcfe5f968b31500e8c8cf97fe16814bc04164" -} diff --git a/.sqlx/query-1f299262f01a2c9d2ee94079a12766573c91b2775a086c65bc9a5fdc91300bb0.json b/.sqlx/query-1f299262f01a2c9d2ee94079a12766573c91b2775a086c65bc9a5fdc91300bb0.json deleted file mode 100644 index 1ea12e3..0000000 --- a/.sqlx/query-1f299262f01a2c9d2ee94079a12766573c91b2775a086c65bc9a5fdc91300bb0.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n (v3 is NULL OR v3 = COALESCE($2,v3)) AND\n (v4 is NULL OR v4 = COALESCE($3,v4)) AND\n (v5 is NULL OR v5 = COALESCE($4,v5))", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Varchar", - "Varchar", - "Varchar" - ] - }, - "nullable": [] - }, - "hash": "1f299262f01a2c9d2ee94079a12766573c91b2775a086c65bc9a5fdc91300bb0" -} diff --git a/.sqlx/query-24876462291b90324dfe3682e9f36247a328db780a48da47c9402e1d3ebd80c9.json b/.sqlx/query-24876462291b90324dfe3682e9f36247a328db780a48da47c9402e1d3ebd80c9.json deleted file mode 100644 index 8046c5d..0000000 --- a/.sqlx/query-24876462291b90324dfe3682e9f36247a328db780a48da47c9402e1d3ebd80c9.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM casbin_rule", - "describe": { - "columns": [], - "parameters": { - "Left": [] - }, - "nullable": [] - }, - "hash": "24876462291b90324dfe3682e9f36247a328db780a48da47c9402e1d3ebd80c9" -} diff --git a/.sqlx/query-2872b56bbc5bed96b1a303bf9cf44610fb79a1b9330730c65953f0c1b88c2a53.json b/.sqlx/query-2872b56bbc5bed96b1a303bf9cf44610fb79a1b9330730c65953f0c1b88c2a53.json deleted file mode 100644 index e246e53..0000000 --- a/.sqlx/query-2872b56bbc5bed96b1a303bf9cf44610fb79a1b9330730c65953f0c1b88c2a53.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n v0 = $2 AND\n v1 = $3 AND\n v2 = $4 AND\n v3 = $5 AND\n v4 = $6 AND\n v5 = $7", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Text", - "Text", - "Text", - "Text", - "Text", - "Text" - ] - }, - "nullable": [] - }, - "hash": "2872b56bbc5bed96b1a303bf9cf44610fb79a1b9330730c65953f0c1b88c2a53" -} diff --git a/.sqlx/query-3ae7e28de7cb8896086c186dbc0e78f2a23eff67925322bdd3646d063d710584.json b/.sqlx/query-3ae7e28de7cb8896086c186dbc0e78f2a23eff67925322bdd3646d063d710584.json deleted file mode 100644 index 6f82475..0000000 --- a/.sqlx/query-3ae7e28de7cb8896086c186dbc0e78f2a23eff67925322bdd3646d063d710584.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT id, ptype, v0, v1, v2, v3, v4, v5 FROM casbin_rule", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - }, - { - "ordinal": 1, - "name": "ptype", - "type_info": "Varchar" - }, - { - "ordinal": 2, - "name": "v0", - "type_info": "Varchar" - }, - { - "ordinal": 3, - "name": "v1", - "type_info": "Varchar" - }, - { - "ordinal": 4, - "name": "v2", - "type_info": "Varchar" - }, - { - "ordinal": 5, - "name": "v3", - "type_info": "Varchar" - }, - { - "ordinal": 6, - "name": "v4", - "type_info": "Varchar" - }, - { - "ordinal": 7, - "name": "v5", - "type_info": "Varchar" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false, - false, - false, - false, - false, - false, - false, - false - ] - }, - "hash": "3ae7e28de7cb8896086c186dbc0e78f2a23eff67925322bdd3646d063d710584" -} diff --git a/.sqlx/query-438ee38e669be96e562d09d3bc5806b4c78b7aa2a9609c4eccb941c7dff7b107.json b/.sqlx/query-438ee38e669be96e562d09d3bc5806b4c78b7aa2a9609c4eccb941c7dff7b107.json deleted file mode 100644 index 75c6da3..0000000 --- a/.sqlx/query-438ee38e669be96e562d09d3bc5806b4c78b7aa2a9609c4eccb941c7dff7b107.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "CREATE TABLE IF NOT EXISTS casbin_rule (\n id SERIAL PRIMARY KEY,\n ptype VARCHAR NOT NULL,\n v0 VARCHAR NOT NULL,\n v1 VARCHAR NOT NULL,\n v2 VARCHAR NOT NULL,\n v3 VARCHAR NOT NULL,\n v4 VARCHAR NOT NULL,\n v5 VARCHAR NOT NULL,\n CONSTRAINT unique_key_sqlx_adapter UNIQUE(ptype, v0, v1, v2, v3, v4, v5)\n );\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [] - }, - "nullable": [] - }, - "hash": "438ee38e669be96e562d09d3bc5806b4c78b7aa2a9609c4eccb941c7dff7b107" -} diff --git a/.sqlx/query-4acfe0086a593b08177791bb3b47cb75a999041a3eb6a8f8177bebfa3c30d56f.json b/.sqlx/query-4acfe0086a593b08177791bb3b47cb75a999041a3eb6a8f8177bebfa3c30d56f.json deleted file mode 100644 index ce229dc..0000000 --- a/.sqlx/query-4acfe0086a593b08177791bb3b47cb75a999041a3eb6a8f8177bebfa3c30d56f.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n (v4 is NULL OR v4 = COALESCE($2,v4)) AND\n (v5 is NULL OR v5 = COALESCE($3,v5))", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Varchar", - "Varchar" - ] - }, - "nullable": [] - }, - "hash": "4acfe0086a593b08177791bb3b47cb75a999041a3eb6a8f8177bebfa3c30d56f" -} diff --git a/.sqlx/query-4e7b82d256f7298564f46af6a45b89853785c32a5f83cb0b25609329c760428a.json b/.sqlx/query-4e7b82d256f7298564f46af6a45b89853785c32a5f83cb0b25609329c760428a.json deleted file mode 100644 index 4c4c1df..0000000 --- a/.sqlx/query-4e7b82d256f7298564f46af6a45b89853785c32a5f83cb0b25609329c760428a.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n (v1 is NULL OR v1 = COALESCE($2,v1)) AND\n (v2 is NULL OR v2 = COALESCE($3,v2)) AND\n (v3 is NULL OR v3 = COALESCE($4,v3)) AND\n (v4 is NULL OR v4 = COALESCE($5,v4)) AND\n (v5 is NULL OR v5 = COALESCE($6,v5))", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Varchar", - "Varchar", - "Varchar", - "Varchar", - "Varchar" - ] - }, - "nullable": [] - }, - "hash": "4e7b82d256f7298564f46af6a45b89853785c32a5f83cb0b25609329c760428a" -} diff --git a/.sqlx/query-530d3f59ba6d986d3354242ff25faae78671d69c8935d2a2d57c0f9d1e91e832.json b/.sqlx/query-530d3f59ba6d986d3354242ff25faae78671d69c8935d2a2d57c0f9d1e91e832.json deleted file mode 100644 index d0df28a..0000000 --- a/.sqlx/query-530d3f59ba6d986d3354242ff25faae78671d69c8935d2a2d57c0f9d1e91e832.json +++ /dev/null @@ -1,75 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT id, ptype, v0, v1, v2, v3, v4, v5 from casbin_rule WHERE (\n ptype LIKE 'g%' AND v0 LIKE $1 AND v1 LIKE $2 AND v2 LIKE $3 AND v3 LIKE $4 AND v4 LIKE $5 AND v5 LIKE $6 )\n OR (\n ptype LIKE 'p%' AND v0 LIKE $7 AND v1 LIKE $8 AND v2 LIKE $9 AND v3 LIKE $10 AND v4 LIKE $11 AND v5 LIKE $12 );\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - }, - { - "ordinal": 1, - "name": "ptype", - "type_info": "Varchar" - }, - { - "ordinal": 2, - "name": "v0", - "type_info": "Varchar" - }, - { - "ordinal": 3, - "name": "v1", - "type_info": "Varchar" - }, - { - "ordinal": 4, - "name": "v2", - "type_info": "Varchar" - }, - { - "ordinal": 5, - "name": "v3", - "type_info": "Varchar" - }, - { - "ordinal": 6, - "name": "v4", - "type_info": "Varchar" - }, - { - "ordinal": 7, - "name": "v5", - "type_info": "Varchar" - } - ], - "parameters": { - "Left": [ - "Text", - "Text", - "Text", - "Text", - "Text", - "Text", - "Text", - "Text", - "Text", - "Text", - "Text", - "Text" - ] - }, - "nullable": [ - false, - false, - false, - false, - false, - false, - false, - false - ] - }, - "hash": "530d3f59ba6d986d3354242ff25faae78671d69c8935d2a2d57c0f9d1e91e832" -} diff --git a/.sqlx/query-f130c22d14ee2a99b9220ac1a45226ba97993ede9988a4c57d58bd066500a119.json b/.sqlx/query-f130c22d14ee2a99b9220ac1a45226ba97993ede9988a4c57d58bd066500a119.json deleted file mode 100644 index ef54cdb..0000000 --- a/.sqlx/query-f130c22d14ee2a99b9220ac1a45226ba97993ede9988a4c57d58bd066500a119.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n (v0 is NULL OR v0 = COALESCE($2,v0)) AND\n (v1 is NULL OR v1 = COALESCE($3,v1)) AND\n (v2 is NULL OR v2 = COALESCE($4,v2)) AND\n (v3 is NULL OR v3 = COALESCE($5,v3)) AND\n (v4 is NULL OR v4 = COALESCE($6,v4)) AND\n (v5 is NULL OR v5 = COALESCE($7,v5))", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Varchar", - "Varchar", - "Varchar", - "Varchar", - "Varchar", - "Varchar" - ] - }, - "nullable": [] - }, - "hash": "f130c22d14ee2a99b9220ac1a45226ba97993ede9988a4c57d58bd066500a119" -} diff --git a/.sqlx/query-f8611a862ed1d3b982e8aa5ccab21e00c42a3fad8082cf15c2af88cd8388f41b.json b/.sqlx/query-f8611a862ed1d3b982e8aa5ccab21e00c42a3fad8082cf15c2af88cd8388f41b.json deleted file mode 100644 index 0daaa8a..0000000 --- a/.sqlx/query-f8611a862ed1d3b982e8aa5ccab21e00c42a3fad8082cf15c2af88cd8388f41b.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n (v2 is NULL OR v2 = COALESCE($2,v2)) AND\n (v3 is NULL OR v3 = COALESCE($3,v3)) AND\n (v4 is NULL OR v4 = COALESCE($4,v4)) AND\n (v5 is NULL OR v5 = COALESCE($5,v5))", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Varchar", - "Varchar", - "Varchar", - "Varchar" - ] - }, - "nullable": [] - }, - "hash": "f8611a862ed1d3b982e8aa5ccab21e00c42a3fad8082cf15c2af88cd8388f41b" -} diff --git a/.sqlx/query-fa51ae7af271fc17c848694fbf1b37d46c5a2f4202e1b8dce1f66a65069beb0b.json b/.sqlx/query-fa51ae7af271fc17c848694fbf1b37d46c5a2f4202e1b8dce1f66a65069beb0b.json deleted file mode 100644 index 4a5f7e8..0000000 --- a/.sqlx/query-fa51ae7af271fc17c848694fbf1b37d46c5a2f4202e1b8dce1f66a65069beb0b.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM casbin_rule WHERE\n ptype = $1 AND\n (v5 is NULL OR v5 = COALESCE($2,v5))", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Varchar" - ] - }, - "nullable": [] - }, - "hash": "fa51ae7af271fc17c848694fbf1b37d46c5a2f4202e1b8dce1f66a65069beb0b" -} diff --git a/src/routes/marketplace/mod.rs b/src/routes/marketplace/mod.rs index 4201f40..1dd055a 100644 --- a/src/routes/marketplace/mod.rs +++ b/src/routes/marketplace/mod.rs @@ -1,7 +1,9 @@ pub mod public; pub mod creator; pub mod admin; +pub mod categories; pub use public::*; pub use creator::*; pub use admin::*; +pub use categories::*; diff --git a/src/startup.rs b/src/startup.rs index 1cbf6fb..2190978 100644 --- a/src/startup.rs +++ b/src/startup.rs @@ -111,6 +111,7 @@ pub async fn run( ) .service( web::scope("/api") + .service(crate::routes::marketplace::categories::list_handler) .service( web::scope("/templates") .service(crate::routes::marketplace::public::list_handler) From 72c6cb774e2bbfb818876a418ada255675ad3675 Mon Sep 17 00:00:00 2001 From: vsilent Date: Fri, 2 Jan 2026 17:26:27 +0200 Subject: [PATCH 31/35] categories endpoint --- src/routes/marketplace/categories.rs | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 src/routes/marketplace/categories.rs diff --git a/src/routes/marketplace/categories.rs b/src/routes/marketplace/categories.rs new file mode 100644 index 0000000..6aac5df --- /dev/null +++ b/src/routes/marketplace/categories.rs @@ -0,0 +1,16 @@ +use crate::db; +use crate::helpers::JsonResponse; +use crate::models; +use actix_web::{get, web, Responder, Result}; +use sqlx::PgPool; + +#[tracing::instrument(name = "List categories")] +#[get("/categories")] +pub async fn list_handler( + pg_pool: web::Data, +) -> Result { + db::marketplace::get_categories(pg_pool.get_ref()) + .await + .map_err(|err| JsonResponse::>::build().internal_server_error(err)) + .map(|categories| JsonResponse::build().set_list(categories).ok("OK")) +} From 9c8eb4a16be984b071b58276157b2da13cfc833b Mon Sep 17 00:00:00 2001 From: vsilent Date: Fri, 2 Jan 2026 20:57:12 +0200 Subject: [PATCH 32/35] marketplace, categories import from connectors --- .github/workflows/docker.yml | 2 ++ src/db/marketplace.rs | 54 ++++++++++++++++++++++++++++++------ 2 files changed, 47 insertions(+), 9 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 2942628..6a4a8c7 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -5,9 +5,11 @@ on: branches: - main - testing + - dev pull_request: branches: - main + - dev jobs: diff --git a/src/db/marketplace.rs b/src/db/marketplace.rs index 8a3b2a8..19b0b7a 100644 --- a/src/db/marketplace.rs +++ b/src/db/marketplace.rs @@ -493,9 +493,11 @@ pub async fn sync_categories( } let mut synced_count = 0; + let mut error_count = 0; for category in categories { // Use INSERT ... ON CONFLICT DO UPDATE to upsert + // Handle conflicts on both id and name (both have unique constraints) let result = sqlx::query( r#" INSERT INTO stack_category (id, name, title, metadata) @@ -511,18 +513,52 @@ pub async fn sync_categories( .bind(&category.title) .bind(serde_json::json!({"priority": category.priority})) .execute(pool) - .await - .map_err(|e| { - tracing::error!("Failed to sync category {}: {:?}", category.name, e); - format!("Failed to sync category: {}", e) - })?; - - if result.rows_affected() > 0 { - synced_count += 1; + .await; + + // If conflict on id fails, try conflict on name + let result = match result { + Ok(r) => Ok(r), + Err(e) if e.to_string().contains("stack_category_name_key") => { + sqlx::query( + r#" + INSERT INTO stack_category (id, name, title, metadata) + VALUES ($1, $2, $3, $4) + ON CONFLICT (name) DO UPDATE + SET id = EXCLUDED.id, + title = EXCLUDED.title, + metadata = EXCLUDED.metadata + "# + ) + .bind(category.id) + .bind(&category.name) + .bind(&category.title) + .bind(serde_json::json!({"priority": category.priority})) + .execute(pool) + .await + } + Err(e) => Err(e), + }; + + match result { + Ok(res) if res.rows_affected() > 0 => { + synced_count += 1; + } + Ok(_) => { + tracing::debug!("Category {} already up to date", category.name); + } + Err(e) => { + tracing::error!("Failed to sync category {}: {:?}", category.name, e); + error_count += 1; + } } } - tracing::info!("Synced {} categories from User Service", synced_count); + if error_count > 0 { + tracing::warn!("Synced {} categories with {} errors", synced_count, error_count); + } else { + tracing::info!("Synced {} categories from User Service", synced_count); + } + Ok(synced_count) } From 705167d057702541d566ca70dcf25a11c0a3d0a2 Mon Sep 17 00:00:00 2001 From: vsilent Date: Fri, 2 Jan 2026 21:09:10 +0200 Subject: [PATCH 33/35] add connector example based on 3-d party auth service --- .github/workflows/docker.yml | 3 +- src/connectors/README.md | 5 +- src/connectors/config.rs | 96 ++ src/connectors/errors.rs | 79 ++ src/connectors/mod.rs | 55 + src/connectors/user_service/category_sync.rs | 95 ++ .../user_service/deployment_validator.rs | 234 +++++ .../user_service/marketplace_webhook.rs | 356 +++++++ src/connectors/user_service/mod.rs | 945 ++++++++++++++++++ 9 files changed, 1864 insertions(+), 4 deletions(-) create mode 100644 src/connectors/config.rs create mode 100644 src/connectors/errors.rs create mode 100644 src/connectors/mod.rs create mode 100644 src/connectors/user_service/category_sync.rs create mode 100644 src/connectors/user_service/deployment_validator.rs create mode 100644 src/connectors/user_service/marketplace_webhook.rs create mode 100644 src/connectors/user_service/mod.rs diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 6a4a8c7..b0fc4b0 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -15,7 +15,8 @@ jobs: cicd-docker: name: Cargo and npm build - runs-on: ubuntu-latest + #runs-on: ubuntu-latest + runs-on: self-hosted env: SQLX_OFFLINE: true steps: diff --git a/src/connectors/README.md b/src/connectors/README.md index c7f0f01..422832d 100644 --- a/src/connectors/README.md +++ b/src/connectors/README.md @@ -1,6 +1,7 @@ # External Service Connectors -This directory contains adapters for all external service integrations. **All communication with external services MUST go through connectors** - this is a core architectural rule for Stacker. +This directory contains adapters for all external service integrations for your project. + **All communication with external services MUST go through connectors** - this is a core architectural rule for Stacker. ## Why Connectors? @@ -526,7 +527,5 @@ req.send() ## Further Reading -- [User Service API Documentation](../../docs/USER_SERVICE_API.md) -- [Payment Service Documentation](../../docs/PAYMENT_SERVICE.md) - [Error Handling Patterns](../helpers/README.md) - [Testing Guide](../../tests/README.md) diff --git a/src/connectors/config.rs b/src/connectors/config.rs new file mode 100644 index 0000000..474bf4f --- /dev/null +++ b/src/connectors/config.rs @@ -0,0 +1,96 @@ +use serde::{Deserialize, Serialize}; + +/// Configuration for external service connectors +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ConnectorConfig { + pub user_service: Option, + pub payment_service: Option, + pub events: Option, +} + +/// User Service connector configuration +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct UserServiceConfig { + /// Enable/disable User Service integration + pub enabled: bool, + /// Base URL for User Service API (e.g., http://localhost:4100/server/user) + pub base_url: String, + /// HTTP request timeout in seconds + pub timeout_secs: u64, + /// Number of retry attempts for failed requests + pub retry_attempts: usize, + /// OAuth token for inter-service authentication (from env: USER_SERVICE_AUTH_TOKEN) + #[serde(skip)] + pub auth_token: Option, +} + +impl Default for UserServiceConfig { + fn default() -> Self { + Self { + enabled: false, + base_url: "http://localhost:4100/server/user".to_string(), + timeout_secs: 10, + retry_attempts: 3, + auth_token: None, + } + } +} + +/// Payment Service connector configuration +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PaymentServiceConfig { + /// Enable/disable Payment Service integration + pub enabled: bool, + /// Base URL for Payment Service API (e.g., http://localhost:8000) + pub base_url: String, + /// HTTP request timeout in seconds + pub timeout_secs: u64, + /// Bearer token for authentication + #[serde(skip)] + pub auth_token: Option, +} + +impl Default for PaymentServiceConfig { + fn default() -> Self { + Self { + enabled: false, + base_url: "http://localhost:8000".to_string(), + timeout_secs: 15, + auth_token: None, + } + } +} + +/// RabbitMQ Events configuration +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct EventsConfig { + /// Enable/disable async event publishing + pub enabled: bool, + /// AMQP connection string (amqp://user:password@host:port/%2f) + pub amqp_url: String, + /// Event exchange name + pub exchange: String, + /// Prefetch count for consumer + pub prefetch: u16, +} + +impl Default for EventsConfig { + fn default() -> Self { + Self { + enabled: false, + amqp_url: "amqp://guest:guest@localhost:5672/%2f".to_string(), + exchange: "stacker_events".to_string(), + prefetch: 10, + } + } +} + +impl Default for ConnectorConfig { + fn default() -> Self { + Self { + user_service: Some(UserServiceConfig::default()), + payment_service: Some(PaymentServiceConfig::default()), + events: Some(EventsConfig::default()), + } + } +} diff --git a/src/connectors/errors.rs b/src/connectors/errors.rs new file mode 100644 index 0000000..dee4bc8 --- /dev/null +++ b/src/connectors/errors.rs @@ -0,0 +1,79 @@ +use actix_web::{error::ResponseError, http::StatusCode, HttpResponse}; +use serde_json::json; +use std::fmt; + +/// Errors that can occur during external service communication +#[derive(Debug)] +pub enum ConnectorError { + /// HTTP request/response error + HttpError(String), + /// Service unreachable or timeout + ServiceUnavailable(String), + /// Invalid response format from external service + InvalidResponse(String), + /// Authentication error (401/403) + Unauthorized(String), + /// Not found (404) + NotFound(String), + /// Rate limited or exceeded quota + RateLimited(String), + /// Internal error in connector + Internal(String), +} + +impl fmt::Display for ConnectorError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::HttpError(msg) => write!(f, "HTTP error: {}", msg), + Self::ServiceUnavailable(msg) => write!(f, "Service unavailable: {}", msg), + Self::InvalidResponse(msg) => write!(f, "Invalid response: {}", msg), + Self::Unauthorized(msg) => write!(f, "Unauthorized: {}", msg), + Self::NotFound(msg) => write!(f, "Not found: {}", msg), + Self::RateLimited(msg) => write!(f, "Rate limited: {}", msg), + Self::Internal(msg) => write!(f, "Internal error: {}", msg), + } + } +} + +impl ResponseError for ConnectorError { + fn error_response(&self) -> HttpResponse { + let (status, message) = match self { + Self::HttpError(_) => (StatusCode::BAD_GATEWAY, "External service error"), + Self::ServiceUnavailable(_) => (StatusCode::SERVICE_UNAVAILABLE, "Service unavailable"), + Self::InvalidResponse(_) => (StatusCode::BAD_GATEWAY, "Invalid external service response"), + Self::Unauthorized(_) => (StatusCode::UNAUTHORIZED, "Unauthorized"), + Self::NotFound(_) => (StatusCode::NOT_FOUND, "Resource not found"), + Self::RateLimited(_) => (StatusCode::TOO_MANY_REQUESTS, "Rate limit exceeded"), + Self::Internal(_) => (StatusCode::INTERNAL_SERVER_ERROR, "Internal error"), + }; + + HttpResponse::build(status).json(json!({ + "error": message, + "details": self.to_string(), + })) + } + + fn status_code(&self) -> StatusCode { + match self { + Self::HttpError(_) => StatusCode::BAD_GATEWAY, + Self::ServiceUnavailable(_) => StatusCode::SERVICE_UNAVAILABLE, + Self::InvalidResponse(_) => StatusCode::BAD_GATEWAY, + Self::Unauthorized(_) => StatusCode::UNAUTHORIZED, + Self::NotFound(_) => StatusCode::NOT_FOUND, + Self::RateLimited(_) => StatusCode::TOO_MANY_REQUESTS, + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + } + } +} + +impl From for ConnectorError { + fn from(err: reqwest::Error) -> Self { + if err.is_timeout() { + Self::ServiceUnavailable(format!("Request timeout: {}", err)) + } else if err.is_connect() { + Self::ServiceUnavailable(format!("Connection failed: {}", err)) + } else { + Self::HttpError(err.to_string()) + } + } +} diff --git a/src/connectors/mod.rs b/src/connectors/mod.rs new file mode 100644 index 0000000..a3c9673 --- /dev/null +++ b/src/connectors/mod.rs @@ -0,0 +1,55 @@ +//! External Service Connectors +//! +//! This module provides adapters for communicating with external services (User Service, Payment Service, etc.). +//! All external integrations must go through connectors to keep Stacker independent and testable. +//! +//! ## Architecture Pattern +//! +//! 1. Define trait in `{service}.rs` → allows mocking in tests +//! 2. Implement HTTP client in same file +//! 3. Configuration in `config.rs` → enable/disable per environment +//! 4. Inject trait object into routes → routes never depend on HTTP implementation +//! +//! ## Usage in Routes +//! +//! ```ignore +//! // In route handler +//! pub async fn deploy_template( +//! connector: web::Data>, +//! ) -> Result { +//! // Routes use trait methods, never care about HTTP details +//! connector.create_stack_from_template(...).await?; +//! } +//! ``` +//! +//! ## Testing +//! +//! ```ignore +//! #[cfg(test)] +//! mod tests { +//! use super::*; +//! use connectors::user_service::mock::MockUserServiceConnector; +//! +//! #[tokio::test] +//! async fn test_deploy_without_http() { +//! let connector = Arc::new(MockUserServiceConnector); +//! // Test route logic without external API calls +//! } +//! } +//! ``` + +pub mod config; +pub mod errors; +pub mod user_service; + +pub use config::{ConnectorConfig, UserServiceConfig, PaymentServiceConfig, EventsConfig}; +pub use errors::ConnectorError; +pub use user_service::{ + UserServiceConnector, UserServiceClient, StackResponse, UserProfile, UserProduct, ProductInfo, + UserPlanInfo, PlanDefinition, CategoryInfo, + DeploymentValidator, DeploymentValidationError, + MarketplaceWebhookSender, WebhookSenderConfig, MarketplaceWebhookPayload, WebhookResponse, +}; + +// Re-export init functions for convenient access +pub use user_service::init as init_user_service; diff --git a/src/connectors/user_service/category_sync.rs b/src/connectors/user_service/category_sync.rs new file mode 100644 index 0000000..f1540a4 --- /dev/null +++ b/src/connectors/user_service/category_sync.rs @@ -0,0 +1,95 @@ +/// Category synchronization from User Service to local Stacker mirror +/// +/// Implements automatic category sync on startup to keep local category table +/// in sync with User Service as the source of truth. + +use sqlx::PgPool; +use std::sync::Arc; +use tracing::Instrument; + +use super::{CategoryInfo, UserServiceConnector}; +use crate::connectors::ConnectorError; + +/// Sync categories from User Service to local database +/// +/// Fetches categories from User Service and upserts them into local stack_category table. +/// This maintains a local mirror for fast lookups and offline capability. +/// +/// # Arguments +/// * `connector` - User Service connector to fetch categories from +/// * `pool` - Database connection pool for local upsert +/// +/// # Returns +/// Number of categories synced, or error if sync fails +pub async fn sync_categories_from_user_service( + connector: Arc, + pool: &PgPool, +) -> Result { + let span = tracing::info_span!("sync_categories_from_user_service"); + + // Fetch categories from User Service + let categories = connector + .get_categories() + .instrument(span.clone()) + .await + .map_err(|e| format!("Failed to fetch categories from User Service: {:?}", e))?; + + tracing::info!("Fetched {} categories from User Service", categories.len()); + + if categories.is_empty() { + tracing::warn!("No categories returned from User Service"); + return Ok(0); + } + + // Upsert categories to local database + let synced_count = upsert_categories(pool, categories) + .instrument(span) + .await?; + + tracing::info!( + "Successfully synced {} categories from User Service to local mirror", + synced_count + ); + + Ok(synced_count) +} + +/// Upsert categories into local database +async fn upsert_categories(pool: &PgPool, categories: Vec) -> Result { + let mut synced_count = 0; + + for category in categories { + // Use INSERT ... ON CONFLICT DO UPDATE to upsert + let result = sqlx::query( + r#" + INSERT INTO stack_category (id, name, title, metadata) + VALUES ($1, $2, $3, $4) + ON CONFLICT (id) DO UPDATE + SET name = EXCLUDED.name, + title = EXCLUDED.title, + metadata = EXCLUDED.metadata + "#, + ) + .bind(category.id) + .bind(&category.name) + .bind(&category.title) + .bind(serde_json::json!({"priority": category.priority})) + .execute(pool) + .await + .map_err(|e| { + tracing::error!("Failed to upsert category {}: {:?}", category.name, e); + format!("Failed to upsert category: {}", e) + })?; + + if result.rows_affected() > 0 { + synced_count += 1; + tracing::debug!( + "Synced category: {} ({})", + category.name, + category.title + ); + } + } + + Ok(synced_count) +} diff --git a/src/connectors/user_service/deployment_validator.rs b/src/connectors/user_service/deployment_validator.rs new file mode 100644 index 0000000..5f4b618 --- /dev/null +++ b/src/connectors/user_service/deployment_validator.rs @@ -0,0 +1,234 @@ +/// Deployment validator for marketplace template ownership +/// +/// Validates that users can deploy marketplace templates they own. +/// Implements plan gating (if template requires specific plan tier) and +/// product ownership checks (if template is a paid marketplace product). + +use std::sync::Arc; +use tracing::Instrument; + +use crate::connectors::{ConnectorError, UserServiceConnector}; +use crate::models; + +/// Custom error types for deployment validation +#[derive(Debug, Clone)] +pub enum DeploymentValidationError { + /// User's plan is insufficient for this template + InsufficientPlan { + required_plan: String, + user_plan: String, + }, + + /// User has not purchased this marketplace template + TemplateNotPurchased { + template_id: String, + product_price: Option, + }, + + /// Template not found in User Service + TemplateNotFound { + template_id: String, + }, + + /// Failed to validate with User Service (unavailable, auth error, etc.) + ValidationFailed { + reason: String, + }, +} + +impl std::fmt::Display for DeploymentValidationError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::InsufficientPlan { + required_plan, + user_plan, + } => write!( + f, + "You require a '{}' subscription to deploy this template (you have '{}')", + required_plan, user_plan + ), + Self::TemplateNotPurchased { + template_id, + product_price, + } => { + if let Some(price) = product_price { + write!( + f, + "This verified pro stack requires purchase (${:.2}). Please purchase from marketplace.", + price + ) + } else { + write!( + f, + "You must purchase this template to deploy it. Template ID: {}", + template_id + ) + } + } + Self::TemplateNotFound { template_id } => { + write!(f, "Template {} not found in marketplace", template_id) + } + Self::ValidationFailed { reason } => { + write!(f, "Failed to validate deployment: {}", reason) + } + } + } +} + +/// Validator for marketplace template deployments +pub struct DeploymentValidator { + user_service_connector: Arc, +} + +impl DeploymentValidator { + /// Create new deployment validator + pub fn new(user_service_connector: Arc) -> Self { + Self { + user_service_connector, + } + } + + /// Validate that user can deploy a marketplace template + /// + /// Checks: + /// 1. If template requires a plan tier, verify user has it + /// 2. If template is a paid marketplace product, verify user owns it + /// + /// # Arguments + /// * `template` - The stack template being deployed + /// * `user_token` - User's OAuth token for User Service queries + /// + /// # Returns + /// Ok(()) if validation passes, Err(DeploymentValidationError) otherwise + pub async fn validate_template_deployment( + &self, + template: &models::marketplace::StackTemplate, + user_token: &str, + ) -> Result<(), DeploymentValidationError> { + let span = tracing::info_span!( + "validate_template_deployment", + template_id = %template.id + ); + + // Check plan requirement first (if specified) + if let Some(required_plan) = &template.required_plan_name { + self.validate_plan_access(user_token, required_plan) + .instrument(span.clone()) + .await?; + } + + // Check marketplace template purchase (if it's a marketplace template with a product) + if template.product_id.is_some() { + self.validate_template_ownership(user_token, &template.id.to_string()) + .instrument(span) + .await?; + } + + tracing::info!("Template deployment validation successful"); + Ok(()) + } + + /// Validate user has required plan tier + async fn validate_plan_access( + &self, + user_token: &str, + required_plan: &str, + ) -> Result<(), DeploymentValidationError> { + let span = tracing::info_span!( + "validate_plan_access", + required_plan = required_plan + ); + + // Extract user ID from token (or use token directly for User Service query) + // For now, we'll rely on User Service to validate the token + let has_plan = self + .user_service_connector + .user_has_plan(user_token, required_plan) + .instrument(span.clone()) + .await + .map_err(|e| DeploymentValidationError::ValidationFailed { + reason: format!("Failed to check plan access: {}", e), + })?; + + if !has_plan { + // Get user's actual plan for error message + let user_plan = self + .user_service_connector + .get_user_plan(user_token) + .instrument(span) + .await + .map(|info| info.plan_name) + .unwrap_or_else(|_| "unknown".to_string()); + + return Err(DeploymentValidationError::InsufficientPlan { + required_plan: required_plan.to_string(), + user_plan, + }); + } + + Ok(()) + } + + /// Validate user owns a marketplace template product + async fn validate_template_ownership( + &self, + user_token: &str, + stack_template_id: &str, + ) -> Result<(), DeploymentValidationError> { + let span = tracing::info_span!( + "validate_template_ownership", + template_id = stack_template_id + ); + + // First check if template even has a product + // Note: We need template ID as i32 for User Service query + // For now, we'll just check ownership directly + let owns_template = self + .user_service_connector + .user_owns_template(user_token, stack_template_id) + .instrument(span.clone()) + .await + .map_err(|e| DeploymentValidationError::ValidationFailed { + reason: format!("Failed to check template ownership: {}", e), + })?; + + if !owns_template { + // If user doesn't own, they may need to purchase + // In a real scenario, we'd fetch price from User Service + return Err(DeploymentValidationError::TemplateNotPurchased { + template_id: stack_template_id.to_string(), + product_price: None, + }); + } + + tracing::info!("User owns template, allowing deployment"); + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_validation_error_display() { + let err = DeploymentValidationError::InsufficientPlan { + required_plan: "professional".to_string(), + user_plan: "basic".to_string(), + }; + let msg = err.to_string(); + assert!(msg.contains("professional")); + assert!(msg.contains("basic")); + } + + #[test] + fn test_template_not_purchased_error() { + let err = DeploymentValidationError::TemplateNotPurchased { + template_id: "template-123".to_string(), + product_price: Some(99.99), + }; + let msg = err.to_string(); + assert!(msg.contains("99.99")); + assert!(msg.contains("purchase")); + } +} diff --git a/src/connectors/user_service/marketplace_webhook.rs b/src/connectors/user_service/marketplace_webhook.rs new file mode 100644 index 0000000..4d269fe --- /dev/null +++ b/src/connectors/user_service/marketplace_webhook.rs @@ -0,0 +1,356 @@ +/// Marketplace webhook sender for User Service integration +/// +/// Sends webhooks to User Service when marketplace templates change status. +/// This implements Flow 3 from PAYMENT_MODEL.md: Creator publishes template → Product created in User Service +/// +/// **Architecture**: One-way webhooks from Stacker to User Service. +/// - No bi-directional queries on approval +/// - Bearer token authentication using STACKER_SERVICE_TOKEN +/// - Template approval does not block if webhook send fails (async/retry pattern) + +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use tokio::sync::Mutex; +use tracing::Instrument; + +use crate::connectors::ConnectorError; +use crate::models; + +/// Marketplace webhook payload sent to User Service +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct MarketplaceWebhookPayload { + /// Action type: "template_approved", "template_updated", or "template_rejected" + pub action: String, + + /// Stacker template UUID (as string) + pub stack_template_id: String, + + /// External ID for User Service product (UUID as string or i32, same as stack_template_id) + pub external_id: String, + + /// Product code (slug-based identifier) + pub code: Option, + + /// Template name + pub name: Option, + + /// Template description + pub description: Option, + + /// Price in specified currency (if not free) + pub price: Option, + + /// Billing cycle: "one_time" or "monthly"/"yearly" + #[serde(skip_serializing_if = "Option::is_none")] + pub billing_cycle: Option, + + /// Currency code (USD, EUR, etc.) + #[serde(skip_serializing_if = "Option::is_none")] + pub currency: Option, + + /// Creator/vendor user ID from Stacker + pub vendor_user_id: Option, + + /// Vendor name or email + pub vendor_name: Option, + + /// Category of template + #[serde(skip_serializing_if = "Option::is_none")] + pub category: Option, + + /// Tags/keywords + #[serde(skip_serializing_if = "Option::is_none")] + pub tags: Option, +} + +/// Response from User Service webhook endpoint +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct WebhookResponse { + pub success: bool, + pub message: Option, + pub product_id: Option, +} + +/// Configuration for webhook sender +#[derive(Debug, Clone)] +pub struct WebhookSenderConfig { + /// User Service base URL (e.g., "http://user:4100") + pub base_url: String, + + /// Bearer token for service-to-service authentication + pub bearer_token: String, + + /// HTTP client timeout in seconds + pub timeout_secs: u64, + + /// Number of retry attempts on failure + pub retry_attempts: usize, +} + +impl WebhookSenderConfig { + /// Create from environment variables + pub fn from_env() -> Result { + let base_url = std::env::var("URL_SERVER_USER") + .or_else(|_| std::env::var("USER_SERVICE_BASE_URL")) + .map_err(|_| "USER_SERVICE_BASE_URL not configured".to_string())?; + + let bearer_token = std::env::var("STACKER_SERVICE_TOKEN") + .map_err(|_| "STACKER_SERVICE_TOKEN not configured".to_string())?; + + Ok(Self { + base_url, + bearer_token, + timeout_secs: 10, + retry_attempts: 3, + }) + } +} + +/// Sends webhooks to User Service when marketplace templates change +pub struct MarketplaceWebhookSender { + config: WebhookSenderConfig, + http_client: reqwest::Client, + // Track webhook deliveries in-memory (simple approach) + pending_webhooks: Arc>>, +} + +impl MarketplaceWebhookSender { + /// Create new webhook sender with configuration + pub fn new(config: WebhookSenderConfig) -> Self { + let timeout = std::time::Duration::from_secs(config.timeout_secs); + let http_client = reqwest::Client::builder() + .timeout(timeout) + .build() + .expect("Failed to create HTTP client"); + + Self { + config, + http_client, + pending_webhooks: Arc::new(Mutex::new(Vec::new())), + } + } + + /// Create from environment variables + pub fn from_env() -> Result { + let config = WebhookSenderConfig::from_env()?; + Ok(Self::new(config)) + } + + /// Send template approved webhook to User Service + /// Creates/updates product in User Service marketplace + pub async fn send_template_approved( + &self, + template: &models::marketplace::StackTemplate, + vendor_id: &str, + category_code: Option, + ) -> Result { + let span = tracing::info_span!( + "send_template_approved_webhook", + template_id = %template.id, + vendor_id = vendor_id + ); + + let payload = MarketplaceWebhookPayload { + action: "template_approved".to_string(), + stack_template_id: template.id.to_string(), + external_id: template.id.to_string(), + code: Some(template.slug.clone()), + name: Some(template.name.clone()), + description: template.short_description.clone().or_else(|| template.long_description.clone()), + price: None, // Pricing not stored in Stacker (User Service responsibility) + billing_cycle: None, + currency: None, + vendor_user_id: Some(vendor_id.to_string()), + vendor_name: Some(vendor_id.to_string()), + category: category_code, + tags: if let serde_json::Value::Array(_) = template.tags { + Some(template.tags.clone()) + } else { + None + }, + }; + + self.send_webhook(&payload).instrument(span).await + } + + /// Send template updated webhook to User Service + /// Updates product metadata/details in User Service + pub async fn send_template_updated( + &self, + template: &models::marketplace::StackTemplate, + vendor_id: &str, + category_code: Option, + ) -> Result { + let span = tracing::info_span!( + "send_template_updated_webhook", + template_id = %template.id + ); + + let payload = MarketplaceWebhookPayload { + action: "template_updated".to_string(), + stack_template_id: template.id.to_string(), + external_id: template.id.to_string(), + code: Some(template.slug.clone()), + name: Some(template.name.clone()), + description: template.short_description.clone().or_else(|| template.long_description.clone()), + price: None, + billing_cycle: None, + currency: None, + vendor_user_id: Some(vendor_id.to_string()), + vendor_name: Some(vendor_id.to_string()), + category: category_code, + tags: if let serde_json::Value::Array(_) = template.tags { + Some(template.tags.clone()) + } else { + None + }, + }; + + self.send_webhook(&payload).instrument(span).await + } + + /// Send template rejected webhook to User Service + /// Deactivates product in User Service + pub async fn send_template_rejected( + &self, + stack_template_id: &str, + ) -> Result { + let span = tracing::info_span!("send_template_rejected_webhook", template_id = stack_template_id); + + let payload = MarketplaceWebhookPayload { + action: "template_rejected".to_string(), + stack_template_id: stack_template_id.to_string(), + external_id: stack_template_id.to_string(), + code: None, + name: None, + description: None, + price: None, + billing_cycle: None, + currency: None, + vendor_user_id: None, + vendor_name: None, + category: None, + tags: None, + }; + + self.send_webhook(&payload).instrument(span).await + } + + /// Internal method to send webhook with retries + async fn send_webhook(&self, payload: &MarketplaceWebhookPayload) -> Result { + let url = format!("{}/marketplace/sync", self.config.base_url); + + let mut attempt = 0; + loop { + attempt += 1; + + let req = self + .http_client + .post(&url) + .json(payload) + .header("Authorization", format!("Bearer {}", self.config.bearer_token)) + .header("Content-Type", "application/json"); + + match req.send().await { + Ok(resp) => match resp.status().as_u16() { + 200 | 201 => { + let text = resp.text().await.map_err(|e| ConnectorError::HttpError(e.to_string()))?; + return serde_json::from_str::(&text) + .map_err(|_| ConnectorError::InvalidResponse(text)); + } + 401 => { + return Err(ConnectorError::Unauthorized( + "Invalid service token for User Service webhook".to_string(), + )); + } + 404 => { + return Err(ConnectorError::NotFound("/marketplace/sync endpoint not found".to_string())); + } + 500..=599 => { + // Retry on server errors + if attempt < self.config.retry_attempts { + let backoff = std::time::Duration::from_millis(100 * 2_u64.pow((attempt - 1) as u32)); + tracing::warn!( + "User Service webhook failed with {}, retrying after {:?}", + resp.status(), + backoff + ); + tokio::time::sleep(backoff).await; + continue; + } + return Err(ConnectorError::ServiceUnavailable(format!( + "User Service returned {}: webhook send failed", + resp.status() + ))); + } + status => { + return Err(ConnectorError::HttpError(format!("Unexpected status code: {}", status))); + } + }, + Err(e) if e.is_timeout() => { + if attempt < self.config.retry_attempts { + let backoff = std::time::Duration::from_millis(100 * 2_u64.pow((attempt - 1) as u32)); + tracing::warn!("User Service webhook timeout, retrying after {:?}", backoff); + tokio::time::sleep(backoff).await; + continue; + } + return Err(ConnectorError::ServiceUnavailable("Webhook send timeout".to_string())); + } + Err(e) => { + return Err(ConnectorError::HttpError(format!("Webhook send failed: {}", e))); + } + } + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_webhook_payload_serialization() { + let payload = MarketplaceWebhookPayload { + action: "template_approved".to_string(), + stack_template_id: "550e8400-e29b-41d4-a716-446655440000".to_string(), + external_id: "550e8400-e29b-41d4-a716-446655440000".to_string(), + code: Some("ai-agent-stack-pro".to_string()), + name: Some("AI Agent Stack Pro".to_string()), + description: Some("Advanced AI agent template".to_string()), + price: Some(99.99), + billing_cycle: Some("one_time".to_string()), + currency: Some("USD".to_string()), + vendor_user_id: Some("user-456".to_string()), + vendor_name: Some("alice@example.com".to_string()), + category: Some("AI Agents".to_string()), + tags: Some(serde_json::json!(["ai", "agents"])), + }; + + let json = serde_json::to_string(&payload).expect("Failed to serialize"); + assert!(json.contains("template_approved")); + assert!(json.contains("ai-agent-stack-pro")); + } + + #[test] + fn test_webhook_payload_with_rejection() { + let payload = MarketplaceWebhookPayload { + action: "template_rejected".to_string(), + stack_template_id: "550e8400-e29b-41d4-a716-446655440000".to_string(), + external_id: "550e8400-e29b-41d4-a716-446655440000".to_string(), + code: None, + name: None, + description: None, + price: None, + billing_cycle: None, + currency: None, + vendor_user_id: None, + vendor_name: None, + category: None, + tags: None, + }; + + let json = serde_json::to_string(&payload).expect("Failed to serialize"); + assert!(json.contains("template_rejected")); + assert!(!json.contains("ai-agent")); + } +} diff --git a/src/connectors/user_service/mod.rs b/src/connectors/user_service/mod.rs new file mode 100644 index 0000000..070aa40 --- /dev/null +++ b/src/connectors/user_service/mod.rs @@ -0,0 +1,945 @@ +pub mod deployment_validator; +pub mod marketplace_webhook; +pub mod category_sync; + +pub use deployment_validator::{DeploymentValidator, DeploymentValidationError}; +pub use marketplace_webhook::{MarketplaceWebhookSender, WebhookSenderConfig, MarketplaceWebhookPayload, WebhookResponse}; +pub use category_sync::sync_categories_from_user_service; + +use super::config::UserServiceConfig; +use super::errors::ConnectorError; +use actix_web::web; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use tracing::Instrument; +use uuid::Uuid; + +/// Response from User Service when creating a stack from marketplace template +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct StackResponse { + pub id: i32, + pub user_id: String, + pub name: String, + pub marketplace_template_id: Option, + pub is_from_marketplace: bool, + pub template_version: Option, +} + +/// User's current plan information +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct UserPlanInfo { + pub user_id: String, + pub plan_name: String, + pub plan_description: Option, + pub tier: Option, + pub active: bool, + pub started_at: Option, + pub expires_at: Option, +} + +/// Available plan definition +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PlanDefinition { + pub name: String, + pub description: Option, + pub tier: Option, + pub features: Option, +} + +/// Product owned by a user (from /oauth_server/api/me response) +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct UserProduct { + pub id: Option, + pub name: String, + pub code: String, + pub product_type: String, + #[serde(default)] + pub external_id: Option, // Stack template ID from Stacker + #[serde(default)] + pub owned_since: Option, +} + +/// User profile with ownership information +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct UserProfile { + pub email: String, + pub plan: Option, // Plan details from existing endpoint + #[serde(default)] + pub products: Vec, // List of owned products +} + +/// Product information from User Service catalog +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ProductInfo { + pub id: String, + pub name: String, + pub code: String, + pub product_type: String, + pub external_id: Option, + pub price: Option, + pub billing_cycle: Option, + pub currency: Option, + pub vendor_id: Option, + pub is_active: bool, +} + +/// Category information from User Service +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CategoryInfo { + #[serde(rename = "_id")] + pub id: i32, + pub name: String, + pub title: String, + #[serde(default)] + pub priority: Option, +} + +/// Trait for User Service integration +/// Allows mocking in tests and swapping implementations +#[async_trait::async_trait] +pub trait UserServiceConnector: Send + Sync { + /// Create a new stack in User Service from a marketplace template + async fn create_stack_from_template( + &self, + marketplace_template_id: &Uuid, + user_id: &str, + template_version: &str, + name: &str, + stack_definition: serde_json::Value, + ) -> Result; + + /// Fetch stack details from User Service + async fn get_stack(&self, stack_id: i32, user_id: &str) -> Result; + + /// List user's stacks + async fn list_stacks(&self, user_id: &str) -> Result, ConnectorError>; + + /// Check if user has access to a specific plan + /// Returns true if user's current plan allows access to required_plan_name + async fn user_has_plan( + &self, + user_id: &str, + required_plan_name: &str, + ) -> Result; + + /// Get user's current plan information + async fn get_user_plan(&self, user_id: &str) -> Result; + + /// List all available plans that users can subscribe to + async fn list_available_plans(&self) -> Result, ConnectorError>; + + /// Get user profile with owned products list + /// Calls GET /oauth_server/api/me and returns profile with products array + async fn get_user_profile(&self, user_token: &str) -> Result; + + /// Get product information for a marketplace template + /// Calls GET /api/1.0/products?external_id={template_id}&product_type=template + async fn get_template_product( + &self, + stack_template_id: i32, + ) -> Result, ConnectorError>; + + /// Check if user owns a specific template product + /// Returns true if user has the template in their products list + async fn user_owns_template( + &self, + user_token: &str, + stack_template_id: &str, + ) -> Result; + + /// Get list of categories from User Service + /// Calls GET /api/1.0/category and returns available categories + async fn get_categories(&self) -> Result, ConnectorError>; +} + +/// HTTP-based User Service client +pub struct UserServiceClient { + base_url: String, + http_client: reqwest::Client, + auth_token: Option, + retry_attempts: usize, +} + +impl UserServiceClient { + /// Create new User Service client + pub fn new(config: UserServiceConfig) -> Self { + let timeout = std::time::Duration::from_secs(config.timeout_secs); + let http_client = reqwest::Client::builder() + .timeout(timeout) + .build() + .expect("Failed to create HTTP client"); + + Self { + base_url: config.base_url, + http_client, + auth_token: config.auth_token, + retry_attempts: config.retry_attempts, + } + } + + /// Build authorization header if token configured + fn auth_header(&self) -> Option { + self.auth_token + .as_ref() + .map(|token| format!("Bearer {}", token)) + } + + /// Retry helper with exponential backoff + async fn retry_request(&self, mut f: F) -> Result + where + F: FnMut() -> futures::future::BoxFuture<'static, Result>, + { + let mut attempt = 0; + loop { + match f().await { + Ok(result) => return Ok(result), + Err(err) => { + attempt += 1; + if attempt >= self.retry_attempts { + return Err(err); + } + // Exponential backoff: 100ms, 200ms, 400ms, etc. + let backoff = std::time::Duration::from_millis(100 * 2_u64.pow(attempt as u32)); + tokio::time::sleep(backoff).await; + } + } + } + } +} + +#[async_trait::async_trait] +impl UserServiceConnector for UserServiceClient { + async fn create_stack_from_template( + &self, + marketplace_template_id: &Uuid, + user_id: &str, + template_version: &str, + name: &str, + stack_definition: serde_json::Value, + ) -> Result { + let span = tracing::info_span!( + "user_service_create_stack", + template_id = %marketplace_template_id, + user_id = %user_id + ); + + let url = format!("{}/api/1.0/stacks", self.base_url); + let payload = serde_json::json!({ + "name": name, + "marketplace_template_id": marketplace_template_id.to_string(), + "is_from_marketplace": true, + "template_version": template_version, + "stack_definition": stack_definition, + "user_id": user_id, + }); + + let mut req = self.http_client.post(&url).json(&payload); + + if let Some(auth) = self.auth_header() { + req = req.header("Authorization", auth); + } + + let resp = req.send() + .instrument(span) + .await + .and_then(|resp| resp.error_for_status()) + .map_err(|e| { + tracing::error!("create_stack error: {:?}", e); + ConnectorError::HttpError(format!("Failed to create stack: {}", e)) + })?; + + let text = resp.text().await.map_err(|e| ConnectorError::HttpError(e.to_string()))?; + serde_json::from_str::(&text) + .map_err(|_| ConnectorError::InvalidResponse(text)) + } + + async fn get_stack(&self, stack_id: i32, user_id: &str) -> Result { + let span = tracing::info_span!("user_service_get_stack", stack_id = stack_id, user_id = %user_id); + + let url = format!("{}/api/1.0/stacks/{}", self.base_url, stack_id); + let mut req = self.http_client.get(&url); + + if let Some(auth) = self.auth_header() { + req = req.header("Authorization", auth); + } + + let resp = req.send() + .instrument(span) + .await + .map_err(|e| { + if e.status().map_or(false, |s| s == 404) { + ConnectorError::NotFound(format!("Stack {} not found", stack_id)) + } else { + ConnectorError::HttpError(format!("Failed to get stack: {}", e)) + } + })?; + + if resp.status() == 404 { + return Err(ConnectorError::NotFound(format!("Stack {} not found", stack_id))); + } + + let text = resp.text().await.map_err(|e| ConnectorError::HttpError(e.to_string()))?; + serde_json::from_str::(&text) + .map_err(|_| ConnectorError::InvalidResponse(text)) + } + + async fn list_stacks(&self, user_id: &str) -> Result, ConnectorError> { + let span = tracing::info_span!("user_service_list_stacks", user_id = %user_id); + + let url = format!( + "{}/api/1.0/stacks?where={{\"user_id\":\"{}\"}}", + self.base_url, user_id + ); + let mut req = self.http_client.get(&url); + + if let Some(auth) = self.auth_header() { + req = req.header("Authorization", auth); + } + + #[derive(Deserialize)] + struct ListResponse { + _items: Vec, + } + + let resp = req.send() + .instrument(span) + .await + .and_then(|resp| resp.error_for_status()) + .map_err(|e| { + tracing::error!("list_stacks error: {:?}", e); + ConnectorError::HttpError(format!("Failed to list stacks: {}", e)) + })?; + + let text = resp.text().await.map_err(|e| ConnectorError::HttpError(e.to_string()))?; + serde_json::from_str::(&text) + .map(|r| r._items) + .map_err(|_| ConnectorError::InvalidResponse(text)) + } + + async fn user_has_plan( + &self, + user_id: &str, + required_plan_name: &str, + ) -> Result { + let span = tracing::info_span!( + "user_service_check_plan", + user_id = %user_id, + required_plan = %required_plan_name + ); + + // Get user's current plan via /oauth_server/api/me endpoint + let url = format!("{}/oauth_server/api/me", self.base_url); + let mut req = self.http_client.get(&url); + + if let Some(auth) = self.auth_header() { + req = req.header("Authorization", auth); + } + + #[derive(serde::Deserialize)] + struct UserMeResponse { + #[serde(default)] + plan: Option, + } + + #[derive(serde::Deserialize)] + struct PlanInfo { + name: Option, + } + + let resp = req.send() + .instrument(span.clone()) + .await + .map_err(|e| { + tracing::error!("user_has_plan error: {:?}", e); + ConnectorError::HttpError(format!("Failed to check plan: {}", e)) + })?; + + match resp.status().as_u16() { + 200 => { + let text = resp.text().await.map_err(|e| ConnectorError::HttpError(e.to_string()))?; + serde_json::from_str::(&text) + .map(|response| { + let user_plan = response + .plan + .and_then(|p| p.name) + .unwrap_or_default(); + // Check if user's plan matches or is higher tier than required + if user_plan.is_empty() || required_plan_name.is_empty() { + return user_plan == required_plan_name; + } + user_plan == required_plan_name || is_plan_upgrade(&user_plan, required_plan_name) + }) + .map_err(|_| ConnectorError::InvalidResponse(text)) + } + 401 | 403 => { + tracing::debug!(parent: &span, "User not authenticated or authorized"); + Ok(false) + } + 404 => { + tracing::debug!(parent: &span, "User or plan not found"); + Ok(false) + } + _ => Err(ConnectorError::HttpError(format!( + "Unexpected status code: {}", + resp.status() + ))), + } + } + + async fn get_user_plan(&self, user_id: &str) -> Result { + let span = tracing::info_span!("user_service_get_plan", user_id = %user_id); + + // Use /oauth_server/api/me endpoint to get user's current plan via OAuth + let url = format!("{}/oauth_server/api/me", self.base_url); + let mut req = self.http_client.get(&url); + + if let Some(auth) = self.auth_header() { + req = req.header("Authorization", auth); + } + + #[derive(serde::Deserialize)] + struct PlanInfoResponse { + #[serde(default)] + plan: Option, + #[serde(default)] + plan_name: Option, + #[serde(default)] + user_id: Option, + #[serde(default)] + description: Option, + #[serde(default)] + active: Option, + } + + let resp = req.send() + .instrument(span) + .await + .and_then(|resp| resp.error_for_status()) + .map_err(|e| { + tracing::error!("get_user_plan error: {:?}", e); + ConnectorError::HttpError(format!("Failed to get user plan: {}", e)) + })?; + + let text = resp.text().await.map_err(|e| ConnectorError::HttpError(e.to_string()))?; + serde_json::from_str::(&text) + .map(|info| UserPlanInfo { + user_id: info.user_id.unwrap_or_else(|| user_id.to_string()), + plan_name: info.plan.or(info.plan_name).unwrap_or_default(), + plan_description: info.description, + tier: None, + active: info.active.unwrap_or(true), + started_at: None, + expires_at: None, + }) + .map_err(|_| ConnectorError::InvalidResponse(text)) + } + + async fn list_available_plans(&self) -> Result, ConnectorError> { + let span = tracing::info_span!("user_service_list_plans"); + + // Query plan_description via Eve REST API (PostgREST endpoint) + let url = format!("{}/api/1.0/plan_description", self.base_url); + let mut req = self.http_client.get(&url); + + if let Some(auth) = self.auth_header() { + req = req.header("Authorization", auth); + } + + #[derive(serde::Deserialize)] + struct EveResponse { + #[serde(default)] + _items: Vec, + } + + #[derive(serde::Deserialize)] + struct PlanItem { + name: String, + #[serde(default)] + description: Option, + #[serde(default)] + tier: Option, + #[serde(default)] + features: Option, + } + + let resp = req.send() + .instrument(span) + .await + .and_then(|resp| resp.error_for_status()) + .map_err(|e| { + tracing::error!("list_available_plans error: {:?}", e); + ConnectorError::HttpError(format!("Failed to list plans: {}", e)) + })?; + + let text = resp.text().await.map_err(|e| ConnectorError::HttpError(e.to_string()))?; + + // Try Eve format first, fallback to direct array + if let Ok(eve_resp) = serde_json::from_str::(&text) { + Ok(eve_resp._items) + } else { + serde_json::from_str::>(&text) + .map_err(|_| ConnectorError::InvalidResponse(text)) + } + } + + async fn get_user_profile(&self, user_token: &str) -> Result { + let span = tracing::info_span!("user_service_get_profile"); + + // Query /oauth_server/api/me with user's token + let url = format!("{}/oauth_server/api/me", self.base_url); + let req = self + .http_client + .get(&url) + .header("Authorization", format!("Bearer {}", user_token)); + + let resp = req + .send() + .instrument(span.clone()) + .await + .map_err(|e| { + tracing::error!("get_user_profile error: {:?}", e); + ConnectorError::HttpError(format!("Failed to get user profile: {}", e)) + })?; + + if resp.status() == 401 { + return Err(ConnectorError::Unauthorized( + "Invalid or expired user token".to_string(), + )); + } + + let text = resp.text().await.map_err(|e| ConnectorError::HttpError(e.to_string()))?; + serde_json::from_str::(&text) + .map_err(|e| { + tracing::error!("Failed to parse user profile: {:?}", e); + ConnectorError::InvalidResponse(text) + }) + } + + async fn get_template_product( + &self, + stack_template_id: i32, + ) -> Result, ConnectorError> { + let span = tracing::info_span!( + "user_service_get_template_product", + template_id = stack_template_id + ); + + // Query /api/1.0/products?external_id={template_id}&product_type=template + let url = format!( + "{}/api/1.0/products?where={{\"external_id\":{},\"product_type\":\"template\"}}", + self.base_url, stack_template_id + ); + + let mut req = self.http_client.get(&url); + + if let Some(auth) = self.auth_header() { + req = req.header("Authorization", auth); + } + + #[derive(serde::Deserialize)] + struct ProductsResponse { + #[serde(default)] + _items: Vec, + } + + let resp = req + .send() + .instrument(span) + .await + .map_err(|e| { + tracing::error!("get_template_product error: {:?}", e); + ConnectorError::HttpError(format!("Failed to get template product: {}", e)) + })?; + + let text = resp.text().await.map_err(|e| ConnectorError::HttpError(e.to_string()))?; + + // Try Eve format first (with _items wrapper) + if let Ok(products_resp) = serde_json::from_str::(&text) { + Ok(products_resp._items.into_iter().next()) + } else { + // Try direct array format + serde_json::from_str::>(&text) + .map(|mut items| items.pop()) + .map_err(|_| ConnectorError::InvalidResponse(text)) + } + } + + async fn user_owns_template( + &self, + user_token: &str, + stack_template_id: &str, + ) -> Result { + let span = tracing::info_span!( + "user_service_check_template_ownership", + template_id = stack_template_id + ); + + // Get user profile (includes products list) + let profile = self.get_user_profile(user_token).instrument(span.clone()).await?; + + // Try to parse stack_template_id as i32 first (for backward compatibility with integer IDs) + let owns_template = if let Ok(template_id_int) = stack_template_id.parse::() { + profile + .products + .iter() + .any(|p| { + p.product_type == "template" && p.external_id == Some(template_id_int) + }) + } else { + // If not i32, try comparing as string (UUID or slug) + profile + .products + .iter() + .any(|p| { + if p.product_type != "template" { + return false; + } + // Compare with code (slug) + if p.code == stack_template_id { + return true; + } + // Compare with id if available + if let Some(id) = &p.id { + if id == stack_template_id { + return true; + } + } + false + }) + }; + + tracing::info!( + owned = owns_template, + "User template ownership check complete" + ); + + Ok(owns_template) + } + + async fn get_categories(&self) -> Result, ConnectorError> { + let span = tracing::info_span!("user_service_get_categories"); + let url = format!("{}/api/1.0/category", self.base_url); + + let mut attempt = 0; + loop { + attempt += 1; + + let mut req = self.http_client.get(&url); + + if let Some(auth) = self.auth_header() { + req = req.header("Authorization", auth); + } + + match req.send().instrument(span.clone()).await { + Ok(resp) => match resp.status().as_u16() { + 200 => { + let text = resp + .text() + .await + .map_err(|e| ConnectorError::HttpError(e.to_string()))?; + + // User Service returns {_items: [...]} + #[derive(Deserialize)] + struct CategoriesResponse { + #[serde(rename = "_items")] + items: Vec, + } + + return serde_json::from_str::(&text) + .map(|resp| resp.items) + .map_err(|e| { + tracing::error!("Failed to parse categories response: {:?}", e); + ConnectorError::InvalidResponse(text) + }); + } + 404 => { + return Err(ConnectorError::NotFound( + "Category endpoint not found".to_string(), + )); + } + 500..=599 => { + if attempt < self.retry_attempts { + let backoff = std::time::Duration::from_millis( + 100 * 2_u64.pow((attempt - 1) as u32), + ); + tracing::warn!( + "User Service categories request failed with {}, retrying after {:?}", + resp.status(), + backoff + ); + tokio::time::sleep(backoff).await; + continue; + } + return Err(ConnectorError::ServiceUnavailable(format!( + "User Service returned {}: get categories failed", + resp.status() + ))); + } + status => { + return Err(ConnectorError::HttpError(format!( + "Unexpected status code: {}", + status + ))); + } + }, + Err(e) if e.is_timeout() => { + if attempt < self.retry_attempts { + let backoff = + std::time::Duration::from_millis(100 * 2_u64.pow((attempt - 1) as u32)); + tracing::warn!("User Service get categories timeout, retrying after {:?}", backoff); + tokio::time::sleep(backoff).await; + continue; + } + return Err(ConnectorError::ServiceUnavailable( + "Get categories timeout".to_string(), + )); + } + Err(e) => { + return Err(ConnectorError::HttpError(format!( + "Get categories request failed: {}", + e + ))); + } + } + } + } +} + +/// Mock connector for testing/development +pub mod mock { + use super::*; + + /// Mock User Service for testing - always succeeds + pub struct MockUserServiceConnector; + + #[async_trait::async_trait] + impl UserServiceConnector for MockUserServiceConnector { + async fn create_stack_from_template( + &self, + marketplace_template_id: &Uuid, + user_id: &str, + template_version: &str, + name: &str, + _stack_definition: serde_json::Value, + ) -> Result { + Ok(StackResponse { + id: 1, + user_id: user_id.to_string(), + name: name.to_string(), + marketplace_template_id: Some(*marketplace_template_id), + is_from_marketplace: true, + template_version: Some(template_version.to_string()), + }) + } + + async fn get_stack(&self, stack_id: i32, user_id: &str) -> Result { + Ok(StackResponse { + id: stack_id, + user_id: user_id.to_string(), + name: "Test Stack".to_string(), + marketplace_template_id: None, + is_from_marketplace: false, + template_version: None, + }) + } + + async fn list_stacks(&self, user_id: &str) -> Result, ConnectorError> { + Ok(vec![StackResponse { + id: 1, + user_id: user_id.to_string(), + name: "Test Stack".to_string(), + marketplace_template_id: None, + is_from_marketplace: false, + template_version: None, + }]) + } + + async fn user_has_plan( + &self, + _user_id: &str, + _required_plan_name: &str, + ) -> Result { + // Mock always grants access for testing + Ok(true) + } + + async fn get_user_plan(&self, user_id: &str) -> Result { + Ok(UserPlanInfo { + user_id: user_id.to_string(), + plan_name: "professional".to_string(), + plan_description: Some("Professional Plan".to_string()), + tier: Some("pro".to_string()), + active: true, + started_at: Some("2025-01-01T00:00:00Z".to_string()), + expires_at: None, + }) + } + + async fn list_available_plans(&self) -> Result, ConnectorError> { + Ok(vec![ + PlanDefinition { + name: "basic".to_string(), + description: Some("Basic Plan".to_string()), + tier: Some("basic".to_string()), + features: None, + }, + PlanDefinition { + name: "professional".to_string(), + description: Some("Professional Plan".to_string()), + tier: Some("pro".to_string()), + features: None, + }, + PlanDefinition { + name: "enterprise".to_string(), + description: Some("Enterprise Plan".to_string()), + tier: Some("enterprise".to_string()), + features: None, + }, + ]) + } + + async fn get_user_profile(&self, _user_token: &str) -> Result { + Ok(UserProfile { + email: "test@example.com".to_string(), + plan: Some(serde_json::json!({ + "name": "professional", + "date_end": "2026-12-31" + })), + products: vec![ + UserProduct { + id: Some("uuid-plan-pro".to_string()), + name: "Professional Plan".to_string(), + code: "professional".to_string(), + product_type: "plan".to_string(), + external_id: None, + owned_since: Some("2025-01-01T00:00:00Z".to_string()), + }, + UserProduct { + id: Some("uuid-template-ai".to_string()), + name: "AI Agent Stack Pro".to_string(), + code: "ai-agent-stack-pro".to_string(), + product_type: "template".to_string(), + external_id: Some(100), // Mock template ID + owned_since: Some("2025-01-15T00:00:00Z".to_string()), + }, + ], + }) + } + + async fn get_template_product( + &self, + stack_template_id: i32, + ) -> Result, ConnectorError> { + // Return mock product only if template_id is our test ID + if stack_template_id == 100 { + Ok(Some(ProductInfo { + id: "uuid-product-ai".to_string(), + name: "AI Agent Stack Pro".to_string(), + code: "ai-agent-stack-pro".to_string(), + product_type: "template".to_string(), + external_id: Some(100), + price: Some(99.99), + billing_cycle: Some("one_time".to_string()), + currency: Some("USD".to_string()), + vendor_id: Some(456), + is_active: true, + })) + } else { + Ok(None) // No product for other template IDs + } + } + + async fn user_owns_template( + &self, + _user_token: &str, + stack_template_id: &str, + ) -> Result { + // Mock user owns template if ID is "100" or contains "ai-agent" + Ok(stack_template_id == "100" || stack_template_id.contains("ai-agent")) + } + + async fn get_categories(&self) -> Result, ConnectorError> { + // Return mock categories + Ok(vec![ + CategoryInfo { + id: 1, + name: "cms".to_string(), + title: "CMS".to_string(), + priority: Some(1), + }, + CategoryInfo { + id: 2, + name: "ecommerce".to_string(), + title: "E-commerce".to_string(), + priority: Some(2), + }, + CategoryInfo { + id: 5, + name: "ai".to_string(), + title: "AI Agents".to_string(), + priority: Some(5), + }, + ]) + } + } +} + +/// Initialize User Service connector with config from Settings +/// +/// Returns configured connector wrapped in web::Data for injection into Actix app +/// Also spawns background task to sync categories from User Service +/// +/// # Example +/// ```ignore +/// // In startup.rs +/// let user_service = connectors::user_service::init(&settings.connectors, pg_pool.clone()); +/// App::new().app_data(user_service) +/// ``` +pub fn init( + connector_config: &super::config::ConnectorConfig, + pg_pool: web::Data, +) -> web::Data> { + let connector: Arc = if let Some(user_service_config) = + connector_config.user_service.as_ref().filter(|c| c.enabled) + { + let mut config = user_service_config.clone(); + // Load auth token from environment if not set in config + if config.auth_token.is_none() { + config.auth_token = std::env::var("USER_SERVICE_AUTH_TOKEN").ok(); + } + tracing::info!("Initializing User Service connector: {}", config.base_url); + Arc::new(UserServiceClient::new(config)) + } else { + tracing::warn!("User Service connector disabled - using mock"); + Arc::new(mock::MockUserServiceConnector) + }; + + // Spawn background task to sync categories on startup + let connector_clone = connector.clone(); + let pg_pool_clone = pg_pool.clone(); + tokio::spawn(async move { + match connector_clone.get_categories().await { + Ok(categories) => { + tracing::info!("Fetched {} categories from User Service", categories.len()); + match crate::db::marketplace::sync_categories(pg_pool_clone.get_ref(), categories).await { + Ok(count) => tracing::info!("Successfully synced {} categories", count), + Err(e) => tracing::error!("Failed to sync categories to database: {}", e), + } + } + Err(e) => tracing::warn!("Failed to fetch categories from User Service (will retry later): {:?}", e), + } + }); + + web::Data::new(connector) +} + +/// Helper function to determine if a plan tier can access a required plan +/// Basic idea: enterprise >= professional >= basic +fn is_plan_upgrade(user_plan: &str, required_plan: &str) -> bool { + let plan_hierarchy = vec!["basic", "professional", "enterprise"]; + + let user_level = plan_hierarchy.iter().position(|&p| p == user_plan).unwrap_or(0); + let required_level = plan_hierarchy.iter().position(|&p| p == required_plan).unwrap_or(0); + + user_level > required_level +} From c64008c612ca79441badbd17257f201c7b85d7cd Mon Sep 17 00:00:00 2001 From: vsilent Date: Fri, 2 Jan 2026 21:39:58 +0200 Subject: [PATCH 34/35] build on self-hosted, ssl problem --- .github/workflows/docker.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index b0fc4b0..c0bd14b 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -23,6 +23,12 @@ jobs: - name: Checkout sources uses: actions/checkout@v4 + - name: Install OpenSSL build deps + if: runner.os == 'Linux' + run: | + sudo apt-get update + sudo apt-get install -y pkg-config libssl-dev + - name: Verify .sqlx cache exists run: | ls -lh .sqlx/ || echo ".sqlx directory not found" From 9fc5e52ddce14d3a39fb831631cb043b93d0c568 Mon Sep 17 00:00:00 2001 From: vsilent Date: Sat, 3 Jan 2026 13:29:56 +0200 Subject: [PATCH 35/35] Casbin rules allow CRUD templates operations to groupd_admin --- ...03103000_casbin_marketplace_admin_creator_rules.down.sql | 4 ++++ ...0103103000_casbin_marketplace_admin_creator_rules.up.sql | 6 ++++++ 2 files changed, 10 insertions(+) create mode 100644 migrations/20260103103000_casbin_marketplace_admin_creator_rules.down.sql create mode 100644 migrations/20260103103000_casbin_marketplace_admin_creator_rules.up.sql diff --git a/migrations/20260103103000_casbin_marketplace_admin_creator_rules.down.sql b/migrations/20260103103000_casbin_marketplace_admin_creator_rules.down.sql new file mode 100644 index 0000000..c717ab0 --- /dev/null +++ b/migrations/20260103103000_casbin_marketplace_admin_creator_rules.down.sql @@ -0,0 +1,4 @@ +DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_admin' AND v1 = '/api/templates' AND v2 = 'POST'; +DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_admin' AND v1 = '/api/templates/:id' AND v2 = 'PUT'; +DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_admin' AND v1 = '/api/templates/:id/submit' AND v2 = 'POST'; +DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_admin' AND v1 = '/api/templates/mine' AND v2 = 'GET'; diff --git a/migrations/20260103103000_casbin_marketplace_admin_creator_rules.up.sql b/migrations/20260103103000_casbin_marketplace_admin_creator_rules.up.sql new file mode 100644 index 0000000..3553a9a --- /dev/null +++ b/migrations/20260103103000_casbin_marketplace_admin_creator_rules.up.sql @@ -0,0 +1,6 @@ +-- Allow admin service accounts (e.g., root) to call marketplace creator endpoints +-- Admins previously lacked creator privileges which caused 403 responses +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/templates', 'POST', '', '', ''); +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/templates/:id', 'PUT', '', '', ''); +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/templates/:id/submit', 'POST', '', '', ''); +INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/templates/mine', 'GET', '', '', '');