diff --git a/src/services/stack.rs b/.dockerignore
similarity index 100%
rename from src/services/stack.rs
rename to .dockerignore
diff --git a/.env b/.env
index dffc672..3bac035 100644
--- a/.env
+++ b/.env
@@ -1,5 +1,20 @@
-DATABASE_URL=postgres://postgres:postgres@127.0.0.1:5432/stacker
+DATABASE_URL=postgres://postgres:postgres@stackerdb:5432/stacker
POSTGRES_USER=postgres
POSTGRES_PASSWORD=postgres
POSTGRES_DB=stacker
-POSTGRES_PORT=5432
\ No newline at end of file
+POSTGRES_PORT=5432
+SECURITY_KEY=SECURITY_KEY_SHOULD_BE_OF_LEN_32
+
+REDIS_URL=redis://127.0.0.1/
+# SQLX_OFFLINE=true
+
+# Vault Configuration
+VAULT_ADDRESS=http://127.0.0.1:8200
+VAULT_TOKEN=your_vault_token_here
+VAULT_AGENT_PATH_PREFIX=agent
+
+STACKER_CASBIN_RELOAD_ENABLED=true
+STACKER_CASBIN_RELOAD_INTERVAL_SECS=60
+
+STACKER_AGENT_POLL_TIMEOUT_SECS=30
+STACKER_AGENT_POLL_INTERVAL_SECS=2
\ No newline at end of file
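
Note: DATABASE_URL now points at the `stackerdb` compose service, so it only resolves inside the Docker network. A minimal sketch for running outside Docker (POSIX shell; the `server` binary name is taken from the workflow changes below, and a local Postgres on 127.0.0.1 is assumed):

  set -a                          # export every variable assigned while sourcing
  . ./.env                        # simple KEY=VALUE lines, so plain sourcing works
  set +a
  DATABASE_URL=postgres://postgres:postgres@127.0.0.1:5432/stacker \
    cargo run --bin server        # override the compose-only hostname for a local run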
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index bf3ee4c..2b66f12 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -3,19 +3,36 @@ name: Docker CICD
on:
push:
branches:
- - master
+ - main
- testing
+ - dev
pull_request:
branches:
- - master
+ - main
+ - dev
jobs:
- cicd-linux-docker:
+
+ cicd-docker:
name: Cargo and npm build
runs-on: ubuntu-latest
+ #runs-on: self-hosted
+ env:
+ SQLX_OFFLINE: true
steps:
- name: Checkout sources
- uses: actions/checkout@v2
+ uses: actions/checkout@v4
+
+ - name: Install OpenSSL build deps
+ if: runner.os == 'Linux'
+ run: |
+ sudo apt-get update
+ sudo apt-get install -y pkg-config libssl-dev
+
+ - name: Verify .sqlx cache exists
+ run: |
+ ls -lh .sqlx/ || echo ".sqlx directory not found"
+ find .sqlx -type f 2>/dev/null | wc -l
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
@@ -26,7 +43,7 @@ jobs:
components: rustfmt, clippy
- name: Cache cargo registry
- uses: actions/cache@v3.0.7
+ uses: actions/cache@v4
with:
path: ~/.cargo/registry
key: docker-registry-${{ hashFiles('**/Cargo.lock') }}
@@ -35,7 +52,7 @@ jobs:
docker-
- name: Cache cargo index
- uses: actions/cache@v3.0.7
+ uses: actions/cache@v4
with:
path: ~/.cargo/git
key: docker-index-${{ hashFiles('**/Cargo.lock') }}
@@ -48,7 +65,7 @@ jobs:
head -c16 /dev/urandom > src/secret.key
- name: Cache cargo build
- uses: actions/cache@v3.0.7
+ uses: actions/cache@v4
with:
path: target
key: docker-build-${{ hashFiles('**/Cargo.lock') }}
@@ -87,13 +104,14 @@ jobs:
command: clippy
args: -- -D warnings
- - name: Run cargo build
+ - name: Build server (release)
uses: actions-rs/cargo@v1
with:
command: build
- args: --release
+ args: --release --bin server
- name: npm install, build, and test
+ if: ${{ hashFiles('web/package.json') != '' }}
working-directory: ./web
run: |
npm install
@@ -101,7 +119,8 @@ jobs:
# npm test
- name: Archive production artifacts
- uses: actions/upload-artifact@v2
+ if: ${{ hashFiles('web/package.json') != '' }}
+ uses: actions/upload-artifact@v4
with:
name: dist-without-markdown
path: |
@@ -109,32 +128,33 @@ jobs:
!web/dist/**/*.md
- name: Display structure of downloaded files
+ if: ${{ hashFiles('web/package.json') != '' }}
run: ls -R web/dist
- name: Copy app files and zip
run: |
mkdir -p app/stacker/dist
- cp target/release/stacker app/stacker
- cp -a web/dist/. app/stacker
- cp docker/prod/Dockerfile app/Dockerfile
+ cp target/release/server app/stacker/server
+ if [ -d web/dist ]; then cp -a web/dist/. app/stacker; fi
+ cp Dockerfile app/Dockerfile
cd app
touch .env
tar -czvf ../app.tar.gz .
cd ..
- name: Upload app archive for Docker job
- uses: actions/upload-artifact@v2.2.2
+ uses: actions/upload-artifact@v4
with:
name: artifact-linux-docker
path: app.tar.gz
- cicd-docker:
+ cicd-linux-docker:
name: CICD Docker
runs-on: ubuntu-latest
- needs: cicd-linux-docker
+ needs: cicd-docker
steps:
- name: Download app archive
- uses: actions/download-artifact@v2
+ uses: actions/download-artifact@v4
with:
name: artifact-linux-docker
@@ -144,12 +164,21 @@ jobs:
- name: Display structure of downloaded files
run: ls -R
- - name: Docker build and publish
- uses: docker/build-push-action@v1
+ -
+ name: Set up QEMU
+ uses: docker/setup-qemu-action@v3
+ -
+ name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3
+ -
+ name: Login to Docker Hub
+ uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- repository: trydirect/stacker
- add_git_labels: true
- tag_with_ref: true
- #no-cache: true
\ No newline at end of file
+ -
+ name: Build and push
+ uses: docker/build-push-action@v6
+ with:
+ push: true
+ tags: trydirect/stacker:latest
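
Note: both workflows build with SQLX_OFFLINE=true and verify that the committed .sqlx cache exists, so the cache has to be regenerated whenever a query changes. A sketch of doing that locally (sqlx-cli and a running Postgres with the migrations applied are assumptions):

  cargo install sqlx-cli --no-default-features --features rustls,postgres
  export DATABASE_URL=postgres://postgres:postgres@127.0.0.1:5432/stacker
  cargo sqlx prepare -- --all-targets   # rewrites the .sqlx/query-*.json files
  git add .sqlx                         # commit the refreshed cache alongside the query change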
diff --git a/.github/workflows/notifier.yml b/.github/workflows/notifier.yml
index ba3ed81..33822fc 100644
--- a/.github/workflows/notifier.yml
+++ b/.github/workflows/notifier.yml
@@ -9,6 +9,7 @@ jobs:
notifyTelegram:
runs-on: ubuntu-latest
+ concurrency: build
steps:
- name: send custom message
uses: appleboy/telegram-action@master
@@ -16,4 +17,4 @@ jobs:
to: ${{ secrets.TELEGRAM_TO }}
token: ${{ secrets.TELEGRAM_TOKEN }}
message: |
- "Issue ${{ github.event.action }}: \n${{ github.event.issue.html_url }}"
\ No newline at end of file
+ "Github actions on push: build in progress .. ${{ github.event.action }} "
diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index 31000a2..11da4de 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -1,22 +1,81 @@
name: Rust
+permissions:
+ contents: read
on:
push:
- branches: [ "main" ]
+ branches: [ dev, main ]
pull_request:
- branches: [ "main" ]
+ branches: [ dev, main ]
env:
CARGO_TERM_COLOR: always
jobs:
build:
-
- runs-on: ubuntu-latest
-
+ name: Build binaries (Linux/macOS)
+ env:
+ SQLX_OFFLINE: true
+ strategy:
+ matrix:
+ include:
+ - os: ubuntu-latest
+ target: x86_64-unknown-linux-gnu
+ artifact_name: stacker-linux-x86_64
+ - os: macos-latest
+ target: x86_64-apple-darwin
+ artifact_name: stacker-macos-x86_64
+ - os: macos-latest
+ target: aarch64-apple-darwin
+ artifact_name: stacker-macos-aarch64
+ runs-on: ${{ matrix.os }}
steps:
- - uses: actions/checkout@v3
- - name: Build
- run: cargo build --verbose
- - name: Run tests
- run: cargo test --verbose
+ - uses: actions/checkout@v4
+ - name: Verify .sqlx cache exists
+ run: |
+ ls -lh .sqlx/ || echo ".sqlx directory not found"
+ find .sqlx -type f 2>/dev/null | wc -l
+ - name: Install Rust toolchain
+ uses: actions-rs/toolchain@v1
+ with:
+ toolchain: stable
+ target: ${{ matrix.target }}
+ override: true
+ - name: Cache cargo registry
+ uses: actions/cache@v4
+ with:
+ path: ~/.cargo/registry
+ key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
+ restore-keys: |
+ ${{ runner.os }}-cargo-registry-
+ - name: Cache cargo index
+ uses: actions/cache@v4
+ with:
+ path: ~/.cargo/git
+ key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
+ restore-keys: |
+ ${{ runner.os }}-cargo-index-
+ - name: Cache target directory
+ uses: actions/cache@v4
+ with:
+ path: target
+ key: ${{ runner.os }}-target-${{ matrix.target }}-${{ hashFiles('**/Cargo.lock') }}
+ restore-keys: |
+ ${{ runner.os }}-target-${{ matrix.target }}-
+ - name: Build server (release)
+ run: cargo build --release --target ${{ matrix.target }} --bin server --verbose
+
+ - name: Build console (release with features)
+ run: cargo build --release --target ${{ matrix.target }} --bin console --features explain --verbose
+ - name: Prepare binaries
+ run: |
+ mkdir -p artifacts
+ cp target/${{ matrix.target }}/release/server artifacts/server
+ cp target/${{ matrix.target }}/release/console artifacts/console
+ tar -czf ${{ matrix.artifact_name }}.tar.gz -C artifacts .
+ - name: Upload binaries
+ uses: actions/upload-artifact@v4
+ with:
+ name: ${{ matrix.artifact_name }}
+ path: ${{ matrix.artifact_name }}.tar.gz
+ retention-days: 7
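
Note: each matrix leg can be reproduced locally with the same flags; a sketch for the aarch64 macOS entry (macOS host with rustup assumed, commands mirror the steps above):

  rustup target add aarch64-apple-darwin
  SQLX_OFFLINE=true cargo build --release --target aarch64-apple-darwin --bin server
  SQLX_OFFLINE=true cargo build --release --target aarch64-apple-darwin --bin console --features explain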
diff --git a/.gitignore b/.gitignore
index c507849..ad0581e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,10 @@
target
-.idea
+.idea/
+files
+access_control.conf
+configuration.yaml
+configuration.yaml.backup
+configuration.yaml.orig
+.vscode/
+.env
+docs/*.sql
\ No newline at end of file
diff --git a/.idea/.gitignore b/.idea/.gitignore
deleted file mode 100644
index 26d3352..0000000
--- a/.idea/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-# Default ignored files
-/shelf/
-/workspace.xml
diff --git a/.idea/dataSources.xml b/.idea/dataSources.xml
deleted file mode 100644
index a9f6743..0000000
--- a/.idea/dataSources.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-
-
- postgresql
- true
- org.postgresql.Driver
- jdbc:postgresql://localhost:5432/stacker
- $ProjectFileDir$
-
-
-
\ No newline at end of file
diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml
deleted file mode 100644
index 105ce2d..0000000
--- a/.idea/inspectionProfiles/profiles_settings.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-
-
-
-
-
-
\ No newline at end of file
diff --git a/.idea/misc.xml b/.idea/misc.xml
deleted file mode 100644
index 812ab5a..0000000
--- a/.idea/misc.xml
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
deleted file mode 100644
index 7ad61f2..0000000
--- a/.idea/modules.xml
+++ /dev/null
@@ -1,8 +0,0 @@
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/.idea/sqldialects.xml b/.idea/sqldialects.xml
deleted file mode 100644
index 7692097..0000000
--- a/.idea/sqldialects.xml
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/.idea/stacker.iml b/.idea/stacker.iml
deleted file mode 100644
index 227e58a..0000000
--- a/.idea/stacker.iml
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
deleted file mode 100644
index 94a25f7..0000000
--- a/.idea/vcs.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-
-
-
-
-
-
\ No newline at end of file
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..99ebb1c
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,7 @@
+repos:
+ - repo: https://github.com/gitguardian/ggshield
+ rev: v1.28.0
+ hooks:
+ - id: ggshield
+ language_version: python3
+ stages: [commit]
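
Note: the ggshield hook only runs for contributors who have pre-commit wired up. A sketch of the one-time setup (pip-installed pre-commit and a GitGuardian API key are assumptions):

  pip install pre-commit
  pre-commit install                 # registers the ggshield secret scan for the commit stage
  export GITGUARDIAN_API_KEY=...     # ggshield reads its key from this variable; value omitted here
  pre-commit run --all-files         # optional: scan the existing tree once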
diff --git a/.sqlx/query-0a1da2fad9e02675e88f31a77fc43010c534673240007b76da8b92288c5223e9.json b/.sqlx/query-0a1da2fad9e02675e88f31a77fc43010c534673240007b76da8b92288c5223e9.json
new file mode 100644
index 0000000..f4f076b
--- /dev/null
+++ b/.sqlx/query-0a1da2fad9e02675e88f31a77fc43010c534673240007b76da8b92288c5223e9.json
@@ -0,0 +1,104 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n UPDATE server\n SET\n user_id=$2,\n project_id=$3,\n region=$4,\n zone=$5,\n server=$6,\n os=$7,\n disk_type=$8,\n updated_at=NOW() at time zone 'utc',\n srv_ip=$9,\n ssh_user=$10,\n ssh_port=$11\n WHERE id = $1\n RETURNING *\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "project_id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 3,
+ "name": "region",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "zone",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "server",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 6,
+ "name": "os",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 7,
+ "name": "disk_type",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 8,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 9,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 10,
+ "name": "srv_ip",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 11,
+ "name": "ssh_user",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 12,
+ "name": "ssh_port",
+ "type_info": "Int4"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Int4",
+ "Varchar",
+ "Int4",
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Int4"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ true,
+ true,
+ false,
+ false,
+ true,
+ true,
+ true
+ ]
+ },
+ "hash": "0a1da2fad9e02675e88f31a77fc43010c534673240007b76da8b92288c5223e9"
+}
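
Note: the .sqlx entries below are generated by `cargo sqlx prepare` (see the note after the docker workflow above); the hash in each file name should match the embedded "hash" field, which gives a quick way to spot hand-edited or stale entries. A sketch of that check (jq assumed available):

  for f in .sqlx/query-*.json; do
    embedded=$(jq -r .hash "$f")
    name=$(basename "$f" .json); name=${name#query-}
    [ "$embedded" = "$name" ] || echo "stale or edited cache entry: $f"
  done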
diff --git a/.sqlx/query-0bb6c35cba6f3c5573cf45c42b93709286b2a50446caa2a609aaf77af12b30bb.json b/.sqlx/query-0bb6c35cba6f3c5573cf45c42b93709286b2a50446caa2a609aaf77af12b30bb.json
new file mode 100644
index 0000000..5f0a36e
--- /dev/null
+++ b/.sqlx/query-0bb6c35cba6f3c5573cf45c42b93709286b2a50446caa2a609aaf77af12b30bb.json
@@ -0,0 +1,17 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "INSERT INTO stack_template_review (template_id, reviewer_user_id, decision, review_reason, reviewed_at) VALUES ($1::uuid, $2, $3, $4, now())",
+ "describe": {
+ "columns": [],
+ "parameters": {
+ "Left": [
+ "Uuid",
+ "Varchar",
+ "Varchar",
+ "Text"
+ ]
+ },
+ "nullable": []
+ },
+ "hash": "0bb6c35cba6f3c5573cf45c42b93709286b2a50446caa2a609aaf77af12b30bb"
+}
diff --git a/.sqlx/query-0dab58aa1022e2c1f4320f232195f54d89279057657c92305f606522fa142cf7.json b/.sqlx/query-0dab58aa1022e2c1f4320f232195f54d89279057657c92305f606522fa142cf7.json
new file mode 100644
index 0000000..3e6250a
--- /dev/null
+++ b/.sqlx/query-0dab58aa1022e2c1f4320f232195f54d89279057657c92305f606522fa142cf7.json
@@ -0,0 +1,14 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "UPDATE stack_template_version SET is_latest = false WHERE template_id = $1 AND is_latest = true",
+ "describe": {
+ "columns": [],
+ "parameters": {
+ "Left": [
+ "Uuid"
+ ]
+ },
+ "nullable": []
+ },
+ "hash": "0dab58aa1022e2c1f4320f232195f54d89279057657c92305f606522fa142cf7"
+}
diff --git a/.sqlx/query-0f9023a3cea267596e9f99b3887012242345a8b4e4f9d838dc6d44cc34a89433.json b/.sqlx/query-0f9023a3cea267596e9f99b3887012242345a8b4e4f9d838dc6d44cc34a89433.json
new file mode 100644
index 0000000..a4c80ab
--- /dev/null
+++ b/.sqlx/query-0f9023a3cea267596e9f99b3887012242345a8b4e4f9d838dc6d44cc34a89433.json
@@ -0,0 +1,46 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT\n *\n FROM agreement\n WHERE id=$1\n LIMIT 1\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "name",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "text",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 3,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 4,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Int4"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false
+ ]
+ },
+ "hash": "0f9023a3cea267596e9f99b3887012242345a8b4e4f9d838dc6d44cc34a89433"
+}
diff --git a/.sqlx/query-0faf1a2932ba1b37fc9f982bc86c323869489c6dc7e17479b647f0aa799df910.json b/.sqlx/query-0faf1a2932ba1b37fc9f982bc86c323869489c6dc7e17479b647f0aa799df910.json
new file mode 100644
index 0000000..5b7cb8e
--- /dev/null
+++ b/.sqlx/query-0faf1a2932ba1b37fc9f982bc86c323869489c6dc7e17479b647f0aa799df910.json
@@ -0,0 +1,14 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "UPDATE stack_template SET status = 'submitted' WHERE id = $1::uuid AND status IN ('draft','rejected')",
+ "describe": {
+ "columns": [],
+ "parameters": {
+ "Left": [
+ "Uuid"
+ ]
+ },
+ "nullable": []
+ },
+ "hash": "0faf1a2932ba1b37fc9f982bc86c323869489c6dc7e17479b647f0aa799df910"
+}
diff --git a/.sqlx/query-172dbb0c3947fa99e8522510096cd8dbfd785bb982a0622d3c05afb2ab3e260f.json b/.sqlx/query-172dbb0c3947fa99e8522510096cd8dbfd785bb982a0622d3c05afb2ab3e260f.json
new file mode 100644
index 0000000..963dd77
--- /dev/null
+++ b/.sqlx/query-172dbb0c3947fa99e8522510096cd8dbfd785bb982a0622d3c05afb2ab3e260f.json
@@ -0,0 +1,76 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT id, project_id, deployment_hash, user_id, deleted, status, metadata,\n last_seen_at, created_at, updated_at\n FROM deployment\n WHERE id=$1\n LIMIT 1\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "project_id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 2,
+ "name": "deployment_hash",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "deleted",
+ "type_info": "Bool"
+ },
+ {
+ "ordinal": 5,
+ "name": "status",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 6,
+ "name": "metadata",
+ "type_info": "Json"
+ },
+ {
+ "ordinal": 7,
+ "name": "last_seen_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 8,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 9,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Int4"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ true,
+ true,
+ false,
+ false,
+ true,
+ false,
+ false
+ ]
+ },
+ "hash": "172dbb0c3947fa99e8522510096cd8dbfd785bb982a0622d3c05afb2ab3e260f"
+}
diff --git a/.sqlx/query-17f59e9f273d48aaf85b09c227f298f6d6f6f231554d80ed621076157af7f80a.json b/.sqlx/query-17f59e9f273d48aaf85b09c227f298f6d6f6f231554d80ed621076157af7f80a.json
new file mode 100644
index 0000000..c0f6288
--- /dev/null
+++ b/.sqlx/query-17f59e9f273d48aaf85b09c227f298f6d6f6f231554d80ed621076157af7f80a.json
@@ -0,0 +1,25 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n INSERT INTO agreement (name, text, created_at, updated_at)\n VALUES ($1, $2, $3, $4)\n RETURNING id;\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Varchar",
+ "Text",
+ "Timestamptz",
+ "Timestamptz"
+ ]
+ },
+ "nullable": [
+ false
+ ]
+ },
+ "hash": "17f59e9f273d48aaf85b09c227f298f6d6f6f231554d80ed621076157af7f80a"
+}
diff --git a/.sqlx/query-1f1b8182d59d8253662da0ea73b69b6857e5f3c8f4292ba9c4491e062591575b.json b/.sqlx/query-1f1b8182d59d8253662da0ea73b69b6857e5f3c8f4292ba9c4491e062591575b.json
new file mode 100644
index 0000000..4fe673b
--- /dev/null
+++ b/.sqlx/query-1f1b8182d59d8253662da0ea73b69b6857e5f3c8f4292ba9c4491e062591575b.json
@@ -0,0 +1,28 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n INSERT INTO project (stack_id, user_id, name, metadata, created_at, updated_at, request_json)\n VALUES ($1, $2, $3, $4, $5, $6, $7)\n RETURNING id;\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Uuid",
+ "Varchar",
+ "Text",
+ "Json",
+ "Timestamptz",
+ "Timestamptz",
+ "Json"
+ ]
+ },
+ "nullable": [
+ false
+ ]
+ },
+ "hash": "1f1b8182d59d8253662da0ea73b69b6857e5f3c8f4292ba9c4491e062591575b"
+}
diff --git a/.sqlx/query-2c7065ccf4a0a527087754db39a2077a054026cb2bc0c010aba218506e76110f.json b/.sqlx/query-2c7065ccf4a0a527087754db39a2077a054026cb2bc0c010aba218506e76110f.json
new file mode 100644
index 0000000..4c5595e
--- /dev/null
+++ b/.sqlx/query-2c7065ccf4a0a527087754db39a2077a054026cb2bc0c010aba218506e76110f.json
@@ -0,0 +1,76 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT\n *\n FROM project\n WHERE user_id=$1\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "stack_id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 2,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "name",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 4,
+ "name": "metadata",
+ "type_info": "Json"
+ },
+ {
+ "ordinal": 5,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 6,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 7,
+ "name": "request_json",
+ "type_info": "Json"
+ },
+ {
+ "ordinal": 8,
+ "name": "source_template_id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 9,
+ "name": "template_version",
+ "type_info": "Varchar"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ true,
+ true
+ ]
+ },
+ "hash": "2c7065ccf4a0a527087754db39a2077a054026cb2bc0c010aba218506e76110f"
+}
diff --git a/.sqlx/query-309c79e9f4b28e19488e71ca49974e0c9173f355d69459333acf181ff2a82a1c.json b/.sqlx/query-309c79e9f4b28e19488e71ca49974e0c9173f355d69459333acf181ff2a82a1c.json
new file mode 100644
index 0000000..1e22508
--- /dev/null
+++ b/.sqlx/query-309c79e9f4b28e19488e71ca49974e0c9173f355d69459333acf181ff2a82a1c.json
@@ -0,0 +1,15 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n UPDATE agents \n SET last_heartbeat = NOW(), status = $2, updated_at = NOW()\n WHERE id = $1\n ",
+ "describe": {
+ "columns": [],
+ "parameters": {
+ "Left": [
+ "Uuid",
+ "Varchar"
+ ]
+ },
+ "nullable": []
+ },
+ "hash": "309c79e9f4b28e19488e71ca49974e0c9173f355d69459333acf181ff2a82a1c"
+}
diff --git a/.sqlx/query-327394e1777395afda4a1f6c1ca07431de81f886f6a8d6e0fbcd7b6633d30b98.json b/.sqlx/query-327394e1777395afda4a1f6c1ca07431de81f886f6a8d6e0fbcd7b6633d30b98.json
new file mode 100644
index 0000000..4916207
--- /dev/null
+++ b/.sqlx/query-327394e1777395afda4a1f6c1ca07431de81f886f6a8d6e0fbcd7b6633d30b98.json
@@ -0,0 +1,100 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT id, command_id, deployment_hash, type, status, priority,\n parameters, result, error, created_by, created_at, updated_at,\n timeout_seconds, metadata\n FROM commands\n WHERE deployment_hash = $1\n ORDER BY created_at DESC\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 1,
+ "name": "command_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "deployment_hash",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "type",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "status",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "priority",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 6,
+ "name": "parameters",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 7,
+ "name": "result",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 8,
+ "name": "error",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 9,
+ "name": "created_by",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 10,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 11,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 12,
+ "name": "timeout_seconds",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 13,
+ "name": "metadata",
+ "type_info": "Jsonb"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ false,
+ false,
+ false,
+ true,
+ true
+ ]
+ },
+ "hash": "327394e1777395afda4a1f6c1ca07431de81f886f6a8d6e0fbcd7b6633d30b98"
+}
diff --git a/.sqlx/query-32d118e607db4364979c52831e0c30a215779928a041ef51e93383e93288aac2.json b/.sqlx/query-32d118e607db4364979c52831e0c30a215779928a041ef51e93383e93288aac2.json
new file mode 100644
index 0000000..e23eb43
--- /dev/null
+++ b/.sqlx/query-32d118e607db4364979c52831e0c30a215779928a041ef51e93383e93288aac2.json
@@ -0,0 +1,70 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "SELECT * FROM cloud WHERE id=$1 LIMIT 1 ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "provider",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "cloud_token",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "cloud_key",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "cloud_secret",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 6,
+ "name": "save_token",
+ "type_info": "Bool"
+ },
+ {
+ "ordinal": 7,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 8,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Int4"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ true,
+ false,
+ false
+ ]
+ },
+ "hash": "32d118e607db4364979c52831e0c30a215779928a041ef51e93383e93288aac2"
+}
diff --git a/.sqlx/query-36f6c8ba5c553e6c13d0041482910bc38e48635c4df0c73c211d345a26cccf4e.json b/.sqlx/query-36f6c8ba5c553e6c13d0041482910bc38e48635c4df0c73c211d345a26cccf4e.json
new file mode 100644
index 0000000..fbcc830
--- /dev/null
+++ b/.sqlx/query-36f6c8ba5c553e6c13d0041482910bc38e48635c4df0c73c211d345a26cccf4e.json
@@ -0,0 +1,46 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT\n *\n FROM agreement\n WHERE name=$1\n LIMIT 1\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "name",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "text",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 3,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 4,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false
+ ]
+ },
+ "hash": "36f6c8ba5c553e6c13d0041482910bc38e48635c4df0c73c211d345a26cccf4e"
+}
diff --git a/.sqlx/query-3b6ec5ef58cb3b234d8c8d45641339d172624d59fff7494f1929c8fe37f564a4.json b/.sqlx/query-3b6ec5ef58cb3b234d8c8d45641339d172624d59fff7494f1929c8fe37f564a4.json
new file mode 100644
index 0000000..bbcd341
--- /dev/null
+++ b/.sqlx/query-3b6ec5ef58cb3b234d8c8d45641339d172624d59fff7494f1929c8fe37f564a4.json
@@ -0,0 +1,34 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT\n id,\n user_id,\n secret \n FROM client c\n WHERE c.id = $1\n LIMIT 1\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "secret",
+ "type_info": "Varchar"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Int4"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ true
+ ]
+ },
+ "hash": "3b6ec5ef58cb3b234d8c8d45641339d172624d59fff7494f1929c8fe37f564a4"
+}
diff --git a/.sqlx/query-3dd9013b8856be2d991a656c3cdd77692bd1a336be4d06ff6e0ac6831164617e.json b/.sqlx/query-3dd9013b8856be2d991a656c3cdd77692bd1a336be4d06ff6e0ac6831164617e.json
new file mode 100644
index 0000000..f8f958e
--- /dev/null
+++ b/.sqlx/query-3dd9013b8856be2d991a656c3cdd77692bd1a336be4d06ff6e0ac6831164617e.json
@@ -0,0 +1,76 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT\n *\n FROM project\n WHERE name=$1\n LIMIT 1\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "stack_id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 2,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "name",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 4,
+ "name": "metadata",
+ "type_info": "Json"
+ },
+ {
+ "ordinal": 5,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 6,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 7,
+ "name": "request_json",
+ "type_info": "Json"
+ },
+ {
+ "ordinal": 8,
+ "name": "source_template_id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 9,
+ "name": "template_version",
+ "type_info": "Varchar"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ true,
+ true
+ ]
+ },
+ "hash": "3dd9013b8856be2d991a656c3cdd77692bd1a336be4d06ff6e0ac6831164617e"
+}
diff --git a/.sqlx/query-3efacedb58ab13dad5eeaa4454a4d82beb1dedc0f62405d008f18045df981277.json b/.sqlx/query-3efacedb58ab13dad5eeaa4454a4d82beb1dedc0f62405d008f18045df981277.json
new file mode 100644
index 0000000..ec0c073
--- /dev/null
+++ b/.sqlx/query-3efacedb58ab13dad5eeaa4454a4d82beb1dedc0f62405d008f18045df981277.json
@@ -0,0 +1,22 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "SELECT creator_user_id FROM stack_template WHERE id = $1",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "creator_user_id",
+ "type_info": "Varchar"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Uuid"
+ ]
+ },
+ "nullable": [
+ false
+ ]
+ },
+ "hash": "3efacedb58ab13dad5eeaa4454a4d82beb1dedc0f62405d008f18045df981277"
+}
diff --git a/.sqlx/query-41edb5195e8e68b8c80c8412f5bb93cf4838bd1e7e668dafd0fffbd13c90d5aa.json b/.sqlx/query-41edb5195e8e68b8c80c8412f5bb93cf4838bd1e7e668dafd0fffbd13c90d5aa.json
new file mode 100644
index 0000000..6af6017
--- /dev/null
+++ b/.sqlx/query-41edb5195e8e68b8c80c8412f5bb93cf4838bd1e7e668dafd0fffbd13c90d5aa.json
@@ -0,0 +1,14 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n DELETE FROM command_queue\n WHERE command_id = $1\n ",
+ "describe": {
+ "columns": [],
+ "parameters": {
+ "Left": [
+ "Text"
+ ]
+ },
+ "nullable": []
+ },
+ "hash": "41edb5195e8e68b8c80c8412f5bb93cf4838bd1e7e668dafd0fffbd13c90d5aa"
+}
diff --git a/.sqlx/query-4bdcd8d475ffd8aab728ec2b9d0d8c578770e2d52bf531de6e69561a4adbb21c.json b/.sqlx/query-4bdcd8d475ffd8aab728ec2b9d0d8c578770e2d52bf531de6e69561a4adbb21c.json
new file mode 100644
index 0000000..35db09e
--- /dev/null
+++ b/.sqlx/query-4bdcd8d475ffd8aab728ec2b9d0d8c578770e2d52bf531de6e69561a4adbb21c.json
@@ -0,0 +1,94 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT\n *\n FROM server\n WHERE user_id=$1\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "project_id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 3,
+ "name": "region",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "zone",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "server",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 6,
+ "name": "os",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 7,
+ "name": "disk_type",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 8,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 9,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 10,
+ "name": "srv_ip",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 11,
+ "name": "ssh_user",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 12,
+ "name": "ssh_port",
+ "type_info": "Int4"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ true,
+ true,
+ false,
+ false,
+ true,
+ true,
+ true
+ ]
+ },
+ "hash": "4bdcd8d475ffd8aab728ec2b9d0d8c578770e2d52bf531de6e69561a4adbb21c"
+}
diff --git a/.sqlx/query-4e375cca55b0f106578474e5736094044e237999123952be7c78b46c937b8778.json b/.sqlx/query-4e375cca55b0f106578474e5736094044e237999123952be7c78b46c937b8778.json
new file mode 100644
index 0000000..09cd0c0
--- /dev/null
+++ b/.sqlx/query-4e375cca55b0f106578474e5736094044e237999123952be7c78b46c937b8778.json
@@ -0,0 +1,100 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n UPDATE commands\n SET status = 'cancelled', updated_at = NOW()\n WHERE command_id = $1\n RETURNING id, command_id, deployment_hash, type, status, priority,\n parameters, result, error, created_by, created_at, updated_at,\n timeout_seconds, metadata\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 1,
+ "name": "command_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "deployment_hash",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "type",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "status",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "priority",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 6,
+ "name": "parameters",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 7,
+ "name": "result",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 8,
+ "name": "error",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 9,
+ "name": "created_by",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 10,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 11,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 12,
+ "name": "timeout_seconds",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 13,
+ "name": "metadata",
+ "type_info": "Jsonb"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ false,
+ false,
+ false,
+ true,
+ true
+ ]
+ },
+ "hash": "4e375cca55b0f106578474e5736094044e237999123952be7c78b46c937b8778"
+}
diff --git a/.sqlx/query-4ed4ce17b28e36898d9afabb96b7043ceee664f67752c41bf06df6e51ed69362.json b/.sqlx/query-4ed4ce17b28e36898d9afabb96b7043ceee664f67752c41bf06df6e51ed69362.json
new file mode 100644
index 0000000..c3f8828
--- /dev/null
+++ b/.sqlx/query-4ed4ce17b28e36898d9afabb96b7043ceee664f67752c41bf06df6e51ed69362.json
@@ -0,0 +1,138 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "INSERT INTO stack_template (\n creator_user_id, creator_name, name, slug,\n short_description, long_description, category_id,\n tags, tech_stack, status\n ) VALUES ($1,$2,$3,$4,$5,$6,(SELECT id FROM stack_category WHERE name = $7),$8,$9,'draft')\n RETURNING \n id,\n creator_user_id,\n creator_name,\n name,\n slug,\n short_description,\n long_description,\n (SELECT name FROM stack_category WHERE id = category_id) AS \"category_code?\",\n product_id,\n tags,\n tech_stack,\n status,\n is_configurable,\n view_count,\n deploy_count,\n required_plan_name,\n created_at,\n updated_at,\n approved_at\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 1,
+ "name": "creator_user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "creator_name",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "name",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "slug",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "short_description",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 6,
+ "name": "long_description",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 7,
+ "name": "category_code?",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 8,
+ "name": "product_id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 9,
+ "name": "tags",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 10,
+ "name": "tech_stack",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 11,
+ "name": "status",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 12,
+ "name": "is_configurable",
+ "type_info": "Bool"
+ },
+ {
+ "ordinal": 13,
+ "name": "view_count",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 14,
+ "name": "deploy_count",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 15,
+ "name": "required_plan_name",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 16,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 17,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 18,
+ "name": "approved_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Text",
+ "Text",
+ "Text",
+ "Jsonb",
+ "Jsonb"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ true,
+ false,
+ false,
+ true,
+ true,
+ null,
+ true,
+ true,
+ true,
+ false,
+ true,
+ true,
+ true,
+ true,
+ true,
+ true,
+ true
+ ]
+ },
+ "hash": "4ed4ce17b28e36898d9afabb96b7043ceee664f67752c41bf06df6e51ed69362"
+}
diff --git a/.sqlx/query-4f54a93856a693345a9f63552dabf3192c3108a2776bb56f36787af3fa884554.json b/.sqlx/query-4f54a93856a693345a9f63552dabf3192c3108a2776bb56f36787af3fa884554.json
new file mode 100644
index 0000000..f76fff6
--- /dev/null
+++ b/.sqlx/query-4f54a93856a693345a9f63552dabf3192c3108a2776bb56f36787af3fa884554.json
@@ -0,0 +1,14 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n DELETE FROM agents WHERE id = $1\n ",
+ "describe": {
+ "columns": [],
+ "parameters": {
+ "Left": [
+ "Uuid"
+ ]
+ },
+ "nullable": []
+ },
+ "hash": "4f54a93856a693345a9f63552dabf3192c3108a2776bb56f36787af3fa884554"
+}
diff --git a/.sqlx/query-4fbb395f2080f29291ea091d2c4135b962e41b4e5b49d20e9d5fee3da051aeba.json b/.sqlx/query-4fbb395f2080f29291ea091d2c4135b962e41b4e5b49d20e9d5fee3da051aeba.json
new file mode 100644
index 0000000..49c82f0
--- /dev/null
+++ b/.sqlx/query-4fbb395f2080f29291ea091d2c4135b962e41b4e5b49d20e9d5fee3da051aeba.json
@@ -0,0 +1,130 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "SELECT \n t.id,\n t.creator_user_id,\n t.creator_name,\n t.name,\n t.slug,\n t.short_description,\n t.long_description,\n c.name AS \"category_code?\",\n t.product_id,\n t.tags,\n t.tech_stack,\n t.status,\n t.is_configurable,\n t.view_count,\n t.deploy_count,\n t.required_plan_name,\n t.created_at,\n t.updated_at,\n t.approved_at\n FROM stack_template t\n LEFT JOIN stack_category c ON t.category_id = c.id\n WHERE t.creator_user_id = $1\n ORDER BY t.created_at DESC",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 1,
+ "name": "creator_user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "creator_name",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "name",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "slug",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "short_description",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 6,
+ "name": "long_description",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 7,
+ "name": "category_code?",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 8,
+ "name": "product_id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 9,
+ "name": "tags",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 10,
+ "name": "tech_stack",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 11,
+ "name": "status",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 12,
+ "name": "is_configurable",
+ "type_info": "Bool"
+ },
+ {
+ "ordinal": 13,
+ "name": "view_count",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 14,
+ "name": "deploy_count",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 15,
+ "name": "required_plan_name",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 16,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 17,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 18,
+ "name": "approved_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ true,
+ false,
+ false,
+ true,
+ true,
+ false,
+ true,
+ true,
+ true,
+ false,
+ true,
+ true,
+ true,
+ true,
+ true,
+ true,
+ true
+ ]
+ },
+ "hash": "4fbb395f2080f29291ea091d2c4135b962e41b4e5b49d20e9d5fee3da051aeba"
+}
diff --git a/.sqlx/query-546d2bb7ff653c0ae1f6dcc5e68b12a670230de592557d27159acd2fc09400c6.json b/.sqlx/query-546d2bb7ff653c0ae1f6dcc5e68b12a670230de592557d27159acd2fc09400c6.json
new file mode 100644
index 0000000..a6cbf2b
--- /dev/null
+++ b/.sqlx/query-546d2bb7ff653c0ae1f6dcc5e68b12a670230de592557d27159acd2fc09400c6.json
@@ -0,0 +1,76 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT id, project_id, deployment_hash, user_id, deleted, status, metadata,\n last_seen_at, created_at, updated_at\n FROM deployment\n WHERE deployment_hash = $1\n LIMIT 1\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "project_id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 2,
+ "name": "deployment_hash",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "deleted",
+ "type_info": "Bool"
+ },
+ {
+ "ordinal": 5,
+ "name": "status",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 6,
+ "name": "metadata",
+ "type_info": "Json"
+ },
+ {
+ "ordinal": 7,
+ "name": "last_seen_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 8,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 9,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ true,
+ true,
+ false,
+ false,
+ true,
+ false,
+ false
+ ]
+ },
+ "hash": "546d2bb7ff653c0ae1f6dcc5e68b12a670230de592557d27159acd2fc09400c6"
+}
diff --git a/.sqlx/query-55e886a505d00b70674a19fd3228915ab4494cbd7058fdec868ab93c0fcfb4d8.json b/.sqlx/query-55e886a505d00b70674a19fd3228915ab4494cbd7058fdec868ab93c0fcfb4d8.json
new file mode 100644
index 0000000..bd0e16f
--- /dev/null
+++ b/.sqlx/query-55e886a505d00b70674a19fd3228915ab4494cbd7058fdec868ab93c0fcfb4d8.json
@@ -0,0 +1,17 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n UPDATE rating\n SET \n comment=$1,\n rate=$2,\n hidden=$3,\n updated_at=NOW() at time zone 'utc'\n WHERE id = $4\n ",
+ "describe": {
+ "columns": [],
+ "parameters": {
+ "Left": [
+ "Text",
+ "Int4",
+ "Bool",
+ "Int4"
+ ]
+ },
+ "nullable": []
+ },
+ "hash": "55e886a505d00b70674a19fd3228915ab4494cbd7058fdec868ab93c0fcfb4d8"
+}
diff --git a/.sqlx/query-5bf9f8aacbe676339d0811d305abace6cc4a4d068392f7b58f2d165042ab509e.json b/.sqlx/query-5bf9f8aacbe676339d0811d305abace6cc4a4d068392f7b58f2d165042ab509e.json
new file mode 100644
index 0000000..e01c813
--- /dev/null
+++ b/.sqlx/query-5bf9f8aacbe676339d0811d305abace6cc4a4d068392f7b58f2d165042ab509e.json
@@ -0,0 +1,16 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "UPDATE stack_template SET status = $2, approved_at = CASE WHEN $3 THEN now() ELSE approved_at END WHERE id = $1::uuid",
+ "describe": {
+ "columns": [],
+ "parameters": {
+ "Left": [
+ "Uuid",
+ "Varchar",
+ "Bool"
+ ]
+ },
+ "nullable": []
+ },
+ "hash": "5bf9f8aacbe676339d0811d305abace6cc4a4d068392f7b58f2d165042ab509e"
+}
diff --git a/.sqlx/query-5fea60d7574cfd238a7cbae4d93423869bd7b79dd5b246d80f0b6f39ce4659dc.json b/.sqlx/query-5fea60d7574cfd238a7cbae4d93423869bd7b79dd5b246d80f0b6f39ce4659dc.json
new file mode 100644
index 0000000..cd18bf7
--- /dev/null
+++ b/.sqlx/query-5fea60d7574cfd238a7cbae4d93423869bd7b79dd5b246d80f0b6f39ce4659dc.json
@@ -0,0 +1,76 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT\n *\n FROM project\n WHERE id=$1\n LIMIT 1\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "stack_id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 2,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "name",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 4,
+ "name": "metadata",
+ "type_info": "Json"
+ },
+ {
+ "ordinal": 5,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 6,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 7,
+ "name": "request_json",
+ "type_info": "Json"
+ },
+ {
+ "ordinal": 8,
+ "name": "source_template_id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 9,
+ "name": "template_version",
+ "type_info": "Varchar"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Int4"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ true,
+ true
+ ]
+ },
+ "hash": "5fea60d7574cfd238a7cbae4d93423869bd7b79dd5b246d80f0b6f39ce4659dc"
+}
diff --git a/.sqlx/query-6cdfab7ffca4a98abcd7fb2325289ccf3035f08340bf80a345ff74570cd62043.json b/.sqlx/query-6cdfab7ffca4a98abcd7fb2325289ccf3035f08340bf80a345ff74570cd62043.json
new file mode 100644
index 0000000..2bbb52c
--- /dev/null
+++ b/.sqlx/query-6cdfab7ffca4a98abcd7fb2325289ccf3035f08340bf80a345ff74570cd62043.json
@@ -0,0 +1,103 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n UPDATE commands\n SET status = $2, result = $3, error = $4, updated_at = NOW()\n WHERE command_id = $1\n RETURNING id, command_id, deployment_hash, type, status, priority,\n parameters, result, error, created_by, created_at, updated_at,\n timeout_seconds, metadata\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 1,
+ "name": "command_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "deployment_hash",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "type",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "status",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "priority",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 6,
+ "name": "parameters",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 7,
+ "name": "result",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 8,
+ "name": "error",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 9,
+ "name": "created_by",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 10,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 11,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 12,
+ "name": "timeout_seconds",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 13,
+ "name": "metadata",
+ "type_info": "Jsonb"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text",
+ "Varchar",
+ "Jsonb",
+ "Jsonb"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ false,
+ false,
+ false,
+ true,
+ true
+ ]
+ },
+ "hash": "6cdfab7ffca4a98abcd7fb2325289ccf3035f08340bf80a345ff74570cd62043"
+}
diff --git a/.sqlx/query-6e44fd63bcb2075e9515a7ce3d0be7a3759a98b5f1c637eb632aa440a1ffadb6.json b/.sqlx/query-6e44fd63bcb2075e9515a7ce3d0be7a3759a98b5f1c637eb632aa440a1ffadb6.json
new file mode 100644
index 0000000..b6c5726
--- /dev/null
+++ b/.sqlx/query-6e44fd63bcb2075e9515a7ce3d0be7a3759a98b5f1c637eb632aa440a1ffadb6.json
@@ -0,0 +1,85 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "SELECT \n id,\n user_id,\n obj_id,\n category as \"category: _\",\n comment,\n hidden,\n rate,\n created_at,\n updated_at\n FROM rating\n WHERE hidden = false \n ORDER BY id DESC\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "obj_id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 3,
+ "name": "category: _",
+ "type_info": {
+ "Custom": {
+ "name": "rate_category",
+ "kind": {
+ "Enum": [
+ "application",
+ "cloud",
+ "project",
+ "deploymentSpeed",
+ "documentation",
+ "design",
+ "techSupport",
+ "price",
+ "memoryUsage"
+ ]
+ }
+ }
+ }
+ },
+ {
+ "ordinal": 4,
+ "name": "comment",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 5,
+ "name": "hidden",
+ "type_info": "Bool"
+ },
+ {
+ "ordinal": 6,
+ "name": "rate",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 7,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 8,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": []
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ false,
+ false
+ ]
+ },
+ "hash": "6e44fd63bcb2075e9515a7ce3d0be7a3759a98b5f1c637eb632aa440a1ffadb6"
+}
diff --git a/.sqlx/query-6ff761b4fa0b1ccc22722b481b37bb2395caa02475163facde831cc9ada1ff30.json b/.sqlx/query-6ff761b4fa0b1ccc22722b481b37bb2395caa02475163facde831cc9ada1ff30.json
new file mode 100644
index 0000000..2a91bb1
--- /dev/null
+++ b/.sqlx/query-6ff761b4fa0b1ccc22722b481b37bb2395caa02475163facde831cc9ada1ff30.json
@@ -0,0 +1,31 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n INSERT INTO server (\n user_id,\n project_id,\n region,\n zone,\n server,\n os,\n disk_type,\n created_at,\n updated_at,\n srv_ip,\n ssh_user,\n ssh_port\n )\n VALUES ($1, $2, $3, $4, $5, $6, $7, NOW() at time zone 'utc',NOW() at time zone 'utc', $8, $9, $10)\n RETURNING id;\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Varchar",
+ "Int4",
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Int4"
+ ]
+ },
+ "nullable": [
+ false
+ ]
+ },
+ "hash": "6ff761b4fa0b1ccc22722b481b37bb2395caa02475163facde831cc9ada1ff30"
+}
diff --git a/.sqlx/query-722e059fca26aa3be81451ef5e266cc32d0e3ebc0611bd69013b6c3aa240b674.json b/.sqlx/query-722e059fca26aa3be81451ef5e266cc32d0e3ebc0611bd69013b6c3aa240b674.json
new file mode 100644
index 0000000..65bb611
--- /dev/null
+++ b/.sqlx/query-722e059fca26aa3be81451ef5e266cc32d0e3ebc0611bd69013b6c3aa240b674.json
@@ -0,0 +1,130 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "SELECT \n t.id,\n t.creator_user_id,\n t.creator_name,\n t.name,\n t.slug,\n t.short_description,\n t.long_description,\n c.name AS \"category_code?\",\n t.product_id,\n t.tags,\n t.tech_stack,\n t.status,\n t.is_configurable,\n t.view_count,\n t.deploy_count,\n t.created_at,\n t.updated_at,\n t.approved_at,\n t.required_plan_name\n FROM stack_template t\n LEFT JOIN stack_category c ON t.category_id = c.id\n WHERE t.id = $1",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 1,
+ "name": "creator_user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "creator_name",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "name",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "slug",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "short_description",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 6,
+ "name": "long_description",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 7,
+ "name": "category_code?",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 8,
+ "name": "product_id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 9,
+ "name": "tags",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 10,
+ "name": "tech_stack",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 11,
+ "name": "status",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 12,
+ "name": "is_configurable",
+ "type_info": "Bool"
+ },
+ {
+ "ordinal": 13,
+ "name": "view_count",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 14,
+ "name": "deploy_count",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 15,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 16,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 17,
+ "name": "approved_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 18,
+ "name": "required_plan_name",
+ "type_info": "Varchar"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Uuid"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ true,
+ false,
+ false,
+ true,
+ true,
+ false,
+ true,
+ true,
+ true,
+ false,
+ true,
+ true,
+ true,
+ true,
+ true,
+ true,
+ true
+ ]
+ },
+ "hash": "722e059fca26aa3be81451ef5e266cc32d0e3ebc0611bd69013b6c3aa240b674"
+}
diff --git a/.sqlx/query-7b6c7e798237d0c08b7c1126d7044df13c46ef2eb373398a535090edf738cb5a.json b/.sqlx/query-7b6c7e798237d0c08b7c1126d7044df13c46ef2eb373398a535090edf738cb5a.json
new file mode 100644
index 0000000..ed0cd48
--- /dev/null
+++ b/.sqlx/query-7b6c7e798237d0c08b7c1126d7044df13c46ef2eb373398a535090edf738cb5a.json
@@ -0,0 +1,76 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n UPDATE cloud\n SET\n user_id=$2,\n provider=$3,\n cloud_token=$4,\n cloud_key=$5,\n cloud_secret=$6,\n save_token=$7,\n updated_at=NOW() at time zone 'utc'\n WHERE id = $1\n RETURNING *\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "provider",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "cloud_token",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "cloud_key",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "cloud_secret",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 6,
+ "name": "save_token",
+ "type_info": "Bool"
+ },
+ {
+ "ordinal": 7,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 8,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Int4",
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Bool"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ true,
+ false,
+ false
+ ]
+ },
+ "hash": "7b6c7e798237d0c08b7c1126d7044df13c46ef2eb373398a535090edf738cb5a"
+}
diff --git a/.sqlx/query-7c087b528df89eb0bf41a4e46bcc48ab4946535a96baf0f49996d79387a3791c.json b/.sqlx/query-7c087b528df89eb0bf41a4e46bcc48ab4946535a96baf0f49996d79387a3791c.json
new file mode 100644
index 0000000..b6d94b3
--- /dev/null
+++ b/.sqlx/query-7c087b528df89eb0bf41a4e46bcc48ab4946535a96baf0f49996d79387a3791c.json
@@ -0,0 +1,94 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT\n *\n FROM server\n WHERE project_id=$1\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "project_id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 3,
+ "name": "region",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "zone",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "server",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 6,
+ "name": "os",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 7,
+ "name": "disk_type",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 8,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 9,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 10,
+ "name": "srv_ip",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 11,
+ "name": "ssh_user",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 12,
+ "name": "ssh_port",
+ "type_info": "Int4"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Int4"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ true,
+ true,
+ false,
+ false,
+ true,
+ true,
+ true
+ ]
+ },
+ "hash": "7c087b528df89eb0bf41a4e46bcc48ab4946535a96baf0f49996d79387a3791c"
+}
diff --git a/.sqlx/query-8038cec278228a04f83f4d67f8e2fd0382be589bf5d6dcde690b63f281160159.json b/.sqlx/query-8038cec278228a04f83f4d67f8e2fd0382be589bf5d6dcde690b63f281160159.json
new file mode 100644
index 0000000..aafa449
--- /dev/null
+++ b/.sqlx/query-8038cec278228a04f83f4d67f8e2fd0382be589bf5d6dcde690b63f281160159.json
@@ -0,0 +1,15 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n UPDATE client\n SET \n secret=$1,\n updated_at=NOW() at time zone 'utc'\n WHERE id = $2\n ",
+ "describe": {
+ "columns": [],
+ "parameters": {
+ "Left": [
+ "Varchar",
+ "Int4"
+ ]
+ },
+ "nullable": []
+ },
+ "hash": "8038cec278228a04f83f4d67f8e2fd0382be589bf5d6dcde690b63f281160159"
+}
diff --git a/.sqlx/query-8218dc7f0a2d15d19391bdcde1dfe27d2ee90aa4598b17d90e5db82244ad6ff1.json b/.sqlx/query-8218dc7f0a2d15d19391bdcde1dfe27d2ee90aa4598b17d90e5db82244ad6ff1.json
new file mode 100644
index 0000000..17b8891
--- /dev/null
+++ b/.sqlx/query-8218dc7f0a2d15d19391bdcde1dfe27d2ee90aa4598b17d90e5db82244ad6ff1.json
@@ -0,0 +1,14 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n DELETE FROM rating\n WHERE id = $1\n ",
+ "describe": {
+ "columns": [],
+ "parameters": {
+ "Left": [
+ "Int4"
+ ]
+ },
+ "nullable": []
+ },
+ "hash": "8218dc7f0a2d15d19391bdcde1dfe27d2ee90aa4598b17d90e5db82244ad6ff1"
+}
diff --git a/.sqlx/query-82eb411b1d8f6f3bed3db367ea147fbcd0626347744c7f8de6dce25d6e9a1fe7.json b/.sqlx/query-82eb411b1d8f6f3bed3db367ea147fbcd0626347744c7f8de6dce25d6e9a1fe7.json
new file mode 100644
index 0000000..d95a94c
--- /dev/null
+++ b/.sqlx/query-82eb411b1d8f6f3bed3db367ea147fbcd0626347744c7f8de6dce25d6e9a1fe7.json
@@ -0,0 +1,46 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT\n *\n FROM user_agreement\n WHERE user_id=$1\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "agrt_id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 2,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 4,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false
+ ]
+ },
+ "hash": "82eb411b1d8f6f3bed3db367ea147fbcd0626347744c7f8de6dce25d6e9a1fe7"
+}
diff --git a/.sqlx/query-836ec7786ee20369b6b49aa89587480579468a5cb4ecdf7b315920b5e0bd894c.json b/.sqlx/query-836ec7786ee20369b6b49aa89587480579468a5cb4ecdf7b315920b5e0bd894c.json
new file mode 100644
index 0000000..6dabdee
--- /dev/null
+++ b/.sqlx/query-836ec7786ee20369b6b49aa89587480579468a5cb4ecdf7b315920b5e0bd894c.json
@@ -0,0 +1,106 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "SELECT \n id,\n user_id,\n obj_id,\n category as \"category: _\",\n comment,\n hidden,\n rate,\n created_at,\n updated_at\n FROM rating\n WHERE user_id=$1\n AND obj_id=$2\n AND category=$3\n LIMIT 1",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "obj_id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 3,
+ "name": "category: _",
+ "type_info": {
+ "Custom": {
+ "name": "rate_category",
+ "kind": {
+ "Enum": [
+ "application",
+ "cloud",
+ "project",
+ "deploymentSpeed",
+ "documentation",
+ "design",
+ "techSupport",
+ "price",
+ "memoryUsage"
+ ]
+ }
+ }
+ }
+ },
+ {
+ "ordinal": 4,
+ "name": "comment",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 5,
+ "name": "hidden",
+ "type_info": "Bool"
+ },
+ {
+ "ordinal": 6,
+ "name": "rate",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 7,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 8,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text",
+ "Int4",
+ {
+ "Custom": {
+ "name": "rate_category",
+ "kind": {
+ "Enum": [
+ "application",
+ "cloud",
+ "project",
+ "deploymentSpeed",
+ "documentation",
+ "design",
+ "techSupport",
+ "price",
+ "memoryUsage"
+ ]
+ }
+ }
+ }
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ false,
+ false
+ ]
+ },
+ "hash": "836ec7786ee20369b6b49aa89587480579468a5cb4ecdf7b315920b5e0bd894c"
+}
diff --git a/.sqlx/query-83cd9d573480c8a83e9e58f375653b4d76ec4c4dea338877ef5ba72fa49c28ad.json b/.sqlx/query-83cd9d573480c8a83e9e58f375653b4d76ec4c4dea338877ef5ba72fa49c28ad.json
new file mode 100644
index 0000000..44d0fe6
--- /dev/null
+++ b/.sqlx/query-83cd9d573480c8a83e9e58f375653b4d76ec4c4dea338877ef5ba72fa49c28ad.json
@@ -0,0 +1,22 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT\n count(*) as found\n FROM client c \n WHERE c.secret = $1\n LIMIT 1\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "found",
+ "type_info": "Int8"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text"
+ ]
+ },
+ "nullable": [
+ null
+ ]
+ },
+ "hash": "83cd9d573480c8a83e9e58f375653b4d76ec4c4dea338877ef5ba72fa49c28ad"
+}
diff --git a/.sqlx/query-8aafae4565e572dc36aef3bb3d7b82a392e59683b9dfa1c457974e8fa8b7d00f.json b/.sqlx/query-8aafae4565e572dc36aef3bb3d7b82a392e59683b9dfa1c457974e8fa8b7d00f.json
new file mode 100644
index 0000000..6d69a7d
--- /dev/null
+++ b/.sqlx/query-8aafae4565e572dc36aef3bb3d7b82a392e59683b9dfa1c457974e8fa8b7d00f.json
@@ -0,0 +1,22 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT\n count(*) as client_count\n FROM client c \n WHERE c.user_id = $1\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "client_count",
+ "type_info": "Int8"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text"
+ ]
+ },
+ "nullable": [
+ null
+ ]
+ },
+ "hash": "8aafae4565e572dc36aef3bb3d7b82a392e59683b9dfa1c457974e8fa8b7d00f"
+}
diff --git a/.sqlx/query-8cfb2d3a45ff6c5d1d51a98f6a37ba89da5a49c211c8627c314b8a32c92a62e1.json b/.sqlx/query-8cfb2d3a45ff6c5d1d51a98f6a37ba89da5a49c211c8627c314b8a32c92a62e1.json
new file mode 100644
index 0000000..991ef36
--- /dev/null
+++ b/.sqlx/query-8cfb2d3a45ff6c5d1d51a98f6a37ba89da5a49c211c8627c314b8a32c92a62e1.json
@@ -0,0 +1,94 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "SELECT * FROM server WHERE id=$1 LIMIT 1 ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "project_id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 3,
+ "name": "region",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "zone",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "server",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 6,
+ "name": "os",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 7,
+ "name": "disk_type",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 8,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 9,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 10,
+ "name": "srv_ip",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 11,
+ "name": "ssh_user",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 12,
+ "name": "ssh_port",
+ "type_info": "Int4"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Int4"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ true,
+ true,
+ false,
+ false,
+ true,
+ true,
+ true
+ ]
+ },
+ "hash": "8cfb2d3a45ff6c5d1d51a98f6a37ba89da5a49c211c8627c314b8a32c92a62e1"
+}
diff --git a/.sqlx/query-8db13c16e29b4aecd87646859296790f3e5971d7a2bff2d32f2d92590ec3393d.json b/.sqlx/query-8db13c16e29b4aecd87646859296790f3e5971d7a2bff2d32f2d92590ec3393d.json
new file mode 100644
index 0000000..dea9192
--- /dev/null
+++ b/.sqlx/query-8db13c16e29b4aecd87646859296790f3e5971d7a2bff2d32f2d92590ec3393d.json
@@ -0,0 +1,87 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "SELECT \n id,\n user_id,\n obj_id,\n category as \"category: _\",\n comment,\n hidden,\n rate,\n created_at,\n updated_at\n FROM rating\n WHERE id=$1\n LIMIT 1",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "obj_id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 3,
+ "name": "category: _",
+ "type_info": {
+ "Custom": {
+ "name": "rate_category",
+ "kind": {
+ "Enum": [
+ "application",
+ "cloud",
+ "project",
+ "deploymentSpeed",
+ "documentation",
+ "design",
+ "techSupport",
+ "price",
+ "memoryUsage"
+ ]
+ }
+ }
+ }
+ },
+ {
+ "ordinal": 4,
+ "name": "comment",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 5,
+ "name": "hidden",
+ "type_info": "Bool"
+ },
+ {
+ "ordinal": 6,
+ "name": "rate",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 7,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 8,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Int4"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ false,
+ false
+ ]
+ },
+ "hash": "8db13c16e29b4aecd87646859296790f3e5971d7a2bff2d32f2d92590ec3393d"
+}
diff --git a/.sqlx/query-8ec4c1e77a941efe4c1c36e26c5e1dfcb0e7769f0333d2acf7d6e0fb97ca12dc.json b/.sqlx/query-8ec4c1e77a941efe4c1c36e26c5e1dfcb0e7769f0333d2acf7d6e0fb97ca12dc.json
new file mode 100644
index 0000000..0679752
--- /dev/null
+++ b/.sqlx/query-8ec4c1e77a941efe4c1c36e26c5e1dfcb0e7769f0333d2acf7d6e0fb97ca12dc.json
@@ -0,0 +1,27 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n INSERT INTO cloud (\n user_id,\n provider,\n cloud_token,\n cloud_key,\n cloud_secret,\n save_token,\n created_at,\n updated_at\n )\n VALUES ($1, $2, $3, $4, $5, $6, NOW() at time zone 'utc', NOW() at time zone 'utc')\n RETURNING id;\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Bool"
+ ]
+ },
+ "nullable": [
+ false
+ ]
+ },
+ "hash": "8ec4c1e77a941efe4c1c36e26c5e1dfcb0e7769f0333d2acf7d6e0fb97ca12dc"
+}
diff --git a/.sqlx/query-91966b9578edeb2303bbba93cfc756595265b21dd6f7a06a2f7a846d162b340c.json b/.sqlx/query-91966b9578edeb2303bbba93cfc756595265b21dd6f7a06a2f7a846d162b340c.json
new file mode 100644
index 0000000..0146a6a
--- /dev/null
+++ b/.sqlx/query-91966b9578edeb2303bbba93cfc756595265b21dd6f7a06a2f7a846d162b340c.json
@@ -0,0 +1,100 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT c.id, c.command_id, c.deployment_hash, c.type, c.status, c.priority,\n c.parameters, c.result, c.error, c.created_by, c.created_at, c.updated_at,\n c.timeout_seconds, c.metadata\n FROM commands c\n INNER JOIN command_queue q ON c.command_id = q.command_id\n WHERE q.deployment_hash = $1\n ORDER BY q.priority DESC, q.created_at ASC\n LIMIT 1\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 1,
+ "name": "command_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "deployment_hash",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "type",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "status",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "priority",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 6,
+ "name": "parameters",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 7,
+ "name": "result",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 8,
+ "name": "error",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 9,
+ "name": "created_by",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 10,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 11,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 12,
+ "name": "timeout_seconds",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 13,
+ "name": "metadata",
+ "type_info": "Jsonb"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ false,
+ false,
+ false,
+ true,
+ true
+ ]
+ },
+ "hash": "91966b9578edeb2303bbba93cfc756595265b21dd6f7a06a2f7a846d162b340c"
+}
diff --git a/.sqlx/query-954605527a3ca7b9d6cbf1fbc03dc00c95626c94f0f02cbc69336836f95ec45e.json b/.sqlx/query-954605527a3ca7b9d6cbf1fbc03dc00c95626c94f0f02cbc69336836f95ec45e.json
new file mode 100644
index 0000000..e181206
--- /dev/null
+++ b/.sqlx/query-954605527a3ca7b9d6cbf1fbc03dc00c95626c94f0f02cbc69336836f95ec45e.json
@@ -0,0 +1,46 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "SELECT\n *\n FROM product\n WHERE obj_id = $1\n LIMIT 1\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "obj_id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 2,
+ "name": "obj_type",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 3,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 4,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Int4"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false
+ ]
+ },
+ "hash": "954605527a3ca7b9d6cbf1fbc03dc00c95626c94f0f02cbc69336836f95ec45e"
+}
diff --git a/.sqlx/query-970e2fc198c379a19849c4621adeca951c761f6b9abd6c70158000e0c03ca7c7.json b/.sqlx/query-970e2fc198c379a19849c4621adeca951c761f6b9abd6c70158000e0c03ca7c7.json
new file mode 100644
index 0000000..0b5b79f
--- /dev/null
+++ b/.sqlx/query-970e2fc198c379a19849c4621adeca951c761f6b9abd6c70158000e0c03ca7c7.json
@@ -0,0 +1,130 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "SELECT \n t.id,\n t.creator_user_id,\n t.creator_name,\n t.name,\n t.slug,\n t.short_description,\n t.long_description,\n c.name AS \"category_code?\",\n t.product_id,\n t.tags,\n t.tech_stack,\n t.status,\n t.is_configurable,\n t.view_count,\n t.deploy_count,\n t.required_plan_name,\n t.created_at,\n t.updated_at,\n t.approved_at\n FROM stack_template t\n LEFT JOIN stack_category c ON t.category_id = c.id\n WHERE t.slug = $1 AND t.status = 'approved'",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 1,
+ "name": "creator_user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "creator_name",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "name",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "slug",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "short_description",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 6,
+ "name": "long_description",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 7,
+ "name": "category_code?",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 8,
+ "name": "product_id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 9,
+ "name": "tags",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 10,
+ "name": "tech_stack",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 11,
+ "name": "status",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 12,
+ "name": "is_configurable",
+ "type_info": "Bool"
+ },
+ {
+ "ordinal": 13,
+ "name": "view_count",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 14,
+ "name": "deploy_count",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 15,
+ "name": "required_plan_name",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 16,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 17,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 18,
+ "name": "approved_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ true,
+ false,
+ false,
+ true,
+ true,
+ false,
+ true,
+ true,
+ true,
+ false,
+ true,
+ true,
+ true,
+ true,
+ true,
+ true,
+ true
+ ]
+ },
+ "hash": "970e2fc198c379a19849c4621adeca951c761f6b9abd6c70158000e0c03ca7c7"
+}
diff --git a/.sqlx/query-9d821bd27d5202d2c3d49a2f148ff7f21bafde8c7c1306cc7efc976a9eae0071.json b/.sqlx/query-9d821bd27d5202d2c3d49a2f148ff7f21bafde8c7c1306cc7efc976a9eae0071.json
new file mode 100644
index 0000000..8adc74c
--- /dev/null
+++ b/.sqlx/query-9d821bd27d5202d2c3d49a2f148ff7f21bafde8c7c1306cc7efc976a9eae0071.json
@@ -0,0 +1,25 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n INSERT INTO user_agreement (agrt_id, user_id, created_at, updated_at)\n VALUES ($1, $2, $3, $4)\n RETURNING id;\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Int4",
+ "Varchar",
+ "Timestamptz",
+ "Timestamptz"
+ ]
+ },
+ "nullable": [
+ false
+ ]
+ },
+ "hash": "9d821bd27d5202d2c3d49a2f148ff7f21bafde8c7c1306cc7efc976a9eae0071"
+}
diff --git a/.sqlx/query-9e4f216c828c7d53547c33da062153f90eefabe5a252f86d5e8d1964785025c0.json b/.sqlx/query-9e4f216c828c7d53547c33da062153f90eefabe5a252f86d5e8d1964785025c0.json
new file mode 100644
index 0000000..67d8c69
--- /dev/null
+++ b/.sqlx/query-9e4f216c828c7d53547c33da062153f90eefabe5a252f86d5e8d1964785025c0.json
@@ -0,0 +1,16 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n INSERT INTO command_queue (command_id, deployment_hash, priority)\n VALUES ($1, $2, $3)\n ",
+ "describe": {
+ "columns": [],
+ "parameters": {
+ "Left": [
+ "Varchar",
+ "Varchar",
+ "Int4"
+ ]
+ },
+ "nullable": []
+ },
+ "hash": "9e4f216c828c7d53547c33da062153f90eefabe5a252f86d5e8d1964785025c0"
+}
diff --git a/.sqlx/query-aa21279e6479dd588317bbb4c522094f0cf8736710de08963fff1178f2b62974.json b/.sqlx/query-aa21279e6479dd588317bbb4c522094f0cf8736710de08963fff1178f2b62974.json
new file mode 100644
index 0000000..ae2f5d9
--- /dev/null
+++ b/.sqlx/query-aa21279e6479dd588317bbb4c522094f0cf8736710de08963fff1178f2b62974.json
@@ -0,0 +1,100 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT id, command_id, deployment_hash, type, status, priority,\n parameters, result, error, created_by, created_at, updated_at,\n timeout_seconds, metadata\n FROM commands\n WHERE id = $1\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 1,
+ "name": "command_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "deployment_hash",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "type",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "status",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "priority",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 6,
+ "name": "parameters",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 7,
+ "name": "result",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 8,
+ "name": "error",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 9,
+ "name": "created_by",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 10,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 11,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 12,
+ "name": "timeout_seconds",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 13,
+ "name": "metadata",
+ "type_info": "Jsonb"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Uuid"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ false,
+ false,
+ false,
+ true,
+ true
+ ]
+ },
+ "hash": "aa21279e6479dd588317bbb4c522094f0cf8736710de08963fff1178f2b62974"
+}
diff --git a/.sqlx/query-ab22f5f84d90a3c2717cea339f6444c6c2656615fb29b4c04031a090cf103bdd.json b/.sqlx/query-ab22f5f84d90a3c2717cea339f6444c6c2656615fb29b4c04031a090cf103bdd.json
new file mode 100644
index 0000000..f684d17
--- /dev/null
+++ b/.sqlx/query-ab22f5f84d90a3c2717cea339f6444c6c2656615fb29b4c04031a090cf103bdd.json
@@ -0,0 +1,68 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "INSERT INTO stack_template_version (\n template_id, version, stack_definition, definition_format, changelog, is_latest\n ) VALUES ($1,$2,$3,$4,$5,true)\n RETURNING id, template_id, version, stack_definition, definition_format, changelog, is_latest, created_at",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 1,
+ "name": "template_id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 2,
+ "name": "version",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "stack_definition",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 4,
+ "name": "definition_format",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "changelog",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 6,
+ "name": "is_latest",
+ "type_info": "Bool"
+ },
+ {
+ "ordinal": 7,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Uuid",
+ "Varchar",
+ "Jsonb",
+ "Varchar",
+ "Text"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ true
+ ]
+ },
+ "hash": "ab22f5f84d90a3c2717cea339f6444c6c2656615fb29b4c04031a090cf103bdd"
+}
diff --git a/.sqlx/query-b8296183bd28695d3a7574e57db445dc1f4b2d659a3805f92f6f5f83b562266b.json b/.sqlx/query-b8296183bd28695d3a7574e57db445dc1f4b2d659a3805f92f6f5f83b562266b.json
new file mode 100644
index 0000000..a924adf
--- /dev/null
+++ b/.sqlx/query-b8296183bd28695d3a7574e57db445dc1f4b2d659a3805f92f6f5f83b562266b.json
@@ -0,0 +1,70 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT\n *\n FROM cloud\n WHERE user_id=$1\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "provider",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "cloud_token",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "cloud_key",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "cloud_secret",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 6,
+ "name": "save_token",
+ "type_info": "Bool"
+ },
+ {
+ "ordinal": 7,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 8,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ true,
+ false,
+ false
+ ]
+ },
+ "hash": "b8296183bd28695d3a7574e57db445dc1f4b2d659a3805f92f6f5f83b562266b"
+}
diff --git a/.sqlx/query-b92417574329b82cae2347027db12f4794c1fc48b67d64c34c88fd9caf4508f5.json b/.sqlx/query-b92417574329b82cae2347027db12f4794c1fc48b67d64c34c88fd9caf4508f5.json
new file mode 100644
index 0000000..d77b472
--- /dev/null
+++ b/.sqlx/query-b92417574329b82cae2347027db12f4794c1fc48b67d64c34c88fd9caf4508f5.json
@@ -0,0 +1,30 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n INSERT INTO deployment (\n project_id, user_id, deployment_hash, deleted, status, metadata, last_seen_at, created_at, updated_at\n )\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)\n RETURNING id;\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Int4",
+ "Varchar",
+ "Varchar",
+ "Bool",
+ "Varchar",
+ "Json",
+ "Timestamptz",
+ "Timestamptz",
+ "Timestamptz"
+ ]
+ },
+ "nullable": [
+ false
+ ]
+ },
+ "hash": "b92417574329b82cae2347027db12f4794c1fc48b67d64c34c88fd9caf4508f5"
+}
diff --git a/.sqlx/query-bc798b1837501109ff69f44c01d39c1cc03348eb4b4fe698ad06283ba7072b7f.json b/.sqlx/query-bc798b1837501109ff69f44c01d39c1cc03348eb4b4fe698ad06283ba7072b7f.json
new file mode 100644
index 0000000..0f85900
--- /dev/null
+++ b/.sqlx/query-bc798b1837501109ff69f44c01d39c1cc03348eb4b4fe698ad06283ba7072b7f.json
@@ -0,0 +1,113 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n INSERT INTO commands (\n id, command_id, deployment_hash, type, status, priority,\n parameters, result, error, created_by, created_at, updated_at,\n timeout_seconds, metadata\n )\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)\n RETURNING id, command_id, deployment_hash, type, status, priority,\n parameters, result, error, created_by, created_at, updated_at,\n timeout_seconds, metadata\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 1,
+ "name": "command_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "deployment_hash",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "type",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "status",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "priority",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 6,
+ "name": "parameters",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 7,
+ "name": "result",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 8,
+ "name": "error",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 9,
+ "name": "created_by",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 10,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 11,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 12,
+ "name": "timeout_seconds",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 13,
+ "name": "metadata",
+ "type_info": "Jsonb"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Uuid",
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Varchar",
+ "Jsonb",
+ "Jsonb",
+ "Jsonb",
+ "Varchar",
+ "Timestamptz",
+ "Timestamptz",
+ "Int4",
+ "Jsonb"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ false,
+ false,
+ false,
+ true,
+ true
+ ]
+ },
+ "hash": "bc798b1837501109ff69f44c01d39c1cc03348eb4b4fe698ad06283ba7072b7f"
+}
diff --git a/.sqlx/query-c28d645182680aaeaf265abcb687ea36f2a01b6b778fd61921e0046ad3f2efb2.json b/.sqlx/query-c28d645182680aaeaf265abcb687ea36f2a01b6b778fd61921e0046ad3f2efb2.json
new file mode 100644
index 0000000..155c1fc
--- /dev/null
+++ b/.sqlx/query-c28d645182680aaeaf265abcb687ea36f2a01b6b778fd61921e0046ad3f2efb2.json
@@ -0,0 +1,47 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT\n *\n FROM user_agreement\n WHERE user_id=$1\n AND agrt_id=$2\n LIMIT 1\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "agrt_id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 2,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 4,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text",
+ "Int4"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false
+ ]
+ },
+ "hash": "c28d645182680aaeaf265abcb687ea36f2a01b6b778fd61921e0046ad3f2efb2"
+}
diff --git a/.sqlx/query-c59246b73cf3c5a0fd961d2709477ce724f60cdb03492eef912a9fe89aee2ac4.json b/.sqlx/query-c59246b73cf3c5a0fd961d2709477ce724f60cdb03492eef912a9fe89aee2ac4.json
new file mode 100644
index 0000000..838d20a
--- /dev/null
+++ b/.sqlx/query-c59246b73cf3c5a0fd961d2709477ce724f60cdb03492eef912a9fe89aee2ac4.json
@@ -0,0 +1,83 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n UPDATE deployment\n SET\n project_id=$2,\n user_id=$3,\n deployment_hash=$4,\n deleted=$5,\n status=$6,\n metadata=$7,\n last_seen_at=$8,\n updated_at=NOW() at time zone 'utc'\n WHERE id = $1\n RETURNING *\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "project_id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 2,
+ "name": "metadata",
+ "type_info": "Json"
+ },
+ {
+ "ordinal": 3,
+ "name": "deleted",
+ "type_info": "Bool"
+ },
+ {
+ "ordinal": 4,
+ "name": "status",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 6,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 7,
+ "name": "deployment_hash",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 8,
+ "name": "last_seen_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 9,
+ "name": "user_id",
+ "type_info": "Varchar"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Int4",
+ "Int4",
+ "Varchar",
+ "Varchar",
+ "Bool",
+ "Varchar",
+ "Json",
+ "Timestamptz"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ true,
+ false,
+ false,
+ false,
+ false,
+ true,
+ true
+ ]
+ },
+ "hash": "c59246b73cf3c5a0fd961d2709477ce724f60cdb03492eef912a9fe89aee2ac4"
+}
diff --git a/.sqlx/query-cd6ddae34b29c15924e0ec26ea55c23d56315ad817bea716d6a71c8b2bb18087.json b/.sqlx/query-cd6ddae34b29c15924e0ec26ea55c23d56315ad817bea716d6a71c8b2bb18087.json
new file mode 100644
index 0000000..64f052c
--- /dev/null
+++ b/.sqlx/query-cd6ddae34b29c15924e0ec26ea55c23d56315ad817bea716d6a71c8b2bb18087.json
@@ -0,0 +1,44 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n INSERT INTO rating (user_id, obj_id, category, comment, hidden, rate, created_at, updated_at)\n VALUES ($1, $2, $3, $4, $5, $6, NOW() at time zone 'utc', NOW() at time zone 'utc')\n RETURNING id\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Varchar",
+ "Int4",
+ {
+ "Custom": {
+ "name": "rate_category",
+ "kind": {
+ "Enum": [
+ "application",
+ "cloud",
+ "project",
+ "deploymentSpeed",
+ "documentation",
+ "design",
+ "techSupport",
+ "price",
+ "memoryUsage"
+ ]
+ }
+ }
+ },
+ "Text",
+ "Bool",
+ "Int4"
+ ]
+ },
+ "nullable": [
+ false
+ ]
+ },
+ "hash": "cd6ddae34b29c15924e0ec26ea55c23d56315ad817bea716d6a71c8b2bb18087"
+}
diff --git a/.sqlx/query-cf85345c0c38d7ba1c347a9cf027a55dccaaeb0fe55d5eabb7319a90cbdfe951.json b/.sqlx/query-cf85345c0c38d7ba1c347a9cf027a55dccaaeb0fe55d5eabb7319a90cbdfe951.json
new file mode 100644
index 0000000..e24d9cb
--- /dev/null
+++ b/.sqlx/query-cf85345c0c38d7ba1c347a9cf027a55dccaaeb0fe55d5eabb7319a90cbdfe951.json
@@ -0,0 +1,85 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "SELECT \n id,\n user_id,\n obj_id,\n category as \"category: _\",\n comment,\n hidden,\n rate,\n created_at,\n updated_at\n FROM rating\n ORDER BY id DESC\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "obj_id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 3,
+ "name": "category: _",
+ "type_info": {
+ "Custom": {
+ "name": "rate_category",
+ "kind": {
+ "Enum": [
+ "application",
+ "cloud",
+ "project",
+ "deploymentSpeed",
+ "documentation",
+ "design",
+ "techSupport",
+ "price",
+ "memoryUsage"
+ ]
+ }
+ }
+ }
+ },
+ {
+ "ordinal": 4,
+ "name": "comment",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 5,
+ "name": "hidden",
+ "type_info": "Bool"
+ },
+ {
+ "ordinal": 6,
+ "name": "rate",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 7,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 8,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": []
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ false,
+ false
+ ]
+ },
+ "hash": "cf85345c0c38d7ba1c347a9cf027a55dccaaeb0fe55d5eabb7319a90cbdfe951"
+}
diff --git a/.sqlx/query-d81dbcf77d096403614b80165d66388884b133c79da6ed1a5809a3ca64f48f97.json b/.sqlx/query-d81dbcf77d096403614b80165d66388884b133c79da6ed1a5809a3ca64f48f97.json
new file mode 100644
index 0000000..769d0a5
--- /dev/null
+++ b/.sqlx/query-d81dbcf77d096403614b80165d66388884b133c79da6ed1a5809a3ca64f48f97.json
@@ -0,0 +1,20 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "UPDATE stack_template SET \n name = COALESCE($2, name),\n short_description = COALESCE($3, short_description),\n long_description = COALESCE($4, long_description),\n category_id = COALESCE((SELECT id FROM stack_category WHERE name = $5), category_id),\n tags = COALESCE($6, tags),\n tech_stack = COALESCE($7, tech_stack)\n WHERE id = $1::uuid",
+ "describe": {
+ "columns": [],
+ "parameters": {
+ "Left": [
+ "Uuid",
+ "Varchar",
+ "Text",
+ "Text",
+ "Text",
+ "Jsonb",
+ "Jsonb"
+ ]
+ },
+ "nullable": []
+ },
+ "hash": "d81dbcf77d096403614b80165d66388884b133c79da6ed1a5809a3ca64f48f97"
+}
diff --git a/.sqlx/query-db15f82b91377978db22c48cf2fb4d54ef603448c0c44272aec8f2ff04920b83.json b/.sqlx/query-db15f82b91377978db22c48cf2fb4d54ef603448c0c44272aec8f2ff04920b83.json
new file mode 100644
index 0000000..0300aa2
--- /dev/null
+++ b/.sqlx/query-db15f82b91377978db22c48cf2fb4d54ef603448c0c44272aec8f2ff04920b83.json
@@ -0,0 +1,81 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n UPDATE project\n SET \n stack_id=$2,\n user_id=$3,\n name=$4,\n metadata=$5,\n request_json=$6,\n updated_at=NOW() at time zone 'utc'\n WHERE id = $1\n RETURNING *\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "stack_id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 2,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "name",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 4,
+ "name": "metadata",
+ "type_info": "Json"
+ },
+ {
+ "ordinal": 5,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 6,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 7,
+ "name": "request_json",
+ "type_info": "Json"
+ },
+ {
+ "ordinal": 8,
+ "name": "source_template_id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 9,
+ "name": "template_version",
+ "type_info": "Varchar"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Int4",
+ "Uuid",
+ "Varchar",
+ "Text",
+ "Json",
+ "Json"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ true,
+ true
+ ]
+ },
+ "hash": "db15f82b91377978db22c48cf2fb4d54ef603448c0c44272aec8f2ff04920b83"
+}
diff --git a/.sqlx/query-dd36c2beb4867d36db9dc0fe47e6310aea0a7dd4c8fc5f7c2cff4dac327cf3f7.json b/.sqlx/query-dd36c2beb4867d36db9dc0fe47e6310aea0a7dd4c8fc5f7c2cff4dac327cf3f7.json
new file mode 100644
index 0000000..2091a8b
--- /dev/null
+++ b/.sqlx/query-dd36c2beb4867d36db9dc0fe47e6310aea0a7dd4c8fc5f7c2cff4dac327cf3f7.json
@@ -0,0 +1,23 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n INSERT INTO client (user_id, secret, created_at, updated_at)\n VALUES ($1, $2, NOW() at time zone 'utc', NOW() at time zone 'utc')\n RETURNING id\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Varchar",
+ "Varchar"
+ ]
+ },
+ "nullable": [
+ false
+ ]
+ },
+ "hash": "dd36c2beb4867d36db9dc0fe47e6310aea0a7dd4c8fc5f7c2cff4dac327cf3f7"
+}
diff --git a/.sqlx/query-e5956a76c15941c58fc9acb3886c9d8ed8688d70ac5fcceaf41e1671f75dbaa8.json b/.sqlx/query-e5956a76c15941c58fc9acb3886c9d8ed8688d70ac5fcceaf41e1671f75dbaa8.json
new file mode 100644
index 0000000..ee20b46
--- /dev/null
+++ b/.sqlx/query-e5956a76c15941c58fc9acb3886c9d8ed8688d70ac5fcceaf41e1671f75dbaa8.json
@@ -0,0 +1,128 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "SELECT \n t.id,\n t.creator_user_id,\n t.creator_name,\n t.name,\n t.slug,\n t.short_description,\n t.long_description,\n c.name AS \"category_code?\",\n t.product_id,\n t.tags,\n t.tech_stack,\n t.status,\n t.is_configurable,\n t.view_count,\n t.deploy_count,\n t.required_plan_name,\n t.created_at,\n t.updated_at,\n t.approved_at\n FROM stack_template t\n LEFT JOIN stack_category c ON t.category_id = c.id\n WHERE t.status = 'submitted'\n ORDER BY t.created_at ASC",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 1,
+ "name": "creator_user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "creator_name",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "name",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "slug",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "short_description",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 6,
+ "name": "long_description",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 7,
+ "name": "category_code?",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 8,
+ "name": "product_id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 9,
+ "name": "tags",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 10,
+ "name": "tech_stack",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 11,
+ "name": "status",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 12,
+ "name": "is_configurable",
+ "type_info": "Bool"
+ },
+ {
+ "ordinal": 13,
+ "name": "view_count",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 14,
+ "name": "deploy_count",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 15,
+ "name": "required_plan_name",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 16,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 17,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 18,
+ "name": "approved_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": []
+ },
+ "nullable": [
+ false,
+ false,
+ true,
+ false,
+ false,
+ true,
+ true,
+ false,
+ true,
+ true,
+ true,
+ false,
+ true,
+ true,
+ true,
+ true,
+ true,
+ true,
+ true
+ ]
+ },
+ "hash": "e5956a76c15941c58fc9acb3886c9d8ed8688d70ac5fcceaf41e1671f75dbaa8"
+}
diff --git a/.sqlx/query-e5a60eb49da1cd42fc6c1bac36f038846f0cb4440e4b377d495ffe0f0bfc11b6.json b/.sqlx/query-e5a60eb49da1cd42fc6c1bac36f038846f0cb4440e4b377d495ffe0f0bfc11b6.json
new file mode 100644
index 0000000..966ab27
--- /dev/null
+++ b/.sqlx/query-e5a60eb49da1cd42fc6c1bac36f038846f0cb4440e4b377d495ffe0f0bfc11b6.json
@@ -0,0 +1,34 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "SELECT id, user_id, secret FROM client c WHERE c.id = $1",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "user_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "secret",
+ "type_info": "Varchar"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Int4"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ true
+ ]
+ },
+ "hash": "e5a60eb49da1cd42fc6c1bac36f038846f0cb4440e4b377d495ffe0f0bfc11b6"
+}
diff --git a/.sqlx/query-f0af06a2002ce933966cf6cfe8289ea77781df5a251a6731b42f8ddefb8a4c8b.json b/.sqlx/query-f0af06a2002ce933966cf6cfe8289ea77781df5a251a6731b42f8ddefb8a4c8b.json
new file mode 100644
index 0000000..0b08ecb
--- /dev/null
+++ b/.sqlx/query-f0af06a2002ce933966cf6cfe8289ea77781df5a251a6731b42f8ddefb8a4c8b.json
@@ -0,0 +1,100 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT id, command_id, deployment_hash, type, status, priority,\n parameters, result, error, created_by, created_at, updated_at,\n timeout_seconds, metadata\n FROM commands\n WHERE command_id = $1\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 1,
+ "name": "command_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "deployment_hash",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "type",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "status",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "priority",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 6,
+ "name": "parameters",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 7,
+ "name": "result",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 8,
+ "name": "error",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 9,
+ "name": "created_by",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 10,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 11,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 12,
+ "name": "timeout_seconds",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 13,
+ "name": "metadata",
+ "type_info": "Jsonb"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ false,
+ false,
+ false,
+ true,
+ true
+ ]
+ },
+ "hash": "f0af06a2002ce933966cf6cfe8289ea77781df5a251a6731b42f8ddefb8a4c8b"
+}
diff --git a/.sqlx/query-f167d43c97ad2a1b75c7c598fd9adf0bb815a11266e33880196cf6fb974b95f4.json b/.sqlx/query-f167d43c97ad2a1b75c7c598fd9adf0bb815a11266e33880196cf6fb974b95f4.json
new file mode 100644
index 0000000..ec57ef0
--- /dev/null
+++ b/.sqlx/query-f167d43c97ad2a1b75c7c598fd9adf0bb815a11266e33880196cf6fb974b95f4.json
@@ -0,0 +1,102 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT id, command_id, deployment_hash, type, status, priority,\n parameters, result, error, created_by, created_at, updated_at,\n timeout_seconds, metadata\n FROM commands\n WHERE deployment_hash = $1\n AND updated_at > $2\n ORDER BY updated_at DESC\n LIMIT $3\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 1,
+ "name": "command_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "deployment_hash",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "type",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "status",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "priority",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 6,
+ "name": "parameters",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 7,
+ "name": "result",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 8,
+ "name": "error",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 9,
+ "name": "created_by",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 10,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 11,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 12,
+ "name": "timeout_seconds",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 13,
+ "name": "metadata",
+ "type_info": "Jsonb"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text",
+ "Timestamptz",
+ "Int8"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ false,
+ false,
+ false,
+ true,
+ true
+ ]
+ },
+ "hash": "f167d43c97ad2a1b75c7c598fd9adf0bb815a11266e33880196cf6fb974b95f4"
+}
diff --git a/.sqlx/query-f93b65a30034b0558781a3173986706ad8a6255bba2812d4e32da205773c6de9.json b/.sqlx/query-f93b65a30034b0558781a3173986706ad8a6255bba2812d4e32da205773c6de9.json
new file mode 100644
index 0000000..7dff911
--- /dev/null
+++ b/.sqlx/query-f93b65a30034b0558781a3173986706ad8a6255bba2812d4e32da205773c6de9.json
@@ -0,0 +1,64 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "SELECT \n id,\n template_id,\n version,\n stack_definition,\n definition_format,\n changelog,\n is_latest,\n created_at\n FROM stack_template_version WHERE template_id = $1 AND is_latest = true LIMIT 1",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 1,
+ "name": "template_id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 2,
+ "name": "version",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "stack_definition",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 4,
+ "name": "definition_format",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "changelog",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 6,
+ "name": "is_latest",
+ "type_info": "Bool"
+ },
+ {
+ "ordinal": 7,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Uuid"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ true
+ ]
+ },
+ "hash": "f93b65a30034b0558781a3173986706ad8a6255bba2812d4e32da205773c6de9"
+}
diff --git a/.sqlx/query-fb07f53c015c852c4ef9e0ce52541f06835f8687122987d87fad751981b0c2b1.json b/.sqlx/query-fb07f53c015c852c4ef9e0ce52541f06835f8687122987d87fad751981b0c2b1.json
new file mode 100644
index 0000000..58b296c
--- /dev/null
+++ b/.sqlx/query-fb07f53c015c852c4ef9e0ce52541f06835f8687122987d87fad751981b0c2b1.json
@@ -0,0 +1,101 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n UPDATE commands\n SET status = $2, updated_at = NOW()\n WHERE command_id = $1\n RETURNING id, command_id, deployment_hash, type, status, priority,\n parameters, result, error, created_by, created_at, updated_at,\n timeout_seconds, metadata\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Uuid"
+ },
+ {
+ "ordinal": 1,
+ "name": "command_id",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "deployment_hash",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 3,
+ "name": "type",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 4,
+ "name": "status",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 5,
+ "name": "priority",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 6,
+ "name": "parameters",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 7,
+ "name": "result",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 8,
+ "name": "error",
+ "type_info": "Jsonb"
+ },
+ {
+ "ordinal": 9,
+ "name": "created_by",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 10,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 11,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 12,
+ "name": "timeout_seconds",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 13,
+ "name": "metadata",
+ "type_info": "Jsonb"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text",
+ "Varchar"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ true,
+ true,
+ true,
+ false,
+ false,
+ false,
+ true,
+ true
+ ]
+ },
+ "hash": "fb07f53c015c852c4ef9e0ce52541f06835f8687122987d87fad751981b0c2b1"
+}
diff --git a/.sqlx/query-ffb567ac44b9a0525bd41392c3a865d0612bc0d3f620d5cba76a6b44a8812417.json b/.sqlx/query-ffb567ac44b9a0525bd41392c3a865d0612bc0d3f620d5cba76a6b44a8812417.json
new file mode 100644
index 0000000..12efb85
--- /dev/null
+++ b/.sqlx/query-ffb567ac44b9a0525bd41392c3a865d0612bc0d3f620d5cba76a6b44a8812417.json
@@ -0,0 +1,48 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n UPDATE agreement\n SET\n name=$2,\n text=$3,\n updated_at=NOW() at time zone 'utc'\n WHERE id = $1\n RETURNING *\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": "Int4"
+ },
+ {
+ "ordinal": 1,
+ "name": "name",
+ "type_info": "Varchar"
+ },
+ {
+ "ordinal": 2,
+ "name": "text",
+ "type_info": "Text"
+ },
+ {
+ "ordinal": 3,
+ "name": "created_at",
+ "type_info": "Timestamptz"
+ },
+ {
+ "ordinal": 4,
+ "name": "updated_at",
+ "type_info": "Timestamptz"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Int4",
+ "Varchar",
+ "Text"
+ ]
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false
+ ]
+ },
+ "hash": "ffb567ac44b9a0525bd41392c3a865d0612bc0d3f620d5cba76a6b44a8812417"
+}
diff --git a/.sqlx/query-ffd49d0e0354d8d4010863204b1a1f5406b31542b6b0219d7daa1705bf7b2f37.json b/.sqlx/query-ffd49d0e0354d8d4010863204b1a1f5406b31542b6b0219d7daa1705bf7b2f37.json
new file mode 100644
index 0000000..fd95a35
--- /dev/null
+++ b/.sqlx/query-ffd49d0e0354d8d4010863204b1a1f5406b31542b6b0219d7daa1705bf7b2f37.json
@@ -0,0 +1,22 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "SELECT status FROM stack_template WHERE id = $1::uuid",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "status",
+ "type_info": "Varchar"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Uuid"
+ ]
+ },
+ "nullable": [
+ false
+ ]
+ },
+ "hash": "ffd49d0e0354d8d4010863204b1a1f5406b31542b6b0219d7daa1705bf7b2f37"
+}
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..58aa40b
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,25 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+## 2026-01-06
+
+### Added
+- Real HTTP-mocked tests for `UserServiceClient` covering user profile retrieval, product lookups, and template ownership checks.
+- Integration-style webhook tests that verify the payloads emitted by `MarketplaceWebhookSender` for approved, updated, and rejected templates.
+- Deployment validation tests ensuring plan gating and marketplace ownership logic behave correctly for free, paid, and plan-restricted templates.
+
+## 2026-01-16
+
+### Added
+- Configurable agent command polling defaults via config and environment variables.
+- Configurable Casbin reload (enable/disable flag and reload interval).
+
+### Changed
+- OAuth token validation now uses a shared HTTP client and a short-lived cache, reducing validation latency.
+- The agent command polling endpoint accepts optional `timeout` and `interval` parameters.
+- Casbin reload is guarded so it does not block request handling, and route matching is re-applied after each reload.
+
+### Fixed
+- The status panel command-update query now uses explicit parameter bindings to avoid SQLx type inference errors (see the sketch below).
+
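The "Fixed" entry above corresponds to the `UPDATE commands SET status = $2, updated_at = NOW() WHERE command_id = $1` statement whose prepared metadata appears in the `.sqlx` cache earlier in this diff. Below is a minimal sketch of the explicit-binding approach; it assumes a `sqlx::PgPool` handle and uses the runtime `sqlx::query(...).bind(...)` API with an explicit `::varchar` cast, so the function name and signature are illustrative rather than the repository's actual helper.

```rust
use sqlx::PgPool;

/// Illustrative only: update a command's status with explicitly bound
/// parameters, sidestepping the type inference the compile-time `query!`
/// macro would otherwise have to perform.
pub async fn set_command_status(
    pool: &PgPool,
    command_id: &str,
    status: &str,
) -> Result<u64, sqlx::Error> {
    let result = sqlx::query(
        r#"
        UPDATE commands
        SET status = $2::varchar, updated_at = NOW()
        WHERE command_id = $1
        "#,
    )
    .bind(command_id) // $1: command identifier (TEXT)
    .bind(status)     // $2: new status, cast explicitly to VARCHAR
    .execute(pool)
    .await?;

    Ok(result.rows_affected())
}
```

The cached query in this diff additionally uses `RETURNING` to hand the updated row back to the status panel; the sketch omits that and reports only the number of affected rows.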
diff --git a/Cargo.lock b/Cargo.lock
index a4b8f33..093f6fe 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1,14 +1,51 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
-version = 3
+version = 4
+
+[[package]]
+name = "actix"
+version = "0.13.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "de7fa236829ba0841304542f7614c42b80fca007455315c45c785ccfa873a85b"
+dependencies = [
+ "actix-macros",
+ "actix-rt",
+ "actix_derive",
+ "bitflags 2.10.0",
+ "bytes",
+ "crossbeam-channel",
+ "futures-core",
+ "futures-sink",
+ "futures-task",
+ "futures-util",
+ "log",
+ "once_cell",
+ "parking_lot",
+ "pin-project-lite",
+ "smallvec",
+ "tokio",
+ "tokio-util",
+]
+
+[[package]]
+name = "actix-casbin-auth"
+version = "1.1.0"
+source = "git+https://github.com/casbin-rs/actix-casbin-auth.git#d7cde82f76fa8d7e415650dda9f2daefcc575caa"
+dependencies = [
+ "actix-service",
+ "actix-web",
+ "casbin",
+ "futures",
+ "tokio",
+]
[[package]]
name = "actix-codec"
-version = "0.5.1"
+version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "617a8268e3537fe1d8c9ead925fca49ef6400927ee7bc26750e90ecee14ce4b8"
+checksum = "5f7b0a21988c1bf877cf4759ef5ddaac04c1c9fe808c9142ecb78ba97d97a28a"
dependencies = [
- "bitflags 1.3.2",
+ "bitflags 2.10.0",
"bytes",
"futures-core",
"futures-sink",
@@ -21,13 +58,13 @@ dependencies = [
[[package]]
name = "actix-cors"
-version = "0.6.4"
+version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b340e9cfa5b08690aae90fb61beb44e9b06f44fe3d0f93781aaa58cfba86245e"
+checksum = "0346d8c1f762b41b458ed3145eea914966bb9ad20b9be0d6d463b20d45586370"
dependencies = [
"actix-utils",
"actix-web",
- "derive_more",
+ "derive_more 0.99.20",
"futures-util",
"log",
"once_cell",
@@ -36,23 +73,23 @@ dependencies = [
[[package]]
name = "actix-http"
-version = "3.4.0"
+version = "3.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a92ef85799cba03f76e4f7c10f533e66d87c9a7e7055f3391f09000ad8351bc9"
+checksum = "7926860314cbe2fb5d1f13731e387ab43bd32bca224e82e6e2db85de0a3dba49"
dependencies = [
"actix-codec",
"actix-rt",
"actix-service",
"actix-utils",
- "ahash 0.8.3",
- "base64 0.21.4",
- "bitflags 2.4.1",
- "brotli",
+ "base64 0.22.1",
+ "bitflags 2.10.0",
+ "brotli 8.0.2",
"bytes",
"bytestring",
- "derive_more",
+ "derive_more 2.1.1",
"encoding_rs",
"flate2",
+ "foldhash",
"futures-core",
"h2",
"http",
@@ -64,7 +101,7 @@ dependencies = [
"mime",
"percent-encoding",
"pin-project-lite",
- "rand",
+ "rand 0.9.2",
"sha1",
"smallvec",
"tokio",
@@ -80,27 +117,29 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb"
dependencies = [
"quote",
- "syn 2.0.38",
+ "syn 2.0.111",
]
[[package]]
name = "actix-router"
-version = "0.5.1"
+version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d66ff4d247d2b160861fa2866457e85706833527840e4133f8f49aa423a38799"
+checksum = "13d324164c51f63867b57e73ba5936ea151b8a41a1d23d1031eeb9f70d0236f8"
dependencies = [
"bytestring",
+ "cfg-if",
"http",
"regex",
+ "regex-lite",
"serde",
"tracing",
]
[[package]]
name = "actix-rt"
-version = "2.9.0"
+version = "2.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "28f32d40287d3f402ae0028a9d54bef51af15c8769492826a69d28f81893151d"
+checksum = "92589714878ca59a7626ea19734f0e07a6a875197eec751bb5d3f99e64998c63"
dependencies = [
"futures-core",
"tokio",
@@ -108,9 +147,9 @@ dependencies = [
[[package]]
name = "actix-server"
-version = "2.3.0"
+version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3eb13e7eef0423ea6eab0e59f6c72e7cb46d33691ad56a726b3cd07ddec2c2d4"
+checksum = "a65064ea4a457eaf07f2fba30b4c695bf43b721790e9530d26cb6f9019ff7502"
dependencies = [
"actix-rt",
"actix-service",
@@ -118,19 +157,18 @@ dependencies = [
"futures-core",
"futures-util",
"mio",
- "socket2 0.5.4",
+ "socket2 0.5.10",
"tokio",
"tracing",
]
[[package]]
name = "actix-service"
-version = "2.0.2"
+version = "2.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3b894941f818cfdc7ccc4b9e60fa7e53b5042a2e8567270f9147d5591893373a"
+checksum = "9e46f36bf0e5af44bdc4bdb36fbbd421aa98c79a9bce724e1edeb3894e10dc7f"
dependencies = [
"futures-core",
- "paste",
"pin-project-lite",
]
@@ -146,9 +184,9 @@ dependencies = [
[[package]]
name = "actix-web"
-version = "4.4.0"
+version = "4.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0e4a5b5e29603ca8c94a77c65cf874718ceb60292c5a5c3e5f4ace041af462b9"
+checksum = "1654a77ba142e37f049637a3e5685f864514af11fcbc51cb51eb6596afe5b8d6"
dependencies = [
"actix-codec",
"actix-http",
@@ -159,15 +197,16 @@ dependencies = [
"actix-service",
"actix-utils",
"actix-web-codegen",
- "ahash 0.8.3",
"bytes",
"bytestring",
"cfg-if",
"cookie",
- "derive_more",
+ "derive_more 2.1.1",
"encoding_rs",
+ "foldhash",
"futures-core",
"futures-util",
+ "impl-more",
"itoa",
"language-tags",
"log",
@@ -175,85 +214,129 @@ dependencies = [
"once_cell",
"pin-project-lite",
"regex",
+ "regex-lite",
"serde",
"serde_json",
"serde_urlencoded",
"smallvec",
- "socket2 0.5.4",
- "time 0.3.30",
+ "socket2 0.6.1",
+ "time",
+ "tracing",
"url",
]
+[[package]]
+name = "actix-web-actors"
+version = "4.3.1+deprecated"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f98c5300b38fd004fe7d2a964f9a90813fdbe8a81fed500587e78b1b71c6f980"
+dependencies = [
+ "actix",
+ "actix-codec",
+ "actix-http",
+ "actix-web",
+ "bytes",
+ "bytestring",
+ "futures-core",
+ "pin-project-lite",
+ "tokio",
+ "tokio-util",
+]
+
[[package]]
name = "actix-web-codegen"
-version = "4.2.2"
+version = "4.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb1f50ebbb30eca122b188319a4398b3f7bb4a8cdf50ecfb73bfc6a3c3ce54f5"
+checksum = "f591380e2e68490b5dfaf1dd1aa0ebe78d84ba7067078512b4ea6e4492d622b8"
dependencies = [
"actix-router",
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.111",
]
[[package]]
-name = "actix-web-httpauth"
-version = "0.8.1"
+name = "actix_derive"
+version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1d613edf08a42ccc6864c941d30fe14e1b676a77d16f1dbadc1174d065a0a775"
+checksum = "b6ac1e58cded18cb28ddc17143c4dea5345b3ad575e14f32f66e4054a56eb271"
dependencies = [
- "actix-utils",
- "actix-web",
- "base64 0.21.4",
- "futures-core",
- "futures-util",
- "log",
- "pin-project-lite",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
+]
+
+[[package]]
+name = "adler2"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
+
+[[package]]
+name = "aead"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0"
+dependencies = [
+ "crypto-common",
+ "generic-array",
]
[[package]]
-name = "addr2line"
-version = "0.21.0"
+name = "aes"
+version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb"
+checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0"
dependencies = [
- "gimli",
+ "cfg-if",
+ "cipher",
+ "cpufeatures",
]
[[package]]
-name = "adler"
-version = "1.0.2"
+name = "aes-gcm"
+version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1"
+dependencies = [
+ "aead",
+ "aes",
+ "cipher",
+ "ctr",
+ "ghash",
+ "subtle",
+]
[[package]]
name = "ahash"
-version = "0.7.6"
+version = "0.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47"
+checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9"
dependencies = [
- "getrandom",
+ "getrandom 0.2.16",
"once_cell",
"version_check",
]
[[package]]
name = "ahash"
-version = "0.8.3"
+version = "0.8.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f"
+checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75"
dependencies = [
"cfg-if",
- "getrandom",
+ "const-random",
+ "getrandom 0.3.4",
"once_cell",
"version_check",
+ "zerocopy",
]
[[package]]
name = "aho-corasick"
-version = "1.1.2"
+version = "1.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
+checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301"
dependencies = [
"memchr",
]
@@ -275,2646 +358,5339 @@ dependencies = [
[[package]]
name = "allocator-api2"
-version = "0.2.16"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5"
-
-[[package]]
-name = "android-tzdata"
-version = "0.1.1"
+version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
+checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]]
-name = "android_system_properties"
-version = "0.1.5"
+name = "amq-protocol"
+version = "7.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
+checksum = "587d313f3a8b4a40f866cc84b6059fe83133bf172165ac3b583129dd211d8e1c"
dependencies = [
- "libc",
+ "amq-protocol-tcp",
+ "amq-protocol-types",
+ "amq-protocol-uri",
+ "cookie-factory",
+ "nom",
+ "serde",
]
[[package]]
-name = "async-trait"
-version = "0.1.74"
+name = "amq-protocol-tcp"
+version = "7.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9"
+checksum = "dc707ab9aa964a85d9fc25908a3fdc486d2e619406883b3105b48bf304a8d606"
dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.38",
+ "amq-protocol-uri",
+ "tcp-stream",
+ "tracing",
]
[[package]]
-name = "atoi"
-version = "1.0.0"
+name = "amq-protocol-types"
+version = "7.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d7c57d12312ff59c811c0643f4d80830505833c9ffaebd193d819392b265be8e"
+checksum = "bf99351d92a161c61ec6ecb213bc7057f5b837dd4e64ba6cb6491358efd770c4"
dependencies = [
- "num-traits",
+ "cookie-factory",
+ "nom",
+ "serde",
+ "serde_json",
]
[[package]]
-name = "autocfg"
-version = "1.1.0"
+name = "amq-protocol-uri"
+version = "7.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+checksum = "f89f8273826a676282208e5af38461a07fe939def57396af6ad5997fcf56577d"
+dependencies = [
+ "amq-protocol-types",
+ "percent-encoding",
+ "url",
+]
[[package]]
-name = "backtrace"
-version = "0.3.69"
+name = "android_system_properties"
+version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837"
+checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
dependencies = [
- "addr2line",
- "cc",
- "cfg-if",
"libc",
- "miniz_oxide",
- "object",
- "rustc-demangle",
]
[[package]]
-name = "base64"
-version = "0.13.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
-
-[[package]]
-name = "base64"
-version = "0.21.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ba43ea6f343b788c8764558649e08df62f86c6ef251fdaeb1ffd010a9ae50a2"
-
-[[package]]
-name = "bitflags"
-version = "1.3.2"
+name = "anstream"
+version = "0.6.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a"
+dependencies = [
+ "anstyle",
+ "anstyle-parse",
+ "anstyle-query",
+ "anstyle-wincon",
+ "colorchoice",
+ "is_terminal_polyfill",
+ "utf8parse",
+]
[[package]]
-name = "bitflags"
-version = "2.4.1"
+name = "anstyle"
+version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07"
+checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78"
[[package]]
-name = "block-buffer"
-version = "0.10.4"
+name = "anstyle-parse"
+version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
+checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2"
dependencies = [
- "generic-array",
+ "utf8parse",
]
[[package]]
-name = "brotli"
-version = "3.4.0"
+name = "anstyle-query"
+version = "1.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "516074a47ef4bce09577a3b379392300159ce5b1ba2e501ff1c819950066100f"
+checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc"
dependencies = [
- "alloc-no-stdlib",
- "alloc-stdlib",
- "brotli-decompressor",
+ "windows-sys 0.61.2",
]
[[package]]
-name = "brotli-decompressor"
-version = "2.5.0"
+name = "anstyle-wincon"
+version = "3.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da74e2b81409b1b743f8f0c62cc6254afefb8b8e50bbfe3735550f7aeefa3448"
+checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d"
dependencies = [
- "alloc-no-stdlib",
- "alloc-stdlib",
+ "anstyle",
+ "once_cell_polyfill",
+ "windows-sys 0.61.2",
]
[[package]]
-name = "bumpalo"
-version = "3.14.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec"
-
-[[package]]
-name = "byteorder"
-version = "1.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
-
-[[package]]
-name = "bytes"
-version = "1.5.0"
+name = "anyhow"
+version = "1.0.100"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223"
+checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
[[package]]
-name = "bytestring"
-version = "1.3.0"
+name = "arc-swap"
+version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "238e4886760d98c4f899360c834fa93e62cf7f721ac3c2da375cbdf4b8679aae"
+checksum = "51d03449bb8ca2cc2ef70869af31463d1ae5ccc8fa3e334b307203fbf815207e"
dependencies = [
- "bytes",
+ "rustversion",
]
[[package]]
-name = "cc"
-version = "1.0.83"
+name = "asn1-rs"
+version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0"
+checksum = "56624a96882bb8c26d61312ae18cb45868e5a9992ea73c58e45c3101e56a1e60"
dependencies = [
- "jobserver",
- "libc",
+ "asn1-rs-derive",
+ "asn1-rs-impl",
+ "displaydoc",
+ "nom",
+ "num-traits",
+ "rusticata-macros",
+ "thiserror 2.0.17",
+ "time",
]
[[package]]
-name = "cfg-if"
-version = "1.0.0"
+name = "asn1-rs-derive"
+version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+checksum = "3109e49b1e4909e9db6515a30c633684d68cdeaa252f215214cb4fa1a5bfee2c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
+ "synstructure",
+]
[[package]]
-name = "chrono"
-version = "0.4.29"
+name = "asn1-rs-impl"
+version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d87d9d13be47a5b7c3907137f1290b0459a7f80efb26be8c52afb11963bccb02"
+checksum = "7b18050c2cd6fe86c3a76584ef5e0baf286d038cda203eb6223df2cc413565f7"
dependencies = [
- "android-tzdata",
- "iana-time-zone",
- "js-sys",
- "num-traits",
- "serde",
- "time 0.1.45",
- "wasm-bindgen",
- "windows-targets",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
]
[[package]]
-name = "config"
-version = "0.13.3"
+name = "assert-json-diff"
+version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d379af7f68bfc21714c6c7dea883544201741d2ce8274bb12fa54f89507f52a7"
+checksum = "47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12"
dependencies = [
- "async-trait",
- "json5",
- "lazy_static",
- "nom",
- "pathdiff",
- "ron",
- "rust-ini",
"serde",
"serde_json",
- "toml",
- "yaml-rust",
]
[[package]]
-name = "convert_case"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
-
-[[package]]
-name = "cookie"
-version = "0.16.2"
+name = "async-channel"
+version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb"
+checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35"
dependencies = [
- "percent-encoding",
- "time 0.3.30",
- "version_check",
+ "concurrent-queue",
+ "event-listener 2.5.3",
+ "futures-core",
]
[[package]]
-name = "core-foundation"
-version = "0.9.3"
+name = "async-channel"
+version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146"
+checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2"
dependencies = [
- "core-foundation-sys",
- "libc",
+ "concurrent-queue",
+ "event-listener-strategy",
+ "futures-core",
+ "pin-project-lite",
]
[[package]]
-name = "core-foundation-sys"
-version = "0.8.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa"
-
-[[package]]
-name = "cpufeatures"
-version = "0.2.9"
+name = "async-executor"
+version = "1.13.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1"
+checksum = "497c00e0fd83a72a79a39fcbd8e3e2f055d6f6c7e025f3b3d91f4f8e76527fb8"
dependencies = [
- "libc",
+ "async-task",
+ "concurrent-queue",
+ "fastrand 2.3.0",
+ "futures-lite 2.6.1",
+ "pin-project-lite",
+ "slab",
]
[[package]]
-name = "crc"
-version = "3.0.1"
+name = "async-global-executor"
+version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "86ec7a15cbe22e59248fc7eadb1907dab5ba09372595da4d73dd805ed4417dfe"
+checksum = "13f937e26114b93193065fd44f507aa2e9169ad0cdabbb996920b1fe1ddea7ba"
dependencies = [
- "crc-catalog",
+ "async-channel 2.5.0",
+ "async-executor",
+ "async-io 2.6.0",
+ "async-lock 3.4.2",
+ "blocking",
+ "futures-lite 2.6.1",
]
[[package]]
-name = "crc-catalog"
+name = "async-global-executor-trait"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9cace84e55f07e7301bae1c519df89cdad8cc3cd868413d3fdbdeca9ff3db484"
+checksum = "9af57045d58eeb1f7060e7025a1631cbc6399e0a1d10ad6735b3d0ea7f8346ce"
+dependencies = [
+ "async-global-executor",
+ "async-trait",
+ "executor-trait",
+]
[[package]]
-name = "crc32fast"
-version = "1.3.2"
+name = "async-io"
+version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
+checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af"
dependencies = [
+ "async-lock 2.8.0",
+ "autocfg",
"cfg-if",
+ "concurrent-queue",
+ "futures-lite 1.13.0",
+ "log",
+ "parking",
+ "polling 2.8.0",
+ "rustix 0.37.28",
+ "slab",
+ "socket2 0.4.10",
+ "waker-fn",
]
[[package]]
-name = "crossbeam-queue"
-version = "0.3.8"
+name = "async-io"
+version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add"
+checksum = "456b8a8feb6f42d237746d4b3e9a178494627745c3c56c6ea55d92ba50d026fc"
dependencies = [
+ "autocfg",
"cfg-if",
- "crossbeam-utils",
+ "concurrent-queue",
+ "futures-io",
+ "futures-lite 2.6.1",
+ "parking",
+ "polling 3.11.0",
+ "rustix 1.1.3",
+ "slab",
+ "windows-sys 0.61.2",
]
[[package]]
-name = "crossbeam-utils"
-version = "0.8.16"
+name = "async-lock"
+version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294"
+checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b"
dependencies = [
- "cfg-if",
+ "event-listener 2.5.3",
]
[[package]]
-name = "crypto-common"
-version = "0.1.6"
+name = "async-lock"
+version = "3.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
+checksum = "290f7f2596bd5b78a9fec8088ccd89180d7f9f55b94b0576823bbbdc72ee8311"
dependencies = [
- "generic-array",
- "typenum",
+ "event-listener 5.4.1",
+ "event-listener-strategy",
+ "pin-project-lite",
]
[[package]]
-name = "deranged"
-version = "0.3.9"
+name = "async-reactor-trait"
+version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f32d04922c60427da6f9fef14d042d9edddef64cb9d4ce0d64d0685fbeb1fd3"
+checksum = "7a6012d170ad00de56c9ee354aef2e358359deb1ec504254e0e5a3774771de0e"
dependencies = [
- "powerfmt",
+ "async-io 1.13.0",
+ "async-trait",
+ "futures-core",
+ "reactor-trait",
]
[[package]]
-name = "derive_more"
-version = "0.99.17"
+name = "async-task"
+version = "4.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
-dependencies = [
- "convert_case",
- "proc-macro2",
- "quote",
- "rustc_version",
- "syn 1.0.109",
-]
-
-[[package]]
-name = "digest"
-version = "0.10.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
-dependencies = [
- "block-buffer",
- "crypto-common",
- "subtle",
-]
+checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de"
[[package]]
-name = "dirs"
-version = "4.0.0"
+name = "async-trait"
+version = "0.1.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059"
+checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb"
dependencies = [
- "dirs-sys",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
]
[[package]]
-name = "dirs-sys"
-version = "0.3.7"
+name = "atoi"
+version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6"
+checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528"
dependencies = [
- "libc",
- "redox_users",
- "winapi",
+ "num-traits",
]
[[package]]
-name = "dlv-list"
-version = "0.3.0"
+name = "atomic-waker"
+version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0688c2a7f92e427f44895cd63841bff7b29f8d7a1648b9e7e07a4a365b2e1257"
+checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0"
[[package]]
-name = "dotenvy"
-version = "0.15.7"
+name = "autocfg"
+version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b"
+checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
[[package]]
-name = "either"
-version = "1.9.0"
+name = "backon"
+version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
+checksum = "cffb0e931875b666fc4fcb20fee52e9bbd1ef836fd9e9e04ec21555f9f85f7ef"
dependencies = [
- "serde",
+ "fastrand 2.3.0",
]
[[package]]
-name = "encoding_rs"
-version = "0.8.33"
+name = "base64"
+version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1"
-dependencies = [
- "cfg-if",
-]
+checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
[[package]]
-name = "errno"
-version = "0.3.5"
+name = "base64"
+version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860"
-dependencies = [
- "libc",
- "windows-sys",
-]
+checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
[[package]]
-name = "event-listener"
-version = "2.5.3"
+name = "base64"
+version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0"
+checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
[[package]]
-name = "fastrand"
-version = "2.0.1"
+name = "base64ct"
+version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5"
+checksum = "0e050f626429857a27ddccb31e0aca21356bfa709c04041aefddac081a8f068a"
[[package]]
-name = "finl_unicode"
-version = "1.2.0"
+name = "bitflags"
+version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
-name = "flate2"
-version = "1.0.28"
+name = "bitflags"
+version = "2.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e"
+checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
dependencies = [
- "crc32fast",
- "miniz_oxide",
+ "serde_core",
]
[[package]]
-name = "fnv"
-version = "1.0.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
-
-[[package]]
-name = "foreign-types"
-version = "0.3.2"
+name = "block-buffer"
+version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
+checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
dependencies = [
- "foreign-types-shared",
+ "generic-array",
]
[[package]]
-name = "foreign-types-shared"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
-
-[[package]]
-name = "form_urlencoded"
-version = "1.2.0"
+name = "block-padding"
+version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652"
+checksum = "a8894febbff9f758034a5b8e12d87918f56dfc64a8e1fe757d65e29041538d93"
dependencies = [
- "percent-encoding",
+ "generic-array",
]
[[package]]
-name = "futures"
-version = "0.3.29"
+name = "blocking"
+version = "1.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da0290714b38af9b4a7b094b8a37086d1b4e61f2df9122c3cad2577669145335"
+checksum = "e83f8d02be6967315521be875afa792a316e28d57b5a2d401897e2a7921b7f21"
dependencies = [
- "futures-channel",
- "futures-core",
- "futures-executor",
+ "async-channel 2.5.0",
+ "async-task",
"futures-io",
- "futures-sink",
- "futures-task",
- "futures-util",
+ "futures-lite 2.6.1",
+ "piper",
]
[[package]]
-name = "futures-channel"
-version = "0.3.29"
+name = "brotli"
+version = "3.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ff4dd66668b557604244583e3e1e1eada8c5c2e96a6d0d6653ede395b78bbacb"
+checksum = "d640d25bc63c50fb1f0b545ffd80207d2e10a4c965530809b40ba3386825c391"
dependencies = [
- "futures-core",
- "futures-sink",
+ "alloc-no-stdlib",
+ "alloc-stdlib",
+ "brotli-decompressor 2.5.1",
]
[[package]]
-name = "futures-core"
-version = "0.3.29"
+name = "brotli"
+version = "8.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb1d22c66e66d9d72e1758f0bd7d4fd0bee04cad842ee34587d68c07e45d088c"
+checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560"
+dependencies = [
+ "alloc-no-stdlib",
+ "alloc-stdlib",
+ "brotli-decompressor 5.0.0",
+]
[[package]]
-name = "futures-executor"
-version = "0.3.29"
+name = "brotli-decompressor"
+version = "2.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f4fb8693db0cf099eadcca0efe2a5a22e4550f98ed16aba6c48700da29597bc"
+checksum = "4e2e4afe60d7dd600fdd3de8d0f08c2b7ec039712e3b6137ff98b7004e82de4f"
dependencies = [
- "futures-core",
- "futures-task",
- "futures-util",
+ "alloc-no-stdlib",
+ "alloc-stdlib",
]
[[package]]
-name = "futures-intrusive"
-version = "0.4.2"
+name = "brotli-decompressor"
+version = "5.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a604f7a68fbf8103337523b1fadc8ade7361ee3f112f7c680ad179651616aed5"
+checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03"
dependencies = [
- "futures-core",
- "lock_api",
- "parking_lot 0.11.2",
+ "alloc-no-stdlib",
+ "alloc-stdlib",
]
[[package]]
-name = "futures-io"
-version = "0.3.29"
+name = "bumpalo"
+version = "3.19.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8bf34a163b5c4c52d0478a4d757da8fb65cabef42ba90515efee0f6f9fa45aaa"
+checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510"
[[package]]
-name = "futures-macro"
-version = "0.3.29"
+name = "bytecount"
+version = "0.6.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "53b153fd91e4b0147f4aced87be237c98248656bb01050b96bf3ee89220a8ddb"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.38",
-]
+checksum = "175812e0be2bccb6abe50bb8d566126198344f707e304f45c648fd8f2cc0365e"
[[package]]
-name = "futures-sink"
-version = "0.3.29"
+name = "byteorder"
+version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e36d3378ee38c2a36ad710c5d30c2911d752cb941c00c72dbabfb786a7970817"
+checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
-name = "futures-task"
-version = "0.3.29"
+name = "bytes"
+version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "efd193069b0ddadc69c46389b740bbccdd97203899b48d09c5f7969591d6bae2"
+checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3"
[[package]]
-name = "futures-util"
-version = "0.3.29"
+name = "bytestring"
+version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a19526d624e703a3179b3d322efec918b6246ea0fa51d41124525f00f1cc8104"
+checksum = "113b4343b5f6617e7ad401ced8de3cc8b012e73a594347c307b90db3e9271289"
dependencies = [
- "futures-channel",
- "futures-core",
- "futures-io",
- "futures-macro",
- "futures-sink",
- "futures-task",
- "memchr",
- "pin-project-lite",
- "pin-utils",
- "slab",
+ "bytes",
]
[[package]]
-name = "generic-array"
-version = "0.14.7"
+name = "camino"
+version = "1.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
+checksum = "e629a66d692cb9ff1a1c664e41771b3dcaf961985a9774c0eb0bd1b51cf60a48"
dependencies = [
- "typenum",
- "version_check",
+ "serde_core",
]
[[package]]
-name = "gethostname"
-version = "0.2.3"
+name = "cargo-platform"
+version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1ebd34e35c46e00bb73e81363248d627782724609fe1b6396f553f68fe3862e"
+checksum = "e35af189006b9c0f00a064685c727031e3ed2d8020f7ba284d78cc2671bd36ea"
dependencies = [
- "libc",
- "winapi",
+ "serde",
]
[[package]]
-name = "getrandom"
-version = "0.2.10"
+name = "cargo_metadata"
+version = "0.14.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427"
+checksum = "4acbb09d9ee8e23699b9634375c72795d095bf268439da88562cf9b501f181fa"
dependencies = [
- "cfg-if",
- "libc",
- "wasi 0.11.0+wasi-snapshot-preview1",
+ "camino",
+ "cargo-platform",
+ "semver",
+ "serde",
+ "serde_json",
]
[[package]]
-name = "gimli"
-version = "0.28.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0"
-
-[[package]]
-name = "h2"
-version = "0.3.21"
+name = "casbin"
+version = "2.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833"
+checksum = "4b12705127ab9fcf4fbc22a0c93f441514fe7bd7a7248ce443e4bf531c54b7ee"
dependencies = [
- "bytes",
- "fnv",
- "futures-core",
- "futures-sink",
- "futures-util",
- "http",
- "indexmap",
- "slab",
+ "async-trait",
+ "fixedbitset",
+ "getrandom 0.3.4",
+ "hashlink 0.9.1",
+ "mini-moka",
+ "once_cell",
+ "parking_lot",
+ "petgraph",
+ "regex",
+ "rhai",
+ "serde",
+ "serde_json",
+ "slog",
+ "slog-async",
+ "slog-term",
+ "thiserror 1.0.69",
"tokio",
- "tokio-util",
- "tracing",
+ "wasm-bindgen-test",
]
[[package]]
-name = "hashbrown"
-version = "0.12.3"
+name = "cast"
+version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
-dependencies = [
- "ahash 0.7.6",
-]
+checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
[[package]]
-name = "hashbrown"
-version = "0.14.1"
+name = "cbc"
+version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7dfda62a12f55daeae5015f81b0baea145391cb4520f86c248fc615d72640d12"
+checksum = "26b52a9543ae338f279b96b0b9fed9c8093744685043739079ce85cd58f289a6"
dependencies = [
- "ahash 0.8.3",
- "allocator-api2",
+ "cipher",
]
[[package]]
-name = "hashlink"
-version = "0.8.4"
+name = "cc"
+version = "1.2.50"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7"
+checksum = "9f50d563227a1c37cc0a263f64eca3334388c01c5e4c4861a9def205c614383c"
dependencies = [
- "hashbrown 0.14.1",
+ "find-msvc-tools",
+ "jobserver",
+ "libc",
+ "shlex",
]
[[package]]
-name = "heck"
-version = "0.4.1"
+name = "cfg-if"
+version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
-dependencies = [
- "unicode-segmentation",
-]
+checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
[[package]]
-name = "hermit-abi"
-version = "0.3.3"
+name = "chrono"
+version = "0.4.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7"
+checksum = "fac4744fb15ae8337dc853fee7fb3f4e48c0fbaa23d0afe49c447b4fab126118"
+dependencies = [
+ "iana-time-zone",
+ "js-sys",
+ "num-traits",
+ "serde",
+ "wasm-bindgen",
+ "windows-link",
+]
[[package]]
-name = "hex"
-version = "0.4.3"
+name = "cipher"
+version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
+checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad"
+dependencies = [
+ "crypto-common",
+ "inout",
+]
[[package]]
-name = "hkdf"
-version = "0.12.3"
+name = "clap"
+version = "4.5.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437"
+checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8"
dependencies = [
- "hmac",
+ "clap_builder",
+ "clap_derive",
]
[[package]]
-name = "hmac"
-version = "0.12.1"
+name = "clap_builder"
+version = "4.5.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e"
+checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00"
dependencies = [
- "digest",
+ "anstream",
+ "anstyle",
+ "clap_lex",
+ "strsim 0.11.1",
]
[[package]]
-name = "http"
-version = "0.2.9"
+name = "clap_derive"
+version = "4.5.49"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482"
+checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671"
dependencies = [
- "bytes",
- "fnv",
- "itoa",
+ "heck",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
]
[[package]]
-name = "http-body"
-version = "0.4.5"
+name = "clap_lex"
+version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1"
-dependencies = [
- "bytes",
- "http",
- "pin-project-lite",
-]
+checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d"
[[package]]
-name = "httparse"
-version = "1.8.0"
+name = "cms"
+version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904"
+checksum = "7b77c319abfd5219629c45c34c89ba945ed3c5e49fcde9d16b6c3885f118a730"
+dependencies = [
+ "const-oid",
+ "der",
+ "spki",
+ "x509-cert",
+]
[[package]]
-name = "httpdate"
-version = "1.0.3"
+name = "colorchoice"
+version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
+checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
[[package]]
-name = "hyper"
-version = "0.14.27"
+name = "combine"
+version = "4.6.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468"
+checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd"
dependencies = [
"bytes",
- "futures-channel",
"futures-core",
- "futures-util",
- "h2",
- "http",
- "http-body",
- "httparse",
- "httpdate",
- "itoa",
+ "memchr",
"pin-project-lite",
- "socket2 0.4.9",
"tokio",
- "tower-service",
- "tracing",
- "want",
+ "tokio-util",
]
[[package]]
-name = "hyper-tls"
-version = "0.5.0"
+name = "concurrent-queue"
+version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905"
+checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973"
dependencies = [
- "bytes",
- "hyper",
- "native-tls",
- "tokio",
- "tokio-native-tls",
+ "crossbeam-utils",
]
[[package]]
-name = "iana-time-zone"
-version = "0.1.58"
+name = "config"
+version = "0.13.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8326b86b6cff230b97d0d312a6c40a60726df3332e721f72a1b035f451663b20"
+checksum = "23738e11972c7643e4ec947840fc463b6a571afcd3e735bdfce7d03c7a784aca"
dependencies = [
- "android_system_properties",
- "core-foundation-sys",
- "iana-time-zone-haiku",
- "js-sys",
- "wasm-bindgen",
- "windows-core",
+ "async-trait",
+ "json5",
+ "lazy_static",
+ "nom",
+ "pathdiff",
+ "ron",
+ "rust-ini",
+ "serde",
+ "serde_json",
+ "toml",
+ "yaml-rust",
]
[[package]]
-name = "iana-time-zone-haiku"
-version = "0.1.2"
+name = "const-oid"
+version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f"
-dependencies = [
- "cc",
-]
+checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
[[package]]
-name = "idna"
-version = "0.4.0"
+name = "const-random"
+version = "0.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c"
+checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359"
dependencies = [
- "unicode-bidi",
- "unicode-normalization",
+ "const-random-macro",
]
[[package]]
-name = "indexmap"
-version = "1.9.3"
+name = "const-random-macro"
+version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
+checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e"
dependencies = [
- "autocfg",
- "hashbrown 0.12.3",
- "serde",
+ "getrandom 0.2.16",
+ "once_cell",
+ "tiny-keccak",
]
[[package]]
-name = "instant"
-version = "0.1.12"
+name = "convert_case"
+version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
-dependencies = [
- "cfg-if",
-]
+checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
[[package]]
-name = "ipnet"
-version = "2.8.0"
+name = "convert_case"
+version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6"
+checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9"
+dependencies = [
+ "unicode-segmentation",
+]
[[package]]
-name = "itertools"
-version = "0.10.5"
+name = "cookie"
+version = "0.16.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
+checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb"
dependencies = [
- "either",
+ "percent-encoding",
+ "time",
+ "version_check",
]
[[package]]
-name = "itertools"
-version = "0.11.0"
+name = "cookie-factory"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9885fa71e26b8ab7855e2ec7cae6e9b380edff76cd052e07c683a0319d51b3a2"
+
+[[package]]
+name = "core-foundation"
+version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"
+checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f"
dependencies = [
- "either",
+ "core-foundation-sys",
+ "libc",
]
[[package]]
-name = "itoa"
-version = "1.0.9"
+name = "core-foundation-sys"
+version = "0.8.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38"
+checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
[[package]]
-name = "jobserver"
-version = "0.1.27"
+name = "cpufeatures"
+version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c37f63953c4c63420ed5fd3d6d398c719489b9f872b9fa683262f8edd363c7d"
+checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280"
dependencies = [
"libc",
]
[[package]]
-name = "js-sys"
-version = "0.3.64"
+name = "crc"
+version = "3.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a"
+checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d"
dependencies = [
- "wasm-bindgen",
+ "crc-catalog",
]
[[package]]
-name = "json5"
-version = "0.4.1"
+name = "crc-catalog"
+version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "96b0db21af676c1ce64250b5f40f3ce2cf27e4e47cb91ed91eb6fe9350b430c1"
-dependencies = [
- "pest",
- "pest_derive",
- "serde",
-]
+checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"
[[package]]
-name = "language-tags"
-version = "0.3.2"
+name = "crc32fast"
+version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388"
+checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511"
+dependencies = [
+ "cfg-if",
+]
[[package]]
-name = "lazy_static"
-version = "1.4.0"
+name = "crossbeam-channel"
+version = "0.5.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2"
+dependencies = [
+ "crossbeam-utils",
+]
[[package]]
-name = "libc"
-version = "0.2.149"
+name = "crossbeam-queue"
+version = "0.3.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b"
+checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115"
+dependencies = [
+ "crossbeam-utils",
+]
[[package]]
-name = "linked-hash-map"
-version = "0.5.6"
+name = "crossbeam-utils"
+version = "0.8.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
+checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
[[package]]
-name = "linux-raw-sys"
-version = "0.4.10"
+name = "crunchy"
+version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f"
+checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"
[[package]]
-name = "local-channel"
-version = "0.1.4"
+name = "crypto-common"
+version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e0a493488de5f18c8ffcba89eebb8532ffc562dc400490eb65b84893fae0b178"
+checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a"
dependencies = [
- "futures-core",
- "futures-sink",
- "local-waker",
+ "generic-array",
+ "rand_core 0.6.4",
+ "typenum",
]
[[package]]
-name = "local-waker"
-version = "0.1.3"
+name = "ctr"
+version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e34f76eb3611940e0e7d53a9aaa4e6a3151f69541a282fd0dad5571420c53ff1"
+checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835"
+dependencies = [
+ "cipher",
+]
[[package]]
-name = "lock_api"
-version = "0.4.11"
+name = "darling"
+version = "0.14.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45"
+checksum = "7b750cb3417fd1b327431a470f388520309479ab0bf5e323505daf0290cd3850"
dependencies = [
- "autocfg",
- "scopeguard",
+ "darling_core",
+ "darling_macro",
]
[[package]]
-name = "log"
-version = "0.4.20"
+name = "darling_core"
+version = "0.14.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
+checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0"
+dependencies = [
+ "fnv",
+ "ident_case",
+ "proc-macro2",
+ "quote",
+ "strsim 0.10.0",
+ "syn 1.0.109",
+]
[[package]]
-name = "matchers"
-version = "0.1.0"
+name = "darling_macro"
+version = "0.14.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e"
dependencies = [
- "regex-automata 0.1.10",
+ "darling_core",
+ "quote",
+ "syn 1.0.109",
]
[[package]]
-name = "md-5"
-version = "0.10.6"
+name = "dashmap"
+version = "5.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf"
+checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856"
dependencies = [
"cfg-if",
- "digest",
+ "hashbrown 0.14.5",
+ "lock_api",
+ "once_cell",
+ "parking_lot_core",
]
[[package]]
-name = "memchr"
-version = "2.6.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
-
-[[package]]
-name = "mime"
-version = "0.3.17"
+name = "data-encoding"
+version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
+checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476"
[[package]]
-name = "minimal-lexical"
-version = "0.2.1"
+name = "deadpool"
+version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
+checksum = "421fe0f90f2ab22016f32a9881be5134fdd71c65298917084b0c7477cbc3856e"
+dependencies = [
+ "async-trait",
+ "deadpool-runtime",
+ "num_cpus",
+ "retain_mut",
+ "tokio",
+]
[[package]]
-name = "miniz_oxide"
-version = "0.7.1"
+name = "deadpool"
+version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7"
+checksum = "0be2b1d1d6ec8d846f05e137292d0b89133caf95ef33695424c09568bdd39b1b"
dependencies = [
- "adler",
+ "deadpool-runtime",
+ "lazy_static",
+ "num_cpus",
+ "tokio",
]
[[package]]
-name = "mio"
-version = "0.8.8"
+name = "deadpool-lapin"
+version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2"
+checksum = "33c7b14064f854a3969735e7c948c677a57ef17ca7f0bc029da8fe2e5e0fc1eb"
dependencies = [
- "libc",
- "log",
- "wasi 0.11.0+wasi-snapshot-preview1",
- "windows-sys",
+ "deadpool 0.12.3",
+ "lapin",
+ "tokio-executor-trait",
]
[[package]]
-name = "native-tls"
-version = "0.2.11"
+name = "deadpool-runtime"
+version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e"
+checksum = "092966b41edc516079bdf31ec78a2e0588d1d0c08f78b91d8307215928642b2b"
dependencies = [
- "lazy_static",
- "libc",
- "log",
- "openssl",
- "openssl-probe",
- "openssl-sys",
- "schannel",
- "security-framework",
- "security-framework-sys",
- "tempfile",
+ "tokio",
]
[[package]]
-name = "nom"
-version = "7.1.3"
+name = "der"
+version = "0.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
+checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb"
dependencies = [
- "memchr",
- "minimal-lexical",
+ "const-oid",
+ "der_derive",
+ "flagset",
+ "pem-rfc7468",
+ "zeroize",
]
[[package]]
-name = "nu-ansi-term"
-version = "0.46.0"
+name = "der-parser"
+version = "10.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
+checksum = "07da5016415d5a3c4dd39b11ed26f915f52fc4e0dc197d87908bc916e51bc1a6"
dependencies = [
- "overload",
- "winapi",
+ "asn1-rs",
+ "displaydoc",
+ "nom",
+ "num-bigint",
+ "num-traits",
+ "rusticata-macros",
]
[[package]]
-name = "num-traits"
-version = "0.2.17"
+name = "der_derive"
+version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c"
+checksum = "8034092389675178f570469e6c3b0465d3d30b4505c294a6550db47f3c17ad18"
dependencies = [
- "autocfg",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
]
[[package]]
-name = "num_cpus"
-version = "1.16.0"
+name = "deranged"
+version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
+checksum = "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587"
dependencies = [
- "hermit-abi",
- "libc",
+ "powerfmt",
]
[[package]]
-name = "object"
-version = "0.32.1"
+name = "derive_builder"
+version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0"
+checksum = "8d67778784b508018359cbc8696edb3db78160bab2c2a28ba7f56ef6932997f8"
dependencies = [
- "memchr",
+ "derive_builder_macro 0.12.0",
]
[[package]]
-name = "once_cell"
-version = "1.18.0"
+name = "derive_builder"
+version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
+checksum = "8f59169f400d8087f238c5c0c7db6a28af18681717f3b623227d92f397e938c7"
+dependencies = [
+ "derive_builder_macro 0.13.1",
+]
[[package]]
-name = "openssl"
-version = "0.10.57"
+name = "derive_builder_core"
+version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bac25ee399abb46215765b1cb35bc0212377e58a061560d8b29b024fd0430e7c"
+checksum = "c11bdc11a0c47bc7d37d582b5285da6849c96681023680b906673c5707af7b0f"
dependencies = [
- "bitflags 2.4.1",
- "cfg-if",
- "foreign-types",
- "libc",
- "once_cell",
- "openssl-macros",
- "openssl-sys",
+ "darling",
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
]
[[package]]
-name = "openssl-macros"
-version = "0.1.1"
+name = "derive_builder_core"
+version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
+checksum = "a4ec317cc3e7ef0928b0ca6e4a634a4d6c001672ae210438cf114a83e56b018d"
dependencies = [
+ "darling",
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 1.0.109",
]
[[package]]
-name = "openssl-probe"
-version = "0.1.5"
+name = "derive_builder_macro"
+version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
+checksum = "ebcda35c7a396850a55ffeac740804b40ffec779b98fffbb1738f4033f0ee79e"
+dependencies = [
+ "derive_builder_core 0.12.0",
+ "syn 1.0.109",
+]
[[package]]
-name = "openssl-sys"
-version = "0.9.93"
+name = "derive_builder_macro"
+version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "db4d56a4c0478783083cfafcc42493dd4a981d41669da64b4572a2a089b51b1d"
+checksum = "870368c3fb35b8031abb378861d4460f573b92238ec2152c927a21f77e3e0127"
dependencies = [
- "cc",
- "libc",
- "pkg-config",
- "vcpkg",
+ "derive_builder_core 0.13.1",
+ "syn 1.0.109",
]
[[package]]
-name = "ordered-multimap"
-version = "0.4.3"
+name = "derive_more"
+version = "0.99.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ccd746e37177e1711c20dd619a1620f34f5c8b569c53590a72dedd5344d8924a"
+checksum = "6edb4b64a43d977b8e99788fe3a04d483834fba1215a7e02caa415b626497f7f"
dependencies = [
- "dlv-list",
- "hashbrown 0.12.3",
+ "convert_case 0.4.0",
+ "proc-macro2",
+ "quote",
+ "rustc_version",
+ "syn 2.0.111",
]
[[package]]
-name = "overload"
-version = "0.1.1"
+name = "derive_more"
+version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
+checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134"
+dependencies = [
+ "derive_more-impl",
+]
[[package]]
-name = "parking_lot"
-version = "0.11.2"
+name = "derive_more-impl"
+version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
+checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb"
dependencies = [
- "instant",
- "lock_api",
- "parking_lot_core 0.8.6",
+ "convert_case 0.10.0",
+ "proc-macro2",
+ "quote",
+ "rustc_version",
+ "syn 2.0.111",
+ "unicode-xid",
]
[[package]]
-name = "parking_lot"
-version = "0.12.1"
+name = "des"
+version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
+checksum = "ffdd80ce8ce993de27e9f063a444a4d53ce8e8db4c1f00cc03af5ad5a9867a1e"
dependencies = [
- "lock_api",
- "parking_lot_core 0.9.9",
+ "cipher",
]
[[package]]
-name = "parking_lot_core"
-version = "0.8.6"
+name = "digest"
+version = "0.10.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc"
+checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
- "cfg-if",
- "instant",
- "libc",
- "redox_syscall 0.2.16",
- "smallvec",
- "winapi",
+ "block-buffer",
+ "const-oid",
+ "crypto-common",
+ "subtle",
]
[[package]]
-name = "parking_lot_core"
-version = "0.9.9"
+name = "displaydoc"
+version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e"
+checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
dependencies = [
- "cfg-if",
- "libc",
- "redox_syscall 0.4.1",
- "smallvec",
- "windows-targets",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
]
[[package]]
-name = "paste"
-version = "1.0.14"
+name = "dlv-list"
+version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c"
+checksum = "0688c2a7f92e427f44895cd63841bff7b29f8d7a1648b9e7e07a4a365b2e1257"
[[package]]
-name = "pathdiff"
-version = "0.2.1"
+name = "doc-comment"
+version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd"
+checksum = "780955b8b195a21ab8e4ac6b60dd1dbdcec1dc6c51c0617964b08c81785e12c9"
[[package]]
-name = "percent-encoding"
-version = "2.3.0"
+name = "docker-compose-types"
+version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94"
+checksum = "6d6fdd6fa1c9e8e716f5f73406b868929f468702449621e7397066478b9bf89c"
+dependencies = [
+ "derive_builder 0.13.1",
+ "indexmap",
+ "serde",
+ "serde_yaml",
+]
[[package]]
-name = "pest"
-version = "2.7.4"
+name = "dotenvy"
+version = "0.15.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c022f1e7b65d6a24c0dbbd5fb344c66881bc01f3e5ae74a1c8100f2f985d98a4"
-dependencies = [
- "memchr",
- "thiserror",
- "ucd-trie",
-]
+checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b"
[[package]]
-name = "pest_derive"
-version = "2.7.4"
+name = "either"
+version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "35513f630d46400a977c4cb58f78e1bfbe01434316e60c37d27b9ad6139c66d8"
+checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
dependencies = [
- "pest",
- "pest_generator",
+ "serde",
]
[[package]]
-name = "pest_generator"
-version = "2.7.4"
+name = "encoding_rs"
+version = "0.8.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc9fc1b9e7057baba189b5c626e2d6f40681ae5b6eb064dc7c7834101ec8123a"
+checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3"
dependencies = [
- "pest",
- "pest_meta",
- "proc-macro2",
- "quote",
- "syn 2.0.38",
+ "cfg-if",
]
[[package]]
-name = "pest_meta"
-version = "2.7.4"
+name = "equivalent"
+version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1df74e9e7ec4053ceb980e7c0c8bd3594e977fde1af91daba9c928e8e8c6708d"
-dependencies = [
- "once_cell",
- "pest",
- "sha2",
-]
+checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
[[package]]
-name = "pin-project"
-version = "1.1.3"
+name = "erased-serde"
+version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422"
+checksum = "6c138974f9d5e7fe373eb04df7cae98833802ae4b11c24ac7039a21d5af4b26c"
dependencies = [
- "pin-project-internal",
+ "serde",
]
[[package]]
-name = "pin-project-internal"
-version = "1.1.3"
+name = "errno"
+version = "0.3.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405"
+checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.38",
+ "libc",
+ "windows-sys 0.61.2",
]
[[package]]
-name = "pin-project-lite"
-version = "0.2.13"
+name = "error-chain"
+version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58"
+checksum = "2d2f06b9cac1506ece98fe3231e3cc9c4410ec3d5b1f24ae1c8946f0742cdefc"
+dependencies = [
+ "version_check",
+]
[[package]]
-name = "pin-utils"
-version = "0.1.0"
+name = "etcetera"
+version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
+checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943"
+dependencies = [
+ "cfg-if",
+ "home",
+ "windows-sys 0.48.0",
+]
[[package]]
-name = "pkg-config"
-version = "0.3.27"
+name = "event-listener"
+version = "2.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964"
+checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0"
[[package]]
-name = "powerfmt"
-version = "0.2.0"
+name = "event-listener"
+version = "5.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
+checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab"
+dependencies = [
+ "concurrent-queue",
+ "parking",
+ "pin-project-lite",
+]
[[package]]
-name = "ppv-lite86"
-version = "0.2.17"
+name = "event-listener-strategy"
+version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
+checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93"
+dependencies = [
+ "event-listener 5.4.1",
+ "pin-project-lite",
+]
[[package]]
-name = "proc-macro-error"
-version = "1.0.4"
+name = "executor-trait"
+version = "2.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
+checksum = "13c39dff9342e4e0e16ce96be751eb21a94e94a87bb2f6e63ad1961c2ce109bf"
dependencies = [
- "proc-macro-error-attr",
- "proc-macro2",
- "quote",
- "syn 1.0.109",
- "version_check",
+ "async-trait",
]
[[package]]
-name = "proc-macro-error-attr"
-version = "1.0.4"
+name = "fastrand"
+version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
+checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be"
dependencies = [
- "proc-macro2",
- "quote",
- "version_check",
+ "instant",
]
[[package]]
-name = "proc-macro2"
-version = "1.0.69"
+name = "fastrand"
+version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
-dependencies = [
- "unicode-ident",
-]
+checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
-name = "quote"
-version = "1.0.33"
+name = "find-msvc-tools"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844"
+
+[[package]]
+name = "fixedbitset"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
+
+[[package]]
+name = "flagset"
+version = "0.4.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b7ac824320a75a52197e8f2d787f6a38b6718bb6897a35142d749af3c0e8f4fe"
+
+[[package]]
+name = "flate2"
+version = "1.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
+checksum = "bfe33edd8e85a12a67454e37f8c75e730830d83e313556ab9ebf9ee7fbeb3bfb"
dependencies = [
- "proc-macro2",
+ "crc32fast",
+ "miniz_oxide",
]
[[package]]
-name = "rand"
-version = "0.8.5"
+name = "flume"
+version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095"
dependencies = [
- "libc",
- "rand_chacha",
- "rand_core",
+ "futures-core",
+ "futures-sink",
+ "spin 0.9.8",
]
[[package]]
-name = "rand_chacha"
-version = "0.3.1"
+name = "fnv"
+version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+
+[[package]]
+name = "foldhash"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
+
+[[package]]
+name = "foreign-types"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
dependencies = [
- "ppv-lite86",
- "rand_core",
+ "foreign-types-shared",
]
[[package]]
-name = "rand_core"
-version = "0.6.4"
+name = "foreign-types-shared"
+version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
+checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
+
+[[package]]
+name = "form_urlencoded"
+version = "1.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf"
dependencies = [
- "getrandom",
+ "percent-encoding",
]
[[package]]
-name = "redox_syscall"
-version = "0.2.16"
+name = "futures"
+version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
+checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876"
dependencies = [
- "bitflags 1.3.2",
+ "futures-channel",
+ "futures-core",
+ "futures-executor",
+ "futures-io",
+ "futures-sink",
+ "futures-task",
+ "futures-util",
]
[[package]]
-name = "redox_syscall"
-version = "0.3.5"
+name = "futures-channel"
+version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29"
+checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10"
dependencies = [
- "bitflags 1.3.2",
+ "futures-core",
+ "futures-sink",
]
[[package]]
-name = "redox_syscall"
-version = "0.4.1"
+name = "futures-core"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
+
+[[package]]
+name = "futures-executor"
+version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
+checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f"
dependencies = [
- "bitflags 1.3.2",
+ "futures-core",
+ "futures-task",
+ "futures-util",
]
[[package]]
-name = "redox_users"
-version = "0.4.3"
+name = "futures-intrusive"
+version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b"
+checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f"
dependencies = [
- "getrandom",
- "redox_syscall 0.2.16",
- "thiserror",
+ "futures-core",
+ "lock_api",
+ "parking_lot",
]
[[package]]
-name = "regex"
-version = "1.10.2"
+name = "futures-io"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6"
+
+[[package]]
+name = "futures-lite"
+version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343"
+checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce"
dependencies = [
- "aho-corasick",
+ "fastrand 1.9.0",
+ "futures-core",
+ "futures-io",
"memchr",
- "regex-automata 0.4.3",
- "regex-syntax 0.8.2",
+ "parking",
+ "pin-project-lite",
+ "waker-fn",
]
[[package]]
-name = "regex-automata"
-version = "0.1.10"
+name = "futures-lite"
+version = "2.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad"
dependencies = [
- "regex-syntax 0.6.29",
+ "fastrand 2.3.0",
+ "futures-core",
+ "futures-io",
+ "parking",
+ "pin-project-lite",
]
[[package]]
-name = "regex-automata"
-version = "0.4.3"
+name = "futures-macro"
+version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f"
+checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
dependencies = [
- "aho-corasick",
- "memchr",
- "regex-syntax 0.8.2",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
]
[[package]]
-name = "regex-syntax"
-version = "0.6.29"
+name = "futures-sink"
+version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
+checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7"
[[package]]
-name = "regex-syntax"
-version = "0.8.2"
+name = "futures-task"
+version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
+checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988"
[[package]]
-name = "reqwest"
-version = "0.11.22"
+name = "futures-timer"
+version = "3.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "046cd98826c46c2ac8ddecae268eb5c2e58628688a5fc7a2643704a73faba95b"
-dependencies = [
- "base64 0.21.4",
- "bytes",
- "encoding_rs",
- "futures-core",
- "futures-util",
- "h2",
- "http",
- "http-body",
- "hyper",
- "hyper-tls",
- "ipnet",
- "js-sys",
- "log",
- "mime",
- "native-tls",
- "once_cell",
- "percent-encoding",
- "pin-project-lite",
- "serde",
- "serde_json",
- "serde_urlencoded",
- "system-configuration",
- "tokio",
- "tokio-native-tls",
- "tower-service",
- "url",
- "wasm-bindgen",
- "wasm-bindgen-futures",
- "web-sys",
- "winreg",
-]
+checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24"
[[package]]
-name = "ring"
-version = "0.16.20"
+name = "futures-util"
+version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc"
+checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
dependencies = [
- "cc",
- "libc",
- "once_cell",
- "spin 0.5.2",
- "untrusted 0.7.1",
- "web-sys",
- "winapi",
+ "futures-channel",
+ "futures-core",
+ "futures-io",
+ "futures-macro",
+ "futures-sink",
+ "futures-task",
+ "memchr",
+ "pin-project-lite",
+ "pin-utils",
+ "slab",
]
[[package]]
-name = "ring"
-version = "0.17.4"
+name = "generic-array"
+version = "0.14.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fce3045ffa7c981a6ee93f640b538952e155f1ae3a1a02b84547fc7a56b7059a"
+checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
dependencies = [
- "cc",
- "getrandom",
- "libc",
- "spin 0.9.8",
- "untrusted 0.9.0",
- "windows-sys",
+ "typenum",
+ "version_check",
]
[[package]]
-name = "ron"
-version = "0.7.1"
+name = "gethostname"
+version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "88073939a61e5b7680558e6be56b419e208420c2adb92be54921fa6b72283f1a"
+checksum = "c1ebd34e35c46e00bb73e81363248d627782724609fe1b6396f553f68fe3862e"
dependencies = [
- "base64 0.13.1",
- "bitflags 1.3.2",
- "serde",
+ "libc",
+ "winapi",
]
[[package]]
-name = "rust-ini"
-version = "0.18.0"
+name = "getrandom"
+version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f6d5f2436026b4f6e79dc829837d467cc7e9a55ee40e750d716713540715a2df"
+checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
dependencies = [
"cfg-if",
- "ordered-multimap",
-]
-
-[[package]]
-name = "rustc-demangle"
-version = "0.1.23"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
-
-[[package]]
-name = "rustc_version"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
-dependencies = [
- "semver",
+ "libc",
+ "wasi 0.9.0+wasi-snapshot-preview1",
]
[[package]]
-name = "rustix"
-version = "0.38.19"
+name = "getrandom"
+version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "745ecfa778e66b2b63c88a61cb36e0eea109e803b0b86bf9879fbc77c70e86ed"
+checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592"
dependencies = [
- "bitflags 2.4.1",
- "errno",
+ "cfg-if",
"libc",
- "linux-raw-sys",
- "windows-sys",
+ "wasi 0.11.1+wasi-snapshot-preview1",
]
[[package]]
-name = "rustls"
-version = "0.20.9"
+name = "getrandom"
+version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99"
+checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd"
dependencies = [
- "log",
- "ring 0.16.20",
- "sct",
- "webpki",
+ "cfg-if",
+ "js-sys",
+ "libc",
+ "r-efi",
+ "wasip2",
+ "wasm-bindgen",
]
[[package]]
-name = "rustls-pemfile"
-version = "1.0.3"
+name = "ghash"
+version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2"
+checksum = "f0d8a4362ccb29cb0b265253fb0a2728f592895ee6854fd9bc13f2ffda266ff1"
dependencies = [
- "base64 0.21.4",
+ "opaque-debug",
+ "polyval",
]
[[package]]
-name = "ryu"
-version = "1.0.15"
+name = "glob"
+version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741"
+checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280"
[[package]]
-name = "schannel"
-version = "0.1.22"
+name = "h2"
+version = "0.3.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88"
+checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d"
dependencies = [
- "windows-sys",
+ "bytes",
+ "fnv",
+ "futures-core",
+ "futures-sink",
+ "futures-util",
+ "http",
+ "indexmap",
+ "slab",
+ "tokio",
+ "tokio-util",
+ "tracing",
]
[[package]]
-name = "scopeguard"
-version = "1.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
-
-[[package]]
-name = "sct"
-version = "0.7.0"
+name = "hashbrown"
+version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4"
+checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
dependencies = [
- "ring 0.16.20",
- "untrusted 0.7.1",
+ "ahash 0.7.8",
]
[[package]]
-name = "security-framework"
-version = "2.9.2"
+name = "hashbrown"
+version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de"
+checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
dependencies = [
- "bitflags 1.3.2",
- "core-foundation",
- "core-foundation-sys",
- "libc",
- "security-framework-sys",
+ "ahash 0.8.12",
]
[[package]]
-name = "security-framework-sys"
-version = "2.9.1"
+name = "hashbrown"
+version = "0.15.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a"
+checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1"
dependencies = [
- "core-foundation-sys",
- "libc",
+ "allocator-api2",
+ "equivalent",
+ "foldhash",
]
[[package]]
-name = "semver"
-version = "1.0.20"
+name = "hashbrown"
+version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090"
+checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
[[package]]
-name = "serde"
-version = "1.0.189"
+name = "hashlink"
+version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e422a44e74ad4001bdc8eede9a4570ab52f71190e9c076d14369f38b9200537"
+checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af"
dependencies = [
- "serde_derive",
+ "hashbrown 0.14.5",
]
[[package]]
-name = "serde_derive"
-version = "1.0.189"
+name = "hashlink"
+version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e48d1f918009ce3145511378cf68d613e3b3d9137d67272562080d68a2b32d5"
+checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1"
dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.38",
+ "hashbrown 0.15.5",
]
[[package]]
-name = "serde_json"
-version = "1.0.107"
+name = "heck"
+version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65"
-dependencies = [
- "itoa",
- "ryu",
- "serde",
-]
+checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
-name = "serde_urlencoded"
-version = "0.7.1"
+name = "hermit-abi"
+version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
-dependencies = [
- "form_urlencoded",
- "itoa",
- "ryu",
- "serde",
-]
+checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024"
[[package]]
-name = "serde_valid"
-version = "0.16.3"
+name = "hermit-abi"
+version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0adc7a19d45e581abc6d169c865a0b14b84bb43a9e966d1cca4d733e70f7f35a"
-dependencies = [
- "indexmap",
- "itertools 0.10.5",
- "num-traits",
- "once_cell",
- "paste",
- "regex",
- "serde",
- "serde_json",
- "serde_valid_derive",
- "serde_valid_literal",
- "thiserror",
- "unicode-segmentation",
-]
+checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c"
[[package]]
-name = "serde_valid_derive"
-version = "0.16.3"
+name = "hex"
+version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "071237362e267e2a76ffe4434094e089dcd8b5e9d8423ada499e5550dcb0181d"
-dependencies = [
- "paste",
- "proc-macro-error",
- "proc-macro2",
- "quote",
- "strsim",
- "syn 1.0.109",
-]
+checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
-name = "serde_valid_literal"
-version = "0.16.3"
+name = "hkdf"
+version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f57df292b1d64449f90794fc7a67efca0b21acca91493e64a46418a29bbe36b4"
+checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7"
dependencies = [
- "paste",
- "regex",
+ "hmac",
]
[[package]]
-name = "sha1"
-version = "0.10.6"
+name = "hmac"
+version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
+checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e"
dependencies = [
- "cfg-if",
- "cpufeatures",
"digest",
]
[[package]]
-name = "sha2"
-version = "0.10.8"
+name = "home"
+version = "0.5.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
+checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d"
dependencies = [
- "cfg-if",
- "cpufeatures",
- "digest",
+ "windows-sys 0.61.2",
]
[[package]]
-name = "sharded-slab"
-version = "0.1.7"
+name = "http"
+version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
+checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1"
dependencies = [
- "lazy_static",
+ "bytes",
+ "fnv",
+ "itoa",
]
[[package]]
-name = "signal-hook-registry"
-version = "1.4.1"
+name = "http-body"
+version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1"
+checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2"
dependencies = [
- "libc",
+ "bytes",
+ "http",
+ "pin-project-lite",
]
[[package]]
-name = "slab"
-version = "0.4.9"
+name = "http-types"
+version = "2.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67"
+checksum = "6e9b187a72d63adbfba487f48095306ac823049cb504ee195541e91c7775f5ad"
dependencies = [
- "autocfg",
+ "anyhow",
+ "async-channel 1.9.0",
+ "base64 0.13.1",
+ "futures-lite 1.13.0",
+ "http",
+ "infer",
+ "pin-project-lite",
+ "rand 0.7.3",
+ "serde",
+ "serde_json",
+ "serde_qs",
+ "serde_urlencoded",
+ "url",
]
[[package]]
-name = "smallvec"
-version = "1.11.1"
+name = "httparse"
+version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "942b4a808e05215192e39f4ab80813e599068285906cc91aa64f923db842bd5a"
+checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
[[package]]
-name = "socket2"
-version = "0.4.9"
+name = "httpdate"
+version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662"
-dependencies = [
- "libc",
- "winapi",
-]
+checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]]
-name = "socket2"
-version = "0.5.4"
+name = "hyper"
+version = "0.14.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4031e820eb552adee9295814c0ced9e5cf38ddf1e8b7d566d6de8e2538ea989e"
+checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7"
dependencies = [
- "libc",
- "windows-sys",
-]
-
-[[package]]
-name = "spin"
-version = "0.5.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
+ "bytes",
+ "futures-channel",
+ "futures-core",
+ "futures-util",
+ "h2",
+ "http",
+ "http-body",
+ "httparse",
+ "httpdate",
+ "itoa",
+ "pin-project-lite",
+ "socket2 0.5.10",
+ "tokio",
+ "tower-service",
+ "tracing",
+ "want",
+]
+
+[[package]]
+name = "hyper-tls"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905"
+dependencies = [
+ "bytes",
+ "hyper",
+ "native-tls",
+ "tokio",
+ "tokio-native-tls",
+]
+
+[[package]]
+name = "iana-time-zone"
+version = "0.1.64"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb"
+dependencies = [
+ "android_system_properties",
+ "core-foundation-sys",
+ "iana-time-zone-haiku",
+ "js-sys",
+ "log",
+ "wasm-bindgen",
+ "windows-core",
+]
+
+[[package]]
+name = "iana-time-zone-haiku"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f"
+dependencies = [
+ "cc",
+]
+
+[[package]]
+name = "icu_collections"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43"
+dependencies = [
+ "displaydoc",
+ "potential_utf",
+ "yoke",
+ "zerofrom",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_locale_core"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6"
+dependencies = [
+ "displaydoc",
+ "litemap",
+ "tinystr",
+ "writeable",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_normalizer"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599"
+dependencies = [
+ "icu_collections",
+ "icu_normalizer_data",
+ "icu_properties",
+ "icu_provider",
+ "smallvec",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_normalizer_data"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a"
+
+[[package]]
+name = "icu_properties"
+version = "2.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec"
+dependencies = [
+ "icu_collections",
+ "icu_locale_core",
+ "icu_properties_data",
+ "icu_provider",
+ "zerotrie",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_properties_data"
+version = "2.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af"
+
+[[package]]
+name = "icu_provider"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614"
+dependencies = [
+ "displaydoc",
+ "icu_locale_core",
+ "writeable",
+ "yoke",
+ "zerofrom",
+ "zerotrie",
+ "zerovec",
+]
+
+[[package]]
+name = "ident_case"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
+
+[[package]]
+name = "idna"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de"
+dependencies = [
+ "idna_adapter",
+ "smallvec",
+ "utf8_iter",
+]
+
+[[package]]
+name = "idna_adapter"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344"
+dependencies = [
+ "icu_normalizer",
+ "icu_properties",
+]
+
+[[package]]
+name = "impl-more"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e8a5a9a0ff0086c7a148acb942baaabeadf9504d10400b5a05645853729b9cd2"
+
+[[package]]
+name = "indexmap"
+version = "2.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2"
+dependencies = [
+ "equivalent",
+ "hashbrown 0.16.1",
+ "serde",
+ "serde_core",
+]
+
+[[package]]
+name = "infer"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "64e9829a50b42bb782c1df523f78d332fe371b10c661e78b7a3c34b0198e9fac"
+
+[[package]]
+name = "inout"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01"
+dependencies = [
+ "block-padding",
+ "generic-array",
+]
+
+[[package]]
+name = "instant"
+version = "0.1.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "io-lifetimes"
+version = "1.0.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2"
+dependencies = [
+ "hermit-abi 0.3.9",
+ "libc",
+ "windows-sys 0.48.0",
+]
+
+[[package]]
+name = "ipnet"
+version = "2.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130"
+
+[[package]]
+name = "ipnetwork"
+version = "0.20.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bf466541e9d546596ee94f9f69590f89473455f88372423e0008fc1a7daf100e"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "is-terminal"
+version = "0.4.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46"
+dependencies = [
+ "hermit-abi 0.5.2",
+ "libc",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "is_terminal_polyfill"
+version = "1.70.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695"
+
+[[package]]
+name = "itertools"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569"
+dependencies = [
+ "either",
+]
+
+[[package]]
+name = "itertools"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186"
+dependencies = [
+ "either",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7ee5b5339afb4c41626dde77b7a611bd4f2c202b897852b4bcf5d03eddc61010"
+
+[[package]]
+name = "jobserver"
+version = "0.1.34"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
+dependencies = [
+ "getrandom 0.3.4",
+ "libc",
+]
+
+[[package]]
+name = "js-sys"
+version = "0.3.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "464a3709c7f55f1f721e5389aa6ea4e3bc6aba669353300af094b29ffbdde1d8"
+dependencies = [
+ "once_cell",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "json5"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96b0db21af676c1ce64250b5f40f3ce2cf27e4e47cb91ed91eb6fe9350b430c1"
+dependencies = [
+ "pest",
+ "pest_derive",
+ "serde",
+]
+
+[[package]]
+name = "language-tags"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388"
+
+[[package]]
+name = "lapin"
+version = "2.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "02d2aa4725b9607915fa1a73e940710a3be6af508ce700e56897cbe8847fbb07"
+dependencies = [
+ "amq-protocol",
+ "async-global-executor-trait",
+ "async-reactor-trait",
+ "async-trait",
+ "executor-trait",
+ "flume",
+ "futures-core",
+ "futures-io",
+ "parking_lot",
+ "pinky-swear",
+ "reactor-trait",
+ "serde",
+ "serde_json",
+ "tracing",
+ "waker-fn",
+]
+
+[[package]]
+name = "lazy_static"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
+dependencies = [
+ "spin 0.9.8",
+]
+
+[[package]]
+name = "libc"
+version = "0.2.178"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091"
+
+[[package]]
+name = "libm"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de"
+
+[[package]]
+name = "libredox"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df15f6eac291ed1cf25865b1ee60399f57e7c227e7f51bdbd4c5270396a9ed50"
+dependencies = [
+ "bitflags 2.10.0",
+ "libc",
+ "redox_syscall 0.6.0",
+]
+
+[[package]]
+name = "libsqlite3-sys"
+version = "0.30.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149"
+dependencies = [
+ "pkg-config",
+ "vcpkg",
+]
+
+[[package]]
+name = "linked-hash-map"
+version = "0.5.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
+
+[[package]]
+name = "linux-raw-sys"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519"
+
+[[package]]
+name = "linux-raw-sys"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039"
+
+[[package]]
+name = "litemap"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77"
+
+[[package]]
+name = "local-channel"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6cbc85e69b8df4b8bb8b89ec634e7189099cea8927a276b7384ce5488e53ec8"
+dependencies = [
+ "futures-core",
+ "futures-sink",
+ "local-waker",
+]
+
+[[package]]
+name = "local-waker"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4d873d7c67ce09b42110d801813efbc9364414e356be9935700d368351657487"
+
+[[package]]
+name = "lock_api"
+version = "0.4.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965"
+dependencies = [
+ "scopeguard",
+]
+
+[[package]]
+name = "log"
+version = "0.4.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
+
+[[package]]
+name = "matchers"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9"
+dependencies = [
+ "regex-automata",
+]
+
+[[package]]
+name = "md-5"
+version = "0.10.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf"
+dependencies = [
+ "cfg-if",
+ "digest",
+]
+
+[[package]]
+name = "memchr"
+version = "2.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273"
+
+[[package]]
+name = "mime"
+version = "0.3.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
+
+[[package]]
+name = "mini-moka"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c325dfab65f261f386debee8b0969da215b3fa0037e74c8a1234db7ba986d803"
+dependencies = [
+ "crossbeam-channel",
+ "crossbeam-utils",
+ "dashmap",
+ "skeptic",
+ "smallvec",
+ "tagptr",
+ "triomphe",
+]
+
+[[package]]
+name = "minicov"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4869b6a491569605d66d3952bcdf03df789e5b536e5f0cf7758a7f08a55ae24d"
+dependencies = [
+ "cc",
+ "walkdir",
+]
+
+[[package]]
+name = "minimal-lexical"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
+
+[[package]]
+name = "miniz_oxide"
+version = "0.8.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316"
+dependencies = [
+ "adler2",
+ "simd-adler32",
+]
+
+[[package]]
+name = "mio"
+version = "1.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc"
+dependencies = [
+ "libc",
+ "log",
+ "wasi 0.11.1+wasi-snapshot-preview1",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "mutually_exclusive_features"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e94e1e6445d314f972ff7395df2de295fe51b71821694f0b0e1e79c4f12c8577"
+
+[[package]]
+name = "native-tls"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e"
+dependencies = [
+ "libc",
+ "log",
+ "openssl",
+ "openssl-probe",
+ "openssl-sys",
+ "schannel",
+ "security-framework",
+ "security-framework-sys",
+ "tempfile",
+]
+
+[[package]]
+name = "no-std-compat"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b93853da6d84c2e3c7d730d6473e8817692dd89be387eb01b94d7f108ecb5b8c"
+dependencies = [
+ "spin 0.5.2",
+]
+
+[[package]]
+name = "nom"
+version = "7.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
+dependencies = [
+ "memchr",
+ "minimal-lexical",
+]
+
+[[package]]
+name = "nu-ansi-term"
+version = "0.50.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5"
+dependencies = [
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "num-bigint"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9"
+dependencies = [
+ "num-integer",
+ "num-traits",
+]
+
+[[package]]
+name = "num-bigint-dig"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7"
+dependencies = [
+ "lazy_static",
+ "libm",
+ "num-integer",
+ "num-iter",
+ "num-traits",
+ "rand 0.8.5",
+ "smallvec",
+ "zeroize",
+]
+
+[[package]]
+name = "num-conv"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
+
+[[package]]
+name = "num-integer"
+version = "0.1.46"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
+dependencies = [
+ "num-traits",
+]
+
+[[package]]
+name = "num-iter"
+version = "0.1.45"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf"
+dependencies = [
+ "autocfg",
+ "num-integer",
+ "num-traits",
+]
+
+[[package]]
+name = "num-traits"
+version = "0.2.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
+dependencies = [
+ "autocfg",
+ "libm",
+]
+
+[[package]]
+name = "num_cpus"
+version = "1.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b"
+dependencies = [
+ "hermit-abi 0.5.2",
+ "libc",
+]
+
+[[package]]
+name = "oid-registry"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "12f40cff3dde1b6087cc5d5f5d4d65712f34016a03ed60e9c08dcc392736b5b7"
+dependencies = [
+ "asn1-rs",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.21.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
+dependencies = [
+ "portable-atomic",
+]
+
+[[package]]
+name = "once_cell_polyfill"
+version = "1.70.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe"
+
+[[package]]
+name = "oorandom"
+version = "11.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e"
+
+[[package]]
+name = "opaque-debug"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381"
+
+[[package]]
+name = "openssl"
+version = "0.10.75"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328"
+dependencies = [
+ "bitflags 2.10.0",
+ "cfg-if",
+ "foreign-types",
+ "libc",
+ "once_cell",
+ "openssl-macros",
+ "openssl-sys",
+]
+
+[[package]]
+name = "openssl-macros"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
+]
+
+[[package]]
+name = "openssl-probe"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e"
+
+[[package]]
+name = "openssl-sys"
+version = "0.9.111"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321"
+dependencies = [
+ "cc",
+ "libc",
+ "pkg-config",
+ "vcpkg",
+]
+
+[[package]]
+name = "ordered-multimap"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ccd746e37177e1711c20dd619a1620f34f5c8b569c53590a72dedd5344d8924a"
+dependencies = [
+ "dlv-list",
+ "hashbrown 0.12.3",
+]
+
+[[package]]
+name = "p12-keystore"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3cae83056e7cb770211494a0ecf66d9fa7eba7d00977e5bb91f0e925b40b937f"
+dependencies = [
+ "cbc",
+ "cms",
+ "der",
+ "des",
+ "hex",
+ "hmac",
+ "pkcs12",
+ "pkcs5",
+ "rand 0.9.2",
+ "rc2",
+ "sha1",
+ "sha2",
+ "thiserror 2.0.17",
+ "x509-parser",
+]
+
+[[package]]
+name = "parking"
+version = "2.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba"
+
+[[package]]
+name = "parking_lot"
+version = "0.12.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a"
+dependencies = [
+ "lock_api",
+ "parking_lot_core",
+]
+
+[[package]]
+name = "parking_lot_core"
+version = "0.9.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall 0.5.18",
+ "smallvec",
+ "windows-link",
+]
+
+[[package]]
+name = "paste"
+version = "1.0.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
+
+[[package]]
+name = "pathdiff"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3"
+
+[[package]]
+name = "pbkdf2"
+version = "0.12.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2"
+dependencies = [
+ "digest",
+ "hmac",
+]
+
+[[package]]
+name = "pem-rfc7468"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412"
+dependencies = [
+ "base64ct",
+]
+
+[[package]]
+name = "percent-encoding"
+version = "2.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
+
+[[package]]
+name = "pest"
+version = "2.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cbcfd20a6d4eeba40179f05735784ad32bdaef05ce8e8af05f180d45bb3e7e22"
+dependencies = [
+ "memchr",
+ "ucd-trie",
+]
+
+[[package]]
+name = "pest_derive"
+version = "2.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "51f72981ade67b1ca6adc26ec221be9f463f2b5839c7508998daa17c23d94d7f"
+dependencies = [
+ "pest",
+ "pest_generator",
+]
+
+[[package]]
+name = "pest_generator"
+version = "2.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dee9efd8cdb50d719a80088b76f81aec7c41ed6d522ee750178f83883d271625"
+dependencies = [
+ "pest",
+ "pest_meta",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
+]
+
+[[package]]
+name = "pest_meta"
+version = "2.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bf1d70880e76bdc13ba52eafa6239ce793d85c8e43896507e43dd8984ff05b82"
+dependencies = [
+ "pest",
+ "sha2",
+]
+
+[[package]]
+name = "petgraph"
+version = "0.6.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db"
+dependencies = [
+ "fixedbitset",
+ "indexmap",
+]
+
+[[package]]
+name = "pin-project"
+version = "1.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a"
+dependencies = [
+ "pin-project-internal",
+]
+
+[[package]]
+name = "pin-project-internal"
+version = "1.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
+]
+
+[[package]]
+name = "pin-project-lite"
+version = "0.2.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
+
+[[package]]
+name = "pin-utils"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
+
+[[package]]
+name = "pinky-swear"
+version = "6.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1ea6e230dd3a64d61bcb8b79e597d3ab6b4c94ec7a234ce687dd718b4f2e657"
+dependencies = [
+ "doc-comment",
+ "flume",
+ "parking_lot",
+ "tracing",
+]
+
+[[package]]
+name = "piper"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066"
+dependencies = [
+ "atomic-waker",
+ "fastrand 2.3.0",
+ "futures-io",
+]
+
+[[package]]
+name = "pkcs1"
+version = "0.7.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f"
+dependencies = [
+ "der",
+ "pkcs8",
+ "spki",
+]
+
+[[package]]
+name = "pkcs12"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "695b3df3d3cc1015f12d70235e35b6b79befc5fa7a9b95b951eab1dd07c9efc2"
+dependencies = [
+ "cms",
+ "const-oid",
+ "der",
+ "digest",
+ "spki",
+ "x509-cert",
+ "zeroize",
+]
+
+[[package]]
+name = "pkcs5"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e847e2c91a18bfa887dd028ec33f2fe6f25db77db3619024764914affe8b69a6"
+dependencies = [
+ "aes",
+ "cbc",
+ "der",
+ "pbkdf2",
+ "scrypt",
+ "sha2",
+ "spki",
+]
+
+[[package]]
+name = "pkcs8"
+version = "0.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7"
+dependencies = [
+ "der",
+ "spki",
+]
+
+[[package]]
+name = "pkg-config"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
+
+[[package]]
+name = "polling"
+version = "2.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce"
+dependencies = [
+ "autocfg",
+ "bitflags 1.3.2",
+ "cfg-if",
+ "concurrent-queue",
+ "libc",
+ "log",
+ "pin-project-lite",
+ "windows-sys 0.48.0",
+]
+
+[[package]]
+name = "polling"
+version = "3.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5d0e4f59085d47d8241c88ead0f274e8a0cb551f3625263c05eb8dd897c34218"
+dependencies = [
+ "cfg-if",
+ "concurrent-queue",
+ "hermit-abi 0.5.2",
+ "pin-project-lite",
+ "rustix 1.1.3",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "polyval"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "opaque-debug",
+ "universal-hash",
+]
+
+[[package]]
+name = "portable-atomic"
+version = "1.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f59e70c4aef1e55797c2e8fd94a4f2a973fc972cfde0e0b05f683667b0cd39dd"
+
+[[package]]
+name = "potential_utf"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77"
+dependencies = [
+ "zerovec",
+]
+
+[[package]]
+name = "powerfmt"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9"
+dependencies = [
+ "zerocopy",
+]
+
+[[package]]
+name = "proc-macro-error"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
+dependencies = [
+ "proc-macro-error-attr",
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+ "version_check",
+]
+
+[[package]]
+name = "proc-macro-error-attr"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "version_check",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.103"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "pulldown-cmark"
+version = "0.9.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b"
+dependencies = [
+ "bitflags 2.10.0",
+ "memchr",
+ "unicase",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.42"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "r-efi"
+version = "5.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
+
+[[package]]
+name = "rand"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
+dependencies = [
+ "getrandom 0.1.16",
+ "libc",
+ "rand_chacha 0.2.2",
+ "rand_core 0.5.1",
+ "rand_hc",
+]
+
+[[package]]
+name = "rand"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+dependencies = [
+ "libc",
+ "rand_chacha 0.3.1",
+ "rand_core 0.6.4",
+]
+
+[[package]]
+name = "rand"
+version = "0.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
+dependencies = [
+ "rand_chacha 0.9.0",
+ "rand_core 0.9.3",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
+dependencies = [
+ "ppv-lite86",
+ "rand_core 0.5.1",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+dependencies = [
+ "ppv-lite86",
+ "rand_core 0.6.4",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
+dependencies = [
+ "ppv-lite86",
+ "rand_core 0.9.3",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
+dependencies = [
+ "getrandom 0.1.16",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
+dependencies = [
+ "getrandom 0.2.16",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38"
+dependencies = [
+ "getrandom 0.3.4",
+]
+
+[[package]]
+name = "rand_hc"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
+dependencies = [
+ "rand_core 0.5.1",
+]
+
+[[package]]
+name = "rc2"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62c64daa8e9438b84aaae55010a93f396f8e60e3911590fcba770d04643fc1dd"
+dependencies = [
+ "cipher",
+]
+
+[[package]]
+name = "reactor-trait"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "438a4293e4d097556730f4711998189416232f009c137389e0f961d2bc0ddc58"
+dependencies = [
+ "async-trait",
+ "futures-core",
+ "futures-io",
+]
+
+[[package]]
+name = "redis"
+version = "0.27.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09d8f99a4090c89cc489a94833c901ead69bfbf3877b4867d5482e321ee875bc"
+dependencies = [
+ "arc-swap",
+ "async-trait",
+ "backon",
+ "bytes",
+ "combine",
+ "futures",
+ "futures-util",
+ "itertools 0.13.0",
+ "itoa",
+ "num-bigint",
+ "percent-encoding",
+ "pin-project-lite",
+ "ryu",
+ "sha1_smol",
+ "socket2 0.5.10",
+ "tokio",
+ "tokio-util",
+ "url",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.5.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d"
+dependencies = [
+ "bitflags 2.10.0",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec96166dafa0886eb81fe1c0a388bece180fbef2135f97c1e2cf8302e74b43b5"
+dependencies = [
+ "bitflags 2.10.0",
+]
+
+[[package]]
+name = "regex"
+version = "1.12.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-automata",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.4.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-lite"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8d942b98df5e658f56f20d592c7f868833fe38115e65c33003d8cd224b0155da"
+
+[[package]]
+name = "regex-syntax"
+version = "0.8.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58"
+
+[[package]]
+name = "reqwest"
+version = "0.11.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62"
+dependencies = [
+ "base64 0.21.7",
+ "bytes",
+ "encoding_rs",
+ "futures-core",
+ "futures-util",
+ "h2",
+ "http",
+ "http-body",
+ "hyper",
+ "hyper-tls",
+ "ipnet",
+ "js-sys",
+ "log",
+ "mime",
+ "native-tls",
+ "once_cell",
+ "percent-encoding",
+ "pin-project-lite",
+ "rustls-pemfile 1.0.4",
+ "serde",
+ "serde_json",
+ "serde_urlencoded",
+ "sync_wrapper",
+ "system-configuration",
+ "tokio",
+ "tokio-native-tls",
+ "tower-service",
+ "url",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "web-sys",
+ "winreg",
+]
+
+[[package]]
+name = "retain_mut"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4389f1d5789befaf6029ebd9f7dac4af7f7e3d61b69d4f30e2ac02b57e7712b0"
+
+[[package]]
+name = "rhai"
+version = "1.23.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f4e35aaaa439a5bda2f8d15251bc375e4edfac75f9865734644782c9701b5709"
+dependencies = [
+ "ahash 0.8.12",
+ "bitflags 2.10.0",
+ "instant",
+ "no-std-compat",
+ "num-traits",
+ "once_cell",
+ "rhai_codegen",
+ "serde",
+ "smallvec",
+ "smartstring",
+ "thin-vec",
+]
+
+[[package]]
+name = "rhai_codegen"
+version = "3.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4322a2a4e8cf30771dd9f27f7f37ca9ac8fe812dddd811096a98483080dabe6"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
+]
+
+[[package]]
+name = "ring"
+version = "0.17.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7"
+dependencies = [
+ "cc",
+ "cfg-if",
+ "getrandom 0.2.16",
+ "libc",
+ "untrusted",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
+name = "ron"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "88073939a61e5b7680558e6be56b419e208420c2adb92be54921fa6b72283f1a"
+dependencies = [
+ "base64 0.13.1",
+ "bitflags 1.3.2",
+ "serde",
+]
+
+[[package]]
+name = "rsa"
+version = "0.9.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "40a0376c50d0358279d9d643e4bf7b7be212f1f4ff1da9070a7b54d22ef75c88"
+dependencies = [
+ "const-oid",
+ "digest",
+ "num-bigint-dig",
+ "num-integer",
+ "num-traits",
+ "pkcs1",
+ "pkcs8",
+ "rand_core 0.6.4",
+ "signature",
+ "spki",
+ "subtle",
+ "zeroize",
+]
+
+[[package]]
+name = "rust-ini"
+version = "0.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6d5f2436026b4f6e79dc829837d467cc7e9a55ee40e750d716713540715a2df"
+dependencies = [
+ "cfg-if",
+ "ordered-multimap",
+]
+
+[[package]]
+name = "rustc_version"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92"
+dependencies = [
+ "semver",
+]
+
+[[package]]
+name = "rusticata-macros"
+version = "4.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "faf0c4a6ece9950b9abdb62b1cfcf2a68b3b67a10ba445b3bb85be2a293d0632"
+dependencies = [
+ "nom",
+]
+
+[[package]]
+name = "rustix"
+version = "0.37.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "519165d378b97752ca44bbe15047d5d3409e875f39327546b42ac81d7e18c1b6"
+dependencies = [
+ "bitflags 1.3.2",
+ "errno",
+ "io-lifetimes",
+ "libc",
+ "linux-raw-sys 0.3.8",
+ "windows-sys 0.48.0",
+]
+
+[[package]]
+name = "rustix"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34"
+dependencies = [
+ "bitflags 2.10.0",
+ "errno",
+ "libc",
+ "linux-raw-sys 0.11.0",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "rustls"
+version = "0.23.35"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "533f54bc6a7d4f647e46ad909549eda97bf5afc1585190ef692b4286b198bd8f"
+dependencies = [
+ "once_cell",
+ "ring",
+ "rustls-pki-types",
+ "rustls-webpki",
+ "subtle",
+ "zeroize",
+]
+
+[[package]]
+name = "rustls-connector"
+version = "0.20.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70cc376c6ba1823ae229bacf8ad93c136d93524eab0e4e5e0e4f96b9c4e5b212"
+dependencies = [
+ "log",
+ "rustls",
+ "rustls-native-certs",
+ "rustls-pki-types",
+ "rustls-webpki",
+]
+
+[[package]]
+name = "rustls-native-certs"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5"
+dependencies = [
+ "openssl-probe",
+ "rustls-pemfile 2.2.0",
+ "rustls-pki-types",
+ "schannel",
+ "security-framework",
+]
+
+[[package]]
+name = "rustls-pemfile"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c"
+dependencies = [
+ "base64 0.21.7",
+]
+
+[[package]]
+name = "rustls-pemfile"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50"
+dependencies = [
+ "rustls-pki-types",
+]
+
+[[package]]
+name = "rustls-pki-types"
+version = "1.13.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21e6f2ab2928ca4291b86736a8bd920a277a399bba1589409d72154ff87c1282"
+dependencies = [
+ "zeroize",
+]
+
+[[package]]
+name = "rustls-webpki"
+version = "0.103.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52"
+dependencies = [
+ "ring",
+ "rustls-pki-types",
+ "untrusted",
+]
+
+[[package]]
+name = "rustversion"
+version = "1.0.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d"
+
+[[package]]
+name = "ryu"
+version = "1.0.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62049b2877bf12821e8f9ad256ee38fdc31db7387ec2d3b3f403024de2034aea"
+
+[[package]]
+name = "salsa20"
+version = "0.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97a22f5af31f73a954c10289c93e8a50cc23d971e80ee446f1f6f7137a088213"
+dependencies = [
+ "cipher",
+]
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "schannel"
+version = "0.1.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1"
+dependencies = [
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "scopeguard"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
+
+[[package]]
+name = "scrypt"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0516a385866c09368f0b5bcd1caff3366aace790fcd46e2bb032697bb172fd1f"
+dependencies = [
+ "pbkdf2",
+ "salsa20",
+ "sha2",
+]
+
+[[package]]
+name = "security-framework"
+version = "2.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02"
+dependencies = [
+ "bitflags 2.10.0",
+ "core-foundation",
+ "core-foundation-sys",
+ "libc",
+ "security-framework-sys",
+]
+
+[[package]]
+name = "security-framework-sys"
+version = "2.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
+[[package]]
+name = "semver"
+version = "1.0.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2"
+dependencies = [
+ "serde",
+ "serde_core",
+]
+
+[[package]]
+name = "serde"
+version = "1.0.228"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
+dependencies = [
+ "serde_core",
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_core"
+version = "1.0.228"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.228"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.147"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6af14725505314343e673e9ecb7cd7e8a36aa9791eb936235a3567cc31447ae4"
+dependencies = [
+ "itoa",
+ "memchr",
+ "serde",
+ "serde_core",
+ "zmij",
+]
+
+[[package]]
+name = "serde_path_to_error"
+version = "0.1.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457"
+dependencies = [
+ "itoa",
+ "serde",
+ "serde_core",
+]
+
+[[package]]
+name = "serde_qs"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7715380eec75f029a4ef7de39a9200e0a63823176b759d055b613f5a87df6a6"
+dependencies = [
+ "percent-encoding",
+ "serde",
+ "thiserror 1.0.69",
+]
+
+[[package]]
+name = "serde_urlencoded"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
+dependencies = [
+ "form_urlencoded",
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "serde_valid"
+version = "0.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70c0e00fab6460447391a1981c21341746bc2d0178a7c46a3bbf667f450ac6e4"
+dependencies = [
+ "indexmap",
+ "itertools 0.12.1",
+ "num-traits",
+ "once_cell",
+ "paste",
+ "regex",
+ "serde",
+ "serde_json",
+ "serde_valid_derive",
+ "serde_valid_literal",
+ "thiserror 1.0.69",
+ "unicode-segmentation",
+]
+
+[[package]]
+name = "serde_valid_derive"
+version = "0.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "88c60a851514741a6088b2cd18eefb3f0d02ff3a1c87234de47153f2724d395d"
+dependencies = [
+ "paste",
+ "proc-macro-error",
+ "proc-macro2",
+ "quote",
+ "strsim 0.11.1",
+ "syn 2.0.111",
+]
+
+[[package]]
+name = "serde_valid_literal"
+version = "0.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aced4f1b31605a2b55eeacf2ec4dcbd96583263e9ded17eed1d41ab75915d12e"
+dependencies = [
+ "paste",
+ "regex",
+]
+
+[[package]]
+name = "serde_yaml"
+version = "0.9.34+deprecated"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47"
+dependencies = [
+ "indexmap",
+ "itoa",
+ "ryu",
+ "serde",
+ "unsafe-libyaml",
+]
+
+[[package]]
+name = "sha1"
+version = "0.10.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest",
+]
+
+[[package]]
+name = "sha1_smol"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d"
+
+[[package]]
+name = "sha2"
+version = "0.10.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest",
+]
+
+[[package]]
+name = "sharded-slab"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
+dependencies = [
+ "lazy_static",
+]
+
+[[package]]
+name = "shlex"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
+
+[[package]]
+name = "signal-hook-registry"
+version = "1.4.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b"
+dependencies = [
+ "errno",
+ "libc",
+]
+
+[[package]]
+name = "signature"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de"
+dependencies = [
+ "digest",
+ "rand_core 0.6.4",
+]
+
+[[package]]
+name = "simd-adler32"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2"
+
+[[package]]
+name = "skeptic"
+version = "0.13.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "16d23b015676c90a0f01c197bfdc786c20342c73a0afdda9025adb0bc42940a8"
+dependencies = [
+ "bytecount",
+ "cargo_metadata",
+ "error-chain",
+ "glob",
+ "pulldown-cmark",
+ "tempfile",
+ "walkdir",
+]
+
+[[package]]
+name = "slab"
+version = "0.4.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589"
+
+[[package]]
+name = "slog"
+version = "2.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b3b8565691b22d2bdfc066426ed48f837fc0c5f2c8cad8d9718f7f99d6995c1"
+dependencies = [
+ "anyhow",
+ "erased-serde",
+ "rustversion",
+ "serde_core",
+]
+
+[[package]]
+name = "slog-async"
+version = "2.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72c8038f898a2c79507940990f05386455b3a317d8f18d4caea7cbc3d5096b84"
+dependencies = [
+ "crossbeam-channel",
+ "slog",
+ "take_mut",
+ "thread_local",
+]
+
+[[package]]
+name = "slog-term"
+version = "2.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5cb1fc680b38eed6fad4c02b3871c09d2c81db8c96aa4e9c0a34904c830f09b5"
+dependencies = [
+ "chrono",
+ "is-terminal",
+ "slog",
+ "term",
+ "thread_local",
+ "time",
+]
+
+[[package]]
+name = "smallvec"
+version = "1.15.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "smartstring"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3fb72c633efbaa2dd666986505016c32c3044395ceaf881518399d2f4127ee29"
+dependencies = [
+ "autocfg",
+ "serde",
+ "static_assertions",
+ "version_check",
+]
+
+[[package]]
+name = "socket2"
+version = "0.4.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d"
+dependencies = [
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "socket2"
+version = "0.5.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678"
+dependencies = [
+ "libc",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
+name = "socket2"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881"
+dependencies = [
+ "libc",
+ "windows-sys 0.60.2",
+]
+
+[[package]]
+name = "spin"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
[[package]]
-name = "spin"
-version = "0.9.8"
+name = "spin"
+version = "0.9.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
+dependencies = [
+ "lock_api",
+]
+
+[[package]]
+name = "spki"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d"
+dependencies = [
+ "base64ct",
+ "der",
+]
+
+[[package]]
+name = "sqlx"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc"
+dependencies = [
+ "sqlx-core",
+ "sqlx-macros",
+ "sqlx-mysql",
+ "sqlx-postgres",
+ "sqlx-sqlite",
+]
+
+[[package]]
+name = "sqlx-adapter"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2a88e13f5aaf770420184c9e2955345f157953fb7ed9f26df59a4a0664478daf"
+dependencies = [
+ "async-trait",
+ "casbin",
+ "dotenvy",
+ "sqlx",
+]
+
+[[package]]
+name = "sqlx-core"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6"
+dependencies = [
+ "base64 0.22.1",
+ "bytes",
+ "chrono",
+ "crc",
+ "crossbeam-queue",
+ "either",
+ "event-listener 5.4.1",
+ "futures-core",
+ "futures-intrusive",
+ "futures-io",
+ "futures-util",
+ "hashbrown 0.15.5",
+ "hashlink 0.10.0",
+ "indexmap",
+ "ipnetwork",
+ "log",
+ "memchr",
+ "native-tls",
+ "once_cell",
+ "percent-encoding",
+ "rustls",
+ "serde",
+ "serde_json",
+ "sha2",
+ "smallvec",
+ "thiserror 2.0.17",
+ "tokio",
+ "tokio-stream",
+ "tracing",
+ "url",
+ "uuid",
+ "webpki-roots 0.26.11",
+]
+
+[[package]]
+name = "sqlx-macros"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "sqlx-core",
+ "sqlx-macros-core",
+ "syn 2.0.111",
+]
+
+[[package]]
+name = "sqlx-macros-core"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b"
+dependencies = [
+ "dotenvy",
+ "either",
+ "heck",
+ "hex",
+ "once_cell",
+ "proc-macro2",
+ "quote",
+ "serde",
+ "serde_json",
+ "sha2",
+ "sqlx-core",
+ "sqlx-mysql",
+ "sqlx-postgres",
+ "sqlx-sqlite",
+ "syn 2.0.111",
+ "tokio",
+ "url",
+]
+
+[[package]]
+name = "sqlx-mysql"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526"
+dependencies = [
+ "atoi",
+ "base64 0.22.1",
+ "bitflags 2.10.0",
+ "byteorder",
+ "bytes",
+ "chrono",
+ "crc",
+ "digest",
+ "dotenvy",
+ "either",
+ "futures-channel",
+ "futures-core",
+ "futures-io",
+ "futures-util",
+ "generic-array",
+ "hex",
+ "hkdf",
+ "hmac",
+ "itoa",
+ "log",
+ "md-5",
+ "memchr",
+ "once_cell",
+ "percent-encoding",
+ "rand 0.8.5",
+ "rsa",
+ "serde",
+ "sha1",
+ "sha2",
+ "smallvec",
+ "sqlx-core",
+ "stringprep",
+ "thiserror 2.0.17",
+ "tracing",
+ "uuid",
+ "whoami",
+]
+
+[[package]]
+name = "sqlx-postgres"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46"
+dependencies = [
+ "atoi",
+ "base64 0.22.1",
+ "bitflags 2.10.0",
+ "byteorder",
+ "chrono",
+ "crc",
+ "dotenvy",
+ "etcetera",
+ "futures-channel",
+ "futures-core",
+ "futures-util",
+ "hex",
+ "hkdf",
+ "hmac",
+ "home",
+ "ipnetwork",
+ "itoa",
+ "log",
+ "md-5",
+ "memchr",
+ "once_cell",
+ "rand 0.8.5",
+ "serde",
+ "serde_json",
+ "sha2",
+ "smallvec",
+ "sqlx-core",
+ "stringprep",
+ "thiserror 2.0.17",
+ "tracing",
+ "uuid",
+ "whoami",
+]
+
+[[package]]
+name = "sqlx-sqlite"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c2d12fe70b2c1b4401038055f90f151b78208de1f9f89a7dbfd41587a10c3eea"
+dependencies = [
+ "atoi",
+ "chrono",
+ "flume",
+ "futures-channel",
+ "futures-core",
+ "futures-executor",
+ "futures-intrusive",
+ "futures-util",
+ "libsqlite3-sys",
+ "log",
+ "percent-encoding",
+ "serde",
+ "serde_urlencoded",
+ "sqlx-core",
+ "thiserror 2.0.17",
+ "tracing",
+ "url",
+ "uuid",
+]
+
+[[package]]
+name = "stable_deref_trait"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596"
+
+[[package]]
+name = "stacker"
+version = "0.2.1"
+dependencies = [
+ "actix",
+ "actix-casbin-auth",
+ "actix-cors",
+ "actix-http",
+ "actix-web",
+ "actix-web-actors",
+ "aes-gcm",
+ "async-trait",
+ "base64 0.22.1",
+ "brotli 3.5.0",
+ "casbin",
+ "chrono",
+ "clap",
+ "config",
+ "deadpool-lapin",
+ "derive_builder 0.12.0",
+ "docker-compose-types",
+ "dotenvy",
+ "futures",
+ "futures-lite 2.6.1",
+ "futures-util",
+ "glob",
+ "hmac",
+ "indexmap",
+ "lapin",
+ "rand 0.8.5",
+ "redis",
+ "regex",
+ "reqwest",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "serde_path_to_error",
+ "serde_valid",
+ "serde_yaml",
+ "sha2",
+ "sqlx",
+ "sqlx-adapter",
+ "thiserror 1.0.69",
+ "tokio",
+ "tokio-stream",
+ "tracing",
+ "tracing-actix-web",
+ "tracing-bunyan-formatter",
+ "tracing-log 0.1.4",
+ "tracing-subscriber",
+ "urlencoding",
+ "uuid",
+ "wiremock",
+]
+
+[[package]]
+name = "static_assertions"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
+
+[[package]]
+name = "stringprep"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1"
+dependencies = [
+ "unicode-bidi",
+ "unicode-normalization",
+ "unicode-properties",
+]
+
+[[package]]
+name = "strsim"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
+
+[[package]]
+name = "strsim"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
+
+[[package]]
+name = "subtle"
+version = "2.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
+
+[[package]]
+name = "syn"
+version = "1.0.109"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "syn"
+version = "2.0.111"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "sync_wrapper"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160"
+
+[[package]]
+name = "synstructure"
+version = "0.13.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
+]
+
+[[package]]
+name = "system-configuration"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7"
+dependencies = [
+ "bitflags 1.3.2",
+ "core-foundation",
+ "system-configuration-sys",
+]
+
+[[package]]
+name = "system-configuration-sys"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
+[[package]]
+name = "tagptr"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417"
+
+[[package]]
+name = "take_mut"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60"
+
+[[package]]
+name = "tcp-stream"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "495b0abdce3dc1f8fd27240651c9e68890c14e9d9c61527b1ce44d8a5a7bd3d5"
+dependencies = [
+ "cfg-if",
+ "p12-keystore",
+ "rustls-connector",
+ "rustls-pemfile 2.2.0",
+]
+
+[[package]]
+name = "tempfile"
+version = "3.24.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c"
+dependencies = [
+ "fastrand 2.3.0",
+ "getrandom 0.3.4",
+ "once_cell",
+ "rustix 1.1.3",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "term"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d8c27177b12a6399ffc08b98f76f7c9a1f4fe9fc967c784c5a071fa8d93cf7e1"
+dependencies = [
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "thin-vec"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "144f754d318415ac792f9d69fc87abbbfc043ce2ef041c60f16ad828f638717d"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "thiserror"
+version = "1.0.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
+dependencies = [
+ "thiserror-impl 1.0.69",
+]
+
+[[package]]
+name = "thiserror"
+version = "2.0.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8"
+dependencies = [
+ "thiserror-impl 2.0.17",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "1.0.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "2.0.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
+]
+
+[[package]]
+name = "thread_local"
+version = "1.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "time"
+version = "0.3.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d"
+dependencies = [
+ "deranged",
+ "itoa",
+ "num-conv",
+ "powerfmt",
+ "serde",
+ "time-core",
+ "time-macros",
+]
+
+[[package]]
+name = "time-core"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b"
+
+[[package]]
+name = "time-macros"
+version = "0.2.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3"
+dependencies = [
+ "num-conv",
+ "time-core",
+]
+
+[[package]]
+name = "tiny-keccak"
+version = "2.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237"
+dependencies = [
+ "crunchy",
+]
+
+[[package]]
+name = "tinystr"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869"
+dependencies = [
+ "displaydoc",
+ "zerovec",
+]
+
+[[package]]
+name = "tinyvec"
+version = "1.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa"
+dependencies = [
+ "tinyvec_macros",
+]
+
+[[package]]
+name = "tinyvec_macros"
+version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
+checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
-name = "sqlformat"
-version = "0.2.2"
+name = "tokio"
+version = "1.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6b7b278788e7be4d0d29c0f39497a0eef3fba6bbc8e70d8bf7fde46edeaa9e85"
+checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408"
dependencies = [
- "itertools 0.11.0",
- "nom",
- "unicode_categories",
+ "bytes",
+ "libc",
+ "mio",
+ "parking_lot",
+ "pin-project-lite",
+ "signal-hook-registry",
+ "socket2 0.6.1",
+ "tokio-macros",
+ "windows-sys 0.61.2",
]
[[package]]
-name = "sqlx"
-version = "0.6.3"
+name = "tokio-executor-trait"
+version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f8de3b03a925878ed54a954f621e64bf55a3c1bd29652d0d1a17830405350188"
+checksum = "6278565f9fd60c2d205dfbc827e8bb1236c2b1a57148708e95861eff7a6b3bad"
dependencies = [
- "sqlx-core",
- "sqlx-macros",
+ "async-trait",
+ "executor-trait",
+ "tokio",
]
[[package]]
-name = "sqlx-core"
-version = "0.6.3"
+name = "tokio-macros"
+version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fa8241483a83a3f33aa5fff7e7d9def398ff9990b2752b6c6112b83c6d246029"
+checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
+]
+
+[[package]]
+name = "tokio-native-tls"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2"
+dependencies = [
+ "native-tls",
+ "tokio",
+]
+
+[[package]]
+name = "tokio-stream"
+version = "0.1.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047"
+dependencies = [
+ "futures-core",
+ "pin-project-lite",
+ "tokio",
+]
+
+[[package]]
+name = "tokio-util"
+version = "0.7.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594"
dependencies = [
- "ahash 0.7.6",
- "atoi",
- "base64 0.13.1",
- "bitflags 1.3.2",
- "byteorder",
"bytes",
- "chrono",
- "crc",
- "crossbeam-queue",
- "dirs",
- "dotenvy",
- "either",
- "event-listener",
- "futures-channel",
"futures-core",
- "futures-intrusive",
- "futures-util",
- "hashlink",
- "hex",
- "hkdf",
- "hmac",
- "indexmap",
- "itoa",
- "libc",
- "log",
- "md-5",
- "memchr",
- "once_cell",
- "paste",
- "percent-encoding",
- "rand",
- "rustls",
- "rustls-pemfile",
+ "futures-sink",
+ "pin-project-lite",
+ "tokio",
+]
+
+[[package]]
+name = "toml"
+version = "0.5.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234"
+dependencies = [
"serde",
- "serde_json",
- "sha1",
- "sha2",
- "smallvec",
- "sqlformat",
- "sqlx-rt",
- "stringprep",
- "thiserror",
- "tokio-stream",
- "url",
+]
+
+[[package]]
+name = "tower-service"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
+
+[[package]]
+name = "tracing"
+version = "0.1.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100"
+dependencies = [
+ "log",
+ "pin-project-lite",
+ "tracing-attributes",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-actix-web"
+version = "0.7.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2f28f45dd524790b44a7b372f7c3aec04a3af6b42d494e861b67de654cb25a5e"
+dependencies = [
+ "actix-web",
+ "mutually_exclusive_features",
+ "pin-project",
+ "tracing",
"uuid",
- "webpki-roots",
- "whoami",
]
[[package]]
-name = "sqlx-macros"
-version = "0.6.3"
+name = "tracing-attributes"
+version = "0.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9966e64ae989e7e575b19d7265cb79d7fc3cbbdf179835cb0d716f294c2049c9"
+checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da"
dependencies = [
- "dotenvy",
- "either",
- "heck",
- "hex",
- "once_cell",
"proc-macro2",
"quote",
+ "syn 2.0.111",
+]
+
+[[package]]
+name = "tracing-bunyan-formatter"
+version = "0.3.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2d637245a0d8774bd48df6482e086c59a8b5348a910c3b0579354045a9d82411"
+dependencies = [
+ "ahash 0.8.12",
+ "gethostname",
+ "log",
"serde",
"serde_json",
- "sha2",
- "sqlx-core",
- "sqlx-rt",
- "syn 1.0.109",
- "url",
+ "time",
+ "tracing",
+ "tracing-core",
+ "tracing-log 0.1.4",
+ "tracing-subscriber",
+]
+
+[[package]]
+name = "tracing-core"
+version = "0.1.36"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a"
+dependencies = [
+ "once_cell",
+ "valuable",
+]
+
+[[package]]
+name = "tracing-log"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2"
+dependencies = [
+ "log",
+ "once_cell",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-log"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
+dependencies = [
+ "log",
+ "once_cell",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-subscriber"
+version = "0.3.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e"
+dependencies = [
+ "matchers",
+ "nu-ansi-term",
+ "once_cell",
+ "regex-automata",
+ "sharded-slab",
+ "smallvec",
+ "thread_local",
+ "tracing",
+ "tracing-core",
+ "tracing-log 0.2.0",
+]
+
+[[package]]
+name = "triomphe"
+version = "0.1.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dd69c5aa8f924c7519d6372789a74eac5b94fb0f8fcf0d4a97eb0bfc3e785f39"
+
+[[package]]
+name = "try-lock"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
+
+[[package]]
+name = "typenum"
+version = "1.19.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb"
+
+[[package]]
+name = "ucd-trie"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971"
+
+[[package]]
+name = "unicase"
+version = "2.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539"
+
+[[package]]
+name = "unicode-bidi"
+version = "0.3.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5"
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5"
+
+[[package]]
+name = "unicode-normalization"
+version = "0.1.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8"
+dependencies = [
+ "tinyvec",
+]
+
+[[package]]
+name = "unicode-properties"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7df058c713841ad818f1dc5d3fd88063241cc61f49f5fbea4b951e8cf5a8d71d"
+
+[[package]]
+name = "unicode-segmentation"
+version = "1.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
+
+[[package]]
+name = "universal-hash"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea"
+dependencies = [
+ "crypto-common",
+ "subtle",
+]
+
+[[package]]
+name = "unsafe-libyaml"
+version = "0.2.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861"
+
+[[package]]
+name = "untrusted"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
+
+[[package]]
+name = "url"
+version = "2.5.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b"
+dependencies = [
+ "form_urlencoded",
+ "idna",
+ "percent-encoding",
+ "serde",
]
[[package]]
-name = "sqlx-rt"
-version = "0.6.3"
+name = "urlencoding"
+version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "804d3f245f894e61b1e6263c84b23ca675d96753b5abfd5cc8597d86806e8024"
-dependencies = [
- "once_cell",
- "tokio",
- "tokio-rustls",
-]
+checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
[[package]]
-name = "stacker"
-version = "0.1.0"
-dependencies = [
- "actix-cors",
- "actix-http",
- "actix-web",
- "actix-web-httpauth",
- "chrono",
- "config",
- "futures",
- "futures-util",
- "hmac",
- "rand",
- "regex",
- "reqwest",
- "serde",
- "serde_derive",
- "serde_json",
- "serde_valid",
- "sha2",
- "sqlx",
- "thiserror",
- "tokio",
- "tokio-stream",
- "tracing",
- "tracing-actix-web",
- "tracing-bunyan-formatter",
- "tracing-log",
- "tracing-subscriber",
- "uuid",
-]
+name = "utf8_iter"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
[[package]]
-name = "stringprep"
-version = "0.1.4"
+name = "utf8parse"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
+
+[[package]]
+name = "uuid"
+version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6"
+checksum = "e2e054861b4bd027cd373e18e8d8d8e6548085000e41290d95ce0c373a654b4a"
dependencies = [
- "finl_unicode",
- "unicode-bidi",
- "unicode-normalization",
+ "getrandom 0.3.4",
+ "js-sys",
+ "serde_core",
+ "wasm-bindgen",
]
[[package]]
-name = "strsim"
-version = "0.10.0"
+name = "valuable"
+version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
+checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
[[package]]
-name = "subtle"
-version = "2.5.0"
+name = "vcpkg"
+version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
+checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
[[package]]
-name = "syn"
-version = "1.0.109"
+name = "version_check"
+version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
-dependencies = [
- "proc-macro2",
- "quote",
- "unicode-ident",
-]
+checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
[[package]]
-name = "syn"
-version = "2.0.38"
+name = "waker-fn"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "317211a0dc0ceedd78fb2ca9a44aed3d7b9b26f81870d485c07122b4350673b7"
+
+[[package]]
+name = "walkdir"
+version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b"
+checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
dependencies = [
- "proc-macro2",
- "quote",
- "unicode-ident",
+ "same-file",
+ "winapi-util",
]
[[package]]
-name = "system-configuration"
-version = "0.5.1"
+name = "want"
+version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7"
+checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e"
dependencies = [
- "bitflags 1.3.2",
- "core-foundation",
- "system-configuration-sys",
+ "try-lock",
]
[[package]]
-name = "system-configuration-sys"
-version = "0.5.0"
+name = "wasi"
+version = "0.9.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9"
+checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
+
+[[package]]
+name = "wasi"
+version = "0.11.1+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b"
+
+[[package]]
+name = "wasip2"
+version = "1.0.1+wasi-0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7"
dependencies = [
- "core-foundation-sys",
- "libc",
+ "wit-bindgen",
]
[[package]]
-name = "tempfile"
-version = "3.8.0"
+name = "wasite"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b"
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef"
+checksum = "0d759f433fa64a2d763d1340820e46e111a7a5ab75f993d1852d70b03dbb80fd"
dependencies = [
"cfg-if",
- "fastrand",
- "redox_syscall 0.3.5",
- "rustix",
- "windows-sys",
+ "once_cell",
+ "rustversion",
+ "wasm-bindgen-macro",
+ "wasm-bindgen-shared",
]
[[package]]
-name = "thiserror"
-version = "1.0.49"
+name = "wasm-bindgen-futures"
+version = "0.4.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1177e8c6d7ede7afde3585fd2513e611227efd6481bd78d2e82ba1ce16557ed4"
+checksum = "836d9622d604feee9e5de25ac10e3ea5f2d65b41eac0d9ce72eb5deae707ce7c"
dependencies = [
- "thiserror-impl",
+ "cfg-if",
+ "js-sys",
+ "once_cell",
+ "wasm-bindgen",
+ "web-sys",
]
[[package]]
-name = "thiserror-impl"
-version = "1.0.49"
+name = "wasm-bindgen-macro"
+version = "0.2.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "10712f02019e9288794769fba95cd6847df9874d49d871d062172f9dd41bc4cc"
+checksum = "48cb0d2638f8baedbc542ed444afc0644a29166f1595371af4fecf8ce1e7eeb3"
dependencies = [
- "proc-macro2",
"quote",
- "syn 2.0.38",
+ "wasm-bindgen-macro-support",
]
[[package]]
-name = "thread_local"
-version = "1.1.7"
+name = "wasm-bindgen-macro-support"
+version = "0.2.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152"
+checksum = "cefb59d5cd5f92d9dcf80e4683949f15ca4b511f4ac0a6e14d4e1ac60c6ecd40"
dependencies = [
- "cfg-if",
- "once_cell",
+ "bumpalo",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
+ "wasm-bindgen-shared",
]
[[package]]
-name = "time"
-version = "0.1.45"
+name = "wasm-bindgen-shared"
+version = "0.2.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a"
+checksum = "cbc538057e648b67f72a982e708d485b2efa771e1ac05fec311f9f63e5800db4"
dependencies = [
- "libc",
- "wasi 0.10.0+wasi-snapshot-preview1",
- "winapi",
+ "unicode-ident",
]
[[package]]
-name = "time"
-version = "0.3.30"
+name = "wasm-bindgen-test"
+version = "0.3.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5"
+checksum = "25e90e66d265d3a1efc0e72a54809ab90b9c0c515915c67cdf658689d2c22c6c"
dependencies = [
- "deranged",
- "itoa",
- "powerfmt",
+ "async-trait",
+ "cast",
+ "js-sys",
+ "libm",
+ "minicov",
+ "nu-ansi-term",
+ "num-traits",
+ "oorandom",
"serde",
- "time-core",
- "time-macros",
+ "serde_json",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "wasm-bindgen-test-macro",
]
[[package]]
-name = "time-core"
-version = "0.1.2"
+name = "wasm-bindgen-test-macro"
+version = "0.3.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3"
+checksum = "7150335716dce6028bead2b848e72f47b45e7b9422f64cccdc23bedca89affc1"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
+]
[[package]]
-name = "time-macros"
-version = "0.2.15"
+name = "web-sys"
+version = "0.3.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20"
+checksum = "9b32828d774c412041098d182a8b38b16ea816958e07cf40eec2bc080ae137ac"
dependencies = [
- "time-core",
+ "js-sys",
+ "wasm-bindgen",
]
[[package]]
-name = "tinyvec"
-version = "1.6.0"
+name = "webpki-roots"
+version = "0.26.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
+checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9"
dependencies = [
- "tinyvec_macros",
+ "webpki-roots 1.0.4",
]
[[package]]
-name = "tinyvec_macros"
-version = "0.1.1"
+name = "webpki-roots"
+version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
+checksum = "b2878ef029c47c6e8cf779119f20fcf52bde7ad42a731b2a304bc221df17571e"
+dependencies = [
+ "rustls-pki-types",
+]
[[package]]
-name = "tokio"
-version = "1.33.0"
+name = "whoami"
+version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f38200e3ef7995e5ef13baec2f432a6da0aa9ac495b2c0e8f3b7eec2c92d653"
+checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d"
dependencies = [
- "backtrace",
- "bytes",
- "libc",
- "mio",
- "num_cpus",
- "parking_lot 0.12.1",
- "pin-project-lite",
- "signal-hook-registry",
- "socket2 0.5.4",
- "tokio-macros",
- "windows-sys",
+ "libredox",
+ "wasite",
]
[[package]]
-name = "tokio-macros"
-version = "2.1.0"
+name = "winapi"
+version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.38",
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
]
[[package]]
-name = "tokio-native-tls"
-version = "0.3.1"
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2"
-dependencies = [
- "native-tls",
- "tokio",
-]
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
-name = "tokio-rustls"
-version = "0.23.4"
+name = "winapi-util"
+version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59"
+checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22"
dependencies = [
- "rustls",
- "tokio",
- "webpki",
+ "windows-sys 0.61.2",
]
[[package]]
-name = "tokio-stream"
-version = "0.1.14"
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
+[[package]]
+name = "windows-core"
+version = "0.62.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842"
+checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb"
dependencies = [
- "futures-core",
- "pin-project-lite",
- "tokio",
+ "windows-implement",
+ "windows-interface",
+ "windows-link",
+ "windows-result",
+ "windows-strings",
]
[[package]]
-name = "tokio-util"
-version = "0.7.9"
+name = "windows-implement"
+version = "0.60.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1d68074620f57a0b21594d9735eb2e98ab38b17f80d3fcb189fca266771ca60d"
+checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf"
dependencies = [
- "bytes",
- "futures-core",
- "futures-sink",
- "pin-project-lite",
- "tokio",
- "tracing",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
]
[[package]]
-name = "toml"
-version = "0.5.11"
+name = "windows-interface"
+version = "0.59.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234"
+checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358"
dependencies = [
- "serde",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
]
[[package]]
-name = "tower-service"
-version = "0.3.2"
+name = "windows-link"
+version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52"
+checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
[[package]]
-name = "tracing"
-version = "0.1.39"
+name = "windows-result"
+version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee2ef2af84856a50c1d430afce2fdded0a4ec7eda868db86409b4543df0797f9"
+checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5"
dependencies = [
- "log",
- "pin-project-lite",
- "tracing-attributes",
- "tracing-core",
+ "windows-link",
]
[[package]]
-name = "tracing-actix-web"
-version = "0.7.7"
+name = "windows-strings"
+version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "94982c2ad939d5d0bfd71c2f9b7ed273c72348485c72bb87bb4db6bd69df10cb"
+checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091"
dependencies = [
- "actix-web",
- "pin-project",
- "tracing",
- "uuid",
+ "windows-link",
]
[[package]]
-name = "tracing-attributes"
-version = "0.1.27"
+name = "windows-sys"
+version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
+checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.38",
+ "windows-targets 0.48.5",
]
[[package]]
-name = "tracing-bunyan-formatter"
-version = "0.3.9"
+name = "windows-sys"
+version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b5c266b9ac83dedf0e0385ad78514949e6d89491269e7065bee51d2bb8ec7373"
+checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [
- "ahash 0.8.3",
- "gethostname",
- "log",
- "serde",
- "serde_json",
- "time 0.3.30",
- "tracing",
- "tracing-core",
- "tracing-log",
- "tracing-subscriber",
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.60.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
+dependencies = [
+ "windows-targets 0.53.5",
]
[[package]]
-name = "tracing-core"
-version = "0.1.32"
+name = "windows-sys"
+version = "0.61.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
+checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
dependencies = [
- "once_cell",
- "valuable",
+ "windows-link",
]
[[package]]
-name = "tracing-log"
-version = "0.1.3"
+name = "windows-targets"
+version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922"
+checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
dependencies = [
- "lazy_static",
- "log",
- "tracing-core",
+ "windows_aarch64_gnullvm 0.48.5",
+ "windows_aarch64_msvc 0.48.5",
+ "windows_i686_gnu 0.48.5",
+ "windows_i686_msvc 0.48.5",
+ "windows_x86_64_gnu 0.48.5",
+ "windows_x86_64_gnullvm 0.48.5",
+ "windows_x86_64_msvc 0.48.5",
]
[[package]]
-name = "tracing-subscriber"
-version = "0.3.17"
+name = "windows-targets"
+version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77"
+checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
dependencies = [
- "matchers",
- "nu-ansi-term",
- "once_cell",
- "regex",
- "sharded-slab",
- "smallvec",
- "thread_local",
- "tracing",
- "tracing-core",
- "tracing-log",
+ "windows_aarch64_gnullvm 0.52.6",
+ "windows_aarch64_msvc 0.52.6",
+ "windows_i686_gnu 0.52.6",
+ "windows_i686_gnullvm 0.52.6",
+ "windows_i686_msvc 0.52.6",
+ "windows_x86_64_gnu 0.52.6",
+ "windows_x86_64_gnullvm 0.52.6",
+ "windows_x86_64_msvc 0.52.6",
]
[[package]]
-name = "try-lock"
-version = "0.2.4"
+name = "windows-targets"
+version = "0.53.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed"
+checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3"
+dependencies = [
+ "windows-link",
+ "windows_aarch64_gnullvm 0.53.1",
+ "windows_aarch64_msvc 0.53.1",
+ "windows_i686_gnu 0.53.1",
+ "windows_i686_gnullvm 0.53.1",
+ "windows_i686_msvc 0.53.1",
+ "windows_x86_64_gnu 0.53.1",
+ "windows_x86_64_gnullvm 0.53.1",
+ "windows_x86_64_msvc 0.53.1",
+]
[[package]]
-name = "typenum"
-version = "1.17.0"
+name = "windows_aarch64_gnullvm"
+version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
+checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
[[package]]
-name = "ucd-trie"
-version = "0.1.6"
+name = "windows_aarch64_gnullvm"
+version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9"
+checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
[[package]]
-name = "unicode-bidi"
-version = "0.3.13"
+name = "windows_aarch64_gnullvm"
+version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460"
+checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53"
[[package]]
-name = "unicode-ident"
-version = "1.0.12"
+name = "windows_aarch64_msvc"
+version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
+checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
[[package]]
-name = "unicode-normalization"
-version = "0.1.22"
+name = "windows_aarch64_msvc"
+version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921"
-dependencies = [
- "tinyvec",
-]
+checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
[[package]]
-name = "unicode-segmentation"
-version = "1.10.1"
+name = "windows_aarch64_msvc"
+version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36"
+checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006"
[[package]]
-name = "unicode_categories"
-version = "0.1.1"
+name = "windows_i686_gnu"
+version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e"
+checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
[[package]]
-name = "untrusted"
-version = "0.7.1"
+name = "windows_i686_gnu"
+version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
+checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
[[package]]
-name = "untrusted"
-version = "0.9.0"
+name = "windows_i686_gnu"
+version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
+checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3"
[[package]]
-name = "url"
-version = "2.4.1"
+name = "windows_i686_gnullvm"
+version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5"
-dependencies = [
- "form_urlencoded",
- "idna",
- "percent-encoding",
-]
+checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
[[package]]
-name = "uuid"
-version = "1.5.0"
+name = "windows_i686_gnullvm"
+version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "88ad59a7560b41a70d191093a945f0b87bc1deeda46fb237479708a1d6b6cdfc"
-dependencies = [
- "getrandom",
- "serde",
-]
+checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c"
[[package]]
-name = "valuable"
-version = "0.1.0"
+name = "windows_i686_msvc"
+version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
+checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
[[package]]
-name = "vcpkg"
-version = "0.2.15"
+name = "windows_i686_msvc"
+version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
+checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
[[package]]
-name = "version_check"
-version = "0.9.4"
+name = "windows_i686_msvc"
+version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2"
[[package]]
-name = "want"
-version = "0.3.1"
+name = "windows_x86_64_gnu"
+version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e"
-dependencies = [
- "try-lock",
-]
+checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
[[package]]
-name = "wasi"
-version = "0.10.0+wasi-snapshot-preview1"
+name = "windows_x86_64_gnu"
+version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f"
+checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
[[package]]
-name = "wasi"
-version = "0.11.0+wasi-snapshot-preview1"
+name = "windows_x86_64_gnu"
+version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499"
[[package]]
-name = "wasm-bindgen"
-version = "0.2.87"
+name = "windows_x86_64_gnullvm"
+version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342"
-dependencies = [
- "cfg-if",
- "wasm-bindgen-macro",
-]
+checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
[[package]]
-name = "wasm-bindgen-backend"
-version = "0.2.87"
+name = "windows_x86_64_gnullvm"
+version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd"
-dependencies = [
- "bumpalo",
- "log",
- "once_cell",
- "proc-macro2",
- "quote",
- "syn 2.0.38",
- "wasm-bindgen-shared",
-]
+checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
[[package]]
-name = "wasm-bindgen-futures"
-version = "0.4.37"
+name = "windows_x86_64_gnullvm"
+version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03"
-dependencies = [
- "cfg-if",
- "js-sys",
- "wasm-bindgen",
- "web-sys",
-]
+checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1"
[[package]]
-name = "wasm-bindgen-macro"
-version = "0.2.87"
+name = "windows_x86_64_msvc"
+version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d"
-dependencies = [
- "quote",
- "wasm-bindgen-macro-support",
-]
+checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
[[package]]
-name = "wasm-bindgen-macro-support"
-version = "0.2.87"
+name = "windows_x86_64_msvc"
+version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.38",
- "wasm-bindgen-backend",
- "wasm-bindgen-shared",
-]
+checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
-name = "wasm-bindgen-shared"
-version = "0.2.87"
+name = "windows_x86_64_msvc"
+version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1"
+checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650"
[[package]]
-name = "web-sys"
-version = "0.3.64"
+name = "winreg"
+version = "0.50.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b"
+checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1"
dependencies = [
- "js-sys",
- "wasm-bindgen",
+ "cfg-if",
+ "windows-sys 0.48.0",
]
[[package]]
-name = "webpki"
-version = "0.22.4"
+name = "wiremock"
+version = "0.5.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53"
+checksum = "13a3a53eaf34f390dd30d7b1b078287dd05df2aa2e21a589ccb80f5c7253c2e9"
dependencies = [
- "ring 0.17.4",
- "untrusted 0.9.0",
+ "assert-json-diff",
+ "async-trait",
+ "base64 0.21.7",
+ "deadpool 0.9.5",
+ "futures",
+ "futures-timer",
+ "http-types",
+ "hyper",
+ "log",
+ "once_cell",
+ "regex",
+ "serde",
+ "serde_json",
+ "tokio",
]
[[package]]
-name = "webpki-roots"
-version = "0.22.6"
+name = "wit-bindgen"
+version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87"
-dependencies = [
- "webpki",
-]
+checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59"
[[package]]
-name = "whoami"
-version = "1.4.1"
+name = "writeable"
+version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22fc3756b8a9133049b26c7f61ab35416c130e8c09b660f5b3958b446f52cc50"
-dependencies = [
- "wasm-bindgen",
- "web-sys",
-]
+checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9"
[[package]]
-name = "winapi"
-version = "0.3.9"
+name = "x509-cert"
+version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+checksum = "1301e935010a701ae5f8655edc0ad17c44bad3ac5ce8c39185f75453b720ae94"
dependencies = [
- "winapi-i686-pc-windows-gnu",
- "winapi-x86_64-pc-windows-gnu",
+ "const-oid",
+ "der",
+ "spki",
]
[[package]]
-name = "winapi-i686-pc-windows-gnu"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
-
-[[package]]
-name = "winapi-x86_64-pc-windows-gnu"
-version = "0.4.0"
+name = "x509-parser"
+version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+checksum = "4569f339c0c402346d4a75a9e39cf8dad310e287eef1ff56d4c68e5067f53460"
+dependencies = [
+ "asn1-rs",
+ "data-encoding",
+ "der-parser",
+ "lazy_static",
+ "nom",
+ "oid-registry",
+ "rusticata-macros",
+ "thiserror 2.0.17",
+ "time",
+]
[[package]]
-name = "windows-core"
-version = "0.51.1"
+name = "yaml-rust"
+version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f1f8cf84f35d2db49a46868f947758c7a1138116f7fac3bc844f43ade1292e64"
+checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85"
dependencies = [
- "windows-targets",
+ "linked-hash-map",
]
[[package]]
-name = "windows-sys"
-version = "0.48.0"
+name = "yoke"
+version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
+checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954"
dependencies = [
- "windows-targets",
+ "stable_deref_trait",
+ "yoke-derive",
+ "zerofrom",
]
[[package]]
-name = "windows-targets"
-version = "0.48.5"
+name = "yoke-derive"
+version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
+checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d"
dependencies = [
- "windows_aarch64_gnullvm",
- "windows_aarch64_msvc",
- "windows_i686_gnu",
- "windows_i686_msvc",
- "windows_x86_64_gnu",
- "windows_x86_64_gnullvm",
- "windows_x86_64_msvc",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
+ "synstructure",
]
[[package]]
-name = "windows_aarch64_gnullvm"
-version = "0.48.5"
+name = "zerocopy"
+version = "0.8.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
+checksum = "fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3"
+dependencies = [
+ "zerocopy-derive",
+]
[[package]]
-name = "windows_aarch64_msvc"
-version = "0.48.5"
+name = "zerocopy-derive"
+version = "0.8.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
+checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
+]
[[package]]
-name = "windows_i686_gnu"
-version = "0.48.5"
+name = "zerofrom"
+version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
+checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5"
+dependencies = [
+ "zerofrom-derive",
+]
[[package]]
-name = "windows_i686_msvc"
-version = "0.48.5"
+name = "zerofrom-derive"
+version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
+checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
+ "synstructure",
+]
[[package]]
-name = "windows_x86_64_gnu"
-version = "0.48.5"
+name = "zeroize"
+version = "1.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
+checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0"
[[package]]
-name = "windows_x86_64_gnullvm"
-version = "0.48.5"
+name = "zerotrie"
+version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
+checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851"
+dependencies = [
+ "displaydoc",
+ "yoke",
+ "zerofrom",
+]
[[package]]
-name = "windows_x86_64_msvc"
-version = "0.48.5"
+name = "zerovec"
+version = "0.11.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
+checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002"
+dependencies = [
+ "yoke",
+ "zerofrom",
+ "zerovec-derive",
+]
[[package]]
-name = "winreg"
-version = "0.50.0"
+name = "zerovec-derive"
+version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1"
+checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3"
dependencies = [
- "cfg-if",
- "windows-sys",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.111",
]
[[package]]
-name = "yaml-rust"
-version = "0.4.5"
+name = "zmij"
+version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85"
-dependencies = [
- "linked-hash-map",
-]
+checksum = "d0095ecd462946aa3927d9297b63ef82fb9a5316d7a37d134eeb36e58228615a"
[[package]]
name = "zstd"
-version = "0.12.4"
+version = "0.13.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a27595e173641171fc74a1232b7b1c7a7cb6e18222c11e9dfb9888fa424c53c"
+checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a"
dependencies = [
"zstd-safe",
]
[[package]]
name = "zstd-safe"
-version = "6.0.6"
+version = "7.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee98ffd0b48ee95e6c5168188e44a54550b1564d9d530ee21d5f0eaed1069581"
+checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d"
dependencies = [
- "libc",
"zstd-sys",
]
[[package]]
name = "zstd-sys"
-version = "2.0.9+zstd.1.5.5"
+version = "2.0.16+zstd.1.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9e16efa8a874a0481a574084d34cc26fdb3b99627480f785888deb6386506656"
+checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748"
dependencies = [
"cc",
"pkg-config",
diff --git a/Cargo.toml b/Cargo.toml
index 9153031..8bbdb7b 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,34 +1,42 @@
[package]
name = "stacker"
-version = "0.1.0"
+version = "0.2.1"
edition = "2021"
+default-run= "server"
[lib]
path="src/lib.rs"
[[bin]]
path = "src/main.rs"
-name = "stacker"
+name = "server"
+
+[[bin]]
+path = "src/console/main.rs"
+name = "console"
+required-features = ["explain"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
actix-web = "4.3.1"
-chrono = { version = "0.4.26", features = ["time", "serde"] }
-config = "0.13.3"
-reqwest = { version = "0.11.17", features = ["json"] }
-serde = { version = "1.0.162", features = ["derive"] }
+actix = "0.13.5"
+actix-web-actors = "4.3.1"
+chrono = { version = "0.4.39", features = ["serde", "clock"] }
+config = "0.13.4"
+reqwest = { version = "0.11.23", features = ["json", "blocking"] }
+serde = { version = "1.0.195", features = ["derive"] }
tokio = { version = "1.28.1", features = ["full"] }
-tracing = { version = "0.1.37", features = ["log"] }
+tracing = { version = "0.1.40", features = ["log"] }
tracing-bunyan-formatter = "0.3.8"
-tracing-log = "0.1.3"
-tracing-subscriber = { version = "0.3.17", features = ["registry", "env-filter"] }
+tracing-log = "0.1.4"
+tracing-subscriber = { version = "0.3.18", features = ["registry", "env-filter"] }
uuid = { version = "1.3.4", features = ["v4", "serde"] }
thiserror = "1.0"
-serde_valid = "0.16.3"
-serde_json = { version = "1.0.105", features = [] }
-serde_derive = "1.0.188"
-actix-web-httpauth = "0.8.1"
+serde_valid = "0.18.0"
+serde_json = { version = "1.0.111", features = [] }
+async-trait = "0.1.77"
+serde_derive = "1.0.195"
actix-cors = "0.6.4"
tracing-actix-web = "0.7.7"
regex = "1.10.2"
@@ -39,15 +47,44 @@ tokio-stream = "0.1.14"
actix-http = "3.4.0"
hmac = "0.12.1"
sha2 = "0.10.8"
+sqlx-adapter = { version = "1.8.0", default-features = false, features = ["postgres", "runtime-tokio-native-tls"]}
+dotenvy = "0.15"
+
+# dctypes
+derive_builder = "0.12.0"
+indexmap = { version = "2.0.0", features = ["serde"], optional = true }
+serde_yaml = "0.9"
+lapin = { version = "2.3.1", features = ["serde_json"] }
+futures-lite = "2.2.0"
+clap = { version = "4.4.8", features = ["derive"] }
+brotli = "3.4.0"
+serde_path_to_error = "0.1.14"
+deadpool-lapin = "0.12.1"
+docker-compose-types = "0.7.0"
+actix-casbin-auth = { git = "https://github.com/casbin-rs/actix-casbin-auth.git"}
+casbin = "2.2.0"
+aes-gcm = "0.10.3"
+base64 = "0.22.1"
+redis = { version = "0.27.5", features = ["tokio-comp", "connection-manager"] }
+urlencoding = "2.1.3"
[dependencies.sqlx]
-version = "0.6.3"
+version = "0.8.2"
features = [
- 'runtime-actix-rustls',
+ "runtime-tokio-rustls",
"postgres",
"uuid",
- "tls",
"chrono",
"json",
- "offline"
+ "ipnetwork",
+ "macros"
]
+
+[features]
+default = ["indexmap"]
+indexmap = ["dep:indexmap"]
+explain = ["actix-casbin-auth/explain", "actix-casbin-auth/logging"]
+
+[dev-dependencies]
+glob = "0.3"
+wiremock = "0.5.22"
diff --git a/Dockerfile b/Dockerfile
index 666567e..c325f65 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,60 +1,59 @@
FROM rust:bookworm as builder
-RUN apt-get update; \
- #apt-get install --no-install-recommends -y libpq-dev libssl-dev pkg-config; \
- apt-get install --no-install-recommends -y libssl-dev; \
- rm -rf /var/lib/apt/lists/*; \
- USER=root cargo new --bin app;
+#RUN apt-get update; \
+# apt-get install --no-install-recommends -y libssl-dev; \
+# rm -rf /var/lib/apt/lists/*; \
+# USER=root cargo new --bin app;
RUN cargo install sqlx-cli
WORKDIR /app
# copy manifests
-COPY ../Cargo.toml .
-COPY ../Cargo.lock .
-COPY ../rustfmt.toml .
-COPY ../Makefile .
-COPY ../docker/local/.env .
-COPY ../docker/local/configuration.yaml .
+COPY ./Cargo.toml .
+COPY ./Cargo.lock .
+COPY ./rustfmt.toml .
+COPY ./Makefile .
+COPY ./docker/local/.env .
+COPY ./docker/local/configuration.yaml .
+COPY .sqlx .sqlx/
# build this project to cache dependencies
#RUN sqlx database create && sqlx migrate run
-RUN cargo build --release; \
- rm src/*.rs
+# build skeleton and remove src after
+#RUN cargo build --release; \
+# rm src/*.rs
-# add .env and secret.key for Docker env
-#RUN touch .env;
-# copy project source and necessary files
-COPY ../src ./src
+COPY ./src ./src
+
+# for ls output use BUILDKIT_PROGRESS=plain docker build .
+#RUN ls -la /app/ >&2
#RUN sqlx migrate run
#RUN cargo sqlx prepare -- --bin stacker
+ENV SQLX_OFFLINE true
+
+RUN apt-get update && apt-get install --no-install-recommends -y libssl-dev; \
+ cargo build --release --bin server
-# rebuild app with project source
-RUN rm -rf ./target/release/deps/stacker*; \
- cargo build --release
+#RUN ls -la /app/target/release/ >&2
-# deploy stage
-FROM debian:bookworm as production
+# deploy production
+FROM debian:bookworm-slim as production
+RUN apt-get update && apt-get install --no-install-recommends -y libssl-dev ca-certificates;
# create app directory
WORKDIR /app
RUN mkdir ./files && chmod 0777 ./files
-# install libpq
-RUN apt-get update; \
- apt-get install --no-install-recommends -y libssl-dev \
- && rm -rf /var/lib/apt/lists/*
-
# copy binary and configuration files
-#COPY --from=builder ~/.cargo/bin/sqlx-cli sqlx-cli
-COPY --from=builder /app/target/release/stacker .
+COPY --from=builder /app/target/release/server .
COPY --from=builder /app/.env .
COPY --from=builder /app/configuration.yaml .
-COPY --from=builder /usr/local/cargo/bin/sqlx sqlx
+COPY --from=builder /usr/local/cargo/bin/sqlx /usr/local/bin/sqlx
+COPY ./access_control.conf.dist ./access_control.conf
EXPOSE 8000
# run the binary
-ENTRYPOINT ["/app/stacker"]
+ENTRYPOINT ["/app/server"]
diff --git a/README.md b/README.md
index 53879ed..a766ff6 100644
--- a/README.md
+++ b/README.md
@@ -1,23 +1,162 @@
-# Stacker
-
+
+
+
+
+
+# Stacker Project Overview
Stacker is an application that helps users to create custom IT solutions based on dockerized open
-source apps and user's custom applications docker containers. Users can build their own stack of applications, and
-deploy the final result to their favorite clouds using TryDirect API.
-
-Application development will include:
-- Web UI (Application Stack builder)
-- Command line interface
-- Back-end RESTful API, includes:
- - [ ] Security module.
- - [ ] User Authorization
- - [ ] Restful API client Application Management
- - [ ] Application Key Management
- - [ ] Cloud Provider Key Management
- - [ ] docker-compose.yml generator
- - [ ] TryDirect API Client
- - [ ] Rating module
-
+source apps and the user's own custom application containers. Users can build their own project of applications and
+deploy the final result to their favorite clouds using the TryDirect API. See [CHANGELOG.md](CHANGELOG.md) for the latest platform updates.
+
+## Startup Banner
+When you start the Stacker server, you'll see a welcome banner displaying version and configuration info:
+
+```
+ ██████ ████████ █████ ██████ ██ ██ ███████ ██████
+██ ██ ██ ██ ██ ██ ██ ██ ██ ██
+███████ ██ ███████ ██ █████ █████ ██████
+ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██
+██████ ██ ██ ██ █████ ██ ██ ███████ ██ ██
+
+╭────────────────────────────────────────────────────────╮
+│ Stacker │
+│ Version: 0.2.1t │
+│ Build: 0.2.0 │
+│ Edition: 2021 │
+╰────────────────────────────────────────────────────────╯
+
+📋 Configuration Loaded
+ 🌐 Server Address: http://127.0.0.1:8000
+ 📦 Ready to accept connections
+```
+
+## Core Purpose
+- Allows users to build projects using both open source and custom Docker containers
+- Provides deployment capabilities to various cloud platforms through TryDirect API
+- Helps manage and orchestrate Docker-based application stacks
+
+## Main Components
+
+1. **Project Structure**
+- Web UI (Stack Builder)
+- Command Line Interface
+- RESTful API Backend
+
+2. **Key Features**
+- User Authentication (via TryDirect OAuth)
+- API Client Management
+- Cloud Provider Key Management
+- Docker Compose Generation
+- Project Rating System
+- Project Deployment Management
+
+3. **Technical Architecture**
+- Written in Rust
+- Uses PostgreSQL database
+- Implements REST API endpoints
+- Includes Docker image validation
+- Supports project deployment workflows
+- Has RabbitMQ integration for deployment status updates
+
+4. **Data Models**
+The core Project model includes:
+- Unique identifiers (id, stack_id)
+- User identification
+- Project metadata (name, metadata, request_json)
+- Timestamps (created_at, updated_at)
+
+5. **API Endpoints (user-facing)**
+- `/project` - Project management
+- `/project/deploy` - Deployment handling
+- `/project/deploy/status` - Deployment status tracking
+- `/rating` - Rating system
+- `/client` - API client management
+
+6. **Agent + Command Flow (self-hosted runner)**
+- Register agent (no auth required): `POST /api/v1/agent/register`
+ - Body: `deployment_hash`, optional `capabilities`, `system_info`
+ - Response: `agent_id`, `agent_token`
+- Agent long-poll for commands: `GET /api/v1/agent/commands/wait/:deployment_hash` (see the sketch after this list)
+ - Headers: `X-Agent-Id: <agent_id>`, `Authorization: Bearer <agent_token>`
+ - Optional query params: `timeout` (seconds), `interval` (seconds)
+- Agent report command result: `POST /api/v1/agent/commands/report`
+ - Headers: `X-Agent-Id: <agent_id>`, `Authorization: Bearer <agent_token>`
+ - Body: `command_id`, `deployment_hash`, `status` (`completed|failed`), `result`/`error`, optional `started_at`, required `completed_at`
+- Create command (user auth via OAuth Bearer): `POST /api/v1/commands`
+ - Body: `deployment_hash`, `command_type`, `priority` (`low|normal|high|critical`), `parameters`, optional `timeout_seconds`
+- List commands for a deployment: `GET /api/v1/commands/:deployment_hash`
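+
+A minimal sketch of the long-poll call an agent might make (illustrative only, using the `reqwest` crate already in Cargo.toml; the real agent is a separate component):
+
+```rust
+use reqwest::Client;
+use serde_json::Value;
+
+/// Illustrative: long-poll Stacker for queued commands as a registered agent.
+async fn poll_commands(base: &str, dh: &str, agent_id: &str, token: &str) -> reqwest::Result<Value> {
+    Client::new()
+        .get(format!("{base}/api/v1/agent/commands/wait/{dh}"))
+        .header("X-Agent-Id", agent_id)
+        .header("Authorization", format!("Bearer {token}"))
+        .query(&[("timeout", "30"), ("interval", "2")])
+        .send()
+        .await?
+        .json()
+        .await
+}
+```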
+
+7. **Stacker → Agent HMAC-signed POSTs (v2)**
+- All POST calls from Stacker to the agent must be signed per [STACKER_INTEGRATION_REQUIREMENTS.md](STACKER_INTEGRATION_REQUIREMENTS.md)
+- Required headers: `X-Agent-Id`, `X-Timestamp`, `X-Request-Id`, `X-Agent-Signature`
+- Signature: base64(HMAC_SHA256(AGENT_TOKEN, raw_body_bytes))
+- Helper available: `helpers::AgentClient`
+ - Base URL: set `AGENT_BASE_URL` to point Stacker at the target agent (e.g., `http://agent:5000`).
+
+Example:
+```rust
+use stacker::helpers::AgentClient;
+use serde_json::json;
+
+let client = AgentClient::new("http://agent:5000", agent_id, agent_token);
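+// Illustrative payload for an HMAC-signed POST (see headers above); the GET below carries no body.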
+let payload = json!({"deployment_hash": dh, "type": "restart_service", "parameters": {"service": "web"}});
+let resp = client.get("/api/v1/status").await?;
+```
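+
+For reference, a minimal sketch of computing that signature with the `hmac`, `sha2`, and `base64` crates already listed in Cargo.toml (the function name is illustrative, not the project's actual helper):
+
+```rust
+use base64::Engine;
+use hmac::{Hmac, Mac};
+use sha2::Sha256;
+
+/// Illustrative: X-Agent-Signature = base64(HMAC_SHA256(AGENT_TOKEN, raw_body_bytes)).
+fn sign_body(agent_token: &str, raw_body: &[u8]) -> String {
+    let mut mac = Hmac::<Sha256>::new_from_slice(agent_token.as_bytes())
+        .expect("HMAC accepts keys of any length");
+    mac.update(raw_body);
+    base64::engine::general_purpose::STANDARD.encode(mac.finalize().into_bytes())
+}
+```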
+
+### Pull-Only Command Architecture
+
+Stacker uses a pull-only architecture for agent communication. **Stacker never dials out to agents.** Commands are enqueued in the database; agents poll and sign their own requests.
+
+**Flow:**
+1. UI/API calls `POST /api/v1/commands` or `POST /api/v1/agent/commands/enqueue`
+2. Command is inserted into `commands` + `command_queue` tables
+3. Agent polls `GET /api/v1/agent/commands/wait/{deployment_hash}` with HMAC headers
+4. Stacker verifies agent's HMAC, returns queued commands
+5. Agent executes locally and calls `POST /api/v1/agent/commands/report`
+
+**Note:** `AGENT_BASE_URL` environment variable is NOT required for Status Panel commands.
+
+Token rotation (writes to Vault; agent pulls latest):
+```rust
+use stacker::services::agent_dispatcher;
+
+// Rotate token - stored in Vault, agent fetches on next poll
+agent_dispatcher::rotate_token(&pg, &vault, &deployment_hash, "NEW_TOKEN").await?;
+```
+
+Console token rotation:
+```bash
+cargo run --bin console -- Agent rotate-token \
+  --deployment-hash <deployment_hash> \
+  --new-token <new_token>
+```
+
+### Configuration: Vault
+- In configuration.yaml.dist, set:
+ - vault.address: Vault URL (e.g., http://127.0.0.1:8200)
+ - vault.token: Vault access token (dev/test only)
+ - vault.agent_path_prefix: KV mount/prefix for agent tokens (e.g., agent or kv/agent)
+- Environment variable overrides (optional): VAULT_ADDRESS, VAULT_TOKEN, VAULT_AGENT_PATH_PREFIX
+- Agent tokens are stored at: {vault.agent_path_prefix}/{deployment_hash}/token
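+
+A minimal sketch of how that storage path is composed (illustrative; not the actual Vault client code):
+
+```rust
+/// Illustrative: agent tokens live at {vault.agent_path_prefix}/{deployment_hash}/token.
+fn agent_token_path(agent_path_prefix: &str, deployment_hash: &str) -> String {
+    format!("{}/{}/token", agent_path_prefix.trim_end_matches('/'), deployment_hash)
+}
+```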
+
+### Configuration: Agent Polling & Casbin Reload
+- `agent_command_poll_timeout_secs` (default 30)
+- `agent_command_poll_interval_secs` (default 3)
+- `casbin_reload_enabled` (default true)
+- `casbin_reload_interval_secs` (default 10)
+
+Environment overrides:
+- `STACKER_AGENT_POLL_TIMEOUT_SECS`
+- `STACKER_AGENT_POLL_INTERVAL_SECS`
+- `STACKER_CASBIN_RELOAD_ENABLED`
+- `STACKER_CASBIN_RELOAD_INTERVAL_SECS`
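+
+Environment overrides take precedence over the configuration.yaml values; a minimal sketch of reading one (illustrative helper, not the actual config loader):
+
+```rust
+use std::env;
+
+/// Illustrative: read e.g. STACKER_AGENT_POLL_TIMEOUT_SECS, falling back to the configured default.
+fn env_override_u64(name: &str, default: u64) -> u64 {
+    env::var(name).ok().and_then(|v| v.parse().ok()).unwrap_or(default)
+}
+```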
+
+In short, Stacker is an orchestration platform that bridges Docker container management and cloud deployment, with a focus on user-friendly application stack building and management.
+
+This is a high-level overview; the project is actively developed, and features are added progressively (see the TODO sections in the documentation).
+
+
## How to start
@@ -36,9 +175,9 @@ Stacker (API) - Serves API clients
Authentication made through TryDirect OAuth, here we have only client
Database (Read only)
Logging/Tracing (Files) / Quickwit for future
-/stack (WebUI, as a result we have a JSON)
-/stack/deploy -> sends deploy command to TryDirect Install service
-/stack/deploy/status - get installation progress (rabbitmq client),
+/project (WebUI, as a result we have a JSON)
+/project/deploy -> sends deploy command to TryDirect Install service
+/project/deploy/status - get installation progress (rabbitmq client),
#### TODO
Find out how to get user's token for queue
@@ -64,7 +203,28 @@ sqlx migrate revert
```
+## Testing
+
+Stacker ships targeted tests for the new User Service marketplace integrations. Run them with:
+
+```
+cargo test user_service_client
+cargo test marketplace_webhook
+cargo test deployment_validator
+```
+
+Each suite uses WireMock-backed HTTP servers, so they run offline and cover the actual request/response flows for the connector, webhook sender, and deployment validator.
+
+
## CURL examples
+
+
+#### Authentication
+
+
+curl -X POST
+
+
#### Rate Product
```
@@ -76,12 +236,30 @@ sqlx migrate revert
#### Deploy
```
-curl -X POST -H "Content-Type: application/json" -d @custom-stack-payload-2.json http://127.0.0.1:8000/stack
+curl -X POST -H "Content-Type: application/json" -d @tests/mock_data/custom-stack-payload.json http://127.0.0.1:8000/project -H "Authorization: Bearer $TD_BEARER"
```
#### Create API Client
+```
curl -X POST http://localhost:8000/client --header 'Content-Type: application/json' -H "Authorization: Bearer $TD_BEARER"
+```
+
test client deploy
-http://localhost:8000/test/deploy
\ No newline at end of file
+http://localhost:8000/test/deploy
+
+
+Test casbin rule
+```
+cargo r --bin console --features=explain debug casbin --path /client --action POST --subject admin_petru
+```
+
+
+
+"cargo sqlx prepare" requires setting the DATABASE_URL environment variable to a valid database URL.
+
+## TODOs
+```
+export DATABASE_URL=postgres://postgres:postgres@localhost:5432/stacker
+```
diff --git a/TODO.md b/TODO.md
new file mode 100644
index 0000000..717a2eb
--- /dev/null
+++ b/TODO.md
@@ -0,0 +1,994 @@
+# TODO: Stacker Marketplace Payment Integration
+
+> Canonical note: keep all Stacker TODO updates in this file (`stacker/TODO.md`); do not create or update a separate `STACKER_TODO.md` going forward.
+
+## Context
+Per [PAYMENT_MODEL.md](/PAYMENT_MODEL.md), Stacker now sends webhooks to User Service when templates are published/updated. User Service owns the `products` table for monetization, while Stacker owns `stack_template` (template definitions only).
+
+### New Open Questions (Status Panel & MCP)
+
+**Status**: ✅ PROPOSED ANSWERS DOCUMENTED
+**See**: [OPEN_QUESTIONS_RESOLUTIONS.md](docs/OPEN_QUESTIONS_RESOLUTIONS.md)
+
+**Questions** (awaiting team confirmation):
+- Health check contract per app: exact URL/expected status/timeout that Status Panel should register and return.
+- Per-app deploy trigger rate limits: allowed requests per minute/hour to expose in User Service.
+- Log redaction patterns: which env var names/secret regexes to strip before returning logs via Stacker/User Service.
+- Container→app_code mapping: confirm canonical source (deployment_apps.metadata.container_name) for Status Panel health/logs responses.
+
+**Current Proposals**:
+1. **Health Check**: `GET /api/health/deployment/{deployment_hash}/app/{app_code}` with 10s timeout
+2. **Rate Limits**: Deploy 10/min, Restart 5/min, Logs 20/min (configurable by plan tier)
+3. **Log Redaction**: 6 pattern categories + 20 env var blacklist (regex-based)
+4. **Container Mapping**: `app_code` is canonical; requires `deployment_apps` table in User Service
+
+### Status Panel Command Payloads (proposed)
+- Commands flow over existing agent endpoints (`/api/v1/commands/execute` or `/enqueue`) signed with HMAC headers from `AgentClient`.
+- **Health** request:
+ ```json
+ {"type":"health","deployment_hash":"","app_code":"","include_metrics":true}
+ ```
+ **Health report** (agent → `/api/v1/commands/report`):
+ ```json
+ {"type":"health","deployment_hash":"","app_code":"","status":"ok|unhealthy|unknown","container_state":"running|exited|starting|unknown","last_heartbeat_at":"2026-01-09T00:00:00Z","metrics":{"cpu_pct":0.12,"mem_mb":256},"errors":[]}
+ ```
+- **Logs** request:
+ ```json
+ {"type":"logs","deployment_hash":"","app_code":"","cursor":"","limit":400,"streams":["stdout","stderr"],"redact":true}
+ ```
+ **Logs report**:
+ ```json
+ {"type":"logs","deployment_hash":"","app_code":"","cursor":"","lines":[{"ts":"2026-01-09T00:00:00Z","stream":"stdout","message":"...","redacted":false}],"truncated":false}
+ ```
+- **Restart** request:
+ ```json
+ {"type":"restart","deployment_hash":"","app_code":"","force":false}
+ ```
+ **Restart report**:
+ ```json
+ {"type":"restart","deployment_hash":"","app_code":"","status":"ok|failed","container_state":"running|failed|unknown","errors":[]}
+ ```
+- Errors: agent reports `{ "type":"", "deployment_hash":..., "app_code":..., "status":"failed", "errors":[{"code":"timeout","message":"..."}] }`.
+- Tasks progress:
+ 1. ✅ add schemas/validation for these command payloads → implemented in `src/forms/status_panel.rs` and enforced via `/api/v1/commands` create/report handlers.
+ 2. ✅ document in agent docs → see `docs/AGENT_REGISTRATION_SPEC.md`, `docs/STACKER_INTEGRATION_REQUIREMENTS.md`, and `docs/QUICK_REFERENCE.md` (field reference + auth note).
+ 3. ✅ expose in Stacker UI/Status Panel integration notes → new `docs/STATUS_PANEL_INTEGRATION_NOTES.md` consumed by dashboard team.
+ 4. ⏳ ensure Vault token/HMAC headers remain the auth path (UI + ops playbook updates pending).
+
+### Dynamic Agent Capabilities Endpoint
+- [x] Expose `GET /api/v1/deployments/{deployment_hash}/capabilities` returning available commands based on `agents.capabilities` JSONB (implemented in `routes::deployment::capabilities_handler`).
+- [x] Define command→capability mapping (static config) embedded in the handler:
+ ```json
+ {
+ "restart": { "requires": "docker", "scope": "container", "label": "Restart", "icon": "fas fa-redo" },
+ "start": { "requires": "docker", "scope": "container", "label": "Start", "icon": "fas fa-play" },
+ "stop": { "requires": "docker", "scope": "container", "label": "Stop", "icon": "fas fa-stop" },
+ "pause": { "requires": "docker", "scope": "container", "label": "Pause", "icon": "fas fa-pause" },
+ "logs": { "requires": "logs", "scope": "container", "label": "Logs", "icon": "fas fa-file-alt" },
+ "rebuild": { "requires": "compose", "scope": "deployment", "label": "Rebuild Stack", "icon": "fas fa-sync" },
+ "backup": { "requires": "backup", "scope": "deployment", "label": "Backup", "icon": "fas fa-download" }
+ }
+ ```
+- [x] Return only commands whose `requires` capability is present in the agent's capabilities array (see `filter_commands` helper).
+- [x] Include agent status (online/offline) and last_heartbeat plus existing metadata in the response so Blog can gate UI.
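+
+A rough sketch of that filtering step (illustrative only; the real helper lives alongside `routes::deployment::capabilities_handler`):
+
+```rust
+use serde_json::Value;
+
+/// Illustrative: keep only commands whose `requires` capability is present
+/// in the agent's capabilities array.
+fn filter_commands(mapping: &Value, capabilities: &[String]) -> Vec<String> {
+    mapping
+        .as_object()
+        .map(|cmds| {
+            cmds.iter()
+                .filter(|(_, spec)| {
+                    spec["requires"]
+                        .as_str()
+                        .map(|req| capabilities.iter().any(|c| c.as_str() == req))
+                        .unwrap_or(false)
+                })
+                .map(|(name, _)| name.clone())
+                .collect()
+        })
+        .unwrap_or_default()
+}
+```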
+
+### Pull-Only Command Architecture (No Push)
+**Key principle**: Stacker never dials out to agents. Commands are enqueued in the database; agents poll and sign their own requests.
+- [x] `POST /api/v1/agent/commands/enqueue` validates user auth, inserts into `commands` + `command_queue` tables, returns 202. No outbound HTTP to agent.
+- [x] Agent polls `GET /api/v1/agent/commands/wait/{deployment_hash}` with HMAC headers it generates using its Vault-fetched token.
+- [x] Stacker verifies agent's HMAC, returns queued commands.
+- [x] Agent executes locally and calls `POST /api/v1/agent/commands/report` (HMAC-signed).
+- [x] Remove any legacy `agent_dispatcher::execute/enqueue` code that attempted to push to agents; keep only `rotate_token` for Vault token management.
+- [x] Document that `AGENT_BASE_URL` env var is NOT required for Status Panel; Stacker is server-only (see README.md).
+
+### Dual Endpoint Strategy (Status Panel + Compose Agent)
+- [ ] Maintain legacy proxy routes under `/api/v1/deployments/{hash}/containers/*` for hosts without Compose Agent; ensure regression tests continue to cover restart/start/stop/logs flows.
+- [ ] Add Compose control-plane routes (`/api/v1/compose/{hash}/status|logs|restart|metrics`) that translate into cagent API calls using the new `compose_agent_token` from Vault.
+- [ ] For Compose Agent path only: `agent_dispatcher` may push commands if cagent exposes an HTTP API; this is the exception, not the rule.
+- [ ] Return `"compose_agent": true|false` in `/capabilities` response plus a `"fallback_reason"` field when Compose Agent is unavailable (missing registration, unhealthy heartbeat, token fetch failure).
+- [ ] Write ops playbook entry + automated alert when Compose Agent is offline for >15 minutes so we can investigate hosts stuck on the legacy path.
+
+### Coordination Note
+Sub-agents can communicate with the team lead via the shared memory tool (see /memories/subagents.md). If questions remain, record them in TODO.md and log work in CHANGELOG.md.
+
+### Nginx Proxy Routing
+**Browser → Stacker** (via nginx): `https://dev.try.direct/stacker/` → `stacker:8000`
+**Stacker → User Service** (internal): `http://user:4100/marketplace/sync` (no nginx prefix)
+**Stacker → Payment Service** (internal): `http://payment:8000/` (no nginx prefix)
+
+Stacker responsibilities:
+1. **Maintain `stack_template` table** (template definitions, no pricing/monetization)
+2. **Send webhook to User Service** when template status changes (approved, updated, rejected)
+3. **Query User Service** for product information (pricing, vendor, etc.)
+4. **Validate deployments** against User Service product ownership
+
+## Improvements
+### Top improvements
+- [x] Cache OAuth token validation in Stacker (30–60s TTL) to avoid a User Service call on every request.
+- [x] Reuse/persist the HTTP client with keep-alive and a shared connection pool for User Service; avoid starting new connections per request.
+- [x] Stop reloading Casbin policies on every request; reload on policy change.
+- [x] Reduce polling frequency and batch command status queries; prefer streaming/long-poll responses.
+- [ ] Add server-side aggregation: return only latest command states instead of fetching full 150+ rows each time.
+- [x] Add gzip/br on internal HTTP responses and trim response payloads.
+- [x] Co-locate Stacker and User Service (same network/region) or use private networking to cut latency.
+
+### Backlog hygiene
+- [ ] Capture ongoing UX friction points from Stack Builder usage and log them here.
+- [ ] Track recurring operational pain points (timeouts, retries, auth failures) for batch fixes.
+- [ ] Record documentation gaps that slow down onboarding or integration work.
+
+## Tasks
+
+### Data Contract Notes (2026-01-04)
+- `project_id` in Stacker is the same identifier as `stack_id` in the User Service `installation` table; use it to link records across services.
+- Include `deployment_hash` from Stacker in payloads sent to Install Service (RabbitMQ) and User Service so both can track deployments by the unique deployment key. Coordinate with try.direct.tools to propagate this field through shared publishers/helpers.
+
+### 0. Setup ACL Rules Migration (User Service)
+**File**: `migrations/setup_acl_rules.py` (in Stacker repo)
+
+**Purpose**: Automatically configure Casbin ACL rules in User Service for Stacker endpoints
+
+**Required Casbin rules** (to be inserted in User Service `casbin_rule` table):
+```python
+# Allow root/admin to manage marketplace templates via Stacker
+rules = [
+ ('p', 'root', '/templates', 'POST', '', '', ''), # Create template
+ ('p', 'root', '/templates', 'GET', '', '', ''), # List templates
+ ('p', 'root', '/templates/*', 'GET', '', '', ''), # View template
+ ('p', 'root', '/templates/*', 'PUT', '', '', ''), # Update template
+ ('p', 'root', '/templates/*', 'DELETE', '', '', ''), # Delete template
+ ('p', 'admin', '/templates', 'POST', '', '', ''),
+ ('p', 'admin', '/templates', 'GET', '', '', ''),
+ ('p', 'admin', '/templates/*', 'GET', '', '', ''),
+ ('p', 'admin', '/templates/*', 'PUT', '', '', ''),
+ ('p', 'developer', '/templates', 'POST', '', '', ''), # Developers can create
+ ('p', 'developer', '/templates', 'GET', '', '', ''), # Developers can list own
+]
+```
+
+**Implementation**:
+- Run as part of Stacker setup/init
+- Connect to User Service database
+- Insert rules if not exist (idempotent)
+- **Status**: NOT STARTED
+- **Priority**: HIGH (Blocks template creation via Stack Builder)
+- **ETA**: 30 minutes
+
+### 0.5. Add Category Table Fields & Sync (Stacker)
+**File**: `migrations/add_category_fields.py` (in Stacker repo)
+
+**Purpose**: Add missing fields to Stacker's local `category` table and sync from User Service
+
+**Migration Steps**:
+1. Add `title VARCHAR(255)` column to `category` table (currently only has `id`, `name`)
+2. Add `metadata JSONB` column for flexible category data
+3. Create `UserServiceConnector.sync_categories()` method
+4. On application startup: Fetch categories from User Service `GET http://user:4100/api/1.0/category`
+5. Populate/update local `category` table:
+ - Map User Service `name` → Stacker `name` (code)
+ - Map User Service `title` → Stacker `title`
+ - Store additional data in `metadata` JSONB
+
+**Example sync**:
+```python
+# User Service category
+{"_id": 5, "name": "ai", "title": "AI Agents", "priority": 5}
+
+# Stacker local category (after sync)
+{"id": 5, "name": "ai", "title": "AI Agents", "metadata": {"priority": 5}}
+```
+
+**Status**: NOT STARTED
+**Priority**: HIGH (Required for Stack Builder UI)
+**ETA**: 1 hour
+
+### 1. Create User Service Connector
+**File**: `app//connectors/user_service_connector.py` (in Stacker repo)
+
+**Required methods**:
+```python
+class UserServiceConnector:
+ def get_categories(self) -> list:
+ """
+ GET http://user:4100/api/1.0/category
+
+ Returns list of available categories for stack classification:
+ [
+ {"_id": 1, "name": "cms", "title": "CMS", "priority": 1},
+ {"_id": 2, "name": "ecommerce", "title": "E-commerce", "priority": 2},
+ {"_id": 5, "name": "ai", "title": "AI Agents", "priority": 5}
+ ]
+
+ Used by: Stack Builder UI to populate category dropdown
+ """
+ pass
+
+ def get_user_profile(self, user_token: str) -> dict:
+ """
+ GET http://user:4100/oauth_server/api/me
+ Headers: Authorization: Bearer {user_token}
+
+ Returns:
+ {
+ "email": "user@example.com",
+ "plan": {
+ "name": "plus",
+ "date_end": "2026-01-30"
+ },
+ "products": [
+ {
+ "product_id": "uuid",
+ "product_type": "template",
+ "code": "ai-agent-stack",
+ "external_id": 12345, # stack_template.id from Stacker
+ "name": "AI Agent Stack",
+ "price": "99.99",
+ "owned_since": "2025-01-15T..."
+ }
+ ]
+ }
+ """
+ pass
+
+ def get_template_product(self, stack_template_id: int) -> dict:
+ """
+ GET http://user:4100/api/1.0/products?external_id={stack_template_id}&product_type=template
+
+ Returns product info for a marketplace template (pricing, vendor, etc.)
+ """
+ pass
+
+ def user_owns_template(self, user_token: str, stack_template_id: int) -> bool:
+ """
+ Check if user has purchased/owns this marketplace template
+ """
+ profile = self.get_user_profile(user_token)
+ return any(p['external_id'] == stack_template_id and p['product_type'] == 'template'
+ for p in profile.get('products', []))
+```
+
+**Implementation Note**: Use OAuth2 token that Stacker already has for the user.
+
+### 2. Create Webhook Sender to User Service (Marketplace Sync)
+**File**: `app//webhooks/marketplace_webhook.py` (in Stacker repo)
+
+**When template status changes** (approved, updated, rejected):
+```python
+import requests
+from os import environ
+
+class MarketplaceWebhookSender:
+ """
+ Send template sync webhooks to User Service
+ Mirrors PAYMENT_MODEL.md Flow 3: Stacker template changes → User Service products
+ """
+
+ def send_template_approved(self, stack_template: dict, vendor_user: dict):
+ """
+ POST http://user:4100/marketplace/sync
+
+ Body:
+ {
+ "action": "template_approved",
+ "stack_template_id": 12345,
+ "external_id": 12345, # Same as stack_template_id
+ "code": "ai-agent-stack-pro",
+ "name": "AI Agent Stack Pro",
+ "description": "Advanced AI agent deployment...",
+ "category_code": "ai", # String code from local category.name (not ID)
+ "price": 99.99,
+ "billing_cycle": "one_time", # or "monthly"
+ "currency": "USD",
+ "vendor_user_id": 456,
+ "vendor_name": "John Doe"
+ }
+ """
+ headers = {'Authorization': f'Bearer {self.get_service_token()}'}
+
+ payload = {
+ 'action': 'template_approved',
+ 'stack_template_id': stack_template['id'],
+ 'external_id': stack_template['id'],
+ 'code': stack_template.get('code'),
+ 'name': stack_template.get('name'),
+ 'description': stack_template.get('description'),
+ 'category_code': stack_template.get('category'), # String code (e.g., "ai", "cms")
+ 'price': stack_template.get('price'),
+ 'billing_cycle': stack_template.get('billing_cycle', 'one_time'),
+ 'currency': stack_template.get('currency', 'USD'),
+ 'vendor_user_id': vendor_user['id'],
+ 'vendor_name': vendor_user.get('full_name', vendor_user.get('email'))
+ }
+
+ response = requests.post(
+ f"{environ['URL_SERVER_USER']}/marketplace/sync",
+ json=payload,
+ headers=headers
+ )
+
+ if response.status_code != 200:
+ raise Exception(f"Webhook send failed: {response.text}")
+
+ return response.json()
+
+ def send_template_updated(self, stack_template: dict, vendor_user: dict):
+ """Send template updated webhook (same format as approved)"""
+ payload = {...}
+ payload['action'] = 'template_updated'
+ # Send like send_template_approved()
+
+ def send_template_rejected(self, stack_template: dict):
+ """
+ Notify User Service to deactivate product
+
+ Body:
+ {
+ "action": "template_rejected",
+ "stack_template_id": 12345
+ }
+ """
+ headers = {'Authorization': f'Bearer {self.get_service_token()}'}
+
+ payload = {
+ 'action': 'template_rejected',
+ 'stack_template_id': stack_template['id']
+ }
+
+ response = requests.post(
+ f"{environ['URL_SERVER_USER']}/marketplace/sync",
+ json=payload,
+ headers=headers
+ )
+
+ return response.json()
+
+ @staticmethod
+ def get_service_token() -> str:
+ """Get Bearer token for service-to-service communication"""
+ # Option 1: Use static bearer token
+ return environ.get('STACKER_SERVICE_TOKEN')
+
+ # Option 2: Use OAuth2 client credentials flow (preferred)
+ # See User Service `.github/copilot-instructions.md` for setup
+```
+
+**Integration points** (where to call webhook sender):
+
+1. **When template is approved by admin**:
+```python
+def approve_template(template_id: int):
+ template = StackTemplate.query.get(template_id)
+ vendor = User.query.get(template.created_by_user_id)
+ template.status = 'approved'
+ db.session.commit()
+
+ # Send webhook to User Service to create product
+ webhook_sender = MarketplaceWebhookSender()
+ webhook_sender.send_template_approved(template.to_dict(), vendor.to_dict())
+```
+
+2. **When template is updated**:
+```python
+def update_template(template_id: int, updates: dict):
+ template = StackTemplate.query.get(template_id)
+ template.update(updates)
+ db.session.commit()
+
+ if template.status == 'approved':
+ vendor = User.query.get(template.created_by_user_id)
+ webhook_sender = MarketplaceWebhookSender()
+ webhook_sender.send_template_updated(template.to_dict(), vendor.to_dict())
+```
+
+3. **When template is rejected**:
+```python
+def reject_template(template_id: int):
+ template = StackTemplate.query.get(template_id)
+ template.status = 'rejected'
+ db.session.commit()
+
+ webhook_sender = MarketplaceWebhookSender()
+ webhook_sender.send_template_rejected(template.to_dict())
+```
+
+### 3. Add Deployment Validation
+**File**: `app//services/deployment_service.py` (update existing)
+
+**Before allowing deployment, validate**:
+```python
+from .connectors.user_service_connector import UserServiceConnector
+
+class DeploymentValidator:
+ def validate_marketplace_template(self, stack_template: dict, user_token: str):
+ """
+ Check if user can deploy this marketplace template
+
+ If template has a product in User Service:
+ - Check if user owns product (in user_products table)
+ - If not owned, block deployment
+ """
+ connector = UserServiceConnector()
+
+ # If template is not marketplace template, allow deployment
+ if not stack_template.get('is_from_marketplace'):
+ return True
+
+ # Check if template has associated product
+ template_id = stack_template['id']
+ product_info = connector.get_template_product(template_id)
+
+ if not product_info:
+ # No product = free marketplace template, allow deployment
+ return True
+
+ # Check if user owns this template product
+ user_owns = connector.user_owns_template(user_token, template_id)
+
+ if not user_owns:
+ raise TemplateNotPurchasedError(
+ f"This verified pro stack requires purchase. "
+ f"Price: ${product_info.get('price')}. "
+ f"Please purchase from User Service."
+ )
+
+ return True
+```
+
+**Integrate into deployment flow**:
+```python
+def start_deployment(template_id: int, user_token: str):
+ template = StackTemplate.query.get(template_id)
+
+ # Validate permission to deploy this template
+ validator = DeploymentValidator()
+ validator.validate_marketplace_template(template.to_dict(), user_token)
+
+ # Continue with deployment...
+```
+
+## Environment Variables Needed (Stacker)
+Add to Stacker's `.env`:
+```bash
+# User Service
+URL_SERVER_USER=http://user:4100/
+
+# Service-to-service auth token (for webhook sender)
+STACKER_SERVICE_TOKEN=
+
+# Or use OAuth2 client credentials (preferred)
+STACKER_CLIENT_ID=
+STACKER_CLIENT_SECRET=
+```
+
+## Testing Checklist
+
+### Unit Tests
+- [ ] `test_user_service_connector.py`:
+ - [ ] `get_user_profile()` returns user with products list
+ - [ ] `get_template_product()` returns product info
+ - [ ] `user_owns_template()` returns correct boolean
+- [ ] `test_marketplace_webhook_sender.py`:
+ - [ ] `send_template_approved()` sends correct webhook payload
+ - [ ] `send_template_updated()` sends correct webhook payload
+ - [ ] `send_template_rejected()` sends correct webhook payload
+ - [ ] `get_service_token()` returns valid bearer token
+- [ ] `test_deployment_validator.py`:
+ - [ ] `validate_marketplace_template()` allows free templates
+ - [ ] `validate_marketplace_template()` allows user-owned paid templates
+ - [ ] `validate_marketplace_template()` blocks non-owned paid templates
+ - [ ] Raises `TemplateNotPurchasedError` with correct message
+
+### Integration Tests
+- [ ] `test_template_approval_flow.py`:
+ - [ ] Admin approves template in Stacker
+ - [ ] Webhook sent to User Service `/marketplace/sync`
+ - [ ] User Service creates product
+ - [ ] `/oauth_server/api/me` includes new product
+- [ ] `test_template_update_flow.py`:
+ - [ ] Vendor updates template in Stacker
+ - [ ] Webhook sent to User Service
+ - [ ] Product updated in User Service
+- [ ] `test_template_rejection_flow.py`:
+ - [ ] Admin rejects template
+ - [ ] Webhook sent to User Service
+ - [ ] Product deactivated in User Service
+- [ ] `test_deployment_validation_flow.py`:
+ - [ ] User can deploy free marketplace template
+ - [ ] User cannot deploy paid template without purchase
+ - [ ] User can deploy paid template after product purchase
+ - [ ] Correct error messages in each scenario
+
+### Manual Testing
+- [ ] Stacker can query User Service `/oauth_server/api/me` (with real user token)
+- [ ] Stacker connector returns user profile with products list
+- [ ] Approve template in Stacker admin → webhook sent to User Service
+- [ ] User Service `/marketplace/sync` creates product
+- [ ] Product appears in `/api/1.0/products` endpoint
+- [ ] Deployment validation blocks unpurchased paid templates
+- [ ] Deployment validation allows owned paid templates
+- [ ] All environment variables configured correctly
+
+## Coordination
+
+**Dependencies**:
+1. ✅ User Service - `/marketplace/sync` webhook endpoint (created in User Service TODO)
+2. ✅ User Service - `products` + `user_products` tables (created in User Service TODO)
+3. ⏳ Stacker - User Service connector + webhook sender (THIS TODO)
+4. ✅ Payment Service - No changes needed (handles all webhooks same way)
+
+**Service Interaction Flow**:
+
+```
+Vendor Creates Template in Stacker
+ ↓
+Admin Approves in Stacker
+ ↓
+Stacker calls MarketplaceWebhookSender.send_template_approved()
+ ↓
+POST http://user:4100/marketplace/sync
+ {
+ "action": "template_approved",
+ "stack_template_id": 12345,
+ "price": 99.99,
+ "vendor_user_id": 456,
+ ...
+ }
+ ↓
+User Service creates `products` row
+ (product_type='template', external_id=12345, vendor_id=456, price=99.99)
+ ↓
+Template now available in User Service `/api/1.0/products?product_type=template`
+ ↓
+Blog queries User Service for marketplace templates
+ ↓
+User views template in marketplace, clicks "Deploy"
+ ↓
+User pays (Payment Service handles all payment flows)
+ ↓
+Payment Service webhook → User Service (adds row to `user_products`)
+ ↓
+Stacker queries User Service `/oauth_server/api/me`
+ ↓
+User Service returns products list (includes newly purchased template)
+ ↓
+DeploymentValidator.validate_marketplace_template() checks ownership
+ ↓
+Deployment proceeds (user owns product)
+```
+
+## Notes
+
+**Architecture Decisions**:
+1. Stacker only sends webhooks to User Service (no bi-directional queries)
+2. User Service owns monetization logic (products table)
+3. Payment Service forwards webhooks to User Service (same handler for all product types)
+4. `stack_template.id` (Stacker) links to `products.external_id` (User Service) via webhook
+5. Deployment validation queries User Service for product ownership
+
+**Key Points**:
+- DO NOT store pricing in Stacker `stack_template` table
+- DO NOT create products table in Stacker (they're in User Service)
+- DO send webhooks to User Service when template status changes
+- DO use Bearer token for service-to-service auth in webhooks
+- Webhook sender is simpler than Stacker querying User Service (one-way communication)
+
+## Timeline Estimate
+
+- Phase 1 (User Service connector): 1-2 hours
+- Phase 2 (Webhook sender): 1-2 hours
+- Phase 3 (Deployment validation): 1-2 hours
+- Phase 4 (Testing): 3-4 hours
+- **Total**: 6-10 hours (~1 day)
+
+## Reference Files
+- [PAYMENT_MODEL.md](/PAYMENT_MODEL.md) - Architecture
+- [try.direct.user.service/TODO.md](try.direct.user.service/TODO.md) - User Service implementation
+- [try.direct.tools/TODO.md](try.direct.tools/TODO.md) - Shared utilities
+- [blog/TODO.md](blog/TODO.md) - Frontend marketplace UI
+
+---
+
+## Synced copy from /STACKER_TODO.md (2026-01-03)
+
+# TODO: Stacker Marketplace Payment Integration
+
+## Context
+Per [PAYMENT_MODEL.md](/PAYMENT_MODEL.md), Stacker now sends webhooks to User Service when templates are published/updated. User Service owns the `products` table for monetization, while Stacker owns `stack_template` (template definitions only).
+
+Stacker responsibilities:
+1. **Maintain `stack_template` table** (template definitions, no pricing/monetization)
+2. **Send webhook to User Service** when template status changes (approved, updated, rejected)
+3. **Query User Service** for product information (pricing, vendor, etc.)
+4. **Validate deployments** against User Service product ownership
+
+## Tasks
+
+### Bugfix: Return clear duplicate slug error
+- [ ] When `stack_template.slug` violates uniqueness (code 23505), return 409/400 with a descriptive message (e.g., "slug already exists") instead of 500 so clients (blog/stack-builder) can surface a user-friendly error.
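+
+A minimal sketch of the intended mapping (illustrative; assumes the handler can inspect the `sqlx` error and responds via actix-web):
+
+```rust
+use actix_web::HttpResponse;
+
+/// Illustrative: translate a Postgres unique violation (SQLSTATE 23505) on
+/// stack_template.slug into a 409 with a readable message instead of a 500.
+fn map_slug_insert_error(err: sqlx::Error) -> HttpResponse {
+    match &err {
+        sqlx::Error::Database(db) if db.code().as_deref() == Some("23505") => {
+            HttpResponse::Conflict().json(serde_json::json!({ "error": "slug already exists" }))
+        }
+        _ => HttpResponse::InternalServerError().finish(),
+    }
+}
+```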
+
+### 1. Create User Service Connector
+**File**: `app//connectors/user_service_connector.py` (in Stacker repo)
+
+**Required methods**:
+```python
+class UserServiceConnector:
+ def get_user_profile(self, user_token: str) -> dict:
+ """
+ GET http://user:4100/oauth_server/api/me
+ Headers: Authorization: Bearer {user_token}
+
+ Returns:
+ {
+ "email": "user@example.com",
+ "plan": {
+ "name": "plus",
+ "date_end": "2026-01-30"
+ },
+ "products": [
+ {
+ "product_id": "uuid",
+ "product_type": "template",
+ "code": "ai-agent-stack",
+ "external_id": 12345, # stack_template.id from Stacker
+ "name": "AI Agent Stack",
+ "price": "99.99",
+ "owned_since": "2025-01-15T..."
+ }
+ ]
+ }
+ """
+ pass
+
+ def get_template_product(self, stack_template_id: int) -> dict:
+ """
+ GET http://user:4100/api/1.0/products?external_id={stack_template_id}&product_type=template
+
+ Returns product info for a marketplace template (pricing, vendor, etc.)
+ """
+ pass
+
+ def user_owns_template(self, user_token: str, stack_template_id: int) -> bool:
+ """
+ Check if user has purchased/owns this marketplace template
+ """
+ profile = self.get_user_profile(user_token)
+ return any(p['external_id'] == stack_template_id and p['product_type'] == 'template'
+ for p in profile.get('products', []))
+```
+
+**Implementation Note**: Use OAuth2 token that Stacker already has for the user.
+
+### 2. Create Webhook Sender to User Service (Marketplace Sync)
+**File**: `app//webhooks/marketplace_webhook.py` (in Stacker repo)
+
+**When template status changes** (approved, updated, rejected):
+```python
+import requests
+from os import environ
+
+class MarketplaceWebhookSender:
+ """
+ Send template sync webhooks to User Service
+ Mirrors PAYMENT_MODEL.md Flow 3: Stacker template changes → User Service products
+ """
+
+ def send_template_approved(self, stack_template: dict, vendor_user: dict):
+ """
+ POST http://user:4100/marketplace/sync
+
+ Body:
+ {
+ "action": "template_approved",
+ "stack_template_id": 12345,
+ "external_id": 12345, # Same as stack_template_id
+ "code": "ai-agent-stack-pro",
+ "name": "AI Agent Stack Pro",
+ "description": "Advanced AI agent deployment...",
+ "price": 99.99,
+ "billing_cycle": "one_time", # or "monthly"
+ "currency": "USD",
+ "vendor_user_id": 456,
+ "vendor_name": "John Doe"
+ }
+ """
+ headers = {'Authorization': f'Bearer {self.get_service_token()}'}
+
+ payload = {
+ 'action': 'template_approved',
+ 'stack_template_id': stack_template['id'],
+ 'external_id': stack_template['id'],
+ 'code': stack_template.get('code'),
+ 'name': stack_template.get('name'),
+ 'description': stack_template.get('description'),
+ 'price': stack_template.get('price'),
+ 'billing_cycle': stack_template.get('billing_cycle', 'one_time'),
+ 'currency': stack_template.get('currency', 'USD'),
+ 'vendor_user_id': vendor_user['id'],
+ 'vendor_name': vendor_user.get('full_name', vendor_user.get('email'))
+ }
+
+ response = requests.post(
+ f"{environ['URL_SERVER_USER']}/marketplace/sync",
+ json=payload,
+ headers=headers
+ )
+
+ if response.status_code != 200:
+ raise Exception(f"Webhook send failed: {response.text}")
+
+ return response.json()
+
+ def send_template_updated(self, stack_template: dict, vendor_user: dict):
+ """Send template updated webhook (same format as approved)"""
+ payload = {...}
+ payload['action'] = 'template_updated'
+ # Send like send_template_approved()
+
+ def send_template_rejected(self, stack_template: dict):
+ """
+ Notify User Service to deactivate product
+
+ Body:
+ {
+ "action": "template_rejected",
+ "stack_template_id": 12345
+ }
+ """
+ headers = {'Authorization': f'Bearer {self.get_service_token()}'}
+
+ payload = {
+ 'action': 'template_rejected',
+ 'stack_template_id': stack_template['id']
+ }
+
+ response = requests.post(
+ f"{environ['URL_SERVER_USER']}/marketplace/sync",
+ json=payload,
+ headers=headers
+ )
+
+ return response.json()
+
+ @staticmethod
+ def get_service_token() -> str:
+ """Get Bearer token for service-to-service communication"""
+ # Option 1: Use static bearer token
+ return environ.get('STACKER_SERVICE_TOKEN')
+
+ # Option 2: Use OAuth2 client credentials flow (preferred)
+ # See User Service `.github/copilot-instructions.md` for setup
+```
+
+**Integration points** (where to call webhook sender):
+
+1. **When template is approved by admin**:
+```python
+def approve_template(template_id: int):
+ template = StackTemplate.query.get(template_id)
+ vendor = User.query.get(template.created_by_user_id)
+ template.status = 'approved'
+ db.session.commit()
+
+ # Send webhook to User Service to create product
+ webhook_sender = MarketplaceWebhookSender()
+ webhook_sender.send_template_approved(template.to_dict(), vendor.to_dict())
+```
+
+2. **When template is updated**:
+```python
+def update_template(template_id: int, updates: dict):
+ template = StackTemplate.query.get(template_id)
+ template.update(updates)
+ db.session.commit()
+
+ if template.status == 'approved':
+ vendor = User.query.get(template.created_by_user_id)
+ webhook_sender = MarketplaceWebhookSender()
+ webhook_sender.send_template_updated(template.to_dict(), vendor.to_dict())
+```
+
+3. **When template is rejected**:
+```python
+def reject_template(template_id: int):
+ template = StackTemplate.query.get(template_id)
+ template.status = 'rejected'
+ db.session.commit()
+
+ webhook_sender = MarketplaceWebhookSender()
+ webhook_sender.send_template_rejected(template.to_dict())
+```
+
+### 3. Add Deployment Validation
+**File**: `app//services/deployment_service.py` (update existing)
+
+**Before allowing deployment, validate**:
+```python
+from .connectors.user_service_connector import UserServiceConnector
+
+class DeploymentValidator:
+ def validate_marketplace_template(self, stack_template: dict, user_token: str):
+ """
+ Check if user can deploy this marketplace template
+
+ If template has a product in User Service:
+ - Check if user owns product (in user_products table)
+ - If not owned, block deployment
+ """
+ connector = UserServiceConnector()
+
+ # If template is not marketplace template, allow deployment
+ if not stack_template.get('is_from_marketplace'):
+ return True
+
+ # Check if template has associated product
+ template_id = stack_template['id']
+ product_info = connector.get_template_product(template_id)
+
+ if not product_info:
+ # No product = free marketplace template, allow deployment
+ return True
+
+ # Check if user owns this template product
+ user_owns = connector.user_owns_template(user_token, template_id)
+
+ if not user_owns:
+ raise TemplateNotPurchasedError(
+ f"This verified pro stack requires purchase. "
+ f"Price: ${product_info.get('price')}. "
+ f"Please purchase from User Service."
+ )
+
+ return True
+```
+
+**Integrate into deployment flow**:
+```python
+def start_deployment(template_id: int, user_token: str):
+ template = StackTemplate.query.get(template_id)
+
+ # Validate permission to deploy this template
+ validator = DeploymentValidator()
+ validator.validate_marketplace_template(template.to_dict(), user_token)
+
+ # Continue with deployment...
+```
+
+## Environment Variables Needed (Stacker)
+Add to Stacker's `.env`:
+```bash
+# User Service
+URL_SERVER_USER=http://user:4100/
+
+# Service-to-service auth token (for webhook sender)
+STACKER_SERVICE_TOKEN=
+
+# Or use OAuth2 client credentials (preferred)
+STACKER_CLIENT_ID=
+STACKER_CLIENT_SECRET=
+```
+
+## Testing Checklist
+
+### Unit Tests
+- [ ] `test_user_service_connector.py`:
+ - [ ] `get_user_profile()` returns user with products list
+ - [ ] `get_template_product()` returns product info
+ - [ ] `user_owns_template()` returns correct boolean
+- [ ] `test_marketplace_webhook_sender.py`:
+ - [ ] `send_template_approved()` sends correct webhook payload
+ - [ ] `send_template_updated()` sends correct webhook payload
+ - [ ] `send_template_rejected()` sends correct webhook payload
+ - [ ] `get_service_token()` returns valid bearer token
+- [ ] `test_deployment_validator.py`:
+ - [ ] `validate_marketplace_template()` allows free templates
+ - [ ] `validate_marketplace_template()` allows user-owned paid templates
+ - [ ] `validate_marketplace_template()` blocks non-owned paid templates
+ - [ ] Raises `TemplateNotPurchasedError` with correct message
+
+### Integration Tests
+- [ ] `test_template_approval_flow.py`:
+ - [ ] Admin approves template in Stacker
+ - [ ] Webhook sent to User Service `/marketplace/sync`
+ - [ ] User Service creates product
+ - [ ] `/oauth_server/api/me` includes new product
+- [ ] `test_template_update_flow.py`:
+ - [ ] Vendor updates template in Stacker
+ - [ ] Webhook sent to User Service
+ - [ ] Product updated in User Service
+- [ ] `test_template_rejection_flow.py`:
+ - [ ] Admin rejects template
+ - [ ] Webhook sent to User Service
+ - [ ] Product deactivated in User Service
+- [ ] `test_deployment_validation_flow.py`:
+ - [ ] User can deploy free marketplace template
+ - [ ] User cannot deploy paid template without purchase
+ - [ ] User can deploy paid template after product purchase
+ - [ ] Correct error messages in each scenario
+
+### Manual Testing
+- [ ] Stacker can query User Service `/oauth_server/api/me` (with real user token)
+- [ ] Stacker connector returns user profile with products list
+- [ ] Approve template in Stacker admin → webhook sent to User Service
+- [ ] User Service `/marketplace/sync` creates product
+- [ ] Product appears in `/api/1.0/products` endpoint
+- [ ] Deployment validation blocks unpurchased paid templates
+- [ ] Deployment validation allows owned paid templates
+- [ ] All environment variables configured correctly
+
+## Coordination
+
+**Dependencies**:
+1. ✅ User Service - `/marketplace/sync` webhook endpoint (created in User Service TODO)
+2. ✅ User Service - `products` + `user_products` tables (created in User Service TODO)
+3. ⏳ Stacker - User Service connector + webhook sender (THIS TODO)
+4. ✅ Payment Service - No changes needed (handles all product webhooks the same way)
+
+**Service Interaction Flow**:
+
+```
+Vendor Creates Template in Stacker
+ ↓
+Admin Approves in Stacker
+ ↓
+Stacker calls MarketplaceWebhookSender.send_template_approved()
+ ↓
+POST http://user:4100/marketplace/sync
+ {
+ "action": "template_approved",
+ "stack_template_id": 12345,
+ "price": 99.99,
+ "vendor_user_id": 456,
+ ...
+ }
+ ↓
+User Service creates `products` row
+ (product_type='template', external_id=12345, vendor_id=456, price=99.99)
+ ↓
+Template now available in User Service `/api/1.0/products?product_type=template`
+ ↓
+Blog queries User Service for marketplace templates
+ ↓
+User views template in marketplace, clicks "Deploy"
+ ↓
+User pays (Payment Service handles all payment flows)
+ ↓
+Payment Service webhook → User Service (adds row to `user_products`)
+ ↓
+Stacker queries User Service `/oauth_server/api/me`
+ ↓
+User Service returns products list (includes newly purchased template)
+ ↓
+DeploymentValidator.validate_marketplace_template() checks ownership
+ ↓
+Deployment proceeds (user owns product)
+```
+
+## Notes
+
+**Architecture Decisions**:
+1. For template lifecycle changes, Stacker only sends webhooks to User Service (User Service never queries Stacker)
+2. User Service owns monetization logic (products table)
+3. Payment Service forwards webhooks to User Service (same handler for all product types)
+4. `stack_template.id` (Stacker) links to `products.external_id` (User Service) via webhook
+5. Deployment validation queries User Service for product ownership
+
+**Key Points**:
+- DO NOT store pricing in Stacker `stack_template` table
+- DO NOT create products table in Stacker (they're in User Service)
+- DO send webhooks to User Service when template status changes
+- DO use Bearer token for service-to-service auth in webhooks
+- A one-way webhook sender is simpler than having Stacker query User Service to keep products in sync (see the sketch below)
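+
+Although the Stacker-side snippets in this TODO are sketched in Python, Stacker itself is a Rust service. A minimal sketch of the one-way, Bearer-authenticated sender, assuming `reqwest` with the `json` feature (struct layout and method body are illustrative; only the endpoint, auth scheme, and payload fields come from the flow above):
+
+```rust
+use serde_json::json;
+
+pub struct MarketplaceWebhookSender {
+    client: reqwest::Client,
+    user_service_url: String, // URL_SERVER_USER, e.g. http://user:4100/
+    service_token: String,    // STACKER_SERVICE_TOKEN
+}
+
+impl MarketplaceWebhookSender {
+    pub async fn send_template_approved(
+        &self,
+        stack_template_id: i64,
+        price: f64,
+        vendor_user_id: i64,
+    ) -> Result<(), reqwest::Error> {
+        // Payload mirrors the /marketplace/sync body shown in the flow diagram.
+        let payload = json!({
+            "action": "template_approved",
+            "stack_template_id": stack_template_id,
+            "price": price,
+            "vendor_user_id": vendor_user_id,
+        });
+
+        self.client
+            .post(format!("{}marketplace/sync", self.user_service_url))
+            .bearer_auth(&self.service_token) // service-to-service auth
+            .json(&payload)
+            .send()
+            .await?
+            .error_for_status()?;
+
+        Ok(())
+    }
+}
+```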
+
+## Timeline Estimate
+
+- Phase 1 (User Service connector): 1-2 hours
+- Phase 2 (Webhook sender): 1-2 hours
+- Phase 3 (Deployment validation): 1-2 hours
+- Phase 4 (Testing): 3-4 hours
+- **Total**: 6-10 hours (~1 day)
+
+## Reference Files
+- [PAYMENT_MODEL.md](/PAYMENT_MODEL.md) - Architecture
+- [try.direct.user.service/TODO.md](try.direct.user.service/TODO.md) - User Service implementation
+- [try.direct.tools/TODO.md](try.direct.tools/TODO.md) - Shared utilities
+- [blog/TODO.md](blog/TODO.md) - Frontend marketplace UI
diff --git a/access_control.conf.dist b/access_control.conf.dist
new file mode 100644
index 0000000..f164af1
--- /dev/null
+++ b/access_control.conf.dist
@@ -0,0 +1,14 @@
+[request_definition]
+r = sub, obj, act
+
+[policy_definition]
+p = sub, obj, act
+
+[role_definition]
+g = _, _
+
+[policy_effect]
+e = some(where (p.eft == allow))
+
+[matchers]
+m = g(r.sub, p.sub) && keyMatch2(r.obj, p.obj) && r.act == p.act
diff --git a/assets/logo/stacker.png b/assets/logo/stacker.png
new file mode 100644
index 0000000..c10321e
Binary files /dev/null and b/assets/logo/stacker.png differ
diff --git a/configuration.yaml b/configuration.yaml
deleted file mode 100644
index 5099d3d..0000000
--- a/configuration.yaml
+++ /dev/null
@@ -1,11 +0,0 @@
-#auth_url: http://127.0.0.1:8080/me
-app_host: 127.0.0.1
-app_port: 8000
-auth_url: https://dev.try.direct/server/user/oauth_server/api/me
-max_clients_number: 2
-database:
- host: 127.0.0.1
- port: 5432
- username: postgres
- password: postgres
- database_name: stacker
diff --git a/configuration.yaml.dist b/configuration.yaml.dist
new file mode 100644
index 0000000..2a84fba
--- /dev/null
+++ b/configuration.yaml.dist
@@ -0,0 +1,64 @@
+#auth_url: http://127.0.0.1:8080/me
+app_host: 127.0.0.1
+app_port: 8000
+auth_url: https://dev.try.direct/server/user/oauth_server/api/me
+max_clients_number: 2
+agent_command_poll_timeout_secs: 30
+agent_command_poll_interval_secs: 3
+casbin_reload_enabled: true
+casbin_reload_interval_secs: 10
+database:
+ host: 127.0.0.1
+ port: 5432
+ username: postgres
+ password: postgres
+ database_name: stacker
+
+amqp:
+ host: 127.0.0.1
+ port: 5672
+ username: guest
+ password: guest
+
+# Vault configuration (can be overridden by environment variables)
+vault:
+ address: http://127.0.0.1:8200
+ token: change-me-dev-token
+ # API prefix (Vault uses /v1 by default). Set empty to omit.
+ api_prefix: v1
+ # Path under the mount (without deployment_hash), e.g. 'secret/debug/status_panel' or 'agent'
+ # Final path: {address}/{api_prefix}/{agent_path_prefix}/{deployment_hash}/token
+ agent_path_prefix: agent
+
+# External service connectors
+connectors:
+ user_service:
+ enabled: false
+ base_url: "https://dev.try.direct/server/user"
+ timeout_secs: 10
+ retry_attempts: 3
+ payment_service:
+ enabled: false
+ base_url: "http://localhost:8000"
+ timeout_secs: 15
+ events:
+ enabled: false
+ amqp_url: "amqp://guest:guest@127.0.0.1:5672/%2f"
+ exchange: "stacker_events"
+ prefetch: 10
+ dockerhub_service:
+ enabled: true
+ base_url: "https://hub.docker.com"
+ timeout_secs: 10
+ retry_attempts: 3
+ page_size: 50
+ redis_url: "redis://127.0.0.1/0"
+ cache_ttl_namespaces_secs: 86400
+ cache_ttl_repositories_secs: 21600
+ cache_ttl_tags_secs: 3600
+ username: ~
+ personal_access_token: ~
+
+# Env overrides (optional):
+# VAULT_ADDRESS, VAULT_TOKEN, VAULT_AGENT_PATH_PREFIX
+# USER_SERVICE_AUTH_TOKEN, PAYMENT_SERVICE_AUTH_TOKEN
diff --git a/custom-stack-payload-2.json b/custom-stack-payload-2.json
deleted file mode 100644
index e64ec97..0000000
--- a/custom-stack-payload-2.json
+++ /dev/null
@@ -1 +0,0 @@
-{"commonDomain":"","domainList":{},"region":"fsn1","zone":null,"server":"cx21","os":"ubuntu-20.04","ssl":"letsencrypt","vars":[],"integrated_features":[],"extended_features":[],"subscriptions":["stack_migration"],"save_token":false,"cloud_token":"r6LAjqrynVt7pUwctVkzBlJmKjLOCxJIWjZFMLTkPYCCB4rsgphhEVhiL4DuO757","provider":"htz","stack_code":"custom-stack","selected_plan":"plan-individual-monthly","custom":{"web":[{"name":"Smarty Bot","code":"smarty-bot","domain":"smartybot.xyz","sharedPorts":["8000"],"versions":[],"custom":true,"type":"web","main":true,"_id":"lltkpq6p347kystct","dockerhub_user":"trydirect","dockerhub_name":"smarty-bot","url_app":"smartybot.xyz","url_git":"https://github.com/vsilent/smarty.git","disk_size":"1Gb","ram_size":"1Gb","cpu":1}],"feature":[{"_etag":null,"_id":198,"_created":"2022-04-27T14:10:27.280327","_updated":"2023-08-03T08:24:18.958721","name":"Portainer CE Feature","code":"portainer_ce_feature","role":["portainer-ce-feature"],"type":"feature","default":null,"popularity":null,"descr":null,"ports":{"public":["9000","8000"]},"commercial":null,"subscription":null,"autodeploy":null,"suggested":null,"dependency":null,"avoid_render":null,"price":null,"icon":{"light":{"width":1138,"height":1138,"image":"08589075-44e6-430e-98a5-f9dcf711e054.svg"},"dark":{}},"category_id":2,"parent_app_id":null,"full_description":null,"description":"Portainer is a lightweight management UI which allows you to easily manage your different Docker environments (Docker hosts or Swarm clusters)
","plan_type":null,"ansible_var":null,"repo_dir":null,"cpu":"0.6","ram_size":"1Gb","disk_size":"1Gb","dockerhub_image":"portainer-ce-feature","versions":[{"_etag":null,"_id":456,"_created":"2022-04-25T12:44:30.964547","_updated":"2023-03-17T13:46:51.433539","app_id":198,"name":"latest","version":"latest","update_status":"published","tag":"latest"}],"domain":"","sharedPorts":["9000"],"main":true,"version":{"_etag":null,"_id":456,"_created":"2022-04-25T12:44:30.964547","_updated":"2023-03-17T13:46:51.433539","app_id":198,"name":"latest","version":"latest","update_status":"published","tag":"latest"}}],"service":[{"_etag":null,"_id":230,"_created":"2023-05-24T12:51:52.108972","_updated":"2023-08-04T12:18:34.670194","name":"pgrst","code":"pgrst","role":null,"type":"service","default":null,"popularity":null,"descr":null,"ports":null,"commercial":null,"subscription":null,"autodeploy":null,"suggested":null,"dependency":null,"avoid_render":null,"price":null,"icon":null,"category_id":null,"parent_app_id":null,"full_description":null,"description":"PostgREST description
","plan_type":null,"ansible_var":null,"repo_dir":null,"cpu":"1","ram_size":"1Gb","disk_size":"1Gb","dockerhub_image":"pgrst","versions":[{"_etag":"566","_id":566,"_created":"2023-08-15T12:10:44","_updated":"2023-08-15T12:10:44.905249","app_id":230,"name":"PostgreSQL","version":"15_4","update_status":"ready_for_testing","tag":"unstable"},{"_etag":null,"_id":563,"_created":null,"_updated":"2023-05-24T12:52:15.351522","app_id":230,"name":"0.0.5","version":"0.0.5","update_status":"ready_for_testing","tag":"0.0.5"}],"domain":"","sharedPorts":["9999"],"main":true,"version":{"_etag":"566","_id":566,"_created":"2023-08-15T12:10:44","_updated":"2023-08-15T12:10:44.905249","app_id":230,"name":"PostgreSQL","version":"15_4","update_status":"ready_for_testing","tag":"unstable"}}],"servers_count":3,"custom_stack_name":"mysampleproject","custom_stack_code":"smarty-bot","custom_stack_category":["New"],"custom_stack_short_description":"sample short description","custom_stack_description":"stack description","custom_stack_publish":false,"project_name":"Smarty Bot","project_git_url":"https://github.com/vsilent/smarty.git","project_overview":"my product 1","project_description":"my product 1"}}
diff --git a/custom-stack-payload-3.json b/custom-stack-payload-3.json
deleted file mode 100644
index 4008848..0000000
--- a/custom-stack-payload-3.json
+++ /dev/null
@@ -1 +0,0 @@
-{"commonDomain":"","domainList":{},"region":"fsn1","zone":null,"server":"cx21","os":"ubuntu-20.04","ssl":"letsencrypt","vars":[],"integrated_features":[],"extended_features":[],"subscriptions":["stack_migration"],"save_token":false,"cloud_token":"r6LAjqrynVt7pUwctVkzBlJmKjLOCxJIWjZFMLTkPYCCB4rsgphhEVhiL4DuO757","provider":"htz","stack_code":"custom-stack","selected_plan":"plan-individual-monthly","custom":{"web":[{"name":"Smarty Bot","code":"smarty-bot","domain":"smartybot.xyz","sharedPorts":["8000"],"versions":[],"custom":true,"type":"web","main":true,"_id":"lltkpq6p347kystct","dockerhub_user":"trydirect","dockerhub_name":"smarty-bot","url_app":"smartybot.xyz","url_git":"https://github.com/vsilent/smarty.git","disk_size":"1Gb","ram_size":"1Gb","cpu":1}],"feature":[{"_etag":null,"_id":198,"_created":"2022-04-27T14:10:27.280327","_updated":"2023-08-03T08:24:18.958721","name":"Portainer CE Feature","code":"portainer_ce_feature","role":["portainer-ce-feature"],"type":"feature","default":null,"popularity":null,"descr":null,"ports":{"public":["9000","8000"]},"commercial":null,"subscription":null,"autodeploy":null,"suggested":null,"dependency":null,"avoid_render":null,"price":null,"icon":{"light":{"width":1138,"height":1138,"image":"08589075-44e6-430e-98a5-f9dcf711e054.svg"},"dark":{}},"category_id":2,"parent_app_id":null,"full_description":null,"description":"Portainer is a lightweight management UI which allows you to easily manage your different Docker environments (Docker hosts or Swarm clusters)
","plan_type":null,"ansible_var":null,"repo_dir":null,"cpu":"0.6","ram_size":"1Gb","disk_size":"1Gb","dockerhub_image":"portainer-ce-feature","versions":[{"_etag":null,"_id":456,"_created":"2022-04-25T12:44:30.964547","_updated":"2023-03-17T13:46:51.433539","app_id":198,"name":"latest","version":"latest","update_status":"published","tag":"latest"}],"domain":"","sharedPorts":["9000"],"main":true,"version":{"_etag":null,"_id":456,"_created":"2022-04-25T12:44:30.964547","_updated":"2023-03-17T13:46:51.433539","app_id":198,"name":"latest","version":"latest","update_status":"published","tag":"latest"}}],"service":[{"_etag":null,"_id":230,"_created":"2023-05-24T12:51:52.108972","_updated":"2023-08-04T12:18:34.670194","name":"pgrst","code":"pgrst","role":null,"type":"service","default":null,"popularity":null,"descr":null,"ports":null,"commercial":null,"subscription":null,"autodeploy":null,"suggested":null,"dependency":null,"avoid_render":null,"price":null,"icon":null,"category_id":null,"parent_app_id":null,"full_description":null,"description":"PostgREST description
","plan_type":null,"ansible_var":null,"repo_dir":null,"cpu":"1","ram_size":"1Gb","disk_size":"1Gb","dockerhub_image":"pgrst","versions":[{"_etag":"566","_id":566,"_created":"2023-08-15T12:10:44","_updated":"2023-08-15T12:10:44.905249","app_id":230,"name":"PostgreSQL","version":"15_4","update_status":"ready_for_testing","tag":"unstable"},{"_etag":null,"_id":563,"_created":null,"_updated":"2023-05-24T12:52:15.351522","app_id":230,"name":"0.0.5","version":"0.0.5","update_status":"ready_for_testing","tag":"0.0.5"}],"domain":"","sharedPorts":["9999"],"main":true,"version":{"_etag":"566","_id":566,"_created":"2023-08-15T12:10:44","_updated":"2023-08-15T12:10:44.905249","app_id":230,"name":"PostgreSQL","version":"15_4","update_status":"ready_for_testing","tag":"unstable"}}],"servers_count":3,"custom_stack_name":"mysampleproject","custom_stack_code":"another-bot","custom_stack_category":["New"],"custom_stack_short_description":"sample short description","custom_stack_description":"stack description","custom_stack_publish":false,"project_name":"Smarty Bot","project_git_url":"https://github.com/vsilent/smarty.git","project_overview":"my product 1","project_description":"my product 1"}}
diff --git a/custom-stack-payload-singleapp.json b/custom-stack-payload-singleapp.json
deleted file mode 100644
index e1b3998..0000000
--- a/custom-stack-payload-singleapp.json
+++ /dev/null
@@ -1 +0,0 @@
-{"commonDomain":"","domainList":{},"region":"fsn1","zone":null,"server":"cx11","os":"ubuntu-20.04","ssl":"letsencrypt","vars":[],"integrated_features":[],"extended_features":[],"subscriptions":[],"save_token":false,"cloud_token":"nUDKdUk0b6fUOcW6I4zhmdMfhH8kR4nJrxWjRPxrfqTJ9smOSoKB4qZpsYjS8As6","provider":"htz","stack_code":"custom-stack","selected_plan":"plan-free-periodically","custom":{"web":[{"name":"Smarty Bot","code":"smarty-bot","domain":"smartybot.com","sharedPorts":["8000"],"versions":[],"custom":true,"type":"web","main":true,"_id":"lmg1mg6c1acxn9bs7","dockerhub_user":"vsilent","dockerhub_name":"smarty"}],"feature":[],"service":[],"servers_count":3,"project_name":"sample1","custom_stack_code":"sample1"}}
diff --git a/custom-stack-payload.json b/custom-stack-payload.json
deleted file mode 100644
index a9ca754..0000000
--- a/custom-stack-payload.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{"commonDomain":"","domainList":{},"region":"fsn1","zone":null,"server":"cx21","os":"ubuntu-20.04","ssl":"letsencrypt","vars":[],"integrated_features":[],"extended_features":[],"subscriptions":["stack_migration","stack_health_monitoring","stack_security_monitoring"],"save_token":true,"cloud_token":"r6LAjqrynVt7pUwctVkzBlJmKjLOCxJIWjZFMLTkPYCCB4rsgphhEVhiL4DuO757","provider":"htz","stack_code":"custom-stack","selected_plan":"plan-individual-monthly","custom":{"web":[{"name":"smarty database","code":"smarty-database","domain":"smarty-db.example.com","sharedPorts":["6532"],"versions":[],"custom":true,"type":"feature","main":true,"_id":"lm0gdh732y2qrojfl","dockerhub_user":"trydirect","dockerhub_name":"smarty-db","ram_size":"1Gb","cpu":1,"disk_size":"1Gb"}],"feature":[{"_etag":null,"_id":235,"_created":"2023-08-11T07:07:12.123355","_updated":"2023-08-15T13:07:30.597485","name":"Nginx Proxy Manager","code":"nginx_proxy_manager","role":["nginx_proxy_manager"],"type":"feature","default":null,"popularity":null,"descr":null,"ports":{"public":["80","81","443"]},"commercial":null,"subscription":null,"autodeploy":null,"suggested":null,"dependency":null,"avoid_render":null,"price":null,"icon":{"light":{"width":192,"height":192,"image":"205128e6-0303-4b62-b946-9810b61f3d04.png"},"dark":{}},"category_id":2,"parent_app_id":null,"full_description":null,"description":"Nginx Proxy Manager is a user-friendly software application designed to effortlessly route traffic to your websites, whether they're hosted at home or elsewhere. It comes equipped with free SSL capabilities, eliminating the need for extensive Nginx or Letsencrypt knowledge. This tool proves especially handy for simplifying SSL generation and seamlessly proxying your docker containers.
","plan_type":null,"ansible_var":null,"repo_dir":null,"cpu":"1","ram_size":"1Gb","disk_size":"0.3Gb","dockerhub_image":"nginx-proxy-manager","versions":[{"_etag":"599","_id":599,"_created":"2023-08-11T10:23:33","_updated":"2023-08-11T10:23:34.420583","app_id":235,"name":"Nginx proxy manager","version":"2.10.4","update_status":"ready_for_testing","tag":"unstable"},{"_etag":"601","_id":601,"_created":null,"_updated":"2023-08-15T08:11:19.703882","app_id":235,"name":"Nginx proxy manager","version":"2.10.4","update_status":"published","tag":"stable"},{"_etag":null,"_id":600,"_created":null,"_updated":"2023-08-11T07:08:43.944998","app_id":235,"name":"Nginx proxy manager","version":"2.10.4","update_status":"ready_for_testing","tag":"latest"}],"domain":"","sharedPorts":["443"],"main":true}],"service":[{"_etag":null,"_id":24,"_created":"2020-06-19T13:07:24.228389","_updated":"2023-08-08T10:34:13.4985","name":"PostgreSQL","code":"postgres","role":[],"type":"service","default":null,"popularity":null,"descr":null,"ports":null,"commercial":null,"subscription":null,"autodeploy":null,"suggested":null,"dependency":null,"avoid_render":null,"price":null,"icon":{"light":{"width":576,"height":594,"image":"fd23f54c-e250-4228-8d56-7e5d93ffb925.svg"},"dark":{}},"category_id":null,"parent_app_id":null,"full_description":null,"description":null,"plan_type":null,"ansible_var":null,"repo_dir":null,"cpu":null,"ram_size":null,"disk_size":null,"dockerhub_image":"postgres","versions":[{"_etag":null,"_id":458,"_created":"2022-10-20T07:57:05.88997","_updated":"2023-04-05T07:24:39.637749","app_id":24,"name":"15","version":"15","update_status":"published","tag":"15"},{"_etag":null,"_id":288,"_created":"2022-10-20T07:56:16.160116","_updated":"2023-03-17T13:46:51.433539","app_id":24,"name":"10.22","version":"10.22","update_status":"published","tag":"10.22"},{"_etag":null,"_id":303,"_created":"2022-10-20T07:57:24.710286","_updated":"2023-03-17T13:46:51.433539","app_id":24,"name":"13.8","version":"13.8","update_status":"published","tag":"13.8"},{"_etag":null,"_id":266,"_created":"2022-10-20T07:56:32.360852","_updated":"2023-04-05T06:49:31.782132","app_id":24,"name":"11","version":"11","update_status":"published","tag":"11"},{"_etag":null,"_id":267,"_created":"2022-10-20T07:57:35.552085","_updated":"2023-03-17T13:46:51.433539","app_id":24,"name":"12.12","version":"12.12","update_status":"published","tag":"12.12"},{"_etag":null,"_id":38,"_created":"2020-06-19T13:07:24.258724","_updated":"2022-10-20T07:58:06.882602","app_id":24,"name":"14.5","version":"14.5","update_status":"published","tag":"14.5"},{"_etag":null,"_id":564,"_created":null,"_updated":"2023-05-24T12:55:57.894215","app_id":24,"name":"0.0.5","version":"0.0.5","update_status":"ready_for_testing","tag":"0.0.5"},{"_etag":null,"_id":596,"_created":null,"_updated":"2023-08-09T11:00:33.004267","app_id":24,"name":"Postgres","version":"15.1","update_status":"published","tag":"15.1"}],"domain":"","sharedPorts":["5432"],"main":true}],"servers_count":3,"custom_stack_name":"SMBO","custom_stack_code":"sample-stack","custom_stack_git_url":"https://github.com/vsilent/smbo.git","custom_stack_category":["New","Marketing Automation"],"custom_stack_short_description":"Should be what is my project about shortly","custom_stack_description":"what is my project about more detailed","project_name":"sample stack","project_overview":"my short description, stack to marketplace, keep my token","project_description":"my full description, stack to marketplace, keep my token"}}
-
-
-
diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml
new file mode 100644
index 0000000..864d1ce
--- /dev/null
+++ b/docker-compose.dev.yml
@@ -0,0 +1,77 @@
+version: "2.2"
+
+volumes:
+ stackerdb:
+ driver: local
+
+ redis-data:
+ driver: local
+
+networks:
+ stacker-network:
+ driver: bridge
+
+services:
+ stacker:
+ image: trydirect/stacker:0.0.9
+ container_name: stacker-dev
+ restart: always
+ networks:
+ - stacker-network
+ volumes:
+ # Mount local compiled binary for fast iteration
+ - ./target/debug/server:/app/server:ro
+ # Project configuration and assets
+ - ./files:/app/files
+ - ./docker/local/configuration.yaml:/app/configuration.yaml
+ - ./access_control.conf:/app/access_control.conf
+ - ./migrations:/app/migrations
+ - ./docker/local/.env:/app/.env
+ ports:
+ - "8000:8000"
+ env_file:
+ - ./docker/local/.env
+ environment:
+ - RUST_LOG=debug
+ - RUST_BACKTRACE=1
+ depends_on:
+ stackerdb:
+ condition: service_healthy
+ entrypoint: ["/app/server"]
+
+ redis:
+ container_name: redis-dev
+ image: redis
+ restart: always
+ networks:
+ - stacker-network
+ ports:
+ - 6379:6379
+ volumes:
+ - redis-data:/data
+ sysctls:
+ net.core.somaxconn: 1024
+ logging:
+ driver: "json-file"
+ options:
+ max-size: "10m"
+ tag: "container_{{.Name}}"
+
+ stackerdb:
+ container_name: stackerdb-dev
+ networks:
+ - stacker-network
+ healthcheck:
+ test: ["CMD-SHELL", "pg_isready -U postgres"]
+ interval: 10s
+ timeout: 5s
+ retries: 5
+ image: postgres:16.0
+ restart: always
+ ports:
+ - 5432:5432
+ env_file:
+ - ./docker/local/.env
+ volumes:
+ - stackerdb:/var/lib/postgresql/data
+ - ./docker/local/postgresql.conf:/etc/postgresql/postgresql.conf
diff --git a/docker-compose.yml b/docker-compose.yml
index 2d3b934..139b902 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -4,22 +4,20 @@ volumes:
stackerdb:
driver: local
-networks:
- backend:
- driver: bridge
- name: backend
- external: true
+ redis-data:
+ driver: local
services:
stacker:
- image: trydirect/stacker:0.0.3
+ image: trydirect/stacker:0.0.9
build: .
container_name: stacker
restart: always
volumes:
- ./files:/app/files
- ./docker/local/configuration.yaml:/app/configuration.yaml
+ - ./access_control.conf:/app/access_control.conf
- ./migrations:/app/migrations
- ./docker/local/.env:/app/.env
ports:
@@ -32,8 +30,25 @@ services:
depends_on:
stackerdb:
condition: service_healthy
- networks:
- - backend
+
+
+ redis:
+ container_name: redis
+ image: redis
+ restart: always
+ ports:
+ - 6379:6379
+ volumes:
+ - redis-data:/data
+# - ./redis/rc.local:/etc/rc.local
+# - ./redis/redis.conf:/usr/local/etc/redis/redis.conf
+ sysctls:
+ net.core.somaxconn: 1024
+ logging:
+ driver: "json-file"
+ options:
+ max-size: "10m"
+ tag: "container_{{.Name}}"
stackerdb:
@@ -51,6 +66,4 @@ services:
- ./docker/local/.env
volumes:
- stackerdb:/var/lib/postgresql/data
- - ./docker/local/postgresql.conf:/etc/postgresql/postgresql.conf
- networks:
- - backend
\ No newline at end of file
+ - ./docker/local/postgresql.conf:/etc/postgresql/postgresql.conf
\ No newline at end of file
diff --git a/docker/dev/.env b/docker/dev/.env
index 6371a97..a397928 100644
--- a/docker/dev/.env
+++ b/docker/dev/.env
@@ -1,5 +1,12 @@
+SECURITY_KEY=SECURITY_KEY_SHOULD_BE_OF_LEN_32
+
DATABASE_URL=postgres://postgres:postgres@stackerdb:5432/stacker
POSTGRES_USER=postgres
POSTGRES_PASSWORD=postgres
POSTGRES_DB=stacker
-POSTGRES_PORT=5432
\ No newline at end of file
+POSTGRES_PORT=5432
+
+# Vault Configuration
+VAULT_ADDRESS=http://127.0.0.1:8200
+VAULT_TOKEN=your_vault_token_here
+VAULT_AGENT_PATH_PREFIX=agent
\ No newline at end of file
diff --git a/docker/dev/configuration.yaml b/docker/dev/configuration.yaml
index 5eef969..141a67e 100644
--- a/docker/dev/configuration.yaml
+++ b/docker/dev/configuration.yaml
@@ -1,9 +1,17 @@
app_host: 0.0.0.0
app_port: 8000
auth_url: https://dev.try.direct/server/user/oauth_server/api/me
+max_clients_number: 2
+
database:
host: stackerdb
port: 5432
username: postgres
password: postgres
database_name: stacker
+
+amqp:
+ host: 127.0.0.1
+ port: 5672
+ username: guest
+ password: guest
diff --git a/docker/dev/docker-compose.yml b/docker/dev/docker-compose.yml
index 1ba68f2..6f8c0ab 100644
--- a/docker/dev/docker-compose.yml
+++ b/docker/dev/docker-compose.yml
@@ -4,28 +4,61 @@ volumes:
stackerdb:
driver: local
+ stacker-redis-data:
+ driver: local
+
+networks:
+ backend:
+ driver: bridge
+ name: backend
+ external: true
+
+
services:
stacker:
- image: trydirect/stacker:0.0.3
+ image: trydirect/stacker:0.0.8
build: .
container_name: stacker
restart: always
volumes:
- ./stacker/files:/app/files
- ./configuration.yaml:/app/configuration.yaml
+ - ./access_control.conf:/app/access_control.conf
- ./migrations:/app/migrations
- ./.env:/app/.env
ports:
- "8000:8000"
env_file:
- ./.env
+ environment:
+ - RUST_LOG=debug
+ - RUST_BACKTRACE=full
+ depends_on:
+ stackerdb:
+ condition: service_healthy
+ networks:
+ - backend
+
+
+ stacker_queue:
+ image: trydirect/stacker:0.0.7
+ container_name: stacker_queue
+ restart: always
+ volumes:
+ - ./configuration.yaml:/app/configuration.yaml
+ - ./.env:/app/.env
environment:
- RUST_LOG=debug
- RUST_BACKTRACE=1
+ env_file:
+ - ./.env
depends_on:
stackerdb:
condition: service_healthy
+ entrypoint: /app/console mq listen
+ networks:
+ - backend
stackerdb:
@@ -43,4 +76,26 @@ services:
- ./.env
volumes:
- stackerdb:/var/lib/postgresql/data
- - ./postgresql.conf:/etc/postgresql/postgresql.conf
\ No newline at end of file
+ - ./postgresql.conf:/etc/postgresql/postgresql.conf
+ networks:
+ - backend
+
+ stackerredis:
+ container_name: stackerredis
+ image: redis:latest
+ restart: always
+ ports:
+ - 127.0.0.1:6379:6379
+ volumes:
+ - stacker-redis-data:/data
+ # - ./redis/rc.local:/etc/rc.local
+ # - ./redis/redis.conf:/usr/local/etc/redis/redis.conf
+ sysctls:
+ net.core.somaxconn: 1024
+ logging:
+ driver: "json-file"
+ options:
+ max-size: "10m"
+ tag: "container_{{.Name}}"
+
+
diff --git a/docker/local/.env b/docker/local/.env
index 247a3fd..6371a97 100644
--- a/docker/local/.env
+++ b/docker/local/.env
@@ -1,4 +1,4 @@
-DATABASE_URL=postgres://postgres:postgres@172.17.0.2:5432/stacker
+DATABASE_URL=postgres://postgres:postgres@stackerdb:5432/stacker
POSTGRES_USER=postgres
POSTGRES_PASSWORD=postgres
POSTGRES_DB=stacker
diff --git a/docker/local/configuration.yaml b/docker/local/configuration.yaml
index 9c1848f..141a67e 100644
--- a/docker/local/configuration.yaml
+++ b/docker/local/configuration.yaml
@@ -1,9 +1,17 @@
app_host: 0.0.0.0
app_port: 8000
auth_url: https://dev.try.direct/server/user/oauth_server/api/me
+max_clients_number: 2
+
database:
- host: 172.17.0.2
+ host: stackerdb
port: 5432
username: postgres
password: postgres
database_name: stacker
+
+amqp:
+ host: 127.0.0.1
+ port: 5672
+ username: guest
+ password: guest
diff --git a/docker/local/postgresql.conf b/docker/local/postgresql.conf
index 4e89674..9fed453 100644
--- a/docker/local/postgresql.conf
+++ b/docker/local/postgresql.conf
@@ -795,4 +795,4 @@ listen_addresses = '*'
# CUSTOMIZED OPTIONS
#------------------------------------------------------------------------------
-# Add settings for extensions here
+# Add settings for extensions here
\ No newline at end of file
diff --git a/docs/MCP_PHASE1_SUMMARY.md b/docs/MCP_PHASE1_SUMMARY.md
new file mode 100644
index 0000000..d0f1042
--- /dev/null
+++ b/docs/MCP_PHASE1_SUMMARY.md
@@ -0,0 +1,253 @@
+# MCP Server Implementation - Phase 1 Complete ✅
+
+## What Was Implemented
+
+### Core Protocol Support (`src/mcp/protocol.rs`)
+- ✅ JSON-RPC 2.0 request/response structures
+- ✅ MCP-specific types (Tool, ToolContent, InitializeParams, etc.)
+- ✅ Error handling with standard JSON-RPC error codes
+- ✅ Full type safety with Serde serialization
+
+### WebSocket Handler (`src/mcp/websocket.rs`)
+- ✅ Actix WebSocket actor for persistent connections
+- ✅ Heartbeat mechanism (5s interval, 10s timeout; see the sketch after this list)
+- ✅ JSON-RPC message routing
+- ✅ Three core methods implemented:
+ - `initialize` - Client handshake
+ - `tools/list` - List available tools
+ - `tools/call` - Execute tools
+- ✅ OAuth authentication integration (via middleware)
+- ✅ Structured logging with tracing
+
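+The heartbeat follows the usual `actix-web-actors` pattern. Below is a minimal sketch stripped down to just the ping/timeout loop; the constants and the `hb` field name are illustrative rather than copied from `src/mcp/websocket.rs`:
+
+```rust
+use std::time::{Duration, Instant};
+
+use actix::prelude::*;
+use actix_web_actors::ws;
+
+const HEARTBEAT_INTERVAL: Duration = Duration::from_secs(5);
+const CLIENT_TIMEOUT: Duration = Duration::from_secs(10);
+
+struct HeartbeatOnly {
+    hb: Instant, // last time the client was heard from
+}
+
+impl Actor for HeartbeatOnly {
+    type Context = ws::WebsocketContext<Self>;
+
+    fn started(&mut self, ctx: &mut Self::Context) {
+        // Ping every 5s; close the connection if the client has been
+        // silent for longer than the 10s timeout.
+        ctx.run_interval(HEARTBEAT_INTERVAL, |act, ctx| {
+            if Instant::now().duration_since(act.hb) > CLIENT_TIMEOUT {
+                ctx.stop();
+                return;
+            }
+            ctx.ping(b"");
+        });
+    }
+}
+```
+
+In the real handler, `hb` would be refreshed whenever a pong or client message arrives.
+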
+### Tool Registry (`src/mcp/registry.rs`)
+- ✅ Pluggable tool handler architecture
+- ✅ `ToolHandler` trait for async tool execution
+- ✅ `ToolContext` with user, database pool, settings
+- ✅ Dynamic tool registration system
+- ✅ Tool schema validation support
+
+### Session Management (`src/mcp/session.rs`)
+- ✅ Per-connection session state
+- ✅ Context storage (for multi-turn conversations)
+- ✅ Initialization tracking
+- ✅ UUID-based session IDs
+
+### Integration
+- ✅ Route registered: `GET /mcp` (WebSocket upgrade)
+- ✅ Authentication: OAuth bearer token required
+- ✅ Authorization: Casbin rules added for `group_user` and `group_admin`
+- ✅ Migration: `20251227140000_casbin_mcp_endpoint.up.sql`
+
+### Dependencies Added
+```toml
+actix = "0.13.5"
+actix-web-actors = "4.3.1"
+async-trait = "0.1.77"
+```
+
+## Architecture
+
+```
+┌─────────────────────────────────────────────────────┐
+│ HTTP Request: GET /mcp │
+│ Headers: Authorization: Bearer │
+└──────────────────┬──────────────────────────────────┘
+ │
+ ▼
+┌─────────────────────────────────────────────────────┐
+│ Authentication Middleware │
+│ - OAuth token validation │
+│ - User object from TryDirect service │
+└──────────────────┬──────────────────────────────────┘
+ │
+ ▼
+┌─────────────────────────────────────────────────────┐
+│ Authorization Middleware (Casbin) │
+│ - Check: user.role → group_user/group_admin │
+│ - Rule: p, group_user, /mcp, GET │
+└──────────────────┬──────────────────────────────────┘
+ │
+ ▼
+┌─────────────────────────────────────────────────────┐
+│ mcp_websocket Handler │
+│ - Upgrade HTTP → WebSocket │
+│ - Create McpWebSocket actor │
+└──────────────────┬──────────────────────────────────┘
+ │
+ ▼
+┌─────────────────────────────────────────────────────┐
+│ McpWebSocket Actor (persistent connection) │
+│ │
+│ JSON-RPC Message Loop: │
+│ 1. Receive text message │
+│ 2. Parse JsonRpcRequest │
+│ 3. Route to method handler: │
+│ - initialize → return server capabilities │
+│ - tools/list → return tool schemas │
+│ - tools/call → execute tool via registry │
+│ 4. Send JsonRpcResponse │
+│ │
+│ Heartbeat: Ping every 5s, timeout after 10s │
+└─────────────────────────────────────────────────────┘
+```
+
+## Testing Status
+
+### Unit Tests
+- ✅ JSON-RPC protocol serialization/deserialization (see the sketch after this list)
+- ✅ Error code generation
+- ✅ Tool schema structures
+- ✅ Initialize handshake
+- ⏳ WebSocket integration tests (requires database)
+
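+For reference, the round-trip behaviour these protocol tests cover can be reproduced with plain `serde_json`; this standalone example deliberately avoids the project's own `JsonRpcRequest` type:
+
+```rust
+#[cfg(test)]
+mod jsonrpc_roundtrip {
+    use serde_json::Value;
+
+    #[test]
+    fn initialize_request_roundtrip() {
+        let raw = r#"{"jsonrpc":"2.0","id":1,"method":"initialize","params":{"protocolVersion":"2024-11-05","capabilities":{}}}"#;
+
+        // Parse the wire format and check the JSON-RPC 2.0 envelope fields.
+        let req: Value = serde_json::from_str(raw).expect("valid JSON-RPC");
+        assert_eq!(req["jsonrpc"], "2.0");
+        assert_eq!(req["id"], 1);
+        assert_eq!(req["method"], "initialize");
+        assert_eq!(req["params"]["protocolVersion"], "2024-11-05");
+
+        // Serializing and re-parsing yields semantically identical JSON.
+        let round_trip: Value =
+            serde_json::from_str(&serde_json::to_string(&req).unwrap()).unwrap();
+        assert_eq!(req, round_trip);
+    }
+}
+```
+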
+### Manual Testing
+To test the WebSocket connection:
+
+```bash
+# 1. Start the server
+make dev
+
+# 2. Connect with wscat (install: npm install -g wscat)
+wscat -c "ws://localhost:8000/mcp" -H "Authorization: Bearer <token>"
+
+# 3. Send initialize request
+{"jsonrpc":"2.0","id":1,"method":"initialize","params":{"protocolVersion":"2024-11-05","capabilities":{}}}
+
+# Expected response:
+{
+ "jsonrpc": "2.0",
+ "id": 1,
+ "result": {
+ "protocolVersion": "2024-11-05",
+ "capabilities": {
+ "tools": {
+ "listChanged": false
+ }
+ },
+ "serverInfo": {
+ "name": "stacker-mcp",
+ "version": "0.2.0"
+ }
+ }
+}
+
+# 4. List tools
+{"jsonrpc":"2.0","id":2,"method":"tools/list","params":{}}
+
+# Expected response (initially empty):
+{
+ "jsonrpc": "2.0",
+ "id": 2,
+ "result": {
+ "tools": []
+ }
+}
+```
+
+## Next Steps (Phase 2: Core Tools)
+
+### 1. Project Management Tools
+- [ ] `src/mcp/tools/project.rs`
+ - [ ] `CreateProjectTool` - Create new stack
+ - [ ] `ListProjectsTool` - List user's projects
+ - [ ] `GetProjectTool` - Get project details
+ - [ ] `UpdateProjectTool` - Update project
+ - [ ] `DeleteProjectTool` - Delete project
+
+### 2. Composition & Deployment
+- [ ] `src/mcp/tools/deployment.rs`
+ - [ ] `GenerateComposeTool` - Generate docker-compose.yml
+ - [ ] `DeployProjectTool` - Deploy to cloud
+ - [ ] `GetDeploymentStatusTool` - Check deployment status
+
+### 3. Templates & Discovery
+- [ ] `src/mcp/tools/templates.rs`
+ - [ ] `ListTemplatesTool` - Browse public templates
+ - [ ] `GetTemplateTool` - Get template details
+ - [ ] `SuggestResourcesTool` - AI resource recommendations
+
+### 4. Tool Registration
+Update `src/mcp/registry.rs`:
+```rust
+pub fn new() -> Self {
+ let mut registry = Self {
+ handlers: HashMap::new(),
+ };
+
+ registry.register("create_project", Box::new(CreateProjectTool));
+ registry.register("list_projects", Box::new(ListProjectsTool));
+ registry.register("suggest_resources", Box::new(SuggestResourcesTool));
+ // ... register all tools
+
+ registry
+}
+```
+
+## Files Modified/Created
+
+### New Files
+- `src/mcp/mod.rs` - Module exports
+- `src/mcp/protocol.rs` - MCP protocol types
+- `src/mcp/session.rs` - Session management
+- `src/mcp/registry.rs` - Tool registry
+- `src/mcp/websocket.rs` - WebSocket handler
+- `src/mcp/protocol_tests.rs` - Unit tests
+- `migrations/20251227140000_casbin_mcp_endpoint.up.sql` - Authorization rules
+- `migrations/20251227140000_casbin_mcp_endpoint.down.sql` - Rollback
+
+### Modified Files
+- `src/lib.rs` - Added `pub mod mcp;`
+- `src/startup.rs` - Registered `/mcp` route, initialized registry
+- `Cargo.toml` - Added `actix`, `actix-web-actors`, `async-trait`
+
+## Known Limitations
+
+1. **No tools registered yet** - Tools list returns empty array
+2. **Session persistence** - Sessions only live in memory (not Redis)
+3. **Rate limiting** - Not yet implemented (planned for Phase 4)
+4. **Metrics** - No Prometheus metrics yet
+5. **Database tests** - Cannot run tests without database connection
+
+## Security
+
+- ✅ OAuth authentication required
+- ✅ Casbin authorization enforced
+- ✅ User isolation (ToolContext includes authenticated user)
+- ⏳ Rate limiting (planned)
+- ⏳ Input validation (will be added per-tool)
+
+## Performance
+
+- Connection pooling: Yes (reuses app's PgPool)
+- Concurrent connections: Limited by Actix worker pool
+- WebSocket overhead: ~2KB per connection
+- Heartbeat interval: 5s (configurable)
+- Tool execution: Async (non-blocking)
+
+## Deployment
+
+### Environment Variables
+No new environment variables needed. Uses existing:
+- `DATABASE_URL` - PostgreSQL connection
+- `RUST_LOG` - Logging level
+- OAuth settings from `configuration.yaml`
+
+### Database Migration
+```bash
+sqlx migrate run
+```
+
+### Docker
+No changes needed to existing Dockerfile.
+
+## Documentation
+
+- ✅ Backend plan: `docs/MCP_SERVER_BACKEND_PLAN.md`
+- ✅ Frontend integration: `docs/MCP_SERVER_FRONTEND_INTEGRATION.md`
+- ✅ This README: `docs/MCP_PHASE1_SUMMARY.md`
+
+## Questions?
+
+- MCP Protocol Spec: https://spec.modelcontextprotocol.io/
+- Actix WebSocket Docs: https://actix.rs/docs/websockets/
+- Tool implementation examples: See planning docs in `docs/`
diff --git a/docs/MCP_SERVER_BACKEND_PLAN.md b/docs/MCP_SERVER_BACKEND_PLAN.md
new file mode 100644
index 0000000..d78db97
--- /dev/null
+++ b/docs/MCP_SERVER_BACKEND_PLAN.md
@@ -0,0 +1,1215 @@
+# MCP Server Backend Implementation Plan
+
+## Overview
+This document outlines the implementation plan for adding Model Context Protocol (MCP) server capabilities to the Stacker backend. The MCP server will expose Stacker's functionality as tools that AI assistants can use to help users build and deploy application stacks.
+
+## Architecture
+
+```
+┌─────────────────────────────────────────────────────────┐
+│ Stacker Backend (Rust/Actix-web) │
+│ │
+│ ┌──────────────────┐ ┌────────────────────┐ │
+│ │ REST API │ │ MCP Server │ │
+│ │ (Existing) │ │ (New) │ │
+│ │ │ │ │ │
+│ │ /project │◄───────┤ Tool Registry │ │
+│ │ /cloud │ │ - create_project │ │
+│ │ /rating │ │ - list_projects │ │
+│ │ /deployment │ │ - get_templates │ │
+│ └──────────────────┘ │ - deploy_project │ │
+│ │ │ - etc... │ │
+│ │ └────────────────────┘ │
+│ │ │ │
+│ │ │ │
+│ └───────────┬───────────────┘ │
+│ ▼ │
+│ ┌─────────────────┐ │
+│ │ PostgreSQL DB │ │
+│ │ + Session Store │ │
+│ └─────────────────┘ │
+└─────────────────────────────────────────────────────────┘
+ │
+ │ WebSocket (JSON-RPC 2.0)
+ ▼
+┌─────────────────────────────────────────────────────────┐
+│ Frontend (React) or AI Client │
+│ - Sends tool requests │
+│ - Receives tool results │
+│ - Manages conversation context │
+└─────────────────────────────────────────────────────────┘
+```
+
+## Technology Stack
+
+### Core Dependencies
+```toml
+[dependencies]
+# MCP Protocol
+tokio-tungstenite = "0.21" # WebSocket server
+serde_json = "1.0" # JSON-RPC 2.0 serialization
+uuid = { version = "1.0", features = ["v4"] } # Request IDs
+
+# Existing (reuse)
+actix-web = "4.4" # HTTP server
+sqlx = "0.8" # Database
+tokio = { version = "1", features = ["full"] }
+```
+
+### MCP Protocol Specification
+- **Protocol**: JSON-RPC 2.0 over WebSocket
+- **Version**: MCP 2024-11-05
+- **Transport**: `wss://api.try.direct/mcp` (production)
+- **Authentication**: OAuth Bearer token (reuse existing auth)
+
+## Implementation Phases
+
+---
+
+## Phase 1: Foundation (Week 1-2)
+
+### 1.1 MCP Protocol Implementation
+
+**Create core protocol structures:**
+
+```rust
+// src/mcp/protocol.rs
+use serde::{Deserialize, Serialize};
+use serde_json::Value;
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct JsonRpcRequest {
+    pub jsonrpc: String, // "2.0"
+    pub id: Option<Value>,
+    pub method: String,
+    pub params: Option<Value>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct JsonRpcResponse {
+ pub jsonrpc: String,
+    pub id: Option<Value>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub result: Option<Value>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub error: Option<JsonRpcError>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct JsonRpcError {
+ pub code: i32,
+ pub message: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+    pub data: Option<Value>,
+}
+
+// MCP-specific types
+#[derive(Debug, Serialize, Deserialize)]
+pub struct Tool {
+ pub name: String,
+ pub description: String,
+ #[serde(rename = "inputSchema")]
+ pub input_schema: Value, // JSON Schema for parameters
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct ToolListResponse {
+    pub tools: Vec<Tool>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct CallToolRequest {
+ pub name: String,
+    pub arguments: Option<Value>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct CallToolResponse {
+    pub content: Vec<ToolContent>,
+    #[serde(rename = "isError", skip_serializing_if = "Option::is_none")]
+    pub is_error: Option<bool>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(tag = "type")]
+pub enum ToolContent {
+ #[serde(rename = "text")]
+ Text { text: String },
+ #[serde(rename = "image")]
+ Image {
+ data: String, // base64
+ #[serde(rename = "mimeType")]
+ mime_type: String
+ },
+}
+```
+
+### 1.2 WebSocket Handler
+
+```rust
+// src/mcp/websocket.rs
+use actix::{Actor, StreamHandler};
+use actix_web::{web, Error, HttpRequest, HttpResponse};
+use actix_web_actors::ws;
+use tokio_tungstenite::tungstenite::protocol::Message;
+
+pub struct McpWebSocket {
+    user: Arc<models::User>,
+ session: McpSession,
+}
+
+impl Actor for McpWebSocket {
+    type Context = ws::WebsocketContext<Self>;
+}
+
+impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for McpWebSocket {
+    fn handle(&mut self, msg: Result<ws::Message, ws::ProtocolError>, ctx: &mut Self::Context) {
+ match msg {
+ Ok(ws::Message::Text(text)) => {
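+                // NOTE: StreamHandler::handle is synchronous, so the async
+                // dispatch below has to be bridged with an actor future
+                // (e.g. ctx.spawn) in the actual implementation rather than
+                // awaited inline as in this sketch.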
+ let request: JsonRpcRequest = serde_json::from_str(&text).unwrap();
+ let response = self.handle_jsonrpc(request).await;
+ ctx.text(serde_json::to_string(&response).unwrap());
+ }
+ Ok(ws::Message::Close(reason)) => {
+ ctx.close(reason);
+ ctx.stop();
+ }
+ _ => {}
+ }
+ }
+}
+
+impl McpWebSocket {
+ async fn handle_jsonrpc(&self, req: JsonRpcRequest) -> JsonRpcResponse {
+ match req.method.as_str() {
+ "initialize" => self.handle_initialize(req).await,
+ "tools/list" => self.handle_tools_list(req).await,
+ "tools/call" => self.handle_tools_call(req).await,
+ _ => JsonRpcResponse {
+ jsonrpc: "2.0".to_string(),
+ id: req.id,
+ result: None,
+ error: Some(JsonRpcError {
+ code: -32601,
+ message: "Method not found".to_string(),
+ data: None,
+ }),
+ },
+ }
+ }
+}
+
+// Route registration
+pub async fn mcp_websocket(
+ req: HttpRequest,
+ stream: web::Payload,
+    user: web::ReqData<Arc<models::User>>,
+    pg_pool: web::Data<PgPool>,
+) -> Result<HttpResponse, Error> {
+ let ws = McpWebSocket {
+ user: user.into_inner(),
+ session: McpSession::new(),
+ };
+ ws::start(ws, &req, stream)
+}
+```
+
+### 1.3 Tool Registry
+
+```rust
+// src/mcp/registry.rs
+use std::collections::HashMap;
+use async_trait::async_trait;
+
+#[async_trait]
+pub trait ToolHandler: Send + Sync {
+ async fn execute(
+ &self,
+ args: Value,
+ context: &ToolContext,
+    ) -> Result<ToolContent, String>;
+
+ fn schema(&self) -> Tool;
+}
+
+pub struct ToolRegistry {
+    handlers: HashMap<String, Box<dyn ToolHandler>>,
+}
+
+impl ToolRegistry {
+ pub fn new() -> Self {
+ let mut registry = Self {
+ handlers: HashMap::new(),
+ };
+
+ // Register all tools
+ registry.register("create_project", Box::new(CreateProjectTool));
+ registry.register("list_projects", Box::new(ListProjectsTool));
+ registry.register("get_project", Box::new(GetProjectTool));
+ registry.register("update_project", Box::new(UpdateProjectTool));
+ registry.register("delete_project", Box::new(DeleteProjectTool));
+ registry.register("generate_compose", Box::new(GenerateComposeTool));
+ registry.register("deploy_project", Box::new(DeployProjectTool));
+ registry.register("list_templates", Box::new(ListTemplatesTool));
+ registry.register("get_template", Box::new(GetTemplateTool));
+ registry.register("list_clouds", Box::new(ListCloudsTool));
+ registry.register("suggest_resources", Box::new(SuggestResourcesTool));
+
+ registry
+ }
+
+    pub fn get(&self, name: &str) -> Option<&Box<dyn ToolHandler>> {
+ self.handlers.get(name)
+ }
+
+    pub fn list_tools(&self) -> Vec<Tool> {
+ self.handlers.values().map(|h| h.schema()).collect()
+ }
+}
+
+pub struct ToolContext {
+    pub user: Arc<models::User>,
+    pub pg_pool: PgPool,
+    pub settings: Arc<Settings>,
+}
+```
+
+### 1.4 Session Management
+
+```rust
+// src/mcp/session.rs
+use std::collections::HashMap;
+
+pub struct McpSession {
+ pub id: String,
+    pub created_at: chrono::DateTime<chrono::Utc>,
+    pub context: HashMap<String, Value>, // Store conversation state
+}
+
+impl McpSession {
+ pub fn new() -> Self {
+ Self {
+ id: uuid::Uuid::new_v4().to_string(),
+ created_at: chrono::Utc::now(),
+ context: HashMap::new(),
+ }
+ }
+
+ pub fn set_context(&mut self, key: String, value: Value) {
+ self.context.insert(key, value);
+ }
+
+ pub fn get_context(&self, key: &str) -> Option<&Value> {
+ self.context.get(key)
+ }
+}
+```
+
+**Deliverables:**
+- [ ] MCP protocol types in `src/mcp/protocol.rs`
+- [ ] WebSocket handler in `src/mcp/websocket.rs`
+- [ ] Tool registry in `src/mcp/registry.rs`
+- [ ] Session management in `src/mcp/session.rs`
+- [ ] Route registration: `web::resource("/mcp").route(web::get().to(mcp_websocket))`
+
+---
+
+## Phase 2: Core Tools (Week 3-4)
+
+### 2.1 Project Management Tools
+
+```rust
+// src/mcp/tools/project.rs
+
+pub struct CreateProjectTool;
+
+#[async_trait]
+impl ToolHandler for CreateProjectTool {
+    async fn execute(&self, args: Value, ctx: &ToolContext) -> Result<ToolContent, String> {
+ let form: forms::project::Add = serde_json::from_value(args)
+ .map_err(|e| format!("Invalid arguments: {}", e))?;
+
+ let project = db::project::insert(
+ &ctx.pg_pool,
+ &ctx.user.id,
+ &form,
+ ).await
+ .map_err(|e| format!("Database error: {}", e))?;
+
+ Ok(ToolContent::Text {
+ text: serde_json::to_string(&project).unwrap(),
+ })
+ }
+
+ fn schema(&self) -> Tool {
+ Tool {
+ name: "create_project".to_string(),
+ description: "Create a new application stack project with services, networking, and deployment configuration".to_string(),
+ input_schema: serde_json::json!({
+ "type": "object",
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "Project name (required)"
+ },
+ "description": {
+ "type": "string",
+ "description": "Project description (optional)"
+ },
+ "apps": {
+ "type": "array",
+ "description": "List of applications/services",
+ "items": {
+ "type": "object",
+ "properties": {
+ "name": { "type": "string" },
+ "dockerImage": {
+ "type": "object",
+ "properties": {
+ "namespace": { "type": "string" },
+ "repository": { "type": "string" },
+ "password": { "type": "string" }
+ },
+ "required": ["repository"]
+ },
+ "resources": {
+ "type": "object",
+ "properties": {
+ "cpu": { "type": "number", "description": "CPU cores (0-8)" },
+ "ram": { "type": "number", "description": "RAM in GB (0-16)" },
+ "storage": { "type": "number", "description": "Storage in GB (0-100)" }
+ }
+ },
+ "ports": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "hostPort": { "type": "number" },
+ "containerPort": { "type": "number" }
+ }
+ }
+ }
+ },
+ "required": ["name", "dockerImage"]
+ }
+ }
+ },
+ "required": ["name", "apps"]
+ }),
+ }
+ }
+}
+
+pub struct ListProjectsTool;
+
+#[async_trait]
+impl ToolHandler for ListProjectsTool {
+    async fn execute(&self, _args: Value, ctx: &ToolContext) -> Result<ToolContent, String> {
+ let projects = db::project::fetch_by_user(&ctx.pg_pool, &ctx.user.id)
+ .await
+ .map_err(|e| format!("Database error: {}", e))?;
+
+ Ok(ToolContent::Text {
+ text: serde_json::to_string(&projects).unwrap(),
+ })
+ }
+
+ fn schema(&self) -> Tool {
+ Tool {
+ name: "list_projects".to_string(),
+ description: "List all projects owned by the authenticated user".to_string(),
+ input_schema: serde_json::json!({
+ "type": "object",
+ "properties": {}
+ }),
+ }
+ }
+}
+```
+
+### 2.2 Template & Discovery Tools
+
+```rust
+// src/mcp/tools/templates.rs
+
+pub struct ListTemplatesTool;
+
+#[async_trait]
+impl ToolHandler for ListTemplatesTool {
+    async fn execute(&self, args: Value, ctx: &ToolContext) -> Result<ToolContent, String> {
+        #[derive(Deserialize, Default)]
+        struct Args {
+            category: Option<String>,
+            search: Option<String>,
+ }
+
+ let params: Args = serde_json::from_value(args).unwrap_or_default();
+
+ // Fetch public templates from rating table
+ let templates = db::rating::fetch_public_templates(&ctx.pg_pool, params.category)
+ .await
+ .map_err(|e| format!("Database error: {}", e))?;
+
+ // Filter by search term if provided
+ let filtered = if let Some(search) = params.search {
+ templates.into_iter()
+ .filter(|t| t.name.to_lowercase().contains(&search.to_lowercase()))
+ .collect()
+ } else {
+ templates
+ };
+
+ Ok(ToolContent::Text {
+ text: serde_json::to_string(&filtered).unwrap(),
+ })
+ }
+
+ fn schema(&self) -> Tool {
+ Tool {
+ name: "list_templates".to_string(),
+ description: "List available stack templates (WordPress, Node.js, Django, etc.) with ratings and descriptions".to_string(),
+ input_schema: serde_json::json!({
+ "type": "object",
+ "properties": {
+ "category": {
+ "type": "string",
+ "enum": ["web", "api", "database", "cms", "ecommerce"],
+ "description": "Filter by category (optional)"
+ },
+ "search": {
+ "type": "string",
+ "description": "Search templates by name (optional)"
+ }
+ }
+ }),
+ }
+ }
+}
+
+pub struct SuggestResourcesTool;
+
+#[async_trait]
+impl ToolHandler for SuggestResourcesTool {
+    async fn execute(&self, args: Value, _ctx: &ToolContext) -> Result<ToolContent, String> {
+ #[derive(Deserialize)]
+ struct Args {
+ app_type: String,
+            expected_traffic: Option<String>, // "low", "medium", "high"
+ }
+
+ let params: Args = serde_json::from_value(args)
+ .map_err(|e| format!("Invalid arguments: {}", e))?;
+
+ // Simple heuristic-based suggestions
+ let (cpu, ram, storage) = match params.app_type.to_lowercase().as_str() {
+ "wordpress" | "cms" => (1, 2, 20),
+ "nodejs" | "express" => (1, 1, 10),
+ "django" | "flask" => (2, 2, 15),
+ "nextjs" | "react" => (1, 2, 10),
+ "mysql" | "postgresql" => (2, 4, 50),
+ "redis" | "memcached" => (1, 1, 5),
+ "nginx" | "traefik" => (1, 0.5, 5),
+ _ => (1, 1, 10), // default
+ };
+
+ // Adjust for traffic
+ let multiplier = match params.expected_traffic.as_deref() {
+ Some("high") => 2.0,
+ Some("medium") => 1.5,
+ _ => 1.0,
+ };
+
+ let suggestion = serde_json::json!({
+ "cpu": (cpu as f64 * multiplier).ceil() as i32,
+ "ram": (ram as f64 * multiplier).ceil() as i32,
+ "storage": (storage as f64 * multiplier).ceil() as i32,
+ "recommendation": format!(
+ "For {} with {} traffic: {}x{} CPU, {} GB RAM, {} GB storage",
+ params.app_type,
+ params.expected_traffic.as_deref().unwrap_or("low"),
+ (cpu as f64 * multiplier).ceil(),
+ if multiplier > 1.0 { "vCPU" } else { "core" },
+ (ram as f64 * multiplier).ceil(),
+ (storage as f64 * multiplier).ceil()
+ )
+ });
+
+ Ok(ToolContent::Text {
+ text: serde_json::to_string(&suggestion).unwrap(),
+ })
+ }
+
+ fn schema(&self) -> Tool {
+ Tool {
+ name: "suggest_resources".to_string(),
+ description: "Suggest appropriate CPU, RAM, and storage limits for an application type".to_string(),
+ input_schema: serde_json::json!({
+ "type": "object",
+ "properties": {
+ "app_type": {
+ "type": "string",
+ "description": "Application type (e.g., 'wordpress', 'nodejs', 'postgresql')"
+ },
+ "expected_traffic": {
+ "type": "string",
+ "enum": ["low", "medium", "high"],
+ "description": "Expected traffic level (optional, default: low)"
+ }
+ },
+ "required": ["app_type"]
+ }),
+ }
+ }
+}
+```
+
+**Deliverables:**
+- [ ] Project CRUD tools (create, list, get, update, delete)
+- [ ] Deployment tools (generate_compose, deploy)
+- [ ] Template discovery tools (list_templates, get_template)
+- [ ] Resource suggestion tool
+- [ ] Cloud provider tools (list_clouds, add_cloud)
+
+---
+
+## Phase 3: Advanced Features (Week 5-6)
+
+### 3.1 Context & State Management
+
+```rust
+// Store partial project data during multi-turn conversations
+session.set_context("draft_project".to_string(), serde_json::json!({
+ "name": "My API",
+ "apps": [
+ {
+ "name": "api",
+ "dockerImage": { "repository": "node:18-alpine" }
+ }
+ ],
+ "step": 2 // User is on step 2 of 5
+}));
+```
+
+### 3.2 Validation Tools
+
+```rust
+pub struct ValidateDomainTool;
+
+#[async_trait]
+impl ToolHandler for ValidateDomainTool {
+    async fn execute(&self, args: Value, _ctx: &ToolContext) -> Result<ToolContent, String> {
+ #[derive(Deserialize)]
+ struct Args {
+ domain: String,
+ }
+
+ let params: Args = serde_json::from_value(args)
+ .map_err(|e| format!("Invalid arguments: {}", e))?;
+
+ // Simple regex validation
+ let domain_regex = regex::Regex::new(r"^([a-z0-9]+(-[a-z0-9]+)*\.)+[a-z]{2,}$").unwrap();
+        let is_valid = domain_regex.is_match(&params.domain);
+
+ let result = serde_json::json!({
+ "domain": params.domain,
+ "valid": is_valid,
+ "message": if is_valid {
+ "Domain format is valid"
+ } else {
+ "Invalid domain format. Use lowercase letters, numbers, hyphens, and dots only"
+ }
+ });
+
+ Ok(ToolContent::Text {
+ text: serde_json::to_string(&result).unwrap(),
+ })
+ }
+
+ fn schema(&self) -> Tool {
+ Tool {
+ name: "validate_domain".to_string(),
+ description: "Validate domain name format".to_string(),
+ input_schema: serde_json::json!({
+ "type": "object",
+ "properties": {
+ "domain": {
+ "type": "string",
+ "description": "Domain to validate (e.g., 'example.com')"
+ }
+ },
+ "required": ["domain"]
+ }),
+ }
+ }
+}
+```
+
+### 3.3 Deployment Status Tools
+
+```rust
+pub struct GetDeploymentStatusTool;
+
+#[async_trait]
+impl ToolHandler for GetDeploymentStatusTool {
+    async fn execute(&self, args: Value, ctx: &ToolContext) -> Result<ToolContent, String> {
+ #[derive(Deserialize)]
+ struct Args {
+ deployment_id: i32,
+ }
+
+ let params: Args = serde_json::from_value(args)
+ .map_err(|e| format!("Invalid arguments: {}", e))?;
+
+ let deployment = db::deployment::fetch(&ctx.pg_pool, params.deployment_id)
+ .await
+ .map_err(|e| format!("Database error: {}", e))?;
+
+ Ok(ToolContent::Text {
+ text: serde_json::to_string(&deployment).unwrap(),
+ })
+ }
+
+ fn schema(&self) -> Tool {
+ Tool {
+ name: "get_deployment_status".to_string(),
+ description: "Get current deployment status and details".to_string(),
+ input_schema: serde_json::json!({
+ "type": "object",
+ "properties": {
+ "deployment_id": {
+ "type": "number",
+ "description": "Deployment ID"
+ }
+ },
+ "required": ["deployment_id"]
+ }),
+ }
+ }
+}
+```
+
+**Deliverables:**
+- [ ] Session context persistence
+- [ ] Domain validation tool
+- [ ] Port validation tool
+- [ ] Git repository parsing tool
+- [ ] Deployment status monitoring tool
+
+---
+
+## Phase 4: Security & Production (Week 7-8)
+
+### 4.1 Authentication & Authorization
+
+```rust
+// Reuse existing OAuth middleware
+// src/mcp/websocket.rs
+
+pub async fn mcp_websocket(
+ req: HttpRequest,
+ stream: web::Payload,
+    user: web::ReqData<Arc<models::User>>, // ← Injected by auth middleware
+    pg_pool: web::Data<PgPool>,
+) -> Result<HttpResponse, Error> {
+ // User is already authenticated via Bearer token
+ // Casbin rules apply: only admin/user roles can access MCP
+
+ let ws = McpWebSocket {
+ user: user.into_inner(),
+ session: McpSession::new(),
+ };
+ ws::start(ws, &req, stream)
+}
+```
+
+**Casbin Rules for MCP:**
+```sql
+-- migrations/20251228000000_casbin_mcp_rules.up.sql
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES
+ ('p', 'group_admin', '/mcp', 'GET', '', '', ''),
+ ('p', 'group_user', '/mcp', 'GET', '', '', '')
+ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+```
+
+### 4.2 Rate Limiting
+
+```rust
+// src/mcp/rate_limit.rs
+use std::collections::HashMap;
+use std::sync::{Arc, Mutex};
+use std::time::{Duration, Instant};
+
+pub struct RateLimiter {
+    limits: Arc<Mutex<HashMap<String, Vec<Instant>>>>,
+ max_requests: usize,
+ window: Duration,
+}
+
+impl RateLimiter {
+ pub fn new(max_requests: usize, window: Duration) -> Self {
+ Self {
+ limits: Arc::new(Mutex::new(HashMap::new())),
+ max_requests,
+ window,
+ }
+ }
+
+ pub fn check(&self, user_id: &str) -> Result<(), String> {
+ let mut limits = self.limits.lock().unwrap();
+ let now = Instant::now();
+
+ let requests = limits.entry(user_id.to_string()).or_insert_with(Vec::new);
+
+ // Remove expired entries
+ requests.retain(|&time| now.duration_since(time) < self.window);
+
+ if requests.len() >= self.max_requests {
+ return Err(format!(
+ "Rate limit exceeded: {} requests per {} seconds",
+ self.max_requests,
+ self.window.as_secs()
+ ));
+ }
+
+ requests.push(now);
+ Ok(())
+ }
+}
+
+// Usage in McpWebSocket
+impl McpWebSocket {
+ async fn handle_tools_call(&self, req: JsonRpcRequest) -> JsonRpcResponse {
+ // Rate limit: 100 tool calls per minute per user
+ if let Err(msg) = self.rate_limiter.check(&self.user.id) {
+ return JsonRpcResponse {
+ jsonrpc: "2.0".to_string(),
+ id: req.id,
+ result: None,
+ error: Some(JsonRpcError {
+ code: -32000,
+ message: msg,
+ data: None,
+ }),
+ };
+ }
+
+ // ... proceed with tool execution
+ }
+}
+```
+
+### 4.3 Error Handling & Logging
+
+```rust
+// Enhanced error responses with tracing
+impl McpWebSocket {
+ async fn handle_tools_call(&self, req: JsonRpcRequest) -> JsonRpcResponse {
+ let call_req: CallToolRequest = match serde_json::from_value(req.params.unwrap()) {
+ Ok(r) => r,
+ Err(e) => {
+ tracing::error!("Invalid tool call params: {:?}", e);
+ return JsonRpcResponse {
+ jsonrpc: "2.0".to_string(),
+ id: req.id,
+ result: None,
+ error: Some(JsonRpcError {
+ code: -32602,
+ message: "Invalid params".to_string(),
+ data: Some(serde_json::json!({ "error": e.to_string() })),
+ }),
+ };
+ }
+ };
+
+ let tool_span = tracing::info_span!("mcp_tool_call", tool = %call_req.name, user = %self.user.id);
+ let _enter = tool_span.enter();
+
+ match self.registry.get(&call_req.name) {
+ Some(handler) => {
+ match handler.execute(
+ call_req.arguments.unwrap_or(serde_json::json!({})),
+ &self.context(),
+ ).await {
+ Ok(content) => {
+ tracing::info!("Tool executed successfully");
+ JsonRpcResponse {
+ jsonrpc: "2.0".to_string(),
+ id: req.id,
+ result: Some(serde_json::to_value(CallToolResponse {
+ content: vec![content],
+ is_error: None,
+ }).unwrap()),
+ error: None,
+ }
+ }
+ Err(e) => {
+ tracing::error!("Tool execution failed: {}", e);
+ JsonRpcResponse {
+ jsonrpc: "2.0".to_string(),
+ id: req.id,
+ result: Some(serde_json::to_value(CallToolResponse {
+ content: vec![ToolContent::Text {
+ text: format!("Error: {}", e),
+ }],
+ is_error: Some(true),
+ }).unwrap()),
+ error: None,
+ }
+ }
+ }
+ }
+ None => {
+ tracing::warn!("Unknown tool requested: {}", call_req.name);
+ JsonRpcResponse {
+ jsonrpc: "2.0".to_string(),
+ id: req.id,
+ result: None,
+ error: Some(JsonRpcError {
+ code: -32601,
+ message: format!("Tool not found: {}", call_req.name),
+ data: None,
+ }),
+ }
+ }
+ }
+ }
+}
+```
+
+**Deliverables:**
+- [ ] Casbin rules for MCP endpoint
+- [ ] Rate limiting (100 calls/min per user)
+- [ ] Comprehensive error handling
+- [ ] Structured logging with tracing
+- [ ] Input validation for all tools
+
+---
+
+## Phase 5: Testing & Documentation (Week 9)
+
+### 5.1 Unit Tests
+
+```rust
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[tokio::test]
+ async fn test_create_project_tool() {
+ let tool = CreateProjectTool;
+ let ctx = create_test_context().await;
+
+ let args = serde_json::json!({
+ "name": "Test Project",
+ "apps": [{
+ "name": "web",
+ "dockerImage": { "repository": "nginx" }
+ }]
+ });
+
+ let result = tool.execute(args, &ctx).await;
+ assert!(result.is_ok());
+
+        let ToolContent::Text { text } = result.unwrap() else {
+            panic!("expected text content");
+        };
+ let project: models::Project = serde_json::from_str(&text).unwrap();
+ assert_eq!(project.name, "Test Project");
+ }
+
+ #[tokio::test]
+ async fn test_list_templates_tool() {
+ let tool = ListTemplatesTool;
+ let ctx = create_test_context().await;
+
+ let result = tool.execute(serde_json::json!({}), &ctx).await;
+ assert!(result.is_ok());
+ }
+}
+```
+
+### 5.2 Integration Tests
+
+```rust
+// tests/mcp_integration.rs
+use actix_web::test;
+use tokio_tungstenite::connect_async;
+
+#[actix_web::test]
+async fn test_mcp_websocket_connection() {
+ let app = spawn_app().await;
+
+ let ws_url = format!("ws://{}/mcp", app.address);
+ let (ws_stream, _) = connect_async(ws_url).await.unwrap();
+
+ // Send initialize request
+ let init_msg = serde_json::json!({
+ "jsonrpc": "2.0",
+ "id": 1,
+ "method": "initialize",
+ "params": {
+ "protocolVersion": "2024-11-05",
+ "capabilities": {}
+ }
+ });
+
+ // ... test flow
+}
+
+#[actix_web::test]
+async fn test_create_project_via_mcp() {
+ // Test full create project flow via MCP
+}
+```
+
+### 5.3 Documentation
+
+**API Documentation:**
+- Generate OpenAPI/Swagger spec for MCP tools
+- Document all tool schemas with examples
+- Create integration guide for frontend developers
+
+**Example Documentation:**
+````markdown
+## MCP Tool: create_project
+
+**Description**: Create a new application stack project
+
+**Parameters:**
+```json
+{
+ "name": "My WordPress Site",
+ "apps": [
+ {
+ "name": "wordpress",
+ "dockerImage": {
+ "repository": "wordpress",
+ "tag": "latest"
+ },
+ "resources": {
+ "cpu": 2,
+ "ram": 4,
+ "storage": 20
+ },
+ "ports": [
+ { "hostPort": 80, "containerPort": 80 }
+ ]
+ }
+ ]
+}
+```
+
+**Response:**
+```json
+{
+ "id": 123,
+ "name": "My WordPress Site",
+ "user_id": "user_abc",
+ "created_at": "2025-12-27T10:00:00Z",
+ ...
+}
+```
+````
+
+**Deliverables:**
+- [ ] Unit tests for all tools (>80% coverage)
+- [ ] Integration tests for WebSocket connection
+- [ ] End-to-end tests for tool execution flow
+- [ ] API documentation (MCP tool schemas)
+- [ ] Integration guide for frontend
+
+---
+
+## Deployment Configuration
+
+### Update `startup.rs`
+
+```rust
+// src/startup.rs
+use crate::mcp;
+
+pub async fn run(
+ listener: TcpListener,
+ pg_pool: Pool,
+ settings: Settings,
+) -> Result<Server, std::io::Error> {
+ // ... existing setup ...
+
+ // Initialize MCP registry
+ let mcp_registry = web::Data::new(mcp::ToolRegistry::new());
+
+ let server = HttpServer::new(move || {
+ App::new()
+ // ... existing middleware and routes ...
+
+ // Add MCP WebSocket endpoint
+ .service(
+ web::resource("/mcp")
+ .route(web::get().to(mcp::mcp_websocket))
+ )
+ .app_data(mcp_registry.clone())
+ })
+ .listen(listener)?
+ .run();
+
+ Ok(server)
+}
+```
+
+### Update `Cargo.toml`
+
+```toml
+[dependencies]
+tokio-tungstenite = "0.21"
+uuid = { version = "1.0", features = ["v4", "serde"] }
+async-trait = "0.1"
+regex = "1.10"
+
+# Consider adding MCP SDK if available
+# mcp-server = "0.1" # Hypothetical official SDK
+```
+
+---
+
+## Monitoring & Metrics
+
+### Key Metrics to Track
+
+```rust
+// src/mcp/metrics.rs
+use prometheus::{IntCounterVec, HistogramVec, Registry};
+
+pub struct McpMetrics {
+ pub tool_calls_total: IntCounterVec,
+ pub tool_duration: HistogramVec,
+ pub websocket_connections: IntCounterVec,
+ pub errors_total: IntCounterVec,
+}
+
+impl McpMetrics {
+ pub fn new(registry: &Registry) -> Self {
+ let tool_calls_total = IntCounterVec::new(
+ prometheus::Opts::new("mcp_tool_calls_total", "Total MCP tool calls"),
+ &["tool", "user_id", "status"]
+ ).unwrap();
+ registry.register(Box::new(tool_calls_total.clone())).unwrap();
+
+ // ... register other metrics
+
+ Self {
+ tool_calls_total,
+ // ...
+ }
+ }
+}
+```
+
+**Metrics to expose:**
+- `mcp_tool_calls_total{tool, user_id, status}` - Counter
+- `mcp_tool_duration_seconds{tool}` - Histogram
+- `mcp_websocket_connections_active` - Gauge
+- `mcp_errors_total{tool, error_type}` - Counter
+
+---
+
+## Complete Tool List (Initial Release)
+
+### Project Management (7 tools)
+1. ✅ `create_project` - Create new project
+2. ✅ `list_projects` - List user's projects
+3. ✅ `get_project` - Get project details
+4. ✅ `update_project` - Update project
+5. ✅ `delete_project` - Delete project
+6. ✅ `generate_compose` - Generate docker-compose.yml
+7. ✅ `deploy_project` - Deploy to cloud
+
+### Template & Discovery (3 tools)
+8. ✅ `list_templates` - List available templates
+9. ✅ `get_template` - Get template details
+10. ✅ `suggest_resources` - Suggest resource limits
+
+### Cloud Management (2 tools)
+11. ✅ `list_clouds` - List cloud providers
+12. ✅ `add_cloud` - Add cloud credentials
+
+### Validation (3 tools)
+13. ✅ `validate_domain` - Validate domain format
+14. ✅ `validate_ports` - Validate port configuration
+15. ✅ `parse_git_repo` - Parse Git repository URL
+
+### Deployment (2 tools)
+16. ✅ `list_deployments` - List deployments
+17. ✅ `get_deployment_status` - Get deployment status
+
+**Total: 17 tools for MVP**
+
+---
+
+## Success Criteria
+
+### Functional Requirements
+- [ ] All 17 tools implemented and tested
+- [ ] WebSocket connection stable for >1 hour
+- [ ] Handle 100 concurrent WebSocket connections
+- [ ] Rate limiting prevents abuse
+- [ ] Authentication/authorization enforced
+
+### Performance Requirements
+- [ ] Tool execution <500ms (p95)
+- [ ] WebSocket latency <50ms
+- [ ] Support 10 tool calls/second per user
+- [ ] No memory leaks in long-running sessions
+
+### Security Requirements
+- [ ] OAuth authentication required
+- [ ] Casbin ACL enforced
+- [ ] Input validation on all parameters
+- [ ] SQL injection protection (via sqlx)
+- [ ] Rate limiting (100 calls/min per user)
+
+---
+
+## Migration Path
+
+1. **Week 1-2**: Core protocol + 3 basic tools (create_project, list_projects, list_templates)
+2. **Week 3-4**: All 17 tools implemented
+3. **Week 5-6**: Advanced features (validation, suggestions)
+4. **Week 7-8**: Security hardening + production readiness
+5. **Week 9**: Testing + documentation
+6. **Week 10**: Beta release with frontend integration
+
+---
+
+## Questions & Decisions
+
+### Open Questions
+1. **Session persistence**: Store in PostgreSQL or Redis?
+ - **Recommendation**: Redis for ephemeral session data
+
+2. **Tool versioning**: How to handle breaking changes?
+ - **Recommendation**: Version in tool name (`create_project_v1`)
+
+3. **Error recovery**: Retry failed tool calls?
+ - **Recommendation**: Let AI/client decide on retry
+
+### Technical Decisions
+- ✅ Use tokio-tungstenite for WebSocket
+- ✅ JSON-RPC 2.0 over WebSocket (not HTTP SSE)
+- ✅ Reuse existing auth middleware
+- ✅ Store sessions in memory (move to Redis later)
+- ✅ Rate limit at WebSocket level (not per-tool)
+
+---
+
+## Contact & Resources
+
+**References:**
+- MCP Specification: https://spec.modelcontextprotocol.io/
+- MCP reference server implementations: https://github.com/modelcontextprotocol/servers
+- Actix WebSocket: https://actix.rs/docs/websockets/
+
+**Team Contacts:**
+- Backend Lead: [Your Name]
+- Frontend Integration: [Frontend Lead]
+- DevOps: [DevOps Contact]
diff --git a/docs/MCP_SERVER_FRONTEND_INTEGRATION.md b/docs/MCP_SERVER_FRONTEND_INTEGRATION.md
new file mode 100644
index 0000000..c23eda7
--- /dev/null
+++ b/docs/MCP_SERVER_FRONTEND_INTEGRATION.md
@@ -0,0 +1,1355 @@
+# MCP Server Frontend Integration Guide
+
+## Overview
+This document provides comprehensive guidance for integrating the Stacker MCP (Model Context Protocol) server with the ReactJS Stack Builder frontend. The integration enables an AI-powered chat assistant that helps users build and deploy application stacks through natural language interactions.
+
+## Architecture Overview
+
+```
+┌──────────────────────────────────────────────────────────────┐
+│ React Frontend (Stack Builder UI) │
+│ │
+│ ┌────────────────┐ ┌──────────────────────────┐ │
+│ │ Project Form │◄────────┤ AI Chat Assistant │ │
+│ │ - Name │ fills │ - Chat Messages │ │
+│ │ - Services │◄────────┤ - Input Box │ │
+│ │ - Resources │ │ - Context Display │ │
+│ │ - Domains │ │ - Suggestions │ │
+│ └────────────────┘ └──────────────────────────┘ │
+│ │ │ │
+│ │ │ │
+│ └──────────┬───────────────────┘ │
+│ │ │
+│ ┌───────▼───────┐ │
+│ │ MCP Client │ │
+│ │ (WebSocket) │ │
+│ └───────────────┘ │
+│ │ │
+└────────────────────┼─────────────────────────────────────────┘
+ │ WebSocket (JSON-RPC 2.0)
+ ▼
+┌──────────────────────────────────────────────────────────────┐
+│ Stacker Backend (MCP Server) │
+│ - Tool Registry (17+ tools) │
+│ - Session Management │
+│ - OAuth Authentication │
+└──────────────────────────────────────────────────────────────┘
+```
+
+## Technology Stack
+
+### Core Dependencies
+
+```json
+{
+ "dependencies": {
+ "@modelcontextprotocol/sdk": "^0.5.0",
+ "react": "^18.2.0",
+ "react-dom": "^18.2.0",
+ "zustand": "^4.4.0",
+ "@tanstack/react-query": "^5.0.0",
+ "ws": "^8.16.0"
+ },
+ "devDependencies": {
+ "@types/react": "^18.2.0",
+ "@types/ws": "^8.5.0",
+ "typescript": "^5.0.0"
+ }
+}
+```
+
+### TypeScript Configuration
+
+```json
+{
+ "compilerOptions": {
+ "target": "ES2020",
+ "lib": ["ES2020", "DOM", "DOM.Iterable"],
+ "jsx": "react-jsx",
+ "module": "ESNext",
+ "moduleResolution": "bundler",
+ "resolveJsonModule": true,
+ "allowJs": true,
+ "strict": true,
+ "esModuleInterop": true,
+ "skipLibCheck": true
+ }
+}
+```
+
+---
+
+## Phase 1: MCP Client Setup (Week 1)
+
+### 1.1 WebSocket Client
+
+```typescript
+// src/lib/mcp/client.ts
+import { Client } from '@modelcontextprotocol/sdk/client/index.js';
+import { WebSocketClientTransport } from '@modelcontextprotocol/sdk/client/websocket.js';
+
+export interface McpClientConfig {
+ url: string;
+ authToken: string;
+}
+
+export class StackerMcpClient {
+ private client: Client | null = null;
+ private transport: WebSocketClientTransport | null = null;
+ private config: McpClientConfig;
+
+ constructor(config: McpClientConfig) {
+ this.config = config;
+ }
+
+  async connect(): Promise<void> {
+    // Create WebSocket transport with auth headers.
+    // Note: browser WebSocket connections cannot set arbitrary headers; if the SDK
+    // transport does not forward them, pass the token via query string or subprotocol.
+    this.transport = new WebSocketClientTransport(
+      new URL(this.config.url),
+      {
+        headers: {
+          'Authorization': `Bearer ${this.config.authToken}`
+        }
+      }
+    );
+
+ // Initialize MCP client
+ this.client = new Client(
+ {
+ name: 'stacker-ui',
+ version: '1.0.0',
+ },
+ {
+ capabilities: {
+ tools: {}
+ }
+ }
+ );
+
+ // Connect to server
+ await this.client.connect(this.transport);
+
+ console.log('MCP client connected');
+ }
+
+  async disconnect(): Promise<void> {
+ if (this.client) {
+ await this.client.close();
+ this.client = null;
+ }
+ if (this.transport) {
+ await this.transport.close();
+ this.transport = null;
+ }
+ }
+
+  async listTools(): Promise<Array<{ name: string; description?: string; inputSchema?: unknown }>> {
+ if (!this.client) {
+ throw new Error('MCP client not connected');
+ }
+
+ const response = await this.client.listTools();
+ return response.tools;
+ }
+
+ async callTool(
+ name: string,
+    args: Record<string, unknown>
+ ): Promise<{
+ content: Array<{ type: string; text?: string; data?: string }>;
+ isError?: boolean;
+ }> {
+ if (!this.client) {
+ throw new Error('MCP client not connected');
+ }
+
+ const response = await this.client.callTool({
+ name,
+ arguments: args
+ });
+
+ return response;
+ }
+
+ isConnected(): boolean {
+ return this.client !== null;
+ }
+}
+```
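+
+For orientation, a minimal usage sketch of the wrapper above follows; `getAccessToken` is an assumed helper standing in for whatever your auth layer exposes.
+
+```typescript
+// Hypothetical usage sketch of StackerMcpClient (getAccessToken is an assumed helper).
+import { StackerMcpClient } from '@/lib/mcp/client';
+
+async function listAvailableTools(getAccessToken: () => string) {
+  const client = new StackerMcpClient({
+    url: 'ws://localhost:8000/mcp',
+    authToken: getAccessToken(),
+  });
+
+  await client.connect();
+  try {
+    const tools = await client.listTools();
+    console.log('Available MCP tools:', tools.map((t) => t.name));
+  } finally {
+    await client.disconnect();
+  }
+}
+```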
+
+### 1.2 MCP Context Provider
+
+```typescript
+// src/contexts/McpContext.tsx
+import React, { createContext, useContext, useEffect, useState } from 'react';
+import { StackerMcpClient } from '@/lib/mcp/client';
+import { useAuth } from '@/hooks/useAuth';
+
+interface McpContextValue {
+ client: StackerMcpClient | null;
+ isConnected: boolean;
+ error: string | null;
+  reconnect: () => Promise<void>;
+}
+
+const McpContext = createContext<McpContextValue | undefined>(undefined);
+
+export const McpProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => {
+ const { token } = useAuth();
+  const [client, setClient] = useState<StackerMcpClient | null>(null);
+  const [isConnected, setIsConnected] = useState(false);
+  const [error, setError] = useState<string | null>(null);
+
+ const connect = async () => {
+ if (!token) {
+ setError('Authentication required');
+ return;
+ }
+
+ try {
+ const mcpClient = new StackerMcpClient({
+ url: process.env.REACT_APP_MCP_URL || 'ws://localhost:8000/mcp',
+ authToken: token
+ });
+
+ await mcpClient.connect();
+ setClient(mcpClient);
+ setIsConnected(true);
+ setError(null);
+ } catch (err) {
+ setError(err instanceof Error ? err.message : 'Connection failed');
+ setIsConnected(false);
+ }
+ };
+
+ const reconnect = async () => {
+ if (client) {
+ await client.disconnect();
+ }
+ await connect();
+ };
+
+ useEffect(() => {
+ connect();
+
+ return () => {
+ if (client) {
+ client.disconnect();
+ }
+ };
+ }, [token]);
+
+  return (
+    <McpContext.Provider value={{ client, isConnected, error, reconnect }}>
+      {children}
+    </McpContext.Provider>
+  );
+};
+
+export const useMcp = () => {
+ const context = useContext(McpContext);
+ if (!context) {
+ throw new Error('useMcp must be used within McpProvider');
+ }
+ return context;
+};
+```
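+
+The provider above only reconnects on demand; a small retry helper with exponential backoff can be layered on top of its `reconnect()`. This is a sketch under the assumption that transient WebSocket drops surface as a rejected `reconnect()` call.
+
+```typescript
+// Sketch: retry reconnect() with exponential backoff (assumes reconnect() rejects on failure).
+export async function reconnectWithBackoff(
+  reconnect: () => Promise<void>,
+  maxAttempts = 5
+): Promise<boolean> {
+  for (let attempt = 0; attempt < maxAttempts; attempt++) {
+    try {
+      await reconnect();
+      return true; // connection restored
+    } catch {
+      // wait 1s, 2s, 4s, ... before the next attempt
+      await new Promise((resolve) => setTimeout(resolve, 1000 * 2 ** attempt));
+    }
+  }
+  return false; // give up after maxAttempts
+}
+```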
+
+### 1.3 Connection Setup in App
+
+```typescript
+// src/App.tsx
+import { McpProvider } from '@/contexts/McpContext';
+import { AuthProvider } from '@/contexts/AuthContext';
+import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
+
+const queryClient = new QueryClient();
+
+function App() {
+  return (
+    <QueryClientProvider client={queryClient}>
+      <AuthProvider>
+        <McpProvider>
+          {/* application routes/pages go here */}
+        </McpProvider>
+      </AuthProvider>
+    </QueryClientProvider>
+  );
+}
+
+export default App;
+```
+
+---
+
+## Phase 2: Chat Interface Components (Week 2)
+
+### 2.1 Chat Message Types
+
+```typescript
+// src/types/chat.ts
+export interface ChatMessage {
+ id: string;
+ role: 'user' | 'assistant' | 'system';
+ content: string;
+ timestamp: Date;
+ toolCalls?: ToolCall[];
+ metadata?: {
+ projectId?: number;
+ step?: number;
+ suggestions?: string[];
+ };
+}
+
+export interface ToolCall {
+ id: string;
+ toolName: string;
+  arguments: Record<string, unknown>;
+ result?: {
+ success: boolean;
+ data?: any;
+ error?: string;
+ };
+ status: 'pending' | 'completed' | 'failed';
+}
+
+export interface ChatContext {
+ currentProject?: {
+ id?: number;
+ name?: string;
+ apps?: any[];
+ step?: number;
+ };
+ lastAction?: string;
+ availableTools?: string[];
+}
+```
+
+### 2.2 Chat Store (Zustand)
+
+```typescript
+// src/stores/chatStore.ts
+import { create } from 'zustand';
+import { ChatMessage, ChatContext } from '@/types/chat';
+
+interface ChatStore {
+ messages: ChatMessage[];
+ context: ChatContext;
+ isProcessing: boolean;
+
+  addMessage: (message: Omit<ChatMessage, 'id' | 'timestamp'>) => void;
+  updateMessage: (id: string, updates: Partial<ChatMessage>) => void;
+  clearMessages: () => void;
+  setContext: (context: Partial<ChatContext>) => void;
+ setProcessing: (processing: boolean) => void;
+}
+
+export const useChatStore = create<ChatStore>((set) => ({
+ messages: [],
+ context: {},
+ isProcessing: false,
+
+ addMessage: (message) =>
+ set((state) => ({
+ messages: [
+ ...state.messages,
+ {
+ ...message,
+ id: crypto.randomUUID(),
+ timestamp: new Date(),
+ },
+ ],
+ })),
+
+ updateMessage: (id, updates) =>
+ set((state) => ({
+ messages: state.messages.map((msg) =>
+ msg.id === id ? { ...msg, ...updates } : msg
+ ),
+ })),
+
+ clearMessages: () => set({ messages: [], context: {} }),
+
+ setContext: (context) =>
+ set((state) => ({
+ context: { ...state.context, ...context },
+ })),
+
+ setProcessing: (processing) => set({ isProcessing: processing }),
+}));
+```
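+
+Because Zustand stores also work outside React components, the same store can be driven imperatively; the assistant hook in Phase 3 relies on this via `useChatStore.getState()`. A quick sketch:
+
+```typescript
+// Imperative access to the chat store from non-component code.
+import { useChatStore } from '@/stores/chatStore';
+
+export function logSystemNotice(text: string) {
+  useChatStore.getState().addMessage({
+    role: 'system',
+    content: text,
+  });
+}
+```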
+
+### 2.3 Chat Sidebar Component
+
+```tsx
+// src/components/chat/ChatSidebar.tsx
+import React, { useRef, useEffect } from 'react';
+import { useChatStore } from '@/stores/chatStore';
+import { ChatMessage } from './ChatMessage';
+import { ChatInput } from './ChatInput';
+import { ChatHeader } from './ChatHeader';
+
+export const ChatSidebar: React.FC = () => {
+ const messages = useChatStore((state) => state.messages);
+  const messagesEndRef = useRef<HTMLDivElement>(null);
+
+ useEffect(() => {
+ messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' });
+ }, [messages]);
+
+  return (
+    <div className="flex h-full flex-col">
+      <ChatHeader />
+
+      <div className="flex-1 overflow-y-auto p-4 space-y-4">
+        {messages.length === 0 ? (
+          <div className="mt-8 text-center text-gray-500">
+            <p className="font-medium">Ask me anything!</p>
+            <p className="text-sm">
+              I can help you create projects, suggest configurations,
+              and deploy your applications to the cloud.
+            </p>
+          </div>
+        ) : (
+          messages.map((message) => (
+            <ChatMessage key={message.id} message={message} />
+          ))
+        )}
+        <div ref={messagesEndRef} />
+      </div>
+
+      <ChatInput />
+    </div>
+  );
+};
+```
+
+### 2.4 Chat Message Component
+
+```tsx
+// src/components/chat/ChatMessage.tsx
+import React from 'react';
+import { ChatMessage as ChatMessageType } from '@/types/chat';
+import { ToolCallDisplay } from './ToolCallDisplay';
+import ReactMarkdown from 'react-markdown';
+
+interface Props {
+ message: ChatMessageType;
+}
+
+export const ChatMessage: React.FC<Props> = ({ message }) => {
+  const isUser = message.role === 'user';
+
+  return (
+    <div className={`flex ${isUser ? 'justify-end' : 'justify-start'}`}>
+      <div className={`max-w-[85%] rounded-lg p-3 ${isUser ? 'bg-blue-600 text-white' : 'bg-gray-100'}`}>
+        {!isUser && (
+          <span className="text-xs font-semibold text-gray-500">Assistant</span>
+        )}
+
+        <ReactMarkdown>
+          {message.content}
+        </ReactMarkdown>
+
+        {message.toolCalls && message.toolCalls.length > 0 && (
+          <div className="mt-2 space-y-1">
+            {message.toolCalls.map((toolCall) => (
+              <ToolCallDisplay key={toolCall.id} toolCall={toolCall} />
+            ))}
+          </div>
+        )}
+
+        <div className="mt-1 text-xs opacity-70">
+          {message.timestamp.toLocaleTimeString()}
+        </div>
+      </div>
+    </div>
+  );
+};
+```
+
+### 2.5 Chat Input Component
+
+```tsx
+// src/components/chat/ChatInput.tsx
+import React, { useState } from 'react';
+import { useChatStore } from '@/stores/chatStore';
+import { useAiAssistant } from '@/hooks/useAiAssistant';
+
+export const ChatInput: React.FC = () => {
+ const [input, setInput] = useState('');
+ const isProcessing = useChatStore((state) => state.isProcessing);
+ const { sendMessage } = useAiAssistant();
+
+ const handleSubmit = async (e: React.FormEvent) => {
+ e.preventDefault();
+ if (!input.trim() || isProcessing) return;
+
+ await sendMessage(input);
+ setInput('');
+ };
+
+  return (
+    <form onSubmit={handleSubmit} className="border-t p-3">
+      <div className="flex gap-2">
+        <input
+          value={input}
+          onChange={(e) => setInput(e.target.value)}
+          placeholder="Ask me to create a project, suggest resources..."
+          disabled={isProcessing}
+          className="flex-1 rounded-lg border px-3 py-2 text-sm"
+        />
+        <button
+          type="submit"
+          disabled={isProcessing || !input.trim()}
+          className="rounded-lg bg-blue-600 px-4 py-2 text-sm text-white disabled:opacity-50"
+        >
+          Send
+        </button>
+      </div>
+      <div className="mt-2 flex flex-wrap gap-2">
+        <QuickAction action="Create a WordPress project" />
+        <QuickAction action="List my projects" />
+      </div>
+    </form>
+  );
+ );
+};
+
+const QuickAction: React.FC<{ action: string }> = ({ action }) => {
+ const { sendMessage } = useAiAssistant();
+
+ return (
+    <button
+      onClick={() => sendMessage(action)}
+      className="text-xs px-3 py-1 bg-gray-100 hover:bg-gray-200 rounded-full text-gray-700"
+    >
+      {action}
+    </button>
+ );
+};
+```
+
+---
+
+## Phase 3: AI Assistant Hook (Week 3)
+
+### 3.1 AI Assistant Logic
+
+```typescript
+// src/hooks/useAiAssistant.ts
+import { useMcp } from '@/contexts/McpContext';
+import { useChatStore } from '@/stores/chatStore';
+import { OpenAI } from 'openai';
+
+const openai = new OpenAI({
+ apiKey: process.env.REACT_APP_OPENAI_API_KEY,
+ dangerouslyAllowBrowser: true // Only for demo; use backend proxy in production
+});
+
+export const useAiAssistant = () => {
+ const { client } = useMcp();
+ const addMessage = useChatStore((state) => state.addMessage);
+ const updateMessage = useChatStore((state) => state.updateMessage);
+ const setProcessing = useChatStore((state) => state.setProcessing);
+ const context = useChatStore((state) => state.context);
+ const messages = useChatStore((state) => state.messages);
+
+ const sendMessage = async (userMessage: string) => {
+ if (!client?.isConnected()) {
+ addMessage({
+ role: 'system',
+ content: 'MCP connection lost. Please refresh the page.',
+ });
+ return;
+ }
+
+ // Add user message
+ addMessage({
+ role: 'user',
+ content: userMessage,
+ });
+
+ setProcessing(true);
+
+ try {
+ // Get available tools from MCP server
+ const tools = await client.listTools();
+
+ // Convert MCP tools to OpenAI function format
+ const openaiTools = tools.map((tool) => ({
+ type: 'function' as const,
+ function: {
+ name: tool.name,
+ description: tool.description,
+ parameters: tool.inputSchema,
+ },
+ }));
+
+ // Build conversation history for OpenAI
+ const conversationMessages = [
+ {
+ role: 'system' as const,
+ content: buildSystemPrompt(context),
+ },
+ ...messages.slice(-10).map((msg) => ({
+ role: msg.role as 'user' | 'assistant',
+ content: msg.content,
+ })),
+ {
+ role: 'user' as const,
+ content: userMessage,
+ },
+ ];
+
+ // Call OpenAI with tools
+ const response = await openai.chat.completions.create({
+ model: 'gpt-4-turbo-preview',
+ messages: conversationMessages,
+ tools: openaiTools,
+ tool_choice: 'auto',
+ });
+
+ const assistantMessage = response.choices[0].message;
+
+ // Handle tool calls
+ if (assistantMessage.tool_calls) {
+        // NOTE: addMessage generates its own id, so this messageId never matches a stored
+        // message; have addMessage return (or accept) the id so the updateMessage calls
+        // below can find the message to update.
+        const messageId = crypto.randomUUID();
+
+ addMessage({
+ role: 'assistant',
+ content: 'Let me help you with that...',
+ toolCalls: assistantMessage.tool_calls.map((tc) => ({
+ id: tc.id,
+ toolName: tc.function.name,
+ arguments: JSON.parse(tc.function.arguments),
+ status: 'pending' as const,
+ })),
+ });
+
+ // Execute tools via MCP
+ for (const toolCall of assistantMessage.tool_calls) {
+ try {
+ const result = await client.callTool(
+ toolCall.function.name,
+ JSON.parse(toolCall.function.arguments)
+ );
+
+ updateMessage(messageId, {
+ toolCalls: assistantMessage.tool_calls.map((tc) =>
+ tc.id === toolCall.id
+ ? {
+ id: tc.id,
+ toolName: tc.function.name,
+ arguments: JSON.parse(tc.function.arguments),
+ result: {
+ success: !result.isError,
+ data: result.content[0].text,
+ },
+ status: 'completed' as const,
+ }
+ : tc
+ ),
+ });
+
+ // Parse result and update context
+ if (toolCall.function.name === 'create_project' && result.content[0].text) {
+ const project = JSON.parse(result.content[0].text);
+ useChatStore.getState().setContext({
+ currentProject: {
+ id: project.id,
+ name: project.name,
+ apps: project.apps,
+ },
+ });
+ }
+ } catch (error) {
+ updateMessage(messageId, {
+ toolCalls: assistantMessage.tool_calls.map((tc) =>
+ tc.id === toolCall.id
+ ? {
+ id: tc.id,
+ toolName: tc.function.name,
+ arguments: JSON.parse(tc.function.arguments),
+ result: {
+ success: false,
+ error: error instanceof Error ? error.message : 'Unknown error',
+ },
+ status: 'failed' as const,
+ }
+ : tc
+ ),
+ });
+ }
+ }
+
+ // Get final response after tool execution
+ const finalResponse = await openai.chat.completions.create({
+ model: 'gpt-4-turbo-preview',
+ messages: [
+ ...conversationMessages,
+ assistantMessage,
+ ...assistantMessage.tool_calls.map((tc) => ({
+ role: 'tool' as const,
+ tool_call_id: tc.id,
+ content: 'Tool executed successfully',
+ })),
+ ],
+ });
+
+ addMessage({
+ role: 'assistant',
+ content: finalResponse.choices[0].message.content || 'Done!',
+ });
+ } else {
+ // No tool calls, just add assistant response
+ addMessage({
+ role: 'assistant',
+ content: assistantMessage.content || 'I understand. How can I help further?',
+ });
+ }
+ } catch (error) {
+ addMessage({
+ role: 'system',
+ content: `Error: ${error instanceof Error ? error.message : 'Unknown error'}`,
+ });
+ } finally {
+ setProcessing(false);
+ }
+ };
+
+ return { sendMessage };
+};
+
+function buildSystemPrompt(context: any): string {
+ return `You are an AI assistant for the Stacker platform, helping users build and deploy Docker-based application stacks.
+
+Current context:
+${context.currentProject ? `- Working on project: "${context.currentProject.name}" (ID: ${context.currentProject.id})` : '- No active project'}
+${context.lastAction ? `- Last action: ${context.lastAction}` : ''}
+
+You can help users with:
+1. Creating new projects with multiple services
+2. Suggesting appropriate resource limits (CPU, RAM, storage)
+3. Listing available templates (WordPress, Node.js, Django, etc.)
+4. Deploying projects to cloud providers
+5. Managing cloud credentials
+6. Validating domains and ports
+
+Always be helpful, concise, and guide users through multi-step processes one step at a time.
+When creating projects, ask for all necessary details before calling the create_project tool.`;
+}
+```
+
+---
+
+## Phase 4: Form Integration (Week 4)
+
+### 4.1 Enhanced Project Form with AI
+
+```tsx
+// src/components/project/ProjectFormWithAI.tsx
+import React, { useState } from 'react';
+import { useChatStore } from '@/stores/chatStore';
+import { ChatSidebar } from '@/components/chat/ChatSidebar';
+import { ProjectForm } from '@/components/project/ProjectForm';
+
+export const ProjectFormWithAI: React.FC = () => {
+ const [showChat, setShowChat] = useState(true);
+ const context = useChatStore((state) => state.context);
+
+ // Auto-fill form from AI context
+ const formData = context.currentProject || {
+ name: '',
+ apps: [],
+ };
+
+  return (
+    <div className="flex h-screen">
+      {/* Main Form Area */}
+      <div className="flex-1 overflow-y-auto">
+        <div className="p-6">
+          <div className="mb-6 flex items-center justify-between">
+            <h1 className="text-2xl font-bold">Create New Project</h1>
+            <button
+              onClick={() => setShowChat(!showChat)}
+              className="flex items-center gap-2 px-4 py-2 bg-blue-600 text-white rounded-lg hover:bg-blue-700"
+            >
+              {showChat ? 'Hide' : 'Show'} AI Assistant
+            </button>
+          </div>
+
+          <ProjectForm initialData={formData} /> {/* prefill prop name is assumed */}
+        </div>
+      </div>
+
+      {/* Chat Sidebar */}
+      {showChat && (
+        <div className="w-96 border-l">
+          <ChatSidebar />
+        </div>
+      )}
+    </div>
+  );
+};
+```
+
+### 4.2 Progressive Form Steps
+
+```tsx
+// src/components/project/ProgressiveProjectForm.tsx
+import React, { useState } from 'react';
+import { useAiAssistant } from '@/hooks/useAiAssistant';
+import { useChatStore } from '@/stores/chatStore';
+
+const STEPS = [
+ { id: 1, name: 'Basic Info', description: 'Project name and description' },
+ { id: 2, name: 'Services', description: 'Add applications and Docker images' },
+ { id: 3, name: 'Resources', description: 'Configure CPU, RAM, and storage' },
+ { id: 4, name: 'Networking', description: 'Set up domains and ports' },
+ { id: 5, name: 'Review', description: 'Review and deploy' },
+];
+
+export const ProgressiveProjectForm: React.FC = () => {
+ const [currentStep, setCurrentStep] = useState(1);
+ const context = useChatStore((state) => state.context);
+ const { sendMessage } = useAiAssistant();
+
+ const project = context.currentProject || {
+ name: '',
+ description: '',
+ apps: [],
+ };
+
+ const handleAiSuggestion = (prompt: string) => {
+ sendMessage(prompt);
+ };
+
+  return (
+    <div className="mx-auto max-w-4xl p-6">
+      {/* Progress Stepper */}
+      <div className="mb-8">
+        <ol className="flex items-center justify-between">
+          {STEPS.map((step) => (
+            <li key={step.id} className="flex items-center gap-3">
+              <span
+                className={`flex h-8 w-8 items-center justify-center rounded-full text-sm ${
+                  step.id <= currentStep ? 'bg-blue-600 text-white' : 'bg-gray-200 text-gray-600'
+                }`}
+              >
+                {step.id < currentStep ? '✓' : step.id}
+              </span>
+              <span>
+                <span className="block text-sm font-medium">{step.name}</span>
+                <span className="block text-xs text-gray-500">{step.description}</span>
+              </span>
+            </li>
+          ))}
+        </ol>
+      </div>
+
+      {/* AI Suggestions */}
+      <div className="mb-6 rounded-lg bg-blue-50 p-4">
+        <p className="text-sm font-medium text-blue-900">
+          AI Suggestion for Step {currentStep}:
+        </p>
+        {currentStep === 1 && (
+          <button
+            onClick={() => handleAiSuggestion('Suggest a good name for a WordPress blog project')}
+            className="text-sm text-blue-700 hover:underline"
+          >
+            Need help naming your project?
+          </button>
+        )}
+        {currentStep === 2 && (
+          <button
+            onClick={() => handleAiSuggestion('What services do I need for a typical WordPress site?')}
+            className="text-sm text-blue-700 hover:underline"
+          >
+            Not sure which services to add?
+          </button>
+        )}
+        {currentStep === 3 && (
+          <button
+            onClick={() =>
+              handleAiSuggestion(
+                `Suggest appropriate resources for ${project.apps.length} services: ${project.apps.map((a: any) => a.name).join(', ')}`
+              )
+            }
+            className="text-sm text-blue-700 hover:underline"
+          >
+            Let AI suggest resource limits
+          </button>
+        )}
+      </div>
+
+      {/* Step Content: render the form component for the current step */}
+      <div className="mb-8">
+        {currentStep === 1 && <div>{/* basic info fields */}</div>}
+        {currentStep === 2 && <div>{/* services / docker image fields */}</div>}
+        {currentStep === 3 && <div>{/* resource limit fields */}</div>}
+        {currentStep === 4 && <div>{/* domain and port fields */}</div>}
+        {currentStep === 5 && <div>{/* review and deploy summary */}</div>}
+      </div>
+
+      {/* Navigation */}
+      <div className="flex justify-between">
+        <button
+          onClick={() => setCurrentStep(Math.max(1, currentStep - 1))}
+          disabled={currentStep === 1}
+          className="px-6 py-2 border border-gray-300 rounded-lg hover:bg-gray-50 disabled:opacity-50"
+        >
+          Previous
+        </button>
+        <button
+          onClick={() => setCurrentStep(Math.min(STEPS.length, currentStep + 1))}
+          disabled={currentStep === STEPS.length}
+          className="px-6 py-2 bg-blue-600 text-white rounded-lg hover:bg-blue-700 disabled:opacity-50"
+        >
+          {currentStep === STEPS.length ? 'Deploy' : 'Next'}
+        </button>
+      </div>
+    </div>
+  );
+};
+```
+
+---
+
+## Phase 5: Testing & Optimization (Week 5)
+
+### 5.1 Unit Tests
+
+```typescript
+// src/lib/mcp/__tests__/client.test.ts
+import { describe, it, expect, beforeEach, afterEach } from 'vitest';
+import { StackerMcpClient } from '../client';
+
+describe('StackerMcpClient', () => {
+ let client: StackerMcpClient;
+
+ beforeEach(() => {
+ client = new StackerMcpClient({
+ url: 'ws://localhost:8000/mcp',
+ authToken: 'test-token',
+ });
+ });
+
+ afterEach(async () => {
+ if (client.isConnected()) {
+ await client.disconnect();
+ }
+ });
+
+ it('should connect successfully', async () => {
+ await client.connect();
+ expect(client.isConnected()).toBe(true);
+ });
+
+ it('should list available tools', async () => {
+ await client.connect();
+ const tools = await client.listTools();
+
+ expect(tools).toBeInstanceOf(Array);
+ expect(tools.length).toBeGreaterThan(0);
+ expect(tools[0]).toHaveProperty('name');
+ expect(tools[0]).toHaveProperty('description');
+ });
+
+ it('should call create_project tool', async () => {
+ await client.connect();
+
+ const result = await client.callTool('create_project', {
+ name: 'Test Project',
+ apps: [
+ {
+ name: 'web',
+ dockerImage: { repository: 'nginx' },
+ },
+ ],
+ });
+
+ expect(result.content).toBeInstanceOf(Array);
+ expect(result.isError).toBeFalsy();
+ });
+});
+```
+
+### 5.2 Integration Tests
+
+```typescript
+// src/components/chat/__tests__/ChatSidebar.integration.test.tsx
+import { describe, it, expect } from 'vitest';
+import { render, screen, waitFor } from '@testing-library/react';
+import userEvent from '@testing-library/user-event';
+import { ChatSidebar } from '../ChatSidebar';
+import { McpProvider } from '@/contexts/McpContext';
+
+describe('ChatSidebar Integration', () => {
+ it('should send message and receive response', async () => {
+    render(
+      <McpProvider>
+        <ChatSidebar />
+      </McpProvider>
+    );
+
+ const input = screen.getByPlaceholderText(/ask me to create/i);
+ const sendButton = screen.getByRole('button', { name: /send/i });
+
+ await userEvent.type(input, 'Create a WordPress project');
+ await userEvent.click(sendButton);
+
+ await waitFor(() => {
+ expect(screen.getByText('Create a WordPress project')).toBeInTheDocument();
+ });
+
+ await waitFor(() => {
+ expect(screen.getByText(/let me help/i)).toBeInTheDocument();
+ }, { timeout: 5000 });
+ });
+});
+```
+
+### 5.3 Performance Optimization
+
+```typescript
+// src/lib/mcp/optimizations.tsx
+import { useMemo } from 'react';
+import debounce from 'lodash/debounce';
+import { useQuery } from '@tanstack/react-query';
+import { useAiAssistant } from '@/hooks/useAiAssistant';
+import { useMcp } from '@/contexts/McpContext';
+
+// 1. Debounce AI calls to prevent spam
+export const useDebouncedAi = () => {
+ const { sendMessage } = useAiAssistant();
+
+ const debouncedSend = useMemo(
+ () => debounce(sendMessage, 500),
+ [sendMessage]
+ );
+
+ return { sendMessage: debouncedSend };
+};
+
+// 2. Cache tool list
+export const useToolsCache = () => {
+ const { client } = useMcp();
+ const { data: tools, isLoading } = useQuery({
+ queryKey: ['mcp-tools'],
+ queryFn: () => client?.listTools(),
+ staleTime: 5 * 60 * 1000, // 5 minutes
+ enabled: !!client?.isConnected(),
+ });
+
+ return { tools, isLoading };
+};
+
+// 3. Lazy load chat component
+import { lazy, Suspense } from 'react';
+
+const ChatSidebar = lazy(() =>
+  import('@/components/chat/ChatSidebar').then((m) => ({ default: m.ChatSidebar }))
+);
+
+export const LazyChat = () => (
+  <Suspense fallback={<div>Loading chat…</div>}>
+    <ChatSidebar />
+  </Suspense>
+);
+```
+
+---
+
+## Environment Configuration
+
+### Production Setup
+
+```bash
+# .env.production
+REACT_APP_MCP_URL=wss://api.try.direct/mcp
+REACT_APP_API_URL=https://api.try.direct
+# NOTE: do not ship an OpenAI key to the browser; route AI calls through a backend proxy
+REACT_APP_OPENAI_API_KEY=your_openai_key_here
+```
+
+### Development Setup
+
+```bash
+# .env.development
+REACT_APP_MCP_URL=ws://localhost:8000/mcp
+REACT_APP_API_URL=http://localhost:8000
+REACT_APP_OPENAI_API_KEY=your_openai_key_here
+```
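+
+A small typed accessor keeps these variables in one place and provides sensible local defaults; the `REACT_APP_*` names match the files above (Create React App conventions are assumed).
+
+```typescript
+// Sketch: centralized, typed access to the environment variables defined above.
+export const appConfig = {
+  mcpUrl: process.env.REACT_APP_MCP_URL ?? 'ws://localhost:8000/mcp',
+  apiUrl: process.env.REACT_APP_API_URL ?? 'http://localhost:8000',
+} as const;
+
+export type AppConfig = typeof appConfig;
+```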
+
+---
+
+## Error Handling Best Practices
+
+```typescript
+// src/lib/mcp/errorHandler.ts
+
+export class McpError extends Error {
+ constructor(
+ message: string,
+ public code: string,
+ public recoverable: boolean = true
+ ) {
+ super(message);
+ this.name = 'McpError';
+ }
+}
+
+export const handleMcpError = (error: unknown): McpError => {
+ if (error instanceof McpError) {
+ return error;
+ }
+
+ if (error instanceof Error) {
+ if (error.message.includes('WebSocket')) {
+ return new McpError(
+ 'Connection lost. Please refresh the page.',
+ 'CONNECTION_LOST',
+ true
+ );
+ }
+
+ if (error.message.includes('auth')) {
+ return new McpError(
+ 'Authentication failed. Please log in again.',
+ 'AUTH_FAILED',
+ false
+ );
+ }
+ }
+
+ return new McpError(
+ 'An unexpected error occurred.',
+ 'UNKNOWN_ERROR',
+ true
+ );
+};
+```
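+
+A typical call site wraps a tool call and maps failures through `handleMcpError`; this is a sketch, and the `showToast` parameter stands in for an assumed UI notification helper.
+
+```typescript
+// Sketch: using handleMcpError around a tool call (showToast is a hypothetical UI helper).
+import { handleMcpError } from '@/lib/mcp/errorHandler';
+import { StackerMcpClient } from '@/lib/mcp/client';
+
+export async function safeCallTool(
+  client: StackerMcpClient,
+  name: string,
+  args: Record<string, unknown>,
+  showToast: (message: string) => void
+) {
+  try {
+    return await client.callTool(name, args);
+  } catch (err) {
+    const mcpError = handleMcpError(err);
+    showToast(mcpError.message);
+    if (!mcpError.recoverable) {
+      // e.g. force a re-login instead of retrying (path is illustrative)
+      window.location.assign('/login');
+    }
+    return null;
+  }
+}
+```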
+
+---
+
+## Deployment Checklist
+
+### Pre-Launch
+- [ ] All MCP tools tested and working
+- [ ] WebSocket connection stable for extended periods
+- [ ] Error handling covers all edge cases
+- [ ] Loading states implemented for all async operations
+- [ ] Mobile responsive design verified
+- [ ] Authentication integrated with existing OAuth
+- [ ] Rate limiting enforced on frontend
+- [ ] CORS configured for production domain
+
+### Production
+- [ ] Environment variables set correctly
+- [ ] HTTPS/WSS enabled for secure connections
+- [ ] CDN configured for static assets
+- [ ] Analytics tracking added
+- [ ] Error logging (Sentry, LogRocket)
+- [ ] Performance monitoring
+- [ ] User feedback mechanism
+
+---
+
+## User Flows & Examples
+
+### Example 1: Create WordPress Site
+
+**User**: "Create a WordPress site"
+
+**AI Response**: "I'll help you create a WordPress site. Let me ask a few questions:
+1. What would you like to name your project?
+2. Do you need a database (MySQL)?
+3. Expected traffic level (low/medium/high)?"
+
+**User**: "Call it 'My Blog', yes I need MySQL, low traffic"
+
+**AI**: *Calls tools:*
+```
+suggest_resources({ app_type: "wordpress", expected_traffic: "low" })
+suggest_resources({ app_type: "mysql", expected_traffic: "low" })
+create_project({
+ name: "My Blog",
+ apps: [
+ { name: "wordpress", dockerImage: { repository: "wordpress" }, resources: { cpu: 1, ram: 2, storage: 20 } },
+ { name: "mysql", dockerImage: { repository: "mysql" }, resources: { cpu: 2, ram: 4, storage: 50 } }
+ ]
+})
+```
+
+**AI Response**: "✓ Created project 'My Blog' with WordPress and MySQL! Resource suggestions:
+- WordPress: 1 CPU, 2GB RAM, 20GB storage
+- MySQL: 2 CPU, 4GB RAM, 50GB storage
+
+Would you like to deploy this now?"
+
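+For reference, the same flow can be exercised directly against the MCP client without the LLM in the loop, which is handy when debugging tool behaviour; the argument shapes mirror the pseudo-calls above.
+
+```typescript
+// Sketch: replaying Example 1 directly through StackerMcpClient (no OpenAI involved).
+import { StackerMcpClient } from '@/lib/mcp/client';
+
+async function createBlogProject(client: StackerMcpClient) {
+  const wpResources = await client.callTool('suggest_resources', {
+    app_type: 'wordpress',
+    expected_traffic: 'low',
+  });
+  console.log('Suggested WordPress resources:', wpResources.content[0]?.text);
+
+  const result = await client.callTool('create_project', {
+    name: 'My Blog',
+    apps: [
+      { name: 'wordpress', dockerImage: { repository: 'wordpress' }, resources: { cpu: 1, ram: 2, storage: 20 } },
+      { name: 'mysql', dockerImage: { repository: 'mysql' }, resources: { cpu: 2, ram: 4, storage: 50 } },
+    ],
+  });
+
+  if (result.isError) {
+    throw new Error(`create_project failed: ${result.content[0]?.text}`);
+  }
+  return JSON.parse(result.content[0]?.text ?? '{}');
+}
+```
+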
+### Example 2: List Projects
+
+**User**: "Show my projects"
+
+**AI**: *Calls `list_projects()`*
+
+**AI Response**: "You have 3 projects:
+1. My Blog (WordPress + MySQL) - Created Dec 27
+2. API Server (Node.js) - Created Dec 26
+3. E-commerce (Next.js + PostgreSQL) - Created Dec 25
+
+Which one would you like to work on?"
+
+---
+
+## Troubleshooting Guide
+
+### Common Issues
+
+#### 1. WebSocket Connection Fails
+```typescript
+// Check: Is MCP server running?
+// Check: Is auth token valid?
+// Check: CORS headers configured?
+
+// Solution:
+console.log('MCP URL:', process.env.REACT_APP_MCP_URL);
+console.log('Auth token:', token ? 'Present' : 'Missing');
+```
+
+#### 2. Tool Calls Timeout
+```typescript
+// Increase the timeout (requires extending StackerMcpClient.callTool to accept an options argument)
+const result = await client.callTool(name, args, { timeout: 30000 });
+```
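+
+If you would rather not change the client API, a timeout can also be layered around the existing two-argument `callTool` with `Promise.race`; this is a sketch, not part of the current wrapper.
+
+```typescript
+// Sketch: client-side timeout around callTool using Promise.race.
+import { StackerMcpClient } from '@/lib/mcp/client';
+
+async function callToolWithTimeout(
+  client: StackerMcpClient,
+  name: string,
+  args: Record<string, unknown>,
+  timeoutMs = 30_000
+) {
+  const timeout = new Promise<never>((_, reject) =>
+    setTimeout(() => reject(new Error(`Tool call '${name}' timed out after ${timeoutMs}ms`)), timeoutMs)
+  );
+  return Promise.race([client.callTool(name, args), timeout]);
+}
+```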
+
+#### 3. Context Not Persisting
+```typescript
+// Check: Is Zustand store properly configured?
+// Ensure setContext is called after tool execution
+useChatStore.getState().setContext({ currentProject: project });
+```
+
+---
+
+## Future Enhancements
+
+### Phase 2 Features
+- **Voice Input**: Add speech-to-text for hands-free interaction
+- **Template Marketplace**: Browse and install community templates
+- **Multi-language Support**: Internationalization for non-English users
+- **Collaborative Editing**: Multiple users working on same project
+- **Version Control**: Git integration for project configurations
+- **Cost Estimation**: Show estimated monthly costs for deployments
+
+### Advanced AI Features
+- **Proactive Suggestions**: AI monitors form and suggests improvements
+- **Error Prevention**: Validate before deployment and warn about issues
+- **Learning Mode**: AI learns from user preferences over time
+- **Guided Tutorials**: Step-by-step walkthroughs for beginners
+
+---
+
+## Performance Targets
+
+- **Initial Load**: < 2 seconds
+- **Chat Message Latency**: < 500ms
+- **Tool Execution**: < 3 seconds (p95)
+- **WebSocket Reconnect**: < 5 seconds
+- **Memory Usage**: < 50MB per tab
+
+---
+
+## Security Considerations
+
+1. **Token Security**: Never expose the OpenAI API key in the frontend; route AI calls through a backend proxy (see the sketch after this list)
+2. **Input Sanitization**: Validate all user inputs before sending to AI
+3. **Rate Limiting**: Implement frontend rate limiting to prevent abuse
+4. **XSS Prevention**: Sanitize AI responses before rendering as HTML
+5. **CSP Headers**: Configure Content Security Policy for production
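+
+A minimal sketch of the proxy approach from item 1: the browser sends chat turns to a backend route, and only the backend holds the OpenAI key. The `/ai/chat` path and payload shape here are assumptions, not an existing Stacker endpoint.
+
+```typescript
+// Sketch: calling a backend AI proxy instead of OpenAI from the browser.
+// The /ai/chat endpoint and its payload are assumed; adapt to the real backend route.
+export async function proxyChatCompletion(
+  messages: Array<{ role: 'system' | 'user' | 'assistant'; content: string }>,
+  authToken: string
+): Promise<string> {
+  const response = await fetch('/ai/chat', {
+    method: 'POST',
+    headers: {
+      'Content-Type': 'application/json',
+      Authorization: `Bearer ${authToken}`,
+    },
+    body: JSON.stringify({ messages }),
+  });
+
+  if (!response.ok) {
+    throw new Error(`AI proxy request failed: ${response.status}`);
+  }
+  const data = await response.json();
+  return data.reply as string;
+}
+```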
+
+---
+
+## Team Coordination
+
+### Frontend Team Responsibilities
+- Implement React components
+- Design chat UI/UX
+- Handle state management
+- Write unit/integration tests
+
+### Backend Team Responsibilities
+- Ensure MCP server is production-ready
+- Provide WebSocket endpoint
+- Maintain tool schemas
+- Monitor performance
+
+### Shared Responsibilities
+- Define tool contracts (JSON schemas; see the example after this list)
+- End-to-end testing
+- Documentation
+- Deployment coordination
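+
+As an illustration of what a shared tool contract could look like, here is a TypeScript shape for `create_project` derived from the examples earlier in this guide; treat the exact field list as a sketch to be finalized jointly by both teams.
+
+```typescript
+// Sketch: a shared contract for the create_project tool, derived from the examples above.
+export interface CreateProjectArgs {
+  name: string;
+  apps: Array<{
+    name: string;
+    dockerImage: { repository: string; tag?: string };
+    resources?: { cpu: number; ram: number; storage: number };
+    ports?: Array<{ hostPort: number; containerPort: number }>;
+  }>;
+}
+
+export interface CreateProjectResult {
+  id: number;
+  name: string;
+  user_id: string;
+  created_at: string; // ISO 8601 timestamp
+}
+```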
+
+---
+
+## Resources & Links
+
+- **MCP SDK Docs**: https://github.com/modelcontextprotocol/sdk
+- **OpenAI API**: https://platform.openai.com/docs
+- **WebSocket API**: https://developer.mozilla.org/en-US/docs/Web/API/WebSocket
+- **React Query**: https://tanstack.com/query/latest
+- **Zustand**: https://github.com/pmndrs/zustand
+
+---
+
+## Contact
+
+**Frontend Lead**: [Your Name]
+**Questions**: Open GitHub issue or Slack #stacker-ai channel
diff --git a/migrations/20230903063840_creating_rating_tables.down.sql b/migrations/20230903063840_creating_rating_tables.down.sql
index e12e4ab..b32b52b 100644
--- a/migrations/20230903063840_creating_rating_tables.down.sql
+++ b/migrations/20230903063840_creating_rating_tables.down.sql
@@ -6,3 +6,5 @@ DROP INDEX idx_obj_id_rating_id;
DROP table rating;
DROP table product;
+
+DROP TYPE rate_category;
diff --git a/migrations/20230903063840_creating_rating_tables.up.sql b/migrations/20230903063840_creating_rating_tables.up.sql
index 579bef6..156c722 100644
--- a/migrations/20230903063840_creating_rating_tables.up.sql
+++ b/migrations/20230903063840_creating_rating_tables.up.sql
@@ -1,5 +1,17 @@
-- Add up migration script here
+CREATE TYPE rate_category AS ENUM (
+ 'application',
+ 'cloud',
+ 'project',
+ 'deploymentSpeed',
+ 'documentation',
+ 'design',
+ 'techSupport',
+ 'price',
+ 'memoryUsage'
+);
+
CREATE TABLE product (
id integer NOT NULL, PRIMARY KEY(id),
obj_id integer NOT NULL,
@@ -12,7 +24,7 @@ CREATE TABLE rating (
id serial,
user_id VARCHAR(50) NOT NULL,
obj_id integer NOT NULL,
- category VARCHAR(255) NOT NULL,
+ category rate_category NOT NULL,
comment TEXT DEFAULT NULL,
hidden BOOLEAN DEFAULT FALSE,
rate INTEGER,
diff --git a/migrations/20230905145525_creating_stack_tables.down.sql b/migrations/20230905145525_creating_stack_tables.down.sql
index 203a95a..7f367df 100644
--- a/migrations/20230905145525_creating_stack_tables.down.sql
+++ b/migrations/20230905145525_creating_stack_tables.down.sql
@@ -1,3 +1,2 @@
-- Add down migration script here
-
-DROP TABLE user_stack;
+DROP TABLE project;
diff --git a/migrations/20230905145525_creating_stack_tables.up.sql b/migrations/20230905145525_creating_stack_tables.up.sql
index e908e97..c002beb 100644
--- a/migrations/20230905145525_creating_stack_tables.up.sql
+++ b/migrations/20230905145525_creating_stack_tables.up.sql
@@ -1,12 +1,14 @@
--- Add up migration script here
--- Add migration script here
-CREATE TABLE user_stack (
- id serial,
+CREATE TABLE project (
+ id serial4 NOT NULL,
stack_id uuid NOT NULL,
user_id VARCHAR(50) NOT NULL,
- name TEXT NOT NULL UNIQUE,
+ name TEXT NOT NULL,
body JSON NOT NULL,
created_at timestamptz NOT NULL,
- updated_at timestamptz NOT NULL
-)
+ updated_at timestamptz NOT NULL,
+ CONSTRAINT project_pkey PRIMARY KEY (id)
+);
+CREATE INDEX idx_project_stack_id ON project(stack_id);
+CREATE INDEX idx_project_user_id ON project(user_id);
+CREATE INDEX idx_project_name ON project(name);
diff --git a/migrations/20230917162549_creating_test_product.down.sql b/migrations/20230917162549_creating_test_product.down.sql
index f9f6339..eafea95 100644
--- a/migrations/20230917162549_creating_test_product.down.sql
+++ b/migrations/20230917162549_creating_test_product.down.sql
@@ -1 +1 @@
-delete from product where id=1;
+DELETE FROM product WHERE id=1;
diff --git a/migrations/20230917162549_creating_test_product.up.sql b/migrations/20230917162549_creating_test_product.up.sql
index 7a1d8d6..9aae3c5 100644
--- a/migrations/20230917162549_creating_test_product.up.sql
+++ b/migrations/20230917162549_creating_test_product.up.sql
@@ -1 +1 @@
-INSERT INTO public.product (id, obj_id, obj_type, created_at, updated_at) VALUES(1, 1, 'Application', '2023-09-17 10:30:02.579', '2023-09-17 10:30:02.579');
\ No newline at end of file
+INSERT INTO product (id, obj_id, obj_type, created_at, updated_at) VALUES(1, 1, 'Application', '2023-09-17 10:30:02.579', '2023-09-17 10:30:02.579');
diff --git a/migrations/20231028161917_client.up.sql b/migrations/20231028161917_client.up.sql
index fcb9065..e0470c3 100644
--- a/migrations/20231028161917_client.up.sql
+++ b/migrations/20231028161917_client.up.sql
@@ -1,5 +1,5 @@
-- Add up migration script here
-CREATE TABLE public.client (
+CREATE TABLE client (
id serial4 NOT NULL,
user_id varchar(50) NOT NULL,
secret varchar(255),
diff --git a/migrations/20240128174529_casbin_rule.down.sql b/migrations/20240128174529_casbin_rule.down.sql
new file mode 100644
index 0000000..ef4c417
--- /dev/null
+++ b/migrations/20240128174529_casbin_rule.down.sql
@@ -0,0 +1,2 @@
+-- Add down migration script here
+DROP TABLE casbin_rule;
diff --git a/migrations/20240128174529_casbin_rule.up.sql b/migrations/20240128174529_casbin_rule.up.sql
new file mode 100644
index 0000000..ef9ddec
--- /dev/null
+++ b/migrations/20240128174529_casbin_rule.up.sql
@@ -0,0 +1,12 @@
+-- Add up migration script here
+CREATE TABLE IF NOT EXISTS casbin_rule (
+ id SERIAL PRIMARY KEY,
+ ptype VARCHAR NOT NULL,
+ v0 VARCHAR NOT NULL,
+ v1 VARCHAR NOT NULL,
+ v2 VARCHAR NOT NULL,
+ v3 VARCHAR NOT NULL,
+ v4 VARCHAR NOT NULL,
+ v5 VARCHAR NOT NULL,
+ CONSTRAINT unique_key_sqlx_adapter UNIQUE(ptype, v0, v1, v2, v3, v4, v5)
+)
diff --git a/migrations/20240228125751_creating_deployments.down.sql b/migrations/20240228125751_creating_deployments.down.sql
new file mode 100644
index 0000000..228cc13
--- /dev/null
+++ b/migrations/20240228125751_creating_deployments.down.sql
@@ -0,0 +1,2 @@
+-- Add down migration script here
+DROP table deployment;
\ No newline at end of file
diff --git a/migrations/20240228125751_creating_deployments.up.sql b/migrations/20240228125751_creating_deployments.up.sql
new file mode 100644
index 0000000..7a06d3b
--- /dev/null
+++ b/migrations/20240228125751_creating_deployments.up.sql
@@ -0,0 +1,14 @@
+-- Add up migration script here
+CREATE TABLE deployment (
+ id serial4 NOT NULL,
+ project_id integer NOT NULL,
+ body JSON NOT NULL,
+ deleted BOOLEAN DEFAULT FALSE,
+ status VARCHAR(32) NOT NULL,
+ created_at timestamptz NOT NULL,
+ updated_at timestamptz NOT NULL,
+ CONSTRAINT fk_project FOREIGN KEY(project_id) REFERENCES project(id),
+ CONSTRAINT deployment_pkey PRIMARY KEY (id)
+);
+
+CREATE INDEX idx_deployment_project_id ON deployment(project_id);
diff --git a/migrations/20240229072555_creating_cloud.down.sql b/migrations/20240229072555_creating_cloud.down.sql
new file mode 100644
index 0000000..2a04e92
--- /dev/null
+++ b/migrations/20240229072555_creating_cloud.down.sql
@@ -0,0 +1,2 @@
+-- Add down migration script here
+DROP table cloud;
diff --git a/migrations/20240229072555_creating_cloud.up.sql b/migrations/20240229072555_creating_cloud.up.sql
new file mode 100644
index 0000000..c842d3f
--- /dev/null
+++ b/migrations/20240229072555_creating_cloud.up.sql
@@ -0,0 +1,14 @@
+CREATE TABLE cloud (
+ id serial4 NOT NULL,
+ user_id VARCHAR(50) NOT NULL,
+ provider VARCHAR(50) NOT NULL,
+ cloud_token VARCHAR(255) ,
+ cloud_key VARCHAR(255),
+ cloud_secret VARCHAR(255),
+ save_token BOOLEAN DEFAULT FALSE,
+ created_at timestamptz NOT NULL,
+ updated_at timestamptz NOT NULL,
+ CONSTRAINT user_cloud_pkey PRIMARY KEY (id)
+);
+
+CREATE INDEX idx_deployment_user_cloud_user_id ON cloud(user_id);
\ No newline at end of file
diff --git a/migrations/20240229075843_creating_user_stack_cloud_relation.down.sql b/migrations/20240229075843_creating_user_stack_cloud_relation.down.sql
new file mode 100644
index 0000000..02d2fe5
--- /dev/null
+++ b/migrations/20240229075843_creating_user_stack_cloud_relation.down.sql
@@ -0,0 +1,2 @@
+-- Add down migration script here
+ALTER table project DROP COLUMN cloud_id;
\ No newline at end of file
diff --git a/migrations/20240229075843_creating_user_stack_cloud_relation.up.sql b/migrations/20240229075843_creating_user_stack_cloud_relation.up.sql
new file mode 100644
index 0000000..5f65c66
--- /dev/null
+++ b/migrations/20240229075843_creating_user_stack_cloud_relation.up.sql
@@ -0,0 +1,3 @@
+-- Add up migration script here
+ALTER table project ADD COLUMN cloud_id INT CONSTRAINT project_cloud_id REFERENCES cloud(id) ON UPDATE CASCADE ON DELETE CASCADE;
+
diff --git a/migrations/20240229080559_creating_cloud_server.down.sql b/migrations/20240229080559_creating_cloud_server.down.sql
new file mode 100644
index 0000000..f0fa982
--- /dev/null
+++ b/migrations/20240229080559_creating_cloud_server.down.sql
@@ -0,0 +1,3 @@
+DROP INDEX idx_server_user_id;
+DROP INDEX idx_server_cloud_id;
+DROP table server;
diff --git a/migrations/20240229080559_creating_cloud_server.up.sql b/migrations/20240229080559_creating_cloud_server.up.sql
new file mode 100644
index 0000000..e4ed91b
--- /dev/null
+++ b/migrations/20240229080559_creating_cloud_server.up.sql
@@ -0,0 +1,22 @@
+-- Add up migration script here
+
+CREATE TABLE server (
+ id serial4 NOT NULL,
+ user_id VARCHAR(50) NOT NULL,
+ cloud_id integer NOT NULL,
+ project_id integer NOT NULL,
+ region VARCHAR(50) NOT NULL,
+ zone VARCHAR(50),
+ server VARCHAR(255) NOT NULL,
+ os VARCHAR(100) NOT NULL,
+ disk_type VARCHAR(100),
+ created_at timestamptz NOT NULL,
+ updated_at timestamptz NOT NULL,
+ CONSTRAINT user_server_pkey PRIMARY KEY (id),
+ CONSTRAINT fk_server FOREIGN KEY(cloud_id) REFERENCES cloud(id),
+ CONSTRAINT fk_server_project FOREIGN KEY(project_id) REFERENCES project(id) ON UPDATE CASCADE ON DELETE CASCADE
+);
+
+CREATE INDEX idx_server_user_id ON server(user_id);
+CREATE INDEX idx_server_cloud_id ON server(cloud_id);
+CREATE INDEX idx_server_project_id ON server(project_id);
diff --git a/migrations/20240302081015_creating_original_request_column_project.down.sql b/migrations/20240302081015_creating_original_request_column_project.down.sql
new file mode 100644
index 0000000..93549b5
--- /dev/null
+++ b/migrations/20240302081015_creating_original_request_column_project.down.sql
@@ -0,0 +1,2 @@
+-- Add down migration script here
+ALTER table project DROP COLUMN request_json;
diff --git a/migrations/20240302081015_creating_original_request_column_project.up.sql b/migrations/20240302081015_creating_original_request_column_project.up.sql
new file mode 100644
index 0000000..2c1ba74
--- /dev/null
+++ b/migrations/20240302081015_creating_original_request_column_project.up.sql
@@ -0,0 +1 @@
+ALTER table project ADD COLUMN request_json JSON NOT NULL DEFAULT '{}';
\ No newline at end of file
diff --git a/migrations/20240307113718_alter_cloud_alter_project.down.sql b/migrations/20240307113718_alter_cloud_alter_project.down.sql
new file mode 100644
index 0000000..06f51ab
--- /dev/null
+++ b/migrations/20240307113718_alter_cloud_alter_project.down.sql
@@ -0,0 +1,3 @@
+-- Add down migration script here
+ALTER table project ADD COLUMN cloud_id INT CONSTRAINT project_cloud_id REFERENCES cloud(id) ON UPDATE CASCADE ON DELETE CASCADE;
+ALTER table cloud DROP COLUMN project_id;
\ No newline at end of file
diff --git a/migrations/20240307113718_alter_cloud_alter_project.up.sql b/migrations/20240307113718_alter_cloud_alter_project.up.sql
new file mode 100644
index 0000000..554a24a
--- /dev/null
+++ b/migrations/20240307113718_alter_cloud_alter_project.up.sql
@@ -0,0 +1,3 @@
+-- Add up migration script here
+ALTER table project DROP COLUMN cloud_id;
+ALTER table cloud ADD COLUMN project_id INT CONSTRAINT cloud_project_id REFERENCES project(id) ON UPDATE CASCADE ON DELETE CASCADE;
diff --git a/migrations/20240315143712_remove_cloud_id_from_server.down.sql b/migrations/20240315143712_remove_cloud_id_from_server.down.sql
new file mode 100644
index 0000000..72dd11e
--- /dev/null
+++ b/migrations/20240315143712_remove_cloud_id_from_server.down.sql
@@ -0,0 +1,3 @@
+-- Add down migration script here
+DROP INDEX idx_server_cloud_id;
+alter table server ADD column cloud_id integer NOT NULL;
diff --git a/migrations/20240315143712_remove_cloud_id_from_server.up.sql b/migrations/20240315143712_remove_cloud_id_from_server.up.sql
new file mode 100644
index 0000000..be9027c
--- /dev/null
+++ b/migrations/20240315143712_remove_cloud_id_from_server.up.sql
@@ -0,0 +1,2 @@
+-- Add up migration script here
+alter table server drop column cloud_id;
diff --git a/migrations/20240401103123_casbin_initial_rules.down.sql b/migrations/20240401103123_casbin_initial_rules.down.sql
new file mode 100644
index 0000000..d2f607c
--- /dev/null
+++ b/migrations/20240401103123_casbin_initial_rules.down.sql
@@ -0,0 +1 @@
+-- Add down migration script here
diff --git a/migrations/20240401103123_casbin_initial_rules.up.sql b/migrations/20240401103123_casbin_initial_rules.up.sql
new file mode 100644
index 0000000..ee2cd49
--- /dev/null
+++ b/migrations/20240401103123_casbin_initial_rules.up.sql
@@ -0,0 +1,40 @@
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('g', 'anonym', 'group_anonymous', '', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('g', 'group_admin', 'group_anonymous', '', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('g', 'group_user', 'group_anonymous', '', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('g', 'user', 'group_user', '', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_anonymous', '/health_check', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_anonymous', '/rating/:id', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_anonymous', '/rating', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/client', 'POST', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/rating', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/admin/client/:id/disable', 'PUT', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/admin/client/:id/enable', 'PUT', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/admin/client/:id', 'PUT', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/admin/project/user/:userid', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/rating/:id', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/client/:id/enable', 'PUT', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/client/:id', 'PUT', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/client/:id/disable', 'PUT', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/rating/:id', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/rating', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/rating', 'POST', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/project', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/project', 'POST', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/project/:id', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/project/:id', 'POST', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/project/:id', 'PUT', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/project/:id', 'DELETE', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/project/:id/compose', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/project/:id/compose', 'POST', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/project/:id/deploy', 'POST', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/project/:id/deploy/:cloud_id', 'POST', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/server', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/server', 'POST', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/server/:id', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/server/:id', 'PUT', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/cloud', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/cloud', 'POST', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/cloud/:id', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/cloud/:id', 'PUT', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/cloud/:id', 'DELETE', '', '', '');
+
diff --git a/migrations/20240401184313_remove_project_id_from_cloud.down.sql b/migrations/20240401184313_remove_project_id_from_cloud.down.sql
new file mode 100644
index 0000000..3b99d4c
--- /dev/null
+++ b/migrations/20240401184313_remove_project_id_from_cloud.down.sql
@@ -0,0 +1,2 @@
+-- Add down migration script here
+ALTER table cloud ADD COLUMN project_id INT CONSTRAINT cloud_project_id REFERENCES project(id) ON UPDATE CASCADE ON DELETE CASCADE;
diff --git a/migrations/20240401184313_remove_project_id_from_cloud.up.sql b/migrations/20240401184313_remove_project_id_from_cloud.up.sql
new file mode 100644
index 0000000..4974d95
--- /dev/null
+++ b/migrations/20240401184313_remove_project_id_from_cloud.up.sql
@@ -0,0 +1,3 @@
+-- Add up migration script here
+
+alter table cloud DROP column project_id;
diff --git a/migrations/20240412141011_casbin_user_rating_edit.down.sql b/migrations/20240412141011_casbin_user_rating_edit.down.sql
new file mode 100644
index 0000000..41c5e57
--- /dev/null
+++ b/migrations/20240412141011_casbin_user_rating_edit.down.sql
@@ -0,0 +1,18 @@
+-- Add down migration script here
+DELETE FROM casbin_rule
+WHERE ptype = 'p' and v0 = 'group_user' and v1 = '/rating/:id' and v2 = 'PUT';
+
+DELETE FROM casbin_rule
+WHERE ptype = 'p' and v0 = 'group_admin' and v1 = '/admin/rating/:id' and v2 = 'PUT';
+
+DELETE FROM casbin_rule
+WHERE ptype = 'p' and v0 = 'group_user' and v1 = '/rating/:id' and v2 = 'DELETE';
+
+DELETE FROM casbin_rule
+WHERE ptype = 'p' and v0 = 'group_admin' and v1 = '/admin/rating/:id' and v2 = 'DELETE';
+
+DELETE FROM casbin_rule
+WHERE ptype = 'p' and v0 = 'group_admin' and v1 = '/admin/rating/:id' and v2 = 'GET';
+
+DELETE FROM casbin_rule
+WHERE ptype = 'p' and v0 = 'group_admin' and v1 = '/admin/rating' and v2 = 'GET';
diff --git a/migrations/20240412141011_casbin_user_rating_edit.up.sql b/migrations/20240412141011_casbin_user_rating_edit.up.sql
new file mode 100644
index 0000000..6b435cf
--- /dev/null
+++ b/migrations/20240412141011_casbin_user_rating_edit.up.sql
@@ -0,0 +1,18 @@
+-- Add up migration script here
+INSERT INTO casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_user', '/rating/:id', 'PUT', '', '', '');
+
+INSERT INTO casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_admin', '/admin/rating/:id', 'PUT', '', '', '');
+
+INSERT INTO casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_user', '/rating/:id', 'DELETE', '', '', '');
+
+INSERT INTO casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_admin', '/admin/rating/:id', 'DELETE', '', '', '');
+
+INSERT INTO casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_admin', '/admin/rating/:id', 'GET', '', '', '');
+
+INSERT INTO casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_admin', '/admin/rating', 'GET', '', '', '');
diff --git a/migrations/20240709162041_add_server_ip_ssh_user_port.down.sql b/migrations/20240709162041_add_server_ip_ssh_user_port.down.sql
new file mode 100644
index 0000000..7b64145
--- /dev/null
+++ b/migrations/20240709162041_add_server_ip_ssh_user_port.down.sql
@@ -0,0 +1,5 @@
+-- Add down migration script here
+
+ALTER table server DROP COLUMN srv_ip;
+ALTER table server DROP COLUMN ssh_user;
+ALTER table server DROP COLUMN ssh_port;
diff --git a/migrations/20240709162041_add_server_ip_ssh_user_port.up.sql b/migrations/20240709162041_add_server_ip_ssh_user_port.up.sql
new file mode 100644
index 0000000..38cfc7d
--- /dev/null
+++ b/migrations/20240709162041_add_server_ip_ssh_user_port.up.sql
@@ -0,0 +1,5 @@
+-- Add up migration script here
+
+ALTER table server ADD COLUMN srv_ip VARCHAR(50) DEFAULT NULL;
+ALTER table server ADD COLUMN ssh_user VARCHAR(50) DEFAULT NULL;
+ALTER table server ADD COLUMN ssh_port INT DEFAULT NULL;
diff --git a/migrations/20240711134750_server_nullable_fields.down.sql b/migrations/20240711134750_server_nullable_fields.down.sql
new file mode 100644
index 0000000..e8d6c4f
--- /dev/null
+++ b/migrations/20240711134750_server_nullable_fields.down.sql
@@ -0,0 +1,6 @@
+-- Add down migration script here
+
+ALTER TABLE server ALTER COLUMN region SET NOT NULL;
+ALTER TABLE server ALTER COLUMN server SET NOT NULL;
+ALTER TABLE server ALTER COLUMN zone SET NOT NULL;
+ALTER TABLE server ALTER COLUMN os SET NOT NULL;
diff --git a/migrations/20240711134750_server_nullable_fields.up.sql b/migrations/20240711134750_server_nullable_fields.up.sql
new file mode 100644
index 0000000..95931fe
--- /dev/null
+++ b/migrations/20240711134750_server_nullable_fields.up.sql
@@ -0,0 +1,6 @@
+-- Add up migration script here
+
+ALTER TABLE server ALTER COLUMN region DROP NOT NULL;
+ALTER TABLE server ALTER COLUMN server DROP NOT NULL;
+ALTER TABLE server ALTER COLUMN zone DROP NOT NULL;
+ALTER TABLE server ALTER COLUMN os DROP NOT NULL;
diff --git a/migrations/20240716114826_agreement_tables.down.sql b/migrations/20240716114826_agreement_tables.down.sql
new file mode 100644
index 0000000..847a983
--- /dev/null
+++ b/migrations/20240716114826_agreement_tables.down.sql
@@ -0,0 +1,8 @@
+-- Add down migration script here
+
+-- Drop user_agreement before agreement to satisfy the FK dependency
+DROP INDEX idx_user_agreement_user_id;
+DROP INDEX idx_agreement_name;
+
+DROP TABLE user_agreement;
+DROP TABLE agreement;
\ No newline at end of file
diff --git a/migrations/20240716114826_agreement_tables.up.sql b/migrations/20240716114826_agreement_tables.up.sql
new file mode 100644
index 0000000..7b8b0aa
--- /dev/null
+++ b/migrations/20240716114826_agreement_tables.up.sql
@@ -0,0 +1,24 @@
+-- Add up migration script here
+
+CREATE TABLE agreement (
+ id serial4 NOT NULL,
+ name VARCHAR(255) NOT NULL,
+ text TEXT NOT NULL,
+ created_at timestamptz NOT NULL,
+ updated_at timestamptz NOT NULL,
+ CONSTRAINT agreement_pkey PRIMARY KEY (id)
+);
+
+CREATE INDEX idx_agreement_name ON agreement(name);
+
+CREATE TABLE user_agreement (
+ id serial4 NOT NULL,
+ agrt_id integer NOT NULL,
+ user_id VARCHAR(50) NOT NULL,
+ created_at timestamptz NOT NULL,
+ updated_at timestamptz NOT NULL,
+ CONSTRAINT user_agreement_pkey PRIMARY KEY (id),
+ CONSTRAINT fk_agreement FOREIGN KEY(agrt_id) REFERENCES agreement(id)
+);
+
+CREATE INDEX idx_user_agreement_user_id ON user_agreement(user_id);
\ No newline at end of file
diff --git a/migrations/20240717070823_agreement_casbin_rules.down.sql b/migrations/20240717070823_agreement_casbin_rules.down.sql
new file mode 100644
index 0000000..12d9b50
--- /dev/null
+++ b/migrations/20240717070823_agreement_casbin_rules.down.sql
@@ -0,0 +1,3 @@
+-- Add down migration script here
+
+DELETE FROM public.casbin_rule where id IN (49,50,51,52,53,54,55,56,57,58);
\ No newline at end of file
diff --git a/migrations/20240717070823_agreement_casbin_rules.up.sql b/migrations/20240717070823_agreement_casbin_rules.up.sql
new file mode 100644
index 0000000..8c5c757
--- /dev/null
+++ b/migrations/20240717070823_agreement_casbin_rules.up.sql
@@ -0,0 +1,12 @@
+-- Add up migration script here
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/agreement', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/agreement/:id', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/agreement', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/agreement/:id', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/admin/agreement', 'POST', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/admin/agreement/:id', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/admin/agreement/:id', 'POST', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/admin/agreement/:id', 'PUT', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/admin/agreement/:id', 'DELETE', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/agreement', 'POST', '', '', '');
diff --git a/migrations/20240717100131_agreement_created_updated_default_now.down.sql b/migrations/20240717100131_agreement_created_updated_default_now.down.sql
new file mode 100644
index 0000000..d2f607c
--- /dev/null
+++ b/migrations/20240717100131_agreement_created_updated_default_now.down.sql
@@ -0,0 +1 @@
+-- Add down migration script here
diff --git a/migrations/20240717100131_agreement_created_updated_default_now.up.sql b/migrations/20240717100131_agreement_created_updated_default_now.up.sql
new file mode 100644
index 0000000..a259ed6
--- /dev/null
+++ b/migrations/20240717100131_agreement_created_updated_default_now.up.sql
@@ -0,0 +1,6 @@
+-- Add up migration script here
+ALTER TABLE public.agreement ALTER COLUMN created_at SET NOT NULL;
+ALTER TABLE public.agreement ALTER COLUMN created_at SET DEFAULT NOW();
+
+ALTER TABLE public.agreement ALTER COLUMN updated_at SET NOT NULL;
+ALTER TABLE public.agreement ALTER COLUMN updated_at SET DEFAULT NOW();
diff --git a/migrations/20240718082702_agreement_accepted.down.sql b/migrations/20240718082702_agreement_accepted.down.sql
new file mode 100644
index 0000000..fd2397e
--- /dev/null
+++ b/migrations/20240718082702_agreement_accepted.down.sql
@@ -0,0 +1,2 @@
+-- Add down migration script here
+DELETE FROM public.casbin_rule where id IN (59);
diff --git a/migrations/20240718082702_agreement_accepted.up.sql b/migrations/20240718082702_agreement_accepted.up.sql
new file mode 100644
index 0000000..1e01c7e
--- /dev/null
+++ b/migrations/20240718082702_agreement_accepted.up.sql
@@ -0,0 +1,2 @@
+-- Add up migration script here
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/agreement/accepted/:id', 'GET', '', '', '');
\ No newline at end of file
diff --git a/migrations/20251222160218_update_deployment_for_agents.down.sql b/migrations/20251222160218_update_deployment_for_agents.down.sql
new file mode 100644
index 0000000..bd8eb32
--- /dev/null
+++ b/migrations/20251222160218_update_deployment_for_agents.down.sql
@@ -0,0 +1,5 @@
+-- Revert deployment table changes
+ALTER TABLE deployment DROP COLUMN IF EXISTS user_id;
+ALTER TABLE deployment DROP COLUMN IF EXISTS last_seen_at;
+ALTER TABLE deployment DROP COLUMN IF EXISTS deployment_hash;
+ALTER TABLE deployment RENAME COLUMN metadata TO body;
diff --git a/migrations/20251222160218_update_deployment_for_agents.up.sql b/migrations/20251222160218_update_deployment_for_agents.up.sql
new file mode 100644
index 0000000..4b876a0
--- /dev/null
+++ b/migrations/20251222160218_update_deployment_for_agents.up.sql
@@ -0,0 +1,19 @@
+-- Add deployment_hash, last_seen_at, and user_id; rename body to metadata in deployment table
+ALTER TABLE deployment
+ADD COLUMN deployment_hash VARCHAR(64) UNIQUE,
+ADD COLUMN last_seen_at TIMESTAMP,
+ADD COLUMN user_id VARCHAR(255);
+
+-- Rename body to metadata
+ALTER TABLE deployment RENAME COLUMN body TO metadata;
+
+-- Generate deployment_hash for existing deployments (simple hash based on id)
+UPDATE deployment
+SET deployment_hash = md5(CONCAT('deployment_', id::text))
+WHERE deployment_hash IS NULL;
+
+-- Make deployment_hash NOT NULL after populating
+ALTER TABLE deployment ALTER COLUMN deployment_hash SET NOT NULL;
+
+CREATE INDEX idx_deployment_hash ON deployment(deployment_hash);
+CREATE INDEX idx_deployment_user_id ON deployment(user_id);
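+
+-- Note: md5() yields a 32-character hex digest, well within VARCHAR(64). The unique
+-- deployment_hash becomes the natural lookup key for agent traffic, e.g. (illustrative only,
+-- not executed by this migration):
+--   SELECT id, user_id, last_seen_at FROM deployment WHERE deployment_hash = $1;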
diff --git a/migrations/20251222160219_create_agents_and_audit_log.down.sql b/migrations/20251222160219_create_agents_and_audit_log.down.sql
new file mode 100644
index 0000000..c6568c6
--- /dev/null
+++ b/migrations/20251222160219_create_agents_and_audit_log.down.sql
@@ -0,0 +1,3 @@
+-- Drop audit_log and agents tables
+DROP TABLE IF EXISTS audit_log;
+DROP TABLE IF EXISTS agents;
diff --git a/migrations/20251222160219_create_agents_and_audit_log.up.sql b/migrations/20251222160219_create_agents_and_audit_log.up.sql
new file mode 100644
index 0000000..8cd5476
--- /dev/null
+++ b/migrations/20251222160219_create_agents_and_audit_log.up.sql
@@ -0,0 +1,35 @@
+-- Create agents table
+CREATE TABLE agents (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ deployment_hash VARCHAR(64) UNIQUE NOT NULL REFERENCES deployment(deployment_hash) ON DELETE CASCADE,
+ capabilities JSONB DEFAULT '[]'::jsonb,
+ version VARCHAR(50),
+ system_info JSONB DEFAULT '{}'::jsonb,
+ last_heartbeat TIMESTAMP,
+ status VARCHAR(50) DEFAULT 'offline',
+ created_at TIMESTAMP DEFAULT NOW(),
+ updated_at TIMESTAMP DEFAULT NOW(),
+ CONSTRAINT chk_agent_status CHECK (status IN ('online', 'offline', 'degraded'))
+);
+
+CREATE INDEX idx_agents_deployment_hash ON agents(deployment_hash);
+CREATE INDEX idx_agents_status ON agents(status);
+CREATE INDEX idx_agents_last_heartbeat ON agents(last_heartbeat);
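+
+-- Illustrative staleness sweep (application-side; not run by this migration):
+--   UPDATE agents SET status = 'offline'
+--   WHERE last_heartbeat < NOW() - INTERVAL '5 minutes' AND status <> 'offline';
+-- The interval is an arbitrary example; idx_agents_last_heartbeat keeps the scan cheap.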
+
+-- Create audit_log table
+CREATE TABLE audit_log (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ agent_id UUID REFERENCES agents(id) ON DELETE SET NULL,
+ deployment_hash VARCHAR(64),
+ action VARCHAR(100) NOT NULL,
+ status VARCHAR(50),
+ details JSONB DEFAULT '{}'::jsonb,
+ ip_address INET,
+ user_agent TEXT,
+ created_at TIMESTAMP DEFAULT NOW()
+);
+
+CREATE INDEX idx_audit_log_agent_id ON audit_log(agent_id);
+CREATE INDEX idx_audit_log_deployment_hash ON audit_log(deployment_hash);
+CREATE INDEX idx_audit_log_action ON audit_log(action);
+CREATE INDEX idx_audit_log_created_at ON audit_log(created_at);
diff --git a/migrations/20251222160220_casbin_agent_rules.down.sql b/migrations/20251222160220_casbin_agent_rules.down.sql
new file mode 100644
index 0000000..00528cc
--- /dev/null
+++ b/migrations/20251222160220_casbin_agent_rules.down.sql
@@ -0,0 +1,18 @@
+-- Remove agent casbin rules
+DELETE FROM public.casbin_rule
+WHERE ptype = 'p' AND v0 = 'agent' AND v1 = '/api/v1/agent/commands/report' AND v2 = 'POST';
+
+DELETE FROM public.casbin_rule
+WHERE ptype = 'p' AND v0 = 'agent' AND v1 = '/api/v1/agent/commands/wait/:deployment_hash' AND v2 = 'GET';
+
+DELETE FROM public.casbin_rule
+WHERE ptype = 'p' AND v0 = 'group_anonymous' AND v1 = '/api/v1/agent/register' AND v2 = 'POST';
+
+DELETE FROM public.casbin_rule
+WHERE ptype = 'p' AND v0 = 'group_user' AND v1 = '/api/v1/agent/register' AND v2 = 'POST';
+
+DELETE FROM public.casbin_rule
+WHERE ptype = 'p' AND v0 = 'group_admin' AND v1 = '/api/v1/agent/register' AND v2 = 'POST';
+
+DELETE FROM public.casbin_rule
+WHERE ptype = 'g' AND v0 = 'agent' AND v1 = 'group_anonymous';
diff --git a/migrations/20251222160220_casbin_agent_rules.up.sql b/migrations/20251222160220_casbin_agent_rules.up.sql
new file mode 100644
index 0000000..44e0217
--- /dev/null
+++ b/migrations/20251222160220_casbin_agent_rules.up.sql
@@ -0,0 +1,24 @@
+-- Add agent role group and permissions
+
+-- Create agent role group (inherits from group_anonymous for health checks)
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('g', 'agent', 'group_anonymous', '', '', '', '');
+
+-- Agent registration (anonymous, users, and admin can register agents)
+-- This allows agents to bootstrap themselves during deployment
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_anonymous', '/api/v1/agent/register', 'POST', '', '', '');
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_user', '/api/v1/agent/register', 'POST', '', '', '');
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_admin', '/api/v1/agent/register', 'POST', '', '', '');
+
+-- Agent long-poll for commands (only agents can do this)
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'agent', '/api/v1/agent/commands/wait/:deployment_hash', 'GET', '', '', '');
+
+-- Agent report command results (only agents can do this)
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'agent', '/api/v1/agent/commands/report', 'POST', '', '', '');
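+
+-- Net effect: a subject authenticated as 'agent' inherits the group_anonymous rules via the
+-- 'g' row above and additionally matches the two agent-only 'p' rows; ':deployment_hash' is a
+-- path parameter, so matching it depends on the model's matcher (e.g. keyMatch2-style patterns).
+-- To inspect the effective rows (illustrative only):
+--   SELECT ptype, v0, v1, v2 FROM public.casbin_rule WHERE v0 IN ('agent', 'group_anonymous');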
diff --git a/migrations/20251222163002_create_commands_and_queue.down.sql b/migrations/20251222163002_create_commands_and_queue.down.sql
new file mode 100644
index 0000000..6186a0c
--- /dev/null
+++ b/migrations/20251222163002_create_commands_and_queue.down.sql
@@ -0,0 +1,3 @@
+-- Drop command_queue and commands tables
+DROP TABLE IF EXISTS command_queue;
+DROP TABLE IF EXISTS commands;
diff --git a/migrations/20251222163002_create_commands_and_queue.up.sql b/migrations/20251222163002_create_commands_and_queue.up.sql
new file mode 100644
index 0000000..3b34222
--- /dev/null
+++ b/migrations/20251222163002_create_commands_and_queue.up.sql
@@ -0,0 +1,40 @@
+-- Create commands table
+CREATE TABLE commands (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ command_id VARCHAR(64) UNIQUE NOT NULL,
+ deployment_hash VARCHAR(64) NOT NULL REFERENCES deployment(deployment_hash) ON DELETE CASCADE,
+ type VARCHAR(100) NOT NULL,
+ status VARCHAR(50) DEFAULT 'queued' NOT NULL,
+ priority VARCHAR(20) DEFAULT 'normal' NOT NULL,
+ parameters JSONB DEFAULT '{}'::jsonb,
+ result JSONB,
+ error JSONB,
+ created_by VARCHAR(255) NOT NULL,
+ created_at TIMESTAMP DEFAULT NOW() NOT NULL,
+ scheduled_for TIMESTAMP,
+ sent_at TIMESTAMP,
+ started_at TIMESTAMP,
+ completed_at TIMESTAMP,
+ timeout_seconds INTEGER DEFAULT 300,
+ metadata JSONB DEFAULT '{}'::jsonb,
+ CONSTRAINT chk_command_status CHECK (status IN ('queued', 'sent', 'executing', 'completed', 'failed', 'cancelled')),
+ CONSTRAINT chk_command_priority CHECK (priority IN ('low', 'normal', 'high', 'critical'))
+);
+
+CREATE INDEX idx_commands_deployment_hash ON commands(deployment_hash);
+CREATE INDEX idx_commands_status ON commands(status);
+CREATE INDEX idx_commands_created_by ON commands(created_by);
+CREATE INDEX idx_commands_created_at ON commands(created_at);
+CREATE INDEX idx_commands_command_id ON commands(command_id);
+
+-- Create command_queue table for long polling
+CREATE TABLE command_queue (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ command_id UUID NOT NULL REFERENCES commands(id) ON DELETE CASCADE,
+ deployment_hash VARCHAR(64) NOT NULL,
+ priority INTEGER DEFAULT 0 NOT NULL,
+ created_at TIMESTAMP DEFAULT NOW() NOT NULL
+);
+
+CREATE INDEX idx_queue_deployment ON command_queue(deployment_hash, priority DESC, created_at ASC);
+CREATE INDEX idx_queue_command_id ON command_queue(command_id);
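+
+-- The composite index above is shaped for the long-poll dequeue pattern (illustrative only;
+-- the actual polling query lives in application code):
+--   SELECT command_id FROM command_queue
+--   WHERE deployment_hash = $1
+--   ORDER BY priority DESC, created_at ASC
+--   LIMIT 1;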
diff --git a/migrations/20251222163632_casbin_command_rules.down.sql b/migrations/20251222163632_casbin_command_rules.down.sql
new file mode 100644
index 0000000..ffc2124
--- /dev/null
+++ b/migrations/20251222163632_casbin_command_rules.down.sql
@@ -0,0 +1,4 @@
+-- Remove Casbin rules for command management endpoints
+DELETE FROM public.casbin_rule
+WHERE (ptype = 'p' AND v0 = 'group_user' AND v1 LIKE '/api/v1/commands%')
+ OR (ptype = 'p' AND v0 = 'group_admin' AND v1 LIKE '/api/v1/commands%');
diff --git a/migrations/20251222163632_casbin_command_rules.up.sql b/migrations/20251222163632_casbin_command_rules.up.sql
new file mode 100644
index 0000000..5e4241b
--- /dev/null
+++ b/migrations/20251222163632_casbin_command_rules.up.sql
@@ -0,0 +1,18 @@
+-- Add Casbin rules for command management endpoints
+-- Users and admins can create, list, get, and cancel commands
+
+-- User permissions: manage commands for their own deployments
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES
+ ('p', 'group_user', '/api/v1/commands', 'POST', '', '', ''), -- Create command
+ ('p', 'group_user', '/api/v1/commands/:deployment_hash', 'GET', '', '', ''), -- List commands for deployment
+ ('p', 'group_user', '/api/v1/commands/:deployment_hash/:command_id', 'GET', '', '', ''), -- Get specific command
+ ('p', 'group_user', '/api/v1/commands/:deployment_hash/:command_id/cancel', 'POST', '', '', ''); -- Cancel command
+
+-- Admin permissions: inherit all user permissions + full access
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES
+ ('p', 'group_admin', '/api/v1/commands', 'POST', '', '', ''),
+ ('p', 'group_admin', '/api/v1/commands/:deployment_hash', 'GET', '', '', ''),
+ ('p', 'group_admin', '/api/v1/commands/:deployment_hash/:command_id', 'GET', '', '', ''),
+ ('p', 'group_admin', '/api/v1/commands/:deployment_hash/:command_id/cancel', 'POST', '', '', '');
diff --git a/migrations/20251222223450_fix_commands_queue_and_updated_at.down.sql b/migrations/20251222223450_fix_commands_queue_and_updated_at.down.sql
new file mode 100644
index 0000000..035fefa
--- /dev/null
+++ b/migrations/20251222223450_fix_commands_queue_and_updated_at.down.sql
@@ -0,0 +1,13 @@
+-- Revert updated_at addition and command_queue command_id type change
+ALTER TABLE commands
+ DROP COLUMN IF EXISTS updated_at;
+
+ALTER TABLE command_queue
+ DROP CONSTRAINT IF EXISTS command_queue_command_id_fkey;
+
+ALTER TABLE command_queue
+ ALTER COLUMN command_id TYPE UUID USING command_id::uuid;
+
+ALTER TABLE command_queue
+ ADD CONSTRAINT command_queue_command_id_fkey
+ FOREIGN KEY (command_id) REFERENCES commands(id) ON DELETE CASCADE;
diff --git a/migrations/20251222223450_fix_commands_queue_and_updated_at.up.sql b/migrations/20251222223450_fix_commands_queue_and_updated_at.up.sql
new file mode 100644
index 0000000..066f50b
--- /dev/null
+++ b/migrations/20251222223450_fix_commands_queue_and_updated_at.up.sql
@@ -0,0 +1,15 @@
+-- Add updated_at to commands and fix command_queue command_id type
+
+ALTER TABLE commands
+ADD COLUMN IF NOT EXISTS updated_at TIMESTAMP DEFAULT NOW() NOT NULL;
+
+-- Ensure command_queue.command_id matches commands.command_id (varchar)
+ALTER TABLE command_queue
+ DROP CONSTRAINT IF EXISTS command_queue_command_id_fkey;
+
+ALTER TABLE command_queue
+ ALTER COLUMN command_id TYPE VARCHAR(64);
+
+ALTER TABLE command_queue
+ ADD CONSTRAINT command_queue_command_id_fkey
+ FOREIGN KEY (command_id) REFERENCES commands(command_id) ON DELETE CASCADE;
diff --git a/migrations/20251222224041_fix_timestamp_columns.down.sql b/migrations/20251222224041_fix_timestamp_columns.down.sql
new file mode 100644
index 0000000..b8bfbaf
--- /dev/null
+++ b/migrations/20251222224041_fix_timestamp_columns.down.sql
@@ -0,0 +1,8 @@
+-- Revert timestamp conversions
+ALTER TABLE deployment
+ ALTER COLUMN last_seen_at TYPE timestamp;
+
+ALTER TABLE agents
+ ALTER COLUMN last_heartbeat TYPE timestamp,
+ ALTER COLUMN created_at TYPE timestamp,
+ ALTER COLUMN updated_at TYPE timestamp;
diff --git a/migrations/20251222224041_fix_timestamp_columns.up.sql b/migrations/20251222224041_fix_timestamp_columns.up.sql
new file mode 100644
index 0000000..1c01049
--- /dev/null
+++ b/migrations/20251222224041_fix_timestamp_columns.up.sql
@@ -0,0 +1,8 @@
+-- Convert deployment.last_seen_at and the agents timestamp columns to timestamptz
+ALTER TABLE deployment
+ ALTER COLUMN last_seen_at TYPE timestamptz;
+
+ALTER TABLE agents
+ ALTER COLUMN last_heartbeat TYPE timestamptz,
+ ALTER COLUMN created_at TYPE timestamptz,
+ ALTER COLUMN updated_at TYPE timestamptz;
diff --git a/migrations/20251222225538_timestamptz_for_agents_deployments_commands.down.sql b/migrations/20251222225538_timestamptz_for_agents_deployments_commands.down.sql
new file mode 100644
index 0000000..95f4c57
--- /dev/null
+++ b/migrations/20251222225538_timestamptz_for_agents_deployments_commands.down.sql
@@ -0,0 +1,26 @@
+-- Revert timestamptz changes back to timestamp (non-tz)
+
+-- command_queue
+ALTER TABLE command_queue
+ ALTER COLUMN created_at TYPE timestamp;
+
+-- commands
+ALTER TABLE commands
+ ALTER COLUMN completed_at TYPE timestamp,
+ ALTER COLUMN started_at TYPE timestamp,
+ ALTER COLUMN sent_at TYPE timestamp,
+ ALTER COLUMN scheduled_for TYPE timestamp,
+ ALTER COLUMN updated_at TYPE timestamp,
+ ALTER COLUMN created_at TYPE timestamp;
+
+-- agents
+ALTER TABLE agents
+ ALTER COLUMN last_heartbeat TYPE timestamp,
+ ALTER COLUMN updated_at TYPE timestamp,
+ ALTER COLUMN created_at TYPE timestamp;
+
+-- deployment
+ALTER TABLE deployment
+ ALTER COLUMN last_seen_at TYPE timestamp,
+ ALTER COLUMN updated_at TYPE timestamp,
+ ALTER COLUMN created_at TYPE timestamp;
diff --git a/migrations/20251222225538_timestamptz_for_agents_deployments_commands.up.sql b/migrations/20251222225538_timestamptz_for_agents_deployments_commands.up.sql
new file mode 100644
index 0000000..804cce9
--- /dev/null
+++ b/migrations/20251222225538_timestamptz_for_agents_deployments_commands.up.sql
@@ -0,0 +1,26 @@
+-- Convert key timestamp columns to timestamptz so the Rust models can use timezone-aware DateTime values
+
+-- deployment
+ALTER TABLE deployment
+ ALTER COLUMN created_at TYPE timestamptz,
+ ALTER COLUMN updated_at TYPE timestamptz,
+ ALTER COLUMN last_seen_at TYPE timestamptz;
+
+-- agents
+ALTER TABLE agents
+ ALTER COLUMN created_at TYPE timestamptz,
+ ALTER COLUMN updated_at TYPE timestamptz,
+ ALTER COLUMN last_heartbeat TYPE timestamptz;
+
+-- commands
+ALTER TABLE commands
+ ALTER COLUMN created_at TYPE timestamptz,
+ ALTER COLUMN updated_at TYPE timestamptz,
+ ALTER COLUMN scheduled_for TYPE timestamptz,
+ ALTER COLUMN sent_at TYPE timestamptz,
+ ALTER COLUMN started_at TYPE timestamptz,
+ ALTER COLUMN completed_at TYPE timestamptz;
+
+-- command_queue
+ALTER TABLE command_queue
+ ALTER COLUMN created_at TYPE timestamptz;
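+
+-- With sqlx's chrono feature, timestamptz columns map to chrono::DateTime<Utc> on the Rust
+-- side (plain timestamp maps to NaiveDateTime), which is the motivation for this conversion.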
diff --git a/migrations/20251223100000_casbin_agent_rules.up.sql b/migrations/20251223100000_casbin_agent_rules.up.sql
new file mode 100644
index 0000000..7a26ca0
--- /dev/null
+++ b/migrations/20251223100000_casbin_agent_rules.up.sql
@@ -0,0 +1 @@
+-- Duplicate of 20251222160220_casbin_agent_rules.up.sql; intentionally left empty
diff --git a/migrations/20251223120000_project_body_to_metadata.down.sql b/migrations/20251223120000_project_body_to_metadata.down.sql
new file mode 100644
index 0000000..f5c3c77
--- /dev/null
+++ b/migrations/20251223120000_project_body_to_metadata.down.sql
@@ -0,0 +1,2 @@
+-- Revert project.metadata back to project.body
+ALTER TABLE project RENAME COLUMN metadata TO body;
diff --git a/migrations/20251223120000_project_body_to_metadata.up.sql b/migrations/20251223120000_project_body_to_metadata.up.sql
new file mode 100644
index 0000000..5e33594
--- /dev/null
+++ b/migrations/20251223120000_project_body_to_metadata.up.sql
@@ -0,0 +1,2 @@
+-- Rename project.body to project.metadata to align with model changes
+ALTER TABLE project RENAME COLUMN body TO metadata;
diff --git a/migrations/20251225120000_casbin_agent_and_commands_rules.down.sql b/migrations/20251225120000_casbin_agent_and_commands_rules.down.sql
new file mode 100644
index 0000000..db8ed1e
--- /dev/null
+++ b/migrations/20251225120000_casbin_agent_and_commands_rules.down.sql
@@ -0,0 +1,24 @@
+-- Rollback Casbin rules for agent and commands endpoints
+DELETE FROM public.casbin_rule WHERE ptype='p' AND v0='group_user' AND v1='/api/v1/agent/register' AND v2='POST' AND v3='' AND v4='' AND v5='';
+DELETE FROM public.casbin_rule WHERE ptype='p' AND v0='group_admin' AND v1='/api/v1/agent/register' AND v2='POST' AND v3='' AND v4='' AND v5='';
+DELETE FROM public.casbin_rule WHERE ptype='p' AND v0='client' AND v1='/api/v1/agent/register' AND v2='POST' AND v3='' AND v4='' AND v5='';
+
+DELETE FROM public.casbin_rule WHERE ptype='p' AND v0='group_user' AND v1='/api/v1/agent/commands/report' AND v2='POST' AND v3='' AND v4='' AND v5='';
+DELETE FROM public.casbin_rule WHERE ptype='p' AND v0='group_admin' AND v1='/api/v1/agent/commands/report' AND v2='POST' AND v3='' AND v4='' AND v5='';
+DELETE FROM public.casbin_rule WHERE ptype='p' AND v0='client' AND v1='/api/v1/agent/commands/report' AND v2='POST' AND v3='' AND v4='' AND v5='';
+
+DELETE FROM public.casbin_rule WHERE ptype='p' AND v0='group_user' AND v1='/api/v1/agent/commands/wait/:deployment_hash' AND v2='GET' AND v3='' AND v4='' AND v5='';
+DELETE FROM public.casbin_rule WHERE ptype='p' AND v0='group_admin' AND v1='/api/v1/agent/commands/wait/:deployment_hash' AND v2='GET' AND v3='' AND v4='' AND v5='';
+DELETE FROM public.casbin_rule WHERE ptype='p' AND v0='client' AND v1='/api/v1/agent/commands/wait/:deployment_hash' AND v2='GET' AND v3='' AND v4='' AND v5='';
+
+DELETE FROM public.casbin_rule WHERE ptype='p' AND v0='group_user' AND v1='/api/v1/commands' AND v2='POST' AND v3='' AND v4='' AND v5='';
+DELETE FROM public.casbin_rule WHERE ptype='p' AND v0='group_admin' AND v1='/api/v1/commands' AND v2='POST' AND v3='' AND v4='' AND v5='';
+
+DELETE FROM public.casbin_rule WHERE ptype='p' AND v0='group_user' AND v1='/api/v1/commands/:deployment_hash' AND v2='GET' AND v3='' AND v4='' AND v5='';
+DELETE FROM public.casbin_rule WHERE ptype='p' AND v0='group_admin' AND v1='/api/v1/commands/:deployment_hash' AND v2='GET' AND v3='' AND v4='' AND v5='';
+
+DELETE FROM public.casbin_rule WHERE ptype='p' AND v0='group_user' AND v1='/api/v1/commands/:deployment_hash/:command_id' AND v2='GET' AND v3='' AND v4='' AND v5='';
+DELETE FROM public.casbin_rule WHERE ptype='p' AND v0='group_admin' AND v1='/api/v1/commands/:deployment_hash/:command_id' AND v2='GET' AND v3='' AND v4='' AND v5='';
+
+DELETE FROM public.casbin_rule WHERE ptype='p' AND v0='group_user' AND v1='/api/v1/commands/:deployment_hash/:command_id/cancel' AND v2='POST' AND v3='' AND v4='' AND v5='';
+DELETE FROM public.casbin_rule WHERE ptype='p' AND v0='group_admin' AND v1='/api/v1/commands/:deployment_hash/:command_id/cancel' AND v2='POST' AND v3='' AND v4='' AND v5='';
diff --git a/migrations/20251225120000_casbin_agent_and_commands_rules.up.sql b/migrations/20251225120000_casbin_agent_and_commands_rules.up.sql
new file mode 100644
index 0000000..7c72aec
--- /dev/null
+++ b/migrations/20251225120000_casbin_agent_and_commands_rules.up.sql
@@ -0,0 +1,27 @@
+-- Casbin rules for agent and commands endpoints
+-- Allow user and admin to access agent registration and reporting
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/api/v1/agent/register', 'POST', '', '', '') ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/v1/agent/register', 'POST', '', '', '') ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'client', '/api/v1/agent/register', 'POST', '', '', '') ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/api/v1/agent/commands/report', 'POST', '', '', '') ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/v1/agent/commands/report', 'POST', '', '', '') ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'client', '/api/v1/agent/commands/report', 'POST', '', '', '') ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+
+-- Wait endpoint (GET) with path parameter
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/api/v1/agent/commands/wait/:deployment_hash', 'GET', '', '', '') ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/v1/agent/commands/wait/:deployment_hash', 'GET', '', '', '') ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'client', '/api/v1/agent/commands/wait/:deployment_hash', 'GET', '', '', '') ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+
+-- Commands endpoints
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/api/v1/commands', 'POST', '', '', '') ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/v1/commands', 'POST', '', '', '') ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/api/v1/commands/:deployment_hash', 'GET', '', '', '') ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/v1/commands/:deployment_hash', 'GET', '', '', '') ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/api/v1/commands/:deployment_hash/:command_id', 'GET', '', '', '') ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/v1/commands/:deployment_hash/:command_id', 'GET', '', '', '') ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/api/v1/commands/:deployment_hash/:command_id/cancel', 'POST', '', '', '') ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/v1/commands/:deployment_hash/:command_id/cancel', 'POST', '', '', '') ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
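+
+-- unique_key_sqlx_adapter is the unique constraint maintained by the Casbin sqlx adapter
+-- (presumably spanning ptype and v0..v5), so re-running these inserts is a no-op for rows
+-- that already exist; later migrations rely on the equivalent bare ON CONFLICT DO NOTHING.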
diff --git a/migrations/20251227000000_casbin_root_admin_group.down.sql b/migrations/20251227000000_casbin_root_admin_group.down.sql
new file mode 100644
index 0000000..6eaf28b
--- /dev/null
+++ b/migrations/20251227000000_casbin_root_admin_group.down.sql
@@ -0,0 +1,3 @@
+-- Rollback: Remove root group from group_admin
+DELETE FROM public.casbin_rule
+WHERE ptype = 'g' AND v0 = 'root' AND v1 = 'group_admin';
diff --git a/migrations/20251227000000_casbin_root_admin_group.up.sql b/migrations/20251227000000_casbin_root_admin_group.up.sql
new file mode 100644
index 0000000..8e2fd9b
--- /dev/null
+++ b/migrations/20251227000000_casbin_root_admin_group.up.sql
@@ -0,0 +1,5 @@
+-- Add root group assigned to group_admin for external application access
+-- Idempotent insert; ignore if the mapping already exists
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('g', 'root', 'group_admin', '', '', '', '')
+ON CONFLICT DO NOTHING;
diff --git a/migrations/20251227132000_add_group_admin_project_get_rule.down.sql b/migrations/20251227132000_add_group_admin_project_get_rule.down.sql
new file mode 100644
index 0000000..d737da4
--- /dev/null
+++ b/migrations/20251227132000_add_group_admin_project_get_rule.down.sql
@@ -0,0 +1,3 @@
+-- Rollback: remove the group_admin GET /project rule
+DELETE FROM public.casbin_rule
+WHERE ptype = 'p' AND v0 = 'group_admin' AND v1 = '/project' AND v2 = 'GET' AND v3 = '' AND v4 = '' AND v5 = '';
diff --git a/migrations/20251227132000_add_group_admin_project_get_rule.up.sql b/migrations/20251227132000_add_group_admin_project_get_rule.up.sql
new file mode 100644
index 0000000..8a9e2d3
--- /dev/null
+++ b/migrations/20251227132000_add_group_admin_project_get_rule.up.sql
@@ -0,0 +1,4 @@
+-- Ensure group_admin can GET /project
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_admin', '/project', 'GET', '', '', '')
+ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
diff --git a/migrations/20251227140000_casbin_mcp_endpoint.down.sql b/migrations/20251227140000_casbin_mcp_endpoint.down.sql
new file mode 100644
index 0000000..6f26ad9
--- /dev/null
+++ b/migrations/20251227140000_casbin_mcp_endpoint.down.sql
@@ -0,0 +1,7 @@
+-- Remove Casbin rules for MCP WebSocket endpoint
+
+DELETE FROM public.casbin_rule
+WHERE ptype = 'p'
+ AND v0 IN ('group_admin', 'group_user')
+ AND v1 = '/mcp'
+ AND v2 = 'GET';
diff --git a/migrations/20251227140000_casbin_mcp_endpoint.up.sql b/migrations/20251227140000_casbin_mcp_endpoint.up.sql
new file mode 100644
index 0000000..9eb3a28
--- /dev/null
+++ b/migrations/20251227140000_casbin_mcp_endpoint.up.sql
@@ -0,0 +1,8 @@
+-- Add Casbin rules for MCP WebSocket endpoint
+-- Allow authenticated users and admins to access MCP
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES
+ ('p', 'group_admin', '/mcp', 'GET', '', '', ''),
+ ('p', 'group_user', '/mcp', 'GET', '', '', '')
+ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
diff --git a/migrations/20251229120000_marketplace.down.sql b/migrations/20251229120000_marketplace.down.sql
new file mode 100644
index 0000000..0af56cd
--- /dev/null
+++ b/migrations/20251229120000_marketplace.down.sql
@@ -0,0 +1,31 @@
+-- Rollback TryDirect Marketplace Schema
+
+DROP TRIGGER IF EXISTS auto_create_product_on_approval ON stack_template;
+DROP FUNCTION IF EXISTS create_product_for_approved_template();
+
+DROP TRIGGER IF EXISTS update_stack_template_updated_at ON stack_template;
+
+-- Drop indexes
+DROP INDEX IF EXISTS idx_project_source_template;
+DROP INDEX IF EXISTS idx_review_decision;
+DROP INDEX IF EXISTS idx_review_template;
+DROP INDEX IF EXISTS idx_template_version_latest;
+DROP INDEX IF EXISTS idx_template_version_template;
+DROP INDEX IF EXISTS idx_stack_template_product;
+DROP INDEX IF EXISTS idx_stack_template_category;
+DROP INDEX IF EXISTS idx_stack_template_slug;
+DROP INDEX IF EXISTS idx_stack_template_status;
+DROP INDEX IF EXISTS idx_stack_template_creator;
+
+-- Remove columns from existing tables
+ALTER TABLE IF EXISTS project DROP COLUMN IF EXISTS template_version;
+ALTER TABLE IF EXISTS project DROP COLUMN IF EXISTS source_template_id;
+
+-- Drop marketplace tables (CASCADE to handle dependencies)
+DROP TABLE IF EXISTS stack_template_review CASCADE;
+DROP TABLE IF EXISTS stack_template_version CASCADE;
+DROP TABLE IF EXISTS stack_template CASCADE;
+DROP TABLE IF EXISTS stack_category CASCADE;
+
+-- Drop functions last
+DROP FUNCTION IF EXISTS update_updated_at_column() CASCADE;
diff --git a/migrations/20251229120000_marketplace.up.sql b/migrations/20251229120000_marketplace.up.sql
new file mode 100644
index 0000000..9bc0504
--- /dev/null
+++ b/migrations/20251229120000_marketplace.up.sql
@@ -0,0 +1,155 @@
+-- TryDirect Marketplace Schema Migration
+-- Integrates with existing Product/Rating system
+
+-- Ensure UUID generation
+CREATE EXTENSION IF NOT EXISTS pgcrypto;
+
+-- 1. Categories (needed by templates)
+CREATE TABLE IF NOT EXISTS stack_category (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(255) UNIQUE NOT NULL
+);
+
+-- 2. Core marketplace table - templates become products when approved
+CREATE TABLE IF NOT EXISTS stack_template (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ creator_user_id VARCHAR(50) NOT NULL,
+ creator_name VARCHAR(255),
+ name VARCHAR(255) NOT NULL,
+ slug VARCHAR(255) UNIQUE NOT NULL,
+ short_description TEXT,
+ long_description TEXT,
+ category_id INTEGER REFERENCES stack_category(id),
+ tags JSONB DEFAULT '[]'::jsonb,
+ tech_stack JSONB DEFAULT '{}'::jsonb,
+ status VARCHAR(50) NOT NULL DEFAULT 'draft' CHECK (
+ status IN ('draft', 'submitted', 'under_review', 'approved', 'rejected', 'deprecated')
+ ),
+ is_configurable BOOLEAN DEFAULT true,
+ view_count INTEGER DEFAULT 0,
+ deploy_count INTEGER DEFAULT 0,
+ product_id INTEGER, -- Links to product table when approved for ratings
+ created_at TIMESTAMP WITH TIME ZONE DEFAULT now(),
+ updated_at TIMESTAMP WITH TIME ZONE DEFAULT now(),
+ approved_at TIMESTAMP WITH TIME ZONE,
+ CONSTRAINT fk_product FOREIGN KEY(product_id) REFERENCES product(id) ON DELETE SET NULL
+);
+
+CREATE TABLE IF NOT EXISTS stack_template_version (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ template_id UUID NOT NULL REFERENCES stack_template(id) ON DELETE CASCADE,
+ version VARCHAR(20) NOT NULL,
+ stack_definition JSONB NOT NULL,
+ definition_format VARCHAR(20) DEFAULT 'yaml',
+ changelog TEXT,
+ is_latest BOOLEAN DEFAULT false,
+ created_at TIMESTAMP WITH TIME ZONE DEFAULT now(),
+ UNIQUE(template_id, version)
+);
+
+CREATE TABLE IF NOT EXISTS stack_template_review (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ template_id UUID NOT NULL REFERENCES stack_template(id) ON DELETE CASCADE,
+ reviewer_user_id VARCHAR(50),
+ decision VARCHAR(50) NOT NULL DEFAULT 'pending' CHECK (
+ decision IN ('pending', 'approved', 'rejected', 'needs_changes')
+ ),
+ review_reason TEXT,
+ security_checklist JSONB DEFAULT '{
+ "no_secrets": null,
+ "no_hardcoded_creds": null,
+ "valid_docker_syntax": null,
+ "no_malicious_code": null
+ }'::jsonb,
+ submitted_at TIMESTAMP WITH TIME ZONE DEFAULT now(),
+ reviewed_at TIMESTAMP WITH TIME ZONE
+);
+
+-- Extend existing tables
+DO $$ BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM information_schema.columns
+ WHERE table_name = 'project' AND column_name = 'source_template_id'
+ ) THEN
+ ALTER TABLE project ADD COLUMN source_template_id UUID REFERENCES stack_template(id);
+ END IF;
+END $$;
+
+DO $$ BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM information_schema.columns
+ WHERE table_name = 'project' AND column_name = 'template_version'
+ ) THEN
+ ALTER TABLE project ADD COLUMN template_version VARCHAR(20);
+ END IF;
+END $$;
+
+-- Indexes
+CREATE INDEX IF NOT EXISTS idx_stack_template_creator ON stack_template(creator_user_id);
+CREATE INDEX IF NOT EXISTS idx_stack_template_status ON stack_template(status);
+CREATE INDEX IF NOT EXISTS idx_stack_template_slug ON stack_template(slug);
+CREATE INDEX IF NOT EXISTS idx_stack_template_category ON stack_template(category_id);
+CREATE INDEX IF NOT EXISTS idx_stack_template_product ON stack_template(product_id);
+
+CREATE INDEX IF NOT EXISTS idx_template_version_template ON stack_template_version(template_id);
+CREATE INDEX IF NOT EXISTS idx_template_version_latest ON stack_template_version(template_id, is_latest) WHERE is_latest = true;
+
+CREATE INDEX IF NOT EXISTS idx_review_template ON stack_template_review(template_id);
+CREATE INDEX IF NOT EXISTS idx_review_decision ON stack_template_review(decision);
+
+CREATE INDEX IF NOT EXISTS idx_project_source_template ON project(source_template_id);
+
+-- Triggers
+CREATE OR REPLACE FUNCTION update_updated_at_column()
+RETURNS TRIGGER AS $$
+BEGIN
+ NEW.updated_at = now();
+ RETURN NEW;
+END;
+$$ language 'plpgsql';
+
+DROP TRIGGER IF EXISTS update_stack_template_updated_at ON stack_template;
+CREATE TRIGGER update_stack_template_updated_at
+ BEFORE UPDATE ON stack_template
+ FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
+
+-- Function to create product entry when template is approved
+CREATE OR REPLACE FUNCTION create_product_for_approved_template()
+RETURNS TRIGGER AS $$
+DECLARE
+ new_product_id INTEGER;
+BEGIN
+ -- When status changes to 'approved' and no product exists yet
+ IF NEW.status = 'approved' AND OLD.status != 'approved' AND NEW.product_id IS NULL THEN
+ -- Generate product_id from template UUID (use hashtext for deterministic integer)
+ new_product_id := hashtext(NEW.id::text);
+
+ -- Insert into product table
+ INSERT INTO product (id, obj_id, obj_type, created_at, updated_at)
+ VALUES (new_product_id, new_product_id, 'marketplace_template', now(), now())
+ ON CONFLICT (id) DO NOTHING;
+
+ -- Link template to product
+ NEW.product_id := new_product_id;
+ END IF;
+ RETURN NEW;
+END;
+$$ language 'plpgsql';
+
+DROP TRIGGER IF EXISTS auto_create_product_on_approval ON stack_template;
+CREATE TRIGGER auto_create_product_on_approval
+ BEFORE UPDATE ON stack_template
+ FOR EACH ROW
+ WHEN (NEW.status = 'approved' AND OLD.status != 'approved')
+ EXECUTE FUNCTION create_product_for_approved_template();
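+
+-- Illustrative approval flow (not executed by this migration): an admin review ends with
+--   UPDATE stack_template SET status = 'approved', approved_at = now() WHERE id = $1;
+-- which fires the trigger above, creating the linked product row and filling product_id.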
+
+-- Seed sample categories
+INSERT INTO stack_category (name)
+VALUES
+ ('AI Agents'),
+ ('Data Pipelines'),
+ ('SaaS Starter'),
+ ('Dev Tools'),
+ ('Automation')
+ON CONFLICT DO NOTHING;
+
diff --git a/migrations/20251229121000_casbin_marketplace_rules.down.sql b/migrations/20251229121000_casbin_marketplace_rules.down.sql
new file mode 100644
index 0000000..29018e0
--- /dev/null
+++ b/migrations/20251229121000_casbin_marketplace_rules.down.sql
@@ -0,0 +1,12 @@
+-- Rollback Casbin rules for Marketplace endpoints
+DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_anonymous' AND v1 = '/api/templates' AND v2 = 'GET';
+DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_anonymous' AND v1 = '/api/templates/:slug' AND v2 = 'GET';
+
+DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_user' AND v1 = '/api/templates' AND v2 = 'POST';
+DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_user' AND v1 = '/api/templates/:id' AND v2 = 'PUT';
+DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_user' AND v1 = '/api/templates/:id/submit' AND v2 = 'POST';
+DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_user' AND v1 = '/api/templates/mine' AND v2 = 'GET';
+
+DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_admin' AND v1 = '/api/admin/templates' AND v2 = 'GET';
+DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_admin' AND v1 = '/api/admin/templates/:id/approve' AND v2 = 'POST';
+DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_admin' AND v1 = '/api/admin/templates/:id/reject' AND v2 = 'POST';
diff --git a/migrations/20251229121000_casbin_marketplace_rules.up.sql b/migrations/20251229121000_casbin_marketplace_rules.up.sql
new file mode 100644
index 0000000..03f2917
--- /dev/null
+++ b/migrations/20251229121000_casbin_marketplace_rules.up.sql
@@ -0,0 +1,16 @@
+-- Casbin rules for Marketplace endpoints
+
+-- Public read rules
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_anonymous', '/api/templates', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_anonymous', '/api/templates/:slug', 'GET', '', '', '');
+
+-- Creator rules
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/api/templates', 'POST', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/api/templates/:id', 'PUT', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/api/templates/:id/submit', 'POST', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/api/templates/mine', 'GET', '', '', '');
+
+-- Admin moderation rules
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/admin/templates', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/admin/templates/:id/approve', 'POST', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/admin/templates/:id/reject', 'POST', '', '', '');
diff --git a/migrations/20251230094608_add_required_plan_name.down.sql b/migrations/20251230094608_add_required_plan_name.down.sql
new file mode 100644
index 0000000..c6b04bc
--- /dev/null
+++ b/migrations/20251230094608_add_required_plan_name.down.sql
@@ -0,0 +1,2 @@
+-- Add down migration script here
+ALTER TABLE stack_template DROP COLUMN IF EXISTS required_plan_name;
\ No newline at end of file
diff --git a/migrations/20251230094608_add_required_plan_name.up.sql b/migrations/20251230094608_add_required_plan_name.up.sql
new file mode 100644
index 0000000..fcd896d
--- /dev/null
+++ b/migrations/20251230094608_add_required_plan_name.up.sql
@@ -0,0 +1,2 @@
+-- Add up migration script here
+ALTER TABLE stack_template ADD COLUMN IF NOT EXISTS required_plan_name VARCHAR(50);
\ No newline at end of file
diff --git a/migrations/20251230100000_add_marketplace_plans_rule.down.sql b/migrations/20251230100000_add_marketplace_plans_rule.down.sql
new file mode 100644
index 0000000..8658c29
--- /dev/null
+++ b/migrations/20251230100000_add_marketplace_plans_rule.down.sql
@@ -0,0 +1,2 @@
+DELETE FROM public.casbin_rule
+WHERE ptype = 'p' AND v0 = 'group_admin' AND v1 = '/admin/marketplace/plans' AND v2 = 'GET' AND v3 = '' AND v4 = '' AND v5 = '';
diff --git a/migrations/20251230100000_add_marketplace_plans_rule.up.sql b/migrations/20251230100000_add_marketplace_plans_rule.up.sql
new file mode 100644
index 0000000..eeeb407
--- /dev/null
+++ b/migrations/20251230100000_add_marketplace_plans_rule.up.sql
@@ -0,0 +1,3 @@
+-- Casbin rule for admin marketplace plans endpoint
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_admin', '/admin/marketplace/plans', 'GET', '', '', '');
diff --git a/migrations/20260101090000_casbin_admin_inherits_user.down.sql b/migrations/20260101090000_casbin_admin_inherits_user.down.sql
new file mode 100644
index 0000000..3e60867
--- /dev/null
+++ b/migrations/20260101090000_casbin_admin_inherits_user.down.sql
@@ -0,0 +1,9 @@
+-- Remove the inheritance edge if rolled back
+DELETE FROM public.casbin_rule
+WHERE ptype = 'g'
+ AND v0 = 'group_admin'
+ AND v1 = 'group_user'
+ AND (v2 = '' OR v2 IS NULL)
+ AND (v3 = '' OR v3 IS NULL)
+ AND (v4 = '' OR v4 IS NULL)
+ AND (v5 = '' OR v5 IS NULL);
diff --git a/migrations/20260101090000_casbin_admin_inherits_user.up.sql b/migrations/20260101090000_casbin_admin_inherits_user.up.sql
new file mode 100644
index 0000000..7d34d4e
--- /dev/null
+++ b/migrations/20260101090000_casbin_admin_inherits_user.up.sql
@@ -0,0 +1,4 @@
+-- Ensure group_admin inherits group_user so admin (and root) receive user permissions
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('g', 'group_admin', 'group_user', '', '', '', '')
+ON CONFLICT DO NOTHING;
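+
+-- Combined with the earlier 'root' -> 'group_admin' mapping, the chain is now
+-- root -> group_admin -> group_user, so admin and root subjects also match group_user 'p' rules.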
diff --git a/migrations/20260102120000_add_category_fields.down.sql b/migrations/20260102120000_add_category_fields.down.sql
new file mode 100644
index 0000000..7b8aa8f
--- /dev/null
+++ b/migrations/20260102120000_add_category_fields.down.sql
@@ -0,0 +1,7 @@
+-- Drop the index first (it depends on the title column)
+DROP INDEX IF EXISTS idx_stack_category_title;
+
+-- Remove title and metadata fields from stack_category
+ALTER TABLE stack_category
+DROP COLUMN IF EXISTS metadata,
+DROP COLUMN IF EXISTS title;
diff --git a/migrations/20260102120000_add_category_fields.up.sql b/migrations/20260102120000_add_category_fields.up.sql
new file mode 100644
index 0000000..7a2646d
--- /dev/null
+++ b/migrations/20260102120000_add_category_fields.up.sql
@@ -0,0 +1,7 @@
+-- Add title and metadata fields to stack_category for User Service sync
+ALTER TABLE stack_category
+ADD COLUMN IF NOT EXISTS title VARCHAR(255),
+ADD COLUMN IF NOT EXISTS metadata JSONB DEFAULT '{}'::jsonb;
+
+-- Create index on title for display queries
+CREATE INDEX IF NOT EXISTS idx_stack_category_title ON stack_category(title);
diff --git a/migrations/20260102140000_casbin_categories_rules.down.sql b/migrations/20260102140000_casbin_categories_rules.down.sql
new file mode 100644
index 0000000..4db07af
--- /dev/null
+++ b/migrations/20260102140000_casbin_categories_rules.down.sql
@@ -0,0 +1,4 @@
+-- Rollback: Remove Casbin rules for Categories endpoint
+
+DELETE FROM public.casbin_rule
+WHERE ptype = 'p' AND v1 = '/api/categories' AND v2 = 'GET';
diff --git a/migrations/20260102140000_casbin_categories_rules.up.sql b/migrations/20260102140000_casbin_categories_rules.up.sql
new file mode 100644
index 0000000..b24dbc1
--- /dev/null
+++ b/migrations/20260102140000_casbin_categories_rules.up.sql
@@ -0,0 +1,6 @@
+-- Casbin rules for Categories endpoint
+-- Categories are publicly readable for marketplace UI population
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_anonymous', '/api/categories', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_user', '/api/categories', 'GET', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/categories', 'GET', '', '', '');
diff --git a/migrations/20260103103000_casbin_marketplace_admin_creator_rules.down.sql b/migrations/20260103103000_casbin_marketplace_admin_creator_rules.down.sql
new file mode 100644
index 0000000..c717ab0
--- /dev/null
+++ b/migrations/20260103103000_casbin_marketplace_admin_creator_rules.down.sql
@@ -0,0 +1,4 @@
+DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_admin' AND v1 = '/api/templates' AND v2 = 'POST';
+DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_admin' AND v1 = '/api/templates/:id' AND v2 = 'PUT';
+DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_admin' AND v1 = '/api/templates/:id/submit' AND v2 = 'POST';
+DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'group_admin' AND v1 = '/api/templates/mine' AND v2 = 'GET';
diff --git a/migrations/20260103103000_casbin_marketplace_admin_creator_rules.up.sql b/migrations/20260103103000_casbin_marketplace_admin_creator_rules.up.sql
new file mode 100644
index 0000000..3553a9a
--- /dev/null
+++ b/migrations/20260103103000_casbin_marketplace_admin_creator_rules.up.sql
@@ -0,0 +1,6 @@
+-- Allow admin service accounts (e.g., root) to call marketplace creator endpoints
+-- Admins previously lacked creator privileges which caused 403 responses
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/templates', 'POST', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/templates/:id', 'PUT', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/templates/:id/submit', 'POST', '', '', '');
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5) VALUES ('p', 'group_admin', '/api/templates/mine', 'GET', '', '', '');
diff --git a/migrations/20260103120000_casbin_health_metrics_rules.down.sql b/migrations/20260103120000_casbin_health_metrics_rules.down.sql
new file mode 100644
index 0000000..19ea2ac
--- /dev/null
+++ b/migrations/20260103120000_casbin_health_metrics_rules.down.sql
@@ -0,0 +1,7 @@
+-- Remove Casbin rules for health check metrics endpoint
+
+DELETE FROM public.casbin_rule
+WHERE ptype = 'p'
+ AND v0 IN ('group_anonymous', 'group_user', 'group_admin')
+ AND v1 = '/health_check/metrics'
+ AND v2 = 'GET';
diff --git a/migrations/20260103120000_casbin_health_metrics_rules.up.sql b/migrations/20260103120000_casbin_health_metrics_rules.up.sql
new file mode 100644
index 0000000..1519480
--- /dev/null
+++ b/migrations/20260103120000_casbin_health_metrics_rules.up.sql
@@ -0,0 +1,17 @@
+-- Add Casbin rules for health check metrics endpoint
+-- Allow all groups to access health check metrics for monitoring
+
+-- Anonymous users can check health metrics
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_anonymous', '/health_check/metrics', 'GET', '', '', '')
+ON CONFLICT DO NOTHING;
+
+-- Regular users can check health metrics
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_user', '/health_check/metrics', 'GET', '', '', '')
+ON CONFLICT DO NOTHING;
+
+-- Admins can check health metrics
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_admin', '/health_check/metrics', 'GET', '', '', '')
+ON CONFLICT DO NOTHING;
diff --git a/migrations/20260104120000_casbin_admin_service_rules.down.sql b/migrations/20260104120000_casbin_admin_service_rules.down.sql
new file mode 100644
index 0000000..3a1649c
--- /dev/null
+++ b/migrations/20260104120000_casbin_admin_service_rules.down.sql
@@ -0,0 +1,7 @@
+-- Remove Casbin rules for admin_service role
+DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'admin_service' AND v1 = '/stacker/admin/templates' AND v2 = 'GET';
+DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'admin_service' AND v1 = '/stacker/admin/templates/:id/approve' AND v2 = 'POST';
+DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'admin_service' AND v1 = '/stacker/admin/templates/:id/reject' AND v2 = 'POST';
+DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'admin_service' AND v1 = '/api/admin/templates' AND v2 = 'GET';
+DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'admin_service' AND v1 = '/api/admin/templates/:id/approve' AND v2 = 'POST';
+DELETE FROM public.casbin_rule WHERE ptype = 'p' AND v0 = 'admin_service' AND v1 = '/api/admin/templates/:id/reject' AND v2 = 'POST';
diff --git a/migrations/20260104120000_casbin_admin_service_rules.up.sql b/migrations/20260104120000_casbin_admin_service_rules.up.sql
new file mode 100644
index 0000000..5531851
--- /dev/null
+++ b/migrations/20260104120000_casbin_admin_service_rules.up.sql
@@ -0,0 +1,24 @@
+-- Add Casbin rules for admin_service role (internal service authentication)
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'admin_service', '/stacker/admin/templates', 'GET', '', '', '')
+ON CONFLICT DO NOTHING;
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'admin_service', '/stacker/admin/templates/:id/approve', 'POST', '', '', '')
+ON CONFLICT DO NOTHING;
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'admin_service', '/stacker/admin/templates/:id/reject', 'POST', '', '', '')
+ON CONFLICT DO NOTHING;
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'admin_service', '/api/admin/templates', 'GET', '', '', '')
+ON CONFLICT DO NOTHING;
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'admin_service', '/api/admin/templates/:id/approve', 'POST', '', '', '')
+ON CONFLICT DO NOTHING;
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'admin_service', '/api/admin/templates/:id/reject', 'POST', '', '', '')
+ON CONFLICT DO NOTHING;
diff --git a/migrations/20260105214000_casbin_dockerhub_rules.down.sql b/migrations/20260105214000_casbin_dockerhub_rules.down.sql
new file mode 100644
index 0000000..f03eb15
--- /dev/null
+++ b/migrations/20260105214000_casbin_dockerhub_rules.down.sql
@@ -0,0 +1,8 @@
+DELETE FROM public.casbin_rule
+WHERE v1 = '/dockerhub/namespaces' AND v2 = 'GET';
+
+DELETE FROM public.casbin_rule
+WHERE v1 = '/dockerhub/:namespace/repositories' AND v2 = 'GET';
+
+DELETE FROM public.casbin_rule
+WHERE v1 = '/dockerhub/:namespace/repositories/:repository/tags' AND v2 = 'GET';
diff --git a/migrations/20260105214000_casbin_dockerhub_rules.up.sql b/migrations/20260105214000_casbin_dockerhub_rules.up.sql
new file mode 100644
index 0000000..282211a
--- /dev/null
+++ b/migrations/20260105214000_casbin_dockerhub_rules.up.sql
@@ -0,0 +1,17 @@
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_user', '/dockerhub/namespaces', 'GET', '', '', '');
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_admin', '/dockerhub/namespaces', 'GET', '', '', '');
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_user', '/dockerhub/:namespace/repositories', 'GET', '', '', '');
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_admin', '/dockerhub/:namespace/repositories', 'GET', '', '', '');
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_user', '/dockerhub/:namespace/repositories/:repository/tags', 'GET', '', '', '');
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_admin', '/dockerhub/:namespace/repositories/:repository/tags', 'GET', '', '', '');
diff --git a/migrations/20260106142135_remove_agents_deployment_fk.down.sql b/migrations/20260106142135_remove_agents_deployment_fk.down.sql
new file mode 100644
index 0000000..8ffd69e
--- /dev/null
+++ b/migrations/20260106142135_remove_agents_deployment_fk.down.sql
@@ -0,0 +1,7 @@
+-- Restore foreign key constraint (only if deployment table has matching records)
+-- Note: This will fail if orphaned agents exist. Clean up orphans before rollback.
+ALTER TABLE agents
+ADD CONSTRAINT agents_deployment_hash_fkey
+FOREIGN KEY (deployment_hash)
+REFERENCES deployment(deployment_hash)
+ON DELETE CASCADE;
diff --git a/migrations/20260106142135_remove_agents_deployment_fk.up.sql b/migrations/20260106142135_remove_agents_deployment_fk.up.sql
new file mode 100644
index 0000000..fddc63d
--- /dev/null
+++ b/migrations/20260106142135_remove_agents_deployment_fk.up.sql
@@ -0,0 +1,6 @@
+-- Remove foreign key constraint from agents table to allow agents without deployments in Stacker
+-- Deployments may exist in User Service "installations" table instead
+ALTER TABLE agents DROP CONSTRAINT IF EXISTS agents_deployment_hash_fkey;
+
+-- Keep the deployment_hash column indexed for queries
+-- Index already exists: idx_agents_deployment_hash
diff --git a/migrations/20260106143528_20260106_casbin_user_rating_idempotent.down.sql b/migrations/20260106143528_20260106_casbin_user_rating_idempotent.down.sql
new file mode 100644
index 0000000..dc7c3ea
--- /dev/null
+++ b/migrations/20260106143528_20260106_casbin_user_rating_idempotent.down.sql
@@ -0,0 +1 @@
+-- No-op: this migration only ensured idempotency and did not create new rows
diff --git a/migrations/20260106143528_20260106_casbin_user_rating_idempotent.up.sql b/migrations/20260106143528_20260106_casbin_user_rating_idempotent.up.sql
new file mode 100644
index 0000000..8cb3282
--- /dev/null
+++ b/migrations/20260106143528_20260106_casbin_user_rating_idempotent.up.sql
@@ -0,0 +1,24 @@
+-- Ensure rating Casbin rules are idempotent for future migration reruns
+INSERT INTO casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_user', '/rating/:id', 'PUT', '', '', '')
+ON CONFLICT DO NOTHING;
+
+INSERT INTO casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_admin', '/admin/rating/:id', 'PUT', '', '', '')
+ON CONFLICT DO NOTHING;
+
+INSERT INTO casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_user', '/rating/:id', 'DELETE', '', '', '')
+ON CONFLICT DO NOTHING;
+
+INSERT INTO casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_admin', '/admin/rating/:id', 'DELETE', '', '', '')
+ON CONFLICT DO NOTHING;
+
+INSERT INTO casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_admin', '/admin/rating/:id', 'GET', '', '', '')
+ON CONFLICT DO NOTHING;
+
+INSERT INTO casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_admin', '/admin/rating', 'GET', '', '', '')
+ON CONFLICT DO NOTHING;
diff --git a/migrations/20260107123000_admin_service_role_inheritance.down.sql b/migrations/20260107123000_admin_service_role_inheritance.down.sql
new file mode 100644
index 0000000..e78adbe
--- /dev/null
+++ b/migrations/20260107123000_admin_service_role_inheritance.down.sql
@@ -0,0 +1,9 @@
+-- Revoke admin_service inheritance from admin permissions
+DELETE FROM public.casbin_rule
+WHERE ptype = 'g'
+ AND v0 = 'admin_service'
+ AND v1 = 'group_admin'
+ AND v2 = ''
+ AND v3 = ''
+ AND v4 = ''
+ AND v5 = '';
diff --git a/migrations/20260107123000_admin_service_role_inheritance.up.sql b/migrations/20260107123000_admin_service_role_inheritance.up.sql
new file mode 100644
index 0000000..6c6a663
--- /dev/null
+++ b/migrations/20260107123000_admin_service_role_inheritance.up.sql
@@ -0,0 +1,4 @@
+-- Allow admin_service JWT role to inherit all admin permissions
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('g', 'admin_service', 'group_admin', '', '', '', '')
+ON CONFLICT DO NOTHING;
diff --git a/migrations/20260109133000_extend_deployment_hash_length.down.sql b/migrations/20260109133000_extend_deployment_hash_length.down.sql
new file mode 100644
index 0000000..77b626b
--- /dev/null
+++ b/migrations/20260109133000_extend_deployment_hash_length.down.sql
@@ -0,0 +1,21 @@
+-- Revert deployment_hash column length to the previous limit
+ALTER TABLE commands DROP CONSTRAINT IF EXISTS commands_deployment_hash_fkey;
+
+ALTER TABLE deployment
+ ALTER COLUMN deployment_hash TYPE VARCHAR(64);
+
+ALTER TABLE agents
+ ALTER COLUMN deployment_hash TYPE VARCHAR(64);
+
+ALTER TABLE audit_log
+ ALTER COLUMN deployment_hash TYPE VARCHAR(64);
+
+ALTER TABLE commands
+ ALTER COLUMN deployment_hash TYPE VARCHAR(64);
+
+ALTER TABLE command_queue
+ ALTER COLUMN deployment_hash TYPE VARCHAR(64);
+
+ALTER TABLE commands
+ ADD CONSTRAINT commands_deployment_hash_fkey
+ FOREIGN KEY (deployment_hash) REFERENCES deployment(deployment_hash) ON DELETE CASCADE;
diff --git a/migrations/20260109133000_extend_deployment_hash_length.up.sql b/migrations/20260109133000_extend_deployment_hash_length.up.sql
new file mode 100644
index 0000000..9606d66
--- /dev/null
+++ b/migrations/20260109133000_extend_deployment_hash_length.up.sql
@@ -0,0 +1,21 @@
+-- Increase deployment_hash column length to accommodate longer identifiers
+ALTER TABLE commands DROP CONSTRAINT IF EXISTS commands_deployment_hash_fkey;
+
+ALTER TABLE deployment
+ ALTER COLUMN deployment_hash TYPE VARCHAR(128);
+
+ALTER TABLE agents
+ ALTER COLUMN deployment_hash TYPE VARCHAR(128);
+
+ALTER TABLE audit_log
+ ALTER COLUMN deployment_hash TYPE VARCHAR(128);
+
+ALTER TABLE commands
+ ALTER COLUMN deployment_hash TYPE VARCHAR(128);
+
+ALTER TABLE command_queue
+ ALTER COLUMN deployment_hash TYPE VARCHAR(128);
+
+ALTER TABLE commands
+ ADD CONSTRAINT commands_deployment_hash_fkey
+ FOREIGN KEY (deployment_hash) REFERENCES deployment(deployment_hash) ON DELETE CASCADE;
diff --git a/migrations/20260112120000_remove_commands_deployment_fk.down.sql b/migrations/20260112120000_remove_commands_deployment_fk.down.sql
new file mode 100644
index 0000000..f300690
--- /dev/null
+++ b/migrations/20260112120000_remove_commands_deployment_fk.down.sql
@@ -0,0 +1,3 @@
+-- Restore FK constraint on commands.deployment_hash back to deployment(deployment_hash)
+ALTER TABLE commands ADD CONSTRAINT commands_deployment_hash_fkey
+ FOREIGN KEY (deployment_hash) REFERENCES deployment(deployment_hash) ON DELETE CASCADE;
diff --git a/migrations/20260112120000_remove_commands_deployment_fk.up.sql b/migrations/20260112120000_remove_commands_deployment_fk.up.sql
new file mode 100644
index 0000000..84b6ad6
--- /dev/null
+++ b/migrations/20260112120000_remove_commands_deployment_fk.up.sql
@@ -0,0 +1,2 @@
+-- Remove FK constraint from commands.deployment_hash to allow hashes from external installations
+ALTER TABLE commands DROP CONSTRAINT IF EXISTS commands_deployment_hash_fkey;
diff --git a/migrations/20260113000001_fix_command_queue_fk.down.sql b/migrations/20260113000001_fix_command_queue_fk.down.sql
new file mode 100644
index 0000000..c2f9b63
--- /dev/null
+++ b/migrations/20260113000001_fix_command_queue_fk.down.sql
@@ -0,0 +1,12 @@
+-- Revert: Fix foreign key in command_queue to reference commands.command_id (VARCHAR) instead of commands.id (UUID)
+
+-- Drop the new foreign key constraint
+ALTER TABLE command_queue DROP CONSTRAINT command_queue_command_id_fkey;
+
+-- Change command_id column back to UUID
+ALTER TABLE command_queue ALTER COLUMN command_id TYPE UUID USING command_id::UUID;
+
+-- Restore old foreign key constraint
+ALTER TABLE command_queue
+ADD CONSTRAINT command_queue_command_id_fkey
+FOREIGN KEY (command_id) REFERENCES commands(id) ON DELETE CASCADE;
diff --git a/migrations/20260113000001_fix_command_queue_fk.up.sql b/migrations/20260113000001_fix_command_queue_fk.up.sql
new file mode 100644
index 0000000..9dd2196
--- /dev/null
+++ b/migrations/20260113000001_fix_command_queue_fk.up.sql
@@ -0,0 +1,12 @@
+-- Fix foreign key in command_queue to reference commands.command_id (VARCHAR) instead of commands.id (UUID)
+
+-- Drop the old foreign key constraint
+ALTER TABLE command_queue DROP CONSTRAINT command_queue_command_id_fkey;
+
+-- Change command_id column from UUID to VARCHAR(64)
+ALTER TABLE command_queue ALTER COLUMN command_id TYPE VARCHAR(64);
+
+-- Add new foreign key constraint referencing commands.command_id instead
+ALTER TABLE command_queue
+ADD CONSTRAINT command_queue_command_id_fkey
+FOREIGN KEY (command_id) REFERENCES commands(command_id) ON DELETE CASCADE;
diff --git a/migrations/20260113000002_fix_audit_log_timestamp.down.sql b/migrations/20260113000002_fix_audit_log_timestamp.down.sql
new file mode 100644
index 0000000..4fb6213
--- /dev/null
+++ b/migrations/20260113000002_fix_audit_log_timestamp.down.sql
@@ -0,0 +1,3 @@
+-- Revert: Fix audit_log.created_at type from TIMESTAMP to TIMESTAMPTZ
+
+ALTER TABLE audit_log ALTER COLUMN created_at TYPE TIMESTAMP;
diff --git a/migrations/20260113000002_fix_audit_log_timestamp.up.sql b/migrations/20260113000002_fix_audit_log_timestamp.up.sql
new file mode 100644
index 0000000..2372a29
--- /dev/null
+++ b/migrations/20260113000002_fix_audit_log_timestamp.up.sql
@@ -0,0 +1,3 @@
+-- Fix audit_log.created_at type from TIMESTAMP to TIMESTAMPTZ
+
+ALTER TABLE audit_log ALTER COLUMN created_at TYPE TIMESTAMPTZ;
diff --git a/migrations/20260113120000_add_deployment_capabilities_acl.up.sql b/migrations/20260113120000_add_deployment_capabilities_acl.up.sql
new file mode 100644
index 0000000..ee70b8c
--- /dev/null
+++ b/migrations/20260113120000_add_deployment_capabilities_acl.up.sql
@@ -0,0 +1,5 @@
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_user', '/api/v1/deployments/:deployment_hash/capabilities', 'GET', '', '', '');
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_admin', '/api/v1/deployments/:deployment_hash/capabilities', 'GET', '', '', '');
diff --git a/migrations/20260114120000_casbin_agent_enqueue_rules.down.sql b/migrations/20260114120000_casbin_agent_enqueue_rules.down.sql
new file mode 100644
index 0000000..69b620a
--- /dev/null
+++ b/migrations/20260114120000_casbin_agent_enqueue_rules.down.sql
@@ -0,0 +1,4 @@
+-- Remove Casbin ACL rules for /api/v1/agent/commands/enqueue endpoint
+
+DELETE FROM public.casbin_rule
+WHERE ptype='p' AND v1='/api/v1/agent/commands/enqueue' AND v2='POST';
diff --git a/migrations/20260114120000_casbin_agent_enqueue_rules.up.sql b/migrations/20260114120000_casbin_agent_enqueue_rules.up.sql
new file mode 100644
index 0000000..0ba4d95
--- /dev/null
+++ b/migrations/20260114120000_casbin_agent_enqueue_rules.up.sql
@@ -0,0 +1,14 @@
+-- Add Casbin ACL rules for /api/v1/agent/commands/enqueue endpoint
+-- This endpoint allows authenticated users to enqueue commands for their deployments
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_user', '/api/v1/agent/commands/enqueue', 'POST', '', '', '')
+ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'group_admin', '/api/v1/agent/commands/enqueue', 'POST', '', '', '')
+ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'client', '/api/v1/agent/commands/enqueue', 'POST', '', '', '')
+ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
diff --git a/migrations/20260114160000_casbin_agent_role_fix.down.sql b/migrations/20260114160000_casbin_agent_role_fix.down.sql
new file mode 100644
index 0000000..d014e70
--- /dev/null
+++ b/migrations/20260114160000_casbin_agent_role_fix.down.sql
@@ -0,0 +1,10 @@
+-- Rollback agent role permissions fix
+
+DELETE FROM public.casbin_rule
+WHERE ptype = 'p' AND v0 = 'agent' AND v1 = '/api/v1/agent/commands/report' AND v2 = 'POST';
+
+DELETE FROM public.casbin_rule
+WHERE ptype = 'p' AND v0 = 'agent' AND v1 = '/api/v1/agent/commands/wait/:deployment_hash' AND v2 = 'GET';
+
+DELETE FROM public.casbin_rule
+WHERE ptype = 'g' AND v0 = 'agent' AND v1 = 'group_anonymous';
diff --git a/migrations/20260114160000_casbin_agent_role_fix.up.sql b/migrations/20260114160000_casbin_agent_role_fix.up.sql
new file mode 100644
index 0000000..24aba0c
--- /dev/null
+++ b/migrations/20260114160000_casbin_agent_role_fix.up.sql
@@ -0,0 +1,18 @@
+-- Ensure agent role has access to agent endpoints (idempotent fix)
+-- This migration ensures agent role permissions are in place regardless of previous migration state
+-- Addresses 403 error when Status Panel agent tries to report command results
+
+-- Agent role should be able to report command results
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'agent', '/api/v1/agent/commands/report', 'POST', '', '', '')
+ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+
+-- Agent role should be able to poll for commands
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('p', 'agent', '/api/v1/agent/commands/wait/:deployment_hash', 'GET', '', '', '')
+ON CONFLICT ON CONSTRAINT unique_key_sqlx_adapter DO NOTHING;
+
+-- Ensure agent role group exists (inherits from group_anonymous for health checks)
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES ('g', 'agent', 'group_anonymous', '', '', '', '')
+ON CONFLICT DO NOTHING;
diff --git a/migrations/20260115120000_casbin_command_client_rules.down.sql b/migrations/20260115120000_casbin_command_client_rules.down.sql
new file mode 100644
index 0000000..f29cfc1
--- /dev/null
+++ b/migrations/20260115120000_casbin_command_client_rules.down.sql
@@ -0,0 +1,12 @@
+-- Remove Casbin rules for command endpoints for client role
+
+DELETE FROM public.casbin_rule
+WHERE ptype = 'p'
+ AND v0 = 'client'
+ AND v1 IN (
+ '/api/v1/commands',
+ '/api/v1/commands/:deployment_hash',
+ '/api/v1/commands/:deployment_hash/:command_id',
+ '/api/v1/commands/:deployment_hash/:command_id/cancel'
+ )
+ AND v2 IN ('GET', 'POST');
diff --git a/migrations/20260115120000_casbin_command_client_rules.up.sql b/migrations/20260115120000_casbin_command_client_rules.up.sql
new file mode 100644
index 0000000..9f44b31
--- /dev/null
+++ b/migrations/20260115120000_casbin_command_client_rules.up.sql
@@ -0,0 +1,13 @@
+-- Add Casbin rules for command endpoints for client role
+
+INSERT INTO public.casbin_rule (ptype, v0, v1, v2, v3, v4, v5)
+VALUES
+ ('p', 'client', '/api/v1/commands', 'GET', '', '', ''),
+ ('p', 'client', '/api/v1/commands/:deployment_hash', 'GET', '', '', ''),
+ ('p', 'client', '/api/v1/commands/:deployment_hash/:command_id', 'GET', '', '', ''),
+ ('p', 'client', '/api/v1/commands/:deployment_hash/:command_id/cancel', 'POST', '', '', ''),
+ ('p', 'group_user', '/api/v1/commands', 'GET', '', '', ''),
+ ('p', 'root', '/api/v1/commands', 'GET', '', '', ''),
+ ('p', 'root', '/api/v1/commands/:deployment_hash', 'GET', '', '', ''),
+ ('p', 'root', '/api/v1/commands/:deployment_hash/:command_id', 'GET', '', '', ''),
+ ('p', 'root', '/api/v1/commands/:deployment_hash/:command_id/cancel', 'POST', '', '', '');
diff --git a/renovate.json b/renovate.json
new file mode 100644
index 0000000..5db72dd
--- /dev/null
+++ b/renovate.json
@@ -0,0 +1,6 @@
+{
+ "$schema": "https://docs.renovatebot.com/renovate-schema.json",
+ "extends": [
+ "config:recommended"
+ ]
+}
diff --git a/scripts/init_db.sh b/scripts/init_db.sh
index 8d84403..06693cd 100755
--- a/scripts/init_db.sh
+++ b/scripts/init_db.sh
@@ -38,4 +38,5 @@ export DATABASE_URL=postgres://${DB_USER}:${DB_PASSWORD}@localhost:${DB_PORT}/${
sqlx database create
sqlx migrate run
->&2 echo "Postgres has been migrated, ready to go!"
\ No newline at end of file
+>&2 echo "Postgres has been migrated, ready to go!"
+
diff --git a/src/banner.rs b/src/banner.rs
new file mode 100644
index 0000000..bbd5c30
--- /dev/null
+++ b/src/banner.rs
@@ -0,0 +1,64 @@
+/// Display a banner with version and useful information
+pub fn print_banner() {
+ let version = env!("CARGO_PKG_VERSION");
+ let name = env!("CARGO_PKG_NAME");
+
+ let banner = format!(
+ r#"
+ _ | |
+ ___ _| |_ _____ ____| | _ _____ ____
+ /___|_ _|____ |/ ___) |_/ ) ___ |/ ___)
+|___ | | |_/ ___ ( (___| _ (| ____| |
+(___/ \__)_____|\____)_| \_)_____)_|
+
+──────────────────────────────────────────
+ {}
+ Version: {}
+ Build: {}
+ Edition: {}
+─────────────────────────────────────────
+
+"#,
+ capitalize(name),
+ version,
+ env!("CARGO_PKG_VERSION"),
+ "2021"
+ );
+
+ println!("{}", banner);
+}
+
+/// Display startup information
+pub fn print_startup_info(host: &str, port: u16) {
+ let info = format!(
+ r#"
+📋 Configuration Loaded
+ 🌐 Server Address: http://{}:{}
+ 📦 Ready to accept connections
+
+"#,
+ host, port
+ );
+
+ println!("{}", info);
+}
+
+fn capitalize(s: &str) -> String {
+ let mut chars = s.chars();
+ match chars.next() {
+ None => String::new(),
+ Some(first) => first.to_uppercase().collect::<String>() + chars.as_str(),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_capitalize() {
+ assert_eq!(capitalize("stacker"), "Stacker");
+ assert_eq!(capitalize("hello"), "Hello");
+ assert_eq!(capitalize(""), "");
+ }
+}
diff --git a/src/configuration.rs b/src/configuration.rs
index a3beeaf..cf7570d 100644
--- a/src/configuration.rs
+++ b/src/configuration.rs
@@ -1,15 +1,65 @@
+use crate::connectors::ConnectorConfig;
use serde;
-#[derive(Debug, serde::Deserialize)]
+#[derive(Debug, Clone, serde::Deserialize)]
pub struct Settings {
pub database: DatabaseSettings,
pub app_port: u16,
pub app_host: String,
pub auth_url: String,
pub max_clients_number: i64,
+ #[serde(default = "Settings::default_agent_command_poll_timeout_secs")]
+ pub agent_command_poll_timeout_secs: u64,
+ #[serde(default = "Settings::default_agent_command_poll_interval_secs")]
+ pub agent_command_poll_interval_secs: u64,
+ #[serde(default = "Settings::default_casbin_reload_enabled")]
+ pub casbin_reload_enabled: bool,
+ #[serde(default = "Settings::default_casbin_reload_interval_secs")]
+ pub casbin_reload_interval_secs: u64,
+ pub amqp: AmqpSettings,
+ pub vault: VaultSettings,
+ #[serde(default)]
+ pub connectors: ConnectorConfig,
}
-#[derive(Debug, serde::Deserialize)]
+impl Default for Settings {
+ fn default() -> Self {
+ Self {
+ database: DatabaseSettings::default(),
+ app_port: 8000,
+ app_host: "127.0.0.1".to_string(),
+ auth_url: "http://localhost:8080/me".to_string(),
+ max_clients_number: 10,
+ agent_command_poll_timeout_secs: Self::default_agent_command_poll_timeout_secs(),
+ agent_command_poll_interval_secs: Self::default_agent_command_poll_interval_secs(),
+ casbin_reload_enabled: Self::default_casbin_reload_enabled(),
+ casbin_reload_interval_secs: Self::default_casbin_reload_interval_secs(),
+ amqp: AmqpSettings::default(),
+ vault: VaultSettings::default(),
+ connectors: ConnectorConfig::default(),
+ }
+ }
+}
+
+impl Settings {
+ fn default_agent_command_poll_timeout_secs() -> u64 {
+ 30
+ }
+
+ fn default_agent_command_poll_interval_secs() -> u64 {
+ 3
+ }
+
+ fn default_casbin_reload_enabled() -> bool {
+ true
+ }
+
+ fn default_casbin_reload_interval_secs() -> u64 {
+ 10
+ }
+}
+
+#[derive(Debug, serde::Deserialize, Clone)]
pub struct DatabaseSettings {
pub username: String,
pub password: String,
@@ -18,6 +68,80 @@ pub struct DatabaseSettings {
pub database_name: String,
}
+impl Default for DatabaseSettings {
+ fn default() -> Self {
+ Self {
+ username: "postgres".to_string(),
+ password: "postgres".to_string(),
+ host: "127.0.0.1".to_string(),
+ port: 5432,
+ database_name: "stacker".to_string(),
+ }
+ }
+}
+
+#[derive(Debug, serde::Deserialize, Clone)]
+pub struct AmqpSettings {
+ pub username: String,
+ pub password: String,
+ pub host: String,
+ pub port: u16,
+}
+
+impl Default for AmqpSettings {
+ fn default() -> Self {
+ Self {
+ username: "guest".to_string(),
+ password: "guest".to_string(),
+ host: "127.0.0.1".to_string(),
+ port: 5672,
+ }
+ }
+}
+
+#[derive(Debug, serde::Deserialize, Clone)]
+pub struct VaultSettings {
+ pub address: String,
+ pub token: String,
+ pub agent_path_prefix: String,
+ #[serde(default = "VaultSettings::default_api_prefix")]
+ pub api_prefix: String,
+}
+
+impl Default for VaultSettings {
+ fn default() -> Self {
+ Self {
+ address: "http://127.0.0.1:8200".to_string(),
+ token: "dev-token".to_string(),
+ agent_path_prefix: "agent".to_string(),
+ api_prefix: Self::default_api_prefix(),
+ }
+ }
+}
+
+impl VaultSettings {
+ fn default_api_prefix() -> String {
+ "v1".to_string()
+ }
+
+ /// Overlay Vault settings from environment variables, if present.
+ /// If an env var is missing, keep the existing file-provided value.
+ pub fn overlay_env(self) -> Self {
+ let address = std::env::var("VAULT_ADDRESS").unwrap_or(self.address);
+ let token = std::env::var("VAULT_TOKEN").unwrap_or(self.token);
+ let agent_path_prefix =
+ std::env::var("VAULT_AGENT_PATH_PREFIX").unwrap_or(self.agent_path_prefix);
+ let api_prefix = std::env::var("VAULT_API_PREFIX").unwrap_or(self.api_prefix);
+
+ VaultSettings {
+ address,
+ token,
+ agent_path_prefix,
+ api_prefix,
+ }
+ }
+}
+
impl DatabaseSettings {
// Connection string: postgresql://:@:/
pub fn connection_string(&self) -> String {
@@ -35,15 +159,63 @@ impl DatabaseSettings {
}
}
+impl AmqpSettings {
+ pub fn connection_string(&self) -> String {
+ format!(
+ "amqp://{}:{}@{}:{}/%2f",
+ self.username, self.password, self.host, self.port,
+ )
+ }
+}
+
+pub fn get_configuration() -> Result<Settings, config::ConfigError> {
- // Initialize our configuration reader
- let mut settings = config::Config::default();
+ // Load environment variables from .env file
+ dotenvy::dotenv().ok();
+
+ // Start with defaults
+ let mut config = Settings::default();
- // Add configuration values from a file named `configuration`
- // with the .yaml extension
- settings.merge(config::File::with_name("configuration"))?; // .json, .toml, .yaml, .yml
+ // Prefer real config, fall back to dist samples; layer multiple formats
+ let settings = config::Config::builder()
+ // Primary local config
+ .add_source(config::File::with_name("configuration.yaml").required(false))
+ .add_source(config::File::with_name("configuration.yml").required(false))
+ .add_source(config::File::with_name("configuration").required(false))
+ // Fallback samples
+ .add_source(config::File::with_name("configuration.yaml.dist").required(false))
+ .add_source(config::File::with_name("configuration.yml.dist").required(false))
+ .add_source(config::File::with_name("configuration.dist").required(false))
+ .build()?;
+
+ // Try to convert the configuration values it read into our Settings type
+ if let Ok(loaded) = settings.try_deserialize::<Settings>() {
+ config = loaded;
+ }
+
+ // Overlay Vault settings with environment variables if present
+ config.vault = config.vault.overlay_env();
+
+ if let Ok(timeout) = std::env::var("STACKER_AGENT_POLL_TIMEOUT_SECS") {
+ if let Ok(parsed) = timeout.parse::<u64>() {
+ config.agent_command_poll_timeout_secs = parsed;
+ }
+ }
+
+ if let Ok(interval) = std::env::var("STACKER_AGENT_POLL_INTERVAL_SECS") {
+ if let Ok(parsed) = interval.parse::<u64>() {
+ config.agent_command_poll_interval_secs = parsed;
+ }
+ }
+
+ if let Ok(enabled) = std::env::var("STACKER_CASBIN_RELOAD_ENABLED") {
+ config.casbin_reload_enabled = matches!(enabled.as_str(), "1" | "true" | "TRUE");
+ }
+
+ if let Ok(interval) = std::env::var("STACKER_CASBIN_RELOAD_INTERVAL_SECS") {
+ if let Ok(parsed) = interval.parse::<u64>() {
+ config.casbin_reload_interval_secs = parsed;
+ }
+ }
- // Try to convert the configuration values it read into
- // our Settings type
- settings.try_deserialize()
+ Ok(config)
}
diff --git a/src/connectors/README.md b/src/connectors/README.md
new file mode 100644
index 0000000..422832d
--- /dev/null
+++ b/src/connectors/README.md
@@ -0,0 +1,531 @@
+# External Service Connectors
+
+This directory contains adapters for all of Stacker's external service integrations.
+**All communication with external services MUST go through connectors** - this is a core architectural rule.
+
+## Why Connectors?
+
+| Benefit | Description |
+|---------|-------------|
+| **Independence** | Stacker works standalone; external services are optional |
+| **Testability** | Mock connectors in tests without calling external APIs |
+| **Replaceability** | Swap HTTP for gRPC without changing route code |
+| **Configuration** | Enable/disable services per environment |
+| **Separation of Concerns** | Routes contain business logic only, not HTTP details |
+| **Error Handling** | Centralized retry logic, timeouts, circuit breakers |
+
+## Architecture Pattern
+
+```
+┌─────────────────────────────────────────────────────────┐
+│ Route Handler │
+│ (Pure business logic - no HTTP/AMQP knowledge) │
+└─────────────────────────┬───────────────────────────────┘
+ │ Uses trait methods
+ ▼
+┌─────────────────────────────────────────────────────────┐
+│ Connector Trait (Interface) │
+│ pub trait UserServiceConnector: Send + Sync │
+└─────────────────────────┬───────────────────────────────┘
+ │ Implemented by
+ ┌─────────┴─────────┐
+ ▼ ▼
+ ┌──────────────────┐ ┌──────────────────┐
+ │ HTTP Client │ │ Mock Connector │
+ │ (Production) │ │ (Tests/Dev) │
+ └──────────────────┘ └──────────────────┘
+```
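+
+A minimal sketch of the pattern (the `PingServiceConnector` names below are illustrative only, not an existing Stacker trait):
+
+```rust
+use std::sync::Arc;
+
+#[async_trait::async_trait]
+pub trait PingServiceConnector: Send + Sync {
+    async fn ping(&self) -> Result<String, String>;
+}
+
+// Production implementation: owns all HTTP details.
+pub struct HttpPingConnector {
+    base_url: String,
+}
+
+#[async_trait::async_trait]
+impl PingServiceConnector for HttpPingConnector {
+    async fn ping(&self) -> Result<String, String> {
+        reqwest::get(format!("{}/ping", self.base_url))
+            .await
+            .map_err(|e| e.to_string())?
+            .text()
+            .await
+            .map_err(|e| e.to_string())
+    }
+}
+
+// Mock implementation: used in tests or when the service is disabled.
+pub struct MockPingConnector;
+
+#[async_trait::async_trait]
+impl PingServiceConnector for MockPingConnector {
+    async fn ping(&self) -> Result<String, String> {
+        Ok("pong (mock)".to_string())
+    }
+}
+
+// Route handlers only ever hold the trait object, so either impl can be swapped in.
+pub fn select_connector(enabled: bool, base_url: String) -> Arc<dyn PingServiceConnector> {
+    if enabled {
+        Arc::new(HttpPingConnector { base_url })
+    } else {
+        Arc::new(MockPingConnector)
+    }
+}
+```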
+
+## Existing Connectors
+
+| Service | Status | Purpose |
+|---------|--------|---------|
+| User Service | ✅ Implemented | Create/manage stacks in TryDirect User Service |
+| Payment Service | 🚧 Planned | Process marketplace template payments |
+| Event Bus (RabbitMQ) | 🚧 Planned | Async notifications (template approved, deployment complete) |
+
+## Adding a New Connector
+
+### Step 1: Define Configuration
+
+Add your service config to `config.rs`:
+
+```rust
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct PaymentServiceConfig {
+ pub enabled: bool,
+ pub base_url: String,
+ pub timeout_secs: u64,
+ #[serde(skip)]
+ pub auth_token: Option<String>,
+}
+
+impl Default for PaymentServiceConfig {
+ fn default() -> Self {
+ Self {
+ enabled: false,
+ base_url: "http://localhost:8000".to_string(),
+ timeout_secs: 15,
+ auth_token: None,
+ }
+ }
+}
+```
+
+Then add to `ConnectorConfig`:
+```rust
+pub struct ConnectorConfig {
+ pub user_service: Option<UserServiceConfig>,
+ pub payment_service: Option<PaymentServiceConfig>, // Add this
+}
+```
+
+### Step 2: Create Service File
+
+Create `src/connectors/payment_service.rs`:
+
+```rust
+use super::config::PaymentServiceConfig;
+use super::errors::ConnectorError;
+use actix_web::web;
+use serde::{Deserialize, Serialize};
+use std::sync::Arc;
+use tracing::Instrument;
+
+// 1. Define response types
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct PaymentResponse {
+ pub payment_id: String,
+ pub status: String,
+ pub amount: f64,
+}
+
+// 2. Define trait interface
+#[async_trait::async_trait]
+pub trait PaymentServiceConnector: Send + Sync {
+ async fn create_payment(
+ &self,
+ user_id: &str,
+ amount: f64,
+ currency: &str,
+ ) -> Result<PaymentResponse, ConnectorError>;
+
+ async fn get_payment_status(
+ &self,
+ payment_id: &str,
+ ) -> Result<PaymentResponse, ConnectorError>;
+}
+
+// 3. Implement HTTP client
+pub struct PaymentServiceClient {
+ base_url: String,
+ http_client: reqwest::Client,
+ auth_token: Option<String>,
+}
+
+impl PaymentServiceClient {
+ pub fn new(config: PaymentServiceConfig) -> Self {
+ let timeout = std::time::Duration::from_secs(config.timeout_secs);
+ let http_client = reqwest::Client::builder()
+ .timeout(timeout)
+ .build()
+ .expect("Failed to create HTTP client");
+
+ Self {
+ base_url: config.base_url,
+ http_client,
+ auth_token: config.auth_token,
+ }
+ }
+
+ fn auth_header(&self) -> Option<String> {
+ self.auth_token
+ .as_ref()
+ .map(|token| format!("Bearer {}", token))
+ }
+}
+
+#[async_trait::async_trait]
+impl PaymentServiceConnector for PaymentServiceClient {
+ async fn create_payment(
+ &self,
+ user_id: &str,
+ amount: f64,
+ currency: &str,
+ ) -> Result<PaymentResponse, ConnectorError> {
+ let span = tracing::info_span!(
+ "payment_service_create_payment",
+ user_id = %user_id,
+ amount = %amount
+ );
+
+ let url = format!("{}/api/payments", self.base_url);
+ let payload = serde_json::json!({
+ "user_id": user_id,
+ "amount": amount,
+ "currency": currency,
+ });
+
+ let mut req = self.http_client.post(&url).json(&payload);
+ if let Some(auth) = self.auth_header() {
+ req = req.header("Authorization", auth);
+ }
+
+ let resp = req.send()
+ .instrument(span)
+ .await
+ .and_then(|resp| resp.error_for_status())
+ .map_err(|e| {
+ tracing::error!("create_payment error: {:?}", e);
+ ConnectorError::HttpError(format!("Failed to create payment: {}", e))
+ })?;
+
+ let text = resp.text().await
+ .map_err(|e| ConnectorError::HttpError(e.to_string()))?;
+
+ serde_json::from_str::<PaymentResponse>(&text)
+ .map_err(|_| ConnectorError::InvalidResponse(text))
+ }
+
+ async fn get_payment_status(
+ &self,
+ payment_id: &str,
+ ) -> Result<PaymentResponse, ConnectorError> {
+ let span = tracing::info_span!(
+ "payment_service_get_status",
+ payment_id = %payment_id
+ );
+
+ let url = format!("{}/api/payments/{}", self.base_url, payment_id);
+ let mut req = self.http_client.get(&url);
+
+ if let Some(auth) = self.auth_header() {
+ req = req.header("Authorization", auth);
+ }
+
+ let resp = req.send()
+ .instrument(span)
+ .await
+ .map_err(|e| {
+ if e.status().map_or(false, |s| s == 404) {
+ ConnectorError::NotFound(format!("Payment {} not found", payment_id))
+ } else {
+ ConnectorError::HttpError(format!("Failed to get payment: {}", e))
+ }
+ })?;
+
+ if resp.status() == 404 {
+ return Err(ConnectorError::NotFound(format!("Payment {} not found", payment_id)));
+ }
+
+ let text = resp.text().await
+ .map_err(|e| ConnectorError::HttpError(e.to_string()))?;
+
+ serde_json::from_str::<PaymentResponse>(&text)
+ .map_err(|_| ConnectorError::InvalidResponse(text))
+ }
+}
+
+// 4. Provide mock for testing
+pub mod mock {
+ use super::*;
+
+ pub struct MockPaymentServiceConnector;
+
+ #[async_trait::async_trait]
+ impl PaymentServiceConnector for MockPaymentServiceConnector {
+ async fn create_payment(
+ &self,
+ user_id: &str,
+ amount: f64,
+ currency: &str,
+ ) -> Result<PaymentResponse, ConnectorError> {
+ Ok(PaymentResponse {
+ payment_id: "mock_payment_123".to_string(),
+ status: "completed".to_string(),
+ amount,
+ })
+ }
+
+ async fn get_payment_status(
+ &self,
+ payment_id: &str,
+ ) -> Result<PaymentResponse, ConnectorError> {
+ Ok(PaymentResponse {
+ payment_id: payment_id.to_string(),
+ status: "completed".to_string(),
+ amount: 99.99,
+ })
+ }
+ }
+}
+
+// 5. Add init function for startup.rs
+pub fn init(connector_config: &super::config::ConnectorConfig) -> web::Data<Arc<dyn PaymentServiceConnector>> {
+ let connector: Arc<dyn PaymentServiceConnector> = if let Some(payment_config) =
+ connector_config.payment_service.as_ref().filter(|c| c.enabled)
+ {
+ let mut config = payment_config.clone();
+ if config.auth_token.is_none() {
+ config.auth_token = std::env::var("PAYMENT_SERVICE_AUTH_TOKEN").ok();
+ }
+ tracing::info!("Initializing Payment Service connector: {}", config.base_url);
+ Arc::new(PaymentServiceClient::new(config))
+ } else {
+ tracing::warn!("Payment Service connector disabled - using mock");
+ Arc::new(mock::MockPaymentServiceConnector)
+ };
+
+ web::Data::new(connector)
+}
+```
+
+### Step 3: Export from mod.rs
+
+Update `src/connectors/mod.rs`:
+
+```rust
+pub mod payment_service;
+
+pub use payment_service::{PaymentServiceConnector, PaymentServiceClient};
+pub use payment_service::init as init_payment_service;
+```
+
+### Step 4: Update Configuration Files
+
+Add to `configuration.yaml` and `configuration.yaml.dist`:
+
+```yaml
+connectors:
+ payment_service:
+ enabled: false
+ base_url: "http://localhost:8000"
+ timeout_secs: 15
+```
+
+### Step 5: Register in startup.rs
+
+Add to `src/startup.rs`:
+
+```rust
+// Initialize connectors
+let payment_service = connectors::init_payment_service(&settings.connectors);
+
+// In App builder:
+App::new()
+ .app_data(payment_service)
+ // ... other middleware
+```
+
+### Step 6: Use in Routes
+
+```rust
+use crate::connectors::PaymentServiceConnector;
+
+#[post("/purchase/{template_id}")]
+pub async fn purchase_handler(
+ user: web::ReqData<Arc<models::User>>,
+ payment_connector: web::Data<Arc<dyn PaymentServiceConnector>>,
+ path: web::Path<(String,)>,
+) -> Result<HttpResponse, Error> {
+ let template_id = path.into_inner().0;
+
+ // Route logic never knows about HTTP
+ let payment = payment_connector
+ .create_payment(&user.id, 99.99, "USD")
+ .await
+ .map_err(|e| JsonResponse::build().bad_request(e.to_string()))?;
+
+ Ok(JsonResponse::build().ok(payment))
+}
+```
+
+## Testing Connectors
+
+### Unit Tests (with Mock)
+
+```rust
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::connectors::payment_service::mock::MockPaymentServiceConnector;
+
+ #[tokio::test]
+ async fn test_purchase_without_external_api() {
+ let connector = Arc::new(MockPaymentServiceConnector);
+
+ let result = connector.create_payment("user_123", 99.99, "USD").await;
+ assert!(result.is_ok());
+
+ let payment = result.unwrap();
+ assert_eq!(payment.status, "completed");
+ }
+}
+```
+
+### Integration Tests (with Real Service)
+
+```rust
+#[tokio::test]
+#[ignore] // Run with: cargo test -- --ignored
+async fn test_real_payment_service() {
+ let config = PaymentServiceConfig {
+ enabled: true,
+ base_url: "http://localhost:8000".to_string(),
+ timeout_secs: 10,
+ auth_token: Some("test_token".to_string()),
+ };
+
+ let connector = Arc::new(PaymentServiceClient::new(config));
+ let result = connector.create_payment("test_user", 1.00, "USD").await;
+
+ assert!(result.is_ok());
+}
+```
+
+## Best Practices
+
+### ✅ DO
+
+- **Use trait objects** (`Arc<dyn Trait>`) for flexibility
+- **Add retries** for transient failures (network issues)
+- **Log errors** with context (user_id, request_id)
+- **Use tracing spans** for observability
+- **Handle timeouts** explicitly
+- **Validate responses** before deserializing
+- **Return typed errors** (ConnectorError enum)
+- **Mock for tests** - never call real APIs in unit tests
+
+### ❌ DON'T
+
+- **Call HTTP directly from routes** - always use connectors
+- **Panic on errors** - return `Result`
+- **Expose reqwest types** - wrap in ConnectorError
+- **Hardcode URLs** - always use config
+- **Share HTTP clients** across different services
+- **Skip error context** - log with tracing for debugging
+- **Test with real APIs** unless explicitly integration tests
+
+## Error Handling
+
+All connectors use `ConnectorError` enum:
+
+```rust
+pub enum ConnectorError {
+ HttpError(String), // Network/HTTP errors
+ ServiceUnavailable(String), // Service down or timeout
+ InvalidResponse(String), // Bad JSON/unexpected format
+ Unauthorized(String), // 401/403
+ NotFound(String), // 404
+ RateLimited(String), // 429
+ Internal(String), // Unexpected errors
+}
+```
+
+Convert external errors:
+```rust
+.map_err(|e| {
+ if e.is_timeout() {
+ ConnectorError::ServiceUnavailable(e.to_string())
+ } else if e.status().map_or(false, |s| s == 404) {
+ ConnectorError::NotFound("Resource not found".to_string())
+ } else {
+ ConnectorError::HttpError(e.to_string())
+ }
+})
+```
+
+## Environment Variables
+
+Connectors can load auth tokens from environment:
+
+```bash
+# .env or export
+export USER_SERVICE_AUTH_TOKEN="Bearer abc123..."
+export PAYMENT_SERVICE_AUTH_TOKEN="Bearer xyz789..."
+```
+
+Tokens are loaded in the `init()` function:
+```rust
+if config.auth_token.is_none() {
+ config.auth_token = std::env::var("PAYMENT_SERVICE_AUTH_TOKEN").ok();
+}
+```
+
+## Configuration Reference
+
+### Enable/Disable Services
+
+```yaml
+connectors:
+ user_service:
+ enabled: true # ← Toggle here
+```
+
+- `enabled: true` → Uses HTTP client (production)
+- `enabled: false` → Uses mock connector (tests/development)
+
+### Timeouts
+
+```yaml
+timeout_secs: 10 # Request timeout in seconds
+```
+
+Applies to entire request (connection + response).
+
+### Retries
+
+Implement retry logic in client:
+```yaml
+retry_attempts: 3 # Number of retry attempts
+```
+
+Use exponential backoff between retries.
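+
+A minimal sketch of such a retry helper, assuming Tokio for sleeping (the helper name and signature are illustrative, not an existing Stacker API):
+
+```rust
+use std::future::Future;
+use std::time::Duration;
+
+/// Retry an async operation up to `attempts` times (must be >= 1),
+/// sleeping 100ms, 200ms, 400ms, ... between failed attempts.
+pub async fn retry_with_backoff<T, E, F, Fut>(attempts: usize, mut op: F) -> Result<T, E>
+where
+    F: FnMut() -> Fut,
+    Fut: Future<Output = Result<T, E>>,
+{
+    let mut last_err = None;
+    for attempt in 0..attempts {
+        match op().await {
+            Ok(value) => return Ok(value),
+            Err(err) => last_err = Some(err),
+        }
+        if attempt + 1 < attempts {
+            tokio::time::sleep(Duration::from_millis(100 * (1u64 << attempt))).await;
+        }
+    }
+    Err(last_err.expect("attempts must be >= 1"))
+}
+```
+
+In practice, retry only transient failures (timeouts, 5xx) and return non-retryable errors such as `Unauthorized` or `NotFound` immediately, as the Docker Hub client in this repository does.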
+
+## Debugging
+
+### Enable Connector Logs
+
+```bash
+RUST_LOG=stacker::connectors=debug cargo run
+```
+
+### Check Initialization
+
+Look for these log lines at startup:
+```
+INFO stacker::connectors::user_service: Initializing User Service connector: https://api.example.com
+WARN stacker::connectors::payment_service: Payment Service connector disabled - using mock
+```
+
+### Trace HTTP Requests
+
+```rust
+let span = tracing::info_span!(
+ "user_service_create_stack",
+ template_id = %marketplace_template_id,
+ user_id = %user_id
+);
+
+req.send()
+ .instrument(span) // ← Adds tracing
+ .await
+```
+
+## Checklist for New Connector
+
+- [ ] Config struct in `config.rs` with `Default` impl
+- [ ] Add to `ConnectorConfig` struct
+- [ ] Create `{service}.rs` with trait, client, mock, `init()`
+- [ ] Export in `mod.rs`
+- [ ] Add to `configuration.yaml` and `.yaml.dist`
+- [ ] Register in `startup.rs`
+- [ ] Write unit tests with mock
+- [ ] Write integration tests (optional, marked `#[ignore]`)
+- [ ] Document in copilot instructions
+- [ ] Update this README with new connector in table
+
+## Further Reading
+
+- [Error Handling Patterns](../helpers/README.md)
+- [Testing Guide](../../tests/README.md)
diff --git a/src/connectors/admin_service/jwt.rs b/src/connectors/admin_service/jwt.rs
new file mode 100644
index 0000000..0335654
--- /dev/null
+++ b/src/connectors/admin_service/jwt.rs
@@ -0,0 +1,134 @@
+use crate::models;
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Serialize, Deserialize, Clone)]
+pub struct JwtClaims {
+ pub role: String,
+ pub email: String,
+ pub exp: i64,
+}
+
+/// Parse and validate JWT payload from internal admin services
+///
+/// WARNING: This verifies expiration only, not cryptographic signature.
+/// Use only for internal service-to-service auth where issuer is trusted.
+/// For production with untrusted clients, add full JWT verification.
+pub fn parse_jwt_claims(token: &str) -> Result<JwtClaims, String> {
+ use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine};
+
+ // JWT format: header.payload.signature
+ let parts: Vec<&str> = token.split('.').collect();
+ if parts.len() != 3 {
+ return Err("Invalid JWT format: expected 3 parts (header.payload.signature)".to_string());
+ }
+
+ let payload = parts[1];
+
+ // Decode base64url payload
+ let decoded = URL_SAFE_NO_PAD
+ .decode(payload)
+ .map_err(|e| format!("Failed to decode JWT payload: {}", e))?;
+
+ let json: JwtClaims = serde_json::from_slice(&decoded)
+ .map_err(|e| format!("Failed to parse JWT claims: {}", e))?;
+
+ Ok(json)
+}
+
+/// Validate JWT token expiration
+pub fn validate_jwt_expiration(claims: &JwtClaims) -> Result<(), String> {
+ let now = chrono::Utc::now().timestamp();
+ if claims.exp < now {
+ return Err(format!(
+ "JWT token expired (exp: {}, now: {})",
+ claims.exp, now
+ ));
+ }
+ Ok(())
+}
+
+/// Create a User model from JWT claims
+/// Used for admin service authentication
+pub fn user_from_jwt_claims(claims: &JwtClaims) -> models::User {
+ models::User {
+ id: claims.role.clone(),
+ role: claims.role.clone(),
+ email: claims.email.clone(),
+ email_confirmed: false,
+ first_name: "Service".to_string(),
+ last_name: "Account".to_string(),
+ }
+}
+
+/// Extract Bearer token from Authorization header
+pub fn extract_bearer_token(authorization: &str) -> Result<&str, String> {
+ let parts: Vec<&str> = authorization.split_whitespace().collect();
+ if parts.len() != 2 {
+ return Err("Invalid Authorization header format".to_string());
+ }
+ if parts[0] != "Bearer" {
+ return Err("Expected Bearer scheme in Authorization header".to_string());
+ }
+ Ok(parts[1])
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine};
+ use serde_json::json;
+
+ fn create_test_jwt(role: &str, email: &str, exp: i64) -> String {
+ let header = json!({"alg": "HS256", "typ": "JWT"});
+ let payload = json!({"role": role, "email": email, "exp": exp});
+
+ let header_b64 = URL_SAFE_NO_PAD.encode(header.to_string());
+ let payload_b64 = URL_SAFE_NO_PAD.encode(payload.to_string());
+ let signature = "fake_signature"; // For testing, signature validation is not performed
+
+ format!("{}.{}.{}", header_b64, payload_b64, signature)
+ }
+
+ #[test]
+ fn test_parse_valid_jwt() {
+ let future_exp = chrono::Utc::now().timestamp() + 3600;
+ let token = create_test_jwt("admin_service", "admin@test.com", future_exp);
+
+ let claims = parse_jwt_claims(&token).expect("Failed to parse valid JWT");
+ assert_eq!(claims.role, "admin_service");
+ assert_eq!(claims.email, "admin@test.com");
+ }
+
+ #[test]
+ fn test_validate_expired_jwt() {
+ let past_exp = chrono::Utc::now().timestamp() - 3600;
+ let claims = JwtClaims {
+ role: "admin_service".to_string(),
+ email: "admin@test.com".to_string(),
+ exp: past_exp,
+ };
+
+ assert!(validate_jwt_expiration(&claims).is_err());
+ }
+
+ #[test]
+ fn test_extract_bearer_token() {
+ let auth_header = "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.xyz.abc";
+ let token = extract_bearer_token(auth_header).expect("Failed to extract token");
+ assert_eq!(token, "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.xyz.abc");
+ }
+
+ #[test]
+ fn test_user_from_claims() {
+ let claims = JwtClaims {
+ role: "admin_service".to_string(),
+ email: "admin@test.com".to_string(),
+ exp: chrono::Utc::now().timestamp() + 3600,
+ };
+
+ let user = user_from_jwt_claims(&claims);
+ assert_eq!(user.role, "admin_service");
+ assert_eq!(user.email, "admin@test.com");
+ assert_eq!(user.first_name, "Service");
+ }
+}
diff --git a/src/connectors/admin_service/mod.rs b/src/connectors/admin_service/mod.rs
new file mode 100644
index 0000000..164e3f0
--- /dev/null
+++ b/src/connectors/admin_service/mod.rs
@@ -0,0 +1,10 @@
+//! Admin Service connector module
+//!
+//! Provides helper utilities for authenticating internal admin services via JWT tokens.
+
+pub mod jwt;
+
+pub use jwt::{
+ extract_bearer_token, parse_jwt_claims, user_from_jwt_claims, validate_jwt_expiration,
+ JwtClaims,
+};
diff --git a/src/connectors/config.rs b/src/connectors/config.rs
new file mode 100644
index 0000000..7122ed3
--- /dev/null
+++ b/src/connectors/config.rs
@@ -0,0 +1,168 @@
+use serde::{Deserialize, Serialize};
+
+/// Configuration for external service connectors
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct ConnectorConfig {
+ pub user_service: Option<UserServiceConfig>,
+ pub payment_service: Option<PaymentServiceConfig>,
+ pub events: Option<EventsConfig>,
+ pub dockerhub_service: Option<DockerHubConnectorConfig>,
+}
+
+/// User Service connector configuration
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct UserServiceConfig {
+ /// Enable/disable User Service integration
+ pub enabled: bool,
+ /// Base URL for User Service API (e.g., http://localhost:4100/server/user)
+ pub base_url: String,
+ /// HTTP request timeout in seconds
+ pub timeout_secs: u64,
+ /// Number of retry attempts for failed requests
+ pub retry_attempts: usize,
+ /// OAuth token for inter-service authentication (from env: USER_SERVICE_AUTH_TOKEN)
+ #[serde(skip)]
+ pub auth_token: Option<String>,
+}
+
+impl Default for UserServiceConfig {
+ fn default() -> Self {
+ Self {
+ enabled: false,
+ base_url: "http://localhost:4100/server/user".to_string(),
+ timeout_secs: 10,
+ retry_attempts: 3,
+ auth_token: None,
+ }
+ }
+}
+
+/// Payment Service connector configuration
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct PaymentServiceConfig {
+ /// Enable/disable Payment Service integration
+ pub enabled: bool,
+ /// Base URL for Payment Service API (e.g., http://localhost:8000)
+ pub base_url: String,
+ /// HTTP request timeout in seconds
+ pub timeout_secs: u64,
+ /// Bearer token for authentication
+ #[serde(skip)]
+ pub auth_token: Option<String>,
+}
+
+impl Default for PaymentServiceConfig {
+ fn default() -> Self {
+ Self {
+ enabled: false,
+ base_url: "http://localhost:8000".to_string(),
+ timeout_secs: 15,
+ auth_token: None,
+ }
+ }
+}
+
+/// RabbitMQ Events configuration
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct EventsConfig {
+ /// Enable/disable async event publishing
+ pub enabled: bool,
+ /// AMQP connection string (amqp://user:password@host:port/%2f)
+ pub amqp_url: String,
+ /// Event exchange name
+ pub exchange: String,
+ /// Prefetch count for consumer
+ pub prefetch: u16,
+}
+
+impl Default for EventsConfig {
+ fn default() -> Self {
+ Self {
+ enabled: false,
+ amqp_url: "amqp://guest:guest@localhost:5672/%2f".to_string(),
+ exchange: "stacker_events".to_string(),
+ prefetch: 10,
+ }
+ }
+}
+
+impl Default for ConnectorConfig {
+ fn default() -> Self {
+ Self {
+ user_service: Some(UserServiceConfig::default()),
+ payment_service: Some(PaymentServiceConfig::default()),
+ events: Some(EventsConfig::default()),
+ dockerhub_service: Some(DockerHubConnectorConfig::default()),
+ }
+ }
+}
+
+/// Docker Hub caching connector configuration
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct DockerHubConnectorConfig {
+ /// Enable/disable Docker Hub connector
+ pub enabled: bool,
+ /// Docker Hub API base URL
+ pub base_url: String,
+ /// HTTP timeout in seconds
+ pub timeout_secs: u64,
+ /// Number of retry attempts for transient failures
+ pub retry_attempts: usize,
+ /// Page size when fetching namespaces/repositories/tags
+ #[serde(default = "DockerHubConnectorConfig::default_page_size")]
+ pub page_size: u32,
+ /// Optional Redis connection string override
+ #[serde(default)]
+ pub redis_url: Option<String>,
+ /// Cache TTL for namespace search results
+ #[serde(default = "DockerHubConnectorConfig::default_namespaces_ttl")]
+ pub cache_ttl_namespaces_secs: u64,
+ /// Cache TTL for repository listings
+ #[serde(default = "DockerHubConnectorConfig::default_repositories_ttl")]
+ pub cache_ttl_repositories_secs: u64,
+ /// Cache TTL for tag listings
+ #[serde(default = "DockerHubConnectorConfig::default_tags_ttl")]
+ pub cache_ttl_tags_secs: u64,
+ /// Optional Docker Hub username (falls back to DOCKERHUB_USERNAME env)
+ #[serde(default)]
+ pub username: Option<String>,
+ /// Optional Docker Hub personal access token (falls back to DOCKERHUB_TOKEN env)
+ #[serde(default)]
+ pub personal_access_token: Option<String>,
+}
+
+impl DockerHubConnectorConfig {
+ const fn default_page_size() -> u32 {
+ 50
+ }
+
+ const fn default_namespaces_ttl() -> u64 {
+ 86_400
+ }
+
+ const fn default_repositories_ttl() -> u64 {
+ 21_600
+ }
+
+ const fn default_tags_ttl() -> u64 {
+ 3_600
+ }
+}
+
+impl Default for DockerHubConnectorConfig {
+ fn default() -> Self {
+ Self {
+ enabled: true,
+ base_url: "https://hub.docker.com".to_string(),
+ timeout_secs: 10,
+ retry_attempts: 3,
+ page_size: Self::default_page_size(),
+ redis_url: Some("redis://127.0.0.1/0".to_string()),
+ cache_ttl_namespaces_secs: Self::default_namespaces_ttl(),
+ cache_ttl_repositories_secs: Self::default_repositories_ttl(),
+ cache_ttl_tags_secs: Self::default_tags_ttl(),
+ username: None,
+ personal_access_token: None,
+ }
+ }
+}
diff --git a/src/connectors/dockerhub_service.rs b/src/connectors/dockerhub_service.rs
new file mode 100644
index 0000000..e9aaefd
--- /dev/null
+++ b/src/connectors/dockerhub_service.rs
@@ -0,0 +1,722 @@
+use super::config::{ConnectorConfig, DockerHubConnectorConfig};
+use super::errors::ConnectorError;
+use actix_web::web;
+use async_trait::async_trait;
+use base64::{engine::general_purpose, Engine as _};
+use redis::aio::ConnectionManager;
+use redis::AsyncCommands;
+use reqwest::{Method, StatusCode};
+use serde::de::DeserializeOwned;
+use serde::{Deserialize, Serialize};
+use serde_json::Value;
+use std::collections::HashSet;
+use std::sync::Arc;
+use std::time::Duration;
+use tokio::sync::Mutex;
+use tracing::Instrument;
+
+#[derive(Debug, Clone, Serialize, Deserialize, Default)]
+pub struct NamespaceSummary {
+ pub name: String,
+ #[serde(default)]
+ pub namespace_type: Option<String>,
+ #[serde(default)]
+ pub description: Option<String>,
+ pub is_user: bool,
+ pub is_organization: bool,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, Default)]
+pub struct RepositorySummary {
+ pub name: String,
+ pub namespace: String,
+ #[serde(default)]
+ pub description: Option<String>,
+ #[serde(default)]
+ pub last_updated: Option<String>,
+ pub is_private: bool,
+ #[serde(default)]
+ pub star_count: Option<u64>,
+ #[serde(default)]
+ pub pull_count: Option<u64>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, Default)]
+pub struct TagSummary {
+ pub name: String,
+ #[serde(default)]
+ pub digest: Option<String>,
+ #[serde(default)]
+ pub last_updated: Option<String>,
+ #[serde(default)]
+ pub tag_status: Option<String>,
+ #[serde(default)]
+ pub content_type: Option<String>,
+}
+
+#[async_trait]
+pub trait DockerHubConnector: Send + Sync {
+ async fn search_namespaces(&self, query: &str)
+ -> Result<Vec<NamespaceSummary>, ConnectorError>;
+ async fn list_repositories(
+ &self,
+ namespace: &str,
+ query: Option<&str>,
+ ) -> Result<Vec<RepositorySummary>, ConnectorError>;
+ async fn list_tags(
+ &self,
+ namespace: &str,
+ repository: &str,
+ query: Option<&str>,
+ ) -> Result<Vec<TagSummary>, ConnectorError>;
+}
+
+#[derive(Clone)]
+struct RedisCache {
+ connection: Arc<Mutex<ConnectionManager>>,
+}
+
+impl RedisCache {
+ async fn new(redis_url: &str) -> Result<Self, ConnectorError> {
+ let client = redis::Client::open(redis_url).map_err(|err| {
+ ConnectorError::Internal(format!("Invalid Redis URL for Docker Hub cache: {}", err))
+ })?;
+
+ let connection = ConnectionManager::new(client).await.map_err(|err| {
+ ConnectorError::ServiceUnavailable(format!("Redis unavailable: {}", err))
+ })?;
+
+ Ok(Self {
+ connection: Arc::new(Mutex::new(connection)),
+ })
+ }
+
+ async fn get<T>(&self, key: &str) -> Result<Option<T>, ConnectorError>
+ where
+ T: DeserializeOwned,
+ {
+ let mut conn = self.connection.lock().await;
+ let value: Option<String> = conn.get(key).await.map_err(|err| {
+ ConnectorError::ServiceUnavailable(format!("Redis GET failed: {}", err))
+ })?;
+
+ if let Some(payload) = value {
+ if payload.is_empty() {
+ return Ok(None);
+ }
+ serde_json::from_str::<T>(&payload)
+ .map(Some)
+ .map_err(|err| ConnectorError::Internal(format!("Cache decode failed: {}", err)))
+ } else {
+ Ok(None)
+ }
+ }
+
+ async fn set<T>(&self, key: &str, value: &T, ttl_secs: u64) -> Result<(), ConnectorError>
+ where
+ T: Serialize,
+ {
+ if ttl_secs == 0 {
+ return Ok(());
+ }
+
+ let payload = serde_json::to_string(value)
+ .map_err(|err| ConnectorError::Internal(format!("Cache encode failed: {}", err)))?;
+
+ let mut conn = self.connection.lock().await;
+ let (): () = conn
+ .set_ex(key, payload, ttl_secs as u64)
+ .await
+ .map_err(|err| {
+ ConnectorError::ServiceUnavailable(format!("Redis SET failed: {}", err))
+ })?;
+ Ok(())
+ }
+}
+
+#[derive(Clone, Copy)]
+struct CacheDurations {
+ namespaces: u64,
+ repositories: u64,
+ tags: u64,
+}
+
+pub struct DockerHubClient {
+ base_url: String,
+ http_client: reqwest::Client,
+ auth_header: Option<String>,
+ retry_attempts: usize,
+ cache: RedisCache,
+ cache_ttls: CacheDurations,
+ user_agent: String,
+ page_size: u32,
+}
+
+impl DockerHubClient {
+ pub async fn new(mut config: DockerHubConnectorConfig) -> Result<Self, ConnectorError> {
+ if config.redis_url.is_none() {
+ config.redis_url = std::env::var("DOCKERHUB_REDIS_URL")
+ .ok()
+ .or_else(|| std::env::var("REDIS_URL").ok());
+ }
+
+ let redis_url = config
+ .redis_url
+ .clone()
+ .unwrap_or_else(|| "redis://127.0.0.1/0".to_string());
+ let cache = RedisCache::new(&redis_url).await?;
+
+ let timeout = Duration::from_secs(config.timeout_secs.max(1));
+ let http_client = reqwest::Client::builder()
+ .timeout(timeout)
+ .build()
+ .map_err(|err| ConnectorError::Internal(format!("HTTP client error: {}", err)))?;
+
+ let auth_header = Self::build_auth_header(&config.username, &config.personal_access_token);
+ let base_url = config.base_url.trim_end_matches('/').to_string();
+
+ Ok(Self {
+ base_url,
+ http_client,
+ auth_header,
+ retry_attempts: config.retry_attempts.max(1),
+ cache,
+ cache_ttls: CacheDurations {
+ namespaces: config.cache_ttl_namespaces_secs,
+ repositories: config.cache_ttl_repositories_secs,
+ tags: config.cache_ttl_tags_secs,
+ },
+ user_agent: format!("stacker-dockerhub-client/{}", env!("CARGO_PKG_VERSION")),
+ page_size: config.page_size.clamp(1, 100),
+ })
+ }
+
+ fn build_auth_header(username: &Option<String>, token: &Option<String>) -> Option<String> {
+ match (username, token) {
+ (Some(user), Some(token)) if !user.is_empty() && !token.is_empty() => {
+ let encoded = general_purpose::STANDARD.encode(format!("{user}:{token}"));
+ Some(format!("Basic {}", encoded))
+ }
+ (None, Some(token)) if !token.is_empty() => Some(format!("Bearer {}", token)),
+ _ => None,
+ }
+ }
+
+ fn encode_segment(segment: &str) -> String {
+ urlencoding::encode(segment).into_owned()
+ }
+
+ fn cache_suffix(input: &str) -> String {
+ let normalized = input.trim();
+ if normalized.is_empty() {
+ "all".to_string()
+ } else {
+ normalized.to_lowercase()
+ }
+ }
+
+ async fn read_cache<T>(&self, key: &str) -> Option<T>
+ where
+ T: DeserializeOwned,
+ {
+ match self.cache.get(key).await {
+ Ok(value) => value,
+ Err(err) => {
+ tracing::debug!(error = %err, cache_key = key, "Docker Hub cache read failed");
+ None
+ }
+ }
+ }
+
+ async fn write_cache<T>(&self, key: &str, value: &T, ttl: u64)
+ where
+ T: Serialize,
+ {
+ if let Err(err) = self.cache.set(key, value, ttl).await {
+ tracing::debug!(error = %err, cache_key = key, "Docker Hub cache write failed");
+ }
+ }
+
+ async fn send_request(
+ &self,
+ method: Method,
+ path: &str,
+ query: Vec<(String, String)>,
+ ) -> Result<Value, ConnectorError> {
+ let mut attempt = 0usize;
+ let mut last_error: Option<ConnectorError> = None;
+
+ while attempt < self.retry_attempts {
+ attempt += 1;
+ let mut builder = self
+ .http_client
+ .request(method.clone(), format!("{}{}", self.base_url, path))
+ .header("User-Agent", &self.user_agent);
+
+ if let Some(auth) = &self.auth_header {
+ builder = builder.header("Authorization", auth);
+ }
+
+ if !query.is_empty() {
+ builder = builder.query(&query);
+ }
+
+ let span = tracing::info_span!(
+ "dockerhub_http_request",
+ path,
+ attempt,
+ method = %method,
+ );
+
+ match builder.send().instrument(span).await {
+ Ok(resp) => {
+ let status = resp.status();
+ let text = resp
+ .text()
+ .await
+ .map_err(|err| ConnectorError::HttpError(err.to_string()))?;
+
+ if status.is_success() {
+ return serde_json::from_str::<Value>(&text)
+ .map_err(|_| ConnectorError::InvalidResponse(text));
+ }
+
+ let error = match status {
+ StatusCode::UNAUTHORIZED | StatusCode::FORBIDDEN => {
+ ConnectorError::Unauthorized(text)
+ }
+ StatusCode::NOT_FOUND => ConnectorError::NotFound(text),
+ StatusCode::TOO_MANY_REQUESTS => ConnectorError::RateLimited(text),
+ status if status.is_server_error() => ConnectorError::ServiceUnavailable(
+ format!("Docker Hub error {}: {}", status, text),
+ ),
+ status => ConnectorError::HttpError(format!(
+ "Docker Hub error {}: {}",
+ status, text
+ )),
+ };
+
+ if !status.is_server_error() {
+ return Err(error);
+ }
+ last_error = Some(error);
+ }
+ Err(err) => {
+ last_error = Some(ConnectorError::from(err));
+ }
+ }
+
+ if attempt < self.retry_attempts {
+ let backoff = Duration::from_millis(100 * (1_u64 << (attempt - 1)));
+ tokio::time::sleep(backoff).await;
+ }
+ }
+
+ Err(last_error.unwrap_or_else(|| {
+ ConnectorError::ServiceUnavailable("Docker Hub request failed".to_string())
+ }))
+ }
+
+ fn parse_repository_response(payload: Value) -> Vec<RepositorySummary> {
+ Self::extract_items(&payload, &["results", "repositories"])
+ .into_iter()
+ .filter_map(|item| {
+ let (namespace, name) = Self::resolve_namespace_and_name(&item)?;
+
+ Some(RepositorySummary {
+ name,
+ namespace,
+ description: item
+ .get("description")
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_string()),
+ last_updated: item
+ .get("last_updated")
+ .or_else(|| item.get("last_push"))
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_string()),
+ is_private: item
+ .get("is_private")
+ .or_else(|| item.get("private"))
+ .and_then(|v| v.as_bool())
+ .unwrap_or(false),
+ star_count: item.get("star_count").and_then(|v| v.as_u64()),
+ pull_count: item.get("pull_count").and_then(|v| v.as_u64()),
+ })
+ })
+ .collect()
+ }
+
+ fn parse_tag_response(payload: Value) -> Vec<TagSummary> {
+ Self::extract_items(&payload, &["results", "tags"])
+ .into_iter()
+ .filter_map(|item| {
+ let name = item.get("name")?.as_str()?.to_string();
+ Some(TagSummary {
+ name,
+ digest: item
+ .get("digest")
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_string()),
+ last_updated: item
+ .get("last_updated")
+ .or_else(|| item.get("tag_last_pushed"))
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_string()),
+ tag_status: item
+ .get("tag_status")
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_string()),
+ content_type: item
+ .get("content_type")
+ .or_else(|| item.get("media_type"))
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_string()),
+ })
+ })
+ .collect()
+ }
+
+ fn extract_items(payload: &Value, keys: &[&str]) -> Vec<Value> {
+ for key in keys {
+ if let Some(array) = payload.get(*key).and_then(|value| value.as_array()) {
+ return array.clone();
+ }
+ }
+
+ payload.as_array().cloned().unwrap_or_default()
+ }
+
+ fn resolve_namespace_and_name(item: &Value) -> Option<(String, String)> {
+ let mut namespace = item
+ .get("namespace")
+ .or_else(|| item.get("user"))
+ .or_else(|| item.get("organization"))
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_string());
+
+ let mut repo_name = item
+ .get("name")
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_string())?;
+
+ if namespace.as_ref().map(|s| s.is_empty()).unwrap_or(true) {
+ if let Some(slug) = item
+ .get("slug")
+ .or_else(|| item.get("repo_name"))
+ .and_then(|v| v.as_str())
+ {
+ if let Some((ns, repo)) = slug.split_once('/') {
+ namespace = Some(ns.to_string());
+ repo_name = repo.to_string();
+ }
+ }
+ }
+
+ if namespace.as_ref().map(|s| s.is_empty()).unwrap_or(true) && repo_name.contains('/') {
+ if let Some((ns, repo)) = repo_name.split_once('/') {
+ namespace = Some(ns.to_string());
+ repo_name = repo.to_string();
+ }
+ }
+
+ namespace.and_then(|ns| {
+ if ns.is_empty() {
+ None
+ } else {
+ Some((ns, repo_name))
+ }
+ })
+ }
+}
+
+#[async_trait]
+impl DockerHubConnector for DockerHubClient {
+ async fn search_namespaces(
+ &self,
+ query: &str,
+ ) -> Result<Vec<NamespaceSummary>, ConnectorError> {
+ let cache_key = format!("dockerhub:namespaces:{}", Self::cache_suffix(query));
+ if let Some(cached) = self.read_cache::<Vec<NamespaceSummary>>(&cache_key).await {
+ return Ok(cached);
+ }
+
+ let mut query_params = vec![("page_size".to_string(), self.page_size.to_string())];
+ let trimmed = query.trim();
+ if !trimmed.is_empty() {
+ query_params.push(("query".to_string(), trimmed.to_string()));
+ }
+
+ let payload = self
+ .send_request(Method::GET, "/v2/search/repositories/", query_params)
+ .await?;
+ let repositories = Self::parse_repository_response(payload);
+
+ let mut seen = HashSet::new();
+ let mut namespaces = Vec::new();
+ for repo in repositories {
+ if repo.namespace.is_empty() || !seen.insert(repo.namespace.clone()) {
+ continue;
+ }
+
+ namespaces.push(NamespaceSummary {
+ name: repo.namespace.clone(),
+ namespace_type: None,
+ description: repo.description.clone(),
+ is_user: false,
+ is_organization: false,
+ });
+ }
+
+ self.write_cache(&cache_key, &namespaces, self.cache_ttls.namespaces)
+ .await;
+ Ok(namespaces)
+ }
+
+ async fn list_repositories(
+ &self,
+ namespace: &str,
+ query: Option<&str>,
+ ) -> Result<Vec<RepositorySummary>, ConnectorError> {
+ let cache_key = format!(
+ "dockerhub:repos:{}:{}",
+ Self::cache_suffix(namespace),
+ Self::cache_suffix(query.unwrap_or_default())
+ );
+
+ if let Some(cached) = self.read_cache::<Vec<RepositorySummary>>(&cache_key).await {
+ return Ok(cached);
+ }
+
+ let mut query_params = vec![("page_size".to_string(), self.page_size.to_string())];
+ if let Some(filter) = query {
+ let trimmed = filter.trim();
+ if !trimmed.is_empty() {
+ query_params.push(("name".to_string(), trimmed.to_string()));
+ }
+ }
+
+ let path = format!(
+ "/v2/namespaces/{}/repositories",
+ Self::encode_segment(namespace)
+ );
+
+ let payload = self.send_request(Method::GET, &path, query_params).await?;
+ let repositories = Self::parse_repository_response(payload);
+ self.write_cache(&cache_key, &repositories, self.cache_ttls.repositories)
+ .await;
+ Ok(repositories)
+ }
+
+ async fn list_tags(
+ &self,
+ namespace: &str,
+ repository: &str,
+ query: Option<&str>,
+ ) -> Result<Vec<TagSummary>, ConnectorError> {
+ let cache_key = format!(
+ "dockerhub:tags:{}:{}:{}",
+ Self::cache_suffix(namespace),
+ Self::cache_suffix(repository),
+ Self::cache_suffix(query.unwrap_or_default())
+ );
+
+ if let Some(cached) = self.read_cache::<Vec<TagSummary>>(&cache_key).await {
+ return Ok(cached);
+ }
+
+ let mut query_params = vec![("page_size".to_string(), self.page_size.to_string())];
+ if let Some(filter) = query {
+ let trimmed = filter.trim();
+ if !trimmed.is_empty() {
+ query_params.push(("name".to_string(), trimmed.to_string()));
+ }
+ }
+
+ let path = format!(
+ "/v2/namespaces/{}/repositories/{}/tags",
+ Self::encode_segment(namespace),
+ Self::encode_segment(repository)
+ );
+
+ let payload = self.send_request(Method::GET, &path, query_params).await?;
+ let tags = Self::parse_tag_response(payload);
+ self.write_cache(&cache_key, &tags, self.cache_ttls.tags)
+ .await;
+ Ok(tags)
+ }
+}
+
+/// Initialize Docker Hub connector from app settings
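+///
+/// The returned `web::Data` is meant to be registered on the actix `App` at
+/// startup. A minimal wiring sketch (the surrounding server setup is assumed,
+/// not shown here):
+///
+/// ```ignore
+/// let dockerhub = connectors::init_dockerhub(&connector_config).await;
+/// let app = App::new().app_data(dockerhub.clone());
+/// ```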
+pub async fn init(connector_config: &ConnectorConfig) -> web::Data<Arc<dyn DockerHubConnector>> {
+ let connector: Arc<dyn DockerHubConnector> = if let Some(config) = connector_config
+ .dockerhub_service
+ .as_ref()
+ .filter(|cfg| cfg.enabled)
+ {
+ let mut cfg = config.clone();
+
+ if cfg.username.is_none() {
+ cfg.username = std::env::var("DOCKERHUB_USERNAME").ok();
+ }
+
+ if cfg.personal_access_token.is_none() {
+ cfg.personal_access_token = std::env::var("DOCKERHUB_TOKEN").ok();
+ }
+
+ if cfg.redis_url.is_none() {
+ cfg.redis_url = std::env::var("DOCKERHUB_REDIS_URL")
+ .ok()
+ .or_else(|| std::env::var("REDIS_URL").ok());
+ }
+
+ match DockerHubClient::new(cfg.clone()).await {
+ Ok(client) => {
+ tracing::info!("Docker Hub connector initialized ({})", cfg.base_url);
+ Arc::new(client)
+ }
+ Err(err) => {
+ tracing::error!(
+ error = %err,
+ "Failed to initialize Docker Hub connector, falling back to mock"
+ );
+ Arc::new(mock::MockDockerHubConnector::default())
+ }
+ }
+ } else {
+ tracing::warn!("Docker Hub connector disabled - using mock responses");
+ Arc::new(mock::MockDockerHubConnector::default())
+ };
+
+ web::Data::new(connector)
+}
+
+pub mod mock {
+ use super::*;
+
+ #[derive(Default)]
+ pub struct MockDockerHubConnector;
+
+ #[async_trait]
+ impl DockerHubConnector for MockDockerHubConnector {
+ async fn search_namespaces(
+ &self,
+ query: &str,
+ ) -> Result<Vec<NamespaceSummary>, ConnectorError> {
+ let mut namespaces = vec![
+ NamespaceSummary {
+ name: "trydirect".to_string(),
+ namespace_type: Some("organization".to_string()),
+ description: Some("TryDirect maintained images".to_string()),
+ is_user: false,
+ is_organization: true,
+ },
+ NamespaceSummary {
+ name: "stacker-labs".to_string(),
+ namespace_type: Some("organization".to_string()),
+ description: Some("Stacker lab images".to_string()),
+ is_user: false,
+ is_organization: true,
+ },
+ NamespaceSummary {
+ name: "dev-user".to_string(),
+ namespace_type: Some("user".to_string()),
+ description: Some("Individual maintainer".to_string()),
+ is_user: true,
+ is_organization: false,
+ },
+ ];
+
+ let needle = query.trim().to_lowercase();
+ if !needle.is_empty() {
+ namespaces.retain(|ns| ns.name.to_lowercase().contains(&needle));
+ }
+ Ok(namespaces)
+ }
+
+ async fn list_repositories(
+ &self,
+ namespace: &str,
+ query: Option<&str>,
+ ) -> Result<Vec<RepositorySummary>, ConnectorError> {
+ let mut repositories = vec![
+ RepositorySummary {
+ name: "stacker-api".to_string(),
+ namespace: namespace.to_string(),
+ description: Some("Stacker API service".to_string()),
+ last_updated: Some("2026-01-01T00:00:00Z".to_string()),
+ is_private: false,
+ star_count: Some(42),
+ pull_count: Some(10_000),
+ },
+ RepositorySummary {
+ name: "agent-runner".to_string(),
+ namespace: namespace.to_string(),
+ description: Some("Agent runtime image".to_string()),
+ last_updated: Some("2026-01-03T00:00:00Z".to_string()),
+ is_private: false,
+ star_count: Some(8),
+ pull_count: Some(1_200),
+ },
+ ];
+
+ if let Some(filter) = query {
+ let needle = filter.trim().to_lowercase();
+ if !needle.is_empty() {
+ repositories.retain(|repo| repo.name.to_lowercase().contains(&needle));
+ }
+ }
+ Ok(repositories)
+ }
+
+ async fn list_tags(
+ &self,
+ _namespace: &str,
+ repository: &str,
+ query: Option<&str>,
+ ) -> Result<Vec<TagSummary>, ConnectorError> {
+ let mut tags = vec![
+ TagSummary {
+ name: "latest".to_string(),
+ digest: Some(format!("sha256:{:x}", 1)),
+ last_updated: Some("2026-01-03T12:00:00Z".to_string()),
+ tag_status: Some("active".to_string()),
+ content_type: Some(
+ "application/vnd.docker.distribution.manifest.v2+json".to_string(),
+ ),
+ },
+ TagSummary {
+ name: "v1.2.3".to_string(),
+ digest: Some(format!("sha256:{:x}", 2)),
+ last_updated: Some("2026-01-02T08:00:00Z".to_string()),
+ tag_status: Some("active".to_string()),
+ content_type: Some(
+ "application/vnd.docker.distribution.manifest.v2+json".to_string(),
+ ),
+ },
+ ];
+
+ let needle = query.unwrap_or_default().trim().to_lowercase();
+ if !needle.is_empty() {
+ tags.retain(|tag| tag.name.to_lowercase().contains(&needle));
+ }
+
+ // Slightly mutate digests to include repository so tests can differentiate
+ for (idx, tag) in tags.iter_mut().enumerate() {
+ if tag.digest.is_some() {
+ tag.digest = Some(format!(
+ "sha256:{:x}{}",
+ idx,
+ repository
+ .to_lowercase()
+ .chars()
+ .take(4)
+ .collect::<String>()
+ ));
+ }
+ }
+
+ Ok(tags)
+ }
+ }
+}
diff --git a/src/connectors/errors.rs b/src/connectors/errors.rs
new file mode 100644
index 0000000..6b521b5
--- /dev/null
+++ b/src/connectors/errors.rs
@@ -0,0 +1,81 @@
+use actix_web::{error::ResponseError, http::StatusCode, HttpResponse};
+use serde_json::json;
+use std::fmt;
+
+/// Errors that can occur during external service communication
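+///
+/// Because `ConnectorError` implements actix-web's `ResponseError` (below), route
+/// handlers can return it directly and the JSON error body defined there is
+/// rendered. A rough sketch (the handler name and its arguments are illustrative
+/// only, not part of this module):
+///
+/// ```ignore
+/// async fn search_namespaces_route(
+///     connector: web::Data<Arc<dyn DockerHubConnector>>,
+/// ) -> Result<HttpResponse, ConnectorError> {
+///     let namespaces = connector.search_namespaces("nginx").await?;
+///     Ok(HttpResponse::Ok().json(namespaces))
+/// }
+/// ```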
+#[derive(Debug)]
+pub enum ConnectorError {
+ /// HTTP request/response error
+ HttpError(String),
+ /// Service unreachable or timeout
+ ServiceUnavailable(String),
+ /// Invalid response format from external service
+ InvalidResponse(String),
+ /// Authentication error (401/403)
+ Unauthorized(String),
+ /// Not found (404)
+ NotFound(String),
+ /// Rate limited or exceeded quota
+ RateLimited(String),
+ /// Internal error in connector
+ Internal(String),
+}
+
+impl fmt::Display for ConnectorError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Self::HttpError(msg) => write!(f, "HTTP error: {}", msg),
+ Self::ServiceUnavailable(msg) => write!(f, "Service unavailable: {}", msg),
+ Self::InvalidResponse(msg) => write!(f, "Invalid response: {}", msg),
+ Self::Unauthorized(msg) => write!(f, "Unauthorized: {}", msg),
+ Self::NotFound(msg) => write!(f, "Not found: {}", msg),
+ Self::RateLimited(msg) => write!(f, "Rate limited: {}", msg),
+ Self::Internal(msg) => write!(f, "Internal error: {}", msg),
+ }
+ }
+}
+
+impl ResponseError for ConnectorError {
+ fn error_response(&self) -> HttpResponse {
+ let (status, message) = match self {
+ Self::HttpError(_) => (StatusCode::BAD_GATEWAY, "External service error"),
+ Self::ServiceUnavailable(_) => (StatusCode::SERVICE_UNAVAILABLE, "Service unavailable"),
+ Self::InvalidResponse(_) => {
+ (StatusCode::BAD_GATEWAY, "Invalid external service response")
+ }
+ Self::Unauthorized(_) => (StatusCode::UNAUTHORIZED, "Unauthorized"),
+ Self::NotFound(_) => (StatusCode::NOT_FOUND, "Resource not found"),
+ Self::RateLimited(_) => (StatusCode::TOO_MANY_REQUESTS, "Rate limit exceeded"),
+ Self::Internal(_) => (StatusCode::INTERNAL_SERVER_ERROR, "Internal error"),
+ };
+
+ HttpResponse::build(status).json(json!({
+ "error": message,
+ "details": self.to_string(),
+ }))
+ }
+
+ fn status_code(&self) -> StatusCode {
+ match self {
+ Self::HttpError(_) => StatusCode::BAD_GATEWAY,
+ Self::ServiceUnavailable(_) => StatusCode::SERVICE_UNAVAILABLE,
+ Self::InvalidResponse(_) => StatusCode::BAD_GATEWAY,
+ Self::Unauthorized(_) => StatusCode::UNAUTHORIZED,
+ Self::NotFound(_) => StatusCode::NOT_FOUND,
+ Self::RateLimited(_) => StatusCode::TOO_MANY_REQUESTS,
+ Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR,
+ }
+ }
+}
+
+impl From<reqwest::Error> for ConnectorError {
+ fn from(err: reqwest::Error) -> Self {
+ if err.is_timeout() {
+ Self::ServiceUnavailable(format!("Request timeout: {}", err))
+ } else if err.is_connect() {
+ Self::ServiceUnavailable(format!("Connection failed: {}", err))
+ } else {
+ Self::HttpError(err.to_string())
+ }
+ }
+}
diff --git a/src/connectors/install_service/client.rs b/src/connectors/install_service/client.rs
new file mode 100644
index 0000000..d82d486
--- /dev/null
+++ b/src/connectors/install_service/client.rs
@@ -0,0 +1,74 @@
+use super::InstallServiceConnector;
+use crate::forms::project::Stack;
+use crate::helpers::{compressor::compress, MqManager};
+use crate::models;
+use async_trait::async_trait;
+use uuid::Uuid;
+
+/// Real implementation that publishes deployment requests through RabbitMQ
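+///
+/// A minimal call sketch through the trait object (the surrounding values are
+/// assumed to be loaded elsewhere; the variable names are illustrative only):
+///
+/// ```ignore
+/// let connector: Arc<dyn InstallServiceConnector> = Arc::new(InstallServiceClient);
+/// let project_id = connector
+///     .deploy(user_id, user_email, project.id, &project, cloud, server, &stack, compose, &mq_manager)
+///     .await?;
+/// ```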
+pub struct InstallServiceClient;
+
+#[async_trait]
+impl InstallServiceConnector for InstallServiceClient {
+ async fn deploy(
+ &self,
+ user_id: String,
+ user_email: String,
+ project_id: i32,
+ project: &models::Project,
+ cloud_creds: models::Cloud,
+ server: models::Server,
+ form_stack: &Stack,
+ fc: String,
+ mq_manager: &MqManager,
+ ) -> Result<i32, String> {
+ // Build payload for the install service
+ let mut payload = crate::forms::project::Payload::try_from(project)
+ .map_err(|err| format!("Failed to build payload: {}", err))?;
+
+ payload.server = Some(server.into());
+ payload.cloud = Some(cloud_creds.into());
+ payload.stack = form_stack.clone().into();
+ payload.user_token = Some(user_id);
+ payload.user_email = Some(user_email);
+ payload.docker_compose = Some(compress(fc.as_str()));
+
+ // Prepare deployment metadata
+ let json_request = project.metadata.clone();
+ let deployment_hash = format!("deployment_{}", Uuid::new_v4());
+ let _deployment = models::Deployment::new(
+ project.id,
+ payload.user_token.clone(),
+ deployment_hash.clone(),
+ String::from("pending"),
+ json_request,
+ );
+
+ let _deployment_id = Uuid::new_v4();
+
+ tracing::debug!("Send project data: {:?}", payload);
+
+ let provider = payload
+ .cloud
+ .as_ref()
+ .map(|form| {
+ if form.provider.contains("own") {
+ "own"
+ } else {
+ "tfa"
+ }
+ })
+ .unwrap_or("tfa")
+ .to_string();
+
+ let routing_key = format!("install.start.{}.all.all", provider);
+ tracing::debug!("Route: {:?}", routing_key);
+
+ mq_manager
+ .publish("install".to_string(), routing_key, &payload)
+ .await
+ .map_err(|err| format!("Failed to publish to MQ: {}", err))?;
+
+ Ok(project_id)
+ }
+}
diff --git a/src/connectors/install_service/mock.rs b/src/connectors/install_service/mock.rs
new file mode 100644
index 0000000..ae58494
--- /dev/null
+++ b/src/connectors/install_service/mock.rs
@@ -0,0 +1,25 @@
+use super::InstallServiceConnector;
+use crate::forms::project::Stack;
+use crate::helpers::MqManager;
+use crate::models;
+use async_trait::async_trait;
+
+pub struct MockInstallServiceConnector;
+
+#[async_trait]
+impl InstallServiceConnector for MockInstallServiceConnector {
+ async fn deploy(
+ &self,
+ _user_id: String,
+ _user_email: String,
+ project_id: i32,
+ _project: &models::Project,
+ _cloud_creds: models::Cloud,
+ _server: models::Server,
+ _form_stack: &Stack,
+ _fc: String,
+ _mq_manager: &MqManager,
+ ) -> Result<i32, String> {
+ Ok(project_id)
+ }
+}
diff --git a/src/connectors/install_service/mod.rs b/src/connectors/install_service/mod.rs
new file mode 100644
index 0000000..e179ec4
--- /dev/null
+++ b/src/connectors/install_service/mod.rs
@@ -0,0 +1,33 @@
+//! Install Service connector module
+//!
+//! Provides abstractions for delegating deployments to the external install service.
+
+use crate::forms::project::Stack;
+use crate::helpers::MqManager;
+use crate::models;
+use async_trait::async_trait;
+
+pub mod client;
+#[cfg(test)]
+pub mod mock;
+
+pub use client::InstallServiceClient;
+#[cfg(test)]
+pub use mock::MockInstallServiceConnector;
+
+#[async_trait]
+pub trait InstallServiceConnector: Send + Sync {
+ /// Deploy a project using compose file and credentials via the install service
+ async fn deploy(
+ &self,
+ user_id: String,
+ user_email: String,
+ project_id: i32,
+ project: &models::Project,
+ cloud_creds: models::Cloud,
+ server: models::Server,
+ form_stack: &Stack,
+ fc: String,
+ mq_manager: &MqManager,
+ ) -> Result<i32, String>;
+}
diff --git a/src/connectors/mod.rs b/src/connectors/mod.rs
new file mode 100644
index 0000000..10eae67
--- /dev/null
+++ b/src/connectors/mod.rs
@@ -0,0 +1,66 @@
+//! External Service Connectors
+//!
+//! This module provides adapters for communicating with external services (User Service, Payment Service, etc.).
+//! All external integrations must go through connectors to keep Stacker independent and testable.
+//!
+//! ## Architecture Pattern
+//!
+//! 1. Define trait in `{service}.rs` → allows mocking in tests
+//! 2. Implement HTTP client in same file
+//! 3. Configuration in `config.rs` → enable/disable per environment
+//! 4. Inject trait object into routes → routes never depend on HTTP implementation
+//!
+//! ## Usage in Routes
+//!
+//! ```ignore
+//! // In route handler
+//! pub async fn deploy_template(
+//! connector: web::Data<Arc<dyn UserServiceConnector>>,
+//! ) -> Result<HttpResponse, Error> {
+//! // Routes use trait methods, never care about HTTP details
+//! connector.create_stack_from_template(...).await?;
+//! }
+//! ```
+//!
+//! ## Testing
+//!
+//! ```ignore
+//! #[cfg(test)]
+//! mod tests {
+//! use super::*;
+//! use connectors::user_service::mock::MockUserServiceConnector;
+//!
+//! #[tokio::test]
+//! async fn test_deploy_without_http() {
+//! let connector = Arc::new(MockUserServiceConnector);
+//! // Test route logic without external API calls
+//! }
+//! }
+//! ```
+
+pub mod admin_service;
+pub mod config;
+pub mod dockerhub_service;
+pub mod errors;
+pub mod install_service;
+pub mod user_service;
+
+pub use admin_service::{
+ extract_bearer_token, parse_jwt_claims, user_from_jwt_claims, validate_jwt_expiration,
+};
+pub use config::{ConnectorConfig, EventsConfig, PaymentServiceConfig, UserServiceConfig};
+pub use errors::ConnectorError;
+pub use install_service::{InstallServiceClient, InstallServiceConnector};
+pub use user_service::{
+ CategoryInfo, DeploymentValidationError, DeploymentValidator, MarketplaceWebhookPayload,
+ MarketplaceWebhookSender, PlanDefinition, ProductInfo, StackResponse, UserPlanInfo,
+ UserProduct, UserProfile, UserServiceClient, UserServiceConnector, WebhookResponse,
+ WebhookSenderConfig,
+};
+
+// Re-export init functions for convenient access
+pub use dockerhub_service::init as init_dockerhub;
+pub use dockerhub_service::{
+ DockerHubClient, DockerHubConnector, NamespaceSummary, RepositorySummary, TagSummary,
+};
+pub use user_service::init as init_user_service;
diff --git a/src/connectors/user_service/category_sync.rs b/src/connectors/user_service/category_sync.rs
new file mode 100644
index 0000000..2936342
--- /dev/null
+++ b/src/connectors/user_service/category_sync.rs
@@ -0,0 +1,88 @@
+//! Category synchronization from User Service to the local Stacker mirror
+//!
+//! Implements automatic category sync on startup to keep the local category table
+//! in sync with User Service as the source of truth.
+use sqlx::PgPool;
+use std::sync::Arc;
+use tracing::Instrument;
+
+use super::{CategoryInfo, UserServiceConnector};
+use crate::connectors::ConnectorError;
+
+/// Sync categories from User Service to local database
+///
+/// Fetches categories from User Service and upserts them into local stack_category table.
+/// This maintains a local mirror for fast lookups and offline capability.
+///
+/// # Arguments
+/// * `connector` - User Service connector to fetch categories from
+/// * `pool` - Database connection pool for local upsert
+///
+/// # Returns
+/// Number of categories synced, or error if sync fails
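+///
+/// # Example
+/// Typically invoked once on startup, after the connector and `PgPool` are built
+/// (variable names here are illustrative):
+///
+/// ```ignore
+/// let synced = sync_categories_from_user_service(connector.clone(), &pg_pool).await?;
+/// tracing::info!("category mirror refreshed ({} categories)", synced);
+/// ```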
+pub async fn sync_categories_from_user_service(
+ connector: Arc<dyn UserServiceConnector>,
+ pool: &PgPool,
+) -> Result<usize, String> {
+ let span = tracing::info_span!("sync_categories_from_user_service");
+
+ // Fetch categories from User Service
+ let categories = connector
+ .get_categories()
+ .instrument(span.clone())
+ .await
+ .map_err(|e| format!("Failed to fetch categories from User Service: {:?}", e))?;
+
+ tracing::info!("Fetched {} categories from User Service", categories.len());
+
+ if categories.is_empty() {
+ tracing::warn!("No categories returned from User Service");
+ return Ok(0);
+ }
+
+ // Upsert categories to local database
+ let synced_count = upsert_categories(pool, categories).instrument(span).await?;
+
+ tracing::info!(
+ "Successfully synced {} categories from User Service to local mirror",
+ synced_count
+ );
+
+ Ok(synced_count)
+}
+
+/// Upsert categories into local database
+async fn upsert_categories(pool: &PgPool, categories: Vec<CategoryInfo>) -> Result<usize, String> {
+ let mut synced_count = 0;
+
+ for category in categories {
+ // Use INSERT ... ON CONFLICT DO UPDATE to upsert
+ let result = sqlx::query(
+ r#"
+ INSERT INTO stack_category (id, name, title, metadata)
+ VALUES ($1, $2, $3, $4)
+ ON CONFLICT (id) DO UPDATE
+ SET name = EXCLUDED.name,
+ title = EXCLUDED.title,
+ metadata = EXCLUDED.metadata
+ "#,
+ )
+ .bind(category.id)
+ .bind(&category.name)
+ .bind(&category.title)
+ .bind(serde_json::json!({"priority": category.priority}))
+ .execute(pool)
+ .await
+ .map_err(|e| {
+ tracing::error!("Failed to upsert category {}: {:?}", category.name, e);
+ format!("Failed to upsert category: {}", e)
+ })?;
+
+ if result.rows_affected() > 0 {
+ synced_count += 1;
+ tracing::debug!("Synced category: {} ({})", category.name, category.title);
+ }
+ }
+
+ Ok(synced_count)
+}
diff --git a/src/connectors/user_service/deployment_validator.rs b/src/connectors/user_service/deployment_validator.rs
new file mode 100644
index 0000000..ecbfe02
--- /dev/null
+++ b/src/connectors/user_service/deployment_validator.rs
@@ -0,0 +1,360 @@
+//! Deployment validator for marketplace template ownership
+//!
+//! Validates that users can deploy marketplace templates they own.
+//! Implements plan gating (if template requires specific plan tier) and
+//! product ownership checks (if template is a paid marketplace product).
+use std::sync::Arc;
+use tracing::Instrument;
+
+use crate::connectors::{ConnectorError, UserServiceConnector};
+use crate::models;
+
+/// Custom error types for deployment validation
+#[derive(Debug, Clone)]
+pub enum DeploymentValidationError {
+ /// User's plan is insufficient for this template
+ InsufficientPlan {
+ required_plan: String,
+ user_plan: String,
+ },
+
+ /// User has not purchased this marketplace template
+ TemplateNotPurchased {
+ template_id: String,
+ product_price: Option<f64>,
+ },
+
+ /// Template not found in User Service
+ TemplateNotFound { template_id: String },
+
+ /// Failed to validate with User Service (unavailable, auth error, etc.)
+ ValidationFailed { reason: String },
+}
+
+impl std::fmt::Display for DeploymentValidationError {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ Self::InsufficientPlan {
+ required_plan,
+ user_plan,
+ } => write!(
+ f,
+ "You require a '{}' subscription to deploy this template (you have '{}')",
+ required_plan, user_plan
+ ),
+ Self::TemplateNotPurchased {
+ template_id,
+ product_price,
+ } => {
+ if let Some(price) = product_price {
+ write!(
+ f,
+ "This verified pro stack requires purchase (${:.2}). Please purchase it from the marketplace.",
+ price
+ )
+ } else {
+ write!(
+ f,
+ "You must purchase this template to deploy it. Template ID: {}",
+ template_id
+ )
+ }
+ }
+ Self::TemplateNotFound { template_id } => {
+ write!(f, "Template {} not found in marketplace", template_id)
+ }
+ Self::ValidationFailed { reason } => {
+ write!(f, "Failed to validate deployment: {}", reason)
+ }
+ }
+ }
+}
+
+/// Validator for marketplace template deployments
+pub struct DeploymentValidator {
+ user_service_connector: Arc<dyn UserServiceConnector>,
+}
+
+impl DeploymentValidator {
+ /// Create new deployment validator
+ pub fn new(user_service_connector: Arc<dyn UserServiceConnector>) -> Self {
+ Self {
+ user_service_connector,
+ }
+ }
+
+ /// Validate that user can deploy a marketplace template
+ ///
+ /// Checks:
+ /// 1. If template requires a plan tier, verify user has it
+ /// 2. If template is a paid marketplace product, verify user owns it
+ ///
+ /// # Arguments
+ /// * `template` - The stack template being deployed
+ /// * `user_token` - User's OAuth token for User Service queries
+ ///
+ /// # Returns
+ /// Ok(()) if validation passes, Err(DeploymentValidationError) otherwise
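+ ///
+ /// # Example
+ /// A rough call-site sketch (template lookup and token extraction happen
+ /// elsewhere; the error mapping shown is illustrative):
+ ///
+ /// ```ignore
+ /// let validator = DeploymentValidator::new(user_service_connector.clone());
+ /// validator
+ ///     .validate_template_deployment(&template, &user_token)
+ ///     .await
+ ///     .map_err(|e| ErrorForbidden(e.to_string()))?;
+ /// ```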
+ pub async fn validate_template_deployment(
+ &self,
+ template: &models::marketplace::StackTemplate,
+ user_token: &str,
+ ) -> Result<(), DeploymentValidationError> {
+ let span = tracing::info_span!(
+ "validate_template_deployment",
+ template_id = %template.id
+ );
+
+ // Check plan requirement first (if specified)
+ if let Some(required_plan) = &template.required_plan_name {
+ self.validate_plan_access(user_token, required_plan)
+ .instrument(span.clone())
+ .await?;
+ }
+
+ // Check marketplace template purchase (if it's a marketplace template with a product)
+ if template.product_id.is_some() {
+ self.validate_template_ownership(user_token, &template.id.to_string())
+ .instrument(span)
+ .await?;
+ }
+
+ tracing::info!("Template deployment validation successful");
+ Ok(())
+ }
+
+ /// Validate user has required plan tier
+ async fn validate_plan_access(
+ &self,
+ user_token: &str,
+ required_plan: &str,
+ ) -> Result<(), DeploymentValidationError> {
+ let span = tracing::info_span!("validate_plan_access", required_plan = required_plan);
+
+ // Extract user ID from token (or use token directly for User Service query)
+ // For now, we'll rely on User Service to validate the token
+ let has_plan = self
+ .user_service_connector
+ .user_has_plan(user_token, required_plan)
+ .instrument(span.clone())
+ .await
+ .map_err(|e| DeploymentValidationError::ValidationFailed {
+ reason: format!("Failed to check plan access: {}", e),
+ })?;
+
+ if !has_plan {
+ // Get user's actual plan for error message
+ let user_plan = self
+ .user_service_connector
+ .get_user_plan(user_token)
+ .instrument(span)
+ .await
+ .map(|info| info.plan_name)
+ .unwrap_or_else(|_| "unknown".to_string());
+
+ return Err(DeploymentValidationError::InsufficientPlan {
+ required_plan: required_plan.to_string(),
+ user_plan,
+ });
+ }
+
+ Ok(())
+ }
+
+ /// Validate user owns a marketplace template product
+ async fn validate_template_ownership(
+ &self,
+ user_token: &str,
+ stack_template_id: &str,
+ ) -> Result<(), DeploymentValidationError> {
+ let span = tracing::info_span!(
+ "validate_template_ownership",
+ template_id = stack_template_id
+ );
+
+ // First check if template even has a product
+ // Note: We need template ID as i32 for User Service query
+ // For now, we'll just check ownership directly
+ let owns_template = self
+ .user_service_connector
+ .user_owns_template(user_token, stack_template_id)
+ .instrument(span.clone())
+ .await
+ .map_err(|e| DeploymentValidationError::ValidationFailed {
+ reason: format!("Failed to check template ownership: {}", e),
+ })?;
+
+ if !owns_template {
+ // If user doesn't own, they may need to purchase
+ // In a real scenario, we'd fetch price from User Service
+ return Err(DeploymentValidationError::TemplateNotPurchased {
+ template_id: stack_template_id.to_string(),
+ product_price: None,
+ });
+ }
+
+ tracing::info!("User owns template, allowing deployment");
+ Ok(())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use std::sync::Arc;
+
+ #[test]
+ fn test_validation_error_display() {
+ let err = DeploymentValidationError::InsufficientPlan {
+ required_plan: "professional".to_string(),
+ user_plan: "basic".to_string(),
+ };
+ let msg = err.to_string();
+ assert!(msg.contains("professional"));
+ assert!(msg.contains("basic"));
+ }
+
+ #[test]
+ fn test_template_not_purchased_error() {
+ let err = DeploymentValidationError::TemplateNotPurchased {
+ template_id: "template-123".to_string(),
+ product_price: Some(99.99),
+ };
+ let msg = err.to_string();
+ assert!(msg.contains("99.99"));
+ assert!(msg.contains("purchase"));
+ }
+
+ #[test]
+ fn test_template_not_purchased_error_no_price() {
+ let err = DeploymentValidationError::TemplateNotPurchased {
+ template_id: "template-456".to_string(),
+ product_price: None,
+ };
+ let msg = err.to_string();
+ assert!(msg.contains("template-456"));
+ assert!(msg.contains("purchase"));
+ }
+
+ #[test]
+ fn test_template_not_found_error() {
+ let err = DeploymentValidationError::TemplateNotFound {
+ template_id: "missing-template".to_string(),
+ };
+ let msg = err.to_string();
+ assert!(msg.contains("missing-template"));
+ assert!(msg.contains("marketplace"));
+ }
+
+ #[test]
+ fn test_validation_failed_error() {
+ let err = DeploymentValidationError::ValidationFailed {
+ reason: "User Service unavailable".to_string(),
+ };
+ let msg = err.to_string();
+ assert!(msg.contains("unavailable"));
+ }
+
+ /// Test deployment validator creation
+ #[test]
+ fn test_deployment_validator_creation() {
+ let connector = Arc::new(super::super::mock::MockUserServiceConnector);
+ let _validator = DeploymentValidator::new(connector);
+ // Validator created successfully - no need for additional assertions
+ }
+
+ /// Test that InsufficientPlan error message includes both plans
+ #[test]
+ fn test_error_message_includes_both_plans() {
+ let error = DeploymentValidationError::InsufficientPlan {
+ required_plan: "enterprise".to_string(),
+ user_plan: "basic".to_string(),
+ };
+ let message = error.to_string();
+ assert!(message.contains("enterprise"));
+ assert!(message.contains("basic"));
+ assert!(message.contains("subscription"));
+ }
+
+ /// Test that TemplateNotPurchased error shows price
+ #[test]
+ fn test_template_not_purchased_shows_price() {
+ let error = DeploymentValidationError::TemplateNotPurchased {
+ template_id: "ai-stack".to_string(),
+ product_price: Some(49.99),
+ };
+ let message = error.to_string();
+ assert!(message.contains("49.99"));
+ assert!(message.contains("pro stack"));
+ }
+
+ /// Test Debug trait for errors
+ #[test]
+ fn test_error_debug_display() {
+ let err = DeploymentValidationError::TemplateNotFound {
+ template_id: "template-123".to_string(),
+ };
+ let debug_str = format!("{:?}", err);
+ assert!(debug_str.contains("TemplateNotFound"));
+ }
+
+ /// Test Clone trait for errors
+ #[test]
+ fn test_error_clone() {
+ let err1 = DeploymentValidationError::InsufficientPlan {
+ required_plan: "professional".to_string(),
+ user_plan: "basic".to_string(),
+ };
+ let err2 = err1.clone();
+ assert_eq!(err1.to_string(), err2.to_string());
+ }
+
+ /// Test that error messages are user-friendly and actionable
+ #[test]
+ fn test_error_messages_are_user_friendly() {
+ // InsufficientPlan should guide users to upgrade
+ let plan_err = DeploymentValidationError::InsufficientPlan {
+ required_plan: "professional".to_string(),
+ user_plan: "basic".to_string(),
+ };
+ assert!(plan_err.to_string().contains("subscription"));
+ assert!(plan_err.to_string().contains("professional"));
+
+ // TemplateNotPurchased should direct to marketplace
+ let purchase_err = DeploymentValidationError::TemplateNotPurchased {
+ template_id: "premium-stack".to_string(),
+ product_price: Some(99.99),
+ };
+ assert!(purchase_err.to_string().contains("marketplace"));
+
+ // ValidationFailed should explain the issue
+ let validation_err = DeploymentValidationError::ValidationFailed {
+ reason: "Cannot connect to marketplace service".to_string(),
+ };
+ assert!(validation_err.to_string().contains("Cannot connect"));
+ }
+
+ /// Test all error variants can be created
+ #[test]
+ fn test_all_error_variants_creation() {
+ let _insufficient_plan = DeploymentValidationError::InsufficientPlan {
+ required_plan: "pro".to_string(),
+ user_plan: "basic".to_string(),
+ };
+
+ let _not_purchased = DeploymentValidationError::TemplateNotPurchased {
+ template_id: "id".to_string(),
+ product_price: Some(50.0),
+ };
+
+ let _not_found = DeploymentValidationError::TemplateNotFound {
+ template_id: "id".to_string(),
+ };
+
+ let _failed = DeploymentValidationError::ValidationFailed {
+ reason: "test".to_string(),
+ };
+
+ // If we get here, all variants can be constructed
+ }
+}
diff --git a/src/connectors/user_service/marketplace_webhook.rs b/src/connectors/user_service/marketplace_webhook.rs
new file mode 100644
index 0000000..780f23c
--- /dev/null
+++ b/src/connectors/user_service/marketplace_webhook.rs
@@ -0,0 +1,581 @@
+//! Marketplace webhook sender for User Service integration
+//!
+//! Sends webhooks to User Service when marketplace templates change status.
+//! This implements Flow 3 from PAYMENT_MODEL.md: Creator publishes template → Product created in User Service
+//!
+//! **Architecture**: One-way webhooks from Stacker to User Service.
+//! - No bi-directional queries on approval
+//! - Bearer token authentication using STACKER_SERVICE_TOKEN
+//! - Template approval does not block if webhook send fails (async/retry pattern)
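+//!
+//! A minimal sending sketch (the approval-handler wiring around it is
+//! illustrative, not part of this module):
+//!
+//! ```ignore
+//! let sender = MarketplaceWebhookSender::from_env()?;
+//! match sender.send_template_approved(&template, &vendor_id, category_code).await {
+//!     Ok(resp) => tracing::info!("User Service product: {:?}", resp.product_id),
+//!     Err(err) => tracing::warn!("webhook failed, approval continues: {}", err),
+//! }
+//! ```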
+use serde::{Deserialize, Serialize};
+use std::sync::Arc;
+use tokio::sync::Mutex;
+use tracing::Instrument;
+
+use crate::connectors::ConnectorError;
+use crate::models;
+
+/// Marketplace webhook payload sent to User Service
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct MarketplaceWebhookPayload {
+ /// Action type: "template_approved", "template_updated", or "template_rejected"
+ pub action: String,
+
+ /// Stacker template UUID (as string)
+ pub stack_template_id: String,
+
+ /// External ID for User Service product (UUID as string or i32, same as stack_template_id)
+ pub external_id: String,
+
+ /// Product code (slug-based identifier)
+ pub code: Option<String>,
+
+ /// Template name
+ pub name: Option<String>,
+
+ /// Template description
+ pub description: Option<String>,
+
+ /// Price in specified currency (if not free)
+ pub price: Option<f64>,
+
+ /// Billing cycle: "one_time" or "monthly"/"yearly"
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub billing_cycle: Option<String>,
+
+ /// Currency code (USD, EUR, etc.)
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub currency: Option<String>,
+
+ /// Creator/vendor user ID from Stacker
+ pub vendor_user_id: Option<String>,
+
+ /// Vendor name or email
+ pub vendor_name: Option<String>,
+
+ /// Category of template
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub category: Option<String>,
+
+ /// Tags/keywords
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub tags: Option<serde_json::Value>,
+}
+
+/// Response from User Service webhook endpoint
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct WebhookResponse {
+ pub success: bool,
+ pub message: Option<String>,
+ pub product_id: Option<String>,
+}
+
+/// Configuration for webhook sender
+#[derive(Debug, Clone)]
+pub struct WebhookSenderConfig {
+ /// User Service base URL (e.g., "http://user:4100")
+ pub base_url: String,
+
+ /// Bearer token for service-to-service authentication
+ pub bearer_token: String,
+
+ /// HTTP client timeout in seconds
+ pub timeout_secs: u64,
+
+ /// Number of retry attempts on failure
+ pub retry_attempts: usize,
+}
+
+impl WebhookSenderConfig {
+ /// Create from environment variables
+ pub fn from_env() -> Result<Self, String> {
+ let base_url = std::env::var("URL_SERVER_USER")
+ .or_else(|_| std::env::var("USER_SERVICE_BASE_URL"))
+ .map_err(|_| "USER_SERVICE_BASE_URL not configured".to_string())?;
+
+ let bearer_token = std::env::var("STACKER_SERVICE_TOKEN")
+ .map_err(|_| "STACKER_SERVICE_TOKEN not configured".to_string())?;
+
+ Ok(Self {
+ base_url,
+ bearer_token,
+ timeout_secs: 10,
+ retry_attempts: 3,
+ })
+ }
+}
+
+/// Sends webhooks to User Service when marketplace templates change
+pub struct MarketplaceWebhookSender {
+ config: WebhookSenderConfig,
+ http_client: reqwest::Client,
+ // Track webhook deliveries in-memory (simple approach)
+ pending_webhooks: Arc<Mutex<Vec<MarketplaceWebhookPayload>>>,
+}
+
+impl MarketplaceWebhookSender {
+ /// Create new webhook sender with configuration
+ pub fn new(config: WebhookSenderConfig) -> Self {
+ let timeout = std::time::Duration::from_secs(config.timeout_secs);
+ let http_client = reqwest::Client::builder()
+ .timeout(timeout)
+ .build()
+ .expect("Failed to create HTTP client");
+
+ Self {
+ config,
+ http_client,
+ pending_webhooks: Arc::new(Mutex::new(Vec::new())),
+ }
+ }
+
+ /// Create from environment variables
+ pub fn from_env() -> Result<Self, String> {
+ let config = WebhookSenderConfig::from_env()?;
+ Ok(Self::new(config))
+ }
+
+ /// Send template approved webhook to User Service
+ /// Creates/updates product in User Service marketplace
+ pub async fn send_template_approved(
+ &self,
+ template: &models::marketplace::StackTemplate,
+ vendor_id: &str,
+ category_code: Option<String>,
+ ) -> Result<WebhookResponse, ConnectorError> {
+ let span = tracing::info_span!(
+ "send_template_approved_webhook",
+ template_id = %template.id,
+ vendor_id = vendor_id
+ );
+
+ let payload = MarketplaceWebhookPayload {
+ action: "template_approved".to_string(),
+ stack_template_id: template.id.to_string(),
+ external_id: template.id.to_string(),
+ code: Some(template.slug.clone()),
+ name: Some(template.name.clone()),
+ description: template
+ .short_description
+ .clone()
+ .or_else(|| template.long_description.clone()),
+ price: None, // Pricing not stored in Stacker (User Service responsibility)
+ billing_cycle: None,
+ currency: None,
+ vendor_user_id: Some(vendor_id.to_string()),
+ vendor_name: Some(vendor_id.to_string()),
+ category: category_code,
+ tags: if let serde_json::Value::Array(_) = template.tags {
+ Some(template.tags.clone())
+ } else {
+ None
+ },
+ };
+
+ self.send_webhook(&payload).instrument(span).await
+ }
+
+ /// Send template updated webhook to User Service
+ /// Updates product metadata/details in User Service
+ pub async fn send_template_updated(
+ &self,
+ template: &models::marketplace::StackTemplate,
+ vendor_id: &str,
+ category_code: Option<String>,
+ ) -> Result<WebhookResponse, ConnectorError> {
+ let span = tracing::info_span!(
+ "send_template_updated_webhook",
+ template_id = %template.id
+ );
+
+ let payload = MarketplaceWebhookPayload {
+ action: "template_updated".to_string(),
+ stack_template_id: template.id.to_string(),
+ external_id: template.id.to_string(),
+ code: Some(template.slug.clone()),
+ name: Some(template.name.clone()),
+ description: template
+ .short_description
+ .clone()
+ .or_else(|| template.long_description.clone()),
+ price: None,
+ billing_cycle: None,
+ currency: None,
+ vendor_user_id: Some(vendor_id.to_string()),
+ vendor_name: Some(vendor_id.to_string()),
+ category: category_code,
+ tags: if let serde_json::Value::Array(_) = template.tags {
+ Some(template.tags.clone())
+ } else {
+ None
+ },
+ };
+
+ self.send_webhook(&payload).instrument(span).await
+ }
+
+ /// Send template rejected webhook to User Service
+ /// Deactivates product in User Service
+ pub async fn send_template_rejected(
+ &self,
+ stack_template_id: &str,
+ ) -> Result<WebhookResponse, ConnectorError> {
+ let span = tracing::info_span!(
+ "send_template_rejected_webhook",
+ template_id = stack_template_id
+ );
+
+ let payload = MarketplaceWebhookPayload {
+ action: "template_rejected".to_string(),
+ stack_template_id: stack_template_id.to_string(),
+ external_id: stack_template_id.to_string(),
+ code: None,
+ name: None,
+ description: None,
+ price: None,
+ billing_cycle: None,
+ currency: None,
+ vendor_user_id: None,
+ vendor_name: None,
+ category: None,
+ tags: None,
+ };
+
+ self.send_webhook(&payload).instrument(span).await
+ }
+
+ /// Internal method to send webhook with retries
+ async fn send_webhook(
+ &self,
+ payload: &MarketplaceWebhookPayload,
+ ) -> Result<WebhookResponse, ConnectorError> {
+ let url = format!("{}/marketplace/sync", self.config.base_url);
+
+ let mut attempt = 0;
+ loop {
+ attempt += 1;
+
+ let req = self
+ .http_client
+ .post(&url)
+ .json(payload)
+ .header(
+ "Authorization",
+ format!("Bearer {}", self.config.bearer_token),
+ )
+ .header("Content-Type", "application/json");
+
+ match req.send().await {
+ Ok(resp) => match resp.status().as_u16() {
+ 200 | 201 => {
+ let text = resp
+ .text()
+ .await
+ .map_err(|e| ConnectorError::HttpError(e.to_string()))?;
+ return serde_json::from_str::<WebhookResponse>(&text)
+ .map_err(|_| ConnectorError::InvalidResponse(text));
+ }
+ 401 => {
+ return Err(ConnectorError::Unauthorized(
+ "Invalid service token for User Service webhook".to_string(),
+ ));
+ }
+ 404 => {
+ return Err(ConnectorError::NotFound(
+ "/marketplace/sync endpoint not found".to_string(),
+ ));
+ }
+ 500..=599 => {
+ // Retry on server errors
+ if attempt < self.config.retry_attempts {
+ let backoff = std::time::Duration::from_millis(
+ 100 * 2_u64.pow((attempt - 1) as u32),
+ );
+ tracing::warn!(
+ "User Service webhook failed with {}, retrying after {:?}",
+ resp.status(),
+ backoff
+ );
+ tokio::time::sleep(backoff).await;
+ continue;
+ }
+ return Err(ConnectorError::ServiceUnavailable(format!(
+ "User Service returned {}: webhook send failed",
+ resp.status()
+ )));
+ }
+ status => {
+ return Err(ConnectorError::HttpError(format!(
+ "Unexpected status code: {}",
+ status
+ )));
+ }
+ },
+ Err(e) if e.is_timeout() => {
+ if attempt < self.config.retry_attempts {
+ let backoff =
+ std::time::Duration::from_millis(100 * 2_u64.pow((attempt - 1) as u32));
+ tracing::warn!(
+ "User Service webhook timeout, retrying after {:?}",
+ backoff
+ );
+ tokio::time::sleep(backoff).await;
+ continue;
+ }
+ return Err(ConnectorError::ServiceUnavailable(
+ "Webhook send timeout".to_string(),
+ ));
+ }
+ Err(e) => {
+ return Err(ConnectorError::HttpError(format!(
+ "Webhook send failed: {}",
+ e
+ )));
+ }
+ }
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_webhook_payload_serialization() {
+ let payload = MarketplaceWebhookPayload {
+ action: "template_approved".to_string(),
+ stack_template_id: "550e8400-e29b-41d4-a716-446655440000".to_string(),
+ external_id: "550e8400-e29b-41d4-a716-446655440000".to_string(),
+ code: Some("ai-agent-stack-pro".to_string()),
+ name: Some("AI Agent Stack Pro".to_string()),
+ description: Some("Advanced AI agent template".to_string()),
+ price: Some(99.99),
+ billing_cycle: Some("one_time".to_string()),
+ currency: Some("USD".to_string()),
+ vendor_user_id: Some("user-456".to_string()),
+ vendor_name: Some("alice@example.com".to_string()),
+ category: Some("AI Agents".to_string()),
+ tags: Some(serde_json::json!(["ai", "agents"])),
+ };
+
+ let json = serde_json::to_string(&payload).expect("Failed to serialize");
+ assert!(json.contains("template_approved"));
+ assert!(json.contains("ai-agent-stack-pro"));
+
+ // Verify all fields are present
+ assert!(json.contains("550e8400-e29b-41d4-a716-446655440000"));
+ assert!(json.contains("AI Agent Stack Pro"));
+ assert!(json.contains("99.99"));
+ }
+
+ #[test]
+ fn test_webhook_payload_with_rejection() {
+ let payload = MarketplaceWebhookPayload {
+ action: "template_rejected".to_string(),
+ stack_template_id: "550e8400-e29b-41d4-a716-446655440000".to_string(),
+ external_id: "550e8400-e29b-41d4-a716-446655440000".to_string(),
+ code: None,
+ name: None,
+ description: None,
+ price: None,
+ billing_cycle: None,
+ currency: None,
+ vendor_user_id: None,
+ vendor_name: None,
+ category: None,
+ tags: None,
+ };
+
+ let json = serde_json::to_string(&payload).expect("Failed to serialize");
+ assert!(json.contains("template_rejected"));
+ assert!(!json.contains("ai-agent"));
+ }
+
+ /// Test webhook payload for approved template action
+ #[test]
+ fn test_webhook_payload_template_approved() {
+ let payload = MarketplaceWebhookPayload {
+ action: "template_approved".to_string(),
+ stack_template_id: "550e8400-e29b-41d4-a716-446655440000".to_string(),
+ external_id: "550e8400-e29b-41d4-a716-446655440000".to_string(),
+ code: Some("cms-starter".to_string()),
+ name: Some("CMS Starter Template".to_string()),
+ description: Some("Complete CMS setup".to_string()),
+ price: Some(49.99),
+ billing_cycle: Some("one_time".to_string()),
+ currency: Some("USD".to_string()),
+ vendor_user_id: Some("vendor-123".to_string()),
+ vendor_name: Some("vendor@example.com".to_string()),
+ category: Some("CMS".to_string()),
+ tags: Some(serde_json::json!(["cms", "wordpress"])),
+ };
+
+ assert_eq!(payload.action, "template_approved");
+ assert_eq!(payload.code, Some("cms-starter".to_string()));
+ assert_eq!(payload.price, Some(49.99));
+ }
+
+ /// Test webhook payload for updated template action
+ #[test]
+ fn test_webhook_payload_template_updated() {
+ let payload = MarketplaceWebhookPayload {
+ action: "template_updated".to_string(),
+ stack_template_id: "550e8400-e29b-41d4-a716-446655440001".to_string(),
+ external_id: "550e8400-e29b-41d4-a716-446655440001".to_string(),
+ code: Some("cms-starter".to_string()),
+ name: Some("CMS Starter Template v2".to_string()),
+ description: Some("Updated CMS setup with new features".to_string()),
+ price: Some(59.99), // Price updated
+ billing_cycle: Some("one_time".to_string()),
+ currency: Some("USD".to_string()),
+ vendor_user_id: Some("vendor-123".to_string()),
+ vendor_name: Some("vendor@example.com".to_string()),
+ category: Some("CMS".to_string()),
+ tags: Some(serde_json::json!(["cms", "wordpress", "v2"])),
+ };
+
+ assert_eq!(payload.action, "template_updated");
+ assert_eq!(payload.name, Some("CMS Starter Template v2".to_string()));
+ assert_eq!(payload.price, Some(59.99));
+ }
+
+ /// Test webhook payload for free template
+ #[test]
+ fn test_webhook_payload_free_template() {
+ let payload = MarketplaceWebhookPayload {
+ action: "template_approved".to_string(),
+ stack_template_id: "550e8400-e29b-41d4-a716-446655440002".to_string(),
+ external_id: "550e8400-e29b-41d4-a716-446655440002".to_string(),
+ code: Some("basic-blog".to_string()),
+ name: Some("Basic Blog Template".to_string()),
+ description: Some("Free blog template".to_string()),
+ price: None, // Free template
+ billing_cycle: None,
+ currency: None,
+ vendor_user_id: None,
+ vendor_name: None,
+ category: Some("CMS".to_string()),
+ tags: Some(serde_json::json!(["blog", "free"])),
+ };
+
+ assert_eq!(payload.action, "template_approved");
+ assert_eq!(payload.price, None);
+ assert_eq!(payload.billing_cycle, None);
+ }
+
+ /// Test webhook sender config from environment
+ #[test]
+ fn test_webhook_sender_config_creation() {
+ let config = WebhookSenderConfig {
+ base_url: "http://user:4100".to_string(),
+ bearer_token: "test-token-123".to_string(),
+ timeout_secs: 10,
+ retry_attempts: 3,
+ };
+
+ assert_eq!(config.base_url, "http://user:4100");
+ assert_eq!(config.bearer_token, "test-token-123");
+ assert_eq!(config.timeout_secs, 10);
+ assert_eq!(config.retry_attempts, 3);
+ }
+
+ /// Test that MarketplaceWebhookSender creates successfully
+ #[test]
+ fn test_webhook_sender_creation() {
+ let config = WebhookSenderConfig {
+ base_url: "http://user:4100".to_string(),
+ bearer_token: "test-token".to_string(),
+ timeout_secs: 10,
+ retry_attempts: 3,
+ };
+
+ let sender = MarketplaceWebhookSender::new(config);
+ // Just verify sender was created without panicking
+ assert!(sender.pending_webhooks.blocking_lock().is_empty());
+ }
+
+ /// Test webhook response deserialization
+ #[test]
+ fn test_webhook_response_deserialization() {
+ let json = serde_json::json!({
+ "success": true,
+ "message": "Product created successfully",
+ "product_id": "product-123"
+ });
+
+ let response: WebhookResponse = serde_json::from_value(json).unwrap();
+ assert!(response.success);
+ assert_eq!(
+ response.message,
+ Some("Product created successfully".to_string())
+ );
+ assert_eq!(response.product_id, Some("product-123".to_string()));
+ }
+
+ /// Test webhook response with failure
+ #[test]
+ fn test_webhook_response_failure() {
+ let json = serde_json::json!({
+ "success": false,
+ "message": "Template not found",
+ "product_id": null
+ });
+
+ let response: WebhookResponse = serde_json::from_value(json).unwrap();
+ assert!(!response.success);
+ assert_eq!(response.message, Some("Template not found".to_string()));
+ assert_eq!(response.product_id, None);
+ }
+
+ /// Test payload with all optional fields populated
+ #[test]
+ fn test_webhook_payload_all_fields_populated() {
+ let payload = MarketplaceWebhookPayload {
+ action: "template_approved".to_string(),
+ stack_template_id: "template-uuid".to_string(),
+ external_id: "external-id".to_string(),
+ code: Some("complex-template".to_string()),
+ name: Some("Complex Template".to_string()),
+ description: Some("A complex template with many features".to_string()),
+ price: Some(199.99),
+ billing_cycle: Some("monthly".to_string()),
+ currency: Some("EUR".to_string()),
+ vendor_user_id: Some("vendor-id".to_string()),
+ vendor_name: Some("John Doe".to_string()),
+ category: Some("Enterprise".to_string()),
+ tags: Some(serde_json::json!(["enterprise", "complex", "saas"])),
+ };
+
+ // Verify all fields are accessible
+ assert_eq!(payload.action, "template_approved");
+ assert_eq!(payload.billing_cycle, Some("monthly".to_string()));
+ assert_eq!(payload.currency, Some("EUR".to_string()));
+ assert_eq!(payload.price, Some(199.99));
+ }
+
+ /// Test payload minimal fields (only required ones)
+ #[test]
+ fn test_webhook_payload_minimal_fields() {
+ let payload = MarketplaceWebhookPayload {
+ action: "template_rejected".to_string(),
+ stack_template_id: "template-uuid".to_string(),
+ external_id: "external-id".to_string(),
+ code: None,
+ name: None,
+ description: None,
+ price: None,
+ billing_cycle: None,
+ currency: None,
+ vendor_user_id: None,
+ vendor_name: None,
+ category: None,
+ tags: None,
+ };
+
+ // Should serialize without errors even with all optional fields as None
+ let json = serde_json::to_string(&payload).expect("Should serialize");
+ assert!(json.contains("template_rejected"));
+ assert!(json.contains("external_id"));
+ }
+}
diff --git a/src/connectors/user_service/mod.rs b/src/connectors/user_service/mod.rs
new file mode 100644
index 0000000..49903cf
--- /dev/null
+++ b/src/connectors/user_service/mod.rs
@@ -0,0 +1,1316 @@
+pub mod category_sync;
+pub mod deployment_validator;
+pub mod marketplace_webhook;
+
+pub use category_sync::sync_categories_from_user_service;
+pub use deployment_validator::{DeploymentValidationError, DeploymentValidator};
+pub use marketplace_webhook::{
+ MarketplaceWebhookPayload, MarketplaceWebhookSender, WebhookResponse, WebhookSenderConfig,
+};
+
+use super::config::UserServiceConfig;
+use super::errors::ConnectorError;
+use actix_web::web;
+use serde::{Deserialize, Serialize};
+use std::sync::Arc;
+use tracing::Instrument;
+use uuid::Uuid;
+
+/// Response from User Service when creating a stack from marketplace template
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct StackResponse {
+ pub id: i32,
+ pub user_id: String,
+ pub name: String,
+ pub marketplace_template_id: Option<String>,
+ pub is_from_marketplace: bool,
+ pub template_version: Option<String>,
+}
+
+/// User's current plan information
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct UserPlanInfo {
+ pub user_id: String,
+ pub plan_name: String,
+ pub plan_description: Option<String>,
+ pub tier: Option<String>,
+ pub active: bool,
+ pub started_at: Option<String>,
+ pub expires_at: Option<String>,
+}
+
+/// Available plan definition
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct PlanDefinition {
+ pub name: String,
+ pub description: Option<String>,
+ pub tier: Option<String>,
+ pub features: Option<serde_json::Value>,
+}
+
+/// Product owned by a user (from /oauth_server/api/me response)
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct UserProduct {
+ pub id: Option<String>,
+ pub name: String,
+ pub code: String,
+ pub product_type: String,
+ #[serde(default)]
+ pub external_id: Option<String>, // Stack template ID from Stacker
+ #[serde(default)]
+ pub owned_since: Option<String>,
+}
+
+/// User profile with ownership information
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct UserProfile {
+ pub email: String,
+ pub plan: Option<UserPlanInfo>, // Plan details from existing endpoint
+ #[serde(default)]
+ pub products: Vec<UserProduct>, // List of owned products
+}
+
+/// Product information from User Service catalog
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct ProductInfo {
+ pub id: String,
+ pub name: String,
+ pub code: String,
+ pub product_type: String,
+ pub external_id: Option<String>,
+ pub price: Option<f64>,
+ pub billing_cycle: Option<String>,
+ pub currency: Option<String>,
+ pub vendor_id: Option<String>,
+ pub is_active: bool,
+}
+
+/// Category information from User Service
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct CategoryInfo {
+ #[serde(rename = "_id")]
+ pub id: i32,
+ pub name: String,
+ pub title: String,
+ #[serde(default)]
+ pub priority: Option<i32>,
+}
+
+/// Trait for User Service integration
+/// Allows mocking in tests and swapping implementations
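+///
+/// A test sketch against a mock implementation (a `MockUserServiceConnector`
+/// like the one referenced in the module-level docs is assumed to exist):
+///
+/// ```ignore
+/// let connector: Arc<dyn UserServiceConnector> = Arc::new(MockUserServiceConnector);
+/// // Route and validator logic can now be exercised without real HTTP calls.
+/// let owns = connector.user_owns_template("user-token", "template-id").await?;
+/// ```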
+#[async_trait::async_trait]
+pub trait UserServiceConnector: Send + Sync {
+ /// Create a new stack in User Service from a marketplace template
+ async fn create_stack_from_template(
+ &self,
+ marketplace_template_id: &Uuid,
+ user_id: &str,
+ template_version: &str,
+ name: &str,
+ stack_definition: serde_json::Value,
+ ) -> Result<StackResponse, ConnectorError>;
+
+ /// Fetch stack details from User Service
+ async fn get_stack(
+ &self,
+ stack_id: i32,
+ user_id: &str,
+ ) -> Result<StackResponse, ConnectorError>;
+
+ /// List user's stacks
+ async fn list_stacks(&self, user_id: &str) -> Result<Vec<StackResponse>, ConnectorError>;
+
+ /// Check if user has access to a specific plan
+ /// Returns true if user's current plan allows access to required_plan_name
+ async fn user_has_plan(
+ &self,
+ user_id: &str,
+ required_plan_name: &str,
+ ) -> Result<bool, ConnectorError>;
+
+ /// Get user's current plan information
+ async fn get_user_plan(&self, user_id: &str) -> Result<UserPlanInfo, ConnectorError>;
+
+ /// List all available plans that users can subscribe to
+ async fn list_available_plans(&self) -> Result<Vec<PlanDefinition>, ConnectorError>;
+
+ /// Get user profile with owned products list
+ /// Calls GET /oauth_server/api/me and returns profile with products array
+ async fn get_user_profile(&self, user_token: &str) -> Result<UserProfile, ConnectorError>;
+
+ /// Get product information for a marketplace template
+ /// Calls GET /api/1.0/products?external_id={template_id}&product_type=template
+ async fn get_template_product(
+ &self,
+ stack_template_id: i32,
+ ) -> Result<Option<ProductInfo>, ConnectorError>;
+
+ /// Check if user owns a specific template product
+ /// Returns true if user has the template in their products list
+ async fn user_owns_template(
+ &self,
+ user_token: &str,
+ stack_template_id: &str,
+ ) -> Result<bool, ConnectorError>;
+
+ /// Get list of categories from User Service
+ /// Calls GET /api/1.0/category and returns available categories
+ async fn get_categories(&self) -> Result<Vec<CategoryInfo>, ConnectorError>;
+}
+
+/// HTTP-based User Service client
+pub struct UserServiceClient {
+ base_url: String,
+ http_client: reqwest::Client,
+ auth_token: Option<String>,
+ retry_attempts: usize,
+}
+
+impl UserServiceClient {
+ /// Create new User Service client
+ pub fn new(config: UserServiceConfig) -> Self {
+ let timeout = std::time::Duration::from_secs(config.timeout_secs);
+ let http_client = reqwest::Client::builder()
+ .timeout(timeout)
+ .build()
+ .expect("Failed to create HTTP client");
+
+ Self {
+ base_url: config.base_url,
+ http_client,
+ auth_token: config.auth_token,
+ retry_attempts: config.retry_attempts,
+ }
+ }
+
+ /// Build authorization header if token configured
+ fn auth_header(&self) -> Option<String> {
+ self.auth_token
+ .as_ref()
+ .map(|token| format!("Bearer {}", token))
+ }
+
+ /// Retry helper with exponential backoff
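+ ///
+ /// The delay doubles after each failed attempt (200ms, 400ms, 800ms, ...) until
+ /// `retry_attempts` is exhausted, at which point the last error is returned.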
+ async fn retry_request<T, F>(&self, mut f: F) -> Result<T, ConnectorError>
+ where
+ F: FnMut() -> futures::future::BoxFuture<'static, Result<T, ConnectorError>>,
+ {
+ let mut attempt = 0;
+ loop {
+ match f().await {
+ Ok(result) => return Ok(result),
+ Err(err) => {
+ attempt += 1;
+ if attempt >= self.retry_attempts {
+ return Err(err);
+ }
+ // Exponential backoff: 200ms, 400ms, 800ms, and so on.
+ let backoff = std::time::Duration::from_millis(100 * 2_u64.pow(attempt as u32));
+ tokio::time::sleep(backoff).await;
+ }
+ }
+ }
+ }
+}
+
+#[async_trait::async_trait]
+impl UserServiceConnector for UserServiceClient {
+ async fn create_stack_from_template(
+ &self,
+ marketplace_template_id: &Uuid,
+ user_id: &str,
+ template_version: &str,
+ name: &str,
+ stack_definition: serde_json::Value,
+ ) -> Result<StackResponse, ConnectorError> {
+ let span = tracing::info_span!(
+ "user_service_create_stack",
+ template_id = %marketplace_template_id,
+ user_id = %user_id
+ );
+
+ let url = format!("{}/api/1.0/stacks", self.base_url);
+ let payload = serde_json::json!({
+ "name": name,
+ "marketplace_template_id": marketplace_template_id.to_string(),
+ "is_from_marketplace": true,
+ "template_version": template_version,
+ "stack_definition": stack_definition,
+ "user_id": user_id,
+ });
+
+ let mut req = self.http_client.post(&url).json(&payload);
+
+ if let Some(auth) = self.auth_header() {
+ req = req.header("Authorization", auth);
+ }
+
+ let resp = req
+ .send()
+ .instrument(span)
+ .await
+ .and_then(|resp| resp.error_for_status())
+ .map_err(|e| {
+ tracing::error!("create_stack error: {:?}", e);
+ ConnectorError::HttpError(format!("Failed to create stack: {}", e))
+ })?;
+
+ let text = resp
+ .text()
+ .await
+ .map_err(|e| ConnectorError::HttpError(e.to_string()))?;
+ serde_json::from_str::<StackResponse>(&text)
+ .map_err(|_| ConnectorError::InvalidResponse(text))
+ }
+
+ async fn get_stack(
+ &self,
+ stack_id: i32,
+ user_id: &str,
+ ) -> Result<StackResponse, ConnectorError> {
+ let span =
+ tracing::info_span!("user_service_get_stack", stack_id = stack_id, user_id = %user_id);
+
+ let url = format!("{}/api/1.0/stacks/{}", self.base_url, stack_id);
+ let mut req = self.http_client.get(&url);
+
+ if let Some(auth) = self.auth_header() {
+ req = req.header("Authorization", auth);
+ }
+
+ let resp = req.send().instrument(span).await.map_err(|e| {
+ if e.status().map_or(false, |s| s == 404) {
+ ConnectorError::NotFound(format!("Stack {} not found", stack_id))
+ } else {
+ ConnectorError::HttpError(format!("Failed to get stack: {}", e))
+ }
+ })?;
+
+ if resp.status() == 404 {
+ return Err(ConnectorError::NotFound(format!(
+ "Stack {} not found",
+ stack_id
+ )));
+ }
+
+ let text = resp
+ .text()
+ .await
+ .map_err(|e| ConnectorError::HttpError(e.to_string()))?;
+ serde_json::from_str::<StackResponse>(&text)
+ .map_err(|_| ConnectorError::InvalidResponse(text))
+ }
+
+ async fn list_stacks(&self, user_id: &str) -> Result<Vec<StackResponse>, ConnectorError> {
+ let span = tracing::info_span!("user_service_list_stacks", user_id = %user_id);
+
+ let url = format!("{}/api/1.0/stacks", self.base_url);
+ let mut req = self.http_client.post(&url);
+
+ if let Some(auth) = self.auth_header() {
+ req = req.header("Authorization", auth);
+ }
+
+ #[derive(Serialize)]
+ struct WhereFilter<'a> {
+ user_id: &'a str,
+ }
+
+ #[derive(Serialize)]
+ struct ListRequest<'a> {
+ r#where: WhereFilter<'a>,
+ }
+
+ let body = ListRequest {
+ r#where: WhereFilter { user_id },
+ };
+
+ #[derive(Deserialize)]
+ struct ListResponse {
+ _items: Vec<StackResponse>,
+ }
+
+ let resp = req
+ .json(&body)
+ .send()
+ .instrument(span)
+ .await
+ .and_then(|resp| resp.error_for_status())
+ .map_err(|e| {
+ tracing::error!("list_stacks error: {:?}", e);
+ ConnectorError::HttpError(format!("Failed to list stacks: {}", e))
+ })?;
+
+ let text = resp
+ .text()
+ .await
+ .map_err(|e| ConnectorError::HttpError(e.to_string()))?;
+ serde_json::from_str::