diff --git a/.github/workflows/xtest.yml b/.github/workflows/xtest.yml
index 773448892..dd68f24c7 100644
--- a/.github/workflows/xtest.yml
+++ b/.github/workflows/xtest.yml
@@ -28,6 +28,11 @@ on:
type: string
default: all
description: "SDK to focus on (go, js, java, all)"
+ otdfctl-source:
+ required: false
+ type: string
+ default: auto
+ description: "otdfctl source: 'auto' (standalone for releases, detect platform for head builds), 'standalone', or 'platform'"
workflow_call:
inputs:
platform-ref:
@@ -50,6 +55,10 @@ on:
required: false
type: string
default: all
+ otdfctl-source:
+ required: false
+ type: string
+ default: auto
schedule:
- cron: "30 6 * * *" # 0630 UTC
- cron: "0 5 * * 1,3" # 500 UTC (Monday, Wednesday)
@@ -78,6 +87,14 @@ jobs:
JS_REF: "${{ inputs.js-ref }}"
OTDFCTL_REF: "${{ inputs.otdfctl-ref }}"
JAVA_REF: "${{ inputs.java-ref }}"
+ # When explicitly set to 'platform', tells the Go resolver to resolve
+ # against opentdf/platform tags instead of the standalone otdfctl repo.
+ # In 'auto' mode, releases resolve from standalone; the detect-otdfctl
+ # step later probes the platform checkout for an embedded otdfctl
+ # directory, and setup-cli-tool acts on this only for versions that need
+ # a source checkout (head or artifact-install failure), reusing the
+ # platform checkout only when the resolved SHA matches.
+ OTDFCTL_SOURCE: "${{ inputs.otdfctl-source == 'platform' && 'platform' || '' }}"
steps:
- name: Validate focus-sdk input
if: ${{ inputs.focus-sdk != '' }}
@@ -170,7 +187,7 @@ jobs:
core.summary.addHeading('Versions under Test', 3);
- function artifactLink(sdkType, tag, release, head) {
+ function artifactLink(sdkType, tag, release, head, source) {
if (head || !release) return '';
const v = tag.replace(/^v/, '');
if (sdkType === 'js') {
@@ -182,7 +199,10 @@ jobs:
return `Maven Central`;
}
if (sdkType === 'go') {
- const url = `https://pkg.go.dev/github.com/opentdf/otdfctl@${encodeURIComponent(tag)}`;
+ const modulePath = source === 'platform'
+ ? `github.com/opentdf/platform/otdfctl`
+ : `github.com/opentdf/otdfctl`;
+ const url = `https://pkg.go.dev/${modulePath}@${encodeURIComponent(tag)}`;
return `pkg.go.dev`;
}
return '';
@@ -198,14 +218,15 @@ jobs:
const tagToSha = {};
const headTags = [];
- for (const { tag, head, sha, alias, err, release } of refInfo) {
- const sdkRepoUrl = `https://github.com/opentdf/${encodeURIComponent(sdkType == 'js' ? 'web-sdk' : sdkType == 'go' ? 'otdfctl' : sdkType == 'java' ? 'java-sdk' : sdkType)}`;
+ for (const { tag, head, sha, alias, err, release, source } of refInfo) {
+ const goRepoName = source === 'platform' ? 'platform' : 'otdfctl';
+ const sdkRepoUrl = `https://github.com/opentdf/${encodeURIComponent(sdkType == 'js' ? 'web-sdk' : sdkType == 'go' ? goRepoName : sdkType == 'java' ? 'java-sdk' : sdkType)}`;
const sdkLink = `${htmlEscape(sdkType)}`;
const commitLink = sha ? `${htmlEscape(sha.substring(0, 7))}` : ' . ';
const tagLink = (release && tag)
? `${htmlEscape(tag)}`
: tag ? htmlEscape(tag) : 'N/A';
- const artifactCell = artifactLink(sdkType, tag, release, head);
+ const artifactCell = artifactLink(sdkType, tag, release, head, source);
table.push([sdkLink, tagLink, commitLink, alias || 'N/A', artifactCell || 'N/A', err || 'N/A']);
if (err) {
errorCount += 1;
@@ -260,378 +281,24 @@ jobs:
run: |-
echo "EXTRA_KEYS=$(jq -c > "${GITHUB_OUTPUT}"
- ######## SPIN UP PLATFORM BACKEND #############
- - name: Check out and start up platform with deps/containers
- id: run-platform
- uses: opentdf/platform/test/start-up-with-containers@998929e5c66d41f928b90e6af7dbaa0a14302ca6 # watch-sh-fix
- with:
- platform-ref: ${{ fromJSON(needs.resolve-versions.outputs.platform-tag-to-sha)[matrix.platform-tag] }}
- ec-tdf-enabled: true
- extra-keys: ${{ steps.load-extra-keys.outputs.EXTRA_KEYS }}
- log-type: json
-
- - name: Install uv
- uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0
- - uses: bufbuild/buf-action@8f4a1456a0ab6a1eb80ba68e53832e6fcfacc16c # v1.3.0
- with:
- setup_only: true
- token: ${{ secrets.BUF_TOKEN }}
- version: "1.56.0"
-
- - name: Set up JDK
- uses: actions/setup-java@5896cecc08fd8a1fbdfaf517e29b571164b031f7
- with:
- java-version: "11"
- distribution: "adopt"
- server-id: github
-
- - name: Set up Node 22
- uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af
- with:
- node-version: "22.x"
-
- ######### CHECKOUT JS CLI #############
- - name: Configure js-sdk
- id: configure-js
- uses: ./otdftests/xtest/setup-cli-tool
- with:
- path: otdftests/xtest/sdk
- sdk: js
- version-info: "${{ needs.resolve-versions.outputs.js }}"
-
- - name: Cache npm
- if: fromJson(steps.configure-js.outputs.heads)[0] != null
- uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
- with:
- path: ~/.npm
- key: npm-${{ runner.os }}-${{ hashFiles('otdftests/xtest/sdk/js/src/**/package-lock.json') }}
- restore-keys: |
- npm-${{ runner.os }}-
-
- ######## SETUP THE JS CLI #############
- - name: build and setup the web-sdk cli
- id: build-web-sdk
- if: fromJson(steps.configure-js.outputs.heads)[0] != null
- run: |
- make
- working-directory: otdftests/xtest/sdk/js
-
- ######## CHECKOUT GO CLI #############
- - name: Configure otdfctl
- id: configure-go
- uses: ./otdftests/xtest/setup-cli-tool
- with:
- path: otdftests/xtest/sdk
- sdk: go
- version-info: "${{ needs.resolve-versions.outputs.go }}"
-
- - name: Cache Go modules
- if: fromJson(steps.configure-go.outputs.heads)[0] != null
- uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
- with:
- path: |
- ~/.cache/go-build
- ~/go/pkg/mod
- key: go-${{ runner.os }}-${{ hashFiles('otdftests/xtest/sdk/go/src/*/go.sum') }}
- restore-keys: |
- go-${{ runner.os }}-
-
- - name: Resolve otdfctl heads
- id: resolve-otdfctl-heads
- if: fromJson(steps.configure-go.outputs.heads)[0] != null
- run: |-
- echo "OTDFCTL_HEADS=$OTDFCTL_HEADS" >> "$GITHUB_ENV"
- env:
- OTDFCTL_HEADS: ${{ steps.configure-go.outputs.heads }}
-
- - name: Replace otdfctl go.mod packages, but only at head version of platform
- if: fromJson(steps.configure-go.outputs.heads)[0] != null && env.FOCUS_SDK == 'go' && contains(fromJSON(needs.resolve-versions.outputs.heads), matrix.platform-tag)
- env:
- PLATFORM_WORKING_DIR: ${{ steps.run-platform.outputs.platform-working-dir }}
- run: |-
- echo "Replacing go.mod packages..."
- PLATFORM_DIR_ABS="$(pwd)/${PLATFORM_WORKING_DIR}"
- OTDFCTL_DIR_ABS="$(pwd)/otdftests/xtest/sdk/go/src/"
- echo "PLATFORM_DIR_ABS: $PLATFORM_DIR_ABS"
- echo "OTDFCTL_DIR_ABS: $OTDFCTL_DIR_ABS"
- for head in $(echo "${OTDFCTL_HEADS}" | jq -r '.[]'); do
- echo "Processing head: $head"
- cd "${OTDFCTL_DIR_ABS}/$head"
- for m in lib/fixtures lib/ocrypto protocol/go sdk; do
- go mod edit -replace "github.com/opentdf/platform/$m=${PLATFORM_DIR_ABS}/$m"
- done
- go mod tidy
- done
-
- ######## SETUP THE GO CLI #############
- - name: Prepare go cli
- if: fromJson(steps.configure-go.outputs.heads)[0] != null
- run: |-
- make
- working-directory: otdftests/xtest/sdk/go
-
- ####### CHECKOUT JAVA SDK ##############
-
- - name: Configure java-sdk
- id: configure-java
- uses: ./otdftests/xtest/setup-cli-tool
- with:
- path: otdftests/xtest/sdk
- sdk: java
- version-info: "${{ needs.resolve-versions.outputs.java }}"
-
- - name: Cache Maven repository
- if: fromJson(steps.configure-java.outputs.heads)[0] != null
- uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
- with:
- path: ~/.m2/repository
- key: maven-${{ runner.os }}-${{ hashFiles('otdftests/xtest/sdk/java/src/**/pom.xml') }}
- restore-keys: |
- maven-${{ runner.os }}-
-
- - name: pre-release protocol buffers for java-sdk
- if: >-
- fromJson(steps.configure-java.outputs.heads)[0] != null
- && (env.FOCUS_SDK == 'go' || env.FOCUS_SDK == 'java')
- && contains(fromJSON(needs.resolve-versions.outputs.heads), matrix.platform-tag)
+ ######## GENERATE SHARD CONFIG #############
+ - name: Generate shard config
run: |-
- echo "Replacing .env files for java-sdk..."
- echo "Platform tag: $platform_tag"
- echo "Java version info: $java_version_info"
- for row in $(echo "$java_version_info" | jq -c '.[]'); do
- TAG=$(echo "$row" | jq -r '.tag')
- HEAD=$(echo "$row" | jq -r '.head')
- if [[ "$HEAD" == "true" ]]; then
- echo "Creating .env file for tag: [$TAG]; pointing to platform ref [$platform_tag]"
- echo "PLATFORM_BRANCH=$platform_ref" > "otdftests/xtest/sdk/java/${TAG}.env"
- else
- echo "Skipping .env file creation for release version [$TAG]"
- fi
- done
- env:
- java_version_info: ${{ needs.resolve-versions.outputs.java }}
- platform_ref: ${{ fromJSON(needs.resolve-versions.outputs.platform-tag-to-sha)[matrix.platform-tag] }}
- platform_tag: ${{ matrix.platform-tag }}
-
- ####### SETUP JAVA CLI ##############
- - name: Prepare java cli
- if: fromJson(steps.configure-java.outputs.heads)[0] != null
- run: |
- make
- working-directory: otdftests/xtest/sdk/java
- env:
- BUF_INPUT_HTTPS_USERNAME: opentdf-bot
- BUF_INPUT_HTTPS_PASSWORD: ${{ secrets.PERSONAL_ACCESS_TOKEN_OPENTDF }}
-
- ######## Configure test environment #############
- - name: Lookup current platform version
- id: platform-version
+ uv run --project otdftests/otdf-local otdf-local suite generate-shard \
+ --platform-ref "${{ matrix.platform-tag }}" \
+ --platform-sha "${{ fromJSON(needs.resolve-versions.outputs.platform-tag-to-sha)[matrix.platform-tag] }}" \
+ --sdk "${{ matrix.sdk }}" \
+ --go-ref "main" \
+ --java-ref "main" \
+ --js-ref "main" \
+ --output shard.yaml
+        # TODO: pull these SDK refs from needs.resolve-versions outputs instead
+        # of hard-coding 'main'; as written, released SDK versions are skipped.
+
+ ######## RUN THE SUITE #############
+ - name: Run suite
run: |-
- if ! go run ./service version; then
- # NOTE: the version command was added in 0.4.37
- echo "Error: Unable to get platform version; defaulting to tag: [$PLATFORM_TAG]"
- echo "PLATFORM_VERSION=$PLATFORM_TAG" >> "$GITHUB_ENV"
- exit
- fi
- # Older version commands output version to stderr; newer versions output to stdout
- PLATFORM_VERSION=$(go run ./service version 2>&1)
- echo "PLATFORM_VERSION=$PLATFORM_VERSION" >> "$GITHUB_ENV"
- echo "## Platform version output: [$PLATFORM_VERSION]"
- working-directory: ${{ steps.run-platform.outputs.platform-working-dir }}
- env:
- PLATFORM_TAG: ${{ matrix.platform-tag }}
-
- - name: Check key management support and prepare root key
- id: km-check
- run: |-
- OT_CONFIG_FILE="$(pwd)/opentdf.yaml"
- echo "OT_CONFIG_FILE=$OT_CONFIG_FILE" >> "$GITHUB_ENV"
- # Determine if the config declares the key_management field
- km_value=$(yq e '.services.kas.preview.key_management' "$OT_CONFIG_FILE" 2>/dev/null || echo "null")
- case "$km_value" in
- true|false)
- echo "KEY_MANAGEMENT_SUPPORTED=true" >> "$GITHUB_ENV"
- echo "supported=true" >> "$GITHUB_OUTPUT"
- ;;
- *)
- echo "KEY_MANAGEMENT_SUPPORTED=false" >> "$GITHUB_ENV"
- echo "supported=false" >> "$GITHUB_OUTPUT"
- ;;
- esac
- # Prepare a root key for use by additional KAS instances
- existing_root_key=$(yq e '.services.kas.root_key' "$OT_CONFIG_FILE" 2>/dev/null || echo "")
- if [ -n "$existing_root_key" ] && [ "$existing_root_key" != "null" ]; then
- echo "Using existing root key from config"
- echo "OT_ROOT_KEY=$existing_root_key" >> "$GITHUB_ENV"
- echo "root_key=$existing_root_key" >> "$GITHUB_OUTPUT"
- else
- echo "Generating a new root key for additional KAS"
- gen_root_key=$(openssl rand -hex 32)
- echo "OT_ROOT_KEY=$gen_root_key" >> "$GITHUB_ENV"
- echo "root_key=$gen_root_key" >> "$GITHUB_OUTPUT"
- fi
- working-directory: ${{ steps.run-platform.outputs.platform-working-dir }}
-
- - name: Install test dependencies
- run: uv sync
- working-directory: otdftests/xtest
- - name: Validate xtest helper library (tests of the test harness and its utilities)
- if: ${{ !inputs }}
- run: |-
- uv run pytest --html=test-results/helper-${FOCUS_SDK}-${PLATFORM_TAG}.html --self-contained-html --sdks-encrypt "${ENCRYPT_SDK}" test_self.py test_audit_logs.py
- working-directory: otdftests/xtest
- env:
- PLATFORM_TAG: ${{ matrix.platform-tag }}
-
- - name: Validate otdf-local integration tests
- if: ${{ !inputs }}
- run: |-
- uv sync
- uv run pytest --maxfail=1 --disable-warnings -v --tb=short -m integration
- working-directory: otdftests/otdf-local
- env:
- OTDF_LOCAL_PLATFORM_DIR: ${{ github.workspace }}/${{ steps.run-platform.outputs.platform-working-dir }}
-
- ######## RUN THE TESTS #############
- - name: Run legacy decryption tests
- run: |-
- uv run pytest -n auto --dist worksteal --html=test-results/sdk-${FOCUS_SDK}-${PLATFORM_TAG}.html --self-contained-html --sdks-encrypt "${ENCRYPT_SDK}" -ra -v --focus "$FOCUS_SDK" test_legacy.py
- working-directory: otdftests/xtest
- env:
- PLATFORM_DIR: "../../${{ steps.run-platform.outputs.platform-working-dir }}"
- PLATFORM_TAG: ${{ matrix.platform-tag }}
-
- - name: Run all standard xtests
- if: ${{ env.FOCUS_SDK == 'all' }}
- run: |-
- uv run pytest -n auto --dist loadscope --html=test-results/sdk-${FOCUS_SDK}-${PLATFORM_TAG}.html --self-contained-html --sdks-encrypt "${ENCRYPT_SDK}" -ra -v test_tdfs.py test_policytypes.py
- working-directory: otdftests/xtest
- env:
- PLATFORM_DIR: "../../${{ steps.run-platform.outputs.platform-working-dir }}"
- SCHEMA_FILE: "manifest.schema.json"
- PLATFORM_TAG: ${{ matrix.platform-tag }}
-
- - name: Run xtests focusing on a specific SDK
- if: ${{ env.FOCUS_SDK != 'all' }}
- run: |-
- uv run pytest -n auto --dist loadscope --html=test-results/sdk-${FOCUS_SDK}-${PLATFORM_TAG}.html --self-contained-html --sdks-encrypt "${ENCRYPT_SDK}" -ra -v --focus "$FOCUS_SDK" test_tdfs.py test_policytypes.py
- working-directory: otdftests/xtest
- env:
- PLATFORM_DIR: "../../${{ steps.run-platform.outputs.platform-working-dir }}"
- SCHEMA_FILE: "manifest.schema.json"
- PLATFORM_TAG: ${{ matrix.platform-tag }}
-
- ######## ATTRIBUTE BASED CONFIGURATION #############
-
- - name: Does platform support multikas?
- id: multikas
- run: |-
- if [[ $PLATFORM_TAG == main ]]; then
- echo "Main supports multikas"
- echo "supported=true" >> "$GITHUB_OUTPUT"
- elif awk -F. '{ if ($1 > 0 || ($1 == 0 && $2 > 4)) exit 0; else exit 1; }' <<< "${PLATFORM_VERSION#v}"; then
- echo "Selected version [$PLATFORM_VERSION] supports multikas"
- echo "supported=true" >> "$GITHUB_OUTPUT"
- else
- echo "At tag [$PLATFORM_TAG], [$PLATFORM_VERSION] probably does not support multikas"
- echo "supported=false" >> "$GITHUB_OUTPUT"
- fi
- env:
- PLATFORM_TAG: ${{ matrix.platform-tag }}
-
- - name: Start additional kas
- id: kas-alpha
- if: ${{ steps.multikas.outputs.supported == 'true' }}
- uses: opentdf/platform/test/start-additional-kas@998929e5c66d41f928b90e6af7dbaa0a14302ca6 # watch-sh-fix
- with:
- ec-tdf-enabled: true
- kas-name: alpha
- kas-port: 8181
- log-type: json
- root-key: ${{ steps.km-check.outputs.root_key }}
-
- - name: Start additional kas
- id: kas-beta
- if: ${{ steps.multikas.outputs.supported == 'true' }}
- uses: opentdf/platform/test/start-additional-kas@998929e5c66d41f928b90e6af7dbaa0a14302ca6 # watch-sh-fix
- with:
- ec-tdf-enabled: true
- kas-name: beta
- kas-port: 8282
- log-type: json
- root-key: ${{ steps.km-check.outputs.root_key }}
-
- - name: Start additional kas
- id: kas-gamma
- if: ${{ steps.multikas.outputs.supported == 'true' }}
- uses: opentdf/platform/test/start-additional-kas@998929e5c66d41f928b90e6af7dbaa0a14302ca6 # watch-sh-fix
- with:
- ec-tdf-enabled: true
- kas-name: gamma
- kas-port: 8383
- log-type: json
- root-key: ${{ steps.km-check.outputs.root_key }}
-
- - name: Start additional kas
- id: kas-delta
- if: ${{ steps.multikas.outputs.supported == 'true' }}
- uses: opentdf/platform/test/start-additional-kas@998929e5c66d41f928b90e6af7dbaa0a14302ca6 # watch-sh-fix
- with:
- ec-tdf-enabled: true
- kas-port: 8484
- kas-name: delta
- log-type: json
- root-key: ${{ steps.km-check.outputs.root_key }}
-
- - name: Start additional KM kas (km1)
- id: kas-km1
- if: ${{ steps.multikas.outputs.supported == 'true' }}
- uses: opentdf/platform/test/start-additional-kas@998929e5c66d41f928b90e6af7dbaa0a14302ca6 # watch-sh-fix
- with:
- ec-tdf-enabled: true
- key-management: ${{ steps.km-check.outputs.supported }}
- kas-name: km1
- kas-port: 8585
- log-type: json
- root-key: ${{ steps.km-check.outputs.root_key }}
-
- - name: Start additional KM kas (km2)
- id: kas-km2
- if: ${{ steps.multikas.outputs.supported == 'true' }}
- uses: opentdf/platform/test/start-additional-kas@998929e5c66d41f928b90e6af7dbaa0a14302ca6 # watch-sh-fix
- with:
- ec-tdf-enabled: true
- kas-name: km2
- key-management: ${{ steps.km-check.outputs.supported }}
- kas-port: 8686
- log-type: json
- root-key: ${{ steps.km-check.outputs.root_key }}
-
- - name: Run attribute based configuration tests
- if: ${{ steps.multikas.outputs.supported == 'true' }}
- run: >-
- uv run pytest
- -ra
- -v
- --numprocesses auto
- --dist loadscope
- --html test-results/attributes-${FOCUS_SDK}-${PLATFORM_TAG}.html
- --self-contained-html
- --audit-log-dir test-results/audit-logs
- --sdks-encrypt "${ENCRYPT_SDK}"
- --focus "$FOCUS_SDK"
- test_abac.py
- working-directory: otdftests/xtest
- env:
- PLATFORM_DIR: "../../${{ steps.run-platform.outputs.platform-working-dir }}"
- PLATFORM_TAG: ${{ matrix.platform-tag }}
- PLATFORM_LOG_FILE: "../../${{ steps.run-platform.outputs.platform-log-file }}"
- KAS_ALPHA_LOG_FILE: "../../${{ steps.kas-alpha.outputs.log-file }}"
- KAS_BETA_LOG_FILE: "../../${{ steps.kas-beta.outputs.log-file }}"
- KAS_GAMMA_LOG_FILE: "../../${{ steps.kas-gamma.outputs.log-file }}"
- KAS_DELTA_LOG_FILE: "../../${{ steps.kas-delta.outputs.log-file }}"
- KAS_KM1_LOG_FILE: "../../${{ steps.kas-km1.outputs.log-file }}"
- KAS_KM2_LOG_FILE: "../../${{ steps.kas-km2.outputs.log-file }}"
+ uv run --project otdftests/otdf-local otdf-local suite run shard.yaml
- name: Upload artifact
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
diff --git a/otdf-local/pyproject.toml b/otdf-local/pyproject.toml
index 1dbcedf2a..0c7d4a0af 100644
--- a/otdf-local/pyproject.toml
+++ b/otdf-local/pyproject.toml
@@ -7,6 +7,7 @@ requires-python = ">=3.11"
dependencies = [
"httpx>=0.27.0",
"pydantic-settings>=2.2.0",
+ "pyyaml>=6.0.3",
"rich>=13.7.0",
"ruamel.yaml>=0.18.0",
"typer>=0.12.0",
diff --git a/otdf-local/src/otdf_local/ci.py b/otdf-local/src/otdf_local/ci.py
new file mode 100644
index 000000000..bab3b3203
--- /dev/null
+++ b/otdf-local/src/otdf_local/ci.py
@@ -0,0 +1,223 @@
+"""CI-specific commands for otdf-local.
+
+These commands adapt the local environment management for GitHub Actions CI,
+where the platform is already started by an external action and we only need
+to start KAS instances as background processes.
+"""
+
+from __future__ import annotations
+
+import os
+import sys
+from pathlib import Path
+from typing import Annotated
+
+import typer
+
+from otdf_local.config.ports import Ports
+from otdf_local.config.settings import Settings
+from otdf_local.health.waits import WaitTimeoutError, wait_for_health
+from otdf_local.services import get_kas_manager
+from otdf_local.utils.console import (
+ print_error,
+ print_info,
+ print_success,
+ print_warning,
+)
+from otdf_local.utils.yaml import load_yaml, save_yaml, set_nested
+
+ci_app = typer.Typer(
+ name="ci",
+ help="CI-specific commands for GitHub Actions workflows.",
+ no_args_is_help=True,
+)
+
+
+def _emit_github_output(key: str, value: str) -> None:
+ """Write a key=value pair to $GITHUB_OUTPUT if available, else print to stdout."""
+ github_output = os.environ.get("GITHUB_OUTPUT")
+ if github_output:
+ with open(github_output, "a") as f:
+ f.write(f"{key}={value}\n")
+ else:
+ # Fallback for local testing
+ print(f"{key}={value}", file=sys.stdout)
+
+
+def _prepare_kas_template(
+ settings: Settings, root_key: str | None, ec_tdf_enabled: bool
+) -> None:
+ """Ensure the KAS template config has the right root key and EC TDF settings.
+
+ In CI, the platform config may have a root_key that differs from what
+ we want for additional KAS instances. This updates the platform config
+ in-place so that KASService._generate_config reads the correct root_key.
+ """
+ if root_key:
+ config = load_yaml(settings.platform_config)
+ set_nested(config, "services.kas.root_key", root_key)
+ if ec_tdf_enabled:
+ set_nested(config, "services.kas.preview.ec_tdf_enabled", True)
+ save_yaml(settings.platform_config, config)
+
+
+@ci_app.command("start-kas")
+def start_kas(
+ platform_dir: Annotated[
+ Path,
+ typer.Option(
+ "--platform-dir",
+ help="Path to the platform checkout (must contain opentdf-kas-mode.yaml)",
+ envvar="OTDF_LOCAL_PLATFORM_DIR",
+ ),
+ ],
+ root_key: Annotated[
+ str | None,
+ typer.Option(
+ "--root-key",
+ help="Root key for KAS instances (overrides platform config value)",
+ envvar="OT_ROOT_KEY",
+ ),
+ ] = None,
+ ec_tdf_enabled: Annotated[
+ bool,
+ typer.Option(
+ "--ec-tdf-enabled/--no-ec-tdf",
+ help="Enable EC TDF support",
+ ),
+ ] = True,
+ key_management: Annotated[
+ bool,
+ typer.Option(
+ "--key-management/--no-key-management",
+ help="Enable key management on km1/km2 instances",
+ ),
+ ] = False,
+ log_type: Annotated[
+ str,
+ typer.Option(
+ "--log-type",
+ help="Log format type (json, text)",
+ ),
+ ] = "json",
+ health_timeout: Annotated[
+ int,
+ typer.Option(
+ "--health-timeout",
+ help="Seconds to wait for each KAS instance to become healthy",
+ ),
+ ] = 60,
+ instances: Annotated[
+ str | None,
+ typer.Option(
+ "--instances",
+ help="Comma-separated KAS instance names (default: all)",
+ ),
+ ] = None,
+) -> None:
+ """Start KAS instances for CI and emit GitHub Actions outputs.
+
+ Expects the platform to already be running (started by start-up-with-containers).
+ Starts all 6 KAS instances (alpha, beta, gamma, delta, km1, km2) as background
+ processes, waits for each to pass health checks, and emits log file paths as
+ GitHub Actions step outputs.
+
+ Output keys (written to $GITHUB_OUTPUT):
+ kas-alpha-log-file, kas-beta-log-file, kas-gamma-log-file,
+ kas-delta-log-file, kas-km1-log-file, kas-km2-log-file
+ """
+ platform_dir = platform_dir.resolve()
+ if not platform_dir.is_dir():
+ print_error(f"Platform directory does not exist: {platform_dir}")
+ raise typer.Exit(1)
+
+ # Check for required template files
+ kas_template = platform_dir / "opentdf-kas-mode.yaml"
+ platform_config = platform_dir / "opentdf-dev.yaml"
+ if not kas_template.exists():
+        # Fall back to opentdf.yaml when opentdf-kas-mode.yaml is absent.
+        # NOTE(review): kas_template_alt is only existence-checked here; confirm
+        # settings.kas_template_config actually resolves to this fallback file.
+        kas_template_alt = platform_dir / "opentdf.yaml"
+        if kas_template_alt.exists():
+            print_info(f"Using {kas_template_alt} as KAS template (opentdf-kas-mode.yaml not found)")
+ else:
+ print_error(
+ f"Neither opentdf-kas-mode.yaml nor opentdf.yaml found in {platform_dir}"
+ )
+ raise typer.Exit(1)
+
+ if not platform_config.exists():
+ # Try opentdf.yaml as fallback
+ platform_config_alt = platform_dir / "opentdf.yaml"
+ if platform_config_alt.exists():
+ platform_config = platform_config_alt
+
+ # Build settings with CI-specific overrides
+ # We use a fresh xtest_root derived from this package's location
+    settings = Settings()
+    # platform_dir is now a property, not a pydantic field; set via its setter
+    settings.platform_dir = platform_dir
+ settings.ensure_directories()
+
+    # Apply root key and/or EC TDF overrides to the platform config
+    if root_key or ec_tdf_enabled:
+        _prepare_kas_template(settings, root_key, ec_tdf_enabled)
+
+ # Determine which instances to start
+ if instances:
+ kas_names = [n.strip() for n in instances.split(",")]
+ for name in kas_names:
+ if name not in Ports.all_kas_names():
+ print_error(f"Unknown KAS instance: {name}")
+ raise typer.Exit(1)
+ else:
+ kas_names = Ports.all_kas_names()
+
+ # Start KAS instances
+ print_info(f"Starting KAS instances: {', '.join(kas_names)}...")
+ kas_manager = get_kas_manager(settings)
+
+ failed = []
+ for name in kas_names:
+ kas = kas_manager.get(name)
+ if kas is None:
+ print_error(f"KAS instance {name} not found in manager")
+ failed.append(name)
+ continue
+ if not kas.start():
+ print_error(f"Failed to start KAS {name}")
+ failed.append(name)
+
+ if failed:
+ print_error(f"Failed to start: {', '.join(failed)}")
+ raise typer.Exit(1)
+
+ # Wait for health
+ print_info("Waiting for KAS health checks...")
+ unhealthy = []
+ for name in kas_names:
+ port = Ports.get_kas_port(name)
+ try:
+ wait_for_health(
+ f"http://localhost:{port}/healthz",
+ timeout=health_timeout,
+ service_name=f"KAS {name}",
+ )
+ except WaitTimeoutError as e:
+ print_warning(str(e))
+ unhealthy.append(name)
+
+ if unhealthy:
+ print_error(f"KAS instances failed health check: {', '.join(unhealthy)}")
+ raise typer.Exit(1)
+
+ print_success(f"All {len(kas_names)} KAS instances are healthy")
+
+ # Emit outputs
+ for name in kas_names:
+ log_path = settings.get_kas_log_path(name)
+ output_key = f"kas-{name}-log-file"
+ _emit_github_output(output_key, str(log_path))
+
+ print_success("CI KAS startup complete")
diff --git a/otdf-local/src/otdf_local/cli.py b/otdf-local/src/otdf_local/cli.py
index d8e3597ff..618b0e9ab 100644
--- a/otdf-local/src/otdf_local/cli.py
+++ b/otdf-local/src/otdf_local/cli.py
@@ -11,6 +11,8 @@
from rich.live import Live
from otdf_local import __version__
+from otdf_local.ci import ci_app
+from otdf_local.suite.cli import suite_app
from otdf_local.config.ports import Ports
from otdf_local.config.settings import get_settings
from otdf_local.health.waits import WaitTimeoutError, wait_for_health, wait_for_port
@@ -43,6 +45,9 @@
pretty_exceptions_enable=sys.stderr.isatty(),
)
+app.add_typer(ci_app, name="ci")
+app.add_typer(suite_app, name="suite")
+
def _show_provision_error(result: ProvisionResult, target: str) -> None:
"""Display provisioning error with stderr details."""
diff --git a/otdf-local/src/otdf_local/config/settings.py b/otdf-local/src/otdf_local/config/settings.py
index 96a4c20e8..cda28a1b4 100644
--- a/otdf-local/src/otdf_local/config/settings.py
+++ b/otdf-local/src/otdf_local/config/settings.py
@@ -54,13 +54,15 @@ def _find_xtest_root() -> Path:
def _find_platform_dir(xtest_root: Path) -> Path:
"""Find the platform directory by searching for a sibling of an ancestor.
- Searches up the directory tree from xtest_root looking for a 'platform' directory
- that has the expected shape (contains docker-compose.yaml and opentdf-dev.yaml).
+ Searches for a 'platform' directory in:
+ 1. Ancestor siblings (for local development checkouts)
+ 2. The otdf-sdk-mgr managed location (xtest/sdk/platform/src/main)
+ 3. Last resort: attempts to checkout 'main' via otdf-sdk-mgr
Raises:
FileNotFoundError: If platform directory is not found with expected shape.
"""
- # Start from xtest_root and walk up
+ # 1. Search for sibling 'platform' checkouts
current = xtest_root
while current != current.parent:
# Check siblings at this level
@@ -68,15 +70,60 @@ def _find_platform_dir(xtest_root: Path) -> Path:
if platform_candidate.exists() and platform_candidate.is_dir():
# Verify it has the expected shape
has_compose = (platform_candidate / "docker-compose.yaml").exists()
- has_config = (platform_candidate / "opentdf-dev.yaml").exists()
+ has_config = (platform_candidate / "opentdf-dev.yaml").exists() or (
+ platform_candidate / "opentdf.yaml"
+ ).exists()
if has_compose and has_config:
return platform_candidate
current = current.parent
+ # 2. Search in otdf-sdk-mgr managed location
+ managed_main = xtest_root / "sdk" / "platform" / "src" / "main"
+ if managed_main.exists() and managed_main.is_dir():
+ if (managed_main / "docker-compose.yaml").exists() and (
+ (managed_main / "opentdf-dev.yaml").exists()
+ or (managed_main / "opentdf.yaml").exists()
+ ):
+ return managed_main
+
+ # 3. Last resort: checkout 'main' via otdf-sdk-mgr
+ sdk_mgr_dir = xtest_root.parent / "otdf-sdk-mgr"
+ if sdk_mgr_dir.exists():
+ import subprocess
+
+ try:
+ # We use plain print to avoid circular imports or dependency on rich here
+ print(
+ "Platform not found. Attempting to checkout 'main' via otdf-sdk-mgr..."
+ )
+ subprocess.check_call(
+ [
+ "uv",
+ "run",
+ "--project",
+ str(sdk_mgr_dir),
+ "otdf-sdk-mgr",
+ "checkout",
+ "platform",
+ "main",
+ ],
+ cwd=sdk_mgr_dir,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL,
+ )
+            if managed_main.exists() and managed_main.is_dir():
+                cfgs = ("opentdf-dev.yaml", "opentdf.yaml")
+                compose_ok = (managed_main / "docker-compose.yaml").exists()
+                if compose_ok and any((managed_main / c).exists() for c in cfgs):
+                    return managed_main
+ except Exception:
+ # Fall through to FileNotFoundError
+ pass
+
# If we get here, we didn't find it
raise FileNotFoundError(
f"Could not find platform directory with expected shape "
- f"(docker-compose.yaml and opentdf-dev.yaml) searching from {xtest_root}"
+ f"(docker-compose.yaml and opentdf.yaml or opentdf-dev.yaml) searching from {xtest_root}"
)
@@ -91,9 +138,18 @@ class Settings(BaseSettings):
# Directory paths - computed from xtest_root
xtest_root: Path = Field(default_factory=_find_xtest_root)
- platform_dir: Path = Field(
- default_factory=lambda: _find_platform_dir(_find_xtest_root())
- )
+ _platform_dir: Path | None = None
+
+ @property
+ def platform_dir(self) -> Path:
+ """Platform directory path."""
+ if self._platform_dir:
+ return self._platform_dir
+ return _find_platform_dir(self.xtest_root)
+
+ @platform_dir.setter
+ def platform_dir(self, value: Path) -> None:
+ self._platform_dir = value
@property
def logs_dir(self) -> Path:
@@ -113,12 +169,12 @@ def config_dir(self) -> Path:
@property
def platform_config(self) -> Path:
"""Platform config file path."""
- return self.platform_dir / "opentdf-dev.yaml"
+ return self.platform_dir / "opentdf.yaml"
@property
def platform_template_config(self) -> Path:
"""Platform config template path."""
- return self.platform_dir / "opentdf.yaml"
+ return self.platform_dir / "opentdf-dev.yaml"
@property
def kas_template_config(self) -> Path:
diff --git a/otdf-local/src/otdf_local/process/logs.py b/otdf-local/src/otdf_local/process/logs.py
index 0553e8f38..6c395610b 100644
--- a/otdf-local/src/otdf_local/process/logs.py
+++ b/otdf-local/src/otdf_local/process/logs.py
@@ -61,6 +61,16 @@ def read_new(self) -> list[LogEntry]:
self._position = f.tell()
return entries
+ def seek_to_end(self) -> None:
+ """Move the read position to the end of the file."""
+ if not self.log_file.exists():
+ self._position = 0
+ return
+
+ with open(self.log_file) as f:
+ f.seek(0, 2) # Seek to end
+ self._position = f.tell()
+
def follow(self, poll_interval: float = 0.5) -> Iterator[LogEntry]:
"""Continuously yield new log entries."""
while True:
@@ -182,6 +192,12 @@ def read_tail(
key=lambda e: (e.timestamp is None, e.timestamp or datetime.max),
)
+ def seek_to_end(self, services: list[str] | None = None) -> None:
+ """Seek all (or specified) readers to the end of their files."""
+ readers = self._get_readers(services)
+ for reader in readers:
+ reader.seek_to_end()
+
def follow(
self,
services: list[str] | None = None,
diff --git a/otdf-local/src/otdf_local/services/kas.py b/otdf-local/src/otdf_local/services/kas.py
index 0b7adfa64..4613d8def 100644
--- a/otdf-local/src/otdf_local/services/kas.py
+++ b/otdf-local/src/otdf_local/services/kas.py
@@ -56,7 +56,10 @@ def _generate_config(self) -> Path:
template_path = self.settings.kas_template_config
# Load platform config to get root_key
- platform_config = load_yaml(self.settings.platform_config)
+ try:
+ platform_config = load_yaml(self.settings.platform_config)
+ except FileNotFoundError:
+ platform_config = {}
root_key = get_nested(platform_config, "services.kas.root_key", "")
# Detect platform features to determine supported config options
diff --git a/otdf-local/src/otdf_local/services/platform.py b/otdf-local/src/otdf_local/services/platform.py
index 15f7f4e5e..4e6c3c7d0 100644
--- a/otdf-local/src/otdf_local/services/platform.py
+++ b/otdf-local/src/otdf_local/services/platform.py
@@ -54,6 +54,21 @@ def _generate_config(self) -> Path:
config_path = self.settings.platform_config
template_path = self.settings.platform_template_config
+ # Fallback if template doesn't exist
+ if not template_path.exists():
+ # Try opentdf-example.yaml
+ example_template = self.settings.platform_dir / "opentdf-example.yaml"
+ if example_template.exists():
+ template_path = example_template
+ else:
+ # If neither exist, we might have to just use opentdf.yaml if it exists
+ # or raise a more helpful error
+ if not config_path.exists():
+ raise FileNotFoundError(
+ f"Could not find template {template_path} or {example_template}"
+ )
+ return config_path
+
# Detect platform features to determine supported config options
features = PlatformFeatures.detect(self.settings.platform_dir)
diff --git a/otdf-local/src/otdf_local/suite/cli.py b/otdf-local/src/otdf_local/suite/cli.py
new file mode 100644
index 000000000..53ed8f502
--- /dev/null
+++ b/otdf-local/src/otdf_local/suite/cli.py
@@ -0,0 +1,114 @@
+"""CLI commands for X-Test suite management."""
+
+from __future__ import annotations
+
+import json
+import os
+from pathlib import Path
+from typing import Annotated, Optional
+
+import typer
+import yaml
+
+from otdf_local.config.settings import get_settings
+from otdf_local.suite.models import (
+ PlatformVersion,
+ SDKVersion,
+ SuiteConfig,
+ TestJob,
+)
+from otdf_local.utils.console import console, print_error, print_info
+
+
+suite_app = typer.Typer(
+ name="suite",
+ help="X-Test suite orchestration commands.",
+ no_args_is_help=True,
+)
+
+
+@suite_app.command("generate-shard")
+def generate_shard(
+ platform_ref: Annotated[str, typer.Option("--platform-ref", help="Platform ref to test")] = ...,
+ sdk: Annotated[str, typer.Option("--sdk", help="SDK to focus on (go, java, js)")] = ...,
+ platform_sha: Annotated[Optional[str], typer.Option("--platform-sha", help="Platform SHA to test")] = None,
+ go_ref: Annotated[str, typer.Option("--go-ref")] = "main",
+ go_sha: Annotated[Optional[str], typer.Option("--go-sha")] = None,
+ java_ref: Annotated[str, typer.Option("--java-ref")] = "main",
+ java_sha: Annotated[Optional[str], typer.Option("--java-sha")] = None,
+ js_ref: Annotated[str, typer.Option("--js-ref")] = "main",
+ js_sha: Annotated[Optional[str], typer.Option("--js-sha")] = None,
+ output: Annotated[Optional[Path], typer.Option("--output", "-o", help="Output YAML file")] = None,
+) -> None:
+ """Generate a self-contained SuiteConfig YAML for a specific matrix shard."""
+ platform = PlatformVersion(tag=platform_ref, sha=platform_sha)
+
+ sdks = {
+ "go": [SDKVersion(tag=go_ref, sha=go_sha)],
+ "java": [SDKVersion(tag=java_ref, sha=java_sha)],
+ "js": [SDKVersion(tag=js_ref, sha=js_sha)],
+ }
+
+ # Define jobs - match the standard jobs in xtest.yml
+ jobs = [
+ TestJob(
+ name=f"standard-{sdk}",
+ pytest_args=["-ra", "-v", "test_tdfs.py", "test_policytypes.py"],
+ focus_sdk=sdk
+ ),
+ TestJob(
+ name=f"legacy-{sdk}",
+ pytest_args=["-ra", "-v", "test_legacy.py"],
+ focus_sdk=sdk
+ ),
+ TestJob(
+ name=f"abac-{sdk}",
+ pytest_args=["-ra", "-v", "test_abac.py"],
+ focus_sdk=sdk,
+ requires_kas=True
+ ),
+ ]
+
+ config = SuiteConfig(
+ platforms=[platform],
+ sdks=sdks,
+ jobs=jobs
+ )
+
+ yaml_data = yaml.dump(config.model_dump(), sort_keys=False)
+
+ if output:
+ output.write_text(yaml_data)
+ print_info(f"Generated shard config at {output}")
+ else:
+ # Print to stdout (wrapped in markdown for GHA summary)
+        summary = f"<details><summary>shard.yaml (for reproduction)</summary>\n\n```yaml\n{yaml_data}```\n</details>"
+ console.print(summary)
+
+ # Also write to GITHUB_STEP_SUMMARY if it exists
+ github_summary = os.environ.get("GITHUB_STEP_SUMMARY")
+ if github_summary:
+ with open(github_summary, "a") as f:
+ f.write(f"\n{summary}\n")
+
+
+@suite_app.command("run")
+def run_suite(
+ config_path: Annotated[Path, typer.Argument(help="Path to SuiteConfig YAML")],
+ verbose: Annotated[bool, typer.Option("--verbose", "-v", help="Enable verbose logging")] = False,
+) -> None:
+ """Run an X-Test suite from a configuration file."""
+ if not config_path.exists():
+ print_error(f"Config file not found: {config_path}")
+ raise typer.Exit(1)
+
+ with open(config_path) as f:
+ data = yaml.safe_load(f)
+ config = SuiteConfig.model_validate(data)
+
+ from otdf_local.suite.runner import SuiteRunner
+ runner = SuiteRunner(config, get_settings(), verbose=verbose)
+
+ success = runner.run()
+ if not success:
+ raise typer.Exit(1)
diff --git a/otdf-local/src/otdf_local/suite/models.py b/otdf-local/src/otdf_local/suite/models.py
new file mode 100644
index 000000000..adeeb9f13
--- /dev/null
+++ b/otdf-local/src/otdf_local/suite/models.py
@@ -0,0 +1,44 @@
+"""Pydantic models for the X-Test suite configuration."""
+
+from __future__ import annotations
+
+from typing import Dict, List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class SDKVersion(BaseModel):
+ """Configuration for an SDK version."""
+
+ tag: str
+ sha: Optional[str] = None
+ source: Optional[str] = None # For Go: "platform" or "standalone"
+ alias: Optional[str] = None
+ head: bool = False
+
+
+class PlatformVersion(BaseModel):
+ """Configuration for a platform version."""
+
+ tag: str
+ sha: Optional[str] = None
+ ec_tdf_enabled: bool = True
+ # Optional JSON string of extra keys, or path to extra-keys.json
+ extra_keys: Optional[str] = None
+
+
+class TestJob(BaseModel):
+ """Configuration for a test execution job."""
+
+ name: str
+ pytest_args: List[str] = Field(default_factory=list)
+ requires_kas: bool = False
+ focus_sdk: str = "all"
+
+
+class SuiteConfig(BaseModel):
+ """Root configuration for a test suite (or shard)."""
+
+ platforms: List[PlatformVersion]
+ sdks: Dict[str, List[SDKVersion]]
+ jobs: List[TestJob]
diff --git a/otdf-local/src/otdf_local/suite/runner.py b/otdf-local/src/otdf_local/suite/runner.py
new file mode 100644
index 000000000..1f2f9e33d
--- /dev/null
+++ b/otdf-local/src/otdf_local/suite/runner.py
@@ -0,0 +1,358 @@
+"""Runner for X-Test suites."""
+
+from __future__ import annotations
+
+import json
+import os
+import subprocess
+from pathlib import Path
+from typing import Dict, List, Optional
+
+from otdf_local.config.settings import Settings
+from otdf_local.services import (
+ Provisioner,
+ get_docker_service,
+ get_kas_manager,
+ get_platform_service,
+ get_provisioner,
+)
+from otdf_local.suite.models import PlatformVersion, SDKVersion, SuiteConfig, TestJob
+from otdf_local.utils.console import (
+ console,
+ print_error,
+ print_info,
+ print_success,
+ print_warning,
+ status_spinner,
+)
+
+
+class SuiteRunner:
+ """Orchestrates the execution of an X-Test suite."""
+
+ def __init__(self, config: SuiteConfig, settings: Settings, verbose: bool = False) -> None:
+ self.config = config
+ self.settings = settings
+ self.verbose = verbose
+ self.results: List[Dict] = []
+
+ def run(self) -> bool:
+ """Run the full suite."""
+ success = True
+
+ for platform in self.config.platforms:
+ if not self._run_platform_tests(platform):
+ success = False
+
+ self._print_summary()
+ return success
+
+ def _run_platform_tests(self, platform: PlatformVersion) -> bool:
+ """Run all tests against a specific platform version."""
+ print_info(f"--- Starting tests for Platform {platform.tag} ---")
+
+ # 1. Checkout platform if needed
+ platform_dir = self._ensure_platform(platform)
+ if not platform_dir:
+ return False
+
+ # Create a specific settings instance for this platform
+ # We need to be careful with global state in otdf-local
+ # but for now we'll just update the settings object.
+ try:
+ original_platform_dir = self.settings.platform_dir
+ except FileNotFoundError:
+ original_platform_dir = None
+
+ self.settings.platform_dir = platform_dir
+
+ try:
+ # 2. Start Services
+ if not self._start_services(platform):
+ return False
+
+ # 3. Install SDKs
+ self._ensure_sdks()
+
+ # 4. Run Jobs
+ platform_success = True
+ for job in self.config.jobs:
+ if not self._run_job(job, platform):
+ platform_success = False
+
+ return platform_success
+
+ finally:
+ self._stop_services()
+ if original_platform_dir:
+ self.settings.platform_dir = original_platform_dir
+ else:
+ # If there was no original platform dir, clear the override
+ self.settings._platform_dir = None
+
+ def _ensure_platform(self, platform: PlatformVersion) -> Optional[Path]:
+ """Ensure the platform is checked out at the right version."""
+ # Use otdf-sdk-mgr to checkout platform
+ ref = platform.sha or platform.tag
+ print_info(f"Ensuring platform version {ref}...")
+ sdk_mgr_dir = self.settings.xtest_root.parent / "otdf-sdk-mgr"
+ try:
+ subprocess.check_call(
+ ["uv", "run", "--project", str(sdk_mgr_dir), "otdf-sdk-mgr", "checkout", "platform", ref],
+ cwd=sdk_mgr_dir,
+ )
+ # Find the worktree path. otdf-sdk-mgr puts it in xtest/sdk/platform/src/
+ # where branch has / replaced with --
+ branch_dir = ref.replace("/", "--")
+ if branch_dir.startswith("service--"):
+ branch_dir = branch_dir.removeprefix("service--")
+
+ worktree_path = (
+ self.settings.xtest_root / "sdk" / "platform" / "src" / branch_dir
+ )
+ if not worktree_path.exists():
+ # Fallback to direct directory if tag is 'main' and we are already in platform dir?
+ # No, better be explicit. otdf-sdk-mgr puts main in 'main'
+ pass
+
+ if not worktree_path.exists():
+ print_error(f"Platform worktree not found at {worktree_path}")
+ return None
+
+ return worktree_path
+ except subprocess.CalledProcessError as e:
+ print_error(f"Failed to checkout platform {platform.tag}: {e}")
+ return None
+
+ def _ensure_sdks(self) -> None:
+ """Ensure all required SDKs are installed."""
+ sdk_mgr_dir = self.settings.xtest_root.parent / "otdf-sdk-mgr"
+ for sdk, versions in self.config.sdks.items():
+ for version in versions:
+ ref = version.sha or version.tag
+ print_info(f"Ensuring {sdk} {ref}...")
+
+ # If it's a SHA or a tag we want to build from source, we use 'checkout'
+ if version.sha or version.head:
+ args = ["uv", "run", "--project", str(sdk_mgr_dir), "otdf-sdk-mgr", "checkout", sdk, ref]
+ try:
+ subprocess.check_call(
+ args,
+ cwd=sdk_mgr_dir,
+ )
+ # After checkout, we need to build it
+ sdk_dir = self.settings.xtest_root / "sdk" / sdk
+ print_info(f"Building {sdk} from source in {sdk_dir}...")
+ subprocess.check_call(["make"], cwd=sdk_dir)
+ except subprocess.CalledProcessError as e:
+ print_warning(f"Failed to checkout/build {sdk} {ref}: {e}")
+ else:
+ args = ["uv", "run", "--project", str(sdk_mgr_dir), "otdf-sdk-mgr", "install", "artifact", "--sdk", sdk, "--version", ref]
+ if version.source:
+ args.extend(["--source", version.source])
+ if version.alias:
+ args.extend(["--dist-name", version.alias])
+
+ try:
+ subprocess.check_call(
+ args,
+ cwd=sdk_mgr_dir,
+ )
+ except subprocess.CalledProcessError as e:
+ print_warning(f"Failed to install {sdk} {ref}: {e}")
+
+ def _start_services(self, platform: PlatformVersion) -> bool:
+ """Start Docker, Platform, and optionally KAS."""
+ # Ensure we stop everything first to have a clean start
+ self._stop_services()
+
+ # Clean logs before starting to avoid pollution
+ from otdf_local.cli import _do_clean
+ _do_clean(self.settings, keep_logs=False)
+
+ print_info("Starting services...")
+ from otdf_local.health.waits import wait_for_health, wait_for_port
+ from otdf_local.config.ports import Ports
+ from otdf_local.process.logs import LogAggregator
+ import threading
+
+ # Start log follower if verbose
+ stop_logs = threading.Event()
+ if self.verbose:
+ def follow_logs():
+ agg = LogAggregator(self.settings.logs_dir)
+ agg.add_service("platform")
+ # Also add standard KAS just in case
+ for kas in Ports.standard_kas_names():
+ agg.add_service(f"kas-{kas}")
+
+ # Seek to end so we don't read old/cleared logs if they were instantly recreated
+ agg.seek_to_end()
+
+ for entry in agg.follow():
+ if stop_logs.is_set():
+ break
+ console.print(f"[dim]{entry.service}:[/dim] {entry.message}")
+
+ log_thread = threading.Thread(target=follow_logs, daemon=True)
+ log_thread.start()
+
+ try:
+ # Start Docker
+ docker = get_docker_service(self.settings)
+ if not docker.start():
+ print_error("Failed to start Docker services")
+ return False
+
+ # Wait for Keycloak
+ with status_spinner("Waiting for Keycloak..."):
+ try:
+ wait_for_health(
+ f"http://localhost:{Ports.KEYCLOAK}/auth/realms/master",
+ timeout=120,
+ service_name="Keycloak",
+ )
+ except Exception as e:
+ print_error(f"Keycloak failed to become healthy: {e}")
+ return False
+
+ # Wait for PostgreSQL
+ try:
+ wait_for_port(
+ Ports.POSTGRES,
+ "localhost",
+ timeout=60,
+ service_name="PostgreSQL",
+ )
+ except Exception as e:
+ print_error(f"PostgreSQL failed to become ready: {e}")
+ return False
+
+ # Provision Keycloak
+ print_info("Provisioning Keycloak...")
+ provisioner: Provisioner = get_provisioner(self.settings)
+ if not provisioner.provision_keycloak():
+ print_error("Failed to provision Keycloak")
+ return False
+
+ # Wait for the opentdf realm to be ready
+ with status_spinner("Waiting for opentdf realm..."):
+ try:
+ wait_for_health(
+ f"http://localhost:{Ports.KEYCLOAK}/auth/realms/opentdf",
+ timeout=60,
+ service_name="Keycloak opentdf realm",
+ )
+ except Exception as e:
+ print_error(f"opentdf realm failed to become ready: {e}")
+ return False
+
+ # Start Platform
+ platform_service = get_platform_service(self.settings)
+
+ if not platform_service.start():
+ print_error("Failed to start Platform")
+ return False
+
+ with status_spinner("Waiting for Platform..."):
+ try:
+ wait_for_health(platform_service.health_url, timeout=120)
+ except Exception as e:
+ print_error(f"Platform failed to become healthy: {e}")
+ return False
+
+ # Provision fixtures
+ print_info("Provisioning fixtures...")
+ if not provisioner.provision_fixtures():
+ print_warning("Provisioning had issues - continuing anyway")
+
+ finally:
+ if self.verbose:
+ stop_logs.set()
+
+ return True
+
+ def _run_job(self, job: TestJob, platform: PlatformVersion) -> bool:
+ """Run a specific test job."""
+ print_info(f"Running job: {job.name}")
+
+ # Start KAS if needed
+ if job.requires_kas:
+ print_info("Starting KAS instances for ABAC tests...")
+ kas_manager = get_kas_manager(self.settings)
+ kas_manager.start_all()
+ # Wait for health...
+ # (simplified for now, PlatformService.start already waits for platform)
+
+ # Build pytest command
+ cmd = ["uv", "run", "pytest"] + job.pytest_args
+
+ # Environment variables
+ env = os.environ.copy()
+ # Add otdf-local env vars
+ from otdf_local.cli import env as env_cmd
+ # We can't easily call 'env' command here without stdout capture
+ # Let's just manually set the essentials
+ env["PLATFORMURL"] = self.settings.platform_url
+ env["PLATFORM_DIR"] = str(self.settings.platform_dir.resolve())
+ env["OT_ROOT_KEY"] = self._get_root_key()
+ env["FOCUS_SDK"] = job.focus_sdk
+
+ # Run pytest
+ print_info(f"Executing: {' '.join(cmd)}")
+ result = subprocess.run(
+ cmd,
+ cwd=self.settings.xtest_root,
+ env=env,
+ )
+
+ success = result.returncode == 0
+ self.results.append({
+ "job": job.name,
+ "platform": platform.tag,
+ "success": success,
+ "returncode": result.returncode
+ })
+
+ if success:
+ print_success(f"Job {job.name} passed")
+ else:
+ print_error(f"Job {job.name} failed with code {result.returncode}")
+
+ return success
+
+ def _stop_services(self) -> None:
+ """Stop all services."""
+ print_info("Stopping services...")
+ get_kas_manager(self.settings).stop_all()
+ get_platform_service(self.settings).stop()
+ get_docker_service(self.settings).stop()
+
+ def _get_root_key(self) -> str:
+ """Read root key from platform config."""
+ from otdf_local.utils.yaml import load_yaml, get_nested
+ try:
+ config = load_yaml(self.settings.platform_config)
+ return get_nested(config, "services.kas.root_key") or ""
+ except:
+ return ""
+
+ def _print_summary(self) -> None:
+ """Print a summary of all test results."""
+ console.print("\n[bold]--- Test Suite Summary ---[/bold]")
+
+ if not self.results:
+ print_warning("No tests were executed (likely due to setup failures).")
+ return
+
+ all_passed = True
+ for res in self.results:
+ status = "[green]PASS[/green]" if res["success"] else "[red]FAIL[/red]"
+ console.print(f"{status} Job: {res['job']} (Platform: {res['platform']})")
+ if not res["success"]:
+ all_passed = False
+
+ if all_passed:
+ print_success("All tests passed!")
+ else:
+ print_error("Some tests failed.")
diff --git a/otdf-local/uv.lock b/otdf-local/uv.lock
index f9b5aaae0..5b02a2478 100644
--- a/otdf-local/uv.lock
+++ b/otdf-local/uv.lock
@@ -146,6 +146,7 @@ source = { editable = "." }
dependencies = [
{ name = "httpx" },
{ name = "pydantic-settings" },
+ { name = "pyyaml" },
{ name = "rich" },
{ name = "ruamel-yaml" },
{ name = "typer" },
@@ -162,6 +163,7 @@ dev = [
requires-dist = [
{ name = "httpx", specifier = ">=0.27.0" },
{ name = "pydantic-settings", specifier = ">=2.2.0" },
+ { name = "pyyaml", specifier = ">=6.0.3" },
{ name = "rich", specifier = ">=13.7.0" },
{ name = "ruamel-yaml", specifier = ">=0.18.0" },
{ name = "typer", specifier = ">=0.12.0" },
@@ -365,6 +367,61 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" },
]
+[[package]]
+name = "pyyaml"
+version = "6.0.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" },
+ { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" },
+ { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" },
+ { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" },
+ { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" },
+ { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" },
+ { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" },
+ { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" },
+ { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" },
+ { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" },
+ { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" },
+ { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" },
+ { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" },
+ { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" },
+ { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" },
+ { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" },
+ { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" },
+ { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" },
+ { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" },
+ { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" },
+ { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" },
+ { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" },
+ { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" },
+ { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" },
+ { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" },
+ { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" },
+ { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" },
+ { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" },
+ { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" },
+ { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" },
+ { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" },
+ { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" },
+]
+
[[package]]
name = "rich"
version = "14.3.1"
diff --git a/otdf-sdk-mgr/README.md b/otdf-sdk-mgr/README.md
index ee2a2a8b7..4b930eaae 100644
--- a/otdf-sdk-mgr/README.md
+++ b/otdf-sdk-mgr/README.md
@@ -56,7 +56,7 @@ otdf-sdk-mgr java-fixup
## How Release Installs Work
-- **Go**: Writes a `.version` file; `cli.sh`/`otdfctl.sh` use `go run github.com/opentdf/otdfctl@{version}` (no local compilation needed, Go caches the binary)
+- **Go**: Writes a `.version` file containing `module-path@version` (e.g., `github.com/opentdf/otdfctl@v0.24.0`); `cli.sh`/`otdfctl.sh` use `go run @` (no local compilation needed, Go caches the binary). The module path is `github.com/opentdf/platform/otdfctl` for platform-embedded releases or `github.com/opentdf/otdfctl` for standalone releases.
- **JS**: Runs `npm install @opentdf/ctl@{version}` into the dist directory; `cli.sh` uses `npx` from local `node_modules/`
- **Java**: Downloads `cmdline.jar` from GitHub Releases; `cli.sh` uses `java -jar cmdline.jar`
diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli.py
index 24148bdd7..2920ae2a4 100644
--- a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli.py
+++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli.py
@@ -11,7 +11,7 @@
from otdf_sdk_mgr.cli_install import install_app
from otdf_sdk_mgr.cli_versions import versions_app
-from otdf_sdk_mgr.config import ALL_SDKS, get_sdk_dirs
+from otdf_sdk_mgr.config import ALL_SDKS, ALL_REPOS, get_sdk_dirs
app = typer.Typer(
name="otdf-sdk-mgr",
@@ -27,22 +27,22 @@
def checkout(
sdk: Annotated[
Optional[str],
- typer.Argument(help="SDK to checkout (go, js, java)"),
+ typer.Argument(help="SDK or repo to checkout (go, js, java, platform)"),
] = None,
branch: Annotated[str, typer.Argument(help="Branch to checkout")] = "main",
- all_sdks: Annotated[bool, typer.Option("--all", help="Checkout all SDKs")] = False,
+ all_sdks: Annotated[bool, typer.Option("--all", help="Checkout all repos")] = False,
) -> None:
- """Clone bare repo and create/update worktree for an SDK branch."""
+ """Clone bare repo and create/update worktree for an SDK or platform branch."""
from otdf_sdk_mgr.checkout import checkout_sdk_branch
try:
if all_sdks:
- for s in ALL_SDKS:
+ for s in ALL_REPOS:
checkout_sdk_branch(s, branch)
elif sdk:
checkout_sdk_branch(sdk, branch)
else:
- typer.echo("Error: provide an SDK name or use --all", err=True)
+ typer.echo("Error: provide a repo name or use --all", err=True)
raise typer.Exit(1)
except (ValueError, subprocess.CalledProcessError) as e:
typer.echo(f"Error: {e}", err=True)
@@ -63,8 +63,10 @@ def clean(
remove_src = not dist_only
sdk_dirs = get_sdk_dirs()
- for sdk in ALL_SDKS:
- sdk_dir = sdk_dirs[sdk]
+ for repo in ALL_REPOS:
+ if repo not in sdk_dirs:
+ continue
+ sdk_dir = sdk_dirs[repo]
if remove_dist:
dist_dir = sdk_dir / "dist"
if dist_dir.exists():
@@ -93,3 +95,39 @@ def java_fixup(
from otdf_sdk_mgr.java_fixup import post_checkout_java_fixup
post_checkout_java_fixup(base_dir)
+
+
+@app.command("go-fixup")
+def go_fixup_cmd(
+ platform_dir: Annotated[
+ Path,
+ typer.Option("--platform-dir", help="Path to the platform checkout root"),
+ ],
+ heads: Annotated[
+ Optional[str],
+ typer.Option(
+ "--heads",
+ help="JSON list of head version tags to process (e.g. '[\"main\"]')",
+ ),
+ ] = None,
+ base_dir: Annotated[
+ Optional[Path],
+ typer.Argument(help="Base directory for Go source trees"),
+ ] = None,
+) -> None:
+ """Bridge Go client go.mod to server shared modules for head builds.
+
+ Performs go mod edit -replace + go mod tidy for each head version,
+ pointing platform module imports at the local platform checkout.
+ Only needed for standalone otdfctl checkouts.
+ """
+ import json as json_mod
+
+ from otdf_sdk_mgr.go_fixup import go_fixup
+
+ heads_list = json_mod.loads(heads) if heads else None
+ try:
+ go_fixup(platform_dir, heads=heads_list, base_dir=base_dir)
+ except (FileNotFoundError, subprocess.CalledProcessError) as e:
+ typer.echo(f"Error: {e}", err=True)
+ raise typer.Exit(1) from e
diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_install.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_install.py
index e3950d717..e62ae2464 100644
--- a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_install.py
+++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_install.py
@@ -74,12 +74,16 @@ def artifact(
dist_name: Annotated[
Optional[str], typer.Option("--dist-name", help="Override dist directory name")
] = None,
+ source: Annotated[
+ Optional[str],
+ typer.Option(help='Source repo for Go CLI (e.g., "platform" for monorepo)'),
+ ] = None,
) -> None:
"""Install a single SDK version (used by CI)."""
from otdf_sdk_mgr.installers import InstallError, cmd_install
try:
- cmd_install(sdk, version, dist_name=dist_name)
+ cmd_install(sdk, version, dist_name=dist_name, source=source)
except InstallError as e:
typer.echo(f"Error: {e}", err=True)
raise typer.Exit(1)
diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_versions.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_versions.py
index 19188b124..2dcf6e321 100644
--- a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_versions.py
+++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_versions.py
@@ -3,6 +3,7 @@
from __future__ import annotations
import json
+import os
from typing import Annotated, Any, Optional
import typer
@@ -112,10 +113,20 @@ def resolve_versions(
raise typer.Exit(2)
infix = SDK_TAG_INFIXES.get(sdk)
+ # Allow overriding the Go SDK source via OTDFCTL_SOURCE env var
+ # (standalone otdfctl repo vs platform monorepo)
+ go_source = os.environ.get("OTDFCTL_SOURCE") if sdk == "go" else None
+ if go_source and go_source not in ("standalone", "platform"):
+ typer.echo(
+ f"Warning: unrecognized OTDFCTL_SOURCE={go_source!r}; expected 'platform' or 'standalone'",
+ err=True,
+ )
+ go_source = None
+
results: list[ResolveResult] = []
shas: set[str] = set()
for version in tags:
- v = resolve(sdk, version, infix)
+ v = resolve(sdk, version, infix, go_source=go_source)
if is_resolve_success(v):
env = lookup_additional_options(sdk, v["tag"])
if env:
diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/config.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/config.py
index adf6c8b1f..ce8c2dc6c 100644
--- a/otdf-sdk-mgr/src/otdf_sdk_mgr/config.py
+++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/config.py
@@ -43,6 +43,7 @@ def get_sdk_dirs() -> dict[str, Path]:
"go": sdk_dir / "go",
"js": sdk_dir / "js",
"java": sdk_dir / "java",
+ "platform": sdk_dir / "platform",
}
@@ -70,7 +71,11 @@ def get_sdk_dirs() -> dict[str, Path]:
"java": "opentdf/java-sdk",
}
-GO_INSTALL_PREFIX = "go run github.com/opentdf/otdfctl"
+GO_INSTALL_PREFIX_STANDALONE = "go run github.com/opentdf/otdfctl"
+GO_INSTALL_PREFIX_PLATFORM = "go run github.com/opentdf/platform/otdfctl"
+
+GO_MODULE_PATH = "github.com/opentdf/otdfctl"
+GO_MODULE_PATH_PLATFORM = "github.com/opentdf/platform/otdfctl"
LTS_VERSIONS: dict[str, str] = {
"go": "0.24.0",
@@ -103,6 +108,7 @@ def get_sdk_dirs() -> dict[str, Path]:
"go": "otdfctl.git",
"java": "java-sdk.git",
"js": "web-sdk.git",
+ "platform": "platform.git",
}
# Tag infixes for monorepo tag resolution
@@ -111,4 +117,47 @@ def get_sdk_dirs() -> dict[str, Path]:
"platform": "service",
}
+# When resolving go versions from the platform repo, use "otdfctl" infix
+# (tags are otdfctl/vX.Y.Z in the platform monorepo)
+SDK_TAG_INFIXES_PLATFORM_GO = "otdfctl"
+
+_VALID_GO_SOURCES = {None, "standalone", "platform"}
+
+
+def _validate_go_source(source: str | None) -> None:
+ """Raise ValueError if source is not a recognised Go source."""
+ if source not in _VALID_GO_SOURCES:
+ raise ValueError(f"Invalid Go source {source!r}; expected one of {_VALID_GO_SOURCES}")
+
+
+def go_git_url(source: str | None = None) -> str:
+ """Return the git URL for Go SDK resolution based on source.
+
+ Args:
+ source: "platform" to use the platform monorepo, None/"standalone" for the
+ standalone otdfctl repo.
+ """
+ _validate_go_source(source)
+ if source == "platform":
+ return SDK_GIT_URLS["platform"]
+ return SDK_GIT_URLS["go"]
+
+
+def go_tag_infix(source: str | None = None) -> str | None:
+ """Return the tag infix for Go SDK resolution based on source."""
+ _validate_go_source(source)
+ if source == "platform":
+ return SDK_TAG_INFIXES_PLATFORM_GO
+ return None
+
+
+def go_module_path(source: str | None = None) -> str:
+ """Return the Go module path based on source."""
+ _validate_go_source(source)
+ if source == "platform":
+ return GO_MODULE_PATH_PLATFORM
+ return GO_MODULE_PATH
+
+
ALL_SDKS = ["go", "js", "java"]
+ALL_REPOS = ["go", "js", "java", "platform"]
diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/go_fixup.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/go_fixup.py
new file mode 100644
index 000000000..4098361c2
--- /dev/null
+++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/go_fixup.py
@@ -0,0 +1,95 @@
+"""Post-checkout fixups for Go SDK (otdfctl) source trees.
+
+Bridges client go.mod to server shared modules for head builds where
+client and server share unreleased code. Only applies to standalone
+otdfctl checkouts — platform-source builds already have the modules.
+"""
+
+from __future__ import annotations
+
+import subprocess
+from pathlib import Path
+
+from otdf_sdk_mgr.config import get_sdk_dir
+
+# Platform modules that standalone otdfctl imports and that may need
+# a local replace directive when testing against a head platform build.
+PLATFORM_MODULES = [
+ "lib/fixtures",
+ "lib/ocrypto",
+ "protocol/go",
+ "sdk",
+]
+
+
+def go_fixup(
+ platform_dir: Path,
+ heads: list[str] | None = None,
+ base_dir: Path | None = None,
+) -> None:
+ """Replace go.mod references to point at local platform checkout.
+
+ Args:
+ platform_dir: Absolute path to the platform checkout root
+ (containing lib/, protocol/, sdk/).
+ heads: JSON-decoded list of head version tags to process.
+ If None, all subdirectories under *base_dir* are processed.
+ base_dir: Directory containing per-version otdfctl source trees
+ (e.g. ``xtest/sdk/go/src``). Defaults to ``get_sdk_dir() / "go" / "src"``.
+ """
+ if base_dir is None:
+ base_dir = get_sdk_dir() / "go" / "src"
+
+ if not base_dir.exists():
+ print(f"Base directory {base_dir} does not exist, nothing to fix.")
+ return
+
+ platform_dir = platform_dir.resolve()
+ if not platform_dir.is_dir():
+ raise FileNotFoundError(f"Platform directory does not exist: {platform_dir}")
+
+ dirs_to_process: list[Path] = []
+ if heads:
+ for tag in heads:
+ d = base_dir / tag
+ if d.is_dir():
+ dirs_to_process.append(d)
+ else:
+ print(f"Warning: head directory {d} does not exist, skipping.")
+ else:
+ for d in sorted(base_dir.iterdir()):
+ if d.is_dir() and not d.name.endswith(".git"):
+ dirs_to_process.append(d)
+
+ if not dirs_to_process:
+ print("No directories to process.")
+ return
+
+ for src_dir in dirs_to_process:
+ if not (src_dir / "go.mod").exists():
+ print(f"No go.mod in {src_dir}, skipping.")
+ continue
+
+ print(f"Applying go.mod replacements in {src_dir}...")
+ for module in PLATFORM_MODULES:
+ local_path = platform_dir / module
+ if not local_path.is_dir():
+ print(f" Warning: {local_path} does not exist, skipping {module}")
+ continue
+ subprocess.run(
+ [
+ "go",
+ "mod",
+ "edit",
+ "-replace",
+ f"github.com/opentdf/platform/{module}={local_path}",
+ ],
+ cwd=src_dir,
+ check=True,
+ )
+ print(f" Replaced github.com/opentdf/platform/{module} -> {local_path}")
+
+ print(f"Running go mod tidy in {src_dir}...")
+ subprocess.run(["go", "mod", "tidy"], cwd=src_dir, check=True)
+
+ print("Go fixup complete.")
diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/installers.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/installers.py
index e7c22ae09..0822a063e 100644
--- a/otdf-sdk-mgr/src/otdf_sdk_mgr/installers.py
+++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/installers.py
@@ -11,9 +11,11 @@
from pathlib import Path
from otdf_sdk_mgr.config import (
+ GO_MODULE_PATH_PLATFORM,
LTS_VERSIONS,
get_sdk_dir,
get_sdk_dirs,
+ go_module_path,
)
from otdf_sdk_mgr.checkout import checkout_sdk_branch
from otdf_sdk_mgr.registry import list_go_versions, list_java_github_releases, list_js_versions
@@ -24,33 +26,48 @@ class InstallError(Exception):
"""Raised when SDK installation fails."""
-def install_go_release(version: str, dist_dir: Path) -> None:
+def install_go_release(version: str, dist_dir: Path, source: str | None = None) -> None:
"""Install a Go CLI release by writing a .version file.
The cli.sh and otdfctl.sh wrappers read .version and use
- `go run github.com/opentdf/otdfctl@{version}` instead of a local binary.
+ `go run @{version}` instead of a local binary.
+ The .version file contains `module-path@version`
+ (e.g., `github.com/opentdf/otdfctl@v0.24.0`).
+
+ Args:
+ version: Version string (e.g., "v0.24.0" or "otdfctl/v0.24.0").
+ dist_dir: Target distribution directory.
+ source: "platform" to use the platform monorepo module path,
+ None or "standalone" for standalone.
"""
go_dir = get_sdk_dir() / "go"
dist_dir.mkdir(parents=True, exist_ok=True)
+ # Strip tag infix (e.g., "otdfctl/v0.24.0" → "v0.24.0")
+ if "/" in version:
+ version = version.rsplit("/", 1)[-1]
tag = normalize_version(version)
- (dist_dir / ".version").write_text(f"{tag}\n")
+ module = go_module_path(source)
+ (dist_dir / ".version").write_text(f"{module}@{tag}\n")
shutil.copy(go_dir / "cli.sh", dist_dir / "cli.sh")
shutil.copy(go_dir / "otdfctl.sh", dist_dir / "otdfctl.sh")
shutil.copy(go_dir / "opentdfctl.yaml", dist_dir / "opentdfctl.yaml")
- print(f" Pre-warming Go cache for otdfctl@{tag}...")
+ print(f" Pre-warming Go cache for {module}@{tag}...")
result = subprocess.run(
- ["go", "install", f"github.com/opentdf/otdfctl@{tag}"],
+ ["go", "install", f"{module}@{tag}"],
capture_output=True,
text=True,
)
if result.returncode != 0:
- print(
- f" Warning: go install pre-warm failed (will retry at runtime): {result.stderr.strip()}"
- )
+ msg = f"go install pre-warm failed: {result.stderr.strip()}"
+ if module == GO_MODULE_PATH_PLATFORM:
+ raise InstallError(
+ f"{msg}\nThe platform module path {module}@{tag} may not be published yet."
+ )
+ print(f" Warning: {msg} (will retry at runtime)")
print(f" Go release {tag} installed to {dist_dir}")
-def install_js_release(version: str, dist_dir: Path) -> None:
+def install_js_release(version: str, dist_dir: Path, **_kwargs: object) -> None:
"""Install a JS CLI release from npm registry."""
js_dir = get_sdk_dir() / "js"
dist_dir.mkdir(parents=True, exist_ok=True)
@@ -65,7 +82,7 @@ def install_js_release(version: str, dist_dir: Path) -> None:
print(f" JS release {v} installed to {dist_dir}")
-def install_java_release(version: str, dist_dir: Path) -> None:
+def install_java_release(version: str, dist_dir: Path, **_kwargs: object) -> None:
"""Install a Java CLI release by downloading cmdline.jar from GitHub Releases.
Raises InstallError if the artifact is not available or download fails,
@@ -133,13 +150,15 @@ def install_java_release(version: str, dist_dir: Path) -> None:
}
-def install_release(sdk: str, version: str, dist_name: str | None = None) -> Path:
+def install_release(sdk: str, version: str, dist_name: str | None = None, **kwargs: object) -> Path:
"""Install a released version of an SDK CLI.
Args:
sdk: One of "go", "js", "java"
version: Version string (e.g., "v0.24.0" or "0.24.0")
dist_name: Override the dist directory name (defaults to normalized version)
+ **kwargs: Extra arguments forwarded to the SDK installer
+ (e.g., source="platform" for Go).
Returns:
Path to the created dist directory
@@ -157,7 +176,7 @@ def install_release(sdk: str, version: str, dist_name: str | None = None) -> Pat
print(f" Dist directory already exists: {dist_dir} (skipping)")
return dist_dir
- INSTALLERS[sdk](version, dist_dir)
+ INSTALLERS[sdk](version, dist_dir, **kwargs)
return dist_dir
@@ -224,7 +243,9 @@ def cmd_release(specs: list[str]) -> None:
install_release(sdk, version)
-def cmd_install(sdk: str, version: str, dist_name: str | None = None) -> None:
+def cmd_install(
+ sdk: str, version: str, dist_name: str | None = None, source: str | None = None
+) -> None:
"""Install a single SDK version (used by CI action)."""
print(f"Installing {sdk} {version}...")
- install_release(sdk, version, dist_name=dist_name)
+ install_release(sdk, version, dist_name=dist_name, source=source)
diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/registry.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/registry.py
index 8f8dd34e5..fcd4f78c8 100644
--- a/otdf-sdk-mgr/src/otdf_sdk_mgr/registry.py
+++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/registry.py
@@ -12,11 +12,13 @@
from typing import Any
from otdf_sdk_mgr.config import (
- GO_INSTALL_PREFIX,
+ GO_INSTALL_PREFIX_PLATFORM,
+ GO_INSTALL_PREFIX_STANDALONE,
SDK_GITHUB_REPOS,
SDK_GIT_URLS,
SDK_MAVEN_COORDS,
SDK_NPM_PACKAGES,
+ SDK_TAG_INFIXES_PLATFORM_GO,
)
from otdf_sdk_mgr.semver import is_stable, parse_semver, semver_sort_key
@@ -68,12 +70,15 @@ def fetch_text(url: str) -> str:
def list_go_versions() -> list[dict[str, Any]]:
- """List Go SDK versions from git tags."""
+ """List Go SDK versions from git tags in both standalone and platform repos."""
+ import git.exc
from git import Git
repo = Git()
+ seen: dict[str, dict[str, Any]] = {}
+
+ # Standalone repo (opentdf/otdfctl): tags like v0.24.0
raw = repo.ls_remote(SDK_GIT_URLS["go"], tags=True)
- results = []
for line in raw.strip().split("\n"):
if not line:
continue
@@ -83,16 +88,52 @@ def list_go_versions() -> list[dict[str, Any]]:
tag = ref.removeprefix("refs/tags/")
if not parse_semver(tag):
continue
- version = tag
- results.append(
- {
+ seen[tag] = {
+ "sdk": "go",
+ "version": tag,
+ "source": "git-tag",
+ "install_method": f"{GO_INSTALL_PREFIX_STANDALONE}@{tag}",
+ "stable": is_stable(tag),
+ }
+
+ # Platform repo (opentdf/platform): tags like otdfctl/v0.X.Y
+ infix = SDK_TAG_INFIXES_PLATFORM_GO
+ try:
+ raw = repo.ls_remote(SDK_GIT_URLS["platform"], tags=True)
+ for line in raw.strip().split("\n"):
+ if not line:
+ continue
+ _, ref = line.split("\t", 1)
+ if ref.endswith("^{}"):
+ continue
+ tag = ref.removeprefix("refs/tags/")
+ if not tag.startswith(f"{infix}/"):
+ continue
+ version = tag.removeprefix(f"{infix}/")
+ if not parse_semver(version):
+ continue
+ # Platform entries take precedence (canonical location post-migration);
+ # if the same version exists in both repos, the platform entry
+ # silently overwrites the standalone one.
+ if version in seen:
+ print(
+ f"Note: version {version} found in both standalone and platform repos; using platform source.",
+ file=sys.stderr,
+ )
+ seen[version] = {
"sdk": "go",
"version": version,
- "source": "git-tag",
- "install_method": f"{GO_INSTALL_PREFIX}@{version}",
+ "source": "platform-git-tag",
+ "install_method": f"{GO_INSTALL_PREFIX_PLATFORM}@{tag}",
"stable": is_stable(version),
}
+ except git.exc.GitCommandError as e:
+ print(
+ f"::warning::Failed to query platform repo for go tags: {e}",
+ file=sys.stderr,
)
+
+ results = list(seen.values())
results.sort(key=lambda r: semver_sort_key(r["version"]))
return results
diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/resolve.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/resolve.py
index 6e4cd7ca1..e3f264d74 100644
--- a/otdf-sdk-mgr/src/otdf_sdk_mgr/resolve.py
+++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/resolve.py
@@ -12,6 +12,8 @@
LTS_VERSIONS,
SDK_GIT_URLS,
SDK_NPM_PACKAGES,
+ go_git_url,
+ go_tag_infix,
)
@@ -23,6 +25,7 @@ class ResolveSuccess(TypedDict):
pr: NotRequired[str]
release: NotRequired[str]
sha: str
+ source: NotRequired[str]
tag: str
@@ -111,78 +114,119 @@ def lookup_additional_options(sdk: str, version: str) -> str | None:
return None
-def resolve(sdk: str, version: str, infix: str | None) -> ResolveResult:
- """Resolve a version spec to a concrete SHA and tag."""
+def resolve(
+ sdk: str,
+ version: str,
+ infix: str | None,
+ go_source: str | None = None,
+) -> ResolveResult:
+ """Resolve a version spec to a concrete SHA and tag.
+
+ Args:
+ sdk: SDK identifier (go, js, java, platform).
+ version: Version spec (main, SHA, tag, latest, lts, etc.).
+ infix: Tag infix for monorepo tag resolution (e.g. "sdk" for JS).
+ go_source: For sdk=="go", override the git URL and infix.
+ "platform" resolves against the platform monorepo (otdfctl/ prefix tags).
+ None or "standalone" uses the standalone otdfctl repo (default).
+ """
+ _go_platform = sdk == "go" and go_source == "platform"
+
+ def _annotate(result: ResolveResult) -> ResolveResult:
+ """Add source field to successful results when resolving go from platform."""
+ if _go_platform and is_resolve_success(result):
+ result["source"] = "platform"
+ return result
+
try:
- sdk_url = SDK_GIT_URLS[sdk]
+ if _go_platform:
+ sdk_url = go_git_url("platform")
+ infix = go_tag_infix("platform")
+ else:
+ sdk_url = SDK_GIT_URLS[sdk]
repo = Git()
if version == "main" or version == "refs/heads/main":
all_heads = [r.split("\t") for r in repo.ls_remote(sdk_url, heads=True).split("\n")]
- sha, _ = [tag for tag in all_heads if "refs/heads/main" in tag][0]
- return {
- "sdk": sdk,
- "alias": version,
- "head": True,
- "sha": sha,
- "tag": "main",
- }
+ try:
+ sha, _ = next(tag for tag in all_heads if "refs/heads/main" in tag)
+ except StopIteration:
+ return {"sdk": sdk, "alias": version, "err": f"main branch not found in {sdk_url}"}
+ return _annotate(
+ {
+ "sdk": sdk,
+ "alias": version,
+ "head": True,
+ "sha": sha,
+ "tag": "main",
+ }
+ )
if re.match(SHA_REGEX, version):
ls_remote = [r.split("\t") for r in repo.ls_remote(sdk_url).split("\n")]
matching_tags = [(sha, tag) for (sha, tag) in ls_remote if sha.startswith(version)]
if not matching_tags:
- return {
- "sdk": sdk,
- "alias": version[:7],
- "sha": version,
- "tag": version,
- }
+ return _annotate(
+ {
+ "sdk": sdk,
+ "alias": version[:7],
+ "sha": version,
+ "tag": version,
+ }
+ )
if len(matching_tags) > 1:
for sha, tag in matching_tags:
if tag.startswith("refs/pull/"):
pr_number = tag.split("/")[2]
- return {
- "sdk": sdk,
- "alias": version,
- "head": True,
- "sha": sha,
- "tag": f"pull-{pr_number}",
- }
+ return _annotate(
+ {
+ "sdk": sdk,
+ "alias": version,
+ "head": True,
+ "sha": sha,
+ "tag": f"pull-{pr_number}",
+ }
+ )
for sha, tag in matching_tags:
mq_match = re.match(MERGE_QUEUE_REGEX, tag)
if mq_match:
to_branch = mq_match.group("branch")
pr_number = mq_match.group("pr_number")
if to_branch and pr_number:
- return {
+ return _annotate(
+ {
+ "sdk": sdk,
+ "alias": version,
+ "head": True,
+ "pr": pr_number,
+ "sha": sha,
+ "tag": f"mq-{to_branch}-{pr_number}",
+ }
+ )
+ suffix = tag.split("refs/heads/gh-readonly-queue/")[-1]
+ flattag = "mq--" + suffix.replace("/", "--")
+ return _annotate(
+ {
"sdk": sdk,
"alias": version,
"head": True,
- "pr": pr_number,
"sha": sha,
- "tag": f"mq-{to_branch}-{pr_number}",
+ "tag": flattag,
}
- suffix = tag.split("refs/heads/gh-readonly-queue/")[-1]
- flattag = "mq--" + suffix.replace("/", "--")
- return {
- "sdk": sdk,
- "alias": version,
- "head": True,
- "sha": sha,
- "tag": flattag,
- }
+ )
head = False
if tag.startswith("refs/heads/"):
head = True
tag = tag.split("refs/heads/")[-1]
flattag = tag.replace("/", "--")
- return {
- "sdk": sdk,
- "alias": version,
- "head": head,
- "sha": sha,
- "tag": flattag,
- }
+ return _annotate(
+ {
+ "sdk": sdk,
+ "alias": version,
+ "head": head,
+ "sha": sha,
+ "tag": flattag,
+ }
+ )
return {
"sdk": sdk,
@@ -197,12 +241,14 @@ def resolve(sdk: str, version: str, infix: str | None) -> ResolveResult:
tag = tag.split("refs/tags/")[-1]
if infix:
tag = tag.split(f"{infix}/")[-1]
- return {
- "sdk": sdk,
- "alias": version,
- "sha": sha,
- "tag": tag,
- }
+ return _annotate(
+ {
+ "sdk": sdk,
+ "alias": version,
+ "sha": sha,
+ "tag": tag,
+ }
+ )
if version.startswith("refs/pull/"):
merge_heads = [
@@ -216,14 +262,16 @@ def resolve(sdk: str, version: str, infix: str | None) -> ResolveResult:
"err": f"pull request {pr_number} not found in {sdk_url}",
}
sha, _ = merge_heads[0]
- return {
- "sdk": sdk,
- "alias": version,
- "head": True,
- "pr": pr_number,
- "sha": sha,
- "tag": f"pull-{pr_number}",
- }
+ return _annotate(
+ {
+ "sdk": sdk,
+ "alias": version,
+ "head": True,
+ "pr": pr_number,
+ "sha": sha,
+ "tag": f"pull-{pr_number}",
+ }
+ )
remote_tags = [r.split("\t") for r in repo.ls_remote(sdk_url).split("\n")]
all_listed_tags = [
@@ -238,13 +286,15 @@ def resolve(sdk: str, version: str, infix: str | None) -> ResolveResult:
if version in all_listed_branches:
sha = all_listed_branches[version]
- return {
- "sdk": sdk,
- "alias": version,
- "head": True,
- "sha": sha,
- "tag": version,
- }
+ return _annotate(
+ {
+ "sdk": sdk,
+ "alias": version,
+ "head": True,
+ "sha": sha,
+ "tag": version,
+ }
+ )
if infix and version.startswith(f"{infix}/"):
version = version.split(f"{infix}/")[-1]
@@ -288,13 +338,15 @@ def resolve(sdk: str, version: str, infix: str | None) -> ResolveResult:
if not matching_tags:
# No versions with CLI found, fall back to building latest from source
sha, tag = stable_tags[-1]
- return {
- "sdk": sdk,
- "alias": alias,
- "head": True, # Mark as head to trigger source checkout
- "sha": sha,
- "tag": tag,
- }
+ return _annotate(
+ {
+ "sdk": sdk,
+ "alias": alias,
+ "head": True, # Mark as head to trigger source checkout
+ "sha": sha,
+ "tag": tag,
+ }
+ )
else:
matching_tags = stable_tags[-1:]
else:
@@ -319,13 +371,15 @@ def resolve(sdk: str, version: str, infix: str | None) -> ResolveResult:
release = tag
if infix:
release = f"{infix}/{release}"
- return {
- "sdk": sdk,
- "alias": alias,
- "release": release,
- "sha": sha,
- "tag": tag,
- }
+ return _annotate(
+ {
+ "sdk": sdk,
+ "alias": alias,
+ "release": release,
+ "sha": sha,
+ "tag": tag,
+ }
+ )
except Exception as e:
return {
"sdk": sdk,
diff --git a/xtest/sdk/go/cli.sh b/xtest/sdk/go/cli.sh
index 172aa5b50..f97b20c1a 100755
--- a/xtest/sdk/go/cli.sh
+++ b/xtest/sdk/go/cli.sh
@@ -23,8 +23,14 @@ SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
cmd=("$SCRIPT_DIR"/otdfctl)
if [ ! -f "$SCRIPT_DIR"/otdfctl ]; then
if [ -f "$SCRIPT_DIR/.version" ]; then
- OTDFCTL_VERSION=$(tr -d '[:space:]' <"$SCRIPT_DIR/.version")
- cmd=(go run "github.com/opentdf/otdfctl@${OTDFCTL_VERSION}")
+ VERSION_SPEC=$(tr -d '[:space:]' <"$SCRIPT_DIR/.version")
+ if [[ "$VERSION_SPEC" == *@* ]]; then
+ # New format: module-path@version
+ cmd=(go run "$VERSION_SPEC")
+ else
+ # Legacy format: bare version tag, default to standalone module
+ cmd=(go run "github.com/opentdf/otdfctl@${VERSION_SPEC}")
+ fi
else
cmd=(go run "github.com/opentdf/otdfctl@latest")
fi
diff --git a/xtest/sdk/go/otdfctl.sh b/xtest/sdk/go/otdfctl.sh
index 17fbb0c84..9ba55f054 100755
--- a/xtest/sdk/go/otdfctl.sh
+++ b/xtest/sdk/go/otdfctl.sh
@@ -18,8 +18,14 @@ source "$XTEST_DIR/test.env"
cmd=("$SCRIPT_DIR"/otdfctl)
if [ ! -f "$SCRIPT_DIR"/otdfctl ]; then
if [ -f "$SCRIPT_DIR/.version" ]; then
- OTDFCTL_VERSION=$(tr -d '[:space:]' <"$SCRIPT_DIR/.version")
- cmd=(go run "github.com/opentdf/otdfctl@${OTDFCTL_VERSION}")
+ VERSION_SPEC=$(tr -d '[:space:]' <"$SCRIPT_DIR/.version")
+ if [[ "$VERSION_SPEC" == *@* ]]; then
+ # New format: module-path@version
+ cmd=(go run "$VERSION_SPEC")
+ else
+ # Legacy format: bare version tag, default to standalone module
+ cmd=(go run "github.com/opentdf/otdfctl@${VERSION_SPEC}")
+ fi
else
cmd=(go run "github.com/opentdf/otdfctl@latest")
fi
diff --git a/xtest/setup-cli-tool/action.yaml b/xtest/setup-cli-tool/action.yaml
index 9e110ef4f..5bf6eff62 100644
--- a/xtest/setup-cli-tool/action.yaml
+++ b/xtest/setup-cli-tool/action.yaml
@@ -2,12 +2,23 @@ name: configure-sdks
description: Check out and build one or more SDK and its CLI tool for use by xtest
inputs:
path:
- description: The path to checkout the the SDK source code to; concatenated with branch or tag name.
+ description: The path to check out the SDK source code to; concatenated with branch or tag name.
sdk:
description: The SDK to configure; one of go, java, js
version-info:
description: JSON-encoded output of otdf-sdk-mgr versions resolve
required: true
+ platform-otdfctl-dir:
+ description: >-
+ Absolute path to platform's otdfctl/ directory. When set and sdk is "go",
+ head versions whose SHA matches platform-otdfctl-sha are symlinked from
+ here instead of checked out separately. Used in both explicit platform
+ mode (source: "platform" in resolved version) and auto-detect mode.
+ platform-otdfctl-sha:
+ description: >-
+ SHA of the commit that platform-otdfctl-dir was checked out at.
+ Used to decide which Go head version (if any) can reuse the existing
+ platform checkout vs needing a fresh one.
outputs:
version-a:
description: "Object containing tag, sha, and name of a version checked out"
@@ -28,24 +39,27 @@ outputs:
runs:
using: composite
steps:
- - name: identify repo url
+ - name: identify repo urls
shell: bash
run: |
- case "${{ inputs.sdk }}" in
+ case "$INPUT_SDK" in
"go")
- echo "sdk_repo=opentdf/otdfctl" >> $GITHUB_ENV
+ echo "STANDALONE_REPO=opentdf/otdfctl" >> "$GITHUB_ENV"
;;
"java")
- echo "sdk_repo=opentdf/java-sdk" >> $GITHUB_ENV
+ echo "STANDALONE_REPO=opentdf/java-sdk" >> "$GITHUB_ENV"
;;
"js")
- echo "sdk_repo=opentdf/web-sdk" >> $GITHUB_ENV
+ echo "STANDALONE_REPO=opentdf/web-sdk" >> "$GITHUB_ENV"
;;
*)
- echo "Invalid SDK specified: ${{ inputs.sdk }}" >> $GITHUB_STEP_SUMMARY
+ echo "Invalid SDK specified: $INPUT_SDK" >> "$GITHUB_STEP_SUMMARY"
exit 1
;;
esac
+ echo "PLATFORM_REPO=opentdf/platform" >> "$GITHUB_ENV"
+ env:
+ INPUT_SDK: ${{ inputs.sdk }}
- name: resolve versions
id: resolve
@@ -88,9 +102,12 @@ runs:
echo "Installing ${{ inputs.sdk }} $tag from registry (release: $release)"
# Sanitize tag for use as an env var name (replace non-alphanumeric/underscore with _)
tag_sanitized="${tag//[^a-zA-Z0-9_]/_}"
+ source=$(echo "$row" | jq -r '.source // empty')
+ source_args=()
+ [[ -n "$source" ]] && source_args=(--source "$source")
if ! uv run --project "$SDK_MGR_DIR" otdf-sdk-mgr install artifact \
--sdk "${{ inputs.sdk }}" --version "$release" \
- --dist-name "$tag"; then
+ --dist-name "$tag" "${source_args[@]}"; then
echo " Warning: Artifact installation failed for ${{ inputs.sdk }} $tag"
echo " Will fall back to building from source"
echo "BUILD_FROM_SOURCE_${tag_sanitized}=true" >> "$GITHUB_ENV"
@@ -104,9 +121,10 @@ runs:
id: check-source
shell: bash
run: |
- # Determine which version slots need source checkout.
- # A slot needs checkout if it is a head version OR if artifact install failed
- # (BUILD_FROM_SOURCE_ was set in the previous step).
+ # Determine which version slots need source checkout and from which repo.
+ # A slot needs checkout if it is a head version OR if artifact install failed.
+ # Platform-source versions may reuse the existing platform-otdfctl-dir
+ # (when their SHA matches) or need a fresh opentdf/platform checkout.
for slot in a b c d; do
case "$slot" in
a) row=$(echo "${version_info}" | jq -rc '.[0] // empty') ;;
@@ -115,65 +133,200 @@ runs:
d) row=$(echo "${version_info}" | jq -rc '.[3] // empty') ;;
esac
if [[ -z "$row" ]]; then
- echo "needs-source-${slot}=false" >> "$GITHUB_OUTPUT"
+ echo "needs-checkout-${slot}=false" >> "$GITHUB_OUTPUT"
+ echo "is-platform-${slot}=false" >> "$GITHUB_OUTPUT"
+ echo "use-existing-platform-dir-${slot}=false" >> "$GITHUB_OUTPUT"
+ echo "checkout-repo-${slot}=" >> "$GITHUB_OUTPUT"
+ echo "checkout-path-${slot}=" >> "$GITHUB_OUTPUT"
continue
fi
+
tag=$(echo "$row" | jq -r '.tag')
head=$(echo "$row" | jq -r '.head // false')
+ sha=$(echo "$row" | jq -r '.sha')
+ source=$(echo "$row" | jq -r '.source // empty')
tag_sanitized="${tag//[^a-zA-Z0-9_]/_}"
build_from_source_var="BUILD_FROM_SOURCE_${tag_sanitized}"
+ needs_source=false
if [[ "$head" == "true" || "${!build_from_source_var}" == "true" ]]; then
- echo "needs-source-${slot}=true" >> "$GITHUB_OUTPUT"
- else
- echo "needs-source-${slot}=false" >> "$GITHUB_OUTPUT"
+ needs_source=true
+ fi
+
+ is_platform=false
+ use_existing=false
+ checkout_repo="$STANDALONE_REPO"
+ checkout_path="${INPUT_PATH}/${INPUT_SDK}/src/${tag}"
+
+ if [[ "$source" == "platform" ]]; then
+ # Explicit platform mode: resolver tagged this version as from
+ # opentdf/platform. Use per-version SHA to decide checkout strategy.
+ is_platform=true
+ if [[ "$needs_source" == "true" && -n "$PLATFORM_OTDFCTL_DIR" \
+ && -n "$PLATFORM_OTDFCTL_SHA" && "$sha" == "$PLATFORM_OTDFCTL_SHA" ]]; then
+ # SHA matches existing platform checkout — reuse via symlink
+ use_existing=true
+ needs_source=false
+ elif [[ "$needs_source" == "true" ]]; then
+ # Different SHA — need a fresh platform checkout
+ checkout_repo="$PLATFORM_REPO"
+ checkout_path="${INPUT_PATH}/${INPUT_SDK}/platform-src/${tag}"
+ fi
+ elif [[ "$INPUT_SDK" == "go" && -n "$PLATFORM_OTDFCTL_DIR" && "$needs_source" == "true" ]]; then
+ # Auto-detect fallback: resolver used standalone repo but the
+ # test job detected otdfctl in the platform checkout.
+ # NOTE: SHA comparison across repos is not meaningful (the standalone
+ # repo and platform repo have different commit histories), so we
+ # cannot safely reuse the platform checkout here. Fall through to
+ # a standalone checkout. To use the platform source, set
+ # otdfctl-source=platform explicitly.
+ echo "::notice::Go version ${tag} resolved from standalone repo; platform checkout available but cannot auto-reuse (different repo). Set otdfctl-source=platform to use the platform source."
fi
+
+ echo "needs-checkout-${slot}=${needs_source}" >> "$GITHUB_OUTPUT"
+ echo "is-platform-${slot}=${is_platform}" >> "$GITHUB_OUTPUT"
+ echo "use-existing-platform-dir-${slot}=${use_existing}" >> "$GITHUB_OUTPUT"
+ echo "checkout-repo-${slot}=${checkout_repo}" >> "$GITHUB_OUTPUT"
+ echo "checkout-path-${slot}=${checkout_path}" >> "$GITHUB_OUTPUT"
done
env:
version_info: ${{ inputs.version-info }}
+ INPUT_PATH: ${{ inputs.path }}
+ INPUT_SDK: ${{ inputs.sdk }}
+ PLATFORM_OTDFCTL_DIR: ${{ inputs.platform-otdfctl-dir }}
+ PLATFORM_OTDFCTL_SHA: ${{ inputs.platform-otdfctl-sha }}
+
+ - name: symlink existing platform checkout
+ shell: bash
+ run: |
+ # For versions that can reuse the already-checked-out platform dir,
+ # symlink platform-otdfctl-dir into sdk/go/src/{tag}.
+ for slot in a b c d; do
+ case "$slot" in
+ a) version_json="$VERSION_A" ; use_existing="$USE_EXISTING_A" ;;
+ b) version_json="$VERSION_B" ; use_existing="$USE_EXISTING_B" ;;
+ c) version_json="$VERSION_C" ; use_existing="$USE_EXISTING_C" ;;
+ d) version_json="$VERSION_D" ; use_existing="$USE_EXISTING_D" ;;
+ esac
+ if [[ -z "$version_json" || "$use_existing" != "true" ]]; then
+ continue
+ fi
+ tag=$(echo "$version_json" | jq -r '.tag')
+ src_dir="${INPUT_PATH}/${INPUT_SDK}/src/${tag}"
+ echo "Symlinking existing platform otdfctl to ${src_dir}"
+ mkdir -p "$(dirname "$src_dir")"
+ ln -sfn "$PLATFORM_OTDFCTL_DIR" "$src_dir"
+ if [ ! -e "$src_dir" ]; then
+ echo "::error::Symlink target does not exist: $PLATFORM_OTDFCTL_DIR"
+ exit 1
+ fi
+ done
+ env:
+ PLATFORM_OTDFCTL_DIR: ${{ inputs.platform-otdfctl-dir }}
+ INPUT_PATH: ${{ inputs.path }}
+ INPUT_SDK: ${{ inputs.sdk }}
+ VERSION_A: ${{ steps.resolve.outputs.version-a }}
+ VERSION_B: ${{ steps.resolve.outputs.version-b }}
+ VERSION_C: ${{ steps.resolve.outputs.version-c }}
+ VERSION_D: ${{ steps.resolve.outputs.version-d }}
+ USE_EXISTING_A: ${{ steps.check-source.outputs.use-existing-platform-dir-a }}
+ USE_EXISTING_B: ${{ steps.check-source.outputs.use-existing-platform-dir-b }}
+ USE_EXISTING_C: ${{ steps.check-source.outputs.use-existing-platform-dir-c }}
+ USE_EXISTING_D: ${{ steps.check-source.outputs.use-existing-platform-dir-d }}
- name: checkout version a
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
if: >-
steps.resolve.outputs.version-a != ''
- && steps.check-source.outputs.needs-source-a == 'true'
+ && steps.check-source.outputs.needs-checkout-a == 'true'
with:
- path: ${{ inputs.path }}/${{ inputs.sdk }}/src/${{ fromJson(steps.resolve.outputs.version-a).tag }}
+ path: ${{ steps.check-source.outputs.checkout-path-a }}
persist-credentials: false
- ref: ${{ fromJson(steps.resolve.outputs.version-a).sha }}
- repository: ${{ env.sdk_repo }}
+ ref: ${{ fromJson(steps.resolve.outputs.version-a || '{"sha":""}').sha }}
+ repository: ${{ steps.check-source.outputs.checkout-repo-a }}
- name: checkout version b
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
if: >-
steps.resolve.outputs.version-b != ''
- && steps.check-source.outputs.needs-source-b == 'true'
+ && steps.check-source.outputs.needs-checkout-b == 'true'
with:
- path: ${{ inputs.path }}/${{ inputs.sdk }}/src/${{ fromJson(steps.resolve.outputs.version-b).tag }}
+ path: ${{ steps.check-source.outputs.checkout-path-b }}
persist-credentials: false
- ref: ${{ fromJson(steps.resolve.outputs.version-b).sha }}
- repository: ${{ env.sdk_repo }}
+ ref: ${{ fromJson(steps.resolve.outputs.version-b || '{"sha":""}').sha }}
+ repository: ${{ steps.check-source.outputs.checkout-repo-b }}
- name: checkout version c
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
if: >-
steps.resolve.outputs.version-c != ''
- && steps.check-source.outputs.needs-source-c == 'true'
+ && steps.check-source.outputs.needs-checkout-c == 'true'
with:
- path: ${{ inputs.path }}/${{ inputs.sdk }}/src/${{ fromJson(steps.resolve.outputs.version-c).tag }}
+ path: ${{ steps.check-source.outputs.checkout-path-c }}
persist-credentials: false
- ref: ${{ fromJson(steps.resolve.outputs.version-c).sha }}
- repository: ${{ env.sdk_repo }}
+ ref: ${{ fromJson(steps.resolve.outputs.version-c || '{"sha":""}').sha }}
+ repository: ${{ steps.check-source.outputs.checkout-repo-c }}
- name: checkout version d
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
if: >-
steps.resolve.outputs.version-d != ''
- && steps.check-source.outputs.needs-source-d == 'true'
+ && steps.check-source.outputs.needs-checkout-d == 'true'
with:
- path: ${{ inputs.path }}/${{ inputs.sdk }}/src/${{ fromJson(steps.resolve.outputs.version-d).tag }}
+ path: ${{ steps.check-source.outputs.checkout-path-d }}
persist-credentials: false
- ref: ${{ fromJson(steps.resolve.outputs.version-d).sha }}
- repository: ${{ env.sdk_repo }}
+ ref: ${{ fromJson(steps.resolve.outputs.version-d || '{"sha":""}').sha }}
+ repository: ${{ steps.check-source.outputs.checkout-repo-d }}
+
+ - name: symlink freshly checked-out platform sources
+ shell: bash
+ run: |
+ # For platform-source versions that were checked out (not reusing the
+ # existing dir), symlink {platform-src}/{tag}/otdfctl → src/{tag} so
+ # the Makefile discovers them.
+ for slot in a b c d; do
+ case "$slot" in
+ a) is_platform="$IS_PLATFORM_A" ; needs_checkout="$NEEDS_CHECKOUT_A"
+ checkout_path="$CHECKOUT_PATH_A" ; version_json="$VERSION_A" ;;
+ b) is_platform="$IS_PLATFORM_B" ; needs_checkout="$NEEDS_CHECKOUT_B"
+ checkout_path="$CHECKOUT_PATH_B" ; version_json="$VERSION_B" ;;
+ c) is_platform="$IS_PLATFORM_C" ; needs_checkout="$NEEDS_CHECKOUT_C"
+ checkout_path="$CHECKOUT_PATH_C" ; version_json="$VERSION_C" ;;
+ d) is_platform="$IS_PLATFORM_D" ; needs_checkout="$NEEDS_CHECKOUT_D"
+ checkout_path="$CHECKOUT_PATH_D" ; version_json="$VERSION_D" ;;
+ esac
+ if [[ "$is_platform" != "true" || "$needs_checkout" != "true" || -z "$version_json" ]]; then
+ continue
+ fi
+ tag=$(echo "$version_json" | jq -r '.tag')
+ src_dir="${INPUT_PATH}/${INPUT_SDK}/src/${tag}"
+ otdfctl_dir="${checkout_path}/otdfctl"
+ echo "Symlinking freshly checked-out platform otdfctl ${otdfctl_dir} → ${src_dir}"
+ mkdir -p "$(dirname "$src_dir")"
+ ln -sfn "$otdfctl_dir" "$src_dir"
+ if [ ! -e "$src_dir" ]; then
+ echo "::error::Symlink target does not exist: ${otdfctl_dir} (does the platform repo contain an otdfctl/ directory?)"
+ exit 1
+ fi
+ done
+ env:
+ INPUT_PATH: ${{ inputs.path }}
+ INPUT_SDK: ${{ inputs.sdk }}
+ VERSION_A: ${{ steps.resolve.outputs.version-a }}
+ VERSION_B: ${{ steps.resolve.outputs.version-b }}
+ VERSION_C: ${{ steps.resolve.outputs.version-c }}
+ VERSION_D: ${{ steps.resolve.outputs.version-d }}
+ IS_PLATFORM_A: ${{ steps.check-source.outputs.is-platform-a }}
+ IS_PLATFORM_B: ${{ steps.check-source.outputs.is-platform-b }}
+ IS_PLATFORM_C: ${{ steps.check-source.outputs.is-platform-c }}
+ IS_PLATFORM_D: ${{ steps.check-source.outputs.is-platform-d }}
+ NEEDS_CHECKOUT_A: ${{ steps.check-source.outputs.needs-checkout-a }}
+ NEEDS_CHECKOUT_B: ${{ steps.check-source.outputs.needs-checkout-b }}
+ NEEDS_CHECKOUT_C: ${{ steps.check-source.outputs.needs-checkout-c }}
+ NEEDS_CHECKOUT_D: ${{ steps.check-source.outputs.needs-checkout-d }}
+ CHECKOUT_PATH_A: ${{ steps.check-source.outputs.checkout-path-a }}
+ CHECKOUT_PATH_B: ${{ steps.check-source.outputs.checkout-path-b }}
+ CHECKOUT_PATH_C: ${{ steps.check-source.outputs.checkout-path-c }}
+ CHECKOUT_PATH_D: ${{ steps.check-source.outputs.checkout-path-d }}
- name: post checkout cleanups
if: inputs.sdk == 'java'
diff --git a/xtest/setup-kas-instances/action.yaml b/xtest/setup-kas-instances/action.yaml
new file mode 100644
index 000000000..f5e420172
--- /dev/null
+++ b/xtest/setup-kas-instances/action.yaml
@@ -0,0 +1,86 @@
+name: setup-kas-instances
+description: >-
+ Start additional KAS instances for multi-KAS / ABAC tests.
+ Uses otdf-local ci start-kas to start all 6 KAS instances
+ (alpha, beta, gamma, delta, km1, km2) and expose their log file paths.
+
+inputs:
+ platform-working-dir:
+ description: Path to the platform checkout directory
+ required: true
+ root-key:
+ description: Root key for KAS instances
+ required: true
+ key-management-supported:
+ description: Enable key management on km1/km2 instances (true/false)
+ required: false
+ default: "false"
+ ec-tdf-enabled:
+ description: Enable EC TDF support
+ required: false
+ default: "true"
+ log-type:
+ description: Log format type
+ required: false
+ default: "json"
+ tests-path:
+ description: Path to the tests repo checkout
+ required: false
+ default: "otdftests"
+
+outputs:
+ kas-alpha-log-file:
+ description: Path to KAS alpha log file
+ value: ${{ steps.start-kas.outputs.kas-alpha-log-file }}
+ kas-beta-log-file:
+ description: Path to KAS beta log file
+ value: ${{ steps.start-kas.outputs.kas-beta-log-file }}
+ kas-gamma-log-file:
+ description: Path to KAS gamma log file
+ value: ${{ steps.start-kas.outputs.kas-gamma-log-file }}
+ kas-delta-log-file:
+ description: Path to KAS delta log file
+ value: ${{ steps.start-kas.outputs.kas-delta-log-file }}
+ kas-km1-log-file:
+ description: Path to KAS km1 log file
+ value: ${{ steps.start-kas.outputs.kas-km1-log-file }}
+ kas-km2-log-file:
+ description: Path to KAS km2 log file
+ value: ${{ steps.start-kas.outputs.kas-km2-log-file }}
+
+runs:
+ using: composite
+ steps:
+ - name: Start KAS instances
+ id: start-kas
+ shell: bash
+ run: |
+ KM_FLAG=""
+ if [[ "$KEY_MANAGEMENT" == "true" ]]; then
+ KM_FLAG="--key-management"
+ else
+ KM_FLAG="--no-key-management"
+ fi
+
+ EC_FLAG=""
+ if [[ "$EC_TDF_ENABLED" == "true" ]]; then
+ EC_FLAG="--ec-tdf-enabled"
+ else
+ EC_FLAG="--no-ec-tdf"
+ fi
+
+ OTDF_LOCAL_DIR="$(cd "$TESTS_PATH" && pwd)/otdf-local"
+
+ uv run --project "$OTDF_LOCAL_DIR" otdf-local ci start-kas \
+ --platform-dir "$(pwd)/$PLATFORM_DIR" \
+ --root-key "$ROOT_KEY" \
+ $EC_FLAG \
+ $KM_FLAG \
+ --log-type "$LOG_TYPE"
+ env:
+ PLATFORM_DIR: ${{ inputs.platform-working-dir }}
+ ROOT_KEY: ${{ inputs.root-key }}
+ KEY_MANAGEMENT: ${{ inputs.key-management-supported }}
+ EC_TDF_ENABLED: ${{ inputs.ec-tdf-enabled }}
+ LOG_TYPE: ${{ inputs.log-type }}
+ TESTS_PATH: ${{ inputs.tests-path }}
diff --git a/xtest/setup-sdk-clients/action.yaml b/xtest/setup-sdk-clients/action.yaml
new file mode 100644
index 000000000..35be35b41
--- /dev/null
+++ b/xtest/setup-sdk-clients/action.yaml
@@ -0,0 +1,159 @@
+name: setup-sdk-clients
+description: >-
+ Configure, cache, patch, and build an SDK CLI for xtest. Wraps setup-cli-tool
+ and adds SDK-appropriate caching, go.mod/java .env fixups, and make builds.
+ Each invocation handles one SDK (go, java, or js).
+
+inputs:
+ sdk:
+ description: "SDK to set up: go, java, or js"
+ required: true
+ version-info:
+ description: JSON-encoded output of otdf-sdk-mgr versions resolve for this SDK
+ required: true
+ tests-path:
+ description: Path to the tests repo checkout
+ required: false
+ default: "otdftests"
+ platform-working-dir:
+ description: >-
+ Platform checkout directory. Used for go-fixup (bridging client go.mod
+ to server shared modules) and detecting platform-embedded otdfctl.
+ required: false
+ platform-heads:
+ description: JSON list of platform tags that are heads (from resolve-versions)
+ required: false
+ default: "[]"
+ platform-tag:
+ description: Current matrix platform-tag value
+ required: false
+ platform-tag-to-sha:
+ description: JSON object mapping platform tags to SHAs
+ required: false
+ default: "{}"
+ otdfctl-source:
+ description: "Resolved otdfctl source: platform or standalone"
+ required: false
+ default: "standalone"
+ otdfctl-dir:
+ description: Absolute path to platform's otdfctl directory
+ required: false
+ otdfctl-sha:
+ description: SHA of the platform otdfctl checkout
+ required: false
+ focus-sdk:
+ description: "SDK focus filter: all, go, java, or js"
+ required: false
+ default: "all"
+ buf-token:
+ description: BUF token for Java proto compilation
+ required: false
+ pat-opentdf:
+ description: PAT for buf HTTPS password (Java SDK build)
+ required: false
+
+outputs:
+ heads:
+ description: JSON list of head tags for this SDK
+ value: ${{ steps.configure.outputs.heads }}
+
+runs:
+ using: composite
+ steps:
+ # Step 1: Configure SDK via setup-cli-tool (checkout/install)
+ - name: Configure ${{ inputs.sdk }}
+ id: configure
+ uses: ./otdftests/xtest/setup-cli-tool
+ with:
+ path: ${{ inputs.tests-path }}/xtest/sdk
+ sdk: ${{ inputs.sdk }}
+ version-info: ${{ inputs.version-info }}
+ platform-otdfctl-dir: ${{ inputs.otdfctl-dir }}
+ platform-otdfctl-sha: ${{ inputs.otdfctl-sha }}
+
+ # Step 2: SDK-appropriate dependency caching
+ - name: Cache npm
+ if: inputs.sdk == 'js' && fromJson(steps.configure.outputs.heads)[0] != null
+ uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+ with:
+ path: ~/.npm
+ key: npm-${{ runner.os }}-${{ hashFiles(format('{0}/xtest/sdk/js/src/**/package-lock.json', inputs.tests-path)) }}
+ restore-keys: |
+ npm-${{ runner.os }}-
+
+ - name: Cache Go modules
+ if: inputs.sdk == 'go' && fromJson(steps.configure.outputs.heads)[0] != null
+ uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+ with:
+ path: |
+ ~/.cache/go-build
+ ~/go/pkg/mod
+ key: go-${{ runner.os }}-${{ hashFiles(format('{0}/xtest/sdk/go/src/*/go.sum', inputs.tests-path)) }}
+ restore-keys: |
+ go-${{ runner.os }}-
+
+ - name: Cache Maven repository
+ if: inputs.sdk == 'java' && fromJson(steps.configure.outputs.heads)[0] != null
+ uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+ with:
+ path: ~/.m2/repository
+ key: maven-${{ runner.os }}-${{ hashFiles(format('{0}/xtest/sdk/java/src/**/pom.xml', inputs.tests-path)) }}
+ restore-keys: |
+ maven-${{ runner.os }}-
+
+ # Step 3: SDK-specific fixups
+
+ # Go: Bridge client go.mod to server shared modules (standalone otdfctl only)
+ - name: Go fixup - replace go.mod packages
+ if: >-
+ inputs.sdk == 'go'
+ && steps.configure.outputs.heads != '[]'
+ && inputs.otdfctl-source != 'platform'
+ && inputs.focus-sdk == 'go'
+ && contains(fromJSON(inputs.platform-heads), inputs.platform-tag)
+ && inputs.platform-working-dir != ''
+ shell: bash
+ run: |
+ SDK_MGR_DIR="$(cd "$TESTS_PATH" && pwd)/otdf-sdk-mgr"
+ PLATFORM_DIR_ABS="$(pwd)/${PLATFORM_WORKING_DIR}"
+ BASE_DIR="$(pwd)/${TESTS_PATH}/xtest/sdk/go/src"
+ HEADS='${{ steps.configure.outputs.heads }}'
+ uv run --project "$SDK_MGR_DIR" otdf-sdk-mgr go-fixup \
+ --platform-dir "$PLATFORM_DIR_ABS" \
+ --heads "$HEADS" \
+ "$BASE_DIR"
+ env:
+ PLATFORM_WORKING_DIR: ${{ inputs.platform-working-dir }}
+ TESTS_PATH: ${{ inputs.tests-path }}
+
+ # Java: Create .env files with PLATFORM_BRANCH for head versions
+ - name: Java fixup - create platform branch .env files
+ if: >-
+ inputs.sdk == 'java'
+ && steps.configure.outputs.heads != '[]'
+ && (inputs.focus-sdk == 'go' || inputs.focus-sdk == 'java')
+ && contains(fromJSON(inputs.platform-heads), inputs.platform-tag)
+ shell: bash
+ run: |
+ for row in $(echo "$VERSION_INFO" | jq -c '.[]'); do
+ TAG=$(echo "$row" | jq -r '.tag')
+ HEAD=$(echo "$row" | jq -r '.head')
+ if [[ "$HEAD" == "true" ]]; then
+ echo "Creating .env file for tag: [$TAG]; pointing to platform ref [$PLATFORM_REF]"
+ echo "PLATFORM_BRANCH=$PLATFORM_REF" > "${TESTS_PATH}/xtest/sdk/java/${TAG}.env"
+ fi
+ done
+ env:
+ VERSION_INFO: ${{ inputs.version-info }}
+ PLATFORM_REF: ${{ fromJSON(inputs.platform-tag-to-sha)[inputs.platform-tag] }}
+ TESTS_PATH: ${{ inputs.tests-path }}
+
+ # Step 4: Build the SDK CLI
+ - name: Build ${{ inputs.sdk }} CLI
+ if: fromJson(steps.configure.outputs.heads)[0] != null
+ shell: bash
+ run: make
+ working-directory: ${{ inputs.tests-path }}/xtest/sdk/${{ inputs.sdk }}
+ env:
+ BUF_INPUT_HTTPS_USERNAME: ${{ inputs.sdk == 'java' && 'opentdf-bot' || '' }}
+ BUF_INPUT_HTTPS_PASSWORD: ${{ inputs.sdk == 'java' && inputs.pat-opentdf || '' }}
diff --git a/xtest/setup-test-environment/action.yaml b/xtest/setup-test-environment/action.yaml
new file mode 100644
index 000000000..0f8ce86f3
--- /dev/null
+++ b/xtest/setup-test-environment/action.yaml
@@ -0,0 +1,140 @@
+name: setup-test-environment
+description: >-
+ Detect platform capabilities, extract configuration, and prepare the test
+ environment. Consolidates otdfctl detection, platform version lookup, key
+ management support, root key extraction, multikas support check, and test
+ dependency installation.
+
+inputs:
+ platform-working-dir:
+ description: Platform checkout directory (from start-up-with-containers output)
+ required: true
+ platform-tag:
+ description: Platform version tag under test (matrix value)
+ required: true
+ otdfctl-source-input:
+ description: "User's otdfctl-source preference: auto, standalone, or platform"
+ required: false
+ default: "auto"
+ tests-path:
+ description: Path to the tests repo checkout
+ required: false
+ default: "otdftests"
+
+outputs:
+ otdfctl-source:
+ description: "Resolved otdfctl source: platform or standalone"
+ value: ${{ steps.detect-otdfctl.outputs.otdfctl-source }}
+ otdfctl-dir:
+ description: Absolute path to otdfctl directory (if source=platform)
+ value: ${{ steps.detect-otdfctl.outputs.otdfctl-dir }}
+ otdfctl-sha:
+ description: SHA of the platform checkout (if source=platform)
+ value: ${{ steps.detect-otdfctl.outputs.otdfctl-sha }}
+ platform-version:
+ description: Detected platform version string
+ value: ${{ steps.platform-version.outputs.version }}
+ key-management-supported:
+ description: Whether the platform supports key management (true/false)
+ value: ${{ steps.km-check.outputs.supported }}
+ root-key:
+ description: Root key for KAS instances
+ value: ${{ steps.km-check.outputs.root_key }}
+ multikas-supported:
+ description: Whether multi-KAS is supported (true/false)
+ value: ${{ steps.multikas.outputs.supported }}
+
+runs:
+ using: composite
+ steps:
+ - name: Detect platform-embedded otdfctl
+ id: detect-otdfctl
+ shell: bash
+ run: |
+ if [[ "$OTDFCTL_SOURCE_INPUT" == "auto" || -z "$OTDFCTL_SOURCE_INPUT" ]]; then
+ if [ -d "$PLATFORM_DIR/otdfctl" ] && [ -f "$PLATFORM_DIR/otdfctl/go.mod" ]; then
+ echo "otdfctl found in platform checkout at $PLATFORM_DIR/otdfctl"
+ echo "otdfctl-source=platform" >> "$GITHUB_OUTPUT"
+ echo "otdfctl-dir=$(pwd)/$PLATFORM_DIR/otdfctl" >> "$GITHUB_OUTPUT"
+ echo "otdfctl-sha=$(git -C "$PLATFORM_DIR" rev-parse HEAD)" >> "$GITHUB_OUTPUT"
+ else
+ echo "otdfctl not found in platform checkout; using standalone repo"
+ echo "otdfctl-source=standalone" >> "$GITHUB_OUTPUT"
+ fi
+ elif [[ "$OTDFCTL_SOURCE_INPUT" == "platform" ]]; then
+ if [ -z "$PLATFORM_DIR" ] || [ ! -d "$PLATFORM_DIR/otdfctl" ] || [ ! -f "$PLATFORM_DIR/otdfctl/go.mod" ]; then
+ echo "::error::otdfctl-source=platform requested but ${PLATFORM_DIR:-}/otdfctl does not exist or lacks go.mod"
+ exit 1
+ fi
+ echo "otdfctl-source=platform" >> "$GITHUB_OUTPUT"
+ echo "otdfctl-dir=$(pwd)/$PLATFORM_DIR/otdfctl" >> "$GITHUB_OUTPUT"
+ echo "otdfctl-sha=$(git -C "$PLATFORM_DIR" rev-parse HEAD)" >> "$GITHUB_OUTPUT"
+ else
+ echo "otdfctl-source=standalone" >> "$GITHUB_OUTPUT"
+ fi
+ env:
+ OTDFCTL_SOURCE_INPUT: ${{ inputs.otdfctl-source-input }}
+ PLATFORM_DIR: ${{ inputs.platform-working-dir }}
+
+ - name: Lookup platform version
+ id: platform-version
+ shell: bash
+ run: |
+ if ! go run ./service version; then
+ echo "Error: Unable to get platform version; defaulting to tag: [$PLATFORM_TAG]"
+ echo "version=$PLATFORM_TAG" >> "$GITHUB_OUTPUT"
+ exit
+ fi
+ PLATFORM_VERSION=$(go run ./service version 2>&1)
+ echo "version=$PLATFORM_VERSION" >> "$GITHUB_OUTPUT"
+ working-directory: ${{ inputs.platform-working-dir }}
+ env:
+ PLATFORM_TAG: ${{ inputs.platform-tag }}
+
+ - name: Check key management support and prepare root key
+ id: km-check
+ shell: bash
+ run: |
+ OT_CONFIG_FILE="$(pwd)/opentdf.yaml"
+ km_value=$(yq e '.services.kas.preview.key_management' "$OT_CONFIG_FILE" 2>/dev/null || echo "null")
+ case "$km_value" in
+ true|false)
+ echo "supported=true" >> "$GITHUB_OUTPUT"
+ ;;
+ *)
+ echo "supported=false" >> "$GITHUB_OUTPUT"
+ ;;
+ esac
+ existing_root_key=$(yq e '.services.kas.root_key' "$OT_CONFIG_FILE" 2>/dev/null || echo "")
+ if [ -n "$existing_root_key" ] && [ "$existing_root_key" != "null" ]; then
+ echo "Using existing root key from config"
+ echo "root_key=$existing_root_key" >> "$GITHUB_OUTPUT"
+ else
+ echo "Generating a new root key for additional KAS"
+ gen_root_key=$(openssl rand -hex 32)
+ echo "root_key=$gen_root_key" >> "$GITHUB_OUTPUT"
+ fi
+ working-directory: ${{ inputs.platform-working-dir }}
+
+ - name: Check multikas support
+ id: multikas
+ shell: bash
+ run: |
+ if [[ $PLATFORM_TAG == main ]]; then
+ echo "Main supports multikas"
+ echo "supported=true" >> "$GITHUB_OUTPUT"
+ elif awk -F. '{ if ($1 > 0 || ($1 == 0 && $2 > 4)) exit 0; else exit 1; }' <<< "${PLATFORM_VERSION#v}"; then
+ echo "Selected version [$PLATFORM_VERSION] supports multikas"
+ echo "supported=true" >> "$GITHUB_OUTPUT"
+ else
+ echo "At tag [$PLATFORM_TAG], [$PLATFORM_VERSION] probably does not support multikas"
+ echo "supported=false" >> "$GITHUB_OUTPUT"
+ fi
+ env:
+ PLATFORM_TAG: ${{ inputs.platform-tag }}
+ PLATFORM_VERSION: ${{ steps.platform-version.outputs.version }}
+
+ - name: Install test dependencies
+ shell: bash
+ run: uv sync
+ working-directory: ${{ inputs.tests-path }}/xtest