diff --git a/.github/workflows/xtest.yml b/.github/workflows/xtest.yml
index 773448892..f18882390 100644
--- a/.github/workflows/xtest.yml
+++ b/.github/workflows/xtest.yml
@@ -28,6 +28,11 @@ on:
type: string
default: all
description: "SDK to focus on (go, js, java, all)"
+ otdfctl-source:
+ required: false
+ type: string
+ default: auto
+ description: "otdfctl source: 'auto', 'standalone', or 'platform'"
workflow_call:
inputs:
platform-ref:
@@ -50,229 +55,93 @@ on:
required: false
type: string
default: all
+ otdfctl-source:
+ required: false
+ type: string
+ default: auto
schedule:
- - cron: "30 6 * * *" # 0630 UTC
- - cron: "0 5 * * 1,3" # 500 UTC (Monday, Wednesday)
- - cron: "0 18 * * 0" # 1800 UTC (Sunday)
+ - cron: "30 6 * * *"
+ - cron: "0 5 * * 1,3"
+ - cron: "0 18 * * 0"
concurrency:
group: ${{ github.workflow }}-pr-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
- resolve-versions:
+ prepare:
timeout-minutes: 10
runs-on: ubuntu-latest
- permissions:
- contents: read
outputs:
- platform-tag-to-sha: ${{ steps.version-info.outputs.platform-tag-to-sha }}
- platform-tag-list: ${{ steps.version-info.outputs.platform-tag-list }}
- heads: ${{ steps.version-info.outputs.platform-heads }}
- default-tags: ${{ steps.version-info.outputs.default-tags }}
- go: ${{ steps.version-info.outputs.go-version-info }}
- java: ${{ steps.version-info.outputs.java-version-info }}
- js: ${{ steps.version-info.outputs.js-version-info }}
- env:
- PLATFORM_REF: "${{ inputs.platform-ref }}"
- JS_REF: "${{ inputs.js-ref }}"
- OTDFCTL_REF: "${{ inputs.otdfctl-ref }}"
- JAVA_REF: "${{ inputs.java-ref }}"
+ platform-refs: ${{ steps.platform-refs.outputs.json }}
steps:
- name: Validate focus-sdk input
if: ${{ inputs.focus-sdk != '' }}
env:
FOCUS_SDK_INPUT: ${{ inputs.focus-sdk }}
- run: |-
+ run: |
if [[ ! "all go java js" =~ (^|[[:space:]])${FOCUS_SDK_INPUT}($|[[:space:]]) ]]; then
echo "Invalid focus-sdk input: ${FOCUS_SDK_INPUT}. Must be one of: all, go, java, js." >> "$GITHUB_STEP_SUMMARY"
exit 1
fi
- - name: Default Versions depend on context
- id: default-tags
- run: |-
- if [[ $CRON_NIGHTLY == 'true' ]]; then
- echo "Running nightly tests"
- echo "DEFAULT_TAGS=main latest" >> "$GITHUB_ENV"
- elif [[ $CRON_MONDAY_WEDNESDAY == 'true' ]]; then
- echo "Running Monday/Wednesday tests"
- echo "DEFAULT_TAGS=main" >> "$GITHUB_ENV"
- elif [[ $CRON_WEEKLY == 'true' ]]; then
- echo "Running weekly tests"
- echo "DEFAULT_TAGS=main latest" >> "$GITHUB_ENV"
- else
- echo "Running PR, Workflow Dispatch, or manual trigger"
- echo "DEFAULT_TAGS=main latest" >> "$GITHUB_ENV"
- fi
- env:
- CRON_NIGHTLY: ${{ github.event.schedule == '30 6 * * *' }}
- CRON_MONDAY_WEDNESDAY: ${{ github.event.schedule == '0 5 * * 1,3' }}
- CRON_WEEKLY: ${{ github.event.schedule == '0 18 * * 0' }}
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- with:
- path: otdf-sdk
- persist-credentials: false
- repository: opentdf/tests
- sparse-checkout: |
- xtest/sdk
- otdf-sdk-mgr
- - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
- with:
- python-version: "3.14"
- - uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0
- - id: version-info
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea #v7.0.1
- with:
- script: |
- function htmlEscape(str) {
- return str.replace(/&/g, "&")
- .replace(//g, ">")
- .replace(/"/g, """)
- .replace(/'/g, "'");
- }
- const { spawnSync } = require('child_process');
- const path = require('path');
-
- const sdkMgrDir = path.join(process.env.GITHUB_WORKSPACE, 'otdf-sdk', 'otdf-sdk-mgr');
- const defaultTags = process.env.DEFAULT_TAGS || 'main';
- core.setOutput('default-tags', defaultTags);
-
- const refs = {
- platform: process.env.PLATFORM_REF || defaultTags,
- go: process.env.OTDFCTL_REF || defaultTags,
- js: process.env.JS_REF || defaultTags,
- java: process.env.JAVA_REF || defaultTags
- };
-
- const versionData = {};
-
- for (const [sdkType, ref] of Object.entries(refs)) {
- try {
- const refArgs = ref.trim().split(/\s+/).filter(Boolean);
- const result = spawnSync('uv', ['run', '--project', sdkMgrDir, 'otdf-sdk-mgr', 'versions', 'resolve', sdkType, ...refArgs], { cwd: sdkMgrDir, encoding: 'utf-8' });
- if (result.status !== 0) {
- throw new Error(result.stderr || `Process exited with code ${result.status}`);
- }
- const output = result.stdout;
- const ojson = JSON.parse(output);
- if (!!ojson.err) {
- throw new Error(ojson.err);
- }
- versionData[sdkType] = ojson;
- } catch (error) {
- console.error(`Error resolving version for ${sdkType}:`, error);
- versionData[sdkType] = [{ tag: ref, err: error.message }];
- }
- }
-
- core.setOutput('all', JSON.stringify(versionData));
-
- core.summary.addHeading('Versions under Test', 3);
- function artifactLink(sdkType, tag, release, head) {
- if (head || !release) return '';
- const v = tag.replace(/^v/, '');
- if (sdkType === 'js') {
- const url = `https://www.npmjs.com/package/@opentdf/ctl/v/${encodeURIComponent(v)}`;
- return `<a href="${url}">npmjs</a>`;
- }
- if (sdkType === 'java') {
- const url = `https://central.sonatype.com/artifact/io.opentdf.platform/sdk/${encodeURIComponent(v)}`;
- return `<a href="${url}">Maven Central</a>`;
- }
- if (sdkType === 'go') {
- const url = `https://pkg.go.dev/github.com/opentdf/otdfctl@${encodeURIComponent(tag)}`;
- return `<a href="${url}">pkg.go.dev</a>`;
- }
- return '';
- }
-
- let errorCount = 0;
- const table = [];
- const th = (data) => ({ data, header: true });
- table.push([th('Library'), th('Tag'), th('SHA'), th('Alias'), th('Artifact'), th('Error')]);
-
- for (const [sdkType, refInfo] of Object.entries(versionData)) {
- const tagList = [];
- const tagToSha = {};
- const headTags = [];
-
- for (const { tag, head, sha, alias, err, release } of refInfo) {
- const sdkRepoUrl = `https://github.com/opentdf/${encodeURIComponent(sdkType == 'js' ? 'web-sdk' : sdkType == 'go' ? 'otdfctl' : sdkType == 'java' ? 'java-sdk' : sdkType)}`;
- const sdkLink = `<a href="${sdkRepoUrl}">${htmlEscape(sdkType)}</a>`;
- const commitLink = sha ? `<a href="${sdkRepoUrl}/commit/${sha}">${htmlEscape(sha.substring(0, 7))}</a>` : ' . ';
- const tagLink = (release && tag)
- ? `<a href="${sdkRepoUrl}/releases/tag/${encodeURIComponent(tag)}">${htmlEscape(tag)}</a>`
- : tag ? htmlEscape(tag) : 'N/A';
- const artifactCell = artifactLink(sdkType, tag, release, head);
- table.push([sdkLink, tagLink, commitLink, alias || 'N/A', artifactCell || 'N/A', err || 'N/A']);
- if (err) {
- errorCount += 1;
- continue;
- }
- tagList.push(tag);
- tagToSha[tag] = sha;
- if (head) {
- headTags.push(tag);
- }
- }
-
- core.setOutput(`${sdkType}-tag-list`, JSON.stringify(tagList));
- core.setOutput(`${sdkType}-tag-to-sha`, JSON.stringify(tagToSha));
- core.setOutput(`${sdkType}-heads`, JSON.stringify(headTags));
- core.setOutput(`${sdkType}-version-info`, JSON.stringify(refInfo));
- }
-
- core.summary.addTable(table);
- core.summary.write();
-
- if (errorCount > 0) {
- throw new Error('Errors detected in version resolution. Failing the run.');
- }
+ - name: Choose platform refs
+ id: platform-refs
+ env:
+ INPUT_PLATFORM_REF: ${{ inputs.platform-ref }}
+ EVENT_NAME: ${{ github.event_name }}
+ EVENT_SCHEDULE: ${{ github.event.schedule }}
+ run: |
+ refs="${INPUT_PLATFORM_REF}"
+ if [[ -z "$refs" ]]; then
+ case "$EVENT_NAME" in
+ workflow_dispatch|workflow_call)
+ refs="main"
+ ;;
+ *)
+ if [[ "$EVENT_SCHEDULE" == "0 5 * * 1,3" ]]; then
+ refs="main"
+ else
+ refs="main latest"
+ fi
+ ;;
+ esac
+ fi
+ json=$(printf '%s' "$refs" | jq -Rc 'split(" ") | map(select(length > 0))')
+ echo "json=$json" >> "$GITHUB_OUTPUT"
+ echo "Platform refs: $refs" >> "$GITHUB_STEP_SUMMARY"
xct:
timeout-minutes: 60
runs-on: ubuntu-latest
- needs: resolve-versions
+ needs: prepare
permissions:
contents: read
packages: read
- checks: write # Needed to publish junit tests
- pull-requests: write # Add comments to PRs
+ checks: write
+ pull-requests: write
env:
FOCUS_SDK: ${{ inputs.focus-sdk || 'all' }}
ENCRYPT_SDK: ${{ matrix.sdk }}
strategy:
fail-fast: false
matrix:
- platform-tag: ${{ fromJSON(needs.resolve-versions.outputs.platform-tag-list) }}
+ platform-ref: ${{ fromJSON(needs.prepare.outputs.platform-refs) }}
sdk: ["go", "java", "js"]
steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
repository: opentdf/tests
- path: otdftests # use different name bc other repos might have tests directories
+ path: otdftests
persist-credentials: false
- - name: load extra keys from file
- id: load-extra-keys
- run: |-
- echo "EXTRA_KEYS=$(jq -c . < otdftests/xtest/extra-keys.json)" >> "${GITHUB_OUTPUT}"
-
- ######## SPIN UP PLATFORM BACKEND #############
- - name: Check out and start up platform with deps/containers
- id: run-platform
- uses: opentdf/platform/test/start-up-with-containers@998929e5c66d41f928b90e6af7dbaa0a14302ca6 # watch-sh-fix
+ - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
with:
- platform-ref: ${{ fromJSON(needs.resolve-versions.outputs.platform-tag-to-sha)[matrix.platform-tag] }}
- ec-tdf-enabled: true
- extra-keys: ${{ steps.load-extra-keys.outputs.EXTRA_KEYS }}
- log-type: json
+ python-version: "3.14"
+
+ - uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b
- - name: Install uv
- uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0
- - uses: bufbuild/buf-action@8f4a1456a0ab6a1eb80ba68e53832e6fcfacc16c # v1.3.0
+ - uses: bufbuild/buf-action@8f4a1456a0ab6a1eb80ba68e53832e6fcfacc16c
with:
setup_only: true
token: ${{ secrets.BUF_TOKEN }}
@@ -290,389 +159,99 @@ jobs:
with:
node-version: "22.x"
- ######### CHECKOUT JS CLI #############
- - name: Configure js-sdk
- id: configure-js
- uses: ./otdftests/xtest/setup-cli-tool
- with:
- path: otdftests/xtest/sdk
- sdk: js
- version-info: "${{ needs.resolve-versions.outputs.js }}"
-
- - name: Cache npm
- if: fromJson(steps.configure-js.outputs.heads)[0] != null
- uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
- with:
- path: ~/.npm
- key: npm-${{ runner.os }}-${{ hashFiles('otdftests/xtest/sdk/js/src/**/package-lock.json') }}
- restore-keys: |
- npm-${{ runner.os }}-
-
- ######## SETUP THE JS CLI #############
- - name: build and setup the web-sdk cli
- id: build-web-sdk
- if: fromJson(steps.configure-js.outputs.heads)[0] != null
- run: |
- make
- working-directory: otdftests/xtest/sdk/js
-
- ######## CHECKOUT GO CLI #############
- - name: Configure otdfctl
- id: configure-go
- uses: ./otdftests/xtest/setup-cli-tool
- with:
- path: otdftests/xtest/sdk
- sdk: go
- version-info: "${{ needs.resolve-versions.outputs.go }}"
-
- - name: Cache Go modules
- if: fromJson(steps.configure-go.outputs.heads)[0] != null
- uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
- with:
- path: |
- ~/.cache/go-build
- ~/go/pkg/mod
- key: go-${{ runner.os }}-${{ hashFiles('otdftests/xtest/sdk/go/src/*/go.sum') }}
- restore-keys: |
- go-${{ runner.os }}-
-
- - name: Resolve otdfctl heads
- id: resolve-otdfctl-heads
- if: fromJson(steps.configure-go.outputs.heads)[0] != null
- run: |-
- echo "OTDFCTL_HEADS=$OTDFCTL_HEADS" >> "$GITHUB_ENV"
- env:
- OTDFCTL_HEADS: ${{ steps.configure-go.outputs.heads }}
-
- - name: Replace otdfctl go.mod packages, but only at head version of platform
- if: fromJson(steps.configure-go.outputs.heads)[0] != null && env.FOCUS_SDK == 'go' && contains(fromJSON(needs.resolve-versions.outputs.heads), matrix.platform-tag)
- env:
- PLATFORM_WORKING_DIR: ${{ steps.run-platform.outputs.platform-working-dir }}
- run: |-
- echo "Replacing go.mod packages..."
- PLATFORM_DIR_ABS="$(pwd)/${PLATFORM_WORKING_DIR}"
- OTDFCTL_DIR_ABS="$(pwd)/otdftests/xtest/sdk/go/src/"
- echo "PLATFORM_DIR_ABS: $PLATFORM_DIR_ABS"
- echo "OTDFCTL_DIR_ABS: $OTDFCTL_DIR_ABS"
- for head in $(echo "${OTDFCTL_HEADS}" | jq -r '.[]'); do
- echo "Processing head: $head"
- cd "${OTDFCTL_DIR_ABS}/$head"
- for m in lib/fixtures lib/ocrypto protocol/go sdk; do
- go mod edit -replace "github.com/opentdf/platform/$m=${PLATFORM_DIR_ABS}/$m"
- done
- go mod tidy
- done
-
- ######## SETUP THE GO CLI #############
- - name: Prepare go cli
- if: fromJson(steps.configure-go.outputs.heads)[0] != null
- run: |-
- make
- working-directory: otdftests/xtest/sdk/go
-
- ####### CHECKOUT JAVA SDK ##############
-
- - name: Configure java-sdk
- id: configure-java
- uses: ./otdftests/xtest/setup-cli-tool
- with:
- path: otdftests/xtest/sdk
- sdk: java
- version-info: "${{ needs.resolve-versions.outputs.java }}"
-
- - name: Cache Maven repository
- if: fromJson(steps.configure-java.outputs.heads)[0] != null
- uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
- with:
- path: ~/.m2/repository
- key: maven-${{ runner.os }}-${{ hashFiles('otdftests/xtest/sdk/java/src/**/pom.xml') }}
- restore-keys: |
- maven-${{ runner.os }}-
-
- - name: pre-release protocol buffers for java-sdk
- if: >-
- fromJson(steps.configure-java.outputs.heads)[0] != null
- && (env.FOCUS_SDK == 'go' || env.FOCUS_SDK == 'java')
- && contains(fromJSON(needs.resolve-versions.outputs.heads), matrix.platform-tag)
- run: |-
- echo "Replacing .env files for java-sdk..."
- echo "Platform tag: $platform_tag"
- echo "Java version info: $java_version_info"
- for row in $(echo "$java_version_info" | jq -c '.[]'); do
- TAG=$(echo "$row" | jq -r '.tag')
- HEAD=$(echo "$row" | jq -r '.head')
- if [[ "$HEAD" == "true" ]]; then
- echo "Creating .env file for tag: [$TAG]; pointing to platform ref [$platform_tag]"
- echo "PLATFORM_BRANCH=$platform_ref" > "otdftests/xtest/sdk/java/${TAG}.env"
- else
- echo "Skipping .env file creation for release version [$TAG]"
- fi
- done
+ - name: Resolve default SDK refs for this event
+ id: refs
env:
- java_version_info: ${{ needs.resolve-versions.outputs.java }}
- platform_ref: ${{ fromJSON(needs.resolve-versions.outputs.platform-tag-to-sha)[matrix.platform-tag] }}
- platform_tag: ${{ matrix.platform-tag }}
-
- ####### SETUP JAVA CLI ##############
- - name: Prepare java cli
- if: fromJson(steps.configure-java.outputs.heads)[0] != null
+ INPUT_OTDFCTL_REF: ${{ inputs.otdfctl-ref }}
+ INPUT_JS_REF: ${{ inputs.js-ref }}
+ INPUT_JAVA_REF: ${{ inputs.java-ref }}
+ INPUT_OTDFCTL_SOURCE: ${{ inputs.otdfctl-source }}
+ EVENT_NAME: ${{ github.event_name }}
+ EVENT_SCHEDULE: ${{ github.event.schedule }}
run: |
- make
- working-directory: otdftests/xtest/sdk/java
- env:
- BUF_INPUT_HTTPS_USERNAME: opentdf-bot
- BUF_INPUT_HTTPS_PASSWORD: ${{ secrets.PERSONAL_ACCESS_TOKEN_OPENTDF }}
-
- ######## Configure test environment #############
- - name: Lookup current platform version
- id: platform-version
- run: |-
- if ! go run ./service version; then
- # NOTE: the version command was added in 0.4.37
- echo "Error: Unable to get platform version; defaulting to tag: [$PLATFORM_TAG]"
- echo "PLATFORM_VERSION=$PLATFORM_TAG" >> "$GITHUB_ENV"
- exit
- fi
- # Older version commands output version to stderr; newer versions output to stdout
- PLATFORM_VERSION=$(go run ./service version 2>&1)
- echo "PLATFORM_VERSION=$PLATFORM_VERSION" >> "$GITHUB_ENV"
- echo "## Platform version output: [$PLATFORM_VERSION]"
- working-directory: ${{ steps.run-platform.outputs.platform-working-dir }}
- env:
- PLATFORM_TAG: ${{ matrix.platform-tag }}
-
- - name: Check key management support and prepare root key
- id: km-check
- run: |-
- OT_CONFIG_FILE="$(pwd)/opentdf.yaml"
- echo "OT_CONFIG_FILE=$OT_CONFIG_FILE" >> "$GITHUB_ENV"
- # Determine if the config declares the key_management field
- km_value=$(yq e '.services.kas.preview.key_management' "$OT_CONFIG_FILE" 2>/dev/null || echo "null")
- case "$km_value" in
- true|false)
- echo "KEY_MANAGEMENT_SUPPORTED=true" >> "$GITHUB_ENV"
- echo "supported=true" >> "$GITHUB_OUTPUT"
+ defaults="main latest"
+ case "$EVENT_NAME" in
+ workflow_dispatch|workflow_call)
+ defaults="main"
;;
*)
- echo "KEY_MANAGEMENT_SUPPORTED=false" >> "$GITHUB_ENV"
- echo "supported=false" >> "$GITHUB_OUTPUT"
+ if [[ "$EVENT_SCHEDULE" == "0 5 * * 1,3" ]]; then
+ defaults="main"
+ fi
;;
esac
- # Prepare a root key for use by additional KAS instances
- existing_root_key=$(yq e '.services.kas.root_key' "$OT_CONFIG_FILE" 2>/dev/null || echo "")
- if [ -n "$existing_root_key" ] && [ "$existing_root_key" != "null" ]; then
- echo "Using existing root key from config"
- echo "OT_ROOT_KEY=$existing_root_key" >> "$GITHUB_ENV"
- echo "root_key=$existing_root_key" >> "$GITHUB_OUTPUT"
- else
- echo "Generating a new root key for additional KAS"
- gen_root_key=$(openssl rand -hex 32)
- echo "OT_ROOT_KEY=$gen_root_key" >> "$GITHUB_ENV"
- echo "root_key=$gen_root_key" >> "$GITHUB_OUTPUT"
- fi
- working-directory: ${{ steps.run-platform.outputs.platform-working-dir }}
-
- - name: Install test dependencies
- run: uv sync
- working-directory: otdftests/xtest
- - name: Validate xtest helper library (tests of the test harness and its utilities)
- if: ${{ !inputs }}
- run: |-
- uv run pytest --html=test-results/helper-${FOCUS_SDK}-${PLATFORM_TAG}.html --self-contained-html --sdks-encrypt "${ENCRYPT_SDK}" test_self.py test_audit_logs.py
- working-directory: otdftests/xtest
- env:
- PLATFORM_TAG: ${{ matrix.platform-tag }}
-
- - name: Validate otdf-local integration tests
- if: ${{ !inputs }}
- run: |-
- uv sync
- uv run pytest --maxfail=1 --disable-warnings -v --tb=short -m integration
- working-directory: otdftests/otdf-local
+ echo "otdfctl-ref=${INPUT_OTDFCTL_REF:-$defaults}" >> "$GITHUB_OUTPUT"
+ echo "js-ref=${INPUT_JS_REF:-$defaults}" >> "$GITHUB_OUTPUT"
+ echo "java-ref=${INPUT_JAVA_REF:-$defaults}" >> "$GITHUB_OUTPUT"
+ echo "otdfctl-source=${INPUT_OTDFCTL_SOURCE:-auto}" >> "$GITHUB_OUTPUT"
+
+ - name: Write replay config and summary
+ id: plan
+ working-directory: otdftests
env:
- OTDF_LOCAL_PLATFORM_DIR: ${{ github.workspace }}/${{ steps.run-platform.outputs.platform-working-dir }}
-
- ######## RUN THE TESTS #############
- - name: Run legacy decryption tests
- run: |-
- uv run pytest -n auto --dist worksteal --html=test-results/sdk-${FOCUS_SDK}-${PLATFORM_TAG}.html --self-contained-html --sdks-encrypt "${ENCRYPT_SDK}" -ra -v --focus "$FOCUS_SDK" test_legacy.py
- working-directory: otdftests/xtest
- env:
- PLATFORM_DIR: "../../${{ steps.run-platform.outputs.platform-working-dir }}"
- PLATFORM_TAG: ${{ matrix.platform-tag }}
-
- - name: Run all standard xtests
- if: ${{ env.FOCUS_SDK == 'all' }}
- run: |-
- uv run pytest -n auto --dist loadscope --html=test-results/sdk-${FOCUS_SDK}-${PLATFORM_TAG}.html --self-contained-html --sdks-encrypt "${ENCRYPT_SDK}" -ra -v test_tdfs.py test_policytypes.py
- working-directory: otdftests/xtest
- env:
- PLATFORM_DIR: "../../${{ steps.run-platform.outputs.platform-working-dir }}"
- SCHEMA_FILE: "manifest.schema.json"
- PLATFORM_TAG: ${{ matrix.platform-tag }}
-
- - name: Run xtests focusing on a specific SDK
- if: ${{ env.FOCUS_SDK != 'all' }}
- run: |-
- uv run pytest -n auto --dist loadscope --html=test-results/sdk-${FOCUS_SDK}-${PLATFORM_TAG}.html --self-contained-html --sdks-encrypt "${ENCRYPT_SDK}" -ra -v --focus "$FOCUS_SDK" test_tdfs.py test_policytypes.py
- working-directory: otdftests/xtest
- env:
- PLATFORM_DIR: "../../${{ steps.run-platform.outputs.platform-working-dir }}"
- SCHEMA_FILE: "manifest.schema.json"
- PLATFORM_TAG: ${{ matrix.platform-tag }}
-
- ######## ATTRIBUTE BASED CONFIGURATION #############
-
- - name: Does platform support multikas?
- id: multikas
- run: |-
- if [[ $PLATFORM_TAG == main ]]; then
- echo "Main supports multikas"
- echo "supported=true" >> "$GITHUB_OUTPUT"
- elif awk -F. '{ if ($1 > 0 || ($1 == 0 && $2 > 4)) exit 0; else exit 1; }' <<< "${PLATFORM_VERSION#v}"; then
- echo "Selected version [$PLATFORM_VERSION] supports multikas"
- echo "supported=true" >> "$GITHUB_OUTPUT"
- else
- echo "At tag [$PLATFORM_TAG], [$PLATFORM_VERSION] probably does not support multikas"
- echo "supported=false" >> "$GITHUB_OUTPUT"
+ GITHUB_EVENT_NAME: ${{ github.event_name }}
+ run: |
+ extra_args=()
+ if [[ "$GITHUB_EVENT_NAME" != "workflow_dispatch" && "$GITHUB_EVENT_NAME" != "workflow_call" ]]; then
+ extra_args+=(--include-helper-tests --include-otdf-local-integration)
fi
- env:
- PLATFORM_TAG: ${{ matrix.platform-tag }}
-
- - name: Start additional kas
- id: kas-alpha
- if: ${{ steps.multikas.outputs.supported == 'true' }}
- uses: opentdf/platform/test/start-additional-kas@998929e5c66d41f928b90e6af7dbaa0a14302ca6 # watch-sh-fix
- with:
- ec-tdf-enabled: true
- kas-name: alpha
- kas-port: 8181
- log-type: json
- root-key: ${{ steps.km-check.outputs.root_key }}
-
- - name: Start additional kas
- id: kas-beta
- if: ${{ steps.multikas.outputs.supported == 'true' }}
- uses: opentdf/platform/test/start-additional-kas@998929e5c66d41f928b90e6af7dbaa0a14302ca6 # watch-sh-fix
- with:
- ec-tdf-enabled: true
- kas-name: beta
- kas-port: 8282
- log-type: json
- root-key: ${{ steps.km-check.outputs.root_key }}
-
- - name: Start additional kas
- id: kas-gamma
- if: ${{ steps.multikas.outputs.supported == 'true' }}
- uses: opentdf/platform/test/start-additional-kas@998929e5c66d41f928b90e6af7dbaa0a14302ca6 # watch-sh-fix
- with:
- ec-tdf-enabled: true
- kas-name: gamma
- kas-port: 8383
- log-type: json
- root-key: ${{ steps.km-check.outputs.root_key }}
-
- - name: Start additional kas
- id: kas-delta
- if: ${{ steps.multikas.outputs.supported == 'true' }}
- uses: opentdf/platform/test/start-additional-kas@998929e5c66d41f928b90e6af7dbaa0a14302ca6 # watch-sh-fix
- with:
- ec-tdf-enabled: true
- kas-port: 8484
- kas-name: delta
- log-type: json
- root-key: ${{ steps.km-check.outputs.root_key }}
-
- - name: Start additional KM kas (km1)
- id: kas-km1
- if: ${{ steps.multikas.outputs.supported == 'true' }}
- uses: opentdf/platform/test/start-additional-kas@998929e5c66d41f928b90e6af7dbaa0a14302ca6 # watch-sh-fix
- with:
- ec-tdf-enabled: true
- key-management: ${{ steps.km-check.outputs.supported }}
- kas-name: km1
- kas-port: 8585
- log-type: json
- root-key: ${{ steps.km-check.outputs.root_key }}
-
- - name: Start additional KM kas (km2)
- id: kas-km2
- if: ${{ steps.multikas.outputs.supported == 'true' }}
- uses: opentdf/platform/test/start-additional-kas@998929e5c66d41f928b90e6af7dbaa0a14302ca6 # watch-sh-fix
- with:
- ec-tdf-enabled: true
- kas-name: km2
- key-management: ${{ steps.km-check.outputs.supported }}
- kas-port: 8686
- log-type: json
- root-key: ${{ steps.km-check.outputs.root_key }}
-
- - name: Run attribute based configuration tests
- if: ${{ steps.multikas.outputs.supported == 'true' }}
- run: >-
- uv run pytest
- -ra
- -v
- --numprocesses auto
- --dist loadscope
- --html test-results/attributes-${FOCUS_SDK}-${PLATFORM_TAG}.html
- --self-contained-html
- --audit-log-dir test-results/audit-logs
- --sdks-encrypt "${ENCRYPT_SDK}"
- --focus "$FOCUS_SDK"
- test_abac.py
- working-directory: otdftests/xtest
- env:
- PLATFORM_DIR: "../../${{ steps.run-platform.outputs.platform-working-dir }}"
- PLATFORM_TAG: ${{ matrix.platform-tag }}
- PLATFORM_LOG_FILE: "../../${{ steps.run-platform.outputs.platform-log-file }}"
- KAS_ALPHA_LOG_FILE: "../../${{ steps.kas-alpha.outputs.log-file }}"
- KAS_BETA_LOG_FILE: "../../${{ steps.kas-beta.outputs.log-file }}"
- KAS_GAMMA_LOG_FILE: "../../${{ steps.kas-gamma.outputs.log-file }}"
- KAS_DELTA_LOG_FILE: "../../${{ steps.kas-delta.outputs.log-file }}"
- KAS_KM1_LOG_FILE: "../../${{ steps.kas-km1.outputs.log-file }}"
- KAS_KM2_LOG_FILE: "../../${{ steps.kas-km2.outputs.log-file }}"
+ uv run --project otdf-local otdf-local xtest plan \
+ --platform-ref "${{ matrix.platform-ref }}" \
+ --go-ref "${{ steps.refs.outputs.otdfctl-ref }}" \
+ --js-ref "${{ steps.refs.outputs.js-ref }}" \
+ --java-ref "${{ steps.refs.outputs.java-ref }}" \
+ --encrypt-sdk "${ENCRYPT_SDK}" \
+ --focus-sdk "${FOCUS_SDK}" \
+ --otdfctl-source "${{ steps.refs.outputs.otdfctl-source }}" \
+ --output "xtest/ci-config-${ENCRYPT_SDK}-${{ matrix.platform-ref }}.yaml" \
+ --write-summary \
+ "${extra_args[@]}"
+
+ - name: Run xtest job
+ working-directory: otdftests
+ run: |
+ uv run --project otdf-local otdf-local xtest run --config "${{ steps.plan.outputs.config-path }}"
- name: Upload artifact
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
id: upload-artifact
if: success() || failure()
with:
- name: ${{ job.status == 'success' && '✅' || job.status == 'failure' && '❌' }} ${{ matrix.sdk }}-${{matrix.platform-tag}}
+ name: ${{ job.status == 'success' && '✅' || job.status == 'failure' && '❌' }} ${{ matrix.sdk }}-${{ steps.plan.outputs.platform-tag }}
path: otdftests/xtest/test-results/*.html
- name: Upload audit logs on failure
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
if: failure()
with:
- name: audit-logs-${{ matrix.sdk }}-${{ matrix.platform-tag }}
+ name: audit-logs-${{ matrix.sdk }}-${{ steps.plan.outputs.platform-tag }}
path: otdftests/xtest/test-results/audit-logs/*.log
if-no-files-found: ignore
publish-results:
runs-on: ubuntu-latest
needs: xct
- if: always() # Always try to publish results, even if tests fail
+ if: always()
permissions:
- pull-requests: write # Add comments to PRs
+ pull-requests: write
steps:
- id: comment-artifact-list
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea #v7.0.1
+ uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea
env:
XCT_RESULT: ${{ needs.xct.result }}
with:
script: |
- const { owner, repo} = context.repo;
+ const { owner, repo } = context.repo;
const runId = context.runId;
const artifacts = await github.rest.actions.listWorkflowRunArtifacts({
owner,
repo,
run_id: runId,
- })
+ });
const results = artifacts.data.artifacts.map(({ name, id }) => `[${name}](https://github.com/${owner}/${repo}/actions/runs/${runId}/artifacts/${id})`).join('\n');
const xctResult = process.env.XCT_RESULT;
const heading = xctResult === 'success' ? 'X-Test Results' : 'X-Test Failure Report';
- const body = `## ${heading}\n${results}`
+ const body = `## ${heading}\n${results}`;
const issue_number = context.issue?.number ?? context.pull_request?.number;
if (!issue_number) {
core.summary.addRaw(body);
@@ -689,11 +268,10 @@ jobs:
console.error("403 when attempting to comment on PR (likely fork)", err);
return;
}
- throw err; // Re-throw unexpected errors
+ throw err;
}
xtest:
- # Capstone job: always runs, evaluates matrix job aggregate result and fails if any xct variant failed
runs-on: ubuntu-latest
needs: xct
if: always()
@@ -702,12 +280,13 @@ jobs:
steps:
- name: Assert all matrix jobs passed
if: ${{ needs.xct.result == 'failure' || needs.xct.result == 'cancelled' }}
- run: |-
+ run: |
echo "xct matrix had failures (overall result: ${XCT_RESULT}). Marking xtest failed." >> "$GITHUB_STEP_SUMMARY"
exit 1
env:
XCT_RESULT: ${{ needs.xct.result }}
+
- name: Success summary
if: ${{ needs.xct.result == 'success' }}
- run: |-
- echo "All xtest jobs succeeded." >> "$GITHUB_STEP_SUMMARY"
+ run: |
+ echo "All xtest jobs succeeded." >> "$GITHUB_STEP_SUMMARY"
diff --git a/otdf-local/README.md b/otdf-local/README.md
index 1a897c06a..9b2654aab 100644
--- a/otdf-local/README.md
+++ b/otdf-local/README.md
@@ -51,6 +51,34 @@ uv run otdf-local down
## Commands
+### `xtest` - Replay CI XTests Locally
+
+Generate a replayable config from refs:
+
+```bash
+otdf-local xtest plan \
+ --platform-ref latest \
+ --go-ref "main latest" \
+ --js-ref "main latest" \
+ --java-ref "main latest" \
+ --encrypt-sdk go \
+ --output xtest-repro.yaml
+```
+
+Run the same orchestration locally that CI uses:
+
+```bash
+otdf-local xtest run --config xtest-repro.yaml
+```
+
+Or run directly without a saved config:
+
+```bash
+otdf-local xtest run --platform-ref latest --encrypt-sdk go
+```
+
+The generated YAML is designed to be pasted directly from the GitHub Actions step summary and replayed locally.
+
### `up` - Start Environment
Start all or specific services.
diff --git a/otdf-local/src/otdf_local/ci.py b/otdf-local/src/otdf_local/ci.py
new file mode 100644
index 000000000..bab3b3203
--- /dev/null
+++ b/otdf-local/src/otdf_local/ci.py
@@ -0,0 +1,223 @@
+"""CI-specific commands for otdf-local.
+
+These commands adapt the local environment management for GitHub Actions CI,
+where the platform is already started by an external action and we only need
+to start KAS instances as background processes.
+"""
+
+from __future__ import annotations
+
+import os
+import sys
+from pathlib import Path
+from typing import Annotated
+
+import typer
+
+from otdf_local.config.ports import Ports
+from otdf_local.config.settings import Settings
+from otdf_local.health.waits import WaitTimeoutError, wait_for_health
+from otdf_local.services import get_kas_manager
+from otdf_local.utils.console import (
+ print_error,
+ print_info,
+ print_success,
+ print_warning,
+)
+from otdf_local.utils.yaml import load_yaml, save_yaml, set_nested
+
+ci_app = typer.Typer(
+ name="ci",
+ help="CI-specific commands for GitHub Actions workflows.",
+ no_args_is_help=True,
+)
+
+
+def _emit_github_output(key: str, value: str) -> None:
+ """Write a key=value pair to $GITHUB_OUTPUT if available, else print to stdout."""
+ github_output = os.environ.get("GITHUB_OUTPUT")
+ if github_output:
+ with open(github_output, "a") as f:
+ f.write(f"{key}={value}\n")
+ else:
+ # Fallback for local testing
+ print(f"{key}={value}", file=sys.stdout)
+
+
+def _prepare_kas_template(
+ settings: Settings, root_key: str | None, ec_tdf_enabled: bool
+) -> None:
+ """Ensure the KAS template config has the right root key and EC TDF settings.
+
+ In CI, the platform config may have a root_key that differs from what
+ we want for additional KAS instances. This updates the platform config
+ in-place so that KASService._generate_config reads the correct root_key.
+ """
+ if root_key:
+ config = load_yaml(settings.platform_config)
+ set_nested(config, "services.kas.root_key", root_key)
+ if ec_tdf_enabled:
+ set_nested(config, "services.kas.preview.ec_tdf_enabled", True)
+ save_yaml(settings.platform_config, config)
+
+
+@ci_app.command("start-kas")
+def start_kas(
+ platform_dir: Annotated[
+ Path,
+ typer.Option(
+ "--platform-dir",
+ help="Path to the platform checkout (must contain opentdf-kas-mode.yaml)",
+ envvar="OTDF_LOCAL_PLATFORM_DIR",
+ ),
+ ],
+ root_key: Annotated[
+ str | None,
+ typer.Option(
+ "--root-key",
+ help="Root key for KAS instances (overrides platform config value)",
+ envvar="OT_ROOT_KEY",
+ ),
+ ] = None,
+ ec_tdf_enabled: Annotated[
+ bool,
+ typer.Option(
+ "--ec-tdf-enabled/--no-ec-tdf",
+ help="Enable EC TDF support",
+ ),
+ ] = True,
+ key_management: Annotated[
+ bool,
+ typer.Option(
+ "--key-management/--no-key-management",
+ help="Enable key management on km1/km2 instances",
+ ),
+ ] = False,
+ log_type: Annotated[
+ str,
+ typer.Option(
+ "--log-type",
+ help="Log format type (json, text)",
+ ),
+ ] = "json",
+ health_timeout: Annotated[
+ int,
+ typer.Option(
+ "--health-timeout",
+ help="Seconds to wait for each KAS instance to become healthy",
+ ),
+ ] = 60,
+ instances: Annotated[
+ str | None,
+ typer.Option(
+ "--instances",
+ help="Comma-separated KAS instance names (default: all)",
+ ),
+ ] = None,
+) -> None:
+ """Start KAS instances for CI and emit GitHub Actions outputs.
+
+ Expects the platform to already be running (started by start-up-with-containers).
+ Starts all 6 KAS instances (alpha, beta, gamma, delta, km1, km2) as background
+ processes, waits for each to pass health checks, and emits log file paths as
+ GitHub Actions step outputs.
+
+ Output keys (written to $GITHUB_OUTPUT):
+ kas-alpha-log-file, kas-beta-log-file, kas-gamma-log-file,
+ kas-delta-log-file, kas-km1-log-file, kas-km2-log-file
+ """
+ platform_dir = platform_dir.resolve()
+ if not platform_dir.is_dir():
+ print_error(f"Platform directory does not exist: {platform_dir}")
+ raise typer.Exit(1)
+
+ # Check for required template files
+ kas_template = platform_dir / "opentdf-kas-mode.yaml"
+ platform_config = platform_dir / "opentdf-dev.yaml"
+ if not kas_template.exists():
+ # Fall back to opentdf.yaml if opentdf-kas-mode.yaml doesn't exist
+ kas_template_alt = platform_dir / "opentdf.yaml"
+ if kas_template_alt.exists():
+ print_info(
+ f"Using {kas_template_alt} as KAS template (opentdf-kas-mode.yaml not found)"
+ )
+ else:
+ print_error(
+ f"Neither opentdf-kas-mode.yaml nor opentdf.yaml found in {platform_dir}"
+ )
+ raise typer.Exit(1)
+
+ if not platform_config.exists():
+ # Try opentdf.yaml as fallback
+ platform_config_alt = platform_dir / "opentdf.yaml"
+ if platform_config_alt.exists():
+ platform_config = platform_config_alt
+
+ # Build settings with CI-specific overrides
+ # We use a fresh xtest_root derived from this package's location
+ settings = Settings(
+ platform_dir=platform_dir,
+ )
+ settings.ensure_directories()
+
+ # Update root key in platform config if provided
+ if root_key:
+ _prepare_kas_template(settings, root_key, ec_tdf_enabled)
+
+ # Determine which instances to start
+ if instances:
+ kas_names = [n.strip() for n in instances.split(",")]
+ for name in kas_names:
+ if name not in Ports.all_kas_names():
+ print_error(f"Unknown KAS instance: {name}")
+ raise typer.Exit(1)
+ else:
+ kas_names = Ports.all_kas_names()
+
+ # Start KAS instances
+ print_info(f"Starting KAS instances: {', '.join(kas_names)}...")
+ kas_manager = get_kas_manager(settings)
+
+ failed = []
+ for name in kas_names:
+ kas = kas_manager.get(name)
+ if kas is None:
+ print_error(f"KAS instance {name} not found in manager")
+ failed.append(name)
+ continue
+ if not kas.start():
+ print_error(f"Failed to start KAS {name}")
+ failed.append(name)
+
+ if failed:
+ print_error(f"Failed to start: {', '.join(failed)}")
+ raise typer.Exit(1)
+
+ # Wait for health
+ print_info("Waiting for KAS health checks...")
+ unhealthy = []
+ for name in kas_names:
+ port = Ports.get_kas_port(name)
+ try:
+ wait_for_health(
+ f"http://localhost:{port}/healthz",
+ timeout=health_timeout,
+ service_name=f"KAS {name}",
+ )
+ except WaitTimeoutError as e:
+ print_warning(str(e))
+ unhealthy.append(name)
+
+ if unhealthy:
+ print_error(f"KAS instances failed health check: {', '.join(unhealthy)}")
+ raise typer.Exit(1)
+
+ print_success(f"All {len(kas_names)} KAS instances are healthy")
+
+ # Emit outputs
+ for name in kas_names:
+ log_path = settings.get_kas_log_path(name)
+ output_key = f"kas-{name}-log-file"
+ _emit_github_output(output_key, str(log_path))
+
+ print_success("CI KAS startup complete")
diff --git a/otdf-local/src/otdf_local/cli.py b/otdf-local/src/otdf_local/cli.py
index d8e3597ff..254f2adb9 100644
--- a/otdf-local/src/otdf_local/cli.py
+++ b/otdf-local/src/otdf_local/cli.py
@@ -11,6 +11,7 @@
from rich.live import Live
from otdf_local import __version__
+from otdf_local.ci import ci_app
from otdf_local.config.ports import Ports
from otdf_local.config.settings import get_settings
from otdf_local.health.waits import WaitTimeoutError, wait_for_health, wait_for_port
@@ -35,6 +36,7 @@
status_spinner,
)
from otdf_local.utils.yaml import get_nested, load_yaml
+from otdf_local.xtest import xtest_app
app = typer.Typer(
name="otdf-local",
@@ -43,6 +45,9 @@
pretty_exceptions_enable=sys.stderr.isatty(),
)
+app.add_typer(ci_app, name="ci")
+app.add_typer(xtest_app, name="xtest")
+
def _show_provision_error(result: ProvisionResult, target: str) -> None:
"""Display provisioning error with stderr details."""
diff --git a/otdf-local/src/otdf_local/utils/yaml.py b/otdf-local/src/otdf_local/utils/yaml.py
index a71653a4b..9df2614ec 100644
--- a/otdf-local/src/otdf_local/utils/yaml.py
+++ b/otdf-local/src/otdf_local/utils/yaml.py
@@ -1,5 +1,6 @@
"""YAML manipulation utilities using ruamel.yaml."""
+from io import StringIO
from pathlib import Path
from typing import Any
@@ -23,6 +24,13 @@ def save_yaml(path: Path, data: dict[str, Any]) -> None:
_yaml.dump(data, f)
+def dump_yaml(data: dict[str, Any]) -> str:
+    """Serialize YAML data to a string.
+
+    String-returning counterpart of save_yaml, using the same module-level
+    `_yaml` dumper so in-memory output matches files written to disk.
+    """
+    stream = StringIO()
+    _yaml.dump(data, stream)
+    return stream.getvalue()
+
+
def get_nested(data: dict[str, Any], path: str, default: Any = None) -> Any:
"""Get a nested value from a dict using dot notation.
diff --git a/otdf-local/src/otdf_local/xtest.py b/otdf-local/src/otdf_local/xtest.py
new file mode 100644
index 000000000..76868f141
--- /dev/null
+++ b/otdf-local/src/otdf_local/xtest.py
@@ -0,0 +1,859 @@
+"""XTest orchestration commands for local replay and CI."""
+
+from __future__ import annotations
+
+import json
+import os
+import shutil
+import subprocess
+from pathlib import Path
+from typing import Annotated, Literal
+
+import typer
+from pydantic import BaseModel, Field, ValidationError
+
+from otdf_local.ci import _emit_github_output
+from otdf_local.config.ports import Ports
+from otdf_local.config.settings import Settings, get_settings
+from otdf_local.health.waits import WaitTimeoutError, wait_for_health, wait_for_port
+from otdf_local.services import get_docker_service, get_kas_manager, get_platform_service, get_provisioner
+from otdf_local.utils.console import print_error, print_info, print_success, print_warning, status_spinner
+from otdf_local.utils.yaml import dump_yaml, get_nested, load_yaml, save_yaml
+
+xtest_app = typer.Typer(
+ name="xtest",
+ help="Resolve, summarize, and run xtest jobs locally or in CI.",
+ no_args_is_help=True,
+)
+
+SdkName = Literal["go", "java", "js"]  # SDKs that can drive the encrypt side
+FocusSdk = Literal["all", "go", "java", "js"]  # values accepted by pytest --focus
+OtdfctlSource = Literal["auto", "standalone", "platform"]  # Go CLI source modes
+PhaseName = Literal["legacy", "standard", "abac"]  # runnable xtest phases
+
+# Upstream repositories, keyed by SDK name ("platform" is the server monorepo).
+REPO_URLS = {
+    "platform": "https://github.com/opentdf/platform.git",
+    "go": "https://github.com/opentdf/otdfctl.git",
+    "java": "https://github.com/opentdf/java-sdk.git",
+    "js": "https://github.com/opentdf/web-sdk.git",
+}
+
+
+class ResolvedVersion(BaseModel):
+    """Resolved SDK or platform version.
+
+    One row of the JSON emitted by `otdf-sdk-mgr versions resolve`
+    (see _resolve_versions), validated into a typed model.
+    """
+
+    sdk: str  # SDK family: "platform", "go", "java", or "js"
+    alias: str  # user-supplied ref that was resolved (e.g. "main", "latest")
+    tag: str  # concrete tag/branch name the alias resolved to
+    sha: str = ""  # commit SHA for the tag; may be empty
+    head: bool = False  # True when the ref tracks a moving branch head
+    pr: str | None = None  # pull-request identifier, when the ref is a PR
+    release: str | None = None  # release version usable for artifact installs
+    source: str | None = None  # for go: "platform" (monorepo) vs standalone repo
+    env: str | None = None  # extra env line for Java builds (see _prepare_java_env_files)
+    err: str | None = None  # resolver error message; non-None aborts resolution
+
+
+class XTestRefs(BaseModel):
+    """User-facing refs that were requested."""
+
+    platform: str
+    go: str
+    js: str
+    java: str
+
+
+class XTestOptions(BaseModel):
+    """Execution options for one xtest job."""
+
+    encrypt_sdk: SdkName  # SDK that performs the encrypt side of the matrix
+    focus_sdk: FocusSdk = "all"  # pytest --focus filter
+    otdfctl_source: OtdfctlSource = "auto"  # Go CLI source (see _resolved_otdfctl_source)
+    phases: list[PhaseName] = Field(default_factory=lambda: ["legacy", "standard", "abac"])
+    include_helper_tests: bool = False  # run test_self.py / test_audit_logs.py first
+    include_otdf_local_integration: bool = False  # run otdf-local's own integration suite
+
+
+class XTestResolved(BaseModel):
+    """Resolved concrete versions under test."""
+
+    platform: list[ResolvedVersion]
+    go: list[ResolvedVersion]
+    js: list[ResolvedVersion]
+    java: list[ResolvedVersion]
+
+
+class XTestRunConfig(BaseModel):
+    """Replayable xtest run configuration."""
+
+    schema_version: int = 1  # version of this config file format
+    kind: str = "otdf-local.xtest.run"  # discriminator for config files
+    refs: XTestRefs
+    options: XTestOptions
+    resolved: XTestResolved
+
+    @property
+    def platform(self) -> ResolvedVersion:
+        """Return the single platform version for this run."""
+        # A run targets exactly one platform build; anything else means the
+        # plan was never narrowed to a single version.
+        if len(self.resolved.platform) != 1:
+            raise ValueError("xtest run config requires exactly one resolved platform version")
+        return self.resolved.platform[0]
+
+    def go_heads_json(self) -> str:
+        """Return Go head tags as JSON for pytest env."""
+        return json.dumps([row.tag for row in self.resolved.go if row.head])
+
+
+class WorkspacePaths(BaseModel):
+    """Relevant workspace paths for the runner."""
+
+    repo_root: Path  # parent of the xtest root
+    xtest_root: Path  # root of the xtest suite
+    otdf_local_root: Path  # otdf-local project root
+    otdf_sdk_mgr_root: Path  # otdf-sdk-mgr project root
+    run_root: Path  # scratch dir for checkouts and worktrees
+
+
+def _workspace() -> WorkspacePaths:
+    """Derive workspace paths from settings and ensure the run scratch dir exists.
+
+    Assumes the repo root is the parent of the xtest root, with the otdf-local
+    and otdf-sdk-mgr projects as sibling directories under it.
+    """
+    settings = get_settings()
+    repo_root = settings.xtest_root.parent
+    otdf_local_root = repo_root / "otdf-local"
+    otdf_sdk_mgr_root = repo_root / "otdf-sdk-mgr"
+    run_root = settings.xtest_root / "tmp" / "xtest-runner"
+    # Created eagerly so callers can write into run_root without extra checks.
+    run_root.mkdir(parents=True, exist_ok=True)
+    return WorkspacePaths(
+        repo_root=repo_root,
+        xtest_root=settings.xtest_root,
+        otdf_local_root=otdf_local_root,
+        otdf_sdk_mgr_root=otdf_sdk_mgr_root,
+        run_root=run_root,
+    )
+
+
+def _sanitize_name(value: str) -> str:
+ return "".join(c if c.isalnum() or c in {"-", "_", "."} else "-" for c in value)
+
+
+def _run(
+ cmd: list[str],
+ *,
+ cwd: Path | None = None,
+ env: dict[str, str] | None = None,
+ capture: bool = False,
+) -> subprocess.CompletedProcess[str]:
+ result = subprocess.run(
+ cmd,
+ cwd=cwd,
+ env={**os.environ, **(env or {})},
+ text=True,
+ capture_output=capture,
+ )
+ if result.returncode != 0:
+ raise subprocess.CalledProcessError(
+ result.returncode,
+ cmd,
+ output=result.stdout,
+ stderr=result.stderr,
+ )
+ return result
+
+
+def _resolve_versions(
+ sdk_mgr_root: Path,
+ sdk: str,
+ refs: str,
+ *,
+ go_source: OtdfctlSource,
+) -> list[ResolvedVersion]:
+ tokens = [part for part in refs.split() if part]
+ if not tokens:
+ raise ValueError(f"No refs provided for {sdk}")
+ env = {"OTDFCTL_SOURCE": "platform"} if sdk == "go" and go_source == "platform" else None
+ result = _run(
+ ["uv", "run", "--project", str(sdk_mgr_root), "otdf-sdk-mgr", "versions", "resolve", sdk, *tokens],
+ cwd=sdk_mgr_root,
+ env=env,
+ capture=True,
+ )
+ data = json.loads(result.stdout)
+ resolved = [ResolvedVersion.model_validate(row) for row in data]
+ errors = [row for row in resolved if row.err]
+ if errors:
+ raise ValueError(
+ f"Version resolution failed for {sdk}: "
+ + "; ".join(f"{row.alias}: {row.err}" for row in errors)
+ )
+ return resolved
+
+
+def _build_run_config(
+    workspace: WorkspacePaths,
+    *,
+    platform_ref: str,
+    go_ref: str,
+    js_ref: str,
+    java_ref: str,
+    encrypt_sdk: SdkName,
+    focus_sdk: FocusSdk,
+    otdfctl_source: OtdfctlSource,
+    include_helper_tests: bool,
+    include_otdf_local_integration: bool,
+    phases: list[PhaseName] | None = None,
+) -> XTestRunConfig:
+    """Resolve every ref and assemble a replayable XTestRunConfig.
+
+    Raises ValueError when any ref fails to resolve or when platform_ref
+    resolves to more than one version (a run targets exactly one platform).
+    """
+    # go_source is passed to every resolver call for uniformity; it only
+    # influences resolution of the "go" SDK (see _resolve_versions).
+    resolved = XTestResolved(
+        platform=_resolve_versions(workspace.otdf_sdk_mgr_root, "platform", platform_ref, go_source=otdfctl_source),
+        go=_resolve_versions(workspace.otdf_sdk_mgr_root, "go", go_ref, go_source=otdfctl_source),
+        js=_resolve_versions(workspace.otdf_sdk_mgr_root, "js", js_ref, go_source=otdfctl_source),
+        java=_resolve_versions(workspace.otdf_sdk_mgr_root, "java", java_ref, go_source=otdfctl_source),
+    )
+    if len(resolved.platform) != 1:
+        raise ValueError("platform-ref must resolve to exactly one platform version for a single xtest run")
+    return XTestRunConfig(
+        refs=XTestRefs(platform=platform_ref, go=go_ref, js=js_ref, java=java_ref),
+        options=XTestOptions(
+            encrypt_sdk=encrypt_sdk,
+            focus_sdk=focus_sdk,
+            otdfctl_source=otdfctl_source,
+            phases=phases or ["legacy", "standard", "abac"],
+            include_helper_tests=include_helper_tests,
+            include_otdf_local_integration=include_otdf_local_integration,
+        ),
+        resolved=resolved,
+    )
+
+
+def _config_to_dict(config: XTestRunConfig) -> dict[str, object]:
+    """Dump the config to JSON-compatible primitives, omitting None fields."""
+    return config.model_dump(mode="json", exclude_none=True)
+
+
+def _summary_markdown(config: XTestRunConfig) -> str:
+ yaml_text = dump_yaml(_config_to_dict(config)).rstrip()
+ platform = config.platform
+ job_name = f"{config.options.encrypt_sdk}/{platform.tag}"
+ return (
+ f"### Local Repro: `{job_name}`\n"
+ f"\n"
+ f"Replay with otdf-local xtest run --config ...
\n\n"
+ "```yaml\n"
+ f"{yaml_text}\n"
+ "```\n\n"
+ "```bash\n"
+ "cat > xtest-repro.yaml <<'EOF'\n"
+ f"{yaml_text}\n"
+ "EOF\n"
+ "uv run --project otdf-local otdf-local xtest run --config xtest-repro.yaml\n"
+ "```\n"
+ " \n"
+ )
+
+
+def _write_summary(config: XTestRunConfig) -> None:
+    """Append the replay summary to the GitHub Actions step summary, if available."""
+    path = os.environ.get("GITHUB_STEP_SUMMARY")
+    if not path:
+        # Not running under GitHub Actions (or summaries disabled): no-op.
+        print_warning("GITHUB_STEP_SUMMARY is not set; skipping summary output")
+        return
+    # Append rather than truncate: other steps may have written sections already.
+    with open(path, "a") as handle:
+        handle.write(_summary_markdown(config))
+
+
+def _write_config(path: Path, config: XTestRunConfig) -> None:
+    """Serialize the run config to YAML at `path`, creating parent directories."""
+    path.parent.mkdir(parents=True, exist_ok=True)
+    save_yaml(path, _config_to_dict(config))
+
+
+def _load_config(path: Path) -> XTestRunConfig:
+    """Load and validate a run config written by `xtest plan` / _write_config.
+
+    Raises ValueError (wrapping the pydantic ValidationError) on invalid files.
+    """
+    try:
+        return XTestRunConfig.model_validate(load_yaml(path))
+    except ValidationError as exc:
+        raise ValueError(f"Invalid xtest config {path}: {exc}") from exc
+
+
+def _git_worktree_checkout(repo_url: str, bare_repo: Path, worktree: Path, sha: str) -> Path:
+ bare_repo.parent.mkdir(parents=True, exist_ok=True)
+ if not bare_repo.exists():
+ _run(["git", "clone", "--bare", repo_url, str(bare_repo)])
+ else:
+ _run(["git", f"--git-dir={bare_repo}", "fetch", "--all", "--tags"])
+ if worktree.exists():
+ current = _run(["git", "-C", str(worktree), "rev-parse", "HEAD"], capture=True)
+ if current.stdout.strip() == sha:
+ return worktree
+ _run(["git", f"--git-dir={bare_repo}", "worktree", "remove", "--force", str(worktree)])
+ shutil.rmtree(worktree, ignore_errors=True)
+ _run(["git", f"--git-dir={bare_repo}", "worktree", "add", "--detach", str(worktree), sha])
+ return worktree
+
+
+def _checkout_platform(workspace: WorkspacePaths, config: XTestRunConfig) -> Path:
+    """Check out the resolved platform SHA into a per-version worktree; return its path."""
+    platform = config.platform
+    # Slug falls back to "local" when the resolver returned no SHA.
+    slug = _sanitize_name(f"{platform.tag}-{platform.sha[:7] or 'local'}")
+    bare_repo = workspace.run_root / "repos" / "platform.git"
+    worktree = workspace.run_root / "platform" / slug
+    print_info(f"Checking out platform {platform.tag} ({platform.sha[:7]})")
+    return _git_worktree_checkout(REPO_URLS["platform"], bare_repo, worktree, platform.sha)
+
+
+def _resolved_otdfctl_source(config: XTestRunConfig, platform_dir: Path) -> Literal["standalone", "platform"]:
+ requested = config.options.otdfctl_source
+ if requested == "platform":
+ return "platform"
+ if requested == "standalone":
+ return "standalone"
+ embedded = platform_dir / "otdfctl" / "go.mod"
+ return "platform" if embedded.is_file() else "standalone"
+
+
+def _clean_sdk_state(workspace: WorkspacePaths) -> None:
+    """Reset SDK dist/src state and drop stale Java per-version env files."""
+    print_info("Cleaning xtest SDK dist/src state")
+    _run(
+        ["uv", "run", "--project", str(workspace.otdf_sdk_mgr_root), "otdf-sdk-mgr", "clean"],
+        cwd=workspace.otdf_sdk_mgr_root,
+    )
+    # Env files are regenerated per run by _prepare_java_env_files; stale ones
+    # could leak settings from a previous platform ref.
+    for env_file in (_sdk_root(workspace, "java")).glob("*.env"):
+        env_file.unlink(missing_ok=True)
+
+
+def _install_artifact_version(workspace: WorkspacePaths, sdk: str, version: ResolvedVersion) -> bool:
+    """Try to install a prebuilt release artifact for `version`.
+
+    Returns True on success; False when the version must be built from source
+    instead (head refs, versions without a release, or a failed install).
+    """
+    if version.head or not version.release:
+        return False
+    cmd = [
+        "uv",
+        "run",
+        "--project",
+        str(workspace.otdf_sdk_mgr_root),
+        "otdf-sdk-mgr",
+        "install",
+        "artifact",
+        "--sdk",
+        sdk,
+        "--version",
+        version.release,
+        "--dist-name",
+        version.tag,
+    ]
+    if version.source:
+        cmd.extend(["--source", version.source])
+    try:
+        _run(cmd, cwd=workspace.otdf_sdk_mgr_root)
+        return True
+    except subprocess.CalledProcessError as exc:
+        # Artifact installs are best-effort; failure downgrades to a source build.
+        print_warning(f"Artifact install failed for {sdk} {version.tag}; falling back to source build")
+        if exc.stderr:
+            # Show only the last stderr line to keep logs readable.
+            print_warning(exc.stderr.strip().splitlines()[-1])
+        return False
+
+
+def _sdk_root(workspace: WorkspacePaths, sdk: str) -> Path:
+    """Return the xtest SDK directory for `sdk` (i.e. <xtest_root>/sdk/<sdk>)."""
+    return workspace.xtest_root / "sdk" / sdk
+
+
+def _checkout_source_version(
+ workspace: WorkspacePaths,
+ sdk: str,
+ version: ResolvedVersion,
+ *,
+ platform_dir: Path,
+ platform_sha: str,
+) -> None:
+ sdk_root = _sdk_root(workspace, sdk)
+ src_dir = sdk_root / "src" / version.tag
+ if sdk == "go" and version.source == "platform":
+ if version.sha == platform_sha and (platform_dir / "otdfctl").is_dir():
+ src_dir.parent.mkdir(parents=True, exist_ok=True)
+ src_dir.unlink(missing_ok=True)
+ src_dir.symlink_to(platform_dir / "otdfctl", target_is_directory=True)
+ return
+ bare_repo = workspace.run_root / "repos" / "platform.git"
+ platform_src = workspace.run_root / "sdk-platform" / _sanitize_name(f"{version.tag}-{version.sha[:7]}")
+ _git_worktree_checkout(REPO_URLS["platform"], bare_repo, platform_src, version.sha)
+ src_dir.parent.mkdir(parents=True, exist_ok=True)
+ src_dir.unlink(missing_ok=True)
+ src_dir.symlink_to(platform_src / "otdfctl", target_is_directory=True)
+ return
+
+ bare_name = f"{sdk}.git"
+ bare_repo = sdk_root / "src" / bare_name
+ _git_worktree_checkout(REPO_URLS[sdk], bare_repo, src_dir, version.sha)
+
+
+def _prepare_java_env_files(config: XTestRunConfig, workspace: WorkspacePaths, platform_sha: str) -> None:
+    """Write per-version .env files used by the Java SDK build.
+
+    Head Java builds against a head platform (when focusing go/java) pin
+    PLATFORM_BRANCH to the platform SHA under test; otherwise any resolver-
+    provided env line is written verbatim.
+    """
+    java_root = _sdk_root(workspace, "java")
+    for version in config.resolved.java:
+        env_path = java_root / f"{version.tag}.env"
+        if version.head and config.platform.head and config.options.focus_sdk in {"go", "java"}:
+            env_path.write_text(f"PLATFORM_BRANCH={platform_sha}\n")
+            continue
+        if version.env:
+            env_path.write_text(f"{version.env}\n")
+
+
+def _build_source_sdks(
+    workspace: WorkspacePaths,
+    config: XTestRunConfig,
+    *,
+    platform_dir: Path,
+    otdfctl_source: Literal["standalone", "platform"],
+) -> None:
+    """Install or build every resolved SDK version.
+
+    Artifact installs are attempted first; versions that cannot be installed
+    are checked out from source, fixed up (Java always, Go only for head
+    builds against a head platform with standalone otdfctl), then built with
+    each SDK's Makefile.
+    """
+    # Partition versions: anything _install_artifact_version cannot satisfy
+    # must be built from a source checkout.
+    needs_build: dict[str, list[ResolvedVersion]] = {"go": [], "java": [], "js": []}
+    for sdk in ("go", "java", "js"):
+        versions = getattr(config.resolved, sdk)
+        for version in versions:
+            if not _install_artifact_version(workspace, sdk, version):
+                needs_build[sdk].append(version)
+
+    if not any(needs_build.values()):
+        return
+
+    for sdk, versions in needs_build.items():
+        for version in versions:
+            print_info(f"Checking out {sdk} source for {version.tag} ({version.sha[:7]})")
+            _checkout_source_version(
+                workspace,
+                sdk,
+                version,
+                platform_dir=platform_dir,
+                platform_sha=config.platform.sha,
+            )
+
+    if needs_build["java"]:
+        _run(
+            ["uv", "run", "--project", str(workspace.otdf_sdk_mgr_root), "otdf-sdk-mgr", "java-fixup"],
+            cwd=workspace.otdf_sdk_mgr_root,
+        )
+        _prepare_java_env_files(config, workspace, config.platform.sha)
+
+    # go-fixup bridges a standalone otdfctl checkout's go.mod to the local
+    # platform modules; only needed for head-on-head go-focused runs.
+    if (
+        needs_build["go"]
+        and any(version.head for version in needs_build["go"])
+        and otdfctl_source != "platform"
+        and config.options.focus_sdk == "go"
+        and config.platform.head
+    ):
+        heads = json.dumps([version.tag for version in needs_build["go"] if version.head])
+        _run(
+            [
+                "uv",
+                "run",
+                "--project",
+                str(workspace.otdf_sdk_mgr_root),
+                "otdf-sdk-mgr",
+                "go-fixup",
+                "--platform-dir",
+                str(platform_dir),
+                "--heads",
+                heads,
+                str(_sdk_root(workspace, "go") / "src"),
+            ],
+            cwd=workspace.otdf_sdk_mgr_root,
+        )
+
+    for sdk, versions in needs_build.items():
+        if not versions:
+            continue
+        print_info(f"Building {sdk} SDK CLI(s)")
+        _run(["make"], cwd=_sdk_root(workspace, sdk))
+
+
+def _platform_version(platform_dir: Path, fallback: str) -> str:
+    """Ask the platform checkout for its version via `go run ./service version`.
+
+    stderr is consulted when stdout is empty; `fallback` is returned when the
+    command fails or prints nothing at all.
+    """
+    try:
+        result = _run(["go", "run", "./service", "version"], cwd=platform_dir, capture=True)
+        version = result.stdout.strip() or result.stderr.strip()
+        return version or fallback
+    except subprocess.CalledProcessError:
+        return fallback
+
+
+def _supports_multikas(platform_tag: str, platform_version: str) -> bool:
+ if platform_tag == "main":
+ return True
+ raw = platform_version.lstrip("v")
+ parts = raw.split(".")
+ if len(parts) < 2:
+ return False
+ try:
+ major = int(parts[0])
+ minor = int(parts[1])
+ except ValueError:
+ return False
+ return major > 0 or minor > 4
+
+
+def _key_management_supported(settings: Settings) -> bool:
+    """Return True when services.kas.preview.key_management is set to a boolean in platform config.
+
+    NOTE(review): set membership compares by equality, so integer 0/1 values
+    would also match False/True — confirm the YAML loader only yields real
+    booleans for this field.
+    """
+    config = load_yaml(settings.platform_config)
+    return get_nested(config, "services.kas.preview.key_management") in {True, False}
+
+
+def _root_key(settings: Settings) -> str:
+ config = load_yaml(settings.platform_config)
+ value = get_nested(config, "services.kas.root_key")
+ if isinstance(value, str) and value:
+ return value
+ raise ValueError(f"No services.kas.root_key found in {settings.platform_config}")
+
+
+def _prepare_test_results_dir(workspace: WorkspacePaths) -> Path:
+    """Create a fresh <xtest_root>/test-results directory, discarding prior results."""
+    results_dir = workspace.xtest_root / "test-results"
+    shutil.rmtree(results_dir, ignore_errors=True)
+    results_dir.mkdir(parents=True, exist_ok=True)
+    return results_dir
+
+
+def _pytest_env(
+    workspace: WorkspacePaths,
+    settings: Settings,
+    config: XTestRunConfig,
+    *,
+    platform_tag: str,
+    platform_version: str,
+    root_key: str,
+    kas_logs: dict[str, str] | None = None,
+) -> dict[str, str]:
+    """Build the environment overlay passed to xtest pytest invocations.
+
+    `kas_logs`, when given, contributes the KAS_<NAME>_LOG_FILE entries
+    produced by _start_kas for the ABAC phase.
+    """
+    env = {
+        "PLATFORMURL": settings.platform_url,
+        "PLATFORM_DIR": str(settings.platform_dir.resolve()),
+        "PLATFORM_TAG": platform_tag,
+        "PLATFORM_VERSION": platform_version,
+        "SCHEMA_FILE": str((workspace.xtest_root / "manifest.schema.json").resolve()),
+        "OT_ROOT_KEY": root_key,
+        "PLATFORM_LOG_FILE": str((settings.logs_dir / "platform.log").resolve()),
+        "OTDFCTL_HEADS": config.go_heads_json(),
+    }
+    if kas_logs:
+        env.update(kas_logs)
+    return env
+
+
+def _run_pytest(workspace: WorkspacePaths, args: list[str], env: dict[str, str]) -> None:
+    """Run pytest in the xtest root with the given args and environment overlay."""
+    cmd = ["uv", "run", "pytest", *args]
+    _run(cmd, cwd=workspace.xtest_root, env=env)
+
+
+def _start_environment(settings: Settings) -> None:
+    """Bring up the full test environment: Docker, Keycloak, platform, fixtures.
+
+    Order matters: Keycloak and Postgres must be healthy before provisioning
+    Keycloak, and the platform must be healthy before fixture provisioning.
+    Raises RuntimeError on any startup or provisioning failure.
+    """
+    docker = get_docker_service(settings)
+    platform = get_platform_service(settings)
+    provisioner = get_provisioner(settings)
+
+    print_info("Starting Docker services")
+    if not docker.start():
+        raise RuntimeError("Failed to start Docker services")
+
+    with status_spinner("Waiting for Keycloak..."):
+        wait_for_health(
+            f"http://localhost:{Ports.KEYCLOAK}/auth/realms/master",
+            timeout=120,
+            service_name="Keycloak",
+        )
+    wait_for_port(Ports.POSTGRES, "localhost", timeout=60, service_name="PostgreSQL")
+    if not provisioner.provision_keycloak():
+        raise RuntimeError("Keycloak provisioning failed")
+
+    print_info("Starting platform")
+    if not platform.start():
+        raise RuntimeError("Failed to start platform")
+    with status_spinner("Waiting for platform..."):
+        wait_for_health(
+            f"http://localhost:{Ports.PLATFORM}/healthz",
+            timeout=120,
+            service_name="Platform",
+        )
+    fixtures = provisioner.provision_fixtures()
+    if not fixtures:
+        # NOTE(review): assumes a falsy result still exposes error_message —
+        # confirm provision_fixtures never returns None.
+        raise RuntimeError(f"Fixture provisioning failed: {fixtures.error_message}")
+
+
+def _start_kas(settings: Settings) -> dict[str, str]:
+    """Start every configured KAS instance and wait for each to pass /healthz.
+
+    Returns a KAS_<NAME>_LOG_FILE -> path mapping for the pytest environment.
+    Raises RuntimeError when any instance fails to start; health-check
+    timeouts propagate as WaitTimeoutError from wait_for_health.
+    """
+    logs: dict[str, str] = {}
+    manager = get_kas_manager(settings)
+    print_info("Starting KAS instances")
+    failed = []
+    for name in Ports.all_kas_names():
+        kas = manager.get(name)
+        if kas is None or not kas.start():
+            failed.append(name)
+    if failed:
+        raise RuntimeError(f"Failed to start KAS instances: {', '.join(failed)}")
+    for name in Ports.all_kas_names():
+        wait_for_health(
+            f"http://localhost:{Ports.get_kas_port(name)}/healthz",
+            timeout=60,
+            service_name=f"KAS {name}",
+        )
+        logs[f"KAS_{name.upper()}_LOG_FILE"] = str(settings.get_kas_log_path(name).resolve())
+    return logs
+
+
+def _stop_environment(settings: Settings) -> None:
+    """Tear down KAS instances, the platform, then Docker services.
+
+    Nested finally blocks guarantee each later stage runs even when an
+    earlier stop raises; the last exception (if any) propagates.
+    """
+    try:
+        get_kas_manager(settings).stop_all()
+    finally:
+        try:
+            get_platform_service(settings).stop()
+        finally:
+            get_docker_service(settings).stop()
+
+
+def _run_requested_phases(
+    workspace: WorkspacePaths,
+    settings: Settings,
+    config: XTestRunConfig,
+    *,
+    platform_tag: str,
+    platform_version: str,
+    root_key: str,
+    multikas_supported: bool,
+) -> None:
+    """Run the configured xtest phases against an already-started environment.
+
+    Order: optional helper tests, optional otdf-local integration tests, then
+    legacy / standard / abac as selected in config.options.phases. The ABAC
+    phase additionally starts the extra KAS instances and is skipped with a
+    warning when the platform lacks multi-KAS support.
+    """
+    results_dir = _prepare_test_results_dir(workspace)
+    base_env = _pytest_env(
+        workspace,
+        settings,
+        config,
+        platform_tag=platform_tag,
+        platform_version=platform_version,
+        root_key=root_key,
+    )
+
+    if config.options.include_helper_tests:
+        _run_pytest(
+            workspace,
+            [
+                "--html",
+                str(results_dir / f"helper-{config.options.focus_sdk}-{platform_tag}.html"),
+                "--self-contained-html",
+                "--sdks-encrypt",
+                config.options.encrypt_sdk,
+                "test_self.py",
+                "test_audit_logs.py",
+            ],
+            base_env,
+        )
+
+    if config.options.include_otdf_local_integration:
+        _run(
+            [
+                "uv",
+                "run",
+                "--project",
+                str(workspace.otdf_local_root),
+                "pytest",
+                "--maxfail=1",
+                "--disable-warnings",
+                "-v",
+                "--tb=short",
+                "-m",
+                "integration",
+            ],
+            cwd=workspace.otdf_local_root,
+            env={"OTDF_LOCAL_PLATFORM_DIR": str(settings.platform_dir.resolve())},
+        )
+
+    # Flags shared by all three main phases.
+    common = [
+        "-ra",
+        "-v",
+        "--sdks-encrypt",
+        config.options.encrypt_sdk,
+        "--focus",
+        config.options.focus_sdk,
+    ]
+
+    if "legacy" in config.options.phases:
+        _run_pytest(
+            workspace,
+            [
+                "-n",
+                "auto",
+                "--dist",
+                "worksteal",
+                "--html",
+                str(results_dir / f"legacy-{config.options.focus_sdk}-{platform_tag}.html"),
+                "--self-contained-html",
+                *common,
+                "test_legacy.py",
+            ],
+            base_env,
+        )
+
+    if "standard" in config.options.phases:
+        _run_pytest(
+            workspace,
+            [
+                "-n",
+                "auto",
+                "--dist",
+                "loadscope",
+                "--html",
+                str(results_dir / f"standard-{config.options.focus_sdk}-{platform_tag}.html"),
+                "--self-contained-html",
+                *common,
+                "test_tdfs.py",
+                "test_policytypes.py",
+            ],
+            base_env,
+        )
+
+    if "abac" in config.options.phases:
+        if not multikas_supported:
+            print_warning(f"Skipping ABAC phase: platform {platform_version} does not support multikas")
+            return
+        # ABAC needs the extra KAS instances; their log paths are fed to
+        # pytest through the environment overlay.
+        kas_logs = _start_kas(settings)
+        abac_env = _pytest_env(
+            workspace,
+            settings,
+            config,
+            platform_tag=platform_tag,
+            platform_version=platform_version,
+            root_key=root_key,
+            kas_logs=kas_logs,
+        )
+        _run_pytest(
+            workspace,
+            [
+                "-n",
+                "auto",
+                "--dist",
+                "loadscope",
+                "--html",
+                str(results_dir / f"attributes-{config.options.focus_sdk}-{platform_tag}.html"),
+                "--self-contained-html",
+                "--audit-log-dir",
+                str(results_dir / "audit-logs"),
+                *common,
+                "test_abac.py",
+            ],
+            abac_env,
+        )
+
+
+@xtest_app.command("plan")
+def plan_run(
+    platform_ref: Annotated[str, typer.Option(help="Platform ref under test (single ref)")],
+    encrypt_sdk: Annotated[SdkName, typer.Option(help="SDK used for encrypt side of xtest matrix")],
+    output: Annotated[Path | None, typer.Option(help="Write config YAML to this path")] = None,
+    go_ref: Annotated[str, typer.Option(help="Go/otdfctl refs to resolve")] = "main",
+    js_ref: Annotated[str, typer.Option(help="JS SDK refs to resolve")] = "main",
+    java_ref: Annotated[str, typer.Option(help="Java SDK refs to resolve")] = "main",
+    focus_sdk: Annotated[FocusSdk, typer.Option(help="Focus filter for pytest")] = "all",
+    otdfctl_source: Annotated[OtdfctlSource, typer.Option(help="Go SDK source resolution mode")] = "auto",
+    include_helper_tests: Annotated[
+        bool,
+        typer.Option("--include-helper-tests", help="Include helper-library pytest validation"),
+    ] = False,
+    include_otdf_local_integration: Annotated[
+        bool,
+        typer.Option("--include-otdf-local-integration", help="Include otdf-local integration tests"),
+    ] = False,
+    write_summary: Annotated[
+        bool,
+        typer.Option("--write-summary", help="Append the replay config to GITHUB_STEP_SUMMARY"),
+    ] = False,
+) -> None:
+    """Resolve refs and write a replayable xtest config."""
+    workspace = _workspace()
+    config = _build_run_config(
+        workspace,
+        platform_ref=platform_ref,
+        go_ref=go_ref,
+        js_ref=js_ref,
+        java_ref=java_ref,
+        encrypt_sdk=encrypt_sdk,
+        focus_sdk=focus_sdk,
+        otdfctl_source=otdfctl_source,
+        include_helper_tests=include_helper_tests,
+        include_otdf_local_integration=include_otdf_local_integration,
+    )
+
+    # Without --output, print the resolved config to stdout instead of a file.
+    if output:
+        _write_config(output, config)
+        print_success(f"Wrote xtest config to {output}")
+        _emit_github_output("config-path", str(output))
+    else:
+        print(dump_yaml(_config_to_dict(config)).rstrip())
+
+    # Always emit these so downstream workflow steps can consume them.
+    _emit_github_output("platform-tag", config.platform.tag)
+    _emit_github_output("platform-sha", config.platform.sha)
+    _emit_github_output("encrypt-sdk", config.options.encrypt_sdk)
+
+    if write_summary:
+        _write_summary(config)
+
+
+@xtest_app.command("run")
+def run_xtest(
+    config_path: Annotated[
+        Path | None,
+        typer.Option("--config", help="Replay config generated by `otdf-local xtest plan`"),
+    ] = None,
+    platform_ref: Annotated[str | None, typer.Option(help="Platform ref under test")] = None,
+    encrypt_sdk: Annotated[SdkName | None, typer.Option(help="SDK used for encrypt side")] = None,
+    go_ref: Annotated[str, typer.Option(help="Go/otdfctl refs to resolve")] = "main",
+    js_ref: Annotated[str, typer.Option(help="JS SDK refs to resolve")] = "main",
+    java_ref: Annotated[str, typer.Option(help="Java SDK refs to resolve")] = "main",
+    focus_sdk: Annotated[FocusSdk, typer.Option(help="Focus filter for pytest")] = "all",
+    otdfctl_source: Annotated[OtdfctlSource, typer.Option(help="Go SDK source resolution mode")] = "auto",
+    include_helper_tests: Annotated[
+        bool,
+        typer.Option("--include-helper-tests", help="Include helper-library pytest validation"),
+    ] = False,
+    include_otdf_local_integration: Annotated[
+        bool,
+        typer.Option("--include-otdf-local-integration", help="Include otdf-local integration tests"),
+    ] = False,
+) -> None:
+    """Run the same xtest orchestration locally that CI uses."""
+    workspace = _workspace()
+    # Either replay a saved config, or resolve a fresh one from CLI refs.
+    if config_path:
+        config = _load_config(config_path)
+    else:
+        if not platform_ref or not encrypt_sdk:
+            raise typer.BadParameter("either --config or both --platform-ref and --encrypt-sdk are required")
+        config = _build_run_config(
+            workspace,
+            platform_ref=platform_ref,
+            go_ref=go_ref,
+            js_ref=js_ref,
+            java_ref=java_ref,
+            encrypt_sdk=encrypt_sdk,
+            focus_sdk=focus_sdk,
+            otdfctl_source=otdfctl_source,
+            include_helper_tests=include_helper_tests,
+            include_otdf_local_integration=include_otdf_local_integration,
+        )
+
+    platform_dir = _checkout_platform(workspace, config)
+    settings = Settings(xtest_root=workspace.xtest_root, platform_dir=platform_dir)
+    settings.ensure_directories()
+    source_mode = _resolved_otdfctl_source(config, platform_dir)
+
+    print_info(
+        f"Running xtest for encrypt SDK {config.options.encrypt_sdk} against platform {config.platform.tag}"
+    )
+
+    try:
+        _clean_sdk_state(workspace)
+        _build_source_sdks(workspace, config, platform_dir=platform_dir, otdfctl_source=source_mode)
+        _start_environment(settings)
+
+        platform_version = _platform_version(platform_dir, config.platform.tag)
+        root_key = _root_key(settings)
+        multikas_supported = _supports_multikas(config.platform.tag, platform_version)
+        km_supported = _key_management_supported(settings)
+        print_info(f"Platform version: {platform_version}")
+        print_info(f"otdfctl source mode: {source_mode}")
+        print_info(f"Multi-KAS support: {'enabled' if multikas_supported else 'disabled'}")
+        print_info(f"Key-management config field present: {'yes' if km_supported else 'no'}")
+
+        _run_requested_phases(
+            workspace,
+            settings,
+            config,
+            platform_tag=config.platform.tag,
+            platform_version=platform_version,
+            root_key=root_key,
+            multikas_supported=multikas_supported,
+        )
+    except (RuntimeError, ValueError, subprocess.CalledProcessError, WaitTimeoutError) as exc:
+        print_error(str(exc))
+        raise typer.Exit(1) from exc
+    finally:
+        # Teardown runs on success, failure, and Ctrl-C alike.
+        _stop_environment(settings)
+
+    print_success("xtest run completed successfully")
diff --git a/otdf-local/tests/test_xtest.py b/otdf-local/tests/test_xtest.py
new file mode 100644
index 000000000..8ab83c06a
--- /dev/null
+++ b/otdf-local/tests/test_xtest.py
@@ -0,0 +1,106 @@
+"""Unit tests for xtest orchestration helpers."""
+
+from pathlib import Path
+
+from otdf_local.xtest import (
+ ResolvedVersion,
+ XTestOptions,
+ XTestRefs,
+ XTestResolved,
+ XTestRunConfig,
+ _load_config,
+ _resolved_otdfctl_source,
+ _summary_markdown,
+ _supports_multikas,
+ _write_config,
+)
+
+
+def sample_config() -> XTestRunConfig:
+    """Build a minimal, fully-resolved head-only run config for the tests below."""
+    return XTestRunConfig(
+        refs=XTestRefs(platform="main", go="main latest", js="main", java="main"),
+        options=XTestOptions(
+            encrypt_sdk="go",
+            focus_sdk="all",
+            otdfctl_source="auto",
+            include_helper_tests=True,
+        ),
+        resolved=XTestResolved(
+            platform=[
+                ResolvedVersion(
+                    sdk="platform",
+                    alias="main",
+                    tag="main",
+                    sha="0123456789abcdef0123456789abcdef01234567",
+                    head=True,
+                )
+            ],
+            go=[
+                ResolvedVersion(
+                    sdk="go",
+                    alias="main",
+                    tag="main",
+                    sha="fedcba9876543210fedcba9876543210fedcba98",
+                    head=True,
+                )
+            ],
+            js=[
+                ResolvedVersion(
+                    sdk="js",
+                    alias="main",
+                    tag="main",
+                    sha="1111111111111111111111111111111111111111",
+                    head=True,
+                )
+            ],
+            java=[
+                ResolvedVersion(
+                    sdk="java",
+                    alias="main",
+                    tag="main",
+                    sha="2222222222222222222222222222222222222222",
+                    head=True,
+                )
+            ],
+        ),
+    )
+
+
+def test_summary_markdown_contains_yaml_and_replay_command():
+    """Summary must embed the YAML config and the exact replay command."""
+    config = sample_config()
+
+    summary = _summary_markdown(config)
+
+    assert "Local Repro" in summary
+    assert "uv run --project otdf-local otdf-local xtest run --config xtest-repro.yaml" in summary
+    assert "encrypt_sdk: go" in summary
+    assert "platform: main" in summary
+
+
+def test_config_roundtrip(tmp_path: Path):
+    """A config written to YAML loads back equal to the original."""
+    path = tmp_path / "xtest-repro.yaml"
+    config = sample_config()
+
+    _write_config(path, config)
+    loaded = _load_config(path)
+
+    assert loaded == config
+
+
+def test_resolved_otdfctl_source_auto_prefers_embedded_checkout(tmp_path: Path):
+    """'auto' picks the platform-embedded otdfctl when go.mod exists; explicit choices win."""
+    platform_dir = tmp_path / "platform"
+    embedded = platform_dir / "otdfctl"
+    embedded.mkdir(parents=True)
+    (embedded / "go.mod").write_text("module github.com/opentdf/platform/otdfctl\n")
+
+    config = sample_config()
+    assert _resolved_otdfctl_source(config, platform_dir) == "platform"
+
+    config.options.otdfctl_source = "standalone"
+    assert _resolved_otdfctl_source(config, platform_dir) == "standalone"
+
+
+def test_supports_multikas_semver_gate():
+    """main is always supported; releases gate on version >= 0.5."""
+    assert _supports_multikas("main", "0.1.0") is True
+    assert _supports_multikas("v0.5.0", "v0.5.0") is True
+    assert _supports_multikas("v0.4.9", "v0.4.9") is False
diff --git a/otdf-sdk-mgr/README.md b/otdf-sdk-mgr/README.md
index ee2a2a8b7..4b930eaae 100644
--- a/otdf-sdk-mgr/README.md
+++ b/otdf-sdk-mgr/README.md
@@ -56,7 +56,7 @@ otdf-sdk-mgr java-fixup
## How Release Installs Work
-- **Go**: Writes a `.version` file; `cli.sh`/`otdfctl.sh` use `go run github.com/opentdf/otdfctl@{version}` (no local compilation needed, Go caches the binary)
+- **Go**: Writes a `.version` file containing `module-path@version` (e.g., `github.com/opentdf/otdfctl@v0.24.0`); `cli.sh`/`otdfctl.sh` use `go run {module-path}@{version}` (no local compilation needed, Go caches the binary). The module path is `github.com/opentdf/platform/otdfctl` for platform-embedded releases or `github.com/opentdf/otdfctl` for standalone releases.
- **JS**: Runs `npm install @opentdf/ctl@{version}` into the dist directory; `cli.sh` uses `npx` from local `node_modules/`
- **Java**: Downloads `cmdline.jar` from GitHub Releases; `cli.sh` uses `java -jar cmdline.jar`
diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli.py
index 24148bdd7..62580ebc7 100644
--- a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli.py
+++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli.py
@@ -93,3 +93,39 @@ def java_fixup(
from otdf_sdk_mgr.java_fixup import post_checkout_java_fixup
post_checkout_java_fixup(base_dir)
+
+
+@app.command("go-fixup")
+def go_fixup_cmd(
+    platform_dir: Annotated[
+        Path,
+        typer.Option("--platform-dir", help="Path to the platform checkout root"),
+    ],
+    heads: Annotated[
+        Optional[str],
+        typer.Option(
+            "--heads",
+            help="JSON list of head version tags to process (e.g. '[\"main\"]')",
+        ),
+    ] = None,
+    base_dir: Annotated[
+        Optional[Path],
+        typer.Argument(help="Base directory for Go source trees"),
+    ] = None,
+) -> None:
+    """Bridge Go client go.mod to server shared modules for head builds.
+
+    Performs go mod edit -replace + go mod tidy for each head version,
+    pointing platform module imports at the local platform checkout.
+    Only needed for standalone otdfctl checkouts.
+    """
+    # Imported lazily so `--help` stays fast and import errors surface only
+    # when the command actually runs.
+    import json as json_mod
+
+    from otdf_sdk_mgr.go_fixup import go_fixup
+
+    # --heads omitted means "let go_fixup decide" (heads=None passed through).
+    heads_list = json_mod.loads(heads) if heads else None
+    try:
+        go_fixup(platform_dir, heads=heads_list, base_dir=base_dir)
+    except (FileNotFoundError, subprocess.CalledProcessError) as e:
+        typer.echo(f"Error: {e}", err=True)
+        raise typer.Exit(1) from e
diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_install.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_install.py
index e3950d717..e62ae2464 100644
--- a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_install.py
+++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_install.py
@@ -74,12 +74,16 @@ def artifact(
dist_name: Annotated[
Optional[str], typer.Option("--dist-name", help="Override dist directory name")
] = None,
+ source: Annotated[
+ Optional[str],
+ typer.Option(help='Source repo for Go CLI (e.g., "platform" for monorepo)'),
+ ] = None,
) -> None:
"""Install a single SDK version (used by CI)."""
from otdf_sdk_mgr.installers import InstallError, cmd_install
try:
- cmd_install(sdk, version, dist_name=dist_name)
+ cmd_install(sdk, version, dist_name=dist_name, source=source)
except InstallError as e:
typer.echo(f"Error: {e}", err=True)
raise typer.Exit(1)
diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_versions.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_versions.py
index 19188b124..2dcf6e321 100644
--- a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_versions.py
+++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_versions.py
@@ -3,6 +3,7 @@
from __future__ import annotations
import json
+import os
from typing import Annotated, Any, Optional
import typer
@@ -112,10 +113,20 @@ def resolve_versions(
raise typer.Exit(2)
infix = SDK_TAG_INFIXES.get(sdk)
+ # Allow overriding the Go SDK source via OTDFCTL_SOURCE env var
+ # (standalone otdfctl repo vs platform monorepo)
+ go_source = os.environ.get("OTDFCTL_SOURCE") if sdk == "go" else None
+ if go_source and go_source not in ("standalone", "platform"):
+ typer.echo(
+ f"Warning: unrecognized OTDFCTL_SOURCE={go_source!r}; expected 'platform' or 'standalone'",
+ err=True,
+ )
+ go_source = None
+
results: list[ResolveResult] = []
shas: set[str] = set()
for version in tags:
- v = resolve(sdk, version, infix)
+ v = resolve(sdk, version, infix, go_source=go_source)
if is_resolve_success(v):
env = lookup_additional_options(sdk, v["tag"])
if env:
diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/config.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/config.py
index adf6c8b1f..1046a5ef8 100644
--- a/otdf-sdk-mgr/src/otdf_sdk_mgr/config.py
+++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/config.py
@@ -70,7 +70,11 @@ def get_sdk_dirs() -> dict[str, Path]:
"java": "opentdf/java-sdk",
}
-GO_INSTALL_PREFIX = "go run github.com/opentdf/otdfctl"
+GO_INSTALL_PREFIX_STANDALONE = "go run github.com/opentdf/otdfctl"
+GO_INSTALL_PREFIX_PLATFORM = "go run github.com/opentdf/platform/otdfctl"
+
+GO_MODULE_PATH = "github.com/opentdf/otdfctl"
+GO_MODULE_PATH_PLATFORM = "github.com/opentdf/platform/otdfctl"
LTS_VERSIONS: dict[str, str] = {
"go": "0.24.0",
@@ -111,4 +115,46 @@ def get_sdk_dirs() -> dict[str, Path]:
"platform": "service",
}
+# When resolving go versions from the platform repo, use "otdfctl" infix
+# (tags are otdfctl/vX.Y.Z in the platform monorepo)
+SDK_TAG_INFIXES_PLATFORM_GO = "otdfctl"
+
+_VALID_GO_SOURCES = {None, "standalone", "platform"}
+
+
+def _validate_go_source(source: str | None) -> None:
+ """Raise ValueError if source is not a recognised Go source."""
+ if source not in _VALID_GO_SOURCES:
+ raise ValueError(f"Invalid Go source {source!r}; expected one of {_VALID_GO_SOURCES}")
+
+
+def go_git_url(source: str | None = None) -> str:
+ """Return the git URL for Go SDK resolution based on source.
+
+ Args:
+ source: "platform" to use the platform monorepo, None/"standalone" for the
+ standalone otdfctl repo.
+ """
+ _validate_go_source(source)
+ if source == "platform":
+ return SDK_GIT_URLS["platform"]
+ return SDK_GIT_URLS["go"]
+
+
+def go_tag_infix(source: str | None = None) -> str | None:
+ """Return the tag infix for Go SDK resolution based on source."""
+ _validate_go_source(source)
+ if source == "platform":
+ return SDK_TAG_INFIXES_PLATFORM_GO
+ return None
+
+
+def go_module_path(source: str | None = None) -> str:
+ """Return the Go module path based on source."""
+ _validate_go_source(source)
+ if source == "platform":
+ return GO_MODULE_PATH_PLATFORM
+ return GO_MODULE_PATH
+
+
ALL_SDKS = ["go", "js", "java"]
diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/go_fixup.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/go_fixup.py
new file mode 100644
index 000000000..4098361c2
--- /dev/null
+++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/go_fixup.py
@@ -0,0 +1,95 @@
+"""Post-checkout fixups for Go SDK (otdfctl) source trees.
+
+Bridges client go.mod to server shared modules for head builds where
+client and server share unreleased code. Only applies to standalone
+otdfctl checkouts — platform-source builds already have the modules.
+"""
+
+from __future__ import annotations
+
+import subprocess
+from pathlib import Path
+
+from otdf_sdk_mgr.config import get_sdk_dir
+
+# Platform modules that standalone otdfctl imports and that may need
+# a local replace directive when testing against a head platform build.
+PLATFORM_MODULES = [
+ "lib/fixtures",
+ "lib/ocrypto",
+ "protocol/go",
+ "sdk",
+]
+
+
+def go_fixup(
+ platform_dir: Path,
+ heads: list[str] | None = None,
+ base_dir: Path | None = None,
+) -> None:
+ """Replace go.mod references to point at local platform checkout.
+
+ Args:
+ platform_dir: Absolute path to the platform checkout root
+ (containing lib/, protocol/, sdk/).
+ heads: JSON-decoded list of head version tags to process.
+ If None, all subdirectories under *base_dir* are processed.
+ base_dir: Directory containing per-version otdfctl source trees
+ (e.g. ``xtest/sdk/go/src``). Defaults to ``get_sdk_dir() / "go" / "src"``.
+ """
+ if base_dir is None:
+ base_dir = get_sdk_dir() / "go" / "src"
+
+ if not base_dir.exists():
+ print(f"Base directory {base_dir} does not exist, nothing to fix.")
+ return
+
+ platform_dir = platform_dir.resolve()
+ if not platform_dir.is_dir():
+ raise FileNotFoundError(f"Platform directory does not exist: {platform_dir}")
+
+ dirs_to_process: list[Path] = []
+ if heads:
+ for tag in heads:
+ d = base_dir / tag
+ if d.is_dir():
+ dirs_to_process.append(d)
+ else:
+ print(f"Warning: head directory {d} does not exist, skipping.")
+ else:
+ for d in sorted(base_dir.iterdir()):
+ if d.is_dir() and not d.name.endswith(".git"):
+ dirs_to_process.append(d)
+
+ if not dirs_to_process:
+ print("No directories to process.")
+ return
+
+ for src_dir in dirs_to_process:
+ if not (src_dir / "go.mod").exists():
+ print(f"No go.mod in {src_dir}, skipping.")
+ continue
+
+ print(f"Applying go.mod replacements in {src_dir}...")
+ for module in PLATFORM_MODULES:
+ local_path = platform_dir / module
+ if not local_path.is_dir():
+ print(f" Warning: {local_path} does not exist, skipping {module}")
+ continue
+ subprocess.run(
+ [
+ "go",
+ "mod",
+ "edit",
+ "-replace",
+ f"github.com/opentdf/platform/{module}={local_path}",
+ ],
+ cwd=src_dir,
+ check=True,
+ )
+ print(f" Replaced github.com/opentdf/platform/{module} -> {local_path}")
+
+ print(f"Running go mod tidy in {src_dir}...")
+ subprocess.run(["go", "mod", "tidy"], cwd=src_dir, check=True)
+
+ print("Go fixup complete.")
diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/installers.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/installers.py
index e7c22ae09..0822a063e 100644
--- a/otdf-sdk-mgr/src/otdf_sdk_mgr/installers.py
+++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/installers.py
@@ -11,9 +11,11 @@
from pathlib import Path
from otdf_sdk_mgr.config import (
+ GO_MODULE_PATH_PLATFORM,
LTS_VERSIONS,
get_sdk_dir,
get_sdk_dirs,
+ go_module_path,
)
from otdf_sdk_mgr.checkout import checkout_sdk_branch
from otdf_sdk_mgr.registry import list_go_versions, list_java_github_releases, list_js_versions
@@ -24,33 +26,48 @@ class InstallError(Exception):
"""Raised when SDK installation fails."""
-def install_go_release(version: str, dist_dir: Path) -> None:
+def install_go_release(version: str, dist_dir: Path, source: str | None = None) -> None:
"""Install a Go CLI release by writing a .version file.
The cli.sh and otdfctl.sh wrappers read .version and use
- `go run github.com/opentdf/otdfctl@{version}` instead of a local binary.
+    `go run {module}@{version}` instead of a local binary.
+ The .version file contains `module-path@version`
+ (e.g., `github.com/opentdf/otdfctl@v0.24.0`).
+
+ Args:
+ version: Version string (e.g., "v0.24.0" or "otdfctl/v0.24.0").
+ dist_dir: Target distribution directory.
+ source: "platform" to use the platform monorepo module path,
+ None or "standalone" for standalone.
"""
go_dir = get_sdk_dir() / "go"
dist_dir.mkdir(parents=True, exist_ok=True)
+ # Strip tag infix (e.g., "otdfctl/v0.24.0" → "v0.24.0")
+ if "/" in version:
+ version = version.rsplit("/", 1)[-1]
tag = normalize_version(version)
- (dist_dir / ".version").write_text(f"{tag}\n")
+ module = go_module_path(source)
+ (dist_dir / ".version").write_text(f"{module}@{tag}\n")
shutil.copy(go_dir / "cli.sh", dist_dir / "cli.sh")
shutil.copy(go_dir / "otdfctl.sh", dist_dir / "otdfctl.sh")
shutil.copy(go_dir / "opentdfctl.yaml", dist_dir / "opentdfctl.yaml")
- print(f" Pre-warming Go cache for otdfctl@{tag}...")
+ print(f" Pre-warming Go cache for {module}@{tag}...")
result = subprocess.run(
- ["go", "install", f"github.com/opentdf/otdfctl@{tag}"],
+ ["go", "install", f"{module}@{tag}"],
capture_output=True,
text=True,
)
if result.returncode != 0:
- print(
- f" Warning: go install pre-warm failed (will retry at runtime): {result.stderr.strip()}"
- )
+ msg = f"go install pre-warm failed: {result.stderr.strip()}"
+ if module == GO_MODULE_PATH_PLATFORM:
+ raise InstallError(
+ f"{msg}\nThe platform module path {module}@{tag} may not be published yet."
+ )
+ print(f" Warning: {msg} (will retry at runtime)")
print(f" Go release {tag} installed to {dist_dir}")
-def install_js_release(version: str, dist_dir: Path) -> None:
+def install_js_release(version: str, dist_dir: Path, **_kwargs: object) -> None:
"""Install a JS CLI release from npm registry."""
js_dir = get_sdk_dir() / "js"
dist_dir.mkdir(parents=True, exist_ok=True)
@@ -65,7 +82,7 @@ def install_js_release(version: str, dist_dir: Path) -> None:
print(f" JS release {v} installed to {dist_dir}")
-def install_java_release(version: str, dist_dir: Path) -> None:
+def install_java_release(version: str, dist_dir: Path, **_kwargs: object) -> None:
"""Install a Java CLI release by downloading cmdline.jar from GitHub Releases.
Raises InstallError if the artifact is not available or download fails,
@@ -133,13 +150,15 @@ def install_java_release(version: str, dist_dir: Path) -> None:
}
-def install_release(sdk: str, version: str, dist_name: str | None = None) -> Path:
+def install_release(sdk: str, version: str, dist_name: str | None = None, **kwargs: object) -> Path:
"""Install a released version of an SDK CLI.
Args:
sdk: One of "go", "js", "java"
version: Version string (e.g., "v0.24.0" or "0.24.0")
dist_name: Override the dist directory name (defaults to normalized version)
+ **kwargs: Extra arguments forwarded to the SDK installer
+ (e.g., source="platform" for Go).
Returns:
Path to the created dist directory
@@ -157,7 +176,7 @@ def install_release(sdk: str, version: str, dist_name: str | None = None) -> Pat
print(f" Dist directory already exists: {dist_dir} (skipping)")
return dist_dir
- INSTALLERS[sdk](version, dist_dir)
+ INSTALLERS[sdk](version, dist_dir, **kwargs)
return dist_dir
@@ -224,7 +243,9 @@ def cmd_release(specs: list[str]) -> None:
install_release(sdk, version)
-def cmd_install(sdk: str, version: str, dist_name: str | None = None) -> None:
+def cmd_install(
+ sdk: str, version: str, dist_name: str | None = None, source: str | None = None
+) -> None:
"""Install a single SDK version (used by CI action)."""
print(f"Installing {sdk} {version}...")
- install_release(sdk, version, dist_name=dist_name)
+ install_release(sdk, version, dist_name=dist_name, source=source)
diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/registry.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/registry.py
index 8f8dd34e5..fcd4f78c8 100644
--- a/otdf-sdk-mgr/src/otdf_sdk_mgr/registry.py
+++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/registry.py
@@ -12,11 +12,13 @@
from typing import Any
from otdf_sdk_mgr.config import (
- GO_INSTALL_PREFIX,
+ GO_INSTALL_PREFIX_PLATFORM,
+ GO_INSTALL_PREFIX_STANDALONE,
SDK_GITHUB_REPOS,
SDK_GIT_URLS,
SDK_MAVEN_COORDS,
SDK_NPM_PACKAGES,
+ SDK_TAG_INFIXES_PLATFORM_GO,
)
from otdf_sdk_mgr.semver import is_stable, parse_semver, semver_sort_key
@@ -68,12 +70,15 @@ def fetch_text(url: str) -> str:
def list_go_versions() -> list[dict[str, Any]]:
- """List Go SDK versions from git tags."""
+ """List Go SDK versions from git tags in both standalone and platform repos."""
+ import git.exc
from git import Git
repo = Git()
+ seen: dict[str, dict[str, Any]] = {}
+
+ # Standalone repo (opentdf/otdfctl): tags like v0.24.0
raw = repo.ls_remote(SDK_GIT_URLS["go"], tags=True)
- results = []
for line in raw.strip().split("\n"):
if not line:
continue
@@ -83,16 +88,52 @@ def list_go_versions() -> list[dict[str, Any]]:
tag = ref.removeprefix("refs/tags/")
if not parse_semver(tag):
continue
- version = tag
- results.append(
- {
+ seen[tag] = {
+ "sdk": "go",
+ "version": tag,
+ "source": "git-tag",
+ "install_method": f"{GO_INSTALL_PREFIX_STANDALONE}@{tag}",
+ "stable": is_stable(tag),
+ }
+
+ # Platform repo (opentdf/platform): tags like otdfctl/v0.X.Y
+ infix = SDK_TAG_INFIXES_PLATFORM_GO
+ try:
+ raw = repo.ls_remote(SDK_GIT_URLS["platform"], tags=True)
+ for line in raw.strip().split("\n"):
+ if not line:
+ continue
+ _, ref = line.split("\t", 1)
+ if ref.endswith("^{}"):
+ continue
+ tag = ref.removeprefix("refs/tags/")
+ if not tag.startswith(f"{infix}/"):
+ continue
+ version = tag.removeprefix(f"{infix}/")
+ if not parse_semver(version):
+ continue
+ # Platform entries take precedence (canonical location post-migration);
+            # if the same version exists in both repos, the platform entry
+            # overwrites the standalone one and a note is printed to stderr.
+ if version in seen:
+ print(
+ f"Note: version {version} found in both standalone and platform repos; using platform source.",
+ file=sys.stderr,
+ )
+ seen[version] = {
"sdk": "go",
"version": version,
- "source": "git-tag",
- "install_method": f"{GO_INSTALL_PREFIX}@{version}",
+ "source": "platform-git-tag",
+ "install_method": f"{GO_INSTALL_PREFIX_PLATFORM}@{tag}",
"stable": is_stable(version),
}
+ except git.exc.GitCommandError as e:
+ print(
+ f"::warning::Failed to query platform repo for go tags: {e}",
+ file=sys.stderr,
)
+
+ results = list(seen.values())
results.sort(key=lambda r: semver_sort_key(r["version"]))
return results
diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/resolve.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/resolve.py
index 6e4cd7ca1..e3f264d74 100644
--- a/otdf-sdk-mgr/src/otdf_sdk_mgr/resolve.py
+++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/resolve.py
@@ -12,6 +12,8 @@
LTS_VERSIONS,
SDK_GIT_URLS,
SDK_NPM_PACKAGES,
+ go_git_url,
+ go_tag_infix,
)
@@ -23,6 +25,7 @@ class ResolveSuccess(TypedDict):
pr: NotRequired[str]
release: NotRequired[str]
sha: str
+ source: NotRequired[str]
tag: str
@@ -111,78 +114,119 @@ def lookup_additional_options(sdk: str, version: str) -> str | None:
return None
-def resolve(sdk: str, version: str, infix: str | None) -> ResolveResult:
- """Resolve a version spec to a concrete SHA and tag."""
+def resolve(
+ sdk: str,
+ version: str,
+ infix: str | None,
+ go_source: str | None = None,
+) -> ResolveResult:
+ """Resolve a version spec to a concrete SHA and tag.
+
+ Args:
+ sdk: SDK identifier (go, js, java, platform).
+ version: Version spec (main, SHA, tag, latest, lts, etc.).
+ infix: Tag infix for monorepo tag resolution (e.g. "sdk" for JS).
+ go_source: For sdk=="go", override the git URL and infix.
+ "platform" resolves against the platform monorepo (otdfctl/ prefix tags).
+ None or "standalone" uses the standalone otdfctl repo (default).
+ """
+ _go_platform = sdk == "go" and go_source == "platform"
+
+ def _annotate(result: ResolveResult) -> ResolveResult:
+ """Add source field to successful results when resolving go from platform."""
+ if _go_platform and is_resolve_success(result):
+ result["source"] = "platform"
+ return result
+
try:
- sdk_url = SDK_GIT_URLS[sdk]
+ if _go_platform:
+ sdk_url = go_git_url("platform")
+ infix = go_tag_infix("platform")
+ else:
+ sdk_url = SDK_GIT_URLS[sdk]
repo = Git()
if version == "main" or version == "refs/heads/main":
all_heads = [r.split("\t") for r in repo.ls_remote(sdk_url, heads=True).split("\n")]
- sha, _ = [tag for tag in all_heads if "refs/heads/main" in tag][0]
- return {
- "sdk": sdk,
- "alias": version,
- "head": True,
- "sha": sha,
- "tag": "main",
- }
+ try:
+ sha, _ = next(tag for tag in all_heads if "refs/heads/main" in tag)
+ except StopIteration:
+ return {"sdk": sdk, "alias": version, "err": f"main branch not found in {sdk_url}"}
+ return _annotate(
+ {
+ "sdk": sdk,
+ "alias": version,
+ "head": True,
+ "sha": sha,
+ "tag": "main",
+ }
+ )
if re.match(SHA_REGEX, version):
ls_remote = [r.split("\t") for r in repo.ls_remote(sdk_url).split("\n")]
matching_tags = [(sha, tag) for (sha, tag) in ls_remote if sha.startswith(version)]
if not matching_tags:
- return {
- "sdk": sdk,
- "alias": version[:7],
- "sha": version,
- "tag": version,
- }
+ return _annotate(
+ {
+ "sdk": sdk,
+ "alias": version[:7],
+ "sha": version,
+ "tag": version,
+ }
+ )
if len(matching_tags) > 1:
for sha, tag in matching_tags:
if tag.startswith("refs/pull/"):
pr_number = tag.split("/")[2]
- return {
- "sdk": sdk,
- "alias": version,
- "head": True,
- "sha": sha,
- "tag": f"pull-{pr_number}",
- }
+ return _annotate(
+ {
+ "sdk": sdk,
+ "alias": version,
+ "head": True,
+ "sha": sha,
+ "tag": f"pull-{pr_number}",
+ }
+ )
for sha, tag in matching_tags:
mq_match = re.match(MERGE_QUEUE_REGEX, tag)
if mq_match:
to_branch = mq_match.group("branch")
pr_number = mq_match.group("pr_number")
if to_branch and pr_number:
- return {
+ return _annotate(
+ {
+ "sdk": sdk,
+ "alias": version,
+ "head": True,
+ "pr": pr_number,
+ "sha": sha,
+ "tag": f"mq-{to_branch}-{pr_number}",
+ }
+ )
+ suffix = tag.split("refs/heads/gh-readonly-queue/")[-1]
+ flattag = "mq--" + suffix.replace("/", "--")
+ return _annotate(
+ {
"sdk": sdk,
"alias": version,
"head": True,
- "pr": pr_number,
"sha": sha,
- "tag": f"mq-{to_branch}-{pr_number}",
+ "tag": flattag,
}
- suffix = tag.split("refs/heads/gh-readonly-queue/")[-1]
- flattag = "mq--" + suffix.replace("/", "--")
- return {
- "sdk": sdk,
- "alias": version,
- "head": True,
- "sha": sha,
- "tag": flattag,
- }
+ )
head = False
if tag.startswith("refs/heads/"):
head = True
tag = tag.split("refs/heads/")[-1]
flattag = tag.replace("/", "--")
- return {
- "sdk": sdk,
- "alias": version,
- "head": head,
- "sha": sha,
- "tag": flattag,
- }
+ return _annotate(
+ {
+ "sdk": sdk,
+ "alias": version,
+ "head": head,
+ "sha": sha,
+ "tag": flattag,
+ }
+ )
return {
"sdk": sdk,
@@ -197,12 +241,14 @@ def resolve(sdk: str, version: str, infix: str | None) -> ResolveResult:
tag = tag.split("refs/tags/")[-1]
if infix:
tag = tag.split(f"{infix}/")[-1]
- return {
- "sdk": sdk,
- "alias": version,
- "sha": sha,
- "tag": tag,
- }
+ return _annotate(
+ {
+ "sdk": sdk,
+ "alias": version,
+ "sha": sha,
+ "tag": tag,
+ }
+ )
if version.startswith("refs/pull/"):
merge_heads = [
@@ -216,14 +262,16 @@ def resolve(sdk: str, version: str, infix: str | None) -> ResolveResult:
"err": f"pull request {pr_number} not found in {sdk_url}",
}
sha, _ = merge_heads[0]
- return {
- "sdk": sdk,
- "alias": version,
- "head": True,
- "pr": pr_number,
- "sha": sha,
- "tag": f"pull-{pr_number}",
- }
+ return _annotate(
+ {
+ "sdk": sdk,
+ "alias": version,
+ "head": True,
+ "pr": pr_number,
+ "sha": sha,
+ "tag": f"pull-{pr_number}",
+ }
+ )
remote_tags = [r.split("\t") for r in repo.ls_remote(sdk_url).split("\n")]
all_listed_tags = [
@@ -238,13 +286,15 @@ def resolve(sdk: str, version: str, infix: str | None) -> ResolveResult:
if version in all_listed_branches:
sha = all_listed_branches[version]
- return {
- "sdk": sdk,
- "alias": version,
- "head": True,
- "sha": sha,
- "tag": version,
- }
+ return _annotate(
+ {
+ "sdk": sdk,
+ "alias": version,
+ "head": True,
+ "sha": sha,
+ "tag": version,
+ }
+ )
if infix and version.startswith(f"{infix}/"):
version = version.split(f"{infix}/")[-1]
@@ -288,13 +338,15 @@ def resolve(sdk: str, version: str, infix: str | None) -> ResolveResult:
if not matching_tags:
# No versions with CLI found, fall back to building latest from source
sha, tag = stable_tags[-1]
- return {
- "sdk": sdk,
- "alias": alias,
- "head": True, # Mark as head to trigger source checkout
- "sha": sha,
- "tag": tag,
- }
+ return _annotate(
+ {
+ "sdk": sdk,
+ "alias": alias,
+ "head": True, # Mark as head to trigger source checkout
+ "sha": sha,
+ "tag": tag,
+ }
+ )
else:
matching_tags = stable_tags[-1:]
else:
@@ -319,13 +371,15 @@ def resolve(sdk: str, version: str, infix: str | None) -> ResolveResult:
release = tag
if infix:
release = f"{infix}/{release}"
- return {
- "sdk": sdk,
- "alias": alias,
- "release": release,
- "sha": sha,
- "tag": tag,
- }
+ return _annotate(
+ {
+ "sdk": sdk,
+ "alias": alias,
+ "release": release,
+ "sha": sha,
+ "tag": tag,
+ }
+ )
except Exception as e:
return {
"sdk": sdk,
diff --git a/xtest/sdk/go/cli.sh b/xtest/sdk/go/cli.sh
index 172aa5b50..f97b20c1a 100755
--- a/xtest/sdk/go/cli.sh
+++ b/xtest/sdk/go/cli.sh
@@ -23,8 +23,14 @@ SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
cmd=("$SCRIPT_DIR"/otdfctl)
if [ ! -f "$SCRIPT_DIR"/otdfctl ]; then
if [ -f "$SCRIPT_DIR/.version" ]; then
- OTDFCTL_VERSION=$(tr -d '[:space:]' <"$SCRIPT_DIR/.version")
- cmd=(go run "github.com/opentdf/otdfctl@${OTDFCTL_VERSION}")
+ VERSION_SPEC=$(tr -d '[:space:]' <"$SCRIPT_DIR/.version")
+ if [[ "$VERSION_SPEC" == *@* ]]; then
+ # New format: module-path@version
+ cmd=(go run "$VERSION_SPEC")
+ else
+ # Legacy format: bare version tag, default to standalone module
+ cmd=(go run "github.com/opentdf/otdfctl@${VERSION_SPEC}")
+ fi
else
cmd=(go run "github.com/opentdf/otdfctl@latest")
fi
diff --git a/xtest/sdk/go/otdfctl.sh b/xtest/sdk/go/otdfctl.sh
index 17fbb0c84..9ba55f054 100755
--- a/xtest/sdk/go/otdfctl.sh
+++ b/xtest/sdk/go/otdfctl.sh
@@ -18,8 +18,14 @@ source "$XTEST_DIR/test.env"
cmd=("$SCRIPT_DIR"/otdfctl)
if [ ! -f "$SCRIPT_DIR"/otdfctl ]; then
if [ -f "$SCRIPT_DIR/.version" ]; then
- OTDFCTL_VERSION=$(tr -d '[:space:]' <"$SCRIPT_DIR/.version")
- cmd=(go run "github.com/opentdf/otdfctl@${OTDFCTL_VERSION}")
+ VERSION_SPEC=$(tr -d '[:space:]' <"$SCRIPT_DIR/.version")
+ if [[ "$VERSION_SPEC" == *@* ]]; then
+ # New format: module-path@version
+ cmd=(go run "$VERSION_SPEC")
+ else
+ # Legacy format: bare version tag, default to standalone module
+ cmd=(go run "github.com/opentdf/otdfctl@${VERSION_SPEC}")
+ fi
else
cmd=(go run "github.com/opentdf/otdfctl@latest")
fi
diff --git a/xtest/setup-cli-tool/action.yaml b/xtest/setup-cli-tool/action.yaml
index 9e110ef4f..29747fa4e 100644
--- a/xtest/setup-cli-tool/action.yaml
+++ b/xtest/setup-cli-tool/action.yaml
@@ -2,12 +2,23 @@ name: configure-sdks
description: Check out and build one or more SDK and its CLI tool for use by xtest
inputs:
path:
- description: The path to checkout the the SDK source code to; concatenated with branch or tag name.
+ description: The path to check out the SDK source code to; concatenated with branch or tag name.
sdk:
description: The SDK to configure; one of go, java, js
version-info:
description: JSON-encoded output of otdf-sdk-mgr versions resolve
required: true
+ platform-otdfctl-dir:
+ description: >-
+ Absolute path to platform's otdfctl/ directory. When set and sdk is "go",
+ head versions whose SHA matches platform-otdfctl-sha are symlinked from
+      here instead of checked out separately. Reuse only occurs in explicit
+      platform mode (source: "platform" in the resolved version); auto-detect
+      mode cannot safely reuse this checkout and falls back to a standalone
+      checkout.
+ platform-otdfctl-sha:
+ description: >-
+ SHA of the commit that platform-otdfctl-dir was checked out at.
+ Used to decide which Go head version (if any) can reuse the existing
+ platform checkout vs needing a fresh one.
outputs:
version-a:
description: "Object containing tag, sha, and name of a version checked out"
@@ -28,24 +39,27 @@ outputs:
runs:
using: composite
steps:
- - name: identify repo url
+ - name: identify repo urls
shell: bash
run: |
- case "${{ inputs.sdk }}" in
+ case "$INPUT_SDK" in
"go")
- echo "sdk_repo=opentdf/otdfctl" >> $GITHUB_ENV
+ echo "STANDALONE_REPO=opentdf/otdfctl" >> "$GITHUB_ENV"
;;
"java")
- echo "sdk_repo=opentdf/java-sdk" >> $GITHUB_ENV
+ echo "STANDALONE_REPO=opentdf/java-sdk" >> "$GITHUB_ENV"
;;
"js")
- echo "sdk_repo=opentdf/web-sdk" >> $GITHUB_ENV
+ echo "STANDALONE_REPO=opentdf/web-sdk" >> "$GITHUB_ENV"
;;
*)
- echo "Invalid SDK specified: ${{ inputs.sdk }}" >> $GITHUB_STEP_SUMMARY
+ echo "Invalid SDK specified: $INPUT_SDK" >> "$GITHUB_STEP_SUMMARY"
exit 1
;;
esac
+ echo "PLATFORM_REPO=opentdf/platform" >> "$GITHUB_ENV"
+ env:
+ INPUT_SDK: ${{ inputs.sdk }}
- name: resolve versions
id: resolve
@@ -88,9 +102,12 @@ runs:
echo "Installing ${{ inputs.sdk }} $tag from registry (release: $release)"
# Sanitize tag for use as an env var name (replace non-alphanumeric/underscore with _)
tag_sanitized="${tag//[^a-zA-Z0-9_]/_}"
+ source=$(echo "$row" | jq -r '.source // empty')
+ source_args=()
+ [[ -n "$source" ]] && source_args=(--source "$source")
if ! uv run --project "$SDK_MGR_DIR" otdf-sdk-mgr install artifact \
--sdk "${{ inputs.sdk }}" --version "$release" \
- --dist-name "$tag"; then
+ --dist-name "$tag" "${source_args[@]}"; then
echo " Warning: Artifact installation failed for ${{ inputs.sdk }} $tag"
echo " Will fall back to building from source"
echo "BUILD_FROM_SOURCE_${tag_sanitized}=true" >> "$GITHUB_ENV"
@@ -104,9 +121,10 @@ runs:
id: check-source
shell: bash
run: |
- # Determine which version slots need source checkout.
- # A slot needs checkout if it is a head version OR if artifact install failed
- # (BUILD_FROM_SOURCE_ was set in the previous step).
+ # Determine which version slots need source checkout and from which repo.
+ # A slot needs checkout if it is a head version OR if artifact install failed.
+ # Platform-source versions may reuse the existing platform-otdfctl-dir
+ # (when their SHA matches) or need a fresh opentdf/platform checkout.
for slot in a b c d; do
case "$slot" in
a) row=$(echo "${version_info}" | jq -rc '.[0] // empty') ;;
@@ -115,65 +133,200 @@ runs:
d) row=$(echo "${version_info}" | jq -rc '.[3] // empty') ;;
esac
if [[ -z "$row" ]]; then
- echo "needs-source-${slot}=false" >> "$GITHUB_OUTPUT"
+ echo "needs-checkout-${slot}=false" >> "$GITHUB_OUTPUT"
+ echo "is-platform-${slot}=false" >> "$GITHUB_OUTPUT"
+ echo "use-existing-platform-dir-${slot}=false" >> "$GITHUB_OUTPUT"
+ echo "checkout-repo-${slot}=" >> "$GITHUB_OUTPUT"
+ echo "checkout-path-${slot}=" >> "$GITHUB_OUTPUT"
continue
fi
+
tag=$(echo "$row" | jq -r '.tag')
head=$(echo "$row" | jq -r '.head // false')
+ sha=$(echo "$row" | jq -r '.sha')
+ source=$(echo "$row" | jq -r '.source // empty')
tag_sanitized="${tag//[^a-zA-Z0-9_]/_}"
build_from_source_var="BUILD_FROM_SOURCE_${tag_sanitized}"
+ needs_source=false
if [[ "$head" == "true" || "${!build_from_source_var}" == "true" ]]; then
- echo "needs-source-${slot}=true" >> "$GITHUB_OUTPUT"
- else
- echo "needs-source-${slot}=false" >> "$GITHUB_OUTPUT"
+ needs_source=true
+ fi
+
+ is_platform=false
+ use_existing=false
+ checkout_repo="$STANDALONE_REPO"
+ checkout_path="${INPUT_PATH}/${INPUT_SDK}/src/${tag}"
+
+ if [[ "$source" == "platform" ]]; then
+ # Explicit platform mode: resolver tagged this version as from
+ # opentdf/platform. Use per-version SHA to decide checkout strategy.
+ is_platform=true
+ if [[ "$needs_source" == "true" && -n "$PLATFORM_OTDFCTL_DIR" \
+ && -n "$PLATFORM_OTDFCTL_SHA" && "$sha" == "$PLATFORM_OTDFCTL_SHA" ]]; then
+ # SHA matches existing platform checkout — reuse via symlink
+ use_existing=true
+ needs_source=false
+ elif [[ "$needs_source" == "true" ]]; then
+ # Different SHA — need a fresh platform checkout
+ checkout_repo="$PLATFORM_REPO"
+ checkout_path="${INPUT_PATH}/${INPUT_SDK}/platform-src/${tag}"
+ fi
+ elif [[ "$INPUT_SDK" == "go" && -n "$PLATFORM_OTDFCTL_DIR" && "$needs_source" == "true" ]]; then
+ # Auto-detect fallback: resolver used standalone repo but the
+ # test job detected otdfctl in the platform checkout.
+ # NOTE: SHA comparison across repos is not meaningful (the standalone
+ # repo and platform repo have different commit histories), so we
+ # cannot safely reuse the platform checkout here. Fall through to
+ # a standalone checkout. To use the platform source, set
+ # otdfctl-source=platform explicitly.
+ echo "::notice::Go version ${tag} resolved from standalone repo; platform checkout available but cannot auto-reuse (different repo). Set otdfctl-source=platform to use the platform source."
fi
+
+ echo "needs-checkout-${slot}=${needs_source}" >> "$GITHUB_OUTPUT"
+ echo "is-platform-${slot}=${is_platform}" >> "$GITHUB_OUTPUT"
+ echo "use-existing-platform-dir-${slot}=${use_existing}" >> "$GITHUB_OUTPUT"
+ echo "checkout-repo-${slot}=${checkout_repo}" >> "$GITHUB_OUTPUT"
+ echo "checkout-path-${slot}=${checkout_path}" >> "$GITHUB_OUTPUT"
done
env:
version_info: ${{ inputs.version-info }}
+ INPUT_PATH: ${{ inputs.path }}
+ INPUT_SDK: ${{ inputs.sdk }}
+ PLATFORM_OTDFCTL_DIR: ${{ inputs.platform-otdfctl-dir }}
+ PLATFORM_OTDFCTL_SHA: ${{ inputs.platform-otdfctl-sha }}
+
+ - name: symlink existing platform checkout
+ shell: bash
+ run: |
+ # For versions that can reuse the already-checked-out platform dir,
+ # symlink platform-otdfctl-dir into sdk/go/src/{tag}.
+ for slot in a b c d; do
+ case "$slot" in
+ a) version_json="$VERSION_A" ; use_existing="$USE_EXISTING_A" ;;
+ b) version_json="$VERSION_B" ; use_existing="$USE_EXISTING_B" ;;
+ c) version_json="$VERSION_C" ; use_existing="$USE_EXISTING_C" ;;
+ d) version_json="$VERSION_D" ; use_existing="$USE_EXISTING_D" ;;
+ esac
+ if [[ -z "$version_json" || "$use_existing" != "true" ]]; then
+ continue
+ fi
+ tag=$(echo "$version_json" | jq -r '.tag')
+ src_dir="${INPUT_PATH}/${INPUT_SDK}/src/${tag}"
+ echo "Symlinking existing platform otdfctl to ${src_dir}"
+ mkdir -p "$(dirname "$src_dir")"
+ ln -sfn "$PLATFORM_OTDFCTL_DIR" "$src_dir"
+ if [ ! -e "$src_dir" ]; then
+ echo "::error::Symlink target does not exist: $PLATFORM_OTDFCTL_DIR"
+ exit 1
+ fi
+ done
+ env:
+ PLATFORM_OTDFCTL_DIR: ${{ inputs.platform-otdfctl-dir }}
+ INPUT_PATH: ${{ inputs.path }}
+ INPUT_SDK: ${{ inputs.sdk }}
+ VERSION_A: ${{ steps.resolve.outputs.version-a }}
+ VERSION_B: ${{ steps.resolve.outputs.version-b }}
+ VERSION_C: ${{ steps.resolve.outputs.version-c }}
+ VERSION_D: ${{ steps.resolve.outputs.version-d }}
+ USE_EXISTING_A: ${{ steps.check-source.outputs.use-existing-platform-dir-a }}
+ USE_EXISTING_B: ${{ steps.check-source.outputs.use-existing-platform-dir-b }}
+ USE_EXISTING_C: ${{ steps.check-source.outputs.use-existing-platform-dir-c }}
+ USE_EXISTING_D: ${{ steps.check-source.outputs.use-existing-platform-dir-d }}
- name: checkout version a
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
if: >-
steps.resolve.outputs.version-a != ''
- && steps.check-source.outputs.needs-source-a == 'true'
+ && steps.check-source.outputs.needs-checkout-a == 'true'
with:
- path: ${{ inputs.path }}/${{ inputs.sdk }}/src/${{ fromJson(steps.resolve.outputs.version-a).tag }}
+ path: ${{ steps.check-source.outputs.checkout-path-a }}
persist-credentials: false
ref: ${{ fromJson(steps.resolve.outputs.version-a).sha }}
- repository: ${{ env.sdk_repo }}
+ repository: ${{ steps.check-source.outputs.checkout-repo-a }}
- name: checkout version b
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
if: >-
steps.resolve.outputs.version-b != ''
- && steps.check-source.outputs.needs-source-b == 'true'
+ && steps.check-source.outputs.needs-checkout-b == 'true'
with:
- path: ${{ inputs.path }}/${{ inputs.sdk }}/src/${{ fromJson(steps.resolve.outputs.version-b).tag }}
+ path: ${{ steps.check-source.outputs.checkout-path-b }}
persist-credentials: false
ref: ${{ fromJson(steps.resolve.outputs.version-b).sha }}
- repository: ${{ env.sdk_repo }}
+ repository: ${{ steps.check-source.outputs.checkout-repo-b }}
- name: checkout version c
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
if: >-
steps.resolve.outputs.version-c != ''
- && steps.check-source.outputs.needs-source-c == 'true'
+ && steps.check-source.outputs.needs-checkout-c == 'true'
with:
- path: ${{ inputs.path }}/${{ inputs.sdk }}/src/${{ fromJson(steps.resolve.outputs.version-c).tag }}
+ path: ${{ steps.check-source.outputs.checkout-path-c }}
persist-credentials: false
ref: ${{ fromJson(steps.resolve.outputs.version-c).sha }}
- repository: ${{ env.sdk_repo }}
+ repository: ${{ steps.check-source.outputs.checkout-repo-c }}
- name: checkout version d
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
if: >-
steps.resolve.outputs.version-d != ''
- && steps.check-source.outputs.needs-source-d == 'true'
+ && steps.check-source.outputs.needs-checkout-d == 'true'
with:
- path: ${{ inputs.path }}/${{ inputs.sdk }}/src/${{ fromJson(steps.resolve.outputs.version-d).tag }}
+ path: ${{ steps.check-source.outputs.checkout-path-d }}
persist-credentials: false
ref: ${{ fromJson(steps.resolve.outputs.version-d).sha }}
- repository: ${{ env.sdk_repo }}
+ repository: ${{ steps.check-source.outputs.checkout-repo-d }}
+
+ - name: symlink freshly checked-out platform sources
+ shell: bash
+ run: |
+ # For platform-source versions that were checked out (not reusing the
+ # existing dir), symlink {platform-src}/{tag}/otdfctl → src/{tag} so
+ # the Makefile discovers them.
+ for slot in a b c d; do
+ case "$slot" in
+ a) is_platform="$IS_PLATFORM_A" ; needs_checkout="$NEEDS_CHECKOUT_A"
+ checkout_path="$CHECKOUT_PATH_A" ; version_json="$VERSION_A" ;;
+ b) is_platform="$IS_PLATFORM_B" ; needs_checkout="$NEEDS_CHECKOUT_B"
+ checkout_path="$CHECKOUT_PATH_B" ; version_json="$VERSION_B" ;;
+ c) is_platform="$IS_PLATFORM_C" ; needs_checkout="$NEEDS_CHECKOUT_C"
+ checkout_path="$CHECKOUT_PATH_C" ; version_json="$VERSION_C" ;;
+ d) is_platform="$IS_PLATFORM_D" ; needs_checkout="$NEEDS_CHECKOUT_D"
+ checkout_path="$CHECKOUT_PATH_D" ; version_json="$VERSION_D" ;;
+ esac
+ if [[ "$is_platform" != "true" || "$needs_checkout" != "true" || -z "$version_json" ]]; then
+ continue
+ fi
+ tag=$(echo "$version_json" | jq -r '.tag')
+ src_dir="${INPUT_PATH}/${INPUT_SDK}/src/${tag}"
+ otdfctl_dir="${checkout_path}/otdfctl"
+ echo "Symlinking freshly checked-out platform otdfctl ${otdfctl_dir} → ${src_dir}"
+ mkdir -p "$(dirname "$src_dir")"
+ ln -sfn "$otdfctl_dir" "$src_dir"
+ if [ ! -e "$src_dir" ]; then
+ echo "::error::Symlink target does not exist: ${otdfctl_dir} (does the platform repo contain an otdfctl/ directory?)"
+ exit 1
+ fi
+ done
+ env:
+ INPUT_PATH: ${{ inputs.path }}
+ INPUT_SDK: ${{ inputs.sdk }}
+ VERSION_A: ${{ steps.resolve.outputs.version-a }}
+ VERSION_B: ${{ steps.resolve.outputs.version-b }}
+ VERSION_C: ${{ steps.resolve.outputs.version-c }}
+ VERSION_D: ${{ steps.resolve.outputs.version-d }}
+ IS_PLATFORM_A: ${{ steps.check-source.outputs.is-platform-a }}
+ IS_PLATFORM_B: ${{ steps.check-source.outputs.is-platform-b }}
+ IS_PLATFORM_C: ${{ steps.check-source.outputs.is-platform-c }}
+ IS_PLATFORM_D: ${{ steps.check-source.outputs.is-platform-d }}
+ NEEDS_CHECKOUT_A: ${{ steps.check-source.outputs.needs-checkout-a }}
+ NEEDS_CHECKOUT_B: ${{ steps.check-source.outputs.needs-checkout-b }}
+ NEEDS_CHECKOUT_C: ${{ steps.check-source.outputs.needs-checkout-c }}
+ NEEDS_CHECKOUT_D: ${{ steps.check-source.outputs.needs-checkout-d }}
+ CHECKOUT_PATH_A: ${{ steps.check-source.outputs.checkout-path-a }}
+ CHECKOUT_PATH_B: ${{ steps.check-source.outputs.checkout-path-b }}
+ CHECKOUT_PATH_C: ${{ steps.check-source.outputs.checkout-path-c }}
+ CHECKOUT_PATH_D: ${{ steps.check-source.outputs.checkout-path-d }}
- name: post checkout cleanups
if: inputs.sdk == 'java'
diff --git a/xtest/setup-kas-instances/action.yaml b/xtest/setup-kas-instances/action.yaml
new file mode 100644
index 000000000..f5e420172
--- /dev/null
+++ b/xtest/setup-kas-instances/action.yaml
@@ -0,0 +1,90 @@
+name: setup-kas-instances
+description: >-
+  Start additional KAS instances for multi-KAS / ABAC tests.
+  Uses otdf-local ci start-kas to start all 6 KAS instances
+  (alpha, beta, gamma, delta, km1, km2) and expose their log file paths.
+
+inputs:
+  platform-working-dir:
+    description: Path to the platform checkout directory
+    required: true
+  root-key:
+    description: Root key for KAS instances
+    required: true
+  key-management-supported:
+    description: Enable key management on km1/km2 instances (true/false)
+    required: false
+    default: "false"
+  ec-tdf-enabled:
+    description: Enable EC TDF support
+    required: false
+    default: "true"
+  log-type:
+    description: Log format type
+    required: false
+    default: "json"
+  tests-path:
+    description: Path to the tests repo checkout
+    required: false
+    default: "otdftests"
+
+outputs:
+  kas-alpha-log-file:
+    description: Path to KAS alpha log file
+    value: ${{ steps.start-kas.outputs.kas-alpha-log-file }}
+  kas-beta-log-file:
+    description: Path to KAS beta log file
+    value: ${{ steps.start-kas.outputs.kas-beta-log-file }}
+  kas-gamma-log-file:
+    description: Path to KAS gamma log file
+    value: ${{ steps.start-kas.outputs.kas-gamma-log-file }}
+  kas-delta-log-file:
+    description: Path to KAS delta log file
+    value: ${{ steps.start-kas.outputs.kas-delta-log-file }}
+  kas-km1-log-file:
+    description: Path to KAS km1 log file
+    value: ${{ steps.start-kas.outputs.kas-km1-log-file }}
+  kas-km2-log-file:
+    description: Path to KAS km2 log file
+    value: ${{ steps.start-kas.outputs.kas-km2-log-file }}
+
+runs:
+  using: composite
+  steps:
+    - name: Start KAS instances
+      id: start-kas
+      shell: bash
+      run: |
+        # Translate the string-typed boolean inputs into the CLI flags
+        # expected by `otdf-local ci start-kas`.
+        if [[ "$KEY_MANAGEMENT" == "true" ]]; then
+          KM_FLAG="--key-management"
+        else
+          KM_FLAG="--no-key-management"
+        fi
+
+        if [[ "$EC_TDF_ENABLED" == "true" ]]; then
+          EC_FLAG="--ec-tdf-enabled"
+        else
+          EC_FLAG="--no-ec-tdf"
+        fi
+
+        # Resolve otdf-local to an absolute path so `uv --project` works
+        # regardless of the step's working directory.
+        OTDF_LOCAL_DIR="$(cd "$TESTS_PATH" && pwd)/otdf-local"
+
+        # $EC_FLAG / $KM_FLAG are deliberately unquoted: each expands to a
+        # single flag token that must word-split into the argument list.
+        uv run --project "$OTDF_LOCAL_DIR" otdf-local ci start-kas \
+          --platform-dir "$(pwd)/$PLATFORM_DIR" \
+          --root-key "$ROOT_KEY" \
+          $EC_FLAG \
+          $KM_FLAG \
+          --log-type "$LOG_TYPE"
+      env:
+        PLATFORM_DIR: ${{ inputs.platform-working-dir }}
+        ROOT_KEY: ${{ inputs.root-key }}
+        KEY_MANAGEMENT: ${{ inputs.key-management-supported }}
+        EC_TDF_ENABLED: ${{ inputs.ec-tdf-enabled }}
+        LOG_TYPE: ${{ inputs.log-type }}
+        TESTS_PATH: ${{ inputs.tests-path }}
diff --git a/xtest/setup-sdk-clients/action.yaml b/xtest/setup-sdk-clients/action.yaml
new file mode 100644
index 000000000..35be35b41
--- /dev/null
+++ b/xtest/setup-sdk-clients/action.yaml
@@ -0,0 +1,167 @@
+name: setup-sdk-clients
+description: >-
+  Configure, cache, patch, and build an SDK CLI for xtest. Wraps setup-cli-tool
+  and adds SDK-appropriate caching, go.mod/java .env fixups, and make builds.
+  Each invocation handles one SDK (go, java, or js).
+
+inputs:
+  sdk:
+    description: "SDK to set up: go, java, or js"
+    required: true
+  version-info:
+    description: JSON-encoded output of otdf-sdk-mgr versions resolve for this SDK
+    required: true
+  tests-path:
+    description: Path to the tests repo checkout
+    required: false
+    default: "otdftests"
+  platform-working-dir:
+    description: >-
+      Platform checkout directory. Used for go-fixup (bridging client go.mod
+      to server shared modules) and detecting platform-embedded otdfctl.
+    required: false
+  platform-heads:
+    description: JSON list of platform tags that are heads (from resolve-versions)
+    required: false
+    default: "[]"
+  platform-tag:
+    description: Current matrix platform-tag value
+    required: false
+  platform-tag-to-sha:
+    description: JSON object mapping platform tags to SHAs
+    required: false
+    default: "{}"
+  otdfctl-source:
+    description: "Resolved otdfctl source: platform or standalone"
+    required: false
+    default: "standalone"
+  otdfctl-dir:
+    description: Absolute path to platform's otdfctl directory
+    required: false
+  otdfctl-sha:
+    description: SHA of the platform otdfctl checkout
+    required: false
+  focus-sdk:
+    description: "SDK focus filter: all, go, java, or js"
+    required: false
+    default: "all"
+  buf-token:
+    description: BUF token for Java proto compilation
+    required: false
+  pat-opentdf:
+    description: PAT for buf HTTPS password (Java SDK build)
+    required: false
+
+outputs:
+  heads:
+    description: JSON list of head tags for this SDK
+    value: ${{ steps.configure.outputs.heads }}
+
+runs:
+  using: composite
+  steps:
+    # Step 1: Configure SDK via setup-cli-tool (checkout/install)
+    - name: Configure ${{ inputs.sdk }}
+      id: configure
+      uses: ./otdftests/xtest/setup-cli-tool
+      with:
+        path: ${{ inputs.tests-path }}/xtest/sdk
+        sdk: ${{ inputs.sdk }}
+        version-info: ${{ inputs.version-info }}
+        platform-otdfctl-dir: ${{ inputs.otdfctl-dir }}
+        platform-otdfctl-sha: ${{ inputs.otdfctl-sha }}
+
+    # Step 2: SDK-appropriate dependency caching
+    - name: Cache npm
+      if: inputs.sdk == 'js' && fromJson(steps.configure.outputs.heads)[0] != null
+      uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+      with:
+        path: ~/.npm
+        key: npm-${{ runner.os }}-${{ hashFiles(format('{0}/xtest/sdk/js/src/**/package-lock.json', inputs.tests-path)) }}
+        restore-keys: |
+          npm-${{ runner.os }}-
+
+    - name: Cache Go modules
+      if: inputs.sdk == 'go' && fromJson(steps.configure.outputs.heads)[0] != null
+      uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+      with:
+        path: |
+          ~/.cache/go-build
+          ~/go/pkg/mod
+        key: go-${{ runner.os }}-${{ hashFiles(format('{0}/xtest/sdk/go/src/*/go.sum', inputs.tests-path)) }}
+        restore-keys: |
+          go-${{ runner.os }}-
+
+    - name: Cache Maven repository
+      if: inputs.sdk == 'java' && fromJson(steps.configure.outputs.heads)[0] != null
+      uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+      with:
+        path: ~/.m2/repository
+        key: maven-${{ runner.os }}-${{ hashFiles(format('{0}/xtest/sdk/java/src/**/pom.xml', inputs.tests-path)) }}
+        restore-keys: |
+          maven-${{ runner.os }}-
+
+    # Step 3: SDK-specific fixups
+
+    # Go: Bridge client go.mod to server shared modules (standalone otdfctl only)
+    # NOTE(review): this runs only when focus-sdk == 'go'; confirm that
+    # focus-sdk == 'all' is intentionally excluded from the go.mod fixup.
+    - name: Go fixup - replace go.mod packages
+      if: >-
+        inputs.sdk == 'go'
+        && steps.configure.outputs.heads != '[]'
+        && inputs.otdfctl-source != 'platform'
+        && inputs.focus-sdk == 'go'
+        && contains(fromJSON(inputs.platform-heads), inputs.platform-tag)
+        && inputs.platform-working-dir != ''
+      shell: bash
+      run: |
+        SDK_MGR_DIR="$(cd "$TESTS_PATH" && pwd)/otdf-sdk-mgr"
+        PLATFORM_DIR_ABS="$(pwd)/${PLATFORM_WORKING_DIR}"
+        BASE_DIR="$(pwd)/${TESTS_PATH}/xtest/sdk/go/src"
+        uv run --project "$SDK_MGR_DIR" otdf-sdk-mgr go-fixup \
+          --platform-dir "$PLATFORM_DIR_ABS" \
+          --heads "$HEADS" \
+          "$BASE_DIR"
+      env:
+        PLATFORM_WORKING_DIR: ${{ inputs.platform-working-dir }}
+        TESTS_PATH: ${{ inputs.tests-path }}
+        # Passed via env (not inline ${{ }} interpolation in the script) so
+        # the step-output JSON can never be interpreted by the shell.
+        HEADS: ${{ steps.configure.outputs.heads }}
+
+    # Java: Create .env files with PLATFORM_BRANCH for head versions
+    # NOTE(review): runs for focus-sdk 'go' or 'java' but not 'all' — confirm
+    # this matches the intended focus semantics.
+    - name: Java fixup - create platform branch .env files
+      if: >-
+        inputs.sdk == 'java'
+        && steps.configure.outputs.heads != '[]'
+        && (inputs.focus-sdk == 'go' || inputs.focus-sdk == 'java')
+        && contains(fromJSON(inputs.platform-heads), inputs.platform-tag)
+      shell: bash
+      run: |
+        # Emit one <tag>.env per head version, pointing the Java build at the
+        # resolved platform SHA.
+        for row in $(echo "$VERSION_INFO" | jq -c '.[]'); do
+          TAG=$(echo "$row" | jq -r '.tag')
+          HEAD=$(echo "$row" | jq -r '.head')
+          if [[ "$HEAD" == "true" ]]; then
+            echo "Creating .env file for tag: [$TAG]; pointing to platform ref [$PLATFORM_REF]"
+            echo "PLATFORM_BRANCH=$PLATFORM_REF" > "${TESTS_PATH}/xtest/sdk/java/${TAG}.env"
+          fi
+        done
+      env:
+        VERSION_INFO: ${{ inputs.version-info }}
+        PLATFORM_REF: ${{ fromJSON(inputs.platform-tag-to-sha)[inputs.platform-tag] }}
+        TESTS_PATH: ${{ inputs.tests-path }}
+
+    # Step 4: Build the SDK CLI
+    - name: Build ${{ inputs.sdk }} CLI
+      if: fromJson(steps.configure.outputs.heads)[0] != null
+      shell: bash
+      run: make
+      working-directory: ${{ inputs.tests-path }}/xtest/sdk/${{ inputs.sdk }}
+      env:
+        BUF_INPUT_HTTPS_USERNAME: ${{ inputs.sdk == 'java' && 'opentdf-bot' || '' }}
+        BUF_INPUT_HTTPS_PASSWORD: ${{ inputs.sdk == 'java' && inputs.pat-opentdf || '' }}
diff --git a/xtest/setup-test-environment/action.yaml b/xtest/setup-test-environment/action.yaml
new file mode 100644
index 000000000..0f8ce86f3
--- /dev/null
+++ b/xtest/setup-test-environment/action.yaml
@@ -0,0 +1,149 @@
+name: setup-test-environment
+description: >-
+  Detect platform capabilities, extract configuration, and prepare the test
+  environment. Consolidates otdfctl detection, platform version lookup, key
+  management support, root key extraction, multikas support check, and test
+  dependency installation.
+
+inputs:
+  platform-working-dir:
+    description: Platform checkout directory (from start-up-with-containers output)
+    required: true
+  platform-tag:
+    description: Platform version tag under test (matrix value)
+    required: true
+  otdfctl-source-input:
+    description: "User's otdfctl-source preference: auto, standalone, or platform"
+    required: false
+    default: "auto"
+  tests-path:
+    description: Path to the tests repo checkout
+    required: false
+    default: "otdftests"
+
+outputs:
+  otdfctl-source:
+    description: "Resolved otdfctl source: platform or standalone"
+    value: ${{ steps.detect-otdfctl.outputs.otdfctl-source }}
+  otdfctl-dir:
+    description: Absolute path to otdfctl directory (if source=platform)
+    value: ${{ steps.detect-otdfctl.outputs.otdfctl-dir }}
+  otdfctl-sha:
+    description: SHA of the platform checkout (if source=platform)
+    value: ${{ steps.detect-otdfctl.outputs.otdfctl-sha }}
+  platform-version:
+    description: Detected platform version string
+    value: ${{ steps.platform-version.outputs.version }}
+  key-management-supported:
+    description: Whether the platform supports key management (true/false)
+    value: ${{ steps.km-check.outputs.supported }}
+  root-key:
+    description: Root key for KAS instances
+    value: ${{ steps.km-check.outputs.root_key }}
+  multikas-supported:
+    description: Whether multi-KAS is supported (true/false)
+    value: ${{ steps.multikas.outputs.supported }}
+
+runs:
+  using: composite
+  steps:
+    - name: Detect platform-embedded otdfctl
+      id: detect-otdfctl
+      shell: bash
+      run: |
+        # auto: prefer a platform-embedded otdfctl when present, else standalone.
+        if [[ "$OTDFCTL_SOURCE_INPUT" == "auto" || -z "$OTDFCTL_SOURCE_INPUT" ]]; then
+          if [ -d "$PLATFORM_DIR/otdfctl" ] && [ -f "$PLATFORM_DIR/otdfctl/go.mod" ]; then
+            echo "otdfctl found in platform checkout at $PLATFORM_DIR/otdfctl"
+            echo "otdfctl-source=platform" >> "$GITHUB_OUTPUT"
+            echo "otdfctl-dir=$(pwd)/$PLATFORM_DIR/otdfctl" >> "$GITHUB_OUTPUT"
+            echo "otdfctl-sha=$(git -C "$PLATFORM_DIR" rev-parse HEAD)" >> "$GITHUB_OUTPUT"
+          else
+            echo "otdfctl not found in platform checkout; using standalone repo"
+            echo "otdfctl-source=standalone" >> "$GITHUB_OUTPUT"
+          fi
+        elif [[ "$OTDFCTL_SOURCE_INPUT" == "platform" ]]; then
+          # An explicit 'platform' request must fail loudly when unavailable.
+          if [ -z "$PLATFORM_DIR" ] || [ ! -d "$PLATFORM_DIR/otdfctl" ] || [ ! -f "$PLATFORM_DIR/otdfctl/go.mod" ]; then
+            echo "::error::otdfctl-source=platform requested but ${PLATFORM_DIR:-}/otdfctl does not exist or lacks go.mod"
+            exit 1
+          fi
+          echo "otdfctl-source=platform" >> "$GITHUB_OUTPUT"
+          echo "otdfctl-dir=$(pwd)/$PLATFORM_DIR/otdfctl" >> "$GITHUB_OUTPUT"
+          echo "otdfctl-sha=$(git -C "$PLATFORM_DIR" rev-parse HEAD)" >> "$GITHUB_OUTPUT"
+        else
+          echo "otdfctl-source=standalone" >> "$GITHUB_OUTPUT"
+        fi
+      env:
+        OTDFCTL_SOURCE_INPUT: ${{ inputs.otdfctl-source-input }}
+        PLATFORM_DIR: ${{ inputs.platform-working-dir }}
+
+    - name: Lookup platform version
+      id: platform-version
+      shell: bash
+      run: |
+        # Single invocation, capturing stdout only, so stray stderr output
+        # (e.g. "go: downloading ...") cannot pollute the version value.
+        if PLATFORM_VERSION=$(go run ./service version); then
+          echo "Detected platform version: [$PLATFORM_VERSION]"
+          echo "version=$PLATFORM_VERSION" >> "$GITHUB_OUTPUT"
+        else
+          echo "Error: Unable to get platform version; defaulting to tag: [$PLATFORM_TAG]"
+          echo "version=$PLATFORM_TAG" >> "$GITHUB_OUTPUT"
+        fi
+      working-directory: ${{ inputs.platform-working-dir }}
+      env:
+        PLATFORM_TAG: ${{ inputs.platform-tag }}
+
+    - name: Check key management support and prepare root key
+      id: km-check
+      shell: bash
+      run: |
+        OT_CONFIG_FILE="$(pwd)/opentdf.yaml"
+        # The preview key being present at all (whether true OR false) means
+        # this platform build knows about key management.
+        km_value=$(yq e '.services.kas.preview.key_management' "$OT_CONFIG_FILE" 2>/dev/null || echo "null")
+        case "$km_value" in
+          true|false)
+            echo "supported=true" >> "$GITHUB_OUTPUT"
+            ;;
+          *)
+            echo "supported=false" >> "$GITHUB_OUTPUT"
+            ;;
+        esac
+        existing_root_key=$(yq e '.services.kas.root_key' "$OT_CONFIG_FILE" 2>/dev/null || echo "")
+        if [ -n "$existing_root_key" ] && [ "$existing_root_key" != "null" ]; then
+          echo "Using existing root key from config"
+          echo "root_key=$existing_root_key" >> "$GITHUB_OUTPUT"
+        else
+          echo "Generating a new root key for additional KAS"
+          gen_root_key=$(openssl rand -hex 32)
+          echo "root_key=$gen_root_key" >> "$GITHUB_OUTPUT"
+        fi
+      working-directory: ${{ inputs.platform-working-dir }}
+
+    - name: Check multikas support
+      id: multikas
+      shell: bash
+      run: |
+        if [[ $PLATFORM_TAG == main ]]; then
+          echo "Main supports multikas"
+          echo "supported=true" >> "$GITHUB_OUTPUT"
+        # awk test: major.minor of the (v-stripped) version is at least 0.5,
+        # i.e. major > 0, or major == 0 with minor > 4.
+        elif awk -F. '{ if ($1 > 0 || ($1 == 0 && $2 > 4)) exit 0; else exit 1; }' <<< "${PLATFORM_VERSION#v}"; then
+          echo "Selected version [$PLATFORM_VERSION] supports multikas"
+          echo "supported=true" >> "$GITHUB_OUTPUT"
+        else
+          echo "At tag [$PLATFORM_TAG], [$PLATFORM_VERSION] probably does not support multikas"
+          echo "supported=false" >> "$GITHUB_OUTPUT"
+        fi
+      env:
+        PLATFORM_TAG: ${{ inputs.platform-tag }}
+        PLATFORM_VERSION: ${{ steps.platform-version.outputs.version }}
+
+    - name: Install test dependencies
+      shell: bash
+      run: uv sync
+      working-directory: ${{ inputs.tests-path }}/xtest