diff --git a/.github/workflows/xtest.yml b/.github/workflows/xtest.yml index 773448892..59350831f 100644 --- a/.github/workflows/xtest.yml +++ b/.github/workflows/xtest.yml @@ -28,6 +28,11 @@ on: type: string default: all description: "SDK to focus on (go, js, java, all)" + otdfctl-source: + required: false + type: string + default: auto + description: "otdfctl source: 'auto' (standalone for releases, detect platform for head builds), 'standalone', or 'platform'" workflow_call: inputs: platform-ref: @@ -50,6 +55,10 @@ on: required: false type: string default: all + otdfctl-source: + required: false + type: string + default: auto schedule: - cron: "30 6 * * *" # 0630 UTC - cron: "0 5 * * 1,3" # 500 UTC (Monday, Wednesday) @@ -78,6 +87,14 @@ jobs: JS_REF: "${{ inputs.js-ref }}" OTDFCTL_REF: "${{ inputs.otdfctl-ref }}" JAVA_REF: "${{ inputs.java-ref }}" + # When explicitly set to 'platform', tells the Go resolver to resolve + # against opentdf/platform tags instead of the standalone otdfctl repo. + # In 'auto' mode, releases resolve from standalone; the detect-otdfctl + # step later probes the platform checkout for an embedded otdfctl + # directory, and setup-cli-tool acts on this only for versions that need + # a source checkout (head or artifact-install failure), reusing the + # platform checkout only when the resolved SHA matches. 
+ OTDFCTL_SOURCE: "${{ inputs.otdfctl-source == 'platform' && 'platform' || '' }}" steps: - name: Validate focus-sdk input if: ${{ inputs.focus-sdk != '' }} @@ -116,6 +133,7 @@ jobs: sparse-checkout: | xtest/sdk otdf-sdk-mgr + otdf-local - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b with: python-version: "3.14" @@ -170,7 +188,7 @@ jobs: core.summary.addHeading('Versions under Test', 3); - function artifactLink(sdkType, tag, release, head) { + function artifactLink(sdkType, tag, release, head, source) { if (head || !release) return ''; const v = tag.replace(/^v/, ''); if (sdkType === 'js') { @@ -182,7 +200,10 @@ jobs: return `Maven Central`; } if (sdkType === 'go') { - const url = `https://pkg.go.dev/github.com/opentdf/otdfctl@${encodeURIComponent(tag)}`; + const modulePath = source === 'platform' + ? `github.com/opentdf/platform/otdfctl` + : `github.com/opentdf/otdfctl`; + const url = `https://pkg.go.dev/${modulePath}@${encodeURIComponent(tag)}`; return `pkg.go.dev`; } return ''; @@ -198,14 +219,15 @@ jobs: const tagToSha = {}; const headTags = []; - for (const { tag, head, sha, alias, err, release } of refInfo) { - const sdkRepoUrl = `https://github.com/opentdf/${encodeURIComponent(sdkType == 'js' ? 'web-sdk' : sdkType == 'go' ? 'otdfctl' : sdkType == 'java' ? 'java-sdk' : sdkType)}`; + for (const { tag, head, sha, alias, err, release, source } of refInfo) { + const goRepoName = source === 'platform' ? 'platform' : 'otdfctl'; + const sdkRepoUrl = `https://github.com/opentdf/${encodeURIComponent(sdkType == 'js' ? 'web-sdk' : sdkType == 'go' ? goRepoName : sdkType == 'java' ? 'java-sdk' : sdkType)}`; const sdkLink = `${htmlEscape(sdkType)}`; const commitLink = sha ? `${htmlEscape(sha.substring(0, 7))}` : ' . '; const tagLink = (release && tag) ? `${htmlEscape(tag)}` : tag ? 
htmlEscape(tag) : 'N/A'; - const artifactCell = artifactLink(sdkType, tag, release, head); + const artifactCell = artifactLink(sdkType, tag, release, head, source); table.push([sdkLink, tagLink, commitLink, alias || 'N/A', artifactCell || 'N/A', err || 'N/A']); if (err) { errorCount += 1; @@ -231,6 +253,76 @@ jobs: throw new Error('Errors detected in version resolution. Failing the run.'); } + - name: Generate xtest configs + id: xtest-configs + continue-on-error: true + shell: bash + run: | + set +e # Don't abort on errors; this step is supplementary + OTDF_LOCAL_DIR="${GITHUB_WORKSPACE}/otdf-sdk/otdf-local" + FOCUS_SDK="${FOCUS_SDK_INPUT:-all}" + OTDFCTL_SRC="${OTDFCTL_SOURCE_INPUT:-auto}" + GO_REFS="${OTDFCTL_REF:-${DEFAULT_TAGS}}" + JS_REFS="${JS_REF:-${DEFAULT_TAGS}}" + JAVA_REFS="${JAVA_REF:-${DEFAULT_TAGS}}" + + mkdir -p xtest-configs + + # Generate one config per (platform-tag, sdk) matrix cell + PLATFORM_TAGS=$(echo "$PLATFORM_TAG_LIST_JSON" | jq -r '.[]') + for ptag in $PLATFORM_TAGS; do + for sdk in go java js; do + CONFIG_FILE="xtest-configs/${sdk}-${ptag}.yaml" + echo "Generating config: $CONFIG_FILE" + uv run --project "$OTDF_LOCAL_DIR" otdf-local xtest resolve \ + --platform-ref "$ptag" \ + --go-ref "$GO_REFS" \ + --js-ref "$JS_REFS" \ + --java-ref "$JAVA_REFS" \ + --focus-sdk "$FOCUS_SDK" \ + --otdfctl-source "$OTDFCTL_SRC" \ + --output "$CONFIG_FILE" || echo "Warning: config generation failed for ${sdk}-${ptag}" + # Patch encrypt-sdk to match the matrix cell + if [ -f "$CONFIG_FILE" ]; then + sed -i "s/^encrypt-sdk:.*/encrypt-sdk: ${sdk}/" "$CONFIG_FILE" + fi + done + done + + # Emit a sample config to step summary for local reproduction + SAMPLE=$(ls xtest-configs/*.yaml 2>/dev/null | head -1) + if [ -n "$SAMPLE" ]; then + { + echo '
Reproduce locally (xtest config)' + echo '' + echo '```yaml' + cat "$SAMPLE" + echo '```' + echo '' + echo '**To reproduce locally:**' + echo '```bash' + echo '# Save the above YAML to xtest-config.yaml in the repo root, then:' + echo 'cd tests/otdf-local' + echo 'uv run otdf-local xtest run ../../xtest-config.yaml' + echo '```' + echo '
' + } >> "$GITHUB_STEP_SUMMARY" + else + echo "Warning: No xtest configs were generated" + fi + env: + FOCUS_SDK_INPUT: "${{ inputs.focus-sdk }}" + OTDFCTL_SOURCE_INPUT: "${{ inputs.otdfctl-source }}" + PLATFORM_TAG_LIST_JSON: "${{ steps.version-info.outputs.platform-tag-list }}" + + - name: Upload xtest configs + if: steps.xtest-configs.outcome == 'success' + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: xtest-configs + path: xtest-configs/ + if-no-files-found: warn + xct: timeout-minutes: 60 runs-on: ubuntu-latest @@ -255,6 +347,25 @@ jobs: path: otdftests # use different name bc other repos might have tests directories persist-credentials: false + - name: Download xtest config + continue-on-error: true + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 + with: + name: xtest-configs + path: xtest-configs + + - name: Set xtest config path + id: xtest-config + run: | + CONFIG="xtest-configs/${{ matrix.sdk }}-${{ matrix.platform-tag }}.yaml" + if [ -f "$CONFIG" ]; then + echo "config-file=$CONFIG" >> "$GITHUB_OUTPUT" + echo "Using xtest config: $CONFIG" + else + echo "config-file=" >> "$GITHUB_OUTPUT" + echo "No xtest config found for ${{ matrix.sdk }}-${{ matrix.platform-tag }}" + fi + - name: load extra keys from file id: load-extra-keys run: |- @@ -290,191 +401,56 @@ jobs: with: node-version: "22.x" - ######### CHECKOUT JS CLI ############# - - name: Configure js-sdk - id: configure-js - uses: ./otdftests/xtest/setup-cli-tool + ######## SETUP TEST ENVIRONMENT ############# + - name: Setup test environment + id: test-env + uses: ./otdftests/xtest/setup-test-environment + with: + platform-working-dir: ${{ steps.run-platform.outputs.platform-working-dir }} + platform-tag: ${{ matrix.platform-tag }} + otdfctl-source-input: ${{ inputs.otdfctl-source }} + + ######## SETUP SDK CLIENTS ############# + - name: Setup JS SDK client + id: setup-js + uses: ./otdftests/xtest/setup-sdk-clients 
with: - path: otdftests/xtest/sdk sdk: js version-info: "${{ needs.resolve-versions.outputs.js }}" - - - name: Cache npm - if: fromJson(steps.configure-js.outputs.heads)[0] != null - uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2 - with: - path: ~/.npm - key: npm-${{ runner.os }}-${{ hashFiles('otdftests/xtest/sdk/js/src/**/package-lock.json') }} - restore-keys: | - npm-${{ runner.os }}- - - ######## SETUP THE JS CLI ############# - - name: build and setup the web-sdk cli - id: build-web-sdk - if: fromJson(steps.configure-js.outputs.heads)[0] != null - run: | - make - working-directory: otdftests/xtest/sdk/js - - ######## CHECKOUT GO CLI ############# - - name: Configure otdfctl - id: configure-go - uses: ./otdftests/xtest/setup-cli-tool + platform-working-dir: ${{ steps.run-platform.outputs.platform-working-dir }} + platform-heads: ${{ needs.resolve-versions.outputs.heads }} + platform-tag: ${{ matrix.platform-tag }} + platform-tag-to-sha: ${{ needs.resolve-versions.outputs.platform-tag-to-sha }} + + - name: Setup Go SDK client (otdfctl) + id: setup-go + uses: ./otdftests/xtest/setup-sdk-clients with: - path: otdftests/xtest/sdk sdk: go version-info: "${{ needs.resolve-versions.outputs.go }}" - - - name: Cache Go modules - if: fromJson(steps.configure-go.outputs.heads)[0] != null - uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2 - with: - path: | - ~/.cache/go-build - ~/go/pkg/mod - key: go-${{ runner.os }}-${{ hashFiles('otdftests/xtest/sdk/go/src/*/go.sum') }} - restore-keys: | - go-${{ runner.os }}- - - - name: Resolve otdfctl heads - id: resolve-otdfctl-heads - if: fromJson(steps.configure-go.outputs.heads)[0] != null - run: |- - echo "OTDFCTL_HEADS=$OTDFCTL_HEADS" >> "$GITHUB_ENV" - env: - OTDFCTL_HEADS: ${{ steps.configure-go.outputs.heads }} - - - name: Replace otdfctl go.mod packages, but only at head version of platform - if: fromJson(steps.configure-go.outputs.heads)[0] != null && env.FOCUS_SDK == 'go' && 
contains(fromJSON(needs.resolve-versions.outputs.heads), matrix.platform-tag) - env: - PLATFORM_WORKING_DIR: ${{ steps.run-platform.outputs.platform-working-dir }} - run: |- - echo "Replacing go.mod packages..." - PLATFORM_DIR_ABS="$(pwd)/${PLATFORM_WORKING_DIR}" - OTDFCTL_DIR_ABS="$(pwd)/otdftests/xtest/sdk/go/src/" - echo "PLATFORM_DIR_ABS: $PLATFORM_DIR_ABS" - echo "OTDFCTL_DIR_ABS: $OTDFCTL_DIR_ABS" - for head in $(echo "${OTDFCTL_HEADS}" | jq -r '.[]'); do - echo "Processing head: $head" - cd "${OTDFCTL_DIR_ABS}/$head" - for m in lib/fixtures lib/ocrypto protocol/go sdk; do - go mod edit -replace "github.com/opentdf/platform/$m=${PLATFORM_DIR_ABS}/$m" - done - go mod tidy - done - - ######## SETUP THE GO CLI ############# - - name: Prepare go cli - if: fromJson(steps.configure-go.outputs.heads)[0] != null - run: |- - make - working-directory: otdftests/xtest/sdk/go - - ####### CHECKOUT JAVA SDK ############## - - - name: Configure java-sdk - id: configure-java - uses: ./otdftests/xtest/setup-cli-tool + platform-working-dir: ${{ steps.run-platform.outputs.platform-working-dir }} + platform-heads: ${{ needs.resolve-versions.outputs.heads }} + platform-tag: ${{ matrix.platform-tag }} + platform-tag-to-sha: ${{ needs.resolve-versions.outputs.platform-tag-to-sha }} + otdfctl-source: ${{ steps.test-env.outputs.otdfctl-source }} + otdfctl-dir: ${{ steps.test-env.outputs.otdfctl-dir }} + otdfctl-sha: ${{ steps.test-env.outputs.otdfctl-sha }} + focus-sdk: ${{ inputs.focus-sdk || 'all' }} + + - name: Setup Java SDK client + id: setup-java + uses: ./otdftests/xtest/setup-sdk-clients with: - path: otdftests/xtest/sdk sdk: java version-info: "${{ needs.resolve-versions.outputs.java }}" - - - name: Cache Maven repository - if: fromJson(steps.configure-java.outputs.heads)[0] != null - uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2 - with: - path: ~/.m2/repository - key: maven-${{ runner.os }}-${{ hashFiles('otdftests/xtest/sdk/java/src/**/pom.xml') }} 
- restore-keys: | - maven-${{ runner.os }}- - - - name: pre-release protocol buffers for java-sdk - if: >- - fromJson(steps.configure-java.outputs.heads)[0] != null - && (env.FOCUS_SDK == 'go' || env.FOCUS_SDK == 'java') - && contains(fromJSON(needs.resolve-versions.outputs.heads), matrix.platform-tag) - run: |- - echo "Replacing .env files for java-sdk..." - echo "Platform tag: $platform_tag" - echo "Java version info: $java_version_info" - for row in $(echo "$java_version_info" | jq -c '.[]'); do - TAG=$(echo "$row" | jq -r '.tag') - HEAD=$(echo "$row" | jq -r '.head') - if [[ "$HEAD" == "true" ]]; then - echo "Creating .env file for tag: [$TAG]; pointing to platform ref [$platform_tag]" - echo "PLATFORM_BRANCH=$platform_ref" > "otdftests/xtest/sdk/java/${TAG}.env" - else - echo "Skipping .env file creation for release version [$TAG]" - fi - done - env: - java_version_info: ${{ needs.resolve-versions.outputs.java }} - platform_ref: ${{ fromJSON(needs.resolve-versions.outputs.platform-tag-to-sha)[matrix.platform-tag] }} - platform_tag: ${{ matrix.platform-tag }} - - ####### SETUP JAVA CLI ############## - - name: Prepare java cli - if: fromJson(steps.configure-java.outputs.heads)[0] != null - run: | - make - working-directory: otdftests/xtest/sdk/java - env: - BUF_INPUT_HTTPS_USERNAME: opentdf-bot - BUF_INPUT_HTTPS_PASSWORD: ${{ secrets.PERSONAL_ACCESS_TOKEN_OPENTDF }} - - ######## Configure test environment ############# - - name: Lookup current platform version - id: platform-version - run: |- - if ! 
go run ./service version; then - # NOTE: the version command was added in 0.4.37 - echo "Error: Unable to get platform version; defaulting to tag: [$PLATFORM_TAG]" - echo "PLATFORM_VERSION=$PLATFORM_TAG" >> "$GITHUB_ENV" - exit - fi - # Older version commands output version to stderr; newer versions output to stdout - PLATFORM_VERSION=$(go run ./service version 2>&1) - echo "PLATFORM_VERSION=$PLATFORM_VERSION" >> "$GITHUB_ENV" - echo "## Platform version output: [$PLATFORM_VERSION]" - working-directory: ${{ steps.run-platform.outputs.platform-working-dir }} - env: - PLATFORM_TAG: ${{ matrix.platform-tag }} - - - name: Check key management support and prepare root key - id: km-check - run: |- - OT_CONFIG_FILE="$(pwd)/opentdf.yaml" - echo "OT_CONFIG_FILE=$OT_CONFIG_FILE" >> "$GITHUB_ENV" - # Determine if the config declares the key_management field - km_value=$(yq e '.services.kas.preview.key_management' "$OT_CONFIG_FILE" 2>/dev/null || echo "null") - case "$km_value" in - true|false) - echo "KEY_MANAGEMENT_SUPPORTED=true" >> "$GITHUB_ENV" - echo "supported=true" >> "$GITHUB_OUTPUT" - ;; - *) - echo "KEY_MANAGEMENT_SUPPORTED=false" >> "$GITHUB_ENV" - echo "supported=false" >> "$GITHUB_OUTPUT" - ;; - esac - # Prepare a root key for use by additional KAS instances - existing_root_key=$(yq e '.services.kas.root_key' "$OT_CONFIG_FILE" 2>/dev/null || echo "") - if [ -n "$existing_root_key" ] && [ "$existing_root_key" != "null" ]; then - echo "Using existing root key from config" - echo "OT_ROOT_KEY=$existing_root_key" >> "$GITHUB_ENV" - echo "root_key=$existing_root_key" >> "$GITHUB_OUTPUT" - else - echo "Generating a new root key for additional KAS" - gen_root_key=$(openssl rand -hex 32) - echo "OT_ROOT_KEY=$gen_root_key" >> "$GITHUB_ENV" - echo "root_key=$gen_root_key" >> "$GITHUB_OUTPUT" - fi - working-directory: ${{ steps.run-platform.outputs.platform-working-dir }} - - - name: Install test dependencies - run: uv sync - working-directory: otdftests/xtest + 
platform-working-dir: ${{ steps.run-platform.outputs.platform-working-dir }} + platform-heads: ${{ needs.resolve-versions.outputs.heads }} + platform-tag: ${{ matrix.platform-tag }} + platform-tag-to-sha: ${{ needs.resolve-versions.outputs.platform-tag-to-sha }} + focus-sdk: ${{ inputs.focus-sdk || 'all' }} + pat-opentdf: ${{ secrets.PERSONAL_ACCESS_TOKEN_OPENTDF }} + + ######## VALIDATE HELPERS ############# - name: Validate xtest helper library (tests of the test harness and its utilities) if: ${{ !inputs }} run: |- @@ -523,92 +499,17 @@ jobs: ######## ATTRIBUTE BASED CONFIGURATION ############# - - name: Does platform support multikas? - id: multikas - run: |- - if [[ $PLATFORM_TAG == main ]]; then - echo "Main supports multikas" - echo "supported=true" >> "$GITHUB_OUTPUT" - elif awk -F. '{ if ($1 > 0 || ($1 == 0 && $2 > 4)) exit 0; else exit 1; }' <<< "${PLATFORM_VERSION#v}"; then - echo "Selected version [$PLATFORM_VERSION] supports multikas" - echo "supported=true" >> "$GITHUB_OUTPUT" - else - echo "At tag [$PLATFORM_TAG], [$PLATFORM_VERSION] probably does not support multikas" - echo "supported=false" >> "$GITHUB_OUTPUT" - fi - env: - PLATFORM_TAG: ${{ matrix.platform-tag }} - - - name: Start additional kas - id: kas-alpha - if: ${{ steps.multikas.outputs.supported == 'true' }} - uses: opentdf/platform/test/start-additional-kas@998929e5c66d41f928b90e6af7dbaa0a14302ca6 # watch-sh-fix - with: - ec-tdf-enabled: true - kas-name: alpha - kas-port: 8181 - log-type: json - root-key: ${{ steps.km-check.outputs.root_key }} - - - name: Start additional kas - id: kas-beta - if: ${{ steps.multikas.outputs.supported == 'true' }} - uses: opentdf/platform/test/start-additional-kas@998929e5c66d41f928b90e6af7dbaa0a14302ca6 # watch-sh-fix - with: - ec-tdf-enabled: true - kas-name: beta - kas-port: 8282 - log-type: json - root-key: ${{ steps.km-check.outputs.root_key }} - - - name: Start additional kas - id: kas-gamma - if: ${{ steps.multikas.outputs.supported == 'true' }} - 
uses: opentdf/platform/test/start-additional-kas@998929e5c66d41f928b90e6af7dbaa0a14302ca6 # watch-sh-fix - with: - ec-tdf-enabled: true - kas-name: gamma - kas-port: 8383 - log-type: json - root-key: ${{ steps.km-check.outputs.root_key }} - - - name: Start additional kas - id: kas-delta - if: ${{ steps.multikas.outputs.supported == 'true' }} - uses: opentdf/platform/test/start-additional-kas@998929e5c66d41f928b90e6af7dbaa0a14302ca6 # watch-sh-fix - with: - ec-tdf-enabled: true - kas-port: 8484 - kas-name: delta - log-type: json - root-key: ${{ steps.km-check.outputs.root_key }} - - - name: Start additional KM kas (km1) - id: kas-km1 - if: ${{ steps.multikas.outputs.supported == 'true' }} - uses: opentdf/platform/test/start-additional-kas@998929e5c66d41f928b90e6af7dbaa0a14302ca6 # watch-sh-fix + - name: Start KAS instances for ABAC tests + id: kas-instances + if: ${{ steps.test-env.outputs.multikas-supported == 'true' }} + uses: ./otdftests/xtest/setup-kas-instances with: - ec-tdf-enabled: true - key-management: ${{ steps.km-check.outputs.supported }} - kas-name: km1 - kas-port: 8585 - log-type: json - root-key: ${{ steps.km-check.outputs.root_key }} - - - name: Start additional KM kas (km2) - id: kas-km2 - if: ${{ steps.multikas.outputs.supported == 'true' }} - uses: opentdf/platform/test/start-additional-kas@998929e5c66d41f928b90e6af7dbaa0a14302ca6 # watch-sh-fix - with: - ec-tdf-enabled: true - kas-name: km2 - key-management: ${{ steps.km-check.outputs.supported }} - kas-port: 8686 - log-type: json - root-key: ${{ steps.km-check.outputs.root_key }} + platform-working-dir: ${{ steps.run-platform.outputs.platform-working-dir }} + root-key: ${{ steps.test-env.outputs.root-key }} + key-management-supported: ${{ steps.test-env.outputs.key-management-supported }} - name: Run attribute based configuration tests - if: ${{ steps.multikas.outputs.supported == 'true' }} + if: ${{ steps.test-env.outputs.multikas-supported == 'true' }} run: >- uv run pytest -ra @@ -625,13 
+526,14 @@ jobs: env: PLATFORM_DIR: "../../${{ steps.run-platform.outputs.platform-working-dir }}" PLATFORM_TAG: ${{ matrix.platform-tag }} + OT_ROOT_KEY: ${{ steps.test-env.outputs.root-key }} PLATFORM_LOG_FILE: "../../${{ steps.run-platform.outputs.platform-log-file }}" - KAS_ALPHA_LOG_FILE: "../../${{ steps.kas-alpha.outputs.log-file }}" - KAS_BETA_LOG_FILE: "../../${{ steps.kas-beta.outputs.log-file }}" - KAS_GAMMA_LOG_FILE: "../../${{ steps.kas-gamma.outputs.log-file }}" - KAS_DELTA_LOG_FILE: "../../${{ steps.kas-delta.outputs.log-file }}" - KAS_KM1_LOG_FILE: "../../${{ steps.kas-km1.outputs.log-file }}" - KAS_KM2_LOG_FILE: "../../${{ steps.kas-km2.outputs.log-file }}" + KAS_ALPHA_LOG_FILE: ${{ steps.kas-instances.outputs.kas-alpha-log-file }} + KAS_BETA_LOG_FILE: ${{ steps.kas-instances.outputs.kas-beta-log-file }} + KAS_GAMMA_LOG_FILE: ${{ steps.kas-instances.outputs.kas-gamma-log-file }} + KAS_DELTA_LOG_FILE: ${{ steps.kas-instances.outputs.kas-delta-log-file }} + KAS_KM1_LOG_FILE: ${{ steps.kas-instances.outputs.kas-km1-log-file }} + KAS_KM2_LOG_FILE: ${{ steps.kas-instances.outputs.kas-km2-log-file }} - name: Upload artifact uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 @@ -649,6 +551,28 @@ jobs: path: otdftests/xtest/test-results/audit-logs/*.log if-no-files-found: ignore + - name: Emit xtest config for reproduction + if: failure() && steps.xtest-config.outputs.config-file != '' + run: | + CONFIG="${{ steps.xtest-config.outputs.config-file }}" + { + echo '
Reproduce this failure locally' + echo '' + echo '```yaml' + cat "$CONFIG" + echo '```' + echo '' + echo '**To reproduce:**' + echo '```bash' + echo "cat > xtest-config.yaml <<'EOF'" + cat "$CONFIG" + echo 'EOF' + echo 'cd tests/otdf-local' + echo 'uv run otdf-local xtest run ../../xtest-config.yaml' + echo '```' + echo '
' + } >> "$GITHUB_STEP_SUMMARY" + publish-results: runs-on: ubuntu-latest needs: xct diff --git a/otdf-local/src/otdf_local/ci.py b/otdf-local/src/otdf_local/ci.py new file mode 100644 index 000000000..bab3b3203 --- /dev/null +++ b/otdf-local/src/otdf_local/ci.py @@ -0,0 +1,223 @@ +"""CI-specific commands for otdf-local. + +These commands adapt the local environment management for GitHub Actions CI, +where the platform is already started by an external action and we only need +to start KAS instances as background processes. +""" + +from __future__ import annotations + +import os +import sys +from pathlib import Path +from typing import Annotated + +import typer + +from otdf_local.config.ports import Ports +from otdf_local.config.settings import Settings +from otdf_local.health.waits import WaitTimeoutError, wait_for_health +from otdf_local.services import get_kas_manager +from otdf_local.utils.console import ( + print_error, + print_info, + print_success, + print_warning, +) +from otdf_local.utils.yaml import load_yaml, save_yaml, set_nested + +ci_app = typer.Typer( + name="ci", + help="CI-specific commands for GitHub Actions workflows.", + no_args_is_help=True, +) + + +def _emit_github_output(key: str, value: str) -> None: + """Write a key=value pair to $GITHUB_OUTPUT if available, else print to stdout.""" + github_output = os.environ.get("GITHUB_OUTPUT") + if github_output: + with open(github_output, "a") as f: + f.write(f"{key}={value}\n") + else: + # Fallback for local testing + print(f"{key}={value}", file=sys.stdout) + + +def _prepare_kas_template( + settings: Settings, root_key: str | None, ec_tdf_enabled: bool +) -> None: + """Ensure the KAS template config has the right root key and EC TDF settings. + + In CI, the platform config may have a root_key that differs from what + we want for additional KAS instances. This updates the platform config + in-place so that KASService._generate_config reads the correct root_key. 
+ """ + if root_key: + config = load_yaml(settings.platform_config) + set_nested(config, "services.kas.root_key", root_key) + if ec_tdf_enabled: + set_nested(config, "services.kas.preview.ec_tdf_enabled", True) + save_yaml(settings.platform_config, config) + + +@ci_app.command("start-kas") +def start_kas( + platform_dir: Annotated[ + Path, + typer.Option( + "--platform-dir", + help="Path to the platform checkout (must contain opentdf-kas-mode.yaml)", + envvar="OTDF_LOCAL_PLATFORM_DIR", + ), + ], + root_key: Annotated[ + str | None, + typer.Option( + "--root-key", + help="Root key for KAS instances (overrides platform config value)", + envvar="OT_ROOT_KEY", + ), + ] = None, + ec_tdf_enabled: Annotated[ + bool, + typer.Option( + "--ec-tdf-enabled/--no-ec-tdf", + help="Enable EC TDF support", + ), + ] = True, + key_management: Annotated[ + bool, + typer.Option( + "--key-management/--no-key-management", + help="Enable key management on km1/km2 instances", + ), + ] = False, + log_type: Annotated[ + str, + typer.Option( + "--log-type", + help="Log format type (json, text)", + ), + ] = "json", + health_timeout: Annotated[ + int, + typer.Option( + "--health-timeout", + help="Seconds to wait for each KAS instance to become healthy", + ), + ] = 60, + instances: Annotated[ + str | None, + typer.Option( + "--instances", + help="Comma-separated KAS instance names (default: all)", + ), + ] = None, +) -> None: + """Start KAS instances for CI and emit GitHub Actions outputs. + + Expects the platform to already be running (started by start-up-with-containers). + Starts all 6 KAS instances (alpha, beta, gamma, delta, km1, km2) as background + processes, waits for each to pass health checks, and emits log file paths as + GitHub Actions step outputs. 
+ + Output keys (written to $GITHUB_OUTPUT): + kas-alpha-log-file, kas-beta-log-file, kas-gamma-log-file, + kas-delta-log-file, kas-km1-log-file, kas-km2-log-file + """ + platform_dir = platform_dir.resolve() + if not platform_dir.is_dir(): + print_error(f"Platform directory does not exist: {platform_dir}") + raise typer.Exit(1) + + # Check for required template files + kas_template = platform_dir / "opentdf-kas-mode.yaml" + platform_config = platform_dir / "opentdf-dev.yaml" + if not kas_template.exists(): + # Fall back to opentdf.yaml if opentdf-kas-mode.yaml doesn't exist + kas_template_alt = platform_dir / "opentdf.yaml" + if kas_template_alt.exists(): + print_info( + f"Using {kas_template_alt} as KAS template (opentdf-kas-mode.yaml not found)" + ) + else: + print_error( + f"Neither opentdf-kas-mode.yaml nor opentdf.yaml found in {platform_dir}" + ) + raise typer.Exit(1) + + if not platform_config.exists(): + # Try opentdf.yaml as fallback + platform_config_alt = platform_dir / "opentdf.yaml" + if platform_config_alt.exists(): + platform_config = platform_config_alt + + # Build settings with CI-specific overrides + # We use a fresh xtest_root derived from this package's location + settings = Settings( + platform_dir=platform_dir, + ) + settings.ensure_directories() + + # Update root key in platform config if provided + if root_key: + _prepare_kas_template(settings, root_key, ec_tdf_enabled) + + # Determine which instances to start + if instances: + kas_names = [n.strip() for n in instances.split(",")] + for name in kas_names: + if name not in Ports.all_kas_names(): + print_error(f"Unknown KAS instance: {name}") + raise typer.Exit(1) + else: + kas_names = Ports.all_kas_names() + + # Start KAS instances + print_info(f"Starting KAS instances: {', '.join(kas_names)}...") + kas_manager = get_kas_manager(settings) + + failed = [] + for name in kas_names: + kas = kas_manager.get(name) + if kas is None: + print_error(f"KAS instance {name} not found in manager") + 
failed.append(name) + continue + if not kas.start(): + print_error(f"Failed to start KAS {name}") + failed.append(name) + + if failed: + print_error(f"Failed to start: {', '.join(failed)}") + raise typer.Exit(1) + + # Wait for health + print_info("Waiting for KAS health checks...") + unhealthy = [] + for name in kas_names: + port = Ports.get_kas_port(name) + try: + wait_for_health( + f"http://localhost:{port}/healthz", + timeout=health_timeout, + service_name=f"KAS {name}", + ) + except WaitTimeoutError as e: + print_warning(str(e)) + unhealthy.append(name) + + if unhealthy: + print_error(f"KAS instances failed health check: {', '.join(unhealthy)}") + raise typer.Exit(1) + + print_success(f"All {len(kas_names)} KAS instances are healthy") + + # Emit outputs + for name in kas_names: + log_path = settings.get_kas_log_path(name) + output_key = f"kas-{name}-log-file" + _emit_github_output(output_key, str(log_path)) + + print_success("CI KAS startup complete") diff --git a/otdf-local/src/otdf_local/cli.py b/otdf-local/src/otdf_local/cli.py index d8e3597ff..a3bc51265 100644 --- a/otdf-local/src/otdf_local/cli.py +++ b/otdf-local/src/otdf_local/cli.py @@ -11,6 +11,8 @@ from rich.live import Live from otdf_local import __version__ +from otdf_local.ci import ci_app +from otdf_local.xtest.cli import xtest_app from otdf_local.config.ports import Ports from otdf_local.config.settings import get_settings from otdf_local.health.waits import WaitTimeoutError, wait_for_health, wait_for_port @@ -43,6 +45,9 @@ pretty_exceptions_enable=sys.stderr.isatty(), ) +app.add_typer(ci_app, name="ci") +app.add_typer(xtest_app, name="xtest") + def _show_provision_error(result: ProvisionResult, target: str) -> None: """Display provisioning error with stderr details.""" diff --git a/otdf-local/src/otdf_local/xtest/__init__.py b/otdf-local/src/otdf_local/xtest/__init__.py new file mode 100644 index 000000000..03d10531f --- /dev/null +++ b/otdf-local/src/otdf_local/xtest/__init__.py @@ -0,0 +1,10 
@@ +"""xtest configuration and execution subpackage.""" + +from otdf_local.xtest.config import Features, TestPhase, XtestConfig, XtestInputs + +__all__ = [ + "Features", + "TestPhase", + "XtestConfig", + "XtestInputs", +] diff --git a/otdf-local/src/otdf_local/xtest/cli.py b/otdf-local/src/otdf_local/xtest/cli.py new file mode 100644 index 000000000..eca67266f --- /dev/null +++ b/otdf-local/src/otdf_local/xtest/cli.py @@ -0,0 +1,202 @@ +"""Typer CLI subcommands for xtest configuration and execution.""" + +from __future__ import annotations + +import sys +from pathlib import Path +from typing import Annotated + +import typer +from rich.table import Table + +from otdf_local.config.settings import get_settings +from otdf_local.utils.console import console, print_error, print_success +from otdf_local.xtest.config import XtestConfig, XtestInputs + +xtest_app = typer.Typer( + name="xtest", + help="Resolve, configure, and run xtest integration tests.", + no_args_is_help=True, +) + + +@xtest_app.command() +def resolve( + platform_ref: Annotated[ + str, + typer.Option( + "--platform-ref", help="Platform ref: branch, tag, SHA, 'latest', or 'lts'" + ), + ] = "main", + go_ref: Annotated[ + str, + typer.Option("--go-ref", help="Go/otdfctl ref"), + ] = "main", + js_ref: Annotated[ + str, + typer.Option("--js-ref", help="JS/web-sdk ref"), + ] = "main", + java_ref: Annotated[ + str, + typer.Option("--java-ref", help="Java SDK ref"), + ] = "main", + focus_sdk: Annotated[ + str, + typer.Option("--focus-sdk", help="SDK to focus on (go, js, java, all)"), + ] = "all", + otdfctl_source: Annotated[ + str, + typer.Option( + "--otdfctl-source", help="otdfctl source: auto, standalone, platform" + ), + ] = "auto", + output: Annotated[ + Path | None, + typer.Option("--output", "-o", help="Write config to file (default: stdout)"), + ] = None, +) -> None: + """Resolve SDK versions and generate an xtest configuration file. 
+ + Calls otdf-sdk-mgr to resolve version refs to SHAs, detects platform features, + and outputs a YAML config suitable for `otdf-local xtest run`. + """ + from otdf_local.xtest.resolve import resolve_all + + if focus_sdk not in ("all", "go", "js", "java"): + print_error( + f"Invalid focus-sdk: {focus_sdk}. Must be one of: all, go, js, java" + ) + raise typer.Exit(1) + + inputs = XtestInputs( + platform_ref=platform_ref, + go_ref=go_ref, + js_ref=js_ref, + java_ref=java_ref, + focus_sdk=focus_sdk, + otdfctl_source=otdfctl_source, + ) + + try: + settings = get_settings() + except (FileNotFoundError, Exception): + settings = None + config = resolve_all(inputs, settings) + + yaml_output = config.to_yaml() + + if output: + config.to_yaml_file(output) + print_success(f"Config written to {output}") + else: + print(yaml_output, file=sys.stdout) + + +@xtest_app.command() +def run( + config_file: Annotated[ + Path, + typer.Argument(help="Path to xtest config YAML file"), + ], + phase: Annotated[ + str | None, + typer.Option( + "--phase", + "-p", + help="Run only this phase (helpers, legacy, standard, abac)", + ), + ] = None, + skip_services: Annotated[ + bool, + typer.Option("--skip-services", help="Assume services are already running"), + ] = False, + skip_install: Annotated[ + bool, + typer.Option("--skip-install", help="Assume SDKs are already installed"), + ] = False, +) -> None: + """Run xtest integration tests from a configuration file. + + Installs SDKs, starts services, and runs test phases as defined in the config. 
+ + Example: + otdf-local xtest run xtest-config.yaml + otdf-local xtest run xtest-config.yaml --phase legacy --skip-services + """ + from otdf_local.xtest.runner import run_xtest + + if not config_file.exists(): + print_error(f"Config file not found: {config_file}") + raise typer.Exit(1) + + config = XtestConfig.from_yaml(config_file) + settings = get_settings() + + passed = run_xtest( + config=config, + settings=settings, + phase_name=phase, + skip_services=skip_services, + skip_install=skip_install, + ) + + if not passed: + raise typer.Exit(1) + + +@xtest_app.command() +def show( + config_file: Annotated[ + Path, + typer.Argument(help="Path to xtest config YAML file"), + ], +) -> None: + """Display a human-readable summary of an xtest configuration.""" + if not config_file.exists(): + print_error(f"Config file not found: {config_file}") + raise typer.Exit(1) + + config = XtestConfig.from_yaml(config_file) + + console.print(f"[bold]xtest config v{config.version}[/bold]") + console.print(f" Platform tag: {config.platform_tag}") + console.print(f" Encrypt SDK: {config.encrypt_sdk}") + console.print(f" Focus SDK: {config.inputs.focus_sdk}") + console.print() + + # Versions table + table = Table(title="Resolved Versions", show_header=True, header_style="bold") + table.add_column("SDK", width=10) + table.add_column("Tag", width=20) + table.add_column("SHA", width=10) + table.add_column("Type", width=10) + table.add_column("Error", width=30) + + for sdk, versions in config.resolved.items(): + for v in versions: + vtype = "head" if v.head else "release" if v.release else "?" 
+ table.add_row( + sdk, + v.tag, + v.sha[:7] if v.sha else "", + vtype, + v.err or "", + ) + + console.print(table) + console.print() + + # Features + console.print("[bold]Features[/bold]") + console.print(f" EC TDF: {config.features.ec_tdf}") + console.print(f" Key Management: {config.features.key_management}") + console.print(f" Multi-KAS: {config.features.multikas}") + console.print() + + # Phases + console.print("[bold]Test Phases[/bold]") + for phase in config.phases: + reqs = f" (requires: {', '.join(phase.requires)})" if phase.requires else "" + met = config.check_phase_requirements(phase) + status = "[green]ready[/green]" if met else "[yellow]skipped[/yellow]" + console.print(f" {status} {phase.name}: {', '.join(phase.test_files)}{reqs}") diff --git a/otdf-local/src/otdf_local/xtest/config.py b/otdf-local/src/otdf_local/xtest/config.py new file mode 100644 index 000000000..e38041872 --- /dev/null +++ b/otdf-local/src/otdf_local/xtest/config.py @@ -0,0 +1,236 @@ +"""Pydantic models for xtest configuration.""" + +from __future__ import annotations + +import io +from pathlib import Path +from typing import Any + +from pydantic import BaseModel, Field +from ruamel.yaml import YAML + +_yaml = YAML() +_yaml.default_flow_style = False +_yaml.width = 120 + + +class ResolvedVersion(BaseModel): + """A resolved SDK version, mirroring otdf-sdk-mgr ResolveResult.""" + + sdk: str + tag: str + sha: str = "" + alias: str = "" + head: bool = False + release: str = "" + source: str = "" + env: str = "" + err: str = "" + + +class TestPhase(BaseModel): + """A test phase definition.""" + + name: str + description: str = "" + test_files: list[str] = Field(default_factory=list) + pytest_args: list[str] = Field(default_factory=list) + requires: list[str] = Field(default_factory=list) + skip_on_dispatch: bool = False + env: dict[str, str] = Field(default_factory=dict) + + +class Features(BaseModel): + """Detected platform features relevant to test execution.""" + + ec_tdf: bool = 
True + key_management: bool = False + multikas: bool = True + + +class XtestInputs(BaseModel): + """Original input refs that were resolved.""" + + platform_ref: str = "main" + go_ref: str = "main" + js_ref: str = "main" + java_ref: str = "main" + focus_sdk: str = "all" + otdfctl_source: str = "auto" + + +class XtestConfig(BaseModel): + """Complete xtest configuration for a single test run.""" + + version: str = "1" + inputs: XtestInputs = Field(default_factory=XtestInputs) + resolved: dict[str, list[ResolvedVersion]] = Field(default_factory=dict) + platform_tag: str = "main" + encrypt_sdk: str = "go" + features: Features = Field(default_factory=Features) + phases: list[TestPhase] = Field(default_factory=lambda: list(DEFAULT_PHASES)) + + def to_yaml(self) -> str: + """Serialize to YAML string.""" + data = self._to_dict() + buf = io.StringIO() + _yaml.dump(data, buf) + return buf.getvalue() + + def to_yaml_file(self, path: Path) -> None: + """Write config to a YAML file.""" + data = self._to_dict() + with open(path, "w") as f: + _yaml.dump(data, f) + + def _to_dict(self) -> dict[str, Any]: + """Convert to a plain dict suitable for YAML serialization.""" + data: dict[str, Any] = { + "version": self.version, + "inputs": _strip_defaults(self.inputs.model_dump(), XtestInputs()), + "resolved": {}, + "platform-tag": self.platform_tag, + "encrypt-sdk": self.encrypt_sdk, + "features": _strip_defaults(self.features.model_dump(), Features()), + "phases": [], + } + for sdk, versions in self.resolved.items(): + data["resolved"][sdk] = [_strip_empty(v.model_dump()) for v in versions] + for phase in self.phases: + p: dict[str, Any] = {"name": phase.name} + if phase.description: + p["description"] = phase.description + if phase.test_files: + p["test-files"] = phase.test_files + if phase.pytest_args: + p["pytest-args"] = phase.pytest_args + if phase.requires: + p["requires"] = phase.requires + if phase.skip_on_dispatch: + p["skip-on-dispatch"] = True + if phase.env: + p["env"] = 
phase.env + data["phases"].append(p) + return data + + @classmethod + def from_yaml(cls, source: str | Path) -> XtestConfig: + """Parse config from a YAML string or file path.""" + if isinstance(source, Path): + with open(source) as f: + data = _yaml.load(f) + else: + data = _yaml.load(source) + return cls._from_dict(data) + + @classmethod + def _from_dict(cls, data: dict[str, Any]) -> XtestConfig: + """Build config from a parsed YAML dict.""" + inputs_data = data.get("inputs", {}) + inputs = XtestInputs( + platform_ref=inputs_data.get("platform-ref", "main"), + go_ref=inputs_data.get("go-ref", "main"), + js_ref=inputs_data.get("js-ref", "main"), + java_ref=inputs_data.get("java-ref", "main"), + focus_sdk=inputs_data.get("focus-sdk", "all"), + otdfctl_source=inputs_data.get("otdfctl-source", "auto"), + ) + + resolved: dict[str, list[ResolvedVersion]] = {} + for sdk, versions in data.get("resolved", {}).items(): + resolved[sdk] = [ResolvedVersion(**v) for v in versions] + + features_data = data.get("features", {}) + features = Features( + ec_tdf=features_data.get("ec-tdf", True), + key_management=features_data.get("key-management", False), + multikas=features_data.get("multikas", True), + ) + + phases = [] + for p in data.get("phases", []): + phases.append( + TestPhase( + name=p["name"], + description=p.get("description", ""), + test_files=p.get("test-files", []), + pytest_args=p.get("pytest-args", []), + requires=p.get("requires", []), + skip_on_dispatch=p.get("skip-on-dispatch", False), + env=p.get("env", {}), + ) + ) + + return cls( + version=data.get("version", "1"), + inputs=inputs, + resolved=resolved, + platform_tag=data.get("platform-tag", "main"), + encrypt_sdk=data.get("encrypt-sdk", "go"), + features=features, + phases=phases if phases else list(DEFAULT_PHASES), + ) + + def check_phase_requirements(self, phase: TestPhase) -> bool: + """Check if a phase's requirements are met by current features.""" + for req in phase.requires: + if req == "multikas" and 
not self.features.multikas: + return False + if req == "key-management" and not self.features.key_management: + return False + if req == "ec-tdf" and not self.features.ec_tdf: + return False + return True + + +def _strip_empty(d: dict[str, Any]) -> dict[str, Any]: + """Remove keys with empty/falsy values from a dict.""" + return {k: v for k, v in d.items() if v} + + +def _to_yaml_keys(d: dict[str, Any]) -> dict[str, Any]: + """Convert Python snake_case keys to YAML kebab-case keys.""" + return {k.replace("_", "-"): v for k, v in d.items()} + + +def _strip_defaults(d: dict[str, Any], defaults: BaseModel) -> dict[str, Any]: + """Remove keys that match the default model values, and convert to kebab-case.""" + default_dict = defaults.model_dump() + return _to_yaml_keys({k: v for k, v in d.items() if v != default_dict.get(k)}) + + +# Default test phases matching what xtest.yml runs +DEFAULT_PHASES: list[TestPhase] = [ + TestPhase( + name="helpers", + description="Validate xtest helper library", + test_files=["test_self.py", "test_audit_logs.py"], + skip_on_dispatch=True, + ), + TestPhase( + name="legacy", + description="Legacy decryption tests", + test_files=["test_legacy.py"], + pytest_args=["-n", "auto", "--dist", "worksteal"], + ), + TestPhase( + name="standard", + description="Standard TDF and policy tests", + test_files=["test_tdfs.py", "test_policytypes.py"], + pytest_args=["-n", "auto", "--dist", "loadscope"], + ), + TestPhase( + name="abac", + description="Attribute-based access control tests", + test_files=["test_abac.py"], + pytest_args=[ + "-n", + "auto", + "--dist", + "loadscope", + "--audit-log-dir", + "test-results/audit-logs", + ], + requires=["multikas"], + ), +] diff --git a/otdf-local/src/otdf_local/xtest/resolve.py b/otdf-local/src/otdf_local/xtest/resolve.py new file mode 100644 index 000000000..7eb6e1844 --- /dev/null +++ b/otdf-local/src/otdf_local/xtest/resolve.py @@ -0,0 +1,193 @@ +"""Version resolution via otdf-sdk-mgr subprocess calls.""" + 
+from __future__ import annotations + +import json +import subprocess +from pathlib import Path + +from otdf_local.config.features import PlatformFeatures +from otdf_local.config.settings import Settings +from otdf_local.utils.console import print_error, print_info, print_warning +from otdf_local.utils.yaml import get_nested, load_yaml +from otdf_local.xtest.config import ( + Features, + ResolvedVersion, + XtestConfig, + XtestInputs, +) + + +def _find_sdk_mgr_dir(settings: Settings | None = None) -> Path: + """Locate the otdf-sdk-mgr directory (sibling of otdf-local in the repo).""" + # Walk up from this file to find otdf-local root, then look for sibling + otdf_local_dir = Path(__file__).resolve().parent.parent.parent.parent + sdk_mgr = otdf_local_dir.parent / "otdf-sdk-mgr" + if sdk_mgr.is_dir(): + return sdk_mgr + # Try from xtest_root if settings available + if settings is not None: + sdk_mgr = settings.xtest_root.parent / "otdf-sdk-mgr" + if sdk_mgr.is_dir(): + return sdk_mgr + raise FileNotFoundError( + f"Could not find otdf-sdk-mgr directory. Checked: {otdf_local_dir.parent / 'otdf-sdk-mgr'}" + ) + + +def resolve_sdk_versions( + sdk: str, + refs: str, + sdk_mgr_dir: Path, + env_overrides: dict[str, str] | None = None, +) -> list[ResolvedVersion]: + """Call otdf-sdk-mgr versions resolve for a single SDK type. 
+ + Args: + sdk: SDK type (platform, go, js, java) + refs: Space-separated version refs (e.g., "main latest") + sdk_mgr_dir: Path to otdf-sdk-mgr project + env_overrides: Extra environment variables (e.g., OTDFCTL_SOURCE) + + Returns: + List of ResolvedVersion objects + """ + import os + + ref_args = refs.strip().split() + cmd = [ + "uv", + "run", + "--project", + str(sdk_mgr_dir), + "otdf-sdk-mgr", + "versions", + "resolve", + sdk, + *ref_args, + ] + + env = dict(os.environ) + if env_overrides: + env.update(env_overrides) + + result = subprocess.run( + cmd, + capture_output=True, + text=True, + cwd=str(sdk_mgr_dir), + env=env, + ) + + if result.returncode != 0: + return [ + ResolvedVersion( + sdk=sdk, + tag=refs, + err=result.stderr.strip() + or f"Process exited with code {result.returncode}", + ) + ] + + try: + data = json.loads(result.stdout) + except json.JSONDecodeError as e: + return [ResolvedVersion(sdk=sdk, tag=refs, err=f"Invalid JSON output: {e}")] + + return [ResolvedVersion(**item) for item in data] + + +def detect_features(settings: Settings) -> Features: + """Detect platform features from the local platform config and version.""" + features = Features() + + # Try to detect from platform version + try: + pf = PlatformFeatures.detect(settings.platform_dir) + features.key_management = pf.supports("key_management") + features.ec_tdf = pf.supports("ecwrap") + # multikas is supported for main or version >= 0.4.x + features.multikas = pf.semver >= (0, 4, 0) + except Exception: + pass + + # Also check config file for explicit settings + try: + config = load_yaml(settings.platform_config) + ec_enabled = get_nested(config, "services.kas.preview.ec_tdf_enabled") + if ec_enabled is not None: + features.ec_tdf = bool(ec_enabled) + km_enabled = get_nested(config, "services.kas.preview.key_management") + if km_enabled is not None: + features.key_management = bool(km_enabled) + except Exception: + pass + + return features + + +def resolve_all(inputs: XtestInputs, 
settings: Settings | None = None) -> XtestConfig: + """Resolve all SDK versions and detect features, returning a complete config. + + Args: + inputs: The version refs and options to resolve + settings: otdf-local settings (for feature detection and path finding). + Optional - if not provided, feature detection is skipped. + + Returns: + A fully populated XtestConfig + """ + sdk_mgr_dir = _find_sdk_mgr_dir(settings) + print_info(f"Using otdf-sdk-mgr at: {sdk_mgr_dir}") + + resolved: dict[str, list[ResolvedVersion]] = {} + has_errors = False + + # Resolve each SDK type + sdk_refs = { + "platform": inputs.platform_ref, + "go": inputs.go_ref, + "js": inputs.js_ref, + "java": inputs.java_ref, + } + + env_overrides: dict[str, str] = {} + if inputs.otdfctl_source == "platform": + env_overrides["OTDFCTL_SOURCE"] = "platform" + + for sdk, refs in sdk_refs.items(): + print_info(f"Resolving {sdk}: {refs}") + sdk_env = env_overrides if sdk == "go" else None + versions = resolve_sdk_versions(sdk, refs, sdk_mgr_dir, sdk_env) + resolved[sdk] = versions + + for v in versions: + if v.err: + print_error(f" Error resolving {sdk} {v.tag}: {v.err}") + has_errors = True + else: + head_marker = " (head)" if v.head else "" + print_info(f" {v.tag} -> {v.sha[:7]}{head_marker}") + + if has_errors: + print_warning("Some versions had errors; config may be incomplete") + + # Determine platform tag from resolved platform versions + platform_tags = [v.tag for v in resolved.get("platform", []) if not v.err] + platform_tag = platform_tags[0] if platform_tags else "main" + + # Detect features (only if platform dir is available) + features = Features() + if settings is not None: + try: + features = detect_features(settings) + except Exception: + pass + + return XtestConfig( + inputs=inputs, + resolved=resolved, + platform_tag=platform_tag, + encrypt_sdk=inputs.focus_sdk if inputs.focus_sdk != "all" else "go", + features=features, + ) diff --git a/otdf-local/src/otdf_local/xtest/runner.py 
b/otdf-local/src/otdf_local/xtest/runner.py new file mode 100644 index 000000000..ec17062a3 --- /dev/null +++ b/otdf-local/src/otdf_local/xtest/runner.py @@ -0,0 +1,316 @@ +"""xtest runner - installs SDKs, manages services, runs test phases.""" + +from __future__ import annotations + +import os +import subprocess +from pathlib import Path + +from otdf_local.config.settings import Settings +from otdf_local.utils.console import ( + console, + print_error, + print_info, + print_success, + print_warning, +) +from otdf_local.xtest.config import ResolvedVersion, TestPhase, XtestConfig +from otdf_local.xtest.resolve import _find_sdk_mgr_dir, detect_features + + +def install_sdks(config: XtestConfig, settings: Settings) -> bool: + """Install SDK CLIs based on resolved version info. + + For released versions, calls `otdf-sdk-mgr install artifact`. + For head versions, calls `otdf-sdk-mgr checkout` then `make`. + + Returns True if all installs succeeded. + """ + sdk_mgr_dir = _find_sdk_mgr_dir(settings) + sdk_base = settings.xtest_root / "sdk" + ok = True + + for sdk_type in ("go", "js", "java"): + versions = config.resolved.get(sdk_type, []) + for v in versions: + if v.err: + print_warning(f"Skipping {sdk_type} {v.tag}: has errors") + continue + + if v.head: + ok = _install_from_source(sdk_type, v, sdk_mgr_dir, sdk_base) and ok + elif v.release: + ok = _install_artifact(sdk_type, v, sdk_mgr_dir, sdk_base) and ok + else: + print_warning(f"Skipping {sdk_type} {v.tag}: neither head nor release") + + return ok + + +def _install_artifact( + sdk_type: str, + version: ResolvedVersion, + sdk_mgr_dir: Path, + sdk_base: Path, +) -> bool: + """Install a released SDK artifact via otdf-sdk-mgr.""" + print_info(f"Installing {sdk_type} {version.tag} from artifact...") + cmd = [ + "uv", + "run", + "--project", + str(sdk_mgr_dir), + "otdf-sdk-mgr", + "install", + "artifact", + "--sdk", + sdk_type, + "--version", + version.tag, + ] + if version.source: + cmd.extend(["--source", 
version.source]) + + result = subprocess.run(cmd, cwd=str(sdk_base / sdk_type)) + if result.returncode != 0: + print_error( + f"Failed to install {sdk_type} {version.tag} artifact, trying source..." + ) + return _install_from_source(sdk_type, version, sdk_mgr_dir, sdk_base) + + print_success(f"Installed {sdk_type} {version.tag}") + return True + + +def _install_from_source( + sdk_type: str, + version: ResolvedVersion, + sdk_mgr_dir: Path, + sdk_base: Path, +) -> bool: + """Checkout and build an SDK from source.""" + print_info(f"Building {sdk_type} {version.tag} from source...") + + # Checkout + cmd = [ + "uv", + "run", + "--project", + str(sdk_mgr_dir), + "otdf-sdk-mgr", + "checkout", + sdk_type, + version.tag, + ] + result = subprocess.run(cmd, cwd=str(sdk_base / sdk_type)) + if result.returncode != 0: + print_error(f"Failed to checkout {sdk_type} {version.tag}") + return False + + # Build + result = subprocess.run(["make"], cwd=str(sdk_base / sdk_type)) + if result.returncode != 0: + print_error(f"Failed to build {sdk_type} {version.tag}") + return False + + print_success(f"Built {sdk_type} {version.tag} from source") + return True + + +def run_phase( + phase: TestPhase, + config: XtestConfig, + settings: Settings, +) -> bool: + """Run a single test phase. + + Returns True if pytest exited successfully. 
+ """ + xtest_dir = settings.xtest_root + + # Build pytest command + cmd = ["uv", "run", "pytest"] + cmd.extend(phase.pytest_args) + cmd.extend(["-ra", "-v"]) + + # Add focus/encrypt SDK flags + cmd.extend(["--sdks-encrypt", config.encrypt_sdk]) + if config.inputs.focus_sdk != "all": + cmd.extend(["--focus", config.inputs.focus_sdk]) + + # Add HTML report + report_name = f"{phase.name}-{config.encrypt_sdk}-{config.platform_tag}" + cmd.extend( + [ + "--html", + f"test-results/{report_name}.html", + "--self-contained-html", + ] + ) + + # Add test files + cmd.extend(phase.test_files) + + # Build environment + env = _build_phase_env(config, settings, phase) + + print_info(f"Running phase: {phase.name}") + print_info(f" Command: {' '.join(cmd)}") + + result = subprocess.run(cmd, cwd=str(xtest_dir), env=env) + + if result.returncode == 0: + print_success(f"Phase {phase.name} passed") + return True + else: + print_error(f"Phase {phase.name} failed (exit code {result.returncode})") + return False + + +def _build_phase_env( + config: XtestConfig, + settings: Settings, + phase: TestPhase, +) -> dict[str, str]: + """Build environment variables for a test phase.""" + env = dict(os.environ) + + # Core variables + env["PLATFORM_TAG"] = config.platform_tag + env["PLATFORM_DIR"] = str(settings.platform_dir.resolve()) + env["PLATFORMURL"] = settings.platform_url + env["ENCRYPT_SDK"] = config.encrypt_sdk + env["FOCUS_SDK"] = config.inputs.focus_sdk + + # Schema file + schema_file = settings.platform_dir / "sdk" / "schema" / "manifest.schema.json" + if schema_file.exists(): + env["SCHEMA_FILE"] = str(schema_file.resolve()) + else: + # Fallback to xtest-local manifest.schema.json + local_schema = settings.xtest_root / "manifest.schema.json" + if local_schema.exists(): + env["SCHEMA_FILE"] = "manifest.schema.json" + + # Log files + platform_log = settings.logs_dir / "platform.log" + if platform_log.exists(): + env["PLATFORM_LOG_FILE"] = str(platform_log.resolve()) + + kas_env_mapping = 
{ + "alpha": "KAS_ALPHA_LOG_FILE", + "beta": "KAS_BETA_LOG_FILE", + "gamma": "KAS_GAMMA_LOG_FILE", + "delta": "KAS_DELTA_LOG_FILE", + "km1": "KAS_KM1_LOG_FILE", + "km2": "KAS_KM2_LOG_FILE", + } + for kas_name, env_var in kas_env_mapping.items(): + log_path = settings.get_kas_log_path(kas_name) + if log_path.exists(): + env[env_var] = str(log_path.resolve()) + + # Root key + from otdf_local.utils.yaml import get_nested, load_yaml + + try: + platform_config = load_yaml(settings.platform_config) + root_key = get_nested(platform_config, "services.kas.root_key") + if root_key: + env["OT_ROOT_KEY"] = root_key + except Exception: + pass + + # Phase-specific env overrides + env.update(phase.env) + + return env + + +def run_xtest( + config: XtestConfig, + settings: Settings, + phase_name: str | None = None, + skip_services: bool = False, + skip_install: bool = False, +) -> bool: + """Execute the full xtest lifecycle. + + Args: + config: Parsed xtest configuration + settings: otdf-local settings + phase_name: Run only this phase (None = all phases) + skip_services: Don't start/stop services + skip_install: Don't install SDKs + + Returns: + True if all phases passed + """ + # Ensure test-results directory exists + results_dir = settings.xtest_root / "test-results" + results_dir.mkdir(parents=True, exist_ok=True) + + # Step 1: Install SDKs + if not skip_install: + print_info("Installing SDK CLIs...") + if not install_sdks(config, settings): + print_warning("Some SDK installs failed; continuing with available SDKs") + + # Step 2: Start services + if not skip_services: + print_info("Starting services...") + from otdf_local.cli import up + + try: + up(services=None, no_provision=False) + except SystemExit as e: + if e.code != 0: + print_error("Failed to start services") + return False + + # Step 3: Re-detect features from running platform + try: + config.features = detect_features(settings) + print_info( + f"Features: ec-tdf={config.features.ec_tdf}, " + 
f"key-management={config.features.key_management}, " + f"multikas={config.features.multikas}" + ) + except Exception as e: + print_warning(f"Could not detect features: {e}") + + # Step 4: Run phases + phases = config.phases + if phase_name: + phases = [p for p in phases if p.name == phase_name] + if not phases: + valid = ", ".join(p.name for p in config.phases) + print_error(f"Unknown phase: {phase_name}. Valid phases: {valid}") + return False + + all_passed = True + results: list[tuple[str, bool, str]] = [] + + for phase in phases: + # Check requirements + if not config.check_phase_requirements(phase): + reason = f"unmet requirements: {', '.join(phase.requires)}" + print_warning(f"Skipping phase {phase.name}: {reason}") + results.append((phase.name, True, "skipped")) + continue + + passed = run_phase(phase, config, settings) + results.append((phase.name, passed, "passed" if passed else "FAILED")) + if not passed: + all_passed = False + + # Print summary + console.print() + console.print("[bold]Test Summary[/bold]") + for name, passed, status in results: + icon = "[green]PASS[/green]" if passed else "[red]FAIL[/red]" + if status == "skipped": + icon = "[yellow]SKIP[/yellow]" + console.print(f" {icon} {name}") + + return all_passed diff --git a/otdf-local/tests/test_xtest_config.py b/otdf-local/tests/test_xtest_config.py new file mode 100644 index 000000000..85677c126 --- /dev/null +++ b/otdf-local/tests/test_xtest_config.py @@ -0,0 +1,233 @@ +"""Tests for xtest config models: serialization, parsing, requirements.""" + +from __future__ import annotations + +from pathlib import Path + +from otdf_local.xtest.config import ( + DEFAULT_PHASES, + Features, + ResolvedVersion, + TestPhase, + XtestConfig, + XtestInputs, +) + +SAMPLE_CONFIG_YAML = """\ +version: '1' +inputs: + platform-ref: main + go-ref: v0.29.0 + focus-sdk: go +resolved: + platform: + - sdk: platform + tag: main + sha: abc1234567890 + alias: main + head: true + go: + - sdk: go + tag: v0.29.0 + sha: 
def5678901234 + alias: latest + release: v0.29.0 + js: + - sdk: js + tag: '0.9.0' + sha: aaa1111222233 + alias: latest + release: sdk/0.9.0 + java: + - sdk: java + tag: v0.12.0 + sha: bbb2222333344 + alias: latest + release: v0.12.0 +platform-tag: main +encrypt-sdk: go +features: + ec-tdf: true + multikas: true +phases: +- name: legacy + test-files: + - test_legacy.py + pytest-args: + - -n + - auto +- name: abac + test-files: + - test_abac.py + requires: + - multikas +""" + + +class TestXtestConfigRoundTrip: + """Config can be serialized to YAML and parsed back identically.""" + + def test_round_trip_from_objects(self): + config = XtestConfig( + inputs=XtestInputs(platform_ref="main", go_ref="v0.29.0", focus_sdk="go"), + resolved={ + "platform": [ + ResolvedVersion( + sdk="platform", tag="main", sha="abc123", head=True + ), + ], + "go": [ + ResolvedVersion( + sdk="go", tag="v0.29.0", sha="def456", release="v0.29.0" + ), + ], + }, + platform_tag="main", + encrypt_sdk="go", + features=Features(ec_tdf=True, key_management=False, multikas=True), + phases=[ + TestPhase(name="legacy", test_files=["test_legacy.py"]), + TestPhase( + name="abac", test_files=["test_abac.py"], requires=["multikas"] + ), + ], + ) + + yaml_str = config.to_yaml() + parsed = XtestConfig.from_yaml(yaml_str) + + assert parsed.version == config.version + assert parsed.platform_tag == config.platform_tag + assert parsed.encrypt_sdk == config.encrypt_sdk + assert parsed.inputs.focus_sdk == "go" + assert parsed.inputs.go_ref == "v0.29.0" + assert len(parsed.resolved["platform"]) == 1 + assert parsed.resolved["platform"][0].sha == "abc123" + assert parsed.resolved["go"][0].release == "v0.29.0" + assert parsed.features.ec_tdf is True + assert parsed.features.key_management is False + assert len(parsed.phases) == 2 + assert parsed.phases[1].requires == ["multikas"] + + def test_round_trip_file(self, tmp_path: Path): + config = XtestConfig( + resolved={ + "go": [ResolvedVersion(sdk="go", tag="main", 
sha="aaa111", head=True)] + }, + ) + out_file = tmp_path / "config.yaml" + config.to_yaml_file(out_file) + parsed = XtestConfig.from_yaml(out_file) + assert parsed.resolved["go"][0].sha == "aaa111" + + def test_parse_sample_yaml(self): + config = XtestConfig.from_yaml(SAMPLE_CONFIG_YAML) + assert config.platform_tag == "main" + assert config.encrypt_sdk == "go" + assert config.inputs.go_ref == "v0.29.0" + assert config.inputs.focus_sdk == "go" + assert len(config.resolved["platform"]) == 1 + assert config.resolved["platform"][0].head is True + assert config.resolved["go"][0].release == "v0.29.0" + assert config.resolved["js"][0].tag == "0.9.0" + assert config.features.ec_tdf is True + assert config.features.multikas is True + assert len(config.phases) == 2 + assert config.phases[0].name == "legacy" + assert config.phases[1].requires == ["multikas"] + + +class TestDefaultPhases: + """Default phase definitions are valid and complete.""" + + def test_default_phases_exist(self): + assert len(DEFAULT_PHASES) == 4 + + def test_default_phase_names(self): + names = [p.name for p in DEFAULT_PHASES] + assert names == ["helpers", "legacy", "standard", "abac"] + + def test_abac_requires_multikas(self): + abac = next(p for p in DEFAULT_PHASES if p.name == "abac") + assert "multikas" in abac.requires + + def test_helpers_skip_on_dispatch(self): + helpers = next(p for p in DEFAULT_PHASES if p.name == "helpers") + assert helpers.skip_on_dispatch is True + + def test_all_phases_have_test_files(self): + for phase in DEFAULT_PHASES: + assert len(phase.test_files) > 0, f"Phase {phase.name} has no test files" + + +class TestFeatureRequirements: + """Phase requirement checking works correctly.""" + + def test_met_requirements(self): + config = XtestConfig(features=Features(multikas=True, ec_tdf=True)) + phase = TestPhase( + name="abac", test_files=["test_abac.py"], requires=["multikas"] + ) + assert config.check_phase_requirements(phase) is True + + def test_unmet_requirements(self): + 
config = XtestConfig(features=Features(multikas=False)) + phase = TestPhase( + name="abac", test_files=["test_abac.py"], requires=["multikas"] + ) + assert config.check_phase_requirements(phase) is False + + def test_no_requirements(self): + config = XtestConfig(features=Features()) + phase = TestPhase(name="legacy", test_files=["test_legacy.py"]) + assert config.check_phase_requirements(phase) is True + + def test_key_management_requirement(self): + config = XtestConfig(features=Features(key_management=False)) + phase = TestPhase( + name="km", test_files=["test.py"], requires=["key-management"] + ) + assert config.check_phase_requirements(phase) is False + + config.features.key_management = True + assert config.check_phase_requirements(phase) is True + + def test_ec_tdf_requirement(self): + config = XtestConfig(features=Features(ec_tdf=False)) + phase = TestPhase(name="ec", test_files=["test.py"], requires=["ec-tdf"]) + assert config.check_phase_requirements(phase) is False + + +class TestStripDefaults: + """YAML output omits fields that match defaults for cleaner output.""" + + def test_default_inputs_stripped(self): + config = XtestConfig() + yaml_str = config.to_yaml() + # Default inputs should be stripped (empty dict or missing keys) + parsed = XtestConfig.from_yaml(yaml_str) + assert parsed.inputs.platform_ref == "main" + assert parsed.inputs.focus_sdk == "all" + + def test_non_default_inputs_preserved(self): + config = XtestConfig( + inputs=XtestInputs(focus_sdk="go", go_ref="v1.0.0"), + ) + yaml_str = config.to_yaml() + assert "focus-sdk: go" in yaml_str + assert "go-ref: v1.0.0" in yaml_str + + +class TestResolvedVersionErrors: + """Error versions are preserved through serialization.""" + + def test_error_version_round_trip(self): + config = XtestConfig( + resolved={ + "go": [ResolvedVersion(sdk="go", tag="bad-ref", err="Not found")], + }, + ) + yaml_str = config.to_yaml() + parsed = XtestConfig.from_yaml(yaml_str) + assert parsed.resolved["go"][0].err == 
"Not found" + assert parsed.resolved["go"][0].sha == "" diff --git a/otdf-sdk-mgr/README.md b/otdf-sdk-mgr/README.md index ee2a2a8b7..4b930eaae 100644 --- a/otdf-sdk-mgr/README.md +++ b/otdf-sdk-mgr/README.md @@ -56,7 +56,7 @@ otdf-sdk-mgr java-fixup ## How Release Installs Work -- **Go**: Writes a `.version` file; `cli.sh`/`otdfctl.sh` use `go run github.com/opentdf/otdfctl@{version}` (no local compilation needed, Go caches the binary) +- **Go**: Writes a `.version` file containing `module-path@version` (e.g., `github.com/opentdf/otdfctl@v0.24.0`); `cli.sh`/`otdfctl.sh` use `go run @` (no local compilation needed, Go caches the binary). The module path is `github.com/opentdf/platform/otdfctl` for platform-embedded releases or `github.com/opentdf/otdfctl` for standalone releases. - **JS**: Runs `npm install @opentdf/ctl@{version}` into the dist directory; `cli.sh` uses `npx` from local `node_modules/` - **Java**: Downloads `cmdline.jar` from GitHub Releases; `cli.sh` uses `java -jar cmdline.jar` diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli.py index 24148bdd7..62580ebc7 100644 --- a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli.py +++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli.py @@ -93,3 +93,39 @@ def java_fixup( from otdf_sdk_mgr.java_fixup import post_checkout_java_fixup post_checkout_java_fixup(base_dir) + + +@app.command("go-fixup") +def go_fixup_cmd( + platform_dir: Annotated[ + Path, + typer.Option("--platform-dir", help="Path to the platform checkout root"), + ], + heads: Annotated[ + Optional[str], + typer.Option( + "--heads", + help="JSON list of head version tags to process (e.g. '[\"main\"]')", + ), + ] = None, + base_dir: Annotated[ + Optional[Path], + typer.Argument(help="Base directory for Go source trees"), + ] = None, +) -> None: + """Bridge Go client go.mod to server shared modules for head builds. 
+ + Performs go mod edit -replace + go mod tidy for each head version, + pointing platform module imports at the local platform checkout. + Only needed for standalone otdfctl checkouts. + """ + import json as json_mod + + from otdf_sdk_mgr.go_fixup import go_fixup + + heads_list = json_mod.loads(heads) if heads else None + try: + go_fixup(platform_dir, heads=heads_list, base_dir=base_dir) + except (FileNotFoundError, subprocess.CalledProcessError) as e: + typer.echo(f"Error: {e}", err=True) + raise typer.Exit(1) from e diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_install.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_install.py index e3950d717..e62ae2464 100644 --- a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_install.py +++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_install.py @@ -74,12 +74,16 @@ def artifact( dist_name: Annotated[ Optional[str], typer.Option("--dist-name", help="Override dist directory name") ] = None, + source: Annotated[ + Optional[str], + typer.Option(help='Source repo for Go CLI (e.g., "platform" for monorepo)'), + ] = None, ) -> None: """Install a single SDK version (used by CI).""" from otdf_sdk_mgr.installers import InstallError, cmd_install try: - cmd_install(sdk, version, dist_name=dist_name) + cmd_install(sdk, version, dist_name=dist_name, source=source) except InstallError as e: typer.echo(f"Error: {e}", err=True) raise typer.Exit(1) diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_versions.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_versions.py index 19188b124..2dcf6e321 100644 --- a/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_versions.py +++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/cli_versions.py @@ -3,6 +3,7 @@ from __future__ import annotations import json +import os from typing import Annotated, Any, Optional import typer @@ -112,10 +113,20 @@ def resolve_versions( raise typer.Exit(2) infix = SDK_TAG_INFIXES.get(sdk) + # Allow overriding the Go SDK source via OTDFCTL_SOURCE env var + # (standalone otdfctl repo vs platform monorepo) + go_source = os.environ.get("OTDFCTL_SOURCE") 
if sdk == "go" else None + if go_source and go_source not in ("standalone", "platform"): + typer.echo( + f"Warning: unrecognized OTDFCTL_SOURCE={go_source!r}; expected 'platform' or 'standalone'", + err=True, + ) + go_source = None + results: list[ResolveResult] = [] shas: set[str] = set() for version in tags: - v = resolve(sdk, version, infix) + v = resolve(sdk, version, infix, go_source=go_source) if is_resolve_success(v): env = lookup_additional_options(sdk, v["tag"]) if env: diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/config.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/config.py index adf6c8b1f..1046a5ef8 100644 --- a/otdf-sdk-mgr/src/otdf_sdk_mgr/config.py +++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/config.py @@ -70,7 +70,11 @@ def get_sdk_dirs() -> dict[str, Path]: "java": "opentdf/java-sdk", } -GO_INSTALL_PREFIX = "go run github.com/opentdf/otdfctl" +GO_INSTALL_PREFIX_STANDALONE = "go run github.com/opentdf/otdfctl" +GO_INSTALL_PREFIX_PLATFORM = "go run github.com/opentdf/platform/otdfctl" + +GO_MODULE_PATH = "github.com/opentdf/otdfctl" +GO_MODULE_PATH_PLATFORM = "github.com/opentdf/platform/otdfctl" LTS_VERSIONS: dict[str, str] = { "go": "0.24.0", @@ -111,4 +115,46 @@ def get_sdk_dirs() -> dict[str, Path]: "platform": "service", } +# When resolving go versions from the platform repo, use "otdfctl" infix +# (tags are otdfctl/vX.Y.Z in the platform monorepo) +SDK_TAG_INFIXES_PLATFORM_GO = "otdfctl" + +_VALID_GO_SOURCES = {None, "standalone", "platform"} + + +def _validate_go_source(source: str | None) -> None: + """Raise ValueError if source is not a recognised Go source.""" + if source not in _VALID_GO_SOURCES: + raise ValueError(f"Invalid Go source {source!r}; expected one of {_VALID_GO_SOURCES}") + + +def go_git_url(source: str | None = None) -> str: + """Return the git URL for Go SDK resolution based on source. + + Args: + source: "platform" to use the platform monorepo, None/"standalone" for the + standalone otdfctl repo. 
+ """ + _validate_go_source(source) + if source == "platform": + return SDK_GIT_URLS["platform"] + return SDK_GIT_URLS["go"] + + +def go_tag_infix(source: str | None = None) -> str | None: + """Return the tag infix for Go SDK resolution based on source.""" + _validate_go_source(source) + if source == "platform": + return SDK_TAG_INFIXES_PLATFORM_GO + return None + + +def go_module_path(source: str | None = None) -> str: + """Return the Go module path based on source.""" + _validate_go_source(source) + if source == "platform": + return GO_MODULE_PATH_PLATFORM + return GO_MODULE_PATH + + ALL_SDKS = ["go", "js", "java"] diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/go_fixup.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/go_fixup.py new file mode 100644 index 000000000..4098361c2 --- /dev/null +++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/go_fixup.py @@ -0,0 +1,95 @@ +"""Post-checkout fixups for Go SDK (otdfctl) source trees. + +Bridges client go.mod to server shared modules for head builds where +client and server share unreleased code. Only applies to standalone +otdfctl checkouts — platform-source builds already have the modules. +""" + +from __future__ import annotations + +import subprocess +from pathlib import Path + +from otdf_sdk_mgr.config import get_sdk_dir + +# Platform modules that standalone otdfctl imports and that may need +# a local replace directive when testing against a head platform build. +PLATFORM_MODULES = [ + "lib/fixtures", + "lib/ocrypto", + "protocol/go", + "sdk", +] + + +def go_fixup( + platform_dir: Path, + heads: list[str] | None = None, + base_dir: Path | None = None, +) -> None: + """Replace go.mod references to point at local platform checkout. + + Args: + platform_dir: Absolute path to the platform checkout root + (containing lib/, protocol/, sdk/). + heads: JSON-decoded list of head version tags to process. + If None, all subdirectories under *base_dir* are processed. + base_dir: Directory containing per-version otdfctl source trees + (e.g. 
``xtest/sdk/go/src``). Defaults to ``get_sdk_dir() / "go" / "src"``. + """ + if base_dir is None: + base_dir = get_sdk_dir() / "go" / "src" + + if not base_dir.exists(): + print(f"Base directory {base_dir} does not exist, nothing to fix.") + return + + platform_dir = platform_dir.resolve() + if not platform_dir.is_dir(): + raise FileNotFoundError(f"Platform directory does not exist: {platform_dir}") + + dirs_to_process: list[Path] = [] + if heads: + for tag in heads: + d = base_dir / tag + if d.is_dir(): + dirs_to_process.append(d) + else: + print(f"Warning: head directory {d} does not exist, skipping.") + else: + for d in sorted(base_dir.iterdir()): + if d.is_dir() and not d.name.endswith(".git"): + dirs_to_process.append(d) + + if not dirs_to_process: + print("No directories to process.") + return + + for src_dir in dirs_to_process: + if not (src_dir / "go.mod").exists(): + print(f"No go.mod in {src_dir}, skipping.") + continue + + print(f"Applying go.mod replacements in {src_dir}...") + for module in PLATFORM_MODULES: + local_path = platform_dir / module + if not local_path.is_dir(): + print(f" Warning: {local_path} does not exist, skipping {module}") + continue + subprocess.run( + [ + "go", + "mod", + "edit", + "-replace", + f"github.com/opentdf/platform/{module}={local_path}", + ], + cwd=src_dir, + check=True, + ) + print(f" Replaced github.com/opentdf/platform/{module} -> {local_path}") + + print(f"Running go mod tidy in {src_dir}...") + subprocess.run(["go", "mod", "tidy"], cwd=src_dir, check=True) + + print("Go fixup complete.") diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/installers.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/installers.py index e7c22ae09..0822a063e 100644 --- a/otdf-sdk-mgr/src/otdf_sdk_mgr/installers.py +++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/installers.py @@ -11,9 +11,11 @@ from pathlib import Path from otdf_sdk_mgr.config import ( + GO_MODULE_PATH_PLATFORM, LTS_VERSIONS, get_sdk_dir, get_sdk_dirs, + go_module_path, ) from otdf_sdk_mgr.checkout import 
checkout_sdk_branch from otdf_sdk_mgr.registry import list_go_versions, list_java_github_releases, list_js_versions @@ -24,33 +26,48 @@ class InstallError(Exception): """Raised when SDK installation fails.""" -def install_go_release(version: str, dist_dir: Path) -> None: +def install_go_release(version: str, dist_dir: Path, source: str | None = None) -> None: """Install a Go CLI release by writing a .version file. The cli.sh and otdfctl.sh wrappers read .version and use - `go run github.com/opentdf/otdfctl@{version}` instead of a local binary. + `go run @{version}` instead of a local binary. + The .version file contains `module-path@version` + (e.g., `github.com/opentdf/otdfctl@v0.24.0`). + + Args: + version: Version string (e.g., "v0.24.0" or "otdfctl/v0.24.0"). + dist_dir: Target distribution directory. + source: "platform" to use the platform monorepo module path, + None or "standalone" for standalone. """ go_dir = get_sdk_dir() / "go" dist_dir.mkdir(parents=True, exist_ok=True) + # Strip tag infix (e.g., "otdfctl/v0.24.0" → "v0.24.0") + if "/" in version: + version = version.rsplit("/", 1)[-1] tag = normalize_version(version) - (dist_dir / ".version").write_text(f"{tag}\n") + module = go_module_path(source) + (dist_dir / ".version").write_text(f"{module}@{tag}\n") shutil.copy(go_dir / "cli.sh", dist_dir / "cli.sh") shutil.copy(go_dir / "otdfctl.sh", dist_dir / "otdfctl.sh") shutil.copy(go_dir / "opentdfctl.yaml", dist_dir / "opentdfctl.yaml") - print(f" Pre-warming Go cache for otdfctl@{tag}...") + print(f" Pre-warming Go cache for {module}@{tag}...") result = subprocess.run( - ["go", "install", f"github.com/opentdf/otdfctl@{tag}"], + ["go", "install", f"{module}@{tag}"], capture_output=True, text=True, ) if result.returncode != 0: - print( - f" Warning: go install pre-warm failed (will retry at runtime): {result.stderr.strip()}" - ) + msg = f"go install pre-warm failed: {result.stderr.strip()}" + if module == GO_MODULE_PATH_PLATFORM: + raise InstallError( + 
f"{msg}\nThe platform module path {module}@{tag} may not be published yet." + ) + print(f" Warning: {msg} (will retry at runtime)") print(f" Go release {tag} installed to {dist_dir}") -def install_js_release(version: str, dist_dir: Path) -> None: +def install_js_release(version: str, dist_dir: Path, **_kwargs: object) -> None: """Install a JS CLI release from npm registry.""" js_dir = get_sdk_dir() / "js" dist_dir.mkdir(parents=True, exist_ok=True) @@ -65,7 +82,7 @@ def install_js_release(version: str, dist_dir: Path) -> None: print(f" JS release {v} installed to {dist_dir}") -def install_java_release(version: str, dist_dir: Path) -> None: +def install_java_release(version: str, dist_dir: Path, **_kwargs: object) -> None: """Install a Java CLI release by downloading cmdline.jar from GitHub Releases. Raises InstallError if the artifact is not available or download fails, @@ -133,13 +150,15 @@ def install_java_release(version: str, dist_dir: Path) -> None: } -def install_release(sdk: str, version: str, dist_name: str | None = None) -> Path: +def install_release(sdk: str, version: str, dist_name: str | None = None, **kwargs: object) -> Path: """Install a released version of an SDK CLI. Args: sdk: One of "go", "js", "java" version: Version string (e.g., "v0.24.0" or "0.24.0") dist_name: Override the dist directory name (defaults to normalized version) + **kwargs: Extra arguments forwarded to the SDK installer + (e.g., source="platform" for Go). 
Returns: Path to the created dist directory @@ -157,7 +176,7 @@ def install_release(sdk: str, version: str, dist_name: str | None = None) -> Pat print(f" Dist directory already exists: {dist_dir} (skipping)") return dist_dir - INSTALLERS[sdk](version, dist_dir) + INSTALLERS[sdk](version, dist_dir, **kwargs) return dist_dir @@ -224,7 +243,9 @@ def cmd_release(specs: list[str]) -> None: install_release(sdk, version) -def cmd_install(sdk: str, version: str, dist_name: str | None = None) -> None: +def cmd_install( + sdk: str, version: str, dist_name: str | None = None, source: str | None = None +) -> None: """Install a single SDK version (used by CI action).""" print(f"Installing {sdk} {version}...") - install_release(sdk, version, dist_name=dist_name) + install_release(sdk, version, dist_name=dist_name, source=source) diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/registry.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/registry.py index 8f8dd34e5..fcd4f78c8 100644 --- a/otdf-sdk-mgr/src/otdf_sdk_mgr/registry.py +++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/registry.py @@ -12,11 +12,13 @@ from typing import Any from otdf_sdk_mgr.config import ( - GO_INSTALL_PREFIX, + GO_INSTALL_PREFIX_PLATFORM, + GO_INSTALL_PREFIX_STANDALONE, SDK_GITHUB_REPOS, SDK_GIT_URLS, SDK_MAVEN_COORDS, SDK_NPM_PACKAGES, + SDK_TAG_INFIXES_PLATFORM_GO, ) from otdf_sdk_mgr.semver import is_stable, parse_semver, semver_sort_key @@ -68,12 +70,15 @@ def fetch_text(url: str) -> str: def list_go_versions() -> list[dict[str, Any]]: - """List Go SDK versions from git tags.""" + """List Go SDK versions from git tags in both standalone and platform repos.""" + import git.exc from git import Git repo = Git() + seen: dict[str, dict[str, Any]] = {} + + # Standalone repo (opentdf/otdfctl): tags like v0.24.0 raw = repo.ls_remote(SDK_GIT_URLS["go"], tags=True) - results = [] for line in raw.strip().split("\n"): if not line: continue @@ -83,16 +88,52 @@ def list_go_versions() -> list[dict[str, Any]]: tag = ref.removeprefix("refs/tags/") if 
not parse_semver(tag): continue - version = tag - results.append( - { + seen[tag] = { + "sdk": "go", + "version": tag, + "source": "git-tag", + "install_method": f"{GO_INSTALL_PREFIX_STANDALONE}@{tag}", + "stable": is_stable(tag), + } + + # Platform repo (opentdf/platform): tags like otdfctl/v0.X.Y + infix = SDK_TAG_INFIXES_PLATFORM_GO + try: + raw = repo.ls_remote(SDK_GIT_URLS["platform"], tags=True) + for line in raw.strip().split("\n"): + if not line: + continue + _, ref = line.split("\t", 1) + if ref.endswith("^{}"): + continue + tag = ref.removeprefix("refs/tags/") + if not tag.startswith(f"{infix}/"): + continue + version = tag.removeprefix(f"{infix}/") + if not parse_semver(version): + continue + # Platform entries take precedence (canonical location post-migration); + # if the same version exists in both repos, the platform entry + # silently overwrites the standalone one. + if version in seen: + print( + f"Note: version {version} found in both standalone and platform repos; using platform source.", + file=sys.stderr, + ) + seen[version] = { "sdk": "go", "version": version, - "source": "git-tag", - "install_method": f"{GO_INSTALL_PREFIX}@{version}", + "source": "platform-git-tag", + "install_method": f"{GO_INSTALL_PREFIX_PLATFORM}@{tag}", "stable": is_stable(version), } + except git.exc.GitCommandError as e: + print( + f"::warning::Failed to query platform repo for go tags: {e}", + file=sys.stderr, ) + + results = list(seen.values()) results.sort(key=lambda r: semver_sort_key(r["version"])) return results diff --git a/otdf-sdk-mgr/src/otdf_sdk_mgr/resolve.py b/otdf-sdk-mgr/src/otdf_sdk_mgr/resolve.py index 6e4cd7ca1..e3f264d74 100644 --- a/otdf-sdk-mgr/src/otdf_sdk_mgr/resolve.py +++ b/otdf-sdk-mgr/src/otdf_sdk_mgr/resolve.py @@ -12,6 +12,8 @@ LTS_VERSIONS, SDK_GIT_URLS, SDK_NPM_PACKAGES, + go_git_url, + go_tag_infix, ) @@ -23,6 +25,7 @@ class ResolveSuccess(TypedDict): pr: NotRequired[str] release: NotRequired[str] sha: str + source: NotRequired[str] tag: 
str @@ -111,78 +114,119 @@ def lookup_additional_options(sdk: str, version: str) -> str | None: return None -def resolve(sdk: str, version: str, infix: str | None) -> ResolveResult: - """Resolve a version spec to a concrete SHA and tag.""" +def resolve( + sdk: str, + version: str, + infix: str | None, + go_source: str | None = None, +) -> ResolveResult: + """Resolve a version spec to a concrete SHA and tag. + + Args: + sdk: SDK identifier (go, js, java, platform). + version: Version spec (main, SHA, tag, latest, lts, etc.). + infix: Tag infix for monorepo tag resolution (e.g. "sdk" for JS). + go_source: For sdk=="go", override the git URL and infix. + "platform" resolves against the platform monorepo (otdfctl/ prefix tags). + None or "standalone" uses the standalone otdfctl repo (default). + """ + _go_platform = sdk == "go" and go_source == "platform" + + def _annotate(result: ResolveResult) -> ResolveResult: + """Add source field to successful results when resolving go from platform.""" + if _go_platform and is_resolve_success(result): + result["source"] = "platform" + return result + try: - sdk_url = SDK_GIT_URLS[sdk] + if _go_platform: + sdk_url = go_git_url("platform") + infix = go_tag_infix("platform") + else: + sdk_url = SDK_GIT_URLS[sdk] repo = Git() if version == "main" or version == "refs/heads/main": all_heads = [r.split("\t") for r in repo.ls_remote(sdk_url, heads=True).split("\n")] - sha, _ = [tag for tag in all_heads if "refs/heads/main" in tag][0] - return { - "sdk": sdk, - "alias": version, - "head": True, - "sha": sha, - "tag": "main", - } + try: + sha, _ = next(tag for tag in all_heads if "refs/heads/main" in tag) + except StopIteration: + return {"sdk": sdk, "alias": version, "err": f"main branch not found in {sdk_url}"} + return _annotate( + { + "sdk": sdk, + "alias": version, + "head": True, + "sha": sha, + "tag": "main", + } + ) if re.match(SHA_REGEX, version): ls_remote = [r.split("\t") for r in repo.ls_remote(sdk_url).split("\n")] 
matching_tags = [(sha, tag) for (sha, tag) in ls_remote if sha.startswith(version)] if not matching_tags: - return { - "sdk": sdk, - "alias": version[:7], - "sha": version, - "tag": version, - } + return _annotate( + { + "sdk": sdk, + "alias": version[:7], + "sha": version, + "tag": version, + } + ) if len(matching_tags) > 1: for sha, tag in matching_tags: if tag.startswith("refs/pull/"): pr_number = tag.split("/")[2] - return { - "sdk": sdk, - "alias": version, - "head": True, - "sha": sha, - "tag": f"pull-{pr_number}", - } + return _annotate( + { + "sdk": sdk, + "alias": version, + "head": True, + "sha": sha, + "tag": f"pull-{pr_number}", + } + ) for sha, tag in matching_tags: mq_match = re.match(MERGE_QUEUE_REGEX, tag) if mq_match: to_branch = mq_match.group("branch") pr_number = mq_match.group("pr_number") if to_branch and pr_number: - return { + return _annotate( + { + "sdk": sdk, + "alias": version, + "head": True, + "pr": pr_number, + "sha": sha, + "tag": f"mq-{to_branch}-{pr_number}", + } + ) + suffix = tag.split("refs/heads/gh-readonly-queue/")[-1] + flattag = "mq--" + suffix.replace("/", "--") + return _annotate( + { "sdk": sdk, "alias": version, "head": True, - "pr": pr_number, "sha": sha, - "tag": f"mq-{to_branch}-{pr_number}", + "tag": flattag, } - suffix = tag.split("refs/heads/gh-readonly-queue/")[-1] - flattag = "mq--" + suffix.replace("/", "--") - return { - "sdk": sdk, - "alias": version, - "head": True, - "sha": sha, - "tag": flattag, - } + ) head = False if tag.startswith("refs/heads/"): head = True tag = tag.split("refs/heads/")[-1] flattag = tag.replace("/", "--") - return { - "sdk": sdk, - "alias": version, - "head": head, - "sha": sha, - "tag": flattag, - } + return _annotate( + { + "sdk": sdk, + "alias": version, + "head": head, + "sha": sha, + "tag": flattag, + } + ) return { "sdk": sdk, @@ -197,12 +241,14 @@ def resolve(sdk: str, version: str, infix: str | None) -> ResolveResult: tag = tag.split("refs/tags/")[-1] if infix: tag = 
tag.split(f"{infix}/")[-1] - return { - "sdk": sdk, - "alias": version, - "sha": sha, - "tag": tag, - } + return _annotate( + { + "sdk": sdk, + "alias": version, + "sha": sha, + "tag": tag, + } + ) if version.startswith("refs/pull/"): merge_heads = [ @@ -216,14 +262,16 @@ def resolve(sdk: str, version: str, infix: str | None) -> ResolveResult: "err": f"pull request {pr_number} not found in {sdk_url}", } sha, _ = merge_heads[0] - return { - "sdk": sdk, - "alias": version, - "head": True, - "pr": pr_number, - "sha": sha, - "tag": f"pull-{pr_number}", - } + return _annotate( + { + "sdk": sdk, + "alias": version, + "head": True, + "pr": pr_number, + "sha": sha, + "tag": f"pull-{pr_number}", + } + ) remote_tags = [r.split("\t") for r in repo.ls_remote(sdk_url).split("\n")] all_listed_tags = [ @@ -238,13 +286,15 @@ def resolve(sdk: str, version: str, infix: str | None) -> ResolveResult: if version in all_listed_branches: sha = all_listed_branches[version] - return { - "sdk": sdk, - "alias": version, - "head": True, - "sha": sha, - "tag": version, - } + return _annotate( + { + "sdk": sdk, + "alias": version, + "head": True, + "sha": sha, + "tag": version, + } + ) if infix and version.startswith(f"{infix}/"): version = version.split(f"{infix}/")[-1] @@ -288,13 +338,15 @@ def resolve(sdk: str, version: str, infix: str | None) -> ResolveResult: if not matching_tags: # No versions with CLI found, fall back to building latest from source sha, tag = stable_tags[-1] - return { - "sdk": sdk, - "alias": alias, - "head": True, # Mark as head to trigger source checkout - "sha": sha, - "tag": tag, - } + return _annotate( + { + "sdk": sdk, + "alias": alias, + "head": True, # Mark as head to trigger source checkout + "sha": sha, + "tag": tag, + } + ) else: matching_tags = stable_tags[-1:] else: @@ -319,13 +371,15 @@ def resolve(sdk: str, version: str, infix: str | None) -> ResolveResult: release = tag if infix: release = f"{infix}/{release}" - return { - "sdk": sdk, - "alias": alias, - 
"release": release, - "sha": sha, - "tag": tag, - } + return _annotate( + { + "sdk": sdk, + "alias": alias, + "release": release, + "sha": sha, + "tag": tag, + } + ) except Exception as e: return { "sdk": sdk, diff --git a/xtest/sdk/go/cli.sh b/xtest/sdk/go/cli.sh index 172aa5b50..f97b20c1a 100755 --- a/xtest/sdk/go/cli.sh +++ b/xtest/sdk/go/cli.sh @@ -23,8 +23,14 @@ SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd) cmd=("$SCRIPT_DIR"/otdfctl) if [ ! -f "$SCRIPT_DIR"/otdfctl ]; then if [ -f "$SCRIPT_DIR/.version" ]; then - OTDFCTL_VERSION=$(tr -d '[:space:]' <"$SCRIPT_DIR/.version") - cmd=(go run "github.com/opentdf/otdfctl@${OTDFCTL_VERSION}") + VERSION_SPEC=$(tr -d '[:space:]' <"$SCRIPT_DIR/.version") + if [[ "$VERSION_SPEC" == *@* ]]; then + # New format: module-path@version + cmd=(go run "$VERSION_SPEC") + else + # Legacy format: bare version tag, default to standalone module + cmd=(go run "github.com/opentdf/otdfctl@${VERSION_SPEC}") + fi else cmd=(go run "github.com/opentdf/otdfctl@latest") fi diff --git a/xtest/sdk/go/otdfctl.sh b/xtest/sdk/go/otdfctl.sh index 17fbb0c84..9ba55f054 100755 --- a/xtest/sdk/go/otdfctl.sh +++ b/xtest/sdk/go/otdfctl.sh @@ -18,8 +18,14 @@ source "$XTEST_DIR/test.env" cmd=("$SCRIPT_DIR"/otdfctl) if [ ! 
-f "$SCRIPT_DIR"/otdfctl ]; then if [ -f "$SCRIPT_DIR/.version" ]; then - OTDFCTL_VERSION=$(tr -d '[:space:]' <"$SCRIPT_DIR/.version") - cmd=(go run "github.com/opentdf/otdfctl@${OTDFCTL_VERSION}") + VERSION_SPEC=$(tr -d '[:space:]' <"$SCRIPT_DIR/.version") + if [[ "$VERSION_SPEC" == *@* ]]; then + # New format: module-path@version + cmd=(go run "$VERSION_SPEC") + else + # Legacy format: bare version tag, default to standalone module + cmd=(go run "github.com/opentdf/otdfctl@${VERSION_SPEC}") + fi else cmd=(go run "github.com/opentdf/otdfctl@latest") fi diff --git a/xtest/setup-cli-tool/action.yaml b/xtest/setup-cli-tool/action.yaml index 9e110ef4f..5bf6eff62 100644 --- a/xtest/setup-cli-tool/action.yaml +++ b/xtest/setup-cli-tool/action.yaml @@ -2,12 +2,23 @@ name: configure-sdks description: Check out and build one or more SDK and its CLI tool for use by xtest inputs: path: - description: The path to checkout the the SDK source code to; concatenated with branch or tag name. + description: The path to check out the SDK source code to; concatenated with branch or tag name. sdk: description: The SDK to configure; one of go, java, js version-info: description: JSON-encoded output of otdf-sdk-mgr versions resolve required: true + platform-otdfctl-dir: + description: >- + Absolute path to platform's otdfctl/ directory. When set and sdk is "go", + head versions whose SHA matches platform-otdfctl-sha are symlinked from + here instead of checked out separately. Used in both explicit platform + mode (source: "platform" in resolved version) and auto-detect mode. + platform-otdfctl-sha: + description: >- + SHA of the commit that platform-otdfctl-dir was checked out at. + Used to decide which Go head version (if any) can reuse the existing + platform checkout vs needing a fresh one. 
outputs: version-a: description: "Object containing tag, sha, and name of a version checked out" @@ -28,24 +39,27 @@ outputs: runs: using: composite steps: - - name: identify repo url + - name: identify repo urls shell: bash run: | - case "${{ inputs.sdk }}" in + case "$INPUT_SDK" in "go") - echo "sdk_repo=opentdf/otdfctl" >> $GITHUB_ENV + echo "STANDALONE_REPO=opentdf/otdfctl" >> "$GITHUB_ENV" ;; "java") - echo "sdk_repo=opentdf/java-sdk" >> $GITHUB_ENV + echo "STANDALONE_REPO=opentdf/java-sdk" >> "$GITHUB_ENV" ;; "js") - echo "sdk_repo=opentdf/web-sdk" >> $GITHUB_ENV + echo "STANDALONE_REPO=opentdf/web-sdk" >> "$GITHUB_ENV" ;; *) - echo "Invalid SDK specified: ${{ inputs.sdk }}" >> $GITHUB_STEP_SUMMARY + echo "Invalid SDK specified: $INPUT_SDK" >> "$GITHUB_STEP_SUMMARY" exit 1 ;; esac + echo "PLATFORM_REPO=opentdf/platform" >> "$GITHUB_ENV" + env: + INPUT_SDK: ${{ inputs.sdk }} - name: resolve versions id: resolve @@ -88,9 +102,12 @@ runs: echo "Installing ${{ inputs.sdk }} $tag from registry (release: $release)" # Sanitize tag for use as an env var name (replace non-alphanumeric/underscore with _) tag_sanitized="${tag//[^a-zA-Z0-9_]/_}" + source=$(echo "$row" | jq -r '.source // empty') + source_args=() + [[ -n "$source" ]] && source_args=(--source "$source") if ! uv run --project "$SDK_MGR_DIR" otdf-sdk-mgr install artifact \ --sdk "${{ inputs.sdk }}" --version "$release" \ - --dist-name "$tag"; then + --dist-name "$tag" "${source_args[@]}"; then echo " Warning: Artifact installation failed for ${{ inputs.sdk }} $tag" echo " Will fall back to building from source" echo "BUILD_FROM_SOURCE_${tag_sanitized}=true" >> "$GITHUB_ENV" @@ -104,9 +121,10 @@ runs: id: check-source shell: bash run: | - # Determine which version slots need source checkout. - # A slot needs checkout if it is a head version OR if artifact install failed - # (BUILD_FROM_SOURCE_ was set in the previous step). + # Determine which version slots need source checkout and from which repo. 
+ # A slot needs checkout if it is a head version OR if artifact install failed. + # Platform-source versions may reuse the existing platform-otdfctl-dir + # (when their SHA matches) or need a fresh opentdf/platform checkout. for slot in a b c d; do case "$slot" in a) row=$(echo "${version_info}" | jq -rc '.[0] // empty') ;; @@ -115,65 +133,200 @@ runs: d) row=$(echo "${version_info}" | jq -rc '.[3] // empty') ;; esac if [[ -z "$row" ]]; then - echo "needs-source-${slot}=false" >> "$GITHUB_OUTPUT" + echo "needs-checkout-${slot}=false" >> "$GITHUB_OUTPUT" + echo "is-platform-${slot}=false" >> "$GITHUB_OUTPUT" + echo "use-existing-platform-dir-${slot}=false" >> "$GITHUB_OUTPUT" + echo "checkout-repo-${slot}=" >> "$GITHUB_OUTPUT" + echo "checkout-path-${slot}=" >> "$GITHUB_OUTPUT" continue fi + tag=$(echo "$row" | jq -r '.tag') head=$(echo "$row" | jq -r '.head // false') + sha=$(echo "$row" | jq -r '.sha') + source=$(echo "$row" | jq -r '.source // empty') tag_sanitized="${tag//[^a-zA-Z0-9_]/_}" build_from_source_var="BUILD_FROM_SOURCE_${tag_sanitized}" + needs_source=false if [[ "$head" == "true" || "${!build_from_source_var}" == "true" ]]; then - echo "needs-source-${slot}=true" >> "$GITHUB_OUTPUT" - else - echo "needs-source-${slot}=false" >> "$GITHUB_OUTPUT" + needs_source=true + fi + + is_platform=false + use_existing=false + checkout_repo="$STANDALONE_REPO" + checkout_path="${INPUT_PATH}/${INPUT_SDK}/src/${tag}" + + if [[ "$source" == "platform" ]]; then + # Explicit platform mode: resolver tagged this version as from + # opentdf/platform. Use per-version SHA to decide checkout strategy. 
+ is_platform=true + if [[ "$needs_source" == "true" && -n "$PLATFORM_OTDFCTL_DIR" \ + && -n "$PLATFORM_OTDFCTL_SHA" && "$sha" == "$PLATFORM_OTDFCTL_SHA" ]]; then + # SHA matches existing platform checkout — reuse via symlink + use_existing=true + needs_source=false + elif [[ "$needs_source" == "true" ]]; then + # Different SHA — need a fresh platform checkout + checkout_repo="$PLATFORM_REPO" + checkout_path="${INPUT_PATH}/${INPUT_SDK}/platform-src/${tag}" + fi + elif [[ "$INPUT_SDK" == "go" && -n "$PLATFORM_OTDFCTL_DIR" && "$needs_source" == "true" ]]; then + # Auto-detect fallback: resolver used standalone repo but the + # test job detected otdfctl in the platform checkout. + # NOTE: SHA comparison across repos is not meaningful (the standalone + # repo and platform repo have different commit histories), so we + # cannot safely reuse the platform checkout here. Fall through to + # a standalone checkout. To use the platform source, set + # otdfctl-source=platform explicitly. + echo "::notice::Go version ${tag} resolved from standalone repo; platform checkout available but cannot auto-reuse (different repo). Set otdfctl-source=platform to use the platform source." fi + + echo "needs-checkout-${slot}=${needs_source}" >> "$GITHUB_OUTPUT" + echo "is-platform-${slot}=${is_platform}" >> "$GITHUB_OUTPUT" + echo "use-existing-platform-dir-${slot}=${use_existing}" >> "$GITHUB_OUTPUT" + echo "checkout-repo-${slot}=${checkout_repo}" >> "$GITHUB_OUTPUT" + echo "checkout-path-${slot}=${checkout_path}" >> "$GITHUB_OUTPUT" done env: version_info: ${{ inputs.version-info }} + INPUT_PATH: ${{ inputs.path }} + INPUT_SDK: ${{ inputs.sdk }} + PLATFORM_OTDFCTL_DIR: ${{ inputs.platform-otdfctl-dir }} + PLATFORM_OTDFCTL_SHA: ${{ inputs.platform-otdfctl-sha }} + + - name: symlink existing platform checkout + shell: bash + run: | + # For versions that can reuse the already-checked-out platform dir, + # symlink platform-otdfctl-dir into sdk/go/src/{tag}. 
+ for slot in a b c d; do + case "$slot" in + a) version_json="$VERSION_A" ; use_existing="$USE_EXISTING_A" ;; + b) version_json="$VERSION_B" ; use_existing="$USE_EXISTING_B" ;; + c) version_json="$VERSION_C" ; use_existing="$USE_EXISTING_C" ;; + d) version_json="$VERSION_D" ; use_existing="$USE_EXISTING_D" ;; + esac + if [[ -z "$version_json" || "$use_existing" != "true" ]]; then + continue + fi + tag=$(echo "$version_json" | jq -r '.tag') + src_dir="${INPUT_PATH}/${INPUT_SDK}/src/${tag}" + echo "Symlinking existing platform otdfctl to ${src_dir}" + mkdir -p "$(dirname "$src_dir")" + ln -sfn "$PLATFORM_OTDFCTL_DIR" "$src_dir" + if [ ! -e "$src_dir" ]; then + echo "::error::Symlink target does not exist: $PLATFORM_OTDFCTL_DIR" + exit 1 + fi + done + env: + PLATFORM_OTDFCTL_DIR: ${{ inputs.platform-otdfctl-dir }} + INPUT_PATH: ${{ inputs.path }} + INPUT_SDK: ${{ inputs.sdk }} + VERSION_A: ${{ steps.resolve.outputs.version-a }} + VERSION_B: ${{ steps.resolve.outputs.version-b }} + VERSION_C: ${{ steps.resolve.outputs.version-c }} + VERSION_D: ${{ steps.resolve.outputs.version-d }} + USE_EXISTING_A: ${{ steps.check-source.outputs.use-existing-platform-dir-a }} + USE_EXISTING_B: ${{ steps.check-source.outputs.use-existing-platform-dir-b }} + USE_EXISTING_C: ${{ steps.check-source.outputs.use-existing-platform-dir-c }} + USE_EXISTING_D: ${{ steps.check-source.outputs.use-existing-platform-dir-d }} - name: checkout version a uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 if: >- steps.resolve.outputs.version-a != '' - && steps.check-source.outputs.needs-source-a == 'true' + && steps.check-source.outputs.needs-checkout-a == 'true' with: - path: ${{ inputs.path }}/${{ inputs.sdk }}/src/${{ fromJson(steps.resolve.outputs.version-a).tag }} + path: ${{ steps.check-source.outputs.checkout-path-a }} persist-credentials: false - ref: ${{ fromJson(steps.resolve.outputs.version-a).sha }} - repository: ${{ env.sdk_repo }} + ref: ${{ 
fromJson(steps.resolve.outputs.version-a || '{"sha":""}').sha }} + repository: ${{ steps.check-source.outputs.checkout-repo-a }} - name: checkout version b uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 if: >- steps.resolve.outputs.version-b != '' - && steps.check-source.outputs.needs-source-b == 'true' + && steps.check-source.outputs.needs-checkout-b == 'true' with: - path: ${{ inputs.path }}/${{ inputs.sdk }}/src/${{ fromJson(steps.resolve.outputs.version-b).tag }} + path: ${{ steps.check-source.outputs.checkout-path-b }} persist-credentials: false - ref: ${{ fromJson(steps.resolve.outputs.version-b).sha }} - repository: ${{ env.sdk_repo }} + ref: ${{ fromJson(steps.resolve.outputs.version-b || '{"sha":""}').sha }} + repository: ${{ steps.check-source.outputs.checkout-repo-b }} - name: checkout version c uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 if: >- steps.resolve.outputs.version-c != '' - && steps.check-source.outputs.needs-source-c == 'true' + && steps.check-source.outputs.needs-checkout-c == 'true' with: - path: ${{ inputs.path }}/${{ inputs.sdk }}/src/${{ fromJson(steps.resolve.outputs.version-c).tag }} + path: ${{ steps.check-source.outputs.checkout-path-c }} persist-credentials: false - ref: ${{ fromJson(steps.resolve.outputs.version-c).sha }} - repository: ${{ env.sdk_repo }} + ref: ${{ fromJson(steps.resolve.outputs.version-c || '{"sha":""}').sha }} + repository: ${{ steps.check-source.outputs.checkout-repo-c }} - name: checkout version d uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 if: >- steps.resolve.outputs.version-d != '' - && steps.check-source.outputs.needs-source-d == 'true' + && steps.check-source.outputs.needs-checkout-d == 'true' with: - path: ${{ inputs.path }}/${{ inputs.sdk }}/src/${{ fromJson(steps.resolve.outputs.version-d).tag }} + path: ${{ steps.check-source.outputs.checkout-path-d }} persist-credentials: false - ref: ${{ 
fromJson(steps.resolve.outputs.version-d).sha }} - repository: ${{ env.sdk_repo }} + ref: ${{ fromJson(steps.resolve.outputs.version-d || '{"sha":""}').sha }} + repository: ${{ steps.check-source.outputs.checkout-repo-d }} + + - name: symlink freshly checked-out platform sources + shell: bash + run: | + # For platform-source versions that were checked out (not reusing the + # existing dir), symlink {platform-src}/{tag}/otdfctl → src/{tag} so + # the Makefile discovers them. + for slot in a b c d; do + case "$slot" in + a) is_platform="$IS_PLATFORM_A" ; needs_checkout="$NEEDS_CHECKOUT_A" + checkout_path="$CHECKOUT_PATH_A" ; version_json="$VERSION_A" ;; + b) is_platform="$IS_PLATFORM_B" ; needs_checkout="$NEEDS_CHECKOUT_B" + checkout_path="$CHECKOUT_PATH_B" ; version_json="$VERSION_B" ;; + c) is_platform="$IS_PLATFORM_C" ; needs_checkout="$NEEDS_CHECKOUT_C" + checkout_path="$CHECKOUT_PATH_C" ; version_json="$VERSION_C" ;; + d) is_platform="$IS_PLATFORM_D" ; needs_checkout="$NEEDS_CHECKOUT_D" + checkout_path="$CHECKOUT_PATH_D" ; version_json="$VERSION_D" ;; + esac + if [[ "$is_platform" != "true" || "$needs_checkout" != "true" || -z "$version_json" ]]; then + continue + fi + tag=$(echo "$version_json" | jq -r '.tag') + src_dir="${INPUT_PATH}/${INPUT_SDK}/src/${tag}" + otdfctl_dir="${checkout_path}/otdfctl" + echo "Symlinking freshly checked-out platform otdfctl ${otdfctl_dir} → ${src_dir}" + mkdir -p "$(dirname "$src_dir")" + ln -sfn "$otdfctl_dir" "$src_dir" + if [ ! 
-e "$src_dir" ]; then + echo "::error::Symlink target does not exist: ${otdfctl_dir} (does the platform repo contain an otdfctl/ directory?)" + exit 1 + fi + done + env: + INPUT_PATH: ${{ inputs.path }} + INPUT_SDK: ${{ inputs.sdk }} + VERSION_A: ${{ steps.resolve.outputs.version-a }} + VERSION_B: ${{ steps.resolve.outputs.version-b }} + VERSION_C: ${{ steps.resolve.outputs.version-c }} + VERSION_D: ${{ steps.resolve.outputs.version-d }} + IS_PLATFORM_A: ${{ steps.check-source.outputs.is-platform-a }} + IS_PLATFORM_B: ${{ steps.check-source.outputs.is-platform-b }} + IS_PLATFORM_C: ${{ steps.check-source.outputs.is-platform-c }} + IS_PLATFORM_D: ${{ steps.check-source.outputs.is-platform-d }} + NEEDS_CHECKOUT_A: ${{ steps.check-source.outputs.needs-checkout-a }} + NEEDS_CHECKOUT_B: ${{ steps.check-source.outputs.needs-checkout-b }} + NEEDS_CHECKOUT_C: ${{ steps.check-source.outputs.needs-checkout-c }} + NEEDS_CHECKOUT_D: ${{ steps.check-source.outputs.needs-checkout-d }} + CHECKOUT_PATH_A: ${{ steps.check-source.outputs.checkout-path-a }} + CHECKOUT_PATH_B: ${{ steps.check-source.outputs.checkout-path-b }} + CHECKOUT_PATH_C: ${{ steps.check-source.outputs.checkout-path-c }} + CHECKOUT_PATH_D: ${{ steps.check-source.outputs.checkout-path-d }} - name: post checkout cleanups if: inputs.sdk == 'java' diff --git a/xtest/setup-kas-instances/action.yaml b/xtest/setup-kas-instances/action.yaml new file mode 100644 index 000000000..f5e420172 --- /dev/null +++ b/xtest/setup-kas-instances/action.yaml @@ -0,0 +1,86 @@ +name: setup-kas-instances +description: >- + Start additional KAS instances for multi-KAS / ABAC tests. + Uses otdf-local ci start-kas to start all 6 KAS instances + (alpha, beta, gamma, delta, km1, km2) and expose their log file paths. 
+ +inputs: + platform-working-dir: + description: Path to the platform checkout directory + required: true + root-key: + description: Root key for KAS instances + required: true + key-management-supported: + description: Enable key management on km1/km2 instances (true/false) + required: false + default: "false" + ec-tdf-enabled: + description: Enable EC TDF support + required: false + default: "true" + log-type: + description: Log format type + required: false + default: "json" + tests-path: + description: Path to the tests repo checkout + required: false + default: "otdftests" + +outputs: + kas-alpha-log-file: + description: Path to KAS alpha log file + value: ${{ steps.start-kas.outputs.kas-alpha-log-file }} + kas-beta-log-file: + description: Path to KAS beta log file + value: ${{ steps.start-kas.outputs.kas-beta-log-file }} + kas-gamma-log-file: + description: Path to KAS gamma log file + value: ${{ steps.start-kas.outputs.kas-gamma-log-file }} + kas-delta-log-file: + description: Path to KAS delta log file + value: ${{ steps.start-kas.outputs.kas-delta-log-file }} + kas-km1-log-file: + description: Path to KAS km1 log file + value: ${{ steps.start-kas.outputs.kas-km1-log-file }} + kas-km2-log-file: + description: Path to KAS km2 log file + value: ${{ steps.start-kas.outputs.kas-km2-log-file }} + +runs: + using: composite + steps: + - name: Start KAS instances + id: start-kas + shell: bash + run: | + KM_FLAG="" + if [[ "$KEY_MANAGEMENT" == "true" ]]; then + KM_FLAG="--key-management" + else + KM_FLAG="--no-key-management" + fi + + EC_FLAG="" + if [[ "$EC_TDF_ENABLED" == "true" ]]; then + EC_FLAG="--ec-tdf-enabled" + else + EC_FLAG="--no-ec-tdf" + fi + + OTDF_LOCAL_DIR="$(cd "$TESTS_PATH" && pwd)/otdf-local" + + uv run --project "$OTDF_LOCAL_DIR" otdf-local ci start-kas \ + --platform-dir "$(pwd)/$PLATFORM_DIR" \ + --root-key "$ROOT_KEY" \ + $EC_FLAG \ + $KM_FLAG \ + --log-type "$LOG_TYPE" + env: + PLATFORM_DIR: ${{ inputs.platform-working-dir }} + ROOT_KEY: 
${{ inputs.root-key }} + KEY_MANAGEMENT: ${{ inputs.key-management-supported }} + EC_TDF_ENABLED: ${{ inputs.ec-tdf-enabled }} + LOG_TYPE: ${{ inputs.log-type }} + TESTS_PATH: ${{ inputs.tests-path }} diff --git a/xtest/setup-sdk-clients/action.yaml b/xtest/setup-sdk-clients/action.yaml new file mode 100644 index 000000000..35be35b41 --- /dev/null +++ b/xtest/setup-sdk-clients/action.yaml @@ -0,0 +1,159 @@ +name: setup-sdk-clients +description: >- + Configure, cache, patch, and build an SDK CLI for xtest. Wraps setup-cli-tool + and adds SDK-appropriate caching, go.mod/java .env fixups, and make builds. + Each invocation handles one SDK (go, java, or js). + +inputs: + sdk: + description: "SDK to set up: go, java, or js" + required: true + version-info: + description: JSON-encoded output of otdf-sdk-mgr versions resolve for this SDK + required: true + tests-path: + description: Path to the tests repo checkout + required: false + default: "otdftests" + platform-working-dir: + description: >- + Platform checkout directory. Used for go-fixup (bridging client go.mod + to server shared modules) and detecting platform-embedded otdfctl. 
+ required: false + platform-heads: + description: JSON list of platform tags that are heads (from resolve-versions) + required: false + default: "[]" + platform-tag: + description: Current matrix platform-tag value + required: false + platform-tag-to-sha: + description: JSON object mapping platform tags to SHAs + required: false + default: "{}" + otdfctl-source: + description: "Resolved otdfctl source: platform or standalone" + required: false + default: "standalone" + otdfctl-dir: + description: Absolute path to platform's otdfctl directory + required: false + otdfctl-sha: + description: SHA of the platform otdfctl checkout + required: false + focus-sdk: + description: "SDK focus filter: all, go, java, or js" + required: false + default: "all" + buf-token: + description: BUF token for Java proto compilation + required: false + pat-opentdf: + description: PAT for buf HTTPS password (Java SDK build) + required: false + +outputs: + heads: + description: JSON list of head tags for this SDK + value: ${{ steps.configure.outputs.heads }} + +runs: + using: composite + steps: + # Step 1: Configure SDK via setup-cli-tool (checkout/install) + - name: Configure ${{ inputs.sdk }} + id: configure + uses: ./otdftests/xtest/setup-cli-tool + with: + path: ${{ inputs.tests-path }}/xtest/sdk + sdk: ${{ inputs.sdk }} + version-info: ${{ inputs.version-info }} + platform-otdfctl-dir: ${{ inputs.otdfctl-dir }} + platform-otdfctl-sha: ${{ inputs.otdfctl-sha }} + + # Step 2: SDK-appropriate dependency caching + - name: Cache npm + if: inputs.sdk == 'js' && fromJson(steps.configure.outputs.heads)[0] != null + uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2 + with: + path: ~/.npm + key: npm-${{ runner.os }}-${{ hashFiles(format('{0}/xtest/sdk/js/src/**/package-lock.json', inputs.tests-path)) }} + restore-keys: | + npm-${{ runner.os }}- + + - name: Cache Go modules + if: inputs.sdk == 'go' && fromJson(steps.configure.outputs.heads)[0] != null + uses: 
actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2 + with: + path: | + ~/.cache/go-build + ~/go/pkg/mod + key: go-${{ runner.os }}-${{ hashFiles(format('{0}/xtest/sdk/go/src/*/go.sum', inputs.tests-path)) }} + restore-keys: | + go-${{ runner.os }}- + + - name: Cache Maven repository + if: inputs.sdk == 'java' && fromJson(steps.configure.outputs.heads)[0] != null + uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2 + with: + path: ~/.m2/repository + key: maven-${{ runner.os }}-${{ hashFiles(format('{0}/xtest/sdk/java/src/**/pom.xml', inputs.tests-path)) }} + restore-keys: | + maven-${{ runner.os }}- + + # Step 3: SDK-specific fixups + + # Go: Bridge client go.mod to server shared modules (standalone otdfctl only) + - name: Go fixup - replace go.mod packages + if: >- + inputs.sdk == 'go' + && steps.configure.outputs.heads != '[]' + && inputs.otdfctl-source != 'platform' + && inputs.focus-sdk == 'go' + && contains(fromJSON(inputs.platform-heads), inputs.platform-tag) + && inputs.platform-working-dir != '' + shell: bash + run: | + SDK_MGR_DIR="$(cd "$TESTS_PATH" && pwd)/otdf-sdk-mgr" + PLATFORM_DIR_ABS="$(pwd)/${PLATFORM_WORKING_DIR}" + BASE_DIR="$(pwd)/${TESTS_PATH}/xtest/sdk/go/src" + HEADS='${{ steps.configure.outputs.heads }}' + uv run --project "$SDK_MGR_DIR" otdf-sdk-mgr go-fixup \ + --platform-dir "$PLATFORM_DIR_ABS" \ + --heads "$HEADS" \ + "$BASE_DIR" + env: + PLATFORM_WORKING_DIR: ${{ inputs.platform-working-dir }} + TESTS_PATH: ${{ inputs.tests-path }} + + # Java: Create .env files with PLATFORM_BRANCH for head versions + - name: Java fixup - create platform branch .env files + if: >- + inputs.sdk == 'java' + && steps.configure.outputs.heads != '[]' + && (inputs.focus-sdk == 'go' || inputs.focus-sdk == 'java') + && contains(fromJSON(inputs.platform-heads), inputs.platform-tag) + shell: bash + run: | + for row in $(echo "$VERSION_INFO" | jq -c '.[]'); do + TAG=$(echo "$row" | jq -r '.tag') + HEAD=$(echo "$row" | jq -r '.head') 
+ if [[ "$HEAD" == "true" ]]; then + echo "Creating .env file for tag: [$TAG]; pointing to platform ref [$PLATFORM_REF]" + echo "PLATFORM_BRANCH=$PLATFORM_REF" > "${TESTS_PATH}/xtest/sdk/java/${TAG}.env" + fi + done + env: + VERSION_INFO: ${{ inputs.version-info }} + PLATFORM_REF: ${{ fromJSON(inputs.platform-tag-to-sha)[inputs.platform-tag] }} + TESTS_PATH: ${{ inputs.tests-path }} + + # Step 4: Build the SDK CLI + - name: Build ${{ inputs.sdk }} CLI + if: fromJson(steps.configure.outputs.heads)[0] != null + shell: bash + run: make + working-directory: ${{ inputs.tests-path }}/xtest/sdk/${{ inputs.sdk }} + env: + BUF_INPUT_HTTPS_USERNAME: ${{ inputs.sdk == 'java' && 'opentdf-bot' || '' }} + BUF_INPUT_HTTPS_PASSWORD: ${{ inputs.sdk == 'java' && inputs.pat-opentdf || '' }} diff --git a/xtest/setup-test-environment/action.yaml b/xtest/setup-test-environment/action.yaml new file mode 100644 index 000000000..0f8ce86f3 --- /dev/null +++ b/xtest/setup-test-environment/action.yaml @@ -0,0 +1,140 @@ +name: setup-test-environment +description: >- + Detect platform capabilities, extract configuration, and prepare the test + environment. Consolidates otdfctl detection, platform version lookup, key + management support, root key extraction, multikas support check, and test + dependency installation. 
+ +inputs: + platform-working-dir: + description: Platform checkout directory (from start-up-with-containers output) + required: true + platform-tag: + description: Platform version tag under test (matrix value) + required: true + otdfctl-source-input: + description: "User's otdfctl-source preference: auto, standalone, or platform" + required: false + default: "auto" + tests-path: + description: Path to the tests repo checkout + required: false + default: "otdftests" + +outputs: + otdfctl-source: + description: "Resolved otdfctl source: platform or standalone" + value: ${{ steps.detect-otdfctl.outputs.otdfctl-source }} + otdfctl-dir: + description: Absolute path to otdfctl directory (if source=platform) + value: ${{ steps.detect-otdfctl.outputs.otdfctl-dir }} + otdfctl-sha: + description: SHA of the platform checkout (if source=platform) + value: ${{ steps.detect-otdfctl.outputs.otdfctl-sha }} + platform-version: + description: Detected platform version string + value: ${{ steps.platform-version.outputs.version }} + key-management-supported: + description: Whether the platform supports key management (true/false) + value: ${{ steps.km-check.outputs.supported }} + root-key: + description: Root key for KAS instances + value: ${{ steps.km-check.outputs.root_key }} + multikas-supported: + description: Whether multi-KAS is supported (true/false) + value: ${{ steps.multikas.outputs.supported }} + +runs: + using: composite + steps: + - name: Detect platform-embedded otdfctl + id: detect-otdfctl + shell: bash + run: | + if [[ "$OTDFCTL_SOURCE_INPUT" == "auto" || -z "$OTDFCTL_SOURCE_INPUT" ]]; then + if [ -d "$PLATFORM_DIR/otdfctl" ] && [ -f "$PLATFORM_DIR/otdfctl/go.mod" ]; then + echo "otdfctl found in platform checkout at $PLATFORM_DIR/otdfctl" + echo "otdfctl-source=platform" >> "$GITHUB_OUTPUT" + echo "otdfctl-dir=$(pwd)/$PLATFORM_DIR/otdfctl" >> "$GITHUB_OUTPUT" + echo "otdfctl-sha=$(git -C "$PLATFORM_DIR" rev-parse HEAD)" >> "$GITHUB_OUTPUT" + else + echo "otdfctl 
not found in platform checkout; using standalone repo" + echo "otdfctl-source=standalone" >> "$GITHUB_OUTPUT" + fi + elif [[ "$OTDFCTL_SOURCE_INPUT" == "platform" ]]; then + if [ -z "$PLATFORM_DIR" ] || [ ! -d "$PLATFORM_DIR/otdfctl" ] || [ ! -f "$PLATFORM_DIR/otdfctl/go.mod" ]; then + echo "::error::otdfctl-source=platform requested but ${PLATFORM_DIR:-}/otdfctl does not exist or lacks go.mod" + exit 1 + fi + echo "otdfctl-source=platform" >> "$GITHUB_OUTPUT" + echo "otdfctl-dir=$(pwd)/$PLATFORM_DIR/otdfctl" >> "$GITHUB_OUTPUT" + echo "otdfctl-sha=$(git -C "$PLATFORM_DIR" rev-parse HEAD)" >> "$GITHUB_OUTPUT" + else + echo "otdfctl-source=standalone" >> "$GITHUB_OUTPUT" + fi + env: + OTDFCTL_SOURCE_INPUT: ${{ inputs.otdfctl-source-input }} + PLATFORM_DIR: ${{ inputs.platform-working-dir }} + + - name: Lookup platform version + id: platform-version + shell: bash + run: | + if ! go run ./service version; then + echo "Error: Unable to get platform version; defaulting to tag: [$PLATFORM_TAG]" + echo "version=$PLATFORM_TAG" >> "$GITHUB_OUTPUT" + exit + fi + PLATFORM_VERSION=$(go run ./service version 2>&1) + echo "version=$PLATFORM_VERSION" >> "$GITHUB_OUTPUT" + working-directory: ${{ inputs.platform-working-dir }} + env: + PLATFORM_TAG: ${{ inputs.platform-tag }} + + - name: Check key management support and prepare root key + id: km-check + shell: bash + run: | + OT_CONFIG_FILE="$(pwd)/opentdf.yaml" + km_value=$(yq e '.services.kas.preview.key_management' "$OT_CONFIG_FILE" 2>/dev/null || echo "null") + case "$km_value" in + true|false) + echo "supported=true" >> "$GITHUB_OUTPUT" + ;; + *) + echo "supported=false" >> "$GITHUB_OUTPUT" + ;; + esac + existing_root_key=$(yq e '.services.kas.root_key' "$OT_CONFIG_FILE" 2>/dev/null || echo "") + if [ -n "$existing_root_key" ] && [ "$existing_root_key" != "null" ]; then + echo "Using existing root key from config" + echo "root_key=$existing_root_key" >> "$GITHUB_OUTPUT" + else + echo "Generating a new root key for additional 
KAS" + gen_root_key=$(openssl rand -hex 32) + echo "root_key=$gen_root_key" >> "$GITHUB_OUTPUT" + fi + working-directory: ${{ inputs.platform-working-dir }} + + - name: Check multikas support + id: multikas + shell: bash + run: | + if [[ $PLATFORM_TAG == main ]]; then + echo "Main supports multikas" + echo "supported=true" >> "$GITHUB_OUTPUT" + elif awk -F. '{ if ($1 > 0 || ($1 == 0 && $2 > 4)) exit 0; else exit 1; }' <<< "${PLATFORM_VERSION#v}"; then + echo "Selected version [$PLATFORM_VERSION] supports multikas" + echo "supported=true" >> "$GITHUB_OUTPUT" + else + echo "At tag [$PLATFORM_TAG], [$PLATFORM_VERSION] probably does not support multikas" + echo "supported=false" >> "$GITHUB_OUTPUT" + fi + env: + PLATFORM_TAG: ${{ inputs.platform-tag }} + PLATFORM_VERSION: ${{ steps.platform-version.outputs.version }} + + - name: Install test dependencies + shell: bash + run: uv sync + working-directory: ${{ inputs.tests-path }}/xtest