From 973fe05729fc80832ed60a3f64c90a0121d1ee16 Mon Sep 17 00:00:00 2001 From: Dan Ferguson Date: Tue, 17 Mar 2026 10:23:15 -0400 Subject: [PATCH] feat: Add SageMaker HyperPod EKS deployment target and MCP server integration --- generators/app/index.js | 49 +- generators/app/lib/cli-handler.js | 13 +- generators/app/lib/config-manager.js | 129 +- generators/app/lib/mcp-client.js | 8 +- generators/app/lib/prompt-runner.js | 215 +- generators/app/lib/prompts.js | 155 +- generators/app/lib/template-manager.js | 65 +- generators/app/templates/MIGRATION.md | 14 +- generators/app/templates/PROJECT_README.md | 8 +- generators/app/templates/TEMPLATE_SYSTEM.md | 2 +- .../app/templates/deploy/submit_build.sh | 2 +- generators/app/templates/do/README.md | 18 +- generators/app/templates/do/clean | 174 +- generators/app/templates/do/config | 42 +- generators/app/templates/do/deploy | 465 ++- generators/app/templates/do/export | 77 + generators/app/templates/do/logs | 120 +- generators/app/templates/do/test | 237 +- .../app/templates/hyperpod/configmap.yaml | 24 + .../app/templates/hyperpod/deployment.yaml | 71 + generators/app/templates/hyperpod/pvc.yaml | 42 + .../app/templates/hyperpod/service.yaml | 17 + servers/hyperpod-cluster-picker/LICENSE | 202 ++ servers/hyperpod-cluster-picker/index.js | 346 +++ .../hyperpod-cluster-picker/package-lock.json | 2515 +++++++++++++++++ servers/hyperpod-cluster-picker/package.json | 16 + servers/hyperpod-cluster-picker/test.js | 104 + test/generator.test.js | 8 +- ...ibility-managed-inference.property.test.js | 452 +++ .../codebuild.test.js | 23 +- ...o-clean-deployment-target.property.test.js | 370 +++ ...-config-deployment-target.property.test.js | 316 +++ ...-deploy-deployment-target.property.test.js | 382 +++ .../do-framework-simple.test.js | 4 +- ...do-logs-deployment-target.property.test.js | 238 ++ ...do-test-deployment-target.property.test.js | 403 +++ .../docker-build-validation.test.js | 8 +- .../error-handling.test.js | 3 +- 
...perpod-directory-presence.property.test.js | 256 ++ ...manifest-port-consistency.property.test.js | 451 +++ .../prompt-runner-ordering.property.test.js | 415 +++ ...prompts-deployment-target.property.test.js | 517 ++++ .../property-test-utils.js | 20 +- .../registry-integration.test.js | 14 +- ...unified-script-generation.property.test.js | 539 ++++ test/property/config-manager.property.test.js | 6 +- .../hyperpod-cluster-picker.property.test.js | 280 ++ ...nager-hyperpod-validation.property.test.js | 349 +++ test/template-manager.test.js | 16 +- test/unit.test.js | 12 +- test/unit/config-manager-unit.test.js | 39 +- test/unit/mcp-client.test.js | 7 +- 52 files changed, 9849 insertions(+), 409 deletions(-) create mode 100755 generators/app/templates/do/export create mode 100644 generators/app/templates/hyperpod/configmap.yaml create mode 100644 generators/app/templates/hyperpod/deployment.yaml create mode 100644 generators/app/templates/hyperpod/pvc.yaml create mode 100644 generators/app/templates/hyperpod/service.yaml create mode 100644 servers/hyperpod-cluster-picker/LICENSE create mode 100644 servers/hyperpod-cluster-picker/index.js create mode 100644 servers/hyperpod-cluster-picker/package-lock.json create mode 100644 servers/hyperpod-cluster-picker/package.json create mode 100644 servers/hyperpod-cluster-picker/test.js create mode 100644 test/input-parsing-and-generation/backward-compatibility-managed-inference.property.test.js create mode 100644 test/input-parsing-and-generation/do-clean-deployment-target.property.test.js create mode 100644 test/input-parsing-and-generation/do-config-deployment-target.property.test.js create mode 100644 test/input-parsing-and-generation/do-deploy-deployment-target.property.test.js create mode 100644 test/input-parsing-and-generation/do-logs-deployment-target.property.test.js create mode 100644 test/input-parsing-and-generation/do-test-deployment-target.property.test.js create mode 100644 
test/input-parsing-and-generation/hyperpod-directory-presence.property.test.js create mode 100644 test/input-parsing-and-generation/k8s-manifest-port-consistency.property.test.js create mode 100644 test/input-parsing-and-generation/prompt-runner-ordering.property.test.js create mode 100644 test/input-parsing-and-generation/prompts-deployment-target.property.test.js create mode 100644 test/input-parsing-and-generation/unified-script-generation.property.test.js create mode 100644 test/property/hyperpod-cluster-picker.property.test.js create mode 100644 test/property/template-manager-hyperpod-validation.property.test.js diff --git a/generators/app/index.js b/generators/app/index.js index 38f1021..c1b89f0 100644 --- a/generators/app/index.js +++ b/generators/app/index.js @@ -101,9 +101,9 @@ export default class extends Generator { }); // Infrastructure options - this.option('deploy-target', { + this.option('build-target', { type: String, - description: 'Deployment target (sagemaker, codebuild)' + description: 'Build target (codebuild)' }); this.option('codebuild-compute-type', { @@ -126,6 +126,31 @@ export default class extends Generator { description: 'AWS IAM role ARN for SageMaker execution' }); + this.option('deployment-target', { + type: String, + description: 'Deployment target (managed-inference, hyperpod-eks)' + }); + + this.option('hyperpod-cluster', { + type: String, + description: 'HyperPod EKS cluster name' + }); + + this.option('hyperpod-namespace', { + type: String, + description: 'Kubernetes namespace for HyperPod deployment (default: default)' + }); + + this.option('hyperpod-replicas', { + type: Number, + description: 'Number of replicas for HyperPod deployment (default: 1)' + }); + + this.option('fsx-volume-handle', { + type: String, + description: 'FSx for Lustre volume handle for HyperPod storage' + }); + this.option('hf-token', { type: String, description: 'HuggingFace authentication token (or "$HF_TOKEN" to use environment variable)' @@ -361,11 
+386,21 @@ export default class extends Generator { orderedEnvVars }; + // Build ignore patterns for conditional directory exclusion + const ignorePatterns = []; + + // Exclude HyperPod K8s manifests when not deploying to HyperPod + if (this.answers.deploymentTarget !== 'hyperpod-eks') { + ignorePatterns.push('**/hyperpod/**'); + } + // Copy all templates, processing EJS variables this.fs.copyTpl( this.templatePath('**/*'), this.destinationPath(), - templateVars + templateVars, + {}, + { globOptions: { ignore: ignorePatterns, dot: true } } ); // Remove files that don't belong in this deployment configuration @@ -723,7 +758,13 @@ export default class extends Generator { includeSampleModel: false, includeTesting: true, testTypes: [], - buildTimestamp: new Date().toISOString() + buildTimestamp: new Date().toISOString(), + buildTarget: 'codebuild', + deploymentTarget: 'managed-inference', + hyperPodCluster: null, + hyperPodNamespace: 'default', + hyperPodReplicas: 1, + fsxVolumeHandle: null }; // Apply defaults for any missing fields diff --git a/generators/app/lib/cli-handler.js b/generators/app/lib/cli-handler.js index fd0540c..adec152 100644 --- a/generators/app/lib/cli-handler.js +++ b/generators/app/lib/cli-handler.js @@ -169,12 +169,17 @@ CLI OPTIONS: --include-sample Include sample model code --include-testing Include test suite --test-types= Comma-separated test types (local-model-cli,local-model-server,hosted-model-endpoint) - --deploy-target= Deployment target (sagemaker|codebuild) + --build-target= Build target (codebuild) --codebuild-compute-type= CodeBuild compute type (BUILD_GENERAL1_SMALL|BUILD_GENERAL1_MEDIUM|BUILD_GENERAL1_LARGE) --codebuild-project-name= CodeBuild project name --instance-type= SageMaker instance type (e.g., ml.m5.large, ml.g5.xlarge) --region= AWS region --role-arn= AWS IAM role ARN for SageMaker execution + --deployment-target= Deployment target (managed-inference|hyperpod-eks) + --hyperpod-cluster= HyperPod EKS cluster name + 
--hyperpod-namespace= Kubernetes namespace for HyperPod (default: default) + --hyperpod-replicas= Number of replicas for HyperPod (default: 1) + --fsx-volume-handle= FSx for Lustre volume handle for HyperPod storage --hf-token= HuggingFace token (or "$HF_TOKEN" for env var) VALIDATION OPTIONS: @@ -211,7 +216,7 @@ REGISTRY SYSTEM: ENVIRONMENT VARIABLES: ML_INSTANCE_TYPE Instance type - ML_DEPLOY_TARGET Deployment target + ML_BUILD_TARGET Build target ML_CODEBUILD_COMPUTE_TYPE CodeBuild compute type AWS_REGION AWS region AWS_ROLE AWS IAM role ARN @@ -460,7 +465,7 @@ yo ml-container-creator my-codebuild-project \\ --framework=sklearn \\ --model-server=flask \\ --model-format=pkl \\ - --deploy-target=codebuild \\ + --build-target=codebuild \\ --codebuild-compute-type=BUILD_GENERAL1_MEDIUM \\ --codebuild-project-name=my-build-project \\ --skip-prompts @@ -494,7 +499,7 @@ yo ml-container-creator \\ 'includeSampleModel': false, 'includeTesting': true, 'testTypes': ['local-model-cli', 'local-model-server', 'hosted-model-endpoint'], - 'deployTarget': 'codebuild', + 'buildTarget': 'codebuild', 'codebuildComputeType': 'BUILD_GENERAL1_MEDIUM', 'codebuildProjectName': 'my-build-project', 'instanceType': 'ml.m5.large', diff --git a/generators/app/lib/config-manager.js b/generators/app/lib/config-manager.js index 3b73a0e..3f488eb 100644 --- a/generators/app/lib/config-manager.js +++ b/generators/app/lib/config-manager.js @@ -17,8 +17,14 @@ import fs from 'fs'; import path from 'path'; +import { fileURLToPath } from 'node:url'; import { McpClient } from './mcp-client.js'; +// Resolve the generator project root (three levels up from generators/app/lib/) +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); +const GENERATOR_ROOT = path.resolve(__dirname, '..', '..', '..'); + /** * Configuration error for invalid configuration values */ @@ -122,6 +128,21 @@ export default class ConfigManager { } }); + // Derive framework and modelServer from 
deploymentConfig if present. + // In prompted mode the PromptRunner does this split, but in --skip-prompts + // mode we need to do it here so the values are available for downstream logic. + if (finalConfig.deploymentConfig) { + const parts = finalConfig.deploymentConfig.split('-'); + const derivedFramework = parts[0]; + const derivedModelServer = parts.slice(1).join('-'); + if (!finalConfig.framework || finalConfig.framework === null) { + finalConfig.framework = derivedFramework; + } + if (!finalConfig.modelServer || finalConfig.modelServer === null) { + finalConfig.modelServer = derivedModelServer; + } + } + // When skipping prompts, provide reasonable defaults for missing required parameters if (this.skipPrompts) { Object.entries(this.parameterMatrix).forEach(([param, config]) => { @@ -202,8 +223,8 @@ export default class ConfigManager { finalConfig.destinationDir = `./${finalConfig.projectName}`; } - // Generate CodeBuild project name if deployTarget is codebuild - if (finalConfig.deployTarget === 'codebuild' && !finalConfig.codebuildProjectName) { + // Generate CodeBuild project name if buildTarget is codebuild + if ((finalConfig.buildTarget === 'codebuild' || finalConfig.deployTarget === 'codebuild') && !finalConfig.codebuildProjectName) { finalConfig.codebuildProjectName = this._generateCodeBuildProjectName( finalConfig.projectName, finalConfig.framework @@ -343,6 +364,7 @@ export default class ConfigManager { awsRegion: { cliOption: 'region', envVar: 'AWS_REGION', + ambientEnvVar: true, // AWS_REGION is commonly set in shells; treat as default, not explicit override configFile: true, packageJson: true, mcp: true, @@ -406,9 +428,9 @@ export default class ConfigManager { default: '.', valueSpace: 'bounded' }, - deployTarget: { - cliOption: 'deploy-target', - envVar: 'ML_DEPLOY_TARGET', + buildTarget: { + cliOption: 'build-target', + envVar: 'ML_BUILD_TARGET', configFile: true, packageJson: false, mcp: false, @@ -449,6 +471,61 @@ export default class 
ConfigManager { required: false, default: null, valueSpace: 'bounded' + }, + deploymentTarget: { + cliOption: 'deployment-target', + envVar: 'ML_DEPLOYMENT_TARGET', + configFile: true, + packageJson: false, + mcp: false, + promptable: true, + required: true, + default: 'managed-inference', + valueSpace: 'bounded' + }, + hyperPodCluster: { + cliOption: 'hyperpod-cluster', + envVar: null, + configFile: true, + packageJson: false, + mcp: true, + promptable: true, + required: false, + default: null, + valueSpace: 'unbounded' + }, + hyperPodNamespace: { + cliOption: 'hyperpod-namespace', + envVar: null, + configFile: true, + packageJson: false, + mcp: false, + promptable: true, + required: false, + default: 'default', + valueSpace: 'bounded' + }, + hyperPodReplicas: { + cliOption: 'hyperpod-replicas', + envVar: null, + configFile: true, + packageJson: false, + mcp: false, + promptable: true, + required: false, + default: 1, + valueSpace: 'bounded' + }, + fsxVolumeHandle: { + cliOption: 'fsx-volume-handle', + envVar: null, + configFile: true, + packageJson: false, + mcp: false, + promptable: true, + required: false, + default: null, + valueSpace: 'bounded' } }; } @@ -550,7 +627,7 @@ export default class ConfigManager { */ async _loadCustomConfigFile() { try { - const configPath = this.generator.destinationPath('config/mcp.json'); + const configPath = path.join(GENERATOR_ROOT, 'config', 'mcp.json'); if (fs.existsSync(configPath)) { const config = JSON.parse(fs.readFileSync(configPath, 'utf8')); this._mergeConfig(config); @@ -626,19 +703,22 @@ export default class ConfigManager { const envMapping = {}; Object.entries(this.parameterMatrix).forEach(([param, config]) => { if (config.envVar) { - envMapping[config.envVar] = param; + envMapping[config.envVar] = { param, ambient: config.ambientEnvVar === true }; } }); - Object.entries(envMapping).forEach(([envVar, configKey]) => { + Object.entries(envMapping).forEach(([envVar, { param: configKey, ambient }]) => { const value = 
process.env[envVar]; if (value !== undefined && value !== '' && this._isSourceSupported(configKey, 'envVar')) { this.config[configKey] = this._parseValue(configKey, value); - // Track as explicit configuration - if (!this.explicitConfig) { - this.explicitConfig = {}; + // Track as explicit configuration โ€” unless the env var is ambient + // (e.g. AWS_REGION is commonly set in shells as a default, not an override) + if (!ambient) { + if (!this.explicitConfig) { + this.explicitConfig = {}; + } + this.explicitConfig[configKey] = this._parseValue(configKey, value); } - this.explicitConfig[configKey] = this._parseValue(configKey, value); } }); } @@ -702,7 +782,7 @@ export default class ConfigManager { async queryMcpServer(serverName, context = {}) { let mcpServerConfigs; try { - const configPath = this.generator.destinationPath('config/mcp.json'); + const configPath = path.join(GENERATOR_ROOT, 'config', 'mcp.json'); if (!fs.existsSync(configPath)) return null; const config = JSON.parse(fs.readFileSync(configPath, 'utf8')); mcpServerConfigs = config.mcpServers; @@ -770,7 +850,7 @@ export default class ConfigManager { */ getMcpServerNames() { try { - const configPath = this.generator.destinationPath('config/mcp.json'); + const configPath = path.join(GENERATOR_ROOT, 'config', 'mcp.json'); if (!fs.existsSync(configPath)) return []; const config = JSON.parse(fs.readFileSync(configPath, 'utf8')); return Object.keys(config.mcpServers || {}); @@ -868,9 +948,10 @@ export default class ConfigManager { } } - // Validate deployment target - if (this.config.deployTarget && !supportedOptions.deployTargets.includes(this.config.deployTarget)) { - errors.push(`Unsupported deployment target: ${this.config.deployTarget}. 
Supported targets: ${supportedOptions.deployTargets.join(', ')}`); + // Validate build target (renamed from deployTarget) + const buildTarget = this.config.buildTarget || this.config.deployTarget; + if (buildTarget && !supportedOptions.buildTargets.includes(buildTarget)) { + errors.push(`Unsupported build target: ${buildTarget}. Supported targets: ${supportedOptions.buildTargets.join(', ')}`); } // Validate CodeBuild compute type @@ -944,6 +1025,13 @@ export default class ConfigManager { return; // Skip validation for transformers } + // Special case: instanceType is not required for hyperpod-eks + // when not provided (backward compatibility) โ€” but it IS prompted now + // so it should normally be present + if (param === 'instanceType' && finalConfig.deploymentTarget === 'hyperpod-eks' && !finalConfig.instanceType) { + return; // Skip validation only if truly missing for backward compat + } + if (isEmpty) { if (config.promptable) { // Promptable required parameter is missing - this should not happen after prompting @@ -1154,10 +1242,11 @@ export default class ConfigManager { } break; + case 'buildTarget': case 'deployTarget': - if (value && !supportedOptions.deployTargets.includes(value)) { + if (value && !supportedOptions.buildTargets.includes(value)) { throw new ValidationError( - `Unsupported deployment target: ${value}. Supported targets: ${supportedOptions.deployTargets.join(', ')}`, + `Unsupported build target: ${value}. 
Supported targets: ${supportedOptions.buildTargets.join(', ')}`, parameter, value ); @@ -1253,7 +1342,7 @@ export default class ConfigManager { 'tensorflow': ['keras', 'h5', 'SavedModel'], 'transformers': [] // No format needed }, - deployTargets: ['codebuild'], + buildTargets: ['codebuild'], codebuildComputeTypes: ['BUILD_GENERAL1_SMALL', 'BUILD_GENERAL1_MEDIUM', 'BUILD_GENERAL1_LARGE'], awsRegions: [ 'us-east-1', 'us-east-2', 'us-west-1', 'us-west-2', diff --git a/generators/app/lib/mcp-client.js b/generators/app/lib/mcp-client.js index fb47d5d..f0c40a4 100644 --- a/generators/app/lib/mcp-client.js +++ b/generators/app/lib/mcp-client.js @@ -97,12 +97,10 @@ class McpClient { // Build environment: merge process.env with server-specific env // When --smart flag is active, inject BEDROCK_SMART=true for this run + // Always pass process.env so child processes inherit AWS credentials, profiles, etc. const smartEnv = this.smart ? { BEDROCK_SMART: 'true' } : {}; const serverEnv = env && Object.keys(env).length > 0 ? env : {}; - const mergedEnv = { ...smartEnv, ...serverEnv }; - const spawnEnv = Object.keys(mergedEnv).length > 0 - ? 
{ ...process.env, ...mergedEnv } - : undefined; + const spawnEnv = { ...process.env, ...smartEnv, ...serverEnv }; // Create stdio transport โ€” spawns the server process this._transport = new StdioClientTransport({ @@ -231,7 +229,7 @@ class McpClient { } } - return { values, choices }; + return { values, choices, message: parsed.message || null }; } /** diff --git a/generators/app/lib/prompt-runner.js b/generators/app/lib/prompt-runner.js index 693cfa5..5d3321f 100644 --- a/generators/app/lib/prompt-runner.js +++ b/generators/app/lib/prompt-runner.js @@ -18,7 +18,10 @@ import { hfTokenPrompts, ngcApiKeyPrompts, modulePrompts, - infrastructurePrompts, + infraRegionAndTargetPrompts, + infraInstancePrompts, + infraHyperPodPrompts, + infraBuildPrompts, projectPrompts, destinationPrompts } from './prompts.js'; @@ -44,9 +47,52 @@ export default class PromptRunner { // Get only explicit configuration (not defaults) for prompt skipping const explicitConfig = this.configManager ? this.configManager.getExplicitConfiguration() : {}; - // Phase 1: Core Configuration (deployment config first) - console.log('\n๐Ÿ”ง Core Configuration'); - const deploymentConfigAnswers = await this._runPhase(deploymentConfigPrompts, {}, explicitConfig, existingConfig); + // Phase 1: Infrastructure & Deployment + // Requirements: 3.1 โ€” infrastructure prompts run first + // Ordering: Region โ†’ Deployment Target โ†’ Instance (if managed) โ†’ HyperPod (if eks) โ†’ Build Target + console.log('\n๐Ÿ’ช Infrastructure & Deployment'); + + // 1a. Query region MCP, then prompt for region + deployment target + await this._queryMcpForRegion({}, explicitConfig); + const regionAndTargetAnswers = await this._runPhase(infraRegionAndTargetPrompts, {}, explicitConfig, existingConfig); + + // 1b. 
Instance type โ€” query MCP and prompt for managed-inference and hyperpod-eks + let instanceAnswers = {}; + if (regionAndTargetAnswers.deploymentTarget === 'managed-inference' || + regionAndTargetAnswers.deploymentTarget === 'hyperpod-eks') { + await this._queryMcpForInstance({}, explicitConfig); + const mcpInstanceChoices = this.configManager?.mcpChoices?.instanceType; + const instancePreviousAnswers = { + ...regionAndTargetAnswers, + ...(mcpInstanceChoices && mcpInstanceChoices.length > 0 ? { _mcpInstanceChoices: mcpInstanceChoices } : {}) + }; + instanceAnswers = await this._runPhase(infraInstancePrompts, instancePreviousAnswers, explicitConfig, existingConfig); + } + + // 1c. HyperPod prompts โ€” only query MCP and prompt when deployment target is hyperpod-eks + let hyperPodAnswers = {}; + if (regionAndTargetAnswers.deploymentTarget === 'hyperpod-eks') { + // Resolve the actual region (handle 'custom' selection) + const resolvedRegion = regionAndTargetAnswers.customAwsRegion || regionAndTargetAnswers.awsRegion; + await this._queryMcpForHyperPod({ ...regionAndTargetAnswers, awsRegion: resolvedRegion }, explicitConfig); + hyperPodAnswers = await this._runPhase(infraHyperPodPrompts, { ...regionAndTargetAnswers }, explicitConfig, existingConfig); + } + + // 1d. 
Build target + role ARN (always) + const buildAnswers = await this._runPhase(infraBuildPrompts, { ...regionAndTargetAnswers, ...instanceAnswers, ...hyperPodAnswers }, explicitConfig, existingConfig); + + // Combine all infrastructure answers + const infraAnswers = { + ...regionAndTargetAnswers, + ...instanceAnswers, + ...hyperPodAnswers, + ...buildAnswers + }; + + // Phase 2: Core ML Configuration + // Requirements: 3.2 โ€” ML configuration prompts run after infrastructure + console.log('\n๐Ÿ”ง Core ML Configuration'); + const deploymentConfigAnswers = await this._runPhase(deploymentConfigPrompts, { ...infraAnswers }, explicitConfig, existingConfig); // Derive framework and modelServer from deploymentConfig // Requirements: 16.3 @@ -132,31 +178,7 @@ export default class PromptRunner { { ...frameworkAnswers, ...frameworkVersionAnswers, ...frameworkProfileAnswers, ...modelFormatAnswers, ...modelServerAnswers, ...modelProfileAnswers }, explicitConfig, existingConfig); - // Phase 2: Module Selection - console.log('\n๐Ÿ“ฆ Module Selection'); - const moduleAnswers = await this._runPhase(modulePrompts, frameworkAnswers, explicitConfig, existingConfig); - - // Ensure transformers don't get sample model - if (frameworkAnswers.framework === 'transformers') { - moduleAnswers.includeSampleModel = false; - } - - // Phase 3: Infrastructure & Performance - console.log('\n๐Ÿ’ช Infrastructure & Performance'); - - // Query MCP servers on-demand with user search terms - await this._queryMcpForInfrastructure(frameworkAnswers, explicitConfig); - - // Pass MCP instance choices so the table and choices in prompts.js can filter - const mcpInstanceChoices = this.configManager?.mcpChoices?.instanceType; - const infraPreviousAnswers = { - ...frameworkAnswers, - ...(mcpInstanceChoices && mcpInstanceChoices.length > 0 ? 
{ _mcpInstanceChoices: mcpInstanceChoices } : {}) - }; - - const infraAnswers = await this._runPhase(infrastructurePrompts, infraPreviousAnswers, explicitConfig, existingConfig); - - // Validate instance type against framework requirements + // Validate instance type against framework requirements (now that framework is known) // Requirements: 4.1, 4.2, 4.3, 4.4, 4.5, 4.6 const instanceType = infraAnswers.customInstanceType || infraAnswers.instanceType; if (instanceType && frameworkVersionAnswers.frameworkVersion) { @@ -179,10 +201,18 @@ export default class PromptRunner { infraAnswers._resolvedInferenceAmiVersion = cudaAnswer.inferenceAmiVersion; } - // Show warning for SageMaker deployment target - // Note: sagemaker deploy target has been removed; only codebuild is supported + // Phase 3: Module Selection + // Requirements: 3.3 โ€” module selection after ML configuration + console.log('\n๐Ÿ“ฆ Module Selection'); + const moduleAnswers = await this._runPhase(modulePrompts, frameworkAnswers, explicitConfig, existingConfig); + + // Ensure transformers don't get sample model + if (frameworkAnswers.framework === 'transformers') { + moduleAnswers.includeSampleModel = false; + } - // Phase 4: Project Configuration (moved to end) + // Phase 4: Project Configuration + // Requirements: 3.4 โ€” project configuration last console.log('\n๐Ÿ“‹ Project Configuration'); const allTechnicalAnswers = { ...frameworkAnswers, @@ -195,11 +225,12 @@ export default class PromptRunner { const destinationAnswers = await this._runPhase(destinationPrompts, { ...allTechnicalAnswers, ...projectAnswers }, explicitConfig, existingConfig); - // Phase 5: Deployment Instructions + // Deployment Instructions this._showDeploymentInstructions(); // Combine all answers const combinedAnswers = { + ...infraAnswers, ...frameworkAnswers, ...frameworkVersionAnswers, ...frameworkProfileAnswers, @@ -209,7 +240,6 @@ export default class PromptRunner { ...hfTokenAnswers, ...ngcApiKeyAnswers, ...moduleAnswers, - 
...infraAnswers, ...projectAnswers, ...destinationAnswers, buildTimestamp @@ -227,6 +257,12 @@ export default class PromptRunner { delete combinedAnswers.customInstanceType; } + // Handle custom HyperPod cluster name + if (combinedAnswers.customHyperPodCluster) { + combinedAnswers.hyperPodCluster = combinedAnswers.customHyperPodCluster; + delete combinedAnswers.customHyperPodCluster; + } + // Apply CUDA version selection โ†’ inference AMI override if (combinedAnswers._resolvedInferenceAmiVersion) { combinedAnswers.inferenceAmiVersion = combinedAnswers._resolvedInferenceAmiVersion; @@ -349,11 +385,59 @@ export default class PromptRunner { } /** - * Query MCP servers for infrastructure parameters using user-provided search terms. + * Query MCP region-picker server before infrastructure prompts. * Populates configManager.mcpChoices so _runPhase injects them into list prompts. * @private */ - async _queryMcpForInfrastructure(frameworkAnswers, explicitConfig) { + async _queryMcpForRegion(frameworkAnswers, explicitConfig) { + const cm = this.configManager; + if (!cm) return; + + const mcpServers = cm.getMcpServerNames(); + if (mcpServers.length === 0) return; + + const smart = this.generator.options.smart === true; + + // Region: query unless explicitly provided via CLI option or config file + // Note: AWS_REGION env var is treated as a default, not an explicit override, + // so we only skip when awsRegion was set via --region CLI flag or config file + const cliRegion = this.generator.options.region; + const skipRegionQuery = cliRegion !== undefined && cliRegion !== null; + + if (!skipRegionQuery && mcpServers.includes('region-picker')) { + const { regionSearch } = await this.generator.prompt([{ + type: 'input', + name: 'regionSearch', + message: '๐Ÿ”Œ Search for a region (e.g. "europe", "us west", "tokyo"):', + default: '' + }]); + + if (regionSearch && regionSearch.trim()) { + console.log(` ๐Ÿ” Querying region-picker${smart ? 
' [smart]' : ''}...`); + const result = await cm.queryMcpServer('region-picker', { + ...frameworkAnswers, + regionSearch: regionSearch.trim() + }); + if (result && result.choices?.awsRegion?.length > 0) { + const choices = result.choices.awsRegion; + const preview = choices.length <= 5 + ? choices.join(', ') + : `${choices.slice(0, 5).join(', ') } (+${choices.length - 5} more)`; + console.log(` โœ“ ${choices.length} region(s): [${preview}]`); + } else { + console.log(' โ†ณ No MCP results, using static list'); + } + } + } + } + + /** + * Query MCP instance-recommender server after deployment target is known. + * Only runs when deploymentTarget is managed-inference. + * Populates configManager.mcpChoices so _runPhase injects them into list prompts. + * @private + */ + async _queryMcpForInstance(frameworkAnswers, explicitConfig) { const cm = this.configManager; if (!cm) return; @@ -388,31 +472,44 @@ export default class PromptRunner { } } } + } - // Region: query if not already provided via CLI/config - if (!explicitConfig.awsRegion && mcpServers.includes('region-picker')) { - const { regionSearch } = await this.generator.prompt([{ - type: 'input', - name: 'regionSearch', - message: '๐Ÿ”Œ Search for a region (e.g. "europe", "us west", "tokyo"):', - default: '' - }]); + /** + * Query the hyperpod-cluster-picker MCP server for available HyperPod EKS clusters. + * Populates configManager.mcpChoices.hyperPodCluster so _runPhase injects them into the list prompt. + * Falls back to manual entry if the MCP server is not configured or fails. + * Requirements: 12.1, 12.2, 12.3 + * @private + */ + async _queryMcpForHyperPod(infraAnswers, explicitConfig) { + const cm = this.configManager; + if (!cm) return; - if (regionSearch && regionSearch.trim()) { - console.log(` ๐Ÿ” Querying region-picker${smart ? 
' [smart]' : ''}...`); - const result = await cm.queryMcpServer('region-picker', { - ...frameworkAnswers, - regionSearch: regionSearch.trim() - }); - if (result && result.choices?.awsRegion?.length > 0) { - const choices = result.choices.awsRegion; - const preview = choices.length <= 5 - ? choices.join(', ') - : `${choices.slice(0, 5).join(', ') } (+${choices.length - 5} more)`; - console.log(` โœ“ ${choices.length} region(s): [${preview}]`); - } else { - console.log(' โ†ณ No MCP results, using static list'); - } + const mcpServers = cm.getMcpServerNames(); + if (!mcpServers.includes('hyperpod-cluster-picker')) return; + + // Skip if cluster already provided via CLI/config + if (explicitConfig.hyperPodCluster) return; + + const smart = this.generator.options.smart === true; + console.log(` ๐Ÿ” Querying hyperpod-cluster-picker${smart ? ' [smart]' : ''}...`); + + const result = await cm.queryMcpServer('hyperpod-cluster-picker', { + ...infraAnswers + }); + + if (result && result.choices?.hyperPodCluster?.length > 0) { + const choices = result.choices.hyperPodCluster; + const preview = choices.length <= 5 + ? choices.join(', ') + : `${choices.slice(0, 5).join(', ')} (+${choices.length - 5} more)`; + console.log(` โœ“ ${choices.length} cluster(s): [${preview}]`); + } else { + // Surface any error message from the MCP server + if (result?.message) { + console.log(` โš ๏ธ ${result.message}`); + } else { + console.log(' โ†ณ No HyperPod clusters found via MCP, manual entry available'); } } } diff --git a/generators/app/lib/prompts.js b/generators/app/lib/prompts.js index a02e3d5..d8f8d0d 100644 --- a/generators/app/lib/prompts.js +++ b/generators/app/lib/prompts.js @@ -367,36 +367,53 @@ const modulePrompts = [ } ]; -const infrastructurePrompts = [ +/** + * Infrastructure prompts split into sub-phases so the prompt runner can + * interleave MCP queries between them (e.g. query instance-recommender + * only after we know the deployment target is managed-inference). 
+ * + * Ordering: Region โ†’ Deployment Target โ†’ Instance/HyperPod โ†’ Build Target โ†’ Role + */ + +// Sub-phase A: Region + Deployment Target (always asked first) +const infraRegionAndTargetPrompts = [ { type: 'list', - name: 'deployTarget', - message: 'Deployment target?', + name: 'awsRegion', + message: 'Target AWS region?', choices: [ - { name: 'codebuild (recommended)', value: 'codebuild' } + 'us-east-1', + { name: 'Custom...', value: 'custom' } ], - default: 'codebuild' + default: 'us-east-1' + }, + { + type: 'input', + name: 'customAwsRegion', + message: 'Enter AWS region (e.g., us-west-2, eu-west-1):', + when: answers => answers.awsRegion === 'custom' }, { type: 'list', - name: 'codebuildComputeType', - message: 'CodeBuild compute type?', + name: 'deploymentTarget', + message: 'Deployment target?', choices: [ - 'BUILD_GENERAL1_SMALL', - 'BUILD_GENERAL1_MEDIUM', - 'BUILD_GENERAL1_LARGE' + { name: 'SageMaker Managed Inference - Real Time', value: 'managed-inference' }, + { name: 'SageMaker HyperPod - EKS', value: 'hyperpod-eks' } ], - default: 'BUILD_GENERAL1_MEDIUM', - when: answers => answers.deployTarget === 'codebuild' - }, + default: 'managed-inference' + } +]; + +// Sub-phase B: Instance type (only when deploymentTarget === 'managed-inference') +const infraInstancePrompts = [ { type: 'list', name: 'instanceType', + when: answers => answers.deploymentTarget === 'managed-inference' || answers.deploymentTarget === 'hyperpod-eks', message: (answers) => { - // Derive framework and modelServer from deploymentConfig if not already set const framework = answers.framework || answers.deploymentConfig?.split('-')[0]; - // Display instance type table const table = new Table({ head: [ chalk.cyan('Instance Type'), @@ -408,20 +425,17 @@ const infrastructurePrompts = [ colWidths: [20, 8, 12, 20, 25] }); - // Filter instances based on framework const instances = Object.values(instanceTypeRegistry); let filteredInstances = framework === 'transformers' ? 
instances.filter(i => i.category === 'gpu') : instances; - // Further filter by MCP results when available const mcpChoices = answers._mcpInstanceChoices; if (mcpChoices && mcpChoices.length > 0) { const mcpSet = new Set(mcpChoices); filteredInstances = filteredInstances.filter(i => mcpSet.has(i.type)); } - // Add rows to table filteredInstances.forEach(instance => { table.push([ instance.type, @@ -432,7 +446,6 @@ const infrastructurePrompts = [ ]); }); - // Add custom option table.push([ chalk.yellow('Custom...'), '-', @@ -451,29 +464,24 @@ const infrastructurePrompts = [ return 'Select instance type:'; }, choices: (answers) => { - // Derive framework from deploymentConfig if not already set const framework = answers.framework || answers.deploymentConfig?.split('-')[0]; - // Get instance types based on framework const instances = Object.values(instanceTypeRegistry); let filteredInstances = framework === 'transformers' ? instances.filter(i => i.category === 'gpu') : instances; - // Further filter by MCP results when available const mcpChoices = answers._mcpInstanceChoices; if (mcpChoices && mcpChoices.length > 0) { const mcpSet = new Set(mcpChoices); filteredInstances = filteredInstances.filter(i => mcpSet.has(i.type)); } - // Build choices array const choices = filteredInstances.map(instance => ({ name: instance.type, value: instance.type })); - // Add custom option choices.push({ name: 'Custom...', value: 'custom' @@ -485,14 +493,13 @@ const infrastructurePrompts = [ const framework = answers.framework || answers.deploymentConfig?.split('-')[0]; const modelServer = answers.modelServer || answers.deploymentConfig?.split('-')[1]; - // Default recommendations if (framework === 'transformers') { if (modelServer === 'tensorrt-llm') { - return 'ml.g5.12xlarge'; // TensorRT-LLM needs more GPU memory + return 'ml.g5.12xlarge'; } - return 'ml.g5.2xlarge'; // Good default for vLLM/SGLang + return 'ml.g5.2xlarge'; } - return 'ml.m5.xlarge'; // Good default for CPU workloads 
+ return 'ml.m5.xlarge'; } }, { @@ -503,7 +510,6 @@ const infrastructurePrompts = [ if (!input || input.trim() === '') { return 'Instance type is required'; } - // Validate AWS SageMaker instance type format const instancePattern = /^ml\.[a-z0-9]+\.(nano|micro|small|medium|large|xlarge|[0-9]+xlarge)$/; if (!instancePattern.test(input.trim())) { return 'Invalid instance type format. Expected format: ml.{family}.{size} (e.g., ml.m5.large, ml.g4dn.xlarge)'; @@ -511,22 +517,81 @@ const infrastructurePrompts = [ return true; }, when: answers => answers.instanceType === 'custom' + } +]; + +// Sub-phase C: HyperPod EKS-specific prompts (only when deploymentTarget === 'hyperpod-eks') +const infraHyperPodPrompts = [ + { + type: 'list', + name: 'hyperPodCluster', + message: 'Select HyperPod EKS cluster:', + choices: (answers) => { + const mcpChoices = answers._mcpHyperPodChoices || []; + if (mcpChoices.length > 0) { + return [...mcpChoices, { name: 'Custom (enter manually)', value: 'custom' }]; + } + // No MCP results โ€” offer manual entry as the only option + return [{ name: 'Enter cluster name manually', value: 'custom' }]; + }, + when: answers => answers.deploymentTarget === 'hyperpod-eks' + }, + { + type: 'input', + name: 'customHyperPodCluster', + message: 'Enter HyperPod EKS cluster name:', + validate: (input) => { + if (!input || input.trim() === '') { + return 'Cluster name is required'; + } + return true; + }, + when: answers => answers.deploymentTarget === 'hyperpod-eks' && answers.hyperPodCluster === 'custom' + }, + { + type: 'input', + name: 'hyperPodNamespace', + message: 'Kubernetes namespace?', + default: 'default', + when: answers => answers.deploymentTarget === 'hyperpod-eks' + }, + { + type: 'number', + name: 'hyperPodReplicas', + message: 'Number of pod replicas?', + default: 1, + when: answers => answers.deploymentTarget === 'hyperpod-eks' }, + { + type: 'input', + name: 'fsxVolumeHandle', + message: 'FSx for Lustre volume handle (optional, press Enter 
to skip):', + when: answers => answers.deploymentTarget === 'hyperpod-eks' + } +]; + +// Sub-phase D: Build target + role ARN (always asked last) +const infraBuildPrompts = [ { type: 'list', - name: 'awsRegion', - message: 'Target AWS region?', + name: 'buildTarget', + message: 'Build target?', choices: [ - 'us-east-1', - { name: 'Custom...', value: 'custom' } + { name: 'CodeBuild (recommended)', value: 'codebuild' } ], - default: 'us-east-1' + default: 'codebuild' }, { - type: 'input', - name: 'customAwsRegion', - message: 'Enter AWS region (e.g., us-west-2, eu-west-1):', - when: answers => answers.awsRegion === 'custom' + type: 'list', + name: 'codebuildComputeType', + message: 'CodeBuild compute type?', + choices: [ + 'BUILD_GENERAL1_SMALL', + 'BUILD_GENERAL1_MEDIUM', + 'BUILD_GENERAL1_LARGE' + ], + default: 'BUILD_GENERAL1_MEDIUM', + when: answers => answers.buildTarget === 'codebuild' }, { type: 'input', @@ -534,7 +599,7 @@ const infrastructurePrompts = [ message: 'AWS IAM Role ARN for SageMaker execution (optional)?', validate: (input) => { if (!input || input.trim() === '') { - return true; // Optional parameter + return true; } const arnPattern = /^arn:aws:iam::\d{12}:role\/[\w+=,.@-]+$/; if (!arnPattern.test(input)) { @@ -545,6 +610,14 @@ const infrastructurePrompts = [ } ]; +// Combined view for tests and backward compatibility +const infrastructurePrompts = [ + ...infraRegionAndTargetPrompts, + ...infraInstancePrompts, + ...infraHyperPodPrompts, + ...infraBuildPrompts +]; + const projectPrompts = [ { type: 'input', @@ -582,6 +655,10 @@ export { ngcApiKeyPrompts, modulePrompts, infrastructurePrompts, + infraRegionAndTargetPrompts, + infraInstancePrompts, + infraHyperPodPrompts, + infraBuildPrompts, projectPrompts, destinationPrompts }; \ No newline at end of file diff --git a/generators/app/lib/template-manager.js b/generators/app/lib/template-manager.js index 82690bb..52dc1b9 100644 --- a/generators/app/lib/template-manager.js +++ 
b/generators/app/lib/template-manager.js @@ -30,7 +30,8 @@ export default class TemplateManager { 'transformers-vllm', 'transformers-sglang', 'transformers-tensorrt-llm', 'transformers-lmi', 'transformers-djl' ], - deployment: ['codebuild'], + buildTargets: ['codebuild'], + deploymentTargets: ['managed-inference', 'hyperpod-eks'], testTypes: ['local-model-cli', 'local-model-server', 'hosted-model-endpoint'], awsRegions: [ 'us-east-1', 'us-east-2', 'us-west-1', 'us-west-2', @@ -57,9 +58,25 @@ export default class TemplateManager { } } - this._validateChoice('deployTarget', supportedOptions.deployment); + // Validate buildTarget (replaces deployTarget) + if (this.answers.buildTarget) { + this._validateChoice('buildTarget', supportedOptions.buildTargets); + } else if (this.answers.deployTarget) { + // Backward compatibility: validate deployTarget against buildTargets + this._validateChoice('deployTarget', supportedOptions.buildTargets); + } + + // Validate deploymentTarget + if (this.answers.deploymentTarget) { + this._validateChoice('deploymentTarget', supportedOptions.deploymentTargets); + } + + // Validate HyperPod EKS specific fields + if (this.answers.deploymentTarget === 'hyperpod-eks') { + this._validateHyperPodConfig(); + } - // Validate instance type format (ml.*.*) + // Validate instance type format (ml.*.*) - only for managed-inference if (this.answers.instanceType && this.answers.instanceType !== 'custom') { const instancePattern = /^ml\.[a-z0-9]+\.(nano|micro|small|medium|large|xlarge|[0-9]+xlarge)$/; if (!instancePattern.test(this.answers.instanceType)) { @@ -77,6 +94,48 @@ export default class TemplateManager { } } + /** + * Validates HyperPod EKS specific configuration + * @private + * @throws {Error} If HyperPod configuration is invalid + */ + _validateHyperPodConfig() { + // Validate hyperPodCluster is non-empty + if (!this.answers.hyperPodCluster || this.answers.hyperPodCluster.trim() === '') { + throw new Error('โš ๏ธ hyperPodCluster is required 
when deploymentTarget is "hyperpod-eks". Please provide a valid HyperPod cluster name.'); + } + + // Validate hyperPodNamespace conforms to RFC 1123 DNS label format + if (this.answers.hyperPodNamespace) { + if (!this._isValidRfc1123DnsLabel(this.answers.hyperPodNamespace)) { + throw new Error(`โš ๏ธ Invalid hyperPodNamespace: "${this.answers.hyperPodNamespace}". Namespace must conform to RFC 1123 DNS label format: lowercase alphanumeric characters or hyphens, must start and end with an alphanumeric character, and be at most 63 characters.`); + } + } + + // Validate hyperPodReplicas is an integer >= 1 + if (this.answers.hyperPodReplicas !== undefined) { + const replicas = this.answers.hyperPodReplicas; + if (!Number.isInteger(replicas) || replicas < 1) { + throw new Error(`โš ๏ธ Invalid hyperPodReplicas: "${replicas}". Replicas must be an integer greater than or equal to 1.`); + } + } + } + + /** + * Validates a string conforms to RFC 1123 DNS label format + * @private + * @param {string} value - The value to validate + * @returns {boolean} True if valid RFC 1123 DNS label + */ + _isValidRfc1123DnsLabel(value) { + if (!value || typeof value !== 'string') { + return false; + } + // RFC 1123 DNS label: lowercase alphanumeric, hyphens allowed (not at start/end), max 63 chars + const rfc1123Pattern = /^[a-z0-9]([a-z0-9-]{0,61}[a-z0-9])?$/; + return value.length <= 63 && rfc1123Pattern.test(value); + } + /** * Validates a single configuration choice * @private diff --git a/generators/app/templates/MIGRATION.md b/generators/app/templates/MIGRATION.md index e7d7dd3..8a460e0 100644 --- a/generators/app/templates/MIGRATION.md +++ b/generators/app/templates/MIGRATION.md @@ -18,7 +18,7 @@ The do-framework provides: |----------------|---------------------|-------| | `./deploy/build_and_push.sh` | `./do/build && ./do/push` | Now split into two commands | | `./deploy/deploy.sh ` | `./do/deploy ` | Same functionality | -<% if (deployTarget === 'codebuild') { %>| 
`./deploy/submit_build.sh` | `./do/submit` | CodeBuild integration | +<% if (buildTarget === 'codebuild') { %>| `./deploy/submit_build.sh` | `./do/submit` | CodeBuild integration | <% } %>| N/A | `./do/run` | New: Run container locally | | N/A | `./do/test [endpoint]` | New: Test container or endpoint | | N/A | `./do/clean ` | New: Clean up resources | @@ -38,7 +38,7 @@ do/ โ”œโ”€โ”€ run # Run locally โ”œโ”€โ”€ test # Test container/endpoint โ”œโ”€โ”€ clean # Clean up resources -<% if (deployTarget === 'codebuild') { %>โ”œโ”€โ”€ submit # Submit to CodeBuild +<% if (buildTarget === 'codebuild') { %>โ”œโ”€โ”€ submit # Submit to CodeBuild <% } %>โ””โ”€โ”€ README.md # Detailed documentation ``` @@ -74,7 +74,7 @@ do/ ./do/test <%= projectName %>-endpoint ``` -<% if (deployTarget === 'codebuild') { %>#### CodeBuild Workflow +<% if (buildTarget === 'codebuild') { %>#### CodeBuild Workflow **Old**: ```bash @@ -151,7 +151,7 @@ Update any project documentation that references the old scripts: **Find and replace**: - `./deploy/build_and_push.sh` โ†’ `./do/build && ./do/push` - `./deploy/deploy.sh` โ†’ `./do/deploy` -<% if (deployTarget === 'codebuild') { %>- `./deploy/submit_build.sh` โ†’ `./do/submit` +<% if (buildTarget === 'codebuild') { %>- `./deploy/submit_build.sh` โ†’ `./do/submit` <% } %> ## Command Mapping Details @@ -203,7 +203,7 @@ This single script built the Docker image and pushed it to ECR. 
- Automatic endpoint status polling - Displays test command when complete -<% if (deployTarget === 'codebuild') { %>### CodeBuild Submit +<% if (buildTarget === 'codebuild') { %>### CodeBuild Submit **Legacy**: ```bash @@ -309,7 +309,7 @@ export MODEL_SERVER="<%= modelServer %>" export AWS_REGION="<%= awsRegion %>" export INSTANCE_TYPE="<%= instanceType %>" export ECR_REPOSITORY_NAME="ml-container-creator" -<% if (deployTarget === 'codebuild') { %>export DEPLOY_TARGET="codebuild" +<% if (buildTarget === 'codebuild') { %>export BUILD_TARGET="codebuild" export CODEBUILD_COMPUTE_TYPE="<%= codebuildComputeType %>" <% } %><% if (framework === 'transformers') { %>export MODEL_NAME="<%= modelName %>" <% if (hfToken) { %>export HF_TOKEN="<%= hfToken %>" @@ -329,7 +329,7 @@ The legacy scripts are still available in the `deploy/` directory for backward c ```bash ./deploy/build_and_push.sh # Still works ./deploy/deploy.sh # Still works -<% if (deployTarget === 'codebuild') { %>./deploy/submit_build.sh # Still works +<% if (buildTarget === 'codebuild') { %>./deploy/submit_build.sh # Still works <% } %> ``` diff --git a/generators/app/templates/PROJECT_README.md b/generators/app/templates/PROJECT_README.md index 0eca09e..aaf3fd2 100644 --- a/generators/app/templates/PROJECT_README.md +++ b/generators/app/templates/PROJECT_README.md @@ -57,7 +57,7 @@ Creates a SageMaker endpoint named `<%= projectName %>-endpoint`. 
โ”‚ โ”œโ”€โ”€ run # Run container locally โ”‚ โ”œโ”€โ”€ test # Test container or endpoint โ”‚ โ”œโ”€โ”€ clean # Clean up resources -<% if (deployTarget === 'codebuild') { %>โ”‚ โ”œโ”€โ”€ submit # Submit build to CodeBuild +<% if (buildTarget === 'codebuild') { %>โ”‚ โ”œโ”€โ”€ submit # Submit build to CodeBuild <% } %>โ”‚ โ”œโ”€โ”€ config # Configuration variables โ”‚ โ””โ”€โ”€ README.md # Detailed do-framework documentation โ”œโ”€โ”€ code/ # Model serving code @@ -118,7 +118,7 @@ You can override these values by setting environment variables before running do ./do/push ``` -<% if (deployTarget === 'codebuild') { %>### CodeBuild Workflow +<% if (buildTarget === 'codebuild') { %>### CodeBuild Workflow ```bash # Submit build to CodeBuild (builds and pushes to ECR) @@ -173,7 +173,7 @@ This project uses the [do-framework](https://github.com/iankoulski/do-framework) | `./do/run` | Run container locally on port 8080 | | `./do/test [endpoint]` | Test local container or SageMaker endpoint | | `./do/clean ` | Clean up resources (local/ecr/endpoint/all) | -<% if (deployTarget === 'codebuild') { %>| `./do/submit` | Submit build to AWS CodeBuild | +<% if (buildTarget === 'codebuild') { %>| `./do/submit` | Submit build to AWS CodeBuild | <% } %> For detailed documentation on each command, see `do/README.md`. @@ -408,7 +408,7 @@ If you're familiar with the old `deploy/` scripts, see `MIGRATION.md` for a comm |----------------|---------------------| | `./deploy/build_and_push.sh` | `./do/build && ./do/push` | | `./deploy/deploy.sh ` | `./do/deploy ` | -<% if (deployTarget === 'codebuild') { %>| `./deploy/submit_build.sh` | `./do/submit` | +<% if (buildTarget === 'codebuild') { %>| `./deploy/submit_build.sh` | `./do/submit` | <% } %> The legacy scripts are still available but deprecated. They will display warnings and forward to do-framework commands. 
diff --git a/generators/app/templates/TEMPLATE_SYSTEM.md b/generators/app/templates/TEMPLATE_SYSTEM.md index 50b88ce..5cffb87 100644 --- a/generators/app/templates/TEMPLATE_SYSTEM.md +++ b/generators/app/templates/TEMPLATE_SYSTEM.md @@ -29,7 +29,7 @@ All user answers from the prompting phase are available in templates: | `includeSampleModel` | boolean | Include sample model | `true`, `false` | | `includeTesting` | boolean | Include test suite | `true`, `false` | | `testTypes` | string[] | Selected test types | `['local-model-cli', 'hosted-model-endpoint']` | -| `deployTarget` | string | Deployment target | `sagemaker` | +| `buildTarget` | string | Build target | `codebuild` | | `instanceType` | string | Instance configuration | `cpu-optimized`, `gpu-enabled`, `custom` | | `customInstanceType` | string | Custom AWS instance type | `ml.m5.large`, `ml.g4dn.xlarge` | | `awsRegion` | string | AWS region | `us-east-1` | diff --git a/generators/app/templates/deploy/submit_build.sh b/generators/app/templates/deploy/submit_build.sh index 61b031d..9c42b3d 100755 --- a/generators/app/templates/deploy/submit_build.sh +++ b/generators/app/templates/deploy/submit_build.sh @@ -1,4 +1,4 @@ -<% if (deployTarget === 'codebuild') { %>#!/bin/bash +<% if (buildTarget === 'codebuild') { %>#!/bin/bash # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 diff --git a/generators/app/templates/do/README.md b/generators/app/templates/do/README.md index 543f7a1..d1d4687 100644 --- a/generators/app/templates/do/README.md +++ b/generators/app/templates/do/README.md @@ -32,7 +32,7 @@ export ROLE_ARN=arn:aws:iam::ACCOUNT_ID:role/YOUR_ROLE - Model Server: `<%= modelServer %>` - AWS Region: `<%= awsRegion %>` - Instance Type: `<%= instanceType %>` -- Deploy Target: `<%= deployTarget %>` +- Build Target: `<%= buildTarget %>` All configuration is centralized in `do/config`. 
You can override any setting by exporting environment variables before running scripts. @@ -110,7 +110,7 @@ Deploy the container to AWS SageMaker as a managed endpoint. **Prerequisites:** - AWS credentials configured -- Docker image pushed to ECR (`./do/push`<% if (deployTarget === 'codebuild') { %> or `./do/submit`<% } %>) +- Docker image pushed to ECR (`./do/push`<% if (buildTarget === 'codebuild') { %> or `./do/submit`<% } %>) - SageMaker execution role ARN **Usage:** @@ -262,7 +262,7 @@ Clean everything: --- -<% if (deployTarget === 'codebuild') { %> +<% if (buildTarget === 'codebuild') { %> ### `./do/submit` Submit a build job to AWS CodeBuild (CodeBuild deployment only). @@ -319,7 +319,7 @@ All scripts source configuration from `do/config`. Key variables: | `AWS_REGION` | AWS region for deployment | `<%= awsRegion %>` | | `ECR_REPOSITORY_NAME` | ECR repository name | `ml-container-creator` | | `INSTANCE_TYPE` | SageMaker instance type | `<%= instanceType %>` | -| `DEPLOY_TARGET` | Deployment target | `<%= deployTarget %>` | +| `BUILD_TARGET` | Build target | `<%= buildTarget %>` | <% if (framework === 'transformers') { %>| `MODEL_NAME` | HuggingFace model name | `<%= modelName %>` | <% } %><% if (modelFormat) { %>| `MODEL_FORMAT` | Model file format | `<%= modelFormat %>` | <% } %> @@ -392,7 +392,7 @@ export ROLE_ARN=arn:aws:iam::ACCOUNT_ID:role/YOUR_ROLE ``` โŒ ECR image not found ``` -<% if (deployTarget === 'codebuild') { %>Run `./do/submit` to build and push the image via CodeBuild. +<% if (buildTarget === 'codebuild') { %>Run `./do/submit` to build and push the image via CodeBuild. <% } else { %>Run `./do/build` and `./do/push` to build and push the image. <% } %> @@ -455,7 +455,7 @@ Try: 2. 
**Deploy to SageMaker:** ```bash - <% if (deployTarget === 'codebuild') { %>./do/submit<% } else { %>./do/push<% } %> + <% if (buildTarget === 'codebuild') { %>./do/submit<% } else { %>./do/push<% } %> export ROLE_ARN=arn:aws:iam::ACCOUNT_ID:role/YOUR_ROLE ./do/deploy ``` @@ -472,7 +472,7 @@ Try: ### CI/CD Workflow -<% if (deployTarget === 'codebuild') { %> +<% if (buildTarget === 'codebuild') { %> ```bash # In your CI/CD pipeline ./do/submit # Build and push via CodeBuild @@ -501,7 +501,7 @@ vim code/model_handler.py ./do/test # Deploy updated version -<% if (deployTarget === 'codebuild') { %>./do/submit<% } else { %>./do/push<% } %> +<% if (buildTarget === 'codebuild') { %>./do/submit<% } else { %>./do/push<% } %> ./do/deploy ``` @@ -513,7 +513,7 @@ The `deploy/` directory contains legacy wrapper scripts for backward compatibili |---------------|------------------------|--------| | `deploy/build_and_push.sh` | `./do/build && ./do/push` | Deprecated | | `deploy/deploy.sh` | `./do/deploy` | Deprecated | -<% if (deployTarget === 'codebuild') { %>| `deploy/submit_build.sh` | `./do/submit` | Deprecated | +<% if (buildTarget === 'codebuild') { %>| `deploy/submit_build.sh` | `./do/submit` | Deprecated | <% } %> **Migration:** The legacy scripts display deprecation warnings and forward to do-framework scripts. Update your workflows to use `do/` scripts directly. 
diff --git a/generators/app/templates/do/clean b/generators/app/templates/do/clean index c5f693e..7773dcd 100644 --- a/generators/app/templates/do/clean +++ b/generators/app/templates/do/clean @@ -15,18 +15,30 @@ CLEANUP_TARGET="${1:-}" # Function to display usage show_usage() { +<% if (deploymentTarget === 'managed-inference') { %> echo "Usage: ./do/clean [local|ecr|endpoint|codebuild|all]" +<% } else if (deploymentTarget === 'hyperpod-eks') { %> + echo "Usage: ./do/clean [local|ecr|hyperpod|codebuild|all]" +<% } %> echo "" echo "Cleanup targets:" echo " local - Remove local Docker images" echo " ecr - Remove images from Amazon ECR" +<% if (deploymentTarget === 'managed-inference') { %> echo " endpoint - Delete SageMaker endpoint, configuration, and model" +<% } else if (deploymentTarget === 'hyperpod-eks') { %> + echo " hyperpod - Delete HyperPod EKS deployment and services" +<% } %> echo " codebuild - Delete CodeBuild project, IAM role, and S3 source artifacts" echo " all - Perform all cleanup operations" echo "" echo "Examples:" echo " ./do/clean local # Remove local Docker images only" +<% if (deploymentTarget === 'managed-inference') { %> echo " ./do/clean endpoint # Delete SageMaker resources only" +<% } else if (deploymentTarget === 'hyperpod-eks') { %> + echo " ./do/clean hyperpod # Delete HyperPod EKS resources only" +<% } %> echo " ./do/clean codebuild # Delete CodeBuild project and rebuild fresh" echo " ./do/clean all # Clean up everything" } @@ -154,7 +166,8 @@ clean_ecr() { fi } -# Function to clean SageMaker endpoint +<% if (deploymentTarget === 'managed-inference') { %> +# Function to clean SageMaker endpoint and inference components clean_endpoint() { echo "๐Ÿงน Cleaning SageMaker resources" echo " Project: ${PROJECT_NAME}" @@ -167,8 +180,11 @@ clean_endpoint() { exit 4 fi - # Use ENDPOINT_NAME from config (set by do/deploy) or argument + # Use names from config (set by do/deploy) or argument local EP_NAME="${ENDPOINT_NAME:-}" + local 
IC_NAME="${INFERENCE_COMPONENT_NAME:-}" + local EPC_NAME="${ENDPOINT_CONFIG_NAME:-}" + if [ -z "${EP_NAME}" ]; then echo "โŒ No endpoint name found" echo " Run ./do/deploy first, or set ENDPOINT_NAME in do/config" @@ -178,48 +194,55 @@ clean_endpoint() { echo "" echo "Checking for SageMaker resources..." - # Discover endpoint config and model from the endpoint itself - local ENDPOINT_CONFIG_NAME="" - local MODEL_NAME="" local ENDPOINT_EXISTS=false if aws sagemaker describe-endpoint \ --endpoint-name "${EP_NAME}" \ --region "${AWS_REGION}" &> /dev/null; then ENDPOINT_EXISTS=true - ENDPOINT_CONFIG_NAME=$(aws sagemaker describe-endpoint \ - --endpoint-name "${EP_NAME}" \ - --region "${AWS_REGION}" \ - --query 'EndpointConfigName' --output text 2>/dev/null || echo "") echo " โœ“ Endpoint: ${EP_NAME}" - echo " โœ“ Endpoint config: ${ENDPOINT_CONFIG_NAME}" else echo "โ„น๏ธ Endpoint not found: ${EP_NAME}" return 0 fi - # Discover model from endpoint config - if [ -n "${ENDPOINT_CONFIG_NAME}" ]; then - MODEL_NAME=$(aws sagemaker describe-endpoint-config \ - --endpoint-config-name "${ENDPOINT_CONFIG_NAME}" \ - --region "${AWS_REGION}" \ - --query 'ProductionVariants[0].ModelName' --output text 2>/dev/null || echo "") - if [ -n "${MODEL_NAME}" ] && [ "${MODEL_NAME}" != "None" ]; then - echo " โœ“ Model: ${MODEL_NAME}" + # Check for inference component + local IC_EXISTS=false + if [ -n "${IC_NAME}" ]; then + if aws sagemaker describe-inference-component \ + --inference-component-name "${IC_NAME}" \ + --region "${AWS_REGION}" &> /dev/null; then + IC_EXISTS=true + echo " โœ“ Inference component: ${IC_NAME}" fi fi - if ! confirm_action "This will delete the SageMaker endpoint, endpoint configuration, and model"; then + if ! 
confirm_action "This will delete the SageMaker endpoint and inference component(s)"; then return 1 fi - - # Delete endpoint first (must be deleted before config) + + # Delete inference component first (must be deleted before endpoint) + if [ "${IC_EXISTS}" = true ]; then + echo "๐Ÿ—‘๏ธ Deleting inference component: ${IC_NAME}" + if aws sagemaker delete-inference-component \ + --inference-component-name "${IC_NAME}" \ + --region "${AWS_REGION}" &> /dev/null; then + echo "โณ Waiting for inference component deletion..." + aws sagemaker wait inference-component-deleted \ + --inference-component-name "${IC_NAME}" \ + --region "${AWS_REGION}" 2>/dev/null || sleep 15 + echo "โœ… Inference component deleted" + else + echo "โŒ Failed to delete inference component" + fi + fi + + # Delete endpoint echo "๐Ÿ—‘๏ธ Deleting endpoint: ${EP_NAME}" if aws sagemaker delete-endpoint \ --endpoint-name "${EP_NAME}" \ --region "${AWS_REGION}" &> /dev/null; then echo "โœ… Endpoint deleted" - # Wait briefly for endpoint deletion to propagate echo "โณ Waiting for endpoint deletion..." 
aws sagemaker wait endpoint-deleted \ --endpoint-name "${EP_NAME}" \ @@ -227,39 +250,95 @@ clean_endpoint() { else echo "โŒ Failed to delete endpoint" fi - + # Delete endpoint configuration - if [ -n "${ENDPOINT_CONFIG_NAME}" ] && [ "${ENDPOINT_CONFIG_NAME}" != "None" ]; then - echo "๐Ÿ—‘๏ธ Deleting endpoint configuration: ${ENDPOINT_CONFIG_NAME}" + if [ -n "${EPC_NAME}" ]; then + echo "๐Ÿ—‘๏ธ Deleting endpoint configuration: ${EPC_NAME}" if aws sagemaker delete-endpoint-config \ - --endpoint-config-name "${ENDPOINT_CONFIG_NAME}" \ + --endpoint-config-name "${EPC_NAME}" \ --region "${AWS_REGION}" &> /dev/null; then echo "โœ… Endpoint configuration deleted" else echo "โŒ Failed to delete endpoint configuration" fi fi - - # Delete model - if [ -n "${MODEL_NAME}" ] && [ "${MODEL_NAME}" != "None" ]; then - echo "๐Ÿ—‘๏ธ Deleting model: ${MODEL_NAME}" - if aws sagemaker delete-model \ - --model-name "${MODEL_NAME}" \ - --region "${AWS_REGION}" &> /dev/null; then - echo "โœ… Model deleted" - else - echo "โŒ Failed to delete model" - fi - fi - # Remove ENDPOINT_NAME from config + # Remove saved names from config if grep -q "^export ENDPOINT_NAME=" "${SCRIPT_DIR}/config" 2>/dev/null; then - sed -i.bak '/^# Last deployed endpoint/d;/^export ENDPOINT_NAME=/d' "${SCRIPT_DIR}/config" + sed -i.bak '/^# Last deployed resources/d;/^export ENDPOINT_NAME=/d;/^export ENDPOINT_CONFIG_NAME=/d;/^export INFERENCE_COMPONENT_NAME=/d' "${SCRIPT_DIR}/config" rm -f "${SCRIPT_DIR}/config.bak" fi echo "โœ… SageMaker resources cleaned" } +<% } else if (deploymentTarget === 'hyperpod-eks') { %> +# Function to clean HyperPod EKS deployment +clean_hyperpod() { + echo "๐Ÿงน Cleaning HyperPod EKS resources" + echo " Cluster: ${HYPERPOD_CLUSTER_NAME}" + echo " Namespace: ${HYPERPOD_NAMESPACE}" + + # Validate AWS credentials + if ! 
aws sts get-caller-identity &> /dev/null; then + echo "โŒ AWS credentials not configured" + echo " Run: aws configure" + exit 4 + fi + + # Get kubeconfig for HyperPod cluster + echo "๐Ÿ”‘ Configuring kubectl for HyperPod cluster..." + KUBECONFIG_PATH="${HOME}/.kube/hyperpod-${HYPERPOD_CLUSTER_NAME}" + + EKS_CLUSTER_ARN=$(aws sagemaker describe-cluster \ + --cluster-name "${HYPERPOD_CLUSTER_NAME}" \ + --region "${AWS_REGION}" \ + --query "Orchestrator.Eks.ClusterArn" \ + --output text 2>&1) || { + echo "โŒ Failed to describe HyperPod cluster: ${HYPERPOD_CLUSTER_NAME}" + echo " Check that the cluster exists and you have permission to access it" + return 1 + } + + EKS_CLUSTER_NAME=$(echo "${EKS_CLUSTER_ARN}" | awk -F'/' '{print $NF}') + + if ! aws eks update-kubeconfig \ + --name "${EKS_CLUSTER_NAME}" \ + --region "${AWS_REGION}" \ + --kubeconfig "${KUBECONFIG_PATH}" 2>&1; then + echo "โŒ Failed to configure kubectl for EKS cluster: ${EKS_CLUSTER_NAME}" + return 1 + fi + + export KUBECONFIG="${KUBECONFIG_PATH}" + + if ! confirm_action "This will delete the HyperPod deployment in namespace ${HYPERPOD_NAMESPACE}"; then + return 1 + fi + + # Delete Kubernetes resources + echo "๐Ÿ—‘๏ธ Deleting Kubernetes resources..." + AWS_ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text) + export AWS_ACCOUNT_ID + DELETE_FAILED=false + for manifest in hyperpod/*.yaml; do + # Skip files that contain no Kubernetes objects (e.g. comment-only PVC stubs) + RENDERED=$(envsubst < "${manifest}") + if echo "${RENDERED}" | grep -q '^kind:'; then + if ! 
echo "${RENDERED}" | kubectl delete -n "${HYPERPOD_NAMESPACE}" --ignore-not-found -f - 2>&1; then + DELETE_FAILED=true + fi + fi + done + if [ "${DELETE_FAILED}" = true ]; then + echo "โŒ Failed to delete some Kubernetes resources" + echo " You may need to manually clean up:" + echo " kubectl get all -n ${HYPERPOD_NAMESPACE}" + return 1 + fi + + echo "โœ… HyperPod EKS resources cleaned" +} +<% } %> # Function to clean CodeBuild project and related resources clean_codebuild() { @@ -268,7 +347,7 @@ clean_codebuild() { echo " Region: ${AWS_REGION}" if [ -z "${CODEBUILD_PROJECT_NAME:-}" ]; then - echo "โ„น๏ธ No CodeBuild project name configured (deploy target may not be codebuild)" + echo "โ„น๏ธ No CodeBuild project name configured (build target may not be codebuild)" return 0 fi @@ -375,9 +454,15 @@ case "${CLEANUP_TARGET}" in ecr) clean_ecr ;; +<% if (deploymentTarget === 'managed-inference') { %> endpoint) clean_endpoint ;; +<% } else if (deploymentTarget === 'hyperpod-eks') { %> + hyperpod) + clean_hyperpod + ;; +<% } %> codebuild) clean_codebuild ;; @@ -402,10 +487,17 @@ case "${CLEANUP_TARGET}" in echo "" +<% if (deploymentTarget === 'managed-inference') { %> # Clean SageMaker resources if clean_endpoint; then CLEANED_ITEMS+=("SageMaker resources") fi +<% } else if (deploymentTarget === 'hyperpod-eks') { %> + # Clean HyperPod EKS resources + if clean_hyperpod; then + CLEANED_ITEMS+=("HyperPod EKS resources") + fi +<% } %> echo "" diff --git a/generators/app/templates/do/config b/generators/app/templates/do/config index 1339534..485fde4 100644 --- a/generators/app/templates/do/config +++ b/generators/app/templates/do/config @@ -13,15 +13,35 @@ export MODEL_SERVER="<%= modelServer %>" # AWS configuration export AWS_REGION="<%= awsRegion %>" export ECR_REPOSITORY_NAME="ml-container-creator" -export INSTANCE_TYPE="<%= instanceType %>" -# Deployment configuration -export DEPLOY_TARGET="<%= deployTarget %>" -<% if (deployTarget === 'codebuild') { %> +# Build 
configuration โ€” WHERE the Docker image gets built +export BUILD_TARGET="<%= buildTarget %>" +<% if (buildTarget === 'codebuild') { %> export CODEBUILD_COMPUTE_TYPE="<%= codebuildComputeType %>" export CODEBUILD_PROJECT_NAME="${PROJECT_NAME}-build-$(date +%Y%m%d)" <% } %> +# Deployment configuration โ€” WHERE the model runs +export DEPLOYMENT_TARGET="<%= deploymentTarget %>" + +<% if (deploymentTarget === 'managed-inference') { %> +# SageMaker Managed Inference configuration +export INSTANCE_TYPE="<%= instanceType %>" +<% if (inferenceAmiVersion) { %> +export INFERENCE_AMI_VERSION="<%= inferenceAmiVersion %>" +<% } %> +<% } %> + +<% if (deploymentTarget === 'hyperpod-eks') { %> +# HyperPod EKS configuration +export HYPERPOD_CLUSTER_NAME="<%= hyperPodCluster %>" +export HYPERPOD_NAMESPACE="<%= hyperPodNamespace %>" +export HYPERPOD_REPLICAS="<%= hyperPodReplicas %>" +<% if (fsxVolumeHandle) { %> +export FSX_VOLUME_HANDLE="<%= fsxVolumeHandle %>" +<% } %> +<% } %> + # Framework-specific configuration <% if (framework === 'transformers') { %> export MODEL_NAME="<%= modelName %>" @@ -37,18 +57,15 @@ export NGC_API_KEY="<%= ngcApiKey %>" export MODEL_FORMAT="<%= modelFormat %>" <% } %> -<% if (inferenceAmiVersion) { %> -# SageMaker inference AMI version (determines CUDA driver version on the instance) -export INFERENCE_AMI_VERSION="<%= inferenceAmiVersion %>" -<% } %> - <% if (roleArn) { %> export ROLE_ARN="<%= roleArn %>" <% } %> # Allow environment variable overrides export AWS_REGION=${AWS_REGION:-<%= awsRegion %>} +<% if (deploymentTarget === 'managed-inference') { %> export INSTANCE_TYPE=${INSTANCE_TYPE:-<%= instanceType %>} +<% } %> export ECR_REPOSITORY_NAME=${ECR_REPOSITORY_NAME:-ml-container-creator} # Print configuration summary @@ -56,4 +73,11 @@ echo "โš™๏ธ Configuration loaded" echo " Project: ${PROJECT_NAME}" echo " Config: ${DEPLOYMENT_CONFIG}" echo " Region: ${AWS_REGION}" +echo " Build target: ${BUILD_TARGET}" +echo " Deployment target: 
${DEPLOYMENT_TARGET}" +<% if (deploymentTarget === 'managed-inference') { %> echo " Instance: ${INSTANCE_TYPE}" +<% } else if (deploymentTarget === 'hyperpod-eks') { %> +echo " HyperPod cluster: ${HYPERPOD_CLUSTER_NAME}" +echo " Namespace: ${HYPERPOD_NAMESPACE}" +<% } %> diff --git a/generators/app/templates/do/deploy b/generators/app/templates/do/deploy index ef1d5dd..441870c 100644 --- a/generators/app/templates/do/deploy +++ b/generators/app/templates/do/deploy @@ -10,12 +10,60 @@ set -o pipefail SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" source "${SCRIPT_DIR}/config" -echo "๐Ÿš€ Deploying to AWS SageMaker" +echo "๐Ÿš€ Deploying to AWS" echo " Project: ${PROJECT_NAME}" echo " Deployment config: ${DEPLOYMENT_CONFIG}" echo " Region: ${AWS_REGION}" +echo " Build target: ${BUILD_TARGET}" +echo " Deployment target: ${DEPLOYMENT_TARGET}" +<% if (deploymentTarget === 'managed-inference') { %> echo " Instance type: ${INSTANCE_TYPE}" -echo " Deploy target: ${DEPLOY_TARGET}" +<% } else if (deploymentTarget === 'hyperpod-eks') { %> +echo " HyperPod cluster: ${HYPERPOD_CLUSTER_NAME}" +echo " Namespace: ${HYPERPOD_NAMESPACE}" +echo " Replicas: ${HYPERPOD_REPLICAS}" +<% } %> + +# Check AWS credentials +echo "๐Ÿ” Validating AWS credentials..." +if ! aws sts get-caller-identity &> /dev/null; then + echo "โŒ AWS credentials not configured" + echo " Run: aws configure" + echo " Or set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment variables" + exit 4 +fi + +AWS_ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text) +echo "โœ… AWS credentials validated (Account: ${AWS_ACCOUNT_ID})" + +# Construct ECR repository URL +ECR_REPOSITORY="${AWS_ACCOUNT_ID}.dkr.ecr.${AWS_REGION}.amazonaws.com/${ECR_REPOSITORY_NAME}" + +# ============================================================ +# Shared: Verify ECR image exists +# ============================================================ +echo "๐Ÿ” Verifying ECR image exists..." +if ! 
aws ecr describe-images \ + --repository-name "${ECR_REPOSITORY_NAME}" \ + --image-ids imageTag="${PROJECT_NAME}-latest" \ + --region "${AWS_REGION}" &> /dev/null; then + + echo "โŒ ECR image not found: ${ECR_REPOSITORY}:${PROJECT_NAME}-latest" + echo "" + echo "Please build and push your image first:" + echo " ./do/submit" + echo "" + echo "After the build completes successfully, run this deploy script again." + exit 4 +fi + +echo "โœ… ECR image found: ${ECR_REPOSITORY}:${PROJECT_NAME}-latest" +IMAGE_TAG="${PROJECT_NAME}-latest" + +<% if (deploymentTarget === 'managed-inference') { %> +# ============================================================ +# SageMaker Managed Inference Deployment (Inference Components) +# ============================================================ # Validate execution role ARN if [ -z "${ROLE_ARN:-}" ]; then @@ -28,7 +76,7 @@ if [ -z "${ROLE_ARN:-}" ]; then echo "Or set ROLE_ARN in do/config" echo "" echo "The execution role must have permissions for:" - echo " โ€ข SageMaker model creation and endpoint management" + echo " โ€ข SageMaker endpoint and inference component management" echo " โ€ข ECR image access" echo " โ€ข S3 access (if using model artifacts)" echo " โ€ข CloudWatch Logs" @@ -37,119 +85,34 @@ fi echo " Using execution role: ${ROLE_ARN}" -# Check AWS credentials -echo "๐Ÿ” Validating AWS credentials..." -if ! 
aws sts get-caller-identity &> /dev/null; then - echo "โŒ AWS credentials not configured" - echo " Run: aws configure" - echo " Or set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment variables" - exit 4 -fi - -AWS_ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text) -echo "โœ… AWS credentials validated (Account: ${AWS_ACCOUNT_ID})" - -# Construct ECR repository URL -ECR_REPOSITORY="${AWS_ACCOUNT_ID}.dkr.ecr.${AWS_REGION}.amazonaws.com/${ECR_REPOSITORY_NAME}" - -# Deployment target branching -case "${DEPLOY_TARGET}" in - codebuild) - echo "๐Ÿ” CodeBuild deployment - verifying ECR image exists..." - - # Verify ECR image exists - if ! aws ecr describe-images \ - --repository-name "${ECR_REPOSITORY_NAME}" \ - --image-ids imageTag="${PROJECT_NAME}-latest" \ - --region "${AWS_REGION}" &> /dev/null; then - - echo "โŒ ECR image not found: ${ECR_REPOSITORY}:${PROJECT_NAME}-latest" - echo "" - echo "The CodeBuild deployment requires the Docker image to be built and pushed to ECR first." - echo "Please run the following command to build and push your image:" - echo " ./do/submit" - echo "" - echo "After the build completes successfully, run this deploy script again." - exit 4 - fi - - echo "โœ… ECR image found: ${ECR_REPOSITORY}:${PROJECT_NAME}-latest" - IMAGE_TAG="${PROJECT_NAME}-latest" - ;; - - sagemaker) - echo "๐Ÿ” SageMaker deployment - using latest image from ECR..." - - # Verify ECR image exists - if ! aws ecr describe-images \ - --repository-name "${ECR_REPOSITORY_NAME}" \ - --image-ids imageTag="${PROJECT_NAME}-latest" \ - --region "${AWS_REGION}" &> /dev/null; then - - echo "โŒ ECR image not found: ${ECR_REPOSITORY}:${PROJECT_NAME}-latest" - echo "" - echo "Please build and push your image first:" - echo " ./do/build" - echo " ./do/push" - echo "" - echo "Then run this deploy script again." 
- exit 4 - fi - - echo "โœ… ECR image found: ${ECR_REPOSITORY}:${PROJECT_NAME}-latest" - IMAGE_TAG="${PROJECT_NAME}-latest" - ;; - - *) - echo "โŒ Unknown deployment target: ${DEPLOY_TARGET}" - echo " Valid values: codebuild, sagemaker" - exit 3 - ;; -esac - # Generate unique names with timestamp TIMESTAMP=$(date +%s) -MODEL_NAME="${PROJECT_NAME}-model-${TIMESTAMP}" -ENDPOINT_CONFIG_NAME="${PROJECT_NAME}-endpoint-config-${TIMESTAMP}" +ENDPOINT_CONFIG_NAME="${PROJECT_NAME}-epc-${TIMESTAMP}" ENDPOINT_NAME="${PROJECT_NAME}-endpoint-${TIMESTAMP}" +IC_NAME="${PROJECT_NAME}-ic-${TIMESTAMP}" -# Create SageMaker model -echo "๐Ÿ“ฆ Creating SageMaker model: ${MODEL_NAME}" -if ! aws sagemaker create-model \ - --model-name "${MODEL_NAME}" \ - --primary-container Image="${ECR_REPOSITORY}:${IMAGE_TAG}",Mode=SingleModel \ - --execution-role-arn "${ROLE_ARN}" \ - --region "${AWS_REGION}"; then - - echo "โŒ Failed to create SageMaker model" - echo " Check that:" - echo " โ€ข The execution role ARN is valid" - echo " โ€ข The role has permissions to access the ECR image" - echo " โ€ข The ECR image exists and is accessible" - exit 4 -fi - -echo "โœ… SageMaker model created: ${MODEL_NAME}" - -# Create endpoint configuration -echo "โš™๏ธ Creating endpoint configuration: ${ENDPOINT_CONFIG_NAME}" - -# Build production variant arguments -VARIANT_ARGS="VariantName=primary,ModelName=${MODEL_NAME},InitialInstanceCount=1,InstanceType=${INSTANCE_TYPE},InitialVariantWeight=1" +# Build production variant JSON โ€” no ModelName, just instance provisioning +VARIANT_JSON="[{\"VariantName\":\"AllTraffic\",\"InstanceType\":\"${INSTANCE_TYPE}\",\"InitialInstanceCount\":1" # Append InferenceAmiVersion if configured (controls CUDA driver version on the instance) if [ -n "${INFERENCE_AMI_VERSION:-}" ]; then - VARIANT_ARGS="${VARIANT_ARGS},InferenceAmiVersion=${INFERENCE_AMI_VERSION}" + VARIANT_JSON="${VARIANT_JSON},\"InferenceAmiVersion\":\"${INFERENCE_AMI_VERSION}\"" echo " AMI version: 
${INFERENCE_AMI_VERSION}" fi +VARIANT_JSON="${VARIANT_JSON}}]" + +# Step 1: Create endpoint configuration +echo "โš™๏ธ Creating endpoint configuration: ${ENDPOINT_CONFIG_NAME}" if ! aws sagemaker create-endpoint-config \ --endpoint-config-name "${ENDPOINT_CONFIG_NAME}" \ - --production-variants "${VARIANT_ARGS}" \ + --execution-role-arn "${ROLE_ARN}" \ + --production-variants "${VARIANT_JSON}" \ --region "${AWS_REGION}"; then - + echo "โŒ Failed to create endpoint configuration" echo " Check that:" + echo " โ€ข The execution role ARN is valid" echo " โ€ข The instance type is valid: ${INSTANCE_TYPE}" echo " โ€ข The instance type is available in region: ${AWS_REGION}" echo " โ€ข You have sufficient service quota for the instance type" @@ -158,52 +121,324 @@ fi echo "โœ… Endpoint configuration created: ${ENDPOINT_CONFIG_NAME}" -# Create endpoint +# Step 2: Create endpoint echo "๐Ÿš€ Creating endpoint: ${ENDPOINT_NAME}" if ! aws sagemaker create-endpoint \ --endpoint-name "${ENDPOINT_NAME}" \ --endpoint-config-name "${ENDPOINT_CONFIG_NAME}" \ --region "${AWS_REGION}"; then - + echo "โŒ Failed to create endpoint" + echo " Check that:" + echo " โ€ข Your IAM credentials have sagemaker:CreateEndpoint permission" + echo " โ€ข You have sufficient service quota in region: ${AWS_REGION}" exit 4 fi echo "โœ… Endpoint creation initiated: ${ENDPOINT_NAME}" -# Wait for endpoint to reach InService status +# Wait for endpoint to be in service before creating inference component echo "โณ Waiting for endpoint to reach InService status..." -echo " This may take 5-10 minutes..." +echo " This may take a few minutes..." if ! 
aws sagemaker wait endpoint-in-service \ --endpoint-name "${ENDPOINT_NAME}" \ --region "${AWS_REGION}"; then - + echo "โŒ Endpoint failed to reach InService status" echo " Check CloudWatch Logs for details:" echo " https://console.aws.amazon.com/cloudwatch/home?region=${AWS_REGION}#logsV2:log-groups/log-group//aws/sagemaker/Endpoints/${ENDPOINT_NAME}" exit 4 fi +echo "โœ… Endpoint is InService: ${ENDPOINT_NAME}" + +# Step 3: Create inference component on the endpoint +echo "๐Ÿ“ฆ Creating inference component: ${IC_NAME}" +if ! aws sagemaker create-inference-component \ + --inference-component-name "${IC_NAME}" \ + --endpoint-name "${ENDPOINT_NAME}" \ + --variant-name "AllTraffic" \ + --specification "{ + \"Container\": { + \"Image\": \"${ECR_REPOSITORY}:${IMAGE_TAG}\" + }, + \"ComputeResourceRequirements\": { + \"NumberOfAcceleratorDevicesRequired\": 1, + \"MinMemoryRequiredInMb\": 1024 + } + }" \ + --runtime-config "{\"CopyCount\": 1}" \ + --region "${AWS_REGION}"; then + + echo "โŒ Failed to create inference component" + echo " Check that:" + echo " โ€ข The ECR image exists and is accessible" + echo " โ€ข The endpoint is in InService status" + echo " โ€ข The compute resource requirements fit the instance type: ${INSTANCE_TYPE}" + exit 4 +fi + +echo "โœ… Inference component creation initiated: ${IC_NAME}" + +# Wait for inference component to reach InService status +echo "โณ Waiting for inference component to reach InService status..." +echo " This may take 5-10 minutes..." + +if ! 
aws sagemaker wait inference-component-in-service \ + --inference-component-name "${IC_NAME}" \ + --region "${AWS_REGION}"; then + + echo "โŒ Inference component failed to reach InService status" + echo " Check CloudWatch Logs for details:" + echo " https://console.aws.amazon.com/cloudwatch/home?region=${AWS_REGION}#logsV2:log-groups/log-group//aws/sagemaker/Endpoints/${ENDPOINT_NAME}" + echo "" + echo " Debug:" + echo " aws sagemaker describe-inference-component --inference-component-name ${IC_NAME} --region ${AWS_REGION}" + exit 4 +fi + echo "โœ… Deployment complete!" echo "" -echo "๐Ÿ“‹ Endpoint Details:" -echo " Name: ${ENDPOINT_NAME}" +echo "๐Ÿ“‹ Deployment Details:" +echo " Endpoint: ${ENDPOINT_NAME}" +echo " Endpoint Config: ${ENDPOINT_CONFIG_NAME}" +echo " Inference Component: ${IC_NAME}" echo " Region: ${AWS_REGION}" echo " Instance Type: ${INSTANCE_TYPE}" -echo " Model: ${MODEL_NAME}" echo " Image: ${ECR_REPOSITORY}:${IMAGE_TAG}" echo "" echo "๐Ÿงช Test your endpoint:" -echo " ./do/test ${ENDPOINT_NAME}" +echo " ./do/test" echo "" -echo "๐Ÿ” Monitor your endpoint:" +echo "๐Ÿ” Monitor your deployment:" +echo " aws sagemaker describe-inference-component --inference-component-name ${IC_NAME} --region ${AWS_REGION}" echo " aws sagemaker describe-endpoint --endpoint-name ${ENDPOINT_NAME} --region ${AWS_REGION}" echo "" echo "๐Ÿงน Clean up when done:" echo " ./do/clean endpoint" -# Write endpoint name to config so other scripts can use it -echo "" >> "${SCRIPT_DIR}/config" -echo "# Last deployed endpoint (auto-generated by do/deploy)" >> "${SCRIPT_DIR}/config" -echo "export ENDPOINT_NAME=\"${ENDPOINT_NAME}\"" >> "${SCRIPT_DIR}/config" +# Write names to config so other scripts can use them (idempotent) +_update_config_var() { + local var_name="$1" var_value="$2" config_file="${SCRIPT_DIR}/config" + if grep -q "^export ${var_name}=" "${config_file}" 2>/dev/null; then + sed -i.bak "s|^export ${var_name}=.*|export ${var_name}=\"${var_value}\"|" "${config_file}" 
+ rm -f "${config_file}.bak" + else + echo "" >> "${config_file}" + echo "export ${var_name}=\"${var_value}\"" >> "${config_file}" + fi +} + +_update_config_var "ENDPOINT_NAME" "${ENDPOINT_NAME}" +_update_config_var "ENDPOINT_CONFIG_NAME" "${ENDPOINT_CONFIG_NAME}" +_update_config_var "INFERENCE_COMPONENT_NAME" "${IC_NAME}" + +<% } else if (deploymentTarget === 'hyperpod-eks') { %> +# ============================================================ +# HyperPod EKS Deployment +# ============================================================ + +# Get kubeconfig for HyperPod cluster +echo "๐Ÿ”‘ Configuring kubectl for HyperPod cluster..." +KUBECONFIG_PATH="${HOME}/.kube/hyperpod-${HYPERPOD_CLUSTER_NAME}" + +# Step 1: Describe the HyperPod cluster to get the underlying EKS cluster ARN +EKS_CLUSTER_ARN=$(aws sagemaker describe-cluster \ + --cluster-name "${HYPERPOD_CLUSTER_NAME}" \ + --region "${AWS_REGION}" \ + --query "Orchestrator.Eks.ClusterArn" \ + --output text 2>&1) || { + echo "โŒ Failed to describe HyperPod cluster: ${HYPERPOD_CLUSTER_NAME}" + echo "" + echo " Error details:" + echo " ${EKS_CLUSTER_ARN}" + echo "" + echo " Check that:" + echo " โ€ข The cluster name is correct" + echo " โ€ข The cluster exists in region: ${AWS_REGION}" + echo " โ€ข Your IAM user/role has permission to access the cluster" + echo "" + echo " Required IAM permissions:" + echo " โ€ข sagemaker:DescribeCluster" + echo " โ€ข eks:DescribeCluster" + exit 4 +} + +# Step 2: Extract the EKS cluster name from the ARN +EKS_CLUSTER_NAME=$(echo "${EKS_CLUSTER_ARN}" | awk -F'/' '{print $NF}') +echo " HyperPod cluster: ${HYPERPOD_CLUSTER_NAME}" +echo " EKS cluster: ${EKS_CLUSTER_NAME}" + +# Step 3: Update kubeconfig using the EKS cluster +if ! 
aws eks update-kubeconfig \ + --name "${EKS_CLUSTER_NAME}" \ + --region "${AWS_REGION}" \ + --kubeconfig "${KUBECONFIG_PATH}" 2>&1; then + echo "โŒ Failed to configure kubectl for EKS cluster: ${EKS_CLUSTER_NAME}" + echo "" + echo " Required IAM permissions:" + echo " โ€ข eks:DescribeCluster" + echo " โ€ข eks:AccessKubernetesApi" + exit 4 +fi + +export KUBECONFIG="${KUBECONFIG_PATH}" +echo "โœ… Kubeconfig saved to: ${KUBECONFIG_PATH}" + +# Verify cluster connectivity +echo "๐Ÿ” Verifying cluster connectivity..." +if ! kubectl cluster-info &> /dev/null; then + echo "โŒ Cannot connect to HyperPod cluster" + echo "" + echo " Check that:" + echo " โ€ข The cluster is in 'InService' status" + echo " โ€ข Your network can reach the cluster API server" + echo " โ€ข Your IAM credentials are valid" + exit 4 +fi +echo "โœ… Connected to HyperPod cluster" + +# Create namespace if it doesn't exist +echo "๐Ÿ“ Ensuring namespace exists: ${HYPERPOD_NAMESPACE}" +if ! kubectl create namespace "${HYPERPOD_NAMESPACE}" --dry-run=client -o yaml | kubectl apply -f - 2>&1; then + echo "โš ๏ธ Warning: Could not create/verify namespace" +fi + +# Apply Kubernetes manifests +echo "๐Ÿ“„ Applying Kubernetes manifests from hyperpod/..." + +# Substitute shell variables (e.g. ${AWS_ACCOUNT_ID}) in manifests before applying +export AWS_ACCOUNT_ID +export ECR_IMAGE="${ECR_REPOSITORY}:${IMAGE_TAG}" + +APPLY_OUTPUT="" +APPLY_EXIT_CODE=0 +for manifest in hyperpod/*.yaml; do + # Skip files that contain no Kubernetes objects (e.g. comment-only PVC stubs) + RENDERED=$(envsubst < "${manifest}") + if echo "${RENDERED}" | grep -q '^kind:'; then + FILE_OUTPUT=$(echo "${RENDERED}" | kubectl apply -n "${HYPERPOD_NAMESPACE}" -f - 2>&1) || { + APPLY_EXIT_CODE=$? 
+ } + APPLY_OUTPUT="${APPLY_OUTPUT}${FILE_OUTPUT}\n" + fi +done + +if [ "${APPLY_EXIT_CODE}" -ne 0 ]; then + echo "" + echo "โŒ Failed to apply Kubernetes manifests" + echo "" + echo " Error details:" + echo " ${APPLY_OUTPUT}" + echo "" + echo " Common issues:" + echo " โ€ข Insufficient node capacity - check available GPU nodes" + echo " โ€ข Resource requests exceed node capacity" + echo " โ€ข RBAC permissions - ensure you have permission to create resources in namespace '${HYPERPOD_NAMESPACE}'" + echo " โ€ข Invalid manifest syntax" +<% if (fsxVolumeHandle) { %> + echo " โ€ข PVC creation failure - verify the FSx CSI driver is installed on the cluster" + echo " kubectl get csidriver -o name | grep fsx" +<% } %> + echo "" + echo " Debug commands:" + echo " kubectl get nodes -o wide" + echo " kubectl describe nodes" + echo " kubectl get events -n ${HYPERPOD_NAMESPACE}" + exit ${APPLY_EXIT_CODE} +fi + +echo "โœ… Kubernetes manifests applied" + +# Wait for deployment to be ready +DEPLOY_TIMEOUT=${DEPLOY_TIMEOUT:-1200} +echo "โณ Waiting for deployment to be ready (timeout: ${DEPLOY_TIMEOUT}s)..." +echo " This may take several minutes for GPU workloads..." +echo "" + +# Poll pod status every 30s while rollout is in progress +( + while true; do + sleep 30 + POD_STATUS=$(kubectl get pods -n "${HYPERPOD_NAMESPACE}" -l app=${PROJECT_NAME} \ + --no-headers 2>/dev/null | head -5) + if [ -n "${POD_STATUS}" ]; then + echo " ๐Ÿ“Š $(date +%H:%M:%S) Pod status:" + echo "${POD_STATUS}" | while read -r line; do echo " ${line}"; done + fi + done +) & +STATUS_PID=$! +trap "kill ${STATUS_PID} 2>/dev/null; wait ${STATUS_PID} 2>/dev/null" EXIT + +ROLLOUT_OUTPUT=$(kubectl rollout status deployment/${PROJECT_NAME} -n "${HYPERPOD_NAMESPACE}" --timeout=${DEPLOY_TIMEOUT}s 2>&1) || { + ROLLOUT_EXIT_CODE=$? 
+ kill ${STATUS_PID} 2>/dev/null + echo "" + echo "โŒ Deployment failed to become ready within timeout" + echo "" + echo " Error details:" + echo " ${ROLLOUT_OUTPUT}" + echo "" + echo " Current pod state:" + kubectl get pods -n "${HYPERPOD_NAMESPACE}" -l app=${PROJECT_NAME} -o wide 2>/dev/null + echo "" + echo " Debug commands:" + echo " kubectl describe pods -n ${HYPERPOD_NAMESPACE} -l app=${PROJECT_NAME}" + echo " kubectl logs -n ${HYPERPOD_NAMESPACE} -l app=${PROJECT_NAME} --tail=100" + echo "" + echo " Common issues:" + echo " โ€ข Image pull errors - check ECR permissions" + echo " โ€ข Resource scheduling - insufficient GPU nodes" + echo " โ€ข Container crash - check application logs" +<% if (fsxVolumeHandle) { %> + echo " โ€ข PVC binding errors - verify FSx CSI driver is installed on the cluster" + echo " kubectl get pvc -n ${HYPERPOD_NAMESPACE}" + echo " kubectl describe pvc -n ${HYPERPOD_NAMESPACE}" + echo " kubectl get csidriver -o name | grep fsx" +<% } %> + exit ${ROLLOUT_EXIT_CODE} +} + +kill ${STATUS_PID} 2>/dev/null +wait ${STATUS_PID} 2>/dev/null + +echo "โœ… HyperPod EKS deployment complete!" 
+echo "" +echo "๐Ÿ“‹ Deployment Details:" +echo " Cluster: ${HYPERPOD_CLUSTER_NAME}" +echo " Namespace: ${HYPERPOD_NAMESPACE}" +echo " Deployment: ${PROJECT_NAME}" +echo " Replicas: ${HYPERPOD_REPLICAS}" +echo " Image: ${ECR_REPOSITORY}:${IMAGE_TAG}" +echo "" +echo "๐Ÿ” Check deployment status:" +echo " export KUBECONFIG=${KUBECONFIG_PATH}" +echo " kubectl get pods -n ${HYPERPOD_NAMESPACE}" +echo " kubectl get svc -n ${HYPERPOD_NAMESPACE}" +echo "" +echo "๐Ÿงช Test your deployment:" +echo " ./do/test" +echo "" +echo "๐Ÿ“‹ View logs:" +echo " ./do/logs" +echo "" +echo "๐Ÿงน Clean up when done:" +echo " ./do/clean hyperpod" + +# Write kubeconfig path to config so other scripts can use it (idempotent) +_update_config_var() { + local var_name="$1" var_value="$2" config_file="${SCRIPT_DIR}/config" + if grep -q "^export ${var_name}=" "${config_file}" 2>/dev/null; then + sed -i.bak "s|^export ${var_name}=.*|export ${var_name}=\"${var_value}\"|" "${config_file}" + rm -f "${config_file}.bak" + else + echo "" >> "${config_file}" + echo "export ${var_name}=\"${var_value}\"" >> "${config_file}" + fi +} + +_update_config_var "KUBECONFIG" "${KUBECONFIG_PATH}" + +<% } %> diff --git a/generators/app/templates/do/export b/generators/app/templates/do/export new file mode 100755 index 0000000..718033c --- /dev/null +++ b/generators/app/templates/do/export @@ -0,0 +1,77 @@ +#!/bin/bash +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 + +# Export current configuration as a single yo ml-container-creator command + +# Source configuration (suppress the summary output) +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +source "${SCRIPT_DIR}/config" > /dev/null 2>&1 + +# Build the command +CMD="yo ml-container-creator" +CMD="${CMD} --project-name=${PROJECT_NAME}" + +# Use deployment-config if available (bundles framework + model server) +if [ -n "${DEPLOYMENT_CONFIG:-}" ]; then + CMD="${CMD} --deployment-config=${DEPLOYMENT_CONFIG}" +else + CMD="${CMD} --framework=${FRAMEWORK}" + CMD="${CMD} --model-server=${MODEL_SERVER}" +fi + +# Model format (traditional ML only) +if [ -n "${MODEL_FORMAT:-}" ]; then + CMD="${CMD} --model-format=${MODEL_FORMAT}" +fi + +# Model name (transformers only) +if [ -n "${MODEL_NAME:-}" ]; then + CMD="${CMD} --model-name=${MODEL_NAME}" +fi + +# Build configuration +CMD="${CMD} --build-target=${BUILD_TARGET}" +if [ "${BUILD_TARGET}" = "codebuild" ] && [ -n "${CODEBUILD_COMPUTE_TYPE:-}" ]; then + CMD="${CMD} --codebuild-compute-type=${CODEBUILD_COMPUTE_TYPE}" +fi + +# Deployment target +CMD="${CMD} --deployment-target=${DEPLOYMENT_TARGET}" + +<% if (deploymentTarget === 'managed-inference') { %> +# SageMaker Managed Inference +CMD="${CMD} --instance-type=${INSTANCE_TYPE}" +<% } else if (deploymentTarget === 'hyperpod-eks') { %> +# HyperPod EKS +CMD="${CMD} --hyperpod-cluster=${HYPERPOD_CLUSTER_NAME}" +if [ "${HYPERPOD_NAMESPACE}" != "default" ]; then + CMD="${CMD} --hyperpod-namespace=${HYPERPOD_NAMESPACE}" +fi +if [ "${HYPERPOD_REPLICAS}" != "1" ]; then + CMD="${CMD} --hyperpod-replicas=${HYPERPOD_REPLICAS}" +fi +<% if (fsxVolumeHandle) { %> +CMD="${CMD} --fsx-volume-handle=${FSX_VOLUME_HANDLE}" +<% } %> +<% } %> + +# AWS region +CMD="${CMD} --region=${AWS_REGION}" + +# Role ARN +if [ -n "${ROLE_ARN:-}" ]; then + CMD="${CMD} --role-arn=${ROLE_ARN}" +fi + +# HuggingFace token โ€” reference env var, don't leak the actual value 
+if [ -n "${HF_TOKEN:-}" ]; then + CMD="${CMD} --hf-token=\$HF_TOKEN" +fi + +CMD="${CMD} --skip-prompts" + +echo "" +echo "# Reproduce this project with a single command:" +echo "${CMD}" +echo "" diff --git a/generators/app/templates/do/logs b/generators/app/templates/do/logs index 89d1909..f209d54 100644 --- a/generators/app/templates/do/logs +++ b/generators/app/templates/do/logs @@ -10,23 +10,38 @@ set -o pipefail SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" source "${SCRIPT_DIR}/config" -# Allow endpoint name as argument or from config -ENDPOINT="${1:-${ENDPOINT_NAME:-}}" +<% if (deploymentTarget === 'managed-inference') { %> +# ============================================================ +# SageMaker Managed Inference Logs (CloudWatch) +# ============================================================ -if [ -z "${ENDPOINT}" ]; then - echo "โŒ No endpoint name provided" +# Allow inference component name as argument or from config +IC_NAME="${1:-${INFERENCE_COMPONENT_NAME:-}}" +ENDPOINT="${ENDPOINT_NAME:-}" + +if [ -z "${IC_NAME}" ] && [ -z "${ENDPOINT}" ]; then + echo "โŒ No inference component or endpoint name provided" echo "" echo "Usage:" - echo " ./do/logs " - echo " ./do/logs # uses ENDPOINT_NAME from do/config" + echo " ./do/logs " + echo " ./do/logs # uses INFERENCE_COMPONENT_NAME from do/config" echo "" - echo "Run ./do/deploy first to set ENDPOINT_NAME automatically." + echo "Run ./do/deploy first to set INFERENCE_COMPONENT_NAME automatically." 
+ exit 1 +fi + +# Inference component logs live under the endpoint log group +# but in log streams named after the inference component +if [ -z "${ENDPOINT}" ]; then + echo "โš ๏ธ ENDPOINT_NAME not set in config โ€” cannot determine log group" + echo " Run ./do/deploy first, or set ENDPOINT_NAME in do/config" exit 1 fi -LOG_GROUP="/aws/sagemaker/Endpoints/${ENDPOINT}" +LOG_GROUP="/aws/sagemaker/InferenceComponents/${IC_NAME}" -echo "๐Ÿ“‹ Tailing logs for endpoint: ${ENDPOINT}" +echo "๐Ÿ“‹ Tailing logs for inference component: ${IC_NAME}" +echo " Endpoint: ${ENDPOINT}" echo " Log group: ${LOG_GROUP}" echo " Region: ${AWS_REGION}" echo "" @@ -42,12 +57,95 @@ if ! aws logs describe-log-groups \ --query "logGroups[?logGroupName=='${LOG_GROUP}'].logGroupName" \ --output text 2>/dev/null | grep -q "${LOG_GROUP}"; then echo "โš ๏ธ Log group not found yet: ${LOG_GROUP}" - echo " The endpoint may still be starting up. Waiting for logs..." + echo " The inference component may still be starting up. Waiting for logs..." + echo "" + echo " Also check the endpoint log group:" + echo " aws logs tail /aws/sagemaker/Endpoints/${ENDPOINT} --follow --region ${AWS_REGION}" echo "" fi -# Tail logs using aws logs tail +# Tail logs aws logs tail "${LOG_GROUP}" \ --region "${AWS_REGION}" \ --follow \ --format short + +<% } else if (deploymentTarget === 'hyperpod-eks') { %> +# ============================================================ +# HyperPod EKS Logs (kubectl) +# ============================================================ + +# Allow pod selector as argument, default to app label +POD_SELECTOR="${1:-app=${PROJECT_NAME}}" + +echo "๐Ÿ“‹ Tailing logs for HyperPod EKS deployment" +echo " Cluster: ${HYPERPOD_CLUSTER_NAME}" +echo " Namespace: ${HYPERPOD_NAMESPACE}" +echo " Selector: ${POD_SELECTOR}" +echo " Region: ${AWS_REGION}" +echo "" + +# Get kubeconfig for HyperPod cluster +echo "๐Ÿ”‘ Configuring kubectl for HyperPod cluster..." 
+KUBECONFIG_PATH="${HOME}/.kube/hyperpod-${HYPERPOD_CLUSTER_NAME}" + +EKS_CLUSTER_ARN=$(aws sagemaker describe-cluster \ + --cluster-name "${HYPERPOD_CLUSTER_NAME}" \ + --region "${AWS_REGION}" \ + --query "Orchestrator.Eks.ClusterArn" \ + --output text 2>&1) || { + echo "โŒ Failed to describe HyperPod cluster: ${HYPERPOD_CLUSTER_NAME}" + echo "" + echo " Check that:" + echo " โ€ข The cluster name is correct" + echo " โ€ข The cluster exists in region: ${AWS_REGION}" + echo " โ€ข Your IAM user/role has permission to access the cluster" + exit 4 +} + +EKS_CLUSTER_NAME=$(echo "${EKS_CLUSTER_ARN}" | awk -F'/' '{print $NF}') + +if ! aws eks update-kubeconfig \ + --name "${EKS_CLUSTER_NAME}" \ + --region "${AWS_REGION}" \ + --kubeconfig "${KUBECONFIG_PATH}" 2>&1; then + echo "โŒ Failed to configure kubectl for EKS cluster: ${EKS_CLUSTER_NAME}" + exit 4 +fi + +export KUBECONFIG="${KUBECONFIG_PATH}" + +# Verify cluster connectivity +if ! kubectl cluster-info &> /dev/null; then + echo "โŒ Cannot connect to HyperPod cluster" + echo "" + echo " Check that:" + echo " โ€ข The cluster is in 'InService' status" + echo " โ€ข Your network can reach the cluster API server" + exit 4 +fi + +# Check if any pods match the selector +POD_COUNT=$(kubectl get pods -n "${HYPERPOD_NAMESPACE}" -l "${POD_SELECTOR}" --no-headers 2>/dev/null | wc -l || echo "0") +if [ "${POD_COUNT}" -eq 0 ]; then + echo "โš ๏ธ No pods found matching selector: ${POD_SELECTOR}" + echo " Namespace: ${HYPERPOD_NAMESPACE}" + echo "" + echo " Run ./do/deploy first to create the deployment." 
+ echo "" + echo " To list all pods in the namespace:" + echo " kubectl get pods -n ${HYPERPOD_NAMESPACE}" + exit 1 +fi + +echo " Found ${POD_COUNT} pod(s) matching selector" +echo "" +echo " Press Ctrl+C to stop" +echo "" +echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" +echo "" + +# Tail logs from all matching pods +kubectl logs -f -l "${POD_SELECTOR}" -n "${HYPERPOD_NAMESPACE}" --all-containers --prefix + +<% } %> diff --git a/generators/app/templates/do/test b/generators/app/templates/do/test index 0aadeea..0e9dd81 100644 --- a/generators/app/templates/do/test +++ b/generators/app/templates/do/test @@ -10,8 +10,13 @@ set -o pipefail SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" source "${SCRIPT_DIR}/config" +<% if (deploymentTarget === 'managed-inference') { %> +# ============================================================ +# SageMaker Managed Inference Testing +# ============================================================ + # Parse arguments -ENDPOINT_NAME="${1:-}" +ENDPOINT_NAME="${1:-${ENDPOINT_NAME:-}}" if [ -z "${ENDPOINT_NAME}" ]; then echo "๐Ÿงช Testing local container at localhost:8080" @@ -43,7 +48,7 @@ if [ "${TEST_MODE}" = "local" ]; then fi HTTP_CODE=$(echo "${PING_RESPONSE}" | tail -n1) - RESPONSE_BODY=$(echo "${PING_RESPONSE}" | head -n-1) + RESPONSE_BODY=$(echo "${PING_RESPONSE}" | sed '$d') if [ "${HTTP_CODE}" = "200" ]; then echo "โœ… Health check passed (HTTP ${HTTP_CODE})" @@ -124,7 +129,7 @@ if [ "${TEST_MODE}" = "local" ]; then fi HTTP_CODE=$(echo "${INVOKE_RESPONSE}" | tail -n1) - RESPONSE_BODY=$(echo "${INVOKE_RESPONSE}" | head -n-1) + RESPONSE_BODY=$(echo "${INVOKE_RESPONSE}" | sed '$d') if [ "${HTTP_CODE}" = "200" ]; then echo "โœ… Inference request successful (HTTP ${HTTP_CODE})" @@ -147,12 +152,21 @@ else # Create temporary file 
for response TEMP_RESPONSE=$(mktemp) - # Invoke endpoint + # Invoke endpoint via inference component + IC_NAME="${INFERENCE_COMPONENT_NAME:-}" + INVOKE_ARGS=( + --endpoint-name "${ENDPOINT_NAME}" + --region "${AWS_REGION}" + --content-type "application/json" + --body "fileb://${TEMP_PAYLOAD}" + ) + if [ -n "${IC_NAME}" ]; then + INVOKE_ARGS+=(--inference-component-name "${IC_NAME}") + echo " Inference component: ${IC_NAME}" + fi + if ! aws sagemaker-runtime invoke-endpoint \ - --endpoint-name "${ENDPOINT_NAME}" \ - --region "${AWS_REGION}" \ - --content-type "application/json" \ - --body "fileb://${TEMP_PAYLOAD}" \ + "${INVOKE_ARGS[@]}" \ "${TEMP_RESPONSE}" &> /dev/null; then echo "โŒ Inference request failed" rm -f "${TEMP_PAYLOAD}" "${TEMP_RESPONSE}" @@ -185,3 +199,210 @@ else echo " โ€ข Endpoint name: ${ENDPOINT_NAME}" echo " โ€ข Region: ${AWS_REGION}" fi + +<% } else if (deploymentTarget === 'hyperpod-eks') { %> +# ============================================================ +# HyperPod EKS Testing +# ============================================================ + +# Parse arguments: local or hyperpod test mode +# Default to hyperpod if no argument given (deployment target is hyperpod-eks) +TEST_TARGET="${1:-hyperpod}" + +case "${TEST_TARGET}" in + local) + echo "๐Ÿงช Testing local container at localhost:8080" + echo " Project: ${PROJECT_NAME}" + echo " Framework: ${FRAMEWORK}" + echo " Model server: ${MODEL_SERVER}" + TARGET_URL="http://localhost:8080" + ;; + hyperpod) + echo "๐Ÿงช Testing HyperPod EKS deployment" + echo " Project: ${PROJECT_NAME}" + echo " Framework: ${FRAMEWORK}" + echo " Model server: ${MODEL_SERVER}" + echo " Cluster: ${HYPERPOD_CLUSTER_NAME}" + echo " Namespace: ${HYPERPOD_NAMESPACE}" + echo " Region: ${AWS_REGION}" + echo "" + + # Get kubeconfig for HyperPod cluster + echo "๐Ÿ”‘ Configuring kubectl for HyperPod cluster..." 
+ KUBECONFIG_PATH="${HOME}/.kube/hyperpod-${HYPERPOD_CLUSTER_NAME}" + + EKS_CLUSTER_ARN=$(aws sagemaker describe-cluster \ + --cluster-name "${HYPERPOD_CLUSTER_NAME}" \ + --region "${AWS_REGION}" \ + --query "Orchestrator.Eks.ClusterArn" \ + --output text 2>&1) || { + echo "โŒ Failed to describe HyperPod cluster: ${HYPERPOD_CLUSTER_NAME}" + echo "" + echo " Check that:" + echo " โ€ข The cluster name is correct" + echo " โ€ข The cluster exists in region: ${AWS_REGION}" + echo " โ€ข Your IAM user/role has permission to access the cluster" + exit 4 + } + + EKS_CLUSTER_NAME=$(echo "${EKS_CLUSTER_ARN}" | awk -F'/' '{print $NF}') + + if ! aws eks update-kubeconfig \ + --name "${EKS_CLUSTER_NAME}" \ + --region "${AWS_REGION}" \ + --kubeconfig "${KUBECONFIG_PATH}" 2>&1; then + echo "โŒ Failed to configure kubectl for EKS cluster: ${EKS_CLUSTER_NAME}" + exit 4 + fi + + export KUBECONFIG="${KUBECONFIG_PATH}" + + # Verify cluster connectivity + if ! kubectl cluster-info &> /dev/null; then + echo "โŒ Cannot connect to HyperPod cluster" + exit 4 + fi + echo "โœ… Connected to HyperPod cluster" + + # Port-forward the service to a local port + LOCAL_PORT=8080 + echo "" + echo "๐Ÿ”Œ Port-forwarding svc/${PROJECT_NAME} to localhost:${LOCAL_PORT}..." + kubectl port-forward "svc/${PROJECT_NAME}" "${LOCAL_PORT}:8080" \ + -n "${HYPERPOD_NAMESPACE}" & + PF_PID=$! + + # Wait for port-forward to establish + sleep 3 + + # Ensure cleanup on exit + trap "kill ${PF_PID} 2>/dev/null || true" EXIT + + # Verify port-forward is running + if ! 
kill -0 ${PF_PID} 2>/dev/null; then + echo "โŒ Port-forward failed to start" + echo "" + echo " Check that:" + echo " โ€ข The service exists: kubectl get svc ${PROJECT_NAME} -n ${HYPERPOD_NAMESPACE}" + echo " โ€ข The deployment is running: kubectl get pods -n ${HYPERPOD_NAMESPACE}" + exit 1 + fi + echo "โœ… Port-forward established" + + TARGET_URL="http://localhost:${LOCAL_PORT}" + ;; + *) + echo "Usage: ./do/test [local|hyperpod]" + echo "" + echo "Test modes:" + echo " local - Test local container at localhost:8080" + echo " hyperpod - Test HyperPod EKS deployment via port-forward" + exit 1 + ;; +esac + +echo "" + +# Test 1: Health check (/ping) +echo "๐Ÿ” Test 1: Health check" +echo " Sending GET request to ${TARGET_URL}/ping" + +if ! PING_RESPONSE=$(curl -s -w "\n%{http_code}" -X GET "${TARGET_URL}/ping" 2>&1); then + echo "โŒ Health check failed: Could not connect" + if [ "${TEST_TARGET}" = "local" ]; then + echo " Make sure the container is running: ./do/run" + else + echo " Check that the port-forward is working and pods are running" + fi + exit 1 +fi + +HTTP_CODE=$(echo "${PING_RESPONSE}" | tail -n1) +RESPONSE_BODY=$(echo "${PING_RESPONSE}" | sed '$d') + +if [ "${HTTP_CODE}" = "200" ]; then + echo "โœ… Health check passed (HTTP ${HTTP_CODE})" +else + echo "โŒ Health check failed (HTTP ${HTTP_CODE})" + echo " Response: ${RESPONSE_BODY}" + exit 1 +fi + +echo "" + +# Test 2: Inference request (/invocations) +echo "๐Ÿ” Test 2: Inference request" + +# Create framework-specific test payload +case "${FRAMEWORK}" in + sklearn|xgboost) + # Traditional ML: JSON with instances array + TEST_PAYLOAD='{"instances": [[1.0, 2.0, 3.0, 4.0]]}' + echo " Payload: Sample feature vector" + ;; + tensorflow) + # TensorFlow: JSON with instances array + TEST_PAYLOAD='{"instances": [[1.0, 2.0, 3.0, 4.0]]}' + echo " Payload: Sample feature vector" + ;; + transformers) + # Transformers: payload format depends on model server + case "${MODEL_SERVER}" in + vllm|sglang) + # 
OpenAI-compatible chat completions format + TEST_PAYLOAD='{"model": "'"${MODEL_NAME}"'", "messages": [{"role": "user", "content": "What is machine learning?"}], "max_tokens": 50, "temperature": 0.7}' + echo " Payload: OpenAI-compatible chat completion request" + ;; + *) + # HuggingFace-style format for LMI, DJL, TensorRT-LLM + TEST_PAYLOAD='{"inputs": "What is machine learning?", "parameters": {"max_new_tokens": 50, "temperature": 0.7}}' + echo " Payload: HuggingFace-style text generation request" + ;; + esac + ;; + *) + echo "โŒ Unknown framework: ${FRAMEWORK}" + exit 3 + ;; +esac + +echo " Sending POST request to ${TARGET_URL}/invocations" + +if ! INVOKE_RESPONSE=$(curl -s -w "\n%{http_code}" -X POST "${TARGET_URL}/invocations" \ + -H "Content-Type: application/json" \ + -d "${TEST_PAYLOAD}" 2>&1); then + echo "โŒ Inference request failed: Could not connect" + exit 1 +fi + +HTTP_CODE=$(echo "${INVOKE_RESPONSE}" | tail -n1) +RESPONSE_BODY=$(echo "${INVOKE_RESPONSE}" | sed '$d') + +if [ "${HTTP_CODE}" = "200" ]; then + echo "โœ… Inference request successful (HTTP ${HTTP_CODE})" + echo " Response preview: ${RESPONSE_BODY:0:200}" + if [ ${#RESPONSE_BODY} -gt 200 ]; then + echo " (truncated, full response is ${#RESPONSE_BODY} characters)" + fi +else + echo "โŒ Inference request failed (HTTP ${HTTP_CODE})" + echo " Response: ${RESPONSE_BODY}" + exit 1 +fi + +echo "" +echo "โœ… All tests passed!" +echo "" + +if [ "${TEST_TARGET}" = "local" ]; then + echo "Next steps:" + echo " โ€ข Push to ECR: ./do/push" + echo " โ€ข Deploy to HyperPod: ./do/deploy" +else + echo "HyperPod deployment is ready for production use!" 
+ echo " โ€ข Cluster: ${HYPERPOD_CLUSTER_NAME}" + echo " โ€ข Namespace: ${HYPERPOD_NAMESPACE}" + echo " โ€ข Service: ${PROJECT_NAME}" +fi + +<% } %> diff --git a/generators/app/templates/hyperpod/configmap.yaml b/generators/app/templates/hyperpod/configmap.yaml new file mode 100644 index 0000000..8330397 --- /dev/null +++ b/generators/app/templates/hyperpod/configmap.yaml @@ -0,0 +1,24 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: <%= projectName %>-config + namespace: <%= hyperPodNamespace %> + labels: + app: <%= projectName %> + managed-by: ml-container-creator +data: + DEPLOYMENT_CONFIG: "<%= deploymentConfig %>" + FRAMEWORK: "<%= framework %>" + MODEL_SERVER: "<%= modelServer %>" +<% if (framework === 'transformers') { %> + MODEL_NAME: "<%= modelName %>" +<% if (hfToken) { %> + HF_TOKEN: "<%= hfToken %>" +<% } %> +<% if (ngcApiKey) { %> + NGC_API_KEY: "<%= ngcApiKey %>" +<% } %> +<% } %> +<% if (modelFormat) { %> + MODEL_FORMAT: "<%= modelFormat %>" +<% } %> diff --git a/generators/app/templates/hyperpod/deployment.yaml b/generators/app/templates/hyperpod/deployment.yaml new file mode 100644 index 0000000..f459e11 --- /dev/null +++ b/generators/app/templates/hyperpod/deployment.yaml @@ -0,0 +1,71 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: <%= projectName %> + namespace: <%= hyperPodNamespace %> + labels: + app: <%= projectName %> + framework: <%= framework %> + managed-by: ml-container-creator +spec: + replicas: <%= hyperPodReplicas %> + selector: + matchLabels: + app: <%= projectName %> + template: + metadata: + labels: + app: <%= projectName %> + framework: <%= framework %> + spec: + containers: + - name: <%= projectName %> + image: ${AWS_ACCOUNT_ID}.dkr.ecr.<%= awsRegion %>.amazonaws.com/ml-container-creator:<%= projectName %>-latest + ports: + - containerPort: 8080 + protocol: TCP + envFrom: + - configMapRef: + name: <%= projectName %>-config + resources: + requests: + cpu: "4" + memory: "16Gi" + nvidia.com/gpu: "1" + limits: + 
cpu: "8" + memory: "32Gi" + nvidia.com/gpu: "1" +<% if (fsxVolumeHandle) { %> + volumeMounts: + - name: fsx-storage + mountPath: /opt/ml/model +<% } %> + readinessProbe: + httpGet: + path: /ping + port: 8080 + initialDelaySeconds: 120 + periodSeconds: 10 + livenessProbe: + httpGet: + path: /ping + port: 8080 + initialDelaySeconds: 900 + periodSeconds: 30 + failureThreshold: 5 + nodeSelector: + node.kubernetes.io/instance-type: <%= instanceType %> + tolerations: + - key: "nvidia.com/gpu" + operator: "Exists" + effect: "NoSchedule" + - key: "sagemaker.amazonaws.com/hyperpod" + operator: "Exists" + effect: "NoSchedule" +<% if (fsxVolumeHandle) { %> + volumes: + - name: fsx-storage + persistentVolumeClaim: + claimName: <%= projectName %>-fsx-pvc +<% } %> diff --git a/generators/app/templates/hyperpod/pvc.yaml b/generators/app/templates/hyperpod/pvc.yaml new file mode 100644 index 0000000..004a005 --- /dev/null +++ b/generators/app/templates/hyperpod/pvc.yaml @@ -0,0 +1,42 @@ +<% if (fsxVolumeHandle) { %> +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: <%= projectName %>-fsx-pvc + namespace: <%= hyperPodNamespace %> + labels: + app: <%= projectName %> + managed-by: ml-container-creator +spec: + accessModes: + - ReadWriteMany + storageClassName: fsx-lustre + resources: + requests: + storage: 1200Gi +--- +apiVersion: v1 +kind: PersistentVolume +metadata: + name: <%= projectName %>-fsx-pv + labels: + app: <%= projectName %> + managed-by: ml-container-creator +spec: + capacity: + storage: 1200Gi + volumeMode: Filesystem + accessModes: + - ReadWriteMany + persistentVolumeReclaimPolicy: Retain + storageClassName: fsx-lustre + csi: + driver: fsx.csi.aws.com + volumeHandle: <%= fsxVolumeHandle %> + volumeAttributes: + dnsname: <%= fsxVolumeHandle %>.fsx.<%= awsRegion %>.amazonaws.com + mountname: fsx +<% } else { %> +# PVC not generated - no FSx volume handle provided +# To enable FSx for Lustre storage, provide fsxVolumeHandle during generation +<% } %> diff 
--git a/generators/app/templates/hyperpod/service.yaml b/generators/app/templates/hyperpod/service.yaml new file mode 100644 index 0000000..63caaa8 --- /dev/null +++ b/generators/app/templates/hyperpod/service.yaml @@ -0,0 +1,17 @@ +apiVersion: v1 +kind: Service +metadata: + name: <%= projectName %> + namespace: <%= hyperPodNamespace %> + labels: + app: <%= projectName %> + managed-by: ml-container-creator +spec: + type: ClusterIP + ports: + - port: 8080 + targetPort: 8080 + protocol: TCP + name: http + selector: + app: <%= projectName %> diff --git a/servers/hyperpod-cluster-picker/LICENSE b/servers/hyperpod-cluster-picker/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/servers/hyperpod-cluster-picker/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/servers/hyperpod-cluster-picker/index.js b/servers/hyperpod-cluster-picker/index.js new file mode 100644 index 0000000..150fdbe --- /dev/null +++ b/servers/hyperpod-cluster-picker/index.js @@ -0,0 +1,346 @@ +#!/usr/bin/env node +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +/** + * HyperPod Cluster Picker MCP Server + * + * A bundled MCP server that discovers available SageMaker HyperPod EKS clusters + * via the AWS SageMaker ListClusters and DescribeCluster APIs. + * + * Only clusters that are InService and use EKS orchestration are returned. + * Slurm-based clusters are excluded. + * + * Tool: get_ml_config + * Accepts: { parameters: string[], limit: number, context: object } + * Returns: { values: Record<string, string>, choices: Record<string, string[]> } + * + * Environment variables: + * AWS_REGION - AWS region for SageMaker API calls (default: us-east-1) + */ + +import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js' +import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js' +import { z } from 'zod' +import { fileURLToPath } from 'node:url' +import { resolve } from 'node:path' +import { readFileSync } from 'node:fs' +import { homedir } from 'node:os' + +/** + * Log to stderr so it doesn't interfere with MCP stdio protocol on stdout. 
+ */ +function log(message) { + process.stderr.write(`[hyperpod-cluster-picker] ${message}\n`) +} + +/** + * Create a SageMaker client for the given region. + * Accepts an optional factory function for testability. + * If no credentials are found with the default provider chain, + * falls back to the first available AWS profile. + * + * @param {string} region - AWS region + * @param {Function|null} clientFactory - Optional factory (used in tests) + * @returns {object} SageMaker client + */ +function createSageMakerClient(region, clientFactory = null) { + if (clientFactory) return clientFactory(region) + return _defaultClientFactory(region) +} + +let _SageMakerClient = null +let _ListClustersCommand = null +let _DescribeClusterCommand = null +let _fromIni = null + +/** + * Lazily load the AWS SDK SageMaker client classes. + * This allows the module to be imported in test environments + * without requiring @aws-sdk/client-sagemaker to be installed. + */ +async function _ensureSdkLoaded() { + if (_SageMakerClient) return + const sdk = await import('@aws-sdk/client-sagemaker') + _SageMakerClient = sdk.SageMakerClient + _ListClustersCommand = sdk.ListClustersCommand + _DescribeClusterCommand = sdk.DescribeClusterCommand + try { + const credentialProviders = await import('@aws-sdk/credential-providers') + _fromIni = credentialProviders.fromIni + } catch { + // credential-providers not available โ€” profile-based fallback won't work + } +} + +function _defaultClientFactory(region) { + return new _SageMakerClient({ region }) +} + +/** + * Create a SageMaker client using a named AWS profile via fromIni. 
+ * @param {string} region - AWS region + * @param {string} profile - AWS profile name + * @returns {object} SageMaker client + */ +function _createClientWithProfile(region, profile) { + if (!_fromIni) { + throw new Error('Cannot use profile-based credentials: @aws-sdk/credential-providers not available') + } + return new _SageMakerClient({ + region, + credentials: _fromIni({ profile }) + }) +} + +/** + * Detect available AWS profile names from ~/.aws/credentials and ~/.aws/config. + * @returns {string[]} Array of profile names + */ +function _detectAwsProfiles() { + const profiles = new Set() + try { + const credsPath = resolve(homedir(), '.aws/credentials') + const creds = readFileSync(credsPath, 'utf8') + for (const match of creds.matchAll(/^\[(.+)\]$/gm)) { + profiles.add(match[1]) + } + } catch { /* no credentials file */ } + try { + const configPath = resolve(homedir(), '.aws/config') + const config = readFileSync(configPath, 'utf8') + for (const match of config.matchAll(/^\[profile\s+(.+)\]$/gm)) { + profiles.add(match[1]) + } + } catch { /* no config file */ } + return [...profiles] +} + +/** + * Fetch all HyperPod clusters, filtering to InService + EKS only. 
+ + * @param {object} client - SageMaker client instance + * @param {object} options - { limit } + * @returns {Promise<Array<object>>} + */ +async function fetchHyperPodClusters(client, { limit = 10 } = {}) { + const clusters = [] + let nextToken + + // Paginate through ListClusters + do { + const params = { MaxResults: 100 } + if (nextToken) params.NextToken = nextToken + + const command = new _ListClustersCommand(params) + const response = await client.send(command) + + const summaries = response.ClusterSummaries || [] + for (const summary of summaries) { + // Filter: InService only + if (summary.ClusterStatus !== 'InService') continue + + clusters.push({ + clusterName: summary.ClusterName, + clusterArn: summary.ClusterArn, + status: summary.ClusterStatus + }) + } + + nextToken = response.NextToken + } while (nextToken && clusters.length < limit * 3) // over-fetch to account for EKS filtering + + // Now describe each cluster to check orchestrator type and get instance groups + const eksClusters = [] + for (const cluster of clusters) { + if (eksClusters.length >= limit) break + + try { + const describeCommand = new _DescribeClusterCommand({ + ClusterName: cluster.clusterName + }) + const detail = await client.send(describeCommand) + + // Filter: EKS orchestrator only (exclude Slurm) + const orchestrator = detail.Orchestrator?.Eks ? 'EKS' : 'Slurm' + if (orchestrator !== 'EKS') continue + + const instanceGroups = (detail.InstanceGroups || []).map(g => ({ + name: g.InstanceGroupName, + instanceType: g.InstanceType, + count: g.CurrentCount ?? g.TargetCount ?? 0 + })) + + eksClusters.push({ + clusterName: cluster.clusterName, + clusterArn: cluster.clusterArn, + status: cluster.status, + instanceGroups + }) + } catch (err) { + log(`Warning: could not describe cluster "${cluster.clusterName}": ${err.message}`) + } + } + + return eksClusters +} + +/** + * Build the MCP response from a list of discovered clusters. 
+ * + * @param {Array} clusters - Array of cluster objects from fetchHyperPodClusters + * @returns {{ values: object, choices: object, metadata?: object }} + */ +function buildResponse(clusters) { + if (!clusters || clusters.length === 0) { + return { + values: {}, + choices: { hyperPodCluster: [] }, + message: 'No InService HyperPod EKS clusters found in the specified region. Verify the region and that you have HyperPod EKS clusters provisioned.' + } + } + + const clusterNames = clusters.map(c => c.clusterName) + + return { + values: { hyperPodCluster: clusterNames[0] }, + choices: { hyperPodCluster: clusterNames }, + metadata: Object.fromEntries( + clusters.map(c => [c.clusterName, { + clusterArn: c.clusterArn, + status: c.status, + instanceGroups: c.instanceGroups + }]) + ) + } +} + +// Create MCP server +const server = new McpServer({ + name: 'hyperpod-cluster-picker', + version: '1.0.0' +}) + +// Register the get_ml_config tool +server.tool( + 'get_ml_config', + 'Discovers available SageMaker HyperPod EKS clusters for deployment target selection', + { + parameters: z.array(z.string()).describe('List of parameter names to provide values for'), + limit: z.number().int().positive().default(10).describe('Maximum number of choices per parameter'), + context: z.record(z.string(), z.any()).optional().describe('Current configuration context (awsRegion, etc.)') + }, + async ({ parameters, limit, context }) => { + // If hyperPodCluster is not requested, return empty + if (!parameters.includes('hyperPodCluster')) { + return { + content: [{ + type: 'text', + text: JSON.stringify({ values: {}, choices: {} }) + }] + } + } + + const region = context?.awsRegion || process.env.AWS_REGION || 'us-east-1' + const profile = context?.awsProfile || process.env.AWS_PROFILE || null + log(`Querying HyperPod clusters in region: ${region}${profile ? 
` (profile: ${profile})` : ''}`) + + try { + await _ensureSdkLoaded() + + let clusters = null + let lastError = null + + // Strategy 1: If a specific profile was requested, use it directly + if (profile) { + try { + log(`Trying explicit profile: ${profile}`) + const client = _createClientWithProfile(region, profile) + clusters = await fetchHyperPodClusters(client, { limit }) + } catch (err) { + log(`Profile "${profile}" failed: ${err.message}`) + lastError = err + } + } + + // Strategy 2: Try the default credential chain (env vars, instance profile, etc.) + if (!clusters) { + try { + log('Trying default credential chain') + const client = createSageMakerClient(region) + clusters = await fetchHyperPodClusters(client, { limit }) + } catch (err) { + log(`Default credential chain failed: ${err.message}`) + lastError = err + } + } + + // Strategy 3: Detect available AWS profiles and try each + if (!clusters && _fromIni) { + const profiles = _detectAwsProfiles() + if (profiles.length > 0) { + log(`Default credentials failed, trying ${profiles.length} detected profile(s): ${profiles.join(', ')}`) + for (const p of profiles) { + try { + const client = _createClientWithProfile(region, p) + clusters = await fetchHyperPodClusters(client, { limit }) + log(`Profile "${p}" succeeded`) + break + } catch (err) { + log(`Profile "${p}" failed: ${err.message}`) + lastError = err + } + } + } + } + + // If all strategies failed, throw the last error + if (!clusters) { + throw lastError || new Error('No AWS credentials available') + } + + const result = buildResponse(clusters) + + if (clusters.length > 0) { + log(`Found ${clusters.length} HyperPod EKS cluster(s)`) + } else { + log('No InService HyperPod EKS clusters found') + } + + return { + content: [{ + type: 'text', + text: JSON.stringify(result) + }] + } + } catch (err) { + log(`Error querying clusters: ${err.message}`) + const errorResult = { + values: {}, + choices: { hyperPodCluster: [] }, + error: err.message, + message: 
`Failed to query HyperPod clusters: ${err.message}` + } + return { + content: [{ + type: 'text', + text: JSON.stringify(errorResult) + }] + } + } + } +) + +// Export for testing +export { fetchHyperPodClusters, buildResponse, createSageMakerClient, _ensureSdkLoaded } + +// Guard MCP transport โ€” only connect when run as main module +const __filename = fileURLToPath(import.meta.url) +const isMain = process.argv[1] && resolve(process.argv[1]) === __filename + +if (isMain) { + log('Starting HyperPod Cluster Picker MCP server') + await _ensureSdkLoaded() + const transport = new StdioServerTransport() + await server.connect(transport) +} diff --git a/servers/hyperpod-cluster-picker/package-lock.json b/servers/hyperpod-cluster-picker/package-lock.json new file mode 100644 index 0000000..d212e7b --- /dev/null +++ b/servers/hyperpod-cluster-picker/package-lock.json @@ -0,0 +1,2515 @@ +{ + "name": "@ml-container-creator/hyperpod-cluster-picker", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@ml-container-creator/hyperpod-cluster-picker", + "version": "1.0.0", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/client-sagemaker": "^3.700.0", + "@aws-sdk/credential-providers": "^3.700.0", + "@modelcontextprotocol/sdk": "^1.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz", + "integrity": "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-js": "^5.2.0", + "@aws-crypto/supports-web-crypto": "^5.2.0", + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-locate-window": "^3.0.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + 
"resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz", + "integrity": "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-crypto/supports-web-crypto": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz", + "integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==", + 
"license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/util": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz", + "integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.222.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity": { + "version": "3.1009.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-cognito-identity/-/client-cognito-identity-3.1009.0.tgz", + 
"integrity": "sha512-4+lBLB2sIdrnGruxqsfPM2AOSME3DVDRX7R5OXcgd2jfrUSyYdHJN1kT9hO/vwK4uajRZsQNP5hLJMttkjkoAQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "^3.973.20", + "@aws-sdk/credential-provider-node": "^3.972.21", + "@aws-sdk/middleware-host-header": "^3.972.8", + "@aws-sdk/middleware-logger": "^3.972.8", + "@aws-sdk/middleware-recursion-detection": "^3.972.8", + "@aws-sdk/middleware-user-agent": "^3.972.21", + "@aws-sdk/region-config-resolver": "^3.972.8", + "@aws-sdk/types": "^3.973.6", + "@aws-sdk/util-endpoints": "^3.996.5", + "@aws-sdk/util-user-agent-browser": "^3.972.8", + "@aws-sdk/util-user-agent-node": "^3.973.7", + "@smithy/config-resolver": "^4.4.11", + "@smithy/core": "^3.23.11", + "@smithy/fetch-http-handler": "^5.3.15", + "@smithy/hash-node": "^4.2.12", + "@smithy/invalid-dependency": "^4.2.12", + "@smithy/middleware-content-length": "^4.2.12", + "@smithy/middleware-endpoint": "^4.4.25", + "@smithy/middleware-retry": "^4.4.42", + "@smithy/middleware-serde": "^4.2.14", + "@smithy/middleware-stack": "^4.2.12", + "@smithy/node-config-provider": "^4.3.12", + "@smithy/node-http-handler": "^4.4.16", + "@smithy/protocol-http": "^5.3.12", + "@smithy/smithy-client": "^4.12.5", + "@smithy/types": "^4.13.1", + "@smithy/url-parser": "^4.2.12", + "@smithy/util-base64": "^4.3.2", + "@smithy/util-body-length-browser": "^4.2.2", + "@smithy/util-body-length-node": "^4.2.3", + "@smithy/util-defaults-mode-browser": "^4.3.41", + "@smithy/util-defaults-mode-node": "^4.2.44", + "@smithy/util-endpoints": "^3.3.3", + "@smithy/util-middleware": "^4.2.12", + "@smithy/util-retry": "^4.2.12", + "@smithy/util-utf8": "^4.2.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/client-sagemaker": { + "version": "3.1009.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sagemaker/-/client-sagemaker-3.1009.0.tgz", + 
"integrity": "sha512-+UOiguzb8iX3qVpWdrw+3iU/E6UkhriCXWYjhJ8RQVaoeINPm9naSFsyUrMT2SfcjdJkjwXCtlDErUuY+Asi8g==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "^3.973.20", + "@aws-sdk/credential-provider-node": "^3.972.21", + "@aws-sdk/middleware-host-header": "^3.972.8", + "@aws-sdk/middleware-logger": "^3.972.8", + "@aws-sdk/middleware-recursion-detection": "^3.972.8", + "@aws-sdk/middleware-user-agent": "^3.972.21", + "@aws-sdk/region-config-resolver": "^3.972.8", + "@aws-sdk/types": "^3.973.6", + "@aws-sdk/util-endpoints": "^3.996.5", + "@aws-sdk/util-user-agent-browser": "^3.972.8", + "@aws-sdk/util-user-agent-node": "^3.973.7", + "@smithy/config-resolver": "^4.4.11", + "@smithy/core": "^3.23.11", + "@smithy/fetch-http-handler": "^5.3.15", + "@smithy/hash-node": "^4.2.12", + "@smithy/invalid-dependency": "^4.2.12", + "@smithy/middleware-content-length": "^4.2.12", + "@smithy/middleware-endpoint": "^4.4.25", + "@smithy/middleware-retry": "^4.4.42", + "@smithy/middleware-serde": "^4.2.14", + "@smithy/middleware-stack": "^4.2.12", + "@smithy/node-config-provider": "^4.3.12", + "@smithy/node-http-handler": "^4.4.16", + "@smithy/protocol-http": "^5.3.12", + "@smithy/smithy-client": "^4.12.5", + "@smithy/types": "^4.13.1", + "@smithy/url-parser": "^4.2.12", + "@smithy/util-base64": "^4.3.2", + "@smithy/util-body-length-browser": "^4.2.2", + "@smithy/util-body-length-node": "^4.2.3", + "@smithy/util-defaults-mode-browser": "^4.3.41", + "@smithy/util-defaults-mode-node": "^4.2.44", + "@smithy/util-endpoints": "^3.3.3", + "@smithy/util-middleware": "^4.2.12", + "@smithy/util-retry": "^4.2.12", + "@smithy/util-utf8": "^4.2.2", + "@smithy/util-waiter": "^4.2.13", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/core": { + "version": "3.973.20", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.973.20.tgz", + 
"integrity": "sha512-i3GuX+lowD892F3IuJf8o6AbyDupMTdyTxQrCJGcn71ni5hTZ82L4nQhcdumxZ7XPJRJJVHS/CR3uYOIIs0PVA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.973.6", + "@aws-sdk/xml-builder": "^3.972.11", + "@smithy/core": "^3.23.11", + "@smithy/node-config-provider": "^4.3.12", + "@smithy/property-provider": "^4.2.12", + "@smithy/protocol-http": "^5.3.12", + "@smithy/signature-v4": "^5.3.12", + "@smithy/smithy-client": "^4.12.5", + "@smithy/types": "^4.13.1", + "@smithy/util-base64": "^4.3.2", + "@smithy/util-middleware": "^4.2.12", + "@smithy/util-utf8": "^4.2.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-cognito-identity": { + "version": "3.972.13", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-cognito-identity/-/credential-provider-cognito-identity-3.972.13.tgz", + "integrity": "sha512-WZnIK8NPX+4OXkpVoNmUS+Ya1osqjszUsDqFEz97+a/LD5K012np9iR/eWEC43btx8zQjyRIK8kyiwbh8SiHzg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/nested-clients": "^3.996.10", + "@aws-sdk/types": "^3.973.6", + "@smithy/property-provider": "^4.2.12", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-env": { + "version": "3.972.18", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.972.18.tgz", + "integrity": "sha512-X0B8AlQY507i5DwjLByeU2Af4ARsl9Vr84koDcXCbAkplmU+1xBFWxEPrWRAoh56waBne/yJqEloSwvRf4x6XA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "^3.973.20", + "@aws-sdk/types": "^3.973.6", + "@smithy/property-provider": "^4.2.12", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-http": { + "version": "3.972.20", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.972.20.tgz", + "integrity": "sha512-ey9Lelj001+oOfrbKmS6R2CJAiXX7QKY4Vj9VJv6L2eE6/VjD8DocHIoYqztTm70xDLR4E1jYPTKfIui+eRNDA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "^3.973.20", + "@aws-sdk/types": "^3.973.6", + "@smithy/fetch-http-handler": "^5.3.15", + "@smithy/node-http-handler": "^4.4.16", + "@smithy/property-provider": "^4.2.12", + "@smithy/protocol-http": "^5.3.12", + "@smithy/smithy-client": "^4.12.5", + "@smithy/types": "^4.13.1", + "@smithy/util-stream": "^4.5.19", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.972.20", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.972.20.tgz", + "integrity": "sha512-5flXSnKHMloObNF+9N0cupKegnH1Z37cdVlpETVgx8/rAhCe+VNlkcZH3HDg2SDn9bI765S+rhNPXGDJJPfbtA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "^3.973.20", + "@aws-sdk/credential-provider-env": "^3.972.18", + "@aws-sdk/credential-provider-http": "^3.972.20", + "@aws-sdk/credential-provider-login": "^3.972.20", + "@aws-sdk/credential-provider-process": "^3.972.18", + "@aws-sdk/credential-provider-sso": "^3.972.20", + "@aws-sdk/credential-provider-web-identity": "^3.972.20", + "@aws-sdk/nested-clients": "^3.996.10", + "@aws-sdk/types": "^3.973.6", + "@smithy/credential-provider-imds": "^4.2.12", + "@smithy/property-provider": "^4.2.12", + "@smithy/shared-ini-file-loader": "^4.4.7", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-login": { + "version": "3.972.20", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-login/-/credential-provider-login-3.972.20.tgz", + "integrity": 
"sha512-gEWo54nfqp2jABMu6HNsjVC4hDLpg9HC8IKSJnp0kqWtxIJYHTmiLSsIfI4ScQjxEwpB+jOOH8dOLax1+hy/Hw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "^3.973.20", + "@aws-sdk/nested-clients": "^3.996.10", + "@aws-sdk/types": "^3.973.6", + "@smithy/property-provider": "^4.2.12", + "@smithy/protocol-http": "^5.3.12", + "@smithy/shared-ini-file-loader": "^4.4.7", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-node": { + "version": "3.972.21", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.972.21.tgz", + "integrity": "sha512-hah8if3/B/Q+LBYN5FukyQ1Mym6PLPDsBOBsIgNEYD6wLyZg0UmUF/OKIVC3nX9XH8TfTPuITK+7N/jenVACWA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/credential-provider-env": "^3.972.18", + "@aws-sdk/credential-provider-http": "^3.972.20", + "@aws-sdk/credential-provider-ini": "^3.972.20", + "@aws-sdk/credential-provider-process": "^3.972.18", + "@aws-sdk/credential-provider-sso": "^3.972.20", + "@aws-sdk/credential-provider-web-identity": "^3.972.20", + "@aws-sdk/types": "^3.973.6", + "@smithy/credential-provider-imds": "^4.2.12", + "@smithy/property-provider": "^4.2.12", + "@smithy/shared-ini-file-loader": "^4.4.7", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-process": { + "version": "3.972.18", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.972.18.tgz", + "integrity": "sha512-Tpl7SRaPoOLT32jbTWchPsn52hYYgJ0kpiFgnwk8pxTANQdUymVSZkzFvv1+oOgZm1CrbQUP9MBeoMZ9IzLZjA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "^3.973.20", + "@aws-sdk/types": "^3.973.6", + "@smithy/property-provider": "^4.2.12", + "@smithy/shared-ini-file-loader": "^4.4.7", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, 
+ "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.972.20", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.972.20.tgz", + "integrity": "sha512-p+R+PYR5Z7Gjqf/6pvbCnzEHcqPCpLzR7Yf127HjJ6EAb4hUcD+qsNRnuww1sB/RmSeCLxyay8FMyqREw4p1RA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "^3.973.20", + "@aws-sdk/nested-clients": "^3.996.10", + "@aws-sdk/token-providers": "3.1009.0", + "@aws-sdk/types": "^3.973.6", + "@smithy/property-provider": "^4.2.12", + "@smithy/shared-ini-file-loader": "^4.4.7", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-web-identity": { + "version": "3.972.20", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.972.20.tgz", + "integrity": "sha512-rWCmh8o7QY4CsUj63qopzMzkDq/yPpkrpb+CnjBEFSOg/02T/we7sSTVg4QsDiVS9uwZ8VyONhq98qt+pIh3KA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "^3.973.20", + "@aws-sdk/nested-clients": "^3.996.10", + "@aws-sdk/types": "^3.973.6", + "@smithy/property-provider": "^4.2.12", + "@smithy/shared-ini-file-loader": "^4.4.7", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/credential-providers": { + "version": "3.1009.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-providers/-/credential-providers-3.1009.0.tgz", + "integrity": "sha512-URs590x3cCOHvjgDz8J0iqxDQ87NjF550pnWmd1jQm+w5ZHTxuUEYjivBQQkAB603IlEdeadqO1+LSjtMznhwA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/client-cognito-identity": "3.1009.0", + "@aws-sdk/core": "^3.973.20", + "@aws-sdk/credential-provider-cognito-identity": "^3.972.13", + "@aws-sdk/credential-provider-env": "^3.972.18", + "@aws-sdk/credential-provider-http": 
"^3.972.20", + "@aws-sdk/credential-provider-ini": "^3.972.20", + "@aws-sdk/credential-provider-login": "^3.972.20", + "@aws-sdk/credential-provider-node": "^3.972.21", + "@aws-sdk/credential-provider-process": "^3.972.18", + "@aws-sdk/credential-provider-sso": "^3.972.20", + "@aws-sdk/credential-provider-web-identity": "^3.972.20", + "@aws-sdk/nested-clients": "^3.996.10", + "@aws-sdk/types": "^3.973.6", + "@smithy/config-resolver": "^4.4.11", + "@smithy/core": "^3.23.11", + "@smithy/credential-provider-imds": "^4.2.12", + "@smithy/node-config-provider": "^4.3.12", + "@smithy/property-provider": "^4.2.12", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/middleware-host-header": { + "version": "3.972.8", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.972.8.tgz", + "integrity": "sha512-wAr2REfKsqoKQ+OkNqvOShnBoh+nkPurDKW7uAeVSu6kUECnWlSJiPvnoqxGlfousEY/v9LfS9sNc46hjSYDIQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.973.6", + "@smithy/protocol-http": "^5.3.12", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/middleware-logger": { + "version": "3.972.8", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.972.8.tgz", + "integrity": "sha512-CWl5UCM57WUFaFi5kB7IBY1UmOeLvNZAZ2/OZ5l20ldiJ3TiIz1pC65gYj8X0BCPWkeR1E32mpsCk1L1I4n+lA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.973.6", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/middleware-recursion-detection": { + "version": "3.972.8", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.972.8.tgz", + "integrity": 
"sha512-BnnvYs2ZEpdlmZ2PNlV2ZyQ8j8AEkMTjN79y/YA475ER1ByFYrkVR85qmhni8oeTaJcDqbx364wDpitDAA/wCA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.973.6", + "@aws/lambda-invoke-store": "^0.2.2", + "@smithy/protocol-http": "^5.3.12", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/middleware-user-agent": { + "version": "3.972.21", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.972.21.tgz", + "integrity": "sha512-62XRl1GDYPpkt7cx1AX1SPy9wgNE9Iw/NPuurJu4lmhCWS7sGKO+kS53TQ8eRmIxy3skmvNInnk0ZbWrU5Dpyg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "^3.973.20", + "@aws-sdk/types": "^3.973.6", + "@aws-sdk/util-endpoints": "^3.996.5", + "@smithy/core": "^3.23.11", + "@smithy/protocol-http": "^5.3.12", + "@smithy/types": "^4.13.1", + "@smithy/util-retry": "^4.2.12", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/nested-clients": { + "version": "3.996.10", + "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.996.10.tgz", + "integrity": "sha512-SlDol5Z+C7Ivnc2rKGqiqfSUmUZzY1qHfVs9myt/nxVwswgfpjdKahyTzLTx802Zfq0NFRs7AejwKzzzl5Co2w==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "^3.973.20", + "@aws-sdk/middleware-host-header": "^3.972.8", + "@aws-sdk/middleware-logger": "^3.972.8", + "@aws-sdk/middleware-recursion-detection": "^3.972.8", + "@aws-sdk/middleware-user-agent": "^3.972.21", + "@aws-sdk/region-config-resolver": "^3.972.8", + "@aws-sdk/types": "^3.973.6", + "@aws-sdk/util-endpoints": "^3.996.5", + "@aws-sdk/util-user-agent-browser": "^3.972.8", + "@aws-sdk/util-user-agent-node": "^3.973.7", + "@smithy/config-resolver": "^4.4.11", + "@smithy/core": "^3.23.11", + "@smithy/fetch-http-handler": "^5.3.15", + 
"@smithy/hash-node": "^4.2.12", + "@smithy/invalid-dependency": "^4.2.12", + "@smithy/middleware-content-length": "^4.2.12", + "@smithy/middleware-endpoint": "^4.4.25", + "@smithy/middleware-retry": "^4.4.42", + "@smithy/middleware-serde": "^4.2.14", + "@smithy/middleware-stack": "^4.2.12", + "@smithy/node-config-provider": "^4.3.12", + "@smithy/node-http-handler": "^4.4.16", + "@smithy/protocol-http": "^5.3.12", + "@smithy/smithy-client": "^4.12.5", + "@smithy/types": "^4.13.1", + "@smithy/url-parser": "^4.2.12", + "@smithy/util-base64": "^4.3.2", + "@smithy/util-body-length-browser": "^4.2.2", + "@smithy/util-body-length-node": "^4.2.3", + "@smithy/util-defaults-mode-browser": "^4.3.41", + "@smithy/util-defaults-mode-node": "^4.2.44", + "@smithy/util-endpoints": "^3.3.3", + "@smithy/util-middleware": "^4.2.12", + "@smithy/util-retry": "^4.2.12", + "@smithy/util-utf8": "^4.2.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/region-config-resolver": { + "version": "3.972.8", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.972.8.tgz", + "integrity": "sha512-1eD4uhTDeambO/PNIDVG19A6+v4NdD7xzwLHDutHsUqz0B+i661MwQB2eYO4/crcCvCiQG4SRm1k81k54FEIvw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.973.6", + "@smithy/config-resolver": "^4.4.11", + "@smithy/node-config-provider": "^4.3.12", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/token-providers": { + "version": "3.1009.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.1009.0.tgz", + "integrity": "sha512-KCPLuTqN9u0Rr38Arln78fRG9KXpzsPWmof+PZzfAHMMQq2QED6YjQrkrfiH7PDefLWEposY1o4/eGwrmKA4JA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "^3.973.20", + "@aws-sdk/nested-clients": "^3.996.10", + "@aws-sdk/types": "^3.973.6", + "@smithy/property-provider": 
"^4.2.12", + "@smithy/shared-ini-file-loader": "^4.4.7", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/types": { + "version": "3.973.6", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.973.6.tgz", + "integrity": "sha512-Atfcy4E++beKtwJHiDln2Nby8W/mam64opFPTiHEqgsthqeydFS1pY+OUlN1ouNOmf8ArPU/6cDS65anOP3KQw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/util-endpoints": { + "version": "3.996.5", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.996.5.tgz", + "integrity": "sha512-Uh93L5sXFNbyR5sEPMzUU8tJ++Ku97EY4udmC01nB8Zu+xfBPwpIwJ6F7snqQeq8h2pf+8SGN5/NoytfKgYPIw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.973.6", + "@smithy/types": "^4.13.1", + "@smithy/url-parser": "^4.2.12", + "@smithy/util-endpoints": "^3.3.3", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/util-locate-window": { + "version": "3.965.5", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.965.5.tgz", + "integrity": "sha512-WhlJNNINQB+9qtLtZJcpQdgZw3SCDCpXdUJP7cToGwHbCWCnRckGlc6Bx/OhWwIYFNAn+FIydY8SZ0QmVu3xTQ==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws-sdk/util-user-agent-browser": { + "version": "3.972.8", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.972.8.tgz", + "integrity": "sha512-B3KGXJviV2u6Cdw2SDY2aDhoJkVfY/Q/Trwk2CMSkikE1Oi6gRzxhvhIfiRpHfmIsAhV4EA54TVEX8K6CbHbkA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.973.6", + "@smithy/types": "^4.13.1", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + } + }, + 
"node_modules/@aws-sdk/util-user-agent-node": { + "version": "3.973.7", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.973.7.tgz", + "integrity": "sha512-Hz6EZMUAEzqUd7e+vZ9LE7mn+5gMbxltXy18v+YSFY+9LBJz15wkNZvw5JqfX3z0FS9n3bgUtz3L5rAsfh4YlA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/middleware-user-agent": "^3.972.21", + "@aws-sdk/types": "^3.973.6", + "@smithy/node-config-provider": "^4.3.12", + "@smithy/types": "^4.13.1", + "@smithy/util-config-provider": "^4.2.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } + } + }, + "node_modules/@aws-sdk/xml-builder": { + "version": "3.972.11", + "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.972.11.tgz", + "integrity": "sha512-iitV/gZKQMvY9d7ovmyFnFuTHbBAtrmLnvaSb/3X8vOKyevwtpmEtyc8AdhVWZe0pI/1GsHxlEvQeOePFzy7KQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.13.1", + "fast-xml-parser": "5.4.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@aws/lambda-invoke-store": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/@aws/lambda-invoke-store/-/lambda-invoke-store-0.2.4.tgz", + "integrity": "sha512-iY8yvjE0y651BixKNPgmv1WrQc+GZ142sb0z4gYnChDDY2YqI4P/jsSopBWrKfAt7LOJAkOXt7rC/hms+WclQQ==", + "license": "Apache-2.0", + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@hono/node-server": { + "version": "1.19.11", + "resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.19.11.tgz", + "integrity": "sha512-dr8/3zEaB+p0D2n/IUrlPF1HZm586qgJNXK1a9fhg/PzdtkK7Ksd5l312tJX2yBuALqDYBlG20QEbayqPyxn+g==", + "license": "MIT", + "engines": { + "node": ">=18.14.1" + }, + "peerDependencies": { + "hono": "^4" + } + }, + "node_modules/@modelcontextprotocol/sdk": { + "version": "1.27.1", + "resolved": 
"https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.27.1.tgz", + "integrity": "sha512-sr6GbP+4edBwFndLbM60gf07z0FQ79gaExpnsjMGePXqFcSSb7t6iscpjk9DhFhwd+mTEQrzNafGP8/iGGFYaA==", + "license": "MIT", + "dependencies": { + "@hono/node-server": "^1.19.9", + "ajv": "^8.17.1", + "ajv-formats": "^3.0.1", + "content-type": "^1.0.5", + "cors": "^2.8.5", + "cross-spawn": "^7.0.5", + "eventsource": "^3.0.2", + "eventsource-parser": "^3.0.0", + "express": "^5.2.1", + "express-rate-limit": "^8.2.1", + "hono": "^4.11.4", + "jose": "^6.1.3", + "json-schema-typed": "^8.0.2", + "pkce-challenge": "^5.0.0", + "raw-body": "^3.0.0", + "zod": "^3.25 || ^4.0", + "zod-to-json-schema": "^3.25.1" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@cfworker/json-schema": "^4.1.1", + "zod": "^3.25 || ^4.0" + }, + "peerDependenciesMeta": { + "@cfworker/json-schema": { + "optional": true + }, + "zod": { + "optional": false + } + } + }, + "node_modules/@smithy/abort-controller": { + "version": "4.2.12", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.2.12.tgz", + "integrity": "sha512-xolrFw6b+2iYGl6EcOL7IJY71vvyZ0DJ3mcKtpykqPe2uscwtzDZJa1uVQXyP7w9Dd+kGwYnPbMsJrGISKiY/Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/config-resolver": { + "version": "4.4.11", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.11.tgz", + "integrity": "sha512-YxFiiG4YDAtX7WMN7RuhHZLeTmRRAOyCbr+zB8e3AQzHPnUhS8zXjB1+cniPVQI3xbWsQPM0X2aaIkO/ME0ymw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.3.12", + "@smithy/types": "^4.13.1", + "@smithy/util-config-provider": "^4.2.2", + "@smithy/util-endpoints": "^3.3.3", + "@smithy/util-middleware": "^4.2.12", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/core": { 
+ "version": "3.23.12", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.23.12.tgz", + "integrity": "sha512-o9VycsYNtgC+Dy3I0yrwCqv9CWicDnke0L7EVOrZtJpjb2t0EjaEofmMrYc0T1Kn3yk32zm6cspxF9u9Bj7e5w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.3.12", + "@smithy/types": "^4.13.1", + "@smithy/url-parser": "^4.2.12", + "@smithy/util-base64": "^4.3.2", + "@smithy/util-body-length-browser": "^4.2.2", + "@smithy/util-middleware": "^4.2.12", + "@smithy/util-stream": "^4.5.20", + "@smithy/util-utf8": "^4.2.2", + "@smithy/uuid": "^1.1.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/credential-provider-imds": { + "version": "4.2.12", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.2.12.tgz", + "integrity": "sha512-cr2lR792vNZcYMriSIj+Um3x9KWrjcu98kn234xA6reOAFMmbRpQMOv8KPgEmLLtx3eldU6c5wALKFqNOhugmg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.3.12", + "@smithy/property-provider": "^4.2.12", + "@smithy/types": "^4.13.1", + "@smithy/url-parser": "^4.2.12", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/fetch-http-handler": { + "version": "5.3.15", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.3.15.tgz", + "integrity": "sha512-T4jFU5N/yiIfrtrsb9uOQn7RdELdM/7HbyLNr6uO/mpkj1ctiVs7CihVr51w4LyQlXWDpXFn4BElf1WmQvZu/A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.3.12", + "@smithy/querystring-builder": "^4.2.12", + "@smithy/types": "^4.13.1", + "@smithy/util-base64": "^4.3.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/hash-node": { + "version": "4.2.12", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.2.12.tgz", + "integrity": 
"sha512-QhBYbGrbxTkZ43QoTPrK72DoYviDeg6YKDrHTMJbbC+A0sml3kSjzFtXP7BtbyJnXojLfTQldGdUR0RGD8dA3w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.13.1", + "@smithy/util-buffer-from": "^4.2.2", + "@smithy/util-utf8": "^4.2.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/invalid-dependency": { + "version": "4.2.12", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.2.12.tgz", + "integrity": "sha512-/4F1zb7Z8LOu1PalTdESFHR0RbPwHd3FcaG1sI3UEIriQTWakysgJr65lc1jj6QY5ye7aFsisajotH6UhWfm/g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/is-array-buffer": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-4.2.2.tgz", + "integrity": "sha512-n6rQ4N8Jj4YTQO3YFrlgZuwKodf4zUFs7EJIWH86pSCWBaAtAGBFfCM7Wx6D2bBJ2xqFNxGBSrUWswT3M0VJow==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-content-length": { + "version": "4.2.12", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.2.12.tgz", + "integrity": "sha512-YE58Yz+cvFInWI/wOTrB+DbvUVz/pLn5mC5MvOV4fdRUc6qGwygyngcucRQjAhiCEbmfLOXX0gntSIcgMvAjmA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.3.12", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-endpoint": { + "version": "4.4.26", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.4.26.tgz", + "integrity": "sha512-8Qfikvd2GVKSm8S6IbjfwFlRY9VlMrj0Dp4vTwAuhqbX7NhJKE5DQc2bnfJIcY0B+2YKMDBWfvexbSZeejDgeg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": 
"^3.23.12", + "@smithy/middleware-serde": "^4.2.15", + "@smithy/node-config-provider": "^4.3.12", + "@smithy/shared-ini-file-loader": "^4.4.7", + "@smithy/types": "^4.13.1", + "@smithy/url-parser": "^4.2.12", + "@smithy/util-middleware": "^4.2.12", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-retry": { + "version": "4.4.43", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.4.43.tgz", + "integrity": "sha512-ZwsifBdyuNHrFGmbc7bAfP2b54+kt9J2rhFd18ilQGAB+GDiP4SrawqyExbB7v455QVR7Psyhb2kjULvBPIhvA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.3.12", + "@smithy/protocol-http": "^5.3.12", + "@smithy/service-error-classification": "^4.2.12", + "@smithy/smithy-client": "^4.12.6", + "@smithy/types": "^4.13.1", + "@smithy/util-middleware": "^4.2.12", + "@smithy/util-retry": "^4.2.12", + "@smithy/uuid": "^1.1.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-serde": { + "version": "4.2.15", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.2.15.tgz", + "integrity": "sha512-ExYhcltZSli0pgAKOpQQe1DLFBLryeZ22605y/YS+mQpdNWekum9Ujb/jMKfJKgjtz1AZldtwA/wCYuKJgjjlg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.23.12", + "@smithy/protocol-http": "^5.3.12", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-stack": { + "version": "4.2.12", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.2.12.tgz", + "integrity": "sha512-kruC5gRHwsCOuyCd4ouQxYjgRAym2uDlCvQ5acuMtRrcdfg7mFBg6blaxcJ09STpt3ziEkis6bhg1uwrWU7txw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/node-config-provider": { + 
"version": "4.3.12", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.12.tgz", + "integrity": "sha512-tr2oKX2xMcO+rBOjobSwVAkV05SIfUKz8iI53rzxEmgW3GOOPOv0UioSDk+J8OpRQnpnhsO3Af6IEBabQBVmiw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.2.12", + "@smithy/shared-ini-file-loader": "^4.4.7", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/node-http-handler": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.5.0.tgz", + "integrity": "sha512-Rnq9vQWiR1+/I6NZZMNzJHV6pZYyEHt2ZnuV3MG8z2NNenC4i/8Kzttz7CjZiHSmsN5frhXhg17z3Zqjjhmz1A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/abort-controller": "^4.2.12", + "@smithy/protocol-http": "^5.3.12", + "@smithy/querystring-builder": "^4.2.12", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/property-provider": { + "version": "4.2.12", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.12.tgz", + "integrity": "sha512-jqve46eYU1v7pZ5BM+fmkbq3DerkSluPr5EhvOcHxygxzD05ByDRppRwRPPpFrsFo5yDtCYLKu+kreHKVrvc7A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/protocol-http": { + "version": "5.3.12", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.3.12.tgz", + "integrity": "sha512-fit0GZK9I1xoRlR4jXmbLhoN0OdEpa96ul8M65XdmXnxXkuMxM0Y8HDT0Fh0Xb4I85MBvBClOzgSrV1X2s1Hxw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/querystring-builder": { + "version": "4.2.12", + "resolved": 
"https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.2.12.tgz", + "integrity": "sha512-6wTZjGABQufekycfDGMEB84BgtdOE/rCVTov+EDXQ8NHKTUNIp/j27IliwP7tjIU9LR+sSzyGBOXjeEtVgzCHg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.13.1", + "@smithy/util-uri-escape": "^4.2.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/querystring-parser": { + "version": "4.2.12", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.2.12.tgz", + "integrity": "sha512-P2OdvrgiAKpkPNKlKUtWbNZKB1XjPxM086NeVhK+W+wI46pIKdWBe5QyXvhUm3MEcyS/rkLvY8rZzyUdmyDZBw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/service-error-classification": { + "version": "4.2.12", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.2.12.tgz", + "integrity": "sha512-LlP29oSQN0Tw0b6D0Xo6BIikBswuIiGYbRACy5ujw/JgWSzTdYj46U83ssf6Ux0GyNJVivs2uReU8pt7Eu9okQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.13.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/shared-ini-file-loader": { + "version": "4.4.7", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.7.tgz", + "integrity": "sha512-HrOKWsUb+otTeo1HxVWeEb99t5ER1XrBi/xka2Wv6NVmTbuCUC1dvlrksdvxFtODLBjsC+PHK+fuy2x/7Ynyiw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/signature-v4": { + "version": "5.3.12", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.3.12.tgz", + "integrity": "sha512-B/FBwO3MVOL00DaRSXfXfa/TRXRheagt/q5A2NM13u7q+sHS59EOVGQNfG7DkmVtdQm5m3vOosoKAXSqn/OEgw==", + "license": 
"Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.2.2", + "@smithy/protocol-http": "^5.3.12", + "@smithy/types": "^4.13.1", + "@smithy/util-hex-encoding": "^4.2.2", + "@smithy/util-middleware": "^4.2.12", + "@smithy/util-uri-escape": "^4.2.2", + "@smithy/util-utf8": "^4.2.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/smithy-client": { + "version": "4.12.6", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.12.6.tgz", + "integrity": "sha512-aib3f0jiMsJ6+cvDnXipBsGDL7ztknYSVqJs1FdN9P+u9tr/VzOR7iygSh6EUOdaBeMCMSh3N0VdyYsG4o91DQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.23.12", + "@smithy/middleware-endpoint": "^4.4.26", + "@smithy/middleware-stack": "^4.2.12", + "@smithy/protocol-http": "^5.3.12", + "@smithy/types": "^4.13.1", + "@smithy/util-stream": "^4.5.20", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/types": { + "version": "4.13.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.13.1.tgz", + "integrity": "sha512-787F3yzE2UiJIQ+wYW1CVg2odHjmaWLGksnKQHUrK/lYZSEcy1msuLVvxaR/sI2/aDe9U+TBuLsXnr3vod1g0g==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/url-parser": { + "version": "4.2.12", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.2.12.tgz", + "integrity": "sha512-wOPKPEpso+doCZGIlr+e1lVI6+9VAKfL4kZWFgzVgGWY2hZxshNKod4l2LXS3PRC9otH/JRSjtEHqQ/7eLciRA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/querystring-parser": "^4.2.12", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-base64": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-4.3.2.tgz", + "integrity": 
"sha512-XRH6b0H/5A3SgblmMa5ErXQ2XKhfbQB+Fm/oyLZ2O2kCUrwgg55bU0RekmzAhuwOjA9qdN5VU2BprOvGGUkOOQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.2.2", + "@smithy/util-utf8": "^4.2.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-body-length-browser": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-browser/-/util-body-length-browser-4.2.2.tgz", + "integrity": "sha512-JKCrLNOup3OOgmzeaKQwi4ZCTWlYR5H4Gm1r2uTMVBXoemo1UEghk5vtMi1xSu2ymgKVGW631e2fp9/R610ZjQ==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-body-length-node": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-node/-/util-body-length-node-4.2.3.tgz", + "integrity": "sha512-ZkJGvqBzMHVHE7r/hcuCxlTY8pQr1kMtdsVPs7ex4mMU+EAbcXppfo5NmyxMYi2XU49eqaz56j2gsk4dHHPG/g==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-buffer-from": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-4.2.2.tgz", + "integrity": "sha512-FDXD7cvUoFWwN6vtQfEta540Y/YBe5JneK3SoZg9bThSoOAC/eGeYEua6RkBgKjGa/sz6Y+DuBZj3+YEY21y4Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.2.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-config-provider": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-4.2.2.tgz", + "integrity": "sha512-dWU03V3XUprJwaUIFVv4iOnS1FC9HnMHDfUrlNDSh4315v0cWyaIErP8KiqGVbf5z+JupoVpNM7ZB3jFiTejvQ==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + 
"node_modules/@smithy/util-defaults-mode-browser": { + "version": "4.3.42", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.3.42.tgz", + "integrity": "sha512-0vjwmcvkWAUtikXnWIUOyV6IFHTEeQUYh3JUZcDgcszF+hD/StAsQ3rCZNZEPHgI9kVNcbnyc8P2CBHnwgmcwg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.2.12", + "@smithy/smithy-client": "^4.12.6", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-defaults-mode-node": { + "version": "4.2.45", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.2.45.tgz", + "integrity": "sha512-q5dOqqfTgUcLe38TAGiFn9srToKj2YCHJ34QGOLzM+xYLLA+qRZv7N+33kl1MERVusue36ZHnlNaNEvY/PzSrw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/config-resolver": "^4.4.11", + "@smithy/credential-provider-imds": "^4.2.12", + "@smithy/node-config-provider": "^4.3.12", + "@smithy/property-provider": "^4.2.12", + "@smithy/smithy-client": "^4.12.6", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-endpoints": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.3.3.tgz", + "integrity": "sha512-VACQVe50j0HZPjpwWcjyT51KUQ4AnsvEaQ2lKHOSL4mNLD0G9BjEniQ+yCt1qqfKfiAHRAts26ud7hBjamrwig==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.3.12", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-hex-encoding": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-4.2.2.tgz", + "integrity": "sha512-Qcz3W5vuHK4sLQdyT93k/rfrUwdJ8/HZ+nMUOyGdpeGA1Wxt65zYwi3oEl9kOM+RswvYq90fzkNDahPS8K0OIg==", + "license": "Apache-2.0", + "dependencies": { + 
"tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-middleware": { + "version": "4.2.12", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.12.tgz", + "integrity": "sha512-Er805uFUOvgc0l8nv0e0su0VFISoxhJ/AwOn3gL2NWNY2LUEldP5WtVcRYSQBcjg0y9NfG8JYrCJaYDpupBHJQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-retry": { + "version": "4.2.12", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.2.12.tgz", + "integrity": "sha512-1zopLDUEOwumjcHdJ1mwBHddubYF8GMQvstVCLC54Y46rqoHwlIU+8ZzUeaBcD+WCJHyDGSeZ2ml9YSe9aqcoQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/service-error-classification": "^4.2.12", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-stream": { + "version": "4.5.20", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.20.tgz", + "integrity": "sha512-4yXLm5n/B5SRBR2p8cZ90Sbv4zL4NKsgxdzCzp/83cXw2KxLEumt5p+GAVyRNZgQOSrzXn9ARpO0lUe8XSlSDw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/fetch-http-handler": "^5.3.15", + "@smithy/node-http-handler": "^4.5.0", + "@smithy/types": "^4.13.1", + "@smithy/util-base64": "^4.3.2", + "@smithy/util-buffer-from": "^4.2.2", + "@smithy/util-hex-encoding": "^4.2.2", + "@smithy/util-utf8": "^4.2.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-uri-escape": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-4.2.2.tgz", + "integrity": "sha512-2kAStBlvq+lTXHyAZYfJRb/DfS3rsinLiwb+69SstC9Vb0s9vNWkRwpnj918Pfi85mzi42sOqdV72OLxWAISnw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + 
"node_modules/@smithy/util-utf8": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-4.2.2.tgz", + "integrity": "sha512-75MeYpjdWRe8M5E3AW0O4Cx3UadweS+cwdXjwYGBW5h/gxxnbeZ877sLPX/ZJA9GVTlL/qG0dXP29JWFCD1Ayw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.2.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-waiter": { + "version": "4.2.13", + "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.2.13.tgz", + "integrity": "sha512-2zdZ9DTHngRtcYxJK1GUDxruNr53kv5W2Lupe0LMU+Imr6ohQg8M2T14MNkj1Y0wS3FFwpgpGQyvuaMF7CiTmQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/abort-controller": "^4.2.12", + "@smithy/types": "^4.13.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/uuid": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@smithy/uuid/-/uuid-1.1.2.tgz", + "integrity": "sha512-O/IEdcCUKkubz60tFbGA7ceITTAJsty+lBjNoorP4Z6XRqaFb/OjQjZODophEcuq68nKm6/0r+6/lLQ+XVpk8g==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/accepts": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", + "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", + "license": "MIT", + "dependencies": { + "mime-types": "^3.0.0", + "negotiator": "^1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ajv": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.18.0.tgz", + "integrity": "sha512-PlXPeEWMXMZ7sPYOHqmDyCJzcfNrUr3fGNKtezX14ykXOEIvyK81d+qydx89KY5O71FKMPaQ2vBfBFI5NHR63A==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + 
"funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ajv-formats": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-3.0.1.tgz", + "integrity": "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==", + "license": "MIT", + "dependencies": { + "ajv": "^8.0.0" + }, + "peerDependencies": { + "ajv": "^8.0.0" + }, + "peerDependenciesMeta": { + "ajv": { + "optional": true + } + } + }, + "node_modules/body-parser": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.2.tgz", + "integrity": "sha512-oP5VkATKlNwcgvxi0vM0p/D3n2C3EReYVX+DNYs5TjZFn/oQt2j+4sVJtSMr18pdRr8wjTcBl6LoV+FUwzPmNA==", + "license": "MIT", + "dependencies": { + "bytes": "^3.1.2", + "content-type": "^1.0.5", + "debug": "^4.4.3", + "http-errors": "^2.0.0", + "iconv-lite": "^0.7.0", + "on-finished": "^2.4.1", + "qs": "^6.14.1", + "raw-body": "^3.0.1", + "type-is": "^2.0.1" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/bowser": { + "version": "2.14.1", + "resolved": "https://registry.npmjs.org/bowser/-/bowser-2.14.1.tgz", + "integrity": "sha512-tzPjzCxygAKWFOJP011oxFHs57HzIhOEracIgAePE4pqB3LikALKnSzUyU4MGs9/iCEUuHlAJTjTc5M+u7YEGg==", + "license": "MIT" + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": 
"sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/content-disposition": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.1.tgz", + "integrity": "sha512-oIXISMynqSqm241k6kcQ5UwttDILMK4BiurCfGEREw6+X9jkkpEe5T9FZaApyLGGOnFuyMWZpdolTXMtvEJ08Q==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", + "integrity": 
"sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", + "license": "MIT", + "engines": { + "node": ">=6.6.0" + } + }, + "node_modules/cors": { + "version": "2.8.6", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.6.tgz", + "integrity": "sha512-tJtZBBHA6vjIAaF6EnIaq6laBBP9aq/Y3ouVJjEfoHbRBcHBAHYcMh/w8LDrk2PvIMMq8gmopa5D4V8RmbrxGw==", + "license": "MIT", + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + 
"dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "license": "MIT" + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT" + }, + 
"node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/eventsource": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.7.tgz", + "integrity": "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA==", + "license": "MIT", + "dependencies": { + "eventsource-parser": "^3.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/eventsource-parser": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.6.tgz", + "integrity": "sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==", + "license": "MIT", + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/express": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/express/-/express-5.2.1.tgz", + "integrity": "sha512-hIS4idWWai69NezIdRt2xFVofaF4j+6INOpJlVOLDO8zXGpUVEVzIYk12UUi2JzjEzWL3IOAxcTubgz9Po0yXw==", + "license": "MIT", + "peer": true, + "dependencies": { + "accepts": "^2.0.0", + "body-parser": "^2.2.1", + "content-disposition": "^1.0.0", + "content-type": "^1.0.5", + "cookie": "^0.7.1", + "cookie-signature": "^1.2.1", + "debug": "^4.4.0", + "depd": "^2.0.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "finalhandler": "^2.1.0", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "merge-descriptors": "^2.0.0", + "mime-types": "^3.0.0", + "on-finished": "^2.4.1", + "once": "^1.4.0", + "parseurl": "^1.3.3", + "proxy-addr": "^2.0.7", + "qs": "^6.14.0", + "range-parser": "^1.2.1", + "router": "^2.2.0", + "send": "^1.1.0", + "serve-static": "^2.2.0", + "statuses": "^2.0.1", + "type-is": "^2.0.1", + "vary": "^1.1.2" + }, + "engines": { 
+ "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/express-rate-limit": { + "version": "8.3.1", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-8.3.1.tgz", + "integrity": "sha512-D1dKN+cmyPWuvB+G2SREQDzPY1agpBIcTa9sJxOPMCNeH3gwzhqJRDWCXW3gg0y//+LQ/8j52JbMROWyrKdMdw==", + "license": "MIT", + "dependencies": { + "ip-address": "10.1.0" + }, + "engines": { + "node": ">= 16" + }, + "funding": { + "url": "https://github.com/sponsors/express-rate-limit" + }, + "peerDependencies": { + "express": ">= 4.11" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "license": "MIT" + }, + "node_modules/fast-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz", + "integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/fast-xml-builder": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/fast-xml-builder/-/fast-xml-builder-1.1.4.tgz", + "integrity": "sha512-f2jhpN4Eccy0/Uz9csxh3Nu6q4ErKxf0XIsasomfOihuSUa3/xw6w8dnOtCDgEItQFJG8KyXPzQXzcODDrrbOg==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "license": "MIT", + "dependencies": { + "path-expression-matcher": "^1.1.3" + } + }, + "node_modules/fast-xml-parser": { + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.4.1.tgz", + "integrity": 
"sha512-BQ30U1mKkvXQXXkAGcuyUA/GA26oEB7NzOtsxCDtyu62sjGw5QraKFhx2Em3WQNjPw9PG6MQ9yuIIgkSDfGu5A==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "license": "MIT", + "dependencies": { + "fast-xml-builder": "^1.0.0", + "strnum": "^2.1.2" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, + "node_modules/finalhandler": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.1.tgz", + "integrity": "sha512-S8KoZgRZN+a5rNwqTxlZZePjT/4cnm0ROV70LedRHZ0p8u9fRID0hJUZQpkKLzro8LfmC8sx23bY6tVNxv8pQA==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "on-finished": "^2.4.1", + "parseurl": "^1.3.3", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", + "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": 
"https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + 
"dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/hono": { + "version": "4.12.8", + "resolved": "https://registry.npmjs.org/hono/-/hono-4.12.8.tgz", + "integrity": "sha512-VJCEvtrezO1IAR+kqEYnxUOoStaQPGrCmX3j4wDTNOcD1uRPFpGlwQUIW8niPuvHXaTUxeOUl5MMDGrl+tmO9A==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=16.9.0" + } + }, + "node_modules/http-errors": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz", + "integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==", + "license": "MIT", + "dependencies": { + "depd": "~2.0.0", + "inherits": "~2.0.4", + "setprototypeof": "~1.2.0", + "statuses": "~2.0.2", + "toidentifier": "~1.0.1" + }, + "engines": { + "node": ">= 0.8" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/iconv-lite": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.2.tgz", + "integrity": "sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/ip-address": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-10.1.0.tgz", + "integrity": "sha512-XXADHxXmvT9+CRxhXg56LJovE+bmWnEWB78LB83VZTprKTmaC5QfruXocxzTZ2Kl0DNwKuBdlIhjL8LeY8Sf8Q==", + "license": "MIT", + "engines": { + "node": ">= 12" + } + }, + 
"node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-promise": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", + "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==", + "license": "MIT" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "license": "ISC" + }, + "node_modules/jose": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/jose/-/jose-6.2.1.tgz", + "integrity": "sha512-jUaKr1yrbfaImV7R2TN/b3IcZzsw38/chqMpo2XJ7i2F8AfM/lA4G1goC3JVEwg0H7UldTmSt3P68nt31W7/mw==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/panva" + } + }, + "node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "license": "MIT" + }, + "node_modules/json-schema-typed": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/json-schema-typed/-/json-schema-typed-8.0.2.tgz", + "integrity": "sha512-fQhoXdcvc3V28x7C7BMs4P5+kNlgUURe2jmUT1T//oBRMDrqy1QPelJimwZGo7Hg9VPV3EQV5Bnq4hbFy2vetA==", + "license": "BSD-2-Clause" + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": 
"sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/media-typer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", + "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/merge-descriptors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", + "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.2.tgz", + "integrity": "sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/negotiator": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", + 
"integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-expression-matcher": { + "version": "1.1.3", + "resolved": 
"https://registry.npmjs.org/path-expression-matcher/-/path-expression-matcher-1.1.3.tgz", + "integrity": "sha512-qdVgY8KXmVdJZRSS1JdEPOKPdTiEK/pi0RkcT2sw1RhXxohdujUlJFPuS1TSkevZ9vzd3ZlL7ULl1MHGTApKzQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-to-regexp": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.3.0.tgz", + "integrity": "sha512-7jdwVIRtsP8MYpdXSwOS0YdD0Du+qOoF/AEPIt88PcCFrZCzx41oxku1jD88hZBwbNUIEfpqvuhjFaMAqMTWnA==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/pkce-challenge": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.1.tgz", + "integrity": "sha512-wQ0b/W4Fr01qtpHlqSqspcj3EhBvimsdh0KlHhH8HRZnMsEa0ea2fTULOXOS9ccQr3om+GcGRk4e+isrZWV8qQ==", + "license": "MIT", + "engines": { + "node": ">=16.20.0" + } + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "license": "MIT", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/qs": { + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.15.0.tgz", + "integrity": "sha512-mAZTtNCeetKMH+pSjrb76NAM8V9a05I9aBZOHztWy/UqcJdQYNsf59vrRKWnojAT9Y+GbIvoTBC++CPHqpDBhQ==", + "license": 
"BSD-3-Clause", + "dependencies": { + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.2.tgz", + "integrity": "sha512-K5zQjDllxWkf7Z5xJdV0/B0WTNqx6vxG70zJE4N0kBs4LovmEYWJzQGxC9bS9RAKu3bgM40lrd5zoLJ12MQ5BA==", + "license": "MIT", + "dependencies": { + "bytes": "~3.1.2", + "http-errors": "~2.0.1", + "iconv-lite": "~0.7.0", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/router": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz", + "integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "depd": "^2.0.0", + "is-promise": "^4.0.0", + "parseurl": "^1.3.3", + "path-to-regexp": "^8.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT" + }, + "node_modules/send": { + 
"version": "1.2.1", + "resolved": "https://registry.npmjs.org/send/-/send-1.2.1.tgz", + "integrity": "sha512-1gnZf7DFcoIcajTjTwjwuDjzuz4PPcY2StKPlsGAQ1+YH20IRVrBaXSWmdjowTJ6u8Rc01PoYOGHXfP1mYcZNQ==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.3", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "fresh": "^2.0.0", + "http-errors": "^2.0.1", + "mime-types": "^3.0.2", + "ms": "^2.1.3", + "on-finished": "^2.4.1", + "range-parser": "^1.2.1", + "statuses": "^2.0.2" + }, + "engines": { + "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/serve-static": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.1.tgz", + "integrity": "sha512-xRXBn0pPqQTVQiC8wyQrKs2MOlX24zQ0POGaj0kultvoOCstBQM5yvOhAVSUwOMjQtTvsPWoNCHfPGwaaQJhTw==", + "license": "MIT", + "dependencies": { + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "parseurl": "^1.3.3", + "send": "^1.2.0" + }, + "engines": { + "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "license": "ISC" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": 
"sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + 
"license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/statuses": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", + "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/strnum": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/strnum/-/strnum-2.2.0.tgz", + "integrity": "sha512-Y7Bj8XyJxnPAORMZj/xltsfo55uOiyHcU2tnAVzHUnSJR/KsEX+9RoDeXEnsXtl/CX4fAcrt64gZ13aGaWPeBg==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "license": "MIT" + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/type-is": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", + "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", + "license": "MIT", + "dependencies": { + "content-type": "^1.0.5", + "media-typer": "^1.1.0", + "mime-types": "^3.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + }, + "node_modules/zod": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/zod/-/zod-4.3.6.tgz", + "integrity": "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==", + "license": "MIT", + "peer": true, + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/zod-to-json-schema": { + "version": "3.25.1", + "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.25.1.tgz", + "integrity": "sha512-pM/SU9d3YAggzi6MtR4h7ruuQlqKtad8e9S0fmxcMi+ueAK5Korys/aWcV9LIIHTVbj01NdzxcnXSN+O74ZIVA==", + "license": "ISC", + "peerDependencies": { + "zod": "^3.25 || ^4" + } + } + } +} diff --git a/servers/hyperpod-cluster-picker/package.json b/servers/hyperpod-cluster-picker/package.json new file mode 100644 index 
0000000..f92dedf --- /dev/null +++ b/servers/hyperpod-cluster-picker/package.json @@ -0,0 +1,16 @@ +{ + "name": "@ml-container-creator/hyperpod-cluster-picker", + "version": "1.0.0", + "description": "MCP server that discovers available SageMaker HyperPod EKS clusters for ML Container Creator deployment target selection.", + "type": "module", + "main": "index.js", + "license": "Apache-2.0", + "scripts": { + "test": "node test.js" + }, + "dependencies": { + "@aws-sdk/client-sagemaker": "^3.700.0", + "@aws-sdk/credential-providers": "^3.700.0", + "@modelcontextprotocol/sdk": "^1.0.0" + } +} diff --git a/servers/hyperpod-cluster-picker/test.js b/servers/hyperpod-cluster-picker/test.js new file mode 100644 index 0000000..0235159 --- /dev/null +++ b/servers/hyperpod-cluster-picker/test.js @@ -0,0 +1,104 @@ +#!/usr/bin/env node +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +/** + * Standalone tests for the hyperpod-cluster-picker MCP server. + * Uses node:assert only โ€” no external test framework. 
+ * Run: node servers/hyperpod-cluster-picker/test.js + */ + +import assert from 'node:assert' +import { buildResponse } from './index.js' + +let passed = 0 +let failed = 0 + +function test(name, fn) { + try { + fn() + passed++ + console.log(` โœ“ ${name}`) + } catch (err) { + failed++ + console.error(` โœ— ${name}`) + console.error(` ${err.message}`) + } +} + +console.log('\nhyperpod-cluster-picker: buildResponse\n') + +// --- Empty clusters returns empty choices with message --- +test('empty clusters returns empty choices with message', () => { + const result = buildResponse([]) + assert.deepStrictEqual(result.choices.hyperPodCluster, []) + assert.deepStrictEqual(result.values, {}) + assert.ok(result.message, 'should include a descriptive message') + assert.ok(result.message.includes('No InService'), 'message should mention no clusters found') +}) + +test('null clusters returns empty choices with message', () => { + const result = buildResponse(null) + assert.deepStrictEqual(result.choices.hyperPodCluster, []) + assert.deepStrictEqual(result.values, {}) + assert.ok(result.message) +}) + +// --- Single cluster --- +test('single cluster returns correct values and choices', () => { + const clusters = [{ + clusterName: 'my-cluster', + clusterArn: 'arn:aws:sagemaker:us-east-1:123456789012:cluster/my-cluster', + status: 'InService', + instanceGroups: [{ name: 'gpu-workers', instanceType: 'ml.p4d.24xlarge', count: 4 }] + }] + const result = buildResponse(clusters) + assert.strictEqual(result.values.hyperPodCluster, 'my-cluster') + assert.deepStrictEqual(result.choices.hyperPodCluster, ['my-cluster']) + assert.ok(result.metadata['my-cluster']) + assert.strictEqual(result.metadata['my-cluster'].clusterArn, clusters[0].clusterArn) +}) + +// --- Multiple clusters --- +test('multiple clusters: first is default value', () => { + const clusters = [ + { clusterName: 'cluster-a', clusterArn: 'arn:a', status: 'InService', instanceGroups: [] }, + { clusterName: 'cluster-b', 
clusterArn: 'arn:b', status: 'InService', instanceGroups: [] }, + { clusterName: 'cluster-c', clusterArn: 'arn:c', status: 'InService', instanceGroups: [] } + ] + const result = buildResponse(clusters) + assert.strictEqual(result.values.hyperPodCluster, 'cluster-a') + assert.strictEqual(result.choices.hyperPodCluster.length, 3) + assert.deepStrictEqual(result.choices.hyperPodCluster, ['cluster-a', 'cluster-b', 'cluster-c']) +}) + +// --- Metadata includes instance groups --- +test('metadata includes instance group details', () => { + const clusters = [{ + clusterName: 'gpu-cluster', + clusterArn: 'arn:aws:sagemaker:us-west-2:123456789012:cluster/gpu-cluster', + status: 'InService', + instanceGroups: [ + { name: 'workers', instanceType: 'ml.g5.48xlarge', count: 8 }, + { name: 'controllers', instanceType: 'ml.m5.xlarge', count: 1 } + ] + }] + const result = buildResponse(clusters) + const meta = result.metadata['gpu-cluster'] + assert.strictEqual(meta.instanceGroups.length, 2) + assert.strictEqual(meta.instanceGroups[0].instanceType, 'ml.g5.48xlarge') + assert.strictEqual(meta.instanceGroups[1].count, 1) +}) + +// --- No message field when clusters are found --- +test('no message field when clusters are found', () => { + const clusters = [ + { clusterName: 'c1', clusterArn: 'arn:c1', status: 'InService', instanceGroups: [] } + ] + const result = buildResponse(clusters) + assert.strictEqual(result.message, undefined) +}) + +// --- Summary --- +console.log(`\n ${passed} passing, ${failed} failing\n`) +process.exit(failed > 0 ? 
1 : 0) diff --git a/test/generator.test.js b/test/generator.test.js index a64a275..d2a9453 100644 --- a/test/generator.test.js +++ b/test/generator.test.js @@ -24,7 +24,7 @@ describe('generator-ml-container-creator:app', () => { includeSampleModel: true, includeTesting: true, testTypes: ['local-model-cli', 'local-model-server'], - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.m5.large', awsRegion: 'us-east-1', awsRoleArn: '' @@ -105,7 +105,7 @@ describe('generator-ml-container-creator:app', () => { includeSampleModel: false, includeTesting: true, testTypes: ['hosted-model-endpoint'], - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.g5.xlarge', awsRegion: 'us-east-1', awsRoleArn: '' @@ -175,7 +175,7 @@ describe('generator-ml-container-creator:app', () => { modelFormat: 'json', includeSampleModel: false, includeTesting: false, - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.m5.large', awsRegion: 'us-east-1', awsRoleArn: '' @@ -236,7 +236,7 @@ describe('generator-ml-container-creator:app', () => { modelFormat: 'pkl', includeSampleModel: false, includeTesting: false, - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.m5.large', awsRegion: 'us-east-1', awsRoleArn: '' diff --git a/test/input-parsing-and-generation/backward-compatibility-managed-inference.property.test.js b/test/input-parsing-and-generation/backward-compatibility-managed-inference.property.test.js new file mode 100644 index 0000000..acef11e --- /dev/null +++ b/test/input-parsing-and-generation/backward-compatibility-managed-inference.property.test.js @@ -0,0 +1,452 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +/** + * Property 14: Backward Compatibility for Managed Inference + * + * For any valid managed-inference configuration, the generated project + * file structure, do/deploy SageMaker endpoint logic, do/clean endpoint + * cleanup logic, do/logs CloudWatch tailing logic, and do/test endpoint + * testing logic must be functionally equivalent to the current generator + * output (with deployTarget renamed to buildTarget). + * + * Validates: Requirements 11.1, 11.2, 11.3, 11.4, 11.5 + * + * Feature: sagemaker-hyperpod-deployment + */ + +import fc from 'fast-check'; +import { describe, it, before } from 'mocha'; +import assert from 'assert'; +import ejs from 'ejs'; +import { readFileSync } from 'fs'; +import path from 'path'; +import { fileURLToPath } from 'url'; +import TemplateManager from '../../generators/app/lib/template-manager.js'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +// Load all do-framework templates +const templatesDir = path.join(__dirname, '../../generators/app/templates/do'); + +const configTemplate = readFileSync(path.join(templatesDir, 'config'), 'utf8'); +const deployTemplate = readFileSync(path.join(templatesDir, 'deploy'), 'utf8'); +const cleanTemplate = readFileSync(path.join(templatesDir, 'clean'), 'utf8'); +const logsTemplate = readFileSync(path.join(templatesDir, 'logs'), 'utf8'); +const testTemplate = readFileSync(path.join(templatesDir, 'test'), 'utf8'); + +/** + * Render a template with the given variables. 
+ */ +function renderTemplate(template, vars) { + return ejs.render(template, vars); +} + +/** Arbitrary for a managed-inference configuration */ +const managedInferenceConfigArb = fc.record({ + projectName: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + deploymentConfig: fc.constantFrom( + 'sklearn-flask', 'sklearn-fastapi', + 'xgboost-flask', 'xgboost-fastapi', + 'tensorflow-flask', 'tensorflow-fastapi', + 'transformers-vllm', 'transformers-sglang' + ), + framework: fc.constantFrom('sklearn', 'xgboost', 'tensorflow', 'transformers'), + modelServer: fc.constantFrom('flask', 'fastapi', 'vllm', 'sglang'), + awsRegion: fc.constantFrom('us-east-1', 'us-west-2', 'eu-west-1'), + buildTarget: fc.constant('codebuild'), + deploymentTarget: fc.constant('managed-inference'), + instanceType: fc.constantFrom('ml.m5.xlarge', 'ml.g5.xlarge', 'ml.p4d.24xlarge'), + modelName: fc.constantFrom('meta-llama/Llama-2-7b-hf', 'openai/gpt-oss-20b'), + codebuildComputeType: fc.constantFrom('BUILD_GENERAL1_SMALL', 'BUILD_GENERAL1_MEDIUM', 'BUILD_GENERAL1_LARGE'), + roleArn: fc.constantFrom('arn:aws:iam::123456789012:role/SageMakerRole', undefined), + hfToken: fc.constantFrom('hf_test123', undefined), + ngcApiKey: fc.constantFrom(undefined), + inferenceAmiVersion: fc.constantFrom('1.0.0', undefined), + modelFormat: fc.constantFrom('pkl', 'json', 'keras', undefined) +}); + +describe('Property 14: Backward Compatibility for Managed Inference', () => { + before(() => { + console.log('\n๐Ÿ”„ Starting Backward Compatibility for Managed Inference Property Tests'); + console.log('๐Ÿ“‹ Testing: Requirements 11.1, 11.2, 11.3, 11.4, 11.5'); + console.log('๐Ÿ”ง Configuration: EJS template rendering with fast-check\n'); + }); + + it('should produce do/config with BUILD_TARGET and no hyperpod directory references (Req 11.1)', function () { + /** + * **Validates: Requirements 11.1** + * + * When deploymentTarget === 'managed-inference', the do/config must contain + * BUILD_TARGET (renamed from 
DEPLOY_TARGET) and DEPLOYMENT_TARGET variables, + * and must NOT contain HyperPod-specific variables. + */ + this.timeout(30000); + + console.log(' ๐Ÿงช Req 11.1: do/config uses BUILD_TARGET and has no HyperPod vars'); + + fc.assert(fc.property( + managedInferenceConfigArb, + (config) => { + const output = renderTemplate(configTemplate, config); + + // Must contain BUILD_TARGET (renamed from DEPLOY_TARGET) + assert.ok( + output.includes('BUILD_TARGET'), + 'do/config must contain BUILD_TARGET variable' + ); + + // Must contain DEPLOYMENT_TARGET + assert.ok( + output.includes('DEPLOYMENT_TARGET'), + 'do/config must contain DEPLOYMENT_TARGET variable' + ); + + // Must contain INSTANCE_TYPE for managed-inference + assert.ok( + output.includes('INSTANCE_TYPE'), + 'managed-inference do/config must contain INSTANCE_TYPE' + ); + + // Must NOT contain HyperPod-specific variables + assert.ok( + !output.includes('HYPERPOD_CLUSTER_NAME'), + 'managed-inference do/config must NOT contain HYPERPOD_CLUSTER_NAME' + ); + assert.ok( + !output.includes('HYPERPOD_NAMESPACE'), + 'managed-inference do/config must NOT contain HYPERPOD_NAMESPACE' + ); + assert.ok( + !output.includes('HYPERPOD_REPLICAS'), + 'managed-inference do/config must NOT contain HYPERPOD_REPLICAS' + ); + } + ), { numRuns: 30 }); + + console.log(' โœ… do/config backward compatible with BUILD_TARGET rename'); + }); + + it('should produce do/deploy with SageMaker inference component logic (Req 11.2)', function () { + /** + * **Validates: Requirements 11.2** + * + * When deploymentTarget === 'managed-inference', the do/deploy script must + * contain SageMaker inference component logic: create-endpoint, + * create-inference-component, and wait inference-component-in-service. 
+ */ + this.timeout(30000); + + console.log(' ๐Ÿงช Req 11.2: do/deploy contains SageMaker inference component logic'); + + fc.assert(fc.property( + managedInferenceConfigArb, + (config) => { + const vars = { + ...config, + hyperPodCluster: undefined, + hyperPodNamespace: undefined, + hyperPodReplicas: undefined, + fsxVolumeHandle: undefined + }; + + const output = renderTemplate(deployTemplate, vars); + + // Must contain SageMaker inference component commands + assert.ok( + output.includes('sagemaker create-endpoint-config'), + 'managed-inference do/deploy must contain create-endpoint-config' + ); + assert.ok( + output.includes('sagemaker create-endpoint'), + 'managed-inference do/deploy must contain create-endpoint' + ); + assert.ok( + output.includes('sagemaker create-inference-component'), + 'managed-inference do/deploy must contain create-inference-component' + ); + assert.ok( + output.includes('sagemaker wait inference-component-in-service'), + 'managed-inference do/deploy must contain wait inference-component-in-service' + ); + + // Must contain ROLE_ARN validation + assert.ok( + output.includes('ROLE_ARN'), + 'managed-inference do/deploy must validate ROLE_ARN' + ); + + // Must NOT contain kubectl commands + assert.ok( + !output.includes('kubectl'), + 'managed-inference do/deploy must NOT contain kubectl commands' + ); + assert.ok( + !output.includes('describe-cluster'), + 'managed-inference do/deploy must NOT contain describe-cluster' + ); + } + ), { numRuns: 30 }); + + console.log(' โœ… do/deploy contains SageMaker inference component logic'); + }); + + it('should produce do/clean with SageMaker cleanup logic (Req 11.3)', function () { + /** + * **Validates: Requirements 11.3** + * + * When deploymentTarget === 'managed-inference', the do/clean script must + * contain endpoint cleanup logic (delete-inference-component, delete-endpoint) + * and must NOT contain HyperPod cleanup logic. 
+ */ + this.timeout(30000); + + console.log(' ๐Ÿงช Req 11.3: do/clean contains SageMaker cleanup logic'); + + fc.assert(fc.property( + managedInferenceConfigArb, + (config) => { + const vars = { + ...config, + hyperPodCluster: undefined, + hyperPodNamespace: undefined, + hyperPodReplicas: undefined, + fsxVolumeHandle: undefined + }; + + const output = renderTemplate(cleanTemplate, vars); + + // Must contain endpoint cleanup target + assert.ok( + output.includes('clean_endpoint'), + 'managed-inference do/clean must contain clean_endpoint function' + ); + assert.ok( + output.includes('endpoint)'), + 'managed-inference do/clean must support endpoint cleanup target' + ); + + // Must contain SageMaker delete commands + assert.ok( + output.includes('delete-endpoint'), + 'managed-inference do/clean must contain delete-endpoint' + ); + + // Must NOT contain HyperPod cleanup + assert.ok( + !output.includes('clean_hyperpod'), + 'managed-inference do/clean must NOT contain clean_hyperpod' + ); + assert.ok( + !output.includes('kubectl delete'), + 'managed-inference do/clean must NOT contain kubectl delete' + ); + + // Must still contain shared cleanup targets + assert.ok( + output.includes('clean_local'), + 'managed-inference do/clean must contain clean_local' + ); + assert.ok( + output.includes('clean_ecr'), + 'managed-inference do/clean must contain clean_ecr' + ); + assert.ok( + output.includes('clean_codebuild'), + 'managed-inference do/clean must contain clean_codebuild' + ); + } + ), { numRuns: 30 }); + + console.log(' โœ… do/clean contains SageMaker cleanup logic'); + }); + + it('should produce do/logs with CloudWatch log tailing logic (Req 11.4)', function () { + /** + * **Validates: Requirements 11.4** + * + * When deploymentTarget === 'managed-inference', the do/logs script must + * contain CloudWatch log tailing logic (aws logs tail) and must NOT + * contain kubectl logs commands. 
+ */ + this.timeout(30000); + + console.log(' ๐Ÿงช Req 11.4: do/logs contains CloudWatch log tailing logic'); + + fc.assert(fc.property( + managedInferenceConfigArb, + (config) => { + const vars = { + ...config, + hyperPodCluster: undefined, + hyperPodNamespace: undefined, + hyperPodReplicas: undefined, + fsxVolumeHandle: undefined + }; + + const output = renderTemplate(logsTemplate, vars); + + // Must contain CloudWatch log tailing + assert.ok( + output.includes('aws logs tail'), + 'managed-inference do/logs must contain aws logs tail' + ); + assert.ok( + output.includes('/aws/sagemaker/Endpoints/'), + 'managed-inference do/logs must reference SageMaker Endpoints log group' + ); + assert.ok( + output.includes('--follow'), + 'managed-inference do/logs must use --follow for tailing' + ); + + // Must NOT contain kubectl logs + assert.ok( + !output.includes('kubectl logs'), + 'managed-inference do/logs must NOT contain kubectl logs' + ); + assert.ok( + !output.includes('describe-cluster'), + 'managed-inference do/logs must NOT contain describe-cluster' + ); + } + ), { numRuns: 30 }); + + console.log(' โœ… do/logs contains CloudWatch log tailing logic'); + }); + + it('should produce do/test with local and SageMaker endpoint testing logic (Req 11.5)', function () { + /** + * **Validates: Requirements 11.5** + * + * When deploymentTarget === 'managed-inference', the do/test script must + * contain both local container testing (curl to localhost:8080) and + * SageMaker endpoint testing (aws sagemaker-runtime invoke-endpoint). 
+ */ + this.timeout(30000); + + console.log(' ๐Ÿงช Req 11.5: do/test contains local and SageMaker endpoint testing'); + + fc.assert(fc.property( + managedInferenceConfigArb, + (config) => { + const vars = { + ...config, + hyperPodCluster: undefined, + hyperPodNamespace: undefined, + hyperPodReplicas: undefined, + fsxVolumeHandle: undefined + }; + + const output = renderTemplate(testTemplate, vars); + + // Must contain local testing (curl to localhost:8080) + assert.ok( + output.includes('localhost:8080'), + 'managed-inference do/test must contain localhost:8080 for local testing' + ); + + // Must contain SageMaker endpoint testing + assert.ok( + output.includes('sagemaker-runtime invoke-endpoint') || + output.includes('sagemaker describe-endpoint'), + 'managed-inference do/test must contain SageMaker endpoint testing' + ); + + // Must contain health check (/ping) + assert.ok( + output.includes('/ping'), + 'managed-inference do/test must contain /ping health check' + ); + + // Must contain inference test (/invocations) + assert.ok( + output.includes('/invocations'), + 'managed-inference do/test must contain /invocations inference test' + ); + + // Must NOT contain kubectl port-forward + assert.ok( + !output.includes('kubectl port-forward'), + 'managed-inference do/test must NOT contain kubectl port-forward' + ); + assert.ok( + !output.includes('describe-cluster'), + 'managed-inference do/test must NOT contain describe-cluster' + ); + } + ), { numRuns: 30 }); + + console.log(' โœ… do/test contains local and SageMaker endpoint testing logic'); + }); + + it('should default deploymentTarget to managed-inference when not explicitly set (Req 11.6)', function () { + /** + * **Validates: Requirements 11.6** (the managed-inference default preserves the Requirements 11.1, 11.2, 11.3, 11.4, 11.5 behavior) + * + * The generator defaults deploymentTarget to 'managed-inference' when + * not explicitly set, ensuring backward compatibility.
+ */ + this.timeout(30000); + + console.log(' ๐Ÿงช Req 11.6: deploymentTarget defaults to managed-inference'); + + fc.assert(fc.property( + fc.record({ + framework: fc.constantFrom('sklearn', 'xgboost', 'tensorflow', 'transformers'), + modelServer: fc.constantFrom('flask', 'fastapi', 'vllm', 'sglang'), + awsRegion: fc.constantFrom('us-east-1', 'us-west-2') + }), + (config) => { + // TemplateManager with no deploymentTarget set should still validate + const answers = { + ...config, + buildTarget: 'codebuild', + instanceType: 'ml.m5.large' + // deploymentTarget intentionally NOT set + }; + + const manager = new TemplateManager(answers); + // Should not throw - buildTarget validation should pass + assert.doesNotThrow(() => manager.validate()); + } + ), { numRuns: 20 }); + + console.log(' โœ… deploymentTarget defaults to managed-inference'); + }); + + it('should accept old deployTarget for backward compatibility in TemplateManager', function () { + /** + * **Validates: Requirements 11.1** + * + * The TemplateManager should accept the old deployTarget field name + * for backward compatibility, validating it against buildTargets. 
+ */ + this.timeout(30000); + + console.log(' ๐Ÿงช Backward compat: TemplateManager accepts old deployTarget'); + + fc.assert(fc.property( + fc.record({ + framework: fc.constantFrom('sklearn', 'xgboost', 'tensorflow', 'transformers'), + modelServer: fc.constantFrom('flask', 'fastapi', 'vllm', 'sglang'), + awsRegion: fc.constantFrom('us-east-1', 'us-west-2') + }), + (config) => { + // Use old deployTarget field name + const answers = { + ...config, + deployTarget: 'codebuild', // old field name + instanceType: 'ml.m5.large' + }; + + const manager = new TemplateManager(answers); + // Should not throw - backward compat should handle old field name + assert.doesNotThrow(() => manager.validate()); + } + ), { numRuns: 20 }); + + console.log(' โœ… TemplateManager accepts old deployTarget for backward compat'); + }); +}); diff --git a/test/input-parsing-and-generation/codebuild.test.js b/test/input-parsing-and-generation/codebuild.test.js index 65f6fbb..0c9c3e8 100644 --- a/test/input-parsing-and-generation/codebuild.test.js +++ b/test/input-parsing-and-generation/codebuild.test.js @@ -52,35 +52,35 @@ describe('CodeBuild Feature', () => { }); describe('Deployment Target Validation', () => { - it('should reject invalid deployment target values', () => { + it('should reject invalid build target values', () => { const configManager = new ConfigManager(mockGenerator); const invalidTargets = ['invalid-target', 'aws-batch', 'kubernetes', 'docker-compose', 'local']; invalidTargets.forEach(target => { try { - configManager._validateParameterValue('deployTarget', target, {}); - throw new Error(`Invalid deployment target was accepted: ${target}`); + configManager._validateParameterValue('buildTarget', target, {}); + throw new Error(`Invalid build target was accepted: ${target}`); } catch (error) { if (!(error instanceof ValidationError)) { throw new Error(`Expected ValidationError, got: ${error.constructor.name}`); } - if (!error.message.includes('Unsupported deployment target')) { 
- throw new Error(`Error message should mention unsupported deployment target: ${error.message}`); + if (!error.message.includes('Unsupported build target')) { + throw new Error(`Error message should mention unsupported build target: ${error.message}`); } } }); }); - it('should accept valid deployment target values', () => { + it('should accept valid build target values', () => { const configManager = new ConfigManager(mockGenerator); const validTargets = ['codebuild']; validTargets.forEach(target => { try { - configManager._validateParameterValue('deployTarget', target, {}); + configManager._validateParameterValue('buildTarget', target, {}); } catch (error) { - throw new Error(`Valid deployment target rejected: ${target} - ${error.message}`); + throw new Error(`Valid build target rejected: ${target} - ${error.message}`); } }); }); @@ -187,7 +187,7 @@ describe('CodeBuild Feature', () => { const configManager = new ConfigManager(mockGenerator); const parameterMatrix = configManager._getParameterMatrix(); - const codebuildParams = ['deployTarget', 'codebuildComputeType', 'codebuildProjectName']; + const codebuildParams = ['buildTarget', 'codebuildComputeType', 'codebuildProjectName']; codebuildParams.forEach(param => { if (!parameterMatrix[param]) { @@ -203,14 +203,15 @@ describe('CodeBuild Feature', () => { framework: 'sklearn', modelServer: 'flask', modelFormat: 'pkl', - deployTarget: 'codebuild', + buildTarget: 'codebuild', codebuildComputeType: 'BUILD_GENERAL1_MEDIUM', codebuildProjectName: 'valid-project-name', includeSampleModel: false, includeTesting: true, instanceType: 'ml.m5.large', projectName: 'test-project', - destinationDir: '.' 
+ destinationDir: '.', + deploymentTarget: 'managed-inference' }; const validationErrors = configManager.validateRequiredParameters(validConfig); diff --git a/test/input-parsing-and-generation/do-clean-deployment-target.property.test.js b/test/input-parsing-and-generation/do-clean-deployment-target.property.test.js new file mode 100644 index 0000000..ea7ad75 --- /dev/null +++ b/test/input-parsing-and-generation/do-clean-deployment-target.property.test.js @@ -0,0 +1,370 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +/** + * Property 8: Clean Script Content by Deployment Target + * + * For any valid configuration, when deploymentTarget equals managed-inference, + * the generated do/clean script must contain the `endpoint` cleanup target + * with SageMaker deletion commands (delete-inference-component, delete-endpoint) + * and must not contain kubectl commands. When deploymentTarget + * equals hyperpod-eks, the generated do/clean script must contain the `hyperpod` + * cleanup target with kubectl delete commands and must not contain SageMaker + * endpoint deletion commands. For both targets, the script must support + * `local`, `ecr`, `codebuild`, and `all` cleanup targets. + * + * Validates: Requirements 6.2, 6.3, 6.4, 6.5 + * + * Feature: sagemaker-hyperpod-deployment + */ + +import fc from 'fast-check'; +import { describe, it, before } from 'mocha'; +import assert from 'assert'; +import ejs from 'ejs'; +import { readFileSync } from 'fs'; +import path from 'path'; +import { fileURLToPath } from 'url'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +const templatePath = path.join(__dirname, '../../generators/app/templates/do/clean'); +const templateContent = readFileSync(templatePath, 'utf8'); + +/** + * Render the do/clean template with the given variables. 
+ */ +function renderClean(vars) { + return ejs.render(templateContent, vars); +} + +/** Arbitrary for a base config shared by both deployment targets */ +const baseConfigArb = fc.record({ + projectName: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + deploymentConfig: fc.constantFrom('transformers-vllm', 'sklearn-flask', 'xgboost-fastapi'), + framework: fc.constantFrom('transformers', 'sklearn', 'xgboost', 'tensorflow'), + modelServer: fc.constantFrom('vllm', 'flask', 'fastapi', 'sglang'), + awsRegion: fc.constantFrom('us-east-1', 'us-west-2', 'eu-west-1'), + buildTarget: fc.constant('codebuild') +}); + +describe('Property 8: Clean Script Content by Deployment Target', () => { + before(() => { + console.log('\n๐Ÿงน Starting Clean Script Content by Deployment Target Property Tests'); + console.log('๐Ÿ“‹ Testing: Requirements 6.2, 6.3, 6.4, 6.5'); + console.log('๐Ÿ”ง Configuration: EJS template rendering with fast-check\n'); + }); + + it('should support local, ecr, codebuild cleanup targets for any deployment target (Req 6.4)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 6.4: local, ecr, codebuild targets present for both deployment targets'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('managed-inference', 'hyperpod-eks'), + (base, deploymentTarget) => { + const vars = { + ...base, + deploymentTarget, + instanceType: 'ml.m5.xlarge', + hyperPodCluster: 'test-cluster', + hyperPodNamespace: 'default', + hyperPodReplicas: 1 + }; + + const output = renderClean(vars); + + // Must contain local cleanup target + assert.ok( + output.includes('clean_local'), + 'Output must contain clean_local function' + ); + assert.ok( + output.includes('local)'), + 'Output must contain local case in switch' + ); + + // Must contain ecr cleanup target + assert.ok( + output.includes('clean_ecr'), + 'Output must contain clean_ecr function' + ); + assert.ok( + output.includes('ecr)'), + 'Output must contain ecr case in switch' + ); + + // Must contain 
codebuild cleanup target + assert.ok( + output.includes('clean_codebuild'), + 'Output must contain clean_codebuild function' + ); + assert.ok( + output.includes('codebuild)'), + 'Output must contain codebuild case in switch' + ); + + // Must contain all cleanup target + assert.ok( + output.includes('all)'), + 'Output must contain all case in switch' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… local, ecr, codebuild targets present for both deployment targets'); + }); + + it('should contain SageMaker endpoint cleanup for managed-inference (Req 6.2)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 6.2: SageMaker endpoint cleanup logic for managed-inference'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('ml.m5.xlarge', 'ml.g5.xlarge', 'ml.p4d.24xlarge'), + (base, instanceType) => { + const vars = { + ...base, + deploymentTarget: 'managed-inference', + instanceType, + hyperPodCluster: undefined, + hyperPodNamespace: undefined, + hyperPodReplicas: undefined + }; + + const output = renderClean(vars); + + // Must contain endpoint cleanup target + assert.ok( + output.includes('endpoint)'), + 'managed-inference must contain endpoint case in switch' + ); + assert.ok( + output.includes('clean_endpoint'), + 'managed-inference must contain clean_endpoint function' + ); + + // Must contain SageMaker deletion commands + assert.ok( + output.includes('sagemaker delete-endpoint'), + 'managed-inference must contain delete-endpoint command' + ); + assert.ok( + output.includes('sagemaker delete-endpoint-config'), + 'managed-inference must contain delete-endpoint-config command' + ); + assert.ok( + output.includes('sagemaker delete-inference-component'), + 'managed-inference must contain delete-inference-component command' + ); + + // Must NOT contain kubectl commands + assert.ok( + !output.includes('kubectl delete'), + 'managed-inference must NOT contain kubectl delete commands' + ); + assert.ok( + !output.includes('clean_hyperpod'), + 
'managed-inference must NOT contain clean_hyperpod function' + ); + assert.ok( + !output.includes('hyperpod)'), + 'managed-inference must NOT contain hyperpod case in switch' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… SageMaker endpoint cleanup present for managed-inference'); + }); + + it('should contain kubectl cleanup for hyperpod-eks (Req 6.3)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 6.3: kubectl cleanup logic for hyperpod-eks'); + + fc.assert(fc.property( + baseConfigArb, + fc.record({ + hyperPodCluster: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + hyperPodNamespace: fc.constantFrom('default', 'ml-inference', 'production'), + hyperPodReplicas: fc.integer({ min: 1, max: 10 }) + }), + (base, hpVars) => { + const vars = { + ...base, + deploymentTarget: 'hyperpod-eks', + instanceType: undefined, + ...hpVars + }; + + const output = renderClean(vars); + + // Must contain hyperpod cleanup target + assert.ok( + output.includes('hyperpod)'), + 'hyperpod-eks must contain hyperpod case in switch' + ); + assert.ok( + output.includes('clean_hyperpod'), + 'hyperpod-eks must contain clean_hyperpod function' + ); + + // Must contain kubectl delete command + assert.ok( + output.includes('kubectl delete') && output.includes('hyperpod/'), + 'hyperpod-eks must contain kubectl delete from hyperpod/ directory' + ); + + // Must contain kubeconfig retrieval + assert.ok( + output.includes('describe-cluster'), + 'hyperpod-eks must contain describe-cluster command' + ); + + // Must NOT contain SageMaker endpoint deletion commands + assert.ok( + !output.includes('sagemaker delete-endpoint'), + 'hyperpod-eks must NOT contain delete-endpoint command' + ); + assert.ok( + !output.includes('clean_endpoint'), + 'hyperpod-eks must NOT contain clean_endpoint function' + ); + assert.ok( + !output.includes('endpoint)'), + 'hyperpod-eks must NOT contain endpoint case in switch' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… kubectl cleanup present for 
hyperpod-eks'); + }); + + it('should include appropriate cleanup in all target for managed-inference (Req 6.5)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 6.5: all target includes endpoint cleanup for managed-inference'); + + fc.assert(fc.property( + baseConfigArb, + (base) => { + const vars = { + ...base, + deploymentTarget: 'managed-inference', + instanceType: 'ml.m5.xlarge', + hyperPodCluster: undefined, + hyperPodNamespace: undefined, + hyperPodReplicas: undefined + }; + + const output = renderClean(vars); + + // The all target should call clean_endpoint + // Look for the pattern in the all case block + const allCaseMatch = output.match(/all\)([\s\S]*?);;/); + assert.ok(allCaseMatch, 'Output must contain all case block'); + + const allCaseContent = allCaseMatch[1]; + assert.ok( + allCaseContent.includes('clean_endpoint'), + 'all target must call clean_endpoint for managed-inference' + ); + assert.ok( + allCaseContent.includes('SageMaker resources'), + 'all target must reference SageMaker resources for managed-inference' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… all target includes endpoint cleanup for managed-inference'); + }); + + it('should include appropriate cleanup in all target for hyperpod-eks (Req 6.5)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 6.5: all target includes hyperpod cleanup for hyperpod-eks'); + + fc.assert(fc.property( + baseConfigArb, + fc.record({ + hyperPodCluster: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + hyperPodNamespace: fc.constantFrom('default', 'ml-inference'), + hyperPodReplicas: fc.integer({ min: 1, max: 4 }) + }), + (base, hpVars) => { + const vars = { + ...base, + deploymentTarget: 'hyperpod-eks', + instanceType: undefined, + ...hpVars + }; + + const output = renderClean(vars); + + // The all target should call clean_hyperpod + const allCaseMatch = output.match(/all\)([\s\S]*?);;/); + assert.ok(allCaseMatch, 'Output must contain all case block'); + + const 
allCaseContent = allCaseMatch[1]; + assert.ok( + allCaseContent.includes('clean_hyperpod'), + 'all target must call clean_hyperpod for hyperpod-eks' + ); + assert.ok( + allCaseContent.includes('HyperPod EKS resources'), + 'all target must reference HyperPod EKS resources for hyperpod-eks' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… all target includes hyperpod cleanup for hyperpod-eks'); + }); + + it('should show deployment-target-specific usage info', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Usage info shows target-specific cleanup options'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('managed-inference', 'hyperpod-eks'), + (base, deploymentTarget) => { + const vars = { + ...base, + deploymentTarget, + instanceType: 'ml.m5.xlarge', + hyperPodCluster: 'test-cluster', + hyperPodNamespace: 'default', + hyperPodReplicas: 1 + }; + + const output = renderClean(vars); + + if (deploymentTarget === 'managed-inference') { + assert.ok( + output.includes('endpoint - Delete SageMaker endpoint'), + 'managed-inference usage must show endpoint cleanup option' + ); + assert.ok( + output.includes('./do/clean endpoint'), + 'managed-inference examples must show endpoint cleanup' + ); + } else { + assert.ok( + output.includes('hyperpod - Delete HyperPod EKS deployment'), + 'hyperpod-eks usage must show hyperpod cleanup option' + ); + assert.ok( + output.includes('./do/clean hyperpod'), + 'hyperpod-eks examples must show hyperpod cleanup' + ); + } + } + ), { numRuns: 20 }); + + console.log(' โœ… Deployment-target-specific usage info correct'); + }); +}); diff --git a/test/input-parsing-and-generation/do-config-deployment-target.property.test.js b/test/input-parsing-and-generation/do-config-deployment-target.property.test.js new file mode 100644 index 0000000..f019c8e --- /dev/null +++ b/test/input-parsing-and-generation/do-config-deployment-target.property.test.js @@ -0,0 +1,316 @@ +// Copyright Amazon.com, Inc. or its affiliates. 
All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +/** + * Property 11: do/config Deployment-Target-Specific Variables + * + * For any valid configuration, the generated do/config must contain + * BUILD_TARGET and DEPLOYMENT_TARGET variables. When deploymentTarget + * equals managed-inference, do/config must contain INSTANCE_TYPE. + * When deploymentTarget equals hyperpod-eks, do/config must contain + * HYPERPOD_CLUSTER_NAME, HYPERPOD_NAMESPACE, and HYPERPOD_REPLICAS. + * When fsxVolumeHandle is provided with hyperpod-eks, do/config must + * also contain FSX_VOLUME_HANDLE. + * + * Validates: Requirements 9.1, 9.2, 9.3, 9.4, 9.5 + * + * Feature: sagemaker-hyperpod-deployment + */ + +import fc from 'fast-check'; +import { describe, it, before } from 'mocha'; +import assert from 'assert'; +import ejs from 'ejs'; +import { readFileSync } from 'fs'; +import path from 'path'; +import { fileURLToPath } from 'url'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +const templatePath = path.join(__dirname, '../../generators/app/templates/do/config'); +const templateContent = readFileSync(templatePath, 'utf8'); + +/** + * Render the do/config template with the given variables. 
+ */ +function renderConfig(vars) { + return ejs.render(templateContent, vars); +} + +/** Arbitrary for a base config shared by both deployment targets */ +const baseConfigArb = fc.record({ + projectName: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + deploymentConfig: fc.constantFrom('transformers-vllm', 'sklearn-flask', 'xgboost-fastapi'), + framework: fc.constantFrom('transformers', 'sklearn', 'xgboost', 'tensorflow'), + modelServer: fc.constantFrom('vllm', 'flask', 'fastapi', 'sglang'), + awsRegion: fc.constantFrom('us-east-1', 'us-west-2', 'eu-west-1'), + buildTarget: fc.constant('codebuild'), + codebuildComputeType: fc.constantFrom('BUILD_GENERAL1_SMALL', 'BUILD_GENERAL1_MEDIUM', 'BUILD_GENERAL1_LARGE'), + roleArn: fc.option(fc.constant('arn:aws:iam::123456789012:role/SageMakerRole'), { nil: undefined }), + modelFormat: fc.option(fc.constantFrom('pkl', 'joblib', 'json'), { nil: undefined }), + modelName: fc.constantFrom('meta-llama/Llama-2-7b', 'gpt2', 'bert-base-uncased'), + hfToken: fc.option(fc.constant('hf_testtoken123'), { nil: undefined }), + ngcApiKey: fc.option(fc.constant('ngc_testkey456'), { nil: undefined }) +}); + +describe('Property 11: do/config Deployment-Target-Specific Variables', () => { + before(() => { + console.log('\n๐Ÿš€ Starting do/config Deployment-Target-Specific Variables Property Tests'); + console.log('๐Ÿ“‹ Testing: Requirements 9.1, 9.2, 9.3, 9.4, 9.5'); + console.log('๐Ÿ”ง Configuration: EJS template rendering with fast-check\n'); + }); + + it('should always contain BUILD_TARGET and DEPLOYMENT_TARGET for any valid config', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 9.1 + 9.2: BUILD_TARGET and DEPLOYMENT_TARGET always present'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('managed-inference', 'hyperpod-eks'), + (base, deploymentTarget) => { + const vars = { + ...base, + deploymentTarget, + // provide defaults so EJS doesn't blow up on missing vars + instanceType: 'ml.m5.xlarge', + 
inferenceAmiVersion: undefined, + hyperPodCluster: 'test-cluster', + hyperPodNamespace: 'default', + hyperPodReplicas: 1, + fsxVolumeHandle: undefined + }; + + const output = renderConfig(vars); + + assert.ok( + output.includes('export BUILD_TARGET='), + 'Output must contain BUILD_TARGET export' + ); + assert.ok( + output.includes('export DEPLOYMENT_TARGET='), + 'Output must contain DEPLOYMENT_TARGET export' + ); + assert.ok( + output.includes(`BUILD_TARGET="${base.buildTarget}"`), + 'BUILD_TARGET must equal the configured buildTarget value' + ); + assert.ok( + output.includes(`DEPLOYMENT_TARGET="${deploymentTarget}"`), + 'DEPLOYMENT_TARGET must equal the configured deploymentTarget value' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… BUILD_TARGET and DEPLOYMENT_TARGET always present'); + }); + + it('should contain INSTANCE_TYPE when deploymentTarget is managed-inference', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 9.3: INSTANCE_TYPE present for managed-inference'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('ml.m5.xlarge', 'ml.g5.xlarge', 'ml.p4d.24xlarge'), + fc.option(fc.constant('1.0.0'), { nil: undefined }), + (base, instanceType, inferenceAmiVersion) => { + const vars = { + ...base, + deploymentTarget: 'managed-inference', + instanceType, + inferenceAmiVersion, + // HyperPod vars not needed but provide defaults + hyperPodCluster: undefined, + hyperPodNamespace: undefined, + hyperPodReplicas: undefined, + fsxVolumeHandle: undefined + }; + + const output = renderConfig(vars); + + assert.ok( + output.includes(`export INSTANCE_TYPE="${instanceType}"`), + `Output must contain INSTANCE_TYPE="${instanceType}"` + ); + + // Should NOT contain HyperPod variables + assert.ok( + !output.includes('export HYPERPOD_CLUSTER_NAME='), + 'managed-inference output must NOT contain HYPERPOD_CLUSTER_NAME' + ); + assert.ok( + !output.includes('export HYPERPOD_NAMESPACE='), + 'managed-inference output must NOT contain 
HYPERPOD_NAMESPACE' + ); + assert.ok( + !output.includes('export HYPERPOD_REPLICAS='), + 'managed-inference output must NOT contain HYPERPOD_REPLICAS' + ); + + // INFERENCE_AMI_VERSION conditional + if (inferenceAmiVersion) { + assert.ok( + output.includes(`export INFERENCE_AMI_VERSION="${inferenceAmiVersion}"`), + 'Output must contain INFERENCE_AMI_VERSION when provided' + ); + } else { + assert.ok( + !output.includes('export INFERENCE_AMI_VERSION='), + 'Output must NOT contain INFERENCE_AMI_VERSION when not provided' + ); + } + } + ), { numRuns: 20 }); + + console.log(' โœ… INSTANCE_TYPE present for managed-inference'); + }); + + it('should contain HyperPod variables when deploymentTarget is hyperpod-eks', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 9.4: HYPERPOD_CLUSTER_NAME, HYPERPOD_NAMESPACE, HYPERPOD_REPLICAS for hyperpod-eks'); + + fc.assert(fc.property( + baseConfigArb, + fc.record({ + hyperPodCluster: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + hyperPodNamespace: fc.constantFrom('default', 'ml-inference', 'production'), + hyperPodReplicas: fc.integer({ min: 1, max: 10 }) + }), + (base, hpVars) => { + const vars = { + ...base, + deploymentTarget: 'hyperpod-eks', + instanceType: undefined, + inferenceAmiVersion: undefined, + hyperPodCluster: hpVars.hyperPodCluster, + hyperPodNamespace: hpVars.hyperPodNamespace, + hyperPodReplicas: hpVars.hyperPodReplicas, + fsxVolumeHandle: undefined + }; + + const output = renderConfig(vars); + + assert.ok( + output.includes(`export HYPERPOD_CLUSTER_NAME="${hpVars.hyperPodCluster}"`), + 'Output must contain HYPERPOD_CLUSTER_NAME' + ); + assert.ok( + output.includes(`export HYPERPOD_NAMESPACE="${hpVars.hyperPodNamespace}"`), + 'Output must contain HYPERPOD_NAMESPACE' + ); + assert.ok( + output.includes(`export HYPERPOD_REPLICAS="${hpVars.hyperPodReplicas}"`), + 'Output must contain HYPERPOD_REPLICAS' + ); + + // Should NOT contain managed-inference variables + assert.ok( + !output.includes('export 
INSTANCE_TYPE='), + 'hyperpod-eks output must NOT contain INSTANCE_TYPE' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… HyperPod variables present for hyperpod-eks'); + }); + + it('should contain FSX_VOLUME_HANDLE only when fsxVolumeHandle is provided with hyperpod-eks', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 9.5: FSX_VOLUME_HANDLE conditional on fsxVolumeHandle'); + + fc.assert(fc.property( + baseConfigArb, + fc.record({ + hyperPodCluster: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + hyperPodNamespace: fc.constant('default'), + hyperPodReplicas: fc.integer({ min: 1, max: 4 }) + }), + fc.option(fc.stringMatching(/^fs-[a-f0-9]{17}$/), { nil: undefined }), + (base, hpVars, fsxVolumeHandle) => { + const vars = { + ...base, + deploymentTarget: 'hyperpod-eks', + instanceType: undefined, + inferenceAmiVersion: undefined, + ...hpVars, + fsxVolumeHandle + }; + + const output = renderConfig(vars); + + if (fsxVolumeHandle) { + assert.ok( + output.includes(`export FSX_VOLUME_HANDLE="${fsxVolumeHandle}"`), + 'Output must contain FSX_VOLUME_HANDLE when provided' + ); + } else { + assert.ok( + !output.includes('export FSX_VOLUME_HANDLE='), + 'Output must NOT contain FSX_VOLUME_HANDLE when not provided' + ); + } + } + ), { numRuns: 20 }); + + console.log(' โœ… FSX_VOLUME_HANDLE conditional logic correct'); + }); + + it('should show deployment-target-specific summary lines', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Configuration summary echo statements vary by deployment target'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('managed-inference', 'hyperpod-eks'), + (base, deploymentTarget) => { + const vars = { + ...base, + deploymentTarget, + instanceType: 'ml.m5.xlarge', + inferenceAmiVersion: undefined, + hyperPodCluster: 'my-cluster', + hyperPodNamespace: 'default', + hyperPodReplicas: 1, + fsxVolumeHandle: undefined + }; + + const output = renderConfig(vars); + + // Both targets should show build target and 
deployment target in summary + assert.ok(output.includes('Build target:'), 'Summary must show Build target'); + assert.ok(output.includes('Deployment target:'), 'Summary must show Deployment target'); + + if (deploymentTarget === 'managed-inference') { + assert.ok( + output.includes('echo " Instance: ${INSTANCE_TYPE}"'), + 'managed-inference summary must show Instance' + ); + assert.ok( + !output.includes('HyperPod cluster:'), + 'managed-inference summary must NOT show HyperPod cluster' + ); + } else { + assert.ok( + output.includes('echo " HyperPod cluster: ${HYPERPOD_CLUSTER_NAME}"'), + 'hyperpod-eks summary must show HyperPod cluster' + ); + assert.ok( + output.includes('echo " Namespace: ${HYPERPOD_NAMESPACE}"'), + 'hyperpod-eks summary must show Namespace' + ); + assert.ok( + !output.includes('echo " Instance: ${INSTANCE_TYPE}"'), + 'hyperpod-eks summary must NOT show Instance' + ); + } + } + ), { numRuns: 20 }); + + console.log(' โœ… Summary lines correct per deployment target'); + }); +}); diff --git a/test/input-parsing-and-generation/do-deploy-deployment-target.property.test.js b/test/input-parsing-and-generation/do-deploy-deployment-target.property.test.js new file mode 100644 index 0000000..4a09734 --- /dev/null +++ b/test/input-parsing-and-generation/do-deploy-deployment-target.property.test.js @@ -0,0 +1,382 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +/** + * Property 7: Deploy Script Content by Deployment Target + * + * For any valid configuration, when deploymentTarget equals managed-inference, + * the generated do/deploy script must contain SageMaker inference component + * commands (create-endpoint, create-inference-component) and must + * not contain kubectl commands. 
When deploymentTarget equals hyperpod-eks, + * the generated do/deploy script must contain kubectl commands + * (describe-cluster, eks update-kubeconfig, kubectl apply from hyperpod/) and must not contain + * SageMaker endpoint creation commands. For both targets, the script must + * contain ECR image verification logic. + * + * Validates: Requirements 5.2, 5.3, 5.4, 5.5, 5.6 + * + * Feature: sagemaker-hyperpod-deployment + */ + +import fc from 'fast-check'; +import { describe, it, before } from 'mocha'; +import assert from 'assert'; +import ejs from 'ejs'; +import { readFileSync } from 'fs'; +import path from 'path'; +import { fileURLToPath } from 'url'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +const templatePath = path.join(__dirname, '../../generators/app/templates/do/deploy'); +const templateContent = readFileSync(templatePath, 'utf8'); + +/** + * Render the do/deploy template with the given variables. + */ +function renderDeploy(vars) { + return ejs.render(templateContent, vars); +} + +/** Arbitrary for a base config shared by both deployment targets */ +const baseConfigArb = fc.record({ + projectName: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + deploymentConfig: fc.constantFrom('transformers-vllm', 'sklearn-flask', 'xgboost-fastapi'), + framework: fc.constantFrom('transformers', 'sklearn', 'xgboost', 'tensorflow'), + modelServer: fc.constantFrom('vllm', 'flask', 'fastapi', 'sglang'), + awsRegion: fc.constantFrom('us-east-1', 'us-west-2', 'eu-west-1'), + buildTarget: fc.constant('codebuild') +}); + +describe('Property 7: Deploy Script Content by Deployment Target', () => { + before(() => { + console.log('\n๐Ÿš€ Starting Deploy Script Content by Deployment Target Property Tests'); + console.log('๐Ÿ“‹ Testing: Requirements 5.2, 5.3, 5.4, 5.5, 5.6'); + console.log('๐Ÿ”ง Configuration: EJS template rendering with fast-check\n'); + }); + + it('should contain ECR image verification for any valid 
deployment target (Req 5.6)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 5.6: ECR image verification present for both targets'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('managed-inference', 'hyperpod-eks'), + (base, deploymentTarget) => { + const vars = { + ...base, + deploymentTarget, + instanceType: 'ml.m5.xlarge', + inferenceAmiVersion: undefined, + hyperPodCluster: 'test-cluster', + hyperPodNamespace: 'default', + hyperPodReplicas: 1, + fsxVolumeHandle: undefined + }; + + const output = renderDeploy(vars); + + assert.ok( + output.includes('ecr describe-images'), + 'Output must contain ECR image verification (ecr describe-images)' + ); + assert.ok( + output.includes('ECR image not found'), + 'Output must contain ECR image not found error message' + ); + assert.ok( + output.includes('ECR image found'), + 'Output must contain ECR image found success message' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… ECR image verification present for both targets'); + }); + + it('should contain SageMaker endpoint creation commands for managed-inference (Req 5.2)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 5.2: SageMaker endpoint creation logic for managed-inference'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('ml.m5.xlarge', 'ml.g5.xlarge', 'ml.p4d.24xlarge'), + fc.option(fc.constant('1.0.0'), { nil: undefined }), + (base, instanceType, inferenceAmiVersion) => { + const vars = { + ...base, + deploymentTarget: 'managed-inference', + instanceType, + inferenceAmiVersion, + hyperPodCluster: undefined, + hyperPodNamespace: undefined, + hyperPodReplicas: undefined, + fsxVolumeHandle: undefined + }; + + const output = renderDeploy(vars); + + // Must contain SageMaker inference component commands + assert.ok( + output.includes('sagemaker create-endpoint-config'), + 'managed-inference must contain create-endpoint-config command' + ); + assert.ok( + output.includes('sagemaker create-endpoint'), 
+ 'managed-inference must contain create-endpoint command' + ); + assert.ok( + output.includes('sagemaker create-inference-component'), + 'managed-inference must contain create-inference-component command' + ); + assert.ok( + output.includes('sagemaker wait inference-component-in-service'), + 'managed-inference must contain wait inference-component-in-service command' + ); + + // Must NOT contain kubectl commands + assert.ok( + !output.includes('kubectl'), + 'managed-inference must NOT contain kubectl commands' + ); + assert.ok( + !output.includes('describe-cluster'), + 'managed-inference must NOT contain describe-cluster' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… SageMaker endpoint creation commands present for managed-inference'); + }); + + it('should contain kubectl commands for hyperpod-eks (Req 5.3, 5.4, 5.5)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 5.3/5.4/5.5: kubectl deployment logic for hyperpod-eks'); + + fc.assert(fc.property( + baseConfigArb, + fc.record({ + hyperPodCluster: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + hyperPodNamespace: fc.constantFrom('default', 'ml-inference', 'production'), + hyperPodReplicas: fc.integer({ min: 1, max: 10 }) + }), + (base, hpVars) => { + const vars = { + ...base, + deploymentTarget: 'hyperpod-eks', + instanceType: undefined, + inferenceAmiVersion: undefined, + ...hpVars, + fsxVolumeHandle: undefined + }; + + const output = renderDeploy(vars); + + // Req 5.4: Must retrieve kubeconfig + assert.ok( + output.includes('describe-cluster'), + 'hyperpod-eks must contain describe-cluster command' + ); + assert.ok( + output.includes('eks update-kubeconfig'), + 'hyperpod-eks must contain eks update-kubeconfig command' + ); + + // Req 5.5: Must apply manifests from hyperpod/ directory + assert.ok( + output.includes('kubectl apply') && output.includes('hyperpod/'), + 'hyperpod-eks must contain kubectl apply from hyperpod/ directory' + ); + + // Must contain rollout status check + assert.ok( 
+ output.includes('kubectl rollout status'), + 'hyperpod-eks must contain kubectl rollout status command' + ); + + // Must contain namespace creation + assert.ok( + output.includes('kubectl create namespace'), + 'hyperpod-eks must contain namespace creation' + ); + + // Must NOT contain SageMaker inference component commands + assert.ok( + !output.includes('sagemaker create-endpoint-config'), + 'hyperpod-eks must NOT contain create-endpoint-config command' + ); + assert.ok( + !output.includes('sagemaker create-inference-component'), + 'hyperpod-eks must NOT contain create-inference-component command' + ); + assert.ok( + !output.includes('sagemaker wait inference-component-in-service'), + 'hyperpod-eks must NOT contain wait inference-component-in-service command' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… kubectl commands present for hyperpod-eks'); + }); + + it('should include IAM permission error handling for hyperpod-eks (Req 14.2)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 14.2: IAM permission error handling in hyperpod-eks deploy'); + + fc.assert(fc.property( + baseConfigArb, + fc.record({ + hyperPodCluster: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + hyperPodNamespace: fc.constantFrom('default', 'ml-inference'), + hyperPodReplicas: fc.integer({ min: 1, max: 4 }) + }), + (base, hpVars) => { + const vars = { + ...base, + deploymentTarget: 'hyperpod-eks', + instanceType: undefined, + inferenceAmiVersion: undefined, + ...hpVars, + fsxVolumeHandle: undefined + }; + + const output = renderDeploy(vars); + + // Must contain IAM permission error hints + assert.ok( + output.includes('IAM') || output.includes('permission'), + 'hyperpod-eks must contain IAM permission error guidance' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… IAM permission error handling present'); + }); + + it('should include kubectl failure error handling for hyperpod-eks (Req 14.3)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 14.3: 
kubectl apply failure error handling'); + + fc.assert(fc.property( + baseConfigArb, + fc.record({ + hyperPodCluster: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + hyperPodNamespace: fc.constantFrom('default', 'ml-inference'), + hyperPodReplicas: fc.integer({ min: 1, max: 4 }) + }), + (base, hpVars) => { + const vars = { + ...base, + deploymentTarget: 'hyperpod-eks', + instanceType: undefined, + inferenceAmiVersion: undefined, + ...hpVars, + fsxVolumeHandle: undefined + }; + + const output = renderDeploy(vars); + + // Must contain kubectl failure error messages with node capacity suggestions + assert.ok( + output.includes('Failed to apply Kubernetes manifests') || + output.includes('node capacity'), + 'hyperpod-eks must contain kubectl apply failure guidance' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… kubectl failure error handling present'); + }); + + it('should include FSx PVC error hints when fsxVolumeHandle is provided (Req 14.3)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 14.3: FSx PVC error hints when fsxVolumeHandle provided'); + + fc.assert(fc.property( + baseConfigArb, + fc.record({ + hyperPodCluster: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + hyperPodNamespace: fc.constantFrom('default', 'ml-inference'), + hyperPodReplicas: fc.integer({ min: 1, max: 4 }) + }), + fc.option(fc.stringMatching(/^fs-[a-f0-9]{17}$/), { nil: undefined }), + (base, hpVars, fsxVolumeHandle) => { + const vars = { + ...base, + deploymentTarget: 'hyperpod-eks', + instanceType: undefined, + inferenceAmiVersion: undefined, + ...hpVars, + fsxVolumeHandle + }; + + const output = renderDeploy(vars); + + if (fsxVolumeHandle) { + assert.ok( + output.includes('FSx CSI driver') || output.includes('PVC binding'), + 'When fsxVolumeHandle is provided, deploy must include FSx/PVC error hints' + ); + } + } + ), { numRuns: 20 }); + + console.log(' โœ… FSx PVC error hints conditional on fsxVolumeHandle'); + }); + + it('should show deployment-target-specific 
header info', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Deploy script header shows target-specific info'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('managed-inference', 'hyperpod-eks'), + (base, deploymentTarget) => { + const vars = { + ...base, + deploymentTarget, + instanceType: 'ml.m5.xlarge', + inferenceAmiVersion: undefined, + hyperPodCluster: 'test-cluster', + hyperPodNamespace: 'default', + hyperPodReplicas: 1, + fsxVolumeHandle: undefined + }; + + const output = renderDeploy(vars); + + // Both targets should show deployment target + assert.ok( + output.includes('Deployment target: ${DEPLOYMENT_TARGET}'), + 'Deploy script must show deployment target' + ); + + if (deploymentTarget === 'managed-inference') { + assert.ok( + output.includes('Instance type: ${INSTANCE_TYPE}'), + 'managed-inference header must show instance type' + ); + } else { + assert.ok( + output.includes('HyperPod cluster: ${HYPERPOD_CLUSTER_NAME}'), + 'hyperpod-eks header must show cluster name' + ); + assert.ok( + output.includes('Namespace: ${HYPERPOD_NAMESPACE}'), + 'hyperpod-eks header must show namespace' + ); + } + } + ), { numRuns: 20 }); + + console.log(' โœ… Deployment-target-specific header info correct'); + }); +}); diff --git a/test/input-parsing-and-generation/do-framework-simple.test.js b/test/input-parsing-and-generation/do-framework-simple.test.js index 8333e1c..cc1524e 100644 --- a/test/input-parsing-and-generation/do-framework-simple.test.js +++ b/test/input-parsing-and-generation/do-framework-simple.test.js @@ -121,8 +121,8 @@ describe('DO Framework - Simplified Tests', () => { 'FRAMEWORK', 'MODEL_SERVER', 'AWS_REGION', - 'INSTANCE_TYPE', - 'DEPLOY_TARGET' + 'BUILD_TARGET', + 'DEPLOYMENT_TARGET' ]; requiredVars.forEach(varName => { diff --git a/test/input-parsing-and-generation/do-logs-deployment-target.property.test.js b/test/input-parsing-and-generation/do-logs-deployment-target.property.test.js new file mode 100644 index 
0000000..5e28620 --- /dev/null +++ b/test/input-parsing-and-generation/do-logs-deployment-target.property.test.js @@ -0,0 +1,238 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +/** + * Property 15: Logs Script Content by Deployment Target + * + * For any valid configuration, when deploymentTarget equals managed-inference, + * the generated do/logs script must contain CloudWatch Logs tailing logic + * (aws logs tail) and must not contain kubectl commands. When deploymentTarget + * equals hyperpod-eks, the generated do/logs script must contain kubectl logs + * tailing logic and must retrieve kubeconfig via aws sagemaker + * get-cluster-kubeconfig before tailing. For both targets, the script must + * not contain content from the other target. + * + * Validates: Requirements 15.2, 15.3 + * + * Feature: sagemaker-hyperpod-deployment + */ + +import fc from 'fast-check'; +import { describe, it, before } from 'mocha'; +import assert from 'assert'; +import ejs from 'ejs'; +import { readFileSync } from 'fs'; +import path from 'path'; +import { fileURLToPath } from 'url'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +const templatePath = path.join(__dirname, '../../generators/app/templates/do/logs'); +const templateContent = readFileSync(templatePath, 'utf8'); + +/** + * Render the do/logs template with the given variables. 
+ */ +function renderLogs(vars) { + return ejs.render(templateContent, vars); +} + +/** Arbitrary for a base config shared by both deployment targets */ +const baseConfigArb = fc.record({ + projectName: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + deploymentConfig: fc.constantFrom('transformers-vllm', 'sklearn-flask', 'xgboost-fastapi'), + framework: fc.constantFrom('transformers', 'sklearn', 'xgboost', 'tensorflow'), + modelServer: fc.constantFrom('vllm', 'flask', 'fastapi', 'sglang'), + awsRegion: fc.constantFrom('us-east-1', 'us-west-2', 'eu-west-1'), + buildTarget: fc.constant('codebuild') +}); + +describe('Property 15: Logs Script Content by Deployment Target', () => { + before(() => { + console.log('\n๐Ÿ“‹ Starting Logs Script Content by Deployment Target Property Tests'); + console.log('๐Ÿ“‹ Testing: Requirements 15.2, 15.3'); + console.log('๐Ÿ”ง Configuration: EJS template rendering with fast-check\n'); + }); + + it('should contain CloudWatch Logs tailing logic for managed-inference (Req 15.2)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 15.2: CloudWatch Logs tailing for managed-inference'); + + fc.assert(fc.property( + baseConfigArb, + (base) => { + const vars = { + ...base, + deploymentTarget: 'managed-inference', + instanceType: 'ml.m5.xlarge', + hyperPodCluster: undefined, + hyperPodNamespace: undefined, + hyperPodReplicas: undefined, + fsxVolumeHandle: undefined + }; + + const output = renderLogs(vars); + + // Must contain CloudWatch Logs tailing + assert.ok( + output.includes('aws logs tail'), + 'managed-inference must contain aws logs tail command' + ); + assert.ok( + output.includes('/aws/sagemaker/Endpoints/'), + 'managed-inference must reference SageMaker Endpoints log group' + ); + assert.ok( + output.includes('--follow'), + 'managed-inference must tail logs with --follow flag' + ); + + // Must NOT contain kubectl commands + assert.ok( + !output.includes('kubectl'), + 'managed-inference must NOT contain kubectl commands' + 
); + assert.ok( + !output.includes('describe-cluster'), + 'managed-inference must NOT contain describe-cluster' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… CloudWatch Logs tailing present for managed-inference'); + }); + + it('should contain kubectl logs tailing logic for hyperpod-eks (Req 15.3)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 15.3: kubectl logs tailing for hyperpod-eks'); + + fc.assert(fc.property( + baseConfigArb, + fc.record({ + hyperPodCluster: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + hyperPodNamespace: fc.constantFrom('default', 'ml-inference', 'production'), + hyperPodReplicas: fc.integer({ min: 1, max: 10 }) + }), + (base, hpVars) => { + const vars = { + ...base, + deploymentTarget: 'hyperpod-eks', + instanceType: undefined, + ...hpVars, + fsxVolumeHandle: undefined + }; + + const output = renderLogs(vars); + + // Must contain kubectl logs + assert.ok( + output.includes('kubectl logs'), + 'hyperpod-eks must contain kubectl logs command' + ); + assert.ok( + output.includes('-f -l'), + 'hyperpod-eks must tail logs with -f (follow) and -l (label selector)' + ); + assert.ok( + output.includes('${HYPERPOD_NAMESPACE}'), + 'hyperpod-eks must reference the configured namespace' + ); + + // Must NOT contain CloudWatch commands + assert.ok( + !output.includes('aws logs tail'), + 'hyperpod-eks must NOT contain aws logs tail command' + ); + assert.ok( + !output.includes('/aws/sagemaker/Endpoints/'), + 'hyperpod-eks must NOT reference SageMaker Endpoints log group' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… kubectl logs tailing present for hyperpod-eks'); + }); + + it('should retrieve kubeconfig before tailing for hyperpod-eks (Req 15.4)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 15.4: kubeconfig retrieval before tailing'); + + fc.assert(fc.property( + baseConfigArb, + fc.record({ + hyperPodCluster: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + hyperPodNamespace: 
fc.constantFrom('default', 'ml-inference'), + hyperPodReplicas: fc.integer({ min: 1, max: 4 }) + }), + (base, hpVars) => { + const vars = { + ...base, + deploymentTarget: 'hyperpod-eks', + instanceType: undefined, + ...hpVars, + fsxVolumeHandle: undefined + }; + + const output = renderLogs(vars); + + // Must retrieve kubeconfig + assert.ok( + output.includes('describe-cluster'), + 'hyperpod-eks must retrieve cluster info via describe-cluster' + ); + assert.ok( + output.includes('eks update-kubeconfig'), + 'hyperpod-eks must configure kubectl via eks update-kubeconfig' + ); + + // kubeconfig retrieval must come BEFORE kubectl logs + const kubeconfigIndex = output.indexOf('eks update-kubeconfig'); + const kubectlLogsIndex = output.indexOf('kubectl logs'); + assert.ok( + kubeconfigIndex < kubectlLogsIndex, + 'eks update-kubeconfig must appear before kubectl logs' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… kubeconfig retrieval before tailing verified'); + }); + + it('should produce mutually exclusive content for each deployment target', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Mutual exclusivity: each target produces only its own content'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('managed-inference', 'hyperpod-eks'), + (base, deploymentTarget) => { + const vars = { + ...base, + deploymentTarget, + instanceType: 'ml.m5.xlarge', + hyperPodCluster: 'test-cluster', + hyperPodNamespace: 'default', + hyperPodReplicas: 1, + fsxVolumeHandle: undefined + }; + + const output = renderLogs(vars); + + if (deploymentTarget === 'managed-inference') { + assert.ok(output.includes('aws logs tail'), 'managed-inference must have aws logs tail'); + assert.ok(!output.includes('kubectl logs'), 'managed-inference must NOT have kubectl logs'); + } else { + assert.ok(output.includes('kubectl logs'), 'hyperpod-eks must have kubectl logs'); + assert.ok(!output.includes('aws logs tail'), 'hyperpod-eks must NOT have aws logs tail'); + } + } + ), { 
numRuns: 20 }); + + console.log(' โœ… Mutual exclusivity verified'); + }); +}); diff --git a/test/input-parsing-and-generation/do-test-deployment-target.property.test.js b/test/input-parsing-and-generation/do-test-deployment-target.property.test.js new file mode 100644 index 0000000..59bee43 --- /dev/null +++ b/test/input-parsing-and-generation/do-test-deployment-target.property.test.js @@ -0,0 +1,403 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +/** + * Property 16: Test Script Content by Deployment Target + * + * For any valid configuration, when deploymentTarget equals managed-inference, + * the generated do/test script must support local and SageMaker endpoint test + * modes using aws sagemaker-runtime invoke-endpoint. When deploymentTarget + * equals hyperpod-eks, the generated do/test script must support local and + * hyperpod test modes, where the hyperpod mode uses kubectl port-forward to + * test the deployed service via curl. + * + * Validates: Requirements 16.2, 16.3, 16.4 + * + * Feature: sagemaker-hyperpod-deployment + */ + +import fc from 'fast-check'; +import { describe, it, before } from 'mocha'; +import assert from 'assert'; +import ejs from 'ejs'; +import { readFileSync } from 'fs'; +import path from 'path'; +import { fileURLToPath } from 'url'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +const templatePath = path.join(__dirname, '../../generators/app/templates/do/test'); +const templateContent = readFileSync(templatePath, 'utf8'); + +/** + * Render the do/test template with the given variables. 
+ */ +function renderTest(vars) { + return ejs.render(templateContent, vars); +} + +/** Arbitrary for a base config shared by both deployment targets */ +const baseConfigArb = fc.record({ + projectName: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + deploymentConfig: fc.constantFrom('transformers-vllm', 'sklearn-flask', 'xgboost-fastapi'), + framework: fc.constantFrom('transformers', 'sklearn', 'xgboost', 'tensorflow'), + modelServer: fc.constantFrom('vllm', 'flask', 'fastapi', 'sglang'), + awsRegion: fc.constantFrom('us-east-1', 'us-west-2', 'eu-west-1'), + buildTarget: fc.constant('codebuild'), + modelName: fc.constantFrom('meta-llama/Llama-2-7b-hf', 'mistralai/Mistral-7B-v0.1') +}); + +describe('Property 16: Test Script Content by Deployment Target', () => { + before(() => { + console.log('\n๐Ÿงช Starting Test Script Content by Deployment Target Property Tests'); + console.log('๐Ÿ“‹ Testing: Requirements 16.2, 16.3, 16.4'); + console.log('๐Ÿ”ง Configuration: EJS template rendering with fast-check\n'); + }); + + it('should support local and SageMaker endpoint test modes for managed-inference (Req 16.2)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 16.2: local + SageMaker endpoint test modes for managed-inference'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('ml.m5.xlarge', 'ml.g5.xlarge', 'ml.p4d.24xlarge'), + (base, instanceType) => { + const vars = { + ...base, + deploymentTarget: 'managed-inference', + instanceType, + hyperPodCluster: undefined, + hyperPodNamespace: undefined, + hyperPodReplicas: undefined + }; + + const output = renderTest(vars); + + // Must support local test mode + assert.ok( + output.includes('localhost:8080'), + 'managed-inference must support local testing at localhost:8080' + ); + assert.ok( + output.includes('Testing local container'), + 'managed-inference must have local container test message' + ); + + // Must support SageMaker endpoint test mode + assert.ok( + output.includes('sagemaker-runtime 
invoke-endpoint'), + 'managed-inference must use aws sagemaker-runtime invoke-endpoint' + ); + assert.ok( + output.includes('Testing SageMaker endpoint'), + 'managed-inference must have SageMaker endpoint test message' + ); + assert.ok( + output.includes('describe-endpoint'), + 'managed-inference must check endpoint status via describe-endpoint' + ); + + // Must NOT contain kubectl commands + assert.ok( + !output.includes('kubectl port-forward'), + 'managed-inference must NOT contain kubectl port-forward' + ); + assert.ok( + !output.includes('describe-cluster'), + 'managed-inference must NOT contain describe-cluster' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… local + SageMaker endpoint test modes present for managed-inference'); + }); + + it('should support local and hyperpod test modes for hyperpod-eks (Req 16.3)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 16.3: local + hyperpod test modes for hyperpod-eks'); + + fc.assert(fc.property( + baseConfigArb, + fc.record({ + hyperPodCluster: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + hyperPodNamespace: fc.constantFrom('default', 'ml-inference', 'production'), + hyperPodReplicas: fc.integer({ min: 1, max: 10 }) + }), + (base, hpVars) => { + const vars = { + ...base, + deploymentTarget: 'hyperpod-eks', + instanceType: undefined, + ...hpVars + }; + + const output = renderTest(vars); + + // Must support local test mode + assert.ok( + output.includes('localhost:8080'), + 'hyperpod-eks must support local testing at localhost:8080' + ); + assert.ok( + output.includes('Testing local container'), + 'hyperpod-eks must have local container test message' + ); + + // Must support hyperpod test mode with kubectl port-forward + assert.ok( + output.includes('kubectl port-forward'), + 'hyperpod-eks must use kubectl port-forward' + ); + assert.ok( + output.includes('svc/${PROJECT_NAME}'), + 'hyperpod-eks must port-forward to svc/${PROJECT_NAME}' + ); + assert.ok( + output.includes('${LOCAL_PORT}:8080') 
|| output.includes('8080:8080'), + 'hyperpod-eks must forward port 8080:8080' + ); + assert.ok( + output.includes('Testing HyperPod EKS deployment'), + 'hyperpod-eks must have HyperPod test message' + ); + + // Must NOT contain SageMaker endpoint commands + assert.ok( + !output.includes('sagemaker-runtime invoke-endpoint'), + 'hyperpod-eks must NOT contain sagemaker-runtime invoke-endpoint' + ); + assert.ok( + !output.includes('describe-endpoint'), + 'hyperpod-eks must NOT contain describe-endpoint' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… local + hyperpod test modes present for hyperpod-eks'); + }); + + it('should use kubectl port-forward and curl for hyperpod test mode (Req 16.4)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 16.4: kubectl port-forward + curl for hyperpod test mode'); + + fc.assert(fc.property( + baseConfigArb, + fc.record({ + hyperPodCluster: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + hyperPodNamespace: fc.constantFrom('default', 'ml-inference'), + hyperPodReplicas: fc.integer({ min: 1, max: 4 }) + }), + (base, hpVars) => { + const vars = { + ...base, + deploymentTarget: 'hyperpod-eks', + instanceType: undefined, + ...hpVars + }; + + const output = renderTest(vars); + + // Must retrieve kubeconfig before port-forward + assert.ok( + output.includes('describe-cluster'), + 'hyperpod-eks must retrieve cluster info via describe-cluster' + ); + assert.ok( + output.includes('eks update-kubeconfig'), + 'hyperpod-eks must configure kubectl via eks update-kubeconfig' + ); + + // kubeconfig retrieval must come BEFORE port-forward + const kubeconfigIndex = output.indexOf('eks update-kubeconfig'); + const portForwardIndex = output.indexOf('kubectl port-forward'); + assert.ok( + kubeconfigIndex < portForwardIndex, + 'eks update-kubeconfig must appear before kubectl port-forward' + ); + + // Must test /ping endpoint via curl + assert.ok( + output.includes('/ping'), + 'hyperpod-eks must test /ping endpoint' + ); + + // Must 
test /invocations endpoint via curl + assert.ok( + output.includes('/invocations'), + 'hyperpod-eks must test /invocations endpoint' + ); + + // Must use curl for testing + assert.ok( + output.includes('curl'), + 'hyperpod-eks must use curl for testing' + ); + + // Must include cleanup trap for port-forward + assert.ok( + output.includes('trap'), + 'hyperpod-eks must include trap for cleanup' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… kubectl port-forward + curl verified for hyperpod test mode'); + }); + + it('should use framework-specific test payloads for both deployment targets', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Framework-specific test payloads for both targets'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('managed-inference', 'hyperpod-eks'), + (base, deploymentTarget) => { + const vars = { + ...base, + deploymentTarget, + instanceType: 'ml.m5.xlarge', + hyperPodCluster: 'test-cluster', + hyperPodNamespace: 'default', + hyperPodReplicas: 1 + }; + + const output = renderTest(vars); + + // Must have framework-specific payload logic + assert.ok( + output.includes('case "${FRAMEWORK}"'), + 'Output must contain framework case statement' + ); + + // Must handle sklearn/xgboost with instances array + assert.ok( + output.includes('sklearn|xgboost)'), + 'Output must handle sklearn and xgboost frameworks' + ); + assert.ok( + output.includes('"instances"'), + 'Output must use instances array for traditional ML' + ); + + // Must handle tensorflow + assert.ok( + output.includes('tensorflow)'), + 'Output must handle tensorflow framework' + ); + + // Must handle transformers with model server variants + assert.ok( + output.includes('transformers)'), + 'Output must handle transformers framework' + ); + assert.ok( + output.includes('vllm|sglang)'), + 'Output must handle vllm and sglang model servers' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… Framework-specific test payloads verified'); + }); + + it('should 
show deployment-target-specific usage info', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Usage info shows target-specific test options'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('managed-inference', 'hyperpod-eks'), + (base, deploymentTarget) => { + const vars = { + ...base, + deploymentTarget, + instanceType: 'ml.m5.xlarge', + hyperPodCluster: 'test-cluster', + hyperPodNamespace: 'default', + hyperPodReplicas: 1 + }; + + const output = renderTest(vars); + + if (deploymentTarget === 'managed-inference') { + // managed-inference uses endpoint name as argument + assert.ok( + output.includes('ENDPOINT_NAME="${1:-'), + 'managed-inference must parse endpoint name from argument' + ); + assert.ok( + output.includes('Deploy to SageMaker'), + 'managed-inference next steps must mention SageMaker' + ); + } else { + // hyperpod-eks uses local|hyperpod as argument + assert.ok( + output.includes('TEST_TARGET="${1:-'), + 'hyperpod-eks must parse test target from argument' + ); + assert.ok( + output.includes('local|hyperpod'), + 'hyperpod-eks usage must show local|hyperpod options' + ); + assert.ok( + output.includes('Deploy to HyperPod'), + 'hyperpod-eks next steps must mention HyperPod' + ); + } + } + ), { numRuns: 20 }); + + console.log(' โœ… Deployment-target-specific usage info correct'); + }); + + it('should produce mutually exclusive content for each deployment target', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Mutual exclusivity: each target produces only its own content'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('managed-inference', 'hyperpod-eks'), + (base, deploymentTarget) => { + const vars = { + ...base, + deploymentTarget, + instanceType: 'ml.m5.xlarge', + hyperPodCluster: 'test-cluster', + hyperPodNamespace: 'default', + hyperPodReplicas: 1 + }; + + const output = renderTest(vars); + + if (deploymentTarget === 'managed-inference') { + assert.ok( + output.includes('sagemaker-runtime 
invoke-endpoint'), + 'managed-inference must have invoke-endpoint' + ); + assert.ok( + !output.includes('kubectl port-forward'), + 'managed-inference must NOT have kubectl port-forward' + ); + } else { + assert.ok( + output.includes('kubectl port-forward'), + 'hyperpod-eks must have kubectl port-forward' + ); + assert.ok( + !output.includes('sagemaker-runtime invoke-endpoint'), + 'hyperpod-eks must NOT have invoke-endpoint' + ); + } + } + ), { numRuns: 20 }); + + console.log(' โœ… Mutual exclusivity verified'); + }); +}); diff --git a/test/input-parsing-and-generation/docker-build-validation.test.js b/test/input-parsing-and-generation/docker-build-validation.test.js index ac178e7..5e2e816 100644 --- a/test/input-parsing-and-generation/docker-build-validation.test.js +++ b/test/input-parsing-and-generation/docker-build-validation.test.js @@ -142,7 +142,7 @@ describe('Docker Build Validation (Optional)', () => { modelServer: 'flask', includeSampleModel: false, includeTesting: false, - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.m5.large', awsRegion: 'us-east-1', projectName: 'test-sklearn-docker', @@ -189,7 +189,7 @@ describe('Docker Build Validation (Optional)', () => { modelServer: 'vllm', includeSampleModel: false, includeTesting: false, - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.g5.xlarge', awsRegion: 'us-east-1', awsRoleArn: '', @@ -236,7 +236,7 @@ describe('Docker Build Validation (Optional)', () => { modelServer: 'flask', includeSampleModel: false, includeTesting: false, - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.m5.large', awsRegion: 'us-east-1', projectName: 'test-log-format-docker', @@ -293,7 +293,7 @@ describe('Docker Build Validation (Optional)', () => { modelServer: 'flask', includeSampleModel: false, includeTesting: false, - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.m5.large', awsRegion: 'us-east-1', projectName: 
'test-failure-docker', diff --git a/test/input-parsing-and-generation/error-handling.test.js b/test/input-parsing-and-generation/error-handling.test.js index 0aa89cf..6a40fef 100644 --- a/test/input-parsing-and-generation/error-handling.test.js +++ b/test/input-parsing-and-generation/error-handling.test.js @@ -446,7 +446,8 @@ describe('Error Handling and Validation', () => { instanceType: 'ml.m5.large', projectName: 'test-project', destinationDir: '.', - deployTarget: 'codebuild' + buildTarget: 'codebuild', + deploymentTarget: 'managed-inference' }; const completeErrors = configManager.validateRequiredParameters(completeConfig); diff --git a/test/input-parsing-and-generation/hyperpod-directory-presence.property.test.js b/test/input-parsing-and-generation/hyperpod-directory-presence.property.test.js new file mode 100644 index 0000000..0ce82ee --- /dev/null +++ b/test/input-parsing-and-generation/hyperpod-directory-presence.property.test.js @@ -0,0 +1,256 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +/** + * Property 9: Conditional HyperPod Directory Presence + * + * For any valid configuration, the `hyperpod/` directory must be present in + * the generated project if and only if `deploymentTarget` equals `hyperpod-eks`. + * When `deploymentTarget` equals `managed-inference`, the `hyperpod/` directory + * must be absent. + * + * This property validates the ignorePatterns logic in the writing() phase of + * index.js โ€” testing that the correct glob patterns are built based on + * deploymentTarget, not full file system operations. + * + * Validates: Requirements 7.1, 7.2 + * + * Feature: sagemaker-hyperpod-deployment + */ + +import fc from 'fast-check'; +import { describe, it, before } from 'mocha'; +import assert from 'assert'; + +/** + * Simulate the ignorePatterns logic from the writing() phase of index.js. + * This mirrors the exact conditional used in the generator. 
+ * + * @param {object} answers - Generator answers containing deploymentTarget + * @returns {string[]} Array of glob ignore patterns + */ +function buildIgnorePatterns(answers) { + const ignorePatterns = []; + + // Exclude HyperPod K8s manifests when not deploying to HyperPod + if (answers.deploymentTarget !== 'hyperpod-eks') { + ignorePatterns.push('**/hyperpod/**'); + } + + return ignorePatterns; +} + +/** + * Check whether a file path would be excluded by the given ignore patterns. + * Uses simple glob matching for the `** /hyperpod/**` pattern. + * + * @param {string} filePath - Relative file path to check + * @param {string[]} ignorePatterns - Array of glob patterns + * @returns {boolean} True if the file would be excluded + */ +function isExcludedByPatterns(filePath, ignorePatterns) { + for (const pattern of ignorePatterns) { + if (pattern === '**/hyperpod/**') { + // Match any path containing a /hyperpod/ segment or starting with hyperpod/ + if (filePath.includes('/hyperpod/') || filePath.startsWith('hyperpod/')) { + return true; + } + } + } + return false; +} + +/** Arbitrary for base config shared by both deployment targets */ +const baseConfigArb = fc.record({ + projectName: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + deploymentConfig: fc.constantFrom('transformers-vllm', 'sklearn-flask', 'xgboost-fastapi'), + framework: fc.constantFrom('transformers', 'sklearn', 'xgboost', 'tensorflow'), + modelServer: fc.constantFrom('vllm', 'flask', 'fastapi', 'sglang'), + awsRegion: fc.constantFrom('us-east-1', 'us-west-2', 'eu-west-1'), + buildTarget: fc.constant('codebuild'), + modelName: fc.constantFrom('meta-llama/Llama-2-7b-hf', 'mistralai/Mistral-7B-v0.1'), + roleArn: fc.constantFrom('arn:aws:iam::123456789012:role/SageMakerRole', undefined), + inferenceAmiVersion: fc.constantFrom('1.0.0', undefined) +}); + +/** Arbitrary for HyperPod-specific config */ +const hyperPodConfigArb = fc.record({ + hyperPodCluster: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + 
hyperPodNamespace: fc.constantFrom('default', 'ml-inference', 'production'), + hyperPodReplicas: fc.integer({ min: 1, max: 10 }), + fsxVolumeHandle: fc.option(fc.stringMatching(/^fs-[a-f0-9]{17}$/), { nil: undefined }) +}); + +/** Known hyperpod template file paths */ +const hyperpodFiles = [ + 'hyperpod/deployment.yaml', + 'hyperpod/service.yaml', + 'hyperpod/configmap.yaml', + 'hyperpod/pvc.yaml' +]; + +describe('Property 9: Conditional HyperPod Directory Presence', () => { + before(() => { + console.log('\n๐Ÿ“œ Starting Conditional HyperPod Directory Presence Property Tests'); + console.log('๐Ÿ“‹ Testing: Requirements 7.1, 7.2'); + console.log('๐Ÿ”ง Configuration: ignorePatterns logic with fast-check\n'); + }); + + it('should include hyperpod/ directory when deploymentTarget is hyperpod-eks (Req 7.1)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 7.1: hyperpod/ present when deploymentTarget === hyperpod-eks'); + + fc.assert(fc.property( + baseConfigArb, + hyperPodConfigArb, + (base, hpVars) => { + const answers = { + ...base, + deploymentTarget: 'hyperpod-eks', + ...hpVars + }; + + const ignorePatterns = buildIgnorePatterns(answers); + + // **/hyperpod/** must NOT be in ignorePatterns + assert.ok( + !ignorePatterns.includes('**/hyperpod/**'), + 'hyperpod/ must not be excluded when deploymentTarget is hyperpod-eks' + ); + + // All hyperpod template files must NOT be excluded + for (const file of hyperpodFiles) { + assert.ok( + !isExcludedByPatterns(file, ignorePatterns), + `${file} must not be excluded when deploymentTarget is hyperpod-eks` + ); + } + } + ), { numRuns: 50 }); + + console.log(' โœ… hyperpod/ directory included for hyperpod-eks'); + }); + + it('should exclude hyperpod/ directory when deploymentTarget is managed-inference (Req 7.2)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 7.2: hyperpod/ absent when deploymentTarget === managed-inference'); + + fc.assert(fc.property( + baseConfigArb, + 
fc.constantFrom('ml.m5.xlarge', 'ml.g5.xlarge'), + (base, instanceType) => { + const answers = { + ...base, + deploymentTarget: 'managed-inference', + instanceType, + hyperPodCluster: undefined, + hyperPodNamespace: undefined, + hyperPodReplicas: undefined, + fsxVolumeHandle: undefined + }; + + const ignorePatterns = buildIgnorePatterns(answers); + + // **/hyperpod/** MUST be in ignorePatterns + assert.ok( + ignorePatterns.includes('**/hyperpod/**'), + 'hyperpod/ must be excluded when deploymentTarget is managed-inference' + ); + + // All hyperpod template files must be excluded + for (const file of hyperpodFiles) { + assert.ok( + isExcludedByPatterns(file, ignorePatterns), + `${file} must be excluded when deploymentTarget is managed-inference` + ); + } + } + ), { numRuns: 50 }); + + console.log(' โœ… hyperpod/ directory excluded for managed-inference'); + }); + + it('should have hyperpod/ presence be a biconditional on deploymentTarget (Req 7.1, 7.2)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 7.1 + 7.2: hyperpod/ present iff deploymentTarget === hyperpod-eks'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('managed-inference', 'hyperpod-eks'), + hyperPodConfigArb, + (base, deploymentTarget, hpVars) => { + const answers = { + ...base, + deploymentTarget, + instanceType: deploymentTarget === 'managed-inference' ? 'ml.m5.xlarge' : undefined, + ...(deploymentTarget === 'hyperpod-eks' ? 
hpVars : { + hyperPodCluster: undefined, + hyperPodNamespace: undefined, + hyperPodReplicas: undefined, + fsxVolumeHandle: undefined + }) + }; + + const ignorePatterns = buildIgnorePatterns(answers); + const hyperpodExcluded = ignorePatterns.includes('**/hyperpod/**'); + + // Biconditional: hyperpod excluded iff NOT hyperpod-eks + assert.strictEqual( + hyperpodExcluded, + deploymentTarget !== 'hyperpod-eks', + `hyperpod/ exclusion (${hyperpodExcluded}) must match deploymentTarget !== hyperpod-eks (${deploymentTarget !== 'hyperpod-eks'})` + ); + + // Verify file-level exclusion matches pattern-level exclusion + for (const file of hyperpodFiles) { + const fileExcluded = isExcludedByPatterns(file, ignorePatterns); + assert.strictEqual( + fileExcluded, + deploymentTarget !== 'hyperpod-eks', + `${file} exclusion must match deploymentTarget for ${deploymentTarget}` + ); + } + } + ), { numRuns: 100 }); + + console.log(' โœ… Biconditional verified: hyperpod/ present iff hyperpod-eks'); + }); + + it('should not affect non-hyperpod files regardless of deploymentTarget', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Non-hyperpod files unaffected by deploymentTarget'); + + const nonHyperpodFiles = [ + 'Dockerfile', + 'do/config', + 'do/deploy', + 'do/clean', + 'do/logs', + 'do/test', + 'code/serve', + 'deploy/build_and_push.sh' + ]; + + fc.assert(fc.property( + fc.constantFrom('managed-inference', 'hyperpod-eks'), + (deploymentTarget) => { + const answers = { deploymentTarget }; + const ignorePatterns = buildIgnorePatterns(answers); + + // Non-hyperpod files must never be excluded by the hyperpod pattern + for (const file of nonHyperpodFiles) { + assert.ok( + !isExcludedByPatterns(file, ignorePatterns), + `${file} must not be excluded by hyperpod ignore pattern` + ); + } + } + ), { numRuns: 20 }); + + console.log(' โœ… Non-hyperpod files unaffected'); + }); +}); diff --git a/test/input-parsing-and-generation/k8s-manifest-port-consistency.property.test.js 
b/test/input-parsing-and-generation/k8s-manifest-port-consistency.property.test.js new file mode 100644 index 0000000..475503b --- /dev/null +++ b/test/input-parsing-and-generation/k8s-manifest-port-consistency.property.test.js @@ -0,0 +1,451 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +/** + * Property 10: Kubernetes Manifest Port Consistency + * + * For any valid hyperpod-eks configuration, the generated deployment.yaml must + * specify containerPort: 8080 and the generated service.yaml must specify + * targetPort: 8080, maintaining SageMaker BYOC compatibility. + * + * Validates: Requirements 8.1, 8.2, 13.1, 13.2 + * + * Feature: sagemaker-hyperpod-deployment + */ + +import fc from 'fast-check'; +import { describe, it, before } from 'mocha'; +import assert from 'assert'; +import ejs from 'ejs'; +import { readFileSync } from 'fs'; +import path from 'path'; +import { fileURLToPath } from 'url'; +import yaml from 'js-yaml'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +// Load Kubernetes manifest templates +const templatesDir = path.join(__dirname, '../../generators/app/templates/hyperpod'); + +const deploymentTemplate = readFileSync(path.join(templatesDir, 'deployment.yaml'), 'utf8'); +const serviceTemplate = readFileSync(path.join(templatesDir, 'service.yaml'), 'utf8'); + +/** + * Render a template with the given variables. 
+ */ +function renderTemplate(template, vars) { + return ejs.render(template, vars); +} + +/** Arbitrary for a base config for HyperPod EKS */ +const baseConfigArb = fc.record({ + projectName: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + deploymentConfig: fc.constantFrom('transformers-vllm', 'transformers-sglang'), + framework: fc.constantFrom('transformers'), + modelServer: fc.constantFrom('vllm', 'sglang'), + awsRegion: fc.constantFrom('us-east-1', 'us-west-2', 'eu-west-1'), + buildTarget: fc.constant('codebuild'), + deploymentTarget: fc.constant('hyperpod-eks'), + instanceType: fc.constantFrom('ml.g5.xlarge', 'ml.g5.2xlarge', 'ml.p4d.24xlarge'), + modelName: fc.constantFrom('meta-llama/Llama-2-7b-hf', 'mistralai/Mistral-7B-v0.1'), + hfToken: fc.option(fc.stringMatching(/^hf_[a-zA-Z0-9]{20,40}$/), { nil: undefined }), + ngcApiKey: fc.option(fc.stringMatching(/^[a-zA-Z0-9]{20,40}$/), { nil: undefined }), + modelFormat: fc.option(fc.constantFrom('safetensors', 'pytorch'), { nil: undefined }) +}); + +/** Arbitrary for HyperPod-specific config */ +const hyperPodConfigArb = fc.record({ + hyperPodCluster: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + hyperPodNamespace: fc.constantFrom('default', 'ml-inference', 'production'), + hyperPodReplicas: fc.integer({ min: 1, max: 10 }), + fsxVolumeHandle: fc.option(fc.stringMatching(/^fs-[a-f0-9]{17}$/), { nil: undefined }) +}); + +describe('Property 10: Kubernetes Manifest Port Consistency', () => { + before(() => { + console.log('\n๐Ÿ“œ Starting Kubernetes Manifest Port Consistency Property Tests'); + console.log('๐Ÿ“‹ Testing: Requirements 8.1, 8.2, 13.1, 13.2'); + console.log('๐Ÿ”ง Configuration: EJS template rendering with fast-check\n'); + }); + + it('should generate deployment.yaml with containerPort 8080 (Req 8.1, 13.1)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 8.1, 13.1: deployment.yaml containerPort: 8080'); + + fc.assert(fc.property( + baseConfigArb, + hyperPodConfigArb, + (base, hpVars) => 
{ + const vars = { + ...base, + ...hpVars + }; + + // Render deployment template + const output = renderTemplate(deploymentTemplate, vars); + + // Parse as YAML to validate structure + const deployment = yaml.load(output); + + // Verify it's a Deployment + assert.strictEqual( + deployment.kind, + 'Deployment', + 'Must be a Kubernetes Deployment' + ); + + // Verify apiVersion + assert.strictEqual( + deployment.apiVersion, + 'apps/v1', + 'Must use apps/v1 apiVersion' + ); + + // Get container spec + const containers = deployment.spec.template.spec.containers; + assert.ok( + containers && containers.length > 0, + 'Deployment must have at least one container' + ); + + const container = containers[0]; + + // Verify containerPort is 8080 + assert.ok( + container.ports && container.ports.length > 0, + 'Container must have ports defined' + ); + + const port = container.ports.find(p => p.containerPort === 8080); + assert.ok( + port !== undefined, + 'Container must have containerPort: 8080 for SageMaker BYOC compatibility' + ); + + assert.strictEqual( + port.containerPort, + 8080, + 'containerPort must be exactly 8080' + ); + } + ), { numRuns: 50 }); + + console.log(' โœ… deployment.yaml always has containerPort: 8080'); + }); + + it('should generate service.yaml with targetPort 8080 (Req 8.2, 13.2)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 8.2, 13.2: service.yaml targetPort: 8080'); + + fc.assert(fc.property( + baseConfigArb, + hyperPodConfigArb, + (base, hpVars) => { + const vars = { + ...base, + ...hpVars + }; + + // Render service template + const output = renderTemplate(serviceTemplate, vars); + + // Parse as YAML to validate structure + const service = yaml.load(output); + + // Verify it's a Service + assert.strictEqual( + service.kind, + 'Service', + 'Must be a Kubernetes Service' + ); + + // Verify apiVersion + assert.strictEqual( + service.apiVersion, + 'v1', + 'Must use v1 apiVersion' + ); + + // Verify ports + assert.ok( + 
service.spec.ports && service.spec.ports.length > 0, + 'Service must have ports defined' + ); + + const port = service.spec.ports.find(p => p.targetPort === 8080); + assert.ok( + port !== undefined, + 'Service must have targetPort: 8080 for SageMaker BYOC compatibility' + ); + + assert.strictEqual( + port.targetPort, + 8080, + 'targetPort must be exactly 8080' + ); + + // Also verify the service port is 8080 + assert.strictEqual( + port.port, + 8080, + 'Service port must be 8080' + ); + } + ), { numRuns: 50 }); + + console.log(' โœ… service.yaml always has targetPort: 8080'); + }); + + it('should have matching selectors between deployment and service', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Deployment and Service selectors must match'); + + fc.assert(fc.property( + baseConfigArb, + hyperPodConfigArb, + (base, hpVars) => { + const vars = { + ...base, + ...hpVars + }; + + // Render both templates + const deploymentOutput = renderTemplate(deploymentTemplate, vars); + const serviceOutput = renderTemplate(serviceTemplate, vars); + + // Parse as YAML + const deployment = yaml.load(deploymentOutput); + const service = yaml.load(serviceOutput); + + // Get deployment pod labels + const podLabels = deployment.spec.template.metadata.labels; + + // Get service selector + const serviceSelector = service.spec.selector; + + // Service selector must match deployment pod labels + assert.ok( + podLabels.app === serviceSelector.app, + `Service selector (${serviceSelector.app}) must match deployment pod label (${podLabels.app})` + ); + } + ), { numRuns: 50 }); + + console.log(' โœ… Deployment and Service selectors match'); + }); + + it('should use the configured namespace in both manifests', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Both manifests use configured hyperPodNamespace'); + + fc.assert(fc.property( + baseConfigArb, + hyperPodConfigArb, + (base, hpVars) => { + const vars = { + ...base, + ...hpVars + }; + + // Render both templates + 
const deploymentOutput = renderTemplate(deploymentTemplate, vars); + const serviceOutput = renderTemplate(serviceTemplate, vars); + + // Parse as YAML + const deployment = yaml.load(deploymentOutput); + const service = yaml.load(serviceOutput); + + // Verify namespace matches configured value + assert.strictEqual( + deployment.metadata.namespace, + hpVars.hyperPodNamespace, + 'Deployment namespace must match hyperPodNamespace' + ); + + assert.strictEqual( + service.metadata.namespace, + hpVars.hyperPodNamespace, + 'Service namespace must match hyperPodNamespace' + ); + } + ), { numRuns: 50 }); + + console.log(' โœ… Both manifests use configured namespace'); + }); + + it('should include GPU resource requests in deployment', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Deployment includes GPU resource requests'); + + fc.assert(fc.property( + baseConfigArb, + hyperPodConfigArb, + (base, hpVars) => { + const vars = { + ...base, + ...hpVars + }; + + // Render deployment template + const output = renderTemplate(deploymentTemplate, vars); + + // Parse as YAML + const deployment = yaml.load(output); + + // Get container resources + const container = deployment.spec.template.spec.containers[0]; + const resources = container.resources; + + assert.ok( + resources && resources.requests, + 'Container must have resource requests' + ); + + assert.ok( + resources.requests['nvidia.com/gpu'], + 'Container must request nvidia.com/gpu' + ); + + assert.ok( + resources.limits && resources.limits['nvidia.com/gpu'], + 'Container must have nvidia.com/gpu limits' + ); + } + ), { numRuns: 50 }); + + console.log(' โœ… Deployment includes GPU resource requests'); + }); + + it('should include GPU tolerations in deployment', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Deployment includes GPU tolerations'); + + fc.assert(fc.property( + baseConfigArb, + hyperPodConfigArb, + (base, hpVars) => { + const vars = { + ...base, + ...hpVars + }; + + // Render deployment 
template + const output = renderTemplate(deploymentTemplate, vars); + + // Parse as YAML + const deployment = yaml.load(output); + + // Get tolerations + const tolerations = deployment.spec.template.spec.tolerations; + + assert.ok( + tolerations && tolerations.length > 0, + 'Deployment must have tolerations for GPU nodes' + ); + + // Check for nvidia.com/gpu toleration + const gpuToleration = tolerations.find(t => t.key === 'nvidia.com/gpu'); + assert.ok( + gpuToleration, + 'Deployment must have nvidia.com/gpu toleration' + ); + } + ), { numRuns: 50 }); + + console.log(' โœ… Deployment includes GPU tolerations'); + }); + + it('should use configured replicas in deployment', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Deployment uses configured hyperPodReplicas'); + + fc.assert(fc.property( + baseConfigArb, + hyperPodConfigArb, + (base, hpVars) => { + const vars = { + ...base, + ...hpVars + }; + + // Render deployment template + const output = renderTemplate(deploymentTemplate, vars); + + // Parse as YAML + const deployment = yaml.load(output); + + // Verify replicas matches configured value + assert.strictEqual( + deployment.spec.replicas, + hpVars.hyperPodReplicas, + 'Deployment replicas must match hyperPodReplicas' + ); + } + ), { numRuns: 50 }); + + console.log(' โœ… Deployment uses configured replicas'); + }); + + it('should include health check probes targeting port 8080', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Health check probes target port 8080'); + + fc.assert(fc.property( + baseConfigArb, + hyperPodConfigArb, + (base, hpVars) => { + const vars = { + ...base, + ...hpVars + }; + + // Render deployment template + const output = renderTemplate(deploymentTemplate, vars); + + // Parse as YAML + const deployment = yaml.load(output); + + // Get container + const container = deployment.spec.template.spec.containers[0]; + + // Check readiness probe + assert.ok( + container.readinessProbe, + 'Container must have readinessProbe' + 
); + assert.strictEqual( + container.readinessProbe.httpGet.port, + 8080, + 'readinessProbe must target port 8080' + ); + assert.strictEqual( + container.readinessProbe.httpGet.path, + '/ping', + 'readinessProbe must target /ping endpoint' + ); + + // Check liveness probe + assert.ok( + container.livenessProbe, + 'Container must have livenessProbe' + ); + assert.strictEqual( + container.livenessProbe.httpGet.port, + 8080, + 'livenessProbe must target port 8080' + ); + } + ), { numRuns: 50 }); + + console.log(' โœ… Health check probes target port 8080'); + }); +}); diff --git a/test/input-parsing-and-generation/prompt-runner-ordering.property.test.js b/test/input-parsing-and-generation/prompt-runner-ordering.property.test.js new file mode 100644 index 0000000..a8efe74 --- /dev/null +++ b/test/input-parsing-and-generation/prompt-runner-ordering.property.test.js @@ -0,0 +1,415 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +/** + * Property-Based Tests for Infrastructure-First Prompt Ordering + * + * Property 3: Infrastructure-First Prompt Ordering + * + * For any valid generator run, the prompt phases must execute in strict order: + * infrastructure prompts (Phase 1) before ML configuration prompts (Phase 2) + * before module selection prompts (Phase 3) before project configuration prompts (Phase 4). + * No ML configuration prompt may be presented before all infrastructure prompts have been collected. 
+ * + * Validates Requirements: 3.1, 3.2, 3.3, 3.4 + */ + +import fc from 'fast-check'; +import assert from 'assert'; +import fs from 'fs'; +import path from 'path'; +import { fileURLToPath } from 'url'; +import { setupTestHooks } from './test-utils.js'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +describe('Property 3: Infrastructure-First Prompt Ordering', () => { + setupTestHooks('Infrastructure-First Prompt Ordering'); + + // Read the prompt-runner.js source code for structural analysis + const promptRunnerPath = path.join(__dirname, '../../generators/app/lib/prompt-runner.js'); + const promptRunnerSource = fs.readFileSync(promptRunnerPath, 'utf8'); + + /** + * Helper to find the position of a pattern in the source code + * Returns -1 if not found + */ + function findPosition(pattern) { + const match = promptRunnerSource.match(pattern); + return match ? promptRunnerSource.indexOf(match[0]) : -1; + } + + /** + * Helper to find all positions of a pattern in the source code + */ + function findAllPositions(pattern) { + const positions = []; + let match; + const regex = new RegExp(pattern, 'g'); + while ((match = regex.exec(promptRunnerSource)) !== null) { + positions.push(match.index); + } + return positions; + } + + describe('Phase Ordering in run() Method', () => { + /** + * Property 3a: Infrastructure prompts must run in Phase 1 + * + * Validates: Requirement 3.1 + */ + it('should run infrastructure prompts in Phase 1 before ML configuration', function() { + this.timeout(10000); + + // Find the phase console.log statements to determine ordering + const phase1InfraPattern = /console\.log\([^)]*Infrastructure/; + const phase2MLPattern = /console\.log\([^)]*ML Configuration|console\.log\([^)]*Core ML Configuration/; + + const infraPhasePos = findPosition(phase1InfraPattern); + const mlPhasePos = findPosition(phase2MLPattern); + + assert.ok( + infraPhasePos !== -1, + 'Infrastructure phase console.log must 
exist in prompt-runner.js' + ); + assert.ok( + mlPhasePos !== -1, + 'ML Configuration phase console.log must exist in prompt-runner.js' + ); + assert.ok( + infraPhasePos < mlPhasePos, + `Infrastructure phase (pos ${infraPhasePos}) must come before ML Configuration phase (pos ${mlPhasePos})` + ); + }); + + /** + * Property 3b: ML configuration prompts must run in Phase 2 + * + * Validates: Requirement 3.2 + */ + it('should run ML configuration prompts in Phase 2 after infrastructure', function() { + this.timeout(10000); + + // Infrastructure is now split into sub-phases; verify the last infra sub-phase + // (infraBuildPrompts) runs before deploymentConfigPrompts + const infraBuildRunPhasePattern = /_runPhase\(infraBuildPrompts/; + const deploymentConfigRunPhasePattern = /_runPhase\(deploymentConfigPrompts/; + + const infraBuildRunPhasePos = findPosition(infraBuildRunPhasePattern); + const deploymentConfigRunPhasePos = findPosition(deploymentConfigRunPhasePattern); + + assert.ok( + infraBuildRunPhasePos !== -1, + '_runPhase(infraBuildPrompts) must exist in prompt-runner.js' + ); + assert.ok( + deploymentConfigRunPhasePos !== -1, + '_runPhase(deploymentConfigPrompts) must exist in prompt-runner.js' + ); + assert.ok( + infraBuildRunPhasePos < deploymentConfigRunPhasePos, + `infraBuildPrompts (pos ${infraBuildRunPhasePos}) must be run before deploymentConfigPrompts (pos ${deploymentConfigRunPhasePos})` + ); + }); + + /** + * Property 3c: Module selection prompts must run in Phase 3 + * + * Validates: Requirement 3.3 + */ + it('should run module selection prompts in Phase 3 after ML configuration', function() { + this.timeout(10000); + + const phase2MLPattern = /console\.log\([^)]*ML Configuration|console\.log\([^)]*Core ML Configuration/; + const phase3ModulePattern = /console\.log\([^)]*Module Selection/; + + const mlPhasePos = findPosition(phase2MLPattern); + const modulePhasePos = findPosition(phase3ModulePattern); + + assert.ok( + mlPhasePos !== -1, + 'ML 
Configuration phase console.log must exist' + ); + assert.ok( + modulePhasePos !== -1, + 'Module Selection phase console.log must exist' + ); + assert.ok( + mlPhasePos < modulePhasePos, + `ML Configuration phase (pos ${mlPhasePos}) must come before Module Selection phase (pos ${modulePhasePos})` + ); + }); + + /** + * Property 3d: Project configuration prompts must run in Phase 4 + * + * Validates: Requirement 3.4 + */ + it('should run project configuration prompts in Phase 4 after module selection', function() { + this.timeout(10000); + + const phase3ModulePattern = /console\.log\([^)]*Module Selection/; + const phase4ProjectPattern = /console\.log\([^)]*Project Configuration/; + + const modulePhasePos = findPosition(phase3ModulePattern); + const projectPhasePos = findPosition(phase4ProjectPattern); + + assert.ok( + modulePhasePos !== -1, + 'Module Selection phase console.log must exist' + ); + assert.ok( + projectPhasePos !== -1, + 'Project Configuration phase console.log must exist' + ); + assert.ok( + modulePhasePos < projectPhasePos, + `Module Selection phase (pos ${modulePhasePos}) must come before Project Configuration phase (pos ${projectPhasePos})` + ); + }); + + /** + * Property 3e: Complete phase ordering validation + * + * Validates: Requirements 3.1, 3.2, 3.3, 3.4 + */ + it('should maintain strict phase ordering: Infrastructure โ†’ ML Config โ†’ Module โ†’ Project', function() { + this.timeout(10000); + + // Find all phase markers + const phases = [ + { name: 'Infrastructure', pattern: /console\.log\([^)]*Infrastructure/ }, + { name: 'ML Configuration', pattern: /console\.log\([^)]*ML Configuration|console\.log\([^)]*Core ML Configuration/ }, + { name: 'Module Selection', pattern: /console\.log\([^)]*Module Selection/ }, + { name: 'Project Configuration', pattern: /console\.log\([^)]*Project Configuration/ } + ]; + + const positions = phases.map(phase => ({ + name: phase.name, + position: findPosition(phase.pattern) + })); + + // All phases must exist 
+ positions.forEach(p => { + assert.ok( + p.position !== -1, + `Phase "${p.name}" must exist in prompt-runner.js` + ); + }); + + // Phases must be in strict ascending order + for (let i = 0; i < positions.length - 1; i++) { + assert.ok( + positions[i].position < positions[i + 1].position, + `Phase "${positions[i].name}" (pos ${positions[i].position}) must come before "${positions[i + 1].name}" (pos ${positions[i + 1].position})` + ); + } + }); + }); + + describe('Infrastructure Prompts Content', () => { + /** + * Property 3f: Infrastructure phase must include buildTarget, deploymentTarget, + * instanceType/HyperPod prompts, region, and role + * + * Validates: Requirement 3.1 + */ + it('should include all required infrastructure prompts in Phase 1', async function() { + this.timeout(10000); + + // Dynamically import the prompts module to check actual prompt definitions + const { infrastructurePrompts } = await import('../../generators/app/lib/prompts.js'); + + const requiredPromptNames = [ + 'buildTarget', + 'deploymentTarget', + 'instanceType', + 'hyperPodCluster', + 'awsRegion', + 'awsRoleArn' + ]; + + fc.assert(fc.property( + fc.constantFrom(...requiredPromptNames), + (promptName) => { + const found = infrastructurePrompts.some(p => p.name === promptName); + assert.ok( + found, + `Prompt "${promptName}" must be defined in infrastructurePrompts` + ); + return true; + } + ), { numRuns: requiredPromptNames.length }); + }); + }); + + describe('ML Configuration Prompts Content', () => { + /** + * Property 3g: ML configuration phase must include deploymentConfig, + * frameworkVersion, frameworkProfile, modelFormat, modelProfile, hfToken, ngcApiKey + * + * Validates: Requirement 3.2 + */ + it('should run ML configuration prompts after infrastructure prompts', function() { + this.timeout(10000); + + // Infrastructure is now split into sub-phases; use the first sub-phase + // (infraRegionAndTargetPrompts) as the anchor for "infrastructure starts here" + const 
infraRunPhasePos = findPosition(/_runPhase\(infraRegionAndTargetPrompts/); + const deploymentConfigRunPhasePos = findPosition(/_runPhase\(deploymentConfigPrompts/); + const frameworkVersionRunPhasePos = findPosition(/_runPhase\(\s*frameworkVersionPrompts/); + const frameworkProfileRunPhasePos = findPosition(/_runPhase\(\s*frameworkProfilePrompts/); + const modelFormatRunPhasePos = findPosition(/_runPhase\(\s*modelFormatPrompts/); + const hfTokenRunPhasePos = findPosition(/_runPhase\(hfTokenPrompts/); + const ngcApiKeyRunPhasePos = findPosition(/_runPhase\(ngcApiKeyPrompts/); + + // All ML config prompts must come after infrastructure + const mlConfigPrompts = [ + { name: 'deploymentConfigPrompts', pos: deploymentConfigRunPhasePos }, + { name: 'frameworkVersionPrompts', pos: frameworkVersionRunPhasePos }, + { name: 'frameworkProfilePrompts', pos: frameworkProfileRunPhasePos }, + { name: 'modelFormatPrompts', pos: modelFormatRunPhasePos }, + { name: 'hfTokenPrompts', pos: hfTokenRunPhasePos }, + { name: 'ngcApiKeyPrompts', pos: ngcApiKeyRunPhasePos } + ]; + + mlConfigPrompts.forEach(prompt => { + if (prompt.pos !== -1) { + assert.ok( + infraRunPhasePos < prompt.pos, + `${prompt.name} (pos ${prompt.pos}) must run after infraRegionAndTargetPrompts (pos ${infraRunPhasePos})` + ); + } + }); + }); + }); + + describe('Module Selection Prompts Content', () => { + /** + * Property 3h: Module selection phase must run after ML configuration + * + * Validates: Requirement 3.3 + */ + it('should run modulePrompts after all ML configuration prompts', function() { + this.timeout(10000); + + const moduleRunPhasePos = findPosition(/_runPhase\(modulePrompts/); + const ngcApiKeyRunPhasePos = findPosition(/_runPhase\(ngcApiKeyPrompts/); + + assert.ok( + moduleRunPhasePos !== -1, + '_runPhase(modulePrompts) must exist' + ); + assert.ok( + ngcApiKeyRunPhasePos !== -1, + '_runPhase(ngcApiKeyPrompts) must exist' + ); + assert.ok( + ngcApiKeyRunPhasePos < moduleRunPhasePos, + 
`ngcApiKeyPrompts (pos ${ngcApiKeyRunPhasePos}) must run before modulePrompts (pos ${moduleRunPhasePos})` + ); + }); + }); + + describe('Project Configuration Prompts Content', () => { + /** + * Property 3i: Project configuration phase must run last + * + * Validates: Requirement 3.4 + */ + it('should run projectPrompts and destinationPrompts after module selection', function() { + this.timeout(10000); + + const moduleRunPhasePos = findPosition(/_runPhase\(modulePrompts/); + const projectRunPhasePos = findPosition(/_runPhase\(projectPrompts/); + const destinationRunPhasePos = findPosition(/_runPhase\(destinationPrompts/); + + assert.ok( + moduleRunPhasePos !== -1, + '_runPhase(modulePrompts) must exist' + ); + assert.ok( + projectRunPhasePos !== -1, + '_runPhase(projectPrompts) must exist' + ); + assert.ok( + destinationRunPhasePos !== -1, + '_runPhase(destinationPrompts) must exist' + ); + assert.ok( + moduleRunPhasePos < projectRunPhasePos, + `modulePrompts (pos ${moduleRunPhasePos}) must run before projectPrompts (pos ${projectRunPhasePos})` + ); + assert.ok( + projectRunPhasePos < destinationRunPhasePos, + `projectPrompts (pos ${projectRunPhasePos}) must run before destinationPrompts (pos ${destinationRunPhasePos})` + ); + }); + }); + + describe('HyperPod MCP Query Integration', () => { + /** + * Property 3j: HyperPod MCP query must be wired for hyperpod-eks deployment target + * + * Validates: Requirements 12.1, 12.2, 12.3 + */ + it('should have _queryMcpForHyperPod method for HyperPod cluster discovery', function() { + this.timeout(10000); + + // Check that _queryMcpForHyperPod method exists + const methodPattern = /_queryMcpForHyperPod\s*\(/; + const methodPos = findPosition(methodPattern); + + assert.ok( + methodPos !== -1, + '_queryMcpForHyperPod method must exist in prompt-runner.js' + ); + + // Check that it's called when deploymentTarget is hyperpod-eks + const callPattern = /if\s*\([^)]*deploymentTarget\s*===\s*['"]hyperpod-eks['"]/; + const callPos = 
findPosition(callPattern); + + assert.ok( + callPos !== -1, + 'Conditional check for deploymentTarget === hyperpod-eks must exist' + ); + + // Check that _queryMcpForHyperPod is called within that conditional + const queryCallPattern = /_queryMcpForHyperPod\s*\(/g; + const queryCallPositions = findAllPositions(queryCallPattern.source); + + assert.ok( + queryCallPositions.length > 0, + '_queryMcpForHyperPod must be called at least once' + ); + }); + + /** + * Property 3k: HyperPod MCP query should query hyperpod-cluster-picker server + * + * Validates: Requirements 12.1, 12.2 + */ + it('should query hyperpod-cluster-picker MCP server', function() { + this.timeout(10000); + + // Check that the method references hyperpod-cluster-picker + const serverNamePattern = /hyperpod-cluster-picker/; + const serverNamePos = findPosition(serverNamePattern); + + assert.ok( + serverNamePos !== -1, + 'hyperpod-cluster-picker server name must be referenced in prompt-runner.js' + ); + + // Check that queryMcpServer is called with hyperpod-cluster-picker + const queryMcpPattern = /queryMcpServer\s*\(\s*['"]hyperpod-cluster-picker['"]/; + const queryMcpPos = findPosition(queryMcpPattern); + + assert.ok( + queryMcpPos !== -1, + 'queryMcpServer must be called with hyperpod-cluster-picker' + ); + }); + }); +}); diff --git a/test/input-parsing-and-generation/prompts-deployment-target.property.test.js b/test/input-parsing-and-generation/prompts-deployment-target.property.test.js new file mode 100644 index 0000000..6aac99f --- /dev/null +++ b/test/input-parsing-and-generation/prompts-deployment-target.property.test.js @@ -0,0 +1,517 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +/** + * Property-Based Tests for Deployment Target Prompt System + * + * Tests universal correctness properties for the build/deployment target + * separation and conditional prompt visibility. 
+ * + * Property 1: Build/Deploy Separation + * Property 2: Deployment-Target-Conditional Prompt Visibility + * + * Validates Requirements: 1.3, 1.4, 2.1, 2.2, 2.3, 2.4 + */ + +import fc from 'fast-check'; +import assert from 'assert'; +import { setupTestHooks } from './test-utils.js'; +import { infrastructurePrompts } from '../../generators/app/lib/prompts.js'; + +describe('Deployment Target Prompt Properties', () => { + setupTestHooks('Deployment Target Prompt Properties'); + + /** + * Helper to find a prompt by name in the infrastructurePrompts array + */ + function findPrompt(name) { + return infrastructurePrompts.find(p => p.name === name); + } + + /** + * Helper to evaluate a prompt's `when` function with given answers + */ + function evaluateWhen(prompt, answers) { + if (!prompt) return false; + if (typeof prompt.when === 'function') { + return prompt.when(answers); + } + // If no `when` function, prompt is always shown + return prompt.when !== false; + } + + describe('Property 2: Deployment-Target-Conditional Prompt Visibility', () => { + /** + * Property 2a: When deploymentTarget === 'managed-inference': + * - instanceType prompt's `when` function must return true + * - All HyperPod-specific prompts' `when` functions must return false + * + * Validates: Requirements 2.1, 2.2 + */ + it('should show instanceType and hide HyperPod prompts when deploymentTarget is managed-inference', function() { + this.timeout(10000); + + fc.assert(fc.property( + fc.record({ + // Generate various valid answer states that might exist before these prompts + buildTarget: fc.constantFrom('codebuild'), + deploymentConfig: fc.constantFrom( + 'transformers-vllm', 'transformers-sglang', 'transformers-tensorrt-llm', + 'sklearn-flask', 'sklearn-fastapi', 'xgboost-flask', 'xgboost-fastapi', + 'tensorflow-flask', 'tensorflow-fastapi' + ), + // Additional properties that might be set + _mcpInstanceChoices: fc.option(fc.array(fc.constantFrom( + 'ml.m5.xlarge', 'ml.g5.xlarge', 
'ml.g5.2xlarge' + ), { minLength: 0, maxLength: 5 })) + }), + (baseAnswers) => { + // Set deploymentTarget to managed-inference + const answers = { + ...baseAnswers, + deploymentTarget: 'managed-inference' + }; + + // Get the prompts + const instanceTypePrompt = findPrompt('instanceType'); + const hyperPodClusterPrompt = findPrompt('hyperPodCluster'); + const hyperPodNamespacePrompt = findPrompt('hyperPodNamespace'); + const hyperPodReplicasPrompt = findPrompt('hyperPodReplicas'); + const fsxVolumeHandlePrompt = findPrompt('fsxVolumeHandle'); + + // instanceType should be shown for managed-inference + // Note: instanceType prompt doesn't have a `when` guard in current implementation + // but if it does, it should return true for managed-inference + if (instanceTypePrompt && instanceTypePrompt.when) { + const instanceTypeVisible = evaluateWhen(instanceTypePrompt, answers); + assert.strictEqual( + instanceTypeVisible, + true, + 'instanceType prompt should be visible when deploymentTarget is managed-inference' + ); + } + + // All HyperPod prompts should be hidden for managed-inference + if (hyperPodClusterPrompt) { + const clusterVisible = evaluateWhen(hyperPodClusterPrompt, answers); + assert.strictEqual( + clusterVisible, + false, + 'hyperPodCluster prompt should be hidden when deploymentTarget is managed-inference' + ); + } + + if (hyperPodNamespacePrompt) { + const namespaceVisible = evaluateWhen(hyperPodNamespacePrompt, answers); + assert.strictEqual( + namespaceVisible, + false, + 'hyperPodNamespace prompt should be hidden when deploymentTarget is managed-inference' + ); + } + + if (hyperPodReplicasPrompt) { + const replicasVisible = evaluateWhen(hyperPodReplicasPrompt, answers); + assert.strictEqual( + replicasVisible, + false, + 'hyperPodReplicas prompt should be hidden when deploymentTarget is managed-inference' + ); + } + + if (fsxVolumeHandlePrompt) { + const fsxVisible = evaluateWhen(fsxVolumeHandlePrompt, answers); + assert.strictEqual( + fsxVisible, + 
false, + 'fsxVolumeHandle prompt should be hidden when deploymentTarget is managed-inference' + ); + } + + return true; + } + ), { numRuns: 5 }); + }); + + /** + * Property 2b: When deploymentTarget === 'hyperpod-eks': + * - HyperPod cluster, namespace, replicas, and FSx prompts' `when` functions must return true + * - instanceType prompt's `when` function must also return true (used for nodeSelector) + * + * Validates: Requirements 2.3, 2.4 + */ + it('should show HyperPod prompts and instanceType when deploymentTarget is hyperpod-eks', function() { + this.timeout(10000); + + fc.assert(fc.property( + fc.record({ + buildTarget: fc.constantFrom('codebuild'), + deploymentConfig: fc.constantFrom( + 'transformers-vllm', 'transformers-sglang', 'transformers-tensorrt-llm', + 'sklearn-flask', 'sklearn-fastapi', 'xgboost-flask', 'xgboost-fastapi', + 'tensorflow-flask', 'tensorflow-fastapi' + ), + _mcpHyperPodChoices: fc.option(fc.array(fc.constantFrom( + 'my-hyperpod-cluster', 'prod-cluster', 'dev-cluster' + ), { minLength: 0, maxLength: 3 })) + }), + (baseAnswers) => { + // Set deploymentTarget to hyperpod-eks + const answers = { + ...baseAnswers, + deploymentTarget: 'hyperpod-eks' + }; + + // Get the prompts + const instanceTypePrompt = findPrompt('instanceType'); + const hyperPodClusterPrompt = findPrompt('hyperPodCluster'); + const hyperPodNamespacePrompt = findPrompt('hyperPodNamespace'); + const hyperPodReplicasPrompt = findPrompt('hyperPodReplicas'); + const fsxVolumeHandlePrompt = findPrompt('fsxVolumeHandle'); + + // instanceType should also be visible for hyperpod-eks (used for nodeSelector in deployment.yaml) + if (instanceTypePrompt && instanceTypePrompt.when) { + const instanceTypeVisible = evaluateWhen(instanceTypePrompt, answers); + assert.strictEqual( + instanceTypeVisible, + true, + 'instanceType prompt should be visible when deploymentTarget is hyperpod-eks' + ); + } + + // All HyperPod prompts should be visible for hyperpod-eks + if 
(hyperPodClusterPrompt) { + const clusterVisible = evaluateWhen(hyperPodClusterPrompt, answers); + assert.strictEqual( + clusterVisible, + true, + 'hyperPodCluster prompt should be visible when deploymentTarget is hyperpod-eks' + ); + } + + if (hyperPodNamespacePrompt) { + const namespaceVisible = evaluateWhen(hyperPodNamespacePrompt, answers); + assert.strictEqual( + namespaceVisible, + true, + 'hyperPodNamespace prompt should be visible when deploymentTarget is hyperpod-eks' + ); + } + + if (hyperPodReplicasPrompt) { + const replicasVisible = evaluateWhen(hyperPodReplicasPrompt, answers); + assert.strictEqual( + replicasVisible, + true, + 'hyperPodReplicas prompt should be visible when deploymentTarget is hyperpod-eks' + ); + } + + if (fsxVolumeHandlePrompt) { + const fsxVisible = evaluateWhen(fsxVolumeHandlePrompt, answers); + assert.strictEqual( + fsxVisible, + true, + 'fsxVolumeHandle prompt should be visible when deploymentTarget is hyperpod-eks' + ); + } + + return true; + } + ), { numRuns: 5 }); + }); + + /** + * Property 2c: instanceType is shown for both deployment targets, + * HyperPod prompts are only shown for hyperpod-eks + * + * Validates: Requirements 2.1, 2.2, 2.3, 2.4 + */ + it('should ensure instanceType and HyperPod prompts are mutually exclusive', function() { + this.timeout(10000); + + fc.assert(fc.property( + fc.record({ + buildTarget: fc.constantFrom('codebuild'), + deploymentTarget: fc.constantFrom('managed-inference', 'hyperpod-eks'), + deploymentConfig: fc.constantFrom( + 'transformers-vllm', 'sklearn-flask', 'xgboost-fastapi' + ) + }), + (answers) => { + const instanceTypePrompt = findPrompt('instanceType'); + const hyperPodClusterPrompt = findPrompt('hyperPodCluster'); + + // Evaluate visibility + const instanceTypeVisible = instanceTypePrompt && instanceTypePrompt.when + ? evaluateWhen(instanceTypePrompt, answers) + : true; // Default visible if no when guard + + const hyperPodVisible = hyperPodClusterPrompt + ? 
evaluateWhen(hyperPodClusterPrompt, answers) + : false; + + // instanceType should be visible for both deployment targets + if (instanceTypePrompt && instanceTypePrompt.when) { + assert.strictEqual( + instanceTypeVisible, + true, + 'instanceType should be visible for both managed-inference and hyperpod-eks' + ); + } + + // HyperPod prompts should only be visible for hyperpod-eks + if (answers.deploymentTarget === 'managed-inference') { + assert.strictEqual( + hyperPodVisible, + false, + 'hyperPodCluster should be hidden for managed-inference' + ); + } else { + assert.strictEqual( + hyperPodVisible, + true, + 'hyperPodCluster should be visible for hyperpod-eks' + ); + } + + return true; + } + ), { numRuns: 5 }); + }); + }); + + describe('Property 1: Build/Deploy Separation', () => { + /** + * Property 1a: buildTarget and deploymentTarget are independent prompts + * + * Validates: Requirements 1.3, 1.4 + */ + it('should have separate buildTarget and deploymentTarget prompts', function() { + this.timeout(10000); + + const buildTargetPrompt = findPrompt('buildTarget'); + const deploymentTargetPrompt = findPrompt('deploymentTarget'); + + // Both prompts must exist + assert.ok(buildTargetPrompt, 'buildTarget prompt must exist'); + assert.ok(deploymentTargetPrompt, 'deploymentTarget prompt must exist'); + + // They must be different prompts + assert.notStrictEqual( + buildTargetPrompt, + deploymentTargetPrompt, + 'buildTarget and deploymentTarget must be separate prompts' + ); + + // buildTarget should have 'codebuild' as an option + const buildTargetChoices = typeof buildTargetPrompt.choices === 'function' + ? buildTargetPrompt.choices({}) + : buildTargetPrompt.choices; + + const hasCodebuild = buildTargetChoices.some( + choice => (typeof choice === 'object' ? 
choice.value : choice) === 'codebuild' + ); + assert.ok(hasCodebuild, 'buildTarget must have codebuild as an option'); + + // deploymentTarget should have both managed-inference and hyperpod-eks + const deploymentTargetChoices = typeof deploymentTargetPrompt.choices === 'function' + ? deploymentTargetPrompt.choices({}) + : deploymentTargetPrompt.choices; + + const hasManagedInference = deploymentTargetChoices.some( + choice => (typeof choice === 'object' ? choice.value : choice) === 'managed-inference' + ); + const hasHyperPodEks = deploymentTargetChoices.some( + choice => (typeof choice === 'object' ? choice.value : choice) === 'hyperpod-eks' + ); + + assert.ok(hasManagedInference, 'deploymentTarget must have managed-inference as an option'); + assert.ok(hasHyperPodEks, 'deploymentTarget must have hyperpod-eks as an option'); + }); + + /** + * Property 1b: Changing buildTarget should not affect deployment-related prompt visibility + * + * Validates: Requirements 1.3, 1.4 + */ + it('should not change deployment prompt visibility when buildTarget changes', function() { + this.timeout(10000); + + fc.assert(fc.property( + fc.record({ + deploymentTarget: fc.constantFrom('managed-inference', 'hyperpod-eks'), + deploymentConfig: fc.constantFrom('transformers-vllm', 'sklearn-flask') + }), + (baseAnswers) => { + // Test with buildTarget = 'codebuild' + const answersWithCodebuild = { + ...baseAnswers, + buildTarget: 'codebuild' + }; + + // Get deployment-related prompts + const instanceTypePrompt = findPrompt('instanceType'); + const hyperPodClusterPrompt = findPrompt('hyperPodCluster'); + + // Evaluate visibility with codebuild + const instanceTypeVisibleCodebuild = instanceTypePrompt && instanceTypePrompt.when + ? evaluateWhen(instanceTypePrompt, answersWithCodebuild) + : true; + + const hyperPodVisibleCodebuild = hyperPodClusterPrompt + ? 
evaluateWhen(hyperPodClusterPrompt, answersWithCodebuild) + : false; + + // The visibility should depend only on deploymentTarget, not buildTarget + // Since we only have 'codebuild' as a build target currently, + // we verify that the visibility is consistent with deploymentTarget + if (baseAnswers.deploymentTarget === 'managed-inference') { + if (instanceTypePrompt && instanceTypePrompt.when) { + assert.strictEqual( + instanceTypeVisibleCodebuild, + true, + 'instanceType visibility should depend on deploymentTarget, not buildTarget' + ); + } + assert.strictEqual( + hyperPodVisibleCodebuild, + false, + 'hyperPodCluster visibility should depend on deploymentTarget, not buildTarget' + ); + } else { + // hyperpod-eks: both instanceType and HyperPod prompts should be visible + if (instanceTypePrompt && instanceTypePrompt.when) { + assert.strictEqual( + instanceTypeVisibleCodebuild, + true, + 'instanceType visibility should depend on deploymentTarget, not buildTarget' + ); + } + assert.strictEqual( + hyperPodVisibleCodebuild, + true, + 'hyperPodCluster visibility should depend on deploymentTarget, not buildTarget' + ); + } + + return true; + } + ), { numRuns: 5 }); + }); + + /** + * Property 1c: codebuildComputeType should depend only on buildTarget + * + * Validates: Requirements 1.3, 1.4 + */ + it('should show codebuildComputeType only when buildTarget is codebuild', function() { + this.timeout(10000); + + fc.assert(fc.property( + fc.record({ + deploymentTarget: fc.constantFrom('managed-inference', 'hyperpod-eks'), + deploymentConfig: fc.constantFrom('transformers-vllm', 'sklearn-flask') + }), + (baseAnswers) => { + const codebuildComputeTypePrompt = findPrompt('codebuildComputeType'); + + if (!codebuildComputeTypePrompt) { + // If prompt doesn't exist, skip this test + return true; + } + + // With buildTarget = 'codebuild', should be visible + const answersWithCodebuild = { + ...baseAnswers, + buildTarget: 'codebuild' + }; + + const visibleWithCodebuild = 
evaluateWhen(codebuildComputeTypePrompt, answersWithCodebuild); + assert.strictEqual( + visibleWithCodebuild, + true, + 'codebuildComputeType should be visible when buildTarget is codebuild' + ); + + // Visibility should not change based on deploymentTarget + // (it should only depend on buildTarget) + const answersWithDifferentDeployment = { + ...answersWithCodebuild, + deploymentTarget: baseAnswers.deploymentTarget === 'managed-inference' + ? 'hyperpod-eks' + : 'managed-inference' + }; + + const visibleWithDifferentDeployment = evaluateWhen( + codebuildComputeTypePrompt, + answersWithDifferentDeployment + ); + + assert.strictEqual( + visibleWithCodebuild, + visibleWithDifferentDeployment, + 'codebuildComputeType visibility should not depend on deploymentTarget' + ); + + return true; + } + ), { numRuns: 5 }); + }); + }); + + describe('Prompt Default Values', () => { + /** + * Verify HyperPod prompts have correct default values + * + * Validates: Requirements 2.5, 2.6 + */ + it('should have correct default values for HyperPod prompts', () => { + const hyperPodNamespacePrompt = findPrompt('hyperPodNamespace'); + const hyperPodReplicasPrompt = findPrompt('hyperPodReplicas'); + + if (hyperPodNamespacePrompt) { + const defaultNamespace = typeof hyperPodNamespacePrompt.default === 'function' + ? hyperPodNamespacePrompt.default({}) + : hyperPodNamespacePrompt.default; + + assert.strictEqual( + defaultNamespace, + 'default', + 'hyperPodNamespace should default to "default"' + ); + } + + if (hyperPodReplicasPrompt) { + const defaultReplicas = typeof hyperPodReplicasPrompt.default === 'function' + ? 
hyperPodReplicasPrompt.default({}) + : hyperPodReplicasPrompt.default; + + assert.strictEqual( + defaultReplicas, + 1, + 'hyperPodReplicas should default to 1' + ); + } + }); + + /** + * Verify deploymentTarget defaults to managed-inference + */ + it('should default deploymentTarget to managed-inference', () => { + const deploymentTargetPrompt = findPrompt('deploymentTarget'); + + assert.ok(deploymentTargetPrompt, 'deploymentTarget prompt must exist'); + + const defaultValue = typeof deploymentTargetPrompt.default === 'function' + ? deploymentTargetPrompt.default({}) + : deploymentTargetPrompt.default; + + assert.strictEqual( + defaultValue, + 'managed-inference', + 'deploymentTarget should default to managed-inference' + ); + }); + }); +}); diff --git a/test/input-parsing-and-generation/property-test-utils.js b/test/input-parsing-and-generation/property-test-utils.js index ad1f6d7..688dd0e 100644 --- a/test/input-parsing-and-generation/property-test-utils.js +++ b/test/input-parsing-and-generation/property-test-utils.js @@ -45,9 +45,9 @@ export const PARAMETER_MATRIX = { values: ['pkl', 'joblib', 'json', 'model', 'ubj', 'keras', 'h5', 'SavedModel'] }, - deployTarget: { - cliOption: 'deploy-target', - envVar: 'ML_DEPLOY_TARGET', + buildTarget: { + cliOption: 'build-target', + envVar: 'ML_BUILD_TARGET', configFile: true, packageJson: false, promptable: true, @@ -170,7 +170,7 @@ export const PARAMETER_MATRIX = { // Environment variable mappings export const ENV_VAR_MAPPING = { 'ML_INSTANCE_TYPE': 'instanceType', - 'ML_DEPLOY_TARGET': 'deployTarget', + 'ML_BUILD_TARGET': 'buildTarget', 'ML_CODEBUILD_COMPUTE_TYPE': 'codebuildComputeType', 'AWS_REGION': 'awsRegion', 'AWS_ROLE': 'awsRoleArn', @@ -221,7 +221,7 @@ export const generateConfiguration = () => fc.record({ framework: fc.constantFrom(...PARAMETER_MATRIX.framework.values), modelServer: fc.constantFrom(...PARAMETER_MATRIX.modelServer.values), modelFormat: fc.constantFrom(...PARAMETER_MATRIX.modelFormat.values), - 
deployTarget: fc.constantFrom(...PARAMETER_MATRIX.deployTarget.values), + buildTarget: fc.constantFrom(...PARAMETER_MATRIX.buildTarget.values), codebuildComputeType: fc.option(fc.constantFrom(...PARAMETER_MATRIX.codebuildComputeType.values)), codebuildProjectName: fc.option(generateCodeBuildProjectName()), includeSampleModel: fc.boolean(), @@ -238,7 +238,7 @@ export const generateCliOptions = () => fc.record({ 'framework': fc.option(fc.constantFrom(...PARAMETER_MATRIX.framework.values)), 'model-server': fc.option(fc.constantFrom(...PARAMETER_MATRIX.modelServer.values)), 'model-format': fc.option(fc.constantFrom(...PARAMETER_MATRIX.modelFormat.values)), - 'deploy-target': fc.option(fc.constantFrom(...PARAMETER_MATRIX.deployTarget.values)), + 'build-target': fc.option(fc.constantFrom(...PARAMETER_MATRIX.buildTarget.values)), 'codebuild-compute-type': fc.option(fc.constantFrom(...PARAMETER_MATRIX.codebuildComputeType.values)), 'codebuild-project-name': fc.option(generateCodeBuildProjectName()), 'include-sample': fc.option(fc.boolean()), @@ -255,7 +255,7 @@ export const generateCliOptions = () => fc.record({ // Generate environment variables object export const generateEnvironmentVariables = () => fc.record({ 'ML_INSTANCE_TYPE': fc.option(fc.constantFrom(...PARAMETER_MATRIX.instanceType.values)), - 'ML_DEPLOY_TARGET': fc.option(fc.constantFrom(...PARAMETER_MATRIX.deployTarget.values)), + 'ML_BUILD_TARGET': fc.option(fc.constantFrom(...PARAMETER_MATRIX.buildTarget.values)), 'ML_CODEBUILD_COMPUTE_TYPE': fc.option(fc.constantFrom(...PARAMETER_MATRIX.codebuildComputeType.values)), 'AWS_REGION': fc.option(fc.constantFrom(...PARAMETER_MATRIX.awsRegion.values)), 'AWS_ROLE': fc.option(generateValidArn()), @@ -285,7 +285,7 @@ export const generateConfigFileContent = () => fc.record({ framework: fc.option(fc.constantFrom(...PARAMETER_MATRIX.framework.values)), modelServer: fc.option(fc.constantFrom(...PARAMETER_MATRIX.modelServer.values)), modelFormat: 
fc.option(fc.constantFrom(...PARAMETER_MATRIX.modelFormat.values)), - deployTarget: fc.option(fc.constantFrom(...PARAMETER_MATRIX.deployTarget.values)), + buildTarget: fc.option(fc.constantFrom(...PARAMETER_MATRIX.buildTarget.values)), codebuildComputeType: fc.option(fc.constantFrom(...PARAMETER_MATRIX.codebuildComputeType.values)), codebuildProjectName: fc.option(generateCodeBuildProjectName()), includeSampleModel: fc.option(fc.boolean()), @@ -380,7 +380,7 @@ export function createMinimalValidConfig() { framework: 'sklearn', modelServer: 'flask', modelFormat: 'pkl', - deployTarget: 'codebuild', + buildTarget: 'codebuild', includeSampleModel: false, includeTesting: true, instanceType: 'cpu-optimized', @@ -396,7 +396,7 @@ export function createMinimalValidCodeBuildConfig() { framework: 'sklearn', modelServer: 'flask', modelFormat: 'pkl', - deployTarget: 'codebuild', + buildTarget: 'codebuild', codebuildComputeType: 'BUILD_GENERAL1_MEDIUM', codebuildProjectName: 'test-project-build', includeSampleModel: false, diff --git a/test/input-parsing-and-generation/registry-integration.test.js b/test/input-parsing-and-generation/registry-integration.test.js index 4f5c578..210149f 100644 --- a/test/input-parsing-and-generation/registry-integration.test.js +++ b/test/input-parsing-and-generation/registry-integration.test.js @@ -52,7 +52,7 @@ describe('Registry System Integration Tests', () => { modelServer: 'flask', includeSampleModel: false, includeTesting: false, - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.m5.large', awsRegion: 'us-east-1', projectName: 'test-registry-empty', @@ -87,7 +87,7 @@ describe('Registry System Integration Tests', () => { modelServer: 'vllm', includeSampleModel: false, includeTesting: false, - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.g5.xlarge', awsRegion: 'us-east-1', awsRoleArn: '', @@ -122,7 +122,7 @@ describe('Registry System Integration Tests', () => { modelServer: 'vllm', 
includeSampleModel: false, includeTesting: false, - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.g5.xlarge', awsRegion: 'us-east-1', awsRoleArn: '', @@ -160,7 +160,7 @@ describe('Registry System Integration Tests', () => { modelServer: 'vllm', includeSampleModel: false, includeTesting: false, - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.g5.xlarge', awsRegion: 'us-east-1', awsRoleArn: '', @@ -194,7 +194,7 @@ describe('Registry System Integration Tests', () => { modelServer: 'flask', includeSampleModel: false, includeTesting: false, - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.m5.large', awsRegion: 'us-east-1', projectName: 'test-registry-env-validation', @@ -236,7 +236,7 @@ describe('Registry System Integration Tests', () => { modelServer: 'flask', includeSampleModel: false, includeTesting: false, - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.m5.large', awsRegion: 'us-east-1', projectName: 'test-registry-unavailable', @@ -267,7 +267,7 @@ describe('Registry System Integration Tests', () => { modelServer: 'flask', includeSampleModel: false, includeTesting: false, - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.g5.xlarge', awsRegion: 'us-east-1', projectName: 'test-registry-backward-compat', diff --git a/test/input-parsing-and-generation/unified-script-generation.property.test.js b/test/input-parsing-and-generation/unified-script-generation.property.test.js new file mode 100644 index 0000000..589d9f3 --- /dev/null +++ b/test/input-parsing-and-generation/unified-script-generation.property.test.js @@ -0,0 +1,539 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +/** + * Property 6: Unified Script Generation + * + * For any valid configuration (regardless of deploymentTarget value), the + * generated project must contain exactly one do/deploy script, one do/clean + * script, one do/logs script, and one do/test script. No additional + * deployment-target-specific script files may be generated. + * + * This property validates that rendering each template produces exactly one + * script output for both deployment targets - this is a template rendering + * test, not a file system test. + * + * Validates: Requirements 5.1, 6.1, 15.1, 16.1 + * + * Feature: sagemaker-hyperpod-deployment + */ + +import fc from 'fast-check'; +import { describe, it, before } from 'mocha'; +import assert from 'assert'; +import ejs from 'ejs'; +import { readFileSync } from 'fs'; +import path from 'path'; +import { fileURLToPath } from 'url'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +// Load all do-framework templates +const templatesDir = path.join(__dirname, '../../generators/app/templates/do'); + +const deployTemplate = readFileSync(path.join(templatesDir, 'deploy'), 'utf8'); +const cleanTemplate = readFileSync(path.join(templatesDir, 'clean'), 'utf8'); +const logsTemplate = readFileSync(path.join(templatesDir, 'logs'), 'utf8'); +const testTemplate = readFileSync(path.join(templatesDir, 'test'), 'utf8'); + +/** + * Render a template with the given variables. 
+ */ +function renderTemplate(template, vars) { + return ejs.render(template, vars); +} + +/** Arbitrary for a base config shared by both deployment targets */ +const baseConfigArb = fc.record({ + projectName: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + deploymentConfig: fc.constantFrom('transformers-vllm', 'sklearn-flask', 'xgboost-fastapi'), + framework: fc.constantFrom('transformers', 'sklearn', 'xgboost', 'tensorflow'), + modelServer: fc.constantFrom('vllm', 'flask', 'fastapi', 'sglang'), + awsRegion: fc.constantFrom('us-east-1', 'us-west-2', 'eu-west-1'), + buildTarget: fc.constant('codebuild'), + modelName: fc.constantFrom('meta-llama/Llama-2-7b-hf', 'mistralai/Mistral-7B-v0.1'), + roleArn: fc.constantFrom('arn:aws:iam::123456789012:role/SageMakerRole', undefined), + inferenceAmiVersion: fc.constantFrom('1.0.0', undefined) +}); + +/** Arbitrary for HyperPod-specific config */ +const hyperPodConfigArb = fc.record({ + hyperPodCluster: fc.stringMatching(/^[a-z][a-z0-9-]{2,20}$/), + hyperPodNamespace: fc.constantFrom('default', 'ml-inference', 'production'), + hyperPodReplicas: fc.integer({ min: 1, max: 10 }), + fsxVolumeHandle: fc.option(fc.stringMatching(/^fs-[a-f0-9]{17}$/), { nil: undefined }) +}); + +describe('Property 6: Unified Script Generation', () => { + before(() => { + console.log('\n๐Ÿ“œ Starting Unified Script Generation Property Tests'); + console.log('๐Ÿ“‹ Testing: Requirements 5.1, 6.1, 15.1, 16.1'); + console.log('๐Ÿ”ง Configuration: EJS template rendering with fast-check\n'); + }); + + it('should generate exactly one do/deploy script for managed-inference (Req 5.1)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 5.1: Single do/deploy script for managed-inference'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('ml.m5.xlarge', 'ml.g5.xlarge'), + (base, instanceType) => { + const vars = { + ...base, + deploymentTarget: 'managed-inference', + instanceType, + hyperPodCluster: undefined, + hyperPodNamespace: 
undefined, + hyperPodReplicas: undefined, + fsxVolumeHandle: undefined + }; + + // Rendering should succeed and produce a single script + const output = renderTemplate(deployTemplate, vars); + + // Output must be a valid bash script + assert.ok( + output.startsWith('#!/bin/bash'), + 'do/deploy must start with bash shebang' + ); + + // Output must contain deployment logic (not be empty) + assert.ok( + output.length > 100, + 'do/deploy must contain substantial content' + ); + + // Must contain managed-inference specific content + assert.ok( + output.includes('sagemaker create-inference-component') || + output.includes('SageMaker'), + 'managed-inference do/deploy must contain SageMaker logic' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… Single do/deploy script generated for managed-inference'); + }); + + it('should generate exactly one do/deploy script for hyperpod-eks (Req 5.1)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 5.1: Single do/deploy script for hyperpod-eks'); + + fc.assert(fc.property( + baseConfigArb, + hyperPodConfigArb, + (base, hpVars) => { + const vars = { + ...base, + deploymentTarget: 'hyperpod-eks', + instanceType: undefined, + ...hpVars + }; + + // Rendering should succeed and produce a single script + const output = renderTemplate(deployTemplate, vars); + + // Output must be a valid bash script + assert.ok( + output.startsWith('#!/bin/bash'), + 'do/deploy must start with bash shebang' + ); + + // Output must contain deployment logic (not be empty) + assert.ok( + output.length > 100, + 'do/deploy must contain substantial content' + ); + + // Must contain hyperpod-eks specific content + assert.ok( + output.includes('kubectl') || + output.includes('HyperPod'), + 'hyperpod-eks do/deploy must contain kubectl logic' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… Single do/deploy script generated for hyperpod-eks'); + }); + + it('should generate exactly one do/clean script for managed-inference (Req 6.1)', function 
() { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 6.1: Single do/clean script for managed-inference'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('ml.m5.xlarge', 'ml.g5.xlarge'), + (base, instanceType) => { + const vars = { + ...base, + deploymentTarget: 'managed-inference', + instanceType, + hyperPodCluster: undefined, + hyperPodNamespace: undefined, + hyperPodReplicas: undefined, + fsxVolumeHandle: undefined + }; + + // Rendering should succeed and produce a single script + const output = renderTemplate(cleanTemplate, vars); + + // Output must be a valid bash script + assert.ok( + output.startsWith('#!/bin/bash'), + 'do/clean must start with bash shebang' + ); + + // Output must contain cleanup logic (not be empty) + assert.ok( + output.length > 100, + 'do/clean must contain substantial content' + ); + + // Must contain managed-inference specific cleanup + assert.ok( + output.includes('clean_endpoint') || + output.includes('delete-endpoint'), + 'managed-inference do/clean must contain endpoint cleanup' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… Single do/clean script generated for managed-inference'); + }); + + it('should generate exactly one do/clean script for hyperpod-eks (Req 6.1)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 6.1: Single do/clean script for hyperpod-eks'); + + fc.assert(fc.property( + baseConfigArb, + hyperPodConfigArb, + (base, hpVars) => { + const vars = { + ...base, + deploymentTarget: 'hyperpod-eks', + instanceType: undefined, + ...hpVars + }; + + // Rendering should succeed and produce a single script + const output = renderTemplate(cleanTemplate, vars); + + // Output must be a valid bash script + assert.ok( + output.startsWith('#!/bin/bash'), + 'do/clean must start with bash shebang' + ); + + // Output must contain cleanup logic (not be empty) + assert.ok( + output.length > 100, + 'do/clean must contain substantial content' + ); + + // Must contain hyperpod-eks specific cleanup + 
assert.ok( + output.includes('clean_hyperpod') || + output.includes('kubectl delete'), + 'hyperpod-eks do/clean must contain hyperpod cleanup' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… Single do/clean script generated for hyperpod-eks'); + }); + + it('should generate exactly one do/logs script for managed-inference (Req 15.1)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 15.1: Single do/logs script for managed-inference'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('ml.m5.xlarge', 'ml.g5.xlarge'), + (base, instanceType) => { + const vars = { + ...base, + deploymentTarget: 'managed-inference', + instanceType, + hyperPodCluster: undefined, + hyperPodNamespace: undefined, + hyperPodReplicas: undefined, + fsxVolumeHandle: undefined + }; + + // Rendering should succeed and produce a single script + const output = renderTemplate(logsTemplate, vars); + + // Output must be a valid bash script + assert.ok( + output.startsWith('#!/bin/bash'), + 'do/logs must start with bash shebang' + ); + + // Output must contain logs logic (not be empty) + assert.ok( + output.length > 100, + 'do/logs must contain substantial content' + ); + + // Must contain managed-inference specific logs + assert.ok( + output.includes('aws logs tail') || + output.includes('CloudWatch'), + 'managed-inference do/logs must contain CloudWatch logs logic' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… Single do/logs script generated for managed-inference'); + }); + + it('should generate exactly one do/logs script for hyperpod-eks (Req 15.1)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 15.1: Single do/logs script for hyperpod-eks'); + + fc.assert(fc.property( + baseConfigArb, + hyperPodConfigArb, + (base, hpVars) => { + const vars = { + ...base, + deploymentTarget: 'hyperpod-eks', + instanceType: undefined, + ...hpVars + }; + + // Rendering should succeed and produce a single script + const output = renderTemplate(logsTemplate, 
vars); + + // Output must be a valid bash script + assert.ok( + output.startsWith('#!/bin/bash'), + 'do/logs must start with bash shebang' + ); + + // Output must contain logs logic (not be empty) + assert.ok( + output.length > 100, + 'do/logs must contain substantial content' + ); + + // Must contain hyperpod-eks specific logs + assert.ok( + output.includes('kubectl logs') || + output.includes('HyperPod'), + 'hyperpod-eks do/logs must contain kubectl logs logic' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… Single do/logs script generated for hyperpod-eks'); + }); + + it('should generate exactly one do/test script for managed-inference (Req 16.1)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 16.1: Single do/test script for managed-inference'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('ml.m5.xlarge', 'ml.g5.xlarge'), + (base, instanceType) => { + const vars = { + ...base, + deploymentTarget: 'managed-inference', + instanceType, + hyperPodCluster: undefined, + hyperPodNamespace: undefined, + hyperPodReplicas: undefined, + fsxVolumeHandle: undefined + }; + + // Rendering should succeed and produce a single script + const output = renderTemplate(testTemplate, vars); + + // Output must be a valid bash script + assert.ok( + output.startsWith('#!/bin/bash'), + 'do/test must start with bash shebang' + ); + + // Output must contain test logic (not be empty) + assert.ok( + output.length > 100, + 'do/test must contain substantial content' + ); + + // Must contain managed-inference specific test + assert.ok( + output.includes('sagemaker-runtime invoke-endpoint') || + output.includes('SageMaker endpoint'), + 'managed-inference do/test must contain SageMaker test logic' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… Single do/test script generated for managed-inference'); + }); + + it('should generate exactly one do/test script for hyperpod-eks (Req 16.1)', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช Req 
16.1: Single do/test script for hyperpod-eks'); + + fc.assert(fc.property( + baseConfigArb, + hyperPodConfigArb, + (base, hpVars) => { + const vars = { + ...base, + deploymentTarget: 'hyperpod-eks', + instanceType: undefined, + ...hpVars + }; + + // Rendering should succeed and produce a single script + const output = renderTemplate(testTemplate, vars); + + // Output must be a valid bash script + assert.ok( + output.startsWith('#!/bin/bash'), + 'do/test must start with bash shebang' + ); + + // Output must contain test logic (not be empty) + assert.ok( + output.length > 100, + 'do/test must contain substantial content' + ); + + // Must contain hyperpod-eks specific test + assert.ok( + output.includes('kubectl port-forward') || + output.includes('HyperPod'), + 'hyperpod-eks do/test must contain kubectl port-forward logic' + ); + } + ), { numRuns: 20 }); + + console.log(' โœ… Single do/test script generated for hyperpod-eks'); + }); + + it('should render all four scripts successfully for any valid deployment target', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช All four scripts render successfully for any deployment target'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('managed-inference', 'hyperpod-eks'), + hyperPodConfigArb, + (base, deploymentTarget, hpVars) => { + const vars = { + ...base, + deploymentTarget, + instanceType: deploymentTarget === 'managed-inference' ? 'ml.m5.xlarge' : undefined, + ...(deploymentTarget === 'hyperpod-eks' ? 
hpVars : { + hyperPodCluster: undefined, + hyperPodNamespace: undefined, + hyperPodReplicas: undefined, + fsxVolumeHandle: undefined + }) + }; + + // All four templates should render without error + const deployOutput = renderTemplate(deployTemplate, vars); + const cleanOutput = renderTemplate(cleanTemplate, vars); + const logsOutput = renderTemplate(logsTemplate, vars); + const testOutput = renderTemplate(testTemplate, vars); + + // All outputs should be valid bash scripts + assert.ok(deployOutput.startsWith('#!/bin/bash'), 'do/deploy must be valid bash'); + assert.ok(cleanOutput.startsWith('#!/bin/bash'), 'do/clean must be valid bash'); + assert.ok(logsOutput.startsWith('#!/bin/bash'), 'do/logs must be valid bash'); + assert.ok(testOutput.startsWith('#!/bin/bash'), 'do/test must be valid bash'); + + // All outputs should have substantial content + assert.ok(deployOutput.length > 100, 'do/deploy must have content'); + assert.ok(cleanOutput.length > 100, 'do/clean must have content'); + assert.ok(logsOutput.length > 100, 'do/logs must have content'); + assert.ok(testOutput.length > 100, 'do/test must have content'); + } + ), { numRuns: 20 }); + + console.log(' โœ… All four scripts render successfully'); + }); + + it('should not generate deployment-target-specific script files', function () { + this.timeout(30000); + + console.log(' ๐Ÿงช No deployment-target-specific script files (e.g., deploy-hyperpod)'); + + fc.assert(fc.property( + baseConfigArb, + fc.constantFrom('managed-inference', 'hyperpod-eks'), + hyperPodConfigArb, + (base, deploymentTarget, hpVars) => { + const vars = { + ...base, + deploymentTarget, + instanceType: deploymentTarget === 'managed-inference' ? 'ml.m5.xlarge' : undefined, + ...(deploymentTarget === 'hyperpod-eks' ? 
hpVars : { + hyperPodCluster: undefined, + hyperPodNamespace: undefined, + hyperPodReplicas: undefined, + fsxVolumeHandle: undefined + }) + }; + + // Render all templates + const deployOutput = renderTemplate(deployTemplate, vars); + const cleanOutput = renderTemplate(cleanTemplate, vars); + const logsOutput = renderTemplate(logsTemplate, vars); + const testOutput = renderTemplate(testTemplate, vars); + + // Scripts should NOT reference other deployment-target-specific scripts + // (e.g., no ./do/deploy-hyperpod or ./do/clean-sagemaker) + const allOutputs = [deployOutput, cleanOutput, logsOutput, testOutput]; + + for (const output of allOutputs) { + assert.ok( + !output.includes('./do/deploy-hyperpod'), + 'Scripts must not reference deploy-hyperpod' + ); + assert.ok( + !output.includes('./do/deploy-sagemaker'), + 'Scripts must not reference deploy-sagemaker' + ); + assert.ok( + !output.includes('./do/clean-hyperpod'), + 'Scripts must not reference clean-hyperpod' + ); + assert.ok( + !output.includes('./do/clean-sagemaker'), + 'Scripts must not reference clean-sagemaker' + ); + assert.ok( + !output.includes('./do/logs-hyperpod'), + 'Scripts must not reference logs-hyperpod' + ); + assert.ok( + !output.includes('./do/test-hyperpod'), + 'Scripts must not reference test-hyperpod' + ); + } + } + ), { numRuns: 20 }); + + console.log(' โœ… No deployment-target-specific script files referenced'); + }); +}); diff --git a/test/property/config-manager.property.test.js b/test/property/config-manager.property.test.js index a4a2205..b4b17d6 100644 --- a/test/property/config-manager.property.test.js +++ b/test/property/config-manager.property.test.js @@ -108,7 +108,7 @@ describe('ConfigManager Property-Based Tests (Refactored)', () => { fc.record({ 'AWS_REGION': fc.option(fc.constantFrom('us-east-1', 'us-west-2', 'eu-west-1')), 'ML_INSTANCE_TYPE': fc.option(fc.constantFrom('ml.m5.large', 'ml.g5.xlarge')), - 'ML_DEPLOY_TARGET': fc.option(fc.constantFrom('codebuild')) + 
'ML_BUILD_TARGET': fc.option(fc.constantFrom('codebuild')) }), async (envVars) => { // Set environment variables @@ -132,8 +132,8 @@ describe('ConfigManager Property-Based Tests (Refactored)', () => { assert.strictEqual(config.instanceType, envVars.ML_INSTANCE_TYPE); } - if (envVars.ML_DEPLOY_TARGET) { - assert.strictEqual(config.deployTarget, envVars.ML_DEPLOY_TARGET); + if (envVars.ML_BUILD_TARGET) { + assert.strictEqual(config.buildTarget, envVars.ML_BUILD_TARGET); } return true; diff --git a/test/property/hyperpod-cluster-picker.property.test.js b/test/property/hyperpod-cluster-picker.property.test.js new file mode 100644 index 0000000..a3f4fa3 --- /dev/null +++ b/test/property/hyperpod-cluster-picker.property.test.js @@ -0,0 +1,280 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +/** + * HyperPod Cluster Picker Server Property-Based Tests + * + * Property-based tests for the hyperpod-cluster-picker MCP server. + * + * Feature: sagemaker-hyperpod-deployment + */ + +import fc from 'fast-check'; +import { describe, it } from 'mocha'; +import assert from 'assert'; +import { buildResponse } from '../../servers/hyperpod-cluster-picker/index.js'; + +const FAST_PROPERTY_CONFIG = { + numRuns: 100, + timeout: 30000, + verbose: false +}; + +// โ”€โ”€ Shared arbitrary generators โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/** Valid cluster statuses from the SageMaker API */ +const arbClusterStatus = fc.constantFrom('InService', 'Creating', 'Deleting', 'Failed', 'Updating', 'RollingBack'); + +/** Cluster name: alphanumeric + hyphens, 1-63 chars */ +const arbClusterName = fc.stringMatching(/^[a-z][a-z0-9-]{0,30}$/).filter(s => s.length > 0); + +/** Cluster ARN */ +const arbClusterArn = arbClusterName.map(name => + `arn:aws:sagemaker:us-east-1:123456789012:cluster/${name}` +); + +/** Orchestrator type */ +const 
arbOrchestratorType = fc.constantFrom('EKS', 'Slurm'); + +/** Instance group */ +const arbInstanceGroup = fc.record({ + name: fc.constantFrom('gpu-workers', 'cpu-workers', 'controllers', 'training-nodes'), + instanceType: fc.constantFrom('ml.p4d.24xlarge', 'ml.p5.48xlarge', 'ml.g5.48xlarge', 'ml.m5.xlarge'), + count: fc.integer({ min: 1, max: 64 }) +}); + +/** A full cluster summary as returned by ListClusters */ +const arbClusterSummary = fc.record({ + ClusterName: arbClusterName, + ClusterArn: arbClusterArn, + ClusterStatus: arbClusterStatus +}); + +/** A cluster detail as returned by DescribeCluster */ +const arbClusterDetail = fc.record({ + orchestratorType: arbOrchestratorType, + instanceGroups: fc.array(arbInstanceGroup, { minLength: 0, maxLength: 4 }) +}); + +/** A combined cluster entry for testing (summary + detail) */ +const arbClusterEntry = fc.record({ + summary: arbClusterSummary, + detail: arbClusterDetail +}); + +/** Positive integer limit */ +const arbLimit = fc.integer({ min: 1, max: 50 }); + + +// โ”€โ”€ Property tests โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +describe('HyperPod Cluster Picker Server Property-Based Tests', () => { + + // Feature: sagemaker-hyperpod-deployment, Property 4: Cluster Discovery Filtering + describe('Property 4: Cluster Discovery Filtering', () => { + it('only InService + EKS clusters pass through filtering', function () { + this.timeout(FAST_PROPERTY_CONFIG.timeout); + + fc.assert(fc.property( + fc.array(arbClusterEntry, { minLength: 0, maxLength: 20 }), + arbLimit, + (clusterEntries, limit) => { + // Simulate the filtering logic from fetchHyperPodClusters + // Step 1: Filter by InService status + const inServiceClusters = clusterEntries.filter( + e => e.summary.ClusterStatus === 'InService' + ); + + // Step 2: Filter by EKS orchestrator + const eksClusters = 
inServiceClusters.filter( + e => e.detail.orchestratorType === 'EKS' + ); + + // Step 3: Apply limit + const result = eksClusters.slice(0, limit); + + // Property: All results must be InService AND EKS + for (const cluster of result) { + assert.strictEqual(cluster.summary.ClusterStatus, 'InService', + `Cluster "${cluster.summary.ClusterName}" should be InService`); + assert.strictEqual(cluster.detail.orchestratorType, 'EKS', + `Cluster "${cluster.summary.ClusterName}" should use EKS orchestrator`); + } + + // Property: No Slurm clusters in result + const slurmInResult = result.filter(e => e.detail.orchestratorType === 'Slurm'); + assert.strictEqual(slurmInResult.length, 0, + 'No Slurm clusters should be in the result'); + + // Property: No non-InService clusters in result + const nonInServiceInResult = result.filter(e => e.summary.ClusterStatus !== 'InService'); + assert.strictEqual(nonInServiceInResult.length, 0, + 'No non-InService clusters should be in the result'); + + return true; + } + ), { numRuns: FAST_PROPERTY_CONFIG.numRuns, verbose: FAST_PROPERTY_CONFIG.verbose }); + }); + + it('Slurm clusters are always excluded', function () { + this.timeout(FAST_PROPERTY_CONFIG.timeout); + + fc.assert(fc.property( + arbClusterName, + arbClusterArn, + fc.array(arbInstanceGroup, { minLength: 1, maxLength: 3 }), + (name, arn, instanceGroups) => { + // Create a Slurm cluster that is InService + const slurmCluster = { + clusterName: name, + clusterArn: arn, + status: 'InService', + orchestratorType: 'Slurm', + instanceGroups + }; + + // Simulate filtering: Slurm should be excluded + const isEks = slurmCluster.orchestratorType === 'EKS'; + assert.strictEqual(isEks, false, + 'Slurm cluster should not pass EKS filter'); + + return true; + } + ), { numRuns: FAST_PROPERTY_CONFIG.numRuns, verbose: FAST_PROPERTY_CONFIG.verbose }); + }); + }); + + // Feature: sagemaker-hyperpod-deployment, Property 5: Cluster Discovery Response Completeness + describe('Property 5: Cluster 
Discovery Response Completeness', () => { + it('every returned cluster includes name, ARN, status, and instance groups', function () { + this.timeout(FAST_PROPERTY_CONFIG.timeout); + + fc.assert(fc.property( + fc.array( + fc.record({ + clusterName: arbClusterName, + clusterArn: arbClusterArn, + status: fc.constant('InService'), + instanceGroups: fc.array(arbInstanceGroup, { minLength: 0, maxLength: 4 }) + }), + { minLength: 0, maxLength: 10 } + ), + (clusters) => { + const result = buildResponse(clusters); + + // If clusters exist, metadata must be complete + if (clusters.length > 0) { + assert.ok(result.values.hyperPodCluster, + 'values.hyperPodCluster should be set'); + assert.ok(result.choices.hyperPodCluster.length > 0, + 'choices.hyperPodCluster should have entries'); + assert.ok(result.metadata, + 'metadata should be present'); + + // Check each cluster in metadata + for (const cluster of clusters) { + const meta = result.metadata[cluster.clusterName]; + assert.ok(meta, `Metadata for "${cluster.clusterName}" should exist`); + assert.ok(meta.clusterArn, 'clusterArn should be present'); + assert.ok(meta.status, 'status should be present'); + assert.ok(Array.isArray(meta.instanceGroups), + 'instanceGroups should be an array'); + } + } + + return true; + } + ), { numRuns: FAST_PROPERTY_CONFIG.numRuns, verbose: FAST_PROPERTY_CONFIG.verbose }); + }); + + it('instance group metadata includes name, instanceType, and count', function () { + this.timeout(FAST_PROPERTY_CONFIG.timeout); + + fc.assert(fc.property( + arbClusterName, + arbClusterArn, + fc.array(arbInstanceGroup, { minLength: 1, maxLength: 4 }), + (name, arn, instanceGroups) => { + const clusters = [{ + clusterName: name, + clusterArn: arn, + status: 'InService', + instanceGroups + }]; + + const result = buildResponse(clusters); + const meta = result.metadata[name]; + + assert.ok(meta, 'Cluster metadata should exist'); + assert.strictEqual(meta.instanceGroups.length, instanceGroups.length, + 'Instance group 
count should match'); + + for (let i = 0; i < instanceGroups.length; i++) { + const group = meta.instanceGroups[i]; + assert.ok(group.name, 'Instance group name should be present'); + assert.ok(group.instanceType, 'Instance group instanceType should be present'); + assert.ok(typeof group.count === 'number', 'Instance group count should be a number'); + } + + return true; + } + ), { numRuns: FAST_PROPERTY_CONFIG.numRuns, verbose: FAST_PROPERTY_CONFIG.verbose }); + }); + }); + + // Additional response format properties + describe('Response Format Invariants', () => { + it('empty clusters returns empty choices with descriptive message', function () { + this.timeout(FAST_PROPERTY_CONFIG.timeout); + + const result = buildResponse([]); + assert.deepStrictEqual(result.choices.hyperPodCluster, []); + assert.deepStrictEqual(result.values, {}); + assert.ok(result.message, 'Should include a descriptive message'); + }); + + it('values.hyperPodCluster equals first choice when non-empty', function () { + this.timeout(FAST_PROPERTY_CONFIG.timeout); + + fc.assert(fc.property( + fc.array( + fc.record({ + clusterName: arbClusterName, + clusterArn: arbClusterArn, + status: fc.constant('InService'), + instanceGroups: fc.array(arbInstanceGroup, { minLength: 0, maxLength: 2 }) + }), + { minLength: 1, maxLength: 10 } + ), + (clusters) => { + const result = buildResponse(clusters); + assert.strictEqual(result.values.hyperPodCluster, result.choices.hyperPodCluster[0], + 'values.hyperPodCluster should equal first choice'); + return true; + } + ), { numRuns: FAST_PROPERTY_CONFIG.numRuns, verbose: FAST_PROPERTY_CONFIG.verbose }); + }); + + it('choices length equals input clusters length', function () { + this.timeout(FAST_PROPERTY_CONFIG.timeout); + + fc.assert(fc.property( + fc.array( + fc.record({ + clusterName: arbClusterName, + clusterArn: arbClusterArn, + status: fc.constant('InService'), + instanceGroups: fc.array(arbInstanceGroup, { minLength: 0, maxLength: 2 }) + }), + { minLength: 0, 
maxLength: 10 } + ), + (clusters) => { + const result = buildResponse(clusters); + assert.strictEqual(result.choices.hyperPodCluster.length, clusters.length, + 'choices length should match input clusters length'); + return true; + } + ), { numRuns: FAST_PROPERTY_CONFIG.numRuns, verbose: FAST_PROPERTY_CONFIG.verbose }); + }); + }); +}); diff --git a/test/property/template-manager-hyperpod-validation.property.test.js b/test/property/template-manager-hyperpod-validation.property.test.js new file mode 100644 index 0000000..3d747ce --- /dev/null +++ b/test/property/template-manager-hyperpod-validation.property.test.js @@ -0,0 +1,349 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +/** + * Template Manager HyperPod Validation Property-Based Tests + * + * Property 12: HyperPod Validation Rules + * Validates: Requirements 10.3, 10.4, 10.5, 10.6 + * + * Property 13: Enum Validation + * Validates: Requirements 1.5, 1.6, 10.1, 10.2 + * + * Feature: sagemaker-hyperpod-deployment + */ + +import fc from 'fast-check'; +import { describe, it } from 'mocha'; +import assert from 'assert'; +import TemplateManager from '../../generators/app/lib/template-manager.js'; + +const FAST_PROPERTY_CONFIG = { + numRuns: 100, + timeout: 30000, + verbose: false +}; + +// โ”€โ”€ Shared arbitrary generators โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/** Base answers that are always valid (non-HyperPod fields) */ +const baseValidAnswers = { + deploymentConfig: 'sklearn-flask', + awsRegion: 'us-east-1' +}; + +/** Valid RFC 1123 DNS label: lowercase alphanumeric + hyphens, 1-63 chars, no leading/trailing hyphen */ +const arbValidNamespace = fc.stringMatching(/^[a-z0-9]([a-z0-9-]{0,10}[a-z0-9])?$/) + .filter(s => s.length >= 1 && s.length <= 63); + +/** Invalid namespace: strings that violate RFC 1123 */ +const arbInvalidNamespace = 
fc.oneof( + // Starts with hyphen + fc.stringMatching(/^-[a-z0-9-]{0,10}$/).filter(s => s.length > 0), + // Ends with hyphen + fc.stringMatching(/^[a-z0-9][a-z0-9-]{0,10}-$/).filter(s => s.length > 1), + // Contains uppercase + fc.stringMatching(/^[a-z0-9]*[A-Z][a-zA-Z0-9-]*$/).filter(s => s.length > 0), + // Contains special characters + fc.stringMatching(/^[a-z0-9]*[_.!@#][a-z0-9]*$/).filter(s => s.length > 0), + // Too long (>63 chars) + fc.stringMatching(/^[a-z][a-z0-9]{63,80}$/) +); + +/** Valid cluster name */ +const arbValidClusterName = fc.stringMatching(/^[a-z][a-z0-9-]{0,30}$/).filter(s => s.length > 0); + +/** Valid replicas (integer >= 1) */ +const arbValidReplicas = fc.integer({ min: 1, max: 1000 }); + +/** Invalid replicas */ +const arbInvalidReplicas = fc.oneof( + fc.integer({ min: -100, max: 0 }), + fc.double({ min: 0.1, max: 99.9, noNaN: true }).filter(v => !Number.isInteger(v)) +); + +// โ”€โ”€ Property 12: HyperPod Validation Rules โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +describe('Template Manager HyperPod Validation Property-Based Tests', () => { + + describe('Property 12: HyperPod Validation Rules', () => { + + it('valid HyperPod config always passes validation (Req 10.3, 10.4, 10.5)', function () { + this.timeout(FAST_PROPERTY_CONFIG.timeout); + + fc.assert(fc.property( + arbValidClusterName, + arbValidNamespace, + arbValidReplicas, + (cluster, namespace, replicas) => { + const answers = { + ...baseValidAnswers, + buildTarget: 'codebuild', + deploymentTarget: 'hyperpod-eks', + hyperPodCluster: cluster, + hyperPodNamespace: namespace, + hyperPodReplicas: replicas + }; + + const manager = new TemplateManager(answers); + // Should not throw + manager.validate(); + return true; + } + ), { numRuns: FAST_PROPERTY_CONFIG.numRuns, verbose: FAST_PROPERTY_CONFIG.verbose }); + }); + + it('empty or missing hyperPodCluster always fails validation (Req 10.3)', function () { + 
this.timeout(FAST_PROPERTY_CONFIG.timeout); + + fc.assert(fc.property( + fc.oneof( + fc.constant(''), + fc.constant(' '), + fc.constant(undefined), + fc.constant(null) + ), + arbValidNamespace, + arbValidReplicas, + (cluster, namespace, replicas) => { + const answers = { + ...baseValidAnswers, + buildTarget: 'codebuild', + deploymentTarget: 'hyperpod-eks', + hyperPodCluster: cluster, + hyperPodNamespace: namespace, + hyperPodReplicas: replicas + }; + + const manager = new TemplateManager(answers); + assert.throws( + () => manager.validate(), + /hyperPodCluster is required/, + 'Should throw for empty/missing hyperPodCluster' + ); + return true; + } + ), { numRuns: FAST_PROPERTY_CONFIG.numRuns, verbose: FAST_PROPERTY_CONFIG.verbose }); + }); + + it('invalid namespace always fails validation (Req 10.4, 10.6)', function () { + this.timeout(FAST_PROPERTY_CONFIG.timeout); + + fc.assert(fc.property( + arbValidClusterName, + arbInvalidNamespace, + arbValidReplicas, + (cluster, namespace, replicas) => { + const answers = { + ...baseValidAnswers, + buildTarget: 'codebuild', + deploymentTarget: 'hyperpod-eks', + hyperPodCluster: cluster, + hyperPodNamespace: namespace, + hyperPodReplicas: replicas + }; + + const manager = new TemplateManager(answers); + assert.throws( + () => manager.validate(), + /Invalid hyperPodNamespace/, + `Namespace "${namespace}" should fail RFC 1123 validation` + ); + return true; + } + ), { numRuns: FAST_PROPERTY_CONFIG.numRuns, verbose: FAST_PROPERTY_CONFIG.verbose }); + }); + + it('invalid replicas always fails validation (Req 10.5, 10.6)', function () { + this.timeout(FAST_PROPERTY_CONFIG.timeout); + + fc.assert(fc.property( + arbValidClusterName, + arbValidNamespace, + arbInvalidReplicas, + (cluster, namespace, replicas) => { + const answers = { + ...baseValidAnswers, + buildTarget: 'codebuild', + deploymentTarget: 'hyperpod-eks', + hyperPodCluster: cluster, + hyperPodNamespace: namespace, + hyperPodReplicas: replicas + }; + + const manager = new 
TemplateManager(answers); + assert.throws( + () => manager.validate(), + /Invalid hyperPodReplicas/, + `Replicas "${replicas}" should fail validation` + ); + return true; + } + ), { numRuns: FAST_PROPERTY_CONFIG.numRuns, verbose: FAST_PROPERTY_CONFIG.verbose }); + }); + + it('validation errors always include descriptive field name (Req 10.6)', function () { + this.timeout(FAST_PROPERTY_CONFIG.timeout); + + // Test cluster error message + const clusterAnswers = { + ...baseValidAnswers, + buildTarget: 'codebuild', + deploymentTarget: 'hyperpod-eks', + hyperPodCluster: '', + hyperPodNamespace: 'default', + hyperPodReplicas: 1 + }; + const clusterManager = new TemplateManager(clusterAnswers); + assert.throws(() => clusterManager.validate(), /hyperPodCluster/); + + // Test namespace error message + const nsAnswers = { + ...baseValidAnswers, + buildTarget: 'codebuild', + deploymentTarget: 'hyperpod-eks', + hyperPodCluster: 'my-cluster', + hyperPodNamespace: '-invalid', + hyperPodReplicas: 1 + }; + const nsManager = new TemplateManager(nsAnswers); + assert.throws(() => nsManager.validate(), /hyperPodNamespace/); + + // Test replicas error message + const repAnswers = { + ...baseValidAnswers, + buildTarget: 'codebuild', + deploymentTarget: 'hyperpod-eks', + hyperPodCluster: 'my-cluster', + hyperPodNamespace: 'default', + hyperPodReplicas: 0 + }; + const repManager = new TemplateManager(repAnswers); + assert.throws(() => repManager.validate(), /hyperPodReplicas/); + }); + + it('HyperPod validation is skipped for managed-inference (Req 10.3, 10.4, 10.5)', function () { + this.timeout(FAST_PROPERTY_CONFIG.timeout); + + fc.assert(fc.property( + fc.constant('managed-inference'), + (deploymentTarget) => { + // managed-inference should not require HyperPod fields + const answers = { + ...baseValidAnswers, + buildTarget: 'codebuild', + deploymentTarget, + instanceType: 'ml.m5.large' + // No hyperPodCluster, hyperPodNamespace, hyperPodReplicas + }; + + const manager = new 
TemplateManager(answers); + manager.validate(); + return true; + } + ), { numRuns: FAST_PROPERTY_CONFIG.numRuns, verbose: FAST_PROPERTY_CONFIG.verbose }); + }); + }); + + // โ”€โ”€ Property 13: Enum Validation โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + + describe('Property 13: Enum Validation', () => { + + it('valid buildTarget values always pass validation (Req 1.5, 10.1)', function () { + this.timeout(FAST_PROPERTY_CONFIG.timeout); + + fc.assert(fc.property( + fc.constantFrom('codebuild'), + (buildTarget) => { + const answers = { + ...baseValidAnswers, + buildTarget, + deploymentTarget: 'managed-inference', + instanceType: 'ml.m5.large' + }; + + const manager = new TemplateManager(answers); + manager.validate(); + return true; + } + ), { numRuns: FAST_PROPERTY_CONFIG.numRuns, verbose: FAST_PROPERTY_CONFIG.verbose }); + }); + + it('invalid buildTarget values always fail validation (Req 1.5, 10.1)', function () { + this.timeout(FAST_PROPERTY_CONFIG.timeout); + + fc.assert(fc.property( + fc.string({ minLength: 1, maxLength: 30 }) + .filter(s => !['codebuild'].includes(s)), + (buildTarget) => { + const answers = { + ...baseValidAnswers, + buildTarget, + deploymentTarget: 'managed-inference', + instanceType: 'ml.m5.large' + }; + + const manager = new TemplateManager(answers); + assert.throws( + () => manager.validate(), + /not implemented yet for buildTarget/, + `buildTarget "${buildTarget}" should fail validation` + ); + return true; + } + ), { numRuns: FAST_PROPERTY_CONFIG.numRuns, verbose: FAST_PROPERTY_CONFIG.verbose }); + }); + + it('valid deploymentTarget values always pass validation (Req 1.6, 10.2)', function () { + this.timeout(FAST_PROPERTY_CONFIG.timeout); + + fc.assert(fc.property( + fc.constantFrom('managed-inference', 'hyperpod-eks'), + (deploymentTarget) => { + const answers = { + ...baseValidAnswers, + buildTarget: 'codebuild', + deploymentTarget + }; + + // Add required 
fields based on deployment target + if (deploymentTarget === 'managed-inference') { + answers.instanceType = 'ml.m5.large'; + } else { + answers.hyperPodCluster = 'my-cluster'; + answers.hyperPodNamespace = 'default'; + answers.hyperPodReplicas = 1; + } + + const manager = new TemplateManager(answers); + manager.validate(); + return true; + } + ), { numRuns: FAST_PROPERTY_CONFIG.numRuns, verbose: FAST_PROPERTY_CONFIG.verbose }); + }); + + it('invalid deploymentTarget values always fail validation (Req 1.6, 10.2)', function () { + this.timeout(FAST_PROPERTY_CONFIG.timeout); + + fc.assert(fc.property( + fc.string({ minLength: 1, maxLength: 30 }) + .filter(s => !['managed-inference', 'hyperpod-eks'].includes(s)), + (deploymentTarget) => { + const answers = { + ...baseValidAnswers, + buildTarget: 'codebuild', + deploymentTarget + }; + + const manager = new TemplateManager(answers); + assert.throws( + () => manager.validate(), + /not implemented yet for deploymentTarget/, + `deploymentTarget "${deploymentTarget}" should fail validation` + ); + return true; + } + ), { numRuns: FAST_PROPERTY_CONFIG.numRuns, verbose: FAST_PROPERTY_CONFIG.verbose }); + }); + }); +}); diff --git a/test/template-manager.test.js b/test/template-manager.test.js index 79c3aef..839a3fd 100644 --- a/test/template-manager.test.js +++ b/test/template-manager.test.js @@ -17,7 +17,8 @@ describe('TemplateManager', () => { it('should pass validation for supported deployment configurations', () => { const answers = { deploymentConfig: 'sklearn-flask', - deployTarget: 'sagemaker', + buildTarget: 'codebuild', + deploymentTarget: 'managed-inference', instanceType: 'ml.m5.large', awsRegion: 'us-east-1', awsRoleArn: '', @@ -32,7 +33,8 @@ describe('TemplateManager', () => { it('should pass validation for transformers deployment configurations', () => { const answers = { deploymentConfig: 'transformers-vllm', - deployTarget: 'sagemaker', + buildTarget: 'codebuild', + deploymentTarget: 'managed-inference', 
instanceType: 'ml.g5.xlarge', awsRegion: 'us-east-1', awsRoleArn: '', @@ -47,7 +49,7 @@ describe('TemplateManager', () => { it('should throw error for unsupported deployment configuration', () => { const answers = { deploymentConfig: 'pytorch-torchserve', - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.m5.large', awsRegion: 'us-east-1', awsRoleArn: '' @@ -61,7 +63,7 @@ describe('TemplateManager', () => { const answers = { framework: 'sklearn', modelServer: 'flask', - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.m5.large', awsRegion: 'us-east-1', awsRoleArn: '', @@ -77,7 +79,7 @@ describe('TemplateManager', () => { const answers = { framework: 'pytorch', modelServer: 'flask', - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.m5.large', awsRegion: 'us-east-1', awsRoleArn: '' @@ -91,7 +93,7 @@ describe('TemplateManager', () => { const answers = { framework: 'sklearn', modelServer: 'torchserve', - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.m5.large', awsRegion: 'us-east-1', awsRoleArn: '' @@ -105,7 +107,7 @@ describe('TemplateManager', () => { const answers = { framework: 'sklearn', modelServer: 'tensorrt-llm', - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.g5.xlarge', awsRegion: 'us-east-1', awsRoleArn: '' diff --git a/test/unit.test.js b/test/unit.test.js index 0ffc21e..7835806 100644 --- a/test/unit.test.js +++ b/test/unit.test.js @@ -13,7 +13,7 @@ describe('Unit Tests', () => { it('should validate supported deployment configurations', () => { const answers = { deploymentConfig: 'sklearn-flask', - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.m5.large', awsRegion: 'us-east-1', awsRoleArn: '' @@ -26,7 +26,7 @@ describe('Unit Tests', () => { it('should validate transformers deployment configurations', () => { const answers = { deploymentConfig: 'transformers-vllm', - deployTarget: 'sagemaker', + 
buildTarget: 'codebuild', instanceType: 'ml.g5.xlarge', awsRegion: 'us-east-1', awsRoleArn: '' @@ -40,7 +40,7 @@ describe('Unit Tests', () => { const answers = { framework: 'sklearn', modelServer: 'flask', - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.m5.large', awsRegion: 'us-east-1', awsRoleArn: '' @@ -54,7 +54,7 @@ describe('Unit Tests', () => { const answers = { framework: 'pytorch', modelServer: 'flask', - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.m5.large', awsRegion: 'us-east-1', awsRoleArn: '' @@ -67,7 +67,7 @@ describe('Unit Tests', () => { it('should throw error for unsupported deployment configuration', () => { const answers = { deploymentConfig: 'pytorch-torchserve', - deployTarget: 'sagemaker', + buildTarget: 'codebuild', instanceType: 'ml.m5.large', awsRegion: 'us-east-1', awsRoleArn: '' @@ -77,4 +77,4 @@ describe('Unit Tests', () => { assert.throws(() => manager.validate(), /pytorch-torchserve not implemented yet/); }); }); -}); \ No newline at end of file +}); diff --git a/test/unit/config-manager-unit.test.js b/test/unit/config-manager-unit.test.js index 8b6d211..966356e 100644 --- a/test/unit/config-manager-unit.test.js +++ b/test/unit/config-manager-unit.test.js @@ -136,16 +136,16 @@ describe('ConfigManager Unit Tests', () => { assert.strictEqual(config.instanceType, 'ml.g5.xlarge'); }); - it('should load ML_DEPLOY_TARGET from environment', async () => { - process.env.ML_DEPLOY_TARGET = 'codebuild'; - envVarsToCleanup.push('ML_DEPLOY_TARGET'); + it('should load ML_BUILD_TARGET from environment', async () => { + process.env.ML_BUILD_TARGET = 'codebuild'; + envVarsToCleanup.push('ML_BUILD_TARGET'); mockGenerator = createMockGenerator(); configManager = new ConfigManager(mockGenerator); const config = await configManager.loadConfiguration(); - assert.strictEqual(config.deployTarget, 'codebuild'); + assert.strictEqual(config.buildTarget, 'codebuild'); }); it('should load AWS_ROLE from 
environment', async () => { @@ -208,7 +208,7 @@ describe('ConfigManager Unit Tests', () => { const config = await configManager.loadConfiguration(); assert.strictEqual(config.awsRegion, 'us-east-1'); // Default - assert.strictEqual(config.deployTarget, 'codebuild'); // Default + assert.strictEqual(config.buildTarget, 'codebuild'); // Default assert.strictEqual(config.includeTesting, true); // Default }); }); @@ -398,7 +398,7 @@ describe('ConfigManager Unit Tests', () => { const finalConfig = configManager.getFinalConfiguration({}); assert.strictEqual(finalConfig.awsRegion, 'us-east-1'); - assert.strictEqual(finalConfig.deployTarget, 'codebuild'); + assert.strictEqual(finalConfig.buildTarget, 'codebuild'); assert.strictEqual(finalConfig.includeTesting, true); }); @@ -415,13 +415,13 @@ describe('ConfigManager Unit Tests', () => { assert.strictEqual(finalConfig.includeSampleModel, false); }); - it('should generate CodeBuild project name when deployTarget is codebuild', async () => { + it('should generate CodeBuild project name when buildTarget is codebuild', async () => { await configManager.loadConfiguration(); const promptAnswers = { projectName: 'my-project', framework: 'sklearn', - deployTarget: 'codebuild' + buildTarget: 'codebuild' }; const finalConfig = configManager.getFinalConfiguration(promptAnswers); @@ -518,7 +518,8 @@ describe('ConfigManager Unit Tests', () => { instanceType: 'ml.m5.large', projectName: 'test-project', destinationDir: '.', - deployTarget: 'codebuild', + buildTarget: 'codebuild', + deploymentTarget: 'managed-inference', includeSampleModel: false, includeTesting: true }; @@ -554,7 +555,8 @@ describe('ConfigManager Unit Tests', () => { instanceType: 'ml.g5.xlarge', projectName: 'test-project', destinationDir: '.', - deployTarget: 'codebuild', + buildTarget: 'codebuild', + deploymentTarget: 'managed-inference', includeSampleModel: false, includeTesting: true }; @@ -583,6 +585,19 @@ describe('ConfigManager Unit Tests', () => { }); it('should 
include environment variables in explicit config', async () => { + process.env.ML_INSTANCE_TYPE = 'ml.m5.xlarge'; + envVarsToCleanup.push('ML_INSTANCE_TYPE'); + + mockGenerator = createMockGenerator(); + configManager = new ConfigManager(mockGenerator); + + await configManager.loadConfiguration(); + const explicitConfig = configManager.getExplicitConfiguration(); + + assert.strictEqual(explicitConfig.instanceType, 'ml.m5.xlarge'); + }); + + it('should treat ambient env vars as defaults, not explicit config', async () => { process.env.AWS_REGION = 'eu-west-1'; envVarsToCleanup.push('AWS_REGION'); @@ -592,7 +607,9 @@ describe('ConfigManager Unit Tests', () => { await configManager.loadConfiguration(); const explicitConfig = configManager.getExplicitConfiguration(); - assert.strictEqual(explicitConfig.awsRegion, 'eu-west-1'); + // AWS_REGION is ambient โ€” sets the config value but not explicit config + assert.strictEqual(configManager.config.awsRegion, 'eu-west-1'); + assert.strictEqual(explicitConfig.awsRegion, undefined); }); }); diff --git a/test/unit/mcp-client.test.js b/test/unit/mcp-client.test.js index 4be0ccb..176ccd3 100644 --- a/test/unit/mcp-client.test.js +++ b/test/unit/mcp-client.test.js @@ -74,7 +74,8 @@ describe('McpClient Unit Tests', () => { assert.ok(names.includes('instanceType')); assert.ok(names.includes('awsRoleArn')); assert.ok(names.includes('awsRegion')); - assert.strictEqual(names.length, 3); + assert.ok(names.includes('hyperPodCluster')); + assert.strictEqual(names.length, 4); }); it('should not include bounded parameters', () => { @@ -383,8 +384,8 @@ describe('McpClient Unit Tests', () => { const context = client._buildContext(); - // deployTarget has default 'codebuild' and is bounded - assert.strictEqual(context.deployTarget, 'codebuild'); + // buildTarget has default 'codebuild' and is bounded + assert.strictEqual(context.buildTarget, 'codebuild'); }); it('should not include parameters with null defaults', () => {