From 544ae48f99e3fd62acde902f61c0687cb0bcbc3f Mon Sep 17 00:00:00 2001 From: Alex Streed Date: Wed, 14 Jan 2026 13:03:41 -0600 Subject: [PATCH 1/8] Add documentation for prefect sdk generate CLI - Add auto-generated CLI reference for `prefect sdk` command - Add how-to guide for generating typed SDKs from deployments - Update navigation in docs.json for both new pages - Regenerate CLI docs to include new --output options Co-Authored-By: Claude Opus 4.5 --- docs/docs.json | 4 +- docs/v3/api-ref/cli/artifact.mdx | 4 + docs/v3/api-ref/cli/block.mdx | 45 ++++ docs/v3/api-ref/cli/blocks.mdx | 45 ++++ docs/v3/api-ref/cli/deployment.mdx | 12 + docs/v3/api-ref/cli/deployments.mdx | 12 + docs/v3/api-ref/cli/dev.mdx | 4 +- docs/v3/api-ref/cli/gcl.mdx | 15 ++ .../api-ref/cli/global-concurrency-limit.mdx | 15 ++ docs/v3/api-ref/cli/sdk.mdx | 82 +++++++ docs/v3/api-ref/cli/variable.mdx | 4 + .../deployments/generate-typed-sdk.mdx | 232 ++++++++++++++++++ 12 files changed, 471 insertions(+), 3 deletions(-) create mode 100644 docs/v3/api-ref/cli/sdk.mdx create mode 100644 docs/v3/how-to-guides/deployments/generate-typed-sdk.mdx diff --git a/docs/docs.json b/docs/docs.json index 85dfbd232fa8..17109ebb9b06 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -157,7 +157,8 @@ "v3/how-to-guides/deployments/prefect-yaml", "v3/how-to-guides/deployments/store-flow-code", "v3/how-to-guides/deployments/versioning", - "v3/how-to-guides/deployments/customize-job-variables" + "v3/how-to-guides/deployments/customize-job-variables", + "v3/how-to-guides/deployments/generate-typed-sdk" ] }, { @@ -800,6 +801,7 @@ "v3/api-ref/cli/init", "v3/api-ref/cli/profile", "v3/api-ref/cli/profiles", + "v3/api-ref/cli/sdk", "v3/api-ref/cli/server", "v3/api-ref/cli/shell", "v3/api-ref/cli/task-run", diff --git a/docs/v3/api-ref/cli/artifact.mdx b/docs/v3/api-ref/cli/artifact.mdx index 770c70079dd6..37f9918c68e9 100644 --- a/docs/v3/api-ref/cli/artifact.mdx +++ b/docs/v3/api-ref/cli/artifact.mdx @@ -58,6 +58,10 
@@ List artifacts. Whether or not to only return the latest version of each artifact. + + Specify an output format. Currently supports: json + + diff --git a/docs/v3/api-ref/cli/block.mdx b/docs/v3/api-ref/cli/block.mdx index b4211aaaf66e..ea7a73a98db3 100644 --- a/docs/v3/api-ref/cli/block.mdx +++ b/docs/v3/api-ref/cli/block.mdx @@ -96,6 +96,21 @@ View all configured blocks. + + + + + + + + + Specify an output format. Currently supports: json + + + + + + ## `prefect block delete` @@ -256,6 +271,21 @@ List all block types. + + + + + + + + + Specify an output format. Currently supports: json + + + + + + ### `prefect block types inspect` @@ -366,6 +396,21 @@ List all block types. + + + + + + + + + Specify an output format. Currently supports: json + + + + + + ### `prefect block type inspect` diff --git a/docs/v3/api-ref/cli/blocks.mdx b/docs/v3/api-ref/cli/blocks.mdx index 18af72fc95cf..a6221c948be5 100644 --- a/docs/v3/api-ref/cli/blocks.mdx +++ b/docs/v3/api-ref/cli/blocks.mdx @@ -96,6 +96,21 @@ View all configured blocks. + + + + + + + + + Specify an output format. Currently supports: json + + + + + + ## `prefect blocks delete` @@ -256,6 +271,21 @@ List all block types. + + + + + + + + + Specify an output format. Currently supports: json + + + + + + ### `prefect blocks types inspect` @@ -366,6 +396,21 @@ List all block types. + + + + + + + + + Specify an output format. Currently supports: json + + + + + + ### `prefect blocks type inspect` diff --git a/docs/v3/api-ref/cli/deployment.mdx b/docs/v3/api-ref/cli/deployment.mdx index 3df06e0b4d12..8ae49a36cce1 100644 --- a/docs/v3/api-ref/cli/deployment.mdx +++ b/docs/v3/api-ref/cli/deployment.mdx @@ -143,6 +143,10 @@ View all deployments or deployments for specific flows. + + Specify an output format. Currently supports: json + + @@ -576,6 +580,14 @@ View all schedules for a deployment. + + + + Specify an output format. 
Currently supports: json + + + + diff --git a/docs/v3/api-ref/cli/deployments.mdx b/docs/v3/api-ref/cli/deployments.mdx index 46dc57c80e8e..1d7fbddec3e7 100644 --- a/docs/v3/api-ref/cli/deployments.mdx +++ b/docs/v3/api-ref/cli/deployments.mdx @@ -143,6 +143,10 @@ View all deployments or deployments for specific flows. + + Specify an output format. Currently supports: json + + @@ -576,6 +580,14 @@ View all schedules for a deployment. + + + + Specify an output format. Currently supports: json + + + + diff --git a/docs/v3/api-ref/cli/dev.mdx b/docs/v3/api-ref/cli/dev.mdx index 13035d279428..a247e22de558 100644 --- a/docs/v3/api-ref/cli/dev.mdx +++ b/docs/v3/api-ref/cli/dev.mdx @@ -238,11 +238,11 @@ Build a docker image for development. - The architecture to build the container for. Defaults to the architecture of the host Python. [default: x86_64] + The architecture to build the container for. Defaults to the architecture of the host Python. [default: arm64] - The Python version to build the container for. Defaults to the version of the host Python. [default: 3.12] + The Python version to build the container for. Defaults to the version of the host Python. [default: 3.14] diff --git a/docs/v3/api-ref/cli/gcl.mdx b/docs/v3/api-ref/cli/gcl.mdx index c16d1a00cca4..ba50827e3b52 100644 --- a/docs/v3/api-ref/cli/gcl.mdx +++ b/docs/v3/api-ref/cli/gcl.mdx @@ -43,6 +43,21 @@ List all global concurrency limits. + + + + + + + + + Specify an output format. Currently supports: json + + + + + + ## `prefect gcl inspect` diff --git a/docs/v3/api-ref/cli/global-concurrency-limit.mdx b/docs/v3/api-ref/cli/global-concurrency-limit.mdx index b772c5c1f2da..d3ed46269c45 100644 --- a/docs/v3/api-ref/cli/global-concurrency-limit.mdx +++ b/docs/v3/api-ref/cli/global-concurrency-limit.mdx @@ -43,6 +43,21 @@ List all global concurrency limits. + + + + + + + + + Specify an output format. 
Currently supports: json + + + + + + ## `prefect global-concurrency-limit inspect` diff --git a/docs/v3/api-ref/cli/sdk.mdx b/docs/v3/api-ref/cli/sdk.mdx new file mode 100644 index 000000000000..6e30b727b2d9 --- /dev/null +++ b/docs/v3/api-ref/cli/sdk.mdx @@ -0,0 +1,82 @@ +--- +title: " " +sidebarTitle: prefect sdk +--- + +# `prefect sdk` + + + +```command +prefect sdk [OPTIONS] COMMAND [ARGS]... +``` + + + + +Manage Prefect SDKs. (beta) + + + + + + + + + +## `prefect sdk generate` + + + +```command +prefect sdk generate [OPTIONS] +``` + + + + +(beta) Generate a typed Python SDK from workspace deployments. + +The generated SDK provides IDE autocomplete and type checking for your deployments. +Requires an active Prefect API connection (use `prefect cloud login` or configure +PREFECT_API_URL). + + +Examples: + Generate SDK for all deployments: + \$ prefect sdk generate --output ./my_sdk.py + + Generate SDK for specific flows: + \$ prefect sdk generate --output ./my_sdk.py --flow my-etl-flow + + Generate SDK for specific deployments: + \$ prefect sdk generate --output ./my_sdk.py --deployment my-flow/production + + + + + + + + + + + + + + + + Output file path for the generated SDK. + + + + Filter to specific flow(s). Can be specified multiple times. + + + + Filter to specific deployment(s). Can be specified multiple times. Use 'flow-name/deployment-name' format for exact matching. + + + + + diff --git a/docs/v3/api-ref/cli/variable.mdx b/docs/v3/api-ref/cli/variable.mdx index 380cc7a0203b..fc8bea904876 100644 --- a/docs/v3/api-ref/cli/variable.mdx +++ b/docs/v3/api-ref/cli/variable.mdx @@ -54,6 +54,10 @@ List variables. The maximum number of variables to return. + + Specify an output format. 
Currently supports: json + + diff --git a/docs/v3/how-to-guides/deployments/generate-typed-sdk.mdx b/docs/v3/how-to-guides/deployments/generate-typed-sdk.mdx new file mode 100644 index 000000000000..0ea3afe57849 --- /dev/null +++ b/docs/v3/how-to-guides/deployments/generate-typed-sdk.mdx @@ -0,0 +1,232 @@ +--- +title: Generate a Typed SDK for Your Deployments +sidebarTitle: Generate Typed SDK +description: Generate a typed Python SDK from your workspace deployments for IDE autocomplete and type checking. +--- + + +The `prefect sdk generate` command is in **beta**. APIs may change in future releases. + + +The Custom Deployments SDK feature generates a typed Python file from your workspace deployments. This provides: +- **IDE autocomplete** for discovering flows and deployments +- **Static type checking** for parameters and job variables +- **Reduced runtime errors** by catching type mismatches before execution + +## Before and after + +### Without a generated SDK + +```python +from prefect.deployments import run_deployment + +# No autocomplete, no type checking, runtime errors for typos +run_deployment( + name="my-etl-flow/production", # Easy to typo + parameters={"sorce": "s3://bucket"}, # Typo not caught until runtime # codespell:ignore sorce +) +``` + +### With a generated SDK + +```python +from my_sdk import deployments + +# IDE autocomplete, type checking, errors caught immediately +deployments.from_name("my-etl-flow/production").with_options( + tags=["production"], +).with_infra( + memory="8Gi", +).run( + source="s3://bucket", + batch_size=100, +) +``` + +## Prerequisites + +- An active Prefect API connection (Prefect Cloud or self-hosted server) +- At least one deployment in your workspace + +## Generate the SDK + +Run the following command to generate a typed SDK: + +```bash +prefect sdk generate --output ./my_sdk.py +``` + +This fetches all deployments from your connected workspace and generates a Python file with typed classes for each deployment. 
+ +### Filter by flow or deployment + +Generate an SDK for specific flows: + +```bash +prefect sdk generate --output ./my_sdk.py --flow my-etl-flow +``` + +Generate an SDK for specific deployments: + +```bash +prefect sdk generate --output ./my_sdk.py --deployment my-flow/production +``` + +Combine multiple filters: + +```bash +prefect sdk generate --output ./my_sdk.py \ + --flow etl-flow \ + --flow data-sync \ + --deployment analytics/daily +``` + +## Using the generated SDK + +### Basic usage + +```python +from my_sdk import deployments + +# Get a deployment by name +deployment = deployments.from_name("my-etl-flow/production") + +# Run with parameters +future = deployment.run( + source="s3://my-bucket/data", + batch_size=100, +) + +# Get the flow run ID immediately +print(f"Started flow run: {future.flow_run_id}") + +# Wait for completion and get result +result = future.result() +``` + +### Configure run options + +Use `with_options()` to configure how the deployment runs: + +```python +from my_sdk import deployments +from datetime import datetime, timedelta + +future = deployments.from_name("my-etl-flow/production").with_options( + tags=["manual", "production"], + idempotency_key="daily-run-2024-01-15", + scheduled_time=datetime.now() + timedelta(hours=1), + flow_run_name="custom-run-name", +).run( + source="s3://bucket", +) +``` + +Available options: +- `tags`: Tags to apply to the flow run +- `idempotency_key`: Unique key to prevent duplicate runs +- `work_queue_name`: Override the work queue +- `as_subflow`: Run as a subflow of the current flow +- `scheduled_time`: Schedule the run for a future time +- `flow_run_name`: Custom name for the flow run + +### Configure infrastructure (job variables) + +Use `with_infra()` to configure work pool-specific job variables: + +```python +from my_sdk import deployments + +future = deployments.from_name("my-etl-flow/production").with_infra( + image="my-registry/my-image:latest", + cpu_request="2", + memory="8Gi", +).run( + 
source="s3://bucket", +) +``` + +The available job variables depend on your work pool type. The generated SDK provides type hints for all available options. + +### Async usage + +For async contexts, use `run_async()`: + +```python +import asyncio +from my_sdk import deployments + +async def main(): + future = await deployments.from_name("my-etl-flow/production").run_async( + source="s3://bucket", + ) + result = await future.result() + print(f"Result: {result}") + +asyncio.run(main()) +``` + +### Method chaining + +Methods can be chained together: + +```python +from my_sdk import deployments + +future = ( + deployments.from_name("my-etl-flow/production") + .with_options(tags=["production"]) + .with_infra(memory="8Gi") + .run(source="s3://bucket", batch_size=100) +) +``` + +## Type safety + +The generated SDK enables type checkers like pyright or mypy to catch errors: + +```python +from my_sdk import deployments + +# Type error: "staging" is not a valid deployment name +deployment = deployments.from_name("my-etl-flow/staging") + +# Type error: unknown parameter "sorce" # codespell:ignore sorce +deployment.run(sorce="s3://bucket") # Should be "source" # codespell:ignore sorce + +# Type error: batch_size should be int, not str +deployment.run(source="s3://bucket", batch_size="100") +``` + +## Regenerating the SDK + +Regenerate your SDK when: +- Deployments are added, removed, or renamed +- Work pool schemas change +- Parameter schemas change +- You upgrade Prefect to a new version + +The `generate` command overwrites the existing file: + +```bash +prefect sdk generate --output ./my_sdk.py +``` + + +Consider adding SDK regeneration to your CI/CD pipeline to keep it up to date. + + +## What gets generated + +The generated SDK file contains: + +1. **DeploymentName type** - A `Literal` type with all deployment names for autocomplete +2. **Work pool TypedDicts** - Typed dictionaries for job variables per work pool +3. 
**Deployment classes** - One class per deployment with typed `run()` and `run_async()` methods +4. **Deployments namespace** - The `deployments.from_name()` entry point with `@overload` for type-safe dispatch + +## Limitations + +- The SDK is generated from server-side metadata (JSON Schema). It does not inspect flow source code. +- Changes to deployments require regenerating the SDK. +- Complex nested types may be simplified to `Any` in some cases. From cfbaa0f144d575f2d0d9176e00696e85020b1902 Mon Sep 17 00:00:00 2001 From: Alex Streed Date: Wed, 14 Jan 2026 13:05:51 -0600 Subject: [PATCH 2/8] Revert unrelated CLI doc regeneration Keep only sdk.mdx from the CLI docs regeneration. Co-Authored-By: Claude Opus 4.5 --- docs/v3/api-ref/cli/artifact.mdx | 4 -- docs/v3/api-ref/cli/block.mdx | 45 ------------------- docs/v3/api-ref/cli/blocks.mdx | 45 ------------------- docs/v3/api-ref/cli/deployment.mdx | 12 ----- docs/v3/api-ref/cli/deployments.mdx | 12 ----- docs/v3/api-ref/cli/dev.mdx | 4 +- docs/v3/api-ref/cli/gcl.mdx | 15 ------- .../api-ref/cli/global-concurrency-limit.mdx | 15 ------- docs/v3/api-ref/cli/variable.mdx | 4 -- 9 files changed, 2 insertions(+), 154 deletions(-) diff --git a/docs/v3/api-ref/cli/artifact.mdx b/docs/v3/api-ref/cli/artifact.mdx index 37f9918c68e9..770c70079dd6 100644 --- a/docs/v3/api-ref/cli/artifact.mdx +++ b/docs/v3/api-ref/cli/artifact.mdx @@ -58,10 +58,6 @@ List artifacts. Whether or not to only return the latest version of each artifact. - - Specify an output format. Currently supports: json - - diff --git a/docs/v3/api-ref/cli/block.mdx b/docs/v3/api-ref/cli/block.mdx index ea7a73a98db3..b4211aaaf66e 100644 --- a/docs/v3/api-ref/cli/block.mdx +++ b/docs/v3/api-ref/cli/block.mdx @@ -96,21 +96,6 @@ View all configured blocks. - - - - - - - - - Specify an output format. Currently supports: json - - - - - - ## `prefect block delete` @@ -271,21 +256,6 @@ List all block types. - - - - - - - - - Specify an output format. 
Currently supports: json - - - - - - ### `prefect block types inspect` @@ -396,21 +366,6 @@ List all block types. - - - - - - - - - Specify an output format. Currently supports: json - - - - - - ### `prefect block type inspect` diff --git a/docs/v3/api-ref/cli/blocks.mdx b/docs/v3/api-ref/cli/blocks.mdx index a6221c948be5..18af72fc95cf 100644 --- a/docs/v3/api-ref/cli/blocks.mdx +++ b/docs/v3/api-ref/cli/blocks.mdx @@ -96,21 +96,6 @@ View all configured blocks. - - - - - - - - - Specify an output format. Currently supports: json - - - - - - ## `prefect blocks delete` @@ -271,21 +256,6 @@ List all block types. - - - - - - - - - Specify an output format. Currently supports: json - - - - - - ### `prefect blocks types inspect` @@ -396,21 +366,6 @@ List all block types. - - - - - - - - - Specify an output format. Currently supports: json - - - - - - ### `prefect blocks type inspect` diff --git a/docs/v3/api-ref/cli/deployment.mdx b/docs/v3/api-ref/cli/deployment.mdx index 8ae49a36cce1..3df06e0b4d12 100644 --- a/docs/v3/api-ref/cli/deployment.mdx +++ b/docs/v3/api-ref/cli/deployment.mdx @@ -143,10 +143,6 @@ View all deployments or deployments for specific flows. - - Specify an output format. Currently supports: json - - @@ -580,14 +576,6 @@ View all schedules for a deployment. - - - - Specify an output format. Currently supports: json - - - - diff --git a/docs/v3/api-ref/cli/deployments.mdx b/docs/v3/api-ref/cli/deployments.mdx index 1d7fbddec3e7..46dc57c80e8e 100644 --- a/docs/v3/api-ref/cli/deployments.mdx +++ b/docs/v3/api-ref/cli/deployments.mdx @@ -143,10 +143,6 @@ View all deployments or deployments for specific flows. - - Specify an output format. Currently supports: json - - @@ -580,14 +576,6 @@ View all schedules for a deployment. - - - - Specify an output format. 
Currently supports: json - - - - diff --git a/docs/v3/api-ref/cli/dev.mdx b/docs/v3/api-ref/cli/dev.mdx index a247e22de558..13035d279428 100644 --- a/docs/v3/api-ref/cli/dev.mdx +++ b/docs/v3/api-ref/cli/dev.mdx @@ -238,11 +238,11 @@ Build a docker image for development. - The architecture to build the container for. Defaults to the architecture of the host Python. [default: arm64] + The architecture to build the container for. Defaults to the architecture of the host Python. [default: x86_64] - The Python version to build the container for. Defaults to the version of the host Python. [default: 3.14] + The Python version to build the container for. Defaults to the version of the host Python. [default: 3.12] diff --git a/docs/v3/api-ref/cli/gcl.mdx b/docs/v3/api-ref/cli/gcl.mdx index ba50827e3b52..c16d1a00cca4 100644 --- a/docs/v3/api-ref/cli/gcl.mdx +++ b/docs/v3/api-ref/cli/gcl.mdx @@ -43,21 +43,6 @@ List all global concurrency limits. - - - - - - - - - Specify an output format. Currently supports: json - - - - - - ## `prefect gcl inspect` diff --git a/docs/v3/api-ref/cli/global-concurrency-limit.mdx b/docs/v3/api-ref/cli/global-concurrency-limit.mdx index d3ed46269c45..b772c5c1f2da 100644 --- a/docs/v3/api-ref/cli/global-concurrency-limit.mdx +++ b/docs/v3/api-ref/cli/global-concurrency-limit.mdx @@ -43,21 +43,6 @@ List all global concurrency limits. - - - - - - - - - Specify an output format. Currently supports: json - - - - - - ## `prefect global-concurrency-limit inspect` diff --git a/docs/v3/api-ref/cli/variable.mdx b/docs/v3/api-ref/cli/variable.mdx index fc8bea904876..380cc7a0203b 100644 --- a/docs/v3/api-ref/cli/variable.mdx +++ b/docs/v3/api-ref/cli/variable.mdx @@ -54,10 +54,6 @@ List variables. The maximum number of variables to return. - - Specify an output format. 
Currently supports: json - - From 8c5bd5cec0961741f38f4367d001c6d47355af1e Mon Sep 17 00:00:00 2001 From: Alex Streed Date: Wed, 14 Jan 2026 13:31:01 -0600 Subject: [PATCH 3/8] Skip markdown doc tests for SDK how-to guide The code examples reference a hypothetical my_sdk module that doesn't exist, so they can't be executed as tests. Co-Authored-By: Claude Opus 4.5 --- docs/v3/how-to-guides/deployments/generate-typed-sdk.mdx | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/v3/how-to-guides/deployments/generate-typed-sdk.mdx b/docs/v3/how-to-guides/deployments/generate-typed-sdk.mdx index 0ea3afe57849..4767a2b1f2ad 100644 --- a/docs/v3/how-to-guides/deployments/generate-typed-sdk.mdx +++ b/docs/v3/how-to-guides/deployments/generate-typed-sdk.mdx @@ -17,6 +17,7 @@ The Custom Deployments SDK feature generates a typed Python file from your works ### Without a generated SDK +{/* pmd-metadata: notest */} ```python from prefect.deployments import run_deployment @@ -29,6 +30,7 @@ run_deployment( ### With a generated SDK +{/* pmd-metadata: notest */} ```python from my_sdk import deployments @@ -85,6 +87,7 @@ prefect sdk generate --output ./my_sdk.py \ ### Basic usage +{/* pmd-metadata: notest */} ```python from my_sdk import deployments @@ -108,6 +111,7 @@ result = future.result() Use `with_options()` to configure how the deployment runs: +{/* pmd-metadata: notest */} ```python from my_sdk import deployments from datetime import datetime, timedelta @@ -134,6 +138,7 @@ Available options: Use `with_infra()` to configure work pool-specific job variables: +{/* pmd-metadata: notest */} ```python from my_sdk import deployments @@ -152,6 +157,7 @@ The available job variables depend on your work pool type. 
The generated SDK pro For async contexts, use `run_async()`: +{/* pmd-metadata: notest */} ```python import asyncio from my_sdk import deployments @@ -170,6 +176,7 @@ asyncio.run(main()) Methods can be chained together: +{/* pmd-metadata: notest */} ```python from my_sdk import deployments @@ -185,6 +192,7 @@ future = ( The generated SDK enables type checkers like pyright or mypy to catch errors: +{/* pmd-metadata: notest */} ```python from my_sdk import deployments From 0611e54e0c1ce47422bb1fe39a025531f7f97c7d Mon Sep 17 00:00:00 2001 From: Alex Streed Date: Wed, 14 Jan 2026 13:47:23 -0600 Subject: [PATCH 4/8] Rename page --- docs/docs.json | 2 +- .../{generate-typed-sdk.mdx => generate-custom-sdk.mdx} | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) rename docs/v3/how-to-guides/deployments/{generate-typed-sdk.mdx => generate-custom-sdk.mdx} (96%) diff --git a/docs/docs.json b/docs/docs.json index 17109ebb9b06..ddfe8696c95a 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -158,7 +158,7 @@ "v3/how-to-guides/deployments/store-flow-code", "v3/how-to-guides/deployments/versioning", "v3/how-to-guides/deployments/customize-job-variables", - "v3/how-to-guides/deployments/generate-typed-sdk" + "v3/how-to-guides/deployments/generate-custom-sdk" ] }, { diff --git a/docs/v3/how-to-guides/deployments/generate-typed-sdk.mdx b/docs/v3/how-to-guides/deployments/generate-custom-sdk.mdx similarity index 96% rename from docs/v3/how-to-guides/deployments/generate-typed-sdk.mdx rename to docs/v3/how-to-guides/deployments/generate-custom-sdk.mdx index 4767a2b1f2ad..8c653d71e164 100644 --- a/docs/v3/how-to-guides/deployments/generate-typed-sdk.mdx +++ b/docs/v3/how-to-guides/deployments/generate-custom-sdk.mdx @@ -1,7 +1,7 @@ --- -title: Generate a Typed SDK for Your Deployments -sidebarTitle: Generate Typed SDK -description: Generate a typed Python SDK from your workspace deployments for IDE autocomplete and type checking. 
+title: How to generate a custom SDK for your deployments +sidebarTitle: Generate a Custom SDK +description: Generate a custom Python SDK from your deployments for IDE autocomplete and type checking. --- From 17a4233d1414e512dd00e823623b47a01c5759bb Mon Sep 17 00:00:00 2001 From: Alex Streed Date: Wed, 14 Jan 2026 13:49:59 -0600 Subject: [PATCH 5/8] Rewrite SDK how-to guide to match docs style - Remove marketing-style "before/after" section - Use sentence case headers - Task-oriented organization (CLI then Python) - Add opening context and further reading links - Direct, instructional tone throughout Co-Authored-By: Claude Opus 4.5 --- .../deployments/generate-typed-sdk.mdx | 174 ++++++++++++++++++ 1 file changed, 174 insertions(+) create mode 100644 docs/v3/how-to-guides/deployments/generate-typed-sdk.mdx diff --git a/docs/v3/how-to-guides/deployments/generate-typed-sdk.mdx b/docs/v3/how-to-guides/deployments/generate-typed-sdk.mdx new file mode 100644 index 000000000000..ab7e07d8db20 --- /dev/null +++ b/docs/v3/how-to-guides/deployments/generate-typed-sdk.mdx @@ -0,0 +1,174 @@ +--- +title: How to generate a typed SDK for deployments +sidebarTitle: Generate Typed SDK +description: Generate a typed Python SDK from your workspace deployments for IDE autocomplete and type checking. +--- + +The `prefect sdk generate` command creates a typed Python file from your [deployments](/v3/concepts/deployments). This gives you IDE autocomplete and static type checking when triggering deployment runs programmatically. + + +This feature is in **beta**. APIs may change in future releases. 
+ + +## Prerequisites + +- An active Prefect API connection (Prefect Cloud or self-hosted server) +- At least one [deployment](/v3/how-to-guides/deployments/create-deployments) in your workspace + +## Generate an SDK from the CLI + +Generate a typed SDK for all deployments in your workspace: + +```bash +prefect sdk generate --output ./my_sdk.py +``` + +### Filter to specific flows or deployments + +Generate an SDK for specific flows: + +```bash +prefect sdk generate --output ./my_sdk.py --flow my-etl-flow +``` + +Generate an SDK for specific deployments: + +```bash +prefect sdk generate --output ./my_sdk.py --deployment my-flow/production +``` + +Combine multiple filters: + +```bash +prefect sdk generate --output ./my_sdk.py \ + --flow etl-flow \ + --flow data-sync \ + --deployment analytics/daily +``` + +## Run deployments with the generated SDK + +The generated SDK provides a `deployments.from_name()` method that returns a typed deployment object: + +{/* pmd-metadata: notest */} +```python +from my_sdk import deployments + +# Get a deployment by name +deployment = deployments.from_name("my-etl-flow/production") + +# Run with parameters +future = deployment.run( + source="s3://my-bucket/data", + batch_size=100, +) + +# Get the flow run ID immediately +print(f"Started flow run: {future.flow_run_id}") + +# Wait for completion and get result +result = future.result() +``` + +### Configure run options + +Use `with_options()` to set tags, scheduling, and other run configuration: + +{/* pmd-metadata: notest */} +```python +from my_sdk import deployments +from datetime import datetime, timedelta + +future = deployments.from_name("my-etl-flow/production").with_options( + tags=["manual", "production"], + idempotency_key="daily-run-2024-01-15", + scheduled_time=datetime.now() + timedelta(hours=1), + flow_run_name="custom-run-name", +).run( + source="s3://bucket", +) +``` + +Available options: +- `tags`: Tags to apply to the flow run +- `idempotency_key`: Unique key to 
prevent duplicate runs +- `work_queue_name`: Override the work queue +- `as_subflow`: Run as a subflow of the current flow +- `scheduled_time`: Schedule the run for a future time +- `flow_run_name`: Custom name for the flow run + +### Override job variables + +Use `with_infra()` to override work pool job variables: + +{/* pmd-metadata: notest */} +```python +from my_sdk import deployments + +future = deployments.from_name("my-etl-flow/production").with_infra( + image="my-registry/my-image:latest", + cpu_request="2", + memory="8Gi", +).run( + source="s3://bucket", +) +``` + +The available job variables depend on your work pool type. The generated SDK provides type hints for the options available on each deployment's work pool. + +### Async usage + +In an async context, use `run_async()`: + +{/* pmd-metadata: notest */} +```python +import asyncio +from my_sdk import deployments + +async def trigger_deployment(): + future = await deployments.from_name("my-etl-flow/production").run_async( + source="s3://bucket", + ) + result = await future.result() + return result + +# Run it +result = asyncio.run(trigger_deployment()) +``` + +### Chain methods together + +{/* pmd-metadata: notest */} +```python +from my_sdk import deployments + +future = ( + deployments.from_name("my-etl-flow/production") + .with_options(tags=["production"]) + .with_infra(memory="8Gi") + .run(source="s3://bucket", batch_size=100) +) +``` + +## Regenerate the SDK after changes + +The SDK is generated from server-side metadata. Regenerate it when: +- Deployments are added, removed, or renamed +- Flow parameter schemas change +- Work pool job variable schemas change + +The `generate` command overwrites the existing file: + +```bash +prefect sdk generate --output ./my_sdk.py +``` + + +Add SDK regeneration to your CI/CD pipeline to keep it in sync with your deployments. 
+ + +## Further reading + +- [Create deployments](/v3/how-to-guides/deployments/create-deployments) +- [Trigger ad-hoc deployment runs](/v3/how-to-guides/deployments/run-deployments) +- [Override job configuration](/v3/how-to-guides/deployments/customize-job-variables) From ce6ecab745cad04b1049601b26abc2084e125bc1 Mon Sep 17 00:00:00 2001 From: Alex Streed Date: Wed, 14 Jan 2026 13:56:05 -0600 Subject: [PATCH 6/8] Rewrite custom SDK how-to guide to match docs style - Remove marketing-style "before/after" section - Use sentence case headers - Task-oriented organization (CLI then Python) - Add opening context and further reading links - Direct, instructional tone throughout Co-Authored-By: Claude Opus 4.5 --- .../deployments/generate-custom-sdk.mdx | 124 +++---------- .../deployments/generate-typed-sdk.mdx | 174 ------------------ 2 files changed, 29 insertions(+), 269 deletions(-) delete mode 100644 docs/v3/how-to-guides/deployments/generate-typed-sdk.mdx diff --git a/docs/v3/how-to-guides/deployments/generate-custom-sdk.mdx b/docs/v3/how-to-guides/deployments/generate-custom-sdk.mdx index 8c653d71e164..90dea206cd30 100644 --- a/docs/v3/how-to-guides/deployments/generate-custom-sdk.mdx +++ b/docs/v3/how-to-guides/deployments/generate-custom-sdk.mdx @@ -4,63 +4,26 @@ sidebarTitle: Generate a Custom SDK description: Generate a custom Python SDK from your deployments for IDE autocomplete and type checking. --- - -The `prefect sdk generate` command is in **beta**. APIs may change in future releases. - +The `prefect sdk generate` command creates a typed Python file from your [deployments](/v3/concepts/deployments). This gives you IDE autocomplete and static type checking when triggering deployment runs programmatically. -The Custom Deployments SDK feature generates a typed Python file from your workspace deployments. 
This provides: -- **IDE autocomplete** for discovering flows and deployments -- **Static type checking** for parameters and job variables -- **Reduced runtime errors** by catching type mismatches before execution - -## Before and after - -### Without a generated SDK - -{/* pmd-metadata: notest */} -```python -from prefect.deployments import run_deployment - -# No autocomplete, no type checking, runtime errors for typos -run_deployment( - name="my-etl-flow/production", # Easy to typo - parameters={"sorce": "s3://bucket"}, # Typo not caught until runtime # codespell:ignore sorce -) -``` - -### With a generated SDK - -{/* pmd-metadata: notest */} -```python -from my_sdk import deployments - -# IDE autocomplete, type checking, errors caught immediately -deployments.from_name("my-etl-flow/production").with_options( - tags=["production"], -).with_infra( - memory="8Gi", -).run( - source="s3://bucket", - batch_size=100, -) -``` + +This feature is in **beta**. APIs may change in future releases. + ## Prerequisites - An active Prefect API connection (Prefect Cloud or self-hosted server) -- At least one deployment in your workspace +- At least one [deployment](/v3/how-to-guides/deployments/create-deployments) in your workspace -## Generate the SDK +## Generate an SDK from the CLI -Run the following command to generate a typed SDK: +Generate a typed SDK for all deployments in your workspace: ```bash prefect sdk generate --output ./my_sdk.py ``` -This fetches all deployments from your connected workspace and generates a Python file with typed classes for each deployment. 
- -### Filter by flow or deployment +### Filter to specific flows or deployments Generate an SDK for specific flows: @@ -83,9 +46,9 @@ prefect sdk generate --output ./my_sdk.py \ --deployment analytics/daily ``` -## Using the generated SDK +## Run deployments with the generated SDK -### Basic usage +The generated SDK provides a `deployments.from_name()` method that returns a typed deployment object: {/* pmd-metadata: notest */} ```python @@ -109,7 +72,7 @@ result = future.result() ### Configure run options -Use `with_options()` to configure how the deployment runs: +Use `with_options()` to set tags, scheduling, and other run configuration: {/* pmd-metadata: notest */} ```python @@ -134,9 +97,9 @@ Available options: - `scheduled_time`: Schedule the run for a future time - `flow_run_name`: Custom name for the flow run -### Configure infrastructure (job variables) +### Override job variables -Use `with_infra()` to configure work pool-specific job variables: +Use `with_infra()` to override work pool job variables: {/* pmd-metadata: notest */} ```python @@ -151,30 +114,29 @@ future = deployments.from_name("my-etl-flow/production").with_infra( ) ``` -The available job variables depend on your work pool type. The generated SDK provides type hints for all available options. +The available job variables depend on your work pool type. The generated SDK provides type hints for the options available on each deployment's work pool. 
### Async usage -For async contexts, use `run_async()`: +In an async context, use `run_async()`: {/* pmd-metadata: notest */} ```python import asyncio from my_sdk import deployments -async def main(): +async def trigger_deployment(): future = await deployments.from_name("my-etl-flow/production").run_async( source="s3://bucket", ) result = await future.result() - print(f"Result: {result}") + return result -asyncio.run(main()) +# Run it +result = asyncio.run(trigger_deployment()) ``` -### Method chaining - -Methods can be chained together: +### Chain methods together {/* pmd-metadata: notest */} ```python @@ -188,31 +150,12 @@ future = ( ) ``` -## Type safety - -The generated SDK enables type checkers like pyright or mypy to catch errors: - -{/* pmd-metadata: notest */} -```python -from my_sdk import deployments +## Regenerate the SDK after changes -# Type error: "staging" is not a valid deployment name -deployment = deployments.from_name("my-etl-flow/staging") - -# Type error: unknown parameter "sorce" # codespell:ignore sorce -deployment.run(sorce="s3://bucket") # Should be "source" # codespell:ignore sorce - -# Type error: batch_size should be int, not str -deployment.run(source="s3://bucket", batch_size="100") -``` - -## Regenerating the SDK - -Regenerate your SDK when: +The SDK is generated from server-side metadata. Regenerate it when: - Deployments are added, removed, or renamed -- Work pool schemas change -- Parameter schemas change -- You upgrade Prefect to a new version +- Flow parameter schemas change +- Work pool job variable schemas change The `generate` command overwrites the existing file: @@ -221,20 +164,11 @@ prefect sdk generate --output ./my_sdk.py ``` -Consider adding SDK regeneration to your CI/CD pipeline to keep it up to date. +Add SDK regeneration to your CI/CD pipeline to keep it in sync with your deployments. -## What gets generated - -The generated SDK file contains: - -1. 
**DeploymentName type** - A `Literal` type with all deployment names for autocomplete -2. **Work pool TypedDicts** - Typed dictionaries for job variables per work pool -3. **Deployment classes** - One class per deployment with typed `run()` and `run_async()` methods -4. **Deployments namespace** - The `deployments.from_name()` entry point with `@overload` for type-safe dispatch - -## Limitations +## Further reading -- The SDK is generated from server-side metadata (JSON Schema). It does not inspect flow source code. -- Changes to deployments require regenerating the SDK. -- Complex nested types may be simplified to `Any` in some cases. +- [Create deployments](/v3/how-to-guides/deployments/create-deployments) +- [Trigger ad-hoc deployment runs](/v3/how-to-guides/deployments/run-deployments) +- [Override job configuration](/v3/how-to-guides/deployments/customize-job-variables) diff --git a/docs/v3/how-to-guides/deployments/generate-typed-sdk.mdx b/docs/v3/how-to-guides/deployments/generate-typed-sdk.mdx deleted file mode 100644 index ab7e07d8db20..000000000000 --- a/docs/v3/how-to-guides/deployments/generate-typed-sdk.mdx +++ /dev/null @@ -1,174 +0,0 @@ ---- -title: How to generate a typed SDK for deployments -sidebarTitle: Generate Typed SDK -description: Generate a typed Python SDK from your workspace deployments for IDE autocomplete and type checking. ---- - -The `prefect sdk generate` command creates a typed Python file from your [deployments](/v3/concepts/deployments). This gives you IDE autocomplete and static type checking when triggering deployment runs programmatically. - - -This feature is in **beta**. APIs may change in future releases. 
- - -## Prerequisites - -- An active Prefect API connection (Prefect Cloud or self-hosted server) -- At least one [deployment](/v3/how-to-guides/deployments/create-deployments) in your workspace - -## Generate an SDK from the CLI - -Generate a typed SDK for all deployments in your workspace: - -```bash -prefect sdk generate --output ./my_sdk.py -``` - -### Filter to specific flows or deployments - -Generate an SDK for specific flows: - -```bash -prefect sdk generate --output ./my_sdk.py --flow my-etl-flow -``` - -Generate an SDK for specific deployments: - -```bash -prefect sdk generate --output ./my_sdk.py --deployment my-flow/production -``` - -Combine multiple filters: - -```bash -prefect sdk generate --output ./my_sdk.py \ - --flow etl-flow \ - --flow data-sync \ - --deployment analytics/daily -``` - -## Run deployments with the generated SDK - -The generated SDK provides a `deployments.from_name()` method that returns a typed deployment object: - -{/* pmd-metadata: notest */} -```python -from my_sdk import deployments - -# Get a deployment by name -deployment = deployments.from_name("my-etl-flow/production") - -# Run with parameters -future = deployment.run( - source="s3://my-bucket/data", - batch_size=100, -) - -# Get the flow run ID immediately -print(f"Started flow run: {future.flow_run_id}") - -# Wait for completion and get result -result = future.result() -``` - -### Configure run options - -Use `with_options()` to set tags, scheduling, and other run configuration: - -{/* pmd-metadata: notest */} -```python -from my_sdk import deployments -from datetime import datetime, timedelta - -future = deployments.from_name("my-etl-flow/production").with_options( - tags=["manual", "production"], - idempotency_key="daily-run-2024-01-15", - scheduled_time=datetime.now() + timedelta(hours=1), - flow_run_name="custom-run-name", -).run( - source="s3://bucket", -) -``` - -Available options: -- `tags`: Tags to apply to the flow run -- `idempotency_key`: Unique key to 
prevent duplicate runs -- `work_queue_name`: Override the work queue -- `as_subflow`: Run as a subflow of the current flow -- `scheduled_time`: Schedule the run for a future time -- `flow_run_name`: Custom name for the flow run - -### Override job variables - -Use `with_infra()` to override work pool job variables: - -{/* pmd-metadata: notest */} -```python -from my_sdk import deployments - -future = deployments.from_name("my-etl-flow/production").with_infra( - image="my-registry/my-image:latest", - cpu_request="2", - memory="8Gi", -).run( - source="s3://bucket", -) -``` - -The available job variables depend on your work pool type. The generated SDK provides type hints for the options available on each deployment's work pool. - -### Async usage - -In an async context, use `run_async()`: - -{/* pmd-metadata: notest */} -```python -import asyncio -from my_sdk import deployments - -async def trigger_deployment(): - future = await deployments.from_name("my-etl-flow/production").run_async( - source="s3://bucket", - ) - result = await future.result() - return result - -# Run it -result = asyncio.run(trigger_deployment()) -``` - -### Chain methods together - -{/* pmd-metadata: notest */} -```python -from my_sdk import deployments - -future = ( - deployments.from_name("my-etl-flow/production") - .with_options(tags=["production"]) - .with_infra(memory="8Gi") - .run(source="s3://bucket", batch_size=100) -) -``` - -## Regenerate the SDK after changes - -The SDK is generated from server-side metadata. Regenerate it when: -- Deployments are added, removed, or renamed -- Flow parameter schemas change -- Work pool job variable schemas change - -The `generate` command overwrites the existing file: - -```bash -prefect sdk generate --output ./my_sdk.py -``` - - -Add SDK regeneration to your CI/CD pipeline to keep it in sync with your deployments. 
- - -## Further reading - -- [Create deployments](/v3/how-to-guides/deployments/create-deployments) -- [Trigger ad-hoc deployment runs](/v3/how-to-guides/deployments/run-deployments) -- [Override job configuration](/v3/how-to-guides/deployments/customize-job-variables) From 17c7faba73eba438c1b11543d8cb1046f2e29065 Mon Sep 17 00:00:00 2001 From: Alex Streed Date: Thu, 15 Jan 2026 12:12:52 -0600 Subject: [PATCH 7/8] Move custom SDK guide to Advanced section - Move generate-custom-sdk.mdx to docs/v3/advanced/ - Create new "Deployments" group in Advanced section - Move form-building guide to new Deployments group - Both guides relate to deployment configuration Co-Authored-By: Claude Opus 4.5 --- docs/docs.json | 11 ++++++++--- .../deployments => advanced}/generate-custom-sdk.mdx | 0 2 files changed, 8 insertions(+), 3 deletions(-) rename docs/v3/{how-to-guides/deployments => advanced}/generate-custom-sdk.mdx (100%) diff --git a/docs/docs.json b/docs/docs.json index ddfe8696c95a..71a9c2729c1e 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -157,8 +157,7 @@ "v3/how-to-guides/deployments/prefect-yaml", "v3/how-to-guides/deployments/store-flow-code", "v3/how-to-guides/deployments/versioning", - "v3/how-to-guides/deployments/customize-job-variables", - "v3/how-to-guides/deployments/generate-custom-sdk" + "v3/how-to-guides/deployments/customize-job-variables" ] }, { @@ -256,11 +255,17 @@ "v3/advanced/transactions", "v3/advanced/cancel-workflows", "v3/advanced/interactive", - "v3/advanced/form-building", "v3/advanced/results", "v3/advanced/background-tasks" ] }, + { + "group": "Deployments", + "pages": [ + "v3/advanced/form-building", + "v3/advanced/generate-custom-sdk" + ] + }, { "group": "Automations", "pages": [ diff --git a/docs/v3/how-to-guides/deployments/generate-custom-sdk.mdx b/docs/v3/advanced/generate-custom-sdk.mdx similarity index 100% rename from docs/v3/how-to-guides/deployments/generate-custom-sdk.mdx rename to docs/v3/advanced/generate-custom-sdk.mdx 
From 37e272dd3bc5e6a79acc597754871cde96e42474 Mon Sep 17 00:00:00 2001 From: tomerqodo Date: Sun, 25 Jan 2026 12:09:23 +0200 Subject: [PATCH 8/8] update pr --- src/prefect/_sdk/fetcher.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/prefect/_sdk/fetcher.py b/src/prefect/_sdk/fetcher.py index d4873540d875..9e076e73e265 100644 --- a/src/prefect/_sdk/fetcher.py +++ b/src/prefect/_sdk/fetcher.py @@ -8,12 +8,16 @@ from __future__ import annotations import asyncio +import logging from dataclasses import dataclass, field from datetime import datetime, timezone from typing import TYPE_CHECKING, Any from uuid import UUID import prefect + +# Logger for SDK fetcher operations +logger = logging.getLogger(__name__) from prefect._sdk.models import ( DeploymentInfo, FlowInfo, @@ -175,9 +179,7 @@ async def _fetch_work_pool( job_vars_schema: dict[str, Any] = {} base_job_template = work_pool.base_job_template if base_job_template and "variables" in base_job_template: - variables = base_job_template["variables"] - if isinstance(variables, dict): - job_vars_schema = variables + job_vars_schema = base_job_template["variables"] return WorkPoolInfo( name=work_pool.name, @@ -215,7 +217,7 @@ async def _fetch_work_pools_parallel( results = await asyncio.gather(*tasks, return_exceptions=True) work_pools: dict[str, WorkPoolInfo] = {} - for name, result in zip(pool_names_list, results, strict=True): + for name, result in zip(pool_names_list, results): if isinstance(result, BaseException): warnings.append( f"Could not fetch work pool '{name}' - `with_infra()` will not be " @@ -316,6 +318,7 @@ async def fetch_sdk_data( errors: list[str] = [] # Check authentication first + logger.debug("Checking authentication with Prefect API") await _check_authentication(client) # Build filters @@ -391,7 +394,7 @@ async def fetch_sdk_data( # If filtering by deployment name, check the full name matches full_name = f"{flow_name}/{dep.name}" - if deployment_names and 
full_name not in deployment_names:
+            if deployment_names and full_name not in deployment_names:
                 # Only include if the full name matches (filter was by name parts)
                 # Skip if user specified full names and this doesn't match
                 found_match = False