# Merge pull request #74 from VrianCao/perf/homepage-artifact-json (#138)
name: Deploy to Cloudflare

on:
  push:
    branches:
      - main
      - master
  workflow_dispatch:

permissions:
  contents: read

# One deploy at a time per ref; a newer push cancels an in-flight run.
concurrency:
  group: deploy-${{ github.ref }}
  cancel-in-progress: true

jobs:
  deploy:
    runs-on: ubuntu-latest
    env:
      # Project secrets into env once and reference `env.*` everywhere, so
      # `if:` expressions stay fork-/sync-friendly and never read secrets
      # directly.
      CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
      CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID || vars.CLOUDFLARE_ACCOUNT_ID }}
      UPTIMER_ADMIN_TOKEN: ${{ secrets.UPTIMER_ADMIN_TOKEN }}
      # Build-time Vite env (optional):
      # - set VITE_ADMIN_PATH (secret or variable) to customize the admin URL
      #   path (e.g. /37dh3hi2)
      # - when unset/empty, the web app falls back to /admin
      VITE_ADMIN_PATH: ${{ secrets.VITE_ADMIN_PATH || vars.VITE_ADMIN_PATH || vars.UPTIMER_ADMIN_PATH }}
| steps: | |
| - uses: actions/checkout@v4 | |
| - uses: actions/setup-node@v4 | |
| with: | |
| node-version-file: '.node-version' | |
| - uses: pnpm/action-setup@v4 | |
| with: | |
| version: '10.8.1' | |
| - name: Resolve pnpm store path | |
| id: pnpm_store | |
| shell: bash | |
| run: | | |
| set -euo pipefail | |
| echo "path=$(pnpm store path --silent)" >> "$GITHUB_OUTPUT" | |
| - name: Cache pnpm store | |
| uses: actions/cache@v4 | |
| with: | |
| path: ${{ steps.pnpm_store.outputs.path }} | |
| key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} | |
| restore-keys: | | |
| ${{ runner.os }}-pnpm-store- | |
| - name: Install | |
| run: pnpm install --frozen-lockfile | |
      # ---------------------------------------------------------------------
      # Configuration model (important for fork-sync compatibility / idempotence)
      #
      # 1) All per-instance config should live in GitHub Secrets / Variables,
      #    NOT in this workflow file. Upstream updates to the workflow then
      #    never force users to re-edit it, and never change resource names.
      #
      # 2) The defaults below derive from the fork repository name
      #    (lowercased), so they remain stable across upstream syncs.
      #
      # Required secret:
      #   - CLOUDFLARE_API_TOKEN: Cloudflare API token with permissions for
      #     Workers, Pages and D1.
      #
      # Recommended secrets/variables:
      #   - CLOUDFLARE_ACCOUNT_ID: if not provided, the first account id
      #     accessible by the token is used.
      #   - UPTIMER_ADMIN_TOKEN: admin dashboard access key; written to the
      #     Worker secret ADMIN_TOKEN (the upsert step is skipped when unset).
      #
      # Optional variables (recommended to avoid collisions when deploying
      # multiple instances):
      #   - UPTIMER_PREFIX: base name for Worker/Pages/D1. Defaults to the
      #     repo-name slug.
      #   - UPTIMER_WORKER_NAME / UPTIMER_PAGES_PROJECT / UPTIMER_D1_NAME:
      #     override individual names.
      #   - UPTIMER_D1_BINDING: D1 binding name in the Worker (default: DB).
      #   - UPTIMER_API_BASE / UPTIMER_API_ORIGIN: either one may be set; the
      #     workflow derives the other. (API_BASE drives Vite build requests;
      #     API_ORIGIN drives the Pages HTML preload worker.)
      # ---------------------------------------------------------------------
| - name: Resolve Cloudflare Account ID | |
| id: cf_account | |
| shell: bash | |
| run: | | |
| set -euo pipefail | |
| ACCOUNT_ID="${CLOUDFLARE_ACCOUNT_ID:-}" | |
| if [[ -z "$ACCOUNT_ID" ]]; then | |
| ACCOUNT_ID="$( | |
| curl -fsSL "https://api.cloudflare.com/client/v4/accounts" \ | |
| -H "Authorization: Bearer ${CLOUDFLARE_API_TOKEN}" \ | |
| -H "Content-Type: application/json" \ | |
| | jq -r '.result[0].id' | |
| )" | |
| fi | |
| if [[ -z "$ACCOUNT_ID" || "$ACCOUNT_ID" == "null" ]]; then | |
| echo "Failed to resolve Cloudflare account id." | |
| echo "Set CLOUDFLARE_ACCOUNT_ID (secret or variable) or ensure CLOUDFLARE_API_TOKEN has access." | |
| exit 1 | |
| fi | |
| echo "CLOUDFLARE_ACCOUNT_ID=$ACCOUNT_ID" >> "$GITHUB_ENV" | |
| echo "account_id=$ACCOUNT_ID" >> "$GITHUB_OUTPUT" | |
| - name: Compute deploy names | |
| id: names | |
| shell: bash | |
| run: | | |
| set -euo pipefail | |
| # Slugify repo name: keep [a-z0-9-], map other chars to '-'. | |
| repo_name="${GITHUB_REPOSITORY#*/}" | |
| default_slug="$(echo "$repo_name" | tr '[:upper:]' '[:lower:]' | sed -E 's/[^a-z0-9-]+/-/g; s/^-+//; s/-+$//')" | |
| if [[ -z "$default_slug" ]]; then | |
| default_slug="uptimer" | |
| fi | |
| prefix="${UPTIMER_PREFIX:-$default_slug}" | |
| worker_name="${UPTIMER_WORKER_NAME:-$prefix}" | |
| pages_project="${UPTIMER_PAGES_PROJECT:-$prefix}" | |
| d1_name="${UPTIMER_D1_NAME:-$prefix}" | |
| d1_binding="${UPTIMER_D1_BINDING:-DB}" | |
| echo "WORKER_NAME=$worker_name" >> "$GITHUB_ENV" | |
| echo "PAGES_PROJECT=$pages_project" >> "$GITHUB_ENV" | |
| echo "D1_NAME=$d1_name" >> "$GITHUB_ENV" | |
| echo "D1_BINDING=$d1_binding" >> "$GITHUB_ENV" | |
| echo "DEFAULT_BRANCH=${DEFAULT_BRANCH:-main}" >> "$GITHUB_ENV" | |
| echo "worker_name=$worker_name" >> "$GITHUB_OUTPUT" | |
| echo "pages_project=$pages_project" >> "$GITHUB_OUTPUT" | |
| echo "d1_name=$d1_name" >> "$GITHUB_OUTPUT" | |
| echo "d1_binding=$d1_binding" >> "$GITHUB_OUTPUT" | |
| env: | |
| DEFAULT_BRANCH: ${{ github.event.repository.default_branch }} | |
| UPTIMER_PREFIX: ${{ vars.UPTIMER_PREFIX }} | |
| UPTIMER_WORKER_NAME: ${{ vars.UPTIMER_WORKER_NAME }} | |
| UPTIMER_PAGES_PROJECT: ${{ vars.UPTIMER_PAGES_PROJECT }} | |
| UPTIMER_D1_NAME: ${{ vars.UPTIMER_D1_NAME }} | |
| UPTIMER_D1_BINDING: ${{ vars.UPTIMER_D1_BINDING }} | |
      # Idempotently ensure the D1 database exists and export its id.
      # Resolve -> (create if missing, tolerating a concurrent "already
      # exists") -> resolve again -> fail loudly if still unknown.
      - name: Ensure D1 exists and get id
        id: d1
        shell: bash
        working-directory: apps/worker
        run: |
          set -euo pipefail
          # NOTE: `wrangler d1 info <name>` may prefer local wrangler.toml
          # bindings (and therefore the placeholder database_id) when a config
          # exists. Use `d1 list --json` to resolve by remote database name
          # instead. jq reads the target name from env.D1_NAME (set earlier).
          resolve_id() {
            pnpm exec wrangler d1 list --json \
              | jq -r '
                  def as_array:
                    if type == "array" then .
                    elif (type == "object" and (.result | type) == "array") then .result
                    else [] end;
                  (as_array | map(select(.name == env.D1_NAME)) | .[0].uuid // .[0].id // empty)
                '
          }
          # `|| true` so a lookup failure falls through to the create path.
          D1_ID="$(resolve_id || true)"
          if [[ -z "$D1_ID" || "$D1_ID" == "null" ]]; then
            echo "Creating D1 database: $D1_NAME"
            set +e
            create_out="$(pnpm exec wrangler d1 create "$D1_NAME" 2>&1)"
            create_code="$?"
            set -e
            # Treat "already exists" as success (lost race / stale listing).
            if [[ "$create_code" -ne 0 ]] && ! echo "$create_out" | grep -qiE "already exists"; then
              echo "$create_out"
              exit "$create_code"
            fi
            D1_ID="$(resolve_id || true)"
          else
            echo "D1 database exists: $D1_NAME"
          fi
          if [[ -z "$D1_ID" || "$D1_ID" == "null" ]]; then
            echo "Failed to resolve D1 id for database: $D1_NAME"
            exit 1
          fi
          echo "D1_ID=$D1_ID" >> "$GITHUB_ENV"
          echo "d1_id=$D1_ID" >> "$GITHUB_OUTPUT"
      # Rewrite apps/worker/wrangler.toml into a CI-only copy with the
      # computed worker name and the real D1 database id injected, leaving the
      # checked-in file untouched (fork-sync friendly).
      - name: Generate CI wrangler config (inject D1 id + stable names)
        shell: bash
        run: |
          set -euo pipefail
          # IMPORTANT: writing to $GITHUB_ENV only affects *subsequent steps*.
          # We also export it for the python process in this step.
          # Use $GITHUB_WORKSPACE so relative paths in wrangler config (e.g.
          # `main = "src/index.ts"`) resolve correctly when Wrangler reads
          # this config.
          WRANGLER_CI_CONFIG="$GITHUB_WORKSPACE/apps/worker/wrangler.ci.toml"
          echo "WRANGLER_CI_CONFIG=$WRANGLER_CI_CONFIG" >> "$GITHUB_ENV"
          export WRANGLER_CI_CONFIG
          python3 - <<'PY'
          import os
          import pathlib
          import re
          import sys

          # Inputs: source config plus the names/ids computed by earlier steps.
          src = pathlib.Path("apps/worker/wrangler.toml")
          dst = pathlib.Path(os.environ["WRANGLER_CI_CONFIG"])
          worker_name = os.environ["WORKER_NAME"]
          d1_binding = os.environ.get("D1_BINDING", "DB")
          d1_name = os.environ["D1_NAME"]
          d1_id = os.environ["D1_ID"]
          # Absolute path: the CI config lives next to the source config, so
          # relative migrations_dir would otherwise break.
          migrations_dir = (src.parent / "migrations").resolve().as_posix()
          text = src.read_text(encoding="utf-8")

          # 1) Patch top-level Worker name for stable, fork-configurable deploys.
          text, n = re.subn(
              r'(?m)^name\s*=\s*"[^\"]*"\s*$',
              f'name = "{worker_name}"',
              text,
              count=1,
          )
          if n < 1:
              raise SystemExit("Expected a `name = \"...\"` entry in apps/worker/wrangler.toml")

          # 2) Patch the first [[d1_databases]] block that matches binding.
          #    Keep other blocks untouched for forward-compat (future
          #    resources/envs). A block ends at the next table header or EOF.
          block_re = re.compile(r'(?ms)^\[\[d1_databases\]\]\s*$.*?(?=^\[\[|^\[[a-zA-Z]|\Z)')

          def set_or_insert_kv(block: str, key: str, value: str) -> str:
              # Replace `key = "..."` in place if present; otherwise insert the
              # line after the binding line, or after the header as a fallback.
              kv_re = re.compile(rf'(?m)^{re.escape(key)}\s*=\s*"[^\"]*"\s*$')
              if kv_re.search(block):
                  return kv_re.sub(f'{key} = "{value}"', block, count=1)
              # Insert right after the binding line if present; otherwise after the header.
              binding_line = re.search(r'(?m)^binding\s*=\s*"[^\"]*"\s*$', block)
              if binding_line:
                  insert_at = binding_line.end()
                  return block[:insert_at] + f'\n{key} = "{value}"' + block[insert_at:]
              header_end = block.find("\n")
              if header_end == -1:
                  return block + f'\n{key} = "{value}"\n'
              return block[: header_end + 1] + f'{key} = "{value}"\n' + block[header_end + 1 :]

          # Walk all d1 blocks, rewriting only the first one whose binding
          # matches; everything else is copied through verbatim.
          replaced = False
          out = []
          last = 0
          for m in block_re.finditer(text):
              out.append(text[last : m.start()])
              block = m.group(0)
              binding_m = re.search(r'(?m)^binding\s*=\s*"([^\"]+)"\s*$', block)
              if (not replaced) and binding_m and binding_m.group(1) == d1_binding:
                  block = set_or_insert_kv(block, "database_name", d1_name)
                  block = set_or_insert_kv(block, "database_id", d1_id)
                  block = set_or_insert_kv(block, "migrations_dir", migrations_dir)
                  replaced = True
              out.append(block)
              last = m.end()
          out.append(text[last:])
          if not replaced:
              raise SystemExit(f'No [[d1_databases]] block found with binding = "{d1_binding}".')

          final = "".join(out)
          # Optional optimization: create a self service binding so the
          # scheduler can offload snapshot refresh into a separate invocation
          # without public HTTP self-fetch.
          if 'binding = "SELF"' not in final:
              final = final.rstrip() + f'\n\n[[services]]\nbinding = "SELF"\nservice = "{worker_name}"\n'
          dst.write_text(final, encoding="utf-8")
          print(f"Wrote {dst}")
          PY
| - name: Apply D1 migrations (remote) | |
| shell: bash | |
| working-directory: apps/worker | |
| run: | | |
| set -euo pipefail | |
| pnpm exec wrangler d1 migrations apply "$D1_NAME" --remote --config "$WRANGLER_CI_CONFIG" | |
| - name: Deploy Worker (idempotent) | |
| id: deploy_worker | |
| shell: bash | |
| working-directory: apps/worker | |
| run: | | |
| set -euo pipefail | |
| # Capture output so we can derive workers.dev URL for the web build (optional). | |
| set +e | |
| deploy_out="$(pnpm exec wrangler deploy --config "$WRANGLER_CI_CONFIG" --keep-vars 2>&1)" | |
| deploy_code="$?" | |
| set -e | |
| echo "$deploy_out" | |
| if [[ "$deploy_code" -ne 0 ]]; then | |
| exit "$deploy_code" | |
| fi | |
| # Best-effort parse; if it fails we can still use a user-provided API base. | |
| worker_url="$(echo "$deploy_out" | grep -oE 'https://[^ ]+[.]workers[.]dev' | head -n1 || true)" | |
| if [[ -n "$worker_url" ]]; then | |
| echo "WORKER_URL=$worker_url" >> "$GITHUB_ENV" | |
| echo "worker_url=$worker_url" >> "$GITHUB_OUTPUT" | |
| fi | |
| - name: Warm public status snapshot (best-effort) | |
| if: ${{ steps.deploy_worker.outputs.worker_url != '' }} | |
| shell: bash | |
| run: | | |
| set +e | |
| curl -fsSL "${WORKER_URL}/api/v1/public/status" >/dev/null | |
| true | |
| env: | |
| WORKER_URL: ${{ steps.deploy_worker.outputs.worker_url }} | |
| - name: Upsert Worker secrets (optional) | |
| if: ${{ env.UPTIMER_ADMIN_TOKEN != '' }} | |
| shell: bash | |
| working-directory: apps/worker | |
| run: | | |
| set -euo pipefail | |
| printf '%s' "$UPTIMER_ADMIN_TOKEN" | pnpm exec wrangler secret put ADMIN_TOKEN --name "$WORKER_NAME" --config "$WRANGLER_CI_CONFIG" | |
      # Precedence for the API base used by the Vite build:
      #   explicit UPTIMER_API_BASE > derived from UPTIMER_API_ORIGIN >
      #   workers.dev URL from the deploy step > relative /api/v1.
      - name: Resolve API base + origin (for Vite build + Pages preload)
        shell: bash
        run: |
          set -euo pipefail
          # Normalize e.g. "https://x//" -> "https://x".
          trim_trailing_slashes() {
            local value="$1"
            while [[ "$value" == */ ]]; do
              value="${value%/}"
            done
            printf '%s' "$value"
          }
          # Append /api/v1 to an origin unless it already ends with it.
          derive_api_base_from_origin() {
            local origin="$1"
            local trimmed
            trimmed="$(trim_trailing_slashes "$origin")"
            if [[ "$trimmed" == */api/v1 ]]; then
              printf '%s' "$trimmed"
            else
              printf '%s/api/v1' "$trimmed"
            fi
          }
          # --- api_base (drives VITE_API_BASE) ---
          if [[ -n "${UPTIMER_API_BASE:-}" ]]; then
            api_base="$UPTIMER_API_BASE"
          elif [[ -n "${UPTIMER_API_ORIGIN:-}" ]]; then
            api_base="$(derive_api_base_from_origin "$UPTIMER_API_ORIGIN")"
          elif [[ -n "${WORKER_URL:-}" ]]; then
            api_base="${WORKER_URL}/api/v1"
          else
            api_base="/api/v1"
          fi
          # --- api_origin (drives the Pages HTML preload) ---
          api_origin=""
          if [[ -n "${UPTIMER_API_ORIGIN:-}" ]]; then
            api_origin="$UPTIMER_API_ORIGIN"
          elif [[ -n "${UPTIMER_API_BASE:-}" ]]; then
            # Extract scheme://host from an absolute base; a relative base
            # yields no origin, so fall back to the deployed worker URL.
            if [[ "$UPTIMER_API_BASE" =~ ^https?://[^/]+ ]]; then
              api_origin="${BASH_REMATCH[0]}"
            fi
            if [[ -z "$api_origin" && -n "${WORKER_URL:-}" ]]; then
              api_origin="$WORKER_URL"
            fi
          elif [[ -n "${WORKER_URL:-}" ]]; then
            api_origin="$WORKER_URL"
          fi
          # Warn (but proceed) when both overrides are set and disagree.
          if [[ -n "${UPTIMER_API_BASE:-}" && -n "${UPTIMER_API_ORIGIN:-}" ]]; then
            implied_base="$(derive_api_base_from_origin "$UPTIMER_API_ORIGIN")"
            if [[ "$UPTIMER_API_BASE" != "$implied_base" ]]; then
              echo "::warning::UPTIMER_API_BASE and UPTIMER_API_ORIGIN resolve to different API bases. UPTIMER_API_BASE wins for web build."
            fi
          fi
          echo "Using VITE_API_BASE=$api_base"
          echo "VITE_API_BASE=$api_base" >> "$GITHUB_ENV"
          echo "UPTIMER_RESOLVED_API_BASE=$api_base" >> "$GITHUB_ENV"
          if [[ -n "$api_origin" ]]; then
            echo "Using UPTIMER_API_ORIGIN=$api_origin"
            echo "UPTIMER_RESOLVED_API_ORIGIN=$api_origin" >> "$GITHUB_ENV"
          fi
        env:
          UPTIMER_API_BASE: ${{ vars.UPTIMER_API_BASE }}
          UPTIMER_API_ORIGIN: ${{ vars.UPTIMER_API_ORIGIN }}
          WORKER_URL: ${{ steps.deploy_worker.outputs.worker_url }}
| - name: Build Pages (Vite) | |
| working-directory: apps/web | |
| run: pnpm build | |
| - name: Ensure Pages project exists (idempotent) | |
| shell: bash | |
| run: | | |
| set -euo pipefail | |
| set +e | |
| output="$(pnpm -C apps/worker exec wrangler --cwd ../.. pages project create "$PAGES_PROJECT" --production-branch "$DEFAULT_BRANCH" 2>&1)" | |
| code="$?" | |
| set -e | |
| if [[ "$code" -eq 0 ]]; then | |
| echo "Created Pages project: $PAGES_PROJECT" | |
| exit 0 | |
| fi | |
| if echo "$output" | grep -qiE "already exists|already a project"; then | |
| echo "Pages project already exists: $PAGES_PROJECT" | |
| exit 0 | |
| fi | |
| # Fallback: if the error message format changes, verify via listing. | |
| if pnpm -C apps/worker exec wrangler --cwd ../.. pages project list 2>/dev/null | grep -q "$PAGES_PROJECT"; then | |
| echo "Pages project already exists: $PAGES_PROJECT" | |
| exit 0 | |
| fi | |
| echo "$output" | |
| exit "$code" | |
| - name: Upsert Pages secret for HTML preload (best-effort) | |
| shell: bash | |
| run: | | |
| set -euo pipefail | |
| api_origin="${UPTIMER_RESOLVED_API_ORIGIN:-}" | |
| if [[ -z "$api_origin" ]]; then | |
| echo "Skipping Pages secret: no UPTIMER_API_ORIGIN / absolute UPTIMER_API_BASE / WORKER_URL" | |
| exit 0 | |
| fi | |
| printf %s "$api_origin" | pnpm -C apps/worker exec wrangler --cwd ../.. pages secret put UPTIMER_API_ORIGIN --project-name "$PAGES_PROJECT" | |
| - name: Deploy Pages (idempotent) | |
| shell: bash | |
| run: | | |
| set -euo pipefail | |
| pnpm -C apps/worker exec wrangler --cwd ../.. pages deploy apps/web/dist \ | |
| --project-name "$PAGES_PROJECT" \ | |
| --branch "$DEFAULT_BRANCH" \ | |
| --commit-hash "$GITHUB_SHA" \ | |
| --commit-message "$GITHUB_SHA" \ | |
| --commit-dirty=true |