diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml index a7aa7b8..16d66e9 100644 --- a/.github/workflows/check.yml +++ b/.github/workflows/check.yml @@ -6,6 +6,14 @@ on: push: branches: [main] +permissions: + contents: read + +env: + DAYTONA_API_KEY: ${{ secrets.DAYTONA_API_KEY }} + DAYTONA_API_URL: ${{ vars.DAYTONA_API_URL }} + ZHIPU_API_KEY: ${{ secrets.ZHIPU_API_KEY }} + jobs: Check: name: Check @@ -27,3 +35,79 @@ jobs: - name: TypeScript typecheck run: bun run typecheck + + - name: Unit tests + run: bun test + + - name: CLI smoke checks + run: | + bun run analyze -- --help + bun run start -- --help + bun run setup -- --help + + BuildPackage: + name: Build Package Artifact + runs-on: blacksmith-4vcpu-ubuntu-2404 + needs: Check + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: "20" + + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + with: + bun-version: 1.3.8 + + - name: Install dependencies + run: bun install --frozen-lockfile + + - name: Build package + run: bun run build + + - name: Pack tarball + run: | + mkdir -p artifacts + npm pack --pack-destination artifacts + ls -la artifacts + + - name: Upload package artifact + uses: actions/upload-artifact@v4 + with: + name: npm-package + path: artifacts/*.tgz + if-no-files-found: error + + PackageE2E: + name: Package Install E2E + runs-on: blacksmith-4vcpu-ubuntu-2404 + needs: BuildPackage + steps: + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: "20" + + - name: Download package artifact + uses: actions/download-artifact@v4 + with: + name: npm-package + path: artifacts + + - name: Install package into clean project + run: | + mkdir e2e-install + cd e2e-install + npm init -y + npm install ../artifacts/*.tgz + + - name: Run installed CLI binaries + run: | + cd e2e-install + ./node_modules/.bin/opencode-sandboxed-research-analyze --help + 
./node_modules/.bin/opencode-sandboxed-research-start --help + ./node_modules/.bin/opencode-sandboxed-research-setup --help diff --git a/.github/workflows/daytona-e2e.yml b/.github/workflows/daytona-e2e.yml new file mode 100644 index 0000000..60ddec3 --- /dev/null +++ b/.github/workflows/daytona-e2e.yml @@ -0,0 +1,70 @@ +name: Daytona E2E + +on: + workflow_dispatch: + inputs: + repo_url: + description: "Repository URL to analyze" + required: false + default: "https://github.com/octocat/Hello-World" + model: + description: "Model to use" + required: false + default: "zai-coding-plan/glm-4.7-flash" + analyze_timeout_sec: + description: "Analyze timeout seconds" + required: false + default: "600" + install_timeout_sec: + description: "Install timeout seconds" + required: false + default: "300" + +permissions: + contents: read + +env: + DAYTONA_API_KEY: ${{ secrets.DAYTONA_API_KEY }} + DAYTONA_API_URL: ${{ vars.DAYTONA_API_URL }} + ZHIPU_API_KEY: ${{ secrets.ZHIPU_API_KEY }} + +jobs: + E2E: + name: Run Daytona Analyze E2E + runs-on: blacksmith-8vcpu-ubuntu-2404 + timeout-minutes: 45 + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + with: + bun-version: 1.3.8 + + - name: Install dependencies + run: bun install --frozen-lockfile + + - name: Validate required environment + run: | + if [ -z "${DAYTONA_API_KEY:-}" ]; then + echo "DAYTONA_API_KEY is required" >&2 + exit 1 + fi + + - name: Run analyze e2e + run: | + mkdir -p .memory/daytona-e2e + bun run analyze -- \ + --out-dir .memory/daytona-e2e/findings \ + --model "${{ inputs.model }}" \ + --install-timeout-sec "${{ inputs.install_timeout_sec }}" \ + --analyze-timeout-sec "${{ inputs.analyze_timeout_sec }}" \ + "${{ inputs.repo_url }}" + + - name: Upload findings artifact + uses: actions/upload-artifact@v4 + with: + name: daytona-e2e-findings + path: .memory/daytona-e2e/findings + if-no-files-found: error diff --git a/.github/workflows/publish-package.yml 
b/.github/workflows/publish-package.yml new file mode 100644 index 0000000..3fea1d8 --- /dev/null +++ b/.github/workflows/publish-package.yml @@ -0,0 +1,98 @@ +name: Publish Package + +on: + push: + tags: + - "v*" + workflow_dispatch: + +permissions: + contents: read + packages: write + +jobs: + Publish: + name: Publish To GitHub Packages + runs-on: blacksmith-4vcpu-ubuntu-2404 + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: "20" + + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + with: + bun-version: 1.3.8 + + - name: Install dependencies + run: bun install --frozen-lockfile + + - name: Validate package + run: | + bun run check + bun run typecheck + bun test + bun run build + + - name: Publish package + env: + NODE_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: npm publish --registry https://npm.pkg.github.com + + VerifyInstall: + name: Verify Registry Install + runs-on: blacksmith-4vcpu-ubuntu-2404 + needs: Publish + permissions: + contents: read + packages: read + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: "20" + + - name: Configure npm for GitHub Packages + env: + NODE_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + npm config set @shpitdev:registry https://npm.pkg.github.com + npm config set //npm.pkg.github.com/:_authToken "$NODE_AUTH_TOKEN" + npm config set always-auth true + + - name: Install published package + env: + NODE_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + PACKAGE_NAME=$(node -p "require('./package.json').name") + PACKAGE_VERSION=$(node -p "require('./package.json').version") + PACKAGE_REF="${PACKAGE_NAME}@${PACKAGE_VERSION}" + + mkdir -p e2e-install + cd e2e-install + npm init -y + + for attempt in 1 2 3 4 5 6; do + if npm install "$PACKAGE_REF"; then + break + fi + if [ "$attempt" -eq 6 ]; then + echo "Failed to install $PACKAGE_REF after retries" >&2 + exit 1 + 
fi + sleep 10 + done + + - name: Run installed CLI binaries + run: | + cd e2e-install + ./node_modules/.bin/opencode-sandboxed-research-analyze --help + ./node_modules/.bin/opencode-sandboxed-research-start --help + ./node_modules/.bin/opencode-sandboxed-research-setup --help diff --git a/.gitignore b/.gitignore index 14b3b17..324c6da 100644 --- a/.gitignore +++ b/.gitignore @@ -10,6 +10,7 @@ node_modules/ dist/ coverage/ .turbo/ +.memory/ # Logs *.log diff --git a/.ignore b/.ignore new file mode 100644 index 0000000..5f2598b --- /dev/null +++ b/.ignore @@ -0,0 +1 @@ +!.memory diff --git a/README.md b/README.md index 85bd9ac..c5e269a 100644 --- a/README.md +++ b/README.md @@ -27,7 +27,9 @@ This project automates Daytona sandbox setup and OpenCode execution. - [What is this?](#what-is-this) - [Prerequisites](#prerequisites) +- [Install On New Machine](#install-on-new-machine) - [Quick Start](#quick-start) +- [Installer & Obsidian Cataloging](#installer--obsidian-cataloging) - [Commands](#commands) - [Repository Audit Workflow](#repository-audit-workflow) - [Output Layout](#output-layout) @@ -42,6 +44,24 @@ This project automates Daytona sandbox setup and OpenCode execution. - `DAYTONA_API_KEY` - `DAYTONA_API_URL` for self-hosted Daytona (example: `https://daytona.example.com/api`) - Optional but recommended: `OPENCODE_SERVER_PASSWORD` +- Optional: `obsidian` command in `PATH` (for Obsidian note cataloging/open) + +--- + +## Install On New Machine + +Use the bootstrap installer: + +```bash +curl -fsSL https://raw.githubusercontent.com/shpitdev/opencode-sandboxed-ad-hoc-research/main/scripts/install-gh-package.sh | bash +``` + +It will: + +- ask for (or reuse) a GitHub token with `read:packages` +- configure `~/.npmrc` for GitHub Packages +- install `@shpitdev/opencode-sandboxed-ad-hoc-research` globally +- launch the guided setup flow for Daytona/model credentials --- @@ -58,13 +78,50 @@ Stop with `Ctrl+C`. 
--- +## Installer & Obsidian Cataloging + +Run the guided installer: + +```bash +bun run setup +``` + +It sets up: + +- `~/.config/opencode/shpit.toml` for shared preferences +- `~/.config/opencode/.env` for optional credential storage + +No provider API key is required if you only use free `opencode/*` models (for example `opencode/minimax-m2.5-free`). + +`analyze` automatically catalogs findings to Obsidian when enabled in `shpit.toml`. + +Example config: + +```toml +[obsidian] +enabled = true +command = "obsidian" +vault_path = "/absolute/path/to/vault" +notes_root = "Research/OpenCode" +catalog_mode = "date" # date | repo +open_after_catalog = false +``` + +Project-level `shpit.toml` or `.shpit.toml` overrides global config. +The configured command must be `obsidian` (not `obs`). + +--- + ## Commands | Command | Purpose | |---|---| +| `scripts/install-gh-package.sh` | Bootstrap install from GitHub Packages on a new machine | +| `bun run setup` | Guided setup for shared config/env and Obsidian cataloging | | `bun run start` | Launch OpenCode web in a Daytona sandbox | | `bun run analyze -- --input example.md` | Analyze repos listed in a file | | `bun run analyze -- ` | Analyze direct repo URLs | +| `bun run build` | Compile distributable CLI files into `dist/` | | `bun run lint` | Lint with Biome | | `bun run format` | Format with Biome | | `bun run check` | Run Biome checks | @@ -94,17 +151,22 @@ bun run start -- --no-open ### Defaults and behavior -- Default model: `opencode/gpt-5-nano` +- Default model selection: + - Standard: `zai-coding-plan/glm-4.7-flash` + - Vision mode (`--vision`): `zai-coding-plan/glm-4.6v` +- Override with `--model`, `--variant`, `OPENCODE_ANALYZE_MODEL`, or `OPENCODE_ANALYZE_VARIANT` - Auto-installs missing `git` and `node/npm` inside sandbox -- Forwards provider env vars (`OPENAI_*`, `ANTHROPIC_*`, `XAI_*`, `OPENROUTER_*`, etc.) 
+- Forwards provider env vars (`OPENAI_*`, `ANTHROPIC_*`, `XAI_*`, `OPENROUTER_*`, `ZHIPU_*`, `MINIMAX_*`, etc.) - Syncs local OpenCode config files from `~/.config/opencode` when present +- Auto-catalogs findings into Obsidian when enabled via `shpit.toml` ### Examples ```bash bun run analyze -- --input example.md bun run analyze -- https://github.com/owner/repo-one https://github.com/owner/repo-two -bun run analyze -- --out-dir findings --model openai/gpt-5 --target us +bun run analyze -- --out-dir findings --model zai-coding-plan/glm-4.7-flash --target us +bun run analyze -- --vision bun run analyze -- --analyze-timeout-sec 3600 --keep-sandbox ``` @@ -141,6 +203,7 @@ Project config files: - `biome.json` - `.zed/settings.json` - `.zed/tasks.json` +- `tsconfig.build.json` --- diff --git a/bin/analyze.js b/bin/analyze.js new file mode 100755 index 0000000..53d2824 --- /dev/null +++ b/bin/analyze.js @@ -0,0 +1,2 @@ +#!/usr/bin/env node +import "../dist/analyze-repos.js"; diff --git a/bin/setup.js b/bin/setup.js new file mode 100755 index 0000000..18d8d74 --- /dev/null +++ b/bin/setup.js @@ -0,0 +1,2 @@ +#!/usr/bin/env node +import "../dist/install.js"; diff --git a/bin/start.js b/bin/start.js new file mode 100755 index 0000000..7fb91ad --- /dev/null +++ b/bin/start.js @@ -0,0 +1,2 @@ +#!/usr/bin/env node +import "../dist/start-opencode-daytona.js"; diff --git a/package.json b/package.json index 4b50a1b..a7cb4ce 100644 --- a/package.json +++ b/package.json @@ -1,13 +1,31 @@ { - "name": "opencode-sandboxed-ad-hoc-research", + "name": "@shpitdev/opencode-sandboxed-ad-hoc-research", "version": "1.0.0", "description": "Run OpenCode in Daytona sandboxes for web sessions and ad hoc repository research", - "private": true, + "private": false, "type": "module", "packageManager": "bun@1.3.8", + "publishConfig": { + "registry": "https://npm.pkg.github.com" + }, + "files": [ + "dist", + "bin", + "README.md", + "LICENSE" + ], + "bin": { + "opencode-sandboxed-research-start": 
"./bin/start.js", + "opencode-sandboxed-research-analyze": "./bin/analyze.js", + "opencode-sandboxed-research-setup": "./bin/setup.js" + }, "scripts": { "start": "bun run src/start-opencode-daytona.ts", "analyze": "bun run src/analyze-repos.ts", + "setup": "bun run src/install.ts", + "build": "bunx tsc -p tsconfig.build.json", + "prepack": "bun run build", + "test": "bun test", "typecheck": "bunx tsc --noEmit", "lint": "biome lint .", "format": "biome format . --write", @@ -19,13 +37,15 @@ "sandbox" ], "author": "", - "license": "ISC", + "license": "MIT", "engines": { "bun": ">=1.3.0" }, + "dependencies": { + "@daytonaio/sdk": "^0.143.0" + }, "devDependencies": { "@biomejs/biome": "^2.4.3", - "@daytonaio/sdk": "^0.143.0", "typescript": "^5.9.3" } } diff --git a/scripts/install-gh-package.sh b/scripts/install-gh-package.sh new file mode 100755 index 0000000..dfaf47c --- /dev/null +++ b/scripts/install-gh-package.sh @@ -0,0 +1,108 @@ +#!/usr/bin/env bash +set -euo pipefail + +PACKAGE_NAME="${OPENCODE_PACKAGE:-@shpitdev/opencode-sandboxed-ad-hoc-research}" +PACKAGE_SCOPE="${OPENCODE_SCOPE:-@shpitdev}" +REGISTRY_URL="${OPENCODE_REGISTRY:-https://npm.pkg.github.com}" +SETUP_BIN="${OPENCODE_SETUP_BIN:-opencode-sandboxed-research-setup}" +NPMRC_PATH="${HOME}/.npmrc" + +log() { + printf '[install] %s\n' "$*" +} + +fail() { + printf '[install] ERROR: %s\n' "$*" >&2 + exit 1 +} + +require_command() { + if ! 
command -v "$1" >/dev/null 2>&1; then + fail "Required command not found: $1" + fi +} + +upsert_npmrc_line() { + local key_prefix="$1" + local line_value="$2" + + touch "$NPMRC_PATH" + + if grep -Fq "$key_prefix" "$NPMRC_PATH"; then + awk -v key_prefix="$key_prefix" -v line_value="$line_value" ' + index($0, key_prefix) == 1 { + print line_value + next + } + { print } + ' "$NPMRC_PATH" >"${NPMRC_PATH}.tmp" + mv "${NPMRC_PATH}.tmp" "$NPMRC_PATH" + else + printf '%s\n' "$line_value" >>"$NPMRC_PATH" + fi +} + +read_token_interactive() { + local token="" + printf 'GitHub token (read:packages): ' + read -r -s token + printf '\n' >&2 + printf '%s' "$token" +} + +main() { + require_command npm + + local registry_host="${REGISTRY_URL#https://}" + registry_host="${registry_host#http://}" + registry_host="${registry_host%%/}" + + local token="${NODE_AUTH_TOKEN:-}" + if [[ -z "$token" ]] && command -v gh >/dev/null 2>&1; then + if gh auth status >/dev/null 2>&1; then + token="$(gh auth token 2>/dev/null || true)" + if [[ -n "$token" ]]; then + log "Using token from gh auth session." + fi + fi + fi + + if [[ -z "$token" ]]; then + log "A GitHub token with read:packages is required to install from GitHub Packages." + token="$(read_token_interactive)" + fi + + if [[ -z "$token" ]]; then + fail "No GitHub token provided." + fi + + local npmrc_dir + npmrc_dir="$(dirname "$NPMRC_PATH")" + mkdir -p "$npmrc_dir" + + upsert_npmrc_line "${PACKAGE_SCOPE}:registry=" "${PACKAGE_SCOPE}:registry=${REGISTRY_URL}" + upsert_npmrc_line "//${registry_host}/:_authToken=" "//${registry_host}/:_authToken=${token}" + upsert_npmrc_line "always-auth=" "always-auth=true" + log "Updated ${NPMRC_PATH} for ${PACKAGE_SCOPE}." + + log "Installing ${PACKAGE_NAME} globally..." + npm install -g "$PACKAGE_NAME" + + if ! command -v "$SETUP_BIN" >/dev/null 2>&1; then + fail "Install completed but ${SETUP_BIN} is not in PATH." + fi + + log "Installed successfully." 
+ + local run_setup="" + printf 'Run guided setup now? [Y/n]: ' + read -r run_setup + run_setup="${run_setup:-Y}" + if [[ "$run_setup" =~ ^([yY]|[yY][eE][sS])$ ]]; then + "$SETUP_BIN" + else + log "You can run setup later with: ${SETUP_BIN}" + fi +} + +main "$@" diff --git a/src/analyze-model.test.ts b/src/analyze-model.test.ts new file mode 100644 index 0000000..acd68ca --- /dev/null +++ b/src/analyze-model.test.ts @@ -0,0 +1,65 @@ +import { afterEach, describe, expect, test } from "bun:test"; +import { resolveAnalyzeModel } from "./analyze-model.js"; + +const originalCi = process.env.CI; +const originalModel = process.env.OPENCODE_ANALYZE_MODEL; +const originalVariant = process.env.OPENCODE_ANALYZE_VARIANT; +const originalVisionModel = process.env.OPENCODE_ANALYZE_VISION_MODEL; + +afterEach(() => { + process.env.CI = originalCi; + process.env.OPENCODE_ANALYZE_MODEL = originalModel; + process.env.OPENCODE_ANALYZE_VARIANT = originalVariant; + process.env.OPENCODE_ANALYZE_VISION_MODEL = originalVisionModel; +}); + +describe("resolveAnalyzeModel", () => { + test("uses default non-vision model", () => { + delete process.env.OPENCODE_ANALYZE_MODEL; + delete process.env.OPENCODE_ANALYZE_VARIANT; + delete process.env.OPENCODE_ANALYZE_VISION_MODEL; + + const resolved = resolveAnalyzeModel({}); + expect(resolved.model).toBe("zai-coding-plan/glm-4.7-flash"); + expect(resolved.variant).toBeUndefined(); + }); + + test("uses vision default when requested", () => { + delete process.env.OPENCODE_ANALYZE_MODEL; + delete process.env.OPENCODE_ANALYZE_VARIANT; + delete process.env.OPENCODE_ANALYZE_VISION_MODEL; + + const resolved = resolveAnalyzeModel({ vision: true }); + expect(resolved.model).toBe("zai-coding-plan/glm-4.6v"); + expect(resolved.variant).toBeUndefined(); + }); + + test("respects env overrides", () => { + process.env.OPENCODE_ANALYZE_MODEL = "zai-coding-plan/glm-5"; + process.env.OPENCODE_ANALYZE_VARIANT = "high"; + + const resolved = resolveAnalyzeModel({}); + 
expect(resolved.model).toBe("zai-coding-plan/glm-5"); + expect(resolved.variant).toBe("high"); + }); + + test("cli args override env", () => { + process.env.OPENCODE_ANALYZE_MODEL = "zai-coding-plan/glm-4.7-flash"; + process.env.OPENCODE_ANALYZE_VARIANT = "low"; + + const resolved = resolveAnalyzeModel({ + model: "zai-coding-plan/glm-4.6v", + variant: "high", + }); + + expect(resolved.model).toBe("zai-coding-plan/glm-4.6v"); + expect(resolved.variant).toBe("high"); + }); + + test("uses env vision override", () => { + delete process.env.OPENCODE_ANALYZE_MODEL; + process.env.OPENCODE_ANALYZE_VISION_MODEL = "zhipuai-coding-plan/glm-4.6v-flash"; + const resolved = resolveAnalyzeModel({ vision: true }); + expect(resolved.model).toBe("zhipuai-coding-plan/glm-4.6v-flash"); + }); +}); diff --git a/src/analyze-model.ts b/src/analyze-model.ts new file mode 100644 index 0000000..cc828d3 --- /dev/null +++ b/src/analyze-model.ts @@ -0,0 +1,25 @@ +import process from "node:process"; + +export type AnalyzeModelInput = { + model?: string; + variant?: string; + vision?: boolean; +}; + +export type ResolvedAnalyzeModel = { + model: string; + variant?: string; +}; + +export function resolveAnalyzeModel(input: AnalyzeModelInput): ResolvedAnalyzeModel { + const defaultModel = input.vision + ? (process.env.OPENCODE_ANALYZE_VISION_MODEL ?? "zai-coding-plan/glm-4.6v") + : "zai-coding-plan/glm-4.7-flash"; + const model = input.model ?? process.env.OPENCODE_ANALYZE_MODEL ?? defaultModel; + const variant = input.variant ?? 
process.env.OPENCODE_ANALYZE_VARIANT; + + return { + model, + variant, + }; +} diff --git a/src/analyze-repos.ts b/src/analyze-repos.ts index 7299dc0..4c64287 100644 --- a/src/analyze-repos.ts +++ b/src/analyze-repos.ts @@ -3,6 +3,10 @@ import path from "node:path"; import process from "node:process"; import { parseArgs } from "node:util"; import { Daytona, type Sandbox } from "@daytonaio/sdk"; +import { resolveAnalyzeModel } from "./analyze-model.js"; +import { catalogAnalysisResult } from "./obsidian-catalog.js"; +import { buildInstallOpencodeCommand, buildOpencodeRunCommand } from "./opencode-cli.js"; +import { loadConfiguredEnv, type ResolvedShpitConfig, resolveShpitConfig } from "./shpit-config.js"; type CliOptions = { inputFile?: string; @@ -13,6 +17,8 @@ type CliOptions = { keepSandbox: boolean; target?: string; model?: string; + variant?: string; + vision: boolean; urls: string[]; }; @@ -33,6 +39,7 @@ type AnalyzeResult = { localDir: string; findingsPath: string; readmePath?: string; + obsidianNotePath?: string; success: boolean; error?: string; }; @@ -63,7 +70,9 @@ function collectForwardedEnvEntries(): Array<[string, string]> { "GOOGLE_", "GOOGLE_GENERATIVE_AI_", "GROQ_", + "MINIMAX_", "MISTRAL_", + "ZHIPU_", "TOGETHER_", "DEEPSEEK_", "OPENROUTER_", @@ -171,7 +180,9 @@ function parseCliOptions(): CliOptions { "analyze-timeout-sec": { type: "string", default: "2400" }, "keep-sandbox": { type: "boolean", default: false }, target: { type: "string" }, - model: { type: "string", default: "opencode/gpt-5-nano" }, + model: { type: "string" }, + variant: { type: "string" }, + vision: { type: "boolean", default: false }, }, strict: true, allowPositionals: true, @@ -183,7 +194,8 @@ function parseCliOptions(): CliOptions { Examples: bun run analyze -- --input example.md bun run analyze -- https://github.com/agenticnotetaking/arscontexta - bun run analyze -- --input links.md --out-dir findings --model openai/gpt-5 + bun run analyze -- --input links.md --out-dir 
findings --model zai-coding-plan/glm-4.7-flash + bun run analyze -- --vision Options: -i, --input Markdown/text file containing links @@ -192,7 +204,9 @@ Options: --install-timeout-sec OpenCode install timeout (default: 900) --analyze-timeout-sec Per-repo analysis timeout (default: 2400) --target Daytona target override - --model OpenCode model (default: opencode/gpt-5-nano) + --model OpenCode model (default: zai-coding-plan/glm-4.7-flash) + --variant Model variant (example: xhigh) + --vision Prefer vision-capable default model (zai-coding-plan/glm-4.6v) --keep-sandbox Keep each sandbox instead of deleting it -h, --help Show this help `); @@ -208,6 +222,8 @@ Options: keepSandbox: values["keep-sandbox"], target: values.target, model: values.model, + variant: values.variant, + vision: values.vision, urls: positionals, }; } @@ -347,6 +363,10 @@ function detectOpencodeFatalError(output: string): string | undefined { return undefined; } +function hasReadyResponse(output: string): boolean { + return /\bready\b/i.test(output); +} + async function withRetries(params: { label: string; maxAttempts?: number; @@ -575,11 +595,12 @@ function buildAnalysisPrompt(params: { inputUrl: string; reportPath: string }): async function analyzeOneRepo(params: { daytona: Daytona; options: CliOptions; + config: ResolvedShpitConfig; url: string; index: number; total: number; }): Promise { - const { daytona, options, url, index, total } = params; + const { daytona, options, config, url, index, total } = params; const slug = slugFromRepoUrl(url); const runPrefix = `${String(index + 1).padStart(2, "0")}-${slug}`; const localDir = path.join(options.outDir, runPrefix); @@ -646,7 +667,7 @@ async function analyzeOneRepo(params: { await requireSuccess( sandbox, - 'if command -v bun >/dev/null 2>&1; then bun add -g opencode-ai@latest; else npm install -g opencode-ai@latest --prefix "$HOME/.local"; fi', + buildInstallOpencodeCommand(), "Install OpenCode CLI", options.installTimeoutSec, ); @@ -668,32 
+689,38 @@ async function analyzeOneRepo(params: { ); if (!hasProviderCredential) { console.warn( - `[analyze] (${runPrefix}) No model provider env vars detected locally (OPENAI_*, ANTHROPIC_*, etc). OpenCode may fail or block.`, + `[analyze] (${runPrefix}) No model provider env vars detected locally (OPENAI_*, ANTHROPIC_*, ZHIPU_*, etc). OpenCode may fail or block.`, ); } - const forwardedEnvPrefix = forwardedEnvEntries - .map(([name, value]) => `${name}=${shellEscape(value)}`) - .join(" "); - const modelArg = options.model ? ` --model ${shellEscape(options.model)}` : ""; - - const buildOpenCodeRunCommand = (promptText: string): string => - `${forwardedEnvPrefix ? `${forwardedEnvPrefix} ` : ""}` + - `OPENCODE_BIN="$(${resolveOpencodeBin})"; ` + - `"${"$"}OPENCODE_BIN" run --print-logs${modelArg} --dir ${shellEscape(remoteRepoDir)} ${shellEscape(promptText)}`; + const selectedModel = resolveAnalyzeModel({ + model: options.model, + variant: options.variant, + vision: options.vision, + }); + console.log( + `[analyze] (${runPrefix}) Model: ${selectedModel.model}${selectedModel.variant ? 
` (variant: ${selectedModel.variant})` : ""}`, + ); const preflightPrompt = "Reply with exactly one word: ready"; const preflightTimeoutSec = Math.max( 90, Math.min(300, Math.floor(options.analyzeTimeoutSec / 4)), ); - const preflightCommandText = buildOpenCodeRunCommand(preflightPrompt); + const preflightCommandText = buildOpencodeRunCommand({ + resolveOpencodeBinCommand: resolveOpencodeBin, + workingDir: remoteRepoDir, + prompt: preflightPrompt, + model: selectedModel.model, + variant: selectedModel.variant, + forwardedEnvEntries, + }); console.log(`[analyze] (${runPrefix}) Running OpenCode preflight...`); const preflightResult = await runCommand(sandbox, preflightCommandText, preflightTimeoutSec); const preflightOutput = preflightResult.output; await writeFile(path.join(localDir, "opencode-preflight.log"), preflightOutput, "utf8"); const preflightFatalError = detectOpencodeFatalError(preflightOutput); - const preflightReady = /^ready\s*$/im.test(preflightOutput); + const preflightReady = hasReadyResponse(preflightOutput); if (preflightResult.exitCode !== 0 || preflightFatalError || !preflightReady) { const preview = preflightOutput.split("\n").slice(-120).join("\n"); const reason = @@ -704,7 +731,14 @@ async function analyzeOneRepo(params: { throw new Error(`OpenCode preflight failed (${reason})\n${preview}`); } - const runCommandText = buildOpenCodeRunCommand(prompt); + const runCommandText = buildOpencodeRunCommand({ + resolveOpencodeBinCommand: resolveOpencodeBin, + workingDir: remoteRepoDir, + prompt, + model: selectedModel.model, + variant: selectedModel.variant, + forwardedEnvEntries, + }); const remoteOpencodeLogPath = `/tmp/${slug}.opencode.log`; const localOpencodeLogPath = path.join(localDir, "opencode-run.log"); @@ -743,8 +777,7 @@ async function analyzeOneRepo(params: { readmePath = localReadmeFile; } - console.log(`[analyze] (${runPrefix}) Wrote ${findingsPath}`); - return { + const result: AnalyzeResult = { url, slug, localDir, @@ -752,6 +785,13 @@ 
async function analyzeOneRepo(params: { readmePath, success: true, }; + await maybeCatalogResult({ + config, + result, + runPrefix, + }); + console.log(`[analyze] (${runPrefix}) Wrote ${findingsPath}`); + return result; } catch (error) { const message = error instanceof Error ? (error.stack ?? error.message) : String(error); const fallback = [ @@ -768,7 +808,7 @@ async function analyzeOneRepo(params: { await writeFile(findingsPath, fallback, "utf8"); console.error(`[analyze] (${runPrefix}) Failed: ${message}`); - return { + const result: AnalyzeResult = { url, slug, localDir, @@ -776,6 +816,12 @@ async function analyzeOneRepo(params: { success: false, error: message, }; + await maybeCatalogResult({ + config, + result, + runPrefix, + }); + return result; } finally { if (sandbox && !options.keepSandbox) { try { @@ -791,6 +837,45 @@ async function analyzeOneRepo(params: { } } +async function maybeCatalogResult(params: { + config: ResolvedShpitConfig; + result: AnalyzeResult; + runPrefix: string; +}): Promise { + const { config, result, runPrefix } = params; + + try { + const catalogResult = await catalogAnalysisResult({ + config, + slug: result.slug, + runLabel: runPrefix, + sourceUrl: result.url, + findingsPath: result.findingsPath, + success: result.success, + error: result.error, + }); + + if (!catalogResult.attempted && catalogResult.skippedReason) { + console.log( + `[analyze] (${runPrefix}) Obsidian catalog skipped: ${catalogResult.skippedReason}`, + ); + return; + } + + if (catalogResult.written && catalogResult.notePath) { + result.obsidianNotePath = catalogResult.notePath; + console.log(`[analyze] (${runPrefix}) Obsidian note: ${catalogResult.notePath}`); + } + + if (catalogResult.warning) { + console.warn(`[analyze] (${runPrefix}) Obsidian warning: ${catalogResult.warning}`); + } + } catch (error) { + const message = error instanceof Error ? (error.stack ?? 
error.message) : String(error); + console.warn(`[analyze] (${runPrefix}) Obsidian catalog failed: ${message}`); + } +} + async function writeIndex(results: AnalyzeResult[], outDir: string): Promise { const lines: string[] = []; lines.push("# Repository Audits"); @@ -809,6 +894,9 @@ async function writeIndex(results: AnalyzeResult[], outDir: string): Promise { + const loadedEnv = await loadConfiguredEnv(); + if (loadedEnv.keysLoaded.length > 0) { + console.log( + `[analyze] Loaded ${loadedEnv.keysLoaded.length} env var(s) from config (.env) files.`, + ); + } + const config = await resolveShpitConfig(); + const options = parseCliOptions(); const urls = await resolveInputUrls(options); const apiKey = requireEnv("DAYTONA_API_KEY"); @@ -841,6 +937,7 @@ async function main(): Promise { const result = await analyzeOneRepo({ daytona, options, + config, url, index, total: urls.length, diff --git a/src/install.ts b/src/install.ts new file mode 100644 index 0000000..17b1543 --- /dev/null +++ b/src/install.ts @@ -0,0 +1,393 @@ +import { execFile } from "node:child_process"; +import { mkdir, readFile, writeFile } from "node:fs/promises"; +import path from "node:path"; +import process from "node:process"; +import { createInterface } from "node:readline/promises"; +import { parseArgs, promisify } from "node:util"; +import { loadConfiguredEnv, resolveShpitConfig } from "./shpit-config.js"; + +type CliOptions = { + yes: boolean; + vaultPath?: string; + notesRoot?: string; + catalogMode?: "date" | "repo"; + openAfterCatalog?: boolean; + daytonaApiKey?: string; + openaiApiKey?: string; + zhipuApiKey?: string; +}; + +const execFileAsync = promisify(execFile); + +function parseCliOptions(): CliOptions { + const { values } = parseArgs({ + options: { + help: { type: "boolean", short: "h", default: false }, + yes: { type: "boolean", short: "y", default: false }, + "vault-path": { type: "string" }, + "notes-root": { type: "string" }, + "catalog-mode": { type: "string" }, + 
"open-after-catalog": { type: "boolean", default: false }, + "daytona-api-key": { type: "string" }, + "openai-api-key": { type: "string" }, + "zhipu-api-key": { type: "string" }, + }, + strict: true, + allowPositionals: false, + }); + + if (values.help) { + console.log(`Usage: bun run setup -- [options] + +Options: + -y, --yes Non-interactive setup using defaults/flags + --vault-path Obsidian vault path (absolute or ~/...) + --notes-root Folder inside vault for audit notes (default: Research/OpenCode) + --catalog-mode date | repo (default: date) + --open-after-catalog Open each new note via obsidian CLI after writing + --daytona-api-key Seed DAYTONA_API_KEY into ~/.config/opencode/.env + --openai-api-key Seed OPENAI_API_KEY into ~/.config/opencode/.env + --zhipu-api-key Seed ZHIPU_API_KEY into ~/.config/opencode/.env + -h, --help Show this help +`); + process.exit(0); + } + + const rawCatalogMode = values["catalog-mode"]; + if (rawCatalogMode && rawCatalogMode !== "date" && rawCatalogMode !== "repo") { + throw new Error(`--catalog-mode must be "date" or "repo". 
Received "${rawCatalogMode}".`); + } + + return { + yes: values.yes, + vaultPath: values["vault-path"], + notesRoot: values["notes-root"], + catalogMode: rawCatalogMode as "date" | "repo" | undefined, + openAfterCatalog: values["open-after-catalog"], + daytonaApiKey: values["daytona-api-key"], + openaiApiKey: values["openai-api-key"], + zhipuApiKey: values["zhipu-api-key"], + }; +} + +function expandHomeDir(value: string | undefined): string | undefined { + if (!value) { + return undefined; + } + if (!value.startsWith("~")) { + return value; + } + const home = process.env.HOME; + if (!home) { + return value; + } + return path.join(home, value.slice(1)); +} + +async function detectObsidianBinary(): Promise { + try { + const { stdout } = await execFileAsync("sh", ["-lc", "command -v obsidian"]); + const resolved = stdout.trim(); + return resolved || undefined; + } catch { + return undefined; + } +} + +function parseEnvFile(content: string): Map { + const result = new Map(); + const lines = content.split(/\r?\n/); + for (const rawLine of lines) { + const line = rawLine.trim(); + if (!line || line.startsWith("#")) { + continue; + } + const separatorIndex = line.indexOf("="); + if (separatorIndex === -1) { + continue; + } + const key = line.slice(0, separatorIndex).trim(); + if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(key)) { + continue; + } + let value = line.slice(separatorIndex + 1).trim(); + if (value.startsWith('"') && value.endsWith('"')) { + value = value.slice(1, -1); + } + if (value.startsWith("'") && value.endsWith("'")) { + value = value.slice(1, -1); + } + result.set(key, value); + } + return result; +} + +async function loadEnvMap(filePath: string): Promise> { + try { + const content = await readFile(filePath, "utf8"); + return parseEnvFile(content); + } catch { + return new Map(); + } +} + +function serializeEnvMap(env: Map): string { + const lines: string[] = []; + lines.push("# Managed by bun run setup"); + lines.push("# Shell-exported env vars still override 
these values."); + lines.push(""); + + const keys = [...env.keys()].sort((a, b) => a.localeCompare(b)); + for (const key of keys) { + const value = env.get(key); + if (value === undefined) { + continue; + } + lines.push(`${key}=${JSON.stringify(value)}`); + } + + lines.push(""); + return lines.join("\n"); +} + +async function askYesNo(params: { + rl: ReturnType; + prompt: string; + defaultValue: boolean; +}): Promise { + const suffix = params.defaultValue ? "Y/n" : "y/N"; + const answer = (await params.rl.question(`${params.prompt} (${suffix}): `)).trim().toLowerCase(); + if (!answer) { + return params.defaultValue; + } + return answer === "y" || answer === "yes"; +} + +async function askText(params: { + rl: ReturnType; + prompt: string; + defaultValue?: string; +}): Promise { + const renderedPrompt = params.defaultValue + ? `${params.prompt} [${params.defaultValue}]: ` + : `${params.prompt}: `; + const answer = (await params.rl.question(renderedPrompt)).trim(); + if (!answer) { + return params.defaultValue; + } + return answer; +} + +async function main(): Promise { + const options = parseCliOptions(); + await loadConfiguredEnv(); + const existingConfig = await resolveShpitConfig(); + + const home = process.env.HOME; + if (!home) { + throw new Error("HOME is not set. Cannot write ~/.config/opencode files."); + } + + const configDir = path.join(home, ".config", "opencode"); + const configPath = path.join(configDir, "shpit.toml"); + const envPath = path.join(configDir, ".env"); + + const obsidianBinary = await detectObsidianBinary(); + console.log( + obsidianBinary + ? `[install] Detected Obsidian CLI command at: ${obsidianBinary}` + : "[install] Obsidian CLI command not found in PATH. 
Expected command name: obsidian", + ); + console.log( + "[install] The installer will not execute Obsidian commands; it only configures them.", + ); + + const rl = createInterface({ input: process.stdin, output: process.stdout }); + try { + const nonInteractive = options.yes; + + const enableObsidian = nonInteractive + ? Boolean(options.vaultPath ?? existingConfig.obsidian.enabled) + : await askYesNo({ + rl, + prompt: "Enable automatic Obsidian cataloging for analyze results?", + defaultValue: existingConfig.obsidian.enabled, + }); + + const vaultPath = expandHomeDir( + options.vaultPath ?? + (nonInteractive + ? existingConfig.obsidian.vaultPath + : await askText({ + rl, + prompt: "Obsidian vault path", + defaultValue: existingConfig.obsidian.vaultPath, + })), + ); + + const notesRoot = + options.notesRoot ?? + (nonInteractive + ? existingConfig.obsidian.notesRoot + : await askText({ + rl, + prompt: "Vault folder for repo audits", + defaultValue: existingConfig.obsidian.notesRoot, + })); + + const catalogMode = + options.catalogMode ?? + (nonInteractive + ? existingConfig.obsidian.catalogMode + : ((await askText({ + rl, + prompt: "Catalog mode (date|repo)", + defaultValue: existingConfig.obsidian.catalogMode, + })) as "date" | "repo" | undefined)); + + if (catalogMode && catalogMode !== "date" && catalogMode !== "repo") { + throw new Error(`Invalid catalog mode "${catalogMode}".`); + } + + const openAfterCatalog = nonInteractive + ? 
Boolean(options.openAfterCatalog) + : await askYesNo({ + rl, + prompt: "Open each created note via obsidian command", + defaultValue: existingConfig.obsidian.openAfterCatalog, + }); + + if (enableObsidian && !vaultPath) { + throw new Error("Obsidian cataloging is enabled, but no vault path was provided."); + } + + await mkdir(configDir, { recursive: true }); + + const shpitTomlLines: string[] = []; + shpitTomlLines.push("# Managed by bun run setup"); + shpitTomlLines.push(""); + shpitTomlLines.push("[obsidian]"); + shpitTomlLines.push(`enabled = ${enableObsidian ? "true" : "false"}`); + shpitTomlLines.push('command = "obsidian"'); + if (vaultPath) { + shpitTomlLines.push(`vault_path = ${JSON.stringify(vaultPath)}`); + } + shpitTomlLines.push( + `notes_root = ${JSON.stringify(notesRoot ?? existingConfig.obsidian.notesRoot)}`, + ); + shpitTomlLines.push( + `catalog_mode = ${JSON.stringify(catalogMode ?? existingConfig.obsidian.catalogMode)}`, + ); + shpitTomlLines.push(`open_after_catalog = ${openAfterCatalog ? "true" : "false"}`); + shpitTomlLines.push(""); + + await writeFile(configPath, shpitTomlLines.join("\n"), "utf8"); + console.log(`[install] Wrote ${configPath}`); + + const envMap = await loadEnvMap(envPath); + const seededKeys: string[] = []; + + const daytonaApiKey = options.daytonaApiKey ?? 
process.env.DAYTONA_API_KEY; + if (!daytonaApiKey && !nonInteractive) { + const entered = await askText({ + rl, + prompt: "DAYTONA_API_KEY (leave blank to skip)", + }); + if (entered) { + envMap.set("DAYTONA_API_KEY", entered); + seededKeys.push("DAYTONA_API_KEY"); + } + } else if (daytonaApiKey) { + envMap.set("DAYTONA_API_KEY", daytonaApiKey); + seededKeys.push("DAYTONA_API_KEY"); + } + + const hasProviderKey = + Boolean(process.env.OPENAI_API_KEY) || + Boolean(process.env.ANTHROPIC_API_KEY) || + Boolean(process.env.XAI_API_KEY) || + Boolean(process.env.OPENROUTER_API_KEY) || + Boolean(process.env.ZHIPU_API_KEY) || + Boolean(envMap.get("OPENAI_API_KEY")) || + Boolean(envMap.get("ANTHROPIC_API_KEY")) || + Boolean(envMap.get("XAI_API_KEY")) || + Boolean(envMap.get("OPENROUTER_API_KEY")) || + Boolean(envMap.get("ZHIPU_API_KEY")); + + const openaiApiKey = options.openaiApiKey ?? process.env.OPENAI_API_KEY; + const zhipuApiKey = options.zhipuApiKey ?? process.env.ZHIPU_API_KEY; + if (!hasProviderKey && !openaiApiKey && !zhipuApiKey && !nonInteractive) { + console.log( + "[install] No provider API key detected. 
Free opencode/* models can work without a key, but provider keys are needed for OpenAI/Anthropic/Z.AI/etc.", + ); + const entered = await askText({ + rl, + prompt: "OPENAI_API_KEY (leave blank to skip)", + }); + if (entered) { + envMap.set("OPENAI_API_KEY", entered); + seededKeys.push("OPENAI_API_KEY"); + } + } else if (openaiApiKey) { + envMap.set("OPENAI_API_KEY", openaiApiKey); + seededKeys.push("OPENAI_API_KEY"); + } + + if (zhipuApiKey) { + envMap.set("ZHIPU_API_KEY", zhipuApiKey); + seededKeys.push("ZHIPU_API_KEY"); + } else if (!hasProviderKey && !openaiApiKey && !nonInteractive) { + const entered = await askText({ + rl, + prompt: "ZHIPU_API_KEY (for Z.AI / GLM, leave blank to skip)", + }); + if (entered) { + envMap.set("ZHIPU_API_KEY", entered); + seededKeys.push("ZHIPU_API_KEY"); + } + } + + if (envMap.size > 0) { + await writeFile(envPath, serializeEnvMap(envMap), "utf8"); + console.log(`[install] Wrote ${envPath}`); + } + + if (!process.env.DAYTONA_API_KEY && !envMap.get("DAYTONA_API_KEY")) { + console.warn( + "[install] DAYTONA_API_KEY is still missing. start/analyze will fail until it is set.", + ); + } + + const hasAnyProviderKey = + Boolean(process.env.OPENAI_API_KEY) || + Boolean(process.env.ANTHROPIC_API_KEY) || + Boolean(process.env.XAI_API_KEY) || + Boolean(process.env.OPENROUTER_API_KEY) || + Boolean(process.env.ZHIPU_API_KEY) || + Boolean(envMap.get("OPENAI_API_KEY")) || + Boolean(envMap.get("ANTHROPIC_API_KEY")) || + Boolean(envMap.get("XAI_API_KEY")) || + Boolean(envMap.get("OPENROUTER_API_KEY")) || + Boolean(envMap.get("ZHIPU_API_KEY")); + + if (!hasAnyProviderKey) { + console.warn( + "[install] No model provider key configured. 
This is OK if you use free opencode/* models; set OPENAI_API_KEY or ZHIPU_API_KEY for broader model access.", + ); + } + + if (seededKeys.length > 0) { + console.log(`[install] Seeded credential keys: ${[...new Set(seededKeys)].join(", ")}`); + } + + console.log("[install] Setup complete."); + } finally { + rl.close(); + } +} + +main().catch((error: unknown) => { + const message = error instanceof Error ? (error.stack ?? error.message) : String(error); + console.error(`[install] Failed: ${message}`); + process.exit(1); +}); diff --git a/src/obsidian-catalog.test.ts b/src/obsidian-catalog.test.ts new file mode 100644 index 0000000..afceb81 --- /dev/null +++ b/src/obsidian-catalog.test.ts @@ -0,0 +1,30 @@ +import { describe, expect, test } from "bun:test"; +import { __testables } from "./obsidian-catalog.js"; + +describe("obsidian catalog pathing", () => { + test("builds date-based note path", () => { + const relativePath = __testables.buildRelativeNotePath({ + notesRoot: "Research/OpenCode", + catalogMode: "date", + slug: "owner-repo", + runLabel: "01-owner-repo", + }); + + expect(relativePath).toMatch( + /^Research[\\/]OpenCode[\\/]\d{4}[\\/]\d{2}[\\/]\d{2}-01-owner-repo\.md$/, + ); + }); + + test("builds repo-based note path", () => { + const relativePath = __testables.buildRelativeNotePath({ + notesRoot: "Research/OpenCode", + catalogMode: "repo", + slug: "owner/repo", + runLabel: "01-owner-repo", + }); + + expect(relativePath).toMatch( + /^Research[\\/]OpenCode[\\/]owner-repo[\\/]\d{4}-\d{2}-\d{2}-01-owner-repo\.md$/, + ); + }); +}); diff --git a/src/obsidian-catalog.ts b/src/obsidian-catalog.ts new file mode 100644 index 0000000..b74bf49 --- /dev/null +++ b/src/obsidian-catalog.ts @@ -0,0 +1,206 @@ +import { spawn } from "node:child_process"; +import { mkdir, readFile, writeFile } from "node:fs/promises"; +import path from "node:path"; +import type { ResolvedShpitConfig } from "./shpit-config.js"; + +type CatalogInput = { + config: ResolvedShpitConfig; + slug: 
string;
  runLabel: string;
  sourceUrl: string;
  findingsPath: string;
  success: boolean;
  error?: string;
};

// Outcome of one cataloging attempt. `attempted` distinguishes "skipped by
// configuration" from "tried and wrote (or warned)".
export type CatalogResult = {
  attempted: boolean;
  written: boolean;
  notePath?: string;
  skippedReason?: string;
  warning?: string;
};

/**
 * Normalizes an arbitrary string into a safe, lowercase path segment:
 * unsupported characters collapse to "-", edge dashes are trimmed, and the
 * result is capped at 120 characters to stay filesystem-friendly.
 */
function sanitizePathSegment(value: string): string {
  const lowered = value.toLowerCase();
  const dashed = lowered.replace(/[^a-z0-9._-]+/g, "-");
  const trimmed = dashed.replace(/^-+|-+$/g, "");
  return trimmed.slice(0, 120);
}

/** Returns the current local date as zero-padded year/month/day strings. */
function getDateParts(): { year: string; month: string; day: string } {
  const now = new Date();
  const pad = (n: number): string => String(n).padStart(2, "0");
  return {
    year: String(now.getFullYear()),
    month: pad(now.getMonth() + 1),
    day: pad(now.getDate()),
  };
}

/**
 * Builds the vault-relative note path for a catalog entry.
 *
 * - "repo" mode: `<root>/<slug>/<YYYY-MM-DD>-<runLabel>.md`
 * - "date" mode: `<root>/<YYYY>/<MM>/<DD>-<runLabel>.md`
 *
 * Falls back to "repo" / the slug when sanitizing empties a segment.
 */
function buildRelativeNotePath(params: {
  notesRoot: string;
  catalogMode: "date" | "repo";
  slug: string;
  runLabel: string;
}): string {
  const root = params.notesRoot.replace(/^\/+|\/+$/g, "");
  const slugSegment = sanitizePathSegment(params.slug) || "repo";
  const labelSegment = sanitizePathSegment(params.runLabel) || slugSegment;
  const { year, month, day } = getDateParts();

  const segments =
    params.catalogMode === "repo"
      ? [root, slugSegment, `${year}-${month}-${day}-${labelSegment}.md`]
      : [root, year, month, `${day}-${labelSegment}.md`];
  return path.join(...segments);
}

/** Escapes backslashes and double quotes for a double-quoted YAML scalar. */
function escapeYaml(value: string): string {
  return value.replace(/[\\"]/g, (match) => `\\${match}`);
}

/** Converts Windows path separators to forward slashes for markdown/URIs. */
function toMarkdownPath(value: string): string {
  return value.split("\\").join("/");
}

/**
 * Renders the markdown note: YAML frontmatter, a short summary section
 * (source, status, optional first error line), then the raw findings.
 */
function buildCatalogNote(input: {
  sourceUrl: string;
  findings: string;
  success: boolean;
  error?: string;
  runLabel: string;
  slug: string;
}): string {
  const generatedAt = new Date().toISOString();
  const status = input.success ? "success" : "failed";

  const frontmatter = [
    "---",
    `source_url: "${escapeYaml(input.sourceUrl)}"`,
    `generated_at: "${generatedAt}"`,
    `status: "${status}"`,
    `run_label: "${escapeYaml(input.runLabel)}"`,
    `slug: "${escapeYaml(input.slug)}"`,
    "---",
  ];

  const summary = [
    "",
    `# Repository Audit - ${input.slug}`,
    "",
    `- Source: ${input.sourceUrl}`,
    `- Status: ${status}`,
  ];
  if (input.error) {
    summary.push(`- Error: ${input.error.split("\n")[0]}`);
  }

  const body = ["", "## Findings", "", input.findings.trim(), ""];

  return [...frontmatter, ...summary, ...body].join("\n");
}

async function tryOpenInObsidian(params: {
  command: string;
  vaultPath: string;
  relativeNotePath: string;
}): Promise<string | undefined> {
  return await new Promise((resolve) => {
    let settled = false;
    const child = spawn(
      params.command,
      ["open", `path=${toMarkdownPath(params.relativeNotePath)}`],
      {
        cwd: params.vaultPath,
        detached: true,
        stdio: "ignore",
      },
    );

    child.once("error", (error) => {
      if (!settled) {
        settled = true;
        resolve(String(error.message ??
error)); + } + }); + + child.once("spawn", () => { + child.unref(); + if (!settled) { + settled = true; + resolve(undefined); + } + }); + + setTimeout(() => { + if (!settled) { + settled = true; + resolve(`Timed out while launching ${params.command}.`); + } + }, 2000); + }); +} + +export async function catalogAnalysisResult(input: CatalogInput): Promise { + const { obsidian } = input.config; + + if (!obsidian.enabled) { + return { + attempted: false, + written: false, + skippedReason: "Obsidian cataloging is disabled in shpit.toml.", + }; + } + + if (!obsidian.vaultPath) { + return { + attempted: false, + written: false, + skippedReason: "Obsidian cataloging is enabled but `obsidian.vault_path` is not set.", + }; + } + + const relativeNotePath = buildRelativeNotePath({ + notesRoot: obsidian.notesRoot, + catalogMode: obsidian.catalogMode, + slug: input.slug, + runLabel: input.runLabel, + }); + const notePath = path.join(obsidian.vaultPath, relativeNotePath); + const findings = await readFile(input.findingsPath, "utf8"); + const note = buildCatalogNote({ + sourceUrl: input.sourceUrl, + findings, + success: input.success, + error: input.error, + runLabel: input.runLabel, + slug: input.slug, + }); + + await mkdir(path.dirname(notePath), { recursive: true }); + await writeFile(notePath, note, "utf8"); + + if (!obsidian.openAfterCatalog) { + return { + attempted: true, + written: true, + notePath, + }; + } + + const warning = await tryOpenInObsidian({ + command: obsidian.command, + vaultPath: obsidian.vaultPath, + relativeNotePath, + }); + + return { + attempted: true, + written: true, + notePath, + warning, + }; +} + +export const __testables = { + buildRelativeNotePath, +}; diff --git a/src/opencode-cli.test.ts b/src/opencode-cli.test.ts new file mode 100644 index 0000000..dcd86ac --- /dev/null +++ b/src/opencode-cli.test.ts @@ -0,0 +1,36 @@ +import { describe, expect, test } from "bun:test"; +import { buildInstallOpencodeCommand, buildOpencodeRunCommand } from 
"./opencode-cli.js"; + +describe("buildInstallOpencodeCommand", () => { + test("tries bun first and falls back to npm", () => { + const command = buildInstallOpencodeCommand(); + + expect(command).toContain("if command -v bun >/dev/null 2>&1; then"); + expect(command).toContain("bun add -g opencode-ai@latest"); + expect(command).toContain("falling back to npm"); + expect(command).toContain('npm install -g opencode-ai@latest --prefix "$HOME/.local"'); + }); +}); + +describe("buildOpencodeRunCommand", () => { + test("builds run command with cwd switch and provider env forwarding", () => { + const command = buildOpencodeRunCommand({ + resolveOpencodeBinCommand: "command -v opencode", + workingDir: "/home/daytona/audit/repo", + prompt: "Reply with exactly one word: ready", + model: "zai-coding-plan/glm-4.7-flash", + variant: "high", + forwardedEnvEntries: [ + ["OPENAI_API_KEY", "sk-test"], + ["ZHIPU_API_KEY", "z-test"], + ], + }); + + expect(command).toContain('OPENCODE_BIN="$(command -v opencode)"'); + expect(command).toContain("cd '/home/daytona/audit/repo'"); + expect(command).toContain("env OPENAI_API_KEY='sk-test' ZHIPU_API_KEY='z-test'"); + expect(command).toContain("--model 'zai-coding-plan/glm-4.7-flash'"); + expect(command).toContain("--variant 'high'"); + expect(command).not.toContain("--dir"); + }); +}); diff --git a/src/opencode-cli.ts b/src/opencode-cli.ts new file mode 100644 index 0000000..5d043b4 --- /dev/null +++ b/src/opencode-cli.ts @@ -0,0 +1,38 @@ +type BuildRunCommandInput = { + resolveOpencodeBinCommand: string; + workingDir: string; + prompt: string; + model: string; + variant?: string; + forwardedEnvEntries?: Array<[string, string]>; +}; + +function shellEscape(value: string): string { + return `'${value.replace(/'/g, `'"'"'`)}'`; +} + +export function buildInstallOpencodeCommand(): string { + return [ + "if command -v bun >/dev/null 2>&1; then", + ' echo "[bootstrap] Attempting OpenCode install via bun..."', + " if bun add -g 
opencode-ai@latest; then exit 0; fi", + ' echo "[bootstrap] bun install failed; falling back to npm..." >&2', + "fi", + 'npm install -g opencode-ai@latest --prefix "$HOME/.local" --no-audit --no-fund', + ].join("\n"); +} + +export function buildOpencodeRunCommand(input: BuildRunCommandInput): string { + const modelArg = ` --model ${shellEscape(input.model)}`; + const variantArg = input.variant ? ` --variant ${shellEscape(input.variant)}` : ""; + const forwardedEnvArgs = (input.forwardedEnvEntries ?? []) + .map(([name, value]) => `${name}=${shellEscape(value)}`) + .join(" "); + + return ( + `OPENCODE_BIN="$(${input.resolveOpencodeBinCommand})"; ` + + `cd ${shellEscape(input.workingDir)} && ` + + `${forwardedEnvArgs ? `env ${forwardedEnvArgs} ` : ""}` + + `"${"$"}OPENCODE_BIN" run --print-logs${modelArg}${variantArg} ${shellEscape(input.prompt)}` + ); +} diff --git a/src/shpit-config.test.ts b/src/shpit-config.test.ts new file mode 100644 index 0000000..c3581e0 --- /dev/null +++ b/src/shpit-config.test.ts @@ -0,0 +1,66 @@ +import { describe, expect, test } from "bun:test"; +import { __testables } from "./shpit-config.js"; + +describe("shpit config parsing", () => { + test("parses obsidian table values", () => { + const parsed = __testables.parseToml( + [ + "[obsidian]", + "enabled = true", + 'command = "obsidian"', + 'vault_path = "/vault"', + 'notes_root = "Research/OpenCode"', + 'catalog_mode = "repo"', + "open_after_catalog = false", + ].join("\n"), + "shpit.toml", + ); + + expect(parsed).toBeObject(); + const obsidian = parsed.obsidian as Record; + expect(obsidian.enabled).toBe(true); + expect(obsidian.command).toBe("obsidian"); + expect(obsidian.vault_path).toBe("/vault"); + expect(obsidian.notes_root).toBe("Research/OpenCode"); + expect(obsidian.catalog_mode).toBe("repo"); + expect(obsidian.open_after_catalog).toBe(false); + }); + + test("rejects obs command alias", () => { + expect(() => + __testables.resolveFinalConfig({ + obsidian: { + command: "obs", + }, + 
}), + ).toThrow(/Use "obsidian"/); + }); + + test("uses sane defaults", () => { + const config = __testables.resolveFinalConfig({}); + expect(config.enabled).toBe(false); + expect(config.command).toBe("obsidian"); + expect(config.catalogMode).toBe("date"); + expect(config.notesRoot).toBe("Research/OpenCode"); + expect(config.openAfterCatalog).toBe(false); + }); +}); + +describe("env parsing", () => { + test("parses quoted and unquoted values", () => { + const env = __testables.parseEnvFile( + [ + "# comment", + "DAYTONA_API_KEY=abc123", + 'OPENAI_API_KEY="sk-test"', + "EMPTY=", + "NOT_A_KEY hi", + ].join("\n"), + ); + + expect(env.get("DAYTONA_API_KEY")).toBe("abc123"); + expect(env.get("OPENAI_API_KEY")).toBe("sk-test"); + expect(env.get("EMPTY")).toBe(""); + expect(env.has("NOT_A_KEY hi")).toBe(false); + }); +}); diff --git a/src/shpit-config.ts b/src/shpit-config.ts new file mode 100644 index 0000000..701610b --- /dev/null +++ b/src/shpit-config.ts @@ -0,0 +1,457 @@ +import { readFile } from "node:fs/promises"; +import path from "node:path"; +import process from "node:process"; + +type TomlPrimitive = string | number | boolean; +interface ParsedTomlTable { + [key: string]: TomlPrimitive | ParsedTomlTable; +} +type ParsedTomlValue = TomlPrimitive | ParsedTomlTable; + +type PartialObsidianConfig = { + enabled?: boolean; + command?: string; + vaultPath?: string; + notesRoot?: string; + catalogMode?: "date" | "repo"; + openAfterCatalog?: boolean; +}; + +type PartialShpitConfig = { + obsidian?: PartialObsidianConfig; +}; + +export type ResolvedShpitConfig = { + paths: { + globalConfigPath?: string; + projectConfigPath?: string; + }; + obsidian: { + enabled: boolean; + command: string; + vaultPath?: string; + notesRoot: string; + catalogMode: "date" | "repo"; + openAfterCatalog: boolean; + }; +}; + +export type LoadedEnvInfo = { + globalEnvPath?: string; + projectEnvPath?: string; + keysLoaded: string[]; +}; + +const DEFAULT_NOTES_ROOT = "Research/OpenCode"; + 
/**
 * Strips a trailing `# comment` from a raw TOML value, honouring quoted
 * regions so a `#` inside a string is never treated as a comment delimiter.
 * Backslash escapes are only recognised inside double quotes (TOML basic
 * strings); single-quoted literal strings take every character verbatim.
 */
function stripInlineComment(value: string): string {
  let inSingle = false;
  let inDouble = false;
  let escaped = false;

  for (let i = 0; i < value.length; i += 1) {
    const char = value[i];

    if (escaped) {
      escaped = false;
      continue;
    }

    if (char === "\\" && inDouble) {
      escaped = true;
      continue;
    }

    if (char === '"' && !inSingle) {
      inDouble = !inDouble;
      continue;
    }

    if (char === "'" && !inDouble) {
      inSingle = !inSingle;
      continue;
    }

    if (char === "#" && !inSingle && !inDouble) {
      return value.slice(0, i).trim();
    }
  }

  return value.trim();
}

/**
 * Parses one TOML value into a string, boolean, or integer.
 *
 * Only the subset this project writes is supported: double-quoted strings
 * (decoded with JSON.parse, whose escapes cover what we emit), single-quoted
 * literal strings, `true`/`false`, and base-10 integers.
 *
 * @throws Error with a `file:line` prefix for malformed or unsupported input.
 */
function parseTomlPrimitive(rawValue: string, filePath: string, lineNumber: number): TomlPrimitive {
  const value = stripInlineComment(rawValue);

  // Fix: require length >= 2 so a lone `"` or `'` is not mistaken for an
  // empty quoted string (previously a bare `'` silently parsed as "").
  if (value.length >= 2 && value.startsWith('"') && value.endsWith('"')) {
    try {
      return JSON.parse(value) as string;
    } catch {
      throw new Error(`${filePath}:${lineNumber} invalid double-quoted TOML string.`);
    }
  }

  if (value.length >= 2 && value.startsWith("'") && value.endsWith("'")) {
    return value.slice(1, -1);
  }

  if (value === "true") {
    return true;
  }

  if (value === "false") {
    return false;
  }

  if (/^-?\d+$/.test(value)) {
    return Number.parseInt(value, 10);
  }

  throw new Error(
    `${filePath}:${lineNumber} unsupported TOML value "${value}". Use quoted strings, booleans, or integers.`,
  );
}

/**
 * Assigns `value` at the dotted `keyPath` inside `target`, creating
 * intermediate tables on demand.
 *
 * @throws Error when a path segment already holds a primitive (cannot nest
 *   through it) or when the key path is empty.
 */
function setNestedTableValue(
  target: ParsedTomlTable,
  keyPath: string[],
  value: TomlPrimitive,
): void {
  let current = target;

  for (const segment of keyPath.slice(0, -1)) {
    const existing = current[segment];
    if (existing === undefined) {
      const next: ParsedTomlTable = {};
      current[segment] = next;
      current = next;
      continue;
    }

    if (typeof existing !== "object" || Array.isArray(existing)) {
      throw new Error(`Cannot assign nested key through primitive TOML key: ${keyPath.join(".")}`);
    }

    current = existing;
  }

  const leaf = keyPath.at(-1);
  if (!leaf) {
    throw new Error("Invalid TOML key path.");
  }
  current[leaf] = value;
}

// Minimal line-oriented TOML parser covering this project's config subset:
// `[table.header]` sections plus `key = value` assignments.
function parseToml(content: string, filePath: string): ParsedTomlTable {
  const root: ParsedTomlTable = {};
  let currentSection: string[] = [];

  const lines = content.split(/\r?\n/);
  for (const [index, originalLine] of lines.entries()) {
    const lineNumber = index + 1;
    const line = originalLine.trim();
    if (!line || line.startsWith("#")) {
      continue;
    }

    if (line.startsWith("[") && line.endsWith("]")) {
      const sectionName = line.slice(1, -1).trim();
      if (!sectionName) {
        throw new Error(`${filePath}:${lineNumber} invalid TOML table header.`);
      }
      currentSection = sectionName.split(".").map((segment) => segment.trim());
      if (currentSection.some((segment) => !segment)) {
        throw new Error(`${filePath}:${lineNumber} invalid TOML table header "${sectionName}".`);
      }
      // Pre-create the section's table chain so empty sections still exist.
      let pointer = root;
      for (const segment of currentSection) {
        const existing = pointer[segment];
        if (existing === undefined) {
          const next: ParsedTomlTable = {};
          pointer[segment] = next;
          pointer = next;
          continue;
        }
        if (typeof existing !== "object" || Array.isArray(existing)) {
          throw new Error(
            `${filePath}:${lineNumber} table "${sectionName}" conflicts with existing primitive key.`,
          );
        }
        pointer = existing;
      }
      continue;
    }

    const
separatorIndex = line.indexOf("="); + if (separatorIndex === -1) { + throw new Error(`${filePath}:${lineNumber} expected "key = value".`); + } + + const rawKey = line.slice(0, separatorIndex).trim(); + const rawValue = line.slice(separatorIndex + 1).trim(); + + if (!rawKey) { + throw new Error(`${filePath}:${lineNumber} missing TOML key before "=".`); + } + + const keyPath = [...currentSection, ...rawKey.split(".").map((segment) => segment.trim())]; + if (keyPath.some((segment) => !segment)) { + throw new Error(`${filePath}:${lineNumber} invalid TOML key "${rawKey}".`); + } + + const primitive = parseTomlPrimitive(rawValue, filePath, lineNumber); + setNestedTableValue(root, keyPath, primitive); + } + + return root; +} + +function asTable(value: ParsedTomlValue | undefined): ParsedTomlTable | undefined { + if (!value || typeof value !== "object" || Array.isArray(value)) { + return undefined; + } + return value; +} + +function asString(value: ParsedTomlValue | undefined): string | undefined { + return typeof value === "string" ? value : undefined; +} + +function asBoolean(value: ParsedTomlValue | undefined): boolean | undefined { + return typeof value === "boolean" ? value : undefined; +} + +function toPartialConfig(parsed: ParsedTomlTable): PartialShpitConfig { + const obsidian = asTable(parsed.obsidian); + + return { + obsidian: obsidian + ? { + enabled: asBoolean(obsidian.enabled), + command: asString(obsidian.command), + vaultPath: asString(obsidian.vault_path), + notesRoot: asString(obsidian.notes_root), + catalogMode: asString(obsidian.catalog_mode) as "date" | "repo" | undefined, + openAfterCatalog: asBoolean(obsidian.open_after_catalog), + } + : undefined, + }; +} + +function mergePartialConfig( + base: PartialShpitConfig, + override: PartialShpitConfig, +): PartialShpitConfig { + return { + obsidian: { + ...(base.obsidian ?? {}), + ...(override.obsidian ?? 
{}), + }, + }; +} + +function normalizeObsidianCommand(command: string | undefined): string { + const normalized = (command ?? "obsidian").trim(); + if (!normalized) { + return "obsidian"; + } + if (normalized === "obs") { + throw new Error( + 'Invalid `obsidian.command`: "obs". Use "obsidian" to avoid launching the wrong binary.', + ); + } + return normalized; +} + +function resolveVaultPath(value: string | undefined): string | undefined { + if (!value) { + return undefined; + } + if (value.startsWith("~")) { + const home = process.env.HOME; + return home ? path.join(home, value.slice(1)) : value; + } + return value; +} + +async function fileExists(filePath: string): Promise { + try { + await readFile(filePath, "utf8"); + return true; + } catch { + return false; + } +} + +function candidateProjectConfigPaths(startDir: string): string[] { + const candidates: string[] = []; + let current = path.resolve(startDir); + + while (true) { + candidates.push(path.join(current, "shpit.toml")); + candidates.push(path.join(current, ".shpit.toml")); + const parent = path.dirname(current); + if (parent === current) { + break; + } + current = parent; + } + + return candidates; +} + +async function findProjectConfigPath(startDir: string): Promise { + for (const candidate of candidateProjectConfigPaths(startDir)) { + if (await fileExists(candidate)) { + return candidate; + } + } + return undefined; +} + +function getGlobalConfigPath(): string | undefined { + const home = process.env.HOME; + if (!home) { + return undefined; + } + return path.join(home, ".config", "opencode", "shpit.toml"); +} + +async function loadConfigAtPath(filePath: string | undefined): Promise { + if (!filePath || !(await fileExists(filePath))) { + return {}; + } + + const content = await readFile(filePath, "utf8"); + return toPartialConfig(parseToml(content, filePath)); +} + +function resolveFinalConfig(partial: PartialShpitConfig): ResolvedShpitConfig["obsidian"] { + const obsidian = partial.obsidian ?? 
{};
  const catalogMode = obsidian.catalogMode ?? "date";
  // Runtime guard: the TOML value was cast, so validate before trusting it.
  if (catalogMode !== "date" && catalogMode !== "repo") {
    throw new Error(`Invalid obsidian.catalog_mode: ${catalogMode}. Expected "date" or "repo".`);
  }

  return {
    enabled: obsidian.enabled ?? false,
    command: normalizeObsidianCommand(obsidian.command),
    vaultPath: resolveVaultPath(obsidian.vaultPath),
    notesRoot: (obsidian.notesRoot ?? DEFAULT_NOTES_ROOT).trim() || DEFAULT_NOTES_ROOT,
    catalogMode,
    openAfterCatalog: obsidian.openAfterCatalog ?? false,
  };
}

/**
 * Parses dotenv-style `KEY=value` lines into a map.
 *
 * Blank lines, `#` comments, lines without `=`, and keys that are not valid
 * identifiers are skipped. Inline comments are stripped quote-aware; quoted
 * values are unwrapped (double quotes via JSON.parse when well-formed, with
 * a plain slice fallback).
 */
function parseEnvFile(content: string): Map<string, string> {
  const env = new Map<string, string>();
  const lines = content.split(/\r?\n/);

  for (const rawLine of lines) {
    const line = rawLine.trim();
    if (!line || line.startsWith("#")) {
      continue;
    }

    const separatorIndex = line.indexOf("=");
    if (separatorIndex === -1) {
      continue;
    }

    const key = line.slice(0, separatorIndex).trim();
    if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(key)) {
      continue;
    }

    const rawValue = line.slice(separatorIndex + 1).trim();
    const valueWithoutComment = stripInlineComment(rawValue);

    let value = valueWithoutComment;
    // Fix: require length >= 2 so a value that is a lone `"` or `'` is kept
    // verbatim instead of being sliced down to an empty string.
    if (
      valueWithoutComment.length >= 2 &&
      valueWithoutComment.startsWith('"') &&
      valueWithoutComment.endsWith('"')
    ) {
      try {
        value = JSON.parse(valueWithoutComment) as string;
      } catch {
        value = valueWithoutComment.slice(1, -1);
      }
    } else if (
      valueWithoutComment.length >= 2 &&
      valueWithoutComment.startsWith("'") &&
      valueWithoutComment.endsWith("'")
    ) {
      value = valueWithoutComment.slice(1, -1);
    }

    env.set(key, value);
  }

  return env;
}

/** Loads and parses a dotenv file; missing/unset paths yield an empty map. */
async function loadEnvFile(filePath: string | undefined): Promise<Map<string, string>> {
  if (!filePath || !(await fileExists(filePath))) {
    return new Map();
  }
  const content = await readFile(filePath, "utf8");
  return parseEnvFile(content);
}

/**
 * Resolves the effective shpit config by merging the global config
 * (~/.config/opencode/shpit.toml) with the nearest project config, project
 * values winning, then applying defaults and validation.
 */
export async function resolveShpitConfig(cwd = process.cwd()): Promise<ResolvedShpitConfig> {
  const globalConfigPath = getGlobalConfigPath();
  const projectConfigPath = await findProjectConfigPath(cwd);

  const [globalConfig, projectConfig] = await Promise.all([
    loadConfigAtPath(globalConfigPath),
    loadConfigAtPath(projectConfigPath),
  ]);

  const merged = mergePartialConfig(globalConfig, projectConfig);

  return {
    paths: {
      globalConfigPath:
        globalConfigPath && (await fileExists(globalConfigPath)) ? globalConfigPath : undefined,
      projectConfigPath,
    },
    obsidian: resolveFinalConfig(merged),
  };
}

/**
 * Loads env vars from the global and project `.env` files into process.env.
 * Project values override global values, and vars already present in the
 * shell environment are never clobbered.
 */
export async function loadConfiguredEnv(cwd = process.cwd()): Promise<LoadedEnvInfo> {
  const home = process.env.HOME;
  const globalEnvPath = home ? path.join(home, ".config", "opencode", ".env") : undefined;
  const projectConfigPath = await findProjectConfigPath(cwd);
  const projectEnvPath = projectConfigPath
    ? path.join(path.dirname(projectConfigPath), ".env")
    : undefined;

  const [globalEnv, projectEnv] = await Promise.all([
    loadEnvFile(globalEnvPath),
    loadEnvFile(projectEnvPath),
  ]);

  // Merge order: global first, then project, so project entries win.
  const merged = new Map<string, string>();
  for (const [key, value] of globalEnv.entries()) {
    merged.set(key, value);
  }
  for (const [key, value] of projectEnv.entries()) {
    merged.set(key, value);
  }

  const keysLoaded: string[] = [];
  for (const [key, value] of merged.entries()) {
    if (process.env[key] !== undefined) {
      continue;
    }
    process.env[key] = value;
    keysLoaded.push(key);
  }

  return {
    globalEnvPath: globalEnvPath && (await fileExists(globalEnvPath)) ? globalEnvPath : undefined,
    projectEnvPath:
      projectEnvPath && (await fileExists(projectEnvPath)) ?
projectEnvPath : undefined, + keysLoaded: keysLoaded.sort(), + }; +} + +export const __testables = { + parseEnvFile, + parseToml, + resolveFinalConfig, +}; diff --git a/src/start-opencode-daytona.ts b/src/start-opencode-daytona.ts index e984529..9c5b2a5 100644 --- a/src/start-opencode-daytona.ts +++ b/src/start-opencode-daytona.ts @@ -4,6 +4,8 @@ import process from "node:process"; import { setTimeout as sleep } from "node:timers/promises"; import { parseArgs } from "node:util"; import { Daytona, type Sandbox } from "@daytonaio/sdk"; +import { buildInstallOpencodeCommand } from "./opencode-cli.js"; +import { loadConfiguredEnv } from "./shpit-config.js"; type CliOptions = { port: number; @@ -319,6 +321,13 @@ async function streamCommandLogsUntilExit(params: { } async function main(): Promise { + const loadedEnv = await loadConfiguredEnv(); + if (loadedEnv.keysLoaded.length > 0) { + console.log( + `[local] Loaded ${loadedEnv.keysLoaded.length} env var(s) from config (.env) files.`, + ); + } + const options = parseCliOptions(); const apiKey = requireEnv("DAYTONA_API_KEY"); const apiUrl = process.env.DAYTONA_API_URL; @@ -431,7 +440,7 @@ async function main(): Promise { console.log("[local] Installing latest OpenCode CLI in sandbox..."); await runCommand( sandbox, - 'if command -v bun >/dev/null 2>&1; then bun add -g opencode-ai@latest; else npm install -g opencode-ai@latest --prefix "$HOME/.local"; fi', + buildInstallOpencodeCommand(), "Install OpenCode CLI", options.installTimeoutSec, ); diff --git a/tsconfig.build.json b/tsconfig.build.json new file mode 100644 index 0000000..5232474 --- /dev/null +++ b/tsconfig.build.json @@ -0,0 +1,11 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "dist", + "rootDir": "src", + "declaration": false, + "sourceMap": false + }, + "include": ["src/**/*.ts"], + "exclude": ["src/**/*.test.ts"] +} diff --git a/tsconfig.json b/tsconfig.json index ca90a2b..f944e7d 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ 
-9,5 +9,6 @@ "resolveJsonModule": true, "forceConsistentCasingInFileNames": true }, - "include": ["src/**/*.ts"] + "include": ["src/**/*.ts"], + "exclude": ["src/**/*.test.ts"] }