Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions crosslink/src/commands/config_registry.rs
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,13 @@ pub static REGISTRY: &[ConfigKey] = &[
group: ConfigGroup::Agents,
hot_swappable: true,
},
ConfigKey {
key: "kickoff.allowed_tools",
config_type: ConfigType::StringArray,
description: "Extra Bash tool patterns appended to the kickoff agent's --allowedTools list",
group: ConfigGroup::Agents,
hot_swappable: true,
},
ConfigKey {
key: "signing_enforcement",
config_type: ConfigType::Enum(&["disabled", "audit", "enforced"]),
Expand Down
111 changes: 105 additions & 6 deletions crosslink/src/commands/kickoff/helpers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -119,6 +119,104 @@ pub(crate) fn extract_criteria(
}
}

/// Subdirectories skipped by [`has_manifest`] when scanning one level deep.
///
/// These contain vendored / build / cache artifacts whose manifests should
/// never light up toolchain support for the parent project (e.g. a stray
/// `Cargo.toml` deep inside `node_modules/` is not signal).
///
/// Note: [`has_manifest`] skips *every* hidden (dot-prefixed) directory
/// before consulting this list, so the dotted entries below (`.git`,
/// `.crosslink`, `.cache`, ...) are already excluded by that check. They are
/// kept here purely as documentation of *why* those dirs must never feed
/// detection -- e.g. `.crosslink/`'s own config is not project signal.
const SKIP_SCAN_DIRS: &[&str] = &[
    "node_modules",
    "target",
    "dist",
    "build",
    "out",
    "vendor",
    "venv",
    "env",
    "__pycache__",
    ".git",
    ".worktrees",
    ".crosslink",
    ".claude",
    ".venv",
    ".env",
    ".cache",
    ".idea",
    ".vscode",
    ".pytest_cache",
    ".mypy_cache",
    ".ruff_cache",
    ".cargo",
    ".rustup",
];

/// Return `true` when `repo_root` contains `filename` at the root or exactly
/// one directory level deep (skipping hidden and vendored/build dirs).
///
/// Catches the common monorepo layout where the canonical build manifest
/// lives in a named subdirectory -- e.g. `crosslink/Cargo.toml` here, or
/// `<repo>/santana-core/Cargo.toml` in santana. Without this, kickoff
/// agents in such repos see no Rust/Python/etc. tools in `--allowedTools`
/// and end up sandbox-denied for `cargo`, `uv`, etc. See GH#584.
///
/// An unreadable `repo_root` (missing, permission-denied) yields `false`
/// rather than an error: detection is best-effort.
fn has_manifest(repo_root: &Path, filename: &str) -> bool {
    if repo_root.join(filename).is_file() {
        return true;
    }
    let Ok(entries) = std::fs::read_dir(repo_root) else {
        return false;
    };
    for entry in entries.flatten() {
        let path = entry.path();
        if !path.is_dir() {
            continue;
        }
        // `DirEntry::file_name` gives the final component directly; no need
        // to re-derive it from the joined path. Non-UTF-8 names can't match
        // any manifest filename we look for, so they're skipped.
        let file_name = entry.file_name();
        let Some(name) = file_name.to_str() else {
            continue;
        };
        // Skip ALL hidden dirs unconditionally. This already covers the
        // dotted entries in SKIP_SCAN_DIRS, so those never reach the
        // contains() check below -- the list's dotdirs are documentation,
        // not the enforcement point.
        if name.starts_with('.') {
            continue;
        }
        if SKIP_SCAN_DIRS.contains(&name) {
            continue;
        }
        if path.join(filename).is_file() {
            return true;
        }
    }
    false
}

/// Read additional `--allowedTools` patterns from
/// `hook-config.json`'s `kickoff.allowed_tools` array.
///
/// Best-effort: a missing file, unparseable JSON, or an absent/non-array
/// key all yield an empty vector rather than an error. Project owners use
/// this to extend the kickoff agent's tool surface beyond what convention
/// detection picks up automatically -- e.g. when the project's manifests
/// live two or more levels deep, or when the agent needs a tool the
/// auto-detect doesn't know about. See GH#584.
pub(crate) fn read_kickoff_allowed_tools(crosslink_dir: &Path) -> Vec<String> {
    let raw = match std::fs::read_to_string(crosslink_dir.join("hook-config.json")) {
        Ok(content) => content,
        Err(_) => return Vec::new(),
    };
    let parsed: serde_json::Value = match serde_json::from_str(&raw) {
        Ok(value) => value,
        Err(_) => return Vec::new(),
    };
    // JSON Pointer walks both levels in one step; non-object intermediates
    // resolve to None just like the chained get()s would.
    let Some(tools) = parsed
        .pointer("/kickoff/allowed_tools")
        .and_then(serde_json::Value::as_array)
    else {
        return Vec::new();
    };
    // Non-string entries are silently dropped, matching the best-effort
    // contract above.
    tools
        .iter()
        .filter_map(serde_json::Value::as_str)
        .map(str::to_owned)
        .collect()
}

/// Detect project conventions from the repo root.
pub(crate) fn detect_conventions(repo_root: &Path) -> ProjectConventions {
let mut conv = ProjectConventions {
Expand All @@ -128,7 +226,7 @@ pub(crate) fn detect_conventions(repo_root: &Path) -> ProjectConventions {
};

// Rust
if repo_root.join("Cargo.toml").is_file() || repo_root.join("crosslink/Cargo.toml").is_file() {
if has_manifest(repo_root, "Cargo.toml") {
conv.test_command = Some("cargo test".to_string());
conv.lint_commands
.push("cargo clippy -- -D warnings".to_string());
Expand All @@ -137,7 +235,7 @@ pub(crate) fn detect_conventions(repo_root: &Path) -> ProjectConventions {
}

// Node/TypeScript
if repo_root.join("package.json").is_file() {
if has_manifest(repo_root, "package.json") {
if conv.test_command.is_none() {
conv.test_command = Some("npm test".to_string());
}
Expand All @@ -146,17 +244,18 @@ pub(crate) fn detect_conventions(repo_root: &Path) -> ProjectConventions {
}

// Python
if repo_root.join("pyproject.toml").is_file() || repo_root.join("requirements.txt").is_file() {
if has_manifest(repo_root, "pyproject.toml") || has_manifest(repo_root, "requirements.txt") {
if conv.test_command.is_none() {
conv.test_command = Some("uv run pytest".to_string());
}
conv.lint_commands.push("ruff check .".to_string());
conv.allowed_tools.push("Bash(uv *)".to_string());
conv.allowed_tools.push("Bash(python3 *)".to_string());
conv.allowed_tools.push("Bash(pytest *)".to_string());
}

// Go
if repo_root.join("go.mod").is_file() {
if has_manifest(repo_root, "go.mod") {
if conv.test_command.is_none() {
conv.test_command = Some("go test ./...".to_string());
}
Expand All @@ -165,12 +264,12 @@ pub(crate) fn detect_conventions(repo_root: &Path) -> ProjectConventions {
}

// Just
if repo_root.join("justfile").is_file() || repo_root.join("Justfile").is_file() {
if has_manifest(repo_root, "justfile") || has_manifest(repo_root, "Justfile") {
conv.allowed_tools.push("Bash(just *)".to_string());
}

// Make
if repo_root.join("Makefile").is_file() || repo_root.join("makefile").is_file() {
if has_manifest(repo_root, "Makefile") || has_manifest(repo_root, "makefile") {
conv.allowed_tools.push("Bash(make *)".to_string());
}

Expand Down
23 changes: 23 additions & 0 deletions crosslink/src/commands/kickoff/launch.rs
Original file line number Diff line number Diff line change
Expand Up @@ -627,10 +627,17 @@ pub(super) fn launch_local(
/// document passed via `--doc`. It is overlay-bind-mounted read-only on top of
/// the writable workspace mount so the agent physically cannot edit the
/// canonical design input. See GH#580.
///
/// `host_repo_root` is the host path to the main repo (the worktree's parent
/// repo). The main repo's `.git/` directory is bind-mounted at its host
/// absolute path inside the container so the worktree's `.git` file -- which
/// contains an absolute `gitdir: <host>/.git/worktrees/<branch>/` reference
/// -- resolves and git operations inside the container work. See GH#584.
#[allow(clippy::too_many_arguments)]
pub(super) fn launch_container(
runtime: &ContainerMode,
worktree_dir: &Path,
host_repo_root: &Path,
image: &str,
agent_id: &str,
model: &str,
Expand Down Expand Up @@ -690,6 +697,22 @@ pub(super) fn launch_container(
format!("AGENT_ID={}", agent_id),
];

// Bind-mount the main repo's `.git/` at its host absolute path. The
// worktree's `.git` is a single file containing an absolute
// `gitdir: <host>/.git/worktrees/<branch>/` pointer; without this mount
// that pointer dangles inside the container and every git operation
// (status, diff, commit, sync) fails. We mount rw because the per-
// worktree subdir under `.git/worktrees/<branch>/` legitimately needs
// writes (HEAD, index, refs) for the agent to commit. Hook policy still
// blocks the genuinely destructive ops (`push --force`, `reset --hard`,
// etc.) regardless of mount mode. See GH#584.
let host_git_dir = host_repo_root.join(".git");
if host_git_dir.exists() {
let git_path = host_git_dir.to_string_lossy();
args.push("-v".to_string());
args.push(format!("{git_path}:{git_path}:rw"));
}

// Pass UID/GID to container for user remapping (non-Windows only)
if let Some((uid, gid)) = &uid_gid {
args.extend([
Expand Down
11 changes: 9 additions & 2 deletions crosslink/src/commands/kickoff/run.rs
Original file line number Diff line number Diff line change
Expand Up @@ -101,8 +101,14 @@ pub fn run(
std::fs::write(worktree_dir.join(".kickoff-slug"), &compact_name)
.context("Failed to write .kickoff-slug sentinel")?;

// 4. Detect project conventions
let conventions = detect_conventions(&root);
// 4. Detect project conventions, then extend with any explicit additions
// from `hook-config.json`'s `kickoff.allowed_tools` array so projects
// can teach the kickoff agent about tools detection doesn't pick up
// automatically. See GH#584.
let mut conventions = detect_conventions(&root);
conventions
.allowed_tools
.extend(read_kickoff_allowed_tools(crosslink_dir));

// 5. Build the prompt
let prompt = build_prompt(opts, issue_id, &branch_name, &conventions);
Expand Down Expand Up @@ -217,6 +223,7 @@ pub fn run(
let container_id = launch_container(
mode,
&worktree_dir,
&root,
opts.image,
&agent_id,
opts.model,
Expand Down
134 changes: 134 additions & 0 deletions crosslink/src/commands/kickoff/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -258,6 +258,140 @@ fn test_detect_conventions_node() {
assert!(conv.allowed_tools.contains(&"Bash(npm *)".to_string()));
}

// --- GH#584: convention detection scans one level deep ---

#[test]
fn test_detect_conventions_rust_in_subdir() {
    // GH#584 (santana-style monorepo): the canonical Cargo.toml sits in a
    // named subdirectory rather than at the repo root. Detection must still
    // light up the Rust toolchain for the kickoff agent; the old root-only
    // check missed exactly this layout.
    let tmp = tempfile::tempdir().unwrap();
    let crate_dir = tmp.path().join("santana-core");
    std::fs::create_dir_all(&crate_dir).unwrap();
    std::fs::write(crate_dir.join("Cargo.toml"), "[package]").unwrap();

    let conv = detect_conventions(tmp.path());
    let cargo = "Bash(cargo *)".to_string();
    assert!(
        conv.allowed_tools.contains(&cargo),
        "expected Bash(cargo *) when Cargo.toml is one level deep, got {:?}",
        conv.allowed_tools
    );
}

#[test]
fn test_detect_conventions_rust_two_levels_deep_not_detected() {
    // The scan contract stops at depth one. Going deeper would risk false
    // positives from vendored crates buried in unusual tree structures.
    let tmp = tempfile::tempdir().unwrap();
    let nested = tmp.path().join("crates").join("foo");
    std::fs::create_dir_all(&nested).unwrap();
    std::fs::write(nested.join("Cargo.toml"), "[package]").unwrap();

    let conv = detect_conventions(tmp.path());
    let cargo = "Bash(cargo *)".to_string();
    assert!(
        !conv.allowed_tools.contains(&cargo),
        "two-levels-deep Cargo.toml should not trigger detection; got {:?}",
        conv.allowed_tools
    );
}

#[test]
fn test_detect_conventions_python_in_subdir_with_pytest() {
    // Python manifests one level deep must enable uv AND pytest.
    let tmp = tempfile::tempdir().unwrap();
    let svc = tmp.path().join("python-svc");
    std::fs::create_dir_all(&svc).unwrap();
    std::fs::write(svc.join("pyproject.toml"), "[project]").unwrap();

    let conv = detect_conventions(tmp.path());
    let has = |pattern: &str| conv.allowed_tools.contains(&pattern.to_string());
    assert!(has("Bash(uv *)"));
    assert!(
        has("Bash(pytest *)"),
        "GH#584 explicitly mentioned pytest as a missing tool"
    );
}

#[test]
fn test_detect_conventions_skips_node_modules() {
    // SKIP_SCAN_DIRS guard: a vendored package shipping its own Cargo.toml
    // under node_modules/ must not turn on cargo tooling for the project.
    let tmp = tempfile::tempdir().unwrap();
    std::fs::write(tmp.path().join("package.json"), "{}").unwrap();
    let vendored = tmp.path().join("node_modules").join("weird-pkg");
    std::fs::create_dir_all(&vendored).unwrap();
    std::fs::write(vendored.join("Cargo.toml"), "[package]").unwrap();

    let conv = detect_conventions(tmp.path());
    let cargo = "Bash(cargo *)".to_string();
    assert!(
        !conv.allowed_tools.contains(&cargo),
        "Cargo.toml inside node_modules/ must not enable cargo tools"
    );
}

#[test]
fn test_detect_conventions_skips_hidden_dirs() {
    // Dotdirs (caches, tool state) are never scanned for manifests.
    let tmp = tempfile::tempdir().unwrap();
    let hidden = tmp.path().join(".cache").join("leaky");
    std::fs::create_dir_all(&hidden).unwrap();
    std::fs::write(hidden.join("Cargo.toml"), "[package]").unwrap();

    let conv = detect_conventions(tmp.path());
    let cargo = "Bash(cargo *)".to_string();
    assert!(
        !conv.allowed_tools.contains(&cargo),
        "manifests under hidden dirs must not enable tooling"
    );
}

#[test]
fn test_read_kickoff_allowed_tools_returns_empty_when_missing() {
    // A directory with no hook-config.json at all yields the empty set.
    let tmp = tempfile::tempdir().unwrap();
    let tools = read_kickoff_allowed_tools(tmp.path());
    assert!(tools.is_empty());
}

#[test]
fn test_read_kickoff_allowed_tools_returns_configured_array() {
    // Happy path: a well-formed kickoff.allowed_tools array is returned
    // verbatim, preserving order.
    let tmp = tempfile::tempdir().unwrap();
    let config = r#"{
    "kickoff": {
        "allowed_tools": ["Bash(cargo *)", "Bash(make deploy *)"]
    }
}"#;
    std::fs::write(tmp.path().join("hook-config.json"), config).unwrap();

    let expected = vec![
        "Bash(cargo *)".to_string(),
        "Bash(make deploy *)".to_string(),
    ];
    assert_eq!(read_kickoff_allowed_tools(tmp.path()), expected);
}

#[test]
fn test_read_kickoff_allowed_tools_returns_empty_when_key_absent() {
    // Valid JSON without a kickoff.allowed_tools key is treated as "none".
    let tmp = tempfile::tempdir().unwrap();
    let path = tmp.path().join("hook-config.json");
    std::fs::write(&path, r#"{"tracking_mode": "strict"}"#).unwrap();

    assert!(read_kickoff_allowed_tools(tmp.path()).is_empty());
}

#[test]
fn test_read_kickoff_allowed_tools_tolerates_malformed_json() {
    // Best-effort contract: garbage config degrades to empty, never panics.
    let tmp = tempfile::tempdir().unwrap();
    let path = tmp.path().join("hook-config.json");
    std::fs::write(&path, "not valid json").unwrap();

    assert!(read_kickoff_allowed_tools(tmp.path()).is_empty());
}

#[test]
fn test_rand_suffix_range() {
let s = rand_suffix();
Expand Down
10 changes: 10 additions & 0 deletions docs_src/reference/hook-config.qmd
Original file line number Diff line number Diff line change
Expand Up @@ -120,6 +120,16 @@ Explicit absolute path to the `crosslink` binary used by the Python hook scripts

Estimated character budget the `prompt-guard.py` hook uses to warn before context exhaustion. Integer. Default: `1000000` (1M chars, roughly aligned with a 200k-token window). Set to `0` (or any non-positive value) to disable the guard entirely.

### `kickoff.allowed_tools`

Extra `--allowedTools` patterns appended to the kickoff agent's CLI invocation. String array. Default: `[]`. Use when convention detection misses a tool the agent needs (e.g. manifests buried more than one directory deep, custom build wrappers, MCP server tool names). The list is *additive* to the auto-detected toolset -- it adds, it does not replace -- and entries should be in Claude Code's allowedTools pattern format:

```bash
crosslink config set kickoff.allowed_tools '["Bash(cargo *)", "Bash(make deploy *)", "mcp__tidewave__execute_sql_query"]'
```

Auto-detection already handles `Bash(cargo *)`, `Bash(npm *)`, `Bash(uv *)`, `Bash(pytest *)`, `Bash(go *)`, `Bash(just *)`, and `Bash(make *)` when the corresponding manifest exists at the repo root or one directory level deep -- reach for this key when your layout puts manifests deeper, or when you need a pattern detection doesn't cover.

### `sandbox.command` {#sandbox-command}

Optional sandbox-wrapper command applied to local kickoff launches. String. Default: unset (no sandboxing). When set, the kickoff agent's `claude` invocation is prefixed with this command; the value must be a binary on `$PATH` accepting the command to run as its trailing argument. See [Local sandbox alternative](../guides/container-agents.qmd#local-sandbox-alternative) in the container-agents guide for usage notes.
Expand Down
Loading