# ─────────────────────────────────────────────────────────────
# .env.example — copy to .env and fill in real values.
# ─────────────────────────────────────────────────────────────
# ── Neo4j (required for Graphiti memory engine) ─────────────
NEO4J_PASSWORD="your-secure-password"
NEO4J_URI="bolt://localhost:7687"
NEO4J_USER="neo4j"
# ── LiteLLM Proxy (required for cloud model routing) ────────
LITELLM_MASTER_KEY="sk-loom-internal-master"
# ── Azure AI Foundry (heavy tier primary) ────────────────────
# Deploy a serverless model in Azure AI Foundry, copy the endpoint + key
# Supports: GPT-4o, GPT-5, Phi, Mistral, Llama, and 1600+ models
AZURE_FOUNDRY_KEY="your-azure-serverless-key"
AZURE_HEAVY_BASE="https://your-deployment.models.ai.azure.com/v1"
# ── Gemini (heavy tier fallback + light tier primary) ────────
GEMINI_API_KEY="your-gemini-api-key"
# ── Ollama (local tier + light tier fallback) ────────────────
OLLAMA_BASE_URL=http://localhost:11434
# ── Model Overrides ─────────────────────────────────────────
# Override tier routing (default routes configured in litellm_config.yaml)
# LOOM_HEAVY_MODEL="heavy/default"
# LOOM_LIGHT_MODEL="light/default"
# Local agent models (for /loom:agent and LocalAgent)
LOOM_LOCAL_ANALYSIS_MODEL=deepseek-coder-v2:16b
LOOM_LOCAL_CREATIVE_MODEL=gemma3n:e2b
# Agent tool-calling model (must support OpenAI-compatible tool calling)
# LOOM_AGENT_TOOL_MODEL=qwen3:4b
# LOOM_AGENT_ANALYSIS_MODEL=deepseek-coder-v2:16b
# ── Craft Mode ──────────────────────────────────────────────
# LOOM_CRAFT_MODE=cloud # "cloud" (LiteLLM proxy) or "local" (Ollama agent)
# ── Background Analysis ─────────────────────────────────────
LOOM_BACKGROUND_INTERVAL=30
# ── Nia AI (codebase grounding for Dynamic Agent Fabric) ─────
# Get your key at https://app.trynia.ai
# NIA_API_KEY=your-nia-api-key
# NIA_ENABLED=true