Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions packages/cli/src/repowise/cli/commands/init_cmd.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,8 @@ def _resolve_embedder(embedder_flag: str | None) -> str:
return "gemini"
if os.environ.get("OPENAI_API_KEY"):
return "openai"
if os.environ.get("OPENROUTER_API_KEY"):
return "openrouter"
return "mock"


Expand Down
24 changes: 19 additions & 5 deletions packages/cli/src/repowise/cli/commands/serve_cmd.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ def _setup_embedder() -> None:
# Detect which providers already have keys in the environment.
has_gemini = bool(os.environ.get("GEMINI_API_KEY") or os.environ.get("GOOGLE_API_KEY"))
has_openai = bool(os.environ.get("OPENAI_API_KEY"))
has_openrouter = bool(os.environ.get("OPENROUTER_API_KEY"))

console.print(
"\n[bold]Chat & search require an embedder.[/bold] "
Expand All @@ -51,18 +52,24 @@ def _setup_embedder() -> None:
labels = []
if has_gemini:
options.append("gemini")
labels.append("[1] gemini [green]✓ key set[/green]")
labels.append("[1] gemini [green]✓ key set[/green]")
else:
options.append("gemini")
labels.append("[1] gemini [dim]needs GEMINI_API_KEY / GOOGLE_API_KEY[/dim]")
labels.append("[1] gemini [dim]needs GEMINI_API_KEY / GOOGLE_API_KEY[/dim]")
if has_openai:
options.append("openai")
labels.append("[2] openai [green]✓ key set[/green]")
labels.append("[2] openai [green]✓ key set[/green]")
else:
options.append("openai")
labels.append("[2] openai [dim]needs OPENAI_API_KEY[/dim]")
labels.append("[2] openai [dim]needs OPENAI_API_KEY[/dim]")
if has_openrouter:
options.append("openrouter")
labels.append("[3] openrouter [green]✓ key set[/green]")
else:
options.append("openrouter")
labels.append("[3] openrouter [dim]needs OPENROUTER_API_KEY[/dim]")
options.append("skip")
labels.append("[3] skip [dim]no chat/search[/dim]")
labels.append(f"[{len(options)}] skip [dim]no chat/search[/dim]")

for label in labels:
console.print(f" {label}")
Expand Down Expand Up @@ -106,6 +113,11 @@ def _get_or_prompt_api_key(embedder: str) -> str:
if key:
return key
return click.prompt(" OPENAI_API_KEY", default="", show_default=False).strip()
if embedder == "openrouter":
key = os.environ.get("OPENROUTER_API_KEY", "")
if key:
return key
return click.prompt(" OPENROUTER_API_KEY", default="", show_default=False).strip()
return ""


Expand All @@ -116,6 +128,8 @@ def _set_api_key_env(embedder: str, key: str) -> None:
os.environ.setdefault("GEMINI_API_KEY", key)
elif embedder == "openai":
os.environ.setdefault("OPENAI_API_KEY", key)
elif embedder == "openrouter":
os.environ.setdefault("OPENROUTER_API_KEY", key)


def _save_global_embedder(embedder: str, api_key: str) -> None:
Expand Down
12 changes: 11 additions & 1 deletion packages/cli/src/repowise/cli/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -249,6 +249,8 @@ def resolve_provider(
os.environ.get("GEMINI_API_KEY") or os.environ.get("GOOGLE_API_KEY")
):
kwargs["api_key"] = os.environ.get("GEMINI_API_KEY") or os.environ.get("GOOGLE_API_KEY")
elif provider_name == "openrouter" and os.environ.get("OPENROUTER_API_KEY"):
kwargs["api_key"] = os.environ["OPENROUTER_API_KEY"]
elif provider_name == "ollama" and os.environ.get("OLLAMA_BASE_URL"):
kwargs["base_url"] = os.environ["OLLAMA_BASE_URL"]

Expand All @@ -269,6 +271,13 @@ def resolve_provider(
else {"api_key": os.environ["OPENAI_API_KEY"]}
)
return get_provider("openai", **kwargs)
if os.environ.get("OPENROUTER_API_KEY") and os.environ["OPENROUTER_API_KEY"].strip():
kwargs = (
{"model": model, "api_key": os.environ["OPENROUTER_API_KEY"]}
if model
else {"api_key": os.environ["OPENROUTER_API_KEY"]}
)
return get_provider("openrouter", **kwargs)
if os.environ.get("OLLAMA_BASE_URL") and os.environ["OLLAMA_BASE_URL"].strip():
kwargs = (
{"model": model, "base_url": os.environ["OLLAMA_BASE_URL"]}
Expand All @@ -285,7 +294,7 @@ def resolve_provider(

raise click.ClickException(
"No provider configured. Use --provider, set REPOWISE_PROVIDER, "
"or set ANTHROPIC_API_KEY / OPENAI_API_KEY / OLLAMA_BASE_URL / GEMINI_API_KEY / GOOGLE_API_KEY."
"or set ANTHROPIC_API_KEY / OPENAI_API_KEY / OPENROUTER_API_KEY / OLLAMA_BASE_URL / GEMINI_API_KEY / GOOGLE_API_KEY."
)


Expand Down Expand Up @@ -319,6 +328,7 @@ def _is_env_var_exists(var_name: str) -> bool:
provider_env_vars = {
"anthropic": ["ANTHROPIC_API_KEY"],
"openai": ["OPENAI_API_KEY"],
"openrouter": ["OPENROUTER_API_KEY"],
"gemini": ["GEMINI_API_KEY", "GOOGLE_API_KEY"], # Either one
"ollama": ["OLLAMA_BASE_URL"],
"litellm": ["LITELLM_API_KEY"], # May need others depending on backend
Expand Down
3 changes: 3 additions & 0 deletions packages/cli/src/repowise/cli/ui.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,7 @@ def print_phase_header(
"openai": "gpt-4.1",
"anthropic": "claude-sonnet-4-6",
"ollama": "llama3.2",
"openrouter": "anthropic/claude-sonnet-4.6",
"litellm": "groq/llama-3.1-70b-versatile",
}

Expand All @@ -89,13 +90,15 @@ def print_phase_header(
"openai": "OPENAI_API_KEY",
"anthropic": "ANTHROPIC_API_KEY",
"ollama": "OLLAMA_BASE_URL",
"openrouter": "OPENROUTER_API_KEY",
}

_PROVIDER_SIGNUP: dict[str, str] = {
"gemini": "https://aistudio.google.com/apikey",
"openai": "https://platform.openai.com/api-keys",
"anthropic": "https://console.anthropic.com/settings/keys",
"ollama": "https://ollama.com/download",
"openrouter": "https://openrouter.ai/keys",
}


Expand Down
2 changes: 1 addition & 1 deletion packages/core/src/repowise/core/providers/__init__.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
"""repowise provider package.

Sub-packages:
llm/ — LLM providers (Anthropic, OpenAI, Gemini, Ollama, LiteLLM)
llm/ — LLM providers (Anthropic, OpenAI, OpenRouter, Gemini, Ollama, LiteLLM)
embedding/ — Embedding providers (OpenAI, Gemini, Mock)

Preferred entry points:
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
"""OpenRouter embedding support for repowise semantic search.

Uses the OpenAI-compatible endpoint at ``https://openrouter.ai/api/v1``.
No additional pip install required — uses the ``openai`` package.

Default model: google/gemini-embedding-001 (768 dims)

Usage:
from repowise.core.providers.embedding.openrouter import OpenRouterEmbedder

embedder = OpenRouterEmbedder(api_key="sk-or-...")
vectors = await embedder.embed(["some text"])
"""

from __future__ import annotations

import asyncio
import math
import os


class OpenRouterEmbedder:
"""OpenRouter embedding adapter implementing the repowise Embedder protocol.

Args:
api_key: OpenRouter API key. Falls back to OPENROUTER_API_KEY env var.
model: Embedding model name. Default: "google/gemini-embedding-001".
"""

_DIMS: dict[str, int] = {
"google/gemini-embedding-001": 768,
"openai/text-embedding-3-small": 1536,
"openai/text-embedding-3-large": 3072,
}

_DEFAULT_TIMEOUT: float = 10.0

def __init__(
self,
api_key: str | None = None,
model: str = "google/gemini-embedding-001",
timeout: float = _DEFAULT_TIMEOUT,
) -> None:
self._api_key = api_key or os.environ.get("OPENROUTER_API_KEY")
if not self._api_key:
raise ValueError(
"OpenRouter API key required. Pass api_key= or set OPENROUTER_API_KEY env var."
)
self._model = model
self._timeout = timeout
self._client: object | None = None

@property
def dimensions(self) -> int:
return self._DIMS.get(self._model, 768)

async def embed(self, texts: list[str]) -> list[list[float]]:
"""Embed a batch of texts using OpenRouter.

Runs the synchronous SDK call in a thread pool to avoid blocking the
asyncio event loop.
"""
if not texts:
return []

model = self._model
timeout = self._timeout

def _embed_sync() -> list[list[float]]:
import openai

if self._client is None:
self._client = openai.OpenAI(
api_key=self._api_key,
base_url="https://openrouter.ai/api/v1",
timeout=timeout,
)
response = self._client.embeddings.create(model=model, input=texts) # type: ignore[union-attr]
raw_vectors = [list(item.embedding) for item in response.data]
return [_l2_normalize(v) for v in raw_vectors]

return await asyncio.to_thread(_embed_sync)


def _l2_normalize(vec: list[float]) -> list[float]:
    """L2-normalize a vector to unit Euclidean length.

    A zero vector is returned unchanged: the norm is treated as 1.0 to
    avoid division by zero.
    """
    squared_sum = 0.0
    for component in vec:
        squared_sum += component * component
    scale = math.sqrt(squared_sum) or 1.0
    return [component / scale for component in vec]
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,9 @@

_BUILTIN_EMBEDDERS: dict[str, tuple[str, str]] = {
"openai": ("repowise.core.providers.embedding.openai", "OpenAIEmbedder"),
"gemini": ("repowise.core.providers.embedding.gemini", "GeminiEmbedder"),
"mock": ("repowise.core.providers.embedding.base", "MockEmbedder"),
"gemini": ("repowise.core.providers.embedding.gemini", "GeminiEmbedder"),
"openrouter": ("repowise.core.providers.embedding.openrouter", "OpenRouterEmbedder"),
"mock": ("repowise.core.providers.embedding.base", "MockEmbedder"),
}

_custom_embedders: dict[str, Callable[..., Embedder]] = {}
Expand Down Expand Up @@ -80,6 +81,7 @@ def get_embedder(name: str, **kwargs: Any) -> Embedder:
_missing = {
"openai": "openai",
"gemini": "google-genai",
"openrouter": "openai", # openrouter uses the openai package
}
try:
module = importlib.import_module(module_path)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
anthropic — claude-opus-4-6, claude-sonnet-4-6, claude-haiku-4-5
openai — gpt-5.4-nano, gpt-5.4-mini, gpt-5.4
gemini — gemini-3.1-flash-lite-preview, gemini-3-flash-preview, gemini-3.1-pro-preview
openrouter — 200+ models via OpenRouter (anthropic/claude-sonnet-4.6, etc.)
ollama — local inference (llama3.2, codellama, etc.)
litellm — 100+ providers via LiteLLM proxy
mock — deterministic test provider
Expand Down
Loading