Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions backend/migrations/000_one_shot_schema.sql
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ create table if not exists public.user_profiles (
tabular_model text not null default 'gemini-3-flash-preview',
claude_api_key text,
gemini_api_key text,
openrouter_api_key text,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
Expand Down
5 changes: 5 additions & 0 deletions backend/migrations/001_add_openrouter_api_key.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
-- Add OpenRouter API key column to user_profiles (mirrors the existing
-- claude_api_key / gemini_api_key columns).
-- Run this migration in your Supabase SQL Editor.
-- Idempotent: "if not exists" makes re-running the migration safe.

alter table public.user_profiles
add column if not exists openrouter_api_key text;
3 changes: 3 additions & 0 deletions backend/src/lib/llm/index.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import { streamClaude, completeClaudeText } from "./claude";
import { streamGemini, completeGeminiText } from "./gemini";
import { streamOpenRouter, completeOpenRouterText } from "./openrouter";
import { providerForModel } from "./models";
import type { StreamChatParams, StreamChatResult, UserApiKeys } from "./types";

Expand All @@ -11,6 +12,7 @@ export async function streamChatWithTools(
): Promise<StreamChatResult> {
const provider = providerForModel(params.model);
if (provider === "claude") return streamClaude(params);
if (provider === "openrouter") return streamOpenRouter(params);
return streamGemini(params);
}

Expand All @@ -23,5 +25,6 @@ export async function completeText(params: {
}): Promise<string> {
const provider = providerForModel(params.model);
if (provider === "claude") return completeClaudeText(params);
if (provider === "openrouter") return completeOpenRouterText(params);
return completeGeminiText(params);
}
15 changes: 15 additions & 0 deletions backend/src/lib/llm/models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,14 +10,25 @@ export const GEMINI_MAIN_MODELS = [
"gemini-3-flash-preview",
] as const;

// OpenRouter main-chat tier. IDs carry an "openrouter/" prefix so that
// providerForModel can route them to the OpenRouter client; the prefix is
// stripped before the model name is sent to the OpenRouter API.
export const OPENROUTER_MAIN_MODELS = [
  "openrouter/openai/gpt-5.3-chat",
  "openrouter/anthropic/claude-sonnet-4.6",
  "openrouter/anthropic/claude-opus-4.7",
  "openrouter/x-ai/grok-4.3",
  "openrouter/openai/gpt-4o-mini",
] as const;

// Mid-tier (used for tabular review) — user picks one in account settings.
export const CLAUDE_MID_MODELS = ["claude-sonnet-4-6"] as const;
export const GEMINI_MID_MODELS = ["gemini-3-flash-preview"] as const;
export const OPENROUTER_MID_MODELS = ["openrouter/openai/gpt-4o-mini"] as const;

// Low-tier (used for title generation, lightweight extractions) — user picks
// one in account settings.
export const CLAUDE_LOW_MODELS = ["claude-haiku-4-5"] as const;
export const GEMINI_LOW_MODELS = ["gemini-3.1-flash-lite-preview"] as const;
export const OPENROUTER_LOW_MODELS = ["openrouter/openai/gpt-4o-mini"] as const;

export const DEFAULT_MAIN_MODEL = "gemini-3-flash-preview";
export const DEFAULT_TITLE_MODEL = "gemini-3.1-flash-lite-preview";
Expand All @@ -26,17 +37,21 @@ export const DEFAULT_TABULAR_MODEL = "gemini-3-flash-preview";
const ALL_MODELS = new Set<string>([
...CLAUDE_MAIN_MODELS,
...GEMINI_MAIN_MODELS,
...OPENROUTER_MAIN_MODELS,
...CLAUDE_MID_MODELS,
...GEMINI_MID_MODELS,
...OPENROUTER_MID_MODELS,
...CLAUDE_LOW_MODELS,
...GEMINI_LOW_MODELS,
...OPENROUTER_LOW_MODELS,
]);

// ---------------------------------------------------------------------------
// Provider inference
// ---------------------------------------------------------------------------

export function providerForModel(model: string): Provider {
if (model.startsWith("openrouter/")) return "openrouter";
if (model.startsWith("claude")) return "claude";
if (model.startsWith("gemini")) return "gemini";
throw new Error(`Unknown model id: ${model}`);
Expand Down
272 changes: 272 additions & 0 deletions backend/src/lib/llm/openrouter.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,272 @@
import type {
StreamChatParams,
StreamChatResult,
NormalizedToolCall,
} from "./types";

// OpenRouter's OpenAI-compatible chat-completions endpoint.
const OPENROUTER_API_URL = "https://openrouter.ai/api/v1/chat/completions";
// Cap on generated tokens per streaming request.
const MAX_TOKENS = 16384;

// One chat turn in OpenAI wire format. `tool_calls` is set on assistant
// turns that invoke tools; `tool_call_id` links a "tool" turn back to the
// call it answers.
type OpenRouterMessage = {
  role: "system" | "user" | "assistant" | "tool";
  content: string | null;
  tool_calls?: {
    id: string;
    type: "function";
    function: { name: string; arguments: string };
  }[];
  tool_call_id?: string;
};

// One streamed choice delta. Tool calls arrive incrementally keyed by
// `index`: the first delta for an index carries id/name, later deltas
// append fragments to `function.arguments`.
type OpenRouterChoice = {
  delta?: {
    content?: string | null;
    tool_calls?: {
      index: number;
      id?: string;
      type?: "function";
      function?: { name?: string; arguments?: string };
    }[];
  };
  finish_reason?: string | null;
};

// Shape of a single SSE `data:` payload from the streaming endpoint.
type OpenRouterStreamChunk = {
  choices: OpenRouterChoice[];
};

/**
 * Resolve the OpenRouter API key: a non-blank user-supplied override wins,
 * otherwise fall back to the OPENROUTER_API_KEY environment variable.
 * Returns "" when neither source provides a key.
 */
function getApiKey(override?: string | null): string {
  const userKey = override?.trim();
  if (userKey) {
    return userKey;
  }
  return process.env.OPENROUTER_API_KEY || "";
}

/**
 * Strip the leading "openrouter/" routing prefix so the remainder is the
 * model ID that the OpenRouter API expects.
 * e.g. "openrouter/openai/gpt-4o" -> "openai/gpt-4o"
 */
function toOpenRouterModelId(model: string): string {
  const prefix = "openrouter/";
  if (!model.startsWith(prefix)) {
    return model;
  }
  return model.slice(prefix.length);
}

/**
 * Build the OpenRouter message array: an optional leading system message
 * followed by the caller's chat turns mapped one-to-one.
 */
function toOpenRouterMessages(
  systemPrompt: string,
  messages: StreamChatParams["messages"],
): OpenRouterMessage[] {
  const out: OpenRouterMessage[] = systemPrompt
    ? [{ role: "system", content: systemPrompt }]
    : [];
  for (const message of messages) {
    out.push({ role: message.role, content: message.content });
  }
  return out;
}

/**
 * Stream a chat completion from OpenRouter (OpenAI-compatible SSE), with an
 * optional tool-calling loop.
 *
 * Each iteration POSTs the conversation, streams text deltas (forwarded to
 * callbacks.onContentDelta) and tool-call deltas. When the model requests
 * tools and a `runTools` handler is supplied, the tools are executed, their
 * results appended to the conversation, and the loop continues — up to
 * `maxIterations` round trips (default 10).
 *
 * Returns the concatenated assistant text across all iterations.
 * Throws on a non-2xx HTTP response or a missing response body.
 *
 * Fix: removed the leftover debug `console.log` that dumped every raw stream
 * chunk — it flooded server logs and leaked conversation content into them.
 */
export async function streamOpenRouter(
  params: StreamChatParams,
): Promise<StreamChatResult> {
  const {
    model,
    systemPrompt,
    tools = [],
    callbacks = {},
    runTools,
    apiKeys,
  } = params;
  const maxIter = params.maxIterations ?? 10;
  const apiKey = getApiKey(apiKeys?.openrouter);
  const openRouterModel = toOpenRouterModelId(model);

  const messages: OpenRouterMessage[] = toOpenRouterMessages(systemPrompt, params.messages);
  let fullText = "";

  for (let iter = 0; iter < maxIter; iter++) {
    const body: Record<string, unknown> = {
      model: openRouterModel,
      messages,
      max_tokens: MAX_TOKENS,
      stream: true,
    };

    if (tools.length > 0) {
      body.tools = tools;
      body.tool_choice = "auto";
    }

    const response = await fetch(OPENROUTER_API_URL, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${apiKey}`,
        // Optional OpenRouter attribution headers (app identification).
        "HTTP-Referer": process.env.APP_URL || "http://localhost:3000",
        "X-Title": "Mike",
      },
      body: JSON.stringify(body),
    });

    if (!response.ok) {
      const errorText = await response.text();
      throw new Error(`OpenRouter API error: ${response.status} - ${errorText}`);
    }

    if (!response.body) {
      throw new Error("OpenRouter response body is null");
    }

    // Parse the SSE stream line by line; a partial trailing line is kept in
    // `buffer` until the next network chunk completes it.
    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    let buffer = "";

    // Per-iteration accumulators
    const textParts: string[] = [];
    const toolCalls: Map<number, { id: string; name: string; arguments: string }> = new Map();

    while (true) {
      const { done, value } = await reader.read();
      if (done) break;

      buffer += decoder.decode(value, { stream: true });
      const lines = buffer.split("\n");
      buffer = lines.pop() || "";

      for (const line of lines) {
        const trimmed = line.trim();
        if (!trimmed || trimmed === "data: [DONE]") continue;
        if (!trimmed.startsWith("data: ")) continue;

        const jsonStr = trimmed.slice(6);
        let chunk: OpenRouterStreamChunk;
        try {
          chunk = JSON.parse(jsonStr);
        } catch {
          // Skip malformed payloads (e.g. keep-alive / comment noise).
          continue;
        }

        const choice = chunk.choices?.[0];
        if (!choice?.delta) continue;

        // Forward text deltas to the caller as they arrive.
        if (choice.delta.content) {
          textParts.push(choice.delta.content);
          callbacks.onContentDelta?.(choice.delta.content);
        }

        // Tool calls stream incrementally keyed by `index`: the first delta
        // carries id/name, later deltas append argument fragments.
        if (choice.delta.tool_calls) {
          for (const tc of choice.delta.tool_calls) {
            const existing = toolCalls.get(tc.index);
            if (existing) {
              // Accumulate function arguments
              if (tc.function?.arguments) {
                existing.arguments += tc.function.arguments;
              }
            } else {
              // New tool call
              toolCalls.set(tc.index, {
                id: tc.id || `tool-${tc.index}`,
                name: tc.function?.name || "",
                arguments: tc.function?.arguments || "",
              });
            }
          }
        }
      }
    }

    fullText += textParts.join("");

    // Convert accumulated tool calls to the provider-agnostic shape.
    const normalizedCalls: NormalizedToolCall[] = [];
    for (const [, tc] of toolCalls) {
      if (!tc.name) continue;
      let input: Record<string, unknown> = {};
      try {
        input = JSON.parse(tc.arguments || "{}");
      } catch {
        // Unparseable arguments degrade to an empty input object.
        input = {};
      }
      const call: NormalizedToolCall = {
        id: tc.id,
        name: tc.name,
        input,
      };
      callbacks.onToolCallStart?.(call);
      normalizedCalls.push(call);
    }

    // If no tool calls or no runTools handler, we're done.
    if (!normalizedCalls.length || !runTools) {
      break;
    }

    // Execute tools, then extend the conversation so the next iteration lets
    // the model read the results.
    const results = await runTools(normalizedCalls);

    // Add assistant message with tool calls
    messages.push({
      role: "assistant",
      content: textParts.join("") || null,
      tool_calls: normalizedCalls.map((c) => ({
        id: c.id,
        type: "function" as const,
        function: {
          name: c.name,
          arguments: JSON.stringify(c.input),
        },
      })),
    });

    // Add tool results
    for (const r of results) {
      messages.push({
        role: "tool",
        tool_call_id: r.tool_use_id,
        content: r.content,
      });
    }
  }

  return { fullText };
}

export async function completeOpenRouterText(params: {
model: string;
systemPrompt?: string;
user: string;
maxTokens?: number;
apiKeys?: { openrouter?: string | null };
}): Promise<string> {
const apiKey = getApiKey(params.apiKeys?.openrouter);
const openRouterModel = toOpenRouterModelId(params.model);

const messages: OpenRouterMessage[] = [];
if (params.systemPrompt) {
messages.push({ role: "system", content: params.systemPrompt });
}
messages.push({ role: "user", content: params.user });

const response = await fetch(OPENROUTER_API_URL, {
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${apiKey}`,
"HTTP-Referer": process.env.APP_URL || "http://localhost:3000",
"X-Title": "Mike",
},
body: JSON.stringify({
model: openRouterModel,
messages,
max_tokens: params.maxTokens ?? 512,
}),
});

if (!response.ok) {
const errorText = await response.text();
throw new Error(`OpenRouter API error: ${response.status} - ${errorText}`);
}

const data = await response.json();
return data.choices?.[0]?.message?.content ?? "";
}
3 changes: 2 additions & 1 deletion backend/src/lib/llm/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
// Callers always speak OpenAI-style tools + { role, content } messages; each
// provider translates internally.

export type Provider = "claude" | "gemini";
export type Provider = "claude" | "gemini" | "openrouter";

export type OpenAIToolSchema = {
type: "function";
Expand Down Expand Up @@ -39,6 +39,7 @@ export type StreamCallbacks = {
// Optional per-user API keys, one per provider. A missing/null entry means
// no user-supplied key for that provider (the OpenRouter client then falls
// back to the OPENROUTER_API_KEY environment variable; presumably the other
// providers behave the same — verify in their clients).
export type UserApiKeys = {
  claude?: string | null;
  gemini?: string | null;
  openrouter?: string | null;
};

export type StreamChatParams = {
Expand Down
Loading