Skip to content

Commit 7df107d

Browse files
committed
Add PASSTHROUGH_API_KEY option to forward incoming API keys
1 parent dd4a29a commit 7df107d

3 files changed

Lines changed: 42 additions & 7 deletions

File tree

.env.example

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,12 @@ OPENAI_BASE_URL="https://api.openai.com/v1"
2929
# BIG_MODEL="gemini-2.5-pro"
3030
# SMALL_MODEL="gemini-2.5-flash"
3131

32+
# Optional: Pass through incoming API keys
33+
# When enabled, extracts the API key from the x-api-key or Authorization: Bearer header
34+
# and forwards it to upstream services instead of using static API keys above
35+
# Useful for multi-tenant environments with per-user quota tracking
36+
# PASSTHROUGH_API_KEY="false"
37+
3238
# Example "just an Anthropic proxy" mode:
3339
# PREFERRED_PROVIDER="anthropic"
3440
# (BIG_MODEL and SMALL_MODEL are ignored in this mode)

README.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,7 @@ A proxy server that lets you use Anthropic clients with Gemini, OpenAI, or Anthr
4545
* `USE_VERTEX_AUTH` (Optional): Set to `true` to use Application Default Credentials (ADC) (no static API key required). Note: when USE_VERTEX_AUTH=true, you must configure `VERTEX_PROJECT` and `VERTEX_LOCATION`.
4646
* `VERTEX_PROJECT` (Optional): Your Google Cloud Project ID (Required if `PREFERRED_PROVIDER=google` and `USE_VERTEX_AUTH=true`).
4747
* `VERTEX_LOCATION` (Optional): The Google Cloud region for Vertex AI (e.g., `us-central1`) (Required if `PREFERRED_PROVIDER=google` and `USE_VERTEX_AUTH=true`).
48+
* `PASSTHROUGH_API_KEY` (Optional): Set to `true` to extract API keys from incoming request headers (`x-api-key` or `Authorization: Bearer`) and forward them to upstream services instead of using static API keys. Useful for multi-tenant environments with per-user quota tracking.
4849
* `PREFERRED_PROVIDER` (Optional): Set to `openai` (default), `google`, or `anthropic`. This determines the primary backend for mapping `haiku`/`sonnet`.
4950
* `BIG_MODEL` (Optional): The model to map `sonnet` requests to. Defaults to `gpt-4.1` (if `PREFERRED_PROVIDER=openai`) or `gemini-2.5-pro-preview-03-25`. Ignored when `PREFERRED_PROVIDER=anthropic`.
5051
* `SMALL_MODEL` (Optional): The model to map `haiku` requests to. Defaults to `gpt-4.1-mini` (if `PREFERRED_PROVIDER=openai`) or `gemini-2.0-flash`. Ignored when `PREFERRED_PROVIDER=anthropic`.

server.py

Lines changed: 35 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -92,6 +92,11 @@ def format(self, record):
9292
# Get OpenAI base URL from environment (if set)
9393
OPENAI_BASE_URL = os.environ.get("OPENAI_BASE_URL")
9494

95+
# Option to pass through the incoming API key from request header to upstream
96+
# When enabled, the x-api-key or Authorization Bearer header from the incoming request is used instead of OPENAI_API_KEY
97+
# This enables per-user quota tracking when routing through a gateway
98+
PASSTHROUGH_API_KEY = os.environ.get("PASSTHROUGH_API_KEY", "False").lower() == "true"
99+
95100
# Get preferred provider (default to openai)
96101
PREFERRED_PROVIDER = os.environ.get("PREFERRED_PROVIDER", "openai").lower()
97102

@@ -1122,27 +1127,50 @@ async def create_message(
11221127
# Convert Anthropic request to LiteLLM format
11231128
litellm_request = convert_anthropic_to_litellm(request)
11241129

1130+
# Extract incoming API key from request header for passthrough mode
1131+
incoming_api_key = None
1132+
if PASSTHROUGH_API_KEY:
1133+
# Try x-api-key header first (Anthropic style), then Authorization Bearer
1134+
incoming_api_key = raw_request.headers.get("x-api-key")
1135+
if not incoming_api_key:
1136+
auth_header = raw_request.headers.get("authorization", "")
1137+
if auth_header.lower().startswith("bearer "):
1138+
incoming_api_key = auth_header[7:]
1139+
1140+
# Basic validation - check if key is not empty and has reasonable length
1141+
if incoming_api_key and len(incoming_api_key.strip()) >= 10:
1142+
logger.debug("Passthrough mode: using API key from request header")
1143+
else:
1144+
incoming_api_key = None
1145+
logger.warning("Passthrough mode enabled but no API key found in request headers (expected x-api-key or Authorization Bearer)")
1146+
1147+
# Determine whether to use passthrough for logging consistency
1148+
use_passthrough = PASSTHROUGH_API_KEY and incoming_api_key
1149+
11251150
# Determine which API key to use based on the model
11261151
if request.model.startswith("openai/"):
1127-
litellm_request["api_key"] = OPENAI_API_KEY
1152+
# Use passthrough key if enabled, otherwise fall back to env var
1153+
litellm_request["api_key"] = incoming_api_key if use_passthrough else OPENAI_API_KEY
11281154
# Use custom OpenAI base URL if configured
11291155
if OPENAI_BASE_URL:
11301156
litellm_request["api_base"] = OPENAI_BASE_URL
1131-
logger.debug(f"Using OpenAI API key and custom base URL {OPENAI_BASE_URL} for model: {request.model}")
1157+
logger.debug(f"Using {'passthrough' if use_passthrough else 'OpenAI'} API key and custom base URL {OPENAI_BASE_URL} for model: {request.model}")
11321158
else:
1133-
logger.debug(f"Using OpenAI API key for model: {request.model}")
1159+
logger.debug(f"Using {'passthrough' if use_passthrough else 'OpenAI'} API key for model: {request.model}")
11341160
elif request.model.startswith("gemini/"):
11351161
if USE_VERTEX_AUTH:
11361162
litellm_request["vertex_project"] = VERTEX_PROJECT
11371163
litellm_request["vertex_location"] = VERTEX_LOCATION
11381164
litellm_request["custom_llm_provider"] = "vertex_ai"
11391165
logger.debug(f"Using Gemini ADC with project={VERTEX_PROJECT}, location={VERTEX_LOCATION} and model: {request.model}")
11401166
else:
1141-
litellm_request["api_key"] = GEMINI_API_KEY
1142-
logger.debug(f"Using Gemini API key for model: {request.model}")
1167+
# Use passthrough key if enabled, otherwise fall back to env var
1168+
litellm_request["api_key"] = incoming_api_key if use_passthrough else GEMINI_API_KEY
1169+
logger.debug(f"Using {'passthrough' if use_passthrough else 'Gemini'} API key for model: {request.model}")
11431170
else:
1144-
litellm_request["api_key"] = ANTHROPIC_API_KEY
1145-
logger.debug(f"Using Anthropic API key for model: {request.model}")
1171+
# Use passthrough key if enabled, otherwise fall back to env var
1172+
litellm_request["api_key"] = incoming_api_key if use_passthrough else ANTHROPIC_API_KEY
1173+
logger.debug(f"Using {'passthrough' if use_passthrough else 'Anthropic'} API key for model: {request.model}")
11461174

11471175
# For OpenAI models - modify request format to work with limitations
11481176
if "openai" in litellm_request["model"] and "messages" in litellm_request:

0 commit comments

Comments
 (0)