From c883a87c8ecadedee9bc54f14c7a71d8d3ecfd82 Mon Sep 17 00:00:00 2001
From: Anshul Basia <73226526+anshulbasia27@users.noreply.github.com>
Date: Tue, 17 Mar 2026 17:27:14 +0530
Subject: [PATCH 1/5] Update integrations/llms/ollama.mdx
---
integrations/llms/ollama.mdx | 21 ++++++++++++++++++++-
1 file changed, 20 insertions(+), 1 deletion(-)
diff --git a/integrations/llms/ollama.mdx b/integrations/llms/ollama.mdx
index f9117eaf..b7710d71 100644
--- a/integrations/llms/ollama.mdx
+++ b/integrations/llms/ollama.mdx
@@ -22,11 +22,13 @@ docker run -d -p 8787:8787 portkeyai/gateway:latest
Then, connect to your local Ollama instance:
+
+
```python Python
from portkey_ai import Portkey
portkey = Portkey(
- base_url="http://localhost:8787", # Your local Gateway
+ base_url="http://localhost:8787/v1", # Your local Gateway
provider="ollama",
custom_host="http://localhost:11434" # Your Ollama instance
)
@@ -37,6 +39,23 @@ response = portkey.chat.completions.create(
)
```
+```javascript Node.js
+import Portkey from 'portkey-ai';
+
+const portkey = new Portkey({
+ baseURL: 'http://localhost:8787/v1', // Your local Gateway
+ provider: 'ollama',
+ customHost: 'http://localhost:11434' // Your Ollama instance
+});
+
+const response = await portkey.chat.completions.create({
+ model: 'llama3',
+ messages: [{ role: 'user', content: 'Hello!' }]
+});
+```
+
+
+
From 9047d5d24418d35c737a1d126017ac9e7f6aaa62 Mon Sep 17 00:00:00 2001
From: Anshul Basia <73226526+anshulbasia27@users.noreply.github.com>
Date: Tue, 17 Mar 2026 17:41:44 +0530
Subject: [PATCH 2/5] docs: clarify that base_url requires /v1 for local
gateway setup
---
integrations/llms/ollama.mdx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/integrations/llms/ollama.mdx b/integrations/llms/ollama.mdx
index b7710d71..aefd912e 100644
--- a/integrations/llms/ollama.mdx
+++ b/integrations/llms/ollama.mdx
@@ -153,7 +153,7 @@ const portkey = new Portkey({
-**Important:** For Ollama integration, you only need to pass the base URL to `customHost` **without** the version identifier (such as `/v1`) - Portkey handles the rest!
+**Important:** The `custom_host` / `customHost` parameter (your Ollama URL) should be passed **without** `/v1` — Portkey handles the provider routing automatically. However, when using a local gateway, the `base_url` / `baseURL` parameter **must** include `/v1` (e.g. `http://localhost:8787/v1`).
---
From 83ca538eaba132c8675dc30c145e0151cdeb435d Mon Sep 17 00:00:00 2001
From: Akhil Madhu Menon
Date: Fri, 3 Apr 2026 14:21:41 +0530
Subject: [PATCH 3/5] Added API Key reset usage example
---
.../control-plane/api-keys/update-api-key.mdx | 12 ++++++++++++
1 file changed, 12 insertions(+)
diff --git a/api-reference/admin-api/control-plane/api-keys/update-api-key.mdx b/api-reference/admin-api/control-plane/api-keys/update-api-key.mdx
index bfbc1b8b..b8feb04a 100644
--- a/api-reference/admin-api/control-plane/api-keys/update-api-key.mdx
+++ b/api-reference/admin-api/control-plane/api-keys/update-api-key.mdx
@@ -2,3 +2,15 @@
title: Update API Key
openapi: put /api-keys/{id}
---
+
+### Reset API key usage
+
+Use `reset_usage` when you want to clear the API key's current usage without changing other fields.
+
+```json
+{
+ "reset_usage": true
+}
+```
+
+If the key is currently `exhausted`, resetting usage reactivates it to `active` where applicable. After the update, verify the reset by checking response fields such as `status` and `last_reset_at`.
From 0d05e5666ce38a4fd331ec99d198cd1292eb486d Mon Sep 17 00:00:00 2001
From: Akhil Madhu Menon
Date: Fri, 3 Apr 2026 14:42:11 +0530
Subject: [PATCH 4/5] Reverting changes to ollama.mdx
---
integrations/llms/ollama.mdx | 25 +++----------------------
1 file changed, 3 insertions(+), 22 deletions(-)
diff --git a/integrations/llms/ollama.mdx b/integrations/llms/ollama.mdx
index aefd912e..ef1896e8 100644
--- a/integrations/llms/ollama.mdx
+++ b/integrations/llms/ollama.mdx
@@ -22,13 +22,11 @@ docker run -d -p 8787:8787 portkeyai/gateway:latest
Then, connect to your local Ollama instance:
-
-
```python Python
from portkey_ai import Portkey
portkey = Portkey(
- base_url="http://localhost:8787/v1", # Your local Gateway
+ base_url="http://localhost:8787", # Your local Gateway
provider="ollama",
custom_host="http://localhost:11434" # Your Ollama instance
)
@@ -39,23 +37,6 @@ response = portkey.chat.completions.create(
)
```
-```javascript Node.js
-import Portkey from 'portkey-ai';
-
-const portkey = new Portkey({
- baseURL: 'http://localhost:8787/v1', // Your local Gateway
- provider: 'ollama',
- customHost: 'http://localhost:11434' // Your Ollama instance
-});
-
-const response = await portkey.chat.completions.create({
- model: 'llama3',
- messages: [{ role: 'user', content: 'Hello!' }]
-});
-```
-
-
-
@@ -153,7 +134,7 @@ const portkey = new Portkey({
-**Important:** The `custom_host` / `customHost` parameter (your Ollama URL) should be passed **without** `/v1` — Portkey handles the provider routing automatically. However, when using a local gateway, the `base_url` / `baseURL` parameter **must** include `/v1` (e.g. `http://localhost:8787/v1`).
+**Important:** For Ollama integration, you only need to pass the base URL to `customHost` **without** the version identifier (such as `/v1`) - Portkey handles the rest!
---
@@ -194,4 +175,4 @@ For complete SDK documentation:
Complete Portkey SDK documentation
-
+
\ No newline at end of file
From f6eb0f3f81d225ae26c703229ded9a54ad1a86a1 Mon Sep 17 00:00:00 2001
From: Akhil Madhu Menon
Date: Fri, 3 Apr 2026 14:46:29 +0530
Subject: [PATCH 5/5] Another attempt to revert this file
---
integrations/llms/ollama.mdx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/integrations/llms/ollama.mdx b/integrations/llms/ollama.mdx
index ef1896e8..f9117eaf 100644
--- a/integrations/llms/ollama.mdx
+++ b/integrations/llms/ollama.mdx
@@ -175,4 +175,4 @@ For complete SDK documentation:
Complete Portkey SDK documentation
-
\ No newline at end of file
+