From 64765839470be8ec86d26dbf29d182501778b92b Mon Sep 17 00:00:00 2001 From: Konstantin Sivakov Date: Fri, 27 Mar 2026 21:18:41 +0100 Subject: [PATCH 1/2] Show error message when token limit is exceeded --- anton/llm/anthropic.py | 16 ++++++++++------ anton/llm/openai.py | 16 ++++++++++------ 2 files changed, 20 insertions(+), 12 deletions(-) diff --git a/anton/llm/anthropic.py b/anton/llm/anthropic.py index 8aec58bc..57257fc7 100644 --- a/anton/llm/anthropic.py +++ b/anton/llm/anthropic.py @@ -57,9 +57,11 @@ async def complete( raise ContextOverflowError(str(exc)) from exc raise except anthropic.APIStatusError as exc: - raise ConnectionError( - f"Server returned {exc.status_code} — the LLM endpoint may be temporarily unavailable. Try again in a moment." - ) from exc + if exc.status_code == 429 and isinstance(exc.body, dict) and exc.body.get("detail"): + msg = f"Server returned 429 — {exc.body['detail']}" + else: + msg = f"Server returned {exc.status_code} — the LLM endpoint may be temporarily unavailable. Try again in a moment." + raise ConnectionError(msg) from exc except anthropic.APIConnectionError as exc: raise ConnectionError( "Could not reach the LLM server — check your connection or try again in a moment." @@ -164,9 +166,11 @@ async def stream( raise ContextOverflowError(str(exc)) from exc raise except anthropic.APIStatusError as exc: - raise ConnectionError( - f"Server returned {exc.status_code} — the LLM endpoint may be temporarily unavailable. Try again in a moment." - ) from exc + if exc.status_code == 429 and isinstance(exc.body, dict) and exc.body.get("detail"): + msg = f"Server returned 429 — {exc.body['detail']}" + else: + msg = f"Server returned {exc.status_code} — the LLM endpoint may be temporarily unavailable. Try again in a moment." + raise ConnectionError(msg) from exc except anthropic.APIConnectionError as exc: raise ConnectionError( "Could not reach the LLM server — check your connection or try again in a moment." 
diff --git a/anton/llm/openai.py b/anton/llm/openai.py index 403bc55b..212c25eb 100644 --- a/anton/llm/openai.py +++ b/anton/llm/openai.py @@ -212,9 +212,11 @@ async def complete( raise ContextOverflowError(str(exc)) from exc raise except openai.APIStatusError as exc: - raise ConnectionError( - f"Server returned {exc.status_code} — the LLM endpoint may be temporarily unavailable. Try again in a moment." - ) from exc + if exc.status_code == 429 and isinstance(exc.body, dict) and exc.body.get("detail"): + msg = f"Server returned 429 — {exc.body['detail']}" + else: + msg = f"Server returned {exc.status_code} — the LLM endpoint may be temporarily unavailable. Try again in a moment." + raise ConnectionError(msg) from exc except openai.APIConnectionError as exc: raise ConnectionError( "Could not reach the LLM server — check your connection or try again in a moment." @@ -336,9 +338,11 @@ async def stream( raise ContextOverflowError(str(exc)) from exc raise except openai.APIStatusError as exc: - raise ConnectionError( - f"Server returned {exc.status_code} — the LLM endpoint may be temporarily unavailable. Try again in a moment." - ) from exc + if exc.status_code == 429 and isinstance(exc.body, dict) and exc.body.get("detail"): + msg = f"Server returned 429 — {exc.body['detail']}" + else: + msg = f"Server returned {exc.status_code} — the LLM endpoint may be temporarily unavailable. Try again in a moment." + raise ConnectionError(msg) from exc except openai.APIConnectionError as exc: raise ConnectionError( "Could not reach the LLM server — check your connection or try again in a moment." 
From cfc84562ed694cf6d9f12d91e9e748dc1319d5d1 Mon Sep 17 00:00:00 2001 From: Konstantin Sivakov Date: Mon, 30 Mar 2026 14:09:20 +0200 Subject: [PATCH 2/2] Change the error message --- anton/llm/anthropic.py | 2 ++ anton/llm/openai.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/anton/llm/anthropic.py b/anton/llm/anthropic.py index 57257fc7..8ad0cc89 100644 --- a/anton/llm/anthropic.py +++ b/anton/llm/anthropic.py @@ -59,6 +59,7 @@ async def complete( except anthropic.APIStatusError as exc: if exc.status_code == 429 and isinstance(exc.body, dict) and exc.body.get("detail"): msg = f"Server returned 429 — {exc.body['detail']}" + msg += " Visit https://mdb.ai to upgrade or to top up your tokens." else: msg = f"Server returned {exc.status_code} — the LLM endpoint may be temporarily unavailable. Try again in a moment." raise ConnectionError(msg) from exc @@ -168,6 +169,7 @@ async def stream( except anthropic.APIStatusError as exc: if exc.status_code == 429 and isinstance(exc.body, dict) and exc.body.get("detail"): msg = f"Server returned 429 — {exc.body['detail']}" + msg += " Visit https://mdb.ai to upgrade or to top up your tokens." else: msg = f"Server returned {exc.status_code} — the LLM endpoint may be temporarily unavailable. Try again in a moment." raise ConnectionError(msg) from exc diff --git a/anton/llm/openai.py b/anton/llm/openai.py index 212c25eb..eb8b390a 100644 --- a/anton/llm/openai.py +++ b/anton/llm/openai.py @@ -214,6 +214,7 @@ async def complete( except openai.APIStatusError as exc: if exc.status_code == 429 and isinstance(exc.body, dict) and exc.body.get("detail"): msg = f"Server returned 429 — {exc.body['detail']}" + msg += " Visit https://mdb.ai to upgrade or to top up your tokens." else: msg = f"Server returned {exc.status_code} — the LLM endpoint may be temporarily unavailable. Try again in a moment." 
raise ConnectionError(msg) from exc @@ -340,6 +341,7 @@ async def stream( except openai.APIStatusError as exc: if exc.status_code == 429 and isinstance(exc.body, dict) and exc.body.get("detail"): msg = f"Server returned 429 — {exc.body['detail']}" + msg += " Visit https://mdb.ai to upgrade or to top up your tokens." else: msg = f"Server returned {exc.status_code} — the LLM endpoint may be temporarily unavailable. Try again in a moment." raise ConnectionError(msg) from exc