diff --git a/anton/llm/anthropic.py b/anton/llm/anthropic.py index 8aec58b..8ad0cc8 100644 --- a/anton/llm/anthropic.py +++ b/anton/llm/anthropic.py @@ -57,9 +57,12 @@ async def complete( raise ContextOverflowError(str(exc)) from exc raise except anthropic.APIStatusError as exc: - raise ConnectionError( - f"Server returned {exc.status_code} — the LLM endpoint may be temporarily unavailable. Try again in a moment." - ) from exc + if exc.status_code == 429 and isinstance(exc.body, dict) and exc.body.get("detail"): + msg = f"Server returned 429 — {exc.body['detail']}" + msg += " Visit https://mdb.ai to upgrade or to top up your tokens." + else: + msg = f"Server returned {exc.status_code} — the LLM endpoint may be temporarily unavailable. Try again in a moment." + raise ConnectionError(msg) from exc except anthropic.APIConnectionError as exc: raise ConnectionError( "Could not reach the LLM server — check your connection or try again in a moment." @@ -164,9 +167,12 @@ async def stream( raise ContextOverflowError(str(exc)) from exc raise except anthropic.APIStatusError as exc: - raise ConnectionError( - f"Server returned {exc.status_code} — the LLM endpoint may be temporarily unavailable. Try again in a moment." - ) from exc + if exc.status_code == 429 and isinstance(exc.body, dict) and exc.body.get("detail"): + msg = f"Server returned 429 — {exc.body['detail']}" + msg += " Visit https://mdb.ai to upgrade or to top up your tokens." + else: + msg = f"Server returned {exc.status_code} — the LLM endpoint may be temporarily unavailable. Try again in a moment." + raise ConnectionError(msg) from exc except anthropic.APIConnectionError as exc: raise ConnectionError( "Could not reach the LLM server — check your connection or try again in a moment." 
diff --git a/anton/llm/openai.py b/anton/llm/openai.py index 403bc55..eb8b390 100644 --- a/anton/llm/openai.py +++ b/anton/llm/openai.py @@ -212,9 +212,12 @@ async def complete( raise ContextOverflowError(str(exc)) from exc raise except openai.APIStatusError as exc: - raise ConnectionError( - f"Server returned {exc.status_code} — the LLM endpoint may be temporarily unavailable. Try again in a moment." - ) from exc + if exc.status_code == 429 and isinstance(exc.body, dict) and exc.body.get("detail"): + msg = f"Server returned 429 — {exc.body['detail']}" + msg += " Visit https://mdb.ai to upgrade or to top up your tokens." + else: + msg = f"Server returned {exc.status_code} — the LLM endpoint may be temporarily unavailable. Try again in a moment." + raise ConnectionError(msg) from exc except openai.APIConnectionError as exc: raise ConnectionError( "Could not reach the LLM server — check your connection or try again in a moment." @@ -336,9 +339,12 @@ async def stream( raise ContextOverflowError(str(exc)) from exc raise except openai.APIStatusError as exc: - raise ConnectionError( - f"Server returned {exc.status_code} — the LLM endpoint may be temporarily unavailable. Try again in a moment." - ) from exc + if exc.status_code == 429 and isinstance(exc.body, dict) and exc.body.get("detail"): + msg = f"Server returned 429 — {exc.body['detail']}" + msg += " Visit https://mdb.ai to upgrade or to top up your tokens." + else: + msg = f"Server returned {exc.status_code} — the LLM endpoint may be temporarily unavailable. Try again in a moment." + raise ConnectionError(msg) from exc except openai.APIConnectionError as exc: raise ConnectionError( "Could not reach the LLM server — check your connection or try again in a moment."