Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions py/noxfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,7 @@ def _pinned_python_version():
"autoevals",
"braintrust_core",
"litellm",
"mistralai",
"openrouter",
"opentelemetry-api",
"opentelemetry-sdk",
Expand Down Expand Up @@ -105,6 +106,7 @@ def _pinned_python_version():
GOOGLE_ADK_VERSIONS = (LATEST, "1.14.1")
LANGCHAIN_VERSIONS = (LATEST, "0.3.28")
OPENROUTER_VERSIONS = (LATEST, "0.6.0")
MISTRAL_VERSIONS = (LATEST, "1.12.4")
# temporalio 1.19.0+ requires Python >= 3.10; skip Python 3.9 entirely
TEMPORAL_VERSIONS = (LATEST, "1.20.0", "1.19.0")
PYTEST_VERSIONS = (LATEST, "8.4.2")
Expand Down Expand Up @@ -266,6 +268,16 @@ def test_openrouter(session, version):
_run_tests(session, f"{INTEGRATION_DIR}/openrouter/test_openrouter.py")


@nox.session()
@nox.parametrize("version", MISTRAL_VERSIONS, ids=MISTRAL_VERSIONS)
def test_mistral(session, version):
    """Run the native Mistral SDK integration tests, then the core suite.

    Parametrized over MISTRAL_VERSIONS so both the latest and the pinned
    mistralai release are exercised.
    """
    _install_test_deps(session)
    _install(session, "mistralai", version)
    mistral_tests = f"{INTEGRATION_DIR}/mistral/test_mistral.py"
    _run_tests(session, mistral_tests)
    # Core tests re-run here to catch regressions the wrapper might introduce.
    _run_core_tests(session)


@nox.session()
@nox.parametrize("version", LITELLM_VERSIONS, ids=LITELLM_VERSIONS)
def test_litellm(session, version):
Expand Down
5 changes: 5 additions & 0 deletions py/src/braintrust/auto.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
GoogleGenAIIntegration,
LangChainIntegration,
LiteLLMIntegration,
MistralIntegration,
OpenRouterIntegration,
PydanticAIIntegration,
)
Expand Down Expand Up @@ -46,6 +47,7 @@ def auto_instrument(
pydantic_ai: bool = True,
google_genai: bool = True,
openrouter: bool = True,
mistral: bool = True,
agno: bool = True,
agentscope: bool = True,
claude_agent_sdk: bool = True,
Expand All @@ -69,6 +71,7 @@ def auto_instrument(
pydantic_ai: Enable Pydantic AI instrumentation (default: True)
google_genai: Enable Google GenAI instrumentation (default: True)
openrouter: Enable OpenRouter instrumentation (default: True)
mistral: Enable Mistral instrumentation (default: True)
agno: Enable Agno instrumentation (default: True)
agentscope: Enable AgentScope instrumentation (default: True)
claude_agent_sdk: Enable Claude Agent SDK instrumentation (default: True)
Expand Down Expand Up @@ -134,6 +137,8 @@ def auto_instrument(
results["google_genai"] = _instrument_integration(GoogleGenAIIntegration)
if openrouter:
results["openrouter"] = _instrument_integration(OpenRouterIntegration)
if mistral:
results["mistral"] = _instrument_integration(MistralIntegration)
if agno:
results["agno"] = _instrument_integration(AgnoIntegration)
if agentscope:
Expand Down
1 change: 1 addition & 0 deletions py/src/braintrust/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,6 +152,7 @@ def setup_braintrust():
os.environ.setdefault("GOOGLE_API_KEY", os.getenv("GEMINI_API_KEY", "your_google_api_key_here"))
os.environ.setdefault("OPENAI_API_KEY", "sk-test-dummy-api-key-for-vcr-tests")
os.environ.setdefault("ANTHROPIC_API_KEY", "sk-ant-test-dummy-api-key-for-vcr-tests")
os.environ.setdefault("MISTRAL_API_KEY", "mistral-test-dummy-api-key-for-vcr-tests")


@pytest.fixture(autouse=True)
Expand Down
2 changes: 2 additions & 0 deletions py/src/braintrust/integrations/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from .google_genai import GoogleGenAIIntegration
from .langchain import LangChainIntegration
from .litellm import LiteLLMIntegration
from .mistral import MistralIntegration
from .openrouter import OpenRouterIntegration
from .pydantic_ai import PydanticAIIntegration

Expand All @@ -21,6 +22,7 @@
"GoogleGenAIIntegration",
"LiteLLMIntegration",
"LangChainIntegration",
"MistralIntegration",
"OpenRouterIntegration",
"PydanticAIIntegration",
]
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
"""Test auto_instrument for Mistral."""

import os
from pathlib import Path

from braintrust.auto import auto_instrument
from braintrust.wrappers.test_utils import autoinstrument_test_context


try:
from mistralai.client import Mistral
except ImportError:
from mistralai import Mistral


results = auto_instrument()
assert results.get("mistral") == True

results2 = auto_instrument()
assert results2.get("mistral") == True

MISTRAL_CASSETTES_DIR = Path(__file__).resolve().parent.parent / "mistral" / "cassettes"

with autoinstrument_test_context("test_auto_mistral", cassettes_dir=MISTRAL_CASSETTES_DIR) as memory_logger:
client = Mistral(api_key=os.environ.get("MISTRAL_API_KEY"))
response = client.chat.complete(
model="mistral-small-latest",
messages=[{"role": "user", "content": "What is 2+2? Reply with just the number."}],
max_tokens=10,
)
assert "4" in str(response.choices[0].message.content)

spans = memory_logger.pop()
assert len(spans) == 1, f"Expected 1 span, got {len(spans)}"
span = spans[0]
assert span["metadata"]["provider"] == "mistral"
assert span["metadata"]["model"] == "mistral-small-latest"
assert "4" in str(span["output"])

print("SUCCESS")
10 changes: 10 additions & 0 deletions py/src/braintrust/integrations/mistral/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
"""Braintrust integration for the Mistral Python SDK."""

from .integration import MistralIntegration
from .tracing import wrap_mistral


__all__ = [
"MistralIntegration",
"wrap_mistral",
]
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
interactions:
- request:
body: '{"model":"mistral-small-latest","messages":[{"content":"What is 2+2? Reply
with just the number.","role":"user"}],"max_tokens":10,"stream":false}'
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate, zstd
Connection:
- keep-alive
Content-Length:
- '145'
Host:
- api.mistral.ai
content-type:
- application/json
user-agent:
- mistral-client-python/1.12.4
method: POST
uri: https://api.mistral.ai/v1/chat/completions
response:
body:
string: '{"id":"e3b5125b7fb14e58a881fd961272620b","created":1775090570,"model":"mistral-small-latest","usage":{"prompt_tokens":28,"total_tokens":30,"completion_tokens":2,"prompt_tokens_details":{"cached_tokens":0}},"object":"chat.completion","choices":[{"index":0,"finish_reason":"stop","message":{"role":"assistant","tool_calls":null,"content":"4"}}]}'
headers:
CF-RAY:
- 9e5bcbc17e7fe930-YYZ
Connection:
- keep-alive
Content-Type:
- application/json
Date:
- Thu, 02 Apr 2026 00:42:50 GMT
Server:
- cloudflare
Strict-Transport-Security:
- max-age=15552000; includeSubDomains; preload
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
cf-cache-status:
- DYNAMIC
content-length:
- '343'
mistral-correlation-id:
- 019d4ba4-952f-7c2e-ac5e-894689343339
set-cookie:
- __cf_bm=yF37zbYIJO9EqaPOC1bDh7_3kQPvMC8QC32ASLfT2h4-1775090570.477608-1.0.1.1-LaKQJ5wtfgLy9SLNfOCHStgMj3L4js0owc.thrbfcT8NNZTTeNKCI_IajFYVPRxRTJzhBtd_9_xMuCxdZHyGVxufwQxkZysWkTrscYV4kINNvqQeWJWjinIngrscU1gW;
HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 01:12:50
GMT
- _cfuvid=9SAmeM9YOFyYq3TU2EPLgqecu.JXGOfVKHopWq8EGYA-1775090570.477608-1.0.1.1-zmkuYsDfIm4OWQayZOKDtpjmdMkjOEdGvJnDk3bSb6o;
HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai
x-envoy-upstream-service-time:
- '266'
x-kong-proxy-latency:
- '15'
x-kong-request-id:
- 019d4ba4-952f-7c2e-ac5e-894689343339
x-kong-upstream-latency:
- '267'
x-ratelimit-limit-req-minute:
- '60'
x-ratelimit-limit-tokens-minute:
- '375000'
x-ratelimit-remaining-req-minute:
- '54'
x-ratelimit-remaining-tokens-minute:
- '374849'
x-ratelimit-tokens-query-cost:
- '30'
status:
code: 200
message: OK
version: 1
Original file line number Diff line number Diff line change
@@ -0,0 +1,156 @@
interactions:
- request:
body: '{"model":"mistral-small-latest","messages":[{"content":"What is 2+2? Reply
with just the number.","role":"user"}],"max_tokens":10,"stream":false}'
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate, zstd
Connection:
- keep-alive
Content-Length:
- '145'
Host:
- api.mistral.ai
content-type:
- application/json
user-agent:
- mistral-client-python/1.12.4
method: POST
uri: https://api.mistral.ai/v1/chat/completions
response:
body:
string: '{"id":"cfca3ec9b4d9406aa3d4edd7c6dd3e02","created":1775090565,"model":"mistral-small-latest","usage":{"prompt_tokens":28,"total_tokens":30,"completion_tokens":2,"prompt_tokens_details":{"cached_tokens":0}},"object":"chat.completion","choices":[{"index":0,"finish_reason":"stop","message":{"role":"assistant","tool_calls":null,"content":"4"}}]}'
headers:
CF-RAY:
- 9e5bcba42f4af337-YYZ
Connection:
- keep-alive
Content-Type:
- application/json
Date:
- Thu, 02 Apr 2026 00:42:46 GMT
Server:
- cloudflare
Strict-Transport-Security:
- max-age=15552000; includeSubDomains; preload
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
cf-cache-status:
- DYNAMIC
content-length:
- '343'
mistral-correlation-id:
- 019d4ba4-82de-7d54-b8db-36eadad0d6fb
set-cookie:
- __cf_bm=LZgwv7VkE_uahQNBcsoAHDympjTRdYshtpiktxKSZUI-1775090565.7901893-1.0.1.1-WlnWQpklMhQAJkPSitfyvrMNeCfzJKpmIzUjs2T5nF.szt.W4_zuzgtFvNmOVbft08at8DXTWjsy5N5xQp3U4Ld1_miC9TnDJdXzgv0dALELidawrG5vM_X_ZDZDqRKm;
HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 01:12:46
GMT
- _cfuvid=iORpbZB2m.YumIunOW4.E2_K7fN_HKJtzYXjVULh0KE-1775090565.7901893-1.0.1.1-W3KSrmaSr.tgkkvkXF_jmOsW6IGGBpEk0utFIwEVm20;
HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai
x-envoy-upstream-service-time:
- '187'
x-kong-proxy-latency:
- '9'
x-kong-request-id:
- 019d4ba4-82de-7d54-b8db-36eadad0d6fb
x-kong-upstream-latency:
- '188'
x-ratelimit-limit-req-minute:
- '60'
x-ratelimit-limit-tokens-minute:
- '375000'
x-ratelimit-remaining-req-minute:
- '55'
x-ratelimit-remaining-tokens-minute:
- '374879'
x-ratelimit-tokens-query-cost:
- '30'
status:
code: 200
message: OK
- request:
body: '{"model":"mistral-small-latest","messages":[{"content":"What is 2+2? Reply
with just the number.","role":"user"}],"max_tokens":10,"stream":false}'
headers:
Accept:
- application/json
Accept-Encoding:
- gzip, deflate, zstd
Connection:
- keep-alive
Content-Length:
- '145'
Host:
- api.mistral.ai
content-type:
- application/json
user-agent:
- mistral-client-python/1.12.4
method: POST
uri: https://api.mistral.ai/v1/chat/completions
response:
body:
string: '{"id":"fc4f405716c9495f9f69c71a70c0315a","created":1775101277,"model":"mistral-small-latest","usage":{"prompt_tokens":28,"total_tokens":30,"completion_tokens":2,"prompt_tokens_details":{"cached_tokens":0}},"object":"chat.completion","choices":[{"index":0,"finish_reason":"stop","message":{"role":"assistant","tool_calls":null,"content":"4"}}]}'
headers:
CF-RAY:
- 9e5cd1284d73ec72-YYZ
Connection:
- keep-alive
Content-Type:
- application/json
Date:
- Thu, 02 Apr 2026 03:41:17 GMT
Server:
- cloudflare
Strict-Transport-Security:
- max-age=15552000; includeSubDomains; preload
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
cf-cache-status:
- DYNAMIC
content-length:
- '343'
mistral-correlation-id:
- 019d4c47-f568-70ba-82e9-1dd031ca2281
set-cookie:
- __cf_bm=pJa5CFWeZmlNSxBwIZzCulbNsq77ciOi_tKoiaEoRFg-1775101277.4821184-1.0.1.1-4bsLhPMRrcfApvStpsoTV.fkBBREwW1.NrLSds4Ht4DIkkbAO1CP0_8nM06ljygtPHX3cvBUKsF.GkSqejx.hfLnkbxx7_z1ouMEXFcgzrZh8hZjwGCT.whBO.vHN1a9;
HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 04:11:17
GMT
- _cfuvid=cHleL7rWUZfTZ4B_COwe08XH7.4AE12NcSwOkHQM5qA-1775101277.4821184-1.0.1.1-IAavRWhHK0RAaUXKL8lRC4IPxJt.pRQt9NmnEtYNagM;
HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai
x-envoy-upstream-service-time:
- '251'
x-kong-proxy-latency:
- '10'
x-kong-request-id:
- 019d4c47-f568-70ba-82e9-1dd031ca2281
x-kong-upstream-latency:
- '252'
x-ratelimit-limit-req-minute:
- '60'
x-ratelimit-limit-tokens-minute:
- '375000'
x-ratelimit-remaining-req-minute:
- '51'
x-ratelimit-remaining-tokens-minute:
- '374879'
x-ratelimit-tokens-query-cost:
- '30'
status:
code: 200
message: OK
version: 1
Loading
Loading