diff --git a/py/noxfile.py b/py/noxfile.py index c92395da..0b854483 100644 --- a/py/noxfile.py +++ b/py/noxfile.py @@ -71,6 +71,7 @@ def _pinned_python_version(): "autoevals", "braintrust_core", "litellm", + "mistralai", "openrouter", "opentelemetry-api", "opentelemetry-sdk", @@ -105,6 +106,7 @@ def _pinned_python_version(): GOOGLE_ADK_VERSIONS = (LATEST, "1.14.1") LANGCHAIN_VERSIONS = (LATEST, "0.3.28") OPENROUTER_VERSIONS = (LATEST, "0.6.0") +MISTRAL_VERSIONS = (LATEST, "1.12.4") # temporalio 1.19.0+ requires Python >= 3.10; skip Python 3.9 entirely TEMPORAL_VERSIONS = (LATEST, "1.20.0", "1.19.0") PYTEST_VERSIONS = (LATEST, "8.4.2") @@ -266,6 +268,16 @@ def test_openrouter(session, version): _run_tests(session, f"{INTEGRATION_DIR}/openrouter/test_openrouter.py") +@nox.session() +@nox.parametrize("version", MISTRAL_VERSIONS, ids=MISTRAL_VERSIONS) +def test_mistral(session, version): + """Test the native Mistral SDK integration.""" + _install_test_deps(session) + _install(session, "mistralai", version) + _run_tests(session, f"{INTEGRATION_DIR}/mistral/test_mistral.py") + _run_core_tests(session) + + @nox.session() @nox.parametrize("version", LITELLM_VERSIONS, ids=LITELLM_VERSIONS) def test_litellm(session, version): diff --git a/py/src/braintrust/auto.py b/py/src/braintrust/auto.py index dc44c7d2..7e22714c 100644 --- a/py/src/braintrust/auto.py +++ b/py/src/braintrust/auto.py @@ -17,6 +17,7 @@ GoogleGenAIIntegration, LangChainIntegration, LiteLLMIntegration, + MistralIntegration, OpenRouterIntegration, PydanticAIIntegration, ) @@ -46,6 +47,7 @@ def auto_instrument( pydantic_ai: bool = True, google_genai: bool = True, openrouter: bool = True, + mistral: bool = True, agno: bool = True, agentscope: bool = True, claude_agent_sdk: bool = True, @@ -69,6 +71,7 @@ def auto_instrument( pydantic_ai: Enable Pydantic AI instrumentation (default: True) google_genai: Enable Google GenAI instrumentation (default: True) openrouter: Enable OpenRouter instrumentation (default: True) 
+ mistral: Enable Mistral instrumentation (default: True) agno: Enable Agno instrumentation (default: True) agentscope: Enable AgentScope instrumentation (default: True) claude_agent_sdk: Enable Claude Agent SDK instrumentation (default: True) @@ -134,6 +137,8 @@ def auto_instrument( results["google_genai"] = _instrument_integration(GoogleGenAIIntegration) if openrouter: results["openrouter"] = _instrument_integration(OpenRouterIntegration) + if mistral: + results["mistral"] = _instrument_integration(MistralIntegration) if agno: results["agno"] = _instrument_integration(AgnoIntegration) if agentscope: diff --git a/py/src/braintrust/conftest.py b/py/src/braintrust/conftest.py index 2345b227..ee94881c 100644 --- a/py/src/braintrust/conftest.py +++ b/py/src/braintrust/conftest.py @@ -152,6 +152,7 @@ def setup_braintrust(): os.environ.setdefault("GOOGLE_API_KEY", os.getenv("GEMINI_API_KEY", "your_google_api_key_here")) os.environ.setdefault("OPENAI_API_KEY", "sk-test-dummy-api-key-for-vcr-tests") os.environ.setdefault("ANTHROPIC_API_KEY", "sk-ant-test-dummy-api-key-for-vcr-tests") + os.environ.setdefault("MISTRAL_API_KEY", "mistral-test-dummy-api-key-for-vcr-tests") @pytest.fixture(autouse=True) diff --git a/py/src/braintrust/integrations/__init__.py b/py/src/braintrust/integrations/__init__.py index 0062ec77..ebc33a52 100644 --- a/py/src/braintrust/integrations/__init__.py +++ b/py/src/braintrust/integrations/__init__.py @@ -7,6 +7,7 @@ from .google_genai import GoogleGenAIIntegration from .langchain import LangChainIntegration from .litellm import LiteLLMIntegration +from .mistral import MistralIntegration from .openrouter import OpenRouterIntegration from .pydantic_ai import PydanticAIIntegration @@ -21,6 +22,7 @@ "GoogleGenAIIntegration", "LiteLLMIntegration", "LangChainIntegration", + "MistralIntegration", "OpenRouterIntegration", "PydanticAIIntegration", ] diff --git a/py/src/braintrust/integrations/auto_test_scripts/test_auto_mistral.py 
b/py/src/braintrust/integrations/auto_test_scripts/test_auto_mistral.py new file mode 100644 index 00000000..ba6d2b7a --- /dev/null +++ b/py/src/braintrust/integrations/auto_test_scripts/test_auto_mistral.py @@ -0,0 +1,40 @@ +"""Test auto_instrument for Mistral.""" + +import os +from pathlib import Path + +from braintrust.auto import auto_instrument +from braintrust.wrappers.test_utils import autoinstrument_test_context + + +try: + from mistralai.client import Mistral +except ImportError: + from mistralai import Mistral + + +results = auto_instrument() +assert results.get("mistral") == True + +results2 = auto_instrument() +assert results2.get("mistral") == True + +MISTRAL_CASSETTES_DIR = Path(__file__).resolve().parent.parent / "mistral" / "cassettes" + +with autoinstrument_test_context("test_auto_mistral", cassettes_dir=MISTRAL_CASSETTES_DIR) as memory_logger: + client = Mistral(api_key=os.environ.get("MISTRAL_API_KEY")) + response = client.chat.complete( + model="mistral-small-latest", + messages=[{"role": "user", "content": "What is 2+2? 
Reply with just the number."}], + max_tokens=10, + ) + assert "4" in str(response.choices[0].message.content) + + spans = memory_logger.pop() + assert len(spans) == 1, f"Expected 1 span, got {len(spans)}" + span = spans[0] + assert span["metadata"]["provider"] == "mistral" + assert span["metadata"]["model"] == "mistral-small-latest" + assert "4" in str(span["output"]) + +print("SUCCESS") diff --git a/py/src/braintrust/integrations/mistral/__init__.py b/py/src/braintrust/integrations/mistral/__init__.py new file mode 100644 index 00000000..e9995361 --- /dev/null +++ b/py/src/braintrust/integrations/mistral/__init__.py @@ -0,0 +1,10 @@ +"""Braintrust integration for the Mistral Python SDK.""" + +from .integration import MistralIntegration +from .tracing import wrap_mistral + + +__all__ = [ + "MistralIntegration", + "wrap_mistral", +] diff --git a/py/src/braintrust/integrations/mistral/cassettes/test_auto_mistral.yaml b/py/src/braintrust/integrations/mistral/cassettes/test_auto_mistral.yaml new file mode 100644 index 00000000..31d665e2 --- /dev/null +++ b/py/src/braintrust/integrations/mistral/cassettes/test_auto_mistral.yaml @@ -0,0 +1,79 @@ +interactions: +- request: + body: '{"model":"mistral-small-latest","messages":[{"content":"What is 2+2? 
Reply + with just the number.","role":"user"}],"max_tokens":10,"stream":false}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '145' + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/chat/completions + response: + body: + string: '{"id":"e3b5125b7fb14e58a881fd961272620b","created":1775090570,"model":"mistral-small-latest","usage":{"prompt_tokens":28,"total_tokens":30,"completion_tokens":2,"prompt_tokens_details":{"cached_tokens":0}},"object":"chat.completion","choices":[{"index":0,"finish_reason":"stop","message":{"role":"assistant","tool_calls":null,"content":"4"}}]}' + headers: + CF-RAY: + - 9e5bcbc17e7fe930-YYZ + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Thu, 02 Apr 2026 00:42:50 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + content-length: + - '343' + mistral-correlation-id: + - 019d4ba4-952f-7c2e-ac5e-894689343339 + set-cookie: + - __cf_bm=yF37zbYIJO9EqaPOC1bDh7_3kQPvMC8QC32ASLfT2h4-1775090570.477608-1.0.1.1-LaKQJ5wtfgLy9SLNfOCHStgMj3L4js0owc.thrbfcT8NNZTTeNKCI_IajFYVPRxRTJzhBtd_9_xMuCxdZHyGVxufwQxkZysWkTrscYV4kINNvqQeWJWjinIngrscU1gW; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 01:12:50 + GMT + - _cfuvid=9SAmeM9YOFyYq3TU2EPLgqecu.JXGOfVKHopWq8EGYA-1775090570.477608-1.0.1.1-zmkuYsDfIm4OWQayZOKDtpjmdMkjOEdGvJnDk3bSb6o; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '266' + x-kong-proxy-latency: + - '15' + x-kong-request-id: + - 019d4ba4-952f-7c2e-ac5e-894689343339 + x-kong-upstream-latency: + - '267' + 
x-ratelimit-limit-req-minute: + - '60' + x-ratelimit-limit-tokens-minute: + - '375000' + x-ratelimit-remaining-req-minute: + - '54' + x-ratelimit-remaining-tokens-minute: + - '374849' + x-ratelimit-tokens-query-cost: + - '30' + status: + code: 200 + message: OK +version: 1 diff --git a/py/src/braintrust/integrations/mistral/cassettes/test_mistral_integration_setup_creates_spans.yaml b/py/src/braintrust/integrations/mistral/cassettes/test_mistral_integration_setup_creates_spans.yaml new file mode 100644 index 00000000..844a4fb6 --- /dev/null +++ b/py/src/braintrust/integrations/mistral/cassettes/test_mistral_integration_setup_creates_spans.yaml @@ -0,0 +1,156 @@ +interactions: +- request: + body: '{"model":"mistral-small-latest","messages":[{"content":"What is 2+2? Reply + with just the number.","role":"user"}],"max_tokens":10,"stream":false}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '145' + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/chat/completions + response: + body: + string: '{"id":"cfca3ec9b4d9406aa3d4edd7c6dd3e02","created":1775090565,"model":"mistral-small-latest","usage":{"prompt_tokens":28,"total_tokens":30,"completion_tokens":2,"prompt_tokens_details":{"cached_tokens":0}},"object":"chat.completion","choices":[{"index":0,"finish_reason":"stop","message":{"role":"assistant","tool_calls":null,"content":"4"}}]}' + headers: + CF-RAY: + - 9e5bcba42f4af337-YYZ + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Thu, 02 Apr 2026 00:42:46 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + content-length: 
+ - '343' + mistral-correlation-id: + - 019d4ba4-82de-7d54-b8db-36eadad0d6fb + set-cookie: + - __cf_bm=LZgwv7VkE_uahQNBcsoAHDympjTRdYshtpiktxKSZUI-1775090565.7901893-1.0.1.1-WlnWQpklMhQAJkPSitfyvrMNeCfzJKpmIzUjs2T5nF.szt.W4_zuzgtFvNmOVbft08at8DXTWjsy5N5xQp3U4Ld1_miC9TnDJdXzgv0dALELidawrG5vM_X_ZDZDqRKm; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 01:12:46 + GMT + - _cfuvid=iORpbZB2m.YumIunOW4.E2_K7fN_HKJtzYXjVULh0KE-1775090565.7901893-1.0.1.1-W3KSrmaSr.tgkkvkXF_jmOsW6IGGBpEk0utFIwEVm20; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '187' + x-kong-proxy-latency: + - '9' + x-kong-request-id: + - 019d4ba4-82de-7d54-b8db-36eadad0d6fb + x-kong-upstream-latency: + - '188' + x-ratelimit-limit-req-minute: + - '60' + x-ratelimit-limit-tokens-minute: + - '375000' + x-ratelimit-remaining-req-minute: + - '55' + x-ratelimit-remaining-tokens-minute: + - '374879' + x-ratelimit-tokens-query-cost: + - '30' + status: + code: 200 + message: OK +- request: + body: '{"model":"mistral-small-latest","messages":[{"content":"What is 2+2? 
Reply + with just the number.","role":"user"}],"max_tokens":10,"stream":false}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '145' + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/chat/completions + response: + body: + string: '{"id":"fc4f405716c9495f9f69c71a70c0315a","created":1775101277,"model":"mistral-small-latest","usage":{"prompt_tokens":28,"total_tokens":30,"completion_tokens":2,"prompt_tokens_details":{"cached_tokens":0}},"object":"chat.completion","choices":[{"index":0,"finish_reason":"stop","message":{"role":"assistant","tool_calls":null,"content":"4"}}]}' + headers: + CF-RAY: + - 9e5cd1284d73ec72-YYZ + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Thu, 02 Apr 2026 03:41:17 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + content-length: + - '343' + mistral-correlation-id: + - 019d4c47-f568-70ba-82e9-1dd031ca2281 + set-cookie: + - __cf_bm=pJa5CFWeZmlNSxBwIZzCulbNsq77ciOi_tKoiaEoRFg-1775101277.4821184-1.0.1.1-4bsLhPMRrcfApvStpsoTV.fkBBREwW1.NrLSds4Ht4DIkkbAO1CP0_8nM06ljygtPHX3cvBUKsF.GkSqejx.hfLnkbxx7_z1ouMEXFcgzrZh8hZjwGCT.whBO.vHN1a9; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 04:11:17 + GMT + - _cfuvid=cHleL7rWUZfTZ4B_COwe08XH7.4AE12NcSwOkHQM5qA-1775101277.4821184-1.0.1.1-IAavRWhHK0RAaUXKL8lRC4IPxJt.pRQt9NmnEtYNagM; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '251' + x-kong-proxy-latency: + - '10' + x-kong-request-id: + - 019d4c47-f568-70ba-82e9-1dd031ca2281 + x-kong-upstream-latency: + - '252' + 
x-ratelimit-limit-req-minute: + - '60' + x-ratelimit-limit-tokens-minute: + - '375000' + x-ratelimit-remaining-req-minute: + - '51' + x-ratelimit-remaining-tokens-minute: + - '374879' + x-ratelimit-tokens-query-cost: + - '30' + status: + code: 200 + message: OK +version: 1 diff --git a/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_agents_complete_async.yaml b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_agents_complete_async.yaml new file mode 100644 index 00000000..8cb9cbd4 --- /dev/null +++ b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_agents_complete_async.yaml @@ -0,0 +1,198 @@ +interactions: +- request: + body: '{"model":"mistral-small-latest","name":"braintrust-test-agent-1775144108259","instructions":"You + are concise. Keep responses under five words."}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '144' + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/agents + response: + body: + string: '{"instructions":"You are concise. 
Keep responses under five words.","tools":[],"completion_args":{"stop":null,"presence_penalty":null,"frequency_penalty":null,"temperature":null,"top_p":null,"max_tokens":null,"random_seed":null,"prediction":null,"response_format":null,"tool_choice":"auto","reasoning_effort":null},"guardrails":[],"model":"mistral-small-latest","name":"braintrust-test-agent-1775144108259","description":null,"handoffs":null,"metadata":null,"object":"agent","id":"ag_019d4ed5818e714dadd1555a16210946","version":0,"versions":[],"created_at":"2026-04-02T15:35:08.451687Z","updated_at":"2026-04-02T15:35:08.451690Z","deployment_chat":false,"source":"api","version_message":null}' + headers: + CF-RAY: + - 9e60e6d53b04378a-YYZ + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Thu, 02 Apr 2026 15:35:08 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + content-length: + - '692' + mistral-correlation-id: + - 019d4ed5-8182-7f70-9f0f-a39ce5e5491f + set-cookie: + - __cf_bm=4anqu9e0f8pMfgsF97TFRQ6XM3Vaz3WNCEQdjUV8_3I-1775144108.3561726-1.0.1.1-VhS2MiZu5NQy95v978uXGwcqqsFJOUIn__g_F7pHA4K0g4WM641hhiqFOSuS9uRVsSbIQ70PREohvuh6Xx4B07NvRgA.wSCY3nDWuvjRRYBU6UCY03Vzhp5.VZzFbqKz; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 16:05:08 + GMT + - _cfuvid=5h8LVK_bIXffkDPpkmnYyVVsQliYacp_TzVuNPCR.pY-1775144108.3561726-1.0.1.1-_NTSO75M8uc4UgkD9jxDS4qcJsGmDoCx6jxVGCG.9DI; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '43' + x-kong-proxy-latency: + - '8' + x-kong-request-id: + - 019d4ed5-8182-7f70-9f0f-a39ce5e5491f + x-kong-upstream-latency: + - '44' + status: + code: 200 + message: OK +- request: + body: '{"messages":[{"content":"What is 7+2? 
Reply with just the number.","role":"user"}],"agent_id":"ag_019d4ed5818e714dadd1555a16210946","max_tokens":10,"stream":false}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '163' + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/agents/completions + response: + body: + string: '{"id":"1a859bccebcd4704bade03b4fe36014f","created":1775144108,"model":"mistral-small-latest","usage":{"prompt_tokens":40,"total_tokens":42,"completion_tokens":2,"prompt_tokens_details":{"cached_tokens":0}},"object":"chat.completion","choices":[{"index":0,"finish_reason":"stop","message":{"role":"assistant","tool_calls":null,"content":"9"}}]}' + headers: + CF-RAY: + - 9e60e6d6ed19ccd9-YYZ + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Thu, 02 Apr 2026 15:35:09 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + content-length: + - '343' + mistral-correlation-id: + - 019d4ed5-8298-7f48-9875-5b9e3da6e427 + set-cookie: + - __cf_bm=lkhWxqz3x6B5zmDHibuBYzRCMcpypkuHqeAnfz50Xy4-1775144108.6276329-1.0.1.1-J8ckmc31kob.U59lnpJBlYu1OqZZDPpA1lcpTA0DOxyIB8GjaD1FN1DpFhoCLiBswhEgoLbcpiCkfLOdptbcqE7JUtAKjds4cfrA5H25alxW_A.xl0oJ8UrHmnqsWUqJ; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 16:05:09 + GMT + - _cfuvid=T3D_Br6YTyVTBpLBUXx2ZK928UomXdNtEQZntV8vD00-1775144108.6276329-1.0.1.1-H5m.erxFgj9qfkeCIuN6gap9FD_cyuLVEClOMt_lO3w; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '317' + x-kong-proxy-latency: + - '10' + x-kong-request-id: + - 019d4ed5-8298-7f48-9875-5b9e3da6e427 + 
x-kong-upstream-latency: + - '318' + x-ratelimit-limit-req-minute: + - '60' + x-ratelimit-limit-tokens-minute: + - '375000' + x-ratelimit-remaining-req-minute: + - '57' + x-ratelimit-remaining-tokens-minute: + - '374873' + x-ratelimit-tokens-query-cost: + - '42' + status: + code: 200 + message: OK +- request: + body: '' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Cookie: + - __cf_bm=4anqu9e0f8pMfgsF97TFRQ6XM3Vaz3WNCEQdjUV8_3I-1775144108.3561726-1.0.1.1-VhS2MiZu5NQy95v978uXGwcqqsFJOUIn__g_F7pHA4K0g4WM641hhiqFOSuS9uRVsSbIQ70PREohvuh6Xx4B07NvRgA.wSCY3nDWuvjRRYBU6UCY03Vzhp5.VZzFbqKz; + _cfuvid=5h8LVK_bIXffkDPpkmnYyVVsQliYacp_TzVuNPCR.pY-1775144108.3561726-1.0.1.1-_NTSO75M8uc4UgkD9jxDS4qcJsGmDoCx6jxVGCG.9DI + Host: + - api.mistral.ai + user-agent: + - mistral-client-python/1.12.4 + method: DELETE + uri: https://api.mistral.ai/v1/agents/ag_019d4ed5818e714dadd1555a16210946 + response: + body: + string: '' + headers: + CF-RAY: + - 9e60e6dadc3b44b0-YYZ + Connection: + - keep-alive + Date: + - Thu, 02 Apr 2026 15:35:09 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + mistral-correlation-id: + - 019d4ed5-8509-761b-a41f-28f7f60a37f8 + x-envoy-upstream-service-time: + - '51' + x-kong-proxy-latency: + - '17' + x-kong-request-id: + - 019d4ed5-8509-761b-a41f-28f7f60a37f8 + x-kong-upstream-latency: + - '51' + status: + code: 204 + message: No Content +version: 1 diff --git a/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_agents_complete_sync.yaml b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_agents_complete_sync.yaml new file mode 100644 index 00000000..a9cf24d0 --- /dev/null +++ 
b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_agents_complete_sync.yaml @@ -0,0 +1,195 @@ +interactions: +- request: + body: '{"model":"mistral-small-latest","name":"braintrust-test-agent-1775144105399","instructions":"You + are concise. Keep responses under five words."}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '144' + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/agents + response: + body: + string: '{"instructions":"You are concise. Keep responses under five words.","tools":[],"completion_args":{"stop":null,"presence_penalty":null,"frequency_penalty":null,"temperature":null,"top_p":null,"max_tokens":null,"random_seed":null,"prediction":null,"response_format":null,"tool_choice":"auto","reasoning_effort":null},"guardrails":[],"model":"mistral-small-latest","name":"braintrust-test-agent-1775144105399","description":null,"handoffs":null,"metadata":null,"object":"agent","id":"ag_019d4ed576df73f2a4e8c69e084a980a","version":0,"versions":[],"created_at":"2026-04-02T15:35:05.738984Z","updated_at":"2026-04-02T15:35:05.738986Z","deployment_chat":false,"source":"api","version_message":null}' + headers: + CF-RAY: + - 9e60e6c41d5b92c6-YYZ + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Thu, 02 Apr 2026 15:35:05 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + content-length: + - '692' + mistral-correlation-id: + - 019d4ed5-76ca-7447-9008-7615a7f0a62f + set-cookie: + - 
__cf_bm=CWegD5oTQWUxCc2e7alHuLDW35qKZoHGqWTQLwuNta4-1775144105.6165667-1.0.1.1-S8HpXz2GlW2cWEdK0LD5lZFZm2ifbfpQu62XnBGnwy61ieutgodGpLebE1zJaH6shkcJMI5kM5lOxg7l81O88WhilV2siG.ri2oROIsBiL9XfiqilWK0JymRgiC.PXCc; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 16:05:05 + GMT + - _cfuvid=ky5GFqrLXM0RWndDenTiyDH.JZJn.qRG.8.cHjYY_Tc-1775144105.6165667-1.0.1.1-_JfprR1cHnIyJ_aA3_46TkSDCaJb1rjOcgXxLrwLRWM; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '66' + x-kong-proxy-latency: + - '14' + x-kong-request-id: + - 019d4ed5-76ca-7447-9008-7615a7f0a62f + x-kong-upstream-latency: + - '67' + status: + code: 200 + message: OK +- request: + body: '{"messages":[{"content":"What is 8+1? Reply with just the number.","role":"user"}],"agent_id":"ag_019d4ed576df73f2a4e8c69e084a980a","max_tokens":10,"stream":false}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '163' + Cookie: + - __cf_bm=CWegD5oTQWUxCc2e7alHuLDW35qKZoHGqWTQLwuNta4-1775144105.6165667-1.0.1.1-S8HpXz2GlW2cWEdK0LD5lZFZm2ifbfpQu62XnBGnwy61ieutgodGpLebE1zJaH6shkcJMI5kM5lOxg7l81O88WhilV2siG.ri2oROIsBiL9XfiqilWK0JymRgiC.PXCc; + _cfuvid=ky5GFqrLXM0RWndDenTiyDH.JZJn.qRG.8.cHjYY_Tc-1775144105.6165667-1.0.1.1-_JfprR1cHnIyJ_aA3_46TkSDCaJb1rjOcgXxLrwLRWM + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/agents/completions + response: + body: + string: '{"id":"cd3cdbddd09d4e91b0d39df88dc8726c","created":1775144106,"model":"mistral-small-latest","usage":{"prompt_tokens":40,"total_tokens":42,"completion_tokens":2,"prompt_tokens_details":{"cached_tokens":0}},"object":"chat.completion","choices":[{"index":0,"finish_reason":"stop","message":{"role":"assistant","tool_calls":null,"content":"9"}}]}' + headers: + CF-RAY: + - 9e60e6c5fcad94e3-YYZ + Connection: + - keep-alive 
+ Content-Type: + - application/json + Date: + - Thu, 02 Apr 2026 15:35:06 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + content-length: + - '343' + mistral-correlation-id: + - 019d4ed5-7801-7da1-9242-f35dafe1b705 + x-envoy-upstream-service-time: + - '391' + x-kong-proxy-latency: + - '9' + x-kong-request-id: + - 019d4ed5-7801-7da1-9242-f35dafe1b705 + x-kong-upstream-latency: + - '392' + x-ratelimit-limit-req-minute: + - '60' + x-ratelimit-limit-tokens-minute: + - '375000' + x-ratelimit-remaining-req-minute: + - '59' + x-ratelimit-remaining-tokens-minute: + - '374958' + x-ratelimit-tokens-query-cost: + - '42' + status: + code: 200 + message: OK +- request: + body: '' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Cookie: + - __cf_bm=CWegD5oTQWUxCc2e7alHuLDW35qKZoHGqWTQLwuNta4-1775144105.6165667-1.0.1.1-S8HpXz2GlW2cWEdK0LD5lZFZm2ifbfpQu62XnBGnwy61ieutgodGpLebE1zJaH6shkcJMI5kM5lOxg7l81O88WhilV2siG.ri2oROIsBiL9XfiqilWK0JymRgiC.PXCc; + _cfuvid=ky5GFqrLXM0RWndDenTiyDH.JZJn.qRG.8.cHjYY_Tc-1775144105.6165667-1.0.1.1-_JfprR1cHnIyJ_aA3_46TkSDCaJb1rjOcgXxLrwLRWM + Host: + - api.mistral.ai + user-agent: + - mistral-client-python/1.12.4 + method: DELETE + uri: https://api.mistral.ai/v1/agents/ag_019d4ed576df73f2a4e8c69e084a980a + response: + body: + string: '' + headers: + CF-RAY: + - 9e60e6cacf9dab8e-YYZ + Connection: + - keep-alive + Date: + - Thu, 02 Apr 2026 15:35:06 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + mistral-correlation-id: + - 
019d4ed5-7b02-75f4-97fc-b044784e4933 + x-envoy-upstream-service-time: + - '45' + x-kong-proxy-latency: + - '13' + x-kong-request-id: + - 019d4ed5-7b02-75f4-97fc-b044784e4933 + x-kong-upstream-latency: + - '46' + status: + code: 204 + message: No Content +version: 1 diff --git a/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_agents_stream_async.yaml b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_agents_stream_async.yaml new file mode 100644 index 00000000..65757676 --- /dev/null +++ b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_agents_stream_async.yaml @@ -0,0 +1,198 @@ +interactions: +- request: + body: '{"model":"mistral-small-latest","name":"braintrust-test-agent-1775144109478","instructions":"You + are concise. Keep responses under five words."}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '144' + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/agents + response: + body: + string: '{"instructions":"You are concise. 
Keep responses under five words.","tools":[],"completion_args":{"stop":null,"presence_penalty":null,"frequency_penalty":null,"temperature":null,"top_p":null,"max_tokens":null,"random_seed":null,"prediction":null,"response_format":null,"tool_choice":"auto","reasoning_effort":null},"guardrails":[],"model":"mistral-small-latest","name":"braintrust-test-agent-1775144109478","description":null,"handoffs":null,"metadata":null,"object":"agent","id":"ag_019d4ed5865d73448ec080e89343eae0","version":0,"versions":[],"created_at":"2026-04-02T15:35:09.690238Z","updated_at":"2026-04-02T15:35:09.690241Z","deployment_chat":false,"source":"api","version_message":null}' + headers: + CF-RAY: + - 9e60e6dcef53860b-YYZ + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Thu, 02 Apr 2026 15:35:09 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + content-length: + - '692' + mistral-correlation-id: + - 019d4ed5-864e-73dd-a0d4-fc98d55eab6e + set-cookie: + - __cf_bm=DU9InpdCjBJhbsTq15i51tza30QtpO4NxVCiR5ZDONI-1775144109.5821874-1.0.1.1-mpvwOLB9yzKYi1idfPXP8tkT_FGDD7SNlLGhb2QqTqlz1GKWjo7vB5vc4U6rThVfzqvBToJxXUJ7T8PumXDAqDCKO7hWIDnp.laNB_nQn0BIJbuPSNW.aCwPKxlYXcoL; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 16:05:09 + GMT + - _cfuvid=0cCCUKDDZMr6fet1gK1gI1PsZhaP1flvZEyt_17GoNU-1775144109.5821874-1.0.1.1-KFxUaWRdiHtZH0rNyUsHSPbiOvG3MvvoU1nqIxVHW2g; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '52' + x-kong-proxy-latency: + - '11' + x-kong-request-id: + - 019d4ed5-864e-73dd-a0d4-fc98d55eab6e + x-kong-upstream-latency: + - '53' + status: + code: 200 + message: OK +- request: + body: '{"messages":[{"content":"What is 4+8? 
Reply with just the number.","role":"user"}],"agent_id":"ag_019d4ed5865d73448ec080e89343eae0","max_tokens":10,"stream":true}' + headers: + Accept: + - text/event-stream + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '162' + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/agents/completions + response: + body: + string: 'data: {"id":"ed08e6612e4e4f89bf7b5de434fb96d8","object":"chat.completion.chunk","created":1775144110,"model":"mistral-small-latest","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null}]} + + + data: {"id":"ed08e6612e4e4f89bf7b5de434fb96d8","object":"chat.completion.chunk","created":1775144110,"model":"mistral-small-latest","choices":[{"index":0,"delta":{"content":"1"},"finish_reason":null}],"p":"abcdefghijklmnopqrst"} + + + data: {"id":"ed08e6612e4e4f89bf7b5de434fb96d8","object":"chat.completion.chunk","created":1775144110,"model":"mistral-small-latest","choices":[{"index":0,"delta":{"content":"2"},"finish_reason":"stop"}],"usage":{"prompt_tokens":40,"total_tokens":43,"completion_tokens":3,"prompt_tokens_details":{"cached_tokens":0}},"p":"abcdefghijklmnopqrstuvwxyz012345"} + + + data: [DONE] + + + ' + headers: + CF-RAY: + - 9e60e6debf14ac30-YYZ + Connection: + - keep-alive + Content-Type: + - text/event-stream; charset=utf-8 + Date: + - Thu, 02 Apr 2026 15:35:10 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + mistral-correlation-id: + - 019d4ed5-8778-78df-a966-c3ce0120a7fd + set-cookie: + - 
__cf_bm=KDop7nyegtYSeOFbXzikJCgQrpdO6yTls8JnuHGD40w-1775144109.877293-1.0.1.1-3MSiabvKpFB4CfN0VhI91bwUbiXowXaDoB9fejWzrhhV1D0qn6PpW1ZMxDYkNdNZxZMzBmUmr2p96iEqUrJrxMzpFcCvmZkedF2nZvO.F0Pq72su71.Ust4.V4t082M2; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 16:05:10 + GMT + - _cfuvid=BvFOySNAsd0v.J9WiNUil46WddRbkW6f0DF6zvHPkPY-1775144109.877293-1.0.1.1-d2Lv4KgDzt9v.3vlT3NR0HqwkYjWxVTbu17.ZFTukSQ; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '292' + x-kong-proxy-latency: + - '15' + x-kong-request-id: + - 019d4ed5-8778-78df-a966-c3ce0120a7fd + x-kong-upstream-latency: + - '293' + status: + code: 200 + message: OK +- request: + body: '' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Cookie: + - __cf_bm=DU9InpdCjBJhbsTq15i51tza30QtpO4NxVCiR5ZDONI-1775144109.5821874-1.0.1.1-mpvwOLB9yzKYi1idfPXP8tkT_FGDD7SNlLGhb2QqTqlz1GKWjo7vB5vc4U6rThVfzqvBToJxXUJ7T8PumXDAqDCKO7hWIDnp.laNB_nQn0BIJbuPSNW.aCwPKxlYXcoL; + _cfuvid=0cCCUKDDZMr6fet1gK1gI1PsZhaP1flvZEyt_17GoNU-1775144109.5821874-1.0.1.1-KFxUaWRdiHtZH0rNyUsHSPbiOvG3MvvoU1nqIxVHW2g + Host: + - api.mistral.ai + user-agent: + - mistral-client-python/1.12.4 + method: DELETE + uri: https://api.mistral.ai/v1/agents/ag_019d4ed5865d73448ec080e89343eae0 + response: + body: + string: '' + headers: + CF-RAY: + - 9e60e6e22f1036c3-YYZ + Connection: + - keep-alive + Date: + - Thu, 02 Apr 2026 15:35:10 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + mistral-correlation-id: + - 019d4ed5-899d-7165-b987-128682174c48 + x-envoy-upstream-service-time: + - '52' + x-kong-proxy-latency: + - '9' + x-kong-request-id: + - 019d4ed5-899d-7165-b987-128682174c48 + x-kong-upstream-latency: + - '52' + status: + code: 
204 + message: No Content +version: 1 diff --git a/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_agents_stream_sync.yaml b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_agents_stream_sync.yaml new file mode 100644 index 00000000..04d8263c --- /dev/null +++ b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_agents_stream_sync.yaml @@ -0,0 +1,195 @@ +interactions: +- request: + body: '{"model":"mistral-small-latest","name":"braintrust-test-agent-1775144106966","instructions":"You + are concise. Keep responses under five words."}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '144' + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/agents + response: + body: + string: '{"instructions":"You are concise. Keep responses under five words.","tools":[],"completion_args":{"stop":null,"presence_penalty":null,"frequency_penalty":null,"temperature":null,"top_p":null,"max_tokens":null,"random_seed":null,"prediction":null,"response_format":null,"tool_choice":"auto","reasoning_effort":null},"guardrails":[],"model":"mistral-small-latest","name":"braintrust-test-agent-1775144106966","description":null,"handoffs":null,"metadata":null,"object":"agent","id":"ag_019d4ed57cde760d906f3f8a44e2c966","version":0,"versions":[],"created_at":"2026-04-02T15:35:07.252042Z","updated_at":"2026-04-02T15:35:07.252045Z","deployment_chat":false,"source":"api","version_message":null}' + headers: + CF-RAY: + - 9e60e6cdb8dca06e-YYZ + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Thu, 02 Apr 2026 15:35:07 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + 
alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + content-length: + - '692' + mistral-correlation-id: + - 019d4ed5-7ccd-7121-b6ec-651b7023b3fe + set-cookie: + - __cf_bm=Jiq0wI8VEdM0HAgdhmP_lRDHGE1tb7ipoIjLHAmxqI8-1775144107.1543226-1.0.1.1-KB7VUF.XHsIp8hYPDAYQBRaR6N2znFlXybkJdqgxCdkpMPICreFObSLkdclxKa4W7RWPxPPLTO4THV7SAi_ErnaSF0KPkmlbF8MrPRm7c85smmncgHjWJ.3bnRBcCQMR; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 16:05:07 + GMT + - _cfuvid=MpsQrtuJaSRoc.W47eLnYJhviLNw0KvwqmNJQSddkBQ-1775144107.1543226-1.0.1.1-S8TbTQJSZ_xGdbtZIK8OV_C.o3On8vdFrCOkybNdIlg; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '46' + x-kong-proxy-latency: + - '12' + x-kong-request-id: + - 019d4ed5-7ccd-7121-b6ec-651b7023b3fe + x-kong-upstream-latency: + - '47' + status: + code: 200 + message: OK +- request: + body: '{"messages":[{"content":"What is 6+5? Reply with just the number.","role":"user"}],"agent_id":"ag_019d4ed57cde760d906f3f8a44e2c966","max_tokens":10,"stream":true}' + headers: + Accept: + - text/event-stream + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '162' + Cookie: + - __cf_bm=Jiq0wI8VEdM0HAgdhmP_lRDHGE1tb7ipoIjLHAmxqI8-1775144107.1543226-1.0.1.1-KB7VUF.XHsIp8hYPDAYQBRaR6N2znFlXybkJdqgxCdkpMPICreFObSLkdclxKa4W7RWPxPPLTO4THV7SAi_ErnaSF0KPkmlbF8MrPRm7c85smmncgHjWJ.3bnRBcCQMR; + _cfuvid=MpsQrtuJaSRoc.W47eLnYJhviLNw0KvwqmNJQSddkBQ-1775144107.1543226-1.0.1.1-S8TbTQJSZ_xGdbtZIK8OV_C.o3On8vdFrCOkybNdIlg + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/agents/completions + response: + body: + string: 'data: {"id":"5d10bb5311744c5f91f44c03044f0f49","object":"chat.completion.chunk","created":1775144107,"model":"mistral-small-latest","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null}]} + + + data: 
{"id":"5d10bb5311744c5f91f44c03044f0f49","object":"chat.completion.chunk","created":1775144107,"model":"mistral-small-latest","choices":[{"index":0,"delta":{"content":"1"},"finish_reason":null}],"p":"abcdefghijklmnopqrstuvwxyz"} + + + data: {"id":"5d10bb5311744c5f91f44c03044f0f49","object":"chat.completion.chunk","created":1775144107,"model":"mistral-small-latest","choices":[{"index":0,"delta":{"content":"1"},"finish_reason":"stop"}],"usage":{"prompt_tokens":40,"total_tokens":43,"completion_tokens":3,"prompt_tokens_details":{"cached_tokens":0}},"p":"abcdefghij"} + + + data: [DONE] + + + ' + headers: + CF-RAY: + - 9e60e6cf6ddeb1a2-YYZ + Connection: + - keep-alive + Content-Type: + - text/event-stream; charset=utf-8 + Date: + - Thu, 02 Apr 2026 15:35:07 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + mistral-correlation-id: + - 019d4ed5-7de3-7699-a1e0-02006cc04fb5 + x-envoy-upstream-service-time: + - '380' + x-kong-proxy-latency: + - '7' + x-kong-request-id: + - 019d4ed5-7de3-7699-a1e0-02006cc04fb5 + x-kong-upstream-latency: + - '381' + status: + code: 200 + message: OK +- request: + body: '' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Cookie: + - __cf_bm=Jiq0wI8VEdM0HAgdhmP_lRDHGE1tb7ipoIjLHAmxqI8-1775144107.1543226-1.0.1.1-KB7VUF.XHsIp8hYPDAYQBRaR6N2znFlXybkJdqgxCdkpMPICreFObSLkdclxKa4W7RWPxPPLTO4THV7SAi_ErnaSF0KPkmlbF8MrPRm7c85smmncgHjWJ.3bnRBcCQMR; + _cfuvid=MpsQrtuJaSRoc.W47eLnYJhviLNw0KvwqmNJQSddkBQ-1775144107.1543226-1.0.1.1-S8TbTQJSZ_xGdbtZIK8OV_C.o3On8vdFrCOkybNdIlg + Host: + - api.mistral.ai + user-agent: + - mistral-client-python/1.12.4 + method: DELETE + uri: https://api.mistral.ai/v1/agents/ag_019d4ed57cde760d906f3f8a44e2c966 + response: + body: + 
string: '' + headers: + CF-RAY: + - 9e60e6d35806ac09-YYZ + Connection: + - keep-alive + Date: + - Thu, 02 Apr 2026 15:35:08 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + mistral-correlation-id: + - 019d4ed5-8051-7edd-97eb-f73a808a48cc + x-envoy-upstream-service-time: + - '46' + x-kong-proxy-latency: + - '8' + x-kong-request-id: + - 019d4ed5-8051-7edd-97eb-f73a808a48cc + x-kong-upstream-latency: + - '47' + status: + code: 204 + message: No Content +version: 1 diff --git a/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_chat_complete_async.yaml b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_chat_complete_async.yaml new file mode 100644 index 00000000..6fcf1977 --- /dev/null +++ b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_chat_complete_async.yaml @@ -0,0 +1,156 @@ +interactions: +- request: + body: '{"model":"mistral-small-latest","messages":[{"content":"What is 3+3? 
Reply + with just the number.","role":"user"}],"max_tokens":10,"stream":false}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '145' + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/chat/completions + response: + body: + string: '{"id":"f1634b1a7d5e49efa531b49428a08aa0","created":1775090564,"model":"mistral-small-latest","usage":{"prompt_tokens":28,"total_tokens":30,"completion_tokens":2,"prompt_tokens_details":{"cached_tokens":0}},"object":"chat.completion","choices":[{"index":0,"finish_reason":"stop","message":{"role":"assistant","tool_calls":null,"content":"6"}}]}' + headers: + CF-RAY: + - 9e5bcb9e5f4390a8-YYZ + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Thu, 02 Apr 2026 00:42:45 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + content-length: + - '343' + mistral-correlation-id: + - 019d4ba4-7f2e-7d1a-a681-6249fea8e854 + set-cookie: + - __cf_bm=u5i53J4wyDHQLzdtjJ0g7DeYXr1YD_Bob1laFr42VZk-1775090564.8536549-1.0.1.1-J_chFRQxPbc7nAYFlhi_sihK5.Siqsd_J2FuwmvcJvB2no9G8l3i0rLBp.eo2rTfhAZiVt9gZSutUboeIVBfnE_WhqoiVOCbFAcJqk9ZnUB5ARw.t2hpf0OIRlAlY8x9; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 01:12:45 + GMT + - _cfuvid=7oQo152pogNyGmM43PTtdeX8Q8jY3tdlMTqLeNJIkUQ-1775090564.8536549-1.0.1.1-Se5L_uPurPT49o02zHEm5MwZ_r0vMYOt2hZsmhgtLkI; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '174' + x-kong-proxy-latency: + - '9' + x-kong-request-id: + - 019d4ba4-7f2e-7d1a-a681-6249fea8e854 + x-kong-upstream-latency: + - '175' + 
x-ratelimit-limit-req-minute: + - '60' + x-ratelimit-limit-tokens-minute: + - '375000' + x-ratelimit-remaining-req-minute: + - '57' + x-ratelimit-remaining-tokens-minute: + - '374909' + x-ratelimit-tokens-query-cost: + - '30' + status: + code: 200 + message: OK +- request: + body: '{"model":"mistral-small-latest","messages":[{"content":"What is 3+3? Reply + with just the number.","role":"user"}],"max_tokens":10,"stream":false}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '145' + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/chat/completions + response: + body: + string: '{"id":"66064af0c08e4977be098e9536a7db05","created":1775101273,"model":"mistral-small-latest","usage":{"prompt_tokens":28,"total_tokens":30,"completion_tokens":2,"prompt_tokens_details":{"cached_tokens":0}},"object":"chat.completion","choices":[{"index":0,"finish_reason":"stop","message":{"role":"assistant","tool_calls":null,"content":"6"}}]}' + headers: + CF-RAY: + - 9e5cd10b9c0852cf-YYZ + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Thu, 02 Apr 2026 03:41:13 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + content-length: + - '343' + mistral-correlation-id: + - 019d4c47-e376-7b5e-b288-ead2e6133315 + set-cookie: + - __cf_bm=x5CrBTy7lJMEuIJLYbI1fLPa6Wkyd_.23rf8iZTxu4A-1775101272.892413-1.0.1.1-g_cPrZ4MBlVWiQ17EMZuELSK79F98.1MK1L6vi.0pK9Zb338RdlLNKbiDRPq6E__YrKebRQcKjDDTHZXBM2l9OdJCjpIiB2A.vJZUfJEyHMaGakR4ug3GOGa2xgkh8La; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 04:11:13 + GMT + - 
_cfuvid=8S3pLh59YcylKlv0L1fYCzWuBZ2TpQjjtE2ofk0tljI-1775101272.892413-1.0.1.1-t0bnWK5xPPNx2.xtwlsTdj_tVAMszWXGK2xa7tVyCdI; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '469' + x-kong-proxy-latency: + - '8' + x-kong-request-id: + - 019d4c47-e376-7b5e-b288-ead2e6133315 + x-kong-upstream-latency: + - '470' + x-ratelimit-limit-req-minute: + - '60' + x-ratelimit-limit-tokens-minute: + - '375000' + x-ratelimit-remaining-req-minute: + - '57' + x-ratelimit-remaining-tokens-minute: + - '374909' + x-ratelimit-tokens-query-cost: + - '30' + status: + code: 200 + message: OK +version: 1 diff --git a/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_chat_complete_sync.yaml b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_chat_complete_sync.yaml new file mode 100644 index 00000000..dfcd3826 --- /dev/null +++ b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_chat_complete_sync.yaml @@ -0,0 +1,156 @@ +interactions: +- request: + body: '{"model":"mistral-small-latest","messages":[{"content":"What is 2+2? 
Reply + with just the number.","role":"user"}],"max_tokens":10,"stream":false}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '145' + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/chat/completions + response: + body: + string: '{"id":"83874cf3953f4838a208ea3893abc993","created":1775090564,"model":"mistral-small-latest","usage":{"prompt_tokens":28,"total_tokens":30,"completion_tokens":2,"prompt_tokens_details":{"cached_tokens":0}},"object":"chat.completion","choices":[{"index":0,"finish_reason":"stop","message":{"role":"assistant","tool_calls":null,"content":"4"}}]}' + headers: + CF-RAY: + - 9e5bcb97684f39fa-YYZ + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Thu, 02 Apr 2026 00:42:44 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + content-length: + - '343' + mistral-correlation-id: + - 019d4ba4-7bd1-7caa-895e-daf93fb4918b + set-cookie: + - __cf_bm=s.vJ2j_EWp2DnRjyRb7_KMBnVMc6IxEDz9H1Vfe2mK4-1775090563.7506278-1.0.1.1-UCrW2xSPlBGoDa5XxP_uyggSmRHVMpaJlFjLMXKSoG4aZBgT16V6S5y.EF42zFU11yY6V3cuBCZM7zc_xSHVV4GHtyU4q1fQ0wN3zxSWb63bBJ4RlmwPDt44Q2PlATFB; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 01:12:44 + GMT + - _cfuvid=QCtHKzeZBxCauqcHIzazcfpvKtSJyR.TVy2BWGW9Qr0-1775090563.7506278-1.0.1.1-w26JcGsuM78VfoHl_z.OKcI7zvigmNfJ.XcPEoF1gos; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '181' + x-kong-proxy-latency: + - '12' + x-kong-request-id: + - 019d4ba4-7bd1-7caa-895e-daf93fb4918b + x-kong-upstream-latency: + - '182' + 
x-ratelimit-limit-req-minute: + - '60' + x-ratelimit-limit-tokens-minute: + - '375000' + x-ratelimit-remaining-req-minute: + - '59' + x-ratelimit-remaining-tokens-minute: + - '374970' + x-ratelimit-tokens-query-cost: + - '30' + status: + code: 200 + message: OK +- request: + body: '{"model":"mistral-small-latest","messages":[{"content":"What is 2+2? Reply + with just the number.","role":"user"}],"max_tokens":10,"stream":false}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '145' + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/chat/completions + response: + body: + string: '{"id":"4b090b7cd0764c2d982b2c1edfccaeaa","created":1775101271,"model":"mistral-small-latest","usage":{"prompt_tokens":28,"total_tokens":30,"completion_tokens":2,"prompt_tokens_details":{"cached_tokens":0}},"object":"chat.completion","choices":[{"index":0,"finish_reason":"stop","message":{"role":"assistant","tool_calls":null,"content":"4"}}]}' + headers: + CF-RAY: + - 9e5cd1043884f337-YYZ + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Thu, 02 Apr 2026 03:41:12 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + content-length: + - '343' + mistral-correlation-id: + - 019d4c47-dee1-7862-8f9a-27740074f57f + set-cookie: + - __cf_bm=60F_Nc8LyzMHaMZYI9BNPeOERQlLFNXmkxyysZE2MX4-1775101271.712749-1.0.1.1-GEPl7mZMSQBtsCVbR0m5IqZyOBm9_jr_3vaX_KEwo7bEBZKI.Jb5IIbfdd2QUh6JKarWWqueaLiW_Ng6CXiNLWvb7BAAAIgww3kZshB3kBlbymKzLW9TpEDWLMjkFJFc; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 04:11:12 + GMT + - 
_cfuvid=fdcvA4e0RXpG_MFBDztniMSgGrZV4NdoOafC7WKflxs-1775101271.712749-1.0.1.1-atYmHTTBO.ZluUI8ND5VhUpoFUFuEiexsgi7Sg9NvP4; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '380' + x-kong-proxy-latency: + - '11' + x-kong-request-id: + - 019d4c47-dee1-7862-8f9a-27740074f57f + x-kong-upstream-latency: + - '380' + x-ratelimit-limit-req-minute: + - '60' + x-ratelimit-limit-tokens-minute: + - '375000' + x-ratelimit-remaining-req-minute: + - '59' + x-ratelimit-remaining-tokens-minute: + - '374970' + x-ratelimit-tokens-query-cost: + - '30' + status: + code: 200 + message: OK +version: 1 diff --git a/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_chat_stream_sync.yaml b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_chat_stream_sync.yaml new file mode 100644 index 00000000..e871ac66 --- /dev/null +++ b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_chat_stream_sync.yaml @@ -0,0 +1,156 @@ +interactions: +- request: + body: '{"model":"mistral-small-latest","messages":[{"content":"What is 5+5? 
Reply + with just the number.","role":"user"}],"max_tokens":10,"stream":true}' + headers: + Accept: + - text/event-stream + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '144' + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/chat/completions + response: + body: + string: 'data: {"id":"d74778c10ea74b679285a60d6b3bcedb","object":"chat.completion.chunk","created":1775090564,"model":"mistral-small-latest","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null}]} + + + data: {"id":"d74778c10ea74b679285a60d6b3bcedb","object":"chat.completion.chunk","created":1775090564,"model":"mistral-small-latest","choices":[{"index":0,"delta":{"content":"1"},"finish_reason":null}],"p":"abcdefghijklmnopqrstuvw"} + + + data: {"id":"d74778c10ea74b679285a60d6b3bcedb","object":"chat.completion.chunk","created":1775090564,"model":"mistral-small-latest","choices":[{"index":0,"delta":{"content":"0"},"finish_reason":"stop"}],"usage":{"prompt_tokens":28,"total_tokens":31,"completion_tokens":3,"prompt_tokens_details":{"cached_tokens":0}},"p":"abcdefghijklmnopqrstuvwxyz"} + + + data: [DONE] + + + ' + headers: + CF-RAY: + - 9e5bcb9baf8697b9-YYZ + Connection: + - keep-alive + Content-Type: + - text/event-stream; charset=utf-8 + Date: + - Thu, 02 Apr 2026 00:42:44 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + mistral-correlation-id: + - 019d4ba4-7d86-772c-a5ea-4ae77c788356 + set-cookie: + - 
__cf_bm=GeK2TUC2q5inmFWO_YhlxJJ4qmiBPYAA7D0Mk_SLYwc-1775090564.427458-1.0.1.1-WoI47URvynPt3w_jFRCMujdoa82aCXu586fkam_ynd6D7uJbrtGApoLqvj4U._iF.xu8O3DMK7KcoLbORlxrwAmTmHL01yqw2zJYw4UXJSgvQSlK.XFUOTC_MYwmKkhW; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 01:12:44 + GMT + - _cfuvid=BbPEV2uIjL_RIQOoQ.3R7qqWm8dGnhcQGBc_V48VLv4-1775090564.427458-1.0.1.1-bA9Ra9snmzI_2grboJY9JhIWtTyalWmfWoOSnZ0A9nQ; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '153' + x-kong-proxy-latency: + - '10' + x-kong-request-id: + - 019d4ba4-7d86-772c-a5ea-4ae77c788356 + x-kong-upstream-latency: + - '154' + status: + code: 200 + message: OK +- request: + body: '{"model":"mistral-small-latest","messages":[{"content":"What is 5+5? Reply + with just the number.","role":"user"}],"max_tokens":10,"stream":true}' + headers: + Accept: + - text/event-stream + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '144' + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/chat/completions + response: + body: + string: 'data: {"id":"73ae7d454ce14e35a480c03658212e1b","object":"chat.completion.chunk","created":1775101272,"model":"mistral-small-latest","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null}]} + + + data: {"id":"73ae7d454ce14e35a480c03658212e1b","object":"chat.completion.chunk","created":1775101272,"model":"mistral-small-latest","choices":[{"index":0,"delta":{"content":"1"},"finish_reason":null}],"p":"abcdefghijklmnopqrstuvwxyz"} + + + data: 
{"id":"73ae7d454ce14e35a480c03658212e1b","object":"chat.completion.chunk","created":1775101272,"model":"mistral-small-latest","choices":[{"index":0,"delta":{"content":"0"},"finish_reason":"stop"}],"usage":{"prompt_tokens":28,"total_tokens":31,"completion_tokens":3,"prompt_tokens_details":{"cached_tokens":0}},"p":"abcdefghijklmnopqrstuvwxyz0"} + + + data: [DONE] + + + ' + headers: + CF-RAY: + - 9e5cd1082f85f54f-YYZ + Connection: + - keep-alive + Content-Type: + - text/event-stream; charset=utf-8 + Date: + - Thu, 02 Apr 2026 03:41:12 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + mistral-correlation-id: + - 019d4c47-e152-7dc5-a835-0074f488f5a4 + set-cookie: + - __cf_bm=bFZVFAV39NUHcLzAC8zzrXqQncL0MJNk9vfyeEU0jDw-1775101272.345449-1.0.1.1-QTCIIYWEf1ZZ.ptwW959o5JMFP0c14dStFM9q9TyikJQ7ABEEIHbxTQkychNbGhGW2vSZGby6JuAwHaTHAF9F7qBR0ka3dIvrarXgLNYoc6818R5s5In1qhge8TYYtZy; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 04:11:12 + GMT + - _cfuvid=Xm0CSyl8EZNCiXwXuc3rcKWORXAFQf7Mgq79sU97hZQ-1775101272.345449-1.0.1.1-kePj0l_EVIg2Om4s82mukflc0miPMR8AeTmhG9gXdfY; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '290' + x-kong-proxy-latency: + - '11' + x-kong-request-id: + - 019d4c47-e152-7dc5-a835-0074f488f5a4 + x-kong-upstream-latency: + - '290' + status: + code: 200 + message: OK +version: 1 diff --git a/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_embeddings_create.yaml b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_embeddings_create.yaml new file mode 100644 index 00000000..ec765d69 --- /dev/null +++ b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_embeddings_create.yaml @@ -0,0 +1,162 @@ +interactions: +- 
request: + body: '{"model":"mistral-embed","input":"braintrust tracing"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '54' + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/embeddings + response: + body: + string: '{"id":"e1e26f79eb1e426b807f4daabd9c48c7","object":"list","data":[{"object":"embedding","embedding":[-0.033782958984375,0.022064208984375,0.040130615234375,-0.01519775390625,0.07403564453125,-0.01427459716796875,-0.0007185935974121094,0.01056671142578125,-0.004802703857421875,0.0203704833984375,-0.047821044921875,0.0293121337890625,-0.0200653076171875,0.00887298583984375,-0.021759033203125,0.0882568359375,0.018585205078125,0.02252197265625,0.0203704833984375,0.031005859375,0.00879669189453125,-0.0183563232421875,6.258487701416016e-06,0.027923583984375,0.00917816162109375,0.02777099609375,0.0010271072387695312,0.01149749755859375,-0.0291595458984375,0.0036830902099609375,0.039794921875,-0.02191162109375,-0.018280029296875,0.0136566162109375,0.0009016990661621094,-0.016815185546875,-0.0239105224609375,-0.03424072265625,-0.002170562744140625,0.0256195068359375,0.019287109375,-0.03179931640625,0.00372314453125,0.00821685791015625,0.0285491943359375,-0.0731201171875,-0.047210693359375,0.0175933837890625,0.047821044921875,-0.038116455078125,0.02716064453125,0.038421630859375,0.01149749755859375,-0.007175445556640625,-0.0245361328125,-0.0141143798828125,-0.035491943359375,-0.007793426513671875,-0.01187896728515625,0.00887298583984375,-0.041046142578125,-0.0078277587890625,0.01558685302734375,-0.07098388671875,0.0262298583984375,0.0198974609375,-0.04876708984375,-0.005748748779296875,0.02252197265625,0.008026123046875,0.0180511474609375,0.00383758544921875,0.0180511474609375,0.0018510818481445312,0.042572021484375,-0.04876708984375,0.004146575927734375,0.048767089843
75,-0.0035686492919921875,-0.041961669921875,0.038421630859375,0.0135040283203125,0.037811279296875,0.0203704833984375,-0.0268402099609375,-0.0076751708984375,0.021759033203125,0.004512786865234375,-0.07958984375,0.034881591796875,0.0003857612609863281,-0.0245361328125,0.018280029296875,-0.030242919921875,0.08453369140625,-0.005207061767578125,-0.0110321044921875,-0.0237579345703125,0.0174407958984375,0.0262298583984375,0.02252197265625,-0.015350341796875,-0.00756072998046875,-0.0438232421875,-0.0287017822265625,-0.0140380859375,0.0262298583984375,-0.0111846923828125,-0.02423095703125,-0.04473876953125,-0.08642578125,-0.027618408203125,0.0193634033203125,-0.003490447998046875,0.053680419921875,-0.006519317626953125,-0.0014467239379882812,0.00482177734375,-0.033782958984375,-0.036407470703125,0.032867431640625,-0.0236053466796875,0.0438232421875,0.01056671142578125,-0.0205230712890625,-0.037353515625,-0.003932952880859375,0.03887939453125,0.0032405853271484375,0.01480865478515625,-0.0014753341674804688,-0.0021305084228515625,0.00457000732421875,-0.0198974609375,0.0391845703125,-0.0111846923828125,0.015655517578125,0.00218963623046875,0.031951904296875,0.0200653076171875,0.00449371337890625,-0.007137298583984375,-0.08148193359375,-0.00302886962890625,-0.01605224609375,-0.0018510818481445312,-0.0002639293670654297,0.0175933837890625,-0.01056671142578125,-0.00933837890625,-0.03179931640625,-0.035797119140625,0.0211334228515625,-0.01959228515625,0.00975799560546875,-0.04876708984375,-0.0022563934326171875,-0.014892578125,0.007099151611328125,0.00968170166015625,-0.0296173095703125,0.0288543701171875,0.005710601806640625,-0.006519317626953125,-0.01180267333984375,-0.0789794921875,0.019439697265625,0.005207061767578125,-0.0012826919555664062,0.018585205078125,0.0660400390625,-0.07281494140625,0.0239105224609375,0.039794921875,0.0107269287109375,0.09564208984375,-0.005939483642578125,-0.0379638671875,-0.0008292198181152344,-0.005054473876953125,-0.00994873046875,0.060180664
0625,0.0007519721984863281,0.0138092041015625,0.007137298583984375,-0.017822265625,-0.0216064453125,-0.03717041015625,-0.038421630859375,0.03948974609375,-0.01064300537109375,-0.034881591796875,0.00859832763671875,0.01751708984375,0.0203704833984375,0.072509765625,-0.0208282470703125,-0.00609588623046875,0.0209808349609375,0.01041412353515625,-0.013580322265625,-0.0170440673828125,-0.005710601806640625,0.0257720947265625,0.040740966796875,-0.035186767578125,-0.03948974609375,0.02716064453125,-0.036407470703125,-0.0203704833984375,-0.04534912109375,-0.01288604736328125,-0.0113372802734375,-0.0110321044921875,-0.0095672607421875,-0.0229949951171875,-0.0135040283203125,-0.0004820823669433594,0.00226593017578125,0.00682830810546875,-0.0333251953125,0.011260986328125,0.035491943359375,0.0288543701171875,-0.049072265625,0.02545166015625,0.0233001708984375,-0.01519775390625,-0.0014276504516601562,0.0085601806640625,-0.0081024169921875,-0.0294647216796875,-0.01519775390625,-0.002777099609375,-0.003993988037109375,0.0287017822265625,0.0660400390625,0.034881591796875,0.01442718505859375,0.0206756591796875,-0.031463623046875,0.007366180419921875,-0.0266876220703125,0.00968170166015625,0.0139617919921875,-0.029937744140625,0.00994873046875,0.01419830322265625,0.040740966796875,0.034088134765625,-0.0229949951171875,-0.03009033203125,-0.040435791015625,-0.0020923614501953125,-0.041351318359375,-0.0198974609375,-4.2498111724853516e-05,-0.05059814453125,0.040740966796875,-0.051239013671875,0.0005640983581542969,-0.03240966796875,-0.01311492919921875,0.0291595458984375,0.0543212890625,-0.005245208740234375,-0.021453857421875,-0.0555419921875,0.019287109375,0.0203704833984375,0.001735687255859375,0.0198974609375,0.03131103515625,-0.024688720703125,0.0229949951171875,-0.0159759521484375,-0.00864410400390625,0.0206756591796875,0.005748748779296875,0.006595611572265625,-0.01342010498046875,0.0005712509155273438,-0.0379638671875,0.0509033203125,-0.04534912109375,-0.027618408203125,-0.040
130615234375,0.025299072265625,-0.053375244140625,-0.0182037353515625,0.00748443603515625,-0.0172882080078125,-0.0028553009033203125,-0.0206756591796875,0.0205230712890625,-0.06695556640625,-0.03131103515625,-0.001514434814453125,-0.038421630859375,0.0211334228515625,0.07958984375,-0.01003265380859375,0.040740966796875,0.040740966796875,0.002796173095703125,-0.01751708984375,-0.00624847412109375,-0.03533935546875,-0.0108795166015625,-0.007251739501953125,0.02252197265625,0.03948974609375,-0.01435089111328125,0.01157379150390625,0.0136566162109375,0.002410888671875,-0.034393310546875,0.00441741943359375,0.003047943115234375,-0.027313232421875,-0.017120361328125,0.0228424072265625,0.0245361328125,-0.01334381103515625,-0.005016326904296875,-0.033172607421875,0.0157318115234375,-0.01157379150390625,0.0111846923828125,0.03948974609375,-0.032257080078125,-0.0236053466796875,0.05615234375,0.0626220703125,0.03656005859375,0.05059814453125,0.0182037353515625,0.00096893310546875,-0.043212890625,0.034393310546875,-0.0265350341796875,0.0211334228515625,0.0081787109375,-0.0203704833984375,-0.019134521484375,-0.03533935546875,-0.057098388671875,0.0206756591796875,0.04351806640625,0.0263824462890625,0.040130615234375,-0.0029125213623046875,0.006404876708984375,0.07342529296875,-0.041046142578125,0.11846923828125,-0.0543212890625,-0.05615234375,0.04534912109375,0.038116455078125,0.002170562744140625,-0.00921630859375,-0.01280975341796875,0.06756591796875,-0.01165008544921875,-0.005939483642578125,-0.053680419921875,0.037811279296875,-0.01003265380859375,0.028228759765625,0.051544189453125,-0.01666259765625,0.0311737060546875,0.06048583984375,0.031005859375,0.03656005859375,0.0078277587890625,-0.0157318115234375,-0.05462646484375,0.0095672607421875,-0.0205230712890625,0.034881591796875,0.02191162109375,0.005092620849609375,-0.0015811920166015625,-0.0139617919921875,-0.06390380859375,0.005092620849609375,-0.030548095703125,0.00771331787109375,0.0789794921875,-0.0178985595703125,0.041
656494140625,-0.0208282470703125,0.006134033203125,-0.012115478515625,0.01049041748046875,0.0216064453125,0.045684814453125,0.0543212890625,0.01203155517578125,-0.025299072265625,0.01465606689453125,-0.012420654296875,0.0209808349609375,0.01033782958984375,0.041351318359375,0.03179931640625,-0.0019378662109375,-0.0635986328125,0.028228759765625,0.04290771484375,-0.047210693359375,-0.0164337158203125,-0.03363037109375,0.007328033447265625,-0.04998779296875,-0.035186767578125,0.010955810546875,-0.003086090087890625,-0.01172637939453125,-0.040740966796875,-0.04534912109375,-0.005901336669921875,0.041351318359375,-0.049072265625,-0.0260772705078125,0.0167388916015625,-0.015899658203125,0.02545166015625,0.0200653076171875,-0.058013916015625,0.0543212890625,-0.045989990234375,-0.04876708984375,-0.007137298583984375,0.0036640167236328125,-0.034393310546875,-0.00836944580078125,-0.0159759521484375,-0.0193634033203125,-0.002044677734375,-0.0079498291015625,0.0438232421875,0.00428009033203125,0.027618408203125,0.04290771484375,0.041046142578125,-0.01465606689453125,-0.054931640625,-0.004688262939453125,0.012115478515625,0.007251739501953125,0.0239105224609375,0.03271484375,-0.00968170166015625,-0.03857421875,-0.075927734375,0.03564453125,-0.0321044921875,-0.001132965087890625,0.0283966064453125,-0.024993896484375,-0.06048583984375,-0.00339508056640625,0.04351806640625,0.007366180419921875,-0.0123443603515625,-0.08148193359375,-0.0048980712890625,-0.002429962158203125,-0.031005859375,-0.03009033203125,-0.0203704833984375,-0.004688262939453125,0.0239105224609375,0.005245208740234375,-0.017120361328125,0.0140380859375,-0.0209808349609375,0.03948974609375,0.0151214599609375,0.00493621826171875,-0.045684814453125,0.0151214599609375,0.063232421875,0.00310516357421875,0.0170440673828125,0.0180511474609375,0.0135040283203125,-0.004512786865234375,0.01172637939453125,0.01056671142578125,-0.02716064453125,-0.038421630859375,-0.032867431640625,-0.00921630859375,-0.0074462890625,-0.02067
56591796875,0.00771331787109375,0.00848388671875,-0.00339508056640625,0.0165863037109375,0.0595703125,-0.05059814453125,-0.01457977294921875,0.0078277587890625,-0.040435791015625,-0.008026123046875,-0.064208984375,-0.04473876953125,-0.0231475830078125,0.0056304931640625,0.004550933837890625,0.051239013671875,0.022674560546875,-0.017822265625,0.01080322265625,-0.007289886474609375,-0.0107269287109375,-0.0206756591796875,-0.034088134765625,-0.00013434886932373047,-0.021759033203125,0.045684814453125,-0.00682830810546875,-0.01342010498046875,-0.0229949951171875,-0.0033168792724609375,-0.029937744140625,-0.0200653076171875,-0.033477783203125,-0.02484130859375,-0.041046142578125,0.03826904296875,-0.0108795166015625,0.019744873046875,0.040435791015625,-0.0174407958984375,0.03948974609375,-0.06915283203125,-0.0240631103515625,0.005825042724609375,0.0154266357421875,-0.0257720947265625,-0.0141143798828125,-0.016815185546875,0.04534912109375,0.052764892578125,-0.00690460205078125,-0.0177459716796875,-0.024383544921875,0.0084075927734375,-0.006134033203125,0.013885498046875,-0.0085601806640625,-0.0095672607421875,-0.003528594970703125,-0.00690460205078125,-0.00775146484375,0.037017822265625,0.01442718505859375,0.038116455078125,0.0126495361328125,-0.0186767578125,-0.015350341796875,0.0184326171875,0.00902557373046875,-0.0157318115234375,-0.0152740478515625,0.0004436969757080078,0.0291595458984375,-0.028228759765625,-0.0081787109375,0.06451416015625,0.035186767578125,-0.0078277587890625,-0.021453857421875,-0.025146484375,-0.048126220703125,0.00609588623046875,0.0296173095703125,-0.05645751953125,-0.063232421875,0.0141143798828125,0.006671905517578125,0.027618408203125,0.033935546875,-0.00748443603515625,-0.037017822265625,-0.029937744140625,0.052459716796875,0.0038967132568359375,-0.033477783203125,-0.035797119140625,0.0211334228515625,-0.007793426513671875,-0.00566864013671875,0.003490447998046875,0.05615234375,0.00814056396484375,0.01157379150390625,0.03302001953125,0.052764
892578125,0.0268402099609375,-0.0011377334594726562,-0.016357421875,-0.0200653076171875,0.00536346435546875,-0.045989990234375,0.0021610260009765625,0.00994873046875,0.0088348388671875,-0.09625244140625,-0.0183563232421875,-0.0291595458984375,-0.0260772705078125,0.01003265380859375,0.035186767578125,-0.017120361328125,0.005710601806640625,0.01519775390625,-0.0290069580078125,-0.02191162109375,0.0212860107421875,0.00705718994140625,0.054931640625,0.042572021484375,-0.027313232421875,0.00902557373046875,-0.0009832382202148438,-0.018280029296875,-0.0391845703125,0.039031982421875,0.0162811279296875,0.0206756591796875,0.009490966796875,0.048126220703125,0.0268402099609375,0.051239013671875,0.0450439453125,0.05029296875,0.05462646484375,-0.015350341796875,-0.037353515625,-0.006015777587890625,-0.019439697265625,-0.01064300537109375,-0.0167388916015625,-0.05523681640625,0.03179931640625,-0.0233001708984375,0.012115478515625,-0.02252197265625,0.0086822509765625,0.008331298828125,-0.004241943359375,-0.022369384765625,0.00994873046875,0.019134521484375,-0.056793212890625,-0.05029296875,0.025146484375,-0.01326751708984375,-0.0316162109375,0.0288543701171875,-0.07037353515625,0.078369140625,-0.022064208984375,-0.0712890625,-0.007213592529296875,0.057098388671875,0.01580810546875,-0.0256195068359375,-0.0016012191772460938,-0.01465606689453125,0.0157318115234375,-0.0149688720703125,-0.0161285400390625,-0.0123443603515625,-0.00917816162109375,-0.063232421875,0.0208282470703125,0.00408935546875,-0.0025463104248046875,-0.01605224609375,-0.027923583984375,-0.0379638671875,0.00994873046875,-0.037353515625,0.06201171875,0.03131103515625,-0.007404327392578125,-0.00015914440155029297,0.032867431640625,0.00690460205078125,0.00200653076171875,-0.00011026859283447266,0.0229949951171875,-0.034576416015625,-0.006595611572265625,-0.01296234130859375,-0.0170440673828125,-0.022674560546875,0.00339508056640625,-0.024993896484375,0.0240631103515625,0.03240966796875,0.01334381103515625,-0.00389671
32568359375,0.0063629150390625,0.01558685302734375,-0.016357421875,0.02716064453125,-0.000988006591796875,-0.00690460205078125,0.0165863037109375,-0.046600341796875,0.04351806640625,-0.01812744140625,-0.0017843246459960938,-0.00844573974609375,-0.005401611328125,0.0259246826171875,-0.0438232421875,-0.02484130859375,-0.0601806640625,-0.006015777587890625,0.0303955078125,0.0017642974853515625,-0.030242919921875,0.001003265380859375,-0.0206756591796875,-0.033172607421875,0.00237274169921875,0.01026153564453125,0.018829345703125,0.0228424072265625,0.002063751220703125,-0.015350341796875,0.0152740478515625,-0.033477783203125,-0.03857421875,-0.0149688720703125,0.01751708984375,-0.03009033203125,-0.019744873046875,-0.00910186767578125,0.0081024169921875,0.027618408203125,0.01288604736328125,0.0291595458984375,0.027923583984375,-0.0177459716796875,0.011260986328125,0.034088134765625,-0.001407623291015625,0.005168914794921875,0.0032596588134765625,0.016204833984375,0.00945281982421875,-0.03533935546875,0.022369384765625,0.037506103515625,0.0107269287109375,-0.017120361328125,-0.0209808349609375,-0.029937744140625,0.04412841796875,0.0141143798828125,0.003509521484375,0.0333251953125,0.048126220703125,-0.09503173828125,0.031951904296875,0.007904052734375,0.027313232421875,0.019744873046875,0.00273895263671875,-0.052154541015625,-0.01419830322265625,-0.021759033203125,0.005016326904296875,0.003162384033203125,0.046600341796875,0.0011816024780273438,0.005710601806640625,0.05462646484375,-0.0209808349609375,0.003875732421875,0.0183563232421875,-0.01142120361328125,-0.0138092041015625,0.0438232421875,-0.07159423828125,0.00682830810546875,-0.053680419921875,-0.0124969482421875,0.047821044921875,0.0026607513427734375,0.01010894775390625,-0.004322052001953125,0.00836944580078125,0.012115478515625,0.00701904296875,0.0075225830078125,0.01033782958984375,-0.04876708984375,-0.017669677734375,-0.0010366439819335938,0.0283966064453125,-0.0202178955078125,-0.024383544921875,-0.0220642089843
75,-0.10675048828125,-0.072509765625,0.00925445556640625,-0.0391845703125,-0.0036258697509765625,0.0228424072265625,-0.01580810546875,-0.001861572265625,-0.0321044921875,0.00864410400390625,0.017669677734375,-0.05059814453125,0.006671905517578125,-0.0007233619689941406,-0.0154266357421875,0.04876708984375,0.051544189453125,-0.039031982421875,0.004398345947265625,-0.07867431640625,-0.01959228515625,-0.024383544921875,0.0234527587890625,0.0123443603515625,0.036407470703125,0.007328033447265625,0.003993988037109375,-0.052459716796875,-0.00910186767578125,0.00879669189453125,-0.006866455078125,-0.022216796875,-0.037506103515625,-0.0036830902099609375,0.058013916015625,-0.01303863525390625,-0.0200653076171875,-0.0190582275390625,-0.02545166015625,0.0316162109375,-0.0054779052734375,0.054931640625,-0.00189971923828125,0.02191162109375,0.003376007080078125,0.021453857421875,0.031005859375,-0.021453857421875,0.046295166015625,-0.01473236083984375,0.040130615234375,-0.017120361328125,0.0178985595703125,0.01519775390625,-0.052764892578125,-0.042266845703125,0.00339508056640625,-0.02978515625,-0.002323150634765625,0.0614013671875,0.00482177734375,-0.016815185546875,0.00609588623046875,0.057098388671875,0.0555419921875,0.0141143798828125,0.0013408660888671875,-0.0712890625,-0.006557464599609375,-0.01056671142578125,0.040130615234375,0.0012197494506835938,0.00698089599609375,0.0391845703125,0.01334381103515625,-0.07037353515625,0.0303955078125,-0.0008969306945800781,0.053070068359375,0.028076171875,0.058624267578125,-0.027618408203125,0.0110321044921875,-0.0002110004425048828,-0.0015811920166015625,0.028076171875,0.004589080810546875,-0.0206756591796875,0.04998779296875,-0.04412841796875,-0.00902557373046875,0.010955810546875,0.05987548828125,-0.04534912109375,0.0067901611328125,-0.0333251953125,0.045989990234375,0.00019884109497070312,-0.035797119140625,-0.0111083984375,-0.0113372802734375,-0.0063629150390625,-0.028076171875,0.0484619140625,0.0126495361328125,0.04998779296875,0
.0296173095703125,-0.03533935546875,0.0110321044921875,0.002208709716796875,-0.01373291015625,0.008758544921875,-0.048126220703125,0.02252197265625,-0.035491943359375,0.05645751953125,-0.0124969482421875,0.063232421875,0.004108428955078125,0.0193634033203125,-0.01427459716796875,-0.0175933837890625,0.049072265625,0.005977630615234375,0.0311737060546875,0.05987548828125,-0.00482177734375,0.046295166015625,-0.0036067962646484375,-0.04534912109375,0.00756072998046875,0.005939483642578125,0.01018524169921875,-0.0888671875,-0.02545166015625,0.00933837890625,-0.0236053466796875,-0.0496826171875,-0.0121917724609375,0.07684326171875,-0.03594970703125,-0.00559234619140625,-0.021759033203125,0.0266876220703125,-0.040740966796875,0.0136566162109375,0.0391845703125,-0.0177459716796875,0.05987548828125,-0.04876708984375,0.0303955078125,0.0202178955078125,0.051239013671875,-0.01203155517578125,0.01666259765625,0.0259246826171875,-0.0054779052734375,0.03363037109375,0.025299072265625,-0.047210693359375,-0.0043792724609375,0.0029125213623046875,0.0014848709106445312,-0.042266845703125,-0.0056304931640625,0.011260986328125,-0.10247802734375,0.00705718994140625,0.0229949951171875,0.017974853515625,-0.035491943359375,-0.030853271484375,0.0032596588134765625,-0.09197998046875,0.007251739501953125,0.01319122314453125,0.024688720703125,0.06878662109375,-0.053680419921875,0.0161285400390625,0.0294647216796875,0.001918792724609375,-0.046600341796875,0.021453857421875,-0.00478363037109375,0.0438232421875,-0.03717041015625,0.03271484375,0.03594970703125,0.018280029296875,-0.03887939453125,-0.01064300537109375,-0.022369384765625,0.0025463104248046875,-0.0205230712890625,-0.0157318115234375,0.042572021484375,0.0196685791015625,0.022064208984375,-0.019744873046875,0.017669677734375,0.02545166015625,0.036407470703125,0.08270263671875,0.00412750244140625,0.0626220703125,0.0239105224609375,0.015350341796875,-0.040130615234375,0.01080322265625],"index":0}],"model":"mistral-embed","usage":{"prompt_a
udio_seconds":null,"prompt_tokens":6,"total_tokens":6,"completion_tokens":0,"request_count":null,"prompt_tokens_details":null,"prompt_token_details":null}}' + headers: + CF-RAY: + - 9e5bcba10a98ac30-YYZ + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Thu, 02 Apr 2026 00:42:45 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + content-length: + - '19739' + mistral-correlation-id: + - 019d4ba4-80e7-774b-b319-000bb3901f84 + set-cookie: + - __cf_bm=zL__d4XaxqM97GxXlYeaHuPwv5GvYcBOqFLmuv7yuHA-1775090565.2878609-1.0.1.1-K1xb5i6KE__7LZHTJ9r7jjh.c.O1AnWnqNqlDA6LBAIvAhzMfIcBuojVcL.z24rGYjVnYdrRtnDPdNayI1KeQScUQaiDXubmLI3w7QatFitU929PaNKklvnSlWQtFqyb; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 01:12:45 + GMT + - _cfuvid=Pevq7GeGri2PHDN0iB2w85GEEqxF_M6v2XkWQl5fCHE-1775090565.2878609-1.0.1.1-GK9OmIaaUIuJYsdnJVF7ab1nzqLniogNHFMWLrfDgf4; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '179' + x-kong-proxy-latency: + - '13' + x-kong-request-id: + - 019d4ba4-80e7-774b-b319-000bb3901f84 + x-kong-upstream-latency: + - '179' + x-ratelimit-limit-req-minute: + - '60' + x-ratelimit-limit-tokens-minute: + - '20000000' + x-ratelimit-limit-tokens-month: + - '200000000000' + x-ratelimit-remaining-req-minute: + - '56' + x-ratelimit-remaining-tokens-minute: + - '19999994' + x-ratelimit-remaining-tokens-month: + - '199999999994' + x-ratelimit-tokens-query-cost: + - '6' + status: + code: 200 + message: OK +- request: + body: '{"model":"mistral-embed","input":"braintrust tracing"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '54' + Host: + - api.mistral.ai + 
content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/embeddings + response: + body: + string: '{"id":"88fd0e095d9546bb998f05dfcae166e3","object":"list","data":[{"object":"embedding","embedding":[-0.033782958984375,0.0225372314453125,0.040130615234375,-0.01519775390625,0.0740966796875,-0.01435089111328125,-0.0008969306945800781,0.0105743408203125,-0.004650115966796875,0.0200653076171875,-0.0478515625,0.029327392578125,-0.0202178955078125,0.0088348388671875,-0.021759033203125,0.087646484375,0.01837158203125,0.0225372314453125,0.0205230712890625,0.031341552734375,0.00887298583984375,-0.017822265625,-0.00015437602996826172,0.0279388427734375,0.009185791015625,0.027618408203125,0.0010128021240234375,0.01157379150390625,-0.0288543701171875,0.003742218017578125,0.040435791015625,-0.022064208984375,-0.018280029296875,0.0135040283203125,0.0012350082397460938,-0.017059326171875,-0.024078369140625,-0.034271240234375,-0.002285003662109375,0.0254669189453125,0.0195159912109375,-0.031951904296875,0.003936767578125,0.008331298828125,0.0285491943359375,-0.0731201171875,-0.046905517578125,0.017822265625,0.047515869140625,-0.038116455078125,0.027313232421875,0.0379638671875,0.01134490966796875,-0.007717132568359375,-0.024383544921875,-0.01427459716796875,-0.03533935546875,-0.00756072998046875,-0.011962890625,0.0092620849609375,-0.041046142578125,-0.007678985595703125,0.01558685302734375,-0.07098388671875,0.0260772705078125,0.0200653076171875,-0.04937744140625,-0.006134033203125,0.0225372314453125,0.008026123046875,0.0177459716796875,0.0037994384765625,0.01837158203125,0.0019092559814453125,0.04290771484375,-0.04815673828125,0.004032135009765625,0.04876708984375,-0.00335693359375,-0.041656494140625,0.03857421875,0.0136566162109375,0.038116455078125,0.0203704833984375,-0.027008056640625,-0.00733184814453125,0.021453857421875,0.00405120849609375,-0.07965087890625,0.03533935546875,-5.0008296966552734e-05,-0.0245
361328125,0.01837158203125,-0.03009033203125,0.0845947265625,-0.005168914794921875,-0.01126861572265625,-0.0237579345703125,0.017822265625,0.0257720947265625,0.0225372314453125,-0.01551055908203125,-0.007755279541015625,-0.04412841796875,-0.0280914306640625,-0.01419830322265625,0.0262298583984375,-0.0111083984375,-0.02423095703125,-0.04443359375,-0.08642578125,-0.027618408203125,0.019439697265625,-0.0036449432373046875,0.0537109375,-0.0067138671875,-0.0013599395751953125,0.004802703857421875,-0.033935546875,-0.0367431640625,0.033172607421875,-0.0236053466796875,0.0438232421875,0.010650634765625,-0.0206756591796875,-0.037353515625,-0.00449371337890625,0.03887939453125,0.003124237060546875,0.0148162841796875,-0.0017557144165039062,-0.00208282470703125,0.0045318603515625,-0.0199127197265625,0.039337158203125,-0.01187896728515625,0.0156707763671875,0.0024700164794921875,0.032562255859375,0.0205230712890625,0.004878997802734375,-0.0075225830078125,-0.08148193359375,-0.00368499755859375,-0.01605224609375,-0.0015239715576171875,-0.0002110004425048828,0.0177459716796875,-0.00995635986328125,-0.009185791015625,-0.03179931640625,-0.035491943359375,0.021453857421875,-0.0200653076171875,0.01049041748046875,-0.04876708984375,-0.0022373199462890625,-0.014892578125,0.0069427490234375,0.01003265380859375,-0.029937744140625,0.0283966064453125,0.00543975830078125,-0.006519317626953125,-0.01187896728515625,-0.07904052734375,0.01959228515625,0.005130767822265625,-0.0012826919555664062,0.0186767578125,0.0660400390625,-0.07281494140625,0.0237579345703125,0.0391845703125,0.01049041748046875,0.095703125,-0.0060577392578125,-0.037811279296875,-0.0012350082397460938,-0.004550933837890625,-0.01026153564453125,0.05987548828125,0.0002567768096923828,0.01396942138671875,0.0067901611328125,-0.0182037353515625,-0.021453857421875,-0.036895751953125,-0.038726806640625,0.0391845703125,-0.010955810546875,-0.0350341796875,0.00887298583984375,0.01751708984375,0.0202178955078125,0.07220458984375,-0.02082
82470703125,-0.006443023681640625,0.021148681640625,0.0105743408203125,-0.01373291015625,-0.01751708984375,-0.005863189697265625,0.0256195068359375,0.040435791015625,-0.035186767578125,-0.0391845703125,0.02716064453125,-0.0367431640625,-0.0203704833984375,-0.045074462890625,-0.01273345947265625,-0.01142120361328125,-0.0108795166015625,-0.00960540771484375,-0.0228424072265625,-0.01373291015625,-3.1948089599609375e-05,0.0024394989013671875,0.006519317626953125,-0.03302001953125,0.01142120361328125,0.03564453125,0.029327392578125,-0.0484619140625,0.0254669189453125,0.0233001708984375,-0.01535797119140625,-0.0010271072387695312,0.00887298583984375,-0.00791168212890625,-0.029327392578125,-0.014739990234375,-0.002410888671875,-0.00426483154296875,0.0282440185546875,0.0660400390625,0.035186767578125,0.01435089111328125,0.0205230712890625,-0.031341552734375,0.0074462890625,-0.0263824462890625,0.009185791015625,0.01404571533203125,-0.029937744140625,0.00960540771484375,0.01427459716796875,0.041046142578125,0.033935546875,-0.0228424072265625,-0.02947998046875,-0.040435791015625,-0.0020732879638671875,-0.041351318359375,-0.0200653076171875,-0.00038814544677734375,-0.050933837890625,0.040130615234375,-0.051239013671875,0.0006704330444335938,-0.032562255859375,-0.013427734375,0.0290069580078125,0.05462646484375,-0.005092620849609375,-0.02130126953125,-0.055877685546875,0.0195159912109375,0.0203704833984375,0.0021800994873046875,0.0199127197265625,0.03179931640625,-0.02392578125,0.022369384765625,-0.016357421875,-0.0089874267578125,0.0205230712890625,0.005634307861328125,0.00667572021484375,-0.013275146484375,0.0009889602661132812,-0.037811279296875,0.051239013671875,-0.045379638671875,-0.0274658203125,-0.040435791015625,0.0253143310546875,-0.05340576171875,-0.0186004638671875,0.00756072998046875,-0.0174407958984375,-0.0032024383544921875,-0.0203704833984375,0.0208282470703125,-0.066650390625,-0.031341552734375,-0.0013885498046875,-0.03826904296875,0.02130126953125,0.079650878906
25,-0.0105743408203125,0.041046142578125,0.040435791015625,0.0024890899658203125,-0.0175933837890625,-0.0059814453125,-0.03564453125,-0.01157379150390625,-0.00690460205078125,0.022216796875,0.039825439453125,-0.01404571533203125,0.01172637939453125,0.0136566162109375,0.0025272369384765625,-0.034271240234375,0.0043792724609375,0.003337860107421875,-0.02716064453125,-0.016815185546875,0.0233001708984375,0.0245361328125,-0.01319122314453125,-0.0047454833984375,-0.0333251953125,0.01551055908203125,-0.01172637939453125,0.0108795166015625,0.0391845703125,-0.03240966796875,-0.024078369140625,0.056182861328125,0.06268310546875,0.036102294921875,0.050628662109375,0.017974853515625,0.0006122589111328125,-0.04290771484375,0.034576416015625,-0.0263824462890625,0.0208282470703125,0.008026123046875,-0.0203704833984375,-0.019134521484375,-0.035186767578125,-0.057098388671875,0.0216064453125,0.04351806640625,0.0263824462890625,0.040130615234375,-0.0025653839111328125,0.006519317626953125,0.073486328125,-0.040435791015625,0.1185302734375,-0.05462646484375,-0.056182861328125,0.045379638671875,0.03826904296875,0.0022182464599609375,-0.00952911376953125,-0.01273345947265625,0.06756591796875,-0.01187896728515625,-0.005710601806640625,-0.0537109375,0.038116455078125,-0.01010894775390625,0.0285491943359375,0.051544189453125,-0.0165863037109375,0.031646728515625,0.060791015625,0.0311737060546875,0.0367431640625,0.0081024169921875,-0.015899658203125,-0.05462646484375,0.00945281982421875,-0.0208282470703125,0.034881591796875,0.02191162109375,0.005207061767578125,-0.0014276504516601562,-0.01419830322265625,-0.06390380859375,0.005092620849609375,-0.030242919921875,0.007678985595703125,0.07904052734375,-0.0177459716796875,0.04229736328125,-0.0203704833984375,0.00624847412109375,-0.012115478515625,0.01033782958984375,0.0216064453125,0.045684814453125,0.0543212890625,0.01187896728515625,-0.0256195068359375,0.014739990234375,-0.0123443603515625,0.021453857421875,0.010955810546875,0.041656494140625
,0.031494140625,-0.0022182464599609375,-0.06390380859375,0.02777099609375,0.04290771484375,-0.047515869140625,-0.0159759521484375,-0.033477783203125,0.007099151611328125,-0.04998779296875,-0.035186767578125,0.01134490966796875,-0.0032215118408203125,-0.0120391845703125,-0.041046142578125,-0.045379638671875,-0.005092620849609375,0.041351318359375,-0.049072265625,-0.0263824462890625,0.0167388916015625,-0.0159759521484375,0.0254669189453125,0.0195159912109375,-0.058319091796875,0.0543212890625,-0.046295166015625,-0.0484619140625,-0.00682830810546875,0.003376007080078125,-0.034423828125,-0.0085296630859375,-0.0158233642578125,-0.0192108154296875,-0.0016298294067382812,-0.00844573974609375,0.0438232421875,0.00426483154296875,0.0282440185546875,0.0426025390625,0.040740966796875,-0.01450347900390625,-0.054931640625,-0.004108428955078125,0.01187896728515625,0.0073699951171875,0.02423095703125,0.03271484375,-0.00968170166015625,-0.038726806640625,-0.07562255859375,0.03594970703125,-0.032257080078125,-0.0013790130615234375,0.0282440185546875,-0.024688720703125,-0.060791015625,-0.0036449432373046875,0.04351806640625,0.007755279541015625,-0.012420654296875,-0.08087158203125,-0.005245208740234375,-0.002208709716796875,-0.031341552734375,-0.029632568359375,-0.0205230712890625,-0.004878997802734375,0.0234527587890625,0.005710601806640625,-0.0175933837890625,0.01412200927734375,-0.0205230712890625,0.039031982421875,0.0151214599609375,0.005168914794921875,-0.045379638671875,0.014892578125,0.06329345703125,0.00302886962890625,0.01751708984375,0.01812744140625,0.0133514404296875,-0.004032135009765625,0.01142120361328125,0.01049041748046875,-0.02716064453125,-0.03857421875,-0.032257080078125,-0.009185791015625,-0.00682830810546875,-0.0203704833984375,0.00763702392578125,0.00829315185546875,-0.0031833648681640625,0.0168914794921875,0.05926513671875,-0.050628662109375,-0.015045166015625,0.007793426513671875,-0.040435791015625,-0.0079498291015625,-0.064208984375,-0.04443359375,-0.02345275
87890625,0.0059814453125,0.004184722900390625,0.051239013671875,0.0228424072265625,-0.0178985595703125,0.01041412353515625,-0.007061004638671875,-0.010650634765625,-0.0205230712890625,-0.033935546875,-6.121397018432617e-05,-0.0216064453125,0.045379638671875,-0.0067901611328125,-0.01404571533203125,-0.0229949951171875,-0.0036449432373046875,-0.030242919921875,-0.0197601318359375,-0.033172607421875,-0.024993896484375,-0.040740966796875,0.03826904296875,-0.01126861572265625,0.0199127197265625,0.041046142578125,-0.017059326171875,0.039825439453125,-0.06854248046875,-0.02392578125,0.005901336669921875,0.01535797119140625,-0.0254669189453125,-0.01435089111328125,-0.0171356201171875,0.045379638671875,0.052764892578125,-0.007022857666015625,-0.01812744140625,-0.02484130859375,0.008758544921875,-0.005710601806640625,0.0135040283203125,-0.00821685791015625,-0.01003265380859375,-0.003124237060546875,-0.006984710693359375,-0.00798797607421875,0.0367431640625,0.014892578125,0.038421630859375,0.012420654296875,-0.0187530517578125,-0.0156707763671875,0.01812744140625,0.00910186767578125,-0.01551055908203125,-0.01543426513671875,0.00013256072998046875,0.0285491943359375,-0.0280914306640625,-0.00798797607421875,0.0648193359375,0.03533935546875,-0.007213592529296875,-0.0209808349609375,-0.0253143310546875,-0.04815673828125,0.006404876708984375,0.0303955078125,-0.056488037109375,-0.0635986328125,0.01427459716796875,0.006557464599609375,0.027618408203125,0.033782958984375,-0.007598876953125,-0.037200927734375,-0.02978515625,0.052154541015625,0.0036258697509765625,-0.033477783203125,-0.035491943359375,0.0205230712890625,-0.007755279541015625,-0.00551605224609375,0.00376129150390625,0.056182861328125,0.008026123046875,0.0111846923828125,0.032867431640625,0.052764892578125,0.026702880859375,-0.0009212493896484375,-0.0164337158203125,-0.0205230712890625,0.005401611328125,-0.045989990234375,0.0019292831420898438,0.01003265380859375,0.0084075927734375,-0.0963134765625,-0.0177459716796875,-0.
0288543701171875,-0.0259246826171875,0.00975799560546875,0.035186767578125,-0.017364501953125,0.005786895751953125,0.01519775390625,-0.02947998046875,-0.021759033203125,0.021759033203125,0.007061004638671875,0.05462646484375,0.0426025390625,-0.0279388427734375,0.0089111328125,-0.0014562606811523438,-0.018280029296875,-0.039337158203125,0.0391845703125,0.0162811279296875,0.0205230712890625,0.00937652587890625,0.0484619140625,0.02685546875,0.050933837890625,0.04473876953125,0.050323486328125,0.0543212890625,-0.01551055908203125,-0.037353515625,-0.00601959228515625,-0.0195159912109375,-0.0107269287109375,-0.016815185546875,-0.054931640625,0.03179931640625,-0.0231475830078125,0.0120391845703125,-0.0225372314453125,0.00910186767578125,0.00821685791015625,-0.00420379638671875,-0.0225372314453125,0.00995635986328125,0.01898193359375,-0.056793212890625,-0.050323486328125,0.024993896484375,-0.013427734375,-0.031341552734375,0.0288543701171875,-0.070068359375,0.07867431640625,-0.02191162109375,-0.07159423828125,-0.007099151611328125,0.056793212890625,0.01605224609375,-0.0256195068359375,-0.0018129348754882812,-0.01457977294921875,0.0157470703125,-0.0148162841796875,-0.016204833984375,-0.01265716552734375,-0.00952911376953125,-0.06390380859375,0.0206756591796875,0.004222869873046875,-0.00273895263671875,-0.0157470703125,-0.0282440185546875,-0.03765869140625,0.01041412353515625,-0.037200927734375,0.062042236328125,0.031341552734375,-0.00733184814453125,-0.0003159046173095703,0.03363037109375,0.0067901611328125,0.002044677734375,-0.0004968643188476562,0.0229949951171875,-0.0350341796875,-0.00624847412109375,-0.01311492919921875,-0.0171356201171875,-0.0226898193359375,0.0032978057861328125,-0.0253143310546875,0.024078369140625,0.03179931640625,0.013580322265625,-0.003818511962890625,0.00624847412109375,0.015045166015625,-0.0161285400390625,0.0274658203125,-0.0010805130004882812,-0.00675201416015625,0.01666259765625,-0.046295166015625,0.0438232421875,-0.0186004638671875,-0.0015335
0830078125,-0.00864410400390625,-0.00559234619140625,0.0257720947265625,-0.0438232421875,-0.02484130859375,-0.05987548828125,-0.00617218017578125,0.030242919921875,0.0019388198852539062,-0.030242919921875,0.0011186599731445312,-0.0203704833984375,-0.033477783203125,0.0024890899658203125,0.01003265380859375,0.01898193359375,0.0228424072265625,0.0017843246459960938,-0.0151214599609375,0.0156707763671875,-0.03363037109375,-0.03857421875,-0.0152740478515625,0.01751708984375,-0.030242919921875,-0.0197601318359375,-0.0089874267578125,0.00791168212890625,0.0274658203125,0.01288604736328125,0.02947998046875,0.02777099609375,-0.0172882080078125,0.01142120361328125,0.033782958984375,-0.001331329345703125,0.005634307861328125,0.0032215118408203125,0.0158233642578125,0.00937652587890625,-0.035491943359375,0.022216796875,0.03704833984375,0.01080322265625,-0.0168914794921875,-0.0203704833984375,-0.029632568359375,0.04473876953125,0.01396942138671875,0.00372314453125,0.033477783203125,0.04815673828125,-0.09442138671875,0.032257080078125,0.00783538818359375,0.027008056640625,0.0200653076171875,0.0028743743896484375,-0.052459716796875,-0.0146636962890625,-0.02130126953125,0.005245208740234375,0.002777099609375,0.046905517578125,0.0010080337524414062,0.00601959228515625,0.0543212890625,-0.0205230712890625,0.0040130615234375,0.0182037353515625,-0.01126861572265625,-0.013580322265625,0.04351806640625,-0.07159423828125,0.007022857666015625,-0.0537109375,-0.01265716552734375,0.047515869140625,0.002719879150390625,0.01049041748046875,-0.004302978515625,0.0079498291015625,0.01187896728515625,0.007289886474609375,0.0073699951171875,0.01026153564453125,-0.04937744140625,-0.017822265625,-0.001514434814453125,0.0287017822265625,-0.0193634033203125,-0.024688720703125,-0.021759033203125,-0.1068115234375,-0.072509765625,0.00933837890625,-0.039337158203125,-0.0036258697509765625,0.0228424072265625,-0.0158233642578125,-0.001697540283203125,-0.031951904296875,0.0081024169921875,0.0182037353515625,-0
.050933837890625,0.006557464599609375,-0.0006365776062011719,-0.01558685302734375,0.049072265625,0.051239013671875,-0.039031982421875,0.004688262939453125,-0.07904052734375,-0.0197601318359375,-0.02484130859375,0.0231475830078125,0.012115478515625,0.03656005859375,0.007251739501953125,0.003818511962890625,-0.052459716796875,-0.008758544921875,0.0088348388671875,-0.00682830810546875,-0.022216796875,-0.03765869140625,-0.0036258697509765625,0.058013916015625,-0.013275146484375,-0.0200653076171875,-0.0186004638671875,-0.0256195068359375,0.031646728515625,-0.005092620849609375,0.0543212890625,-0.002063751220703125,0.022216796875,0.0028171539306640625,0.021759033203125,0.03009033203125,-0.02130126953125,0.045989990234375,-0.01427459716796875,0.040130615234375,-0.0165863037109375,0.017669677734375,0.0158233642578125,-0.052764892578125,-0.04229736328125,0.0031452178955078125,-0.030242919921875,-0.0018129348754882812,0.061431884765625,0.005130767822265625,-0.0169677734375,0.00617218017578125,0.057098388671875,0.05523681640625,0.0146636962890625,0.00171661376953125,-0.07098388671875,-0.0067901611328125,-0.01018524169921875,0.040130615234375,0.001132965087890625,0.00675201416015625,0.039031982421875,0.01311492919921875,-0.07037353515625,0.030548095703125,-0.0008196830749511719,0.0531005859375,0.02777099609375,0.058319091796875,-0.02716064453125,0.010955810546875,-0.00025200843811035156,-0.0018129348754882812,0.027618408203125,0.0047454833984375,-0.0209808349609375,0.050323486328125,-0.04473876953125,-0.00872039794921875,0.01080322265625,0.05987548828125,-0.045684814453125,0.0066375732421875,-0.0333251953125,0.045684814453125,0.00028324127197265625,-0.035797119140625,-0.0110321044921875,-0.01165008544921875,-0.005672454833984375,-0.0280914306640625,0.04815673828125,0.01265716552734375,0.050323486328125,0.029632568359375,-0.034881591796875,0.0111083984375,0.0018901824951171875,-0.013275146484375,0.00894927978515625,-0.0484619140625,0.022216796875,-0.035186767578125,0.05618286132
8125,-0.012115478515625,0.06390380859375,0.003704071044921875,0.0193634033203125,-0.01412200927734375,-0.017974853515625,0.0496826171875,0.005748748779296875,0.0311737060546875,0.05987548828125,-0.004878997802734375,0.045989990234375,-0.00335693359375,-0.045379638671875,0.00806427001953125,0.00559234619140625,0.010650634765625,-0.0888671875,-0.0254669189453125,0.00988006591796875,-0.0236053466796875,-0.04998779296875,-0.0121917724609375,0.0765380859375,-0.035186767578125,-0.00559234619140625,-0.022216796875,0.026702880859375,-0.041046142578125,0.01396942138671875,0.039031982421875,-0.0178985595703125,0.05987548828125,-0.04876708984375,0.03009033203125,0.0200653076171875,0.051239013671875,-0.0121917724609375,0.0172882080078125,0.0254669189453125,-0.00536346435546875,0.033477783203125,0.0254669189453125,-0.047210693359375,-0.0038776397705078125,0.002681732177734375,0.0020160675048828125,-0.041961669921875,-0.005634307861328125,0.01149749755859375,-0.10247802734375,0.00756072998046875,0.0231475830078125,0.0180511474609375,-0.03564453125,-0.0307159423828125,0.00339508056640625,-0.09197998046875,0.006557464599609375,0.01296234130859375,0.02484130859375,0.06915283203125,-0.0537109375,0.0161285400390625,0.029632568359375,0.0021419525146484375,-0.046600341796875,0.02130126953125,-0.004222869873046875,0.0438232421875,-0.037353515625,0.032867431640625,0.03564453125,0.017974853515625,-0.03887939453125,-0.01041412353515625,-0.0226898193359375,0.0028553009033203125,-0.0206756591796875,-0.01558685302734375,0.04290771484375,0.019439697265625,0.022216796875,-0.0199127197265625,0.0180511474609375,0.0259246826171875,0.036407470703125,0.08331298828125,0.0038585662841796875,0.062347412109375,0.024078369140625,0.01558685302734375,-0.039520263671875,0.01080322265625],"index":0}],"model":"mistral-embed","usage":{"prompt_audio_seconds":null,"prompt_tokens":6,"total_tokens":6,"completion_tokens":0,"request_count":null,"prompt_tokens_details":null,"prompt_token_details":null}}' + headers: + 
CF-RAY: + - 9e5cd10ffcbfd8d9-YYZ + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Thu, 02 Apr 2026 03:41:15 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + content-length: + - '19813' + mistral-correlation-id: + - 019d4c47-e638-7306-8d2d-83fd8cf8d10f + set-cookie: + - __cf_bm=7EqwPrwtPFhQkuGcfLm.RfWJAYY7cQn_qpft9jmV7Ac-1775101273.5943146-1.0.1.1-O0EbKTDZk4rE1QaE26SYDH8H7AJeMjAdygU3IP4g8Cgb4qoqmOgfGD0_807.OFtkrSpxwMVSokvkBM_G9RW2ifhKNYiXMF_5LQlQUsAydGlAWktQoaYpK3ctfETR.Jr7; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 04:11:15 + GMT + - _cfuvid=lQ8Oh8SdjjGS_ixqqFymzmlYft8yy_HttxySV0XMQRU-1775101273.5943146-1.0.1.1-umEAhbUMv6uwio9nIlfjZ6FY8BRUqThQcSMc4CEETTk; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '1799' + x-kong-proxy-latency: + - '9' + x-kong-request-id: + - 019d4c47-e638-7306-8d2d-83fd8cf8d10f + x-kong-upstream-latency: + - '1801' + x-ratelimit-limit-req-minute: + - '60' + x-ratelimit-limit-tokens-minute: + - '20000000' + x-ratelimit-limit-tokens-month: + - '200000000000' + x-ratelimit-remaining-req-minute: + - '56' + x-ratelimit-remaining-tokens-minute: + - '19999994' + x-ratelimit-remaining-tokens-month: + - '199999999988' + x-ratelimit-tokens-query-cost: + - '6' + status: + code: 200 + message: OK +version: 1 diff --git a/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_fim_complete_async.yaml b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_fim_complete_async.yaml new file mode 100644 index 00000000..140f5b87 --- /dev/null +++ b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_fim_complete_async.yaml @@ -0,0 +1,84 @@ +interactions: +- request: + body: 
'{"model":"codestral-latest","prompt":"def subtract(a, b):\n return ","top_p":1.0,"max_tokens":16,"stream":false,"suffix":"\n\nprint(subtract(5, + 2))"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '152' + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/fim/completions + response: + body: + string: '{"id":"5bf620f1202744e3b8e8eb239898cc8b","created":1775101276,"model":"codestral-latest","usage":{"prompt_tokens":23,"total_tokens":27,"completion_tokens":4,"prompt_tokens_details":{"cached_tokens":0}},"object":"chat.completion","choices":[{"index":0,"finish_reason":"stop","message":{"role":"assistant","tool_calls":null,"content":"a + - b"}}]}' + headers: + CF-RAY: + - 9e5cd1227a2dac2e-YYZ + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Thu, 02 Apr 2026 03:41:16 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + content-length: + - '343' + mistral-correlation-id: + - 019d4c47-f1d0-75a5-af4d-06959103ef56 + set-cookie: + - __cf_bm=dVTL16bCz2seB8j5ETZlNmOw3h8nfVCE_dAwLMeTrQA-1775101276.5557144-1.0.1.1-LMbd62qipvXbKAineVU9B2Q23zuB14zQZ7MLtG2aUiP4rpDvzYxuACzcCed4ZR3yCYyhIDl9aCnrP1WcvNo3pn8fHtsQ6dQryAcV.WWHiTUt77qOKLclKDN4y71FWJmG; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 04:11:16 + GMT + - _cfuvid=gCBg_Fuvsy6pu3I3XslSAZbSAwLhzp2GyPIIoTnHijI-1775101276.5557144-1.0.1.1-vzTfjWW.TqeD67n60J8PQukdeUyHE9qdttS_FFzn_yU; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '105' + x-kong-proxy-latency: + - '13' + x-kong-request-id: + - 
019d4c47-f1d0-75a5-af4d-06959103ef56 + x-kong-upstream-latency: + - '106' + x-ratelimit-limit-req-minute: + - '60' + x-ratelimit-limit-tokens-minute: + - '50000' + x-ratelimit-limit-tokens-month: + - '4000000' + x-ratelimit-remaining-req-minute: + - '53' + x-ratelimit-remaining-tokens-minute: + - '49918' + x-ratelimit-remaining-tokens-month: + - '3999918' + x-ratelimit-tokens-query-cost: + - '27' + status: + code: 200 + message: OK +version: 1 diff --git a/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_fim_complete_sync.yaml b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_fim_complete_sync.yaml new file mode 100644 index 00000000..47450337 --- /dev/null +++ b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_fim_complete_sync.yaml @@ -0,0 +1,84 @@ +interactions: +- request: + body: '{"model":"codestral-latest","prompt":"def add(a, b):\n return ","top_p":1.0,"max_tokens":16,"stream":false,"suffix":"\n\nprint(add(2, + 3))"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '142' + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/fim/completions + response: + body: + string: '{"id":"60b50f12cbc24eb192f62dcab03f95eb","created":1775101275,"model":"codestral-latest","usage":{"prompt_tokens":22,"total_tokens":27,"completion_tokens":5,"prompt_tokens_details":{"cached_tokens":0}},"object":"chat.completion","choices":[{"index":0,"finish_reason":"stop","message":{"role":"assistant","tool_calls":null,"content":"a + + b\n"}}]}' + headers: + CF-RAY: + - 9e5cd11e4bdaac99-YYZ + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Thu, 02 Apr 2026 03:41:16 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + 
X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + content-length: + - '345' + mistral-correlation-id: + - 019d4c47-ef33-740d-b115-7229b75322a4 + set-cookie: + - __cf_bm=BiV4l059p8lV76JHmiera4znDnWeqqfgMKBwQ8B7UkM-1775101275.887504-1.0.1.1-gkvmyiac4AsQF.cGCGEUypjz75LpRg6Ba8arQZvtlaNDbzDNQ4EQowkjJAsmtQ.hlhEPoxzAlrSfe53g22JhKNxqK2JLYgYNIz3p1IJbuBIxXXrDy0zRx18uvXczzjqL; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 04:11:16 + GMT + - _cfuvid=4uO2Jh4pXZDrSB1eaY0wDQ6vKXKKBhkV0VpVYy3bvT4-1775101275.887504-1.0.1.1-cJzmw0apOlaPDMWRzaeV0Z6KebHGh64kikctyk2_Qak; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '84' + x-kong-proxy-latency: + - '11' + x-kong-request-id: + - 019d4c47-ef33-740d-b115-7229b75322a4 + x-kong-upstream-latency: + - '85' + x-ratelimit-limit-req-minute: + - '60' + x-ratelimit-limit-tokens-minute: + - '50000' + x-ratelimit-limit-tokens-month: + - '4000000' + x-ratelimit-remaining-req-minute: + - '55' + x-ratelimit-remaining-tokens-minute: + - '49973' + x-ratelimit-remaining-tokens-month: + - '3999973' + x-ratelimit-tokens-query-cost: + - '27' + status: + code: 200 + message: OK +version: 1 diff --git a/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_fim_stream_async.yaml b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_fim_stream_async.yaml new file mode 100644 index 00000000..e530bcec --- /dev/null +++ b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_fim_stream_async.yaml @@ -0,0 +1,84 @@ +interactions: +- request: + body: '{"model":"codestral-latest","prompt":"def divide(a, b):\n return ","top_p":1.0,"max_tokens":16,"stream":true,"suffix":"\n\nprint(divide(8, + 2))"}' + headers: + Accept: + - text/event-stream + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '147' + Host: + - 
api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/fim/completions + response: + body: + string: 'data: {"id":"8d337d2df4fe4b7686c29bb29fa9f237","object":"chat.completion.chunk","created":1775101277,"model":"codestral-latest","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null}]} + + + data: {"id":"8d337d2df4fe4b7686c29bb29fa9f237","object":"chat.completion.chunk","created":1775101277,"model":"codestral-latest","choices":[{"index":0,"delta":{"content":"a"},"finish_reason":null}],"p":"abcdefghijklmnopqrstuvwxyz01"} + + + data: {"id":"8d337d2df4fe4b7686c29bb29fa9f237","object":"chat.completion.chunk","created":1775101277,"model":"codestral-latest","choices":[{"index":0,"delta":{"content":" + //"},"finish_reason":null}],"p":"abcdefghijklmnopqrstuvwxyz0123456"} + + + data: {"id":"8d337d2df4fe4b7686c29bb29fa9f237","object":"chat.completion.chunk","created":1775101277,"model":"codestral-latest","choices":[{"index":0,"delta":{"content":" + b"},"finish_reason":"stop"}],"usage":{"prompt_tokens":23,"total_tokens":27,"completion_tokens":4,"prompt_tokens_details":{"cached_tokens":0}},"p":"abcdefghijklmnopqrstuvwxyz0123456789"} + + + data: [DONE] + + + ' + headers: + CF-RAY: + - 9e5cd1254fb77091-YYZ + Connection: + - keep-alive + Content-Type: + - text/event-stream; charset=utf-8 + Date: + - Thu, 02 Apr 2026 03:41:17 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + mistral-correlation-id: + - 019d4c47-f385-7a1e-8466-92cc23331038 + set-cookie: + - 
__cf_bm=hj0ybOEdEgNAD1ttbVO8wxegnUj_75N4M4UbNaG2BU8-1775101277.004963-1.0.1.1-wIyg6HOs_as0R50iRRH2P0Q215XxIy30DC5Jb1lWxY7_rcWaatschHLK0UpQNjEPUFFp2JlL4DlLnwHvoOJGhUohBAbJPBom51ZG3sLZVWORZgQoXzxeycBx9OPA6s3P; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 04:11:17 + GMT + - _cfuvid=NTwIqGShenboX5hlFFr6d7BF.bISXqB_wjxm0KA31zc-1775101277.004963-1.0.1.1-OOKbslRssmYH7aq.JRmOlUMSY0Kg355XzpGyksC4qvU; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '64' + x-kong-proxy-latency: + - '11' + x-kong-request-id: + - 019d4c47-f385-7a1e-8466-92cc23331038 + x-kong-upstream-latency: + - '64' + status: + code: 200 + message: OK +version: 1 diff --git a/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_fim_stream_sync.yaml b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_fim_stream_sync.yaml new file mode 100644 index 00000000..47eca5cf --- /dev/null +++ b/py/src/braintrust/integrations/mistral/cassettes/test_wrap_mistral_fim_stream_sync.yaml @@ -0,0 +1,80 @@ +interactions: +- request: + body: '{"model":"codestral-latest","prompt":"def multiply(a, b):\n return ","top_p":1.0,"max_tokens":16,"stream":true,"suffix":"\n\nprint(multiply(3, + 4))"}' + headers: + Accept: + - text/event-stream + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '151' + Host: + - api.mistral.ai + content-type: + - application/json + user-agent: + - mistral-client-python/1.12.4 + method: POST + uri: https://api.mistral.ai/v1/fim/completions + response: + body: + string: 'data: {"id":"ac6e3fca52c64bcbae627fe44b008c4b","object":"chat.completion.chunk","created":1775101276,"model":"codestral-latest","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null}]} + + + data: 
{"id":"ac6e3fca52c64bcbae627fe44b008c4b","object":"chat.completion.chunk","created":1775101276,"model":"codestral-latest","choices":[{"index":0,"delta":{"content":"a"},"finish_reason":null}],"p":"abcdefghijklmnopqrstuvwxyz0123456789"} + + + data: {"id":"ac6e3fca52c64bcbae627fe44b008c4b","object":"chat.completion.chunk","created":1775101276,"model":"codestral-latest","choices":[{"index":0,"delta":{"content":" + * b"},"finish_reason":"stop"}],"usage":{"prompt_tokens":24,"total_tokens":28,"completion_tokens":4,"prompt_tokens_details":{"cached_tokens":0}},"p":"abcdefghijklmnopqrstuvwxy"} + + + data: [DONE] + + + ' + headers: + CF-RAY: + - 9e5cd1206c93ac4e-YYZ + Connection: + - keep-alive + Content-Type: + - text/event-stream; charset=utf-8 + Date: + - Thu, 02 Apr 2026 03:41:16 GMT + Server: + - cloudflare + Strict-Transport-Security: + - max-age=15552000; includeSubDomains; preload + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + mistral-correlation-id: + - 019d4c47-f08a-7ea3-b82b-853a3bf729bc + set-cookie: + - __cf_bm=SSJXyFpCd.aLHcpHuN36xFWqdKgdYPdvHJJzx7bsQ0A-1775101276.2305315-1.0.1.1-g7VRycpM4ICSPd71eamrf3tK963O8TYF94dIKzE6TShV03ge6AVqWjxtbMFCkdZkU2IztJmWTX6pnqq8MpUpeZn7z_PM7xSJbEwyIRPqMYCpGsIBAHDmHmVtAbjdnfYA; + HttpOnly; Secure; Path=/; Domain=mistral.ai; Expires=Thu, 02 Apr 2026 04:11:16 + GMT + - _cfuvid=lJeeTTQ69mjYeNDKnk6h3Hqr6Xg2YM9YMFhuZ_seaSU-1775101276.2305315-1.0.1.1-A5rbpzUpj.sqPI_7aZkWmergzzzioU5a4pcXCQFcj0k; + HttpOnly; SameSite=None; Secure; Path=/; Domain=mistral.ai + x-envoy-upstream-service-time: + - '78' + x-kong-proxy-latency: + - '10' + x-kong-request-id: + - 019d4c47-f08a-7ea3-b82b-853a3bf729bc + x-kong-upstream-latency: + - '79' + status: + code: 200 + message: OK +version: 1 diff --git a/py/src/braintrust/integrations/mistral/integration.py b/py/src/braintrust/integrations/mistral/integration.py new file mode 
100644 index 00000000..a0fac75c --- /dev/null +++ b/py/src/braintrust/integrations/mistral/integration.py @@ -0,0 +1,19 @@ +"""Mistral integration orchestration.""" + +from braintrust.integrations.base import BaseIntegration + +from .patchers import AgentsPatcher, ChatPatcher, EmbeddingsPatcher, FimPatcher + + +class MistralIntegration(BaseIntegration): + """Braintrust instrumentation for the Mistral Python SDK.""" + + name = "mistral" + import_names = ("mistralai", "mistralai.client") + min_version = "1.12.4" + patchers = ( + ChatPatcher, + EmbeddingsPatcher, + FimPatcher, + AgentsPatcher, + ) diff --git a/py/src/braintrust/integrations/mistral/patchers.py b/py/src/braintrust/integrations/mistral/patchers.py new file mode 100644 index 00000000..1133b4a4 --- /dev/null +++ b/py/src/braintrust/integrations/mistral/patchers.py @@ -0,0 +1,282 @@ +"""Mistral patchers.""" + +from braintrust.integrations.base import CompositeFunctionWrapperPatcher, FunctionWrapperPatcher + +from .tracing import ( + _agents_complete_async_wrapper, + _agents_complete_wrapper, + _agents_stream_async_wrapper, + _agents_stream_wrapper, + _chat_complete_async_wrapper, + _chat_complete_wrapper, + _chat_stream_async_wrapper, + _chat_stream_wrapper, + _embeddings_create_async_wrapper, + _embeddings_create_wrapper, + _fim_complete_async_wrapper, + _fim_complete_wrapper, + _fim_stream_async_wrapper, + _fim_stream_wrapper, +) + + +class _ChatCompleteV2Patcher(FunctionWrapperPatcher): + name = "mistral.chat.complete.v2" + target_module = "mistralai.client.chat" + target_path = "Chat.complete" + wrapper = _chat_complete_wrapper + + +class _ChatCompleteV1Patcher(FunctionWrapperPatcher): + name = "mistral.chat.complete.v1" + target_module = "mistralai.chat" + target_path = "Chat.complete" + wrapper = _chat_complete_wrapper + superseded_by = (_ChatCompleteV2Patcher,) + + +class _ChatCompleteAsyncV2Patcher(FunctionWrapperPatcher): + name = "mistral.chat.complete_async.v2" + target_module = 
"mistralai.client.chat" + target_path = "Chat.complete_async" + wrapper = _chat_complete_async_wrapper + + +class _ChatCompleteAsyncV1Patcher(FunctionWrapperPatcher): + name = "mistral.chat.complete_async.v1" + target_module = "mistralai.chat" + target_path = "Chat.complete_async" + wrapper = _chat_complete_async_wrapper + superseded_by = (_ChatCompleteAsyncV2Patcher,) + + +class _ChatStreamV2Patcher(FunctionWrapperPatcher): + name = "mistral.chat.stream.v2" + target_module = "mistralai.client.chat" + target_path = "Chat.stream" + wrapper = _chat_stream_wrapper + + +class _ChatStreamV1Patcher(FunctionWrapperPatcher): + name = "mistral.chat.stream.v1" + target_module = "mistralai.chat" + target_path = "Chat.stream" + wrapper = _chat_stream_wrapper + superseded_by = (_ChatStreamV2Patcher,) + + +class _ChatStreamAsyncV2Patcher(FunctionWrapperPatcher): + name = "mistral.chat.stream_async.v2" + target_module = "mistralai.client.chat" + target_path = "Chat.stream_async" + wrapper = _chat_stream_async_wrapper + + +class _ChatStreamAsyncV1Patcher(FunctionWrapperPatcher): + name = "mistral.chat.stream_async.v1" + target_module = "mistralai.chat" + target_path = "Chat.stream_async" + wrapper = _chat_stream_async_wrapper + superseded_by = (_ChatStreamAsyncV2Patcher,) + + +class ChatPatcher(CompositeFunctionWrapperPatcher): + name = "mistral.chat" + sub_patchers = ( + _ChatCompleteV2Patcher, + _ChatCompleteV1Patcher, + _ChatCompleteAsyncV2Patcher, + _ChatCompleteAsyncV1Patcher, + _ChatStreamV2Patcher, + _ChatStreamV1Patcher, + _ChatStreamAsyncV2Patcher, + _ChatStreamAsyncV1Patcher, + ) + + +class _EmbeddingsCreateV2Patcher(FunctionWrapperPatcher): + name = "mistral.embeddings.create.v2" + target_module = "mistralai.client.embeddings" + target_path = "Embeddings.create" + wrapper = _embeddings_create_wrapper + + +class _EmbeddingsCreateV1Patcher(FunctionWrapperPatcher): + name = "mistral.embeddings.create.v1" + target_module = "mistralai.embeddings" + target_path = 
"Embeddings.create" + wrapper = _embeddings_create_wrapper + superseded_by = (_EmbeddingsCreateV2Patcher,) + + +class _EmbeddingsCreateAsyncV2Patcher(FunctionWrapperPatcher): + name = "mistral.embeddings.create_async.v2" + target_module = "mistralai.client.embeddings" + target_path = "Embeddings.create_async" + wrapper = _embeddings_create_async_wrapper + + +class _EmbeddingsCreateAsyncV1Patcher(FunctionWrapperPatcher): + name = "mistral.embeddings.create_async.v1" + target_module = "mistralai.embeddings" + target_path = "Embeddings.create_async" + wrapper = _embeddings_create_async_wrapper + superseded_by = (_EmbeddingsCreateAsyncV2Patcher,) + + +class EmbeddingsPatcher(CompositeFunctionWrapperPatcher): + name = "mistral.embeddings" + sub_patchers = ( + _EmbeddingsCreateV2Patcher, + _EmbeddingsCreateV1Patcher, + _EmbeddingsCreateAsyncV2Patcher, + _EmbeddingsCreateAsyncV1Patcher, + ) + + +class _FimCompleteV2Patcher(FunctionWrapperPatcher): + name = "mistral.fim.complete.v2" + target_module = "mistralai.client.fim" + target_path = "Fim.complete" + wrapper = _fim_complete_wrapper + + +class _FimCompleteV1Patcher(FunctionWrapperPatcher): + name = "mistral.fim.complete.v1" + target_module = "mistralai.fim" + target_path = "Fim.complete" + wrapper = _fim_complete_wrapper + superseded_by = (_FimCompleteV2Patcher,) + + +class _FimCompleteAsyncV2Patcher(FunctionWrapperPatcher): + name = "mistral.fim.complete_async.v2" + target_module = "mistralai.client.fim" + target_path = "Fim.complete_async" + wrapper = _fim_complete_async_wrapper + + +class _FimCompleteAsyncV1Patcher(FunctionWrapperPatcher): + name = "mistral.fim.complete_async.v1" + target_module = "mistralai.fim" + target_path = "Fim.complete_async" + wrapper = _fim_complete_async_wrapper + superseded_by = (_FimCompleteAsyncV2Patcher,) + + +class _FimStreamV2Patcher(FunctionWrapperPatcher): + name = "mistral.fim.stream.v2" + target_module = "mistralai.client.fim" + target_path = "Fim.stream" + wrapper = 
_fim_stream_wrapper + + +class _FimStreamV1Patcher(FunctionWrapperPatcher): + name = "mistral.fim.stream.v1" + target_module = "mistralai.fim" + target_path = "Fim.stream" + wrapper = _fim_stream_wrapper + superseded_by = (_FimStreamV2Patcher,) + + +class _FimStreamAsyncV2Patcher(FunctionWrapperPatcher): + name = "mistral.fim.stream_async.v2" + target_module = "mistralai.client.fim" + target_path = "Fim.stream_async" + wrapper = _fim_stream_async_wrapper + + +class _FimStreamAsyncV1Patcher(FunctionWrapperPatcher): + name = "mistral.fim.stream_async.v1" + target_module = "mistralai.fim" + target_path = "Fim.stream_async" + wrapper = _fim_stream_async_wrapper + superseded_by = (_FimStreamAsyncV2Patcher,) + + +class FimPatcher(CompositeFunctionWrapperPatcher): + name = "mistral.fim" + sub_patchers = ( + _FimCompleteV2Patcher, + _FimCompleteV1Patcher, + _FimCompleteAsyncV2Patcher, + _FimCompleteAsyncV1Patcher, + _FimStreamV2Patcher, + _FimStreamV1Patcher, + _FimStreamAsyncV2Patcher, + _FimStreamAsyncV1Patcher, + ) + + +class _AgentsCompleteV2Patcher(FunctionWrapperPatcher): + name = "mistral.agents.complete.v2" + target_module = "mistralai.client.agents" + target_path = "Agents.complete" + wrapper = _agents_complete_wrapper + + +class _AgentsCompleteV1Patcher(FunctionWrapperPatcher): + name = "mistral.agents.complete.v1" + target_module = "mistralai.agents" + target_path = "Agents.complete" + wrapper = _agents_complete_wrapper + superseded_by = (_AgentsCompleteV2Patcher,) + + +class _AgentsCompleteAsyncV2Patcher(FunctionWrapperPatcher): + name = "mistral.agents.complete_async.v2" + target_module = "mistralai.client.agents" + target_path = "Agents.complete_async" + wrapper = _agents_complete_async_wrapper + + +class _AgentsCompleteAsyncV1Patcher(FunctionWrapperPatcher): + name = "mistral.agents.complete_async.v1" + target_module = "mistralai.agents" + target_path = "Agents.complete_async" + wrapper = _agents_complete_async_wrapper + superseded_by = 
(_AgentsCompleteAsyncV2Patcher,) + + +class _AgentsStreamV2Patcher(FunctionWrapperPatcher): + name = "mistral.agents.stream.v2" + target_module = "mistralai.client.agents" + target_path = "Agents.stream" + wrapper = _agents_stream_wrapper + + +class _AgentsStreamV1Patcher(FunctionWrapperPatcher): + name = "mistral.agents.stream.v1" + target_module = "mistralai.agents" + target_path = "Agents.stream" + wrapper = _agents_stream_wrapper + superseded_by = (_AgentsStreamV2Patcher,) + + +class _AgentsStreamAsyncV2Patcher(FunctionWrapperPatcher): + name = "mistral.agents.stream_async.v2" + target_module = "mistralai.client.agents" + target_path = "Agents.stream_async" + wrapper = _agents_stream_async_wrapper + + +class _AgentsStreamAsyncV1Patcher(FunctionWrapperPatcher): + name = "mistral.agents.stream_async.v1" + target_module = "mistralai.agents" + target_path = "Agents.stream_async" + wrapper = _agents_stream_async_wrapper + superseded_by = (_AgentsStreamAsyncV2Patcher,) + + +class AgentsPatcher(CompositeFunctionWrapperPatcher): + name = "mistral.agents" + sub_patchers = ( + _AgentsCompleteV2Patcher, + _AgentsCompleteV1Patcher, + _AgentsCompleteAsyncV2Patcher, + _AgentsCompleteAsyncV1Patcher, + _AgentsStreamV2Patcher, + _AgentsStreamV1Patcher, + _AgentsStreamAsyncV2Patcher, + _AgentsStreamAsyncV1Patcher, + ) diff --git a/py/src/braintrust/integrations/mistral/test_mistral.py b/py/src/braintrust/integrations/mistral/test_mistral.py new file mode 100644 index 00000000..ab1cfdab --- /dev/null +++ b/py/src/braintrust/integrations/mistral/test_mistral.py @@ -0,0 +1,670 @@ +import importlib +import inspect +import os +import time +from contextlib import contextmanager +from pathlib import Path + +import pytest +from braintrust import logger +from braintrust.integrations.mistral import MistralIntegration, wrap_mistral +from braintrust.integrations.mistral.tracing import ( + _aggregate_completion_events, + _chat_complete_async_wrapper, + _chat_complete_wrapper, +) +from 
braintrust.test_helpers import init_test_logger +from braintrust.wrappers.test_utils import assert_metrics_are_valid, verify_autoinstrument_script + + +pytest.importorskip("mistralai") + +try: + from mistralai.client import Mistral +except ImportError: + from mistralai import Mistral + +try: + Chat = importlib.import_module("mistralai.client.chat").Chat + Embeddings = importlib.import_module("mistralai.client.embeddings").Embeddings + Fim = importlib.import_module("mistralai.client.fim").Fim + Agents = importlib.import_module("mistralai.client.agents").Agents + models = importlib.import_module("mistralai.client.models") +except ImportError: + Chat = importlib.import_module("mistralai.chat").Chat + Embeddings = importlib.import_module("mistralai.embeddings").Embeddings + Fim = importlib.import_module("mistralai.fim").Fim + Agents = importlib.import_module("mistralai.agents").Agents + models = importlib.import_module("mistralai.models") + + +PROJECT_NAME = "test-mistral-sdk" +CHAT_MODEL = "mistral-small-latest" +AGENT_MODEL = CHAT_MODEL +EMBEDDING_MODEL = "mistral-embed" +FIM_MODEL = "codestral-latest" + + +@pytest.fixture(scope="module") +def vcr_cassette_dir(): + return str(Path(__file__).resolve().parent / "cassettes") + + +@pytest.fixture +def memory_logger(): + init_test_logger(PROJECT_NAME) + with logger._internal_with_memory_background_logger() as bgl: + yield bgl + + +def _get_client(): + return Mistral(api_key=os.environ.get("MISTRAL_API_KEY")) + + +@contextmanager +def _temporary_agent(client): + manager = getattr(getattr(client, "beta", None), "agents", None) + assert manager is not None, "Mistral beta.agents is required for agent tests" + + agent = manager.create( + model=AGENT_MODEL, + name=f"braintrust-test-agent-{int(time.time() * 1000)}", + instructions="You are concise. 
Keep responses under five words.", + ) + agent_id = getattr(agent, "id", None) or getattr(agent, "agent_id", None) + assert agent_id, "Expected created agent to include an id" + + try: + yield agent_id + finally: + manager.delete(agent_id=agent_id) + + +@pytest.mark.vcr +def test_wrap_mistral_chat_complete_sync(memory_logger): + assert not memory_logger.pop() + + client = wrap_mistral(_get_client()) + start = time.time() + response = client.chat.complete( + model=CHAT_MODEL, + messages=[{"role": "user", "content": "What is 2+2? Reply with just the number."}], + max_tokens=10, + ) + end = time.time() + + assert "4" in str(response.choices[0].message.content) + + spans = memory_logger.pop() + assert len(spans) == 1 + span = spans[0] + assert span["input"] == [{"role": "user", "content": "What is 2+2? Reply with just the number."}] + assert span["metadata"]["provider"] == "mistral" + assert span["metadata"]["model"] == CHAT_MODEL + assert "4" in str(span["output"]) + assert_metrics_are_valid(span["metrics"], start, end) + + +@pytest.mark.vcr +def test_wrap_mistral_chat_stream_sync(memory_logger): + assert not memory_logger.pop() + + client = wrap_mistral(_get_client()) + start = time.time() + with client.chat.stream( + model=CHAT_MODEL, + messages=[{"role": "user", "content": "What is 5+5? 
Reply with just the number."}], + max_tokens=10, + ) as stream: + chunks = list(stream) + end = time.time() + + assert chunks + streamed_text = "".join( + choice.delta.content or "" + for chunk in chunks + for choice in (chunk.data.choices or []) + if getattr(choice, "delta", None) is not None and isinstance(choice.delta.content, str) + ) + assert "10" in streamed_text + + spans = memory_logger.pop() + assert len(spans) == 1 + span = spans[0] + assert span["metadata"]["provider"] == "mistral" + assert span["metadata"]["model"] == CHAT_MODEL + assert span["metadata"]["stream"] == True + assert span["metrics"]["time_to_first_token"] >= 0 + assert "10" in str(span["output"]) + assert_metrics_are_valid(span["metrics"], start, end) + + +@pytest.mark.vcr +@pytest.mark.asyncio +async def test_wrap_mistral_chat_complete_async(memory_logger): + assert not memory_logger.pop() + + client = wrap_mistral(_get_client()) + start = time.time() + response = await client.chat.complete_async( + model=CHAT_MODEL, + messages=[{"role": "user", "content": "What is 3+3? Reply with just the number."}], + max_tokens=10, + ) + end = time.time() + + assert "6" in str(response.choices[0].message.content) + + spans = memory_logger.pop() + assert len(spans) == 1 + span = spans[0] + assert span["metadata"]["provider"] == "mistral" + assert span["metadata"]["model"] == CHAT_MODEL + assert "6" in str(span["output"]) + assert_metrics_are_valid(span["metrics"], start, end) + + +@pytest.mark.vcr +def test_wrap_mistral_agents_complete_sync(memory_logger): + assert not memory_logger.pop() + + client = wrap_mistral(_get_client()) + with _temporary_agent(client) as agent_id: + start = time.time() + response = client.agents.complete( + agent_id=agent_id, + messages=[{"role": "user", "content": "What is 8+1? 
Reply with just the number."}], + max_tokens=10, + ) + end = time.time() + + assert "9" in str(response.choices[0].message.content) + + spans = memory_logger.pop() + assert len(spans) == 1 + span = spans[0] + assert span["input"] == [{"role": "user", "content": "What is 8+1? Reply with just the number."}] + assert span["metadata"]["provider"] == "mistral" + assert span["metadata"]["agent_id"] == agent_id + assert "9" in str(span["output"]) + assert_metrics_are_valid(span["metrics"], start, end) + + +@pytest.mark.vcr +def test_wrap_mistral_agents_stream_sync(memory_logger): + assert not memory_logger.pop() + + client = wrap_mistral(_get_client()) + with _temporary_agent(client) as agent_id: + start = time.time() + with client.agents.stream( + agent_id=agent_id, + messages=[{"role": "user", "content": "What is 6+5? Reply with just the number."}], + max_tokens=10, + ) as stream: + chunks = list(stream) + end = time.time() + + assert chunks + streamed_text = "".join( + choice.delta.content or "" + for chunk in chunks + for choice in (chunk.data.choices or []) + if getattr(choice, "delta", None) is not None and isinstance(choice.delta.content, str) + ) + assert "11" in streamed_text + + spans = memory_logger.pop() + assert len(spans) == 1 + span = spans[0] + assert span["metadata"]["provider"] == "mistral" + assert span["metadata"]["agent_id"] == agent_id + assert span["metadata"]["stream"] == True + assert span["metrics"]["time_to_first_token"] >= 0 + assert "11" in str(span["output"]) + assert_metrics_are_valid(span["metrics"], start, end) + + +@pytest.mark.vcr +@pytest.mark.asyncio +async def test_wrap_mistral_agents_complete_async(memory_logger): + assert not memory_logger.pop() + + client = wrap_mistral(_get_client()) + with _temporary_agent(client) as agent_id: + start = time.time() + response = await client.agents.complete_async( + agent_id=agent_id, + messages=[{"role": "user", "content": "What is 7+2? 
Reply with just the number."}], + max_tokens=10, + ) + end = time.time() + + assert "9" in str(response.choices[0].message.content) + + spans = memory_logger.pop() + assert len(spans) == 1 + span = spans[0] + assert span["metadata"]["provider"] == "mistral" + assert span["metadata"]["agent_id"] == agent_id + assert "9" in str(span["output"]) + assert_metrics_are_valid(span["metrics"], start, end) + + +@pytest.mark.vcr +@pytest.mark.asyncio +async def test_wrap_mistral_agents_stream_async(memory_logger): + assert not memory_logger.pop() + + client = wrap_mistral(_get_client()) + with _temporary_agent(client) as agent_id: + start = time.time() + stream = await client.agents.stream_async( + agent_id=agent_id, + messages=[{"role": "user", "content": "What is 4+8? Reply with just the number."}], + max_tokens=10, + ) + chunks = [] + async for chunk in stream: + chunks.append(chunk) + end = time.time() + + assert chunks + + spans = memory_logger.pop() + assert len(spans) == 1 + span = spans[0] + assert span["metadata"]["provider"] == "mistral" + assert span["metadata"]["agent_id"] == agent_id + assert span["metadata"]["stream"] == True + assert span["metrics"]["time_to_first_token"] >= 0 + assert span["output"] + assert_metrics_are_valid(span["metrics"], start, end) + + +@pytest.mark.vcr +def test_wrap_mistral_embeddings_create(memory_logger): + assert not memory_logger.pop() + + client = wrap_mistral(_get_client()) + start = time.time() + response = client.embeddings.create( + model=EMBEDDING_MODEL, + inputs="braintrust tracing", + ) + end = time.time() + + assert response.data + assert response.data[0].embedding + + spans = memory_logger.pop() + assert len(spans) == 1 + span = spans[0] + assert span["input"] == "braintrust tracing" + assert span["metadata"]["provider"] == "mistral" + assert span["metadata"]["model"] == EMBEDDING_MODEL + assert span["output"]["embeddings_count"] == 1 + assert span["output"]["embedding_length"] == len(response.data[0].embedding) + assert 
span["metrics"]["prompt_tokens"] > 0 + assert span["metrics"]["tokens"] > 0 + assert start <= span["metrics"]["start"] <= span["metrics"]["end"] <= end + + +@pytest.mark.vcr +def test_wrap_mistral_fim_complete_sync(memory_logger): + assert not memory_logger.pop() + + client = wrap_mistral(_get_client()) + start = time.time() + response = client.fim.complete( + model=FIM_MODEL, + prompt="def add(a, b):\n return ", + suffix="\n\nprint(add(2, 3))", + max_tokens=16, + ) + end = time.time() + + assert response.choices + assert response.choices[0].message.content + + spans = memory_logger.pop() + assert len(spans) == 1 + span = spans[0] + assert span["input"]["prompt"] == "def add(a, b):\n return " + assert span["input"]["suffix"] == "\n\nprint(add(2, 3))" + assert span["metadata"]["provider"] == "mistral" + assert span["metadata"]["model"] == FIM_MODEL + assert "return" in str(span["input"]) + assert span["output"] + assert_metrics_are_valid(span["metrics"], start, end) + + +@pytest.mark.vcr +def test_wrap_mistral_fim_stream_sync(memory_logger): + assert not memory_logger.pop() + + client = wrap_mistral(_get_client()) + start = time.time() + with client.fim.stream( + model=FIM_MODEL, + prompt="def multiply(a, b):\n return ", + suffix="\n\nprint(multiply(3, 4))", + max_tokens=16, + ) as stream: + chunks = list(stream) + end = time.time() + + assert chunks + streamed_text = "".join( + choice.delta.content or "" + for chunk in chunks + for choice in (chunk.data.choices or []) + if getattr(choice, "delta", None) is not None and isinstance(choice.delta.content, str) + ) + assert streamed_text + + spans = memory_logger.pop() + assert len(spans) == 1 + span = spans[0] + assert span["metadata"]["provider"] == "mistral" + assert span["metadata"]["model"] == FIM_MODEL + assert span["metadata"]["stream"] == True + assert span["metrics"]["time_to_first_token"] >= 0 + assert span["output"] + assert_metrics_are_valid(span["metrics"], start, end) + + +@pytest.mark.vcr 
+@pytest.mark.asyncio +async def test_wrap_mistral_fim_complete_async(memory_logger): + assert not memory_logger.pop() + + client = wrap_mistral(_get_client()) + start = time.time() + response = await client.fim.complete_async( + model=FIM_MODEL, + prompt="def subtract(a, b):\n return ", + suffix="\n\nprint(subtract(5, 2))", + max_tokens=16, + ) + end = time.time() + + assert response.choices + assert response.choices[0].message.content + + spans = memory_logger.pop() + assert len(spans) == 1 + span = spans[0] + assert span["metadata"]["provider"] == "mistral" + assert span["metadata"]["model"] == FIM_MODEL + assert span["output"] + assert_metrics_are_valid(span["metrics"], start, end) + + +@pytest.mark.vcr +@pytest.mark.asyncio +async def test_wrap_mistral_fim_stream_async(memory_logger): + assert not memory_logger.pop() + + client = wrap_mistral(_get_client()) + start = time.time() + stream = await client.fim.stream_async( + model=FIM_MODEL, + prompt="def divide(a, b):\n return ", + suffix="\n\nprint(divide(8, 2))", + max_tokens=16, + ) + chunks = [] + async for chunk in stream: + chunks.append(chunk) + end = time.time() + + assert chunks + + spans = memory_logger.pop() + assert len(spans) == 1 + span = spans[0] + assert span["metadata"]["provider"] == "mistral" + assert span["metadata"]["model"] == FIM_MODEL + assert span["metadata"]["stream"] == True + assert span["metrics"]["time_to_first_token"] >= 0 + assert span["output"] + assert_metrics_are_valid(span["metrics"], start, end) + + +@pytest.mark.vcr +def test_mistral_integration_setup_creates_spans(memory_logger, monkeypatch): + assert not memory_logger.pop() + + original_complete = inspect.getattr_static(Chat, "complete") + original_complete_async = inspect.getattr_static(Chat, "complete_async") + original_stream = inspect.getattr_static(Chat, "stream") + original_stream_async = inspect.getattr_static(Chat, "stream_async") + original_embeddings_create = inspect.getattr_static(Embeddings, "create") + 
original_embeddings_create_async = inspect.getattr_static(Embeddings, "create_async") + original_fim_complete = inspect.getattr_static(Fim, "complete") + original_fim_complete_async = inspect.getattr_static(Fim, "complete_async") + original_fim_stream = inspect.getattr_static(Fim, "stream") + original_fim_stream_async = inspect.getattr_static(Fim, "stream_async") + original_agents_complete = inspect.getattr_static(Agents, "complete") + original_agents_complete_async = inspect.getattr_static(Agents, "complete_async") + original_agents_stream = inspect.getattr_static(Agents, "stream") + original_agents_stream_async = inspect.getattr_static(Agents, "stream_async") + + assert MistralIntegration.setup() + client = _get_client() + start = time.time() + response = client.chat.complete( + model=CHAT_MODEL, + messages=[{"role": "user", "content": "What is 2+2? Reply with just the number."}], + max_tokens=10, + ) + end = time.time() + + monkeypatch.setattr(Chat, "complete", original_complete) + monkeypatch.setattr(Chat, "complete_async", original_complete_async) + monkeypatch.setattr(Chat, "stream", original_stream) + monkeypatch.setattr(Chat, "stream_async", original_stream_async) + monkeypatch.setattr(Embeddings, "create", original_embeddings_create) + monkeypatch.setattr(Embeddings, "create_async", original_embeddings_create_async) + monkeypatch.setattr(Fim, "complete", original_fim_complete) + monkeypatch.setattr(Fim, "complete_async", original_fim_complete_async) + monkeypatch.setattr(Fim, "stream", original_fim_stream) + monkeypatch.setattr(Fim, "stream_async", original_fim_stream_async) + monkeypatch.setattr(Agents, "complete", original_agents_complete) + monkeypatch.setattr(Agents, "complete_async", original_agents_complete_async) + monkeypatch.setattr(Agents, "stream", original_agents_stream) + monkeypatch.setattr(Agents, "stream_async", original_agents_stream_async) + + assert "4" in str(response.choices[0].message.content) + + spans = memory_logger.pop() + assert 
len(spans) == 1 + span = spans[0] + assert span["metadata"]["provider"] == "mistral" + assert span["metadata"]["model"] == CHAT_MODEL + assert "4" in str(span["output"]) + assert_metrics_are_valid(span["metrics"], start, end) + + +def test_mistral_integration_setup_is_idempotent(monkeypatch): + first_complete = inspect.getattr_static(Chat, "complete") + first_complete_async = inspect.getattr_static(Chat, "complete_async") + first_stream = inspect.getattr_static(Chat, "stream") + first_stream_async = inspect.getattr_static(Chat, "stream_async") + first_embeddings_create = inspect.getattr_static(Embeddings, "create") + first_embeddings_create_async = inspect.getattr_static(Embeddings, "create_async") + first_fim_complete = inspect.getattr_static(Fim, "complete") + first_fim_complete_async = inspect.getattr_static(Fim, "complete_async") + first_fim_stream = inspect.getattr_static(Fim, "stream") + first_fim_stream_async = inspect.getattr_static(Fim, "stream_async") + first_agents_complete = inspect.getattr_static(Agents, "complete") + first_agents_complete_async = inspect.getattr_static(Agents, "complete_async") + first_agents_stream = inspect.getattr_static(Agents, "stream") + first_agents_stream_async = inspect.getattr_static(Agents, "stream_async") + + assert MistralIntegration.setup() + patched_complete = inspect.getattr_static(Chat, "complete") + patched_complete_async = inspect.getattr_static(Chat, "complete_async") + patched_stream = inspect.getattr_static(Chat, "stream") + patched_stream_async = inspect.getattr_static(Chat, "stream_async") + patched_embeddings_create = inspect.getattr_static(Embeddings, "create") + patched_embeddings_create_async = inspect.getattr_static(Embeddings, "create_async") + patched_fim_complete = inspect.getattr_static(Fim, "complete") + patched_fim_complete_async = inspect.getattr_static(Fim, "complete_async") + patched_fim_stream = inspect.getattr_static(Fim, "stream") + patched_fim_stream_async = inspect.getattr_static(Fim, 
"stream_async") + patched_agents_complete = inspect.getattr_static(Agents, "complete") + patched_agents_complete_async = inspect.getattr_static(Agents, "complete_async") + patched_agents_stream = inspect.getattr_static(Agents, "stream") + patched_agents_stream_async = inspect.getattr_static(Agents, "stream_async") + + assert MistralIntegration.setup() + assert inspect.getattr_static(Chat, "complete") is patched_complete + assert inspect.getattr_static(Chat, "complete_async") is patched_complete_async + assert inspect.getattr_static(Chat, "stream") is patched_stream + assert inspect.getattr_static(Chat, "stream_async") is patched_stream_async + assert inspect.getattr_static(Embeddings, "create") is patched_embeddings_create + assert inspect.getattr_static(Embeddings, "create_async") is patched_embeddings_create_async + assert inspect.getattr_static(Fim, "complete") is patched_fim_complete + assert inspect.getattr_static(Fim, "complete_async") is patched_fim_complete_async + assert inspect.getattr_static(Fim, "stream") is patched_fim_stream + assert inspect.getattr_static(Fim, "stream_async") is patched_fim_stream_async + assert inspect.getattr_static(Agents, "complete") is patched_agents_complete + assert inspect.getattr_static(Agents, "complete_async") is patched_agents_complete_async + assert inspect.getattr_static(Agents, "stream") is patched_agents_stream + assert inspect.getattr_static(Agents, "stream_async") is patched_agents_stream_async + + monkeypatch.setattr(Chat, "complete", first_complete) + monkeypatch.setattr(Chat, "complete_async", first_complete_async) + monkeypatch.setattr(Chat, "stream", first_stream) + monkeypatch.setattr(Chat, "stream_async", first_stream_async) + monkeypatch.setattr(Embeddings, "create", first_embeddings_create) + monkeypatch.setattr(Embeddings, "create_async", first_embeddings_create_async) + monkeypatch.setattr(Fim, "complete", first_fim_complete) + monkeypatch.setattr(Fim, "complete_async", first_fim_complete_async) + 
monkeypatch.setattr(Fim, "stream", first_fim_stream) + monkeypatch.setattr(Fim, "stream_async", first_fim_stream_async) + monkeypatch.setattr(Agents, "complete", first_agents_complete) + monkeypatch.setattr(Agents, "complete_async", first_agents_complete_async) + monkeypatch.setattr(Agents, "stream", first_agents_stream) + monkeypatch.setattr(Agents, "stream_async", first_agents_stream_async) + + +def test_chat_complete_wrapper_logs_errors(memory_logger): + assert not memory_logger.pop() + + def fail(*args, **kwargs): + raise RuntimeError("sync boom") + + with pytest.raises(RuntimeError, match="sync boom"): + _chat_complete_wrapper( + fail, + None, + (), + { + "model": CHAT_MODEL, + "messages": [{"role": "user", "content": "hello"}], + }, + ) + + spans = memory_logger.pop() + assert len(spans) == 1 + span = spans[0] + assert span["input"] == [{"role": "user", "content": "hello"}] + assert span["metadata"]["provider"] == "mistral" + assert span["metadata"]["model"] == CHAT_MODEL + assert "sync boom" in span["error"] + + +@pytest.mark.asyncio +async def test_chat_complete_async_wrapper_logs_errors(memory_logger): + assert not memory_logger.pop() + + async def fail(*args, **kwargs): + raise RuntimeError("async boom") + + with pytest.raises(RuntimeError, match="async boom"): + await _chat_complete_async_wrapper( + fail, + None, + (), + { + "model": CHAT_MODEL, + "messages": [{"role": "user", "content": "hello"}], + }, + ) + + spans = memory_logger.pop() + assert len(spans) == 1 + span = spans[0] + assert span["input"] == [{"role": "user", "content": "hello"}] + assert span["metadata"]["provider"] == "mistral" + assert span["metadata"]["model"] == CHAT_MODEL + assert "async boom" in span["error"] + + +def test_aggregate_completion_events_merges_tool_calls_and_content(): + events = [ + models.CompletionEvent( + data={ + "id": "cmpl_123", + "model": CHAT_MODEL, + "choices": [ + { + "index": 0, + "delta": { + "role": "assistant", + "tool_calls": [ + { + "index": 0, + "id": 
"call_1", + "type": "function", + "function": {"name": "lookup_", "arguments": '{"city":"San'}, + } + ], + }, + "finish_reason": None, + } + ], + } + ), + models.CompletionEvent( + data={ + "id": "cmpl_123", + "model": CHAT_MODEL, + "choices": [ + { + "index": 0, + "delta": { + "tool_calls": [ + { + "index": 0, + "type": "function", + "function": {"name": "weather", "arguments": ' Francisco"}'}, + } + ], + }, + "finish_reason": "tool_calls", + } + ], + "usage": {"prompt_tokens": 10, "completion_tokens": 4, "total_tokens": 14}, + } + ), + ] + + aggregated = _aggregate_completion_events(events) + + assert aggregated["id"] == "cmpl_123" + assert aggregated["model"] == CHAT_MODEL + assert aggregated["usage"]["total_tokens"] == 14 + assert aggregated["choices"][0]["finish_reason"] == "tool_calls" + tool_call = aggregated["choices"][0]["message"]["tool_calls"][0] + assert tool_call["id"] == "call_1" + assert tool_call["function"]["name"] == "lookup_weather" + assert tool_call["function"]["arguments"] == '{"city":"San Francisco"}' + + +class TestAutoInstrumentMistral: + def test_auto_instrument_mistral(self): + verify_autoinstrument_script("test_auto_mistral.py") diff --git a/py/src/braintrust/integrations/mistral/tracing.py b/py/src/braintrust/integrations/mistral/tracing.py new file mode 100644 index 00000000..08d8640f --- /dev/null +++ b/py/src/braintrust/integrations/mistral/tracing.py @@ -0,0 +1,819 @@ +"""Mistral-specific tracing helpers.""" + +import base64 +import binascii +import logging +import re +import time +from collections.abc import AsyncIterator, Iterator +from numbers import Real +from typing import Any + +from braintrust.bt_json import bt_safe_deep_copy +from braintrust.logger import Attachment, start_span +from braintrust.span_types import SpanTypeAttribute + + +logger = logging.getLogger(__name__) + +_DATA_URL_RE = re.compile(r"^data:([^;]+);base64,(.+)$") +_BASE64_RE = re.compile(r"^[A-Za-z0-9+/]+={0,2}$") +_TOKEN_NAME_MAP = { + "total_tokens": 
"tokens", +} +_CHAT_METADATA_KEYS = ( + "model", + "temperature", + "top_p", + "max_tokens", + "stop", + "random_seed", + "response_format", + "tools", + "tool_choice", + "presence_penalty", + "frequency_penalty", + "n", + "prediction", + "parallel_tool_calls", + "reasoning_effort", + "prompt_mode", + "guardrails", + "safe_prompt", +) +_AGENTS_METADATA_KEYS = ( + "agent_id", + "max_tokens", + "stop", + "random_seed", + "response_format", + "tools", + "tool_choice", + "presence_penalty", + "frequency_penalty", + "n", + "prediction", + "parallel_tool_calls", + "prompt_mode", +) +_EMBEDDINGS_METADATA_KEYS = ( + "model", + "output_dimension", + "output_dtype", + "encoding_format", +) +_FIM_METADATA_KEYS = ( + "model", + "temperature", + "top_p", + "max_tokens", + "stop", + "random_seed", + "min_tokens", +) + + +def _camel_to_snake(value: str) -> str: + out = [] + for char in value: + if char.isupper(): + out.append("_") + out.append(char.lower()) + else: + out.append(char) + return "".join(out).lstrip("_") + + +def _is_unset(value: Any) -> bool: + return value.__class__.__name__ == "Unset" + + +def _is_supported_metric_value(value: Any) -> bool: + return isinstance(value, Real) and not isinstance(value, bool) + + +def _convert_data_url_to_attachment(data_url: str, filename: str | None = None) -> Attachment | str: + match = _DATA_URL_RE.match(data_url) + if not match: + return data_url + + mime_type, base64_data = match.groups() + try: + binary_data = base64.b64decode(base64_data, validate=True) + except (binascii.Error, ValueError): + return data_url + + if filename is None: + extension = mime_type.split("/")[1] if "/" in mime_type else "bin" + prefix = "image" if mime_type.startswith("image/") else "file" + filename = f"{prefix}.{extension}" + + return Attachment(data=binary_data, filename=filename, content_type=mime_type) + + +def _convert_input_audio_to_attachment(value: str) -> Attachment | str: + normalized = value.strip().replace("\n", "") + if len(normalized) < 
64 or len(normalized) % 4 != 0 or not _BASE64_RE.fullmatch(normalized): + return value + + try: + binary_data = base64.b64decode(normalized, validate=True) + except (binascii.Error, ValueError): + return value + + return Attachment(data=binary_data, filename="input_audio.bin", content_type="application/octet-stream") + + +def _normalize_special_payloads(value: Any) -> Any: + if not isinstance(value, dict): + return value + + item_type = value.get("type") + if item_type == "image_url": + image_url = value.get("image_url") + if isinstance(image_url, str): + return { + **value, + "image_url": _convert_data_url_to_attachment(image_url), + } + if isinstance(image_url, dict) and isinstance(image_url.get("url"), str): + return { + **value, + "image_url": { + **image_url, + "url": _convert_data_url_to_attachment(image_url["url"]), + }, + } + + if item_type == "input_audio" and isinstance(value.get("input_audio"), str): + return { + **value, + "input_audio": _convert_input_audio_to_attachment(value["input_audio"]), + } + + return value + + +def sanitize_mistral_logged_value(value: Any) -> Any: + if _is_unset(value): + return None + + if hasattr(value, "model_dump"): + try: + value = value.model_dump(mode="json", by_alias=True) + except TypeError: + value = value.model_dump() + + safe = bt_safe_deep_copy(value) + safe = _normalize_special_payloads(safe) + + if callable(safe): + return "[Function]" + if isinstance(safe, list): + return [sanitize_mistral_logged_value(item) for item in safe] + if isinstance(safe, tuple): + return [sanitize_mistral_logged_value(item) for item in safe] + if isinstance(safe, dict): + sanitized = {} + for key, entry in safe.items(): + if _is_unset(entry): + continue + sanitized[key] = sanitize_mistral_logged_value(entry) + return sanitized + return safe + + +def _build_request_metadata( + kwargs: dict[str, Any], keys: tuple[str, ...], *, stream: bool | None = None +) -> dict[str, Any]: + metadata = {"provider": "mistral"} + + for key in keys: + 
value = kwargs.get(key) + if value is None or _is_unset(value): + continue + metadata[key] = sanitize_mistral_logged_value(value) + + request_metadata = kwargs.get("metadata") + if request_metadata is not None and not _is_unset(request_metadata): + metadata["request_metadata"] = sanitize_mistral_logged_value(request_metadata) + + if stream is not None: + metadata["stream"] = stream + + return metadata + + +def _build_chat_metadata(kwargs: dict[str, Any], *, stream: bool | None = None) -> dict[str, Any]: + return _build_request_metadata(kwargs, _CHAT_METADATA_KEYS, stream=stream) + + +def _build_agents_metadata(kwargs: dict[str, Any], *, stream: bool | None = None) -> dict[str, Any]: + return _build_request_metadata(kwargs, _AGENTS_METADATA_KEYS, stream=stream) + + +def _build_embeddings_metadata(kwargs: dict[str, Any]) -> dict[str, Any]: + return _build_request_metadata(kwargs, _EMBEDDINGS_METADATA_KEYS) + + +def _build_fim_metadata(kwargs: dict[str, Any], *, stream: bool | None = None) -> dict[str, Any]: + return _build_request_metadata(kwargs, _FIM_METADATA_KEYS, stream=stream) + + +def _fim_input(kwargs: dict[str, Any]) -> dict[str, Any]: + span_input = {"prompt": kwargs.get("prompt")} + suffix = kwargs.get("suffix") + if suffix is not None and not _is_unset(suffix): + span_input["suffix"] = suffix + return span_input + + +def _start_span(name: str, span_input: Any, metadata: dict[str, Any]): + return start_span( + name=name, + type=SpanTypeAttribute.LLM, + input=sanitize_mistral_logged_value(span_input), + metadata=metadata, + ) + + +def _timing_metrics(start_time: float, first_token_time: float | None = None) -> dict[str, float]: + end_time = time.time() + metrics = { + "start": start_time, + "end": end_time, + "duration": end_time - start_time, + } + if first_token_time is not None: + metrics["time_to_first_token"] = first_token_time - start_time + return metrics + + +def _parse_usage_metrics(usage: Any) -> dict[str, float]: + usage_data = 
sanitize_mistral_logged_value(usage) + if not isinstance(usage_data, dict): + return {} + + metrics = {} + for key, value in usage_data.items(): + if not _is_supported_metric_value(value): + continue + metrics[_TOKEN_NAME_MAP.get(key, _camel_to_snake(key))] = float(value) + + if "tokens" not in metrics and "prompt_tokens" in metrics and "completion_tokens" in metrics: + metrics["tokens"] = metrics["prompt_tokens"] + metrics["completion_tokens"] + + return metrics + + +def _merge_metrics(start_time: float, usage: Any, first_token_time: float | None = None) -> dict[str, float]: + return { + **_timing_metrics(start_time, first_token_time), + **_parse_usage_metrics(usage), + } + + +def _response_to_metadata(response: Any) -> dict[str, Any]: + data = sanitize_mistral_logged_value(response) + if not isinstance(data, dict): + return {} + + metadata = {} + for key in ("id", "model", "object", "created"): + value = data.get(key) + if value is not None: + metadata[key] = value + return metadata + + +def _completion_response_to_output(response: Any) -> Any: + data = sanitize_mistral_logged_value(response) + if isinstance(data, dict): + return data.get("choices") + return None + + +def _embeddings_output(response: Any) -> dict[str, Any]: + items = getattr(response, "data", None) or [] + first = items[0] if items else None + embedding = getattr(first, "embedding", None) if first is not None else None + + output = { + "embeddings_count": len(items), + "embedding_length": len(embedding) if isinstance(embedding, list) else None, + } + if first is not None and getattr(first, "index", None) is not None: + output["first_index"] = first.index + return output + + +def _log_and_end( + span: Any, + *, + output: Any = None, + metrics: dict[str, Any] | None = None, + metadata: dict[str, Any] | None = None, +): + event = {} + if output is not None: + event["output"] = output + if metrics: + event["metrics"] = metrics + if metadata: + event["metadata"] = metadata + if event: + 
span.log(**event) + span.end() + + +def _log_error_and_end(span: Any, error: Exception): + span.log(error=error) + span.end() + + +def _call_with_error_logging(span: Any, wrapped: Any, args: tuple[Any, ...], kwargs: dict[str, Any]) -> Any: + try: + return wrapped(*args, **kwargs) + except Exception as error: + _log_error_and_end(span, error) + raise + + +async def _call_async_with_error_logging( + span: Any, wrapped: Any, args: tuple[Any, ...], kwargs: dict[str, Any] +) -> Any: + try: + return await wrapped(*args, **kwargs) + except Exception as error: + _log_error_and_end(span, error) + raise + + +def _append_delta_content(message: dict[str, Any], delta_content: Any) -> None: + if delta_content is None: + return + + content = sanitize_mistral_logged_value(delta_content) + existing = message.get("content") + + if isinstance(content, str): + if isinstance(existing, str): + message["content"] = existing + content + elif isinstance(existing, list): + existing.append({"type": "text", "text": content}) + elif existing is None: + message["content"] = content + else: + message["content"] = sanitize_mistral_logged_value(existing) + return + + if isinstance(content, list): + if isinstance(existing, list): + existing.extend(content) + elif isinstance(existing, str) and existing: + message["content"] = [{"type": "text", "text": existing}, *content] + else: + message["content"] = content + + +def _merge_tool_calls(message: dict[str, Any], tool_calls: Any) -> None: + if not isinstance(tool_calls, list): + return + + accumulated = message.setdefault("tool_calls", []) + for tool_call in tool_calls: + call = sanitize_mistral_logged_value(tool_call) + if not isinstance(call, dict): + continue + + index = call.get("index") + if not isinstance(index, int) or index < 0: + index = len(accumulated) + + while len(accumulated) <= index: + accumulated.append({"id": None, "type": None, "function": {"name": "", "arguments": ""}}) + + target = accumulated[index] + if call.get("id") not in 
(None, "null"): + target["id"] = call["id"] + if call.get("type") is not None: + target["type"] = call["type"] + + function = call.get("function") + if not isinstance(function, dict): + continue + + target_function = target.setdefault("function", {"name": "", "arguments": ""}) + name = function.get("name") + if isinstance(name, str) and name: + target_function["name"] = f"{target_function.get('name', '')}{name}" + + arguments = function.get("arguments") + if isinstance(arguments, str) and arguments: + existing_arguments = target_function.get("arguments", "") + if isinstance(existing_arguments, str): + target_function["arguments"] = f"{existing_arguments}{arguments}" + else: + target_function["arguments"] = arguments + elif isinstance(arguments, dict): + target_function["arguments"] = { + **(target_function.get("arguments") if isinstance(target_function.get("arguments"), dict) else {}), + **arguments, + } + + +def _chunk_has_output(item: Any) -> bool: + data = getattr(item, "data", item) + choices = getattr(data, "choices", None) or [] + for choice in choices: + delta = getattr(choice, "delta", None) + if delta is None: + continue + content = getattr(delta, "content", None) + tool_calls = getattr(delta, "tool_calls", None) + if isinstance(content, str) and content: + return True + if isinstance(content, list) and content: + return True + if isinstance(tool_calls, list) and tool_calls: + return True + return False + + +def _aggregate_completion_events(items: list[Any]) -> dict[str, Any]: + response_id = None + model = None + object_type = None + created = None + usage = None + choices: dict[int, dict[str, Any]] = {} + + for item in items: + data = getattr(item, "data", item) + response_id = response_id or getattr(data, "id", None) + model = model or getattr(data, "model", None) + object_type = object_type or getattr(data, "object", None) + created = created or getattr(data, "created", None) + usage = getattr(data, "usage", None) or usage + + for choice in 
getattr(data, "choices", None) or []: + index = getattr(choice, "index", 0) + if not isinstance(index, int): + index = 0 + accumulated = choices.setdefault( + index, + { + "index": index, + "message": {"role": "assistant", "content": ""}, + "finish_reason": None, + }, + ) + message = accumulated["message"] + delta = getattr(choice, "delta", None) + if delta is None: + continue + + role = getattr(delta, "role", None) + if isinstance(role, str) and role: + message["role"] = role + + _append_delta_content(message, getattr(delta, "content", None)) + _merge_tool_calls(message, getattr(delta, "tool_calls", None)) + + finish_reason = getattr(choice, "finish_reason", None) + if isinstance(finish_reason, str) and finish_reason: + accumulated["finish_reason"] = finish_reason + + result: dict[str, Any] = { + "choices": [choices[idx] for idx in sorted(choices)], + } + if response_id is not None: + result["id"] = response_id + if model is not None: + result["model"] = model + if object_type is not None: + result["object"] = object_type + if created is not None: + result["created"] = created + if usage is not None: + result["usage"] = sanitize_mistral_logged_value(usage) + return result + + +def _finalize_completion_response(span: Any, request_metadata: dict[str, Any], response: Any, start_time: float): + response_metadata = _response_to_metadata(response) + _log_and_end( + span, + output=_completion_response_to_output(response), + metrics=_merge_metrics(start_time, getattr(response, "usage", None)), + metadata={**request_metadata, **response_metadata}, + ) + + +def _finalize_embeddings_response(span: Any, request_metadata: dict[str, Any], response: Any, start_time: float): + response_metadata = _response_to_metadata(response) + _log_and_end( + span, + output=_embeddings_output(response), + metrics=_merge_metrics(start_time, getattr(response, "usage", None)), + metadata={**request_metadata, **response_metadata}, + ) + + +class _TracedMistralSyncStream: + def __init__(self, 
stream: Any, span: Any, metadata: dict[str, Any], start_time: float): + self._stream = stream + self._span = span + self._metadata = metadata + self._start_time = start_time + self._first_token_time = None + self._items = [] + self._closed = False + + def __getattr__(self, name: str) -> Any: + return getattr(self._stream, name) + + def __iter__(self) -> Iterator[Any]: + return self + + def __next__(self) -> Any: + try: + item = next(self._stream) + except StopIteration: + self._finalize() + raise + except Exception as error: + self._finalize(error=error) + raise + + if self._first_token_time is None and _chunk_has_output(item): + self._first_token_time = time.time() + self._items.append(item) + return item + + def __enter__(self): + if hasattr(self._stream, "__enter__"): + self._stream.__enter__() + return self + + def __exit__(self, exc_type, exc_value, traceback): + try: + if hasattr(self._stream, "__exit__"): + return self._stream.__exit__(exc_type, exc_value, traceback) + return False + finally: + self._finalize(error=exc_value) + + def _finalize(self, *, error: Exception | None = None): + if self._closed: + return + self._closed = True + + if error is not None: + _log_error_and_end(self._span, error) + return + + response = _aggregate_completion_events(self._items) + _log_and_end( + self._span, + output=response.get("choices"), + metrics=_merge_metrics(self._start_time, response.get("usage"), self._first_token_time), + metadata={**self._metadata, **_response_to_metadata(response)}, + ) + + +class _TracedMistralAsyncStream: + def __init__(self, stream: Any, span: Any, metadata: dict[str, Any], start_time: float): + self._stream = stream + self._span = span + self._metadata = metadata + self._start_time = start_time + self._first_token_time = None + self._items = [] + self._closed = False + + def __getattr__(self, name: str) -> Any: + return getattr(self._stream, name) + + def __aiter__(self) -> AsyncIterator[Any]: + return self + + async def __anext__(self) -> 
# NOTE(review): reconstructed from diff-mangled source lines; verify against
# the original patch before merging. All wrappers use the patcher-style
# (wrapped, instance, args, kwargs) calling convention.

def _chat_complete_wrapper(wrapped, instance, args, kwargs):
    """Trace Chat.complete; returns a traced stream proxy when stream=True was passed."""
    request_metadata = _build_chat_metadata(kwargs, stream=bool(kwargs.get("stream")))
    span = _start_span("mistral.chat.complete", kwargs.get("messages"), request_metadata)
    start_time = time.time()
    result = _call_with_error_logging(span, wrapped, args, kwargs)

    if kwargs.get("stream"):
        return _TracedMistralSyncStream(result, span, request_metadata, start_time)

    _finalize_completion_response(span, request_metadata, result, start_time)
    return result


async def _chat_complete_async_wrapper(wrapped, instance, args, kwargs):
    """Trace Chat.complete_async; returns a traced async stream proxy when stream=True."""
    request_metadata = _build_chat_metadata(kwargs, stream=bool(kwargs.get("stream")))
    span = _start_span("mistral.chat.complete", kwargs.get("messages"), request_metadata)
    start_time = time.time()
    result = await _call_async_with_error_logging(span, wrapped, args, kwargs)

    if kwargs.get("stream"):
        return _TracedMistralAsyncStream(result, span, request_metadata, start_time)

    _finalize_completion_response(span, request_metadata, result, start_time)
    return result


def _chat_stream_wrapper(wrapped, instance, args, kwargs):
    """Trace Chat.stream (always streaming)."""
    request_metadata = _build_chat_metadata(kwargs, stream=True)
    span = _start_span("mistral.chat.stream", kwargs.get("messages"), request_metadata)
    start_time = time.time()
    result = _call_with_error_logging(span, wrapped, args, kwargs)

    return _TracedMistralSyncStream(result, span, request_metadata, start_time)


async def _chat_stream_async_wrapper(wrapped, instance, args, kwargs):
    """Trace Chat.stream_async (always streaming)."""
    request_metadata = _build_chat_metadata(kwargs, stream=True)
    span = _start_span("mistral.chat.stream", kwargs.get("messages"), request_metadata)
    start_time = time.time()
    result = await _call_async_with_error_logging(span, wrapped, args, kwargs)

    return _TracedMistralAsyncStream(result, span, request_metadata, start_time)


def _agents_complete_wrapper(wrapped, instance, args, kwargs):
    """Trace Agents.complete; mirrors the chat wrapper but with agent metadata keys."""
    request_metadata = _build_agents_metadata(kwargs, stream=bool(kwargs.get("stream")))
    span = _start_span("mistral.agents.complete", kwargs.get("messages"), request_metadata)
    start_time = time.time()
    result = _call_with_error_logging(span, wrapped, args, kwargs)

    if kwargs.get("stream"):
        return _TracedMistralSyncStream(result, span, request_metadata, start_time)

    _finalize_completion_response(span, request_metadata, result, start_time)
    return result


async def _agents_complete_async_wrapper(wrapped, instance, args, kwargs):
    """Trace Agents.complete_async."""
    request_metadata = _build_agents_metadata(kwargs, stream=bool(kwargs.get("stream")))
    span = _start_span("mistral.agents.complete", kwargs.get("messages"), request_metadata)
    start_time = time.time()
    result = await _call_async_with_error_logging(span, wrapped, args, kwargs)

    if kwargs.get("stream"):
        return _TracedMistralAsyncStream(result, span, request_metadata, start_time)

    _finalize_completion_response(span, request_metadata, result, start_time)
    return result


def _agents_stream_wrapper(wrapped, instance, args, kwargs):
    """Trace Agents.stream (always streaming)."""
    request_metadata = _build_agents_metadata(kwargs, stream=True)
    span = _start_span("mistral.agents.stream", kwargs.get("messages"), request_metadata)
    start_time = time.time()
    result = _call_with_error_logging(span, wrapped, args, kwargs)

    return _TracedMistralSyncStream(result, span, request_metadata, start_time)


async def _agents_stream_async_wrapper(wrapped, instance, args, kwargs):
    """Trace Agents.stream_async (always streaming)."""
    request_metadata = _build_agents_metadata(kwargs, stream=True)
    span = _start_span("mistral.agents.stream", kwargs.get("messages"), request_metadata)
    start_time = time.time()
    result = await _call_async_with_error_logging(span, wrapped, args, kwargs)

    return _TracedMistralAsyncStream(result, span, request_metadata, start_time)


def _embeddings_create_wrapper(wrapped, instance, args, kwargs):
    """Trace Embeddings.create; logs a size summary rather than raw vectors."""
    request_metadata = _build_embeddings_metadata(kwargs)
    span = _start_span("mistral.embeddings.create", kwargs.get("inputs"), request_metadata)
    start_time = time.time()
    result = _call_with_error_logging(span, wrapped, args, kwargs)

    _finalize_embeddings_response(span, request_metadata, result, start_time)
    return result


async def _embeddings_create_async_wrapper(wrapped, instance, args, kwargs):
    """Trace Embeddings.create_async."""
    request_metadata = _build_embeddings_metadata(kwargs)
    span = _start_span("mistral.embeddings.create", kwargs.get("inputs"), request_metadata)
    start_time = time.time()
    result = await _call_async_with_error_logging(span, wrapped, args, kwargs)

    _finalize_embeddings_response(span, request_metadata, result, start_time)
    return result
_fim_input(kwargs), request_metadata) + start_time = time.time() + result = _call_with_error_logging(span, wrapped, args, kwargs) + + if kwargs.get("stream"): + return _TracedMistralSyncStream(result, span, request_metadata, start_time) + + _finalize_completion_response(span, request_metadata, result, start_time) + return result + + +async def _fim_complete_async_wrapper(wrapped, instance, args, kwargs): + request_metadata = _build_fim_metadata(kwargs, stream=bool(kwargs.get("stream"))) + span = _start_span("mistral.fim.complete", _fim_input(kwargs), request_metadata) + start_time = time.time() + result = await _call_async_with_error_logging(span, wrapped, args, kwargs) + + if kwargs.get("stream"): + return _TracedMistralAsyncStream(result, span, request_metadata, start_time) + + _finalize_completion_response(span, request_metadata, result, start_time) + return result + + +def _fim_stream_wrapper(wrapped, instance, args, kwargs): + request_metadata = _build_fim_metadata(kwargs, stream=True) + span = _start_span("mistral.fim.stream", _fim_input(kwargs), request_metadata) + start_time = time.time() + result = _call_with_error_logging(span, wrapped, args, kwargs) + + return _TracedMistralSyncStream(result, span, request_metadata, start_time) + + +async def _fim_stream_async_wrapper(wrapped, instance, args, kwargs): + request_metadata = _build_fim_metadata(kwargs, stream=True) + span = _start_span("mistral.fim.stream", _fim_input(kwargs), request_metadata) + start_time = time.time() + result = await _call_async_with_error_logging(span, wrapped, args, kwargs) + + return _TracedMistralAsyncStream(result, span, request_metadata, start_time) + + +def wrap_mistral(client: Any) -> Any: + """Wrap a single Mistral client instance for tracing.""" + from .patchers import AgentsPatcher, ChatPatcher, EmbeddingsPatcher, FimPatcher + + chat = getattr(client, "chat", None) + if chat is not None: + ChatPatcher.wrap_target(chat) + + embeddings = getattr(client, "embeddings", None) + if 
embeddings is not None: + EmbeddingsPatcher.wrap_target(embeddings) + + fim = getattr(client, "fim", None) + if fim is not None: + FimPatcher.wrap_target(fim) + + agents = getattr(client, "agents", None) + if agents is not None: + AgentsPatcher.wrap_target(agents) + + return client