Skip to content

Commit 1d7d31f

Browse files
committed
added test
1 parent 1431375 commit 1d7d31f

1 file changed

Lines changed: 74 additions & 0 deletions

File tree

tests/test_interface/test_model_integration/test_model_adapters.py

Lines changed: 74 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2390,6 +2390,80 @@ def __init__(self):
23902390
gen_config = call_kwargs.kwargs.get("generation_config", {})
23912391
assert gen_config.get("seed") == 99
23922392

2393+
def test_structured_chat_separates_instructor_top_level_keys(self):
    """thinking_config stays top-level, generation params go into generation_config."""
    pytest.importorskip("google.genai")
    from maseval.interface.inference.google_genai import GoogleGenAIModelAdapter

    class FakeClient:
        # Minimal stand-in for the genai client; only .models.generate_content exists.
        class Models:
            def generate_content(self, model, contents, config=None):
                # Never actually invoked here — the instructor client below is stubbed.
                return type("Response", (), {"text": "ok"})()

        def __init__(self):
            self.models = self.Models()

    model_adapter = GoogleGenAIModelAdapter(client=FakeClient(), model_id="gemini-pro", seed=42)

    # Replace the lazily-created instructor client with a mock so we can
    # inspect exactly which kwargs _structured_chat forwards to it.
    instructor_stub = MagicMock()
    instructor_stub.chat.completions.create.return_value = _make_mock_instructor_result()
    model_adapter._instructor_client = instructor_stub

    model_adapter._structured_chat(
        messages=[{"role": "user", "content": "Hi"}],
        response_model=object,
        generation_params={"temperature": 0.5, "thinking_config": {"thinking_budget": 1024}},
    )

    create_kwargs = instructor_stub.chat.completions.create.call_args.kwargs

    # thinking_config must be top-level (instructor pops it from kwargs directly)
    assert create_kwargs.get("thinking_config") == {"thinking_budget": 1024}

    # generation params must be nested inside generation_config,
    # and the adapter-level seed must be merged in alongside them
    nested_cfg = create_kwargs.get("generation_config", {})
    assert nested_cfg.get("temperature") == 0.5
    assert nested_cfg.get("seed") == 42

    # thinking_config must NOT leak into generation_config
    assert "thinking_config" not in nested_cfg
def test_structured_chat_uses_structured_outputs_mode(self):
    """Instructor client is created with GENAI_STRUCTURED_OUTPUTS mode."""
    pytest.importorskip("google.genai")
    pytest.importorskip("instructor")
    import instructor
    from maseval.interface.inference.google_genai import GoogleGenAIModelAdapter

    class FakeClient:
        # Minimal stand-in for the genai client; only .models.generate_content exists.
        class Models:
            def generate_content(self, model, contents, config=None):
                # Never actually invoked — instructor.from_genai is patched below.
                return type("Response", (), {"text": "ok"})()

        def __init__(self):
            self.models = self.Models()

    model_adapter = GoogleGenAIModelAdapter(client=FakeClient(), model_id="gemini-pro")
    # The instructor client is created lazily, so it must start out unset.
    assert model_adapter._instructor_client is None

    with patch("instructor.from_genai") as from_genai_mock:
        instructor_stub = MagicMock()
        instructor_stub.chat.completions.create.return_value = _make_mock_instructor_result()
        from_genai_mock.return_value = instructor_stub

        model_adapter._structured_chat(
            messages=[{"role": "user", "content": "Hi"}],
            response_model=object,
        )

        # from_genai must be called exactly once, with the structured-outputs mode.
        from_genai_mock.assert_called_once()
        factory_call = from_genai_mock.call_args
        assert factory_call.kwargs.get("mode") == instructor.Mode.GENAI_STRUCTURED_OUTPUTS
23932467

23942468
@pytest.mark.interface
23952469
class TestLiteLLMStructuredChat:

0 commit comments

Comments (0)