Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 19 additions & 1 deletion crates/coverage-report/src/requests_expected_differences.json
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,9 @@
{ "pattern": "params.top_p", "reason": "Google uses f32 for top_p, causing precision loss (0.9 → 0.8999999761581421)" },
{ "pattern": "messages[*].content[*].media_type", "reason": "Google requires mime_type for images; null defaults to image/jpeg" },
{ "pattern": "messages[*].content[*].provider_options", "reason": "Google doesn't support provider_options on content parts" },
{ "pattern": "messages.length", "reason": "Google moves system messages to systemInstruction and merges consecutive same-role messages" }
{ "pattern": "messages.length", "reason": "Google moves system messages to systemInstruction and merges consecutive same-role messages" },
{ "pattern": "messages[*].content[*].tool_name", "reason": "Google requires functionResponse.name; tool names are populated from preceding tool calls when not present in source format (ChatCompletions and Anthropic tool results don't carry tool names)" },
{ "pattern": "messages[*].content[*].tool_call_id", "reason": "Google doesn't preserve tool call IDs; synthetic positional IDs are generated on re-parse" }
],
"errors": [
{ "pattern": "is not supported by Google", "reason": "Provider-specific built-in tool has no Google equivalent" }
Expand Down Expand Up @@ -422,6 +424,22 @@
"fields": [
{ "pattern": "params.reasoning", "reason": "Anthropic rejects enabled thinking when tool_choice forces a specific tool; thinking is intentionally dropped" }
]
},
{
"testCase": "parallelToolCallsRequest",
"source": "*",
"target": "ChatCompletions",
"fields": [
{ "pattern": "messages.length", "reason": "Parallel tool results grouped in one Tool message expand to separate role:tool messages in ChatCompletions" }
]
},
{
"testCase": "parallelToolCallsRequest",
"source": "*",
"target": "Responses",
"fields": [
{ "pattern": "messages.length", "reason": "Parallel tool results grouped in one Tool message expand to separate function_call_output items in Responses API" }
]
}
]
}
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,13 @@
{ "pattern": "params.service_tier", "reason": "OpenAI-specific billing tier not universal" }
]
},
{
"source": "*",
"target": "Google",
"fields": [
{ "pattern": "messages[*].content[*].tool_call_id", "reason": "Google doesn't preserve tool call IDs; synthetic positional IDs are generated on re-parse" }
]
},
{
"source": "Responses",
"target": "Google",
Expand Down
45 changes: 45 additions & 0 deletions crates/lingua/src/providers/google/adapter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ use crate::universal::convert::TryFromLLM;
use crate::universal::message::{AssistantContent, AssistantContentPart, Message};
use crate::universal::request::ToolChoiceConfig;
use crate::universal::tools::UniversalTool;
use crate::universal::ToolContentPart;
use crate::universal::{
extract_system_messages, flatten_consecutive_messages, FinishReason, TokenBudget,
UniversalParams, UniversalRequest, UniversalResponse, UniversalStreamChoice,
Expand Down Expand Up @@ -181,6 +182,10 @@ impl ProviderAdapter for GoogleAdapter {
// Flatten consecutive messages of the same role (Google doesn't allow them)
flatten_consecutive_messages(&mut messages);

// Fill in tool names from preceding tool_calls — Google requires functionResponse.name
// but some formats (e.g. OpenAI chat-completions role:tool) don't carry the function name
fill_tool_names_from_context(&mut messages);

// Convert messages to Google contents
let google_contents: Vec<GoogleContent> =
<Vec<GoogleContent> as TryFromLLM<Vec<Message>>>::try_from(messages)
Expand Down Expand Up @@ -663,6 +668,46 @@ impl ProviderAdapter for GoogleAdapter {
}
}

/// Backfill missing tool names on `Message::Tool` results from the tool calls
/// that precede them.
///
/// Google requires `functionResponse.name`, but formats like OpenAI
/// chat-completions carry only a `tool_call_id` on tool result messages — the
/// function name lives on the earlier assistant message's tool call. This
/// scans all assistant tool calls into an id → name map, then fills any tool
/// result whose name is empty.
///
/// NOTE(review): if synthetic positional IDs (e.g. "call_0") repeat across
/// multiple assistant turns, later turns overwrite earlier map entries —
/// presumably acceptable for the single-turn re-parse path; confirm.
fn fill_tool_names_from_context(messages: &mut [Message]) {
    // Pass 1: collect every non-empty tool name keyed by its call id.
    let id_to_name: std::collections::HashMap<String, String> = messages
        .iter()
        .filter_map(|msg| match msg {
            Message::Assistant {
                content: AssistantContent::Array(parts),
                ..
            } => Some(parts.iter()),
            _ => None,
        })
        .flatten()
        .filter_map(|part| match part {
            AssistantContentPart::ToolCall {
                tool_call_id,
                tool_name,
                ..
            } if !tool_name.is_empty() => Some((tool_call_id.clone(), tool_name.clone())),
            _ => None,
        })
        .collect();

    // Pass 2: patch tool results that arrived without a name.
    for msg in messages.iter_mut() {
        let Message::Tool { content } = msg else {
            continue;
        };
        for part in content.iter_mut() {
            // ToolContentPart currently has a single variant, so this
            // destructuring is irrefutable.
            let ToolContentPart::ToolResult(result) = part;
            if result.tool_name.is_empty() {
                if let Some(name) = id_to_name.get(&result.tool_call_id) {
                    result.tool_name.clone_from(name);
                }
            }
        }
    }
}

#[cfg(test)]
mod tests {
use super::*;
Expand Down
61 changes: 56 additions & 5 deletions crates/lingua/src/providers/google/convert.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,9 @@ use crate::universal::response::{FinishReason, UniversalUsage};
use crate::universal::tools::{BuiltinToolProvider, UniversalTool, UniversalToolType};
use crate::util::media::parse_base64_data_url;

/// Prefix for synthetic tool call IDs generated when Google omits them.
const SYNTHETIC_CALL_ID_PREFIX: &str = "call_";

// ============================================================================
// Google Content -> Universal Message
// ============================================================================
Expand Down Expand Up @@ -155,8 +158,18 @@ impl TryFromLLM<GoogleContent> for Message {
reason: format!("Failed to serialize function call args: {e}"),
}
})?;
// Google omits `id` on functionCall parts; generate a
// positional synthetic ID so other providers get a non-empty
// call_id to correlate calls with results.
let call_index = assistant_parts
.iter()
.filter(|p| matches!(p, AssistantContentPart::ToolCall { .. }))
.count();
let tool_call_id = fc.id.clone().unwrap_or_else(|| {
format!("{SYNTHETIC_CALL_ID_PREFIX}{call_index}")
});
assistant_parts.push(AssistantContentPart::ToolCall {
tool_call_id: fc.id.clone().unwrap_or_default(),
tool_call_id,
tool_name: tool_name.clone(),
arguments: ToolCallArguments::from(args_string),
encrypted_content,
Expand Down Expand Up @@ -219,8 +232,13 @@ impl TryFromLLM<GoogleContent> for Message {
Some(map) => Value::Object(map.clone()),
None => Value::Null,
};
// Mirror the synthetic ID logic used for functionCall parts.
let response_index = tool_parts.len();
let tool_call_id = fr.id.clone().unwrap_or_else(|| {
format!("{SYNTHETIC_CALL_ID_PREFIX}{response_index}")
});
tool_parts.push(ToolContentPart::ToolResult(ToolResultContentPart {
tool_call_id: fr.id.clone().unwrap_or_default(),
tool_call_id,
tool_name: tool_name.clone(),
output,
provider_options: None,
Expand Down Expand Up @@ -365,7 +383,10 @@ impl TryFromLLM<Message> for GoogleContent {

converted.push(GooglePart {
function_call: Some(GoogleFunctionCall {
id: Some(tool_call_id).filter(|s| !s.is_empty()),
id: Some(tool_call_id).filter(|s| {
!s.is_empty()
&& !s.starts_with(SYNTHETIC_CALL_ID_PREFIX)
}),
name: Some(tool_name),
args,
}),
Expand Down Expand Up @@ -416,7 +437,9 @@ impl TryFromLLM<Message> for GoogleContent {

Ok(GooglePart {
function_response: Some(GoogleFunctionResponse {
id: Some(result.tool_call_id).filter(|s| !s.is_empty()),
id: Some(result.tool_call_id).filter(|s| {
!s.is_empty() && !s.starts_with(SYNTHETIC_CALL_ID_PREFIX)
}),
name: Some(result.tool_name),
response,
..Default::default()
Expand Down Expand Up @@ -526,6 +549,7 @@ impl From<&FunctionDeclaration> for UniversalTool {
.as_ref()
.as_ref()
.and_then(|schema| serde_json::to_value(schema).ok())
.map(normalize_google_schema_types)
});

UniversalTool::function(
Expand All @@ -537,6 +561,33 @@ impl From<&FunctionDeclaration> for UniversalTool {
}
}

/// Recursively normalize a JSON schema serialized from Google's typed `Schema`.
///
/// Two transformations are applied at every level of the tree:
/// - `"type"` string values are lowercased: Google serializes its `Type` enum
///   as SCREAMING_SNAKE_CASE (`"OBJECT"`, `"STRING"`, …), while the
///   universal/JSON-Schema convention (and what Anthropic expects) is lowercase.
/// - Null-valued object members are dropped (artifacts of serializing the
///   typed struct's unset optional fields).
///
/// `parameters_json_schema` (when present) is already in standard form; this
/// is only needed for schemas produced from the typed `Schema` struct.
fn normalize_google_schema_types(value: Value) -> Value {
    match value {
        Value::Object(map) => Value::Object(
            map.into_iter()
                // Drop nulls left behind by unset optional struct fields.
                .filter(|(_, v)| !v.is_null())
                .map(|(key, val)| {
                    let val = match (key.as_str(), val) {
                        // Lowercase the schema type token itself.
                        ("type", Value::String(t)) => Value::String(t.to_lowercase()),
                        // Everything else (including nested schemas) recurses.
                        (_, other) => normalize_google_schema_types(other),
                    };
                    (key, val)
                })
                .collect(),
        ),
        Value::Array(items) => Value::Array(
            items
                .into_iter()
                .map(normalize_google_schema_types)
                .collect(),
        ),
        // Scalars pass through untouched.
        other => other,
    }
}

impl TryFrom<&UniversalTool> for FunctionDeclaration {
type Error = ConvertError;

Expand Down Expand Up @@ -996,7 +1047,7 @@ mod tests {
..
} => {
assert_eq!(tool_name, "get_weather");
assert_eq!(tool_call_id, "");
assert_eq!(tool_call_id, "call_0");
}
_ => panic!("Expected tool call part"),
}
Expand Down
8 changes: 3 additions & 5 deletions crates/lingua/src/providers/openai/adapter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,8 @@ use crate::processing::adapters::{
use crate::processing::transform::TransformError;
use crate::providers::openai::capabilities::{apply_model_transforms, model_needs_transforms};
use crate::providers::openai::convert::{
ChatCompletionRequestMessageExt, ChatCompletionResponseMessageExt,
messages_to_chat_completion_messages, ChatCompletionRequestMessageExt,
ChatCompletionResponseMessageExt,
};
use crate::providers::openai::params::{OpenAIChatExtrasView, OpenAIChatParams};
use crate::providers::openai::tool_parsing::parse_openai_chat_tools_array;
Expand Down Expand Up @@ -211,10 +212,7 @@ impl ProviderAdapter for OpenAIAdapter {
if let Some(raw_messages) = openai_extras_view.messages.as_ref() {
obj.insert("messages".into(), raw_messages.clone());
} else {
let openai_messages: Vec<ChatCompletionRequestMessageExt> =
<Vec<ChatCompletionRequestMessageExt> as TryFromLLM<Vec<Message>>>::try_from(
req.messages.clone(),
)
let openai_messages = messages_to_chat_completion_messages(req.messages.clone())
.map_err(|e| TransformError::FromUniversalFailed(e.to_string()))?;
obj.insert(
"messages".into(),
Expand Down
98 changes: 61 additions & 37 deletions crates/lingua/src/providers/openai/convert.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3030,48 +3030,72 @@ impl TryFromLLM<Message> for ChatCompletionRequestMessageExt {
})
}
Message::Tool { content } => {
// Extract the tool result content
let tool_result = content
.iter()
.map(|part| {
let ToolContentPart::ToolResult(result) = part;
result
})
.next()
.ok_or_else(|| ConvertError::MissingRequiredField {
let part = content.into_iter().next().ok_or_else(|| {
ConvertError::MissingRequiredField {
field: "tool_result".to_string(),
})?;

// Convert output to string for OpenAI
let content_string = match &tool_result.output {
serde_json::Value::String(s) => s.clone(),
other => serde_json::to_string(other).map_err(|e| {
ConvertError::JsonSerializationFailed {
field: "tool_result_content".to_string(),
error: e.to_string(),
}
})?,
};
}
})?;
let ToolContentPart::ToolResult(result) = part;
tool_result_to_chat_completion_message(result)
}
}
}
}

Ok(ChatCompletionRequestMessageExt {
base: openai::ChatCompletionRequestMessage {
role: openai::ChatCompletionRequestMessageRole::Tool,
content: Some(openai::ChatCompletionRequestMessageContent::String(
content_string,
)),
name: None,
tool_calls: None,
tool_call_id: Some(tool_result.tool_call_id.clone()),
audio: None,
function_call: None,
refusal: None,
},
reasoning: None,
reasoning_signature: None,
})
/// Convert `Vec<Message>` to `Vec<ChatCompletionRequestMessageExt>`, expanding
/// any `Message::Tool` with multiple results into one message per result.
///
/// Anthropic (and others) group parallel tool results into a single
/// `Message::Tool { content: [result1, result2] }`, but OpenAI Chat Completions
/// requires a separate `role: "tool"` message for each result.
///
/// Returns the first conversion error encountered, if any.
pub(crate) fn messages_to_chat_completion_messages(
    messages: Vec<Message>,
) -> Result<Vec<ChatCompletionRequestMessageExt>, ConvertError> {
    // At least one output message per input; tool expansion may add more.
    let mut out = Vec::with_capacity(messages.len());
    for message in messages {
        match message {
            // Fan out: one role:tool message per grouped result.
            Message::Tool { content } => {
                for part in content {
                    // Single-variant enum — irrefutable destructuring.
                    let ToolContentPart::ToolResult(tool_result) = part;
                    out.push(tool_result_to_chat_completion_message(tool_result)?);
                }
            }
            // All other roles convert one-to-one.
            other => {
                let converted =
                    <ChatCompletionRequestMessageExt as TryFromLLM<Message>>::try_from(other)?;
                out.push(converted);
            }
        }
    }
    Ok(out)
}

/// Convert a single tool result into a chat completions tool-role message.
///
/// String outputs pass through as-is; any other JSON value is serialized to a
/// string, since OpenAI tool messages carry textual content.
///
/// # Errors
/// Returns `ConvertError::JsonSerializationFailed` if the non-string output
/// cannot be serialized.
pub(crate) fn tool_result_to_chat_completion_message(
    result: ToolResultContentPart,
) -> Result<ChatCompletionRequestMessageExt, ConvertError> {
    // Flatten the JSON output into the string form OpenAI expects.
    let content_string = if let serde_json::Value::String(s) = &result.output {
        s.clone()
    } else {
        serde_json::to_string(&result.output).map_err(|e| {
            ConvertError::JsonSerializationFailed {
                field: "tool_result_content".to_string(),
                error: e.to_string(),
            }
        })?
    };

    let base = openai::ChatCompletionRequestMessage {
        role: openai::ChatCompletionRequestMessageRole::Tool,
        content: Some(openai::ChatCompletionRequestMessageContent::String(
            content_string,
        )),
        name: None,
        tool_calls: None,
        // Correlates this result with the assistant's earlier tool call.
        tool_call_id: Some(result.tool_call_id),
        audio: None,
        function_call: None,
        refusal: None,
    };

    Ok(ChatCompletionRequestMessageExt {
        base,
        reasoning: None,
        reasoning_signature: None,
    })
}

/// Convert UserContent to ChatCompletionRequestMessageContent
Expand Down
Loading
Loading