diff --git a/crates/token_proxy_core/src/proxy/codex_compat.test.rs b/crates/token_proxy_core/src/proxy/codex_compat.test.rs
index 180e558..71dfd07 100644
--- a/crates/token_proxy_core/src/proxy/codex_compat.test.rs
+++ b/crates/token_proxy_core/src/proxy/codex_compat.test.rs
@@ -125,3 +125,45 @@ fn responses_request_to_codex_strips_prompt_cache_retention() {
     assert!(value.get("previous_response_id").is_none());
     assert!(value.get("safety_identifier").is_none());
 }
+
+#[test]
+fn responses_request_to_codex_preserves_parallel_tool_calls_false() {
+    let input = json!({
+        "model": "gpt-5",
+        "input": [
+            {
+                "type": "message",
+                "role": "user",
+                "content": [{ "type": "input_text", "text": "hi" }]
+            }
+        ],
+        "parallel_tool_calls": false
+    });
+    let bytes = Bytes::from(input.to_string());
+    let output = responses_request_to_codex(&bytes, Some("gpt-5-codex")).expect("convert");
+    let value: serde_json::Value = serde_json::from_slice(&output).expect("json");
+    assert_eq!(value["parallel_tool_calls"], json!(false));
+}
+
+#[test]
+fn responses_request_to_codex_strips_output_parts_from_function_call_output() {
+    let input = json!({
+        "model": "gpt-5",
+        "input": [
+            {
+                "type": "function_call_output",
+                "call_id": "call_1",
+                "output": "ok",
+                "output_parts": [
+                    { "type": "text", "text": "ok" }
+                ]
+            }
+        ]
+    });
+    let bytes = Bytes::from(input.to_string());
+    let output = responses_request_to_codex(&bytes, Some("gpt-5-codex")).expect("convert");
+    let value: serde_json::Value = serde_json::from_slice(&output).expect("json");
+    let input_items = value["input"].as_array().expect("input array");
+    assert_eq!(input_items.len(), 1);
+    assert!(input_items[0].get("output_parts").is_none());
+}
diff --git a/crates/token_proxy_core/src/proxy/codex_compat/request.rs b/crates/token_proxy_core/src/proxy/codex_compat/request.rs
index 429f9fa..f9ccc06 100644
--- a/crates/token_proxy_core/src/proxy/codex_compat/request.rs
+++ b/crates/token_proxy_core/src/proxy/codex_compat/request.rs
@@ -413,7 +413,9 @@ fn normalize_responses_payload(object: &mut Map<String, Value>, model_hint: Opti
     object.insert("model".to_string(), Value::String(model.to_string()));
     object.insert("stream".to_string(), Value::Bool(true));
     object.insert("store".to_string(), Value::Bool(false));
-    object.insert("parallel_tool_calls".to_string(), Value::Bool(true));
+    if !object.contains_key("parallel_tool_calls") {
+        object.insert("parallel_tool_calls".to_string(), Value::Bool(true));
+    }
     object.insert(
         "include".to_string(),
         json!(["reasoning.encrypted_content"]),
@@ -441,12 +443,34 @@ fn normalize_responses_payload(object: &mut Map<String, Value>, model_hint: Opti
             "role": "user",
             "content": [json!({"type":"input_text","text": text})]
         })],
-        Some(Value::Array(items)) => items.clone(),
+        Some(Value::Array(items)) => sanitize_responses_input_for_codex(items),
         _ => Vec::new(),
     };
     object.insert("input".to_string(), Value::Array(input));
 }
+fn sanitize_responses_input_for_codex(items: &[Value]) -> Vec<Value> {
+    items
+        .iter()
+        .map(sanitize_responses_input_item_for_codex)
+        .collect()
+}
+
+fn sanitize_responses_input_item_for_codex(item: &Value) -> Value {
+    let Some(object) = item.as_object() else {
+        return item.clone();
+    };
+    if object.get("type").and_then(Value::as_str) != Some("function_call_output") {
+        return item.clone();
+    }
+    let mut sanitized = object.clone();
+    // Claude -> Responses may carry structured tool output in `output_parts`.
+    // Codex only needs the flattened `output` string here; forwarding the extra field
+    // breaks composition without adding value.
+    sanitized.remove("output_parts");
+    Value::Object(sanitized)
+}
+
 fn rewrite_input_function_names(input: &mut Value, tool_map: &ToolNameMap) {
     let Some(items) = input.as_array_mut() else {
         return;