Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
164 changes: 164 additions & 0 deletions src/Anthropic.Tests/AnthropicClientBetaExtensionsTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -2214,4 +2214,168 @@ public async Task GetResponseAsync_WithHostedToolsAndExistingBetas_PreservesAndD
Assert.Contains("code-execution-2025-08-25", capturedBetaHeaders);
Assert.Contains("mcp-client-2025-11-20", capturedBetaHeaders);
}

[Fact]
public async Task GetResponseAsync_WithNullableUnionType_TransformsToSimpleType()
{
    // The C# MCP SDK generates schemas with "type": ["integer", "null"] and
    // "default": null for optional nullable parameters. The expected request
    // below asserts that the transformation collapses the union to a plain
    // "integer" and drops the null default for non-required properties.
    VerbatimHttpHandler httpHandler = new(
        expectedRequest: """
            {
                "max_tokens": 1024,
                "model": "claude-haiku-4-5",
                "messages": [{
                    "role": "user",
                    "content": [{
                        "type": "text",
                        "text": "Call tool with optional param"
                    }]
                }],
                "tools": [{
                    "name": "tool_with_optional",
                    "description": "A tool with an optional nullable parameter",
                    "input_schema": {
                        "required_param": {
                            "type": "string"
                        },
                        "optional_number": {
                            "type": "integer"
                        },
                        "type": "object",
                        "required": ["required_param"]
                    }
                }]
            }
            """,
        actualResponse: """
            {
                "id": "msg_nullable_01",
                "type": "message",
                "role": "assistant",
                "model": "claude-haiku-4-5",
                "content": [{
                    "type": "text",
                    "text": "Tool ready"
                }],
                "stop_reason": "end_turn",
                "usage": {
                    "input_tokens": 30,
                    "output_tokens": 5
                }
            }
            """
    );

    IChatClient client = CreateChatClient(httpHandler, "claude-haiku-4-5");

    // The schema generated for this lambda carries the nullable-union pattern
    // for optional_number. Parameter names are significant: they become the
    // schema's property names, so they must not be renamed.
    var toolWithOptional = AIFunctionFactory.Create(
        (string required_param, int? optional_number = null) => "result",
        new AIFunctionFactoryOptions
        {
            Name = "tool_with_optional",
            Description = "A tool with an optional nullable parameter",
        }
    );

    var requestOptions = new ChatOptions { Tools = [toolWithOptional] };

    ChatResponse chatResponse = await client.GetResponseAsync(
        "Call tool with optional param",
        requestOptions
    );
    Assert.NotNull(chatResponse);
}

[Fact]
public async Task GetResponseAsync_WithRequiredNullableUnionType_PreservesUnionType()
{
    // When a property IS in the required array but has a nullable union type,
    // we should NOT transform it - let the API fail with a meaningful error
    // rather than silently misrepresenting the schema.
    // Using BetaTool.AsAITool() bypasses the schema transformation, so this test
    // verifies the raw tool passes through unchanged.
    //
    // The expected request therefore still contains "type": ["integer", "null"]
    // for the required property: the union type must survive untouched.
    VerbatimHttpHandler handler = new(
        expectedRequest: """
            {
                "max_tokens": 1024,
                "model": "claude-haiku-4-5",
                "messages": [{
                    "role": "user",
                    "content": [{
                        "type": "text",
                        "text": "Call tool with required nullable"
                    }]
                }],
                "tools": [{
                    "name": "tool_with_required_nullable",
                    "description": "A tool with a required nullable parameter",
                    "input_schema": {
                        "nullable_number": {
                            "type": ["integer", "null"],
                            "description": "A required but nullable number"
                        },
                        "type": "object",
                        "required": ["nullable_number"]
                    }
                }]
            }
            """,
        actualResponse: """
            {
                "id": "msg_required_nullable_01",
                "type": "message",
                "role": "assistant",
                "model": "claude-haiku-4-5",
                "content": [{
                    "type": "text",
                    "text": "Tool ready"
                }],
                "stop_reason": "end_turn",
                "usage": {
                    "input_tokens": 30,
                    "output_tokens": 5
                }
            }
            """
    );

    IChatClient chatClient = CreateChatClient(handler, "claude-haiku-4-5");

    // Create a BetaTool with a required nullable parameter using raw schema.
    // This simulates a schema that came from elsewhere (not C# MCP SDK pattern):
    // the anonymous object below serializes to the exact union-typed property
    // asserted in expectedRequest above.
    #pragma warning disable CA1861 // Prefer 'static readonly' fields over constant array arguments - test method only runs once
    BetaToolUnion rawTool = new BetaTool
    {
        Name = "tool_with_required_nullable",
        Description = "A tool with a required nullable parameter",
        InputSchema = new InputSchema(
            new Dictionary<string, JsonElement>
            {
                ["nullable_number"] = JsonSerializer.SerializeToElement(
                    new
                    {
                        type = new[] { "integer", "null" },
                        description = "A required but nullable number",
                    }
                ),
            }
        )
        {
            // Marking the property as required is what exercises the
            // "preserve the union type" branch under test.
            Required = ["nullable_number"],
        },
    };
    #pragma warning restore CA1861

    ChatOptions options = new() { Tools = [rawTool.AsAITool()] };

    ChatResponse response = await chatClient.GetResponseAsync(
        "Call tool with required nullable",
        options
    );
    Assert.NotNull(response);
}
}
217 changes: 217 additions & 0 deletions src/Anthropic.Tests/AnthropicClientExtensionsTestsBase.cs
Original file line number Diff line number Diff line change
Expand Up @@ -4533,6 +4533,223 @@ public async Task GetResponseAsync_WithFunctionResultContent_UriContent_PDF()
Assert.NotNull(response);
}

[Fact]
public void WithCacheControl_SetsAdditionalProperty()
{
    // Arrange: plain text content with no cache metadata yet.
    var content = new TextContent("Hello, world!");

    // Act: attach a 5-minute ephemeral cache-control entry.
    content.WithCacheControl(Anthropic.Models.Messages.TTL.TTL5m);

    // Assert: the entry is stored in AdditionalProperties and round-trips
    // through GetCacheControl(). Assert.Equal reports expected vs. actual on
    // failure, unlike Assert.True on an equality expression (xUnit2003/2005).
    Assert.NotNull(content.AdditionalProperties);
    var cacheControl = content.GetCacheControl();
    Assert.NotNull(cacheControl);
    Assert.Equal(Anthropic.Models.Messages.TTL.TTL5m, cacheControl.TTL);
}

[Fact]
public void WithCacheControl_CacheControlEphemeral_SetsAdditionalProperty()
{
    // Arrange: an explicit ephemeral cache-control object with a 1-hour TTL.
    var content = new TextContent("Hello, world!");
    var cacheControl = new Anthropic.Models.Messages.CacheControlEphemeral
    {
        TTL = Anthropic.Models.Messages.TTL.TTL1h,
    };

    // Act: attach the pre-built cache-control overload.
    content.WithCacheControl(cacheControl);

    // Assert: the same TTL is readable back via GetCacheControl().
    // Assert.Equal gives a descriptive expected/actual failure message,
    // unlike Assert.True on an equality expression (xUnit2003/2005).
    var retrieved = content.GetCacheControl();
    Assert.NotNull(retrieved);
    Assert.Equal(Anthropic.Models.Messages.TTL.TTL1h, retrieved.TTL);
}

[Fact]
public void WithCacheControl_Null_RemovesCacheControl()
{
    // Start from content that already has a 5-minute cache-control applied,
    // and confirm the precondition before exercising removal.
    var textContent = new TextContent("Hello, world!");
    textContent.WithCacheControl(Anthropic.Models.Messages.TTL.TTL5m);
    Assert.NotNull(textContent.GetCacheControl());

    // Passing an explicit null clears the previously set cache-control entry.
    textContent.WithCacheControl((Anthropic.Models.Messages.CacheControlEphemeral?)null);

    Assert.Null(textContent.GetCacheControl());
}

[Fact]
public async Task GetResponseAsync_WithCacheControlOnSystemMessage()
{
    // Cache-control metadata attached to a system message's TextContent should
    // serialize into the "system" block of the outgoing request.
    VerbatimHttpHandler httpHandler = new(
        expectedRequest: """
            {
                "model": "claude-haiku-4-5",
                "messages": [{
                    "role": "user",
                    "content": [{
                        "type": "text",
                        "text": "Hello"
                    }]
                }],
                "max_tokens": 1024,
                "system": [{
                    "type": "text",
                    "text": "You are a helpful assistant.",
                    "cache_control": {
                        "type": "ephemeral",
                        "ttl": "1h"
                    }
                }]
            }
            """,
        actualResponse: """
            {
                "id": "msg_cache_01",
                "type": "message",
                "role": "assistant",
                "model": "claude-haiku-4-5",
                "content": [{
                    "type": "text",
                    "text": "Hello!"
                }],
                "stop_reason": "end_turn",
                "usage": {
                    "input_tokens": 10,
                    "output_tokens": 5
                }
            }
            """
    );

    IChatClient client = CreateChatClient(httpHandler, "claude-haiku-4-5");

    // Attach a 1-hour ephemeral cache-control entry to the system prompt text.
    var systemPrompt = new TextContent("You are a helpful assistant.").WithCacheControl(
        Anthropic.Models.Messages.TTL.TTL1h
    );

    List<ChatMessage> conversation =
    [
        new(ChatRole.System, [systemPrompt]),
        new(ChatRole.User, "Hello"),
    ];

    ChatResponse chatResponse = await client.GetResponseAsync(conversation);
    Assert.NotNull(chatResponse);
}

[Fact]
public async Task GetResponseAsync_WithCacheControlOnUserMessage()
{
    // Cache-control metadata on a user message's text content should appear on
    // the corresponding content block of the serialized request.
    VerbatimHttpHandler httpHandler = new(
        expectedRequest: """
            {
                "model": "claude-haiku-4-5",
                "messages": [{
                    "role": "user",
                    "content": [{
                        "type": "text",
                        "text": "What is the meaning of life?",
                        "cache_control": {
                            "type": "ephemeral",
                            "ttl": "5m"
                        }
                    }]
                }],
                "max_tokens": 1024
            }
            """,
        actualResponse: """
            {
                "id": "msg_cache_02",
                "type": "message",
                "role": "assistant",
                "model": "claude-haiku-4-5",
                "content": [{
                    "type": "text",
                    "text": "42"
                }],
                "stop_reason": "end_turn",
                "usage": {
                    "input_tokens": 15,
                    "output_tokens": 3
                }
            }
            """
    );

    IChatClient client = CreateChatClient(httpHandler, "claude-haiku-4-5");

    // Attach a 5-minute ephemeral cache-control entry to the user's text.
    var userPrompt = new TextContent("What is the meaning of life?").WithCacheControl(
        Anthropic.Models.Messages.TTL.TTL5m
    );

    List<ChatMessage> conversation = [new(ChatRole.User, [userPrompt])];

    ChatResponse chatResponse = await client.GetResponseAsync(conversation);
    Assert.NotNull(chatResponse);
}

[Fact]
public async Task GetResponseAsync_WithCacheControlOnImage()
{
    // Cache-control metadata on a DataContent image should serialize onto the
    // image content block (alongside its base64 source) in the request.
    VerbatimHttpHandler httpHandler = new(
        expectedRequest: """
            {
                "model": "claude-haiku-4-5",
                "messages": [{
                    "role": "user",
                    "content": [{
                        "type": "image",
                        "source": {
                            "type": "base64",
                            "media_type": "image/png",
                            "data": "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="
                        },
                        "cache_control": {
                            "type": "ephemeral",
                            "ttl": "1h"
                        }
                    }, {
                        "type": "text",
                        "text": "What do you see?"
                    }]
                }],
                "max_tokens": 1024
            }
            """,
        actualResponse: """
            {
                "id": "msg_cache_03",
                "type": "message",
                "role": "assistant",
                "model": "claude-haiku-4-5",
                "content": [{
                    "type": "text",
                    "text": "I see a small image."
                }],
                "stop_reason": "end_turn",
                "usage": {
                    "input_tokens": 100,
                    "output_tokens": 10
                }
            }
            """
    );

    IChatClient client = CreateChatClient(httpHandler, "claude-haiku-4-5");

    // A 1x1 PNG as a data URI, tagged with a 1-hour ephemeral cache-control.
    var cachedImage = new DataContent(
        "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg==",
        "image/png"
    ).WithCacheControl(Anthropic.Models.Messages.TTL.TTL1h);

    List<ChatMessage> conversation =
    [
        new(ChatRole.User, [cachedImage, new TextContent("What do you see?")]),
    ];

    ChatResponse chatResponse = await client.GetResponseAsync(conversation);
    Assert.NotNull(chatResponse);
}

protected sealed class VerbatimHttpHandler(string expectedRequest, string actualResponse)
: HttpMessageHandler
{
Expand Down
Loading