Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,18 @@ public class OpenAIResponse {
/** Standard OpenAI error object, populated on canonical error responses. */
@JsonProperty("error")
private OpenAIError error;

/** Error code for non-standard error responses. */
@JsonProperty("code")
private String code;

/** Error message for non-standard error responses. */
@JsonProperty("message")
private String message;

/** Status for non-standard error responses. */
@JsonProperty("status")
private String status;

/** No-argument constructor used by the JSON mapper during deserialization. */
public OpenAIResponse() {}

public String getId() {
Expand Down Expand Up @@ -166,13 +178,62 @@ public void setError(OpenAIError error) {
this.error = error;
}

/**
 * Returns the top-level error code reported by non-standard (gateway-style)
 * error responses, or {@code null} if absent.
 *
 * @return the raw {@code code} field value
 */
public String getCode() {
return code;
}

/**
 * Sets the top-level error code for non-standard error responses.
 *
 * @param code the raw {@code code} field value
 */
public void setCode(String code) {
this.code = code;
}

/**
 * Returns the top-level error message reported by non-standard (gateway-style)
 * error responses, or {@code null} if absent.
 *
 * @return the raw {@code message} field value
 */
public String getMessage() {
return message;
}

/**
 * Sets the top-level error message for non-standard error responses.
 *
 * @param message the raw {@code message} field value
 */
public void setMessage(String message) {
this.message = message;
}

/**
 * Returns the top-level status reported by non-standard (gateway-style)
 * error responses, or {@code null} if absent.
 *
 * @return the raw {@code status} field value
 */
public String getStatus() {
return status;
}

/**
 * Sets the top-level status for non-standard error responses.
 *
 * @param status the raw {@code status} field value
 */
public void setStatus(String status) {
this.status = status;
}

/**
 * Check if this response represents an error.
 *
 * <p>Supports detection of the standard OpenAI error structure (a populated
 * {@code error} object) as well as non-standard gateway responses that signal
 * failure through the top-level {@code status} or {@code code} fields.
 *
 * @return true if the response contains an error
 */
public boolean isError() {
    // Standard OpenAI payload: a nested "error" object is present.
    if (error != null) {
        return true;
    }
    // Non-standard gateways: status == "error" (case-insensitive).
    if ("error".equalsIgnoreCase(status)) {
        return true;
    }
    // Non-standard gateways: any code other than the success markers "200"/"0".
    return code != null && !code.equals("200") && !code.equals("0");
}

/**
 * Get the effective error message, handling both standard and non-standard formats.
 *
 * @return the nested standard error message if present, otherwise the
 *     top-level message, otherwise {@code "Unknown error"}
 */
public String getEffectiveErrorMessage() {
    // Prefer the message nested inside the standard OpenAI error object.
    if (error != null) {
        String standardMessage = error.getMessage();
        if (standardMessage != null) {
            return standardMessage;
        }
    }
    // Fall back to the top-level message used by non-standard gateways.
    if (message != null) {
        return message;
    }
    return "Unknown error";
}

/**
 * Get the effective error code, handling both standard and non-standard formats.
 *
 * @return the nested standard error code if present, otherwise the
 *     top-level code, otherwise {@code "unknown_error"}
 */
public String getEffectiveErrorCode() {
    // Prefer the code nested inside the standard OpenAI error object.
    if (error != null) {
        String standardCode = error.getCode();
        if (standardCode != null) {
            return standardCode;
        }
    }
    // Fall back to the top-level code used by non-standard gateways.
    if (code != null) {
        return code;
    }
    return "unknown_error";
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
package io.agentscope.core.model;

import io.agentscope.core.Version;
import io.agentscope.core.formatter.openai.dto.OpenAIError;
import io.agentscope.core.formatter.openai.dto.OpenAIRequest;
import io.agentscope.core.formatter.openai.dto.OpenAIResponse;
import io.agentscope.core.model.exception.OpenAIException;
Expand Down Expand Up @@ -333,22 +332,11 @@ public OpenAIResponse call(
}

if (response.isError()) {
OpenAIError error = response.getError();
if (error == null) {
throw new OpenAIException(
"OpenAI API returned error but error details are null",
400,
"unknown_error",
responseBody);
}
String errorMessage =
error.getMessage() != null ? error.getMessage() : "Unknown error";
String errorCode = error.getCode() != null ? error.getCode() : "unknown_error";
String errorMessage = response.getEffectiveErrorMessage();
String errorCode = response.getEffectiveErrorCode();
int statusCode = resolveErrorStatusCode(httpResponse.getStatusCode(), errorCode);
throw OpenAIException.create(
httpResponse.getStatusCode(),
"OpenAI API error: " + errorMessage,
errorCode,
responseBody);
statusCode, "OpenAI API error: " + errorMessage, errorCode, responseBody);
}

return response;
Expand Down Expand Up @@ -415,22 +403,16 @@ public Flux<OpenAIResponse> stream(
if (response != null) {
// Check for error in streaming response chunk
if (response.isError()) {
OpenAIError error = response.getError();
String errorMessage =
error != null && error.getMessage() != null
? error.getMessage()
: "Unknown error in streaming response";
String errorCode =
error != null && error.getCode() != null
? error.getCode()
: null;
String errorMessage = response.getEffectiveErrorMessage();
String errorCode = response.getEffectiveErrorCode();
int statusCode = resolveErrorStatusCode(200, errorCode);
sink.error(
OpenAIException.create(
400,
statusCode,
"OpenAI API error in streaming response: "
+ errorMessage,
errorCode,
null));
data));
return;
}
sink.next(response);
Expand Down Expand Up @@ -458,6 +440,39 @@ public Flux<OpenAIResponse> stream(
}
}

/**
 * Resolve the actual HTTP error status code when the API returns 200 OK
 * but the body carries an error payload.
 *
 * @param httpStatusCode the original HTTP status code
 * @param errorCode the error code extracted from the response body
 * @return a valid HTTP error status code (4xx or 5xx)
 */
private int resolveErrorStatusCode(int httpStatusCode, String errorCode) {
    // A transport-level error status is already authoritative; keep it.
    if (httpStatusCode >= 400) {
        return httpStatusCode;
    }

    // HTTP 200 with an error body but no usable code: default to Bad Request.
    if (errorCode == null) {
        return 400;
    }

    // Rate-limit heuristic: some gateways embed "429" inside a longer code string.
    if (errorCode.contains("429")) {
        return 429;
    }

    // A purely numeric code within the HTTP error range is used verbatim.
    try {
        int numericCode = Integer.parseInt(errorCode);
        if (numericCode >= 400 && numericCode <= 599) {
            return numericCode;
        }
    } catch (NumberFormatException ignored) {
        // Non-numeric codes carry no status information; fall through.
    }

    return 400;
}

/**
* Parse a single SSE data line to OpenAIResponse.
*
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
/*
* Copyright 2024-2026 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.agentscope.core.formatter.openai.dto;

import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;

import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;

/**
 * Tests for OpenAIResponse error detection logic covering both standard OpenAI errors
 * and non-standard gateway errors based on code, status, or message fields.
 */
class OpenAIResponseTest {

    @Test
    @DisplayName("Should detect standard OpenAI error")
    void testStandardOpenAIError() {
        // Canonical OpenAI failure: a populated nested error object.
        OpenAIError nestedError = new OpenAIError();
        nestedError.setCode("invalid_api_key");

        OpenAIResponse resp = new OpenAIResponse();
        resp.setError(nestedError);

        assertTrue(resp.isError());
    }

    @Test
    @DisplayName("Should detect non-standard gateway error by code")
    void testNonStandardErrorByCode() {
        // Gateway-style failure: top-level numeric code plus message.
        OpenAIResponse resp = new OpenAIResponse();
        resp.setCode("429");
        resp.setMessage("Rate limit exceeded");

        assertTrue(resp.isError());
    }

    @Test
    @DisplayName("Should detect non-standard gateway error by status")
    void testNonStandardErrorByStatus() {
        // Gateway-style failure signalled only through the status field.
        OpenAIResponse resp = new OpenAIResponse();
        resp.setStatus("error");

        assertTrue(resp.isError());
    }

    @Test
    @DisplayName("Should not detect error for successful response")
    void testSuccessfulResponse() {
        // "200" and "0" are treated as success markers, not error codes.
        OpenAIResponse okByCodeAndStatus = new OpenAIResponse();
        okByCodeAndStatus.setCode("200");
        okByCodeAndStatus.setStatus("success");
        assertFalse(okByCodeAndStatus.isError());

        OpenAIResponse okByZeroCode = new OpenAIResponse();
        okByZeroCode.setCode("0");
        assertFalse(okByZeroCode.isError());
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import reactor.core.publisher.Flux;

/**
* Unit tests for OpenAIClient.
Expand Down Expand Up @@ -915,4 +916,93 @@ void testThrowOpenAIExceptionWhenCustomEndpointPathInStreamCall() {
OpenAIException.class,
() -> client.stream(TEST_API_KEY, baseUrl, request, options).collectList().block());
}

@Test
@DisplayName("Should handle non-standard rate limit error in sync response body")
void testNonStandardErrorInResponseBody() {
// Non-standard gateway payload: top-level code/message/status fields
// rather than the canonical OpenAI {"error": {...}} object.
String errorResponse =
"""
{
"code": "429",
"message": "The request has triggered the maximum tokens per minute limit.",
"status": "error"
}
""";

// The gateway replies 200 OK even though the body describes a failure.
mockServer.enqueue(
new MockResponse()
.setResponseCode(200)
.setBody(errorResponse)
.setHeader("Content-Type", "application/json"));

OpenAIRequest request =
OpenAIRequest.builder()
.model("gpt-4")
.messages(
List.of(
OpenAIMessage.builder()
.role("user")
.content("Hello")
.build()))
.build();

// call(...) must detect the embedded error and surface it as an exception.
OpenAIException exception =
assertThrows(
OpenAIException.class, () -> client.call(TEST_API_KEY, baseUrl, request));

assertNotNull(exception);
// Status code is resolved from the body's "429" code, not the HTTP 200 transport.
assertEquals(429, exception.getStatusCode());
assertEquals("429", exception.getErrorCode());
assertTrue(exception.getMessage().contains("maximum tokens per minute"));
}

@Test
@DisplayName("Should handle non-standard rate limit error in streaming chunk")
void testNonStandardErrorInStreamChunk() {
    // First SSE chunk carries normal content; the second is a gateway error.
    String contentChunk =
            "data:"
                    + " {\"id\":\"chatcmpl-123\",\"object\":\"chat.completion.chunk\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hello\"}}]}\n\n";
    String errorChunk =
            "data: {\"code\":\"429\",\"message\":\"MAX_TPM limit"
                    + " exceeded\",\"status\":\"error\"}\n\n";

    mockServer.enqueue(
            new MockResponse()
                    .setResponseCode(200)
                    .setBody(contentChunk + errorChunk)
                    .setHeader("Content-Type", "text/event-stream"));

    OpenAIRequest chatRequest =
            OpenAIRequest.builder()
                    .model("gpt-4")
                    .messages(
                            List.of(
                                    OpenAIMessage.builder()
                                            .role("user")
                                            .content("Hello")
                                            .build()))
                    .build();

    GenerateOptions streamOptions = GenerateOptions.builder().build();

    List<OpenAIResponse> received = new ArrayList<>();
    Throwable[] errorHolder = new Throwable[1];

    // Drain the stream, recording emitted chunks and the terminal error.
    client.stream(TEST_API_KEY, baseUrl, chatRequest, streamOptions)
            .doOnNext(received::add)
            .doOnError(e -> errorHolder[0] = e)
            .onErrorResume(e -> Flux.empty())
            .blockLast();

    // The valid content chunk is delivered before the error terminates the flux.
    assertEquals(1, received.size());
    assertEquals("chatcmpl-123", received.get(0).getId());

    assertNotNull(errorHolder[0]);
    assertTrue(errorHolder[0] instanceof OpenAIException);

    OpenAIException streamError = (OpenAIException) errorHolder[0];
    // Status is resolved from the body's "429" code despite the HTTP 200 transport.
    assertEquals(429, streamError.getStatusCode());
    assertEquals("429", streamError.getErrorCode());
    assertTrue(streamError.getMessage().contains("MAX_TPM limit exceeded"));
}
}
Loading