Skip to content

Commit 3bfb047

Browse files
committed
OPIK-610 encapsulate openai specific error handling
1 parent 07e8525 commit 3bfb047

File tree

2 files changed

+25
-14
lines changed

2 files changed

+25
-14
lines changed

apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/LlmProviderStreamHandler.java

+15-13
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,14 @@
11
package com.comet.opik.domain.llmproviders;
22

33
import com.comet.opik.utils.JsonUtils;
4-
import dev.ai4j.openai4j.OpenAiHttpException;
54
import io.dropwizard.jersey.errors.ErrorMessage;
65
import lombok.extern.slf4j.Slf4j;
76
import org.glassfish.jersey.server.ChunkedOutput;
87

98
import java.io.IOException;
109
import java.io.UncheckedIOException;
10+
import java.util.function.Consumer;
11+
import java.util.function.Function;
1112

1213
@Slf4j
1314
public class LlmProviderStreamHandler {
@@ -33,17 +34,18 @@ public void handleClose(ChunkedOutput<String> chunkedOutput) {
3334
}
3435
}
3536

36-
public void handleError(Throwable throwable, ChunkedOutput<String> chunkedOutput) {
37-
log.error(UNEXPECTED_ERROR_CALLING_LLM_PROVIDER, throwable);
38-
var errorMessage = new ErrorMessage(UNEXPECTED_ERROR_CALLING_LLM_PROVIDER);
39-
if (throwable instanceof OpenAiHttpException openAiHttpException) {
40-
errorMessage = new ErrorMessage(openAiHttpException.code(), openAiHttpException.getMessage());
41-
}
42-
try {
43-
handleMessage(errorMessage, chunkedOutput);
44-
} catch (UncheckedIOException uncheckedIOException) {
45-
log.error("Failed to stream error message to client", uncheckedIOException);
46-
}
47-
handleClose(chunkedOutput);
37+
public Consumer<Throwable> getErrorHandler(
38+
Function<Throwable, ErrorMessage> mapper, ChunkedOutput<String> chunkedOutput) {
39+
return throwable -> {
40+
log.error(UNEXPECTED_ERROR_CALLING_LLM_PROVIDER, throwable);
41+
42+
var errorMessage = mapper.apply(throwable);
43+
try {
44+
handleMessage(errorMessage, chunkedOutput);
45+
} catch (UncheckedIOException uncheckedIOException) {
46+
log.error("Failed to stream error message to client", uncheckedIOException);
47+
}
48+
handleClose(chunkedOutput);
49+
};
4850
}
4951
}

apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/OpenAi.java

+10-1
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
import dev.ai4j.openai4j.chat.ChatCompletionRequest;
77
import dev.ai4j.openai4j.chat.ChatCompletionResponse;
88
import dev.langchain4j.internal.RetryUtils;
9+
import io.dropwizard.jersey.errors.ErrorMessage;
910
import jakarta.inject.Inject;
1011
import jakarta.ws.rs.ClientErrorException;
1112
import jakarta.ws.rs.InternalServerErrorException;
@@ -61,7 +62,7 @@ public ChunkedOutput<String> generateStream(@NonNull ChatCompletionRequest reque
6162
.onPartialResponse(
6263
chatCompletionResponse -> streamHandler.handleMessage(chatCompletionResponse, chunkedOutput))
6364
.onComplete(() -> streamHandler.handleClose(chunkedOutput))
64-
.onError(throwable -> streamHandler.handleError(throwable, chunkedOutput))
65+
.onError(streamHandler.getErrorHandler(this::errorMapper, chunkedOutput))
6566
.execute();
6667
log.info("Created and streaming chat completions, workspaceId '{}', model '{}'", workspaceId, request.model());
6768
return chunkedOutput;
@@ -97,4 +98,12 @@ private OpenAiClient newOpenAiClient(String apiKey) {
9798
.openAiApiKey(apiKey)
9899
.build();
99100
}
101+
102+
private ErrorMessage errorMapper(Throwable throwable) {
103+
if (throwable instanceof OpenAiHttpException openAiHttpException) {
104+
return new ErrorMessage(openAiHttpException.code(), openAiHttpException.getMessage());
105+
}
106+
107+
return new ErrorMessage(UNEXPECTED_ERROR_CALLING_LLM_PROVIDER);
108+
}
100109
}

0 commit comments

Comments (0)