Skip to content

Commit 8b7025b

Browse files
committed
OPIK-610 removed unnecessary interface
1 parent ab343fe commit 8b7025b

File tree

3 files changed

+49
-62
lines changed

apps/opik-backend/src/main/java/com/comet/opik/domain/ChatCompletionService.java

+5-6
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
package com.comet.opik.domain;
22

3-
import com.comet.opik.domain.llmproviders.DefaultLlmProviderStreamHandler;
43
import com.comet.opik.domain.llmproviders.LlmProviderFactory;
4+
import com.comet.opik.domain.llmproviders.LlmProviderStreamHandler;
55
import dev.ai4j.openai4j.chat.ChatCompletionRequest;
66
import dev.ai4j.openai4j.chat.ChatCompletionResponse;
77
import jakarta.inject.Inject;
@@ -14,13 +14,12 @@
1414
@Slf4j
1515
public class ChatCompletionService {
1616
private final LlmProviderFactory llmProviderFactory;
17-
private final DefaultLlmProviderStreamHandler defaultStreamHandler;
17+
private final LlmProviderStreamHandler streamHandler;
1818

1919
@Inject
20-
public ChatCompletionService(LlmProviderFactory llmProviderFactory,
21-
DefaultLlmProviderStreamHandler defaultStreamHandler) {
20+
public ChatCompletionService(LlmProviderFactory llmProviderFactory, LlmProviderStreamHandler streamHandler) {
2221
this.llmProviderFactory = llmProviderFactory;
23-
this.defaultStreamHandler = defaultStreamHandler;
22+
this.streamHandler = streamHandler;
2423
}
2524

2625
public ChatCompletionResponse create(@NonNull ChatCompletionRequest request, @NonNull String workspaceId) {
@@ -35,7 +34,7 @@ public ChunkedOutput<String> createAndStreamResponse(
3534
@NonNull ChatCompletionRequest request, @NonNull String workspaceId) {
3635
log.info("Creating and streaming chat completions, workspaceId '{}', model '{}'", workspaceId, request.model());
3736
var llmProviderClient = llmProviderFactory.getService(workspaceId, request.model());
38-
var chunkedOutput = llmProviderClient.generateStream(request, workspaceId, defaultStreamHandler);
37+
var chunkedOutput = llmProviderClient.generateStream(request, workspaceId, streamHandler);
3938
log.info("Created and streaming chat completions, workspaceId '{}', model '{}'", workspaceId, request.model());
4039
return chunkedOutput;
4140
}

apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/DefaultLlmProviderStreamHandler.java

-52
This file was deleted.

apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/LlmProviderStreamHandler.java

+44-4
Original file line numberDiff line numberDiff line change
package com.comet.opik.domain.llmproviders;

import com.comet.opik.utils.JsonUtils;
import dev.ai4j.openai4j.OpenAiHttpException;
import io.dropwizard.jersey.errors.ErrorMessage;
import lombok.extern.slf4j.Slf4j;
import org.glassfish.jersey.server.ChunkedOutput;

import java.io.IOException;
import java.io.UncheckedIOException;

/**
 * Streams LLM provider responses to the HTTP client over a Jersey {@link ChunkedOutput}.
 * Formerly an interface with a single {@code DefaultLlmProviderStreamHandler} implementation;
 * this commit collapses the two into one concrete class.
 * <p>
 * NOTE(review): not obviously designed for extension, but left non-final here because
 * provider-specific subclasses elsewhere in the project cannot be ruled out from this view.
 */
@Slf4j
public class LlmProviderStreamHandler {

    // Generic message logged and sent to the client when the provider call fails
    // with anything other than an OpenAiHttpException.
    private static final String UNEXPECTED_ERROR_CALLING_LLM_PROVIDER = "Unexpected error calling LLM provider";

    /**
     * Serializes {@code item} to JSON and writes it as one chunk to the client stream.
     * Silently drops the message (with a warning) if the stream is already closed.
     *
     * @param item          payload to serialize (stream chunk or error message)
     * @param chunkedOutput the client-facing output stream
     * @throws UncheckedIOException if the write fails; callers such as
     *         {@link #handleError} catch this to avoid failing the error path itself
     */
    public void handleMessage(Object item, ChunkedOutput<String> chunkedOutput) {
        if (chunkedOutput.isClosed()) {
            log.warn("Output stream is already closed");
            return;
        }
        try {
            chunkedOutput.write(JsonUtils.writeValueAsString(item));
        } catch (IOException ioException) {
            // Wrap so this method can be used as a lambda-friendly callback without checked exceptions.
            throw new UncheckedIOException(ioException);
        }
    }

    /**
     * Closes the client stream. A close failure is logged but never propagated,
     * so stream teardown cannot mask the original streaming outcome.
     *
     * @param chunkedOutput the client-facing output stream
     */
    public void handleClose(ChunkedOutput<String> chunkedOutput) {
        try {
            chunkedOutput.close();
        } catch (IOException ioException) {
            log.error("Failed to close output stream", ioException);
        }
    }

    /**
     * Terminal error callback: logs the failure, makes a best-effort attempt to send an
     * {@link ErrorMessage} to the client, then always closes the stream.
     * Provider HTTP errors ({@link OpenAiHttpException}) keep their original status code
     * and message; anything else is reported with the generic message only.
     *
     * @param throwable     the failure raised while streaming from the provider
     * @param chunkedOutput the client-facing output stream
     */
    public void handleError(Throwable throwable, ChunkedOutput<String> chunkedOutput) {
        log.error(UNEXPECTED_ERROR_CALLING_LLM_PROVIDER, throwable);
        var errorMessage = new ErrorMessage(UNEXPECTED_ERROR_CALLING_LLM_PROVIDER);
        if (throwable instanceof OpenAiHttpException openAiHttpException) {
            errorMessage = new ErrorMessage(openAiHttpException.code(), openAiHttpException.getMessage());
        }
        try {
            handleMessage(errorMessage, chunkedOutput);
        } catch (UncheckedIOException uncheckedIOException) {
            // Best effort only: the client may already be gone; still proceed to close.
            log.error("Failed to stream error message to client", uncheckedIOException);
        }
        handleClose(chunkedOutput);
    }
}

0 commit comments

Comments
 (0)