Skip to content

Commit 9bcab7c

Browse files
author
Ido Berkovich
committed
OPIK-610 mandatory Anthropic fields validation
1 parent 3857d42 commit 9bcab7c

2 files changed

Lines changed: 45 additions & 9 deletions

File tree

apps/opik-backend/src/main/java/com/comet/opik/domain/llmproviders/Anthropic.java

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,9 @@
3636

3737
@Slf4j
3838
public class Anthropic implements LlmProviderService {
39+
public static final String ERROR_EMPTY_MESSAGES = "messages cannot be empty";
40+
public static final String ERROR_NO_COMPLETION_TOKENS = "maxCompletionTokens cannot be null";
41+
3942
private final LlmProviderClientConfig llmProviderClientConfig;
4043
private final AnthropicClient anthropicClient;
4144

@@ -76,13 +79,10 @@ public void generateStream(
7679
public void validateRequest(ChatCompletionRequest request) {
7780
// see https://github.com/anthropics/courses/blob/master/anthropic_api_fundamentals/04_parameters.ipynb
7881
if (CollectionUtils.isEmpty(request.messages())) {
79-
throw new BadRequestException("messages cannot be empty");
82+
throw new BadRequestException(ERROR_EMPTY_MESSAGES);
8083
}
8184
if (request.maxCompletionTokens() == null) {
82-
throw new BadRequestException("maxCompletionTokens cannot be null");
83-
}
84-
if (StringUtils.isEmpty(request.model())) {
85-
throw new BadRequestException("model cannot be empty");
85+
throw new BadRequestException(ERROR_NO_COMPLETION_TOKENS);
8686
}
8787
}
8888

apps/opik-backend/src/test/java/com/comet/opik/api/resources/v1/priv/ChatCompletionsResourceTest.java

Lines changed: 40 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -39,9 +39,13 @@
3939
import java.util.UUID;
4040
import java.util.stream.Stream;
4141

42+
import static com.comet.opik.domain.llmproviders.Anthropic.ERROR_EMPTY_MESSAGES;
43+
import static com.comet.opik.domain.llmproviders.Anthropic.ERROR_NO_COMPLETION_TOKENS;
4244
import static com.comet.opik.domain.llmproviders.LlmProviderFactory.ERROR_MODEL_NOT_SUPPORTED;
4345
import static org.assertj.core.api.Assertions.assertThat;
4446
import static org.assertj.core.api.Assumptions.assumeThat;
47+
import static org.junit.jupiter.api.Named.named;
48+
import static org.junit.jupiter.params.provider.Arguments.arguments;
4549

4650
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
4751
public class ChatCompletionsResourceTest {
@@ -208,14 +212,12 @@ void createAndStreamResponse(String expectedModel, LlmProvider llmProvider, Stri
208212

209213
private static Stream<Arguments> testModelsProvider() {
210214
return Stream.of(
211-
Arguments.of(ChatCompletionModel.GPT_4O_MINI.toString(), LlmProvider.OPEN_AI,
215+
arguments(ChatCompletionModel.GPT_4O_MINI.toString(), LlmProvider.OPEN_AI,
212216
UUID.randomUUID().toString()),
213-
Arguments.of(AnthropicChatModelName.CLAUDE_3_5_SONNET_20240620.toString(), LlmProvider.ANTHROPIC,
217+
arguments(AnthropicChatModelName.CLAUDE_3_5_SONNET_20240620.toString(), LlmProvider.ANTHROPIC,
214218
System.getenv("ANTHROPIC_API_KEY")));
215219
}
216220

217-
// TODO: add coverage for anthropic missing model, messages or maxCompletionTokens for both streaming and non-streaming
218-
219221
@ParameterizedTest
220222
@ValueSource(strings = {"", "non-existing-model"})
221223
void createAndStreamResponseReturnsBadRequestWhenNoModel(String model) {
@@ -240,6 +242,40 @@ void createAndStreamResponseReturnsBadRequestWhenNoModel(String model) {
240242

241243
}
242244

245+
@ParameterizedTest
246+
@MethodSource
247+
void createAnthropicValidateMandatoryFields(ChatCompletionRequest request, String expectedErrorMessage) {
248+
String llmProviderApiKey = UUID.randomUUID().toString();
249+
250+
var workspaceName = RandomStringUtils.randomAlphanumeric(20);
251+
var workspaceId = UUID.randomUUID().toString();
252+
mockTargetWorkspace(workspaceName, workspaceId);
253+
createLlmProviderApiKey(workspaceName, LlmProvider.ANTHROPIC, llmProviderApiKey);
254+
255+
var errorMessage = chatCompletionsClient.create(API_KEY, workspaceName, request, HttpStatus.SC_BAD_REQUEST);
256+
257+
assertThat(errorMessage.getCode()).isEqualTo(HttpStatus.SC_BAD_REQUEST);
258+
assertThat(errorMessage.getMessage())
259+
.containsIgnoringCase(expectedErrorMessage);
260+
}
261+
262+
private Stream<Arguments> createAnthropicValidateMandatoryFields() {
263+
ChatCompletionRequest.Builder baseRequest = podamFactory.manufacturePojo(ChatCompletionRequest.Builder.class)
264+
.stream(false)
265+
.model(AnthropicChatModelName.CLAUDE_3_5_SONNET_20240620.toString());
266+
return Stream.of(
267+
arguments(named("no messages", podamFactory.manufacturePojo(ChatCompletionRequest.Builder.class)
268+
.stream(false)
269+
.model(AnthropicChatModelName.CLAUDE_3_5_SONNET_20240620.toString())
270+
.maxCompletionTokens(100).build()),
271+
ERROR_EMPTY_MESSAGES),
272+
arguments(named("no max tokens", podamFactory.manufacturePojo(ChatCompletionRequest.Builder.class)
273+
.stream(false)
274+
.model(AnthropicChatModelName.CLAUDE_3_5_SONNET_20240620.toString())
275+
.addUserMessage("Say 'Hello World'").build()),
276+
ERROR_NO_COMPLETION_TOKENS));
277+
}
278+
243279
private void createLlmProviderApiKey(String workspaceName) {
244280
createLlmProviderApiKey(workspaceName, LlmProvider.OPEN_AI, UUID.randomUUID().toString());
245281
}

0 commit comments

Comments (0)