From e1b442cee59ed33f4c5d8442072a4da933137253 Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Thu, 12 Jun 2025 21:14:58 +0000
Subject: [PATCH 1/3] Update dependency com.azure:azure-ai-openai to
 v1.0.0-beta.16

---
 framework/codemodder-base/build.gradle.kts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/framework/codemodder-base/build.gradle.kts b/framework/codemodder-base/build.gradle.kts
index 42106fa13..acf1cc5c2 100644
--- a/framework/codemodder-base/build.gradle.kts
+++ b/framework/codemodder-base/build.gradle.kts
@@ -29,7 +29,7 @@ dependencies {
   api(libs.javaparser.symbolsolver.model)
   api(libs.javadiff)
   api(libs.jtokkit)
-  api("com.azure:azure-ai-openai:1.0.0-beta.10")
+  api("com.azure:azure-ai-openai:1.0.0-beta.16")
   api("io.github.classgraph:classgraph:4.8.160")

   implementation(libs.tuples)

From 991b3da53273bb40d67f76d3e00c0e3c81d07da2 Mon Sep 17 00:00:00 2001
From: Arshan Dabirsiaghi
Date: Thu, 12 Jun 2025 17:30:04 -0400
Subject: [PATCH 2/3] remove test for logging project name, structured logging
 no longer an important goal for the project

---
 .../plugins/llm/SarifToLLMForMultiOutcomeCodemod.java | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/plugins/codemodder-plugin-llm/src/main/java/io/codemodder/plugins/llm/SarifToLLMForMultiOutcomeCodemod.java b/plugins/codemodder-plugin-llm/src/main/java/io/codemodder/plugins/llm/SarifToLLMForMultiOutcomeCodemod.java
index a5d06cea8..eed8acadf 100644
--- a/plugins/codemodder-plugin-llm/src/main/java/io/codemodder/plugins/llm/SarifToLLMForMultiOutcomeCodemod.java
+++ b/plugins/codemodder-plugin-llm/src/main/java/io/codemodder/plugins/llm/SarifToLLMForMultiOutcomeCodemod.java
@@ -201,7 +201,8 @@ private boolean estimatedToExceedContextWindow(final CodemodInvocationContext co
     int tokenCount =
         model.tokens(
             List.of(
-                getSystemMessage().getContent(), estimatedUserMessage.getContent().toString()));
+                getSystemMessage().getStringContent(),
+                estimatedUserMessage.getContent().toString()));
     // estimated token count doesn't include the function (~100 tokens) or the reply
     // (~200 tokens) so add those estimates before checking against window size
     tokenCount += 300;

From 0e887f8362f72a2cfe662f054d54b04c245efa1c Mon Sep 17 00:00:00 2001
From: Arshan Dabirsiaghi
Date: Thu, 12 Jun 2025 18:06:52 -0400
Subject: [PATCH 3/3] fix another instance of breaking api change

---
 .../llm/SarifToLLMForBinaryVerificationAndFixingCodemod.java | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/plugins/codemodder-plugin-llm/src/main/java/io/codemodder/plugins/llm/SarifToLLMForBinaryVerificationAndFixingCodemod.java b/plugins/codemodder-plugin-llm/src/main/java/io/codemodder/plugins/llm/SarifToLLMForBinaryVerificationAndFixingCodemod.java
index 58b3e9973..2ade70631 100644
--- a/plugins/codemodder-plugin-llm/src/main/java/io/codemodder/plugins/llm/SarifToLLMForBinaryVerificationAndFixingCodemod.java
+++ b/plugins/codemodder-plugin-llm/src/main/java/io/codemodder/plugins/llm/SarifToLLMForBinaryVerificationAndFixingCodemod.java
@@ -151,7 +151,8 @@ private BinaryThreatAnalysis analyzeThreat(
     // If the estimated token count, which doesn't include the function (~100 tokens) or the reply
     // (~200 tokens), is close to the max, then assume the code is safe (for now).
     int tokenCount =
-        model.tokens(List.of(systemMessage.getContent(), userMessage.getContent().toString()));
+        model.tokens(
+            List.of(systemMessage.getStringContent(), userMessage.getContent().toString()));
     if (tokenCount > model.contextWindow() - 300) {
       return new BinaryThreatAnalysis(
           "Ignoring file: estimated prompt token count (" + tokenCount + ") is too high.",
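
Patches 2/3 and 3/3 adapt the prompt-size estimate to the beta.16 API, where the system message's String content is read with getStringContent() rather than getContent(). The sketch below is a minimal, self-contained illustration of that call-site change together with the context-window check the comments describe. It is an assumption-laden sketch, not code from this PR: it assumes the messages are com.azure.ai.openai.models.ChatRequestSystemMessage and ChatRequestUserMessage (as the changed call sites suggest), counts tokens with jtokkit (already a dependency via libs.jtokkit) instead of the project's Model helper, and the class and method names (PromptBudgetCheck, fitsContextWindow) are illustrative only.

import com.azure.ai.openai.models.ChatRequestSystemMessage;
import com.azure.ai.openai.models.ChatRequestUserMessage;
import com.knuddels.jtokkit.Encodings;
import com.knuddels.jtokkit.api.Encoding;
import com.knuddels.jtokkit.api.ModelType;
import java.util.List;

final class PromptBudgetCheck {

  // Hypothetical token counter; the project routes this through its own Model abstraction.
  private static final Encoding ENCODING =
      Encodings.newDefaultEncodingRegistry().getEncodingForModel(ModelType.GPT_4);

  /** Returns true when the estimated prompt is likely to fit the given context window. */
  static boolean fitsContextWindow(
      ChatRequestSystemMessage systemMessage,
      ChatRequestUserMessage userMessage,
      int contextWindow) {
    // beta.16 call sites: the system message exposes its String content via
    // getStringContent(); the user message content is rendered with toString(),
    // matching the patched lines above.
    List<String> prompt =
        List.of(systemMessage.getStringContent(), userMessage.getContent().toString());
    int tokenCount = prompt.stream().mapToInt(ENCODING::countTokens).sum();
    // The raw estimate excludes the function (~100 tokens) and the reply
    // (~200 tokens), so reserve 300 tokens of headroom before comparing,
    // as the patched code does.
    return tokenCount + 300 <= contextWindow;
  }
}

A caller could invoke fitsContextWindow(systemMessage, userMessage, 16_385) before issuing the chat request and take the "Ignoring file" path otherwise; the 16,385 figure is only an example window size, not the value the codemods use.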