
Commit 022c050

Merge branch 'SciSharp:master' into master
2 parents: 3439809 + 7b649c0 (commit 022c050)

18 files changed (+157, -123 lines)


src/Infrastructure/BotSharp.Abstraction/Knowledges/IKnowledgeHook.cs

Lines changed: 2 additions & 2 deletions
@@ -5,9 +5,9 @@ public interface IKnowledgeHook
     Task<List<KnowledgeChunk>> CollectChunkedKnowledge()
         => Task.FromResult(new List<KnowledgeChunk>());
 
-    Task<List<string>> GetRelevantKnowledges(string text)
+    Task<List<string>> GetRelevantKnowledges(RoleDialogModel message, string text)
         => Task.FromResult(new List<string>());
 
-    Task<List<string>> GetGlobalKnowledges()
+    Task<List<string>> GetGlobalKnowledges(RoleDialogModel message)
         => Task.FromResult(new List<string>());
 }
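Both knowledge-lookup methods now receive the current RoleDialogModel, which is a breaking change for existing hooks. A minimal sketch of an implementation updated to the new signatures (the class name and returned strings are hypothetical):

public class SampleKnowledgeHook : IKnowledgeHook
{
    // New signature: the dialog message now travels with the query text.
    public Task<List<string>> GetRelevantKnowledges(RoleDialogModel message, string text)
        => Task.FromResult(new List<string> { $"Note related to: {text}" });

    // New signature: global knowledge can now be scoped by the current message.
    public Task<List<string>> GetGlobalKnowledges(RoleDialogModel message)
        => Task.FromResult(new List<string>());
}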

src/Plugins/BotSharp.Plugin.AnthropicAI/Providers/ChatCompletionProvider.cs

Lines changed: 14 additions & 7 deletions
@@ -50,10 +50,10 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
         {
             var toolResult = response.Content.OfType<ToolUseContent>().First();
 
-            responseMessage = new RoleDialogModel(AgentRole.Function, response.FirstMessage?.Text)
+            responseMessage = new RoleDialogModel(AgentRole.Function, response.FirstMessage?.Text ?? string.Empty)
             {
                 CurrentAgentId = agent.Id,
-                MessageId = conversations.Last().MessageId,
+                MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
                 ToolCallId = toolResult.Id,
                 FunctionName = toolResult.Name,
                 FunctionArgs = JsonSerializer.Serialize(toolResult.Input)
@@ -62,10 +62,10 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
         else
         {
             var message = response.FirstMessage;
-            responseMessage = new RoleDialogModel(AgentRole.Assistant, message.Text)
+            responseMessage = new RoleDialogModel(AgentRole.Assistant, message?.Text ?? string.Empty)
             {
                 CurrentAgentId = agent.Id,
-                MessageId = conversations.Last().MessageId
+                MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
             };
         }
 
@@ -77,8 +77,8 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
             Prompt = prompt,
             Provider = Provider,
             Model = _model,
-            PromptCount = response.Usage.InputTokens,
-            CompletionCount = response.Usage.OutputTokens
+            PromptCount = response.Usage?.InputTokens ?? 0,
+            CompletionCount = response.Usage?.OutputTokens ?? 0
         });
     }
 
@@ -120,7 +120,14 @@ public Task<bool> GetChatCompletionsStreamingAsync(Agent agent, List<RoleDialogM
         prompt += "\r\n\r\n" + response_with_function;*/
 
         var messages = new List<Message>();
-        foreach (var conv in conversations)
+        var filteredMessages = conversations.Select(x => x).ToList();
+        var firstUserMsgIdx = filteredMessages.FindIndex(x => x.Role == AgentRole.User);
+        if (firstUserMsgIdx > 0)
+        {
+            filteredMessages = filteredMessages.Where((_, idx) => idx >= firstUserMsgIdx).ToList();
+        }
+
+        foreach (var conv in filteredMessages)
         {
             if (conv.Role == AgentRole.User)
             {
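These changes make the Anthropic provider defensive against missing data (empty response text, missing usage counts, an empty conversation list) and, in the streaming path, drop any messages that precede the first user turn before the Message list is built, presumably because the Anthropic Messages API expects a conversation to start with a user message.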

src/Plugins/BotSharp.Plugin.AzureOpenAI/BotSharp.Plugin.AzureOpenAI.csproj

Lines changed: 1 addition & 1 deletion
@@ -11,7 +11,7 @@
   </PropertyGroup>
 
   <ItemGroup>
-    <PackageReference Include="Azure.AI.OpenAI" Version="2.0.0-beta.2" />
+    <PackageReference Include="Azure.AI.OpenAI" Version="2.0.0" />
     <PackageReference Include="Microsoft.Extensions.Http" Version="8.0.0" />
   </ItemGroup>
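Azure.AI.OpenAI moves from the 2.0.0-beta.2 preview to the 2.0.0 GA release; the renamed SDK members used in the provider files below follow from this upgrade.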

src/Plugins/BotSharp.Plugin.AzureOpenAI/Providers/Audio/AudioCompletionProvider.SpeechToText.cs

Lines changed: 1 addition & 1 deletion
@@ -24,7 +24,7 @@ private AudioTranscriptionOptions PrepareTranscriptionOptions(string? text)
         var options = new AudioTranscriptionOptions
         {
             ResponseFormat = format,
-            Granularities = granularity,
+            TimestampGranularities = granularity,
             Temperature = temperature,
             Prompt = text
         };
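This tracks the GA rename of the transcription option from Granularities to TimestampGranularities; the other options are unchanged.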

src/Plugins/BotSharp.Plugin.AzureOpenAI/Providers/Audio/AudioCompletionProvider.TextToSpeech.cs

Lines changed: 2 additions & 2 deletions
@@ -10,7 +10,7 @@ public async Task<BinaryData> GenerateAudioFromTextAsync(string text)
             .GetAudioClient(_model);
 
         var (voice, options) = PrepareGenerationOptions();
-        var result = await audioClient.GenerateSpeechFromTextAsync(text, voice, options);
+        var result = await audioClient.GenerateSpeechAsync(text, voice, options);
         return result.Value;
     }
 
@@ -24,7 +24,7 @@ public async Task<BinaryData> GenerateAudioFromTextAsync(string text)
         var options = new SpeechGenerationOptions
         {
             ResponseFormat = format,
-            Speed = speed
+            SpeedRatio = speed
         };
 
         return (voice, options);
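Two more GA renames: GenerateSpeechFromTextAsync becomes GenerateSpeechAsync on the audio client, and SpeechGenerationOptions.Speed becomes SpeedRatio.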

src/Plugins/BotSharp.Plugin.AzureOpenAI/Providers/Chat/ChatCompletionProvider.cs

Lines changed: 57 additions & 39 deletions
@@ -50,14 +50,15 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
         var content = value.Content;
         var text = content.FirstOrDefault()?.Text ?? string.Empty;
 
-        if (reason == ChatFinishReason.FunctionCall)
+        if (reason == ChatFinishReason.FunctionCall || reason == ChatFinishReason.ToolCalls)
         {
+            var toolCall = value.ToolCalls.FirstOrDefault();
             responseMessage = new RoleDialogModel(AgentRole.Function, text)
             {
                 CurrentAgentId = agent.Id,
                 MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
-                FunctionName = value.FunctionCall.FunctionName,
-                FunctionArgs = value.FunctionCall.FunctionArguments
+                FunctionName = toolCall?.FunctionName,
+                FunctionArgs = toolCall?.FunctionArguments?.ToString()
             };
 
             // Somethings LLM will generate a function name with agent name.
@@ -66,17 +67,17 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
                 responseMessage.FunctionName = responseMessage.FunctionName.Split('.').Last();
             }
         }
-        else if (reason == ChatFinishReason.ToolCalls)
-        {
-            var toolCall = value.ToolCalls.FirstOrDefault();
-            responseMessage = new RoleDialogModel(AgentRole.Function, text)
-            {
-                CurrentAgentId = agent.Id,
-                MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
-                FunctionName = toolCall?.FunctionName,
-                FunctionArgs = toolCall?.FunctionArguments
-            };
-        }
+        //else if (reason == ChatFinishReason.ToolCalls)
+        //{
+        //    var toolCall = value.ToolCalls.FirstOrDefault();
+        //    responseMessage = new RoleDialogModel(AgentRole.Function, text)
+        //    {
+        //        CurrentAgentId = agent.Id,
+        //        MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
+        //        FunctionName = toolCall?.FunctionName,
+        //        FunctionArgs = toolCall?.FunctionArguments
+        //    };
+        //}
         else
         {
             responseMessage = new RoleDialogModel(AgentRole.Assistant, text)
@@ -113,8 +114,8 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
             Prompt = prompt,
             Provider = Provider,
             Model = _model,
-            PromptCount = value?.Usage?.InputTokens ?? 0,
-            CompletionCount = value?.Usage?.OutputTokens ?? 0
+            PromptCount = value?.Usage?.InputTokenCount ?? 0,
+            CompletionCount = value?.Usage?.OutputTokenCount ?? 0
         });
     }
 
@@ -157,20 +158,21 @@ public async Task<bool> GetChatCompletionsAsync(Agent agent,
             Prompt = prompt,
             Provider = Provider,
             Model = _model,
-            PromptCount = response.Value.Usage.InputTokens,
-            CompletionCount = response.Value.Usage.OutputTokens
+            PromptCount = response.Value?.Usage?.InputTokenCount ?? 0,
+            CompletionCount = response.Value?.Usage?.OutputTokenCount ?? 0
         });
     }
 
-        if (reason == ChatFinishReason.FunctionCall)
+        if (reason == ChatFinishReason.FunctionCall || reason == ChatFinishReason.ToolCalls)
         {
-            _logger.LogInformation($"[{agent.Name}]: {value.FunctionCall.FunctionName}({value.FunctionCall.FunctionArguments})");
+            var toolCall = value.ToolCalls?.FirstOrDefault();
+            _logger.LogInformation($"[{agent.Name}]: {toolCall?.FunctionName}({toolCall?.FunctionArguments})");
 
             var funcContextIn = new RoleDialogModel(AgentRole.Function, text)
             {
                 CurrentAgentId = agent.Id,
-                FunctionName = value.FunctionCall?.FunctionName,
-                FunctionArgs = value.FunctionCall?.FunctionArguments
+                FunctionName = toolCall?.FunctionName,
+                FunctionArgs = toolCall?.FunctionArguments?.ToString()
             };
 
             // Somethings LLM will generate a function name with agent name.
@@ -201,19 +203,20 @@ public async Task<bool> GetChatCompletionsStreamingAsync(Agent agent, List<RoleD
 
         await foreach (var choice in response)
         {
-            if (choice.FinishReason == ChatFinishReason.FunctionCall)
+            if (choice.FinishReason == ChatFinishReason.FunctionCall || choice.FinishReason == ChatFinishReason.ToolCalls)
             {
-                Console.Write(choice.FunctionCallUpdate?.FunctionArgumentsUpdate);
+                var update = choice.ToolCallUpdates?.FirstOrDefault()?.FunctionArgumentsUpdate?.ToString() ?? string.Empty;
+                Console.Write(update);
 
-                await onMessageReceived(new RoleDialogModel(AgentRole.Assistant, choice.FunctionCallUpdate?.FunctionArgumentsUpdate));
+                await onMessageReceived(new RoleDialogModel(AgentRole.Assistant, update));
                 continue;
             }
 
             if (choice.ContentUpdate.IsNullOrEmpty()) continue;
 
             _logger.LogInformation(choice.ContentUpdate[0]?.Text);
 
-            await onMessageReceived(new RoleDialogModel(choice.Role.ToString(), choice.ContentUpdate[0]?.Text ?? string.Empty));
+            await onMessageReceived(new RoleDialogModel(choice.Role?.ToString() ?? ChatMessageRole.Assistant.ToString(), choice.ContentUpdate[0]?.Text ?? string.Empty));
         }
 
         return true;
@@ -235,7 +238,7 @@ public async Task<bool> GetChatCompletionsStreamingAsync(Agent agent, List<RoleD
         var options = new ChatCompletionOptions()
         {
             Temperature = temperature,
-            MaxTokens = maxTokens
+            MaxOutputTokenCount = maxTokens
         };
 
         foreach (var function in agent.Functions)
@@ -267,21 +270,35 @@ public async Task<bool> GetChatCompletionsStreamingAsync(Agent agent, List<RoleD
             messages.Add(sample.Role == AgentRole.User ? new UserChatMessage(sample.Content) : new AssistantChatMessage(sample.Content));
         }
 
-        foreach (var message in conversations)
+        var filteredMessages = conversations.Select(x => x).ToList();
+        var firstUserMsgIdx = filteredMessages.FindIndex(x => x.Role == AgentRole.User);
+        if (firstUserMsgIdx > 0)
+        {
+            filteredMessages = filteredMessages.Where((_, idx) => idx >= firstUserMsgIdx).ToList();
+        }
+
+        foreach (var message in filteredMessages)
        {
             if (message.Role == AgentRole.Function)
             {
-                messages.Add(new AssistantChatMessage(string.Empty)
+                //messages.Add(new AssistantChatMessage(string.Empty)
+                //{
+                //    FunctionCall = new ChatFunctionCall(message.FunctionName, message.FunctionArgs ?? string.Empty)
+                //});
+
+                //messages.Add(new FunctionChatMessage(message.FunctionName, message.Content));
+
+                messages.Add(new AssistantChatMessage(new List<ChatToolCall>
                 {
-                    FunctionCall = new ChatFunctionCall(message.FunctionName, message.FunctionArgs ?? string.Empty)
-                });
+                    ChatToolCall.CreateFunctionToolCall(message.FunctionName, message.FunctionName, BinaryData.FromString(message.FunctionArgs ?? string.Empty))
+                }));
 
-                messages.Add(new FunctionChatMessage(message.FunctionName, message.Content));
+                messages.Add(new ToolChatMessage(message.FunctionName, message.Content));
             }
             else if (message.Role == AgentRole.User)
             {
                 var text = !string.IsNullOrWhiteSpace(message.Payload) ? message.Payload : message.Content;
-                var textPart = ChatMessageContentPart.CreateTextMessageContentPart(text);
+                var textPart = ChatMessageContentPart.CreateTextPart(text);
                 var contentParts = new List<ChatMessageContentPart> { textPart };
 
                 if (allowMultiModal && !message.Files.IsNullOrEmpty())
@@ -291,20 +308,20 @@ public async Task<bool> GetChatCompletionsStreamingAsync(Agent agent, List<RoleD
                     if (!string.IsNullOrEmpty(file.FileData))
                     {
                         var (contentType, bytes) = FileUtility.GetFileInfoFromData(file.FileData);
-                        var contentPart = ChatMessageContentPart.CreateImageMessageContentPart(BinaryData.FromBytes(bytes), contentType, ImageChatMessageContentPartDetail.Low);
+                        var contentPart = ChatMessageContentPart.CreateImagePart(BinaryData.FromBytes(bytes), contentType, ChatImageDetailLevel.Low);
                         contentParts.Add(contentPart);
                     }
                     else if (!string.IsNullOrEmpty(file.FileStorageUrl))
                     {
                         var contentType = FileUtility.GetFileContentType(file.FileStorageUrl);
                         var bytes = fileStorage.GetFileBytes(file.FileStorageUrl);
-                        var contentPart = ChatMessageContentPart.CreateImageMessageContentPart(BinaryData.FromBytes(bytes), contentType, ImageChatMessageContentPartDetail.Low);
+                        var contentPart = ChatMessageContentPart.CreateImagePart(BinaryData.FromBytes(bytes), contentType, ChatImageDetailLevel.Low);
                         contentParts.Add(contentPart);
                     }
                     else if (!string.IsNullOrEmpty(file.FileUrl))
                     {
                         var uri = new Uri(file.FileUrl);
-                        var contentPart = ChatMessageContentPart.CreateImageMessageContentPart(uri, ImageChatMessageContentPartDetail.Low);
+                        var contentPart = ChatMessageContentPart.CreateImagePart(uri, ChatImageDetailLevel.Low);
                         contentParts.Add(contentPart);
                     }
                 }
@@ -347,7 +364,7 @@ private string GetPrompt(IEnumerable<ChatMessage> messages, ChatCompletionOption
             .Where(x => x as SystemChatMessage == null)
             .Select(x =>
             {
-                var fnMessage = x as FunctionChatMessage;
+                var fnMessage = x as ToolChatMessage;
                 if (fnMessage != null)
                 {
                     return $"{AgentRole.Function}: {fnMessage.Content.FirstOrDefault()?.Text ?? string.Empty}";
@@ -365,8 +382,9 @@ private string GetPrompt(IEnumerable<ChatMessage> messages, ChatCompletionOption
                 var assistMessage = x as AssistantChatMessage;
                 if (assistMessage != null)
                 {
-                    return assistMessage.FunctionCall != null ?
-                        $"{AgentRole.Assistant}: Call function {assistMessage.FunctionCall.FunctionName}({assistMessage.FunctionCall.FunctionArguments})" :
+                    var toolCall = assistMessage.ToolCalls?.FirstOrDefault();
+                    return toolCall != null ?
+                        $"{AgentRole.Assistant}: Call function {toolCall?.FunctionName}({toolCall?.FunctionArguments})" :
                         $"{AgentRole.Assistant}: {assistMessage.Content.FirstOrDefault()?.Text ?? string.Empty}";
                 }
 
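Most of this file tracks the 2.0.0 GA chat API: FunctionCall and ToolCalls finish reasons are handled in a single branch, usage is read from InputTokenCount/OutputTokenCount, MaxTokens becomes MaxOutputTokenCount, FunctionChatMessage gives way to ToolChatMessage, content parts are built with CreateTextPart/CreateImagePart and ChatImageDetailLevel, and, as in the Anthropic provider, leading non-user messages are skipped before the history is rebuilt. A condensed sketch of how a past function turn is now replayed as an assistant tool call followed by a tool result, using only names that appear in this diff (note the tool-call id simply reuses the function name here):

// Assistant turn that declared the tool call
messages.Add(new AssistantChatMessage(new List<ChatToolCall>
{
    ChatToolCall.CreateFunctionToolCall(
        message.FunctionName,                                        // tool call id (function name reused as id)
        message.FunctionName,                                        // function name
        BinaryData.FromString(message.FunctionArgs ?? string.Empty)) // arguments as JSON
}));

// Tool turn carrying the function's result back to the model
messages.Add(new ToolChatMessage(message.FunctionName, message.Content));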

src/Plugins/BotSharp.Plugin.AzureOpenAI/Providers/Embedding/TextEmbeddingProvider.cs

Lines changed: 2 additions & 2 deletions
@@ -31,7 +31,7 @@ public async Task<float[]> GetVectorAsync(string text)
         var options = PrepareOptions();
         var response = await embeddingClient.GenerateEmbeddingAsync(text, options);
         var value = response.Value;
-        return value.Vector.ToArray();
+        return value.ToFloats().ToArray();
     }
 
     public async Task<List<float[]>> GetVectorsAsync(List<string> texts)
@@ -41,7 +41,7 @@ public async Task<List<float[]>> GetVectorsAsync(List<string> texts)
         var options = PrepareOptions();
         var response = await embeddingClient.GenerateEmbeddingsAsync(texts, options);
         var value = response.Value;
-        return value.Select(x => x.Vector.ToArray()).ToList();
+        return value.Select(x => x.ToFloats().ToArray()).ToList();
     }
 
     public void SetModelName(string model)
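In the GA SDK the embedding vector is read through ToFloats() rather than the beta-era Vector property; the call flow is otherwise unchanged.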

src/Plugins/BotSharp.Plugin.AzureOpenAI/Providers/ProviderHelper.cs

Lines changed: 2 additions & 2 deletions
@@ -1,5 +1,5 @@
 using Azure.AI.OpenAI;
-using Azure;
+using System.ClientModel;
 
 namespace BotSharp.Plugin.AzureOpenAI.Providers;
 
@@ -9,7 +9,7 @@ public static AzureOpenAIClient GetClient(string provider, string model, IServic
     {
         var settingsService = services.GetRequiredService<ILlmProviderService>();
         var settings = settingsService.GetSetting(provider, model);
-        var client = new AzureOpenAIClient(new Uri(settings.Endpoint), new AzureKeyCredential(settings.ApiKey));
+        var client = new AzureOpenAIClient(new Uri(settings.Endpoint), new ApiKeyCredential(settings.ApiKey));
        return client;
     }
 
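The client is now authenticated with System.ClientModel's ApiKeyCredential instead of Azure's AzureKeyCredential, matching the constructor the GA AzureOpenAIClient accepts. A minimal sketch of the new construction (endpoint and key are placeholders):

using System.ClientModel;
using Azure.AI.OpenAI;

var client = new AzureOpenAIClient(
    new Uri("https://example.openai.azure.com/"),  // placeholder endpoint
    new ApiKeyCredential("<api-key>"));            // placeholder key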

src/Plugins/BotSharp.Plugin.OpenAI/BotSharp.Plugin.OpenAI.csproj

Lines changed: 1 addition & 1 deletion
@@ -11,7 +11,7 @@
   </PropertyGroup>
 
   <ItemGroup>
-    <PackageReference Include="OpenAI" Version="2.0.0-beta.5" />
+    <PackageReference Include="OpenAI" Version="2.0.0" />
     <PackageReference Include="Microsoft.Extensions.Http" Version="8.0.0" />
   </ItemGroup>
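The OpenAI package gets the matching bump from 2.0.0-beta.5 to 2.0.0, keeping both plugins on the same GA client surface.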

src/Plugins/BotSharp.Plugin.OpenAI/Providers/Audio/AudioCompletionProvider.SpeechToText.cs

Lines changed: 1 addition & 1 deletion
@@ -24,7 +24,7 @@ private AudioTranscriptionOptions PrepareTranscriptionOptions(string? text)
         var options = new AudioTranscriptionOptions
         {
             ResponseFormat = format,
-            Granularities = granularity,
+            TimestampGranularities = granularity,
             Temperature = temperature,
             Prompt = text
         };
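Same Granularities to TimestampGranularities rename as in the Azure OpenAI plugin above.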
