using System.Runtime.CompilerServices;

using Cnblogs.DashScope.Sdk;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Services;
using Microsoft.SemanticKernel.TextGeneration;

namespace Cnblogs.SemanticKernel.Connectors.DashScope;

/// <summary>
/// DashScope chat completion and text generation service.
/// </summary>
public sealed class DashScopeChatCompletionService : IChatCompletionService, ITextGenerationService
{
    private readonly IDashScopeClient _dashScopeClient;
    private readonly Dictionary<string, object?> _attributes = new();
    private readonly string _modelId;

    /// <summary>
    /// Creates a new DashScope chat completion service.
    /// </summary>
    /// <param name="modelId">The model id used when execution settings do not specify one.</param>
    /// <param name="dashScopeClient">The underlying DashScope API client.</param>
    public DashScopeChatCompletionService(string modelId, IDashScopeClient dashScopeClient)
    {
        _dashScopeClient = dashScopeClient;
        _modelId = modelId;
        _attributes.Add(AIServiceExtensions.ModelIdKey, _modelId);
    }

    /// <inheritdoc />
    public IReadOnlyDictionary<string, object?> Attributes => _attributes;

    /// <inheritdoc />
    public async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsync(
        ChatHistory chatHistory,
        PromptExecutionSettings? executionSettings = null,
        Kernel? kernel = null,
        CancellationToken cancellationToken = default)
    {
        var chatMessages = chatHistory.ToChatMessages();
        var chatParameters = ResolveSettings(executionSettings);
        chatParameters.IncrementalOutput = false;
        chatParameters.ResultFormat = ResultFormats.Message;
        var response = await _dashScopeClient.GetTextCompletionAsync(
            new ModelRequest<TextGenerationInput, ITextGenerationParameters>
            {
                Input = new TextGenerationInput { Messages = chatMessages },
                Model = ResolveModelId(chatParameters.ModelId),
                Parameters = chatParameters
            },
            cancellationToken).ConfigureAwait(false);

        // ResultFormat is Message, so the service populates Choices rather than Output.Text.
        var message = response.Output.Choices![0].Message;
        var chatMessageContent = new ChatMessageContent(
            new AuthorRole(message.Role),
            message.Content,
            modelId: _modelId, // keep parity with the streaming overload, which reports the model id
            metadata: response.ToMetaData());
        return [chatMessageContent];
    }

    /// <inheritdoc />
    public async IAsyncEnumerable<StreamingChatMessageContent> GetStreamingChatMessageContentsAsync(
        ChatHistory chatHistory,
        PromptExecutionSettings? executionSettings = null,
        Kernel? kernel = null,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        var chatMessages = chatHistory.ToChatMessages();
        var parameters = ResolveSettings(executionSettings);
        parameters.IncrementalOutput = true;
        parameters.ResultFormat = ResultFormats.Message;
        var responses = _dashScopeClient.GetTextCompletionStreamAsync(
            new ModelRequest<TextGenerationInput, ITextGenerationParameters>
            {
                Input = new TextGenerationInput { Messages = chatMessages },
                Model = ResolveModelId(parameters.ModelId),
                Parameters = parameters
            },
            cancellationToken);

        await foreach (var response in responses.ConfigureAwait(false))
        {
            var message = response.Output.Choices![0].Message;
            yield return new StreamingChatMessageContent(
                new AuthorRole(message.Role),
                message.Content,
                modelId: _modelId,
                metadata: response.ToMetaData());
        }
    }

    /// <inheritdoc />
    public async Task<IReadOnlyList<TextContent>> GetTextContentsAsync(
        string prompt,
        PromptExecutionSettings? executionSettings = null,
        Kernel? kernel = null,
        CancellationToken cancellationToken = default)
    {
        var chatParameters = ResolveSettings(executionSettings);
        chatParameters.IncrementalOutput = false;
        chatParameters.ResultFormat = ResultFormats.Text;
        var response = await _dashScopeClient.GetTextCompletionAsync(
            new ModelRequest<TextGenerationInput, ITextGenerationParameters>
            {
                Input = new TextGenerationInput { Prompt = prompt },
                Model = ResolveModelId(chatParameters.ModelId),
                Parameters = chatParameters
            },
            cancellationToken).ConfigureAwait(false);
        return [new TextContent(response.Output.Text, _modelId, metadata: response.ToMetaData())];
    }

    /// <inheritdoc />
    public async IAsyncEnumerable<StreamingTextContent> GetStreamingTextContentsAsync(
        string prompt,
        PromptExecutionSettings? executionSettings = null,
        Kernel? kernel = null,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        var parameters = ResolveSettings(executionSettings);
        parameters.IncrementalOutput = true;
        parameters.ResultFormat = ResultFormats.Text;
        var responses = _dashScopeClient.GetTextCompletionStreamAsync(
            new ModelRequest<TextGenerationInput, ITextGenerationParameters>
            {
                Input = new TextGenerationInput { Prompt = prompt },
                Model = ResolveModelId(parameters.ModelId),
                Parameters = parameters
            },
            cancellationToken);

        await foreach (var response in responses.ConfigureAwait(false))
        {
            yield return new StreamingTextContent(
                response.Output.Text,
                modelId: _modelId,
                metadata: response.ToMetaData());
        }
    }

    // Converts caller-supplied settings to DashScope settings, falling back to defaults when absent.
    private static DashScopePromptExecutionSettings ResolveSettings(PromptExecutionSettings? executionSettings)
        => DashScopePromptExecutionSettings.FromPromptExecutionSettings(executionSettings)
           ?? new DashScopePromptExecutionSettings();

    // Settings-level model id wins over the service default when present.
    private string ResolveModelId(string? settingsModelId)
        => string.IsNullOrEmpty(settingsModelId) ? _modelId : settingsModelId;
}
|
0 commit comments