diff --git a/README.md b/README.md
index 87766459..c3dac65f 100644
--- a/README.md
+++ b/README.md
@@ -13,7 +13,7 @@ Open Chat Playground (OCP) is a web UI that is able to connect virtually any LLM
 - [x] [Docker Model Runner](https://docs.docker.com/ai/model-runner)
 - [x] [Foundry Local](https://learn.microsoft.com/azure/ai-foundry/foundry-local/what-is-foundry-local)
 - [x] [Hugging Face](https://huggingface.co/docs)
-- [ ] [Ollama](https://github.com/ollama/ollama/tree/main/docs)
+- [x] [Ollama](https://github.com/ollama/ollama/tree/main/docs)
 - [ ] [Anthropic](https://docs.anthropic.com)
 - [ ] [Naver](https://api.ncloud-docs.com/docs/ai-naver-clovastudio-summary)
 - [x] [LG](https://github.com/LG-AI-EXAONE)
@@ -66,6 +66,7 @@ Open Chat Playground (OCP) is a web UI that is able to connect virtually any LLM
 - [Use Docker Model Runner](./docs/docker-model-runner.md#run-on-local-machine)
 - [Use Foundry Local](./docs/foundry-local.md#run-on-local-machine)
 - [Use Hugging Face](./docs/hugging-face.md#run-on-local-machine)
+- [Use Ollama](./docs/ollama.md#run-on-local-machine)
 - [Use LG](./docs/lg.md#run-on-local-machine)
 - [Use OpenAI](./docs/openai.md#run-on-local-machine)
 - [Use Upstage](./docs/upstage.md#run-on-local-machine)
@@ -78,6 +79,7 @@ Open Chat Playground (OCP) is a web UI that is able to connect virtually any LLM
 - [Use Docker Model Runner](./docs/docker-model-runner.md#run-in-local-container)
 - ~~Use Foundry Local~~ 👉 NOT SUPPORTED
 - [Use Hugging Face](./docs/hugging-face.md#run-in-local-container)
+- [Use Ollama](./docs/ollama.md#run-in-local-container)
 - [Use LG](./docs/lg.md#run-in-local-container)
 - [Use OpenAI](./docs/openai.md#run-in-local-container)
 - [Use Upstage](./docs/upstage.md#run-in-local-container)
@@ -90,6 +92,7 @@ Open Chat Playground (OCP) is a web UI that is able to connect virtually any LLM
 - ~~Use Docker Model Runner~~ 👉 NOT SUPPORTED
 - ~~Use Foundry Local~~ 👉 NOT SUPPORTED
 - [Use Hugging Face](./docs/hugging-face.md#run-on-azure)
+- [Use Ollama](./docs/ollama.md#run-on-azure)
 - [Use LG](./docs/lg.md#run-on-azure)
 - [Use OpenAI](./docs/openai.md#run-on-azure)
 - [Use Upstage](./docs/upstage.md#run-on-azure)
diff --git a/docs/README.md b/docs/README.md
index 65b06539..15f8cfa2 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -6,6 +6,7 @@
 - [Docker Model Runner](./docker-model-runner.md)
 - [Foundry Local](./foundry-local.md)
 - [Hugging Face](./hugging-face.md)
+- [Ollama](./ollama.md)
 - [LG](./lg.md)
 - [OpenAI](./openai.md)
 - [Upstage](./upstage.md)
diff --git a/docs/ollama.md b/docs/ollama.md
new file mode 100644
index 00000000..a332564c
--- /dev/null
+++ b/docs/ollama.md
@@ -0,0 +1,250 @@
# OpenChat Playground with Ollama

This page describes how to run OpenChat Playground (OCP) with [Ollama](https://ollama.com/search) integration.

## Get the repository root

1. Get the repository root.

    ```bash
    # bash/zsh
    REPOSITORY_ROOT=$(git rev-parse --show-toplevel)
    ```

    ```powershell
    # PowerShell
    $REPOSITORY_ROOT = git rev-parse --show-toplevel
    ```

## Run on local machine

1. Make sure Ollama is installed and running on your local machine. If not, install Ollama from [ollama.com](https://ollama.com/) and start the service.

    ```bash
    ollama serve
    ```

1. Pull the model you want to use. The default model OCP uses is [llama3.2](https://ollama.com/library/llama3.2).

    ```bash
    ollama pull llama3.2
    ```

    Alternatively, if you want to run with a different model than the default, say [qwen](https://ollama.com/library/qwen), download it first by running the following command.

    ```bash
    ollama pull qwen
    ```

1. Make sure you are at the repository root.

    ```bash
    cd $REPOSITORY_ROOT
    ```

1. Run the app.

    ```bash
    # bash/zsh
    dotnet run --project $REPOSITORY_ROOT/src/OpenChat.PlaygroundApp -- \
        --connector-type Ollama
    ```

    ```powershell
    # PowerShell
    dotnet run --project $REPOSITORY_ROOT\src\OpenChat.PlaygroundApp -- `
        --connector-type Ollama
    ```

    Alternatively, if you want to run with a different model, say [qwen](https://ollama.com/library/qwen), pass it through the `--model` option.

    ```bash
    # bash/zsh
    dotnet run --project $REPOSITORY_ROOT/src/OpenChat.PlaygroundApp -- \
        --connector-type Ollama \
        --model qwen
    ```

    ```powershell
    # PowerShell
    dotnet run --project $REPOSITORY_ROOT\src\OpenChat.PlaygroundApp -- `
        --connector-type Ollama `
        --model qwen
    ```

1. Open your web browser, navigate to `http://localhost:5280`, and enter prompts.
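If your Ollama instance listens on an address other than the default `http://localhost:11434`, you can point OCP at it with the `--base-url` option, the same option the container instructions below use. A minimal sketch, assuming Ollama is reachable at `http://192.168.0.10:11434` (a hypothetical address):

```bash
# bash/zsh - assumes Ollama listens at http://192.168.0.10:11434 (hypothetical address)
dotnet run --project $REPOSITORY_ROOT/src/OpenChat.PlaygroundApp -- \
    --connector-type Ollama \
    --base-url http://192.168.0.10:11434
```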
## Run in local container

This approach runs OpenChat Playground in a container while connecting to Ollama running on the host machine.

1. Make sure Ollama is up and running on the host machine. By default, it listens on `http://localhost:11434`.

    ```bash
    ollama serve
    ```

1. Pull the model you want to use, and verify that Ollama is accessible.

    ```bash
    # bash/zsh
    ollama pull llama3.2
    curl http://localhost:11434/api/version
    ```

    ```powershell
    # PowerShell
    ollama pull llama3.2
    Invoke-RestMethod -Uri http://localhost:11434/api/version
    ```

1. Make sure you are at the repository root.

    ```bash
    cd $REPOSITORY_ROOT
    ```

1. Build a container.

    ```bash
    docker build -f Dockerfile -t openchat-playground:latest .
    ```

1. Run the app.

    ```bash
    # bash/zsh - from locally built container
    docker run -i --rm -p 8080:8080 openchat-playground:latest \
        --connector-type Ollama \
        --base-url http://host.docker.internal:11434
    ```

    ```powershell
    # PowerShell - from locally built container
    docker run -i --rm -p 8080:8080 openchat-playground:latest `
        --connector-type Ollama `
        --base-url http://host.docker.internal:11434
    ```

    ```bash
    # bash/zsh - from GitHub Container Registry
    docker run -i --rm -p 8080:8080 ghcr.io/aliencube/open-chat-playground/openchat-playground:latest \
        --connector-type Ollama \
        --base-url http://host.docker.internal:11434
    ```

    ```powershell
    # PowerShell - from GitHub Container Registry
    docker run -i --rm -p 8080:8080 ghcr.io/aliencube/open-chat-playground/openchat-playground:latest `
        --connector-type Ollama `
        --base-url http://host.docker.internal:11434
    ```

    Alternatively, if you want to run with a different model, say [qwen](https://ollama.com/library/qwen), make sure you've already downloaded the model by running the `ollama pull qwen` command.

    ```bash
    ollama pull qwen
    ```

    ```bash
    # bash/zsh - from locally built container
    docker run -i --rm -p 8080:8080 openchat-playground:latest \
        --connector-type Ollama \
        --base-url http://host.docker.internal:11434 \
        --model qwen
    ```

    ```powershell
    # PowerShell - from locally built container
    docker run -i --rm -p 8080:8080 openchat-playground:latest `
        --connector-type Ollama `
        --base-url http://host.docker.internal:11434 `
        --model qwen
    ```

    ```bash
    # bash/zsh - from GitHub Container Registry
    docker run -i --rm -p 8080:8080 ghcr.io/aliencube/open-chat-playground/openchat-playground:latest \
        --connector-type Ollama \
        --base-url http://host.docker.internal:11434 \
        --model qwen
    ```

    ```powershell
    # PowerShell - from GitHub Container Registry
    docker run -i --rm -p 8080:8080 ghcr.io/aliencube/open-chat-playground/openchat-playground:latest `
        --connector-type Ollama `
        --base-url http://host.docker.internal:11434 `
        --model qwen
    ```

    > **NOTE**: Use `host.docker.internal:11434` to connect to Ollama running on the host machine from inside the container.
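    On Linux, `host.docker.internal` is not defined by default. Docker's `--add-host` flag with the special `host-gateway` value maps it to the host, and you may also need to start Ollama with `OLLAMA_HOST=0.0.0.0` so it listens beyond localhost. A minimal sketch, assuming the locally built image above:

    ```bash
    # bash/zsh - Linux only: map host.docker.internal to the host gateway
    docker run -i --rm -p 8080:8080 \
        --add-host=host.docker.internal:host-gateway \
        openchat-playground:latest \
        --connector-type Ollama \
        --base-url http://host.docker.internal:11434
    ```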
1. Open your web browser, navigate to `http://localhost:8080`, and enter prompts.

## Run on Azure

1. Make sure you are at the repository root.

    ```bash
    cd $REPOSITORY_ROOT
    ```

1. Login to Azure.

    ```bash
    # Login to Azure Dev CLI
    azd auth login
    ```

1. Check login status.

    ```bash
    # Azure Dev CLI
    azd auth login --check-status
    ```

1. Initialize the `azd` template.

    ```bash
    azd init
    ```

    > **NOTE**: You will be asked to provide an environment name for provisioning.

1. Set the connector type to `Ollama`.

    ```bash
    azd env set CONNECTOR_TYPE "Ollama"
    ```

    The default model OCP uses is [llama3.2](https://ollama.com/library/llama3.2). If you want to run with a different model, say [qwen](https://ollama.com/library/qwen), add it to the azd environment variables.

    ```bash
    azd env set OLLAMA_MODEL "qwen"
    ```
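    To double-check the values before provisioning, you can print the current azd environment. `azd env get-values` is a standard Azure Dev CLI command; the variable names follow this repository's `infra/main.parameters.json`.

    ```bash
    # Show only the connector-related values
    azd env get-values | grep -E "CONNECTOR_TYPE|OLLAMA_MODEL"
    ```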
1. By default, the app uses a serverless GPU with NVIDIA T4 (`NC8as-T4`). If you want to use NVIDIA A100 instead, set the GPU profile.

    ```bash
    azd env set GPU_PROFILE_NAME "NC24-A100"
    ```

    If you want to know more about serverless GPUs, visit [Using serverless GPUs in Azure Container Apps](https://learn.microsoft.com/azure/container-apps/gpu-serverless-overview#use-serverless-gpus).

1. Run the following command to provision and deploy the app.

    ```bash
    azd up
    ```

    > **NOTE**: You will be asked to provide an Azure subscription and location for deployment.

    > **IMPORTANT**: Due to limited GPU availability, the deployable regions are limited to `Australia East`, `Sweden Central` and `West US 3`. For more details, visit [Using serverless GPUs in Azure Container Apps](https://learn.microsoft.com/azure/container-apps/gpu-serverless-overview#supported-regions).

    Once deployed, you will be able to see the deployed OCP app URL.

1. Open your web browser, navigate to the OCP app URL, and enter prompts.

1. Clean up all the resources.

    ```bash
    azd down --force --purge
    ```
\ No newline at end of file
diff --git a/infra/main.parameters.json b/infra/main.parameters.json
index 3b1a159f..68881e8d 100644
--- a/infra/main.parameters.json
+++ b/infra/main.parameters.json
@@ -41,6 +41,9 @@
     "huggingFaceModel": {
       "value": "${HUGGING_FACE_MODEL=hf.co/Qwen/Qwen3-0.6B-GGUF}"
     },
+    "ollamaModel": {
+      "value": "${OLLAMA_MODEL=llama3.2}"
+    },
     "lgModel": {
       "value": "${LG_MODEL=hf.co/LGAI-EXAONE/EXAONE-4.0-1.2B-GGUF}"
     },
diff --git a/infra/resources.bicep b/infra/resources.bicep
index 3151514d..f24034a3 100644
--- a/infra/resources.bicep
+++ b/infra/resources.bicep
@@ -457,4 +457,4 @@ module ollama 'br/public:avm/res/app/container-app:0.18.1' = if (useOllama == tr
 }

 output AZURE_CONTAINER_REGISTRY_ENDPOINT string = containerRegistry.outputs.loginServer
-output AZURE_RESOURCE_OPENCHAT_PLAYGROUNDAPP_ID string = openchatPlaygroundApp.outputs.resourceId
+output AZURE_RESOURCE_OPENCHAT_PLAYGROUNDAPP_ID string = openchatPlaygroundApp.outputs.resourceId
\ No newline at end of file
diff --git a/src/OpenChat.PlaygroundApp/Abstractions/LanguageModelConnector.cs b/src/OpenChat.PlaygroundApp/Abstractions/LanguageModelConnector.cs
index 1c3ef29b..929d3cc4 100644
--- a/src/OpenChat.PlaygroundApp/Abstractions/LanguageModelConnector.cs
+++ b/src/OpenChat.PlaygroundApp/Abstractions/LanguageModelConnector.cs
@@ -42,6 +42,7 @@ public static async Task<IChatClient> CreateChatClientAsync(AppSettings settings
             ConnectorType.DockerModelRunner => new DockerModelRunnerConnector(settings),
             ConnectorType.FoundryLocal => new FoundryLocalConnector(settings),
             ConnectorType.HuggingFace => new HuggingFaceConnector(settings),
+            ConnectorType.Ollama => new OllamaConnector(settings),
             ConnectorType.LG => new LGConnector(settings),
             ConnectorType.OpenAI => new OpenAIConnector(settings),
             ConnectorType.Upstage => new UpstageConnector(settings),
diff --git a/src/OpenChat.PlaygroundApp/Connectors/OllamaConnector.cs b/src/OpenChat.PlaygroundApp/Connectors/OllamaConnector.cs
new file mode 100644
index 00000000..2240fcee
--- /dev/null
+++ b/src/OpenChat.PlaygroundApp/Connectors/OllamaConnector.cs
@@ -0,0 +1,63 @@
using Microsoft.Extensions.AI;

using OllamaSharp;

using OpenChat.PlaygroundApp.Abstractions;
using OpenChat.PlaygroundApp.Configurations;

namespace OpenChat.PlaygroundApp.Connectors;

/// <summary>
/// This represents the connector entity for Ollama.
/// </summary>
public class OllamaConnector(AppSettings settings) : LanguageModelConnector(settings.Ollama)
{
    private readonly AppSettings _appSettings = settings ?? throw new ArgumentNullException(nameof(settings));

    /// <inheritdoc/>
    public override bool EnsureLanguageModelSettingsValid()
    {
        var settings = this.Settings as OllamaSettings;
        if (settings is null)
        {
            throw new InvalidOperationException("Missing configuration: Ollama.");
        }

        if (string.IsNullOrWhiteSpace(settings.BaseUrl!.Trim()) == true)
        {
            throw new InvalidOperationException("Missing configuration: Ollama:BaseUrl.");
        }

        if (string.IsNullOrWhiteSpace(settings.Model!.Trim()) == true)
        {
            throw new InvalidOperationException("Missing configuration: Ollama:Model.");
        }

        return true;
    }

    /// <inheritdoc/>
    public override async Task<IChatClient> GetChatClientAsync()
    {
        var settings = this.Settings as OllamaSettings;
        var baseUrl = settings!.BaseUrl!;
        var model = settings!.Model!;

        var config = new OllamaApiClient.Configuration
        {
            Uri = new Uri(baseUrl),
            Model = model,
        };

        var chatClient = new OllamaApiClient(config);

        // Make sure the model is available locally before the client is handed out.
        var pulls = chatClient.PullModelAsync(model);
        await foreach (var pull in pulls)
        {
            Console.WriteLine($"Pull status: {pull!.Status}");
        }

        Console.WriteLine($"The {this._appSettings.ConnectorType} connector created with model: {settings.Model}");

        return await Task.FromResult(chatClient).ConfigureAwait(false);
    }
}
\ No newline at end of file
diff --git a/test/OpenChat.PlaygroundApp.Tests/Abstractions/LanguageModelConnectorTests.cs b/test/OpenChat.PlaygroundApp.Tests/Abstractions/LanguageModelConnectorTests.cs
index 9de192c6..8bdc5a69 100644
--- a/test/OpenChat.PlaygroundApp.Tests/Abstractions/LanguageModelConnectorTests.cs
+++ b/test/OpenChat.PlaygroundApp.Tests/Abstractions/LanguageModelConnectorTests.cs
@@ -75,7 +75,6 @@ public void Given_Null_Settings_When_CreateChatClient_Invoked_Then_It_Should_Thr
     [Theory]
     [InlineData(ConnectorType.Unknown)]
     [InlineData(ConnectorType.GoogleVertexAI)]
-    [InlineData(ConnectorType.Ollama)]
     [InlineData(ConnectorType.Anthropic)]
     [InlineData(ConnectorType.Naver)]
     public void Given_Unsupported_ConnectorType_When_CreateChatClient_Invoked_Then_It_Should_Throw(ConnectorType connectorType)
diff --git a/test/OpenChat.PlaygroundApp.Tests/Connectors/OllamaConnectorTests.cs b/test/OpenChat.PlaygroundApp.Tests/Connectors/OllamaConnectorTests.cs
new file mode 100644
index 00000000..6130f403
--- /dev/null
+++ b/test/OpenChat.PlaygroundApp.Tests/Connectors/OllamaConnectorTests.cs
@@ -0,0 +1,284 @@
using Microsoft.Extensions.AI;

using OpenChat.PlaygroundApp.Abstractions;
using OpenChat.PlaygroundApp.Configurations;
using OpenChat.PlaygroundApp.Connectors;

namespace OpenChat.PlaygroundApp.Tests.Connectors;

public class OllamaConnectorTests
{
    private const string BaseUrl = "http://localhost:11434";
    private const string Model = "llama3.2";

    private static AppSettings BuildAppSettings(string? baseUrl = BaseUrl, string? model = Model)
    {
        return new AppSettings
        {
            ConnectorType = ConnectorType.Ollama,
            Ollama = new OllamaSettings
            {
                BaseUrl = baseUrl,
                Model = model
            }
        };
    }

    [Trait("Category", "UnitTest")]
    [Theory]
    [InlineData(typeof(LanguageModelConnector), typeof(OllamaConnector), true)]
    [InlineData(typeof(OllamaConnector), typeof(LanguageModelConnector), false)]
    public void Given_BaseType_Then_It_Should_Be_AssignableFrom_DerivedType(Type baseType, Type derivedType, bool expected)
    {
        // Act
        var result = baseType.IsAssignableFrom(derivedType);

        // Assert
        result.ShouldBe(expected);
    }

    [Trait("Category", "UnitTest")]
    [Fact]
    public void Given_Null_Settings_When_Instantiated_Then_It_Should_Throw()
    {
        // Act
        Action action = () => new OllamaConnector(null!);

        // Assert
        action.ShouldThrow<ArgumentNullException>()
              .Message.ShouldContain("settings");
    }

    [Trait("Category", "UnitTest")]
    [Fact]
    public void Given_Settings_When_Instantiated_Then_It_Should_Return()
    {
        // Arrange
        var settings = BuildAppSettings();

        // Act
        var result = new OllamaConnector(settings);

        // Assert
        result.ShouldNotBeNull();
    }

    [Trait("Category", "UnitTest")]
    [InlineData("Ollama")]
    [Theory]
    public void Given_Null_Settings_When_EnsureLanguageModelSettingsValid_Invoked_Then_It_Should_Throw(string expectedMessage)
    {
        // Arrange
        var settings = new AppSettings
        {
            ConnectorType = ConnectorType.Ollama,
            Ollama = null
        };
        var connector = new OllamaConnector(settings);

        // Act
        Action action = () => connector.EnsureLanguageModelSettingsValid();

        // Assert
        action.ShouldThrow<InvalidOperationException>()
              .Message.ShouldContain(expectedMessage);
    }

    [Trait("Category", "UnitTest")]
    [Theory]
    [InlineData(null, typeof(NullReferenceException), "Object reference not set to an instance of an object")]
    [InlineData("", typeof(InvalidOperationException), "Ollama:BaseUrl")]
    [InlineData(" ", typeof(InvalidOperationException), "Ollama:BaseUrl")]
    [InlineData("\t\n\r", typeof(InvalidOperationException), "Ollama:BaseUrl")]
    public void Given_Invalid_BaseUrl_When_EnsureLanguageModelSettingsValid_Invoked_Then_It_Should_Throw(string? baseUrl, Type expectedType, string expectedMessage)
    {
        // Arrange
        var settings = BuildAppSettings(baseUrl: baseUrl);
        var connector = new OllamaConnector(settings);

        // Act
        Action action = () => connector.EnsureLanguageModelSettingsValid();

        // Assert
        action.ShouldThrow(expectedType)
              .Message.ShouldContain(expectedMessage);
    }

    [Trait("Category", "UnitTest")]
    [Theory]
    [InlineData(null, typeof(NullReferenceException), "Object reference not set to an instance of an object")]
    [InlineData("", typeof(InvalidOperationException), "Ollama:Model")]
    [InlineData(" ", typeof(InvalidOperationException), "Ollama:Model")]
    [InlineData("\t\n\r", typeof(InvalidOperationException), "Ollama:Model")]
    public void Given_Invalid_Model_When_EnsureLanguageModelSettingsValid_Invoked_Then_It_Should_Throw(string? model, Type expectedType, string expectedMessage)
    {
        // Arrange
        var settings = BuildAppSettings(model: model);
        var connector = new OllamaConnector(settings);

        // Act
        Action action = () => connector.EnsureLanguageModelSettingsValid();

        // Assert
        action.ShouldThrow(expectedType)
              .Message.ShouldContain(expectedMessage);
    }

    [Trait("Category", "UnitTest")]
    [Fact]
    public void Given_Valid_Settings_When_EnsureLanguageModelSettingsValid_Invoked_Then_It_Should_Return_True()
    {
        // Arrange
        var settings = BuildAppSettings();
        var connector = new OllamaConnector(settings);

        // Act
        var result = connector.EnsureLanguageModelSettingsValid();

        // Assert
        result.ShouldBeTrue();
    }

    [Trait("Category", "UnitTest")]
    [Theory]
    [InlineData(null, typeof(ArgumentNullException), "null")]
    [InlineData("", typeof(UriFormatException), "empty")]
    [InlineData(" ", typeof(UriFormatException), "Invalid URI: The format of the URI could not be determined.")]
    [InlineData("\t\n\r", typeof(UriFormatException), "Invalid URI:")]
    [InlineData("invalid-uri-format", typeof(UriFormatException), "Invalid URI: The format of the URI could not be determined.")]
    [InlineData("not-a-url", typeof(UriFormatException), "Invalid URI: The format of the URI could not be determined.")]
    public void Given_Invalid_BaseUrl_When_GetChatClientAsync_Invoked_Then_It_Should_Throw(string? baseUrl, Type expected, string message)
    {
        // Arrange
        var settings = BuildAppSettings(baseUrl: baseUrl);
        var connector = new OllamaConnector(settings);

        // Act
        Func<Task> func = async () => await connector.GetChatClientAsync();

        // Assert
        func.ShouldThrow(expected)
            .Message.ShouldContain(message);
    }

    [Trait("Category", "UnitTest")]
    [Theory]
    [InlineData(null, typeof(NullReferenceException), "Object reference not set to an instance of an object")]
    public void Given_Null_Model_When_GetChatClientAsync_Invoked_Then_It_Should_Throw(string? model, Type expected, string message)
    {
        // Arrange
        var settings = BuildAppSettings(model: model);
        var connector = new OllamaConnector(settings);

        // Act
        Func<Task> func = async () => await connector.GetChatClientAsync();

        // Assert
        func.ShouldThrow(expected)
            .Message.ShouldContain(message);
    }

    [Trait("Category", "IntegrationTest")]
    [Trait("Category", "LLMRequired")]
    [Fact]
    public async Task Given_Valid_Settings_When_GetChatClientAsync_Invoked_Then_It_Should_Return_ChatClient()
    {
        // Arrange
        var settings = BuildAppSettings();
        var connector = new OllamaConnector(settings);

        // Act
        var client = await connector.GetChatClientAsync();

        // Assert
        client.ShouldNotBeNull();
        client.ShouldBeAssignableTo<IChatClient>();
    }

    [Trait("Category", "UnitTest")]
    [InlineData(typeof(InvalidOperationException), "Ollama")]
    [Theory]
    public void Given_Null_Settings_When_CreateChatClientAsync_Invoked_Then_It_Should_Throw(Type expected, string expectedMessage)
    {
        // Arrange
        var settings = new AppSettings
        {
            ConnectorType = ConnectorType.Ollama,
            Ollama = null
        };

        // Act
        Func<Task> func = async () => await LanguageModelConnector.CreateChatClientAsync(settings);

        // Assert
        func.ShouldThrow(expected)
            .Message.ShouldContain(expectedMessage);
    }

    [Trait("Category", "UnitTest")]
    [Theory]
    [InlineData(null, null, typeof(NullReferenceException), "Object reference not set to an instance of an object")]
    [InlineData(null, Model, typeof(NullReferenceException), "Object reference not set to an instance of an object")]
    [InlineData("", Model, typeof(InvalidOperationException), "Missing configuration: Ollama")]
    [InlineData(" ", Model, typeof(InvalidOperationException), "Missing configuration: Ollama")]
    [InlineData(BaseUrl, null, typeof(NullReferenceException), "Object reference not set to an instance of an object")]
    [InlineData(BaseUrl, "", typeof(InvalidOperationException), "Missing configuration: Ollama")]
    [InlineData(BaseUrl, " ", typeof(InvalidOperationException), "Missing configuration: Ollama")]
    public void Given_Invalid_Settings_When_CreateChatClientAsync_Invoked_Then_It_Should_Throw(string? baseUrl, string? model, Type expected, string expectedMessage)
    {
        // Arrange
        var settings = new AppSettings
        {
            ConnectorType = ConnectorType.Ollama,
            Ollama = new OllamaSettings
            {
                BaseUrl = baseUrl,
                Model = model
            }
        };

        // Act
        Func<Task> func = async () => await LanguageModelConnector.CreateChatClientAsync(settings);

        // Assert
        func.ShouldThrow(expected)
            .Message.ShouldContain(expectedMessage);
    }

    [Trait("Category", "IntegrationTest")]
    [Trait("Category", "LLMRequired")]
    [Fact]
    public async Task Given_Valid_Settings_When_CreateChatClientAsync_Invoked_Then_It_Should_Return_IChatClient()
    {
        // Arrange
        var settings = BuildAppSettings();

        // Act
        var result = await LanguageModelConnector.CreateChatClientAsync(settings);

        // Assert
        result.ShouldNotBeNull();
        result.ShouldBeAssignableTo<IChatClient>();
    }
}
\ No newline at end of file