Skip to content
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
Show all changes
25 commits
Select commit Hold shift + click to select a range
9598325
Connector Implementation & Inheritance
donghyeon639 Sep 16, 2025
a1f9683
Update OllamaConnectorTests.cs with latest improvements
donghyeon639 Sep 17, 2025
8add602
fix OllamaconnetcorTests.cs
donghyeon639 Sep 19, 2025
22eafbd
fix local container ollama
donghyeon639 Sep 22, 2025
ae1f2a5
fix: ollama bicep, ollama.md
donghyeon639 Sep 26, 2025
0d42437
conflicts fix and README.MD ollama
donghyeon639 Sep 27, 2025
8bb60fa
fix resuorces.bicep, mainparemeter.json
donghyeon639 Sep 27, 2025
61b73b0
comfilct fix
donghyeon639 Sep 28, 2025
791da59
fix conflict and Ollama Tests,ollama.md
donghyeon639 Sep 29, 2025
f7c1d84
ollama test fix
donghyeon639 Sep 29, 2025
eebb2cf
fix ollama.md Ollamatests
donghyeon639 Sep 30, 2025
a3a109a
conflict fix
donghyeon639 Oct 12, 2025
e78460d
confilct fix
donghyeon639 Oct 12, 2025
52417c2
fix ollama test
donghyeon639 Oct 12, 2025
226b5fc
fix ollama bicep
donghyeon639 Oct 12, 2025
68302e9
fix root README.md
donghyeon639 Oct 12, 2025
a03cb5c
fix ollamatest , ollama.md
donghyeon639 Oct 15, 2025
c1abb53
Merge latest changes from upstream/main and fix OllamaConnectorTests
donghyeon639 Oct 18, 2025
bf43b02
fix ollamatest, ollama.md
donghyeon639 Oct 18, 2025
45ff64b
ollama test languagemodel unit test
donghyeon639 Oct 19, 2025
3c4bdd5
ollama IntegrationTest fix
donghyeon639 Oct 21, 2025
bd231cd
Update ollama.md
tae0y Oct 24, 2025
adee6cf
Update OllamaConnectorTests.cs
tae0y Oct 25, 2025
ac958a0
Merge branch 'main' into feat/269-ollama-connector-implementation-clean
tae0y Oct 25, 2025
c76f1d3
Update OllamaConnectorTests.cs
tae0y Oct 25, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
171 changes: 171 additions & 0 deletions docs/ollama.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,171 @@
# OpenChat Playground with Ollama

This page describes how to run OpenChat Playground (OCP) with Ollama integration.

## Get the repository root

1. Get the repository root.

```bash
# bash/zsh
REPOSITORY_ROOT=$(git rev-parse --show-toplevel)
```

```powershell
# PowerShell
$REPOSITORY_ROOT = git rev-parse --show-toplevel
```

## Run on local machine

1. Make sure you are at the repository root.

```bash
cd $REPOSITORY_ROOT
```

1. Make sure Ollama is installed and running on your local machine. If not, install Ollama from [ollama.com](https://ollama.com/) and start the service.

```bash
# Start Ollama service
ollama serve
```

1. Pull the model you want to use. Replace `{{MODEL_NAME}}` with your desired model.

```bash
# Example: Pull llama3.2 model
ollama pull llama3.2

# Or pull other models
ollama pull mistral
ollama pull phi3
ollama pull qwen
```

1. Run the app.

```bash
dotnet run --project $REPOSITORY_ROOT/src/OpenChat.PlaygroundApp -- --connector-type Ollama --model llama3.2
```

1. Open your web browser, navigate to `http://localhost:5280`, and enter prompts.

## Run in local container

This approach runs OpenChat Playground in a container while connecting to Ollama running on the host machine.

1. Make sure you are at the repository root.

```bash
cd $REPOSITORY_ROOT
```

1. Configure Ollama to accept connections from containers.

```powershell
# PowerShell (Windows)
$env:OLLAMA_HOST = "0.0.0.0:11434"

# Start Ollama service
ollama serve
```

```bash
# bash/zsh (Linux/macOS)
export OLLAMA_HOST=0.0.0.0:11434
ollama serve
```

1. Pull the model you want to use.

```bash
# Pull llama3.2 model (recommended)
ollama pull llama3.2

# Verify Ollama is accessible
curl http://localhost:11434/api/version
```

1. Build a container.

```bash
docker build -f Dockerfile -t openchat-playground:latest .
```

1. Run the app.

```bash
# Using command-line arguments
docker run -i --rm -p 8080:8080 \
openchat-playground:latest \
--connector-type Ollama \
--base-url http://host.docker.internal:11434 \
--model llama3.2
```

```bash
# Alternative: Using environment variables
docker run -i --rm -p 8080:8080 \
-e ConnectorType=Ollama \
-e Ollama__BaseUrl=http://host.docker.internal:11434 \
-e Ollama__Model=llama3.2 \
openchat-playground:latest
```

> **NOTE**: Use `host.docker.internal:11434` to connect to Ollama running on the host machine from inside the container. Make sure `OLLAMA_HOST=0.0.0.0:11434` is set on the host.

1. Open your web browser, navigate to `http://localhost:8080`, and enter prompts.

## Run on Azure

1. Make sure you are at the repository root.

```bash
cd $REPOSITORY_ROOT
```

1. Login to Azure.

```bash
# Login to Azure Dev CLI
azd auth login
```

1. Check login status.

```bash
# Azure Dev CLI
azd auth login --check-status
```

1. Initialize `azd` template.

```bash
azd init
```

> **NOTE**: You will be asked to provide an environment name for provisioning.

1. Set the Ollama configuration to azd environment variables.

```bash
# Set connector type to Ollama
azd env set CONNECTOR_TYPE "Ollama"

# Optionally, set a specific model (default is llama3.2)
azd env set OLLAMA_MODEL "llama3.2"
```

1. Run the following commands in order to provision and deploy the app.

```bash
azd up
```

> **NOTE**: You will be asked to provide an Azure subscription and location for deployment.

1. Clean up all the resources.

```bash
azd down --force --purge
```
2 changes: 2 additions & 0 deletions infra/main.bicep
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ param githubModelsToken string = ''
// Foundry Local
// Hugging Face
// Ollama
param ollamaModel string = ''
// Anthropic
// LG
// Naver
Expand Down Expand Up @@ -64,6 +65,7 @@ module resources 'resources.bicep' = {
connectorType: connectorType
githubModelsModel: githubModelsModel
githubModelsToken: githubModelsToken
ollamaModel: ollamaModel
openchatPlaygroundappExists: openchatPlaygroundappExists
}
}
Expand Down
3 changes: 3 additions & 0 deletions infra/main.parameters.json
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,9 @@
"githubModelsToken": {
"value": "${GH_MODELS_TOKEN}"
},
"ollamaModel": {
"value": "${OLLAMA_MODEL}"
},
"openchatPlaygroundappExists": {
"value": "${SERVICE_OPENCHAT_PLAYGROUNDAPP_RESOURCE_EXISTS=false}"
},
Expand Down
12 changes: 10 additions & 2 deletions infra/resources.bicep
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ param githubModelsToken string = ''
// Foundry Local
// Hugging Face
// Ollama
param ollamaModel string = ''
// Anthropic
// LG
// Naver
Expand Down Expand Up @@ -118,6 +119,12 @@ var envGitHubModels = (connectorType == '' || connectorType == 'GitHubModels') ?
// Foundry Local
// Hugging Face
// Ollama
// Ollama environment variables: only emitted when the Ollama connector is
// selected (or no connector is pinned) AND a model has been supplied.
var envOllama = ((connectorType == '' || connectorType == 'Ollama') && ollamaModel != '') ? [
  {
    name: 'Ollama__Model'
    value: ollamaModel
  }
] : []
// Anthropic
// LG
// Naver
Expand Down Expand Up @@ -161,7 +168,8 @@ module openchatPlaygroundapp 'br/public:avm/res/app/container-app:0.18.1' = {
value: '8080'
}],
envConnectorType,
envGitHubModels)
envGitHubModels,
envOllama)
}
]
managedIdentities:{
Expand All @@ -181,4 +189,4 @@ module openchatPlaygroundapp 'br/public:avm/res/app/container-app:0.18.1' = {
}

output AZURE_CONTAINER_REGISTRY_ENDPOINT string = containerRegistry.outputs.loginServer
output AZURE_RESOURCE_OPENCHAT_PLAYGROUNDAPP_ID string = openchatPlaygroundapp.outputs.resourceId
output AZURE_RESOURCE_OPENCHAT_PLAYGROUNDAPP_ID string = openchatPlaygroundapp.outputs.resourceId
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ public static async Task<IChatClient> CreateChatClientAsync(AppSettings settings
LanguageModelConnector connector = settings.ConnectorType switch
{
ConnectorType.GitHubModels => new GitHubModelsConnector(settings),
ConnectorType.Ollama => new OllamaConnector(settings),
ConnectorType.OpenAI => new OpenAIConnector(settings),
_ => throw new NotSupportedException($"Connector type '{settings.ConnectorType}' is not supported.")
};
Expand Down
53 changes: 53 additions & 0 deletions src/OpenChat.PlaygroundApp/Connectors/OllamaConnector.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
using Microsoft.Extensions.AI;
using OllamaSharp;

using OpenChat.PlaygroundApp.Abstractions;
using OpenChat.PlaygroundApp.Configurations;

namespace OpenChat.PlaygroundApp.Connectors;

/// <summary>
/// This represents the connector entity for Ollama.
/// </summary>
public class OllamaConnector(AppSettings settings) : LanguageModelConnector(settings.Ollama)
{
    /// <inheritdoc/>
    /// <exception cref="InvalidOperationException">
    /// Thrown when the Ollama settings section, <c>BaseUrl</c>, or <c>Model</c> is missing or blank.
    /// </exception>
    public override bool EnsureLanguageModelSettingsValid()
    {
        // Pattern match narrows the type; the local deliberately shadows the
        // primary-constructor parameter, matching the original code's intent.
        if (this.Settings is not OllamaSettings settings)
        {
            throw new InvalidOperationException("Missing configuration: Ollama.");
        }

        // IsNullOrWhiteSpace already treats whitespace-only values as missing, so no
        // Trim() is needed. The previous `settings.BaseUrl!.Trim()` would throw a
        // NullReferenceException — not the intended InvalidOperationException — when
        // the value was null.
        if (string.IsNullOrWhiteSpace(settings.BaseUrl))
        {
            throw new InvalidOperationException("Missing configuration: Ollama:BaseUrl.");
        }

        if (string.IsNullOrWhiteSpace(settings.Model))
        {
            throw new InvalidOperationException("Missing configuration: Ollama:Model.");
        }

        return true;
    }

    /// <inheritdoc/>
    /// <exception cref="InvalidOperationException">
    /// Thrown when the Ollama settings section is missing.
    /// </exception>
    public override async Task<IChatClient> GetChatClientAsync()
    {
        // Guard the cast explicitly instead of null-forgiving it, so a missing
        // settings section fails with a clear message rather than an NRE.
        if (this.Settings is not OllamaSettings settings)
        {
            throw new InvalidOperationException("Missing configuration: Ollama.");
        }

        // BaseUrl/Model are validated by EnsureLanguageModelSettingsValid();
        // the `!` here only silences the nullable-flow warning.
        var config = new OllamaApiClient.Configuration
        {
            Uri = new Uri(settings.BaseUrl!),
            Model = settings.Model!,
        };

        var chatClient = new OllamaApiClient(config);

        // OllamaApiClient implements IChatClient; no real async work is required here.
        return await Task.FromResult<IChatClient>(chatClient).ConfigureAwait(false);
    }
}
Loading