commands/backend.go (3 additions, 1 deletion)

@@ -37,5 +37,7 @@ func ensureAPIKey(backend string) (string, error) {
 }
 
 func ValidBackendsKeys() string {
-	return strings.Join(slices.Collect(maps.Keys(ValidBackends)), ", ")
+	keys := slices.Collect(maps.Keys(ValidBackends))
+	slices.Sort(keys)
+	return strings.Join(keys, ", ")
 }
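
Context for reviewers: Go map iteration order is randomized, so `slices.Collect(maps.Keys(...))` returns the backend names in a different order on each run; sorting the slice makes the generated flag help text deterministic. A minimal sketch of the behavior, using a stand-in `ValidBackends` map (the real map lives in commands/backend.go):

```go
package main

import (
	"fmt"
	"maps"
	"slices"
	"strings"
)

// Stand-in for the real ValidBackends map; only the keys matter here.
var ValidBackends = map[string]bool{"llama.cpp": true, "openai": true}

func main() {
	// Without sorting, map iteration order is randomized per run,
	// so the help text could read "openai, llama.cpp" one time and
	// "llama.cpp, openai" the next.
	unsorted := slices.Collect(maps.Keys(ValidBackends))
	fmt.Println(strings.Join(unsorted, ", ")) // order may vary between runs

	// With the change in this PR, the keys are sorted first.
	keys := slices.Collect(maps.Keys(ValidBackends))
	slices.Sort(keys)
	fmt.Println(strings.Join(keys, ", ")) // always "llama.cpp, openai"
}
```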
commands/list.go (1 addition)

@@ -63,6 +63,7 @@ func newListCmd() *cobra.Command {
 	c.Flags().BoolVar(&openai, "openai", false, "List models in an OpenAI format")
 	c.Flags().BoolVarP(&quiet, "quiet", "q", false, "Only show model IDs")
 	c.Flags().StringVar(&backend, "backend", "", fmt.Sprintf("Specify the backend to use (%s)", ValidBackendsKeys()))
+	c.Flags().MarkHidden("backend")
 	return c
 }
 
commands/run.go (1 addition)

@@ -187,6 +187,7 @@ func newRunCmd() *cobra.Command {
 
 	c.Flags().BoolVar(&debug, "debug", false, "Enable debug logging")
 	c.Flags().StringVar(&backend, "backend", "", fmt.Sprintf("Specify the backend to use (%s)", ValidBackendsKeys()))
+	c.Flags().MarkHidden("backend")
 
 	return c
 }
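
For context on what `MarkHidden` does: pflag's `MarkHidden` keeps a flag fully functional for parsing but omits it from the generated `--help` output, which is consistent with the regenerated YAML and markdown reference files below. A rough, self-contained sketch using a throwaway command name (not part of this PR):

```go
package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

func main() {
	var backend string

	// Throwaway command purely to illustrate MarkHidden.
	cmd := &cobra.Command{
		Use: "demo",
		Run: func(cmd *cobra.Command, args []string) {
			fmt.Println("backend =", backend)
		},
	}
	cmd.Flags().StringVar(&backend, "backend", "", "Specify the backend to use")
	cmd.Flags().MarkHidden("backend") // still parses, but no longer listed in --help

	// `demo --help` will not show --backend, yet passing it still works:
	cmd.SetArgs([]string{"--backend", "llama.cpp"})
	cmd.Execute()
}
```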
docs/reference/docker_model_list.yaml (1 addition, 1 deletion)

@@ -10,7 +10,7 @@ options:
     value_type: string
     description: Specify the backend to use (llama.cpp, openai)
     deprecated: false
-    hidden: false
+    hidden: true
     experimental: false
     experimentalcli: false
     kubernetes: false
docs/reference/docker_model_run.yaml (1 addition, 1 deletion)

@@ -14,7 +14,7 @@ options:
     value_type: string
     description: Specify the backend to use (llama.cpp, openai)
     deprecated: false
-    hidden: false
+    hidden: true
    experimental: false
     experimentalcli: false
     kubernetes: false
docs/reference/model_list.md (5 additions, 6 deletions)

@@ -9,12 +9,11 @@ List the models pulled to your local environment
 
 ### Options
 
-| Name            | Type     | Default | Description                                     |
-|:----------------|:---------|:--------|:-----------------------------------------------|
-| `--backend`     | `string` |         | Specify the backend to use (llama.cpp, openai)  |
-| `--json`        | `bool`   |         | List models in a JSON format                    |
-| `--openai`      | `bool`   |         | List models in an OpenAI format                 |
-| `-q`, `--quiet` | `bool`   |         | Only show model IDs                             |
+| Name            | Type   | Default | Description                      |
+|:----------------|:-------|:--------|:--------------------------------|
+| `--json`        | `bool` |         | List models in a JSON format     |
+| `--openai`      | `bool` |         | List models in an OpenAI format  |
+| `-q`, `--quiet` | `bool` |         | Only show model IDs              |
 
 
 <!---MARKER_GEN_END-->
docs/reference/model_run.md (3 additions, 4 deletions)

@@ -5,10 +5,9 @@ Run a model and interact with it using a submitted prompt or chat mode
 
 ### Options
 
-| Name        | Type     | Default | Description                                     |
-|:------------|:---------|:--------|:-----------------------------------------------|
-| `--backend` | `string` |         | Specify the backend to use (llama.cpp, openai)  |
-| `--debug`   | `bool`   |         | Enable debug logging                            |
+| Name      | Type   | Default | Description          |
+|:----------|:-------|:--------|:---------------------|
+| `--debug` | `bool` |         | Enable debug logging |
 
 
 <!---MARKER_GEN_END-->