This repository was archived by the owner on Oct 6, 2025. It is now read-only.

Commit e501d42

Merge pull request #111 from doringeman/misc

Use model structs imported from model-runner

Parents: c3efc04 + 79fcf99

File tree (4 files changed: +25 -58 lines)

commands/compose.go
commands/list.go
desktop/api.go
desktop/desktop.go

commands/compose.go

Lines changed: 2 additions & 1 deletion
@@ -12,6 +12,7 @@ import (
 	"github.com/docker/model-cli/desktop"
 	"github.com/docker/model-runner/pkg/inference/backends/llamacpp"
 	"github.com/docker/model-runner/pkg/inference/scheduling"
+	dmrm "github.com/docker/model-runner/pkg/inference/models"
 	"github.com/spf13/cobra"
 )

@@ -143,7 +144,7 @@ func downloadModelsOnlyIfNotFound(desktopClient *desktop.Client, models []string
 	}
 	for _, model := range models {
 		// Download the model if not already present in the local model store
-		if !slices.ContainsFunc(modelsDownloaded, func(m desktop.Model) bool {
+		if !slices.ContainsFunc(modelsDownloaded, func(m dmrm.Model) bool {
 			if model == m.ID {
 				return true
 			}
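
For context, a minimal, self-contained sketch of what the updated check amounts to, assuming only that dmrm.Model exposes an ID field as the hunk above implies; the rest of the original predicate is truncated by the hunk, and the package and function names below are hypothetical:

// sketch.go — hypothetical example, not part of this commit.
package sketch

import (
	"slices"

	dmrm "github.com/docker/model-runner/pkg/inference/models"
)

// isDownloaded reports whether the requested model ID already appears in the
// models returned by the local model store, mirroring the ID comparison above.
func isDownloaded(modelsDownloaded []dmrm.Model, model string) bool {
	return slices.ContainsFunc(modelsDownloaded, func(m dmrm.Model) bool {
		return model == m.ID
	})
}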

commands/list.go

Lines changed: 6 additions & 4 deletions
@@ -3,15 +3,17 @@ package commands
 import (
 	"bytes"
 	"fmt"
+	"os"
+	"time"
+
 	"github.com/docker/go-units"
 	"github.com/docker/model-cli/commands/completion"
 	"github.com/docker/model-cli/commands/formatter"
 	"github.com/docker/model-cli/desktop"
 	"github.com/docker/model-cli/pkg/standalone"
+	dmrm "github.com/docker/model-runner/pkg/inference/models"
 	"github.com/olekukonko/tablewriter"
 	"github.com/spf13/cobra"
-	"os"
-	"time"
 )

 func newListCmd() *cobra.Command {

@@ -79,7 +81,7 @@ func listModels(openai bool, desktopClient *desktop.Client, quiet bool, jsonForm
 	return prettyPrintModels(models), nil
 }

-func prettyPrintModels(models []desktop.Model) string {
+func prettyPrintModels(models []dmrm.Model) string {
 	var buf bytes.Buffer
 	table := tablewriter.NewWriter(&buf)

@@ -116,7 +118,7 @@ func prettyPrintModels(models []desktop.Model) string {
 	return buf.String()
 }

-func appendRow(table *tablewriter.Table, tag string, model desktop.Model) {
+func appendRow(table *tablewriter.Table, tag string, model dmrm.Model) {
 	if len(model.ID) < 19 {
 		fmt.Fprintf(os.Stderr, "invalid model ID for model: %v\n", model)
 		return

desktop/api.go

Lines changed: 0 additions & 36 deletions
@@ -31,39 +31,3 @@ type OpenAIChatResponse struct {
 		FinishReason string `json:"finish_reason"`
 	} `json:"choices"`
 }
-
-type OpenAIModel struct {
-	ID      string `json:"id"`
-	Object  string `json:"object"`
-	Created int64  `json:"created"`
-	OwnedBy string `json:"owned_by"`
-}
-
-type OpenAIModelList struct {
-	Object string         `json:"object"`
-	Data   []*OpenAIModel `json:"data"`
-}
-
-// TODO: To be replaced by the Model struct from pianta's common/pkg/inference/models/api.go.
-// (https://github.com/docker/pinata/pull/33331)
-type Format string
-
-type Config struct {
-	Format       Format `json:"format,omitempty"`
-	Quantization string `json:"quantization,omitempty"`
-	Parameters   string `json:"parameters,omitempty"`
-	Architecture string `json:"architecture,omitempty"`
-	Size         string `json:"size,omitempty"`
-	ContextSize  uint64 `json:"context_size,omitzero"`
-}
-
-type Model struct {
-	// ID is the globally unique model identifier.
-	ID string `json:"id"`
-	// Tags are the list of tags associated with the model.
-	Tags []string `json:"tags,omitempty"`
-	// Created is the Unix epoch timestamp corresponding to the model creation.
-	Created int64 `json:"created"`
-	// Config describes the model.
-	Config Config `json:"config"`
-}
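
With these local definitions removed, callers decode the same JSON shape through the shared model-runner package instead. A minimal sketch, assuming the model-runner structs mirror the JSON tags of the deleted types (id, tags, created, config); the sample payload is invented for illustration:

// main.go — hypothetical example, not part of this commit.
package main

import (
	"encoding/json"
	"fmt"

	dmrm "github.com/docker/model-runner/pkg/inference/models"
)

func main() {
	// Invented payload shaped like the deleted desktop.Model definition.
	raw := []byte(`[{"id":"sha256:abc123","tags":["ai/example:latest"],"created":1700000000}]`)

	var models []dmrm.Model
	if err := json.Unmarshal(raw, &models); err != nil {
		panic(err)
	}
	for _, m := range models {
		fmt.Println(m.ID, m.Tags) // ID and Tags assumed to match the removed struct
	}
}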

desktop/desktop.go

Lines changed: 17 additions & 17 deletions
@@ -13,7 +13,7 @@ import (
 	"time"

 	"github.com/docker/model-runner/pkg/inference"
-	"github.com/docker/model-runner/pkg/inference/models"
+	dmrm "github.com/docker/model-runner/pkg/inference/models"
 	"github.com/docker/model-runner/pkg/inference/scheduling"
 	"github.com/pkg/errors"
 	"go.opentelemetry.io/otel"

@@ -102,7 +102,7 @@ func (c *Client) Status() Status {

 func (c *Client) Pull(model string, progress func(string)) (string, bool, error) {
 	model = normalizeHuggingFaceModelName(model)
-	jsonData, err := json.Marshal(models.ModelCreateRequest{From: model})
+	jsonData, err := json.Marshal(dmrm.ModelCreateRequest{From: model})
 	if err != nil {
 		return "", false, fmt.Errorf("error marshaling request: %w", err)
 	}

@@ -207,73 +207,73 @@ func (c *Client) Push(model string, progress func(string)) (string, bool, error)
 	return "", progressShown, fmt.Errorf("unexpected end of stream while pushing model %s", model)
 }

-func (c *Client) List() ([]Model, error) {
+func (c *Client) List() ([]dmrm.Model, error) {
 	modelsRoute := inference.ModelsPrefix
 	body, err := c.listRaw(modelsRoute, "")
 	if err != nil {
-		return []Model{}, err
+		return []dmrm.Model{}, err
 	}

-	var modelsJson []Model
+	var modelsJson []dmrm.Model
 	if err := json.Unmarshal(body, &modelsJson); err != nil {
 		return modelsJson, fmt.Errorf("failed to unmarshal response body: %w", err)
 	}

 	return modelsJson, nil
 }

-func (c *Client) ListOpenAI() (OpenAIModelList, error) {
+func (c *Client) ListOpenAI() (dmrm.OpenAIModelList, error) {
 	modelsRoute := inference.InferencePrefix + "/v1/models"
 	rawResponse, err := c.listRaw(modelsRoute, "")
 	if err != nil {
-		return OpenAIModelList{}, err
+		return dmrm.OpenAIModelList{}, err
 	}
-	var modelsJson OpenAIModelList
+	var modelsJson dmrm.OpenAIModelList
 	if err := json.Unmarshal(rawResponse, &modelsJson); err != nil {
 		return modelsJson, fmt.Errorf("failed to unmarshal response body: %w", err)
 	}
 	return modelsJson, nil
 }

-func (c *Client) Inspect(model string, remote bool) (Model, error) {
+func (c *Client) Inspect(model string, remote bool) (dmrm.Model, error) {
 	model = normalizeHuggingFaceModelName(model)
 	if model != "" {
 		if !strings.Contains(strings.Trim(model, "/"), "/") {
 			// Do an extra API call to check if the model parameter isn't a model ID.
 			modelId, err := c.fullModelID(model)
 			if err != nil {
-				return Model{}, fmt.Errorf("invalid model name: %s", model)
+				return dmrm.Model{}, fmt.Errorf("invalid model name: %s", model)
 			}
 			model = modelId
 		}
 	}
 	rawResponse, err := c.listRawWithQuery(fmt.Sprintf("%s/%s", inference.ModelsPrefix, model), model, remote)
 	if err != nil {
-		return Model{}, err
+		return dmrm.Model{}, err
 	}
-	var modelInspect Model
+	var modelInspect dmrm.Model
 	if err := json.Unmarshal(rawResponse, &modelInspect); err != nil {
 		return modelInspect, fmt.Errorf("failed to unmarshal response body: %w", err)
 	}

 	return modelInspect, nil
 }

-func (c *Client) InspectOpenAI(model string) (OpenAIModel, error) {
+func (c *Client) InspectOpenAI(model string) (dmrm.OpenAIModel, error) {
 	model = normalizeHuggingFaceModelName(model)
 	modelsRoute := inference.InferencePrefix + "/v1/models"
 	if !strings.Contains(strings.Trim(model, "/"), "/") {
 		// Do an extra API call to check if the model parameter isn't a model ID.
 		var err error
 		if model, err = c.fullModelID(model); err != nil {
-			return OpenAIModel{}, fmt.Errorf("invalid model name: %s", model)
+			return dmrm.OpenAIModel{}, fmt.Errorf("invalid model name: %s", model)
 		}
 	}
 	rawResponse, err := c.listRaw(fmt.Sprintf("%s/%s", modelsRoute, model), model)
 	if err != nil {
-		return OpenAIModel{}, err
+		return dmrm.OpenAIModel{}, err
 	}
-	var modelInspect OpenAIModel
+	var modelInspect dmrm.OpenAIModel
 	if err := json.Unmarshal(rawResponse, &modelInspect); err != nil {
 		return modelInspect, fmt.Errorf("failed to unmarshal response body: %w", err)
 	}

@@ -315,7 +315,7 @@ func (c *Client) fullModelID(id string) (string, error) {
 		return "", err
 	}

-	var modelsJson []Model
+	var modelsJson []dmrm.Model
 	if err := json.Unmarshal(bodyResponse, &modelsJson); err != nil {
 		return "", fmt.Errorf("failed to unmarshal response body: %w", err)
 	}
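
A caller-side sketch of the updated signatures, assuming a *desktop.Client has already been constructed elsewhere (its constructor is not part of this diff); the package and helper name are hypothetical:

// sketch.go — hypothetical example, not part of this commit.
package sketch

import (
	"fmt"

	"github.com/docker/model-cli/desktop"
	dmrm "github.com/docker/model-runner/pkg/inference/models"
)

// printModelIDs exercises the new return type of Client.List, which is now
// []dmrm.Model rather than the locally defined []desktop.Model.
func printModelIDs(c *desktop.Client) error {
	models, err := c.List()
	if err != nil {
		return fmt.Errorf("listing models: %w", err)
	}
	var _ []dmrm.Model = models // List is now typed against the shared package
	for _, m := range models {
		fmt.Println(m.ID)
	}
	return nil
}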
