Skip to content

Commit 937552f

Browse files
committed
Refactor list and run commands to support OpenAI
Change --openai flag from boolean to URL string in both commands. Implement OpenAI endpoint connection for model listing and chat. Update internal functions to accept interface parameters instead of concrete types for better abstraction. Add validation to prevent incompatible flag combinations. Move OpenAI-specific logic to separate functions for clarity.

Signed-off-by: Eric Curtin <[email protected]>
1 parent 50c9b8a commit 937552f

File tree

2 files changed

+72
-32
lines changed

2 files changed

+72
-32
lines changed

cmd/cli/commands/list.go

Lines changed: 38 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -20,31 +20,45 @@ import (
2020
)
2121

2222
func newListCmd() *cobra.Command {
23-
var jsonFormat, openai, quiet bool
23+
var jsonFormat, quiet bool
24+
var openaiURL string
2425
c := &cobra.Command{
2526
Use: "list [OPTIONS] [MODEL]",
2627
Aliases: []string{"ls"},
2728
Short: "List the models pulled to your local environment",
2829
Args: cobra.MaximumNArgs(1),
2930
RunE: func(cmd *cobra.Command, args []string) error {
30-
if openai && quiet {
31-
return fmt.Errorf("--quiet flag cannot be used with --openai flag or OpenAI backend")
31+
useOpenAI := openaiURL != ""
32+
if useOpenAI && quiet {
33+
return fmt.Errorf("--quiet flag cannot be used with --openai flag")
34+
}
35+
36+
var modelFilter string
37+
if len(args) > 0 {
38+
modelFilter = args[0]
39+
}
40+
41+
// If --openai URL is provided, connect to external OpenAI endpoint
42+
if useOpenAI {
43+
openaiClient := desktop.NewOpenAIClient(openaiURL)
44+
models, err := listModelsFromOpenAI(openaiClient, modelFilter)
45+
if err != nil {
46+
return err
47+
}
48+
fmt.Fprint(cmd.OutOrStdout(), models)
49+
return nil
3250
}
3351

3452
// If we're doing an automatic install, only show the installation
3553
// status if it won't corrupt machine-readable output.
3654
var standaloneInstallPrinter standalone.StatusPrinter
37-
if !jsonFormat && !openai && !quiet {
55+
if !jsonFormat && !quiet {
3856
standaloneInstallPrinter = asPrinter(cmd)
3957
}
4058
if _, err := ensureStandaloneRunnerAvailable(cmd.Context(), standaloneInstallPrinter, false); err != nil {
4159
return fmt.Errorf("unable to initialize standalone model runner: %w", err)
4260
}
43-
var modelFilter string
44-
if len(args) > 0 {
45-
modelFilter = args[0]
46-
}
47-
models, err := listModels(openai, desktopClient, quiet, jsonFormat, modelFilter)
61+
models, err := listModels(desktopClient, quiet, jsonFormat, modelFilter)
4862
if err != nil {
4963
return err
5064
}
@@ -54,7 +68,7 @@ func newListCmd() *cobra.Command {
5468
ValidArgsFunction: completion.ModelNamesAndTags(getDesktopClient, 1),
5569
}
5670
c.Flags().BoolVar(&jsonFormat, "json", false, "List models in a JSON format")
57-
c.Flags().BoolVar(&openai, "openai", false, "List models in an OpenAI format")
71+
c.Flags().StringVar(&openaiURL, "openai", "", "List models from an OpenAI-compatible endpoint URL")
5872
c.Flags().BoolVarP(&quiet, "quiet", "q", false, "Only show model IDs")
5973
return c
6074
}
@@ -74,25 +88,24 @@ func matchesModelFilter(tag, filter string) bool {
7488
return repository == filter
7589
}
7690

77-
func listModels(openai bool, desktopClient *desktop.Client, quiet bool, jsonFormat bool, modelFilter string) (string, error) {
78-
if openai {
79-
models, err := desktopClient.ListOpenAI()
80-
if err != nil {
81-
return "", handleClientError(err, "Failed to list models")
82-
}
83-
if modelFilter != "" {
84-
filter := normalizeModelFilter(modelFilter)
85-
filtered := models.Data[:0]
86-
for _, m := range models.Data {
87-
if matchesModelFilter(m.ID, filter) {
88-
filtered = append(filtered, m)
89-
}
91+
func listModelsFromOpenAI(openaiClient *desktop.OpenAIClient, modelFilter string) (string, error) {
92+
models, err := openaiClient.ListModels()
93+
if err != nil {
94+
return "", fmt.Errorf("failed to list models: %w", err)
95+
}
96+
if modelFilter != "" {
97+
filtered := models.Data[:0]
98+
for _, m := range models.Data {
99+
if strings.Contains(m.ID, modelFilter) {
100+
filtered = append(filtered, m)
90101
}
91-
models.Data = filtered
92102
}
93-
return formatter.ToStandardJSON(models)
103+
models.Data = filtered
94104
}
105+
return formatter.ToStandardJSON(models)
106+
}
95107

108+
func listModels(desktopClient *desktop.Client, quiet bool, jsonFormat bool, modelFilter string) (string, error) {
96109
models, err := desktopClient.List()
97110
if err != nil {
98111
return "", handleClientError(err, "Failed to list models")

cmd/cli/commands/run.go

Lines changed: 34 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -87,7 +87,7 @@ func readMultilineInput(cmd *cobra.Command, scanner *bufio.Scanner) (string, err
8787
}
8888

8989
// generateInteractiveWithReadline provides an enhanced interactive mode with readline support
90-
func generateInteractiveWithReadline(cmd *cobra.Command, desktopClient *desktop.Client, model string) error {
90+
func generateInteractiveWithReadline(cmd *cobra.Command, client desktop.ChatClient, model string) error {
9191
usage := func() {
9292
fmt.Fprintln(os.Stderr, "Available Commands:")
9393
fmt.Fprintln(os.Stderr, " /bye Exit")
@@ -141,7 +141,7 @@ func generateInteractiveWithReadline(cmd *cobra.Command, desktopClient *desktop.
141141
})
142142
if err != nil {
143143
// Fall back to basic input mode if readline initialization fails
144-
return generateInteractiveBasic(cmd, desktopClient, model)
144+
return generateInteractiveBasic(cmd, client, model)
145145
}
146146

147147
// Disable history if the environment variable is set
@@ -245,7 +245,7 @@ func generateInteractiveWithReadline(cmd *cobra.Command, desktopClient *desktop.
245245
}
246246
}()
247247

248-
err := chatWithMarkdownContext(chatCtx, cmd, desktopClient, model, userInput)
248+
err := chatWithMarkdownContext(chatCtx, cmd, client, model, userInput)
249249

250250
// Clean up signal handler
251251
signal.Stop(sigChan)
@@ -270,7 +270,7 @@ func generateInteractiveWithReadline(cmd *cobra.Command, desktopClient *desktop.
270270
}
271271

272272
// generateInteractiveBasic provides a basic interactive mode (fallback)
273-
func generateInteractiveBasic(cmd *cobra.Command, desktopClient *desktop.Client, model string) error {
273+
func generateInteractiveBasic(cmd *cobra.Command, client desktop.ChatClient, model string) error {
274274
scanner := bufio.NewScanner(os.Stdin)
275275
for {
276276
userInput, err := readMultilineInput(cmd, scanner)
@@ -306,7 +306,7 @@ func generateInteractiveBasic(cmd *cobra.Command, desktopClient *desktop.Client,
306306
}
307307
}()
308308

309-
err = chatWithMarkdownContext(chatCtx, cmd, desktopClient, model, userInput)
309+
err = chatWithMarkdownContext(chatCtx, cmd, client, model, userInput)
310310

311311
cancelChat()
312312
signal.Stop(sigChan)
@@ -508,12 +508,12 @@ func renderMarkdown(content string) (string, error) {
508508
}
509509

510510
// chatWithMarkdown performs chat and streams the response with selective markdown rendering.
511-
func chatWithMarkdown(cmd *cobra.Command, client *desktop.Client, model, prompt string) error {
511+
func chatWithMarkdown(cmd *cobra.Command, client desktop.ChatClient, model, prompt string) error {
512512
return chatWithMarkdownContext(cmd.Context(), cmd, client, model, prompt)
513513
}
514514

515515
// chatWithMarkdownContext performs chat with context support and streams the response with selective markdown rendering.
516-
func chatWithMarkdownContext(ctx context.Context, cmd *cobra.Command, client *desktop.Client, model, prompt string) error {
516+
func chatWithMarkdownContext(ctx context.Context, cmd *cobra.Command, client desktop.ChatClient, model, prompt string) error {
517517
colorMode, _ := cmd.Flags().GetString("color")
518518
useMarkdown := shouldUseMarkdown(colorMode)
519519
debug, _ := cmd.Flags().GetBool("debug")
@@ -571,6 +571,7 @@ func newRunCmd() *cobra.Command {
571571
var debug bool
572572
var colorMode string
573573
var detach bool
574+
var openaiURL string
574575

575576
const cmdArgs = "MODEL [PROMPT]"
576577
c := &cobra.Command{
@@ -617,6 +618,31 @@ func newRunCmd() *cobra.Command {
617618
}
618619
}
619620

621+
// Handle --openai flag: connect to external OpenAI-compatible endpoint
622+
if openaiURL != "" {
623+
if detach {
624+
return fmt.Errorf("--detach flag cannot be used with --openai flag")
625+
}
626+
openaiClient := desktop.NewOpenAIClient(openaiURL)
627+
628+
if prompt != "" {
629+
if err := chatWithMarkdown(cmd, openaiClient, model, prompt); err != nil {
630+
return fmt.Errorf("failed to generate a response: %w", err)
631+
}
632+
cmd.Println()
633+
return nil
634+
}
635+
636+
// Interactive mode
637+
if term.IsTerminal(int(os.Stdin.Fd())) {
638+
termenv.SetDefaultOutput(
639+
termenv.NewOutput(asPrinter(cmd), termenv.WithColorCache(true)),
640+
)
641+
return generateInteractiveWithReadline(cmd, openaiClient, model)
642+
}
643+
return generateInteractiveBasic(cmd, openaiClient, model)
644+
}
645+
620646
// Check if this is an NVIDIA NIM image
621647
if isNIMImage(model) {
622648
// NIM images are handled differently - they run as Docker containers
@@ -733,6 +759,7 @@ func newRunCmd() *cobra.Command {
733759
c.Flags().BoolVar(&debug, "debug", false, "Enable debug logging")
734760
c.Flags().StringVar(&colorMode, "color", "no", "Use colored output (auto|yes|no)")
735761
c.Flags().BoolVarP(&detach, "detach", "d", false, "Load the model in the background without interaction")
762+
c.Flags().StringVar(&openaiURL, "openai", "", "Connect to an OpenAI-compatible endpoint URL")
736763

737764
return c
738765
}

0 commit comments

Comments (0)