Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
87 changes: 84 additions & 3 deletions cmd/cli/commands/backend.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,19 @@ var ValidBackends = map[string]bool{
"openai": true,
}

// ServerPreset represents a preconfigured server endpoint that can be
// selected by name via the --url-alias flag.
type ServerPreset struct {
	Name string // alias accepted by --url-alias
	URL  string // OpenAI-compatible base URL of the endpoint
}

// ServerPresets defines the available server presets, in the order they
// are reported to the user.
var ServerPresets = []ServerPreset{
	{Name: "llamacpp", URL: "http://127.0.0.1:8080/v1"},
	{Name: "ollama", URL: "http://127.0.0.1:11434/v1"},
	{Name: "openrouter", URL: "https://openrouter.ai/api/v1"},
}

// validateBackend checks if the provided backend is valid
func validateBackend(backend string) error {
if !ValidBackends[backend] {
Expand All @@ -28,14 +41,82 @@ func validateBackend(backend string) error {
func ensureAPIKey(backend string) (string, error) {
if backend == "openai" {
apiKey := os.Getenv("OPENAI_API_KEY")
if apiKey == "" {
return "", errors.New("OPENAI_API_KEY environment variable is required when using --backend=openai")
if apiKey != "" {
return apiKey, nil
}
Copy link

Copilot AI Oct 16, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The ensureAPIKey function no longer returns an error when the API key is missing for OpenAI backend. This is a breaking change from the original behavior where it returned an error message 'OPENAI_API_KEY environment variable is required when using --backend=openai'. This change may cause issues for code that expects an error to be returned when the API key is missing.

Suggested change
}
}
return "", fmt.Errorf("OPENAI_API_KEY environment variable is required when using --backend=openai")

Copilot uses AI. Check for mistakes.
return apiKey, nil
}
return "", nil
}

// resolveServerURL determines the server URL from flags
// Returns: (url, useOpenAI, apiKey, error)
func resolveServerURL(host, customURL, urlAlias string, port int) (string, bool, string, error) {
// Count how many server options are specified
presetCount := 0
if urlAlias != "" {
presetCount++
}
if customURL != "" {
presetCount++
}

// Check for conflicting options
if presetCount > 1 {
return "", false, "", errors.New("only one of --url or --url-alias can be specified")
}

// Check for conflicting host/port with URL/preset options
hostPortSpecified := host != "" || port != 0
urlPresetSpecified := customURL != "" || urlAlias != ""

if hostPortSpecified && urlPresetSpecified {
return "", false, "", errors.New("cannot specify both --host/--port and --url/--url-alias options")
}

// Resolve the URL
var serverURL string
useOpenAI := false
apiKey := ""

if customURL != "" {
serverURL = customURL
useOpenAI = true
} else if urlAlias != "" {
// Find the matching preset
found := false
for _, preset := range ServerPresets {
if preset.Name == urlAlias {
serverURL = preset.URL
useOpenAI = true
found = true
break
}
}
if !found {
return "", false, "", fmt.Errorf("invalid url-alias '%s'. Valid options are: llamacpp, ollama, openrouter", urlAlias)
}
Comment on lines +95 to +97
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

The error message for an invalid url-alias hardcodes the list of valid options. This can become out of sync if the ServerPresets slice is updated. For better maintainability, this list should be generated dynamically from ServerPresets.

		if !found {
			validAliases := make([]string, len(ServerPresets))
			for i, p := range ServerPresets {
				validAliases[i] = p.Name
			}
			return "", false, "", fmt.Errorf("invalid url-alias '%s'. Valid options are: %s", urlAlias, strings.Join(validAliases, ", "))
		}


apiKey = os.Getenv("OPENAI_API_KEY")
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

high

The logic to enforce OPENAI_API_KEY for the openrouter alias is missing. The function currently retrieves the API key but doesn't return an error if it's missing for openrouter, which is a required behavior. This will cause the openrouter url-alias without API key test in backend_test.go to fail.

apiKey = os.Getenv("OPENAI_API_KEY")
		if urlAlias == "openrouter" && apiKey == "" {
			return "", false, "", errors.New("OPENAI_API_KEY environment variable is required when using --url-alias=openrouter")
		}

} else if hostPortSpecified {
Comment on lines +92 to +100
Copy link

Copilot AI Oct 16, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Missing validation for required API key when using openrouter url-alias. The openrouter preset requires an API key to function, but there's no check to ensure it exists before returning. Consider adding validation similar to the original ensureAPIKey behavior for openrouter specifically.

Suggested change
break
}
}
if !found {
return "", false, "", fmt.Errorf("invalid url-alias '%s'. Valid options are: llamacpp, ollama, openrouter", urlAlias)
}
apiKey = os.Getenv("OPENAI_API_KEY")
} else if hostPortSpecified {
// Check for required API key for openrouter
apiKey = os.Getenv("OPENAI_API_KEY")
if preset.Name == "openrouter" && apiKey == "" {
return "", false, "", fmt.Errorf("the openrouter preset requires an API key. Please set the OPENAI_API_KEY environment variable.")
}
break
}
}
if !found {
return "", false, "", fmt.Errorf("invalid url-alias '%s'. Valid options are: llamacpp, ollama, openrouter", urlAlias)
}

Copilot uses AI. Check for mistakes.
// Use custom host/port for model-runner endpoint
if host == "" {
host = "127.0.0.1"
}
if port == 0 {
port = 12434
}
serverURL = fmt.Sprintf("http://%s:%d", host, port)
useOpenAI = false
}

// For OpenAI-compatible endpoints, check for API key (optional for most, required for openrouter)
if useOpenAI && apiKey == "" {
apiKey = os.Getenv("OPENAI_API_KEY")
}

return serverURL, useOpenAI, apiKey, nil
}

func ValidBackendsKeys() string {
keys := slices.Collect(maps.Keys(ValidBackends))
slices.Sort(keys)
Expand Down
243 changes: 243 additions & 0 deletions cmd/cli/commands/backend_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,243 @@
package commands

import (
"os"
"testing"
)

func TestResolveServerURL(t *testing.T) {
tests := []struct {
name string
host string
customURL string
urlAlias string
port int
expectURL string
expectOAI bool
wantErr bool
setupEnv func()
cleanupEnv func()
}{
{
name: "no flags specified",
expectURL: "",
expectOAI: false,
wantErr: false,
},
{
name: "host and port specified",
host: "192.168.1.1",
port: 8080,
expectURL: "http://192.168.1.1:8080",
expectOAI: false,
wantErr: false,
},
{
name: "only host specified",
host: "192.168.1.1",
expectURL: "http://192.168.1.1:12434",
expectOAI: false,
wantErr: false,
},
{
name: "only port specified",
port: 8080,
expectURL: "http://127.0.0.1:8080",
expectOAI: false,
wantErr: false,
},
{
name: "llamacpp url-alias specified",
urlAlias: "llamacpp",
expectURL: "http://127.0.0.1:8080/v1",
expectOAI: true,
wantErr: false,
},
{
name: "ollama url-alias specified",
urlAlias: "ollama",
expectURL: "http://127.0.0.1:11434/v1",
expectOAI: true,
wantErr: false,
},
{
name: "openrouter url-alias without API key",
urlAlias: "openrouter",
wantErr: true,
},
{
name: "openrouter url-alias with API key",
urlAlias: "openrouter",
expectURL: "https://openrouter.ai/api/v1",
expectOAI: true,
wantErr: false,
setupEnv: func() {
os.Setenv("OPENAI_API_KEY", "test-key")
},
cleanupEnv: func() {
os.Unsetenv("OPENAI_API_KEY")
},
},
{
name: "custom URL specified",
customURL: "http://custom.server.com:9000/v1",
expectURL: "http://custom.server.com:9000/v1",
expectOAI: true,
wantErr: false,
},
{
name: "multiple preset flags (url + url-alias)",
customURL: "http://test.com/v1",
urlAlias: "ollama",
wantErr: true,
},
{
name: "host/port with url-alias",
host: "192.168.1.1",
urlAlias: "llamacpp",
wantErr: true,
},
{
name: "host/port with url",
host: "192.168.1.1",
customURL: "http://test.com/v1",
wantErr: true,
},
{
name: "invalid url-alias",
urlAlias: "invalid",
wantErr: true,
},
}

for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if tt.setupEnv != nil {
tt.setupEnv()
}
if tt.cleanupEnv != nil {
defer tt.cleanupEnv()
}

url, useOAI, apiKey, err := resolveServerURL(tt.host, tt.customURL, tt.urlAlias, tt.port)

if (err != nil) != tt.wantErr {
t.Errorf("resolveServerURL() error = %v, wantErr %v", err, tt.wantErr)
return
}

if err != nil {
return
}

if url != tt.expectURL {
t.Errorf("resolveServerURL() url = %v, want %v", url, tt.expectURL)
}

if useOAI != tt.expectOAI {
t.Errorf("resolveServerURL() useOAI = %v, want %v", useOAI, tt.expectOAI)
}

// For openrouter, check that API key is returned
if tt.urlAlias == "openrouter" && !tt.wantErr {
if apiKey == "" {
t.Errorf("resolveServerURL() expected API key for openrouter, got empty string")
}
}
})
}
}

func TestValidateBackend(t *testing.T) {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nitpick (testing): Tests for validateBackend do not check error message content.

Add assertions to verify that error messages are clear and match expected content.

tests := []struct {
name string
backend string
wantErr bool
}{
{
name: "valid backend llama.cpp",
backend: "llama.cpp",
wantErr: false,
},
{
name: "valid backend openai",
backend: "openai",
wantErr: false,
},
{
name: "invalid backend",
backend: "invalid",
wantErr: true,
},
{
name: "empty backend",
backend: "",
wantErr: true,
},
}

for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
err := validateBackend(tt.backend)
if (err != nil) != tt.wantErr {
t.Errorf("validateBackend() error = %v, wantErr %v", err, tt.wantErr)
}
})
}
}

func TestEnsureAPIKey(t *testing.T) {
tests := []struct {
name string
backend string
setupEnv func()
cleanupEnv func()
wantErr bool
wantKey string
}{
{
name: "non-openai backend",
backend: "llama.cpp",
wantErr: false,
wantKey: "",
},
{
name: "openai backend without key",
backend: "openai",
wantErr: true,
},
Comment on lines +205 to +208
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

critical

This test case expects ensureAPIKey to return an error when the openai backend is used without an API key. However, the implementation of ensureAPIKey in cmd/cli/commands/backend.go was changed to no longer return an error in this scenario (it returns "", nil). This test is now incorrect and will fail. The test should be updated to reflect the new behavior of ensureAPIKey.

Suggested change
name: "openai backend without key",
backend: "openai",
wantErr: true,
},
{
name: "openai backend without key",
backend: "openai",
wantErr: false,
},

{
name: "openai backend with key",
backend: "openai",
setupEnv: func() {
os.Setenv("OPENAI_API_KEY", "test-key")
},
cleanupEnv: func() {
os.Unsetenv("OPENAI_API_KEY")
},
wantErr: false,
wantKey: "test-key",
},
}

for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if tt.setupEnv != nil {
tt.setupEnv()
}
if tt.cleanupEnv != nil {
defer tt.cleanupEnv()
}

key, err := ensureAPIKey(tt.backend)
if (err != nil) != tt.wantErr {
t.Errorf("ensureAPIKey() error = %v, wantErr %v", err, tt.wantErr)
return
}

if key != tt.wantKey {
t.Errorf("ensureAPIKey() key = %v, want %v", key, tt.wantKey)
}
})
}
}
Loading
Loading