summaryrefslogtreecommitdiff
path: root/internal/llmutils/client_test.go
blob: c68821320a08f05ec4209e223306875608140a1e (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
package llmutils

import (
	"os"
	"testing"

	"codeberg.org/snonux/hexai/internal/appconfig"
	"codeberg.org/snonux/hexai/internal/llm"
)

// TestMain registers all built-in LLM providers before tests run.
func TestMain(m *testing.M) {
	llm.RegisterAllProviders()
	// Capture the result explicitly so the exit path is obvious.
	code := m.Run()
	os.Exit(code)
}

// TestNewClientFromApp_Ollama verifies a client can be built for the
// "ollama" provider, which needs no API key.
func TestNewClientFromApp_Ollama(t *testing.T) {
	app := appconfig.App{CoreConfig: appconfig.CoreConfig{Provider: "ollama"}}
	client, err := NewClientFromApp(app)
	if client == nil || err != nil {
		t.Fatalf("ollama client failed: %v %v", client, err)
	}
}

// TestNewClientFromApp_OpenAI_WithKey verifies a client can be built for the
// "openai" provider when the API key is supplied via environment variable.
func TestNewClientFromApp_OpenAI_WithKey(t *testing.T) {
	// t.Setenv automatically restores the prior value when the test ends,
	// so no manual cleanup of this variable is needed.
	t.Setenv("HEXAI_OPENAI_API_KEY", "test-key")
	cfg := appconfig.App{CoreConfig: appconfig.CoreConfig{Provider: "openai"}}
	c, err := NewClientFromApp(cfg)
	if err != nil || c == nil {
		t.Fatalf("openai client failed: %v %v", c, err)
	}
	// NOTE(review): a trailing os.Unsetenv("OPENAI_API_KEY") was removed. It
	// executed after every assertion (so it verified nothing), targeted a
	// different variable than the one set above, and leaked an unrestored
	// mutation of the process environment into subsequently run tests.
}

// TestCanonicalProvider checks trimming/lowercasing of provider names and
// the default used when the input is blank.
func TestCanonicalProvider(t *testing.T) {
	got := CanonicalProvider("  OpenRouter ")
	if got != "openrouter" {
		t.Fatalf("CanonicalProvider(openrouter) = %q", got)
	}
	got = CanonicalProvider(" ")
	if got != "openai" {
		t.Fatalf("CanonicalProvider(empty) = %q", got)
	}
}

// TestDefaultModelForProvider checks that each provider's configured model
// is returned from the matching ProviderConfig field.
func TestDefaultModelForProvider(t *testing.T) {
	cfg := appconfig.App{
		ProviderConfig: appconfig.ProviderConfig{
			OpenAIModel:     "gpt-4.1",
			OpenRouterModel: "openrouter/auto",
			OllamaModel:     "qwen3",
			AnthropicModel:  "claude",
		},
	}
	cases := []struct {
		provider string
		want     string
	}{
		{"openai", "gpt-4.1"},
		{"openrouter", "openrouter/auto"},
		{"ollama", "qwen3"},
		{"anthropic", "claude"},
	}
	for _, tc := range cases {
		if got := DefaultModelForProvider(cfg, tc.provider); got != tc.want {
			t.Fatalf("%s model = %q", tc.provider, got)
		}
	}
}

// TestDefaultModelForProvider_Fallbacks checks the built-in default model
// for each provider when no model is configured.
func TestDefaultModelForProvider_Fallbacks(t *testing.T) {
	var cfg appconfig.App // zero value: no models configured
	cases := []struct {
		provider string
		want     string
	}{
		{"openai", "gpt-4.1"},
		{"openrouter", "openrouter/auto"},
		{"ollama", "qwen3-coder:30b-a3b-q4_K_M"},
		{"anthropic", "claude-3-5-sonnet-20240620"},
	}
	for _, tc := range cases {
		if got := DefaultModelForProvider(cfg, tc.provider); got != tc.want {
			t.Fatalf("%s fallback = %q", tc.provider, got)
		}
	}
}

// TestConfigForProvider checks that deriving a config for a different
// provider overrides only that provider's model, leaving others intact.
func TestConfigForProvider(t *testing.T) {
	src := appconfig.App{
		CoreConfig: appconfig.CoreConfig{Provider: "openai"},
		ProviderConfig: appconfig.ProviderConfig{
			OpenAIModel:    "gpt-4.1",
			OllamaModel:    "qwen3",
			AnthropicModel: "claude",
		},
	}
	derived := ConfigForProvider(src, "ollama", "qwen3-coder")
	switch {
	case derived.Provider != "ollama":
		t.Fatalf("provider = %q", derived.Provider)
	case derived.OllamaModel != "qwen3-coder":
		t.Fatalf("ollama model = %q", derived.OllamaModel)
	case derived.OpenAIModel != "gpt-4.1":
		t.Fatalf("openai model unexpectedly changed: %q", derived.OpenAIModel)
	}
}