author    Paul Buetow <paul@buetow.org>    2025-08-17 22:57:19 +0300
committer Paul Buetow <paul@buetow.org>    2025-08-17 22:57:19 +0300
commit    8dfbbbb6de0f0c67413ee157e976fc3eaee4f914 (patch)
tree      8147798438d61a7ddb2769e005c05aece9b586fc
parent    c83acd3f5749fe240464283a43f8b03797a1b544 (diff)
logging: move ChatLogger to value semantics; llm: switch clients to value receivers and return values from constructors
-rw-r--r--  internal/llm/copilot.go          22
-rw-r--r--  internal/llm/ollama.go           14
-rw-r--r--  internal/llm/openai.go           28
-rw-r--r--  internal/logging/chatlogger.go    6
4 files changed, 35 insertions, 35 deletions
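
The diff below removes pointer indirection where none is needed: NewChatLogger now returns a ChatLogger value instead of *ChatLogger, and the provider clients switch from pointer to value receivers. As background, here is a minimal standalone sketch of that pattern using a hypothetical greeter type standing in for ChatLogger; it is illustration only, not code from this repository.

// Minimal sketch of the pattern this commit moves to (hypothetical greeter
// type, not code from this repository): a constructor that returns a value
// and methods that use value receivers because they never mutate state.
package main

import "fmt"

type greeter struct {
	provider string
}

// newGreeter returns a greeter value, mirroring how NewChatLogger now
// returns ChatLogger rather than *ChatLogger.
func newGreeter(provider string) greeter {
	return greeter{provider: provider}
}

// Greet has a value receiver: it only reads provider, so operating on a
// copy of the struct is safe and avoids pointer indirection.
func (g greeter) Greet() string {
	return "hello from " + g.provider
}

func main() {
	g := newGreeter("copilot")
	fmt.Println(g.Greet())
}
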
diff --git a/internal/llm/copilot.go b/internal/llm/copilot.go
index 7cc0278..cf24565 100644
--- a/internal/llm/copilot.go
+++ b/internal/llm/copilot.go
@@ -21,7 +21,7 @@ type copilotClient struct {
apiKey string
baseURL string
defaultModel string
- chatLogger *logging.ChatLogger
+ chatLogger logging.ChatLogger
}
func newCopilot(baseURL, model, apiKey string) Client {
@@ -31,13 +31,13 @@ func newCopilot(baseURL, model, apiKey string) Client {
if strings.TrimSpace(model) == "" {
model = "gpt-4.1"
}
- return &copilotClient{
- httpClient: &http.Client{Timeout: 30 * time.Second},
- apiKey: apiKey,
- baseURL: strings.TrimRight(baseURL, "/"),
- defaultModel: model,
- chatLogger: logging.NewChatLogger("copilot"),
- }
+ return copilotClient{
+ httpClient: &http.Client{Timeout: 30 * time.Second},
+ apiKey: apiKey,
+ baseURL: strings.TrimRight(baseURL, "/"),
+ defaultModel: model,
+ chatLogger: logging.NewChatLogger("copilot"),
+ }
}
type copilotChatRequest struct {
@@ -70,7 +70,7 @@ type copilotChatResponse struct {
} `json:"error,omitempty"`
}
-func (c *copilotClient) Chat(ctx context.Context, messages []Message, opts ...RequestOption) (string, error) {
+func (c copilotClient) Chat(ctx context.Context, messages []Message, opts ...RequestOption) (string, error) {
if strings.TrimSpace(c.apiKey) == "" {
return nilStringErr("missing Copilot API key")
}
@@ -158,5 +158,5 @@ func (c *copilotClient) Chat(ctx context.Context, messages []Message, opts ...Re
}
// Provider metadata
-func (c *copilotClient) Name() string { return "copilot" }
-func (c *copilotClient) DefaultModel() string { return c.defaultModel }
\ No newline at end of file
+func (c copilotClient) Name() string { return "copilot" }
+func (c copilotClient) DefaultModel() string { return c.defaultModel }
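
The constructor change above (returning copilotClient{...} rather than &copilotClient{...}) works because a struct value satisfies an interface as long as every interface method uses a value receiver. A toy example, with hypothetical speaker and loudSpeaker names rather than the repository's Client interface:

// Illustration only (hypothetical speaker/loudSpeaker types, not the
// repository's Client interface): a struct value satisfies an interface
// when all of its interface methods use value receivers, so the
// address-of operator in the constructor can be dropped.
package main

import "fmt"

type speaker interface {
	Name() string
}

type loudSpeaker struct{ name string }

// Value receiver: both loudSpeaker and *loudSpeaker implement speaker.
func (l loudSpeaker) Name() string { return l.name }

func newSpeaker(name string) speaker {
	// No &: the value itself is a speaker.
	return loudSpeaker{name: name}
}

func main() {
	fmt.Println(newSpeaker("copilot").Name())
}
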
diff --git a/internal/llm/ollama.go b/internal/llm/ollama.go
index 49adcb2..a53716b 100644
--- a/internal/llm/ollama.go
+++ b/internal/llm/ollama.go
@@ -21,7 +21,7 @@ type ollamaClient struct {
httpClient *http.Client
baseURL string
defaultModel string
- chatLogger *logging.ChatLogger
+ chatLogger logging.ChatLogger
}
func newOllama(baseURL, model string) Client {
@@ -31,7 +31,7 @@ func newOllama(baseURL, model string) Client {
if strings.TrimSpace(model) == "" {
model = "qwen2.5-coder:latest"
}
- return &ollamaClient{
+ return ollamaClient{
httpClient: &http.Client{Timeout: 30 * time.Second},
baseURL: strings.TrimRight(baseURL, "/"),
defaultModel: model,
@@ -55,7 +55,7 @@ type ollamaChatResponse struct {
Error string `json:"error,omitempty"`
}
-func (c *ollamaClient) Chat(ctx context.Context, messages []Message, opts ...RequestOption) (string, error) {
+func (c ollamaClient) Chat(ctx context.Context, messages []Message, opts ...RequestOption) (string, error) {
o := Options{Model: c.defaultModel}
for _, opt := range opts {
opt(&o)
@@ -143,11 +143,11 @@ func (c *ollamaClient) Chat(ctx context.Context, messages []Message, opts ...Req
}
// Provider metadata
-func (c *ollamaClient) Name() string { return "ollama" }
-func (c *ollamaClient) DefaultModel() string { return c.defaultModel }
+func (c ollamaClient) Name() string { return "ollama" }
+func (c ollamaClient) DefaultModel() string { return c.defaultModel }
// Streaming support (optional)
-func (c *ollamaClient) ChatStream(ctx context.Context, messages []Message, onDelta func(string), opts ...RequestOption) error {
+func (c ollamaClient) ChatStream(ctx context.Context, messages []Message, onDelta func(string), opts ...RequestOption) error {
o := Options{Model: c.defaultModel}
for _, opt := range opts {
opt(&o)
@@ -242,4 +242,4 @@ func (c *ollamaClient) ChatStream(ctx context.Context, messages []Message, onDel
}
logging.Logf("llm/ollama ", "stream end duration=%s", time.Since(start))
return nil
-}
\ No newline at end of file
+}
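
Although the clients are now passed around by value, the httpClient field remains a *http.Client, so every copy shares one underlying client and its connection pool. A small sketch of that behaviour, using a hypothetical apiClient type rather than ollamaClient:

// Sketch with a hypothetical apiClient type (not ollamaClient itself):
// copying a struct that holds a *http.Client copies only the pointer, so
// value copies of the client still share one http.Client and its
// connection pool.
package main

import (
	"fmt"
	"net/http"
	"time"
)

type apiClient struct {
	httpClient *http.Client // shared by all value copies
	baseURL    string
}

func main() {
	a := apiClient{
		httpClient: &http.Client{Timeout: 30 * time.Second},
		baseURL:    "http://localhost:11434",
	}
	b := a                                    // value copy of the struct
	fmt.Println(a.httpClient == b.httpClient) // true: same underlying client
}
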
diff --git a/internal/llm/openai.go b/internal/llm/openai.go
index fe6705b..6b77144 100644
--- a/internal/llm/openai.go
+++ b/internal/llm/openai.go
@@ -22,7 +22,7 @@ type openAIClient struct {
apiKey string
baseURL string
defaultModel string
- chatLogger *logging.ChatLogger
+ chatLogger logging.ChatLogger
}
// newOpenAI constructs an OpenAI client using explicit configuration values.
@@ -34,13 +34,13 @@ func newOpenAI(baseURL, model, apiKey string) Client {
if strings.TrimSpace(model) == "" {
model = "gpt-4.1"
}
- return &openAIClient{
- httpClient: &http.Client{Timeout: 30 * time.Second},
- apiKey: apiKey,
- baseURL: baseURL,
- defaultModel: model,
- chatLogger: logging.NewChatLogger("openai"),
- }
+ return openAIClient{
+ httpClient: &http.Client{Timeout: 30 * time.Second},
+ apiKey: apiKey,
+ baseURL: baseURL,
+ defaultModel: model,
+ chatLogger: logging.NewChatLogger("openai"),
+ }
}
type oaChatRequest struct {
@@ -74,7 +74,7 @@ type oaChatResponse struct {
} `json:"error,omitempty"`
}
-func (c *openAIClient) Chat(ctx context.Context, messages []Message, opts ...RequestOption) (string, error) {
+func (c openAIClient) Chat(ctx context.Context, messages []Message, opts ...RequestOption) (string, error) {
if c.apiKey == "" {
return nilStringErr("missing OpenAI API key")
}
@@ -159,11 +159,11 @@ func (c *openAIClient) Chat(ctx context.Context, messages []Message, opts ...Req
return content, nil
}
-func (c *openAIClient) logf(format string, args ...any) { logging.Logf("llm/openai ", format, args...) }
+func (c openAIClient) logf(format string, args ...any) { logging.Logf("llm/openai ", format, args...) }
// Provider metadata
-func (c *openAIClient) Name() string { return "openai" }
-func (c *openAIClient) DefaultModel() string { return c.defaultModel }
+func (c openAIClient) Name() string { return "openai" }
+func (c openAIClient) DefaultModel() string { return c.defaultModel }
// Streaming support (optional)
type oaStreamChunk struct {
@@ -181,7 +181,7 @@ type oaStreamChunk struct {
} `json:"error,omitempty"`
}
-func (c *openAIClient) ChatStream(ctx context.Context, messages []Message, onDelta func(string), opts ...RequestOption) error {
+func (c openAIClient) ChatStream(ctx context.Context, messages []Message, onDelta func(string), opts ...RequestOption) error {
if c.apiKey == "" {
return errors.New("missing OpenAI API key")
}
@@ -290,4 +290,4 @@ func (c *openAIClient) ChatStream(ctx context.Context, messages []Message, onDel
}
logging.Logf("llm/openai ", "stream end duration=%s", time.Since(start))
return nil
-}
\ No newline at end of file
+}
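
One caveat with value receivers, relevant to all three clients above: an assignment to a receiver field inside a method would only change a copy. That is safe here because Chat, ChatStream, Name, DefaultModel and logf only read their fields. A hypothetical counter type illustrates the difference:

// Hypothetical counter type (not from this repository) showing the one
// behavioural difference to keep in mind: a value receiver mutates a copy,
// a pointer receiver mutates the original. The clients in this commit only
// read their fields, so value receivers are safe.
package main

import "fmt"

type counter struct{ n int }

func (c counter) bumpValue()    { c.n++ } // increments a copy; caller sees nothing
func (c *counter) bumpPointer() { c.n++ } // increments the original

func main() {
	c := counter{}
	c.bumpValue()
	fmt.Println(c.n) // 0
	c.bumpPointer()
	fmt.Println(c.n) // 1
}
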
diff --git a/internal/logging/chatlogger.go b/internal/logging/chatlogger.go
index b6b84a3..2f2fc99 100644
--- a/internal/logging/chatlogger.go
+++ b/internal/logging/chatlogger.go
@@ -6,12 +6,12 @@ type ChatLogger struct {
}
// NewChatLogger creates a new ChatLogger for a given provider.
-func NewChatLogger(provider string) *ChatLogger {
- return &ChatLogger{Provider: provider}
+func NewChatLogger(provider string) ChatLogger {
+ return ChatLogger{Provider: provider}
}
// LogStart logs the beginning of a chat or stream interaction.
-func (cl *ChatLogger) LogStart(stream bool, model string, temp float64, maxTokens int, stop []string, messages []struct {
+func (cl ChatLogger) LogStart(stream bool, model string, temp float64, maxTokens int, stop []string, messages []struct {
Role string
Content string
}) {