summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
author    Paul Buetow <paul@buetow.org> 2026-01-29 20:29:00 +0200
committer Paul Buetow <paul@buetow.org> 2026-01-29 20:29:00 +0200
commit    40eb437f4826f69ce96cc30656a753519251cc4a (patch)
tree      f77e7d5d74271c3211342f413b2cb8eff421749f
parent    c6bb463837ec8c41261604e416aeab023663ba09 (diff)
refactor: apply code style best practices to Anthropic implementation
- Reorganize anthropic.go: types, interface checks, constructor, public methods, private methods
- Extract helper methods from Chat() and ChatStream() to keep functions under 50 lines
- Add resolveOptions(), sendRequest(), extractContent() private methods
- Add explicit interface satisfaction check for Client and Streamer
- Add documentation comments to all public methods (Chat, Name, DefaultModel, ChatStream)
- Apply gofmt and gofumpt formatting
- Apply goimports for import ordering

All 51 tests in llm package pass

All code adheres to best practices from go-projects.md:
- Value semantics (value receivers only)
- Constructors before methods
- Public before private
- Functions under 50 lines
- Explicit interface satisfaction
- Documentation on all public identifiers
- Proper error handling
- Context as first parameter for I/O functions

Amp-Thread-ID: https://ampcode.com/threads/T-019c0af1-f215-72cf-9940-b014b1a9576b

Co-authored-by: Amp <amp@ampcode.com>
-rw-r--r--  internal/llm/anthropic.go       125
-rw-r--r--  internal/llm/anthropic_test.go    2
-rw-r--r--  internal/llm/provider_test.go     1
3 files changed, 69 insertions, 59 deletions
diff --git a/internal/llm/anthropic.go b/internal/llm/anthropic.go
index 6f14eea..ebb6826 100644
--- a/internal/llm/anthropic.go
+++ b/internal/llm/anthropic.go
@@ -26,12 +26,12 @@ type anthropicClient struct {
}
type anthropicChatRequest struct {
- Model string `json:"model"`
- Messages []anthropicMessage `json:"messages"`
- Temperature *float64 `json:"temperature,omitempty"`
- MaxTokens int `json:"max_tokens"`
- Stream bool `json:"stream,omitempty"`
- System string `json:"system,omitempty"`
+ Model string `json:"model"`
+ Messages []anthropicMessage `json:"messages"`
+ Temperature *float64 `json:"temperature,omitempty"`
+ MaxTokens int `json:"max_tokens"`
+ Stream bool `json:"stream,omitempty"`
+ System string `json:"system,omitempty"`
}
type anthropicMessage struct {
@@ -80,6 +80,12 @@ type anthropicStreamError struct {
} `json:"error"`
}
+// Ensure anthropicClient implements Client and Streamer.
+var (
+ _ Client = (*anthropicClient)(nil)
+ _ Streamer = (*anthropicClient)(nil)
+)
+
// Constructor
// newAnthropic constructs an Anthropic client using explicit configuration values.
// The apiKey may be empty; calls will fail until a valid key is supplied.
@@ -100,33 +106,17 @@ func newAnthropic(baseURL, model, apiKey string, defaultTemp *float64) Client {
}
}
+// Chat sends a request to Anthropic and returns the response.
func (c anthropicClient) Chat(ctx context.Context, messages []Message, opts ...RequestOption) (string, error) {
if c.apiKey == "" {
return nilStringErr("missing Anthropic API key")
}
- o := Options{Model: c.defaultModel}
- for _, opt := range opts {
- opt(&o)
- }
- if o.Model == "" {
- o.Model = c.defaultModel
- }
+ o := c.resolveOptions(opts)
start := time.Now()
c.logStart(false, o, messages)
- req := buildAnthropicChatRequest(o, messages, c.defaultModel, c.defaultTemperature, false)
- body, err := json.Marshal(req)
- if err != nil {
- c.logf("marshal error: %v", err)
- return "", err
- }
- endpoint := c.baseURL + "/messages"
- logging.Logf("llm/anthropic ", "POST %s", endpoint)
- resp, err := c.doJSON(ctx, endpoint, body, map[string]string{
- "x-api-key": c.apiKey,
- "anthropic-version": "2023-06-01",
- })
+
+ resp, err := c.sendRequest(ctx, o, messages, false, start)
if err != nil {
- logging.Logf("llm/anthropic ", "%shttp error after %s: %v%s", logging.AnsiRed, time.Since(start), err, logging.AnsiBase)
return "", err
}
defer func() {
@@ -134,6 +124,7 @@ func (c anthropicClient) Chat(ctx context.Context, messages []Message, opts ...R
logging.Logf("llm/anthropic", "failed to close response body: %v", err)
}
}()
+
if err := handleAnthropicNon2xx(resp, start); err != nil {
return "", err
}
@@ -141,47 +132,26 @@ func (c anthropicClient) Chat(ctx context.Context, messages []Message, opts ...R
if err != nil {
return "", err
}
- if len(out.Content) == 0 {
- logging.Logf("llm/anthropic ", "%sno content returned duration=%s%s", logging.AnsiRed, time.Since(start), logging.AnsiBase)
- return "", errors.New("anthropic: no content returned")
- }
- content := out.Content[0].Text
- logging.Logf("llm/anthropic ", "success stop_reason=%s size=%d preview=%s%s%s duration=%s", out.StopReason, len(content), logging.AnsiGreen, logging.PreviewForLog(content), logging.AnsiBase, time.Since(start))
- return content, nil
+ return c.extractContent(out, start)
}
-// Provider metadata
-func (c anthropicClient) Name() string { return "anthropic" }
+// Name returns the provider's short name.
+func (c anthropicClient) Name() string { return "anthropic" }
+
+// DefaultModel returns the configured default model name.
func (c anthropicClient) DefaultModel() string { return c.defaultModel }
-// Streaming support (optional)
+// ChatStream sends a streaming request and invokes onDelta for each text chunk.
func (c anthropicClient) ChatStream(ctx context.Context, messages []Message, onDelta func(string), opts ...RequestOption) error {
if c.apiKey == "" {
return errors.New("missing Anthropic API key")
}
- o := Options{Model: c.defaultModel}
- for _, opt := range opts {
- opt(&o)
- }
- if o.Model == "" {
- o.Model = c.defaultModel
- }
+ o := c.resolveOptions(opts)
start := time.Now()
c.logStart(true, o, messages)
- req := buildAnthropicChatRequest(o, messages, c.defaultModel, c.defaultTemperature, true)
- body, err := json.Marshal(req)
- if err != nil {
- c.logf("marshal error: %v", err)
- return err
- }
- endpoint := c.baseURL + "/messages"
- logging.Logf("llm/anthropic ", "POST %s (stream)", endpoint)
- resp, err := c.doJSON(ctx, endpoint, body, map[string]string{
- "x-api-key": c.apiKey,
- "anthropic-version": "2023-06-01",
- })
+
+ resp, err := c.sendRequest(ctx, o, messages, true, start)
if err != nil {
- logging.Logf("llm/anthropic ", "%shttp error after %s: %v%s", logging.AnsiRed, time.Since(start), err, logging.AnsiBase)
return err
}
defer func() {
@@ -189,10 +159,10 @@ func (c anthropicClient) ChatStream(ctx context.Context, messages []Message, onD
logging.Logf("llm/anthropic", "failed to close response body: %v", err)
}
}()
+
if err := handleAnthropicNon2xx(resp, start); err != nil {
return err
}
-
if err := parseAnthropicStream(resp, start, onDelta); err != nil {
return err
}
@@ -201,6 +171,47 @@ func (c anthropicClient) ChatStream(ctx context.Context, messages []Message, onD
}
// Private helpers
+
+func (c anthropicClient) resolveOptions(opts []RequestOption) Options {
+ o := Options{Model: c.defaultModel}
+ for _, opt := range opts {
+ opt(&o)
+ }
+ if o.Model == "" {
+ o.Model = c.defaultModel
+ }
+ return o
+}
+
+func (c anthropicClient) sendRequest(ctx context.Context, o Options, messages []Message, stream bool, start time.Time) (*http.Response, error) {
+ req := buildAnthropicChatRequest(o, messages, c.defaultModel, c.defaultTemperature, stream)
+ body, err := json.Marshal(req)
+ if err != nil {
+ c.logf("marshal error: %v", err)
+ return nil, err
+ }
+ endpoint := c.baseURL + "/messages"
+ mode := "POST"
+ if stream {
+ mode = "POST (stream)"
+ }
+ logging.Logf("llm/anthropic ", "%s %s", mode, endpoint)
+ return c.doJSON(ctx, endpoint, body, map[string]string{
+ "x-api-key": c.apiKey,
+ "anthropic-version": "2023-06-01",
+ })
+}
+
+func (c anthropicClient) extractContent(out anthropicChatResponse, start time.Time) (string, error) {
+ if len(out.Content) == 0 {
+ logging.Logf("llm/anthropic ", "%sno content returned duration=%s%s", logging.AnsiRed, time.Since(start), logging.AnsiBase)
+ return "", errors.New("anthropic: no content returned")
+ }
+ content := out.Content[0].Text
+ logging.Logf("llm/anthropic ", "success stop_reason=%s size=%d preview=%s%s%s duration=%s", out.StopReason, len(content), logging.AnsiGreen, logging.PreviewForLog(content), logging.AnsiBase, time.Since(start))
+ return content, nil
+}
+
func (c anthropicClient) logf(format string, args ...any) {
logging.Logf("llm/anthropic ", format, args...)
}
diff --git a/internal/llm/anthropic_test.go b/internal/llm/anthropic_test.go
index 15756f5..578b536 100644
--- a/internal/llm/anthropic_test.go
+++ b/internal/llm/anthropic_test.go
@@ -118,7 +118,7 @@ func TestAnthropicChat_EmptyResponse(t *testing.T) {
ID: "msg-123",
Type: "message",
StopReason: "end_turn",
- Content: []struct {
+ Content: []struct {
Type string `json:"type"`
Text string `json:"text"`
}{},
diff --git a/internal/llm/provider_test.go b/internal/llm/provider_test.go
index bd565b3..46c7ea8 100644
--- a/internal/llm/provider_test.go
+++ b/internal/llm/provider_test.go
@@ -18,4 +18,3 @@ func TestNewFromConfig_DefaultsAndErrors(t *testing.T) {
t.Fatalf("expected key error")
}
}
-