summaryrefslogtreecommitdiff
path: root/internal
diff options
context:
space:
mode:
Diffstat (limited to 'internal')
-rw-r--r--internal/appconfig/config.go35
-rw-r--r--internal/hexailsp/run.go10
-rw-r--r--internal/llm/anthropic.go316
-rw-r--r--internal/llm/anthropic_test.go259
-rw-r--r--internal/llm/openai_temp_test.go6
-rw-r--r--internal/llm/provider.go15
-rw-r--r--internal/llm/provider_more2_test.go2
-rw-r--r--internal/llm/provider_more_test.go4
-rw-r--r--internal/llm/provider_test.go6
-rw-r--r--internal/llmutils/client.go9
-rw-r--r--internal/lsp/server.go9
11 files changed, 658 insertions, 13 deletions
diff --git a/internal/appconfig/config.go b/internal/appconfig/config.go
index 59ffd89..f41d4d9 100644
--- a/internal/appconfig/config.go
+++ b/internal/appconfig/config.go
@@ -68,6 +68,10 @@ type App struct {
CopilotModel string `json:"copilot_model" toml:"copilot_model"`
// Default temperature for Copilot requests (nil means use provider default)
CopilotTemperature *float64 `json:"copilot_temperature" toml:"copilot_temperature"`
+ AnthropicBaseURL string `json:"anthropic_base_url" toml:"anthropic_base_url"`
+ AnthropicModel string `json:"anthropic_model" toml:"anthropic_model"`
+ // Default temperature for Anthropic requests (nil means use provider default)
+ AnthropicTemperature *float64 `json:"anthropic_temperature" toml:"anthropic_temperature"`
// Per-surface provider/model configurations (ordered; first entry is primary)
CompletionConfigs []SurfaceConfig `json:"-" toml:"-"`
@@ -137,6 +141,7 @@ func newDefaultConfig() App {
OpenAITemperature: &t,
OllamaTemperature: &t,
CopilotTemperature: &t,
+ AnthropicTemperature: &t,
ManualInvokeMinPrefix: 0,
CompletionDebounceMs: 800,
CompletionThrottleMs: 0,
@@ -235,6 +240,7 @@ type fileConfig struct {
OpenRouter sectionOpenRouter `toml:"openrouter"`
Copilot sectionCopilot `toml:"copilot"`
Ollama sectionOllama `toml:"ollama"`
+ Anthropic sectionAnthropic `toml:"anthropic"`
Prompts sectionPrompts `toml:"prompts"`
Tmux sectionTmux `toml:"tmux"`
Stats sectionStats `toml:"stats"`
@@ -331,6 +337,12 @@ type sectionOllama struct {
Temperature *float64 `toml:"temperature"`
}
+type sectionAnthropic struct {
+ Model string `toml:"model"`
+ BaseURL string `toml:"base_url"`
+ Temperature *float64 `toml:"temperature"`
+}
+
// Prompts sections
type sectionPrompts struct {
Completion sectionPromptsCompletion `toml:"completion"`
@@ -486,6 +498,16 @@ func (fc *fileConfig) toApp() App {
out.mergeProviderFields(&tmp)
}
+ // anthropic
+ if (fc.Anthropic != sectionAnthropic{}) || fc.Anthropic.Temperature != nil {
+ tmp := App{
+ AnthropicBaseURL: fc.Anthropic.BaseURL,
+ AnthropicModel: fc.Anthropic.Model,
+ AnthropicTemperature: fc.Anthropic.Temperature,
+ }
+ out.mergeProviderFields(&tmp)
+ }
+
// prompts
// completion
if (fc.Prompts.Completion != sectionPromptsCompletion{}) {
@@ -1292,6 +1314,19 @@ func loadFromEnv(logger *log.Logger) *App {
any = true
}
+ if s := getenv("HEXAI_ANTHROPIC_BASE_URL"); s != "" {
+ out.AnthropicBaseURL = s
+ any = true
+ }
+ if model, ok := pickModel("anthropic", getenv("HEXAI_ANTHROPIC_MODEL")); ok {
+ out.AnthropicModel = model
+ any = true
+ }
+ if f, ok := parseFloatPtr("HEXAI_ANTHROPIC_TEMPERATURE"); ok {
+ out.AnthropicTemperature = f
+ any = true
+ }
+
// Per-surface overrides
buildEntry := func(modelKey, tempKey, providerKey string) ([]SurfaceConfig, bool) {
model := getenv(modelKey)
diff --git a/internal/hexailsp/run.go b/internal/hexailsp/run.go
index b7f777b..f39ea96 100644
--- a/internal/hexailsp/run.go
+++ b/internal/hexailsp/run.go
@@ -121,6 +121,9 @@ func buildClientIfNil(cfg appconfig.App, client llm.Client) llm.Client {
CopilotBaseURL: cfg.CopilotBaseURL,
CopilotModel: cfg.CopilotModel,
CopilotTemperature: cfg.CopilotTemperature,
+ AnthropicBaseURL: cfg.AnthropicBaseURL,
+ AnthropicModel: cfg.AnthropicModel,
+ AnthropicTemperature: cfg.AnthropicTemperature,
}
// Prefer HEXAI_OPENAI_API_KEY; fall back to OPENAI_API_KEY
oaKey := os.Getenv("HEXAI_OPENAI_API_KEY")
@@ -137,7 +140,12 @@ func buildClientIfNil(cfg appconfig.App, client llm.Client) llm.Client {
if strings.TrimSpace(cpKey) == "" {
cpKey = os.Getenv("COPILOT_API_KEY")
}
- if c, err := llm.NewFromConfig(llmCfg, oaKey, orKey, cpKey); err != nil {
+ // Prefer HEXAI_ANTHROPIC_API_KEY; fall back to ANTHROPIC_API_KEY
+ anKey := os.Getenv("HEXAI_ANTHROPIC_API_KEY")
+ if strings.TrimSpace(anKey) == "" {
+ anKey = os.Getenv("ANTHROPIC_API_KEY")
+ }
+ if c, err := llm.NewFromConfig(llmCfg, oaKey, orKey, cpKey, anKey); err != nil {
logging.Logf("lsp ", "llm disabled: %v", err)
return nil
} else {
diff --git a/internal/llm/anthropic.go b/internal/llm/anthropic.go
new file mode 100644
index 0000000..6f14eea
--- /dev/null
+++ b/internal/llm/anthropic.go
@@ -0,0 +1,316 @@
+// Summary: Anthropic client implementation using Messages API with optional streaming support.
+package llm
+
+import (
+ "bufio"
+ "bytes"
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "net/http"
+ "strings"
+ "time"
+
+ "codeberg.org/snonux/hexai/internal/logging"
+)
+
+// anthropicClient implements Client against Anthropic's Messages API.
+type anthropicClient struct {
+ httpClient *http.Client
+ apiKey string
+ baseURL string
+ defaultModel string
+ chatLogger logging.ChatLogger
+ defaultTemperature *float64
+}
+
+type anthropicChatRequest struct {
+ Model string `json:"model"`
+ Messages []anthropicMessage `json:"messages"`
+ Temperature *float64 `json:"temperature,omitempty"`
+ MaxTokens int `json:"max_tokens"`
+ Stream bool `json:"stream,omitempty"`
+ System string `json:"system,omitempty"`
+}
+
+type anthropicMessage struct {
+ Role string `json:"role"`
+ Content string `json:"content"`
+}
+
+type anthropicChatResponse struct {
+ ID string `json:"id"`
+ Type string `json:"type"`
+ Content []struct {
+ Type string `json:"type"`
+ Text string `json:"text"`
+ } `json:"content"`
+ StopReason string `json:"stop_reason"`
+ Error *struct {
+ Type string `json:"type"`
+ Message string `json:"message"`
+ } `json:"error,omitempty"`
+}
+
+// Streaming event types
+type anthropicStreamStart struct {
+ Type string `json:"type"`
+ Message struct {
+ ID string `json:"id"`
+ Type string `json:"type"`
+ Role string `json:"role"`
+ Model string `json:"model"`
+ } `json:"message"`
+}
+
+type anthropicStreamDelta struct {
+ Type string `json:"type"`
+ Delta struct {
+ Type string `json:"type"`
+ Text string `json:"text"`
+ } `json:"delta"`
+}
+
+type anthropicStreamError struct {
+ Type string `json:"type"`
+ Error struct {
+ Type string `json:"type"`
+ Message string `json:"message"`
+ } `json:"error"`
+}
+
+// Constructor
+// newAnthropic constructs an Anthropic client using explicit configuration values.
+// The apiKey may be empty; calls will fail until a valid key is supplied.
+func newAnthropic(baseURL, model, apiKey string, defaultTemp *float64) Client {
+ if strings.TrimSpace(baseURL) == "" {
+ baseURL = "https://api.anthropic.com/v1"
+ }
+ if strings.TrimSpace(model) == "" {
+ model = "claude-3-5-sonnet-20241022"
+ }
+ return anthropicClient{
+ httpClient: &http.Client{Timeout: 30 * time.Second},
+ apiKey: apiKey,
+ baseURL: baseURL,
+ defaultModel: model,
+ chatLogger: logging.NewChatLogger("anthropic"),
+ defaultTemperature: defaultTemp,
+ }
+}
+
+func (c anthropicClient) Chat(ctx context.Context, messages []Message, opts ...RequestOption) (string, error) {
+ if c.apiKey == "" {
+ return nilStringErr("missing Anthropic API key")
+ }
+ o := Options{Model: c.defaultModel}
+ for _, opt := range opts {
+ opt(&o)
+ }
+ if o.Model == "" {
+ o.Model = c.defaultModel
+ }
+ start := time.Now()
+ c.logStart(false, o, messages)
+ req := buildAnthropicChatRequest(o, messages, c.defaultModel, c.defaultTemperature, false)
+ body, err := json.Marshal(req)
+ if err != nil {
+ c.logf("marshal error: %v", err)
+ return "", err
+ }
+ endpoint := c.baseURL + "/messages"
+ logging.Logf("llm/anthropic ", "POST %s", endpoint)
+ resp, err := c.doJSON(ctx, endpoint, body, map[string]string{
+ "x-api-key": c.apiKey,
+ "anthropic-version": "2023-06-01",
+ })
+ if err != nil {
+ logging.Logf("llm/anthropic ", "%shttp error after %s: %v%s", logging.AnsiRed, time.Since(start), err, logging.AnsiBase)
+ return "", err
+ }
+ defer func() {
+ if err := resp.Body.Close(); err != nil {
+ logging.Logf("llm/anthropic ", "failed to close response body: %v", err)
+ }
+ }()
+ if err := handleAnthropicNon2xx(resp, start); err != nil {
+ return "", err
+ }
+ out, err := decodeAnthropicChat(resp, start)
+ if err != nil {
+ return "", err
+ }
+ if len(out.Content) == 0 {
+ logging.Logf("llm/anthropic ", "%sno content returned duration=%s%s", logging.AnsiRed, time.Since(start), logging.AnsiBase)
+ return "", errors.New("anthropic: no content returned")
+ }
+ content := out.Content[0].Text
+ logging.Logf("llm/anthropic ", "success stop_reason=%s size=%d preview=%s%s%s duration=%s", out.StopReason, len(content), logging.AnsiGreen, logging.PreviewForLog(content), logging.AnsiBase, time.Since(start))
+ return content, nil
+}
+
+// Provider metadata
+func (c anthropicClient) Name() string { return "anthropic" }
+func (c anthropicClient) DefaultModel() string { return c.defaultModel }
+
+// Streaming support (optional)
+func (c anthropicClient) ChatStream(ctx context.Context, messages []Message, onDelta func(string), opts ...RequestOption) error {
+ if c.apiKey == "" {
+ return errors.New("missing Anthropic API key")
+ }
+ o := Options{Model: c.defaultModel}
+ for _, opt := range opts {
+ opt(&o)
+ }
+ if o.Model == "" {
+ o.Model = c.defaultModel
+ }
+ start := time.Now()
+ c.logStart(true, o, messages)
+ req := buildAnthropicChatRequest(o, messages, c.defaultModel, c.defaultTemperature, true)
+ body, err := json.Marshal(req)
+ if err != nil {
+ c.logf("marshal error: %v", err)
+ return err
+ }
+ endpoint := c.baseURL + "/messages"
+ logging.Logf("llm/anthropic ", "POST %s (stream)", endpoint)
+ resp, err := c.doJSON(ctx, endpoint, body, map[string]string{
+ "x-api-key": c.apiKey,
+ "anthropic-version": "2023-06-01",
+ })
+ if err != nil {
+ logging.Logf("llm/anthropic ", "%shttp error after %s: %v%s", logging.AnsiRed, time.Since(start), err, logging.AnsiBase)
+ return err
+ }
+ defer func() {
+ if err := resp.Body.Close(); err != nil {
+ logging.Logf("llm/anthropic ", "failed to close response body: %v", err)
+ }
+ }()
+ if err := handleAnthropicNon2xx(resp, start); err != nil {
+ return err
+ }
+
+ if err := parseAnthropicStream(resp, start, onDelta); err != nil {
+ return err
+ }
+ logging.Logf("llm/anthropic ", "stream end duration=%s", time.Since(start))
+ return nil
+}
+
+// Private helpers
+func (c anthropicClient) logf(format string, args ...any) {
+ logging.Logf("llm/anthropic ", format, args...)
+}
+
+func (c anthropicClient) logStart(stream bool, o Options, messages []Message) {
+ logMessages := make([]struct{ Role, Content string }, len(messages))
+ for i, m := range messages {
+ logMessages[i] = struct{ Role, Content string }{m.Role, m.Content}
+ }
+ c.chatLogger.LogStart(stream, o.Model, o.Temperature, o.MaxTokens, o.Stop, logMessages)
+}
+
+func buildAnthropicChatRequest(o Options, messages []Message, defaultModel string, defaultTemp *float64, stream bool) anthropicChatRequest {
+ req := anthropicChatRequest{
+ Model: o.Model,
+ Stream: stream,
+ MaxTokens: 4096, // Anthropic requires max_tokens
+ }
+ req.Messages = make([]anthropicMessage, len(messages))
+ for i, m := range messages {
+ req.Messages[i] = anthropicMessage{
+ Role: m.Role,
+ Content: m.Content,
+ }
+ }
+ if o.Temperature != 0 {
+ req.Temperature = &o.Temperature
+ } else if defaultTemp != nil {
+ t := *defaultTemp
+ req.Temperature = &t
+ }
+ if o.MaxTokens > 0 {
+ req.MaxTokens = o.MaxTokens
+ }
+ // Note: Anthropic's Messages API supports stop sequences via "stop_sequences",
+ // but Options.Stop is not mapped into the request yet; add the field when needed.
+ return req
+}
+
+func (c anthropicClient) doJSON(ctx context.Context, url string, body []byte, headers map[string]string) (*http.Response, error) {
+ req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(body))
+ if err != nil {
+ return nil, err
+ }
+ req.Header.Set("Content-Type", "application/json")
+ for k, v := range headers {
+ req.Header.Set(k, v)
+ }
+ return c.httpClient.Do(req)
+}
+
+func handleAnthropicNon2xx(resp *http.Response, start time.Time) error {
+ if resp.StatusCode >= 200 && resp.StatusCode < 300 {
+ return nil
+ }
+ var apiErr anthropicChatResponse
+ _ = json.NewDecoder(resp.Body).Decode(&apiErr)
+ if apiErr.Error != nil && apiErr.Error.Message != "" {
+ logging.Logf("llm/anthropic ", "%sapi error status=%d type=%s msg=%s duration=%s%s", logging.AnsiRed, resp.StatusCode, apiErr.Error.Type, apiErr.Error.Message, time.Since(start), logging.AnsiBase)
+ return fmt.Errorf("anthropic error: %s (status %d)", apiErr.Error.Message, resp.StatusCode)
+ }
+ logging.Logf("llm/anthropic ", "%shttp non-2xx status=%d duration=%s%s", logging.AnsiRed, resp.StatusCode, time.Since(start), logging.AnsiBase)
+ return fmt.Errorf("anthropic http error: status %d", resp.StatusCode)
+}
+
+func decodeAnthropicChat(resp *http.Response, start time.Time) (anthropicChatResponse, error) {
+ var out anthropicChatResponse
+ if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
+ logging.Logf("llm/anthropic ", "%sdecode error after %s: %v%s", logging.AnsiRed, time.Since(start), err, logging.AnsiBase)
+ return anthropicChatResponse{}, err
+ }
+ return out, nil
+}
+
+func parseAnthropicStream(resp *http.Response, start time.Time, onDelta func(string)) error {
+ // Parse server-sent events: lines starting with "data: " containing JSON
+ scanner := bufio.NewScanner(resp.Body)
+ const maxBuf = 1024 * 1024
+ buf := make([]byte, 0, 64*1024)
+ scanner.Buffer(buf, maxBuf)
+ for scanner.Scan() {
+ line := scanner.Text()
+ if !strings.HasPrefix(line, "data: ") {
+ continue
+ }
+ payload := strings.TrimPrefix(line, "data: ")
+ // Check for stream end event
+ if strings.Contains(payload, "\"type\":\"message_stop\"") {
+ break
+ }
+ // Try to parse as delta event
+ var delta anthropicStreamDelta
+ if err := json.Unmarshal([]byte(payload), &delta); err != nil {
+ continue
+ }
+ if delta.Type == "content_block_delta" && delta.Delta.Type == "text_delta" && delta.Delta.Text != "" {
+ onDelta(delta.Delta.Text)
+ }
+ // Check for errors in stream
+ var errEvent anthropicStreamError
+ if err := json.Unmarshal([]byte(payload), &errEvent); err == nil {
+ if errEvent.Type == "error" && errEvent.Error.Message != "" {
+ logging.Logf("llm/anthropic ", "%sstream error: %s%s", logging.AnsiRed, errEvent.Error.Message, logging.AnsiBase)
+ return fmt.Errorf("anthropic stream error: %s", errEvent.Error.Message)
+ }
+ }
+ }
+ if err := scanner.Err(); err != nil {
+ logging.Logf("llm/anthropic ", "%sstream read error after %s: %v%s", logging.AnsiRed, time.Since(start), err, logging.AnsiBase)
+ return err
+ }
+ return nil
+}
diff --git a/internal/llm/anthropic_test.go b/internal/llm/anthropic_test.go
new file mode 100644
index 0000000..15756f5
--- /dev/null
+++ b/internal/llm/anthropic_test.go
@@ -0,0 +1,259 @@
+package llm
+
+import (
+ "context"
+ "encoding/json"
+ "io"
+ "net/http"
+ "net/http/httptest"
+ "strings"
+ "testing"
+)
+
+func TestAnthropicChat_Success(t *testing.T) {
+ srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ if r.Method != http.MethodPost {
+ t.Fatalf("expected POST, got %s", r.Method)
+ }
+ if !strings.HasSuffix(r.URL.Path, "/messages") {
+ t.Fatalf("expected /messages endpoint, got %s", r.URL.Path)
+ }
+ // Check headers
+ if r.Header.Get("x-api-key") != "test-key" {
+ t.Fatalf("expected x-api-key header")
+ }
+ if r.Header.Get("anthropic-version") != "2023-06-01" {
+ t.Fatalf("expected anthropic-version header")
+ }
+ // Verify request body
+ var req anthropicChatRequest
+ if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
+ t.Fatalf("failed to decode request: %v", err)
+ }
+ if req.Model != "claude-3-5-sonnet-20241022" {
+ t.Fatalf("expected model claude-3-5-sonnet-20241022, got %s", req.Model)
+ }
+ if len(req.Messages) != 1 {
+ t.Fatalf("expected 1 message, got %d", len(req.Messages))
+ }
+ if req.Messages[0].Role != "user" {
+ t.Fatalf("expected user role, got %s", req.Messages[0].Role)
+ }
+ if req.Messages[0].Content != "Hello" {
+ t.Fatalf("expected content 'Hello', got '%s'", req.Messages[0].Content)
+ }
+ // Send response
+ resp := anthropicChatResponse{
+ ID: "msg-123",
+ Type: "message",
+ StopReason: "end_turn",
+ Content: []struct {
+ Type string `json:"type"`
+ Text string `json:"text"`
+ }{
+ {Type: "text", Text: "Hi there!"},
+ },
+ }
+ w.Header().Set("Content-Type", "application/json")
+ json.NewEncoder(w).Encode(resp)
+ }))
+ defer srv.Close()
+
+ c := newAnthropic(srv.URL, "claude-3-5-sonnet-20241022", "test-key", nil).(anthropicClient)
+ response, err := c.Chat(context.Background(), []Message{
+ {Role: "user", Content: "Hello"},
+ })
+ if err != nil {
+ t.Fatalf("Chat failed: %v", err)
+ }
+ if response != "Hi there!" {
+ t.Fatalf("expected 'Hi there!', got '%s'", response)
+ }
+}
+
+func TestAnthropicChat_NoAPIKey(t *testing.T) {
+ c := newAnthropic("https://api.anthropic.com/v1", "claude-3-5-sonnet-20241022", "", nil)
+ _, err := c.Chat(context.Background(), []Message{
+ {Role: "user", Content: "Hello"},
+ })
+ if err == nil {
+ t.Fatalf("expected error for missing API key")
+ }
+ if !strings.Contains(err.Error(), "missing Anthropic API key") {
+ t.Fatalf("expected 'missing Anthropic API key', got '%s'", err.Error())
+ }
+}
+
+func TestAnthropicChat_APIError(t *testing.T) {
+ srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(http.StatusUnauthorized)
+ resp := anthropicChatResponse{
+ Error: &struct {
+ Type string `json:"type"`
+ Message string `json:"message"`
+ }{
+ Type: "authentication_error",
+ Message: "Invalid API key",
+ },
+ }
+ json.NewEncoder(w).Encode(resp)
+ }))
+ defer srv.Close()
+
+ c := newAnthropic(srv.URL, "claude-3-5-sonnet-20241022", "invalid-key", nil).(anthropicClient)
+ _, err := c.Chat(context.Background(), []Message{
+ {Role: "user", Content: "Hello"},
+ })
+ if err == nil {
+ t.Fatalf("expected error for API error response")
+ }
+ if !strings.Contains(err.Error(), "Invalid API key") {
+ t.Fatalf("expected 'Invalid API key' in error, got '%s'", err.Error())
+ }
+}
+
+func TestAnthropicChat_EmptyResponse(t *testing.T) {
+ srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ resp := anthropicChatResponse{
+ ID: "msg-123",
+ Type: "message",
+ StopReason: "end_turn",
+ Content: []struct {
+ Type string `json:"type"`
+ Text string `json:"text"`
+ }{},
+ }
+ w.Header().Set("Content-Type", "application/json")
+ json.NewEncoder(w).Encode(resp)
+ }))
+ defer srv.Close()
+
+ c := newAnthropic(srv.URL, "claude-3-5-sonnet-20241022", "test-key", nil).(anthropicClient)
+ _, err := c.Chat(context.Background(), []Message{
+ {Role: "user", Content: "Hello"},
+ })
+ if err == nil {
+ t.Fatalf("expected error for empty content")
+ }
+ if !strings.Contains(err.Error(), "no content returned") {
+ t.Fatalf("expected 'no content returned', got '%s'", err.Error())
+ }
+}
+
+func TestAnthropicChat_WithTemperature(t *testing.T) {
+ srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ var req anthropicChatRequest
+ json.NewDecoder(r.Body).Decode(&req)
+ if req.Temperature == nil || *req.Temperature != 0.5 {
+ t.Fatalf("expected temperature 0.5, got %v", req.Temperature)
+ }
+ resp := anthropicChatResponse{
+ ID: "msg-123",
+ Type: "message",
+ StopReason: "end_turn",
+ Content: []struct {
+ Type string `json:"type"`
+ Text string `json:"text"`
+ }{
+ {Type: "text", Text: "Response"},
+ },
+ }
+ w.Header().Set("Content-Type", "application/json")
+ json.NewEncoder(w).Encode(resp)
+ }))
+ defer srv.Close()
+
+ c := newAnthropic(srv.URL, "claude-3-5-sonnet-20241022", "test-key", nil).(anthropicClient)
+ _, err := c.Chat(context.Background(), []Message{
+ {Role: "user", Content: "Hello"},
+ }, WithTemperature(0.5))
+ if err != nil {
+ t.Fatalf("Chat failed: %v", err)
+ }
+}
+
+func TestAnthropicStream_Success(t *testing.T) {
+ srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.Header().Set("Content-Type", "text/event-stream")
+ // Send streaming response
+ streamEvents := []string{
+ `data: {"type":"message_start","message":{"id":"msg-123","type":"message"}}`,
+ `data: {"type":"content_block_start","content_block":{"type":"text"}}`,
+ `data: {"type":"content_block_delta","delta":{"type":"text_delta","text":"Hello"}}`,
+ `data: {"type":"content_block_delta","delta":{"type":"text_delta","text":" "}}`,
+ `data: {"type":"content_block_delta","delta":{"type":"text_delta","text":"world"}}`,
+ `data: {"type":"message_stop"}`,
+ }
+ for _, event := range streamEvents {
+ io.WriteString(w, event+"\n")
+ }
+ }))
+ defer srv.Close()
+
+ c := newAnthropic(srv.URL, "claude-3-5-sonnet-20241022", "test-key", nil)
+ streamer, ok := c.(Streamer)
+ if !ok {
+ t.Fatalf("Anthropic client does not implement Streamer interface")
+ }
+ var chunks []string
+ err := streamer.ChatStream(context.Background(), []Message{
+ {Role: "user", Content: "Say hello"},
+ }, func(chunk string) {
+ chunks = append(chunks, chunk)
+ })
+ if err != nil {
+ t.Fatalf("ChatStream failed: %v", err)
+ }
+ if len(chunks) != 3 {
+ t.Fatalf("expected 3 chunks, got %d", len(chunks))
+ }
+ if chunks[0] != "Hello" || chunks[1] != " " || chunks[2] != "world" {
+ t.Fatalf("unexpected chunks: %v", chunks)
+ }
+}
+
+func TestAnthropicStream_NoAPIKey(t *testing.T) {
+ c := newAnthropic("https://api.anthropic.com/v1", "claude-3-5-sonnet-20241022", "", nil)
+ streamer, ok := c.(Streamer)
+ if !ok {
+ t.Fatalf("Anthropic client does not implement Streamer interface")
+ }
+ err := streamer.ChatStream(context.Background(), []Message{
+ {Role: "user", Content: "Hello"},
+ }, func(chunk string) {})
+ if err == nil {
+ t.Fatalf("expected error for missing API key")
+ }
+ if !strings.Contains(err.Error(), "missing Anthropic API key") {
+ t.Fatalf("expected 'missing Anthropic API key', got '%s'", err.Error())
+ }
+}
+
+func TestAnthropicClient_Name(t *testing.T) {
+ c := newAnthropic("https://api.anthropic.com/v1", "claude-3-5-sonnet-20241022", "test-key", nil)
+ if c.Name() != "anthropic" {
+ t.Fatalf("expected 'anthropic', got '%s'", c.Name())
+ }
+}
+
+func TestAnthropicClient_DefaultModel(t *testing.T) {
+ model := "claude-3-opus-20250219"
+ c := newAnthropic("https://api.anthropic.com/v1", model, "test-key", nil).(anthropicClient)
+ if c.DefaultModel() != model {
+ t.Fatalf("expected '%s', got '%s'", model, c.DefaultModel())
+ }
+}
+
+func TestAnthropicClient_DefaultBaseURL(t *testing.T) {
+ c := newAnthropic("", "claude-3-5-sonnet-20241022", "test-key", nil).(anthropicClient)
+ if c.baseURL != "https://api.anthropic.com/v1" {
+ t.Fatalf("expected default base URL, got '%s'", c.baseURL)
+ }
+}
+
+func TestAnthropicClient_DefaultModel_Empty(t *testing.T) {
+ c := newAnthropic("https://api.anthropic.com/v1", "", "test-key", nil).(anthropicClient)
+ if c.defaultModel != "claude-3-5-sonnet-20241022" {
+ t.Fatalf("expected default model, got '%s'", c.defaultModel)
+ }
+}
diff --git a/internal/llm/openai_temp_test.go b/internal/llm/openai_temp_test.go
index 3d71b94..07abbd5 100644
--- a/internal/llm/openai_temp_test.go
+++ b/internal/llm/openai_temp_test.go
@@ -5,7 +5,7 @@ import "testing"
func TestNewFromConfig_DefaultTemp_ByModel(t *testing.T) {
// OpenAI, gpt-5.* → default temp 1.0 when not provided
cfg := Config{Provider: "openai", OpenAIModel: "gpt-5.0-preview"}
- c, err := NewFromConfig(cfg, "key", "", "")
+ c, err := NewFromConfig(cfg, "key", "", "", "")
if err != nil {
t.Fatalf("new: %v", err)
}
@@ -18,7 +18,7 @@ func TestNewFromConfig_DefaultTemp_ByModel(t *testing.T) {
}
// OpenAI, gpt-4.* → default temp 0.2 when not provided
cfg2 := Config{Provider: "openai", OpenAIModel: "gpt-4.1"}
- c2, err := NewFromConfig(cfg2, "key", "", "")
+ c2, err := NewFromConfig(cfg2, "key", "", "", "")
if err != nil {
t.Fatalf("new2: %v", err)
}
@@ -32,7 +32,7 @@ func TestNewFromConfig_DefaultTemp_UpgradeWhenGpt5AndDefault02(t *testing.T) {
// Simulate app-default of 0.2 while selecting a gpt-5 model: should upgrade to 1.0
v := 0.2
cfg := Config{Provider: "openai", OpenAIModel: "gpt-5.0", OpenAITemperature: &v}
- c, err := NewFromConfig(cfg, "key", "", "")
+ c, err := NewFromConfig(cfg, "key", "", "", "")
if err != nil {
t.Fatalf("new: %v", err)
}
diff --git a/internal/llm/provider.go b/internal/llm/provider.go
index b2c47e4..ae840b0 100644
--- a/internal/llm/provider.go
+++ b/internal/llm/provider.go
@@ -81,12 +81,16 @@ type Config struct {
CopilotBaseURL string
CopilotModel string
CopilotTemperature *float64
+ // Anthropic options
+ AnthropicBaseURL string
+ AnthropicModel string
+ AnthropicTemperature *float64
}
// NewFromConfig creates an LLM client using only the supplied configuration.
// The OpenAI API key is supplied separately and may be read from the environment
// by the caller; other environment-based configuration is not used.
-func NewFromConfig(cfg Config, openAIAPIKey, openRouterAPIKey, copilotAPIKey string) (Client, error) {
+func NewFromConfig(cfg Config, openAIAPIKey, openRouterAPIKey, copilotAPIKey, anthropicAPIKey string) (Client, error) {
p := strings.ToLower(strings.TrimSpace(cfg.Provider))
if p == "" {
p = "openai"
@@ -140,6 +144,15 @@ func NewFromConfig(cfg Config, openAIAPIKey, openRouterAPIKey, copilotAPIKey str
cfg.CopilotTemperature = &t
}
return newCopilot(cfg.CopilotBaseURL, cfg.CopilotModel, copilotAPIKey, cfg.CopilotTemperature), nil
+ case "anthropic":
+ if strings.TrimSpace(anthropicAPIKey) == "" {
+ return nil, errors.New("missing ANTHROPIC_API_KEY for provider anthropic")
+ }
+ if cfg.AnthropicTemperature == nil {
+ t := 0.2
+ cfg.AnthropicTemperature = &t
+ }
+ return newAnthropic(cfg.AnthropicBaseURL, cfg.AnthropicModel, anthropicAPIKey, cfg.AnthropicTemperature), nil
default:
return nil, errors.New("unknown LLM provider: " + p)
}
diff --git a/internal/llm/provider_more2_test.go b/internal/llm/provider_more2_test.go
index e001e5c..86b149a 100644
--- a/internal/llm/provider_more2_test.go
+++ b/internal/llm/provider_more2_test.go
@@ -5,7 +5,7 @@ import "testing"
func TestNewFromConfig_Copilot(t *testing.T) {
t.Setenv("COPILOT_API_KEY", "x")
cfg := Config{Provider: "copilot", CopilotModel: "small"}
- c, err := NewFromConfig(cfg, "", "", "x")
+ c, err := NewFromConfig(cfg, "", "", "x", "")
if err != nil || c == nil {
t.Fatalf("copilot provider failed: %v %v", c, err)
}
diff --git a/internal/llm/provider_more_test.go b/internal/llm/provider_more_test.go
index eff99e6..caad912 100644
--- a/internal/llm/provider_more_test.go
+++ b/internal/llm/provider_more_test.go
@@ -16,13 +16,13 @@ func TestWithOptions_Apply(t *testing.T) {
func TestNewFromConfig_Success_OpenAI_And_Copilot(t *testing.T) {
// OpenAI success
oc := Config{Provider: "openai", OpenAIBaseURL: "http://x", OpenAIModel: "gpt"}
- c, err := NewFromConfig(oc, "KEY", "", "")
+ c, err := NewFromConfig(oc, "KEY", "", "", "")
if err != nil || c == nil || c.Name() != "openai" || c.DefaultModel() == "" {
t.Fatalf("openai new: %v %v", c, err)
}
// Copilot success
cc := Config{Provider: "copilot", CopilotBaseURL: "http://x", CopilotModel: "gpt-4o-mini"}
- c2, err := NewFromConfig(cc, "", "", "KEY")
+ c2, err := NewFromConfig(cc, "", "", "KEY", "")
if err != nil || c2 == nil || c2.Name() != "copilot" || c2.DefaultModel() == "" {
t.Fatalf("copilot new: %v %v", c2, err)
}
diff --git a/internal/llm/provider_test.go b/internal/llm/provider_test.go
index 8c5d2cb..bd565b3 100644
--- a/internal/llm/provider_test.go
+++ b/internal/llm/provider_test.go
@@ -6,15 +6,15 @@ import (
func TestNewFromConfig_DefaultsAndErrors(t *testing.T) {
// Unknown provider
- if _, err := NewFromConfig(Config{Provider: "bogus"}, "", "", ""); err == nil {
+ if _, err := NewFromConfig(Config{Provider: "bogus"}, "", "", "", ""); err == nil {
t.Fatalf("expected error for unknown provider")
}
// OpenAI missing key
- if _, err := NewFromConfig(Config{Provider: "openai", OpenAIModel: "g"}, "", "", ""); err == nil {
+ if _, err := NewFromConfig(Config{Provider: "openai", OpenAIModel: "g"}, "", "", "", ""); err == nil {
t.Fatalf("expected key error")
}
// Copilot missing key
- if _, err := NewFromConfig(Config{Provider: "copilot", CopilotModel: "m"}, "", "", ""); err == nil {
+ if _, err := NewFromConfig(Config{Provider: "copilot", CopilotModel: "m"}, "", "", "", ""); err == nil {
t.Fatalf("expected key error")
}
}
diff --git a/internal/llmutils/client.go b/internal/llmutils/client.go
index 2f3da55..53fca9c 100644
--- a/internal/llmutils/client.go
+++ b/internal/llmutils/client.go
@@ -24,6 +24,9 @@ func NewClientFromApp(cfg appconfig.App) (llm.Client, error) {
CopilotBaseURL: cfg.CopilotBaseURL,
CopilotModel: cfg.CopilotModel,
CopilotTemperature: cfg.CopilotTemperature,
+ AnthropicBaseURL: cfg.AnthropicBaseURL,
+ AnthropicModel: cfg.AnthropicModel,
+ AnthropicTemperature: cfg.AnthropicTemperature,
}
oaKey := os.Getenv("HEXAI_OPENAI_API_KEY")
if strings.TrimSpace(oaKey) == "" {
@@ -37,5 +40,9 @@ func NewClientFromApp(cfg appconfig.App) (llm.Client, error) {
if strings.TrimSpace(cpKey) == "" {
cpKey = os.Getenv("COPILOT_API_KEY")
}
- return llm.NewFromConfig(llmCfg, oaKey, orKey, cpKey)
+ anKey := os.Getenv("HEXAI_ANTHROPIC_API_KEY")
+ if strings.TrimSpace(anKey) == "" {
+ anKey = os.Getenv("ANTHROPIC_API_KEY")
+ }
+ return llm.NewFromConfig(llmCfg, oaKey, orKey, cpKey, anKey)
}
diff --git a/internal/lsp/server.go b/internal/lsp/server.go
index e3a21f3..67e3cab 100644
--- a/internal/lsp/server.go
+++ b/internal/lsp/server.go
@@ -230,6 +230,9 @@ func newClientForProvider(cfg appconfig.App, provider string) (llm.Client, error
CopilotBaseURL: cfg.CopilotBaseURL,
CopilotModel: cfg.CopilotModel,
CopilotTemperature: cfg.CopilotTemperature,
+ AnthropicBaseURL: cfg.AnthropicBaseURL,
+ AnthropicModel: cfg.AnthropicModel,
+ AnthropicTemperature: cfg.AnthropicTemperature,
}
oaKey := strings.TrimSpace(os.Getenv("HEXAI_OPENAI_API_KEY"))
if oaKey == "" {
@@ -243,7 +246,11 @@ func newClientForProvider(cfg appconfig.App, provider string) (llm.Client, error
if cpKey == "" {
cpKey = strings.TrimSpace(os.Getenv("COPILOT_API_KEY"))
}
- return llm.NewFromConfig(llmCfg, oaKey, orKey, cpKey)
+ anKey := strings.TrimSpace(os.Getenv("HEXAI_ANTHROPIC_API_KEY"))
+ if anKey == "" {
+ anKey = strings.TrimSpace(os.Getenv("ANTHROPIC_API_KEY"))
+ }
+ return llm.NewFromConfig(llmCfg, oaKey, orKey, cpKey, anKey)
}
func (s *Server) clientFor(spec requestSpec) llm.Client {