summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorPaul Buetow <paul@buetow.org>2025-08-19 23:12:56 +0300
committerPaul Buetow <paul@buetow.org>2025-08-19 23:12:56 +0300
commitfd8e2fe8177305c9271d12c90cc6ad2ed73a1673 (patch)
tree362e20bb2ddc20821b1666c207cf073b1ff13f34
parent7abb7c9177d34f3b2a1773624f0da7daa8c8e2de (diff)
config: add HEXAI_* env overrides with precedence; prefer HEXAI_OPENAI_API_KEY over OPENAI_API_KEY; update docs
-rw-r--r--README.md21
-rw-r--r--internal/appconfig/config.go119
-rw-r--r--internal/hexaicli/run.go10
-rw-r--r--internal/hexailsp/run.go8
4 files changed, 134 insertions, 24 deletions
diff --git a/README.md b/README.md
index fe70232..28d9348 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,7 @@ Hexai, the AI LSP for the Helix editor and also a simple command line tool to in
It has been coded with AI and human review.
-Hexai exposes a simple LLM provider interface. It supports OpenAI, GitHub Copilot, and a local Ollama server. Provider selection and models are configured via a JSON configuration file.
+Hexai exposes a simple LLM provider interface. It supports OpenAI, GitHub Copilot, and a local Ollama server. Provider selection and models are configured via a JSON configuration file (overridable via environment variables).
## Configuration
@@ -45,7 +45,22 @@ Hexai exposes a simple LLM provider interface. It supports OpenAI, GitHub Copilo
* copilot_model, copilot_base_url, copilot_temperature: Copilot-only options
* ollama_model, ollama_base_url, ollama_temperature: Ollama-only options
-Ensure `OPENAI_API_KEY` or `COPILOT_API_KEY` is set in your environment according to your chosen provider.
+Ensure `HEXAI_OPENAI_API_KEY` (or `OPENAI_API_KEY`) or `COPILOT_API_KEY` is set in your environment according to your chosen provider.
+
+### Environment overrides
+
+- All config-file options can be overridden by environment variables prefixed with `HEXAI_`.
+- Env values take precedence over `config.json`.
+- Examples:
+ - `HEXAI_PROVIDER`, `HEXAI_MAX_TOKENS`, `HEXAI_CONTEXT_MODE`, `HEXAI_CONTEXT_WINDOW_LINES`, `HEXAI_MAX_CONTEXT_TOKENS`, `HEXAI_LOG_PREVIEW_LIMIT`
+ - `HEXAI_CODING_TEMPERATURE`
+  - `HEXAI_TRIGGER_CHARACTERS` (comma-separated, e.g. `".,:,_"`; each entry is whitespace-trimmed and empty entries are ignored, so a space cannot be configured as a trigger character via this variable)
+ - `HEXAI_OPENAI_MODEL`, `HEXAI_OPENAI_BASE_URL`, `HEXAI_OPENAI_TEMPERATURE`
+ - `HEXAI_COPILOT_MODEL`, `HEXAI_COPILOT_BASE_URL`, `HEXAI_COPILOT_TEMPERATURE`
+ - `HEXAI_OLLAMA_MODEL`, `HEXAI_OLLAMA_BASE_URL`, `HEXAI_OLLAMA_TEMPERATURE`
+- API keys:
+ - OpenAI: prefer `HEXAI_OPENAI_API_KEY`, falling back to `OPENAI_API_KEY`.
+ - Copilot: use `COPILOT_API_KEY`.
### Selecting a provider
@@ -54,7 +69,7 @@ Ensure `OPENAI_API_KEY` or `COPILOT_API_KEY` is set in your environment accordin
### OpenAI configuration
-- Required: `OPENAI_API_KEY` — provided via environment variable only.
+- Required: `HEXAI_OPENAI_API_KEY` (or `OPENAI_API_KEY`) — provided via environment variable only.
- In config file:
- `openai_model` — model name (default: `gpt-4.1`).
- `openai_base_url` — API base (default: `https://api.openai.com/v1`).
diff --git a/internal/appconfig/config.go b/internal/appconfig/config.go
index 3067dd1..58bcc3a 100644
--- a/internal/appconfig/config.go
+++ b/internal/appconfig/config.go
@@ -2,13 +2,14 @@
package appconfig
import (
- "encoding/json"
- "fmt"
- "log"
- "os"
- "path/filepath"
- "slices"
- "strings"
+ "encoding/json"
+ "fmt"
+ "log"
+ "os"
+ "path/filepath"
+ "slices"
+ "strconv"
+ "strings"
)
// App holds user-configurable settings read from ~/.config/hexai/config.json.
@@ -60,10 +61,10 @@ func newDefaultConfig() App {
// Load reads configuration from a file and merges with defaults.
// It respects the XDG Base Directory Specification.
func Load(logger *log.Logger) App {
- cfg := newDefaultConfig()
- if logger == nil {
- return cfg // Return defaults if no logger is provided (e.g. in tests)
- }
+ cfg := newDefaultConfig()
+ if logger == nil {
+ return cfg // Return defaults if no logger is provided (e.g. in tests)
+ }
configPath, err := getConfigPath()
if err != nil {
@@ -76,8 +77,12 @@ func Load(logger *log.Logger) App {
return cfg
}
- cfg.mergeWith(fileCfg)
- return cfg
+ cfg.mergeWith(fileCfg)
+ // Environment overrides (take precedence over file)
+ if envCfg := loadFromEnv(logger); envCfg != nil {
+ cfg.mergeWith(envCfg)
+ }
+ return cfg
}
// Private helpers
@@ -103,8 +108,8 @@ func loadFromFile(path string, logger *log.Logger) (*App, error) {
}
func (a *App) mergeWith(other *App) {
- a.mergeBasics(other)
- a.mergeProviderFields(other)
+ a.mergeBasics(other)
+ a.mergeProviderFields(other)
}
// mergeBasics merges general (non-provider) fields.
@@ -177,5 +182,87 @@ func getConfigPath() (string, error) {
}
configPath = filepath.Join(home, ".config", "hexai", "config.json")
}
- return configPath, nil
+ return configPath, nil
+}
+
+// --- Environment overrides ---
+
+// loadFromEnv constructs an App containing only fields set via HEXAI_* env vars.
+// These values should take precedence over file config when merged.
+func loadFromEnv(logger *log.Logger) *App {
+ var out App
+ var any bool
+
+ // helpers
+ getenv := func(k string) string { return strings.TrimSpace(os.Getenv(k)) }
+ parseInt := func(k string) (int, bool) {
+ v := getenv(k)
+ if v == "" { return 0, false }
+ n, err := strconv.Atoi(v)
+ if err != nil {
+ if logger != nil { logger.Printf("invalid %s: %v", k, err) }
+ return 0, false
+ }
+ return n, true
+ }
+ parseFloatPtr := func(k string) (*float64, bool) {
+ v := getenv(k)
+ if v == "" { return nil, false }
+ f, err := strconv.ParseFloat(v, 64)
+ if err != nil {
+ if logger != nil { logger.Printf("invalid %s: %v", k, err) }
+ return nil, false
+ }
+ return &f, true
+ }
+
+ if n, ok := parseInt("HEXAI_MAX_TOKENS"); ok {
+ out.MaxTokens = n; any = true
+ }
+ if s := getenv("HEXAI_CONTEXT_MODE"); s != "" {
+ out.ContextMode = s; any = true
+ }
+ if n, ok := parseInt("HEXAI_CONTEXT_WINDOW_LINES"); ok {
+ out.ContextWindowLines = n; any = true
+ }
+ if n, ok := parseInt("HEXAI_MAX_CONTEXT_TOKENS"); ok {
+ out.MaxContextTokens = n; any = true
+ }
+ if n, ok := parseInt("HEXAI_LOG_PREVIEW_LIMIT"); ok {
+ out.LogPreviewLimit = n; any = true
+ }
+ if f, ok := parseFloatPtr("HEXAI_CODING_TEMPERATURE"); ok {
+ out.CodingTemperature = f; any = true
+ }
+ if s := getenv("HEXAI_TRIGGER_CHARACTERS"); s != "" {
+ parts := strings.Split(s, ",")
+ out.TriggerCharacters = nil
+ for _, p := range parts {
+ if t := strings.TrimSpace(p); t != "" {
+ out.TriggerCharacters = append(out.TriggerCharacters, t)
+ }
+ }
+ any = true
+ }
+ if s := getenv("HEXAI_PROVIDER"); s != "" {
+ out.Provider = s; any = true
+ }
+
+ // Provider-specific
+ if s := getenv("HEXAI_OPENAI_BASE_URL"); s != "" { out.OpenAIBaseURL = s; any = true }
+ if s := getenv("HEXAI_OPENAI_MODEL"); s != "" { out.OpenAIModel = s; any = true }
+ if f, ok := parseFloatPtr("HEXAI_OPENAI_TEMPERATURE"); ok { out.OpenAITemperature = f; any = true }
+
+ if s := getenv("HEXAI_OLLAMA_BASE_URL"); s != "" { out.OllamaBaseURL = s; any = true }
+ if s := getenv("HEXAI_OLLAMA_MODEL"); s != "" { out.OllamaModel = s; any = true }
+ if f, ok := parseFloatPtr("HEXAI_OLLAMA_TEMPERATURE"); ok { out.OllamaTemperature = f; any = true }
+
+ if s := getenv("HEXAI_COPILOT_BASE_URL"); s != "" { out.CopilotBaseURL = s; any = true }
+ if s := getenv("HEXAI_COPILOT_MODEL"); s != "" { out.CopilotModel = s; any = true }
+ if f, ok := parseFloatPtr("HEXAI_COPILOT_TEMPERATURE"); ok { out.CopilotTemperature = f; any = true }
+
+ if !any {
+ return nil
+ }
+ return &out
}
diff --git a/internal/hexaicli/run.go b/internal/hexaicli/run.go
index cfc70ec..8cd5c82 100644
--- a/internal/hexaicli/run.go
+++ b/internal/hexaicli/run.go
@@ -83,9 +83,13 @@ func newClientFromConfig(cfg appconfig.App) (llm.Client, error) {
CopilotModel: cfg.CopilotModel,
CopilotTemperature: cfg.CopilotTemperature,
}
- oaKey := os.Getenv("OPENAI_API_KEY")
- cpKey := os.Getenv("COPILOT_API_KEY")
- return llm.NewFromConfig(llmCfg, oaKey, cpKey)
+ // Prefer HEXAI_OPENAI_API_KEY; fall back to OPENAI_API_KEY
+ oaKey := os.Getenv("HEXAI_OPENAI_API_KEY")
+ if strings.TrimSpace(oaKey) == "" {
+ oaKey = os.Getenv("OPENAI_API_KEY")
+ }
+ cpKey := os.Getenv("COPILOT_API_KEY")
+ return llm.NewFromConfig(llmCfg, oaKey, cpKey)
}
// buildMessages creates system and user messages based on input content.
diff --git a/internal/hexailsp/run.go b/internal/hexailsp/run.go
index 8721a60..0d488f0 100644
--- a/internal/hexailsp/run.go
+++ b/internal/hexailsp/run.go
@@ -77,8 +77,12 @@ func buildClientIfNil(cfg appconfig.App, client llm.Client) llm.Client {
CopilotModel: cfg.CopilotModel,
CopilotTemperature: cfg.CopilotTemperature,
}
- oaKey := os.Getenv("OPENAI_API_KEY")
- cpKey := os.Getenv("COPILOT_API_KEY")
+ // Prefer HEXAI_OPENAI_API_KEY; fall back to OPENAI_API_KEY
+ oaKey := os.Getenv("HEXAI_OPENAI_API_KEY")
+ if strings.TrimSpace(oaKey) == "" {
+ oaKey = os.Getenv("OPENAI_API_KEY")
+ }
+ cpKey := os.Getenv("COPILOT_API_KEY")
if c, err := llm.NewFromConfig(llmCfg, oaKey, cpKey); err != nil {
logging.Logf("lsp ", "llm disabled: %v", err)
return nil