// Summary: Hexai LSP entrypoint; parses flags and delegates to internal/hexailsp.
package main
import (
"flag"
"fmt"
"log"
"os"
"strings"
"codeberg.org/snonux/hexai/internal"
"codeberg.org/snonux/hexai/internal/appconfig"
"codeberg.org/snonux/hexai/internal/hexailsp"
)
// main parses CLI flags and starts the Hexai LSP server over stdio.
func main() {
	logPath := flag.String("log", "/tmp/hexai-lsp.log", "path to log file (optional)")
	defaultCfg := defaultConfigPath()
	configPath := flag.String("config", "", fmt.Sprintf("path to config file (default: %s)", defaultCfg))
	showVersion := flag.Bool("version", false, "print version and exit")
	flag.Parse()
	if *showVersion {
		// Print to stdout without the log package's timestamp/stderr routing,
		// matching the hexai CLI entrypoint's -version behavior so scripts can
		// consume the output.
		fmt.Println(internal.Version)
		return
	}
	path := strings.TrimSpace(*configPath)
	if err := hexailsp.RunWithConfig(*logPath, path, os.Stdin, os.Stdout, os.Stderr); err != nil {
		log.Fatalf("server error: %v", err)
	}
}
// defaultConfigPath returns the resolved user config path for use in help
// text, falling back to an XDG-style placeholder when resolution fails.
func defaultConfigPath() string {
	if p, err := appconfig.ConfigPath(); err == nil {
		return p
	}
	return "$XDG_CONFIG_HOME/hexai/config.toml"
}
package main
import (
"context"
"flag"
"fmt"
"os"
"strings"
"codeberg.org/snonux/hexai/internal/appconfig"
"codeberg.org/snonux/hexai/internal/hexaiaction"
)
// main parses flags for the hexai action helper and dispatches to
// hexaiaction.RunCommand, exiting non-zero on failure.
func main() {
	var (
		infile      = flag.String("infile", "", "Read input from this file instead of stdin")
		outfile     = flag.String("outfile", "", "Write output to this file instead of stdout")
		uiChild     = flag.Bool("ui-child", false, "INTERNAL: run interactive UI and write to -outfile atomically")
		configPath  = flag.String("config", "", fmt.Sprintf("path to config file (default: %s)", defaultConfigPath()))
		tmuxTarget  = flag.String("tmux-target", "", "tmux split target (advanced)")
		tmuxSplit   = flag.String("tmux-split", "v", "tmux split orientation: v or h")
		tmuxPercent = flag.Int("tmux-percent", 33, "tmux split size percentage (1-100)")
	)
	flag.Parse()
	ctx := context.Background()
	// Only attach a config path to the context when one was explicitly given.
	if p := strings.TrimSpace(*configPath); p != "" {
		ctx = hexaiaction.WithConfigPath(ctx, p)
	}
	opts := hexaiaction.Options{
		Infile:      *infile,
		Outfile:     *outfile,
		UIChild:     *uiChild,
		TmuxTarget:  *tmuxTarget,
		TmuxSplit:   *tmuxSplit,
		TmuxPercent: *tmuxPercent,
	}
	if err := hexaiaction.RunCommand(ctx, opts, os.Stdin, os.Stdout, os.Stderr); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
// defaultConfigPath resolves the default config file location; when the
// lookup fails it returns a descriptive XDG placeholder instead.
func defaultConfigPath() string {
	p, err := appconfig.ConfigPath()
	if err != nil {
		return "$XDG_CONFIG_HOME/hexai/config.toml"
	}
	return p
}
// Summary: Hexai CLI entrypoint; parses flags and delegates to internal/hexaicli.
package main
import (
"context"
"flag"
"fmt"
"io"
"log"
"os"
"strconv"
"strings"
"codeberg.org/snonux/hexai/internal"
"codeberg.org/snonux/hexai/internal/appconfig"
"codeberg.org/snonux/hexai/internal/hexaicli"
)
// main is the Hexai CLI entrypoint. It pre-scans os.Args for -config so the
// configuration can be loaded before flags are defined (the dynamic
// per-provider selection flags depend on the configured CLI entries), then
// delegates to hexaicli.Run.
func main() {
	// Extract -config early; the flag definitions below depend on the config.
	configPath, remaining := splitConfigPath(os.Args[1:])
	// Discard config-load logging at this stage.
	logger := log.New(io.Discard, "", 0)
	cfg := appconfig.LoadWithOptions(logger, appconfig.LoadOptions{ConfigPath: configPath})
	cliEntries := cfg.CLIConfigs
	if len(cliEntries) == 0 {
		// No per-CLI model entries configured: fall back to the global provider.
		cliEntries = []appconfig.SurfaceConfig{{Provider: cfg.Provider}}
	}
	fs := flag.NewFlagSet(os.Args[0], flag.ExitOnError)
	defaultPath := defaultConfigPath()
	configFlag := fs.String("config", configPath, fmt.Sprintf("path to config file (default: %s)", defaultPath))
	showVersion := fs.Bool("version", false, "print version and exit")
	// One numeric boolean flag per configured CLI provider (-0, -1, ...) so
	// the user can restrict execution to specific providers.
	selectedFlags := make([]bool, len(cliEntries))
	for i, entry := range cliEntries {
		name := strconv.Itoa(i)
		provider := strings.TrimSpace(entry.Provider)
		if provider == "" {
			provider = cfg.Provider
		}
		model := strings.TrimSpace(entry.Model)
		if model == "" {
			model = pickDefaultModel(cfg, provider)
		}
		desc := fmt.Sprintf("use only provider #%d (%s:%s)", i, provider, model)
		fs.BoolVar(&selectedFlags[i], name, false, desc)
	}
	// ExitOnError: Parse exits the process on bad flags, so the error is moot.
	_ = fs.Parse(remaining)
	if *showVersion {
		fmt.Fprintln(os.Stdout, internal.Version)
		return
	}
	// Collect the indices of all providers the user explicitly selected.
	var selection []int
	for i, sel := range selectedFlags {
		if sel {
			selection = append(selection, i)
		}
	}
	// Prefer the flag value; fall back to the pre-scanned path.
	finalPath := strings.TrimSpace(*configFlag)
	if finalPath == "" {
		finalPath = configPath
	}
	ctx := context.Background()
	if finalPath != "" {
		ctx = hexaicli.WithCLIConfigPath(ctx, finalPath)
	}
	if len(selection) > 0 {
		ctx = hexaicli.WithCLISelection(ctx, selection)
	}
	// NOTE(review): Run receives os.Stderr, so it presumably reports its own
	// errors — confirm; main only converts failure to a non-zero exit code.
	if err := hexaicli.Run(ctx, fs.Args(), os.Stdin, os.Stdout, os.Stderr); err != nil {
		os.Exit(1)
	}
}
// splitConfigPath extracts a -config/--config value from args and returns the
// trimmed path together with the remaining (non-config) arguments. Both the
// separated ("--config path") and inline ("--config=path") forms are
// recognized; the last occurrence wins. A trailing flag with no value is
// dropped silently.
func splitConfigPath(args []string) (string, []string) {
	var path string
	rest := make([]string, 0, len(args))
	for i := 0; i < len(args); i++ {
		arg := args[i]
		switch {
		case arg == "--config" || arg == "-config":
			// Consume the following token as the value when present.
			if i+1 < len(args) {
				i++
				path = args[i]
			}
		case strings.HasPrefix(arg, "--config="):
			path = strings.TrimPrefix(arg, "--config=")
		case strings.HasPrefix(arg, "-config="):
			path = strings.TrimPrefix(arg, "-config=")
		default:
			rest = append(rest, arg)
		}
	}
	return strings.TrimSpace(path), rest
}
// pickDefaultModel returns the configured default model for the given
// provider name (case-insensitive, whitespace-tolerant), falling back to the
// OpenAI model for unknown providers.
func pickDefaultModel(cfg appconfig.App, provider string) string {
	switch strings.ToLower(strings.TrimSpace(provider)) {
	case "ollama":
		return strings.TrimSpace(cfg.OllamaModel)
	case "copilot":
		return strings.TrimSpace(cfg.CopilotModel)
	case "openrouter":
		// Previously fell through to the OpenAI model even though the config
		// carries a dedicated OpenRouterModel field.
		return strings.TrimSpace(cfg.OpenRouterModel)
	default:
		return strings.TrimSpace(cfg.OpenAIModel)
	}
}
// defaultConfigPath returns the user's config file path for help text,
// substituting an XDG placeholder when the path cannot be determined.
func defaultConfigPath() string {
	if p, err := appconfig.ConfigPath(); err == nil {
		return p
	}
	return "$XDG_CONFIG_HOME/hexai/config.toml"
}
// Summary: Application configuration model and loader; reads ~/.config/hexai/config.toml and merges defaults.
package appconfig
import (
"fmt"
"log"
"os"
"path/filepath"
"slices"
"strconv"
"strings"
"github.com/pelletier/go-toml/v2"
)
// SurfaceConfig describes a provider/model pairing (with optional temperature).
// Entries are parsed from the [models] tables in the config file; empty fields
// fall back to the global/provider defaults at the point of use.
type SurfaceConfig struct {
	Provider    string   // provider name (e.g. "openai", "ollama"); empty means global default
	Model       string   // model identifier; empty means provider default
	Temperature *float64 // optional sampling temperature; nil means provider default
}
// App holds user-configurable settings read from ~/.config/hexai/config.toml.
// Defaults come from newDefaultConfig; file and environment values are merged
// on top (see LoadWithOptions). Fields tagged `toml:"-"` are configured only
// via the sectioned file format or not serialized at all.
type App struct {
	// Token and context budgets for LLM requests.
	MaxTokens          int    `json:"max_tokens" toml:"max_tokens"`
	ContextMode        string `json:"context_mode" toml:"context_mode"`
	ContextWindowLines int    `json:"context_window_lines" toml:"context_window_lines"`
	MaxContextTokens   int    `json:"max_context_tokens" toml:"max_context_tokens"`
	// Truncation limit for payload previews written to the log.
	LogPreviewLimit int `json:"log_preview_limit" toml:"log_preview_limit"`
	// Single knob for LSP requests; if set, overrides hardcoded temps in LSP.
	CodingTemperature *float64 `json:"coding_temperature" toml:"coding_temperature"`
	// Minimum identifier characters required for manual (TriggerKind=1) invoke
	// to proceed without structural triggers. 0 means always allow.
	ManualInvokeMinPrefix int `json:"manual_invoke_min_prefix" toml:"manual_invoke_min_prefix"`
	// Completion debounce in milliseconds. When > 0, the server waits until
	// there has been no text change for at least this duration before sending
	// an LLM completion request.
	CompletionDebounceMs int `json:"completion_debounce_ms" toml:"completion_debounce_ms"`
	// Completion throttle in milliseconds. When > 0, caps the minimum spacing
	// between LLM requests (both chat and code-completer paths).
	CompletionThrottleMs int      `json:"completion_throttle_ms" toml:"completion_throttle_ms"`
	TriggerCharacters    []string `json:"trigger_characters" toml:"trigger_characters"`
	// Provider selects the default LLM backend ("openai", "ollama", ...).
	Provider string `json:"provider" toml:"provider"`
	// Inline prompt trigger characters (default: >!text> and >>!text>)
	InlineOpen  string `json:"inline_open" toml:"inline_open"`
	InlineClose string `json:"inline_close" toml:"inline_close"`
	// In-editor chat triggers (default: suffix ">" after one of [?, !, :, ;])
	ChatSuffix   string   `json:"chat_suffix" toml:"chat_suffix"`
	ChatPrefixes []string `json:"chat_prefixes" toml:"chat_prefixes"`
	// Provider-specific options
	OpenAIBaseURL string `json:"openai_base_url" toml:"openai_base_url"`
	OpenAIModel   string `json:"openai_model" toml:"openai_model"`
	// Default temperature for OpenAI requests (nil means use provider default)
	OpenAITemperature *float64 `json:"openai_temperature" toml:"openai_temperature"`
	OpenRouterBaseURL string   `json:"openrouter_base_url" toml:"openrouter_base_url"`
	OpenRouterModel   string   `json:"openrouter_model" toml:"openrouter_model"`
	// Default temperature for OpenRouter requests (nil means use provider default)
	OpenRouterTemperature *float64 `json:"openrouter_temperature" toml:"openrouter_temperature"`
	OllamaBaseURL         string   `json:"ollama_base_url" toml:"ollama_base_url"`
	OllamaModel           string   `json:"ollama_model" toml:"ollama_model"`
	// Default temperature for Ollama requests (nil means use provider default)
	OllamaTemperature *float64 `json:"ollama_temperature" toml:"ollama_temperature"`
	CopilotBaseURL    string   `json:"copilot_base_url" toml:"copilot_base_url"`
	CopilotModel      string   `json:"copilot_model" toml:"copilot_model"`
	// Default temperature for Copilot requests (nil means use provider default)
	CopilotTemperature *float64 `json:"copilot_temperature" toml:"copilot_temperature"`
	// Per-surface provider/model configurations (ordered; first entry is primary)
	CompletionConfigs []SurfaceConfig `json:"-" toml:"-"`
	CodeActionConfigs []SurfaceConfig `json:"-" toml:"-"`
	ChatConfigs       []SurfaceConfig `json:"-" toml:"-"`
	CLIConfigs        []SurfaceConfig `json:"-" toml:"-"`
	// Prompt templates (configured only via file; no env overrides)
	// Completion/chat/code action/CLI prompt strings. See config.toml.example for placeholders.
	// Completion
	PromptCompletionSystemGeneral string `json:"-" toml:"-"`
	PromptCompletionSystemParams  string `json:"-" toml:"-"`
	PromptCompletionSystemInline  string `json:"-" toml:"-"`
	PromptCompletionUserGeneral   string `json:"-" toml:"-"`
	PromptCompletionUserParams    string `json:"-" toml:"-"`
	PromptCompletionExtraHeader   string `json:"-" toml:"-"`
	// Provider-native code-completer
	PromptNativeCompletion string `json:"-" toml:"-"`
	// In-editor chat
	PromptChatSystem string `json:"-" toml:"-"`
	// Code actions
	PromptCodeActionRewriteSystem     string `json:"-" toml:"-"`
	PromptCodeActionDiagnosticsSystem string `json:"-" toml:"-"`
	PromptCodeActionDocumentSystem    string `json:"-" toml:"-"`
	PromptCodeActionRewriteUser       string `json:"-" toml:"-"`
	PromptCodeActionDiagnosticsUser   string `json:"-" toml:"-"`
	PromptCodeActionDocumentUser      string `json:"-" toml:"-"`
	PromptCodeActionGoTestSystem      string `json:"-" toml:"-"`
	PromptCodeActionGoTestUser        string `json:"-" toml:"-"`
	PromptCodeActionSimplifySystem    string `json:"-" toml:"-"`
	PromptCodeActionSimplifyUser      string `json:"-" toml:"-"`
	// CLI
	PromptCLIDefaultSystem string `json:"-" toml:"-"`
	PromptCLIExplainSystem string `json:"-" toml:"-"`
	// Custom code actions and tmux integration
	CustomActions        []CustomAction `json:"-" toml:"-"`
	TmuxCustomMenuHotkey string         `json:"-" toml:"-"`
	// Stats
	StatsWindowMinutes int `json:"-" toml:"-"`
}
// CustomAction describes a user-defined code action. Entries are loaded from
// [[prompts.code_action.custom]] tables (see sectionCustomAction).
type CustomAction struct {
	ID          string // stable identifier for the action
	Title       string // human-readable title shown in the editor/menu
	Kind        string // optional; default "refactor"
	Scope       string // "selection" (default) | "diagnostics"
	Hotkey      string // optional, used by tmux submenu
	Instruction string // optional; if set and User is empty, use global rewrite templates
	System      string // optional; used only when User is set
	User        string // optional; if set, render with available vars
}
// Constructor: defaults for App (kept first among functions)
// newDefaultConfig returns the built-in defaults onto which file and
// environment settings are merged. All defaulted temperatures share the same
// pointer to the local t.
func newDefaultConfig() App {
	// Coding-friendly default temperature across providers
	// Users can override per provider in config.toml (including 0.0).
	t := 0.2
	return App{
		MaxTokens:          4000,
		ContextMode:        "always-full",
		ContextWindowLines: 120,
		MaxContextTokens:   4000,
		LogPreviewLimit:    100,
		CodingTemperature:  &t,
		OpenAITemperature:  &t,
		OllamaTemperature:  &t,
		// NOTE(review): OpenRouterTemperature is not defaulted here, unlike
		// the other providers — confirm whether that is intentional.
		CopilotTemperature:    &t,
		ManualInvokeMinPrefix: 0,
		CompletionDebounceMs:  800,
		CompletionThrottleMs:  0,
		// Inline/chat trigger defaults
		InlineOpen:   ">!",
		InlineClose:  ">",
		ChatSuffix:   ">",
		ChatPrefixes: []string{"?", "!", ":", ";"},
		// Default prompt templates (match current hard-coded strings)
		PromptCompletionSystemParams:      "You are a code completion engine for function signatures. Return only the parameter list contents (without parentheses), no braces, no prose. Prefer idiomatic names and types.",
		PromptCompletionUserParams:        "Cursor is inside the function parameter list. Suggest only the parameter list (no parentheses).\nFunction line: {{function}}\nCurrent line (cursor at {{char}}): {{current}}",
		PromptCompletionSystemGeneral:     "You are a terse code completion engine. Return only the code to insert, no surrounding prose or backticks. Only continue from the cursor; never repeat characters already present to the left of the cursor on the current line (e.g., if 'name :=' is already typed, only return the right-hand side expression).",
		PromptCompletionUserGeneral:       "Provide the next likely code to insert at the cursor.\nFile: {{file}}\nFunction/context: {{function}}\nAbove line: {{above}}\nCurrent line (cursor at character {{char}}): {{current}}\nBelow line: {{below}}\nOnly return the completion snippet.",
		PromptCompletionSystemInline:      "You are a precise code completion/refactoring engine. Output only the code to insert with no prose, no comments, and no backticks. Return raw code only.",
		PromptCompletionExtraHeader:       "Additional context:\n{{context}}",
		PromptNativeCompletion:            "// Path: {{path}}\n{{before}}",
		PromptChatSystem:                  "You are a helpful coding assistant. Answer concisely and clearly.",
		PromptCodeActionRewriteSystem:     "You are a precise code refactoring engine. Rewrite the given code strictly according to the instruction. Return only the updated code with no prose or backticks. Preserve formatting where reasonable.",
		PromptCodeActionDiagnosticsSystem: "You are a precise code fixer. Resolve the given diagnostics by editing only the selected code. Return only the corrected code with no prose or backticks. Keep behavior and style, and avoid unrelated changes.",
		PromptCodeActionDocumentSystem:    "You are a precise code documentation engine. Add idiomatic documentation comments to the given code. Preserve exact behavior and formatting as much as possible. Return only the updated code with comments, no prose or backticks.",
		PromptCodeActionRewriteUser:       "Instruction: {{instruction}}\n\nSelected code to transform:\n{{selection}}",
		PromptCodeActionDiagnosticsUser:   "Diagnostics to resolve (selection only):\n{{diagnostics}}\n\nSelected code:\n{{selection}}",
		PromptCodeActionDocumentUser:      "Add documentation comments to this code:\n{{selection}}",
		PromptCodeActionGoTestSystem:      "You are a precise Go unit test generator. Given a Go function, write one or more Test* functions using the testing package. Do NOT include package or imports, only the test function(s). Prefer table-driven tests. Keep it minimal and idiomatic.",
		PromptCodeActionGoTestUser:        "Function under test:\n{{function}}",
		PromptCodeActionSimplifySystem:    "You are a precise code improvement engine. Simplify and improve the given code while preserving behavior. Return only the improved code with no prose or backticks.",
		PromptCodeActionSimplifyUser:      "Improve this code:\n{{selection}}",
		PromptCLIDefaultSystem:            "You are Hexai CLI. Default to very short, concise answers. If the user asks for commands, output only the commands (one per line) with no commentary or explanation. Only when the word 'explain' appears in the prompt, produce a verbose explanation.",
		PromptCLIExplainSystem:            "You are Hexai CLI. The user requested an explanation. Provide a clear, verbose explanation with reasoning and details. If commands are needed, include them with brief context.",
		// Stats
		StatsWindowMinutes: 60,
	}
}
// Load reads configuration from the default location and merges it with the
// built-in defaults. It respects the XDG Base Directory Specification and is
// shorthand for LoadWithOptions with zero options.
func Load(logger *log.Logger) App {
	return LoadWithOptions(logger, LoadOptions{})
}
// LoadOptions tune how configuration is loaded at runtime.
type LoadOptions struct {
	// IgnoreEnv skips applying environment overrides when true.
	IgnoreEnv bool
	// ConfigPath, when non-blank, is read instead of the default XDG location.
	ConfigPath string
}
// LoadWithOptions reads configuration and applies the requested loading
// options. Precedence: built-in defaults < config file < environment
// (unless opts.IgnoreEnv is set).
func LoadWithOptions(logger *log.Logger, opts LoadOptions) App {
	cfg := newDefaultConfig()
	if logger == nil {
		// No logger usually means a test context; keep pure defaults.
		return cfg
	}
	explicit := strings.TrimSpace(opts.ConfigPath)
	if explicit != "" {
		fileCfg, err := loadFromFile(explicit, logger)
		if err != nil {
			logger.Printf("cannot open config file %s: %v", explicit, err)
		} else if fileCfg != nil {
			cfg.mergeWith(fileCfg)
		}
	} else {
		path, err := getConfigPath()
		if err != nil {
			logger.Printf("%v", err)
		} else if fileCfg, err := loadFromFile(path, logger); err == nil && fileCfg != nil {
			cfg.mergeWith(fileCfg)
		}
	}
	// Environment overrides take precedence over file values.
	if !opts.IgnoreEnv {
		if envCfg := loadFromEnv(logger); envCfg != nil {
			cfg.mergeWith(envCfg)
		}
	}
	return cfg
}
// Private helpers
// Sectioned (table-based) file format only.
// fileConfig is the top-level decode target for the TOML config file; every
// setting lives in a named table. Flat top-level keys are rejected by
// loadFromFile.
type fileConfig struct {
	// Section tables only (flat keys are not allowed)
	General    sectionGeneral    `toml:"general"`
	Logging    sectionLogging    `toml:"logging"`
	Completion sectionCompletion `toml:"completion"`
	Triggers   sectionTriggers   `toml:"triggers"`
	Inline     sectionInline     `toml:"inline"`
	Chat       sectionChat       `toml:"chat"`
	Provider   sectionProvider   `toml:"provider"`
	OpenAI     sectionOpenAI     `toml:"openai"`
	OpenRouter sectionOpenRouter `toml:"openrouter"`
	Copilot    sectionCopilot    `toml:"copilot"`
	Ollama     sectionOllama     `toml:"ollama"`
	Prompts    sectionPrompts    `toml:"prompts"`
	Tmux       sectionTmux       `toml:"tmux"`
	Stats      sectionStats      `toml:"stats"`
}
// sectionGeneral mirrors the [general] table of the config file.
type sectionGeneral struct {
	MaxTokens          int      `toml:"max_tokens"`
	ContextMode        string   `toml:"context_mode"`
	ContextWindowLines int      `toml:"context_window_lines"`
	MaxContextTokens   int      `toml:"max_context_tokens"`
	CodingTemperature  *float64 `toml:"coding_temperature"`
}

// sectionLogging mirrors the [logging] table.
type sectionLogging struct {
	LogPreviewLimit int `toml:"log_preview_limit"`
}

// sectionCompletion mirrors the [completion] table.
type sectionCompletion struct {
	CompletionDebounceMs  int `toml:"completion_debounce_ms"`
	CompletionThrottleMs  int `toml:"completion_throttle_ms"`
	ManualInvokeMinPrefix int `toml:"manual_invoke_min_prefix"`
}

// sectionTriggers mirrors the [triggers] table.
type sectionTriggers struct {
	TriggerCharacters []string `toml:"trigger_characters"`
}

// sectionInline mirrors the [inline] table.
type sectionInline struct {
	InlineOpen  string `toml:"inline_open"`
	InlineClose string `toml:"inline_close"`
}

// sectionChat mirrors the [chat] table.
type sectionChat struct {
	ChatSuffix   string   `toml:"chat_suffix"`
	ChatPrefixes []string `toml:"chat_prefixes"`
}

// sectionProvider mirrors the [provider] table.
type sectionProvider struct {
	Name string `toml:"name"`
}

// sectionStats mirrors the [stats] table.
type sectionStats struct {
	WindowMinutes int `toml:"window_minutes"`
}
// sectionOpenAI mirrors the [openai] table. Presets optionally maps short
// model aliases to full model identifiers (see resolvedModel).
type sectionOpenAI struct {
	Model       string            `toml:"model"`
	BaseURL     string            `toml:"base_url"`
	Temperature *float64          `toml:"temperature"`
	Presets     map[string]string `toml:"presets"`
}
// isZero reports whether no [openai] settings were provided in the file
// (model/base URL blank, no temperature, no presets).
func (s sectionOpenAI) isZero() bool {
	if s.Temperature != nil || len(s.Presets) != 0 {
		return false
	}
	return strings.TrimSpace(s.Model) == "" && strings.TrimSpace(s.BaseURL) == ""
}
// resolvedModel maps the configured model name through the optional presets
// alias table. Lookup is exact first, then case-insensitive on trimmed keys;
// empty mapped values are ignored and the original model name is returned.
func (s sectionOpenAI) resolvedModel() string {
	model := strings.TrimSpace(s.Model)
	if model == "" || len(s.Presets) == 0 {
		return model
	}
	// Exact-key alias wins.
	if direct := strings.TrimSpace(s.Presets[model]); direct != "" {
		return direct
	}
	// Fall back to a case-insensitive scan of the alias table.
	want := strings.ToLower(model)
	for key, val := range s.Presets {
		if strings.ToLower(strings.TrimSpace(key)) != want {
			continue
		}
		if mapped := strings.TrimSpace(val); mapped != "" {
			return mapped
		}
	}
	return model
}
// sectionOpenRouter mirrors the [openrouter] table.
type sectionOpenRouter struct {
	Model       string   `toml:"model"`
	BaseURL     string   `toml:"base_url"`
	Temperature *float64 `toml:"temperature"`
}

// sectionCopilot mirrors the [copilot] table.
type sectionCopilot struct {
	Model       string   `toml:"model"`
	BaseURL     string   `toml:"base_url"`
	Temperature *float64 `toml:"temperature"`
}

// sectionOllama mirrors the [ollama] table.
type sectionOllama struct {
	Model       string   `toml:"model"`
	BaseURL     string   `toml:"base_url"`
	Temperature *float64 `toml:"temperature"`
}
// Prompts sections
// sectionPrompts mirrors the [prompts] table and its nested sub-tables.
type sectionPrompts struct {
	Completion     sectionPromptsCompletion     `toml:"completion"`
	Chat           sectionPromptsChat           `toml:"chat"`
	CodeAction     sectionPromptsCodeAction     `toml:"code_action"`
	CLI            sectionPromptsCLI            `toml:"cli"`
	ProviderNative sectionPromptsProviderNative `toml:"provider_native"`
}

// sectionPromptsCompletion mirrors [prompts.completion].
type sectionPromptsCompletion struct {
	SystemGeneral string `toml:"system_general"`
	SystemParams  string `toml:"system_params"`
	SystemInline  string `toml:"system_inline"`
	UserGeneral   string `toml:"user_general"`
	UserParams    string `toml:"user_params"`
	ExtraHeader   string `toml:"additional_context"`
}

// sectionPromptsChat mirrors [prompts.chat].
type sectionPromptsChat struct {
	System string `toml:"system"`
}

// sectionPromptsCodeAction mirrors [prompts.code_action], including the
// [[prompts.code_action.custom]] array of user-defined actions.
type sectionPromptsCodeAction struct {
	RewriteSystem     string                `toml:"rewrite_system"`
	DiagnosticsSystem string                `toml:"diagnostics_system"`
	DocumentSystem    string                `toml:"document_system"`
	RewriteUser       string                `toml:"rewrite_user"`
	DiagnosticsUser   string                `toml:"diagnostics_user"`
	DocumentUser      string                `toml:"document_user"`
	GoTestSystem      string                `toml:"go_test_system"`
	GoTestUser        string                `toml:"go_test_user"`
	SimplifySystem    string                `toml:"simplify_system"`
	SimplifyUser      string                `toml:"simplify_user"`
	Custom            []sectionCustomAction `toml:"custom"`
}

// sectionPromptsCLI mirrors [prompts.cli].
type sectionPromptsCLI struct {
	DefaultSystem string `toml:"default_system"`
	ExplainSystem string `toml:"explain_system"`
}

// sectionPromptsProviderNative mirrors [prompts.provider_native].
type sectionPromptsProviderNative struct {
	Completion string `toml:"completion"`
}

// sectionCustomAction is the file representation of a CustomAction.
type sectionCustomAction struct {
	ID          string `toml:"id"`
	Title       string `toml:"title"`
	Kind        string `toml:"kind"`
	Scope       string `toml:"scope"`
	Hotkey      string `toml:"hotkey"`
	Instruction string `toml:"instruction"`
	System      string `toml:"system"`
	User        string `toml:"user"`
}

// sectionTmux mirrors the [tmux] table.
type sectionTmux struct {
	CustomMenuHotkey string `toml:"custom_menu_hotkey"`
}
// toApp converts the decoded sectioned file config into an App overlay that
// contains only the values explicitly set in the file. Zero-valued sections
// are skipped so that merging the overlay into defaults leaves unspecified
// settings untouched. mergeBasics/mergeProviderFields are defined elsewhere
// in this package.
func (fc *fileConfig) toApp() App {
	out := App{}
	// Merge section: general
	// (struct equality already covers the pointer field, so the extra nil
	// check is redundant but harmless)
	if (fc.General != sectionGeneral{}) || fc.General.CodingTemperature != nil {
		tmp := App{
			MaxTokens:          fc.General.MaxTokens,
			ContextMode:        fc.General.ContextMode,
			ContextWindowLines: fc.General.ContextWindowLines,
			MaxContextTokens:   fc.General.MaxContextTokens,
			CodingTemperature:  fc.General.CodingTemperature,
		}
		out.mergeBasics(&tmp)
	}
	// logging
	if (fc.Logging != sectionLogging{}) {
		tmp := App{LogPreviewLimit: fc.Logging.LogPreviewLimit}
		out.mergeBasics(&tmp)
	}
	// completion
	if (fc.Completion != sectionCompletion{}) {
		tmp := App{
			CompletionDebounceMs:  fc.Completion.CompletionDebounceMs,
			CompletionThrottleMs:  fc.Completion.CompletionThrottleMs,
			ManualInvokeMinPrefix: fc.Completion.ManualInvokeMinPrefix,
		}
		out.mergeBasics(&tmp)
	}
	// triggers
	if len(fc.Triggers.TriggerCharacters) > 0 {
		tmp := App{TriggerCharacters: fc.Triggers.TriggerCharacters}
		out.mergeBasics(&tmp)
	}
	// inline
	if (fc.Inline != sectionInline{}) {
		tmp := App{InlineOpen: fc.Inline.InlineOpen, InlineClose: fc.Inline.InlineClose}
		out.mergeBasics(&tmp)
	}
	// chat
	if strings.TrimSpace(fc.Chat.ChatSuffix) != "" || len(fc.Chat.ChatPrefixes) > 0 {
		tmp := App{ChatSuffix: fc.Chat.ChatSuffix, ChatPrefixes: fc.Chat.ChatPrefixes}
		out.mergeBasics(&tmp)
	}
	// provider
	if strings.TrimSpace(fc.Provider.Name) != "" {
		tmp := App{Provider: fc.Provider.Name}
		out.mergeBasics(&tmp)
	}
	// openai (the model name is passed through the presets alias table)
	if !fc.OpenAI.isZero() || fc.OpenAI.Temperature != nil {
		tmp := App{
			OpenAIBaseURL:     fc.OpenAI.BaseURL,
			OpenAIModel:       fc.OpenAI.resolvedModel(),
			OpenAITemperature: fc.OpenAI.Temperature,
		}
		out.mergeProviderFields(&tmp)
	}
	// openrouter
	if (fc.OpenRouter != sectionOpenRouter{}) || fc.OpenRouter.Temperature != nil {
		tmp := App{
			OpenRouterBaseURL:     fc.OpenRouter.BaseURL,
			OpenRouterModel:       fc.OpenRouter.Model,
			OpenRouterTemperature: fc.OpenRouter.Temperature,
		}
		out.mergeProviderFields(&tmp)
	}
	// copilot
	if (fc.Copilot != sectionCopilot{}) || fc.Copilot.Temperature != nil {
		tmp := App{
			CopilotBaseURL:     fc.Copilot.BaseURL,
			CopilotModel:       fc.Copilot.Model,
			CopilotTemperature: fc.Copilot.Temperature,
		}
		out.mergeProviderFields(&tmp)
	}
	// ollama
	if (fc.Ollama != sectionOllama{}) || fc.Ollama.Temperature != nil {
		tmp := App{
			OllamaBaseURL:     fc.Ollama.BaseURL,
			OllamaModel:       fc.Ollama.Model,
			OllamaTemperature: fc.Ollama.Temperature,
		}
		out.mergeProviderFields(&tmp)
	}
	// prompts: only non-blank template strings override the defaults
	// completion
	if (fc.Prompts.Completion != sectionPromptsCompletion{}) {
		if strings.TrimSpace(fc.Prompts.Completion.SystemGeneral) != "" {
			out.PromptCompletionSystemGeneral = fc.Prompts.Completion.SystemGeneral
		}
		if strings.TrimSpace(fc.Prompts.Completion.SystemParams) != "" {
			out.PromptCompletionSystemParams = fc.Prompts.Completion.SystemParams
		}
		if strings.TrimSpace(fc.Prompts.Completion.SystemInline) != "" {
			out.PromptCompletionSystemInline = fc.Prompts.Completion.SystemInline
		}
		if strings.TrimSpace(fc.Prompts.Completion.UserGeneral) != "" {
			out.PromptCompletionUserGeneral = fc.Prompts.Completion.UserGeneral
		}
		if strings.TrimSpace(fc.Prompts.Completion.UserParams) != "" {
			out.PromptCompletionUserParams = fc.Prompts.Completion.UserParams
		}
		if strings.TrimSpace(fc.Prompts.Completion.ExtraHeader) != "" {
			out.PromptCompletionExtraHeader = fc.Prompts.Completion.ExtraHeader
		}
	}
	// chat
	if strings.TrimSpace(fc.Prompts.Chat.System) != "" {
		out.PromptChatSystem = fc.Prompts.Chat.System
	}
	// code action
	if strings.TrimSpace(fc.Prompts.CodeAction.RewriteSystem) != "" ||
		strings.TrimSpace(fc.Prompts.CodeAction.DiagnosticsSystem) != "" ||
		strings.TrimSpace(fc.Prompts.CodeAction.DocumentSystem) != "" ||
		strings.TrimSpace(fc.Prompts.CodeAction.RewriteUser) != "" ||
		strings.TrimSpace(fc.Prompts.CodeAction.DiagnosticsUser) != "" ||
		strings.TrimSpace(fc.Prompts.CodeAction.DocumentUser) != "" ||
		strings.TrimSpace(fc.Prompts.CodeAction.GoTestSystem) != "" ||
		strings.TrimSpace(fc.Prompts.CodeAction.GoTestUser) != "" ||
		strings.TrimSpace(fc.Prompts.CodeAction.SimplifySystem) != "" ||
		strings.TrimSpace(fc.Prompts.CodeAction.SimplifyUser) != "" ||
		len(fc.Prompts.CodeAction.Custom) > 0 {
		if strings.TrimSpace(fc.Prompts.CodeAction.RewriteSystem) != "" {
			out.PromptCodeActionRewriteSystem = fc.Prompts.CodeAction.RewriteSystem
		}
		if strings.TrimSpace(fc.Prompts.CodeAction.DiagnosticsSystem) != "" {
			out.PromptCodeActionDiagnosticsSystem = fc.Prompts.CodeAction.DiagnosticsSystem
		}
		if strings.TrimSpace(fc.Prompts.CodeAction.DocumentSystem) != "" {
			out.PromptCodeActionDocumentSystem = fc.Prompts.CodeAction.DocumentSystem
		}
		if strings.TrimSpace(fc.Prompts.CodeAction.RewriteUser) != "" {
			out.PromptCodeActionRewriteUser = fc.Prompts.CodeAction.RewriteUser
		}
		if strings.TrimSpace(fc.Prompts.CodeAction.DiagnosticsUser) != "" {
			out.PromptCodeActionDiagnosticsUser = fc.Prompts.CodeAction.DiagnosticsUser
		}
		if strings.TrimSpace(fc.Prompts.CodeAction.DocumentUser) != "" {
			out.PromptCodeActionDocumentUser = fc.Prompts.CodeAction.DocumentUser
		}
		if strings.TrimSpace(fc.Prompts.CodeAction.GoTestSystem) != "" {
			out.PromptCodeActionGoTestSystem = fc.Prompts.CodeAction.GoTestSystem
		}
		if strings.TrimSpace(fc.Prompts.CodeAction.GoTestUser) != "" {
			out.PromptCodeActionGoTestUser = fc.Prompts.CodeAction.GoTestUser
		}
		if strings.TrimSpace(fc.Prompts.CodeAction.SimplifySystem) != "" {
			out.PromptCodeActionSimplifySystem = fc.Prompts.CodeAction.SimplifySystem
		}
		if strings.TrimSpace(fc.Prompts.CodeAction.SimplifyUser) != "" {
			out.PromptCodeActionSimplifyUser = fc.Prompts.CodeAction.SimplifyUser
		}
		// Custom actions are normalized (trimmed IDs/titles, lowercased scope)
		// here; prompt bodies are kept verbatim.
		if len(fc.Prompts.CodeAction.Custom) > 0 {
			for _, ca := range fc.Prompts.CodeAction.Custom {
				out.CustomActions = append(out.CustomActions, CustomAction{
					ID:          strings.TrimSpace(ca.ID),
					Title:       strings.TrimSpace(ca.Title),
					Kind:        strings.TrimSpace(ca.Kind),
					Scope:       strings.ToLower(strings.TrimSpace(ca.Scope)),
					Hotkey:      strings.TrimSpace(ca.Hotkey),
					Instruction: ca.Instruction,
					System:      ca.System,
					User:        ca.User,
				})
			}
		}
	}
	// cli
	if (fc.Prompts.CLI != sectionPromptsCLI{}) {
		if strings.TrimSpace(fc.Prompts.CLI.DefaultSystem) != "" {
			out.PromptCLIDefaultSystem = fc.Prompts.CLI.DefaultSystem
		}
		if strings.TrimSpace(fc.Prompts.CLI.ExplainSystem) != "" {
			out.PromptCLIExplainSystem = fc.Prompts.CLI.ExplainSystem
		}
	}
	// provider-native
	if strings.TrimSpace(fc.Prompts.ProviderNative.Completion) != "" {
		out.PromptNativeCompletion = fc.Prompts.ProviderNative.Completion
	}
	// tmux
	if (fc.Tmux != sectionTmux{}) {
		out.TmuxCustomMenuHotkey = strings.TrimSpace(fc.Tmux.CustomMenuHotkey)
	}
	// stats
	if fc.Stats.WindowMinutes > 0 {
		out.StatsWindowMinutes = fc.Stats.WindowMinutes
	}
	return out
}
// loadFromFile reads a TOML config file and converts it into an App overlay.
// It rejects legacy top-level flat keys (sectioned tables only), defensively
// re-applies integer fields from the raw document so present-but-zero values
// survive merging, and folds in any [models] surface configuration. A missing
// file returns the error without logging.
func loadFromFile(path string, logger *log.Logger) (*App, error) {
	b, err := os.ReadFile(path)
	if err != nil {
		if !os.IsNotExist(err) && logger != nil {
			logger.Printf("cannot open TOML config file %s: %v", path, err)
		}
		return nil, err
	}
	// Decode directly from the byte slice; no string/Reader round-trip needed.
	var tables fileConfig
	if err := toml.Unmarshal(b, &tables); err != nil {
		if logger != nil {
			logger.Printf("invalid TOML config file %s: %v", path, err)
		}
		return nil, err
	}
	// Raw map for validation/presence checks. The typed decode above already
	// validated syntax, so this error is deliberately ignored.
	var raw map[string]any
	_ = toml.Unmarshal(b, &raw)
	// Reject legacy flat keys at top-level (sectioned-only config is allowed).
	knownTables := map[string]struct{}{
		"general": {}, "logging": {}, "completion": {}, "triggers": {}, "inline": {},
		"chat": {}, "provider": {}, "models": {}, "openai": {}, "openrouter": {},
		"copilot": {}, "ollama": {}, "prompts": {}, "tmux": {}, "stats": {},
	}
	legacy := map[string]struct{}{
		"max_tokens": {}, "context_mode": {}, "context_window_lines": {}, "max_context_tokens": {},
		"log_preview_limit": {}, "completion_debounce_ms": {}, "completion_throttle_ms": {},
		"manual_invoke_min_prefix": {}, "trigger_characters": {}, "inline_open": {}, "inline_close": {},
		"chat_suffix": {}, "chat_prefixes": {}, "coding_temperature": {}, "provider": {},
		"openai_model": {}, "openai_base_url": {}, "openai_temperature": {},
		// openrouter flat keys were previously missing from this rejection
		// list, unlike the other providers.
		"openrouter_model": {}, "openrouter_base_url": {}, "openrouter_temperature": {},
		"ollama_model": {}, "ollama_base_url": {}, "ollama_temperature": {},
		"copilot_model": {}, "copilot_base_url": {}, "copilot_temperature": {},
	}
	for k := range raw {
		if _, isTable := knownTables[k]; isTable {
			continue
		}
		if _, isLegacy := legacy[k]; isLegacy {
			return nil, fmt.Errorf("unsupported flat key '%s' in config; use sectioned tables (see config.toml.example)", k)
		}
	}
	if logger != nil {
		logger.Printf("loaded configuration from %s (TOML)", path)
	}
	// Build App from tables only.
	tab := tables.toApp()
	// TOML decodes integers as int64 (floats as float64); coerce explicitly so
	// explicitly-set values, including zero, are respected.
	asInt := func(v any) (int, bool) {
		switch vv := v.(type) {
		case int64:
			return int(vv), true
		case int:
			return vv, true
		case float64:
			return int(vv), true
		default:
			return 0, false
		}
	}
	if t, ok := raw["completion"].(map[string]any); ok {
		if v, present := t["manual_invoke_min_prefix"]; present {
			if n, ok := asInt(v); ok {
				tab.ManualInvokeMinPrefix = n
			}
		}
	}
	if t, ok := raw["logging"].(map[string]any); ok {
		if v, present := t["log_preview_limit"]; present {
			if n, ok := asInt(v); ok {
				tab.LogPreviewLimit = n
			}
		}
	}
	if m := parseSurfaceModels(raw, logger); m != nil {
		tab.mergeSurfaceModels(m)
	}
	return &tab, nil
}
// parseSurfaceModels extracts per-surface provider/model lists from the raw
// [models] table. It returns nil when the section is absent, malformed, or
// yields no entries. models.code_action is limited to a single entry; extras
// are logged and dropped.
func parseSurfaceModels(raw map[string]any, logger *log.Logger) *App {
	modelsRaw, ok := raw["models"]
	if !ok {
		return nil
	}
	table, ok := modelsRaw.(map[string]any)
	if !ok {
		if logger != nil {
			logger.Printf("config: ignoring models section (expected table, got %T)", modelsRaw)
		}
		return nil
	}
	var out App
	appendEntries := func(dest *[]SurfaceConfig, key string, val any) bool {
		entries, ok := parseSurfaceEntries(val, key, logger)
		if !ok || len(entries) == 0 {
			return false
		}
		*dest = append(*dest, entries...)
		return true
	}
	// Renamed from 'any' to avoid shadowing the predeclared identifier.
	found := appendEntries(&out.CompletionConfigs, "models.completion", table["completion"])
	if appendEntries(&out.CodeActionConfigs, "models.code_action", table["code_action"]) {
		if len(out.CodeActionConfigs) > 1 {
			if logger != nil {
				logger.Printf("config: models.code_action supports a single entry; ignoring %d extra", len(out.CodeActionConfigs)-1)
			}
			out.CodeActionConfigs = out.CodeActionConfigs[:1]
		}
		found = true
	}
	found = appendEntries(&out.ChatConfigs, "models.chat", table["chat"]) || found
	found = appendEntries(&out.CLIConfigs, "models.cli", table["cli"]) || found
	if !found {
		return nil
	}
	return &out
}
// parseSurfaceEntries decodes either a single model entry or a list of
// entries from the raw config value. The bool reports whether at least one
// entry was decoded; malformed list items are skipped individually.
func parseSurfaceEntries(raw any, path string, logger *log.Logger) ([]SurfaceConfig, bool) {
	if raw == nil {
		return nil, false
	}
	list, isList := raw.([]any)
	if !isList {
		// Scalar or table form: a single entry.
		cfg, ok := decodeModelEntry(raw, path, logger)
		if !ok || cfg == nil {
			return nil, false
		}
		return []SurfaceConfig{*cfg}, true
	}
	var out []SurfaceConfig
	for i, entry := range list {
		if cfg, ok := decodeModelEntry(entry, fmt.Sprintf("%s[%d]", path, i), logger); ok && cfg != nil {
			out = append(out, *cfg)
		}
	}
	return out, len(out) > 0
}
// cloneSurfaceConfigs returns an independent shallow copy of src,
// or nil when src is empty.
func cloneSurfaceConfigs(src []SurfaceConfig) []SurfaceConfig {
	if len(src) == 0 {
		return nil
	}
	return slices.Clone(src)
}
// decodeModelEntry decodes one model entry, which may be either a bare model
// string or a table with optional "model", "provider", and "temperature"
// keys. It returns (nil, false) for empty or malformed entries; type errors
// are reported through logger (when non-nil), prefixed with path.
func decodeModelEntry(raw any, path string, logger *log.Logger) (*SurfaceConfig, bool) {
	if raw == nil {
		return nil, false
	}
	switch v := raw.(type) {
	case string:
		// Shorthand form: a plain string is just the model name.
		model := strings.TrimSpace(v)
		if model == "" {
			return nil, false
		}
		return &SurfaceConfig{Model: model}, true
	case map[string]any:
		model := ""
		provider := ""
		if m, ok := v["model"]; ok {
			s, ok := m.(string)
			if !ok {
				if logger != nil {
					logger.Printf("config: %s.model must be a string", path)
				}
				return nil, false
			}
			model = strings.TrimSpace(s)
		}
		if pRaw, ok := v["provider"]; ok {
			ps, ok := pRaw.(string)
			if !ok {
				if logger != nil {
					logger.Printf("config: %s.provider must be a string", path)
				}
				return nil, false
			}
			provider = strings.TrimSpace(ps)
		}
		var tempPtr *float64
		if tRaw, ok := v["temperature"]; ok {
			parsed, ok := parseTemperatureValue(tRaw, path, logger)
			if !ok {
				return nil, false
			}
			tempPtr = parsed
		}
		// A table with no usable fields contributes nothing.
		if model == "" && tempPtr == nil && provider == "" {
			return nil, false
		}
		return &SurfaceConfig{Provider: provider, Model: model, Temperature: tempPtr}, true
	default:
		if logger != nil {
			logger.Printf("config: %s must be a string or table, got %T", path, raw)
		}
		return nil, false
	}
}
// parseTemperatureValue converts a raw config value (float, int, or string)
// into an optional temperature. A blank string is treated as "unset" and
// yields (nil, true); undecodable values are logged and yield (nil, false).
func parseTemperatureValue(raw any, path string, logger *log.Logger) (*float64, bool) {
	warn := func(format string, args ...any) {
		if logger != nil {
			logger.Printf(format, args...)
		}
	}
	switch v := raw.(type) {
	case float64:
		t := v
		return &t, true
	case int64:
		t := float64(v)
		return &t, true
	case string:
		trimmed := strings.TrimSpace(v)
		if trimmed == "" {
			// Blank means "leave unset" rather than an error.
			return nil, true
		}
		parsed, err := strconv.ParseFloat(trimmed, 64)
		if err != nil {
			warn("config: %s.temperature invalid: %v", path, err)
			return nil, false
		}
		return &parsed, true
	default:
		warn("config: %s.temperature must be numeric or string, got %T", path, raw)
		return nil, false
	}
}
// floatPtr returns a pointer to a fresh copy of v.
func floatPtr(v float64) *float64 {
	return &v
}
// mergeWith overlays all set (non-zero / non-blank) settings from other onto
// a, in fixed category order: basics, provider fields, per-surface models,
// then prompts.
func (a *App) mergeWith(other *App) {
	a.mergeBasics(other)
	a.mergeProviderFields(other)
	a.mergeSurfaceModels(other)
	a.mergePrompts(other)
}
// mergeBasics merges general (non-provider) fields.
// Zero/blank values in other are treated as "unset" and do not override;
// slices are cloned so a and other never share backing arrays.
func (a *App) mergeBasics(other *App) {
	if other.MaxTokens > 0 {
		a.MaxTokens = other.MaxTokens
	}
	if s := strings.TrimSpace(other.ContextMode); s != "" {
		a.ContextMode = s
	}
	if other.ContextWindowLines > 0 {
		a.ContextWindowLines = other.ContextWindowLines
	}
	if other.MaxContextTokens > 0 {
		a.MaxContextTokens = other.MaxContextTokens
	}
	// >= 0: an explicit 0 is meaningful here and still overrides.
	if other.LogPreviewLimit >= 0 {
		a.LogPreviewLimit = other.LogPreviewLimit
	}
	if other.CodingTemperature != nil { // allow explicit 0.0
		a.CodingTemperature = other.CodingTemperature
	}
	// >= 0: an explicit 0 is meaningful here and still overrides.
	if other.ManualInvokeMinPrefix >= 0 {
		a.ManualInvokeMinPrefix = other.ManualInvokeMinPrefix
	}
	if other.CompletionDebounceMs > 0 {
		a.CompletionDebounceMs = other.CompletionDebounceMs
	}
	if other.CompletionThrottleMs > 0 {
		a.CompletionThrottleMs = other.CompletionThrottleMs
	}
	if len(other.TriggerCharacters) > 0 {
		a.TriggerCharacters = slices.Clone(other.TriggerCharacters)
	}
	if s := strings.TrimSpace(other.InlineOpen); s != "" {
		a.InlineOpen = s
	}
	if s := strings.TrimSpace(other.InlineClose); s != "" {
		a.InlineClose = s
	}
	if s := strings.TrimSpace(other.ChatSuffix); s != "" {
		a.ChatSuffix = s
	}
	if len(other.ChatPrefixes) > 0 {
		a.ChatPrefixes = slices.Clone(other.ChatPrefixes)
	}
	if s := strings.TrimSpace(other.Provider); s != "" {
		a.Provider = s
	}
}
// mergeSurfaceModels copies per-surface model and temperature overrides.
// A non-empty list in other replaces (does not append to) the corresponding
// list in a, via a deep copy so the two Apps stay independent.
func (a *App) mergeSurfaceModels(other *App) {
	if len(other.CompletionConfigs) > 0 {
		a.CompletionConfigs = cloneSurfaceConfigs(other.CompletionConfigs)
	}
	if len(other.CodeActionConfigs) > 0 {
		a.CodeActionConfigs = cloneSurfaceConfigs(other.CodeActionConfigs)
	}
	if len(other.ChatConfigs) > 0 {
		a.ChatConfigs = cloneSurfaceConfigs(other.ChatConfigs)
	}
	if len(other.CLIConfigs) > 0 {
		a.CLIConfigs = cloneSurfaceConfigs(other.CLIConfigs)
	}
}
// mergePrompts copies non-empty prompt templates (plus custom actions and
// the tmux custom-menu hotkey) from other onto a. Whitespace-only values in
// other are treated as unset and do not override.
func (a *App) mergePrompts(other *App) {
	// set overwrites *dst only when src has non-blank content; the original
	// value (not the trimmed one) is copied, matching the previous behavior.
	set := func(dst *string, src string) {
		if strings.TrimSpace(src) != "" {
			*dst = src
		}
	}
	// Completion
	set(&a.PromptCompletionSystemGeneral, other.PromptCompletionSystemGeneral)
	set(&a.PromptCompletionSystemParams, other.PromptCompletionSystemParams)
	set(&a.PromptCompletionSystemInline, other.PromptCompletionSystemInline)
	set(&a.PromptCompletionUserGeneral, other.PromptCompletionUserGeneral)
	set(&a.PromptCompletionUserParams, other.PromptCompletionUserParams)
	set(&a.PromptCompletionExtraHeader, other.PromptCompletionExtraHeader)
	// Provider-native
	set(&a.PromptNativeCompletion, other.PromptNativeCompletion)
	// Chat
	set(&a.PromptChatSystem, other.PromptChatSystem)
	// Code actions
	set(&a.PromptCodeActionRewriteSystem, other.PromptCodeActionRewriteSystem)
	set(&a.PromptCodeActionDiagnosticsSystem, other.PromptCodeActionDiagnosticsSystem)
	set(&a.PromptCodeActionDocumentSystem, other.PromptCodeActionDocumentSystem)
	set(&a.PromptCodeActionRewriteUser, other.PromptCodeActionRewriteUser)
	set(&a.PromptCodeActionDiagnosticsUser, other.PromptCodeActionDiagnosticsUser)
	set(&a.PromptCodeActionDocumentUser, other.PromptCodeActionDocumentUser)
	set(&a.PromptCodeActionGoTestSystem, other.PromptCodeActionGoTestSystem)
	set(&a.PromptCodeActionGoTestUser, other.PromptCodeActionGoTestUser)
	set(&a.PromptCodeActionSimplifySystem, other.PromptCodeActionSimplifySystem)
	set(&a.PromptCodeActionSimplifyUser, other.PromptCodeActionSimplifyUser)
	// CLI
	set(&a.PromptCLIDefaultSystem, other.PromptCLIDefaultSystem)
	set(&a.PromptCLIExplainSystem, other.PromptCLIExplainSystem)
	// Custom actions: replace wholesale with an independent copy, consistent
	// with the slices.Clone usage elsewhere in this file.
	if len(other.CustomActions) > 0 {
		a.CustomActions = slices.Clone(other.CustomActions)
	}
	set(&a.TmuxCustomMenuHotkey, other.TmuxCustomMenuHotkey)
}
// Validate checks custom actions and tmux settings for duplicates and consistency.
// It enforces: required, case-insensitively unique IDs; required title; a scope
// of "", "selection", or "diagnostics"; exactly one of instruction/user; unique
// single-rune hotkeys; and a single-rune tmux custom-menu hotkey that does not
// clash with the built-in TUI hotkeys.
func (a App) Validate() error {
	// Normalize and check duplicates for IDs and hotkeys
	seenID := make(map[string]struct{})
	seenHK := make(map[string]struct{})
	for _, ca := range a.CustomActions {
		id := strings.ToLower(strings.TrimSpace(ca.ID))
		if id == "" {
			return fmt.Errorf("config: custom action missing required field id")
		}
		if _, ok := seenID[id]; ok {
			return fmt.Errorf("config: duplicate custom action id: %s", ca.ID)
		}
		seenID[id] = struct{}{}
		if strings.TrimSpace(ca.Title) == "" {
			return fmt.Errorf("config: custom action %s missing required field title", ca.ID)
		}
		// Validate scope
		scope := strings.TrimSpace(ca.Scope)
		if scope != "" && scope != "selection" && scope != "diagnostics" {
			return fmt.Errorf("config: custom action %s has invalid scope: %s", ca.ID, ca.Scope)
		}
		// Exactly one of instruction/user must be provided.
		hasInstr := strings.TrimSpace(ca.Instruction) != ""
		hasUser := strings.TrimSpace(ca.User) != ""
		if hasInstr && hasUser {
			return fmt.Errorf("config: custom action %s must set either instruction or user, not both", ca.ID)
		}
		if !hasInstr && !hasUser {
			return fmt.Errorf("config: custom action %s requires instruction or user", ca.ID)
		}
		// Hotkey: one rune if provided, unique case-insensitively.
		// ([]rune of a non-empty string is never nil, so the previous
		// `[]rune(hk) == nil` guard was dead code and has been removed.)
		if hk := strings.TrimSpace(ca.Hotkey); hk != "" {
			if len([]rune(hk)) != 1 {
				return fmt.Errorf("config: custom action %s hotkey must be a single character", ca.ID)
			}
			lhk := strings.ToLower(hk)
			if _, ok := seenHK[lhk]; ok {
				return fmt.Errorf("config: duplicate custom action hotkey: %s", hk)
			}
			seenHK[lhk] = struct{}{}
		}
	}
	// Tmux custom menu hotkey validation
	if hk := strings.TrimSpace(a.TmuxCustomMenuHotkey); hk != "" {
		if len([]rune(hk)) != 1 {
			return fmt.Errorf("config: invalid tmux.custom_menu_hotkey: %s", hk)
		}
		// built-in hotkeys in tmux TUI: r,i,c,t,p,s
		switch strings.ToLower(hk) {
		case "r", "i", "c", "t", "p", "s":
			return fmt.Errorf("config: invalid tmux.custom_menu_hotkey: %s (clashes with built-in)", hk)
		}
	}
	return nil
}
// mergeProviderFields merges per-provider configuration.
// Blank strings are treated as unset; temperature pointers override whenever
// non-nil so an explicit 0.0 still wins.
func (a *App) mergeProviderFields(other *App) {
	if s := strings.TrimSpace(other.OpenAIBaseURL); s != "" {
		a.OpenAIBaseURL = s
	}
	if s := strings.TrimSpace(other.OpenAIModel); s != "" {
		a.OpenAIModel = s
	}
	if other.OpenAITemperature != nil { // allow explicit 0.0
		a.OpenAITemperature = other.OpenAITemperature
	}
	if s := strings.TrimSpace(other.OpenRouterBaseURL); s != "" {
		a.OpenRouterBaseURL = s
	}
	if s := strings.TrimSpace(other.OpenRouterModel); s != "" {
		a.OpenRouterModel = s
	}
	if other.OpenRouterTemperature != nil { // allow explicit 0.0
		a.OpenRouterTemperature = other.OpenRouterTemperature
	}
	if s := strings.TrimSpace(other.OllamaBaseURL); s != "" {
		a.OllamaBaseURL = s
	}
	if s := strings.TrimSpace(other.OllamaModel); s != "" {
		a.OllamaModel = s
	}
	if other.OllamaTemperature != nil { // allow explicit 0.0
		a.OllamaTemperature = other.OllamaTemperature
	}
	if s := strings.TrimSpace(other.CopilotBaseURL); s != "" {
		a.CopilotBaseURL = s
	}
	if s := strings.TrimSpace(other.CopilotModel); s != "" {
		a.CopilotModel = s
	}
	if other.CopilotTemperature != nil { // allow explicit 0.0
		a.CopilotTemperature = other.CopilotTemperature
	}
}
// getConfigPath is a thin internal alias for ConfigPath.
func getConfigPath() (string, error) {
	return ConfigPath()
}
// ConfigPath returns the default config file path ($XDG_CONFIG_HOME/hexai/config.toml or ~/.config/hexai/config.toml).
func ConfigPath() (string, error) {
	if xdg := os.Getenv("XDG_CONFIG_HOME"); xdg != "" {
		return filepath.Join(xdg, "hexai", "config.toml"), nil
	}
	home, err := os.UserHomeDir()
	if err != nil {
		return "", fmt.Errorf("cannot find user home directory: %v", err)
	}
	return filepath.Join(home, ".config", "hexai", "config.toml"), nil
}
// --- Environment overrides ---
// loadFromEnv constructs an App containing only fields set via HEXAI_* env vars.
// These values should take precedence over file config when merged.
// It returns nil when no HEXAI_* variable was consumed; invalid numeric
// values are logged (when logger is non-nil) and treated as unset.
func loadFromEnv(logger *log.Logger) *App {
	var out App
	var any bool // tracks whether at least one env var was consumed
	// helpers
	getenv := func(k string) string { return strings.TrimSpace(os.Getenv(k)) }
	// parseInt reads env var k as an int; unset or invalid yields (0, false).
	parseInt := func(k string) (int, bool) {
		v := getenv(k)
		if v == "" {
			return 0, false
		}
		n, err := strconv.Atoi(v)
		if err != nil {
			if logger != nil {
				logger.Printf("invalid %s: %v", k, err)
			}
			return 0, false
		}
		return n, true
	}
	// parseFloatPtr reads env var k as a float pointer; unset or invalid
	// yields (nil, false).
	parseFloatPtr := func(k string) (*float64, bool) {
		v := getenv(k)
		if v == "" {
			return nil, false
		}
		f, err := strconv.ParseFloat(v, 64)
		if err != nil {
			if logger != nil {
				logger.Printf("invalid %s: %v", k, err)
			}
			return nil, false
		}
		return &f, true
	}
	// General settings
	if n, ok := parseInt("HEXAI_MAX_TOKENS"); ok {
		out.MaxTokens = n
		any = true
	}
	if s := getenv("HEXAI_CONTEXT_MODE"); s != "" {
		out.ContextMode = s
		any = true
	}
	if n, ok := parseInt("HEXAI_CONTEXT_WINDOW_LINES"); ok {
		out.ContextWindowLines = n
		any = true
	}
	if n, ok := parseInt("HEXAI_MAX_CONTEXT_TOKENS"); ok {
		out.MaxContextTokens = n
		any = true
	}
	if n, ok := parseInt("HEXAI_LOG_PREVIEW_LIMIT"); ok {
		out.LogPreviewLimit = n
		any = true
	}
	if n, ok := parseInt("HEXAI_MANUAL_INVOKE_MIN_PREFIX"); ok {
		out.ManualInvokeMinPrefix = n
		any = true
	}
	if n, ok := parseInt("HEXAI_COMPLETION_DEBOUNCE_MS"); ok {
		out.CompletionDebounceMs = n
		any = true
	}
	if n, ok := parseInt("HEXAI_COMPLETION_THROTTLE_MS"); ok {
		out.CompletionThrottleMs = n
		any = true
	}
	if f, ok := parseFloatPtr("HEXAI_CODING_TEMPERATURE"); ok {
		out.CodingTemperature = f
		any = true
	}
	// Comma-separated list; blank items are dropped.
	if s := getenv("HEXAI_TRIGGER_CHARACTERS"); s != "" {
		parts := strings.Split(s, ",")
		out.TriggerCharacters = nil
		for _, p := range parts {
			if t := strings.TrimSpace(p); t != "" {
				out.TriggerCharacters = append(out.TriggerCharacters, t)
			}
		}
		any = true
	}
	if s := getenv("HEXAI_INLINE_OPEN"); s != "" {
		out.InlineOpen = s
		any = true
	}
	if s := getenv("HEXAI_INLINE_CLOSE"); s != "" {
		out.InlineClose = s
		any = true
	}
	if s := getenv("HEXAI_CHAT_SUFFIX"); s != "" {
		out.ChatSuffix = s
		any = true
	}
	// Comma-separated list; blank items are dropped.
	if s := getenv("HEXAI_CHAT_PREFIXES"); s != "" {
		parts := strings.Split(s, ",")
		out.ChatPrefixes = nil
		for _, p := range parts {
			if t := strings.TrimSpace(p); t != "" {
				out.ChatPrefixes = append(out.ChatPrefixes, t)
			}
		}
		any = true
	}
	if s := getenv("HEXAI_PROVIDER"); s != "" {
		out.Provider = s
		any = true
	}
	modelForce := strings.TrimSpace(getenv("HEXAI_MODEL_FORCE"))
	modelGeneric := strings.TrimSpace(getenv("HEXAI_MODEL"))
	providerLower := strings.ToLower(strings.TrimSpace(out.Provider))
	forceUsed := false
	genericUsed := false
	// pickModel resolves one provider's model with precedence:
	// HEXAI_MODEL_FORCE, then the provider-specific env var, then HEXAI_MODEL.
	// With no HEXAI_PROVIDER set, force/generic values are consumed by the
	// first provider that asks (latched via forceUsed/genericUsed).
	pickModel := func(providerName, specific string) (string, bool) {
		specific = strings.TrimSpace(specific)
		nameLower := strings.ToLower(strings.TrimSpace(providerName))
		if modelForce != "" {
			if providerLower == nameLower {
				forceUsed = true
				return modelForce, true
			}
			if providerLower == "" && !forceUsed {
				forceUsed = true
				return modelForce, true
			}
		}
		if specific != "" {
			return specific, true
		}
		if modelGeneric != "" {
			if providerLower == nameLower {
				return modelGeneric, true
			}
			if providerLower == "" && !genericUsed {
				genericUsed = true
				return modelGeneric, true
			}
		}
		return "", false
	}
	// Provider-specific
	if s := getenv("HEXAI_OPENAI_BASE_URL"); s != "" {
		out.OpenAIBaseURL = s
		any = true
	}
	if model, ok := pickModel("openai", getenv("HEXAI_OPENAI_MODEL")); ok {
		out.OpenAIModel = model
		any = true
	}
	if f, ok := parseFloatPtr("HEXAI_OPENAI_TEMPERATURE"); ok {
		out.OpenAITemperature = f
		any = true
	}
	if s := getenv("HEXAI_OPENROUTER_BASE_URL"); s != "" {
		out.OpenRouterBaseURL = s
		any = true
	}
	if model, ok := pickModel("openrouter", getenv("HEXAI_OPENROUTER_MODEL")); ok {
		out.OpenRouterModel = model
		any = true
	}
	if f, ok := parseFloatPtr("HEXAI_OPENROUTER_TEMPERATURE"); ok {
		out.OpenRouterTemperature = f
		any = true
	}
	if s := getenv("HEXAI_OLLAMA_BASE_URL"); s != "" {
		out.OllamaBaseURL = s
		any = true
	}
	if model, ok := pickModel("ollama", getenv("HEXAI_OLLAMA_MODEL")); ok {
		out.OllamaModel = model
		any = true
	}
	if f, ok := parseFloatPtr("HEXAI_OLLAMA_TEMPERATURE"); ok {
		out.OllamaTemperature = f
		any = true
	}
	if s := getenv("HEXAI_COPILOT_BASE_URL"); s != "" {
		out.CopilotBaseURL = s
		any = true
	}
	if model, ok := pickModel("copilot", getenv("HEXAI_COPILOT_MODEL")); ok {
		out.CopilotModel = model
		any = true
	}
	if f, ok := parseFloatPtr("HEXAI_COPILOT_TEMPERATURE"); ok {
		out.CopilotTemperature = f
		any = true
	}
	// Per-surface overrides
	// buildEntry assembles a single-entry SurfaceConfig list when any of the
	// surface's model/temperature/provider env vars are set.
	buildEntry := func(modelKey, tempKey, providerKey string) ([]SurfaceConfig, bool) {
		model := getenv(modelKey)
		tempPtr, tempSet := parseFloatPtr(tempKey)
		provider := getenv(providerKey)
		if model == "" && provider == "" && !tempSet {
			return nil, false
		}
		entry := SurfaceConfig{Provider: provider, Model: model}
		if tempSet {
			entry.Temperature = tempPtr
		}
		return []SurfaceConfig{entry}, true
	}
	if entries, ok := buildEntry("HEXAI_MODEL_COMPLETION", "HEXAI_TEMPERATURE_COMPLETION", "HEXAI_PROVIDER_COMPLETION"); ok {
		out.CompletionConfigs = entries
		any = true
	}
	if entries, ok := buildEntry("HEXAI_MODEL_CODE_ACTION", "HEXAI_TEMPERATURE_CODE_ACTION", "HEXAI_PROVIDER_CODE_ACTION"); ok {
		out.CodeActionConfigs = entries
		any = true
	}
	if entries, ok := buildEntry("HEXAI_MODEL_CHAT", "HEXAI_TEMPERATURE_CHAT", "HEXAI_PROVIDER_CHAT"); ok {
		out.ChatConfigs = entries
		any = true
	}
	if entries, ok := buildEntry("HEXAI_MODEL_CLI", "HEXAI_TEMPERATURE_CLI", "HEXAI_PROVIDER_CLI"); ok {
		out.CLIConfigs = entries
		any = true
	}
	if !any {
		return nil
	}
	return &out
}
package editor
import (
"errors"
"os"
"os/exec"
"path/filepath"
"strings"
)
// Resolve returns the editor command from HEXAI_EDITOR or EDITOR.
// HEXAI_EDITOR wins when both are set; whitespace-only values count as unset.
func Resolve() (string, error) {
	for _, key := range []string{"HEXAI_EDITOR", "EDITOR"} {
		if ed := strings.TrimSpace(os.Getenv(key)); ed != "" {
			return ed, nil
		}
	}
	return "", errors.New("no editor configured (set HEXAI_EDITOR or EDITOR)")
}
// RunEditor is the seam that invokes the editor on the given file path.
// Override in tests to avoid launching a real editor.
// The child process inherits this process's stdio so interactive editors work.
var RunEditor = func(editor, path string) error {
	cmd := exec.Command(editor, path)
	cmd.Stdin = os.Stdin
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	return cmd.Run()
}
// OpenTempAndEdit creates a temporary .md file, writes initial content if provided,
// opens it in the resolved editor, then reads the final content and removes the file.
// Returns the trimmed content. The temp file is removed even on error paths
// via the deferred os.Remove.
func OpenTempAndEdit(initial []byte) (string, error) {
	ed, err := Resolve()
	if err != nil {
		return "", err
	}
	// Create temp file under system temp dir; ensure .md suffix
	dir := os.TempDir()
	f, err := os.CreateTemp(dir, "hexai-*.md")
	if err != nil {
		return "", err
	}
	path := f.Name()
	defer func() { _ = os.Remove(path) }()
	if len(initial) > 0 {
		if _, err := f.Write(initial); err != nil {
			_ = f.Close()
			return "", err
		}
	}
	// Flush to disk before the editor opens the file.
	if err := f.Sync(); err != nil {
		_ = f.Close()
		return "", err
	}
	if err := f.Close(); err != nil {
		return "", err
	}
	if err := RunEditor(ed, path); err != nil {
		return "", err
	}
	b, err := os.ReadFile(filepath.Clean(path))
	if err != nil {
		return "", err
	}
	return strings.TrimSpace(string(b)), nil
}
package hexaiaction
import (
"context"
"fmt"
"io"
"os"
"path/filepath"
"time"
"codeberg.org/snonux/hexai/internal/tmux"
"golang.org/x/term"
)
// Options configures the command-line orchestration for hexai-tmux-action.
type Options struct {
	Infile      string // read input from this file instead of stdin
	Outfile     string // write output to this file instead of stdout
	UIChild     bool   // internal: run the interactive UI child mode
	TmuxTarget  string // tmux split target pane (advanced)
	TmuxSplit   string // "v" or "h"
	TmuxPercent int    // 1-100
}
// RunCommand is the CLI orchestrator used by cmd/hexai-tmux-action. It runs in tmux
// split-pane mode by default, or child mode when -ui-child is set.
// Note: the tmux-parent path does not currently use ctx or stderr.
func RunCommand(ctx context.Context, opts Options, stdin io.Reader, stdout, stderr io.Writer) error {
	if opts.UIChild {
		return runChild(ctx, opts.Infile, opts.Outfile, stdout, stderr)
	}
	// Always use tmux path
	return runInTmuxParent(stdin, stdout, opts.TmuxTarget, opts.TmuxSplit, opts.TmuxPercent)
}
// seams for unit tests; each var is replaced in tests to avoid touching a
// real terminal, tmux, or the process table.
var (
	isTTYFn        = func(fd uintptr) bool { return term.IsTerminal(int(fd)) }
	splitRunFn     = tmux.SplitRun   // launches the child in a tmux split
	osExecutableFn = os.Executable   // resolves this binary's own path
	runFn          = Run             // the interactive action flow
)
// openIO returns a reader/writer pair honoring the infile/outfile flags,
// plus close functions for each (no-ops for the stdin/stdout defaults).
// On error, any file already opened by this call is closed before returning,
// so callers never leak a handle.
func openIO(infile, outfile string) (io.Reader, io.Writer, func(), func(), error) {
	in := io.Reader(os.Stdin)
	out := io.Writer(os.Stdout)
	closeIn := func() {}
	closeOut := func() {}
	if path := infile; path != "" {
		f, err := os.Open(path)
		if err != nil {
			return nil, nil, func() {}, func() {}, fmt.Errorf("hexai-tmux-action: cannot open infile: %w", err)
		}
		in = f
		closeIn = func() { _ = f.Close() }
	}
	if path := outfile; path != "" {
		f, err := os.Create(path)
		if err != nil {
			// Do not leak the already-opened infile handle.
			closeIn()
			return nil, nil, func() {}, func() {}, fmt.Errorf("hexai-tmux-action: cannot open outfile: %w", err)
		}
		out = f
		closeOut = func() { _ = f.Close() }
	}
	return in, out, closeIn, closeOut, nil
}
// runChild runs the interactive flow and writes the final output atomically when outfile is set.
// Atomicity: output goes to outfile+".tmp" and is renamed into place at the
// end, so the parent's waitForFile only ever sees a complete file.
// On run failure, the input is echoed through to the tmp file instead so the
// parent still unblocks — note the rename happens on both paths.
func runChild(ctx context.Context, infile, outfile string, stdout, stderr io.Writer) error {
	if outfile == "" {
		// No atomic handoff needed; just run normally to provided stdout
		var in io.Reader = os.Stdin
		if infile != "" {
			f, err := os.Open(infile)
			if err != nil {
				return err
			}
			defer func() { _ = f.Close() }()
			in = f
		}
		return runFn(ctx, in, stdout, stderr)
	}
	tmp := outfile + ".tmp"
	in, out, closeIn, closeOut, err := openIO(infile, tmp)
	if err != nil {
		return err
	}
	defer closeIn()
	if err := runFn(ctx, in, out, stderr); err != nil {
		closeOut()
		// Fall back to copying the input through so the parent gets output.
		if copyErr := echoThrough(infile, tmp, os.Stdin, stdout); copyErr != nil {
			return fmt.Errorf("hexai-tmux-action child: %v; echo failed: %v", err, copyErr)
		}
	} else {
		closeOut()
	}
	return os.Rename(tmp, outfile)
}
// runInTmuxParent persists stdin to a temp file, launches this binary in a
// tmux split as -ui-child, waits (up to 60s) for the child's atomically
// renamed reply file, and streams it to stdout. The temp dir is removed on
// return.
func runInTmuxParent(stdin io.Reader, stdout io.Writer, target, split string, percent int) error {
	dir, err := os.MkdirTemp("", "hexai-tmux-action-")
	if err != nil {
		return err
	}
	defer func() { _ = os.RemoveAll(dir) }()
	inPath := filepath.Join(dir, "input.txt")
	outPath := filepath.Join(dir, "reply.txt")
	if err := persistStdin(inPath, stdin); err != nil {
		return err
	}
	exe, err := osExecutableFn()
	if err != nil {
		return err
	}
	argv := []string{exe, "-ui-child", "-infile", inPath, "-outfile", outPath}
	// Any split value other than "h" means vertical.
	opts := tmux.SplitOpts{Target: target, Vertical: split != "h", Percent: percent}
	if err := splitRunFn(opts, argv); err != nil {
		return err
	}
	if err := waitForFile(outPath, 60*time.Second); err != nil {
		return err
	}
	return catFileTo(stdout, outPath)
}
// persistStdin copies all of stdin into the file at path and syncs it to
// disk so the tmux child sees complete input.
func persistStdin(path string, stdin io.Reader) error {
	f, err := os.Create(path)
	if err != nil {
		return err
	}
	defer func() { _ = f.Close() }()
	if _, err := io.Copy(f, stdin); err != nil {
		return err
	}
	return f.Sync()
}
// waitForFile polls until the file at path exists or timeout elapses,
// returning an error on timeout.
func waitForFile(path string, timeout time.Duration) error {
	const pollInterval = 200 * time.Millisecond
	deadline := time.Now().Add(timeout)
	for {
		if _, statErr := os.Stat(path); statErr == nil {
			return nil
		}
		if time.Now().After(deadline) {
			return fmt.Errorf("hexai-tmux-action: timeout waiting for reply file")
		}
		time.Sleep(pollInterval)
	}
}
// catFileTo streams the contents of the file at path to w.
func catFileTo(w io.Writer, path string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer func() { _ = f.Close() }()
	if _, copyErr := io.Copy(w, f); copyErr != nil {
		return copyErr
	}
	return nil
}
// echoThrough no longer used in tmux-only flow, but kept for potential reuse.
// It copies infile (or stdin when infile is "") to outfile (or stdout when
// outfile is ""), serving as a pass-through fallback.
func echoThrough(infile, outfile string, stdin io.Reader, stdout io.Writer) error {
	var in io.Reader = stdin
	var out io.Writer = stdout
	if infile != "" {
		f, err := os.Open(infile)
		if err != nil {
			return err
		}
		defer func() { _ = f.Close() }()
		in = f
	}
	if outfile != "" {
		f, err := os.Create(outfile)
		if err != nil {
			return err
		}
		defer func() { _ = f.Close() }()
		out = f
	}
	_, err := io.Copy(out, in)
	return err
}
package hexaiaction
import (
"bufio"
"io"
"strings"
"codeberg.org/snonux/hexai/internal/textutil"
)
// ParseInput splits raw stdin into optional diagnostics and selection/code.
// Format:
//
//	Diagnostics:\n
//	<one per line>\n
//	<blank line> (optional)\n
//	<rest is selection/code>
//
// If the header is absent, the entire input is treated as selection.
func ParseInput(r io.Reader) (InputParts, error) {
	data, err := io.ReadAll(bufio.NewReader(r))
	if err != nil {
		return InputParts{}, err
	}
	text := strings.TrimSpace(string(data))
	if text == "" {
		return InputParts{Selection: ""}, nil
	}
	lines := strings.Split(text, "\n")
	// Locate a line that case-insensitively equals "diagnostics:".
	header := -1
	for idx, line := range lines {
		if strings.EqualFold(strings.TrimSpace(line), "diagnostics:") {
			header = idx
			break
		}
	}
	if header < 0 {
		return InputParts{Selection: text}, nil
	}
	// Collect diagnostics until a blank line or EOF; the blank separator
	// itself is consumed.
	diags := []string{}
	rest := header + 1
	for rest < len(lines) {
		trimmed := strings.TrimSpace(lines[rest])
		rest++
		if trimmed == "" {
			break
		}
		diags = append(diags, trimmed)
	}
	selection := strings.TrimSpace(strings.Join(lines[rest:], "\n"))
	return InputParts{Selection: selection, Diagnostics: diags}, nil
}
// ExtractInstruction mirrors the LSP instructionFromSelection behavior (subset),
// scanning the first line for an instruction marker and removing it from the
// selection. It returns (instruction, remaining selection).
func ExtractInstruction(sel string) (string, string) { return textutil.InstructionFromSelection(sel) }
// Instruction markers recognized on the first line, in precedence order
// (matching the LSP):
//   - ;text;          (strict)
//   - /* text */      (single-line)
//   - <!-- text -->   (single-line)
//   - // text
//   - # text
//   - -- text
//
// The scanning helpers themselves live in internal/textutil.
package hexaiaction
import (
"context"
"strings"
"time"
"codeberg.org/snonux/hexai/internal/appconfig"
"codeberg.org/snonux/hexai/internal/llm"
"codeberg.org/snonux/hexai/internal/stats"
"codeberg.org/snonux/hexai/internal/textutil"
"codeberg.org/snonux/hexai/internal/tmux"
)
// Render performs simple {{var}} replacement like LSP, delegating to textutil.
func Render(t string, vars map[string]string) string { return textutil.RenderTemplate(t, vars) }
// StripFences removes surrounding markdown code fences, delegating to textutil.
func StripFences(s string) string { return textutil.StripCodeFences(s) }
// chatDoer is the minimal LLM client surface this package needs: a chat
// round-trip plus the client's default model name.
type chatDoer interface {
	Chat(ctx context.Context, msgs []llm.Message, opts ...llm.RequestOption) (string, error)
	DefaultModel() string
}

// providerNamer is optionally implemented by clients that can report their
// provider name (used for stats/status labels).
type providerNamer interface{ Name() string }

// requestArgs bundles the resolved model name with the per-request options
// passed to Chat.
type requestArgs struct {
	model   string
	options []llm.RequestOption
}
// providerOf reports the provider name of c when it implements providerNamer,
// falling back to the generic label "llm".
func providerOf(c any) string {
	named, ok := c.(providerNamer)
	if !ok {
		return "llm"
	}
	return named.Name()
}
// canonicalProvider normalizes a provider name to trimmed lowercase,
// defaulting to "openai" when blank.
func canonicalProvider(name string) string {
	normalized := strings.ToLower(strings.TrimSpace(name))
	if normalized != "" {
		return normalized
	}
	return "openai"
}
// defaultModelForProvider returns the configured default model for the given
// canonical provider name, falling back to the OpenAI model for unknown
// providers.
func defaultModelForProvider(cfg appconfig.App, provider string) string {
	switch provider {
	case "ollama":
		return cfg.OllamaModel
	case "copilot":
		return cfg.CopilotModel
	case "openrouter":
		// Previously missing: "openrouter" fell through to the OpenAI model
		// even though the config carries a dedicated OpenRouterModel.
		return cfg.OpenRouterModel
	default:
		return cfg.OpenAIModel
	}
}
// selectActionTemperature resolves the temperature for a code action, in
// precedence order: per-surface entry, global coding temperature, then an
// openai/gpt-5 special case. Returns (0, false) when nothing applies and the
// provider default should be used.
func selectActionTemperature(cfg appconfig.App, provider string, entry appconfig.SurfaceConfig, model string) (float64, bool) {
	if entry.Temperature != nil {
		return *entry.Temperature, true
	}
	if cfg.CodingTemperature != nil {
		temp := *cfg.CodingTemperature
		// The legacy 0.2 default is remapped to 1.0 for gpt-5 on openai —
		// presumably gpt-5 requires/behaves better at 1.0; TODO confirm.
		if provider == "openai" && strings.HasPrefix(strings.ToLower(model), "gpt-5") && temp == 0.2 {
			temp = 1.0
		}
		return temp, true
	}
	// No configured temperature: gpt-5 still gets an explicit 1.0.
	if provider == "openai" && strings.HasPrefix(strings.ToLower(model), "gpt-5") {
		return 1.0, true
	}
	return 0, false
}
// runRewrite renders the rewrite prompt pair (instruction + selection) and
// performs a single chat round-trip with the configured request options.
func runRewrite(ctx context.Context, cfg appconfig.App, client chatDoer, instruction, selection string) (string, error) {
	sys := cfg.PromptCodeActionRewriteSystem
	user := Render(cfg.PromptCodeActionRewriteUser, map[string]string{"instruction": instruction, "selection": selection})
	return runOnceWithOpts(ctx, client, sys, user, reqOptsFrom(cfg))
}
// runDiagnostics renders the diagnostics prompt (newline-joined diagnostics
// plus the selection) and performs a single chat round-trip.
func runDiagnostics(ctx context.Context, cfg appconfig.App, client chatDoer, diags []string, selection string) (string, error) {
	// Join only non-blank diagnostics. The previous index-based join could
	// leave a trailing newline when the final entries were blank.
	clean := make([]string, 0, len(diags))
	for _, d := range diags {
		if t := strings.TrimSpace(d); t != "" {
			clean = append(clean, t)
		}
	}
	sys := cfg.PromptCodeActionDiagnosticsSystem
	user := Render(cfg.PromptCodeActionDiagnosticsUser, map[string]string{"diagnostics": strings.Join(clean, "\n"), "selection": selection})
	return runOnceWithOpts(ctx, client, sys, user, reqOptsFrom(cfg))
}
// runDocument renders the documentation prompt for the selection and performs
// a single chat round-trip.
func runDocument(ctx context.Context, cfg appconfig.App, client chatDoer, selection string) (string, error) {
	sys := cfg.PromptCodeActionDocumentSystem
	user := Render(cfg.PromptCodeActionDocumentUser, map[string]string{"selection": selection})
	return runOnceWithOpts(ctx, client, sys, user, reqOptsFrom(cfg))
}
// runSimplify renders the simplify prompt for the selection and performs a
// single chat round-trip.
func runSimplify(ctx context.Context, cfg appconfig.App, client chatDoer, selection string) (string, error) {
	sys := cfg.PromptCodeActionSimplifySystem
	user := Render(cfg.PromptCodeActionSimplifyUser, map[string]string{"selection": selection})
	return runOnceWithOpts(ctx, client, sys, user, reqOptsFrom(cfg))
}
// runGoTest renders the Go-test-generation prompt for the given function
// source and performs a single chat round-trip.
func runGoTest(ctx context.Context, cfg appconfig.App, client chatDoer, funcCode string) (string, error) {
	sys := cfg.PromptCodeActionGoTestSystem
	user := Render(cfg.PromptCodeActionGoTestUser, map[string]string{"function": funcCode})
	return runOnceWithOpts(ctx, client, sys, user, reqOptsFrom(cfg))
}
// runCustom executes a user-defined custom action. A custom user template
// takes precedence (with optional custom system prompt, else the rewrite
// system prompt); otherwise the action's fixed instruction is routed through
// the standard rewrite flow.
func runCustom(ctx context.Context, cfg appconfig.App, client chatDoer, ca appconfig.CustomAction, parts InputParts) (string, error) {
	// If user template is provided, prefer it and optional system
	if strings.TrimSpace(ca.User) != "" {
		sys := cfg.PromptCodeActionRewriteSystem
		if strings.TrimSpace(ca.System) != "" {
			sys = ca.System
		}
		// Currently only selection is available in tmux path; diagnostics list not wired
		user := Render(ca.User, map[string]string{"selection": parts.Selection, "diagnostics": strings.Join(parts.Diagnostics, "\n")})
		return runOnceWithOpts(ctx, client, sys, user, reqOptsFrom(cfg))
	}
	// Else, use fixed instruction through rewrite template
	return runRewrite(ctx, cfg, client, ca.Instruction, parts.Selection)
}
// runOnce sends a two-message chat (system + user) with no extra request
// options and returns the fence-stripped reply. It delegates to
// runOnceWithOpts (with an empty requestArgs: no options, model falls back
// to the client default) so the stats/tmux-status bookkeeping lives in one
// place instead of being duplicated here.
func runOnce(ctx context.Context, client chatDoer, sys, user string) (string, error) {
	return runOnceWithOpts(ctx, client, sys, user, requestArgs{})
}
// runOnceWithOpts sends a two-message chat (system + user) with the given
// request options, strips code fences from the reply, records byte counts in
// the global stats, and best-effort refreshes the tmux status line. Errors
// from stats/tmux updates are deliberately ignored.
func runOnceWithOpts(ctx context.Context, client chatDoer, sys, user string, req requestArgs) (string, error) {
	msgs := []llm.Message{{Role: "system", Content: sys}, {Role: "user", Content: user}}
	txt, err := client.Chat(ctx, msgs, req.options...)
	if err != nil {
		return "", err
	}
	out := strings.TrimSpace(StripFences(txt))
	// Contribute to global stats and update tmux status.
	// Byte lengths of message contents are used as a cheap token proxy.
	sent := 0
	for _, m := range msgs {
		sent += len(m.Content)
	}
	recv := len(out)
	model := strings.TrimSpace(req.model)
	if model == "" {
		model = client.DefaultModel()
	}
	_ = stats.Update(ctx, providerOf(client), model, sent, recv)
	if snap, err := stats.TakeSnapshot(); err == nil {
		minsWin := snap.Window.Minutes()
		if minsWin <= 0 {
			minsWin = 0.001 // avoid division by zero on a degenerate window
		}
		scopeReqs := int64(0)
		if pe, ok := snap.Providers[providerOf(client)]; ok {
			if mc, ok2 := pe.Models[model]; ok2 {
				scopeReqs = mc.Reqs
			}
		}
		scopeRPM := float64(scopeReqs) / minsWin
		_ = tmux.SetStatus(tmux.FormatGlobalStatusColored(snap.Global.Reqs, snap.RPM, snap.Global.Sent, snap.Global.Recv, providerOf(client), model, scopeRPM, scopeReqs, snap.Window))
	}
	return out, nil
}
// reqOptsFrom builds LLM request options similar to LSP behavior.
// It resolves provider/model/temperature from the first code_action entry
// (falling back to the top-level provider's default model) and returns the
// resolved model alongside the options.
func reqOptsFrom(cfg appconfig.App) requestArgs {
	opts := make([]llm.RequestOption, 0, 3)
	if cfg.MaxTokens > 0 {
		opts = append(opts, llm.WithMaxTokens(cfg.MaxTokens))
	}
	provider := canonicalProvider(cfg.Provider)
	entries := cfg.CodeActionConfigs
	if len(entries) == 0 {
		// Synthesize a single entry from the top-level provider config.
		entries = []appconfig.SurfaceConfig{{Provider: cfg.Provider, Model: strings.TrimSpace(defaultModelForProvider(cfg, provider))}}
	}
	primary := entries[0]
	if strings.TrimSpace(primary.Provider) != "" {
		provider = canonicalProvider(primary.Provider)
	}
	model := strings.TrimSpace(primary.Model)
	if model == "" {
		model = strings.TrimSpace(defaultModelForProvider(cfg, provider))
	}
	// WithModel is only passed when the entry names a model explicitly;
	// otherwise the client's own default is used at request time.
	if strings.TrimSpace(primary.Model) != "" {
		opts = append(opts, llm.WithModel(strings.TrimSpace(primary.Model)))
	}
	if temp, ok := selectActionTemperature(cfg, provider, primary, model); ok {
		opts = append(opts, llm.WithTemperature(temp))
	}
	return requestArgs{model: model, options: opts}
}
// Timeout helpers to mirror LSP behavior.
// timeout10s returns a context with a 20-second deadline.
// NOTE(review): the name says 10s but the duration is 20s — the value looks
// raised without a rename; confirm before relying on the name.
func timeout10s(parent context.Context) (context.Context, context.CancelFunc) {
	return context.WithTimeout(parent, 20*time.Second)
}
// timeout8s returns a context with an 18-second deadline.
// NOTE(review): the name says 8s but the duration is 18s — same mismatch as
// timeout10s; confirm before relying on the name.
func timeout8s(parent context.Context) (context.Context, context.CancelFunc) {
	return context.WithTimeout(parent, 18*time.Second)
}
package hexaiaction
import (
"context"
"fmt"
"io"
"log"
"strings"
"time"
"codeberg.org/snonux/hexai/internal/appconfig"
"codeberg.org/snonux/hexai/internal/editor"
"codeberg.org/snonux/hexai/internal/llmutils"
"codeberg.org/snonux/hexai/internal/logging"
"codeberg.org/snonux/hexai/internal/stats"
"codeberg.org/snonux/hexai/internal/tmux"
)
// Run executes the hexai-tmux-action command flow.
// seams for testability: package-level function variables tests can stub to
// avoid launching the real TUI or constructing a real LLM client.
var (
	chooseActionFn   = RunTUI
	newClientFromApp = llmutils.NewClientFromApp
)

// configPathKey is the context key carrying a config-path override for Run.
type configPathKey struct{}

// selectedCustom carries the chosen custom action (if any) from the TUI submenu
// to the executor. Cleared after use.
// NOTE(review): package-level mutable state — not safe for concurrent Run calls.
var selectedCustom *appconfig.CustomAction
// Run executes the hexai-tmux-action command flow: load config, build the
// LLM client, read the selection from stdin, let the user pick an action in
// the TUI, execute it, and write the result to stdout.
func Run(ctx context.Context, stdin io.Reader, stdout, stderr io.Writer) error {
	logger := log.New(stderr, "hexai-tmux-action ", log.LstdFlags|log.Lmsgprefix)
	cfg := appconfig.LoadWithOptions(logger, appconfig.LoadOptions{ConfigPath: configPathFromContext(ctx)})
	if cfg.StatsWindowMinutes > 0 {
		stats.SetWindow(time.Duration(cfg.StatsWindowMinutes) * time.Minute)
	}
	if err := cfg.Validate(); err != nil {
		fmt.Fprintf(stderr, logging.AnsiBase+"hexai-tmux-action: %v"+logging.AnsiReset+"\n", err)
		return err
	}
	// Choose the action picker locally instead of mutating the package-level
	// seam: the old code overwrote chooseActionFn, so a later Run against a
	// config with no custom actions would still show the stale custom menu.
	choose := chooseActionFn
	if len(cfg.CustomActions) > 0 {
		choose = func() (ActionKind, error) { return RunTUIWithCustom(cfg.CustomActions, cfg.TmuxCustomMenuHotkey) }
	}
	if len(cfg.CodeActionConfigs) > 0 {
		if provider := strings.TrimSpace(cfg.CodeActionConfigs[0].Provider); provider != "" {
			cfg.Provider = provider
		}
	}
	cli, err := newClientFromApp(cfg)
	if err != nil {
		fmt.Fprintf(stderr, logging.AnsiBase+"hexai-tmux-action: LLM disabled: %v"+logging.AnsiReset+"\n", err)
		return err
	}
	primaryModel := strings.TrimSpace(reqOptsFrom(cfg).model)
	if primaryModel == "" {
		primaryModel = cli.DefaultModel()
	}
	// Best-effort status update; tmux may be absent, so errors are ignored.
	_ = tmux.SetStatus(tmux.FormatLLMStartStatus(cli.Name(), primaryModel))
	var client chatDoer = cli
	parts, err := ParseInput(stdin)
	if err != nil {
		fmt.Fprintln(stderr, logging.AnsiBase+"hexai-tmux-action: failed to read input"+logging.AnsiReset)
		return err
	}
	if strings.TrimSpace(parts.Selection) == "" {
		return fmt.Errorf("hexai-tmux-action: no input provided on stdin")
	}
	kind, err := choose()
	if err != nil {
		return err
	}
	out, err := executeAction(ctx, kind, parts, cfg, client, stderr)
	if err != nil {
		return err
	}
	// Propagate write failures (old code discarded them) so callers know the
	// result was not delivered.
	if _, err := io.WriteString(stdout, out); err != nil {
		return err
	}
	return nil
}
// WithConfigPath attaches a config path override to the context for Run/RunCommand.
// A nil ctx is tolerated and replaced with context.Background().
func WithConfigPath(ctx context.Context, path string) context.Context {
	if ctx == nil {
		ctx = context.Background()
	}
	trimmed := strings.TrimSpace(path)
	return context.WithValue(ctx, configPathKey{}, trimmed)
}
// configPathFromContext extracts a previously attached config-path override,
// returning "" when ctx is nil or no override is present.
func configPathFromContext(ctx context.Context) string {
	if ctx == nil {
		return ""
	}
	v, ok := ctx.Value(configPathKey{}).(string)
	if !ok {
		return ""
	}
	return strings.TrimSpace(v)
}
// executeAction dispatches the chosen action to its handler. Skip and any
// unrecognized kind echo the selection unchanged.
func executeAction(ctx context.Context, kind ActionKind, parts InputParts, cfg appconfig.App, client chatDoer, stderr io.Writer) (string, error) {
	handlers := map[ActionKind]func() (string, error){
		ActionRewrite:      func() (string, error) { return handleRewriteAction(ctx, parts, cfg, client, stderr) },
		ActionDiagnostics:  func() (string, error) { return handleDiagnosticsAction(ctx, parts, cfg, client) },
		ActionDocument:     func() (string, error) { return handleDocumentAction(ctx, parts, cfg, client) },
		ActionGoTest:       func() (string, error) { return handleGoTestAction(ctx, parts, cfg, client) },
		ActionSimplify:     func() (string, error) { return handleSimplifyAction(ctx, parts, cfg, client) },
		ActionCustom:       func() (string, error) { return handleCustomAction(ctx, parts, cfg, client) },
		ActionCustomPrompt: func() (string, error) { return handleCustomPromptAction(ctx, parts, cfg, client, stderr) },
	}
	if handler, ok := handlers[kind]; ok {
		return handler()
	}
	return parts.Selection, nil
}
// handleRewriteAction rewrites the selection per its inline instruction; when
// no instruction is embedded, it warns on stderr and echoes the input.
func handleRewriteAction(ctx context.Context, parts InputParts, cfg appconfig.App, client chatDoer, stderr io.Writer) (string, error) {
	instruction, cleaned := ExtractInstruction(parts.Selection)
	if strings.TrimSpace(instruction) == "" {
		fmt.Fprintln(stderr, logging.AnsiBase+"hexai-tmux-action: no inline instruction found; echoing input"+logging.AnsiReset)
		return parts.Selection, nil
	}
	run := func(cctx context.Context) (string, error) {
		return runRewrite(cctx, cfg, client, instruction, cleaned)
	}
	return runWithTimeout(ctx, timeout10s, run)
}
// handleDiagnosticsAction asks the LLM to address the provided diagnostics
// against the selection, under the standard action timeout.
func handleDiagnosticsAction(ctx context.Context, parts InputParts, cfg appconfig.App, client chatDoer) (string, error) {
	run := func(cctx context.Context) (string, error) {
		return runDiagnostics(cctx, cfg, client, parts.Diagnostics, parts.Selection)
	}
	return runWithTimeout(ctx, timeout10s, run)
}
// handleDocumentAction asks the LLM to document the selected code.
func handleDocumentAction(ctx context.Context, parts InputParts, cfg appconfig.App, client chatDoer) (string, error) {
	run := func(cctx context.Context) (string, error) {
		return runDocument(cctx, cfg, client, parts.Selection)
	}
	return runWithTimeout(ctx, timeout10s, run)
}
// handleGoTestAction asks the LLM to generate Go unit tests for the selection,
// under the shorter timeout8s deadline.
func handleGoTestAction(ctx context.Context, parts InputParts, cfg appconfig.App, client chatDoer) (string, error) {
	run := func(cctx context.Context) (string, error) {
		return runGoTest(cctx, cfg, client, parts.Selection)
	}
	return runWithTimeout(ctx, timeout8s, run)
}
// handleSimplifyAction asks the LLM to simplify and improve the selection.
func handleSimplifyAction(ctx context.Context, parts InputParts, cfg appconfig.App, client chatDoer) (string, error) {
	run := func(cctx context.Context) (string, error) {
		return runSimplify(cctx, cfg, client, parts.Selection)
	}
	return runWithTimeout(ctx, timeout10s, run)
}
// handleCustomAction runs the custom action previously stored in the
// package-level selectedCustom by the TUI submenu; when none was chosen the
// input is echoed unchanged. selectedCustom is cleared after the attempt,
// whether or not it succeeded.
func handleCustomAction(ctx context.Context, parts InputParts, cfg appconfig.App, client chatDoer) (string, error) {
	chosen := selectedCustom
	if chosen == nil {
		return parts.Selection, nil
	}
	return runWithTimeout(ctx, timeout10s, func(cctx context.Context) (string, error) {
		defer func() { selectedCustom = nil }()
		return runCustom(cctx, cfg, client, *chosen, parts)
	})
}
// handleCustomPromptAction opens $EDITOR for a free-form prompt and rewrites
// the selection with it; a canceled or empty prompt echoes the input.
func handleCustomPromptAction(ctx context.Context, parts InputParts, cfg appconfig.App, client chatDoer, stderr io.Writer) (string, error) {
	prompt, err := editor.OpenTempAndEdit(nil)
	if err != nil || strings.TrimSpace(prompt) == "" {
		fmt.Fprintln(stderr, logging.AnsiBase+"hexai-tmux-action: custom prompt canceled or empty; echoing input"+logging.AnsiReset)
		return parts.Selection, nil
	}
	run := func(cctx context.Context) (string, error) {
		return runRewrite(cctx, cfg, client, prompt, parts.Selection)
	}
	return runWithTimeout(ctx, timeout10s, run)
}
func runWithTimeout(ctx context.Context, timeout func(context.Context) (context.Context, context.CancelFunc), fn func(context.Context) (string, error)) (string, error) {
innerCtx, cancel := timeout(ctx)
defer cancel()
return fn(innerCtx)
}
// client construction is shared via internal/llmutils
package hexaiaction
import (
"fmt"
"strings"
"github.com/charmbracelet/bubbles/list"
tea "github.com/charmbracelet/bubbletea"
)
// item implements list.Item for the action menu.
type item struct {
	title, desc string
	kind        ActionKind // action executed when this entry is chosen
	hotkey      rune       // single-key shortcut shown next to the title
}

// Title returns the display title.
func (i item) Title() string { return i.title }

// Description returns the description line (currently always empty).
func (i item) Description() string { return i.desc }

// FilterValue returns the text the bubbles list filters/identifies items by.
func (i item) FilterValue() string { return i.title }

// model is the bubbletea model for the single-select action menu.
type model struct {
	list   list.Model
	chosen ActionKind // the user's selection; zero value until done
	done   bool       // true once a choice (or quit) ended the menu
}
// newModel builds the default action menu with its per-item hotkeys and a
// compact, chrome-free list (no title, help, status bar, or filtering).
func newModel() model {
	entries := []list.Item{
		item{title: "Rewrite selection", desc: "", kind: ActionRewrite, hotkey: 'r'},
		item{title: "Simplify and improve", desc: "", kind: ActionSimplify, hotkey: 'i'},
		item{title: "Document code", desc: "", kind: ActionDocument, hotkey: 'c'},
		item{title: "Generate Go unit test(s)", desc: "", kind: ActionGoTest, hotkey: 't'},
		item{title: "Custom prompt", desc: "", kind: ActionCustomPrompt, hotkey: 'p'},
		item{title: "Skip", desc: "", kind: ActionSkip, hotkey: 's'},
	}
	menu := list.New(entries, oneLineDelegate{}, 0, 0)
	// Disable every piece of list chrome for a minimal one-line-per-item menu.
	for _, disable := range []func(bool){menu.SetShowTitle, menu.SetShowHelp, menu.SetShowStatusBar, menu.SetFilteringEnabled} {
		disable(false)
	}
	return model{list: menu}
}
// Init implements tea.Model; no startup command is needed.
func (m model) Init() tea.Cmd { return nil }

// Update implements tea.Model: key presses go to handleKey, window resizes
// adjust the list, and everything else is delegated to the embedded list.
func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
	switch msg := msg.(type) {
	case tea.KeyMsg:
		return handleKey(m, msg)
	case tea.WindowSizeMsg:
		m.list.SetSize(msg.Width, msg.Height)
	}
	var cmd tea.Cmd
	m.list, cmd = m.list.Update(msg)
	return m, cmd
}
// handleKey processes one key press: ESC/q quit as Skip, enter confirms the
// highlighted entry, vim-style keys move the cursor, and item hotkeys select
// and confirm in one stroke (uppercase variants work too, via case-folding).
func handleKey(m model, msg tea.KeyMsg) (tea.Model, tea.Cmd) {
	raw := msg.String()
	// Handle Shift+G before case-folding: the old code lowered "G" to "g",
	// jumped to the top via the "g" case, then corrected to the end in a
	// post-switch check — same net result, but it needlessly moved twice.
	if raw == "G" { // Shift+G jumps to end
		if n := len(m.list.Items()); n > 0 {
			m.list.Select(n - 1)
		}
		return m, nil
	}
	low := strings.ToLower(raw)
	switch low {
	case "esc", "q":
		// Treat ESC and q as Skip/quit
		m.chosen = ActionSkip
		m.done = true
		return m, tea.Quit
	case "enter":
		if it, ok := m.list.SelectedItem().(item); ok {
			m.chosen = it.kind
			m.done = true
			return m, tea.Quit
		}
	case "j", "down":
		m.list.CursorDown()
	case "k", "up":
		m.list.CursorUp()
	case "g", "home":
		m.list.Select(0)
	case "end":
		if n := len(m.list.Items()); n > 0 {
			m.list.Select(n - 1)
		}
	case "s", "r", "c", "t", "i", "p":
		// NOTE(review): this hardcoded key list must stay in sync with the
		// hotkeys assigned in newModel.
		for i, li := range m.list.Items() {
			if it, ok := li.(item); ok && strings.ToLower(string(it.hotkey)) == low {
				m.list.Select(i)
				m.chosen = it.kind
				m.done = true
				return m, tea.Quit
			}
		}
	}
	return m, nil
}
// View renders the menu while it is active, and nothing once a choice has
// been made (so the menu disappears cleanly on exit).
func (m model) View() string {
	if !m.done {
		return m.list.View()
	}
	return ""
}
// RunTUI shows the default action menu and returns the chosen ActionKind.
// A zero-value choice (the program ended without an explicit pick) maps to
// ActionSkip.
func RunTUI() (ActionKind, error) {
	// Use the teaNewProgram seam (as RunTUIWithCustom already does) so tests
	// can stub program creation; the old code called tea.NewProgram directly,
	// making this path untestable.
	p := teaNewProgram(newModel())
	md, err := p.Run()
	if err != nil {
		return ActionSkip, err
	}
	m, ok := md.(model)
	if !ok {
		return ActionSkip, fmt.Errorf("unexpected model type")
	}
	if m.chosen == "" {
		return ActionSkip, nil
	}
	return m.chosen, nil
}
package hexaiaction
import (
"unicode/utf8"
"github.com/charmbracelet/bubbles/list"
tea "github.com/charmbracelet/bubbletea"
"codeberg.org/snonux/hexai/internal/appconfig"
)
// RunTUIWithCustom shows the main menu plus a configurable "Custom actions…" item.
// If the user selects that item, it shows a submenu listing user-defined custom actions.
// On picking one, it sets selectedCustom and returns ActionCustom.
func RunTUIWithCustom(customs []appconfig.CustomAction, menuHotkey string) (ActionKind, error) {
	// When no customs, fall back to default menu
	if len(customs) == 0 {
		return RunTUI()
	}
	// Build main menu with an extra entry; the hotkey defaults to 'a' unless
	// the config supplies a decodable first rune.
	hk := 'a'
	if r, _ := utf8.DecodeRuneInString(menuHotkey); r != utf8.RuneError && r != 0 {
		hk = r
	}
	// Create a model with default items plus Custom actions…
	m := newModel()
	items := m.list.Items()
	items = append(items, item{title: "Custom actions…", desc: "", kind: ActionCustom, hotkey: hk})
	m.list.SetItems(items)
	// Run main menu
	p := teaNewProgram(m)
	md, err := p.Run()
	if err != nil {
		return ActionSkip, err
	}
	if mm, ok := md.(model); ok {
		// If user chose built-in items (including Custom prompt), return immediately.
		if mm.chosen != ActionCustom {
			return mm.chosen, nil
		}
	}
	// Custom submenu: list each action; select one maps to ActionCustom and sets global
	sub := newModel()
	subItems := make([]list.Item, 0, len(customs))
	for _, ca := range customs {
		// A zero rune means "no hotkey" for this entry.
		r := rune(0)
		if rr, _ := utf8.DecodeRuneInString(ca.Hotkey); rr != utf8.RuneError && rr != 0 {
			r = rr
		}
		subItems = append(subItems, item{title: ca.Title, desc: "", kind: ActionCustom, hotkey: r})
	}
	sub.list.SetItems(subItems)
	sp := teaNewProgram(sub)
	smd, err := sp.Run()
	if err != nil {
		return ActionSkip, err
	}
	if sm, ok := smd.(model); ok {
		if it, ok := sm.list.SelectedItem().(item); ok {
			// Map by title back to the config entry and stash it in the
			// package-level selectedCustom for handleCustomAction to consume.
			// NOTE(review): titles are assumed unique; duplicates pick the first.
			for i := range customs {
				if customs[i].Title == it.title {
					c := customs[i]
					selectedCustom = &c
					return ActionCustom, nil
				}
			}
		}
	}
	return ActionSkip, nil
}
// teaNewProgram is a tiny seam for tests to stub bubbletea program creation.
var teaNewProgram = func(m model) teaProgram { return tea.NewProgram(m) }

// teaProgram is the subset of bubbletea.Program we need; enables testing seam.
type teaProgram interface{ Run() (tea.Model, error) }
package hexaiaction
import (
"fmt"
"io"
"github.com/charmbracelet/bubbles/list"
tea "github.com/charmbracelet/bubbletea"
"github.com/charmbracelet/lipgloss"
)
// oneLineDelegate renders a single compact line per item, no spacing.
type oneLineDelegate struct{}

var (
	// hotStyle highlights the " (x)" hotkey suffix.
	hotStyle = lipgloss.NewStyle().Bold(true).Foreground(lipgloss.Color("205"))
	// cursorStyle bolds the "> " marker on the highlighted row.
	cursorStyle = lipgloss.NewStyle().Bold(true)
)

// Height is 1: each entry occupies exactly one terminal row.
func (oneLineDelegate) Height() int { return 1 }

// Spacing is 0: no blank lines between entries.
func (oneLineDelegate) Spacing() int { return 0 }

// Update ignores all messages; the delegate is stateless.
func (oneLineDelegate) Update(tea.Msg, *list.Model) tea.Cmd { return nil }
// Render writes one compact row: a "> " cursor on the active index, the item
// title, and a styled "(h)" hotkey suffix ('?' when the item is not ours).
func (oneLineDelegate) Render(w io.Writer, m list.Model, index int, listItem list.Item) {
	label := listItem.FilterValue()
	key := '?'
	if entry, ok := listItem.(item); ok {
		key = entry.hotkey
	}
	marker := " "
	if index == m.Index() {
		marker = cursorStyle.Render("> ")
	}
	suffix := hotStyle.Render(fmt.Sprintf(" (%c)", key))
	fmt.Fprintf(w, "%s%s%s", marker, label, suffix)
}
// Summary: Hexai CLI runner; reads input, creates an LLM client, builds messages,
// streams or collects the model output, and prints a short summary to stderr.
package hexaicli
import (
"bytes"
"context"
"fmt"
"io"
"log"
"os"
"strings"
"sync"
"time"
"codeberg.org/snonux/hexai/internal/appconfig"
"codeberg.org/snonux/hexai/internal/editor"
"codeberg.org/snonux/hexai/internal/llm"
"codeberg.org/snonux/hexai/internal/llmutils"
"codeberg.org/snonux/hexai/internal/logging"
"codeberg.org/snonux/hexai/internal/stats"
"codeberg.org/snonux/hexai/internal/tmux"
"github.com/mattn/go-runewidth"
"golang.org/x/term"
)
// requestArgs bundles the resolved model name with the request options
// passed to the LLM client.
type requestArgs struct {
	model   string
	options []llm.RequestOption
}

// cliJob is one provider/model invocation to run for a CLI request.
type cliJob struct {
	index    int // stable output/column index
	provider string
	entry    appconfig.SurfaceConfig
	client   llm.Client
	req      requestArgs
}

// columnPrinter interleaves the streamed output of several jobs into
// fixed-width side-by-side columns; all writes are serialized by mu.
type columnPrinter struct {
	mu        sync.Mutex
	stdout    io.Writer
	columns   int
	colWidth  int
	partial   []string // per-column buffered text not yet ending in '\n'
	providers []string
	models    []string
}

// columnWriter adapts one column of a columnPrinter to io.Writer.
type columnWriter struct {
	printer *columnPrinter
	index   int
}

// Context keys for CLI run options.
type (
	selectionContextKey  struct{} // carries []int of selected provider indices
	configPathContextKey struct{} // carries a config file path override
)
// buildCLIJobs constructs one job per configured CLI provider entry, creating
// an LLM client and resolving the model for each. With no entries configured,
// a single job using the global defaults is produced.
func buildCLIJobs(cfg appconfig.App) ([]cliJob, error) {
	configured := cfg.CLIConfigs
	if len(configured) == 0 {
		configured = []appconfig.SurfaceConfig{{}}
	}
	jobs := make([]cliJob, 0, len(configured))
	for i, raw := range configured {
		entry := appconfig.SurfaceConfig{
			Provider:    strings.TrimSpace(raw.Provider),
			Model:       strings.TrimSpace(raw.Model),
			Temperature: raw.Temperature,
		}
		name := entry.Provider
		if name == "" {
			name = cfg.Provider
		}
		name = canonicalProvider(name)
		// Derive a per-job config so an entry's model override only affects
		// the provider it targets.
		derived := cfg
		derived.Provider = name
		if entry.Model != "" {
			switch name {
			case "openai":
				derived.OpenAIModel = entry.Model
			case "copilot":
				derived.CopilotModel = entry.Model
			case "ollama":
				derived.OllamaModel = entry.Model
			}
		}
		client, err := newClientFromApp(derived)
		if err != nil {
			return nil, err
		}
		req := buildCLIRequest(entry, name, cfg, client)
		if strings.TrimSpace(req.model) == "" {
			req.model = strings.TrimSpace(client.DefaultModel())
		}
		jobs = append(jobs, cliJob{index: i, provider: name, entry: entry, client: client, req: req})
	}
	return jobs, nil
}
// buildCLIRequest resolves the model and request options for a single CLI
// job: entry model first, then the client default, then the provider default.
func buildCLIRequest(entry appconfig.SurfaceConfig, provider string, cfg appconfig.App, client llm.Client) requestArgs {
	options := make([]llm.RequestOption, 0, 2)
	if cfg.MaxTokens > 0 {
		options = append(options, llm.WithMaxTokens(cfg.MaxTokens))
	}
	model := strings.TrimSpace(entry.Model)
	if model == "" {
		if client != nil {
			model = strings.TrimSpace(client.DefaultModel())
		}
		if model == "" {
			model = strings.TrimSpace(defaultModelForProvider(cfg, provider))
		}
	}
	// Only send an explicit model option when the entry named one.
	if entry.Model != "" {
		options = append(options, llm.WithModel(entry.Model))
	}
	if temp, ok := cliTemperatureFromEntry(cfg, provider, entry, model); ok {
		options = append(options, llm.WithTemperature(temp))
	}
	return requestArgs{model: model, options: options}
}
// cliTemperatureFromEntry picks the temperature for a job: the entry override
// wins, then the global coding temperature, then a gpt-5 fallback of 1.0.
// Returns ok=false when no temperature should be sent at all.
func cliTemperatureFromEntry(cfg appconfig.App, provider string, entry appconfig.SurfaceConfig, model string) (float64, bool) {
	isGPT5 := provider == "openai" && strings.HasPrefix(strings.ToLower(model), "gpt-5")
	switch {
	case entry.Temperature != nil:
		return *entry.Temperature, true
	case cfg.CodingTemperature != nil:
		temp := *cfg.CodingTemperature
		// Presumably 0.2 is the stock coding temperature that gpt-5 rejects,
		// so it is bumped to 1.0 for that family — TODO confirm rationale.
		if isGPT5 && temp == 0.2 {
			temp = 1.0
		}
		return temp, true
	case isGPT5:
		return 1.0, true
	default:
		return 0, false
	}
}
// canonicalProvider normalizes a provider name to lowercase with surrounding
// whitespace removed; an empty name defaults to "openai".
func canonicalProvider(name string) string {
	normalized := strings.ToLower(strings.TrimSpace(name))
	if normalized == "" {
		return "openai"
	}
	return normalized
}
// defaultModelForProvider returns the configured default model for the given
// canonical provider; any unrecognized provider falls back to the OpenAI model.
func defaultModelForProvider(cfg appconfig.App, provider string) string {
	if provider == "ollama" {
		return cfg.OllamaModel
	}
	if provider == "copilot" {
		return cfg.CopilotModel
	}
	return cfg.OpenAIModel
}
// Run executes the Hexai CLI behavior given arguments and I/O streams.
// It assumes flags have already been parsed by the caller.
// Flow: load config → build per-provider jobs → optionally filter by the
// context-supplied selection → gather input (args/stdin/$EDITOR) → run jobs.
func Run(ctx context.Context, args []string, stdin io.Reader, stdout, stderr io.Writer) error {
	// Load configuration with a logger so file-based config is respected.
	logger := log.New(stderr, "hexai ", log.LstdFlags|log.Lmsgprefix)
	configPath := configPathFromContext(ctx)
	cfg := appconfig.LoadWithOptions(logger, appconfig.LoadOptions{ConfigPath: configPath})
	if cfg.StatsWindowMinutes > 0 {
		stats.SetWindow(time.Duration(cfg.StatsWindowMinutes) * time.Minute)
	}
	jobs, err := buildCLIJobs(cfg)
	if err != nil {
		fmt.Fprintf(stderr, logging.AnsiBase+"hexai: LLM disabled: %v"+logging.AnsiReset+"\n", err)
		return err
	}
	if selected := selectionFromContext(ctx); len(selected) > 0 {
		jobs, err = filterJobsBySelection(jobs, selected)
		if err != nil {
			fmt.Fprintf(stderr, logging.AnsiBase+"hexai: %v"+logging.AnsiReset+"\n", err)
			return err
		}
	}
	if len(jobs) == 0 {
		return fmt.Errorf("hexai: no CLI providers configured")
	}
	// Prefer piped stdin when present; only open the editor when there are no args
	// and no stdin content available.
	input, rerr := readInput(stdin, args)
	if rerr != nil && len(args) == 0 {
		// Editor fallback: a non-empty edited prompt becomes the args and
		// input is re-resolved through readInput.
		if prompt, eerr := editor.OpenTempAndEdit(nil); eerr == nil && strings.TrimSpace(prompt) != "" {
			args = []string{prompt}
			input, rerr = readInput(stdin, args)
		}
	}
	if rerr != nil {
		fmt.Fprintln(stderr, logging.AnsiBase+rerr.Error()+logging.AnsiReset)
		return rerr
	}
	msgs := buildMessagesFromConfig(cfg, input)
	if err := runCLIJobs(ctx, jobs, msgs, input, stdout, stderr); err != nil {
		fmt.Fprintf(stderr, logging.AnsiBase+"hexai: error: %v"+logging.AnsiReset+"\n", err)
		return err
	}
	return nil
}
// RunWithClient executes the CLI flow using an already-constructed client.
// Useful for testing and embedding.
func RunWithClient(ctx context.Context, args []string, stdin io.Reader, stdout, stderr io.Writer, client llm.Client) error {
	input, err := readInput(stdin, args)
	if err != nil {
		fmt.Fprintln(stderr, logging.AnsiBase+err.Error()+logging.AnsiReset)
		return err
	}
	req := requestArgs{model: strings.TrimSpace(client.DefaultModel())}
	printProviderInfo(stderr, client, req.model)
	if err := runChat(ctx, client, req, buildMessages(input), input, stdout, stderr); err != nil {
		fmt.Fprintf(stderr, logging.AnsiBase+"hexai: error: %v"+logging.AnsiReset+"\n", err)
		return err
	}
	return nil
}
// cliJobResult captures one job's streamed output and stderr summary for
// ordered replay after all jobs finish.
type cliJobResult struct {
	provider string
	model    string
	output   string // the model's full response text
	summary  string // the per-job stderr summary line(s)
	err      error
}
// runCLIJobs fans out one goroutine per job, multiplexing streamed output
// into columns when a printer exists, then replays buffered results and
// per-job summaries in job order. Returns the first job error, if any.
func runCLIJobs(ctx context.Context, jobs []cliJob, msgs []llm.Message, input string, stdout, stderr io.Writer) error {
	results := make([]*cliJobResult, len(jobs))
	var wg sync.WaitGroup
	var printer *columnPrinter
	if len(jobs) > 0 {
		printer = newColumnPrinter(stdout, jobs)
		printer.PrintHeader()
	}
	for _, job := range jobs {
		job := job // per-iteration copy for the goroutine below
		wg.Add(1)
		printProviderInfo(stderr, job.client, job.req.model)
		go func() {
			defer wg.Done()
			var errBuf bytes.Buffer // per-job summary destined for stderr
			var outBuf bytes.Buffer // used only when no column printer exists
			// Give each goroutine its own copy of msgs to avoid sharing.
			jobMsgs := make([]llm.Message, len(msgs))
			copy(jobMsgs, msgs)
			writer := io.Writer(&outBuf)
			if printer != nil {
				writer = printer.Writer(job.index)
			}
			err := runChat(ctx, job.client, job.req, jobMsgs, input, writer, &errBuf)
			if printer != nil {
				printer.Flush(job.index)
			}
			// Each goroutine writes only its own slot, so no lock is needed.
			results[job.index] = &cliJobResult{
				provider: job.client.Name(),
				model:    job.req.model,
				output:   outBuf.String(),
				summary:  errBuf.String(),
				err:      err,
			}
		}()
	}
	wg.Wait()
	var firstErr error
	// Without a printer, replay each job's buffered output sequentially with
	// a heading, separating jobs with a blank line.
	if printer == nil {
		printed := false
		for _, res := range results {
			if res == nil {
				continue
			}
			if printed {
				if _, err := io.WriteString(stdout, "\n"); err != nil {
					return err
				}
			}
			heading := fmt.Sprintf("=== %s:%s ===\n", res.provider, res.model)
			if _, err := io.WriteString(stdout, heading); err != nil {
				return err
			}
			if res.output != "" {
				if _, err := io.WriteString(stdout, res.output); err != nil {
					return err
				}
				if !strings.HasSuffix(res.output, "\n") {
					if _, err := io.WriteString(stdout, "\n"); err != nil {
						return err
					}
				}
			}
			printed = true
		}
	}
	// Emit per-job summaries and errors in order; remember the first error.
	for _, res := range results {
		if res == nil {
			continue
		}
		if res.summary != "" {
			summary := strings.TrimLeft(res.summary, "\n")
			if summary != "" {
				if _, err := io.WriteString(stderr, summary); err != nil {
					return err
				}
			}
		}
		if res.err != nil {
			if _, err := fmt.Fprintf(stderr, logging.AnsiBase+"hexai: provider=%s model=%s error: %v"+logging.AnsiReset+"\n", res.provider, res.model, res.err); err != nil {
				return err
			}
		}
		if firstErr == nil && res.err != nil {
			firstErr = res.err
		}
	}
	return firstErr
}
// newColumnPrinter sizes one output column per job from the terminal width
// (default 100 when undetectable, minimum 20 cells per column) and records
// each job's provider/model labels for the header.
func newColumnPrinter(stdout io.Writer, jobs []cliJob) *columnPrinter {
	count := len(jobs)
	total := detectTerminalWidth(stdout)
	if total <= 0 {
		total = 100
	}
	// Each " │ " separator between columns costs 3 cells.
	perColumn := (total - (count-1)*3) / count
	if perColumn < 20 {
		perColumn = 20
	}
	providers := make([]string, count)
	models := make([]string, count)
	for _, job := range jobs {
		providers[job.index] = job.client.Name()
		models[job.index] = job.req.model
	}
	return &columnPrinter{
		stdout:    stdout,
		columns:   count,
		colWidth:  perColumn,
		partial:   make([]string, count),
		providers: providers,
		models:    models,
	}
}
func detectTerminalWidth(w io.Writer) int {
type fder interface{ Fd() uintptr }
if f, ok := w.(*os.File); ok {
if width, _, err := term.GetSize(int(f.Fd())); err == nil {
return width
}
}
if f, ok := w.(fder); ok {
if width, _, err := term.GetSize(int(f.Fd())); err == nil {
return width
}
}
return 0
}
// Writer returns an io.Writer that routes all writes into column idx.
func (cp *columnPrinter) Writer(idx int) io.Writer {
	return columnWriter{printer: cp, index: idx}
}
// PrintHeader emits one "provider:model" label per column followed by a
// divider row of box-drawing dashes.
func (cp *columnPrinter) PrintHeader() {
	cp.mu.Lock()
	defer cp.mu.Unlock()
	labels := make([]string, cp.columns)
	for i := range labels {
		provider := strings.TrimSpace(cp.providers[i])
		model := strings.TrimSpace(cp.models[i])
		label := provider
		if provider != "" && model != "" {
			label = provider + ":" + model
		} else if model != "" {
			label = model
		}
		labels[i] = label
	}
	cp.writeLine(labels)
	rule := strings.Repeat("─", cp.colWidth)
	dividers := make([]string, cp.columns)
	for i := range dividers {
		dividers[i] = rule
	}
	cp.writeLine(dividers)
}
// Flush emits any buffered partial (newline-less) text for column idx as a
// final row; out-of-range indices and empty buffers are no-ops.
func (cp *columnPrinter) Flush(idx int) {
	cp.mu.Lock()
	defer cp.mu.Unlock()
	if idx < 0 || idx >= len(cp.partial) || cp.partial[idx] == "" {
		return
	}
	cp.emitJobLine(idx, cp.partial[idx])
	cp.partial[idx] = ""
}
// Write implements io.Writer by forwarding to the owning printer's column.
func (w columnWriter) Write(p []byte) (int, error) {
	return w.printer.write(w.index, string(p))
}
// write appends data to column idx's buffer and emits every complete line.
// Carriage returns are stripped for display, but the returned count is the
// ORIGINAL length of the data: the old code returned the post-strip length,
// which violates the io.Writer contract (a short write without an error)
// and would make io.Copy fail on input containing "\r".
func (cp *columnPrinter) write(idx int, data string) (int, error) {
	cp.mu.Lock()
	defer cp.mu.Unlock()
	n := len(data)
	// Out-of-range columns silently accept (and discard) the data.
	if idx < 0 || idx >= len(cp.partial) {
		return n, nil
	}
	cp.partial[idx] += strings.ReplaceAll(data, "\r", "")
	for strings.Contains(cp.partial[idx], "\n") {
		line, rest, _ := strings.Cut(cp.partial[idx], "\n")
		cp.partial[idx] = rest
		cp.emitJobLine(idx, line)
	}
	return n, nil
}
// emitJobLine wraps line to the column width and prints each resulting
// segment as a full row with only column idx populated.
func (cp *columnPrinter) emitJobLine(idx int, line string) {
	for _, segment := range cp.wrap(line) {
		row := make([]string, cp.columns)
		if idx >= 0 && idx < len(row) {
			row[idx] = segment
		}
		cp.writeLine(row)
	}
}
// wrap splits text into display-width segments no wider than the column,
// counting rune display widths (tabs become single spaces). Always returns
// at least one segment, possibly empty.
func (cp *columnPrinter) wrap(text string) []string {
	text = strings.ReplaceAll(text, "\t", " ")
	if runewidth.StringWidth(text) <= cp.colWidth {
		return []string{text}
	}
	var segments []string
	var seg strings.Builder
	used := 0
	for _, r := range text {
		w := runewidth.RuneWidth(r)
		if used+w > cp.colWidth && seg.Len() > 0 {
			segments = append(segments, seg.String())
			seg.Reset()
			used = 0
		}
		seg.WriteRune(r)
		used += w
	}
	if seg.Len() > 0 {
		segments = append(segments, seg.String())
	}
	if len(segments) == 0 {
		segments = append(segments, "")
	}
	return segments
}
// writeLine pads or truncates each cell to the column width (truncation adds
// an ellipsis) and writes one separator-joined row to stdout. Write errors
// are deliberately ignored: output is best-effort display.
func (cp *columnPrinter) writeLine(cells []string) {
	for len(cells) < cp.columns {
		cells = append(cells, "")
	}
	var row strings.Builder
	for i := 0; i < cp.columns; i++ {
		cell := cells[i]
		w := runewidth.StringWidth(cell)
		if w > cp.colWidth {
			cell = runewidth.Truncate(cell, cp.colWidth, "…")
			w = runewidth.StringWidth(cell)
		}
		row.WriteString(cell)
		for pad := cp.colWidth - w; pad > 0; pad-- {
			row.WriteByte(' ')
		}
		if i < cp.columns-1 {
			row.WriteString(" │ ")
		}
	}
	row.WriteByte('\n')
	_, _ = cp.stdout.Write([]byte(row.String()))
}
// WithCLISelection injects provider indices into the context so Run only executes those jobs.
// The slice is copied so later caller mutations cannot leak into the context.
func WithCLISelection(ctx context.Context, indices []int) context.Context {
	if ctx == nil {
		ctx = context.Background()
	}
	dup := append([]int(nil), indices...)
	return context.WithValue(ctx, selectionContextKey{}, dup)
}
// WithCLIConfigPath returns a context that carries the config file path override.
func WithCLIConfigPath(ctx context.Context, path string) context.Context {
	if ctx == nil {
		ctx = context.Background()
	}
	trimmed := strings.TrimSpace(path)
	return context.WithValue(ctx, configPathContextKey{}, trimmed)
}
// configPathFromContext returns the config-path override stored in ctx, or "".
func configPathFromContext(ctx context.Context) string {
	if ctx == nil {
		return ""
	}
	v, ok := ctx.Value(configPathContextKey{}).(string)
	if !ok {
		return ""
	}
	return strings.TrimSpace(v)
}
// selectionFromContext returns a defensive copy of the provider indices
// stored in ctx, or nil when ctx is nil or carries none.
func selectionFromContext(ctx context.Context) []int {
	if ctx == nil {
		return nil
	}
	v, ok := ctx.Value(selectionContextKey{}).([]int)
	if !ok {
		return nil
	}
	out := make([]int, len(v))
	copy(out, v)
	return out
}
// filterJobsBySelection keeps only the jobs named by indices (deduplicated,
// order-preserving) and renumbers their column indices. An empty selection
// keeps all jobs; an out-of-range index is an error.
func filterJobsBySelection(jobs []cliJob, indices []int) ([]cliJob, error) {
	if len(indices) == 0 {
		return jobs, nil
	}
	picked := make([]cliJob, 0, len(indices))
	used := map[int]struct{}{}
	for _, idx := range indices {
		if idx < 0 || idx >= len(jobs) {
			return nil, fmt.Errorf("provider index %d out of range (0-%d)", idx, len(jobs)-1)
		}
		if _, dup := used[idx]; dup {
			continue
		}
		used[idx] = struct{}{}
		picked = append(picked, jobs[idx])
	}
	if len(picked) == 0 {
		return nil, fmt.Errorf("no CLI providers matched selection")
	}
	for i := range picked {
		picked[i].index = i
	}
	return picked, nil
}
// readInput reads from stdin and args, then combines them per CLI rules.
func readInput(stdin io.Reader, args []string) (string, error) {
var stdinData string
if fi, err := os.Stdin.Stat(); err == nil && (fi.Mode()&os.ModeCharDevice) == 0 {
data, readErr := io.ReadAll(stdin)
if readErr != nil {
return "", fmt.Errorf("hexai: failed to read stdin: %w", readErr)
}
stdinData = strings.TrimSpace(string(data))
}
argData := strings.TrimSpace(strings.Join(args, " "))
switch {
case stdinData != "" && argData != "":
return fmt.Sprintf("%s:\n\n%s", argData, stdinData), nil
case stdinData != "":
return stdinData, nil
case argData != "":
return argData, nil
default:
return "", fmt.Errorf("hexai: no input provided; pass text as an argument or via stdin")
}
}
// newClientFromConfig builds an LLM client from the app config and env keys.
// client construction moved to internal/llmutils
// buildMessages creates the system and user messages for the default CLI
// persona: terse, command-oriented answers unless the prompt contains the
// word "explain", which switches to the verbose system prompt.
func buildMessages(input string) []llm.Message {
	system := "You are Hexai CLI. Default to very short, concise answers. If the user asks for commands, output only the commands (one per line) with no commentary or explanation. Only when the word 'explain' appears in the prompt, produce a verbose explanation."
	if strings.Contains(strings.ToLower(input), "explain") {
		system = "You are Hexai CLI. The user requested an explanation. Provide a clear, verbose explanation with reasoning and details. If commands are needed, include them with brief context."
	}
	return []llm.Message{
		{Role: "system", Content: system},
		{Role: "user", Content: input},
	}
}
// buildMessagesFromConfig builds the message pair from configured CLI system
// prompts; the explain prompt is substituted only when the input mentions
// "explain" AND the configured explain prompt is non-blank.
func buildMessagesFromConfig(cfg appconfig.App, input string) []llm.Message {
	system := cfg.PromptCLIDefaultSystem
	if strings.Contains(strings.ToLower(input), "explain") && strings.TrimSpace(cfg.PromptCLIExplainSystem) != "" {
		system = cfg.PromptCLIExplainSystem
	}
	return []llm.Message{
		{Role: "system", Content: system},
		{Role: "user", Content: input},
	}
}
// runChat executes the chat request, handling streaming and summary output.
// It streams chunks straight to out when the client supports llm.Streamer,
// otherwise prints the collected response; afterwards it records byte counts
// in the global stats and refreshes the tmux status line (best-effort).
func runChat(ctx context.Context, client llm.Client, req requestArgs, msgs []llm.Message, input string, out io.Writer, errw io.Writer) error {
	start := time.Now()
	// Best-effort tmux status update (colored start heartbeat)
	model := strings.TrimSpace(req.model)
	if model == "" {
		model = client.DefaultModel()
	}
	_ = tmux.SetStatus(tmux.FormatLLMStartStatus(client.Name(), model))
	var output string
	if s, ok := client.(llm.Streamer); ok {
		// Streaming path: echo chunks as they arrive and keep a copy for stats.
		var b strings.Builder
		if err := s.ChatStream(ctx, msgs, func(chunk string) {
			b.WriteString(chunk)
			fmt.Fprint(out, chunk)
		}, req.options...); err != nil {
			return err
		}
		output = b.String()
	} else {
		txt, err := client.Chat(ctx, msgs, req.options...)
		if err != nil {
			return err
		}
		output = txt
		fmt.Fprint(out, output)
	}
	dur := time.Since(start)
	// Contribute to global stats and update tmux status.
	// Counts are len() on the message strings: bytes, not runes or tokens.
	sent := 0
	for _, m := range msgs {
		sent += len(m.Content)
	}
	recv := len(output)
	_ = stats.Update(ctx, client.Name(), model, sent, recv)
	snap, _ := stats.TakeSnapshot()
	minsWin := snap.Window.Minutes()
	if minsWin <= 0 {
		// Guard against divide-by-zero for degenerate/unset windows.
		minsWin = 0.001
	}
	scopeReqs := int64(0)
	if pe, ok := snap.Providers[client.Name()]; ok {
		if mc, ok2 := pe.Models[model]; ok2 {
			scopeReqs = mc.Reqs
		}
	}
	scopeRPM := float64(scopeReqs) / minsWin
	fmt.Fprintf(errw, "\n"+logging.AnsiBase+"done provider=%s model=%s time=%s in_bytes=%d out_bytes=%d | global Σ reqs=%d rpm=%.2f"+logging.AnsiReset+"\n",
		client.Name(), model, dur.Round(time.Millisecond), sent, recv, snap.Global.Reqs, snap.RPM)
	_ = tmux.SetStatus(tmux.FormatGlobalStatusColored(snap.Global.Reqs, snap.RPM, snap.Global.Sent, snap.Global.Recv, client.Name(), model, scopeRPM, scopeReqs, snap.Window))
	return nil
}
// printProviderInfo writes the provider/model line to stderr, substituting
// the client's default model when none is given.
func printProviderInfo(errw io.Writer, client llm.Client, model string) {
	if strings.TrimSpace(model) == "" {
		model = client.DefaultModel()
	}
	format := logging.AnsiBase + "provider=%s model=%s" + logging.AnsiReset + "\n"
	fmt.Fprintf(errw, format, client.Name(), model)
}
// newClientFromApp is a var (not a func) so tests can stub client
// construction; it delegates to llmutils.
var newClientFromApp = llmutils.NewClientFromApp

// Backcompat for tests referencing the older helper name.
func newClientFromConfig(cfg appconfig.App) (llm.Client, error) { return newClientFromApp(cfg) }
// Summary: Hexai LSP runner; configures logging, loads config, builds the LLM client,
// and constructs/runs the LSP server (with injectable factory for tests).
package hexailsp
import (
"io"
"log"
"os"
"strings"
"time"
"codeberg.org/snonux/hexai/internal/appconfig"
"codeberg.org/snonux/hexai/internal/llm"
"codeberg.org/snonux/hexai/internal/logging"
"codeberg.org/snonux/hexai/internal/lsp"
"codeberg.org/snonux/hexai/internal/runtimeconfig"
"codeberg.org/snonux/hexai/internal/stats"
)
// ServerRunner is the minimal interface satisfied by lsp.Server.
type ServerRunner interface{ Run() error }

// ServerFactory creates a ServerRunner from the LSP transport streams,
// logger, and options. Default uses lsp.NewServer.
type ServerFactory func(r io.Reader, w io.Writer, logger *log.Logger, opts lsp.ServerOptions) ServerRunner
// Run configures logging, loads config, builds the LLM client and runs the LSP server.
// It is thin and delegates to RunWithConfig (with no config-path override),
// which in turn delegates to RunWithFactory for testability.
func Run(logPath string, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
	return RunWithConfig(logPath, "", stdin, stdout, stderr)
}
// RunWithConfig configures logging (optionally redirected to a file), loads
// and validates the config, then hands off to RunWithFactory.
// Errors are returned instead of logger.Fatalf'd: Fatalf os.Exit(1)'s past
// the deferred f.Close() and made this function's error return unreachable;
// the caller (main) already exits on a returned error.
func RunWithConfig(logPath string, configPath string, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
	logger := log.New(stderr, "hexai-lsp ", log.LstdFlags|log.Lmsgprefix)
	if strings.TrimSpace(logPath) != "" {
		f, err := os.OpenFile(logPath, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o644)
		if err != nil {
			logger.Printf("failed to open log file: %v", err)
			return err
		}
		defer f.Close()
		logger.SetOutput(f)
	}
	logging.Bind(logger)
	loadOpts := appconfig.LoadOptions{ConfigPath: configPath}
	cfg := appconfig.LoadWithOptions(logger, loadOpts)
	if err := cfg.Validate(); err != nil {
		logger.Printf("invalid config: %v", err)
		return err
	}
	if cfg.StatsWindowMinutes > 0 {
		stats.SetWindow(time.Duration(cfg.StatsWindowMinutes) * time.Minute)
	}
	return RunWithFactory(logPath, configPath, stdin, stdout, logger, cfg, nil, nil)
}
// RunWithFactory is the testable entrypoint. When client is nil, it is built from cfg+env.
// When factory is nil, lsp.NewServer is used.
// Validation and server errors are logged and returned rather than
// logger.Fatalf'd: Fatalf os.Exit(1)'s, which killed the test process and
// made the declared error return unreachable.
func RunWithFactory(logPath string, configPath string, stdin io.Reader, stdout io.Writer, logger *log.Logger, cfg appconfig.App, client llm.Client, factory ServerFactory) error {
	normalizeLoggingConfig(&cfg)
	if err := cfg.Validate(); err != nil {
		logger.Printf("invalid config: %v", err)
		return err
	}
	client = buildClientIfNil(cfg, client)
	factory = ensureFactory(factory)
	store := runtimeconfig.New(cfg)
	logContext := strings.TrimSpace(logPath) != ""
	loadOpts := appconfig.LoadOptions{ConfigPath: strings.TrimSpace(configPath)}
	opts := makeServerOptions(cfg, logContext, client, loadOpts)
	opts.ConfigLoadOptions = loadOpts
	opts.ConfigStore = store
	server := factory(stdin, stdout, logger, opts)
	// Hot-reload: when the server supports ApplyOptions, rebuild options on
	// every config-store change and push them in.
	if configurable, ok := server.(interface{ ApplyOptions(lsp.ServerOptions) }); ok {
		store.Subscribe(func(oldCfg, newCfg appconfig.App) {
			updated := newCfg
			normalizeLoggingConfig(&updated)
			if updated.StatsWindowMinutes > 0 {
				stats.SetWindow(time.Duration(updated.StatsWindowMinutes) * time.Minute)
			}
			// Keep the previous client when a new one cannot be built.
			if newClient := buildClientIfNil(updated, nil); newClient != nil {
				client = newClient
			}
			opts := makeServerOptions(updated, logContext, client, loadOpts)
			opts.ConfigStore = store
			configurable.ApplyOptions(opts)
		})
	}
	if err := server.Run(); err != nil {
		logger.Printf("server error: %v", err)
		return err
	}
	return nil
}
// --- helpers to keep RunWithFactory small ---
// normalizeLoggingConfig canonicalizes logging-related settings in place:
// ContextMode is trimmed and lower-cased, and a non-negative LogPreviewLimit
// is pushed into the logging package (negative values leave it untouched).
func normalizeLoggingConfig(cfg *appconfig.App) {
	mode := strings.TrimSpace(cfg.ContextMode)
	cfg.ContextMode = strings.ToLower(mode)
	if limit := cfg.LogPreviewLimit; limit >= 0 {
		logging.SetLogPreviewLimit(limit)
	}
}
// buildClientIfNil returns client unchanged when non-nil; otherwise it builds
// an llm.Client from cfg plus provider API keys read from the environment.
// It returns nil (LLM features disabled) when construction fails; the reason
// is logged.
func buildClientIfNil(cfg appconfig.App, client llm.Client) llm.Client {
	if client != nil {
		return client
	}
	llmCfg := llm.Config{
		Provider:              cfg.Provider,
		OpenAIBaseURL:         cfg.OpenAIBaseURL,
		OpenAIModel:           cfg.OpenAIModel,
		OpenAITemperature:     cfg.OpenAITemperature,
		OpenRouterBaseURL:     cfg.OpenRouterBaseURL,
		OpenRouterModel:       cfg.OpenRouterModel,
		OpenRouterTemperature: cfg.OpenRouterTemperature,
		OllamaBaseURL:         cfg.OllamaBaseURL,
		OllamaModel:           cfg.OllamaModel,
		OllamaTemperature:     cfg.OllamaTemperature,
		CopilotBaseURL:        cfg.CopilotBaseURL,
		CopilotModel:          cfg.CopilotModel,
		CopilotTemperature:    cfg.CopilotTemperature,
	}
	// Each provider prefers the HEXAI_-prefixed variable and falls back to
	// the conventional name.
	oaKey := firstNonBlankEnv("HEXAI_OPENAI_API_KEY", "OPENAI_API_KEY")
	orKey := firstNonBlankEnv("HEXAI_OPENROUTER_API_KEY", "OPENROUTER_API_KEY")
	cpKey := firstNonBlankEnv("HEXAI_COPILOT_API_KEY", "COPILOT_API_KEY")
	c, err := llm.NewFromConfig(llmCfg, oaKey, orKey, cpKey)
	if err != nil {
		logging.Logf("lsp ", "llm disabled: %v", err)
		return nil
	}
	logging.Logf("lsp ", "llm enabled provider=%s model=%s", c.Name(), c.DefaultModel())
	return c
}
// firstNonBlankEnv returns the value of the first environment variable in
// names whose value is not blank, or "" when none is set.
func firstNonBlankEnv(names ...string) string {
	for _, name := range names {
		if v := os.Getenv(name); strings.TrimSpace(v) != "" {
			return v
		}
	}
	return ""
}
// ensureFactory returns factory when provided; otherwise it returns a default
// factory that constructs the real lsp.Server.
func ensureFactory(factory ServerFactory) ServerFactory {
	if factory == nil {
		factory = func(r io.Reader, w io.Writer, logger *log.Logger, opts lsp.ServerOptions) ServerRunner {
			return lsp.NewServer(r, w, logger, opts)
		}
	}
	return factory
}
// makeServerOptions translates the validated application config into the
// lsp.ServerOptions consumed by the server. ConfigStore is intentionally nil
// here; RunWithFactory attaches the live store after construction.
func makeServerOptions(cfg appconfig.App, logContext bool, client llm.Client, loadOpts appconfig.LoadOptions) lsp.ServerOptions {
// Map custom actions from appconfig to lsp type
var customs []lsp.CustomAction
if len(cfg.CustomActions) > 0 {
customs = make([]lsp.CustomAction, 0, len(cfg.CustomActions))
for _, ca := range cfg.CustomActions {
customs = append(customs, lsp.CustomAction{
ID: ca.ID,
Title: ca.Title,
Kind: ca.Kind,
Scope: ca.Scope,
Instruction: ca.Instruction,
System: ca.System,
User: ca.User,
})
}
}
// Field-for-field mapping; cfg is copied so the server owns its snapshot.
return lsp.ServerOptions{
ConfigLoadOptions: loadOpts,
LogContext: logContext,
ConfigStore: nil,
Config: &cfg,
MaxTokens: cfg.MaxTokens,
ContextMode: cfg.ContextMode,
WindowLines: cfg.ContextWindowLines,
MaxContextTokens: cfg.MaxContextTokens,
CodingTemperature: cfg.CodingTemperature,
Client: client,
TriggerCharacters: cfg.TriggerCharacters,
ManualInvokeMinPrefix: cfg.ManualInvokeMinPrefix,
CompletionDebounceMs: cfg.CompletionDebounceMs,
CompletionThrottleMs: cfg.CompletionThrottleMs,
InlineOpen: cfg.InlineOpen,
InlineClose: cfg.InlineClose,
ChatSuffix: cfg.ChatSuffix,
ChatPrefixes: cfg.ChatPrefixes,
// Prompts
PromptCompSysGeneral: cfg.PromptCompletionSystemGeneral,
PromptCompSysParams: cfg.PromptCompletionSystemParams,
PromptCompSysInline: cfg.PromptCompletionSystemInline,
PromptCompUserGeneral: cfg.PromptCompletionUserGeneral,
PromptCompUserParams: cfg.PromptCompletionUserParams,
PromptCompExtraHeader: cfg.PromptCompletionExtraHeader,
PromptNativeCompletion: cfg.PromptNativeCompletion,
PromptChatSystem: cfg.PromptChatSystem,
PromptRewriteSystem: cfg.PromptCodeActionRewriteSystem,
PromptDiagnosticsSystem: cfg.PromptCodeActionDiagnosticsSystem,
PromptDocumentSystem: cfg.PromptCodeActionDocumentSystem,
PromptRewriteUser: cfg.PromptCodeActionRewriteUser,
PromptDiagnosticsUser: cfg.PromptCodeActionDiagnosticsUser,
PromptDocumentUser: cfg.PromptCodeActionDocumentUser,
PromptGoTestSystem: cfg.PromptCodeActionGoTestSystem,
PromptGoTestUser: cfg.PromptCodeActionGoTestUser,
PromptSimplifySystem: cfg.PromptCodeActionSimplifySystem,
PromptSimplifyUser: cfg.PromptCodeActionSimplifyUser,
CustomActions: customs,
}
}
// Summary: GitHub Copilot client for chat and Codex-style code completion.
package llm
import (
	"bytes"
	"context"
	"crypto/rand"
	"encoding/base64"
	"encoding/hex"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"net/http"
	"regexp"
	"strconv"
	"strings"
	"time"

	appver "codeberg.org/snonux/hexai/internal"
	"codeberg.org/snonux/hexai/internal/logging"
)
// copilotClient implements Client against GitHub Copilot's Chat Completions API.
// The long-lived GitHub token in apiKey is exchanged for a short-lived Copilot
// session token (see ensureSession), which is what actually authorizes requests.
type copilotClient struct {
httpClient *http.Client
apiKey string
baseURL string
defaultModel string
chatLogger logging.ChatLogger
defaultTemperature *float64
// cached Copilot session token retrieved from GitHub API using apiKey
// NOTE(review): these fields are only mutated via pointer receivers while
// Chat/CodeCompletion use value receivers, so the cache seems to be updated
// on a method-local copy and not persist across calls — confirm intent.
sessionToken string
tokenExpiry time.Time
}
// copilotChatRequest mirrors the OpenAI-style chat completion request body.
// Pointer fields are omitted from JSON when unset.
type copilotChatRequest struct {
Model string `json:"model"`
Messages []copilotMessage `json:"messages"`
Temperature *float64 `json:"temperature,omitempty"`
MaxTokens *int `json:"max_tokens,omitempty"`
Stop []string `json:"stop,omitempty"`
}
// copilotMessage is a single role/content chat turn.
type copilotMessage struct {
Role string `json:"role"`
Content string `json:"content"`
}
// copilotChatResponse mirrors the OpenAI-style response; Error is populated
// instead of Choices on API-level failures.
type copilotChatResponse struct {
Choices []struct {
Index int `json:"index"`
Message struct {
Role string `json:"role"`
Content string `json:"content"`
} `json:"message"`
FinishReason string `json:"finish_reason"`
} `json:"choices"`
Error *struct {
Message string `json:"message"`
Type string `json:"type"`
Param any `json:"param"`
Code any `json:"code"`
} `json:"error,omitempty"`
}
// Constructor (kept among the first functions by convention)
// newCopilot builds a Client for GitHub Copilot's chat API. A blank baseURL
// falls back to the public endpoint and a blank model to "gpt-4o-mini"
// (broadly available and cost-effective on the Copilot API); any trailing
// slash on baseURL is stripped so endpoint joins stay clean.
func newCopilot(baseURL, model, apiKey string, defaultTemp *float64) Client {
	if strings.TrimSpace(baseURL) == "" {
		baseURL = "https://api.githubcopilot.com"
	}
	if strings.TrimSpace(model) == "" {
		model = "gpt-4o-mini"
	}
	c := copilotClient{
		httpClient:         &http.Client{Timeout: 30 * time.Second},
		apiKey:             apiKey,
		baseURL:            strings.TrimRight(baseURL, "/"),
		defaultModel:       model,
		chatLogger:         logging.NewChatLogger("copilot"),
		defaultTemperature: defaultTemp,
	}
	return c
}
// Chat sends messages to Copilot's chat completions endpoint and returns the
// first choice's content. The model defaults to the client's configured model
// when opts do not override it.
// NOTE(review): c is a value receiver, so ensureSession's token caching
// happens on this call's copy only — each Chat call appears to fetch a fresh
// session token; confirm whether caching across calls was intended.
func (c copilotClient) Chat(ctx context.Context, messages []Message, opts ...RequestOption) (string, error) {
if strings.TrimSpace(c.apiKey) == "" {
return nilStringErr("missing Copilot API key")
}
// Ensure we have a fresh session token
if err := c.ensureSession(ctx); err != nil {
return "", err
}
// Apply request options over defaults.
o := Options{Model: c.defaultModel}
for _, opt := range opts {
opt(&o)
}
if o.Model == "" {
o.Model = c.defaultModel
}
start := time.Now()
// Copy messages into the anonymous-struct shape the chat logger expects.
logMessages := make([]struct{ Role, Content string }, len(messages))
for i, m := range messages {
logMessages[i] = struct{ Role, Content string }{m.Role, m.Content}
}
c.chatLogger.LogStart(false, o.Model, o.Temperature, o.MaxTokens, o.Stop, logMessages)
req := buildCopilotChatRequest(o, messages, c.defaultTemperature)
body, err := json.Marshal(req)
if err != nil {
logging.Logf("llm/copilot ", "marshal error: %v", err)
return "", err
}
endpoint := c.baseURL + "/chat/completions"
logging.Logf("llm/copilot ", "POST %s", endpoint)
resp, err := c.postJSON(ctx, endpoint, body, c.headersChat())
if err != nil {
logging.Logf("llm/copilot ", "%shttp error after %s: %v%s", logging.AnsiRed, time.Since(start), err, logging.AnsiBase)
return "", err
}
defer resp.Body.Close()
if err := handleCopilotNon2xx(resp, start); err != nil {
return "", err
}
out, err := decodeCopilotChat(resp, start)
if err != nil {
return "", err
}
if len(out.Choices) == 0 {
logging.Logf("llm/copilot ", "%sno choices returned duration=%s%s", logging.AnsiRed, time.Since(start), logging.AnsiBase)
return "", errors.New("copilot: no choices returned")
}
// Only the first choice is surfaced; additional choices are ignored.
content := out.Choices[0].Message.Content
logging.Logf("llm/copilot ", "success choice=0 finish=%s size=%d preview=%s%s%s duration=%s", out.Choices[0].FinishReason, len(content), logging.AnsiGreen, logging.PreviewForLog(content), logging.AnsiBase, time.Since(start))
return content, nil
}
// Provider metadata
// Name identifies this provider ("copilot") in logs and provider selection.
func (c copilotClient) Name() string { return "copilot" }
// DefaultModel returns the model used when a request does not override it.
func (c copilotClient) DefaultModel() string { return c.defaultModel }
// helpers
// buildCopilotChatRequest assembles the request body: messages are copied
// verbatim; an explicit non-zero temperature wins over the configured default
// (so temperature 0 cannot be requested explicitly); MaxTokens and Stop are
// included only when set.
func buildCopilotChatRequest(o Options, messages []Message, defaultTemp *float64) copilotChatRequest {
	msgs := make([]copilotMessage, len(messages))
	for i := range messages {
		msgs[i] = copilotMessage{Role: messages[i].Role, Content: messages[i].Content}
	}
	req := copilotChatRequest{Model: o.Model, Messages: msgs}
	switch {
	case o.Temperature != 0:
		req.Temperature = &o.Temperature
	case defaultTemp != nil:
		t := *defaultTemp
		req.Temperature = &t
	}
	if o.MaxTokens > 0 {
		req.MaxTokens = &o.MaxTokens
	}
	if len(o.Stop) > 0 {
		req.Stop = o.Stop
	}
	return req
}
// postJSON issues a POST of body to url with the supplied headers, honoring
// ctx for cancellation; the caller owns the response body.
func (c copilotClient) postJSON(ctx context.Context, url string, body []byte, headers map[string]string) (*http.Response, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(body))
	if err != nil {
		return nil, err
	}
	for name, value := range headers {
		req.Header.Set(name, value)
	}
	return c.httpClient.Do(req)
}
// handleCopilotNon2xx returns nil for 2xx responses; otherwise it best-effort
// decodes the API error body for a message and returns a descriptive error.
// On failure the response body is (partially) consumed.
func handleCopilotNon2xx(resp *http.Response, start time.Time) error {
	if code := resp.StatusCode; code >= 200 && code < 300 {
		return nil
	}
	var parsed copilotChatResponse
	// Decode errors are deliberately ignored; the body may not be JSON.
	_ = json.NewDecoder(resp.Body).Decode(&parsed)
	if e := parsed.Error; e != nil && strings.TrimSpace(e.Message) != "" {
		logging.Logf("llm/copilot ", "%sapi error status=%d type=%s msg=%s duration=%s%s", logging.AnsiRed, resp.StatusCode, e.Type, e.Message, time.Since(start), logging.AnsiBase)
		return fmt.Errorf("copilot error: %s (status %d)", e.Message, resp.StatusCode)
	}
	logging.Logf("llm/copilot ", "%shttp non-2xx status=%d duration=%s%s", logging.AnsiRed, resp.StatusCode, time.Since(start), logging.AnsiBase)
	return fmt.Errorf("copilot http error: status %d", resp.StatusCode)
}
// decodeCopilotChat decodes the JSON chat response body; decode failures are
// logged with elapsed time and returned with a zero-valued response.
func decodeCopilotChat(resp *http.Response, start time.Time) (copilotChatResponse, error) {
	var parsed copilotChatResponse
	err := json.NewDecoder(resp.Body).Decode(&parsed)
	if err == nil {
		return parsed, nil
	}
	logging.Logf("llm/copilot ", "%sdecode error after %s: %v%s", logging.AnsiRed, time.Since(start), err, logging.AnsiBase)
	return copilotChatResponse{}, err
}
// --- Copilot session token management ---
// ghCopilotTokenResp is the subset of GitHub's token-exchange response we use.
type ghCopilotTokenResp struct {
Token string `json:"token"`
}
// ensureSession exchanges the GitHub apiKey for a short-lived Copilot session
// token via api.github.com and caches it with its JWT expiry. A cached token
// is reused while it remains valid for more than 60 seconds.
// NOTE(review): callers invoke this through value-receiver methods, so the
// mutation at the bottom lands on a copy — verify caching actually persists.
func (c *copilotClient) ensureSession(ctx context.Context) error {
// If token valid for >60s, reuse
if c.sessionToken != "" && time.Now().Add(60*time.Second).Before(c.tokenExpiry) {
return nil
}
if strings.TrimSpace(c.apiKey) == "" {
return errors.New("missing Copilot API key")
}
req, err := http.NewRequestWithContext(ctx, http.MethodGet, "https://api.github.com/copilot_internal/v2/token", nil)
if err != nil {
return err
}
req.Header.Set("Authorization", "Bearer "+c.apiKey)
req.Header.Set("Accept", "application/json")
req.Header.Set("User-Agent", "hexai/"+appver.Version)
resp, err := c.httpClient.Do(req)
if err != nil {
return err
}
defer resp.Body.Close()
if resp.StatusCode < 200 || resp.StatusCode >= 300 {
return fmt.Errorf("copilot token http error: %d", resp.StatusCode)
}
var out ghCopilotTokenResp
if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
return err
}
if strings.TrimSpace(out.Token) == "" {
return errors.New("empty copilot session token")
}
// Parse JWT exp; fall back to a conservative 10-minute lifetime when the
// token carries no readable expiry.
exp := parseJWTExp(out.Token)
if exp.IsZero() {
exp = time.Now().Add(10 * time.Minute)
}
c.sessionToken = out.Token
c.tokenExpiry = exp
return nil
}
// jwtExpRe textually extracts a numeric "exp" claim; used only as a fallback
// when the JWT payload segment is not valid base64url.
var jwtExpRe = regexp.MustCompile(`"exp"\s*:\s*([0-9]+)`)
// parseJWTExp returns the expiry encoded in a JWT's "exp" claim, or the zero
// time.Time when the token is malformed or carries no readable expiry.
func parseJWTExp(token string) time.Time {
	parts := strings.Split(token, ".")
	if len(parts) < 2 {
		return time.Time{}
	}
	b, err := base64.RawURLEncoding.DecodeString(parts[1])
	if err != nil {
		// Payload is not base64url; grep the raw token as a last resort.
		if m := jwtExpRe.FindStringSubmatch(token); len(m) == 2 {
			if n, err2 := parseInt64(m[1]); err2 == nil {
				return time.Unix(n, 0)
			}
		}
		return time.Time{}
	}
	var payload struct {
		Exp int64 `json:"exp"`
	}
	// Decode errors are ignored; a zero Exp means "unknown" below.
	_ = json.Unmarshal(b, &payload)
	if payload.Exp == 0 {
		return time.Time{}
	}
	return time.Unix(payload.Exp, 0)
}
// parseInt64 parses a base-10 signed 64-bit integer. strconv.ParseInt is
// strict and cheap, unlike the previous fmt.Sscan-based version which
// tolerated leading whitespace/sign noise and used reflection.
func parseInt64(s string) (int64, error) { return strconv.ParseInt(s, 10, 64) }
// --- Copilot headers ---
// headersChat builds the header set for chat ("conversation-panel") requests,
// mimicking the VS Code Copilot Chat plugin.
// NOTE(review): the ensureSession error is discarded here; if the session
// fetch fails, Authorization carries an empty/stale token — callers already
// call ensureSession first, so confirm this second call is needed at all.
func (c *copilotClient) headersChat() map[string]string {
_ = c.ensureSession(context.Background())
h := map[string]string{
"Content-Type": "application/json; charset=utf-8",
"Accept": "application/json",
"Authorization": "Bearer " + c.sessionToken,
"User-Agent": "GitHubCopilotChat/0.8.0",
"Editor-Plugin-Version": "copilot-chat/0.8.0",
"Editor-Version": "vscode/1.85.1",
"Openai-Intent": "conversation-panel",
"Openai-Organization": "github-copilot",
// Machine/session/request IDs are synthesized per call (UUID-shaped).
"VScode-MachineId": randHex(64),
"VScode-SessionId": randHex(8) + "-" + randHex(4) + "-" + randHex(4) + "-" + randHex(4) + "-" + randHex(12),
"X-Request-Id": randHex(8) + "-" + randHex(4) + "-" + randHex(4) + "-" + randHex(4) + "-" + randHex(12),
}
return h
}
// headersGhost builds the header set for Codex-style ghost-text completion
// requests ("copilot-ghost" intent), mimicking the classic Copilot plugin.
// NOTE(review): like headersChat, the ensureSession error is discarded.
func (c *copilotClient) headersGhost() map[string]string {
_ = c.ensureSession(context.Background())
h := map[string]string{
"Content-Type": "application/json; charset=utf-8",
"Accept": "*/*",
"Authorization": "Bearer " + c.sessionToken,
"User-Agent": "GithubCopilot/1.155.0",
"Editor-Plugin-Version": "copilot/1.155.0",
"Editor-Version": "vscode/1.85.1",
"Openai-Intent": "copilot-ghost",
"Openai-Organization": "github-copilot",
// Machine/session/request IDs are synthesized per call (UUID-shaped).
"VScode-MachineId": randHex(64),
"VScode-SessionId": randHex(8) + "-" + randHex(4) + "-" + randHex(4) + "-" + randHex(4) + "-" + randHex(12),
"X-Request-Id": randHex(8) + "-" + randHex(4) + "-" + randHex(4) + "-" + randHex(4) + "-" + randHex(12),
}
return h
}
// randHex returns n lowercase hexadecimal characters sourced from crypto/rand.
// The previous implementation derived every character from the current
// UnixNano timestamp, producing predictable, near-constant strings — unusable
// as machine/session/request identifiers.
func randHex(n int) string {
	if n <= 0 {
		return ""
	}
	raw := make([]byte, (n+1)/2)
	if _, err := rand.Read(raw); err != nil {
		// Practically unreachable; fall back to the old clock-derived
		// scheme so callers still receive a well-formed value.
		const digits = "0123456789abcdef"
		b := make([]byte, n)
		for i := range b {
			b[i] = digits[int(time.Now().UnixNano()+int64(i))%len(digits)]
		}
		return string(b)
	}
	return hex.EncodeToString(raw)[:n]
}
// --- Codex-style code completion ---
// CodeCompletion implements CodeCompleter; returns up to n non-blank
// suggestions for the given prompt/suffix pair by calling the Codex-style
// completion proxy and accumulating the SSE stream per choice index.
// Previously the json.Marshal and io.ReadAll errors were silently discarded;
// they are now propagated.
func (c copilotClient) CodeCompletion(ctx context.Context, prompt string, suffix string, n int, language string, temperature float64) ([]string, error) {
	if strings.TrimSpace(c.apiKey) == "" {
		return nil, errors.New("missing Copilot API key")
	}
	if err := c.ensureSession(ctx); err != nil {
		return nil, err
	}
	if n <= 0 {
		n = 1
	}
	maxTokens := 500
	body := map[string]any{
		"extra": map[string]any{
			"language":            language,
			"next_indent":         0,
			"prompt_tokens":       500,
			"suffix_tokens":       400,
			"trim_by_indentation": true,
		},
		"max_tokens":  maxTokens,
		"n":           n,
		"nwo":         "hexai",
		"prompt":      prompt,
		"stop":        []string{"\n\n"},
		"stream":      true,
		"suffix":      suffix,
		"temperature": temperature,
		"top_p":       1,
	}
	buf, err := json.Marshal(body)
	if err != nil {
		return nil, err
	}
	url := "https://copilot-proxy.githubusercontent.com/v1/engines/copilot-codex/completions"
	resp, err := c.postJSON(ctx, url, buf, c.headersGhost())
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return nil, fmt.Errorf("copilot codex http error: %d", resp.StatusCode)
	}
	// Read the whole SSE body and accumulate "data: " payload text by choice
	// index; each event may carry partial text for several choices.
	raw, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	byIndex := make(map[int]string)
	for _, ln := range strings.Split(string(raw), "\n") {
		if !strings.HasPrefix(ln, "data: ") {
			continue
		}
		var evt struct {
			Choices []struct {
				Index int    `json:"index"`
				Text  string `json:"text"`
			} `json:"choices"`
		}
		// Non-JSON payloads (e.g. "[DONE]") are skipped.
		if err := json.Unmarshal([]byte(strings.TrimPrefix(ln, "data: ")), &evt); err != nil {
			continue
		}
		for _, ch := range evt.Choices {
			byIndex[ch.Index] += ch.Text
		}
	}
	out := make([]string, 0, len(byIndex))
	for i := 0; i < n; i++ {
		if s, ok := byIndex[i]; ok && strings.TrimSpace(s) != "" {
			out = append(out, s)
		}
	}
	return out, nil
}
// newLineDataReader wraps a streaming body and exposes a JSON decoder that
// decodes successive objects from lines prefixed by "data: ".
// (no streaming decoder needed; we parse whole body lines)
// Summary: Ollama client against a local server; supports chat responses and streaming via /api/chat.
package llm
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"strings"
"time"
"codeberg.org/snonux/hexai/internal/logging"
)
// ollamaClient implements Client against a local Ollama server.
type ollamaClient struct {
httpClient *http.Client
baseURL string
defaultModel string
chatLogger logging.ChatLogger
defaultTemperature *float64
}
// ollamaChatRequest is the /api/chat request body. Options carries the
// sampling parameters map (temperature, num_predict, stop) when any are set.
type ollamaChatRequest struct {
Model string `json:"model"`
Messages []oaMessage `json:"messages"`
Stream bool `json:"stream"`
Options any `json:"options,omitempty"`
}
// ollamaChatResponse is a single /api/chat response object; when streaming,
// each NDJSON event has this shape and Done marks the final event.
type ollamaChatResponse struct {
Message struct {
Role string `json:"role"`
Content string `json:"content"`
} `json:"message"`
Done bool `json:"done"`
Error string `json:"error,omitempty"`
}
// Constructor (kept among the first functions by convention)
// newOllama builds a Client for an Ollama server. A blank baseURL falls back
// to the conventional local endpoint and a blank model to a qwen3-coder
// default; any trailing slash on baseURL is stripped.
func newOllama(baseURL, model string, defaultTemp *float64) Client {
	if strings.TrimSpace(baseURL) == "" {
		baseURL = "http://localhost:11434"
	}
	if strings.TrimSpace(model) == "" {
		model = "qwen3-coder:30b-a3b-q4_K_M"
	}
	c := ollamaClient{
		httpClient:         &http.Client{Timeout: 30 * time.Second},
		baseURL:            strings.TrimRight(baseURL, "/"),
		defaultModel:       model,
		chatLogger:         logging.NewChatLogger("ollama"),
		defaultTemperature: defaultTemp,
	}
	return c
}
// Chat sends messages to /api/chat (non-streaming) and returns the response
// content. The model defaults to the client's configured model when opts do
// not override it; an empty response content is treated as an error.
func (c ollamaClient) Chat(ctx context.Context, messages []Message, opts ...RequestOption) (string, error) {
// Apply request options over defaults.
o := Options{Model: c.defaultModel}
for _, opt := range opts {
opt(&o)
}
if o.Model == "" {
o.Model = c.defaultModel
}
start := time.Now()
c.logStart(false, o, messages)
req := buildOllamaRequest(o, messages, c.defaultTemperature, false)
body, err := json.Marshal(req)
if err != nil {
return "", err
}
endpoint := c.baseURL + "/api/chat"
logging.Logf("llm/ollama ", "POST %s", endpoint)
resp, err := c.doJSON(ctx, endpoint, body)
if err != nil {
logging.Logf("llm/ollama ", "%shttp error after %s: %v%s", logging.AnsiRed, time.Since(start), err, logging.AnsiBase)
return "", err
}
defer resp.Body.Close()
if err := handleOllamaNon2xx(resp, start); err != nil {
return "", err
}
var out ollamaChatResponse
if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
logging.Logf("llm/ollama ", "%sdecode error after %s: %v%s", logging.AnsiRed, time.Since(start), err, logging.AnsiBase)
return "", err
}
// Blank content (only whitespace) counts as a failed completion.
if strings.TrimSpace(out.Message.Content) == "" {
logging.Logf("llm/ollama ", "%sempty content returned duration=%s%s", logging.AnsiRed, time.Since(start), logging.AnsiBase)
return "", errors.New("ollama: empty content")
}
content := out.Message.Content
logging.Logf("llm/ollama ", "success size=%d preview=%s%s%s duration=%s", len(content), logging.AnsiGreen, logging.PreviewForLog(content), logging.AnsiBase, time.Since(start))
return content, nil
}
// Provider metadata
// Name identifies this provider ("ollama") in logs and provider selection.
func (c ollamaClient) Name() string { return "ollama" }
// DefaultModel returns the model used when a request does not override it.
func (c ollamaClient) DefaultModel() string { return c.defaultModel }
// Streaming support (optional)
// ChatStream sends messages to /api/chat with stream=true and invokes onDelta
// for each non-blank content fragment as NDJSON events arrive. It returns when
// the server marks the stream done, the body ends, or an error occurs.
func (c ollamaClient) ChatStream(ctx context.Context, messages []Message, onDelta func(string), opts ...RequestOption) error {
// Apply request options over defaults.
o := Options{Model: c.defaultModel}
for _, opt := range opts {
opt(&o)
}
if o.Model == "" {
o.Model = c.defaultModel
}
start := time.Now()
c.logStart(true, o, messages)
req := buildOllamaRequest(o, messages, c.defaultTemperature, true)
body, err := json.Marshal(req)
if err != nil {
return err
}
endpoint := c.baseURL + "/api/chat"
logging.Logf("llm/ollama ", "POST %s (stream)", endpoint)
resp, err := c.doJSON(ctx, endpoint, body)
if err != nil {
logging.Logf("llm/ollama ", "%shttp error after %s: %v%s", logging.AnsiRed, time.Since(start), err, logging.AnsiBase)
return err
}
defer resp.Body.Close()
if err := handleOllamaNon2xx(resp, start); err != nil {
return err
}
// Ollama streams newline-delimited JSON objects; json.Decoder consumes
// them one at a time until EOF or a Done event.
dec := json.NewDecoder(resp.Body)
for {
var ev ollamaChatResponse
if err := dec.Decode(&ev); err != nil {
if errors.Is(err, io.EOF) {
break
}
logging.Logf("llm/ollama ", "%sdecode stream error after %s: %v%s", logging.AnsiRed, time.Since(start), err, logging.AnsiBase)
return err
}
if strings.TrimSpace(ev.Error) != "" {
logging.Logf("llm/ollama ", "%sstream event error: %s%s", logging.AnsiRed, ev.Error, logging.AnsiBase)
return fmt.Errorf("ollama stream error: %s", ev.Error)
}
// Whitespace-only fragments are suppressed.
if s := ev.Message.Content; strings.TrimSpace(s) != "" {
onDelta(s)
}
if ev.Done {
break
}
}
logging.Logf("llm/ollama ", "stream end duration=%s", time.Since(start))
return nil
}
// helpers to keep methods small
// logStart forwards request parameters and message previews to the chat logger.
func (c ollamaClient) logStart(stream bool, o Options, messages []Message) {
	entries := make([]struct{ Role, Content string }, 0, len(messages))
	for _, m := range messages {
		entries = append(entries, struct{ Role, Content string }{m.Role, m.Content})
	}
	c.chatLogger.LogStart(stream, o.Model, o.Temperature, o.MaxTokens, o.Stop, entries)
}
// buildOllamaRequest assembles the /api/chat body. An explicit non-zero
// temperature wins over the configured default; MaxTokens maps to Ollama's
// num_predict; the options map is attached only when non-empty.
func buildOllamaRequest(o Options, messages []Message, defaultTemp *float64, stream bool) ollamaChatRequest {
	msgs := make([]oaMessage, len(messages))
	for i := range messages {
		msgs[i] = oaMessage{Role: messages[i].Role, Content: messages[i].Content}
	}
	req := ollamaChatRequest{Model: o.Model, Stream: stream, Messages: msgs}
	sampling := map[string]any{}
	switch {
	case o.Temperature != 0:
		sampling["temperature"] = o.Temperature
	case defaultTemp != nil:
		sampling["temperature"] = *defaultTemp
	}
	if o.MaxTokens > 0 {
		sampling["num_predict"] = o.MaxTokens
	}
	if len(o.Stop) > 0 {
		sampling["stop"] = o.Stop
	}
	if len(sampling) > 0 {
		req.Options = sampling
	}
	return req
}
// doJSON issues a JSON POST of body to url, honoring ctx for cancellation;
// the caller owns the response body.
func (c ollamaClient) doJSON(ctx context.Context, url string, body []byte) (*http.Response, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(body))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/json")
	return c.httpClient.Do(req)
}
// handleOllamaNon2xx returns nil for 2xx responses; otherwise it best-effort
// decodes the error body for a message and returns a descriptive error.
func handleOllamaNon2xx(resp *http.Response, start time.Time) error {
	if code := resp.StatusCode; code >= 200 && code < 300 {
		return nil
	}
	var parsed ollamaChatResponse
	// Decode errors are deliberately ignored; the body may not be JSON.
	_ = json.NewDecoder(resp.Body).Decode(&parsed)
	if msg := strings.TrimSpace(parsed.Error); msg != "" {
		logging.Logf("llm/ollama ", "%sapi error status=%d msg=%s duration=%s%s", logging.AnsiRed, resp.StatusCode, parsed.Error, time.Since(start), logging.AnsiBase)
		return fmt.Errorf("ollama error: %s (status %d)", parsed.Error, resp.StatusCode)
	}
	logging.Logf("llm/ollama ", "%shttp non-2xx status=%d duration=%s%s", logging.AnsiRed, resp.StatusCode, time.Since(start), logging.AnsiBase)
	return fmt.Errorf("ollama http error: status %d", resp.StatusCode)
}
// Summary: OpenAI client implementation for chat completions with optional streaming and detailed logging.
package llm
import (
"bufio"
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"net/http"
"strings"
"time"
"codeberg.org/snonux/hexai/internal/logging"
)
// openAIClient implements Client against OpenAI's Chat Completions API.
type openAIClient struct {
httpClient *http.Client
apiKey string
baseURL string
defaultModel string
chatLogger logging.ChatLogger
defaultTemperature *float64
}
// oaChatRequest is the chat completion request body. MaxCompletionTokens is
// used for model families (gpt-5) that reject the legacy max_tokens field;
// see requiresMaxCompletionTokens.
type oaChatRequest struct {
Model string `json:"model"`
Messages []oaMessage `json:"messages"`
Temperature *float64 `json:"temperature,omitempty"`
MaxTokens *int `json:"max_tokens,omitempty"`
MaxCompletionTokens *int `json:"max_completion_tokens,omitempty"`
Stop []string `json:"stop,omitempty"`
Stream bool `json:"stream,omitempty"`
}
// oaMessage is a single role/content chat turn (shared with the Ollama client).
type oaMessage struct {
Role string `json:"role"`
Content string `json:"content"`
}
// oaChatResponse is the non-streaming response; Error is populated instead of
// Choices on API-level failures.
type oaChatResponse struct {
Choices []struct {
Index int `json:"index"`
Message struct {
Role string `json:"role"`
Content string `json:"content"`
} `json:"message"`
FinishReason string `json:"finish_reason"`
} `json:"choices"`
Error *struct {
Message string `json:"message"`
Type string `json:"type"`
Param any `json:"param"`
Code any `json:"code"`
} `json:"error,omitempty"`
}
// Streaming response chunk type (SSE)
type oaStreamChunk struct {
Choices []struct {
Delta struct {
Content string `json:"content"`
} `json:"delta"`
FinishReason string `json:"finish_reason"`
} `json:"choices"`
Error *struct {
Message string `json:"message"`
Type string `json:"type"`
Param any `json:"param"`
Code any `json:"code"`
} `json:"error,omitempty"`
}
// Constructor (kept among the first functions by convention)
// newOpenAI constructs an OpenAI client using explicit configuration values.
// The apiKey may be empty; calls will fail until a valid key is supplied.
// The base URL is normalized by stripping any trailing slash — matching the
// Copilot and Ollama constructors — so joins like baseURL+"/chat/completions"
// never produce a double slash.
func newOpenAI(baseURL, model, apiKey string, defaultTemp *float64) Client {
	if strings.TrimSpace(baseURL) == "" {
		baseURL = "https://api.openai.com/v1"
	}
	if strings.TrimSpace(model) == "" {
		model = "gpt-4.1"
	}
	return openAIClient{
		httpClient:         &http.Client{Timeout: 30 * time.Second},
		apiKey:             apiKey,
		baseURL:            strings.TrimRight(baseURL, "/"),
		defaultModel:       model,
		chatLogger:         logging.NewChatLogger("openai"),
		defaultTemperature: defaultTemp,
	}
}
// Chat sends messages to the chat completions endpoint (non-streaming) and
// returns the first choice's content. The model defaults to the client's
// configured model when opts do not override it.
func (c openAIClient) Chat(ctx context.Context, messages []Message, opts ...RequestOption) (string, error) {
if c.apiKey == "" {
return nilStringErr("missing OpenAI API key")
}
// Apply request options over defaults.
o := Options{Model: c.defaultModel}
for _, opt := range opts {
opt(&o)
}
if o.Model == "" {
o.Model = c.defaultModel
}
start := time.Now()
c.logStart(false, o, messages)
req := buildOAChatRequest(o, messages, c.defaultTemperature, false, "llm/openai ")
body, err := json.Marshal(req)
if err != nil {
c.logf("marshal error: %v", err)
return "", err
}
endpoint := c.baseURL + "/chat/completions"
logging.Logf("llm/openai ", "POST %s", endpoint)
resp, err := c.doJSON(ctx, endpoint, body, map[string]string{
"Authorization": "Bearer " + c.apiKey,
})
if err != nil {
logging.Logf("llm/openai ", "%shttp error after %s: %v%s", logging.AnsiRed, time.Since(start), err, logging.AnsiBase)
return "", err
}
defer resp.Body.Close()
if err := handleOpenAINon2xx(resp, start, "llm/openai ", "openai"); err != nil {
return "", err
}
out, err := decodeOpenAIChat(resp, start, "llm/openai ")
if err != nil {
return "", err
}
if len(out.Choices) == 0 {
logging.Logf("llm/openai ", "%sno choices returned duration=%s%s", logging.AnsiRed, time.Since(start), logging.AnsiBase)
return "", errors.New("openai: no choices returned")
}
// Only the first choice is surfaced; additional choices are ignored.
content := out.Choices[0].Message.Content
logging.Logf("llm/openai ", "success choice=0 finish=%s size=%d preview=%s%s%s duration=%s", out.Choices[0].FinishReason, len(content), logging.AnsiGreen, logging.PreviewForLog(content), logging.AnsiBase, time.Since(start))
return content, nil
}
// Provider metadata
// Name identifies this provider ("openai") in logs and provider selection.
func (c openAIClient) Name() string { return "openai" }
// DefaultModel returns the model used when a request does not override it.
func (c openAIClient) DefaultModel() string { return c.defaultModel }
// Streaming support (optional)
// ChatStream sends messages with stream=true and invokes onDelta for each SSE
// content fragment until the stream ends ([DONE]) or an error occurs.
func (c openAIClient) ChatStream(ctx context.Context, messages []Message, onDelta func(string), opts ...RequestOption) error {
if c.apiKey == "" {
return errors.New("missing OpenAI API key")
}
// Apply request options over defaults.
o := Options{Model: c.defaultModel}
for _, opt := range opts {
opt(&o)
}
if o.Model == "" {
o.Model = c.defaultModel
}
start := time.Now()
c.logStart(true, o, messages)
req := buildOAChatRequest(o, messages, c.defaultTemperature, true, "llm/openai ")
body, err := json.Marshal(req)
if err != nil {
c.logf("marshal error: %v", err)
return err
}
endpoint := c.baseURL + "/chat/completions"
logging.Logf("llm/openai ", "POST %s (stream)", endpoint)
// Accept: text/event-stream selects the SSE response framing.
resp, err := c.doJSONWithAccept(ctx, endpoint, body, map[string]string{
"Authorization": "Bearer " + c.apiKey,
}, "text/event-stream")
if err != nil {
logging.Logf("llm/openai ", "%shttp error after %s: %v%s", logging.AnsiRed, time.Since(start), err, logging.AnsiBase)
return err
}
defer resp.Body.Close()
if err := handleOpenAINon2xx(resp, start, "llm/openai ", "openai"); err != nil {
return err
}
if err := parseOpenAIStream(resp, start, onDelta, "llm/openai ", "openai"); err != nil {
return err
}
logging.Logf("llm/openai ", "stream end duration=%s", time.Since(start))
return nil
}
// Private helpers
// logf logs with the OpenAI client's log prefix.
func (c openAIClient) logf(format string, args ...any) { logging.Logf("llm/openai ", format, args...) }
// helpers extracted to keep methods small
// logStart forwards request parameters and message previews to the chat logger.
func (c openAIClient) logStart(stream bool, o Options, messages []Message) {
logMessages := make([]struct{ Role, Content string }, len(messages))
for i, m := range messages {
logMessages[i] = struct{ Role, Content string }{m.Role, m.Content}
}
c.chatLogger.LogStart(stream, o.Model, o.Temperature, o.MaxTokens, o.Stop, logMessages)
}
// buildOAChatRequest assembles an OpenAI-style chat request. An explicit
// non-zero temperature wins over the configured default (so temperature 0
// cannot be requested explicitly). For gpt-5 family models the token limit is
// sent as max_completion_tokens and the temperature is forced to 1.0, per the
// API's constraints for those models.
func buildOAChatRequest(o Options, messages []Message, defaultTemp *float64, stream bool, logPrefix string) oaChatRequest {
req := oaChatRequest{Model: o.Model, Stream: stream}
req.Messages = make([]oaMessage, len(messages))
for i, m := range messages {
req.Messages[i] = oaMessage{Role: m.Role, Content: m.Content}
}
if o.Temperature != 0 {
req.Temperature = &o.Temperature
} else if defaultTemp != nil {
t := *defaultTemp
req.Temperature = &t
}
if o.MaxTokens > 0 {
if requiresMaxCompletionTokens(o.Model) {
req.MaxCompletionTokens = &o.MaxTokens
} else {
req.MaxTokens = &o.MaxTokens
}
}
if len(o.Stop) > 0 {
req.Stop = o.Stop
}
// Enforce gpt-5 temperature constraints: only default (1.0) is supported.
// This runs last so it overrides whatever was chosen above.
if requiresMaxCompletionTokens(o.Model) {
if req.Temperature == nil || *req.Temperature != 1.0 {
t := 1.0
req.Temperature = &t
logging.Logf(logPrefix, "forcing temperature=1.0 for model=%s (gpt-5 constraint)", o.Model)
}
}
return req
}
// requiresMaxCompletionTokens reports whether model belongs to a family
// (currently gpt-5*) that expects "max_completion_tokens" rather than the
// legacy "max_tokens" parameter, per OpenAI's API error guidance. Matching is
// case-insensitive and ignores surrounding whitespace.
func requiresMaxCompletionTokens(model string) bool {
	normalized := strings.ToLower(strings.TrimSpace(model))
	return strings.HasPrefix(normalized, "gpt-5")
}
// doJSON issues a JSON POST of body to url with extra headers, honoring ctx;
// the caller owns the response body.
func (c openAIClient) doJSON(ctx context.Context, url string, body []byte, headers map[string]string) (*http.Response, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(body))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/json")
	for name, value := range headers {
		req.Header.Set(name, value)
	}
	return c.httpClient.Do(req)
}
// doJSONWithAccept is doJSON plus an explicit Accept header (e.g.
// "text/event-stream" for SSE streaming). Extra headers may still override it.
func (c openAIClient) doJSONWithAccept(ctx context.Context, url string, body []byte, headers map[string]string, accept string) (*http.Response, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(body))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Accept", accept)
	for name, value := range headers {
		req.Header.Set(name, value)
	}
	return c.httpClient.Do(req)
}
// handleOpenAINon2xx returns nil for 2xx responses; otherwise it best-effort
// decodes the OpenAI-style error body and returns a descriptive error tagged
// with the provider name (shared with OpenRouter).
func handleOpenAINon2xx(resp *http.Response, start time.Time, logPrefix, provider string) error {
	if code := resp.StatusCode; code >= 200 && code < 300 {
		return nil
	}
	var parsed oaChatResponse
	// Decode errors are deliberately ignored; the body may not be JSON.
	_ = json.NewDecoder(resp.Body).Decode(&parsed)
	if e := parsed.Error; e != nil && e.Message != "" {
		logging.Logf(logPrefix, "%sapi error status=%d type=%s msg=%s duration=%s%s", logging.AnsiRed, resp.StatusCode, e.Type, e.Message, time.Since(start), logging.AnsiBase)
		return fmt.Errorf("%s error: %s (status %d)", provider, e.Message, resp.StatusCode)
	}
	logging.Logf(logPrefix, "%shttp non-2xx status=%d duration=%s%s", logging.AnsiRed, resp.StatusCode, time.Since(start), logging.AnsiBase)
	return fmt.Errorf("%s http error: status %d", provider, resp.StatusCode)
}
// decodeOpenAIChat decodes a chat-completions JSON response body,
// logging and returning any decode failure.
func decodeOpenAIChat(resp *http.Response, start time.Time, logPrefix string) (oaChatResponse, error) {
	var decoded oaChatResponse
	err := json.NewDecoder(resp.Body).Decode(&decoded)
	if err == nil {
		return decoded, nil
	}
	logging.Logf(logPrefix, "%sdecode error after %s: %v%s", logging.AnsiRed, time.Since(start), err, logging.AnsiBase)
	return oaChatResponse{}, err
}
// parseOpenAIStream consumes an OpenAI-style SSE response body, calling
// onDelta for each non-empty streamed content fragment. It returns nil on a
// normal "[DONE]" terminator or clean EOF, and an error when the stream
// carries an embedded error object or the reader fails.
func parseOpenAIStream(resp *http.Response, start time.Time, onDelta func(string), logPrefix, provider string) error {
	// Parse SSE: lines starting with "data: " containing JSON or [DONE]
	scanner := bufio.NewScanner(resp.Body)
	// Raise the scanner limit to 1 MiB; the default 64 KiB can be too small
	// for a single large streamed chunk.
	const maxBuf = 1024 * 1024
	buf := make([]byte, 0, 64*1024)
	scanner.Buffer(buf, maxBuf)
	for scanner.Scan() {
		line := scanner.Text()
		if !strings.HasPrefix(line, "data: ") {
			// Ignore blank keep-alives and any non-data SSE fields.
			continue
		}
		payload := strings.TrimPrefix(line, "data: ")
		if strings.TrimSpace(payload) == "[DONE]" {
			// Normal end-of-stream sentinel.
			break
		}
		var chunk oaStreamChunk
		if err := json.Unmarshal([]byte(payload), &chunk); err != nil {
			// Skip malformed fragments rather than aborting the whole stream.
			continue
		}
		if chunk.Error != nil && chunk.Error.Message != "" {
			logging.Logf(logPrefix, "%sstream error: %s%s", logging.AnsiRed, chunk.Error.Message, logging.AnsiBase)
			return fmt.Errorf("%s stream error: %s", provider, chunk.Error.Message)
		}
		for _, ch := range chunk.Choices {
			if ch.Delta.Content != "" {
				onDelta(ch.Delta.Content)
			}
		}
	}
	if err := scanner.Err(); err != nil {
		logging.Logf(logPrefix, "%sstream read error after %s: %v%s", logging.AnsiRed, time.Since(start), err, logging.AnsiBase)
		return err
	}
	return nil
}
// Summary: OpenRouter client implementation leveraging OpenAI-compatible helpers with provider-specific headers.
package llm
import (
"bytes"
"context"
"encoding/json"
"errors"
"net/http"
"strings"
"time"
"codeberg.org/snonux/hexai/internal/logging"
)
// openRouterClient talks to the OpenRouter chat-completions API. It reuses
// the OpenAI-compatible request/response helpers in this package and only
// adds OpenRouter-specific attribution headers.
type openRouterClient struct {
	httpClient         *http.Client
	apiKey             string
	baseURL            string // API root, e.g. https://openrouter.ai/api/v1
	defaultModel       string // used when a request does not set a model
	chatLogger         logging.ChatLogger
	defaultTemperature *float64 // nil when no default temperature configured
}
// newOpenRouter constructs an OpenRouter client, filling in the public API
// endpoint and the auto-routing model when the caller leaves them blank.
func newOpenRouter(baseURL, model, apiKey string, defaultTemp *float64) Client {
	const (
		fallbackBaseURL = "https://openrouter.ai/api/v1"
		fallbackModel   = "openrouter/auto"
	)
	if strings.TrimSpace(baseURL) == "" {
		baseURL = fallbackBaseURL
	}
	if strings.TrimSpace(model) == "" {
		model = fallbackModel
	}
	client := openRouterClient{
		httpClient:         &http.Client{Timeout: 30 * time.Second},
		apiKey:             apiKey,
		baseURL:            baseURL,
		defaultModel:       model,
		chatLogger:         logging.NewChatLogger("openrouter"),
		defaultTemperature: defaultTemp,
	}
	return client
}
// Chat sends a blocking chat-completions request to OpenRouter and returns
// the text of the first choice. Request options default to the client's
// configured model; a missing API key fails fast without a network call.
func (c openRouterClient) Chat(ctx context.Context, messages []Message, opts ...RequestOption) (string, error) {
	if strings.TrimSpace(c.apiKey) == "" {
		return nilStringErr("missing OpenRouter API key")
	}
	reqOpts := Options{Model: c.defaultModel}
	for _, apply := range opts {
		apply(&reqOpts)
	}
	if strings.TrimSpace(reqOpts.Model) == "" {
		reqOpts.Model = c.defaultModel
	}
	started := time.Now()
	c.logStart(false, reqOpts, messages)
	payload, err := json.Marshal(buildOAChatRequest(reqOpts, messages, c.defaultTemperature, false, "llm/openrouter "))
	if err != nil {
		c.logf("marshal error: %v", err)
		return "", err
	}
	endpoint := strings.TrimRight(c.baseURL, "/") + "/chat/completions"
	logging.Logf("llm/openrouter ", "POST %s", endpoint)
	resp, err := c.doJSON(ctx, endpoint, payload)
	if err != nil {
		logging.Logf("llm/openrouter ", "%shttp error after %s: %v%s", logging.AnsiRed, time.Since(started), err, logging.AnsiBase)
		return "", err
	}
	defer resp.Body.Close()
	if err := handleOpenAINon2xx(resp, started, "llm/openrouter ", "openrouter"); err != nil {
		return "", err
	}
	decoded, err := decodeOpenAIChat(resp, started, "llm/openrouter ")
	if err != nil {
		return "", err
	}
	if len(decoded.Choices) == 0 {
		logging.Logf("llm/openrouter ", "%sno choices returned duration=%s%s", logging.AnsiRed, time.Since(started), logging.AnsiBase)
		return "", errors.New("openrouter: no choices returned")
	}
	first := decoded.Choices[0]
	logging.Logf("llm/openrouter ", "success choice=0 finish=%s size=%d preview=%s%s%s duration=%s", first.FinishReason, len(first.Message.Content), logging.AnsiGreen, logging.PreviewForLog(first.Message.Content), logging.AnsiBase, time.Since(started))
	return first.Message.Content, nil
}
// Name returns the provider's short name used in logs and provider selection.
func (c openRouterClient) Name() string { return "openrouter" }

// DefaultModel returns the model used when a request does not override it.
func (c openRouterClient) DefaultModel() string { return c.defaultModel }
// ChatStream streams a chat-completions response from OpenRouter, invoking
// onDelta for each incremental content chunk until the stream completes or
// fails. Request options default to the client's configured model.
func (c openRouterClient) ChatStream(ctx context.Context, messages []Message, onDelta func(string), opts ...RequestOption) error {
	if strings.TrimSpace(c.apiKey) == "" {
		return errors.New("missing OpenRouter API key")
	}
	reqOpts := Options{Model: c.defaultModel}
	for _, apply := range opts {
		apply(&reqOpts)
	}
	if strings.TrimSpace(reqOpts.Model) == "" {
		reqOpts.Model = c.defaultModel
	}
	started := time.Now()
	c.logStart(true, reqOpts, messages)
	payload, err := json.Marshal(buildOAChatRequest(reqOpts, messages, c.defaultTemperature, true, "llm/openrouter "))
	if err != nil {
		c.logf("marshal error: %v", err)
		return err
	}
	endpoint := strings.TrimRight(c.baseURL, "/") + "/chat/completions"
	logging.Logf("llm/openrouter ", "POST %s (stream)", endpoint)
	resp, err := c.doJSONWithAccept(ctx, endpoint, payload, "text/event-stream")
	if err != nil {
		logging.Logf("llm/openrouter ", "%shttp error after %s: %v%s", logging.AnsiRed, time.Since(started), err, logging.AnsiBase)
		return err
	}
	defer resp.Body.Close()
	if err := handleOpenAINon2xx(resp, started, "llm/openrouter ", "openrouter"); err != nil {
		return err
	}
	if err := parseOpenAIStream(resp, started, onDelta, "llm/openrouter ", "openrouter"); err != nil {
		return err
	}
	logging.Logf("llm/openrouter ", "stream end duration=%s", time.Since(started))
	return nil
}
// logf writes a formatted message under the fixed openrouter log prefix.
func (c openRouterClient) logf(format string, args ...any) {
	logging.Logf("llm/openrouter ", format, args...)
}
// logStart records the request parameters and message previews before a
// chat or stream request is dispatched.
func (c openRouterClient) logStart(stream bool, o Options, messages []Message) {
	entries := make([]struct{ Role, Content string }, 0, len(messages))
	for _, m := range messages {
		entries = append(entries, struct{ Role, Content string }{m.Role, m.Content})
	}
	c.chatLogger.LogStart(stream, o.Model, o.Temperature, o.MaxTokens, o.Stop, entries)
}
// doJSON issues a JSON POST with OpenRouter attribution headers. It delegates
// to doJSONWithAccept with an empty Accept (which doJSONWithHeaders skips),
// so the attribution header set is defined in exactly one place instead of
// being duplicated here.
func (c openRouterClient) doJSON(ctx context.Context, url string, body []byte) (*http.Response, error) {
	return c.doJSONWithAccept(ctx, url, body, "")
}
// doJSONWithAccept issues a JSON POST with the OpenRouter attribution headers
// and an optional Accept header (e.g. "text/event-stream" when streaming).
func (c openRouterClient) doJSONWithAccept(ctx context.Context, url string, body []byte, accept string) (*http.Response, error) {
	attribution := map[string]string{
		"Authorization": "Bearer " + c.apiKey,
		"HTTP-Referer":  "https://github.com/snonux/hexai",
		"X-Title":       "Hexai",
	}
	return c.doJSONWithHeaders(ctx, url, body, attribution, accept)
}
// doJSONWithHeaders builds and executes a JSON POST, setting the Accept
// header only when non-blank, followed by any caller-supplied headers.
func (c openRouterClient) doJSONWithHeaders(ctx context.Context, url string, body []byte, headers map[string]string, accept string) (*http.Response, error) {
	httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(body))
	if err != nil {
		return nil, err
	}
	httpReq.Header.Set("Content-Type", "application/json")
	if strings.TrimSpace(accept) != "" {
		httpReq.Header.Set("Accept", accept)
	}
	for name, value := range headers {
		httpReq.Header.Set(name, value)
	}
	return c.httpClient.Do(httpReq)
}
// Summary: LLM provider interfaces, request options, configuration, and factory to build a client from config.
package llm
import (
"context"
"errors"
"strings"
)
// Message represents a chat-style prompt message.
type Message struct {
	Role    string // e.g. "system", "user", "assistant"
	Content string // the message text
}

// Client is a minimal LLM provider interface.
// Future providers (Ollama, etc.) should implement this.
type Client interface {
	// Chat sends chat messages and returns the assistant text.
	Chat(ctx context.Context, messages []Message, opts ...RequestOption) (string, error)
	// Name returns the provider's short name (e.g., "openai", "ollama").
	Name() string
	// DefaultModel returns the configured default model name.
	DefaultModel() string
}

// Streamer is an optional interface that providers may implement to support
// token-by-token streaming responses. Callers can type-assert to Streamer and
// fall back to Client.Chat when not implemented.
type Streamer interface {
	// ChatStream sends chat messages and invokes onDelta with incremental text
	// chunks as they are produced by the model. Implementations should call
	// onDelta with empty strings sparingly (prefer only non-empty chunks).
	ChatStream(ctx context.Context, messages []Message, onDelta func(string), opts ...RequestOption) error
}

// CodeCompleter is an optional interface for providers that support a
// prompt/suffix code-completion API (e.g., Copilot Codex endpoint). Clients
// can type-assert to this and prefer it over chat when available.
type CodeCompleter interface {
	// CodeCompletion requests up to n suggestions given a left-hand prompt and
	// right-hand suffix around the cursor. Language is advisory and may be
	// ignored. Temperature applies when the provider supports it.
	CodeCompletion(ctx context.Context, prompt string, suffix string, n int, language string, temperature float64) ([]string, error)
}
// Options for a request. Providers may ignore unsupported fields.
type Options struct {
	Model       string
	Temperature float64
	MaxTokens   int
	Stop        []string
}

// RequestOption mutates Options.
type RequestOption func(*Options)

// WithModel selects the model for a single request.
func WithModel(model string) RequestOption { return func(o *Options) { o.Model = model } }

// WithTemperature sets the sampling temperature for a single request.
func WithTemperature(t float64) RequestOption { return func(o *Options) { o.Temperature = t } }

// WithMaxTokens caps the number of generated tokens for a single request.
func WithMaxTokens(n int) RequestOption { return func(o *Options) { o.MaxTokens = n } }

// WithStop sets the stop sequences, copying the slice so later mutation of
// the caller's arguments cannot affect the request.
func WithStop(stop ...string) RequestOption {
	return func(o *Options) {
		copied := make([]string, len(stop))
		copy(copied, stop)
		o.Stop = copied
	}
}
// Config defines provider configuration read from the Hexai config file.
// Provider selects which of the per-provider sections below is consulted.
// Temperature fields are pointers so "unset" can be distinguished from an
// explicit 0; NewFromConfig fills in defaults for nil values.
type Config struct {
	Provider string
	// OpenAI options
	OpenAIBaseURL     string
	OpenAIModel       string
	OpenAITemperature *float64
	// OpenRouter options
	OpenRouterBaseURL     string
	OpenRouterModel       string
	OpenRouterTemperature *float64
	// Ollama options
	OllamaBaseURL     string
	OllamaModel       string
	OllamaTemperature *float64
	// Copilot options
	CopilotBaseURL     string
	CopilotModel       string
	CopilotTemperature *float64
}
// NewFromConfig creates an LLM client using only the supplied configuration.
// API keys are supplied separately and may be read from the environment by
// the caller; other environment-based configuration is not used. The provider
// defaults to "openai" when unset; an unknown provider yields an error.
func NewFromConfig(cfg Config, openAIAPIKey, openRouterAPIKey, copilotAPIKey string) (Client, error) {
	p := strings.ToLower(strings.TrimSpace(cfg.Provider))
	if p == "" {
		p = "openai"
	}
	switch p {
	case "openai":
		if strings.TrimSpace(openAIAPIKey) == "" {
			return nil, errors.New("missing OPENAI_API_KEY for provider openai")
		}
		// Default temperature selection:
		// - gpt-5* models prefer 1.0 by default (more exploratory).
		// - Everything else defaults to 0.2 (coding friendly).
		// The app-wide defaults currently set provider temps to 0.2, so a 0.2
		// value on a gpt-5* model is treated as "not explicitly overridden"
		// and upgraded to 1.0.
		model := strings.ToLower(strings.TrimSpace(cfg.OpenAIModel))
		if strings.HasPrefix(model, "gpt-5") {
			if cfg.OpenAITemperature == nil || *cfg.OpenAITemperature == 0.2 {
				v := 1.0
				cfg.OpenAITemperature = &v
			}
		} else if cfg.OpenAITemperature == nil {
			v := 0.2
			cfg.OpenAITemperature = &v
		}
		return newOpenAI(cfg.OpenAIBaseURL, cfg.OpenAIModel, openAIAPIKey, cfg.OpenAITemperature), nil
	case "openrouter":
		if strings.TrimSpace(openRouterAPIKey) == "" {
			return nil, errors.New("missing OPENROUTER_API_KEY for provider openrouter")
		}
		cfg.OpenRouterTemperature = orDefaultTemp(cfg.OpenRouterTemperature)
		return newOpenRouter(cfg.OpenRouterBaseURL, cfg.OpenRouterModel, openRouterAPIKey, cfg.OpenRouterTemperature), nil
	case "ollama":
		cfg.OllamaTemperature = orDefaultTemp(cfg.OllamaTemperature)
		return newOllama(cfg.OllamaBaseURL, cfg.OllamaModel, cfg.OllamaTemperature), nil
	case "copilot":
		if strings.TrimSpace(copilotAPIKey) == "" {
			return nil, errors.New("missing COPILOT_API_KEY for provider copilot")
		}
		cfg.CopilotTemperature = orDefaultTemp(cfg.CopilotTemperature)
		return newCopilot(cfg.CopilotBaseURL, cfg.CopilotModel, copilotAPIKey, cfg.CopilotTemperature), nil
	default:
		return nil, errors.New("unknown LLM provider: " + p)
	}
}

// orDefaultTemp returns t unchanged when set, otherwise a pointer to the
// coding-friendly default temperature of 0.2.
func orDefaultTemp(t *float64) *float64 {
	if t != nil {
		return t
	}
	v := 0.2
	return &v
}
package llm
import "errors"
// nilStringErr keeps (string, error) returns terse: it pairs an empty string
// with a fresh error carrying msg.
func nilStringErr(msg string) (string, error) {
	return "", errors.New(msg)
}
package llmutils
import (
"os"
"strings"
"codeberg.org/snonux/hexai/internal/appconfig"
"codeberg.org/snonux/hexai/internal/llm"
)
// NewClientFromApp builds an llm.Client using app config and environment keys.
// For each provider the HEXAI_-prefixed variable takes precedence over the
// generic one (e.g. HEXAI_OPENAI_API_KEY over OPENAI_API_KEY).
func NewClientFromApp(cfg appconfig.App) (llm.Client, error) {
	llmCfg := llm.Config{
		Provider:              cfg.Provider,
		OpenAIBaseURL:         cfg.OpenAIBaseURL,
		OpenAIModel:           cfg.OpenAIModel,
		OpenAITemperature:     cfg.OpenAITemperature,
		OpenRouterBaseURL:     cfg.OpenRouterBaseURL,
		OpenRouterModel:       cfg.OpenRouterModel,
		OpenRouterTemperature: cfg.OpenRouterTemperature,
		OllamaBaseURL:         cfg.OllamaBaseURL,
		OllamaModel:           cfg.OllamaModel,
		OllamaTemperature:     cfg.OllamaTemperature,
		CopilotBaseURL:        cfg.CopilotBaseURL,
		CopilotModel:          cfg.CopilotModel,
		CopilotTemperature:    cfg.CopilotTemperature,
	}
	return llm.NewFromConfig(llmCfg,
		firstNonBlankEnv("HEXAI_OPENAI_API_KEY", "OPENAI_API_KEY"),
		firstNonBlankEnv("HEXAI_OPENROUTER_API_KEY", "OPENROUTER_API_KEY"),
		firstNonBlankEnv("HEXAI_COPILOT_API_KEY", "COPILOT_API_KEY"))
}

// firstNonBlankEnv returns the value of the first listed environment variable
// whose value is not blank, or "" when none is set. (Blank-only values are
// dropped; llm.NewFromConfig trims keys before checking them anyway.)
func firstNonBlankEnv(names ...string) string {
	for _, name := range names {
		if v := os.Getenv(name); strings.TrimSpace(v) != "" {
			return v
		}
	}
	return ""
}
package logging
// ChatLogger provides a structured way to log chat interactions for one LLM
// provider; the provider name becomes part of the "llm/<provider>" log prefix.
type ChatLogger struct {
	Provider string // short provider name, e.g. "openrouter"
}
// NewChatLogger creates a new ChatLogger for a given provider. The zero
// value is also usable but would log under an empty provider name.
func NewChatLogger(provider string) ChatLogger {
	return ChatLogger{Provider: provider}
}
// LogStart logs the beginning of a chat or stream interaction: one summary
// line with the request parameters, then one preview line per message.
func (cl ChatLogger) LogStart(stream bool, model string, temp float64, maxTokens int, stop []string, messages []struct {
	Role    string
	Content string
},
) {
	mode := "chat"
	if stream {
		mode = "stream"
	}
	prefix := "llm/" + cl.Provider + " "
	Logf(prefix, "%s start model=%s temp=%.2f max_tokens=%d stop=%d messages=%d",
		mode, model, temp, maxTokens, len(stop), len(messages))
	for i, m := range messages {
		Logf(prefix, "msg[%d] role=%s size=%d preview=%s%s%s",
			i, m.Role, len(m.Content), AnsiCyan, PreviewForLog(m.Content), AnsiBase)
	}
}
// Summary: ANSI-styled logging utilities with a bound standard logger and configurable preview truncation.
package logging
import (
"fmt"
"log"
)
// ANSI color utilities shared across Hexai.
const (
AnsiBgBlack = "\x1b[40m"
AnsiGrey = "\x1b[90m"
AnsiCyan = "\x1b[36m"
AnsiGreen = "\x1b[32m"
AnsiYellow = "\x1b[33m"
AnsiRed = "\x1b[31m"
AnsiReset = "\x1b[0m"
)
// AnsiBase is the default style: black background + grey foreground.
const AnsiBase = AnsiBgBlack + AnsiGrey
// singleton logger used across the codebase
var std *log.Logger
// Bind sets the underlying standard logger to use for Logf.
func Bind(l *log.Logger) { std = l }
// Logf prints a formatted message with a module prefix and base ANSI style.
func Logf(prefix, format string, args ...any) {
if std == nil {
return
}
msg := fmt.Sprintf(format, args...)
std.Print(AnsiBase + prefix + msg + AnsiReset)
}
// Logging configuration for previews (shared)
var logPreviewLimit int // 0 means unlimited

// SetLogPreviewLimit sets the maximum number of characters to log for
// request/response previews. Set to 0 for unlimited.
func SetLogPreviewLimit(n int) { logPreviewLimit = n }

// PreviewForLog returns s truncated to the configured preview limit with a
// trailing ellipsis. The cut is moved back to the nearest UTF-8 rune start so
// a multi-byte character is never split, which would otherwise emit invalid
// UTF-8 into the logs.
func PreviewForLog(s string) string {
	if logPreviewLimit <= 0 || len(s) <= logPreviewLimit {
		return s
	}
	cut := logPreviewLimit
	// Back up while the byte at the cut is a UTF-8 continuation byte (10xxxxxx).
	for cut > 0 && s[cut]&0xC0 == 0x80 {
		cut--
	}
	return s[:cut] + "…"
}
package lsp
import (
"fmt"
"strings"
"codeberg.org/snonux/hexai/internal/runtimeconfig"
)
// chatCommandResult carries the text shown to the user after handling an
// in-editor slash command.
type chatCommandResult struct {
	message string
}
// chatCommandResponse dispatches in-editor slash commands such as "/help".
// It reports false when the prompt is not a slash command so normal chat
// handling can proceed; unknown commands still consume the prompt.
func (s *Server) chatCommandResponse(uri string, lineIdx int, prompt string) (chatCommandResult, bool) {
	cmd := strings.TrimSpace(s.stripTrailingTrigger(prompt))
	if cmd == "" || !strings.HasPrefix(cmd, "/") {
		return chatCommandResult{}, false
	}
	switch {
	case strings.HasPrefix(cmd, "/reload"):
		return s.handleReloadCommand(), true
	case strings.HasPrefix(cmd, "/help"):
		return s.handleHelpCommand(), true
	case strings.HasPrefix(cmd, "/disable"):
		return s.handleDisableCompletionCommand(), true
	case strings.HasPrefix(cmd, "/enable"):
		return s.handleEnableCompletionCommand(), true
	}
	return chatCommandResult{message: fmt.Sprintf("Unknown command %q. Try /help?>", cmd)}, true
}
// handleHelpCommand lists the supported slash commands. The unknown-command
// message points users at /help, so the listing includes /help itself.
func (s *Server) handleHelpCommand() chatCommandResult {
	lines := []string{
		"Available slash commands:",
		"- /help?> show this help",
		"- /reload?> reload configuration from file (ignores env overrides)",
		"- /disable?> disable auto-completions for this session",
		"- /enable?> re-enable auto-completions",
	}
	return chatCommandResult{message: strings.Join(lines, "\n")}
}
// handleReloadCommand re-reads configuration from disk, deliberately ignoring
// environment overrides, and reports a human-readable change summary.
func (s *Server) handleReloadCommand() chatCommandResult {
	if s.configStore == nil {
		return chatCommandResult{message: "Reload unavailable: no config store"}
	}
	opts := s.configLoadOpts
	opts.IgnoreEnv = true
	changes, err := s.configStore.Reload(s.logger, opts)
	if err != nil {
		s.logger.Printf("config reload failed: %v", err)
		return chatCommandResult{message: fmt.Sprintf("Reload failed: %v", err)}
	}
	summary := runtimeconfig.FormatSummary("Reloaded config", changes)
	s.logger.Print(summary)
	return chatCommandResult{message: summary}
}
// handleDisableCompletionCommand turns off auto-completions for the session,
// reporting whether they were already off.
func (s *Server) handleDisableCompletionCommand() chatCommandResult {
	wasDisabled := s.setCompletionsDisabled(true)
	if wasDisabled {
		return chatCommandResult{message: "Auto-completions were already disabled."}
	}
	return chatCommandResult{message: "Auto-completions disabled. Use /enable?> to restore."}
}
// handleEnableCompletionCommand re-enables auto-completions, reporting
// whether they were already on.
func (s *Server) handleEnableCompletionCommand() chatCommandResult {
	wasDisabled := s.setCompletionsDisabled(false)
	if !wasDisabled {
		return chatCommandResult{message: "Auto-completions are already enabled."}
	}
	return chatCommandResult{message: "Auto-completions enabled."}
}
// Summary: Builds additional context snippets based on configured mode and truncates text by token heuristic.
package lsp
import (
"strings"
"codeberg.org/snonux/hexai/internal/logging"
)
// buildAdditionalContext builds extra context messages based on the
// configured mode. Modes:
//   - minimal: no extra context
//   - window: include a window of lines around the cursor
//   - file-on-new-func: include the full file only when defining a new function
//   - always-full: always include the full file
//
// Unknown modes fall back to minimal.
func (s *Server) buildAdditionalContext(newFunc bool, uri string, pos Position) (string, bool) {
	switch s.contextMode() {
	case "window":
		return s.windowContext(uri, pos), true
	case "file-on-new-func":
		if !newFunc {
			return "", false
		}
		return s.fullFileContext(uri), true
	case "always-full":
		return s.fullFileContext(uri), true
	default:
		// "minimal" and any unrecognized mode contribute no extra context.
		return "", false
	}
}
// windowContext returns up to windowLines() lines centered on the cursor,
// truncated to the configured context-token budget. It returns "" when the
// document is not open.
func (s *Server) windowContext(uri string, pos Position) string {
	doc := s.getDocument(uri)
	if doc == nil || len(doc.lines) == 0 {
		logging.Logf("lsp ", "context: window requested but document not open; skipping uri=%s", uri)
		return ""
	}
	half := s.windowLines() / 2
	lo, hi := pos.Line-half, pos.Line+half+1
	if lo < 0 {
		lo = 0
	}
	if total := len(doc.lines); hi > total {
		hi = total
	}
	window := strings.Join(doc.lines[lo:hi], "\n")
	return truncateToApproxTokens(window, s.maxContextTokens())
}
// fullFileContext returns the entire document text within the configured
// context-token budget, or "" when the document is not open.
func (s *Server) fullFileContext(uri string) string {
	doc := s.getDocument(uri)
	if doc == nil {
		logging.Logf("lsp ", "context: full-file requested but document not open; skipping uri=%s", uri)
		return ""
	}
	return truncateToApproxTokens(doc.text, s.maxContextTokens())
}
// truncateToApproxTokens naively truncates the input to fit approximately
// maxTokens tokens, using a 4-chars-per-token heuristic for speed and
// determinism. When truncation is needed it prefers to cut at the last line
// boundary within the character budget; a non-positive budget yields "".
func truncateToApproxTokens(text string, maxTokens int) string {
	if maxTokens <= 0 {
		return ""
	}
	maxChars := maxTokens * 4
	if len(text) <= maxChars {
		return text
	}
	// len(text) > maxChars here, so maxChars is always a valid cut point
	// (the former "cut > len(text)" clamp was unreachable and is removed).
	cut := maxChars
	if i := strings.LastIndex(text[:cut], "\n"); i > 0 {
		cut = i
	}
	return text[:cut]
}
// Summary: In-memory document model for the LSP; tracks text, lines, and applies edits.
package lsp
import (
"strings"
"time"
)
// document is the in-memory copy of an open editor buffer.
type document struct {
	uri   string   // LSP document URI
	text  string   // full document text
	lines []string // text split into lines for positional lookups
}
// setDocument stores (or replaces) the cached copy of an open document.
func (s *Server) setDocument(uri, text string) {
	doc := &document{uri: uri, text: text, lines: splitLines(text)}
	s.mu.Lock()
	s.docs[uri] = doc
	s.mu.Unlock()
}
// deleteDocument drops the cached copy of a closed document.
func (s *Server) deleteDocument(uri string) {
	s.mu.Lock()
	delete(s.docs, uri)
	s.mu.Unlock()
}
// markActivity records the time of the most recent client input.
func (s *Server) markActivity() {
	now := time.Now()
	s.mu.Lock()
	defer s.mu.Unlock()
	s.lastInput = now
}
// getDocument returns the cached document for uri, or nil when not open.
func (s *Server) getDocument(uri string) *document {
	s.mu.RLock()
	doc := s.docs[uri]
	s.mu.RUnlock()
	return doc
}
// splitLines splits the input into lines after normalizing CRLF endings
// to plain LF.
func splitLines(sx string) []string {
	normalized := strings.ReplaceAll(sx, "\r\n", "\n")
	return strings.Split(normalized, "\n")
}
// lineContext returns the line above the cursor, the current line, the line
// below, and the nearest preceding definition-like line (func/def/class/...)
// found scanning upwards. All results are "" when the document is not open.
func (s *Server) lineContext(uri string, pos Position) (above, current, below, funcCtx string) {
	doc := s.getDocument(uri)
	if doc == nil || len(doc.lines) == 0 {
		return "", "", "", ""
	}
	// Clamp the cursor line into the document's range.
	idx := pos.Line
	switch {
	case idx < 0:
		idx = 0
	case idx >= len(doc.lines):
		idx = len(doc.lines) - 1
	}
	current = doc.lines[idx]
	if idx > 0 {
		above = doc.lines[idx-1]
	}
	if idx+1 < len(doc.lines) {
		below = doc.lines[idx+1]
	}
	// Walk upwards to the closest line containing a definition keyword.
	keywords := []string{"func ", "def ", "class ", "fn ", "procedure ", "sub "}
	for i := idx; i >= 0; i-- {
		trimmed := strings.TrimSpace(doc.lines[i])
		if hasAny(trimmed, keywords) {
			funcCtx = trimmed
			break
		}
	}
	return above, current, below, funcCtx
}
// isDefiningNewFunction returns true when the cursor appears to be within
// a function declaration/signature and before the opening '{' of the body.
// Heuristic: find nearest preceding line containing "func "; ensure no '{'
// appears before the cursor across those lines.
func (s *Server) isDefiningNewFunction(uri string, pos Position) bool {
	d := s.getDocument(uri)
	if d == nil || len(d.lines) == 0 {
		return false
	}
	// Clamp the cursor line into the document's range.
	idx := pos.Line
	if idx < 0 {
		idx = 0
	}
	if idx >= len(d.lines) {
		idx = len(d.lines) - 1
	}
	// Find signature start
	sigStart := -1
	for i := idx; i >= 0; i-- {
		if strings.Contains(d.lines[i], "func ") {
			sigStart = i
			break
		}
		// stop if we hit a closing brace which likely ends a previous block
		if strings.Contains(d.lines[i], "}") {
			break
		}
	}
	if sigStart == -1 {
		return false
	}
	// Scan for '{' from sigStart up to cursor position; if found before or at cursor, we're in body
	for i := sigStart; i <= idx; i++ {
		line := d.lines[i]
		brace := strings.Index(line, "{")
		if brace >= 0 {
			if i < idx {
				return false // body started on a previous line
			}
			// same line as cursor: if brace position < cursor character, then already in body
			if pos.Character > brace {
				return false
			}
		}
	}
	// No body brace reached before the cursor: still inside the signature.
	return true
}
// hasAny reports whether s contains at least one of the given substrings.
func hasAny(s string, needles []string) bool {
	for _, needle := range needles {
		if strings.Contains(s, needle) {
			return true
		}
	}
	return false
}
func trimLen(s string) string {
s = strings.TrimSpace(s)
if len(s) > 200 {
return s[:200] + "…"
}
return s
}
// firstLine returns everything before the first newline after normalizing
// CRLF endings; a string with no newline is returned unchanged.
func firstLine(s string) string {
	s = strings.ReplaceAll(s, "\r\n", "\n")
	if head, _, found := strings.Cut(s, "\n"); found {
		return head
	}
	return s
}
// Summary: LSP JSON-RPC handlers; implements core methods and integrates with the LLM client when enabled.
package lsp
import (
"encoding/json"
"fmt"
"strings"
)
// handle routes an incoming JSON-RPC message to its registered handler.
// Unknown methods receive a MethodNotFound (-32601) reply only when the
// message carries an ID (i.e. is a request); notifications are dropped.
func (s *Server) handle(req Request) {
	handler, ok := s.handlers[req.Method]
	if ok {
		handler(req)
		return
	}
	if len(req.ID) == 0 {
		return // notification: no reply expected
	}
	s.reply(req.ID, nil, &RespError{Code: -32601, Message: fmt.Sprintf("method not found: %s", req.Method)})
}
// handleInitialize moved to handlers_init.go
// llmRequestOpts moved to handlers_utils.go
// instructionFromSelection extracts the first instruction from selection text.
// Preference order on each line: strict ;text; marker (no inner spaces), then
// a line comment (//, #, --). Returns the instruction string and the selection
// text cleaned of the matched instruction marker or comment.
// instructionFromSelection extracts the first inline instruction found in the
// selection and returns it together with the selection text cleaned of that
// marker. When no instruction is present it returns ("", sel) unchanged.
func (s *Server) instructionFromSelection(sel string) (string, string) {
	lines := splitLines(sel)
	for i := range lines {
		instr, cleaned, ok := s.findFirstInstructionInLine(lines[i])
		if !ok || strings.TrimSpace(instr) == "" {
			continue
		}
		lines[i] = cleaned
		return instr, strings.Join(lines, "\n")
	}
	return "", sel
}
// findFirstInstructionInLine returns the earliest instruction marker on the
// line and the line with that marker removed. Supported markers, ordered by
// earliest byte offset in the line:
// - ;text; (strict, no space after first ';' or before last ';')
// - /* text */ (single-line only)
// - <!-- text --> (single-line only)
// - // text
// - # text
// - -- text
// findFirstInstructionInLine collects every supported instruction marker on
// the line, picks the one starting earliest, and returns its text plus the
// line with that marker removed. ok is false when no marker is present.
func (s *Server) findFirstInstructionInLine(line string) (instr string, cleaned string, ok bool) {
	// cand records one candidate marker: its byte span within the line and
	// the instruction text it carries.
	type cand struct {
		start, end int
		text       string
	}
	cands := []cand{}
	// Strict configurable inline tag (e.g. ;text;) from server config.
	openStr, _, openChar, closeChar := s.inlineMarkers()
	if t, l, r, ok := findStrictInlineTag(line, openStr, openChar, closeChar); ok {
		cands = append(cands, cand{start: l, end: r, text: t})
	}
	// Single-line C-style block comment: /* text */
	if i := strings.Index(line, "/*"); i >= 0 {
		if j := strings.Index(line[i+2:], "*/"); j >= 0 {
			start := i
			end := i + 2 + j + 2
			text := strings.TrimSpace(line[i+2 : i+2+j])
			cands = append(cands, cand{start: start, end: end, text: text})
		}
	}
	// Single-line HTML comment: <!-- text -->
	if i := strings.Index(line, "<!--"); i >= 0 {
		if j := strings.Index(line[i+4:], "-->"); j >= 0 {
			start := i
			end := i + 4 + j + 3
			text := strings.TrimSpace(line[i+4 : i+4+j])
			cands = append(cands, cand{start: start, end: end, text: text})
		}
	}
	// Line comments run to end of line: //, #, --
	if i := strings.Index(line, "//"); i >= 0 {
		cands = append(cands, cand{start: i, end: len(line), text: strings.TrimSpace(line[i+2:])})
	}
	if i := strings.Index(line, "#"); i >= 0 {
		cands = append(cands, cand{start: i, end: len(line), text: strings.TrimSpace(line[i+1:])})
	}
	if i := strings.Index(line, "--"); i >= 0 {
		cands = append(cands, cand{start: i, end: len(line), text: strings.TrimSpace(line[i+2:])})
	}
	if len(cands) == 0 {
		return "", line, false
	}
	// pick earliest start index
	best := cands[0]
	for _, c := range cands[1:] {
		if c.start >= 0 && (best.start < 0 || c.start < best.start) {
			best = c
		}
	}
	// Remove the winning marker and trim trailing whitespace it leaves behind.
	cleaned = strings.TrimRight(line[:best.start]+line[best.end:], " \t")
	return best.text, cleaned, true
}
// diagnosticsInRange parses the CodeAction context and returns diagnostics
// that overlap the given selection range. If the context is missing or does
// not contain diagnostics, returns an empty slice.
// CodeAction-related handlers and helpers moved to handlers_codeaction.go
// extractRangeText moved to handlers_utils.go
// handleInitialized moved to handlers_init.go
// handleShutdown moved to handlers_init.go
// handleExit moved to handlers_init.go
// handleDidOpen moved to handlers_document.go
// handleDidChange moved to handlers_document.go
// handleDidClose moved to handlers_document.go
// handleCompletion moved to handlers_completion.go
// reply sends a JSON-RPC 2.0 response for the given request id.
func (s *Server) reply(id json.RawMessage, result any, err *RespError) {
	s.writeMessage(Response{JSONRPC: "2.0", ID: id, Result: result, Error: err})
}
// docBeforeAfter returns the full document text split at the given position.
// The returned strings are the text before the cursor (inclusive of anything
// left of the position) and the text after the cursor.
// docBeforeAfter moved to handlers_document.go
// extractTriggerInfo returns the LSP completion TriggerKind and TriggerCharacter
// if provided by the client; when absent it returns zeros.
// extractTriggerInfo moved to handlers_completion.go
// --- in-editor chat (";C ...") ---
// detectAndHandleChat scans the current document for any line that starts with
// ";C" and appears to be awaiting a response (i.e., followed by a blank line
// and no non-empty answer line yet). If found, it asks the LLM and inserts the
// answer below the blank line, leaving exactly one empty line between prompt
// and response.
// detectAndHandleChat moved to handlers_document.go
// applyChatEdits removes the triggering punctuation at end of the line and
// inserts two newlines followed by a new line with the response prefixed.
// applyChatEdits moved to handlers_document.go
// buildChatHistory walks upwards from the current line to collect the most recent
// Q/A pairs in the in-editor transcript. It returns messages in chronological order
// ending with the current user prompt. Limits to a small number of pairs to control tokens.
// buildChatHistory moved to handlers_document.go
// stripTrailingTrigger removes a single trailing punctuation from the set
// [?,!,:] or both semicolons if present at end, mirroring the inline trigger rules.
// stripTrailingTrigger moved to handlers_document.go
// clientApplyEdit sends a workspace/applyEdit request to the client.
// clientApplyEdit moved to handlers_document.go
// nextReqID returns a unique json.RawMessage id for server-initiated requests.
// nextReqID moved to handlers_document.go
// --- completion helpers ---
// buildDocString moved to handlers_completion.go
// logCompletionContext moved to handlers_completion.go
// tryLLMCompletion moved to handlers_completion.go
// parseManualInvoke inspects the LSP completion context and reports whether the user manually invoked completion.
// parseManualInvoke moved to handlers_completion.go
// shouldSuppressForChatTriggerEOL returns true when a chat trigger like ">" follows ?, !, :, or ; at EOL.
// shouldSuppressForChatTriggerEOL moved to handlers_completion.go
// prefixHeuristicAllows applies minimal prefix rules unless inlinePrompt or structural triggers apply.
// prefixHeuristicAllows moved to handlers_completion.go
// tryProviderNativeCompletion attempts provider-native completion and returns items when successful.
// tryProviderNativeCompletion moved to handlers_completion.go
// buildCompletionMessages constructs the LLM messages for completion.
// buildCompletionMessages moved to handlers_completion.go
// postProcessCompletion normalizes and deduplicates completion text and applies indentation rules.
// postProcessCompletion moved to handlers_completion.go
// busyCompletionItem builds a visible, non-inserting completion item indicating
// that an LLM request is already in flight.
// removed: previous single in-flight LLM busy gate and busy item
// --- small completion cache (last ~10 entries) ---
// completionCacheKey derives a cache key from the cursor context plus the
// active provider, model, and temperature so cached completions never leak
// across providers or settings. Fields are joined with the ASCII unit
// separator to avoid accidental collisions.
func (s *Server) completionCacheKey(p CompletionParams, above, current, below, funcCtx string, inParams bool, hasExtra bool, extraText string) string {
	// Normalize the left-of-cursor text by trimming trailing spaces/tabs.
	col := p.Position.Character
	if col > len(current) {
		col = len(current)
	}
	left := strings.TrimRight(current[:col], " \t")
	right := ""
	if col < len(current) {
		right = current[col:]
	}
	var provider, model string
	if client := s.currentLLMClient(); client != nil {
		provider = client.Name()
		model = client.DefaultModel()
	}
	temp := ""
	if tp := s.codingTemperature(); tp != nil {
		temp = fmt.Sprintf("%.3f", *tp)
	}
	extra := ""
	if hasExtra {
		extra = strings.TrimSpace(extraText)
	}
	parts := []string{
		"v1", // version for future-proofing
		provider,
		model,
		temp,
		p.TextDocument.URI,
		fmt.Sprintf("%d:%d", p.Position.Line, len(left)),
		above,
		left,
		right,
		below,
		funcCtx,
		fmt.Sprintf("params=%t", inParams),
		extra,
	}
	return strings.Join(parts, "\x1f")
}
// completionCacheGet returns the cached completion for key, promoting it to
// most-recently-used on a hit.
func (s *Server) completionCacheGet(key string) (string, bool) {
	s.mu.Lock()
	defer s.mu.Unlock()
	value, hit := s.compCache[key]
	if !hit {
		return "", false
	}
	s.compCacheTouchLocked(key)
	return value, true
}
// completionCachePut stores a completion under key, evicting the oldest
// entry once the cache exceeds ten items. Existing keys are updated in place
// and promoted to most-recently-used.
func (s *Server) completionCachePut(key, value string) {
	s.mu.Lock()
	defer s.mu.Unlock()
	if s.compCache == nil {
		s.compCache = make(map[string]string)
	}
	if _, exists := s.compCache[key]; exists {
		s.compCache[key] = value
		s.compCacheTouchLocked(key)
		return
	}
	s.compCache[key] = value
	s.compCacheOrder = append(s.compCacheOrder, key)
	if len(s.compCacheOrder) > 10 {
		oldest := s.compCacheOrder[0]
		s.compCacheOrder = s.compCacheOrder[1:]
		delete(s.compCache, oldest)
	}
}
// compCacheTouchLocked moves key to the most-recent end of the LRU order
// slice. Callers must hold s.mu.
//
// The previous implementation rebuilt a fresh slice on every touch
// (append(append([]string{}, ...), ...)), allocating twice per cache hit.
// The order slice is owned exclusively by the server and only mutated under
// s.mu, so an in-place delete is safe and allocation-free.
func (s *Server) compCacheTouchLocked(key string) {
	for i, k := range s.compCacheOrder {
		if k == key {
			// Remove the existing occurrence in place.
			s.compCacheOrder = append(s.compCacheOrder[:i], s.compCacheOrder[i+1:]...)
			break
		}
	}
	s.compCacheOrder = append(s.compCacheOrder, key)
}
// isTriggerEvent returns true when the completion request appears to be caused
// by typing one of our configured trigger characters. It checks the LSP
// CompletionContext if provided and also falls back to inspecting the character
// immediately to the left of the cursor.
//
// Precedence, in order: a bare double-open marker on the line always vetoes;
// manual invocation (TriggerKind 1) always allows; TriggerKind 2 requires a
// matching trigger character; anything else (including TriggerKind 3) falls
// through to the prior-character heuristic.
func (s *Server) isTriggerEvent(p CompletionParams, current string) bool {
	open, _, openChar, closeChar := s.inlineMarkers()
	doubleSeqs := doubleOpenSequences(open, openChar, closeChar)
	triggerChars := s.triggerCharacters()
	// 1) Inspect LSP completion context if present
	if p.Context != nil {
		// p.Context may arrive as raw JSON or as an already-decoded value;
		// normalize both into just the two fields needed here. Decode errors
		// are deliberately ignored and leave ctx zero-valued (kind 0).
		var ctx struct {
			TriggerKind      int    `json:"triggerKind"`
			TriggerCharacter string `json:"triggerCharacter,omitempty"`
		}
		if raw, ok := p.Context.(json.RawMessage); ok {
			_ = json.Unmarshal(raw, &ctx)
		} else {
			b, _ := json.Marshal(p.Context)
			_ = json.Unmarshal(b, &ctx)
		}
		// If configured and the line contains a bare double-open marker (e.g., '>>!' with no '>>!text>'),
		// do not treat as a trigger source.
		if containsAny(current, doubleSeqs) && !hasDoubleOpenTrigger(current, open, openChar, closeChar) {
			return false
		}
		// TriggerKind 1 = Invoked (manual). Always allow manual invoke.
		if ctx.TriggerKind == 1 {
			return true
		}
		// TriggerKind 2 is TriggerCharacter per LSP spec
		if ctx.TriggerKind == 2 {
			if ctx.TriggerCharacter != "" {
				for _, c := range triggerChars {
					if c == ctx.TriggerCharacter {
						return true
					}
				}
				return false
			}
			// No character provided but reported as TriggerCharacter; be conservative
			return false
		}
		// For TriggerForIncomplete (3), require manual char check below
	}
	// 2) Fallback: check the character immediately prior to cursor
	idx := p.Position.Character
	if idx <= 0 || idx > len(current) {
		return false
	}
	// Bare double-open should not trigger via fallback char either (only when configured)
	if containsAny(current, doubleSeqs) && !hasDoubleOpenTrigger(current, open, openChar, closeChar) {
		return false
	}
	// NOTE(review): byte-based indexing — assumes configured trigger chars are
	// single-byte ASCII; confirm against triggerCharacters() configuration.
	ch := string(current[idx-1])
	for _, c := range triggerChars {
		if c == ch {
			return true
		}
	}
	return false
}
// makeCompletionItems wraps a cleaned LLM suggestion into a single LSP
// completion item, attaching the computed text edit, any prompt-marker
// removal edits, and defaults for detail text and sort order.
func (s *Server) makeCompletionItems(cleaned string, inParams bool, current string, p CompletionParams, docStr string, detail string, sortPrefix string) []CompletionItem {
	edit, filter := computeTextEditAndFilter(cleaned, inParams, current, p)
	removals := s.collectPromptRemovalEdits(p.TextDocument.URI)
	if strings.TrimSpace(detail) == "" {
		detail = "Hexai LLM completion"
	}
	if sortPrefix == "" {
		sortPrefix = "0000"
	}
	item := CompletionItem{
		Label:               labelForCompletion(cleaned, filter),
		Kind:                1,
		Detail:              detail,
		InsertTextFormat:    1,
		FilterText:          strings.TrimLeft(filter, " \t"),
		TextEdit:            edit,
		AdditionalTextEdits: removals,
		SortText:            sortPrefix,
		Documentation:       docStr,
	}
	return []CompletionItem{item}
}
// containsAny reports whether haystack contains at least one of the non-empty
// sequences in seqs. Empty sequences are ignored (they would otherwise match
// everything).
func containsAny(haystack string, seqs []string) bool {
	for _, needle := range seqs {
		if needle != "" && strings.Contains(haystack, needle) {
			return true
		}
	}
	return false
}
// small helpers to keep tryLLMCompletion short
// LLM stats helpers moved to handlers_utils.go
// collectPromptRemovalEdits returns edits to remove all inline prompt markers.
// Supported form (inclusive):
// - ";...;" where there is no space immediately after the first ';'
// and no space immediately before the last ';'. An optional single space
// after the trailing ';' is also removed for cleanliness.
//
// Multiple markers per line are supported.
// Inline prompt removal helpers moved to handlers_utils.go
// inParamList moved to handlers_utils.go
// buildPrompts moved to handlers_utils.go
// computeTextEditAndFilter moved to handlers_utils.go
// computeWordStart moved to handlers_utils.go
// isIdentChar moved to handlers_utils.go
// lineHasInlinePrompt returns true if the line contains an inline strict
// semicolon marker ;text; (no spaces at boundaries) or a double-semicolon
// pattern recognized by hasDoubleSemicolonTrigger.
// lineHasInlinePrompt moved to handlers_utils.go
// leadingIndent returns the run of leading spaces/tabs from the provided line.
// leadingIndent moved to handlers_utils.go
// applyIndent prefixes each non-empty line of suggestion with the given indent
// unless it already starts with that indent.
// applyIndent moved to handlers_utils.go
// isBareDoubleSemicolon reports whether the line contains a standalone
// double-semicolon marker with no inline content (";;" possibly with only
// whitespace after it). It explicitly excludes the valid form ";;text;".
// isBareDoubleSemicolon moved to handlers_utils.go
// stripDuplicateAssignmentPrefix removes a duplicated assignment prefix (e.g.,
// "name :=") from the beginning of the model suggestion when that same prefix
// already appears immediately to the left of the cursor on the current line.
// Also handles simple '=' assignments.
// stripDuplicateAssignmentPrefix moved to handlers_utils.go
// stripDuplicateGeneralPrefix removes any already-typed prefix that the model repeated
// at the beginning of its suggestion. It compares the entire text to the left of the
// cursor (prefixBeforeCursor) against the suggestion, trimming whitespace appropriately,
// and strips the longest sensible overlap. This prevents cases like:
//
// prefix: "func New "
// suggestion:"func New() *Type"
//
// resulting in duplicates like "func New func New() *Type".
// stripDuplicateGeneralPrefix moved to handlers_utils.go
// isIdentBoundary moved to handlers_utils.go
// stripCodeFences removes surrounding Markdown code fences from a model
// response when the entire output is wrapped, e.g. starting with "```go" or
// "```" and ending with "```". It returns the inner content unchanged.
// stripCodeFences moved to handlers_utils.go
// stripInlineCodeSpan returns only the contents of the first inline backtick
// code span if present, e.g., "some text `x := y()` more" -> "x := y()".
// If no matching pair of backticks exists, it returns the input unchanged.
// This is intended for code completion responses where the model may wrap a
// small snippet in single backticks among prose.
// stripInlineCodeSpan moved to handlers_utils.go
// labelForCompletion moved to handlers_utils.go
// fallbackCompletionItems returns a static placeholder item used when no LLM
// client is configured or the LLM path produced no result.
func (s *Server) fallbackCompletionItems(docStr string) []CompletionItem {
	item := CompletionItem{
		Label:         "hexai-complete",
		Kind:          1,
		Detail:        "dummy completion",
		InsertText:    "hexai",
		SortText:      "9999",
		Documentation: docStr,
	}
	return []CompletionItem{item}
}
// Summary: Code Action handlers and helpers split from handlers.go for clarity.
package lsp
import (
	"context"
	"encoding/json"
	"fmt"
	"os"
	"path/filepath"
	"strings"
	"time"
	"unicode"

	"codeberg.org/snonux/hexai/internal/llm"
	"codeberg.org/snonux/hexai/internal/logging"
)
// handleCodeAction builds the list of code actions for the requested range.
// On malformed params, a missing/empty document, or no configured LLM client
// it replies with an empty list (when the request carries an ID at all).
func (s *Server) handleCodeAction(req Request) {
	replyEmpty := func() {
		if len(req.ID) != 0 {
			s.reply(req.ID, []CodeAction{}, nil)
		}
	}
	var p CodeActionParams
	if err := json.Unmarshal(req.Params, &p); err != nil {
		replyEmpty()
		return
	}
	doc := s.getDocument(p.TextDocument.URI)
	if doc == nil || len(doc.lines) == 0 || s.currentLLMClient() == nil {
		replyEmpty()
		return
	}
	sel := extractRangeText(doc, p.Range)
	actions := make([]CodeAction, 0, 8)
	// Built-in actions, in fixed presentation order.
	builders := []func() *CodeAction{
		func() *CodeAction { return s.buildRewriteCodeAction(p, sel) },
		func() *CodeAction { return s.buildDiagnosticsCodeAction(p, sel) },
		func() *CodeAction { return s.buildDocumentCodeAction(p, sel) },
		func() *CodeAction { return s.buildGoUnitTestCodeAction(p) },
		func() *CodeAction { return s.buildSimplifyCodeAction(p, sel) },
	}
	for _, build := range builders {
		if a := build(); a != nil {
			actions = append(actions, *a)
		}
	}
	// Custom actions from config come last.
	s.appendCustomActions(&actions, p, sel)
	if len(req.ID) != 0 {
		s.reply(req.ID, actions, nil)
	}
}
// appendCustomActions adds user-defined actions depending on scope and availability.
// Scope "diagnostics" requires at least one diagnostic overlapping the range;
// any other scope value is treated as "selection" and requires a non-empty
// selection. Each action serializes its payload into CodeAction.Data, which
// resolveCodeAction later decodes (Type "custom", matched by ID).
func (s *Server) appendCustomActions(actions *[]CodeAction, p CodeActionParams, sel string) {
	customs := s.customActions()
	if len(customs) == 0 {
		return
	}
	diags := s.diagnosticsInRange(p.Context, p.Range)
	for _, ca := range customs {
		title := strings.TrimSpace(ca.Title)
		if title == "" {
			// An untitled action cannot be shown in the client menu.
			continue
		}
		scope := strings.TrimSpace(strings.ToLower(ca.Scope))
		if scope == "diagnostics" {
			if len(diags) == 0 {
				continue
			}
			// Diagnostics-scoped payload additionally carries the overlapping
			// diagnostics so the resolver can render them into the prompt.
			payload := struct {
				Type        string       `json:"type"`
				ID          string       `json:"id"`
				URI         string       `json:"uri"`
				Range       Range        `json:"range"`
				Selection   string       `json:"selection"`
				Diagnostics []Diagnostic `json:"diagnostics"`
			}{Type: "custom", ID: ca.ID, URI: p.TextDocument.URI, Range: p.Range, Selection: sel, Diagnostics: diags}
			raw, _ := json.Marshal(payload)
			kind := ca.Kind
			if strings.TrimSpace(kind) == "" {
				// Default diagnostics-scoped actions to quickfix.
				kind = "quickfix"
			}
			*actions = append(*actions, CodeAction{Title: "Hexai: " + title, Kind: kind, Data: raw})
			continue
		}
		// default: selection
		if strings.TrimSpace(sel) == "" {
			continue
		}
		payload := struct {
			Type      string `json:"type"`
			ID        string `json:"id"`
			URI       string `json:"uri"`
			Range     Range  `json:"range"`
			Selection string `json:"selection"`
		}{Type: "custom", ID: ca.ID, URI: p.TextDocument.URI, Range: p.Range, Selection: sel}
		raw, _ := json.Marshal(payload)
		kind := ca.Kind
		if strings.TrimSpace(kind) == "" {
			// Default selection-scoped actions to refactor.
			kind = "refactor"
		}
		*actions = append(*actions, CodeAction{Title: "Hexai: " + title, Kind: kind, Data: raw})
	}
}
// buildSimplifyCodeAction offers a "simplify and improve" refactor for a
// non-empty selection; returns nil when nothing is selected.
func (s *Server) buildSimplifyCodeAction(p CodeActionParams, sel string) *CodeAction {
	if strings.TrimSpace(sel) == "" {
		return nil
	}
	payload := struct {
		Type      string `json:"type"`
		URI       string `json:"uri"`
		Range     Range  `json:"range"`
		Selection string `json:"selection"`
	}{Type: "simplify", URI: p.TextDocument.URI, Range: p.Range, Selection: sel}
	raw, _ := json.Marshal(payload)
	return &CodeAction{Title: "Hexai: simplify and improve", Kind: "refactor", Data: raw}
}
// buildRewriteCodeAction offers a rewrite action when the selection embeds an
// inline instruction. The cleaned selection (instruction stripped) becomes the
// rewrite target; returns nil when no instruction is present.
func (s *Server) buildRewriteCodeAction(p CodeActionParams, sel string) *CodeAction {
	instr, cleaned := s.instructionFromSelection(sel)
	if strings.TrimSpace(instr) == "" {
		return nil
	}
	payload := struct {
		Type        string `json:"type"`
		URI         string `json:"uri"`
		Range       Range  `json:"range"`
		Instruction string `json:"instruction"`
		Selection   string `json:"selection"`
	}{Type: "rewrite", URI: p.TextDocument.URI, Range: p.Range, Instruction: instr, Selection: cleaned}
	raw, _ := json.Marshal(payload)
	return &CodeAction{Title: "Hexai: rewrite selection", Kind: "refactor.rewrite", Data: raw}
}
// buildDiagnosticsCodeAction offers a quickfix that asks the LLM to resolve
// the diagnostics overlapping the selection; returns nil when there are none.
func (s *Server) buildDiagnosticsCodeAction(p CodeActionParams, sel string) *CodeAction {
	diags := s.diagnosticsInRange(p.Context, p.Range)
	if len(diags) == 0 {
		return nil
	}
	payload := struct {
		Type        string       `json:"type"`
		URI         string       `json:"uri"`
		Range       Range        `json:"range"`
		Selection   string       `json:"selection"`
		Diagnostics []Diagnostic `json:"diagnostics"`
	}{Type: "diagnostics", URI: p.TextDocument.URI, Range: p.Range, Selection: sel, Diagnostics: diags}
	raw, _ := json.Marshal(payload)
	return &CodeAction{Title: "Hexai: resolve diagnostics", Kind: "quickfix", Data: raw}
}
// resolveCodeAction performs the lazy, LLM-backed part of a code action. It
// decodes the payload stashed in ca.Data by the build*/appendCustomActions
// helpers and dispatches on its Type. Returns (ca, false) when the action
// cannot be resolved (no client, no data, decode error, unknown type, missing
// custom action, or LLM failure inside completeCodeAction).
func (s *Server) resolveCodeAction(ca CodeAction) (CodeAction, bool) {
	if s.currentLLMClient() == nil || len(ca.Data) == 0 {
		return ca, false
	}
	// Superset of every payload shape the builders produce; absent fields
	// simply stay zero-valued.
	var payload struct {
		Type        string       `json:"type"`
		ID          string       `json:"id"`
		URI         string       `json:"uri"`
		Range       Range        `json:"range"`
		Instruction string       `json:"instruction,omitempty"`
		Selection   string       `json:"selection"`
		Diagnostics []Diagnostic `json:"diagnostics,omitempty"`
	}
	if err := json.Unmarshal(ca.Data, &payload); err != nil {
		return ca, false
	}
	cfg := s.currentConfig()
	switch payload.Type {
	case "rewrite":
		sys := cfg.PromptCodeActionRewriteSystem
		user := renderTemplate(cfg.PromptCodeActionRewriteUser, map[string]string{"instruction": payload.Instruction, "selection": payload.Selection})
		return s.completeCodeAction(ca, payload.URI, payload.Range, sys, user, 20*time.Second)
	case "diagnostics":
		sys := cfg.PromptCodeActionDiagnosticsSystem
		// Render diagnostics as a numbered list, prefixing the source tool
		// (e.g. the linter name) when known.
		var b strings.Builder
		for i, dgn := range payload.Diagnostics {
			if dgn.Source != "" {
				fmt.Fprintf(&b, "%d. [%s] %s\n", i+1, dgn.Source, dgn.Message)
			} else {
				fmt.Fprintf(&b, "%d. %s\n", i+1, dgn.Message)
			}
		}
		diagList := b.String()
		user := renderTemplate(cfg.PromptCodeActionDiagnosticsUser, map[string]string{"diagnostics": diagList, "selection": payload.Selection})
		return s.completeCodeAction(ca, payload.URI, payload.Range, sys, user, 22*time.Second)
	case "document":
		sys := cfg.PromptCodeActionDocumentSystem
		user := renderTemplate(cfg.PromptCodeActionDocumentUser, map[string]string{"selection": payload.Selection})
		return s.completeCodeAction(ca, payload.URI, payload.Range, sys, user, 20*time.Second)
	case "go_test":
		// go_test assembles the workspace edit locally (the LLM only writes
		// the test body) and asks the client to jump to the generated test.
		if edit, jumpURI, jumpRange, ok := s.resolveGoTest(payload.URI, payload.Range.Start); ok {
			ca.Edit = &edit
			ca.Command = &Command{Title: "Jump to generated test", Command: "hexai.showDocument", Arguments: []any{jumpURI, jumpRange}}
			s.deferShowDocument(jumpURI, jumpRange)
			return ca, true
		}
	case "simplify":
		// Simplify reuses the rewrite prompt with a fixed instruction.
		sys := cfg.PromptCodeActionRewriteSystem
		user := renderTemplate(cfg.PromptCodeActionRewriteUser, map[string]string{"instruction": "Simplify and improve the code while preserving behavior. Return only the improved code.", "selection": payload.Selection})
		return s.completeCodeAction(ca, payload.URI, payload.Range, sys, user, 20*time.Second)
	case "custom":
		// Look up the configured action definition by ID.
		var action *CustomAction
		for _, caDef := range s.customActions() {
			if caDef.ID == payload.ID {
				// Taking the range variable's address is safe here because the
				// loop exits immediately after the match.
				action = &caDef
				break
			}
		}
		if action == nil {
			return ca, false
		}
		var sys, user string
		if strings.TrimSpace(action.User) != "" {
			// Custom user template: prefer the action's own system prompt,
			// falling back to the rewrite system prompt.
			if strings.TrimSpace(action.System) != "" {
				sys = action.System
			} else {
				sys = cfg.PromptCodeActionRewriteSystem
			}
			var diagList string
			if len(payload.Diagnostics) > 0 {
				var b strings.Builder
				for _, d := range payload.Diagnostics {
					fmt.Fprintf(&b, "%s\n", d.Message)
				}
				diagList = b.String()
			}
			user = renderTemplate(action.User, map[string]string{"selection": payload.Selection, "diagnostics": strings.TrimSpace(diagList)})
		} else {
			// No custom user template: behave like a plain rewrite.
			sys = cfg.PromptCodeActionRewriteSystem
			user = renderTemplate(cfg.PromptCodeActionRewriteUser, map[string]string{"instruction": payload.Instruction, "selection": payload.Selection})
		}
		return s.completeCodeAction(ca, payload.URI, payload.Range, sys, user, 20*time.Second)
	}
	return ca, false
}
// completeCodeAction sends a system+user prompt to the LLM and, on success,
// attaches a workspace edit replacing rng in uri with the fence-stripped
// response. Returns the action unchanged and false on error or empty output.
func (s *Server) completeCodeAction(ca CodeAction, uri string, rng Range, sys, user string, timeout time.Duration) (CodeAction, bool) {
	ctx, cancel := context.WithTimeout(context.Background(), timeout)
	defer cancel()
	msgs := []llm.Message{
		{Role: "system", Content: sys},
		{Role: "user", Content: user},
	}
	spec := s.buildRequestSpec(surfaceCodeAction)
	text, err := s.chatWithStats(ctx, surfaceCodeAction, spec, msgs)
	if err != nil {
		logging.Logf("lsp ", "codeAction llm error: %v", err)
		return ca, false
	}
	out := stripCodeFences(strings.TrimSpace(text))
	if out == "" {
		return ca, false
	}
	edit := WorkspaceEdit{Changes: map[string][]TextEdit{uri: {{Range: rng, NewText: out}}}}
	ca.Edit = &edit
	return ca, true
}
// handleCodeActionResolve performs the lazy resolution step for a code action
// and replies with the resolved action, echoing the input when resolution
// fails or the params cannot be decoded.
func (s *Server) handleCodeActionResolve(req Request) {
	var ca CodeAction
	if err := json.Unmarshal(req.Params, &ca); err != nil {
		if len(req.ID) != 0 {
			s.reply(req.ID, ca, nil)
		}
		return
	}
	resolved, ok := s.resolveCodeAction(ca)
	if !ok {
		resolved = ca
	}
	s.reply(req.ID, resolved, nil)
}
// diagnosticsInRange parses the CodeAction context and returns the diagnostics
// that overlap sel. A missing, unparsable, or diagnostic-free context yields
// nil.
func (s *Server) diagnosticsInRange(ctxRaw json.RawMessage, sel Range) []Diagnostic {
	if len(ctxRaw) == 0 {
		return nil
	}
	var ctx CodeActionContext
	if json.Unmarshal(ctxRaw, &ctx) != nil || len(ctx.Diagnostics) == 0 {
		return nil
	}
	overlapping := make([]Diagnostic, 0, len(ctx.Diagnostics))
	for _, d := range ctx.Diagnostics {
		if rangesOverlap(d.Range, sel) {
			overlapping = append(overlapping, d)
		}
	}
	return overlapping
}
// rangesOverlap reports whether two LSP ranges overlap at all; touching
// endpoints count as overlap. Each range is normalized first so Start <= End.
func rangesOverlap(a, b Range) bool {
	if greaterPos(a.Start, a.End) {
		a.Start, a.End = a.End, a.Start
	}
	if greaterPos(b.Start, b.End) {
		b.Start, b.End = b.End, b.Start
	}
	// They overlap iff neither range ends strictly before the other starts.
	return !lessPos(a.End, b.Start) && !lessPos(b.End, a.Start)
}
// lessPos reports whether p is strictly before q in document order.
func lessPos(p, q Position) bool {
	if p.Line == q.Line {
		return p.Character < q.Character
	}
	return p.Line < q.Line
}
// greaterPos reports whether p is strictly after q in document order.
func greaterPos(p, q Position) bool {
	if p.Line == q.Line {
		return p.Character > q.Character
	}
	return p.Line > q.Line
}
// --- Go unit test code action ---
// buildGoUnitTestCodeAction offers to generate a unit test for the function at
// the cursor. Only applies to non-test .go files where a function context is
// detectable above the cursor.
func (s *Server) buildGoUnitTestCodeAction(p CodeActionParams) *CodeAction {
	uri := p.TextDocument.URI
	if uri == "" {
		return nil
	}
	path := strings.TrimPrefix(uri, "file://")
	// Skip non-Go files and files that are already tests.
	if !strings.HasSuffix(path, ".go") || strings.HasSuffix(path, "_test.go") {
		return nil
	}
	// Heuristic: only offer when a function context is found above the cursor.
	_, _, _, funcCtx := s.lineContext(uri, p.Range.Start)
	if !strings.Contains(funcCtx, "func ") {
		return nil
	}
	payload := struct {
		Type  string `json:"type"`
		URI   string `json:"uri"`
		Range Range  `json:"range"`
	}{Type: "go_test", URI: uri, Range: p.Range}
	raw, _ := json.Marshal(payload)
	return &CodeAction{Title: "Hexai: implement unit test", Kind: "quickfix", Data: raw}
}
// buildDocumentCodeAction offers to document the selected code by injecting comments.
// Requires a configured LLM client and a non-empty selection.
func (s *Server) buildDocumentCodeAction(p CodeActionParams, sel string) *CodeAction {
	if s.currentLLMClient() == nil || strings.TrimSpace(sel) == "" {
		return nil
	}
	payload := struct {
		Type      string `json:"type"`
		URI       string `json:"uri"`
		Range     Range  `json:"range"`
		Selection string `json:"selection"`
	}{Type: "document", URI: p.TextDocument.URI, Range: p.Range, Selection: sel}
	raw, _ := json.Marshal(payload)
	return &CodeAction{Title: "Hexai: document code", Kind: "refactor.rewrite", Data: raw}
}
// resolveGoTest builds a WorkspaceEdit that adds a generated unit test for the
// function at pos in uri. It returns the edit, the test-file URI, a range to
// jump to, and whether resolution succeeded. The test is appended to an
// existing foo_test.go, or a new file (package clause + testing import) is
// created via documentChanges.
func (s *Server) resolveGoTest(uri string, pos Position) (WorkspaceEdit, string, Range, bool) {
	path := strings.TrimPrefix(uri, "file://")
	if !strings.HasSuffix(path, ".go") || strings.HasSuffix(path, "_test.go") {
		return WorkspaceEdit{}, "", Range{}, false
	}
	// Load source text (open document preferred over disk).
	_, lines := s.loadFileText(uri)
	if len(lines) == 0 {
		return WorkspaceEdit{}, "", Range{}, false
	}
	pkg := parseGoPackageName(lines)
	fnStart, fnEnd := findGoFunctionAtLine(lines, pos.Line)
	if fnStart < 0 || fnEnd < fnStart {
		return WorkspaceEdit{}, "", Range{}, false
	}
	funcCode := strings.Join(lines[fnStart:fnEnd+1], "\n")
	testFunc := s.generateGoTestFunction(funcCode)
	if strings.TrimSpace(testFunc) == "" {
		return WorkspaceEdit{}, "", Range{}, false
	}
	// Determine test file target (foo.go -> foo_test.go).
	testPath := strings.TrimSuffix(path, ".go") + "_test.go"
	testURI := "file://" + testPath
	// If test file exists, append test at EOF; otherwise, create a new file with package+import
	if fileExists(testPath) {
		// Build an insertion at end of file
		_, tLines := s.loadFileText(testURI)
		// Fallback when not open and cannot read: still insert at line 0
		lineIdx := 0
		col := 0
		if len(tLines) > 0 {
			lineIdx = len(tLines) - 1
			col = len(tLines[lineIdx]) // insert after the last character
		}
		var b strings.Builder
		// Ensure at least two newlines before the new test
		// (the second len(tLines) > 0 check is redundant after the first clause).
		if len(tLines) == 0 || (len(tLines) > 0 && !strings.HasSuffix(strings.Join(tLines, "\n"), "\n\n")) {
			b.WriteString("\n\n")
		}
		b.WriteString(testFunc)
		insert := b.String()
		edit := TextEdit{Range: Range{Start: Position{Line: lineIdx, Character: col}, End: Position{Line: lineIdx, Character: col}}, NewText: insert}
		we := WorkspaceEdit{Changes: map[string][]TextEdit{testURI: {edit}}}
		// Compute jump range start
		// Count how many prefix newlines added before the test function
		prefixNL := 0
		if strings.HasPrefix(insert, "\n\n") {
			prefixNL = 2
		}
		startLine := lineIdx + prefixNL
		// If we inserted with two newlines and last line wasn't blank, first newline moves to next line
		// NOTE(review): this branch re-assigns the identical value and has no
		// effect; the jump target may be off by one in some cases — verify in
		// an editor before relying on it.
		if prefixNL > 0 {
			startLine = lineIdx + prefixNL
		}
		jump := Range{Start: Position{Line: startLine, Character: 0}, End: Position{Line: startLine, Character: 0}}
		return we, testURI, jump, true
	}
	// Create new file content
	var content strings.Builder
	if pkg == "" {
		// No package clause found: fall back to the directory name.
		pkg = filepath.Base(filepath.Dir(path))
	}
	content.WriteString("package ")
	content.WriteString(pkg)
	content.WriteString("\n\n")
	content.WriteString("import (\n\t\"testing\"\n)\n\n")
	content.WriteString(testFunc)
	full := content.String()
	// Use documentChanges with create + full content insert
	create := CreateFile{Kind: "create", URI: testURI}
	tde := TextDocumentEdit{TextDocument: VersionedTextDocumentIdentifier{URI: testURI}, Edits: []TextEdit{{Range: Range{Start: Position{Line: 0, Character: 0}, End: Position{Line: 0, Character: 0}}, NewText: full}}}
	we := WorkspaceEdit{DocumentChanges: []any{create, tde}}
	// Find start line of first test function
	// Count lines before the substring "func Test"
	pre := content.String()
	idx := strings.Index(pre, "func Test")
	startLine := 0
	if idx > 0 {
		before := pre[:idx]
		startLine = strings.Count(before, "\n")
	}
	jump := Range{Start: Position{Line: startLine, Character: 0}, End: Position{Line: startLine, Character: 0}}
	return we, testURI, jump, true
}
// loadFileText returns the file content and its lines, preferring the open
// in-memory document over the on-disk copy. The returned lines slice is a
// copy, so callers may mutate it freely.
func (s *Server) loadFileText(uri string) (string, []string) {
	if doc := s.getDocument(uri); doc != nil {
		lines := make([]string, len(doc.lines))
		copy(lines, doc.lines)
		return doc.text, lines
	}
	path := strings.TrimPrefix(uri, "file://")
	data, err := os.ReadFile(path)
	if err != nil {
		return "", nil
	}
	text := string(data)
	return text, splitLines(text)
}
// fileExists reports whether path can be stat'ed. Any Stat error — including
// permission problems, not just os.ErrNotExist — is treated as "does not
// exist", matching the original behavior.
func fileExists(path string) bool {
	// Idiomatic form of the if/return-true/return-false pattern.
	_, err := os.Stat(path)
	return err == nil
}
// parseGoPackageName returns the package name from file lines, or "" when no
// package clause is found. Anything after the name (spaces, tabs, trailing
// comments) is stripped.
func parseGoPackageName(lines []string) string {
	for _, line := range lines {
		trimmed := strings.TrimSpace(line)
		if !strings.HasPrefix(trimmed, "package ") {
			continue
		}
		name := strings.TrimSpace(strings.TrimPrefix(trimmed, "package "))
		// Cut at the first space, tab, or inline comment marker.
		if i := strings.Index(name, " "); i >= 0 {
			name = name[:i]
		}
		if i := strings.Index(name, "\t"); i >= 0 {
			name = name[:i]
		}
		if i := strings.Index(name, "//"); i >= 0 {
			name = strings.TrimSpace(name[:i])
		}
		return name
	}
	return ""
}
// findGoFunctionAtLine finds the function enclosing or preceding line idx.
// Returns inclusive start and end line indexes, or (-1, -1) when no signature
// is found. A signature that never opens a brace is treated as a single-line
// prototype (end == start); a body that never closes yields end == -1.
func findGoFunctionAtLine(lines []string, idx int) (int, int) {
	if idx < 0 {
		idx = 0
	}
	if idx >= len(lines) {
		idx = len(lines) - 1
	}
	start := goFuncSignatureStart(lines, idx)
	if start < 0 {
		return -1, -1
	}
	return start, goFuncBodyEnd(lines, start)
}

// goFuncSignatureStart scans upward from idx for a line containing "func ",
// giving up at a closing brace (taken to be a previous function's end).
func goFuncSignatureStart(lines []string, idx int) int {
	for i := idx; i >= 0; i-- {
		switch {
		case strings.Contains(lines[i], "func "):
			return i
		case strings.Contains(lines[i], "}"):
			return -1
		}
	}
	return -1
}

// goFuncBodyEnd balances braces from start until depth returns to zero.
// Returns start itself when no brace was ever opened, and -1 when the body
// never closes.
func goFuncBodyEnd(lines []string, start int) int {
	depth, opened := 0, false
	for i := start; i < len(lines); i++ {
		for _, c := range []byte(lines[i]) {
			switch c {
			case '{':
				depth++
				opened = true
			case '}':
				if depth > 0 {
					depth--
				}
				if opened && depth == 0 {
					return i
				}
			}
		}
	}
	if !opened {
		return start
	}
	return -1
}
// generateGoTestFunction uses the LLM to produce a test function for funcCode;
// it falls back to a TODO stub when the LLM fails or returns nothing.
func (s *Server) generateGoTestFunction(funcCode string) string {
	spec := s.buildRequestSpec(surfaceCodeAction)
	cfg := s.currentConfig()
	sys := cfg.PromptCodeActionGoTestSystem
	user := renderTemplate(cfg.PromptCodeActionGoTestUser, map[string]string{"function": funcCode})
	ctx, cancel := context.WithTimeout(context.Background(), 18*time.Second)
	defer cancel()
	msgs := []llm.Message{{Role: "system", Content: sys}, {Role: "user", Content: user}}
	out, err := s.chatWithStats(ctx, surfaceCodeAction, spec, msgs)
	if err != nil {
		logging.Logf("lsp ", "codeAction go_test llm error: %v", err)
	} else if cleaned := strings.TrimSpace(stripCodeFences(out)); cleaned != "" {
		return cleaned
	}
	// Fallback stub referencing the (best-effort) function name.
	name := deriveGoFuncName(funcCode)
	if name == "" {
		name = "Function"
	}
	return fmt.Sprintf("func Test%s(t *testing.T) {\n\t// TODO: implement tests for %s\n}\n", exportName(name), name)
}
// deriveGoFuncName extracts the function or method name from the first line
// of code, returning "" when that line is not a func declaration.
func deriveGoFuncName(code string) string {
	sig := strings.TrimSpace(firstLine(code))
	if !strings.HasPrefix(sig, "func ") {
		return ""
	}
	rest := strings.TrimSpace(strings.TrimPrefix(sig, "func "))
	// Skip a method receiver such as "(s *Server)".
	if strings.HasPrefix(rest, "(") {
		if i := strings.Index(rest, ")"); i >= 0 && i+1 < len(rest) {
			rest = strings.TrimSpace(rest[i+1:])
		}
	}
	// The name is whatever precedes the parameter list.
	if i := strings.Index(rest, "("); i > 0 {
		return strings.TrimSpace(rest[:i])
	}
	return ""
}
// exportName upper-cases the first rune of name so it forms an exported Go
// identifier (used to build "TestXxx" names). The previous version only
// handled ASCII 'a'..'z'; Go identifiers may start with any Unicode letter,
// so use unicode.ToUpper. Behavior is unchanged for ASCII input.
func exportName(name string) string {
	if name == "" {
		return name
	}
	r := []rune(name)
	r[0] = unicode.ToUpper(r[0])
	return string(r)
}
// Summary: Completion handlers split from handlers.go to reduce file size and isolate feature logic.
package lsp
import (
"context"
"encoding/json"
"fmt"
"strings"
"sync"
"time"
"codeberg.org/snonux/hexai/internal/llm"
"codeberg.org/snonux/hexai/internal/logging"
"codeberg.org/snonux/hexai/internal/stats"
)
// completionPlan carries the pre-computed context and gate results for one
// completion request so the per-provider workers don't re-derive them.
type completionPlan struct {
	params       CompletionParams // original request parameters
	above        string           // line above the cursor
	current      string           // line containing the cursor
	below        string           // line below the cursor
	funcCtx      string           // enclosing function context, when detected
	docStr       string           // human-readable context attached as item documentation
	hasExtra     bool             // whether extraText participates in prompts and the cache key
	extraText    string           // additional context (e.g. when defining a new function)
	inlinePrompt bool             // current line contains an inline prompt marker
	inParams     bool             // cursor sits inside a parameter list
	manualInvoke bool             // completion was manually invoked by the user
	cacheKey     string           // key into the completion cache / pending results
}
// handleCompletion serves textDocument/completion. It logs the trigger, tries
// the LLM-backed path when a client is configured, and otherwise replies with
// the static fallback item. When completion is disabled it replies with an
// empty list immediately.
func (s *Server) handleCompletion(req Request) {
	if s.completionDisabled() {
		s.reply(req.ID, CompletionList{IsIncomplete: false, Items: nil}, nil)
		return
	}
	var p CompletionParams
	var docStr string
	if err := json.Unmarshal(req.Params, &p); err == nil {
		// Log trigger information for every completion request from client.
		kind, ch := extractTriggerInfo(p)
		logging.Logf("lsp ", "completion trigger kind=%d char=%q uri=%s line=%d char=%d",
			kind, ch, p.TextDocument.URI, p.Position.Line, p.Position.Character)
		above, current, below, funcCtx := s.lineContext(p.TextDocument.URI, p.Position)
		docStr = s.buildDocString(p, above, current, below, funcCtx)
		if s.logContext {
			s.logCompletionContext(p, above, current, below, funcCtx)
		}
		if s.llmClient != nil {
			newFunc := s.isDefiningNewFunction(p.TextDocument.URI, p.Position)
			extra, has := s.buildAdditionalContext(newFunc, p.TextDocument.URI, p.Position)
			if items, ok, incomplete := s.tryLLMCompletion(p, above, current, below, funcCtx, docStr, has, extra); ok {
				s.reply(req.ID, CompletionList{IsIncomplete: incomplete, Items: items}, nil)
				return
			}
		}
	}
	s.reply(req.ID, CompletionList{IsIncomplete: false, Items: s.fallbackCompletionItems(docStr)}, nil)
}
// extractTriggerInfo returns the LSP completion TriggerKind and
// TriggerCharacter if provided by the client; when absent it returns zeros.
func extractTriggerInfo(p CompletionParams) (kind int, ch string) {
	if p.Context == nil {
		return 0, ""
	}
	var ctx struct {
		TriggerKind      int    `json:"triggerKind"`
		TriggerCharacter string `json:"triggerCharacter,omitempty"`
	}
	// The context may be raw JSON or an already-decoded value; errors are
	// ignored and leave the zero values in place.
	switch v := p.Context.(type) {
	case json.RawMessage:
		_ = json.Unmarshal(v, &ctx)
	default:
		b, _ := json.Marshal(v)
		_ = json.Unmarshal(b, &ctx)
	}
	return ctx.TriggerKind, ctx.TriggerCharacter
}
// --- completion helpers ---
// buildDocString renders the surrounding-line context into the human-readable
// text attached to completion items as documentation; long fields are
// truncated via trimLen.
func (s *Server) buildDocString(p CompletionParams, above, current, below, funcCtx string) string {
	const layout = "file: %s\nline: %d\nabove: %s\ncurrent: %s\nbelow: %s\nfunction: %s"
	return fmt.Sprintf(layout,
		p.TextDocument.URI, p.Position.Line,
		trimLen(above), trimLen(current), trimLen(below), trimLen(funcCtx))
}
// logCompletionContext logs the surrounding-line context of a completion
// request; only called when the logContext flag is enabled.
func (s *Server) logCompletionContext(p CompletionParams, above, current, below, funcCtx string) {
	const format = "completion ctx uri=%s line=%d char=%d above=%q current=%q below=%q function=%q"
	logging.Logf("lsp ", format,
		p.TextDocument.URI, p.Position.Line, p.Position.Character,
		trimLen(above), trimLen(current), trimLen(below), trimLen(funcCtx))
}
// tryLLMCompletion runs the LLM completion pipeline for one request and
// returns (items, ok, isIncomplete). ok=false tells the caller to use the
// fallback. With multiple provider specs, the first successful result is
// returned immediately with isIncomplete=true while the rest are collected in
// the background and stashed for the client's follow-up request.
func (s *Server) tryLLMCompletion(p CompletionParams, above, current, below, funcCtx, docStr string, hasExtra bool, extraText string) ([]CompletionItem, bool, bool) {
	ctx, cancel := context.WithTimeout(context.Background(), 12*time.Second)
	// cancel must run exactly once, but may be reached from several exits and
	// from the background collector goroutine — guard it with a sync.Once.
	var cancelOnce sync.Once
	end := func() { cancelOnce.Do(cancel) }
	plan, items, handled := s.prepareCompletionPlan(p, above, current, below, funcCtx, docStr, hasExtra, extraText)
	if handled {
		// The plan phase already settled the request (skip, suppression, or
		// previously stashed pending items); no LLM round-trip needed.
		end()
		return items, true, false
	}
	specs := s.buildRequestSpecs(surfaceCompletion)
	if len(specs) == 0 {
		end()
		return nil, false, false
	}
	type jobResult struct {
		items []CompletionItem
		ok    bool
	}
	// Buffered to capacity so worker goroutines never block on send.
	results := make(chan jobResult, len(specs))
	var wg sync.WaitGroup
	started := 0
	// Debounce first, then honor the rate limiter; bail out when throttled.
	s.waitForDebounce(ctx)
	if !s.waitForThrottle(ctx) {
		end()
		close(results)
		return nil, false, false
	}
	for _, spec := range specs {
		spec := spec // per-iteration copy captured by the goroutine below
		client := s.clientFor(spec)
		if client == nil {
			continue
		}
		started++
		wg.Add(1)
		go func(idx int, spec requestSpec, client llm.Client) {
			defer wg.Done()
			items, ok := s.runCompletionForSpec(ctx, plan, spec, client)
			results <- jobResult{items: items, ok: ok}
		}(spec.index, spec, client)
	}
	if started == 0 {
		end()
		close(results)
		return nil, false, false
	}
	// Close results once all workers have reported.
	go func() {
		wg.Wait()
		close(results)
	}()
	if started == 1 {
		// Single provider: wait synchronously; no follow-up round needed.
		res := <-results
		if !res.ok || len(res.items) == 0 {
			end()
			return nil, false, false
		}
		end()
		return res.items, true, false
	}
	// Multiple providers: forward the first success right away and keep
	// collecting the remainder in the background.
	firstCh := make(chan []CompletionItem, 1)
	go func(planKey string) {
		defer end()
		combined := make([]CompletionItem, 0)
		firstSent := false
		for res := range results {
			if !res.ok || len(res.items) == 0 {
				continue
			}
			combined = append(combined, res.items...)
			if !firstSent {
				// Copy before sending: combined keeps growing afterwards.
				first := make([]CompletionItem, len(res.items))
				copy(first, res.items)
				firstCh <- first
				firstSent = true
			}
		}
		if !firstSent {
			// No provider succeeded; closing firstCh signals failure below.
			close(firstCh)
			return
		}
		// Stash the full combined set keyed by plan for the follow-up
		// request the client issues because of IsIncomplete=true.
		s.storePendingCompletion(planKey, combined)
		close(firstCh)
	}(plan.cacheKey)
	firstItems, ok := <-firstCh
	if !ok || len(firstItems) == 0 {
		end()
		return nil, false, false
	}
	return firstItems, true, true
}
// prepareCompletionPlan assembles the completionPlan and applies the cheap
// pre-LLM gates (trigger detection, chat-trigger suppression, pending-result
// reuse, bare-marker and short-prefix heuristics). The bool result reports
// whether the request was fully handled here; in that case the returned items
// (possibly empty) are the final answer and no LLM call is made.
func (s *Server) prepareCompletionPlan(p CompletionParams, above, current, below, funcCtx, docStr string, hasExtra bool, extraText string) (completionPlan, []CompletionItem, bool) {
	plan := completionPlan{
		params:    p,
		above:     above,
		current:   current,
		below:     below,
		funcCtx:   funcCtx,
		docStr:    docStr,
		hasExtra:  hasExtra,
		extraText: extraText,
	}
	openStr, _, openChar, closeChar := s.inlineMarkers()
	plan.inlinePrompt = lineHasInlinePrompt(current, openStr, openChar, closeChar)
	// An inline prompt bypasses trigger detection; otherwise a trigger event
	// is required.
	if !plan.inlinePrompt && !s.isTriggerEvent(p, current) {
		logging.Logf("lsp ", "%scompletion skip=no-trigger line=%d char=%d current=%q%s", logging.AnsiYellow, p.Position.Line, p.Position.Character, trimLen(current), logging.AnsiBase)
		return plan, []CompletionItem{}, true
	}
	if s.shouldSuppressForChatTriggerEOL(current, p) {
		return plan, []CompletionItem{}, true
	}
	plan.inParams = inParamList(current, p.Position.Character)
	plan.manualInvoke = parseManualInvoke(p.Context)
	plan.cacheKey = s.completionCacheKey(p, above, current, below, funcCtx, plan.inParams, hasExtra, extraText)
	// Serve results collected in the background by a previous multi-provider
	// run (the follow-up request triggered by IsIncomplete=true).
	if pending := s.takePendingCompletion(plan.cacheKey); len(pending) > 0 {
		return plan, pending, true
	}
	// A bare double-open marker on this or the next line means the user has
	// not finished typing a prompt yet.
	// NOTE(review): the log tag says "empty-double-semicolon" while the check
	// is marker-agnostic — possibly a stale label from an earlier syntax.
	if isBareDoubleOpen(current, openStr, openChar, closeChar) || isBareDoubleOpen(below, openStr, openChar, closeChar) {
		logging.Logf("lsp ", "%scompletion skip=empty-double-semicolon line=%d char=%d current=%q%s", logging.AnsiYellow, p.Position.Line, p.Position.Character, trimLen(current), logging.AnsiBase)
		return plan, []CompletionItem{}, true
	}
	// Outside parameter lists, require a sufficiently long prefix unless the
	// heuristic (inline prompt / manual invoke) allows otherwise.
	if !plan.inParams && !s.prefixHeuristicAllows(plan.inlinePrompt, current, p, plan.manualInvoke) {
		logging.Logf("lsp ", "%scompletion skip=short-prefix line=%d char=%d current=%q%s", logging.AnsiYellow, p.Position.Line, p.Position.Character, trimLen(current), logging.AnsiBase)
		return plan, []CompletionItem{}, true
	}
	return plan, nil, false
}
// runCompletionForSpec produces completion items for one provider/model spec.
// It serves from the per-spec cache when possible, then tries provider-native
// code completion, and finally falls back to chat-based completion. sortPrefix
// encodes the spec's configured position so merged results keep their order.
func (s *Server) runCompletionForSpec(ctx context.Context, plan completionPlan, spec requestSpec, client llm.Client) ([]CompletionItem, bool) {
	sortPrefix := fmt.Sprintf("%04d", spec.index)
	modelKey := spec.effectiveModel(client.DefaultModel())
	providerKey := spec.provider
	if providerKey == "" {
		providerKey = canonicalProvider(client.Name())
	}
	// Cache is keyed per provider+model so different specs do not collide.
	cacheKey := plan.cacheKey + "|" + providerKey + ":" + modelKey
	if cached, ok := s.completionCacheGet(cacheKey); ok && strings.TrimSpace(cached) != "" {
		logging.Logf("lsp ", "completion cache hit uri=%s line=%d char=%d preview=%s%s%s",
			plan.params.TextDocument.URI, plan.params.Position.Line, plan.params.Position.Character,
			logging.AnsiGreen, logging.PreviewForLog(cached), logging.AnsiBase)
		detail := fmt.Sprintf("Hexai %s:%s", client.Name(), modelKey)
		items := s.makeCompletionItems(cached, plan.inParams, plan.current, plan.params, plan.docStr, detail, sortPrefix)
		return items, true
	}
	if items, ok := s.tryProviderNativeCompletion(ctx, plan, spec, client, sortPrefix); ok {
		return items, true
	}
	return s.executeChatCompletion(ctx, plan, spec, client, sortPrefix)
}
// executeChatCompletion runs chat-based completion for one spec: builds the
// prompt messages, calls the LLM, post-processes and caches the reply, and
// converts it into completion items. Returns false when the request failed or
// nothing usable remained after cleanup.
func (s *Server) executeChatCompletion(ctx context.Context, plan completionPlan, spec requestSpec, client llm.Client, sortPrefix string) ([]CompletionItem, bool) {
	messages := s.buildCompletionMessages(plan.inlinePrompt, plan.hasExtra, plan.extraText, plan.inParams, plan.params, plan.above, plan.current, plan.below, plan.funcCtx)
	// Track outgoing payload size for local traffic stats.
	sentSize := 0
	for _, m := range messages {
		sentSize += len(m.Content)
	}
	s.incSentCounters(sentSize)
	text, err := client.Chat(ctx, messages, spec.options...)
	if err != nil {
		logging.Logf("lsp ", "llm completion error: %v", err)
		s.logLLMStats("")
		return nil, false
	}
	s.incRecvCounters(len(text))
	// Best-effort update of the cross-process stats cache.
	modelUsed := spec.effectiveModel(client.DefaultModel())
	_ = stats.Update(ctx, client.Name(), modelUsed, sentSize, len(text))
	s.logLLMStats(modelUsed)
	trimmed := strings.TrimSpace(text)
	cleaned := s.postProcessCompletion(trimmed, plan.current[:plan.params.Position.Character], plan.current)
	if cleaned == "" {
		return nil, false
	}
	detail := fmt.Sprintf("Hexai %s:%s", client.Name(), modelUsed)
	providerKey := spec.provider
	if providerKey == "" {
		providerKey = canonicalProvider(client.Name())
	}
	// Same provider+model cache key scheme as runCompletionForSpec.
	cacheKey := plan.cacheKey + "|" + providerKey + ":" + modelUsed
	s.completionCachePut(cacheKey, cleaned)
	items := s.makeCompletionItems(cleaned, plan.inParams, plan.current, plan.params, plan.docStr, detail, sortPrefix)
	return items, true
}
// parseManualInvoke inspects the LSP completion context and reports whether
// the user manually invoked completion (triggerKind == 1 per the LSP spec).
func parseManualInvoke(ctx any) bool {
	if ctx == nil {
		return false
	}
	var payload struct {
		TriggerKind int `json:"triggerKind"`
	}
	raw, isRaw := ctx.(json.RawMessage)
	if !isRaw {
		// Context arrived as a decoded value; round-trip it through JSON.
		encoded, _ := json.Marshal(ctx)
		raw = encoded
	}
	_ = json.Unmarshal(raw, &payload)
	return payload.TriggerKind == 1
}
// shouldSuppressForChatTriggerEOL reports whether the line ends with the chat
// trigger suffix immediately preceded by a configured prefix character (e.g.
// "?>"), in which case completion is suppressed so the chat flow can answer.
func (s *Server) shouldSuppressForChatTriggerEOL(current string, p CompletionParams) bool {
	t := strings.TrimRight(current, " \t")
	suffix, prefixes, _ := s.chatConfig()
	if suffix == "" {
		return false
	}
	if strings.HasSuffix(t, suffix) {
		// Need at least one character before the suffix to hold the prefix.
		if len(t) < len(suffix)+1 {
			return false
		}
		// NOTE(review): prev is taken byte-wise; this assumes prefixes are
		// single ASCII characters — confirm against chatConfig.
		prev := string(t[len(t)-len(suffix)-1])
		for _, pf := range prefixes {
			if prev == pf {
				logging.Logf("lsp ", "completion skip=chat-trigger-eol uri=%s line=%d", p.TextDocument.URI, p.Position.Line)
				return true
			}
		}
	}
	return false
}
// prefixHeuristicAllows applies minimal prefix rules unless inlinePrompt or
// structural triggers apply: completion runs only when the word left of the
// cursor has at least min characters (1, or the configured manual-invoke
// minimum). NOTE(review): offsets are treated as byte indexes while LSP
// positions are UTF-16 code units — confirm behavior on multi-byte lines.
func (s *Server) prefixHeuristicAllows(inlinePrompt bool, current string, p CompletionParams, manualInvoke bool) bool {
	// Determine the effective cursor index within current line, clamped, and
	// skip over trailing spaces/tabs to support cases like "type Matrix| ".
	idx := p.Position.Character
	if idx > len(current) {
		idx = len(current)
	}
	// Structural characters immediately left of the cursor always allow.
	allowNoPrefix := inlinePrompt
	if idx > 0 {
		ch := current[idx-1]
		if ch == '.' || ch == ':' || ch == '/' || ch == '_' || ch == ')' {
			allowNoPrefix = true
		}
	}
	if allowNoPrefix {
		return true
	}
	// Walk left over whitespace
	j := idx
	for j > 0 {
		c := current[j-1]
		if c == ' ' || c == '\t' {
			j--
			continue
		}
		break
	}
	start := computeWordStart(current, j)
	min := 1
	if manualInvoke {
		// A configured manual-invoke minimum (>= 0) overrides the default.
		if v := s.manualInvokeMinPrefix(); v >= 0 {
			min = v
		}
	}
	return j-start >= min
}
// tryProviderNativeCompletion attempts provider-native completion when the
// client implements llm.CodeCompleter. It prompts with the document prefix
// (via the native-completion template) and suffix, cleans and re-indents the
// first suggestion, caches it, and returns items. Returns (nil, false) so the
// caller falls back to chat completion on unsupported clients, errors, or
// empty results.
func (s *Server) tryProviderNativeCompletion(ctx context.Context, plan completionPlan, spec requestSpec, client llm.Client, sortPrefix string) ([]CompletionItem, bool) {
	cc, ok := client.(llm.CodeCompleter)
	if !ok {
		return nil, false
	}
	current := plan.current
	p := plan.params
	// Split the whole document at the cursor for prefix/suffix prompting.
	before, after := s.docBeforeAfter(p.TextDocument.URI, p.Position)
	path := strings.TrimPrefix(p.TextDocument.URI, "file://")
	cfg := s.currentConfig()
	openStr, _, openChar, closeChar := s.inlineMarkers()
	prompt := renderTemplate(cfg.PromptNativeCompletion, map[string]string{
		"path":   path,
		"before": before,
	})
	provider := spec.provider
	if provider == "" {
		provider = canonicalProvider(cfg.Provider)
	}
	logging.Logf("lsp ", "completion path=codex provider=%s uri=%s", provider, path)
	// Bound the native call; on timeout or error we fall back to chat.
	ctx2, cancel2 := context.WithTimeout(ctx, 15*time.Second)
	defer cancel2()
	sentBytes := len(prompt) + len(after)
	modelUsed := spec.effectiveModel(client.DefaultModel())
	tempVal := 0.0
	if val, ok := chooseSurfaceTemperature(surfaceCompletion, cfg, spec.entry, provider, modelUsed); ok {
		tempVal = val
	}
	suggestions, err := cc.CodeCompletion(ctx2, prompt, after, 1, "", tempVal)
	if err != nil || len(suggestions) == 0 {
		if err != nil {
			logging.Logf("lsp ", "completion path=codex error=%v (falling back)", err)
		}
		return nil, false
	}
	// Account for traffic only after a successful call.
	s.incSentCounters(sentBytes)
	s.incRecvCounters(len(suggestions[0]))
	_ = stats.Update(ctx2, client.Name(), modelUsed, sentBytes, len(suggestions[0]))
	s.logLLMStats(modelUsed)
	cleaned := strings.TrimSpace(suggestions[0])
	if cleaned == "" {
		return nil, false
	}
	// Drop text that merely repeats what is already left of the cursor.
	cleaned = stripDuplicateAssignmentPrefix(current[:p.Position.Character], cleaned)
	if cleaned == "" {
		return nil, false
	}
	cleaned = stripDuplicateGeneralPrefix(current[:p.Position.Character], cleaned)
	if cleaned == "" {
		return nil, false
	}
	// Re-indent multi-line suggestions triggered via a double-open marker.
	if strings.TrimSpace(cleaned) != "" && hasDoubleOpenTrigger(current, openStr, openChar, closeChar) {
		indent := leadingIndent(current)
		if indent != "" {
			cleaned = applyIndent(indent, cleaned)
		}
	}
	if strings.TrimSpace(cleaned) == "" {
		return nil, false
	}
	detail := fmt.Sprintf("Hexai %s:%s", client.Name(), modelUsed)
	providerKey := provider
	if providerKey == "" {
		providerKey = canonicalProvider(client.Name())
	}
	// Cache under the same provider+model key used by runCompletionForSpec.
	cacheKey := plan.cacheKey + "|" + providerKey + ":" + modelUsed
	s.completionCachePut(cacheKey, cleaned)
	items := s.makeCompletionItems(cleaned, plan.inParams, current, p, plan.docStr, detail, sortPrefix)
	return items, true
}
// waitForDebounce sleeps until there has been no input activity for at least
// completionDebounce. If debounce is zero or ctx is done, it returns promptly.
func (s *Server) waitForDebounce(ctx context.Context) {
	debounce := s.completionDebounce()
	if debounce <= 0 {
		return
	}
	for {
		s.mu.RLock()
		lastInput := s.lastInput
		s.mu.RUnlock()
		if lastInput.IsZero() {
			return
		}
		elapsed := time.Since(lastInput)
		if elapsed >= debounce {
			return
		}
		// Sleep for the remainder, then re-check in case more input arrived.
		timer := time.NewTimer(debounce - elapsed)
		select {
		case <-ctx.Done():
			timer.Stop()
			return
		case <-timer.C:
		}
	}
}
// waitForThrottle enforces a minimum spacing between LLM calls. Returns false
// if the context is canceled while waiting.
func (s *Server) waitForThrottle(ctx context.Context) bool {
	interval := s.completionThrottle()
	if interval <= 0 {
		return true
	}
	var wait time.Duration
	// Check-and-set loop: under the lock, either record this call time now or
	// compute the remaining wait, sleep unlocked, and retry. The retry is
	// required because a concurrent caller may claim the slot while we sleep.
	for {
		s.mu.Lock()
		next := s.lastLLMCall.Add(interval)
		now := time.Now()
		if now.Before(next) {
			wait = next.Sub(now)
			s.mu.Unlock()
			timer := time.NewTimer(wait)
			select {
			case <-ctx.Done():
				timer.Stop()
				return false
			case <-timer.C:
				// try again to set the next call time
				continue
			}
		}
		// we are allowed to proceed now; record this call as the latest
		s.lastLLMCall = now
		s.mu.Unlock()
		return true
	}
}
// buildCompletionMessages constructs the system and user messages sent to the
// LLM for a completion request, selecting params- or general-mode prompts,
// optionally overriding the system prompt for inline prompts, and appending
// extra context as a trailing user message when present.
func (s *Server) buildCompletionMessages(inlinePrompt, hasExtra bool, extraText string, inParams bool, p CompletionParams, above, current, below, funcCtx string) []llm.Message {
	cfg := s.currentConfig()
	systemPrompt := cfg.PromptCompletionSystemGeneral
	userTemplate := cfg.PromptCompletionUserGeneral
	if inParams {
		systemPrompt = cfg.PromptCompletionSystemParams
		userTemplate = cfg.PromptCompletionUserParams
	}
	if inlinePrompt && strings.TrimSpace(cfg.PromptCompletionSystemInline) != "" {
		systemPrompt = cfg.PromptCompletionSystemInline
	}
	templateVars := map[string]string{
		"file":     p.TextDocument.URI,
		"function": funcCtx,
		"above":    above,
		"current":  current,
		"below":    below,
		"char":     fmt.Sprintf("%d", p.Position.Character),
	}
	out := []llm.Message{
		{Role: "system", Content: systemPrompt},
		{Role: "user", Content: renderTemplate(userTemplate, templateVars)},
	}
	if !hasExtra || strings.TrimSpace(extraText) == "" {
		return out
	}
	// Wrap the extra context in the configured header; fall back to raw text.
	extraMsg := renderTemplate(cfg.PromptCompletionExtraHeader, map[string]string{"context": extraText})
	if strings.TrimSpace(extraMsg) == "" {
		extraMsg = extraText
	}
	return append(out, llm.Message{Role: "user", Content: extraMsg})
}
// postProcessCompletion normalizes an LLM completion: strips code fences and
// inline code spans, removes text duplicating what is already left of the
// cursor, and re-indents when the line carries a double-open trigger.
func (s *Server) postProcessCompletion(text string, leftOfCursor string, currentLine string) string {
	out := stripCodeFences(text)
	if out != "" && strings.ContainsRune(out, '`') {
		// Unwrap `inline code spans` unless that would leave nothing.
		if span := stripInlineCodeSpan(out); strings.TrimSpace(span) != "" {
			out = span
		}
	}
	if out != "" {
		out = stripDuplicateAssignmentPrefix(leftOfCursor, out)
	}
	if out != "" {
		out = stripDuplicateGeneralPrefix(leftOfCursor, out)
	}
	openStr, _, openChar, closeChar := s.inlineMarkers()
	if out != "" && hasDoubleOpenTrigger(currentLine, openStr, openChar, closeChar) {
		if indent := leadingIndent(currentLine); indent != "" {
			out = applyIndent(indent, out)
		}
	}
	return out
}
// Summary: Document open/change/close and in-editor chat handlers split out of handlers.go.
package lsp
import (
"context"
"encoding/json"
"strings"
"time"
"codeberg.org/snonux/hexai/internal/llm"
"codeberg.org/snonux/hexai/internal/logging"
)
// handleDidOpen caches the opened document's text and records input activity.
func (s *Server) handleDidOpen(req Request) {
	var params DidOpenTextDocumentParams
	if err := json.Unmarshal(req.Params, &params); err != nil {
		return
	}
	s.setDocument(params.TextDocument.URI, params.TextDocument.Text)
	s.markActivity()
}
// handleDidChange stores the latest full document text (the server advertises
// full sync), records input activity, and scans for in-editor chat triggers.
func (s *Server) handleDidChange(req Request) {
	var params DidChangeTextDocumentParams
	if err := json.Unmarshal(req.Params, &params); err != nil {
		return
	}
	if n := len(params.ContentChanges); n > 0 {
		// With full sync, only the last change carries the complete text.
		s.setDocument(params.TextDocument.URI, params.ContentChanges[n-1].Text)
	}
	s.markActivity()
	// Detect in-editor chat trigger lines and respond inline.
	s.detectAndHandleChat(params.TextDocument.URI)
}
// handleDidClose drops the closed document from the cache and records activity.
func (s *Server) handleDidClose(req Request) {
	var params DidCloseTextDocumentParams
	if err := json.Unmarshal(req.Params, &params); err != nil {
		return
	}
	s.deleteDocument(params.TextDocument.URI)
	s.markActivity()
}
// docBeforeAfter returns the full document text split at the given position.
// The returned strings are the text before the cursor (inclusive of anything
// left of the position) and the text after the cursor. Line and column are
// clamped into the document's bounds; an unknown or empty document yields two
// empty strings.
func (s *Server) docBeforeAfter(uri string, pos Position) (string, string) {
	d := s.getDocument(uri)
	// Guard the empty-lines case: clamping below would otherwise compute
	// line = -1 and panic on d.lines[line].
	if d == nil || len(d.lines) == 0 {
		return "", ""
	}
	// Clamp indices
	line := pos.Line
	if line < 0 {
		line = 0
	}
	if line >= len(d.lines) {
		line = len(d.lines) - 1
	}
	col := pos.Character
	if col < 0 {
		col = 0
	}
	if col > len(d.lines[line]) {
		col = len(d.lines[line])
	}
	// Build before: all full lines above, then the current line up to col.
	var b strings.Builder
	for i := 0; i < line; i++ {
		b.WriteString(d.lines[i])
		b.WriteByte('\n')
	}
	b.WriteString(d.lines[line][:col])
	before := b.String()
	// Build after: the rest of the current line, then all lines below.
	var a strings.Builder
	a.WriteString(d.lines[line][col:])
	for i := line + 1; i < len(d.lines); i++ {
		a.WriteByte('\n')
		a.WriteString(d.lines[i])
	}
	return before, a.String()
}
// --- in-editor chat (prefix+suffix trigger pairs such as "?>" ",>" ":>" ";>") ---
// detectAndHandleChat scans the current document for any line that ends with a
// chat trigger pair (configured prefix char followed by the suffix) or carries
// an inline prompt marker, and inserts the LLM reply below. At most one chat
// trigger is handled per change notification.
func (s *Server) detectAndHandleChat(uri string) {
	d := s.getDocument(uri)
	if d == nil || len(d.lines) == 0 {
		return
	}
	suffix, prefixes, _ := s.chatConfig()
	openStr, _, openChar, closeChar := s.inlineMarkers()
	for i, raw := range d.lines {
		// Inline prompt markers are handled by the completion-style flow.
		if lineHasInlinePrompt(raw, openStr, openChar, closeChar) {
			if s.currentLLMClient() != nil {
				pos := Position{Line: i, Character: len(raw)}
				go s.runInlinePrompt(uri, pos)
			}
			continue
		}
		// Find last non-space character index
		j := len(raw) - 1
		for j >= 0 {
			if raw[j] == ' ' || raw[j] == '\t' {
				j--
				continue
			}
			break
		}
		if j < 0 {
			continue
		}
		// Check suffix and derive the prompt text before validating prefixes
		if suffix == "" {
			continue
		}
		// NOTE(review): only the final byte is compared against the suffix, so
		// this effectively assumes a single-byte suffix — confirm chatConfig.
		if string(raw[j]) != suffix {
			continue
		}
		removeCount := len(suffix)
		base := raw[:j+1-removeCount]
		prompt := strings.TrimSpace(base)
		if prompt == "" {
			continue
		}
		// Slash commands (`/foo>`) do not require a prefix trigger.
		isCommand := strings.HasPrefix(prompt, "/")
		if !isCommand {
			// Require at least one char before suffix and that char must be in chatPrefixes
			if j < 1 {
				continue
			}
			prev := string(raw[j-1])
			match := false
			for _, pfx := range prefixes {
				if prev == pfx {
					match = true
					break
				}
			}
			if !match {
				continue
			}
		}
		// Avoid double-answering: if the next non-empty line starts with '>' we skip.
		k := i + 1
		for k < len(d.lines) && strings.TrimSpace(d.lines[k]) == "" {
			k++
		}
		if k < len(d.lines) && strings.HasPrefix(strings.TrimSpace(d.lines[k]), ">") {
			continue
		}
		lineIdx := i
		lastIdx := j
		// Built-in slash commands are answered synchronously, without the LLM.
		if resp, ok := s.chatCommandResponse(uri, lineIdx, prompt); ok {
			msg := strings.TrimSpace(resp.message)
			if msg != "" {
				s.applyChatEdits(uri, lineIdx, lastIdx, removeCount, "> "+msg)
			}
			return
		}
		// Everything else goes to the LLM asynchronously with a hard timeout.
		go func(prompt string, remove int) {
			ctx, cancel := context.WithTimeout(context.Background(), 25*time.Second)
			defer cancel()
			// Build messages with history and context_mode aware extras.
			pos := Position{Line: lineIdx, Character: lastIdx + 1}
			msgs := s.buildChatMessages(uri, pos, prompt)
			spec := s.buildRequestSpec(surfaceChat)
			client := s.clientFor(spec)
			if client == nil {
				return
			}
			modelUsed := spec.effectiveModel(client.DefaultModel())
			logging.Logf("lsp ", "chat llm=requesting model=%s", modelUsed)
			text, err := s.chatWithStats(ctx, surfaceChat, spec, msgs)
			if err != nil {
				logging.Logf("lsp ", "chat llm error: %v", err)
				return
			}
			out := strings.TrimSpace(stripCodeFences(text))
			if out == "" {
				return
			}
			s.applyChatEdits(uri, lineIdx, lastIdx, remove, "> "+out)
		}(prompt, removeCount)
		// Only handle one per change tick to avoid flooding
		break
	}
}
// applyChatEdits removes the triggering punctuation at end of the line and
// inserts two newlines followed by a new line with the response prefixed.
// lastNonSpace is the index of the last non-space byte on the trigger line;
// removeCount is the trigger's length in bytes.
func (s *Server) applyChatEdits(uri string, lineIdx int, lastNonSpace int, removeCount int, response string) {
	d := s.getDocument(uri)
	if d == nil {
		return
	}
	// 1) Delete the trailing punctuation (1 or 2 chars)
	delStart := Position{Line: lineIdx, Character: lastNonSpace + 1 - removeCount}
	delEnd := Position{Line: lineIdx, Character: lastNonSpace + 1}
	// 2) Insert two newlines and the response at end-of-line, then one extra blank line
	insPos := Position{Line: lineIdx, Character: len(d.lines[lineIdx])}
	resp := strings.TrimRight(response, "\n") + "\n"
	insert := "\n\n" + resp + "\n"
	edits := []TextEdit{
		{Range: Range{Start: delStart, End: delEnd}, NewText: ""},
		{Range: Range{Start: insPos, End: insPos}, NewText: insert},
	}
	we := WorkspaceEdit{Changes: map[string][]TextEdit{uri: edits}}
	s.clientApplyEdit("Hexai: insert chat response", we)
}
// runInlinePrompt resolves an inline prompt marker on the given line by
// running the completion pipeline with a synthetic manual-invoke request and
// applying the first resulting item directly as a workspace edit.
func (s *Server) runInlinePrompt(uri string, pos Position) {
	if s.currentLLMClient() == nil {
		return
	}
	d := s.getDocument(uri)
	if d == nil || pos.Line < 0 || pos.Line >= len(d.lines) {
		return
	}
	line := d.lines[pos.Line]
	openStr, _, openChar, closeChar := s.inlineMarkers()
	// Re-validate: the document may have changed since this goroutine started.
	if !lineHasInlinePrompt(line, openStr, openChar, closeChar) {
		return
	}
	// triggerKind 1 = manual invoke, which bypasses the short-prefix heuristic.
	p := CompletionParams{TextDocument: TextDocumentIdentifier{URI: uri}, Position: Position{Line: pos.Line, Character: len(line)}}
	p.Context = map[string]int{"triggerKind": 1}
	above, current, below, funcCtx := s.lineContext(uri, p.Position)
	docStr := s.buildDocString(p, above, current, below, funcCtx)
	newFunc := s.isDefiningNewFunction(uri, p.Position)
	extra, hasExtra := s.buildAdditionalContext(newFunc, uri, p.Position)
	items, ok, _ := s.tryLLMCompletion(p, above, current, below, funcCtx, docStr, hasExtra, extra)
	if !ok || len(items) == 0 {
		return
	}
	s.applyInlineCompletion(uri, items[0])
}
// applyInlineCompletion converts a completion item's edits (additional edits
// first, then the primary TextEdit) into a workspace/applyEdit request.
func (s *Server) applyInlineCompletion(uri string, item CompletionItem) {
	var pending []TextEdit
	pending = append(pending, item.AdditionalTextEdits...)
	if item.TextEdit != nil {
		pending = append(pending, *item.TextEdit)
	}
	if len(pending) == 0 {
		return
	}
	s.clientApplyEdit("Hexai: inline prompt", WorkspaceEdit{Changes: map[string][]TextEdit{uri: pending}})
}
// buildChatHistory walks upwards from the current line to collect the most
// recent Q/A pairs (at most 3) in the in-editor transcript. Reply lines are
// those prefixed with ">"; the non-blank line above a reply block is taken as
// its question. Returns messages ending with the current prompt.
func (s *Server) buildChatHistory(uri string, lineIdx int, currentPrompt string) []llm.Message {
	d := s.getDocument(uri)
	if d == nil {
		return []llm.Message{{Role: "user", Content: currentPrompt}}
	}
	type pair struct{ q, a string }
	pairs := []pair{}
	i := lineIdx - 1
	for i >= 0 && len(pairs) < 3 {
		// Skip blank lines directly above.
		for i >= 0 && strings.TrimSpace(d.lines[i]) == "" {
			i--
		}
		if i < 0 {
			break
		}
		// History must be contiguous reply blocks; stop at the first non-reply.
		if !strings.HasPrefix(strings.TrimSpace(d.lines[i]), ">") {
			break
		}
		// Collect the whole multi-line reply; prepend to preserve top-to-bottom order.
		var replyLines []string
		for i >= 0 {
			line := strings.TrimSpace(d.lines[i])
			if strings.HasPrefix(line, ">") {
				replyLines = append([]string{strings.TrimSpace(strings.TrimPrefix(line, ">"))}, replyLines...)
				i--
				continue
			}
			break
		}
		// Skip blanks between the reply block and its question line.
		for i >= 0 && strings.TrimSpace(d.lines[i]) == "" {
			i--
		}
		if i < 0 {
			break
		}
		q := strings.TrimSpace(d.lines[i])
		q = s.stripTrailingTrigger(q)
		// Prepend so the oldest pair ends up first in the transcript.
		pairs = append([]pair{{q: q, a: strings.Join(replyLines, "\n")}}, pairs...)
		i--
	}
	msgs := make([]llm.Message, 0, len(pairs)*2+1)
	for _, p := range pairs {
		if strings.TrimSpace(p.q) != "" {
			msgs = append(msgs, llm.Message{Role: "user", Content: p.q})
		}
		if strings.TrimSpace(p.a) != "" {
			msgs = append(msgs, llm.Message{Role: "assistant", Content: p.a})
		}
	}
	msgs = append(msgs, llm.Message{Role: "user", Content: currentPrompt})
	return msgs
}
// stripTrailingTrigger removes the trailing chat trigger punctuation from a
// line if present: either the configured prefix+suffix pair (e.g. "?>") or a
// bare '?', '!', or ':' at end of line. Returns sx unchanged otherwise.
func (s *Server) stripTrailingTrigger(sx string) string {
	trim := strings.TrimRight(sx, " \t")
	if len(trim) == 0 {
		return sx
	}
	_, prefixes, suffixChar := s.chatConfig()
	// Configured pair: last byte is the suffix char and the byte before it is
	// one of the prefixes. NOTE(review): byte-wise comparison assumes ASCII
	// trigger characters — confirm against chatConfig.
	if len(trim) >= 2 && suffixChar != 0 && trim[len(trim)-1] == suffixChar {
		prev := string(trim[len(trim)-2])
		for _, pf := range prefixes {
			if prev == pf {
				return strings.TrimRight(trim[:len(trim)-1], " \t")
			}
		}
	}
	// Fallback: strip common question/imperative punctuation.
	last := trim[len(trim)-1]
	switch last {
	case '?', '!', ':':
		return strings.TrimRight(trim[:len(trim)-1], " \t")
	default:
		return sx
	}
}
// buildChatMessages assembles the chat request messages: the configured system
// prompt first, then optional extra context (per general.context_mode), then
// the rolling in-editor history, which always ends with the current prompt.
func (s *Server) buildChatMessages(uri string, pos Position, prompt string) []llm.Message {
	cfg := s.currentConfig()
	history := s.buildChatHistory(uri, pos.Line, prompt)
	out := []llm.Message{{Role: "system", Content: cfg.PromptChatSystem}}
	// Insert extra context before the history so the prompt stays last.
	newFunc := s.isDefiningNewFunction(uri, pos)
	if extra, has := s.buildAdditionalContext(newFunc, uri, pos); has && strings.TrimSpace(extra) != "" {
		// Reuse the completion extra-header template; fall back to raw context.
		header := renderTemplate(cfg.PromptCompletionExtraHeader, map[string]string{"context": extra})
		if strings.TrimSpace(header) == "" {
			header = extra
		}
		out = append(out, llm.Message{Role: "user", Content: header})
	}
	return append(out, history...)
}
// clientApplyEdit sends a workspace/applyEdit request to the client.
func (s *Server) clientApplyEdit(label string, edit WorkspaceEdit) {
	payload, _ := json.Marshal(ApplyWorkspaceEditParams{Label: label, Edit: edit})
	req := Request{JSONRPC: "2.0", ID: s.nextReqID(), Method: "workspace/applyEdit"}
	req.Params = payload
	s.writeMessage(req)
}
// nextReqID returns a unique json.RawMessage id for server-initiated requests.
func (s *Server) nextReqID() json.RawMessage {
	s.mu.Lock()
	defer s.mu.Unlock()
	s.nextID++
	raw, _ := json.Marshal(s.nextID)
	return raw
}
// clientShowDocument asks the client to open/focus a document and select a range.
func (s *Server) clientShowDocument(uri string, sel *Range) {
	type showDocumentParams struct {
		URI       string `json:"uri"`
		External  bool   `json:"external,omitempty"`
		TakeFocus bool   `json:"takeFocus,omitempty"`
		Selection *Range `json:"selection,omitempty"`
	}
	payload, _ := json.Marshal(showDocumentParams{URI: uri, TakeFocus: true, Selection: sel})
	req := Request{JSONRPC: "2.0", ID: s.nextReqID(), Method: "window/showDocument"}
	req.Params = payload
	s.writeMessage(req)
}
// deferShowDocument schedules a showDocument after a short delay to give the
// client time to apply pending edits (e.g. create the file before focusing it).
func (s *Server) deferShowDocument(uri string, sel Range) {
	go func(target Range) {
		time.Sleep(120 * time.Millisecond)
		s.clientShowDocument(uri, &target)
	}(sel)
}
// Summary: ExecuteCommand handler to support post-edit navigation (jump to generated test).
package lsp
import (
"encoding/json"
)
// handleExecuteCommand dispatches workspace/executeCommand requests. Only
// "hexai.showDocument" (arguments: uri string, range) is supported; every
// other command — and malformed params — is acknowledged with a null result
// so clients do not surface errors.
func (s *Server) handleExecuteCommand(req Request) {
	var p ExecuteCommandParams
	if err := json.Unmarshal(req.Params, &p); err != nil {
		s.reply(req.ID, nil, nil)
		return
	}
	switch p.Command {
	case "hexai.showDocument":
		if len(p.Arguments) >= 2 {
			uri, _ := p.Arguments[0].(string)
			var r Range
			// Convert second arg to Range via re-marshal to be robust across clients
			if b, err := json.Marshal(p.Arguments[1]); err == nil {
				_ = json.Unmarshal(b, &r)
			}
			if uri != "" {
				s.clientShowDocument(uri, &r)
			}
		}
		s.reply(req.ID, nil, nil)
		return
	default:
		// Unknown command; no-op
		s.reply(req.ID, nil, nil)
		return
	}
}
// Summary: Initialization and lifecycle handlers split from handlers.go.
package lsp
import (
"os"
"codeberg.org/snonux/hexai/internal"
"codeberg.org/snonux/hexai/internal/logging"
tmx "codeberg.org/snonux/hexai/internal/tmux"
)
// handleInitialize answers the LSP initialize request with the server's
// capabilities: full-document sync, completion with the configured trigger
// characters, and resolvable code actions. The advertised version string
// embeds the active provider and model when an LLM client is configured.
func (s *Server) handleInitialize(req Request) {
	client := s.currentLLMClient()
	version := internal.Version
	if client != nil {
		version = version + " [" + client.Name() + ":" + client.DefaultModel() + "]"
	}
	res := InitializeResult{
		Capabilities: ServerCapabilities{
			TextDocumentSync: 1, // 1 = TextDocumentSyncKindFull
			CompletionProvider: &CompletionOptions{
				ResolveProvider:   false,
				TriggerCharacters: s.triggerCharacters(),
			},
			CodeActionProvider: CodeActionOptions{ResolveProvider: true},
		},
		ServerInfo: &ServerInfo{Name: "hexai", Version: version},
	}
	s.reply(req.ID, res, nil)
}
// handleInitialized logs client readiness and emits an initial tmux heartbeat
// carrying the active provider and model.
func (s *Server) handleInitialized() {
	logging.Logf("lsp ", "client initialized")
	client := s.currentLLMClient()
	if client == nil {
		return
	}
	_ = tmx.SetStatus(tmx.FormatLLMStartStatus(client.Name(), client.DefaultModel()))
}
// handleShutdown acknowledges the LSP shutdown request with a null result.
func (s *Server) handleShutdown(req Request) {
	s.reply(req.ID, nil, nil)
}
// handleExit marks the server as exited and terminates the process, per the
// LSP exit notification contract.
func (s *Server) handleExit() {
	s.exited = true
	os.Exit(0)
}
// Summary: Generic LSP helpers shared across handlers (LLM opts, prompts, text utils, counters).
package lsp
import (
"context"
"fmt"
"strings"
"time"
"codeberg.org/snonux/hexai/internal/appconfig"
"codeberg.org/snonux/hexai/internal/llm"
"codeberg.org/snonux/hexai/internal/logging"
"codeberg.org/snonux/hexai/internal/stats"
"codeberg.org/snonux/hexai/internal/textutil"
tmx "codeberg.org/snonux/hexai/internal/tmux"
)
// surfaceKind identifies which LSP feature a request originates from; it
// selects the per-surface provider/model/temperature configuration.
type surfaceKind string

const (
	surfaceCompletion surfaceKind = "completion"
	surfaceCodeAction surfaceKind = "code_action"
	surfaceChat       surfaceKind = "chat"
)

// requestSpec captures one resolved provider/model/options combination for a
// surface, plus its position in the configured list (used for result ordering).
type requestSpec struct {
	provider      string                  // canonical provider name
	entry         appconfig.SurfaceConfig // raw per-surface config entry
	fallbackModel string                  // model used when entry.Model is empty
	options       []llm.RequestOption     // pre-built request options (max tokens, model, temperature)
	index         int                     // position within the configured surface list
}
// modelOverride returns the trimmed per-surface model override, or "" when unset.
func (r requestSpec) modelOverride() string { return strings.TrimSpace(r.entry.Model) }
// effectiveModel returns the first non-blank model among the per-surface
// override, the spec's fallback, and the client's default (trimmed; may be "").
func (r requestSpec) effectiveModel(defaultModel string) string {
	for _, candidate := range []string{r.entry.Model, r.fallbackModel, defaultModel} {
		if trimmed := strings.TrimSpace(candidate); trimmed != "" {
			return trimmed
		}
	}
	return ""
}
// buildRequestSpecs expands the per-surface configuration into one requestSpec
// per configured entry, resolving provider names, fallback models, and request
// options. With no per-surface config, a single spec using the global provider
// is produced.
func (s *Server) buildRequestSpecs(surface surfaceKind) []requestSpec {
	cfg := s.currentConfig()
	entries := surfaceConfigsFor(cfg, surface)
	if len(entries) == 0 {
		entries = []appconfig.SurfaceConfig{{Provider: cfg.Provider}}
	}
	maxTokens := s.maxTokens()
	specs := make([]requestSpec, 0, len(entries))
	for idx, raw := range entries {
		// Normalize whitespace in the raw entry before resolution.
		entry := appconfig.SurfaceConfig{
			Provider:    strings.TrimSpace(raw.Provider),
			Model:       strings.TrimSpace(raw.Model),
			Temperature: raw.Temperature,
		}
		provider := entry.Provider
		if provider == "" {
			provider = cfg.Provider
		}
		provider = canonicalProvider(provider)
		// Entry model wins; otherwise fall back to the provider's default model.
		fallbackModel := entry.Model
		if fallbackModel == "" {
			fallbackModel = strings.TrimSpace(resolveDefaultModel(cfg, provider))
		}
		opts := []llm.RequestOption{llm.WithMaxTokens(maxTokens)}
		if entry.Model != "" {
			opts = append(opts, llm.WithModel(entry.Model))
		}
		if temp, ok := chooseSurfaceTemperature(surface, cfg, entry, provider, fallbackModel); ok {
			opts = append(opts, llm.WithTemperature(temp))
		}
		specs = append(specs, requestSpec{
			provider:      provider,
			entry:         entry,
			fallbackModel: fallbackModel,
			options:       opts,
			index:         idx,
		})
	}
	return specs
}
// primaryRequestSpec returns the first configured spec for the surface, or a
// default spec built from the global provider when none are configured.
func (s *Server) primaryRequestSpec(surface surfaceKind) requestSpec {
	if specs := s.buildRequestSpecs(surface); len(specs) > 0 {
		return specs[0]
	}
	cfg := s.currentConfig()
	provider := canonicalProvider(cfg.Provider)
	return requestSpec{
		provider:      provider,
		fallbackModel: strings.TrimSpace(resolveDefaultModel(cfg, provider)),
		options:       []llm.RequestOption{llm.WithMaxTokens(s.maxTokens())},
	}
}
// buildRequestSpec is retained for consumers expecting a single-entry helper.
// It simply forwards to primaryRequestSpec for the given surface.
func (s *Server) buildRequestSpec(surface surfaceKind) requestSpec {
	return s.primaryRequestSpec(surface)
}
// canonicalProvider lowercases and trims a provider name; an empty or blank
// name resolves to the default provider "openai".
func canonicalProvider(name string) string {
	normalized := strings.ToLower(strings.TrimSpace(name))
	if normalized == "" {
		return "openai"
	}
	return normalized
}
// resolveDefaultModel returns the configured default model for provider,
// trimmed; unknown providers fall back to the OpenAI model setting.
func resolveDefaultModel(cfg appconfig.App, provider string) string {
	var model string
	switch provider {
	case "ollama":
		model = cfg.OllamaModel
	case "copilot":
		model = cfg.CopilotModel
	default:
		model = cfg.OpenAIModel
	}
	return strings.TrimSpace(model)
}
// surfaceConfigsFor returns the configured entries for the given surface, or
// nil for an unknown surface.
func surfaceConfigsFor(cfg appconfig.App, surface surfaceKind) []appconfig.SurfaceConfig {
	switch surface {
	case surfaceChat:
		return cfg.ChatConfigs
	case surfaceCodeAction:
		return cfg.CodeActionConfigs
	case surfaceCompletion:
		return cfg.CompletionConfigs
	}
	return nil
}
// chooseSurfaceTemperature resolves the request temperature with precedence:
// per-entry override, then the global coding temperature (with a gpt-5
// adjustment), then a gpt-5-only default. The bool reports whether any
// temperature should be sent at all.
func chooseSurfaceTemperature(surface surfaceKind, cfg appconfig.App, entry appconfig.SurfaceConfig, provider string, fallbackModel string) (float64, bool) {
	if entry.Temperature != nil {
		return *entry.Temperature, true
	}
	if cfg.CodingTemperature != nil {
		temp := *cfg.CodingTemperature
		effectiveModel := strings.TrimSpace(entry.Model)
		if effectiveModel == "" {
			effectiveModel = strings.TrimSpace(fallbackModel)
		}
		// NOTE(review): bumps exactly 0.2 to 1.0 for OpenAI gpt-5 models —
		// presumably 0.2 is the shipped default and gpt-5 needs 1.0; the
		// exact-equality check is fragile, confirm the intent.
		if provider == "openai" && strings.HasPrefix(strings.ToLower(effectiveModel), "gpt-5") && temp == 0.2 {
			temp = 1.0
		}
		return temp, true
	}
	effectiveModel := strings.TrimSpace(entry.Model)
	if effectiveModel == "" {
		effectiveModel = strings.TrimSpace(fallbackModel)
	}
	// No configured temperature: gpt-5 on OpenAI still gets an explicit 1.0.
	if provider == "openai" && strings.HasPrefix(strings.ToLower(effectiveModel), "gpt-5") {
		return 1.0, true
	}
	return 0, false
}
// small helpers for LLM traffic stats
// incSentCounters records one outgoing LLM request of n bytes.
func (s *Server) incSentCounters(n int) {
	s.mu.Lock()
	defer s.mu.Unlock()
	s.llmReqTotal++
	s.llmSentBytesTotal += int64(n)
}
// incRecvCounters records one received LLM response of n bytes.
func (s *Server) incRecvCounters(n int) {
	s.mu.Lock()
	defer s.mu.Unlock()
	s.llmRespTotal++
	s.llmRespBytesTotal += int64(n)
}
// logLLMStats logs local per-process LLM traffic counters and, when a global
// stats snapshot is available, pushes a colored status line to tmux. model may
// be empty, in which case the client's default model is shown.
func (s *Server) logLLMStats(model string) {
	s.mu.RLock()
	avgSent := int64(0)
	if s.llmReqTotal > 0 {
		avgSent = s.llmSentBytesTotal / s.llmReqTotal
	}
	avgRecv := int64(0)
	if s.llmRespTotal > 0 {
		avgRecv = s.llmRespBytesTotal / s.llmRespTotal
	}
	reqs, sentTot, recvTot := s.llmReqTotal, s.llmSentBytesTotal, s.llmRespBytesTotal
	s.mu.RUnlock()
	// Clamp elapsed minutes to avoid division by zero right after startup.
	mins := time.Since(s.startTime).Minutes()
	if mins <= 0 {
		mins = 0.001
	}
	rpmLocal := float64(reqs) / mins
	sentPerMin := float64(sentTot) / mins
	recvPerMin := float64(recvTot) / mins
	// Log local process counters
	logging.Logf("lsp ", "llm stats (local) reqs=%d avg_sent=%d avg_recv=%d sent_total=%d recv_total=%d rpm=%.2f sent_per_min=%.0f recv_per_min=%.0f", reqs, avgSent, avgRecv, sentTot, recvTot, rpmLocal, sentPerMin, recvPerMin)
	// Global snapshot for tmux status
	snap, err := stats.TakeSnapshot()
	if err == nil {
		if client := s.currentLLMClient(); client != nil {
			provider := client.Name()
			modelName := strings.TrimSpace(model)
			if modelName == "" {
				modelName = client.DefaultModel()
			}
			// Per-scope rpm estimated from window
			scopeReqs := int64(0)
			if pe, ok := snap.Providers[provider]; ok {
				if mc, ok2 := pe.Models[modelName]; ok2 {
					scopeReqs = mc.Reqs
				}
			}
			minsWin := snap.Window.Minutes()
			if minsWin <= 0 {
				minsWin = 0.001
			}
			scopeRPM := float64(scopeReqs) / minsWin
			status := tmx.FormatGlobalStatusColored(snap.Global.Reqs, snap.RPM, snap.Global.Sent, snap.Global.Recv, provider, modelName, scopeRPM, scopeReqs, snap.Window)
			_ = tmx.SetStatus(status)
		}
	}
}
// Completion prompt builders and filters
// inParamList reports whether the cursor sits inside the parameter list of a
// func declaration on the current line, i.e. after the first "(" and at or
// before the first ")" (or anywhere after "(" when the list is still open).
//
// Fix: the previous version shadowed the builtin `close` with a local; use
// non-shadowing names and strings.IndexByte (behavior unchanged).
func inParamList(current string, cursor int) bool {
	if !strings.Contains(current, "func ") {
		return false
	}
	openIdx := strings.IndexByte(current, '(')
	closeIdx := strings.IndexByte(current, ')')
	return openIdx >= 0 && cursor > openIdx && (closeIdx == -1 || cursor <= closeIdx)
}
// renderTemplate performs simple {{var}} replacement in a template string by
// delegating to textutil.RenderTemplate.
func renderTemplate(t string, vars map[string]string) string { return textutil.RenderTemplate(t, vars) }
// computeTextEditAndFilter builds the TextEdit that inserts cleaned — either
// replacing the parameter-list interior (when inParams and a "(" exists) or
// the identifier left of the cursor — plus the filter text the client should
// match against.
func computeTextEditAndFilter(cleaned string, inParams bool, current string, p CompletionParams) (*TextEdit, string) {
	if inParams {
		open := strings.Index(current, "(")
		close := strings.Index(current, ")")
		if open >= 0 {
			// Replace from just after "(" to the earlier of ")" or the cursor.
			left := open + 1
			right := len(current)
			if close >= 0 && close >= left {
				right = close
			}
			if p.Position.Character < right {
				right = p.Position.Character
			}
			te := &TextEdit{Range: Range{Start: Position{Line: p.Position.Line, Character: left}, End: Position{Line: p.Position.Line, Character: right}}, NewText: cleaned}
			var filter string
			if left >= 0 && right >= left && right <= len(current) {
				filter = strings.TrimLeft(current[left:right], " \t")
			}
			return te, filter
		}
	}
	// Default: replace the identifier characters immediately left of the cursor.
	startChar := computeWordStart(current, p.Position.Character)
	te := &TextEdit{Range: Range{Start: Position{Line: p.Position.Line, Character: startChar}, End: Position{Line: p.Position.Line, Character: p.Position.Character}}, NewText: cleaned}
	filter := strings.TrimLeft(current[startChar:p.Position.Character], " \t")
	return te, filter
}
// computeWordStart walks left from byte offset at over ASCII identifier
// characters ([A-Za-z0-9_]) and returns the start index of that word.
// at is clamped into [0, len(current)]; previously a negative at was
// returned unchanged, leaking an invalid index to callers.
func computeWordStart(current string, at int) int {
	if at > len(current) {
		at = len(current)
	}
	if at < 0 {
		at = 0
	}
	for at > 0 {
		ch := current[at-1]
		if (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || (ch >= '0' && ch <= '9') || ch == '_' {
			at--
			continue
		}
		break
	}
	return at
}
// isIdentChar reports whether ch is an ASCII identifier character:
// a letter, a digit, or an underscore.
func isIdentChar(ch byte) bool {
	switch {
	case ch >= 'a' && ch <= 'z', ch >= 'A' && ch <= 'Z', ch >= '0' && ch <= '9', ch == '_':
		return true
	default:
		return false
	}
}
// chatWithStats wraps the LLM chat call with byte counters, the completion
// debounce/throttle gates, global stats updates, and a tmux heartbeat.
// The surface argument is currently unused in this body; spec already carries
// the surface-resolved options.
func (s *Server) chatWithStats(ctx context.Context, surface surfaceKind, spec requestSpec, msgs []llm.Message) (string, error) {
	// Count bytes sent
	sent := 0
	for _, m := range msgs {
		sent += len(m.Content)
	}
	s.incSentCounters(sent)
	// Debounce/throttle if configured (reuse completion gates)
	s.waitForDebounce(ctx)
	if !s.waitForThrottle(ctx) {
		return "", context.Canceled
	}
	// Perform request
	client := s.clientFor(spec)
	if client == nil {
		return "", fmt.Errorf("llm client unavailable")
	}
	modelUsed := spec.effectiveModel(client.DefaultModel())
	txt, err := client.Chat(ctx, msgs, spec.options...)
	if err != nil {
		// Still log stats on failure so counters stay visible.
		s.logLLMStats(modelUsed)
		return "", err
	}
	s.incRecvCounters(len(txt))
	// Update global stats cache
	_ = stats.Update(ctx, client.Name(), modelUsed, sent, len(txt))
	s.logLLMStats(modelUsed)
	return txt, nil
}
// Inline prompt utilities

// lineHasInlinePrompt reports whether line contains either a strict inline
// prompt tag (open marker, text, close marker) or a double-open trigger
// sequence. An empty openStr falls back to the single open byte.
func lineHasInlinePrompt(line string, openStr string, open, close byte) bool {
	marker := openStr
	if marker == "" {
		marker = string(open)
	}
	_, _, _, strict := findStrictInlineTag(line, marker, open, close)
	return strict || hasDoubleOpenTrigger(line, marker, open, close)
}
// doubleOpenSequences returns the distinct "double open" trigger sequences
// for the configured inline markers: the open marker immediately followed by
// the close byte, and the open byte immediately followed by the open marker.
// Sequences that coincide (possible when markers share characters) are
// emitted only once, in the same order as above.
func doubleOpenSequences(openStr string, open, close byte) []string {
	var out []string
	add := func(seq string) {
		for _, existing := range out {
			if existing == seq {
				return
			}
		}
		out = append(out, seq)
	}
	if openStr != "" && close != 0 {
		add(openStr + string(close))
	}
	if openStr != "" && open != 0 {
		if seq := string(open) + openStr; len(seq) > len(openStr) {
			add(seq)
		}
	}
	return out
}
// leadingIndent returns the run of leading spaces and tabs at the start of
// line, or "" when the line does not begin with whitespace.
func leadingIndent(line string) string {
	trimmed := strings.TrimLeft(line, " \t")
	return line[:len(line)-len(trimmed)]
}
// applyIndent prefixes indent onto every non-blank line of suggestion that
// does not already start with it. Blank lines and already-indented lines are
// left untouched; the lines are re-joined with "\n".
func applyIndent(indent, suggestion string) string {
	if indent == "" || suggestion == "" {
		return suggestion
	}
	out := splitLines(suggestion)
	for idx := range out {
		switch {
		case strings.TrimSpace(out[idx]) == "":
			// blank line: keep as-is
		case strings.HasPrefix(out[idx], indent):
			// already carries the indent
		default:
			out[idx] = indent + out[idx]
		}
	}
	return strings.Join(out, "\n")
}
// --- Inline marker parsing and general string utilities ---

// findStrictInlineTag finds >!text> (markers configurable), requiring no space
// immediately after the opening marker and no space immediately before the
// closing marker. It returns the trimmed text between the markers, the start
// index of the opening marker, the index just past the closing marker, and ok.
// An empty openStr falls back to the single open byte; if both are empty there
// is nothing to match.
func findStrictInlineTag(line string, openStr string, open, close byte) (string, int, int, bool) {
	if openStr == "" {
		openStr = string(open)
	}
	if openStr == "" {
		return "", 0, 0, false
	}
	// Scan by the first byte of the open marker for cheap skipping.
	openChar := open
	if openChar == 0 {
		openChar = openStr[0]
	}
	doubleSeqs := doubleOpenSequences(openStr, openChar, close)
	pos := 0
	for pos < len(line) {
		j := strings.IndexByte(line[pos:], openChar)
		if j < 0 {
			return "", 0, 0, false
		}
		j += pos
		if !strings.HasPrefix(line[j:], openStr) {
			pos = j + 1
			continue
		}
		contentStart := j + len(openStr)
		if contentStart >= len(line) {
			return "", 0, 0, false
		}
		// A "double open" sequence extends the marker; content starts after it.
		doubleHit := false
		for _, seq := range doubleSeqs {
			if strings.HasPrefix(line[j:], seq) {
				doubleHit = true
				contentStart += len(seq) - len(openStr)
				if contentStart >= len(line) {
					return "", 0, 0, false
				}
				break
			}
		}
		next := line[contentStart]
		if next == ' ' {
			// Space right after the opening marker: not a strict tag here.
			pos = contentStart + 1
			continue
		}
		if !doubleHit && next == close {
			// Empty tag (open immediately followed by close): skip it.
			pos = contentStart + 1
			continue
		}
		k := strings.IndexByte(line[contentStart:], close)
		if k < 0 {
			return "", 0, 0, false
		}
		closeIdx := contentStart + k
		if closeIdx-1 >= contentStart && line[closeIdx-1] == ' ' {
			// Space immediately before the closing marker: reject, keep scanning.
			pos = closeIdx + 1
			continue
		}
		inner := strings.TrimSpace(line[contentStart:closeIdx])
		if inner == "" {
			pos = closeIdx + 1
			continue
		}
		end := closeIdx + 1
		return inner, j, end, true
	}
	return "", 0, 0, false
}
// isBareDoubleOpen reports whether the line contains a standalone double-open
// marker with no inline content: a double-open sequence followed by nothing,
// only whitespace, or a single close byte. It explicitly excludes the valid
// inline form such as ";;text;".
func isBareDoubleOpen(line string, openStr string, open, close byte) bool {
	t := strings.TrimSpace(line)
	if openStr == "" {
		openStr = string(open)
	}
	if openStr == "" {
		return false
	}
	for _, seq := range doubleOpenSequences(openStr, open, close) {
		if strings.HasPrefix(t, seq) {
			rest := strings.TrimSpace(t[len(seq):])
			if rest == "" || rest == string(close) {
				return true
			}
		}
	}
	return false
}
// stripDuplicateAssignmentPrefix removes a duplicated assignment prefix from
// the suggestion. If the text before the cursor ends in "name :=" or
// "name =" (optionally followed only by whitespace) and the suggestion
// repeats that same "name :="/"name =" segment, the repeated segment is
// stripped from the suggestion so it is not inserted twice.
func stripDuplicateAssignmentPrefix(prefixBeforeCursor, suggestion string) string {
	s2 := strings.TrimLeft(suggestion, " \t")
	// Prefer := if present at end of prefix
	if idx := strings.LastIndex(prefixBeforeCursor, ":="); idx >= 0 && idx+2 <= len(prefixBeforeCursor) {
		tail := prefixBeforeCursor[idx+2:]
		if strings.TrimSpace(tail) == "" {
			// Walk back over the identifier (and surrounding spaces) that is
			// being assigned, to recover the full "name :=" segment.
			start := idx - 1
			for start >= 0 && (isIdentChar(prefixBeforeCursor[start]) || prefixBeforeCursor[start] == ' ' || prefixBeforeCursor[start] == '\t') {
				start--
			}
			start++
			seg := strings.TrimRight(prefixBeforeCursor[start:idx+2], " \t")
			if strings.HasPrefix(s2, seg) {
				return strings.TrimLeft(s2[len(seg):], " \t")
			}
		}
	}
	// Fallback to plain '=' if present
	if idx := strings.LastIndex(prefixBeforeCursor, "="); idx >= 0 {
		if !(idx > 0 && prefixBeforeCursor[idx-1] == ':') { // not :=
			tail := prefixBeforeCursor[idx+1:]
			if strings.TrimSpace(tail) == "" {
				start := idx - 1
				for start >= 0 && (isIdentChar(prefixBeforeCursor[start]) || prefixBeforeCursor[start] == ' ' || prefixBeforeCursor[start] == '\t') {
					start--
				}
				start++
				seg := strings.TrimRight(prefixBeforeCursor[start:idx+1], " \t")
				if strings.HasPrefix(s2, seg) {
					return strings.TrimLeft(s2[len(seg):], " \t")
				}
			}
		}
	}
	return suggestion
}
// stripDuplicateGeneralPrefix removes any already-typed prefix that the model
// repeated at the start of the suggestion. It first tries the whole trimmed
// prefix, then progressively shorter suffixes of it that begin at an
// identifier boundary, stripping the first match found.
func stripDuplicateGeneralPrefix(prefixBeforeCursor, suggestion string) string {
	if suggestion == "" {
		return suggestion
	}
	s := strings.TrimLeft(suggestion, " \t")
	p := strings.TrimRight(prefixBeforeCursor, " \t")
	if p != "" && strings.HasPrefix(s, p) {
		return strings.TrimLeft(s[len(p):], " \t")
	}
	// Try shorter suffixes of the prefix, anchored at identifier boundaries.
	for k := len(p) - 1; k > 0; k-- {
		if !isIdentBoundary(p[k-1]) {
			continue
		}
		suf := strings.TrimLeft(p[k:], " \t")
		if suf == "" {
			continue
		}
		if strings.HasPrefix(s, suf) {
			return strings.TrimLeft(s[len(suf):], " \t")
		}
	}
	return suggestion
}
// isIdentBoundary reports whether ch is NOT an ASCII identifier character
// (letter, digit, or underscore), i.e. it terminates an identifier run.
func isIdentBoundary(ch byte) bool {
	isIdent := (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || (ch >= '0' && ch <= '9') || ch == '_'
	return !isIdent
}
// stripCodeFences removes surrounding Markdown code fences from a model
// response, delegating to textutil.StripCodeFences.
func stripCodeFences(s string) string { return textutil.StripCodeFences(s) }
// stripInlineCodeSpan returns the contents of the first `backtick` code span
// in s (after trimming surrounding whitespace). When s contains no complete
// span, the trimmed string is returned unchanged.
func stripInlineCodeSpan(s string) string {
	t := strings.TrimSpace(s)
	if t == "" {
		return t
	}
	_, rest, opened := strings.Cut(t, "`")
	if !opened {
		return t
	}
	inner, _, closed := strings.Cut(rest, "`")
	if !closed {
		return t
	}
	return inner
}
// labelForCompletion picks a short, readable label for the completion list:
// normally the length-trimmed first line of the cleaned suggestion, but the
// raw filter text when the label does not start with it (case-insensitive).
func labelForCompletion(cleaned, filter string) string {
	candidate := trimLen(firstLine(cleaned))
	if filter == "" {
		return candidate
	}
	if strings.HasPrefix(strings.ToLower(candidate), strings.ToLower(filter)) {
		return candidate
	}
	return filter
}
// extractRangeText returns the exact text within the given document range.
// Character offsets are clamped to the corresponding line lengths; line
// indices are assumed valid for d.lines (not bounds-checked here).
// Multi-line ranges are re-joined with "\n".
func extractRangeText(d *document, r Range) string {
	if r.Start.Line == r.End.Line {
		line := d.lines[r.Start.Line]
		if r.Start.Character < 0 {
			r.Start.Character = 0
		}
		if r.End.Character > len(line) {
			r.End.Character = len(line)
		}
		if r.Start.Character > r.End.Character {
			return ""
		}
		return line[r.Start.Character:r.End.Character]
	}
	var b strings.Builder
	// First line: from the (clamped) start character to end of line.
	first := d.lines[r.Start.Line]
	if r.Start.Character < 0 {
		r.Start.Character = 0
	}
	if r.Start.Character > len(first) {
		r.Start.Character = len(first)
	}
	b.WriteString(first[r.Start.Character:])
	b.WriteString("\n")
	// Middle lines are copied whole, each followed by a newline.
	// (The original guarded the newline with `if i+1 <= r.End.Line`, which is
	// always true inside this loop since i < r.End.Line; the dead check is
	// removed.)
	for i := r.Start.Line + 1; i < r.End.Line; i++ {
		b.WriteString(d.lines[i])
		b.WriteString("\n")
	}
	// Last line: from column 0 to the (clamped) end character.
	last := d.lines[r.End.Line]
	if r.End.Character < 0 {
		r.End.Character = 0
	}
	if r.End.Character > len(last) {
		r.End.Character = len(last)
	}
	b.WriteString(last[:r.End.Character])
	return b.String()
}
// collectPromptRemovalEdits returns edits that delete every inline prompt
// marker found in the document identified by uri, or nil when the document
// is unknown or empty.
func (s *Server) collectPromptRemovalEdits(uri string) []TextEdit {
	doc := s.getDocument(uri)
	if doc == nil || len(doc.lines) == 0 {
		return nil
	}
	openStr, _, openChar, closeChar := s.inlineMarkers()
	var edits []TextEdit
	for lineNum, text := range doc.lines {
		edits = append(edits, promptRemovalEditsForLine(text, lineNum, openStr, openChar, closeChar)...)
	}
	return edits
}
// promptRemovalEditsForLine computes removal edits for a single line: a line
// carrying a double-open trigger is deleted in full, otherwise each strict
// inline marker on the line gets its own removal edit.
func promptRemovalEditsForLine(line string, lineNum int, openStr string, open, close byte) []TextEdit {
	if !hasDoubleOpenTrigger(line, openStr, open, close) {
		return collectSemicolonMarkers(line, lineNum, openStr, open, close)
	}
	wholeLine := Range{Start: Position{Line: lineNum, Character: 0}, End: Position{Line: lineNum, Character: len(line)}}
	return []TextEdit{{Range: wholeLine, NewText: ""}}
}
// hasDoubleOpenTrigger reports whether line contains a valid double-open
// trigger: one of the double-open sequences followed by non-space content and
// a close byte, with no space immediately before the close. Candidates whose
// content starts with a space, another open/close byte, or that end with a
// space before the close are skipped and scanning continues.
func hasDoubleOpenTrigger(line string, openStr string, open, close byte) bool {
	if openStr == "" {
		openStr = string(open)
	}
	if openStr == "" {
		return false
	}
	seqs := doubleOpenSequences(openStr, open, close)
	if len(seqs) == 0 {
		return false
	}
	pos := 0
	for pos < len(line) {
		// Find the earliest occurrence of any double-open sequence at/after pos.
		found := -1
		var seq string
		for _, cand := range seqs {
			if cand == "" {
				continue
			}
			if idx := strings.Index(line[pos:], cand); idx >= 0 {
				abs := pos + idx
				if found < 0 || abs < found {
					found = abs
					seq = cand
				}
			}
		}
		if found < 0 {
			return false
		}
		contentStart := found + len(seq)
		if contentStart >= len(line) {
			return false
		}
		first := line[contentStart]
		if first == ' ' || first == close || first == open {
			// Content must start with real text, not whitespace or markers.
			pos = contentStart + 1
			continue
		}
		if contentStart+1 >= len(line) {
			return false
		}
		k := strings.IndexByte(line[contentStart+1:], close)
		if k < 0 {
			return false
		}
		closeIdx := contentStart + 1 + k
		if closeIdx-1 >= 0 && line[closeIdx-1] == ' ' {
			// No space allowed immediately before the close byte.
			pos = closeIdx + 1
			continue
		}
		return true
	}
	return false
}
// collectSemicolonMarkers returns one removal edit per strict inline marker
// occurrence on the line (open marker + non-empty content + close byte, no
// space after the open or before the close). Double-open sequences are left
// alone (they are handled as whole-line removals elsewhere), and a trailing
// space after the close byte is swallowed into the edit.
func collectSemicolonMarkers(line string, lineNum int, openStr string, open, close byte) []TextEdit {
	if openStr == "" {
		openStr = string(open)
	}
	if openStr == "" {
		return nil
	}
	var edits []TextEdit
	start := 0
	doubleSeqs := doubleOpenSequences(openStr, open, close)
	for start < len(line) {
		j := strings.Index(line[start:], openStr)
		if j < 0 {
			break
		}
		j += start
		contentStart := j + len(openStr)
		if contentStart >= len(line) {
			break
		}
		next := line[contentStart]
		if next == ' ' {
			// Space right after the open marker: not a strict marker.
			start = j + 1
			continue
		}
		// Skip occurrences that are actually double-open sequences.
		skipDouble := false
		for _, seq := range doubleSeqs {
			if strings.HasPrefix(line[j:], seq) {
				skipDouble = true
				break
			}
		}
		if skipDouble {
			start = j + 1
			continue
		}
		k := strings.IndexByte(line[contentStart:], close)
		if k < 0 {
			break
		}
		closeIdx := contentStart + k
		if closeIdx-1 < contentStart || line[closeIdx-1] == ' ' {
			// Space before the close byte (or nothing between markers): reject.
			start = closeIdx + 1
			continue
		}
		if closeIdx == contentStart {
			// Empty marker content: reject.
			start = closeIdx + 1
			continue
		}
		endChar := closeIdx + 1
		if endChar < len(line) && line[endChar] == ' ' {
			// Swallow a single trailing space so no double space remains.
			endChar++
		}
		edits = append(edits, TextEdit{Range: Range{Start: Position{Line: lineNum, Character: j}, End: Position{Line: lineNum, Character: endChar}}, NewText: ""})
		start = endChar
	}
	return edits
}
// Summary: Minimal LSP server over stdio; manages documents, dispatches requests, and tracks stats.
package lsp
import (
"bufio"
"encoding/json"
"io"
"log"
"os"
"strings"
"sync"
"time"
"codeberg.org/snonux/hexai/internal/appconfig"
"codeberg.org/snonux/hexai/internal/llm"
"codeberg.org/snonux/hexai/internal/logging"
"codeberg.org/snonux/hexai/internal/runtimeconfig"
)
// Server implements a minimal LSP over stdio.
type Server struct {
	in     *bufio.Reader // framed JSON-RPC input stream
	out    io.Writer     // framed JSON-RPC output stream
	outMu  sync.Mutex    // serializes writes to out (see writeMessage)
	logger *log.Logger
	exited bool // presumably set on the "exit" notification; checked by Run — TODO confirm
	mu     sync.RWMutex // guards the mutable state below (docs, cfg, clients, caches, flags)
	docs   map[string]*document // open documents keyed by URI
	logContext  bool
	configStore *runtimeconfig.Store // optional live config source; nil means use cfg only
	cfg         appconfig.App        // last applied configuration snapshot
	llmClient   llm.Client           // base LLM client for the configured provider
	llmProvider string               // canonical name of the base provider
	altClients  map[string]llm.Client // lazily built clients for non-base providers (see clientFor)
	lastInput   time.Time
	// LLM request stats
	llmReqTotal       int64
	llmSentBytesTotal int64
	llmRespTotal      int64
	llmRespBytesTotal int64
	startTime         time.Time
	// Small LRU cache for recent code completion outputs (keyed by context)
	compCache          map[string]string
	compCacheOrder     []string // most-recent at end; cap ~10
	pendingCompletions map[string][]CompletionItem // stashed items, see storePendingCompletion/takePendingCompletion
	configLoadOpts     appconfig.LoadOptions
	// Outgoing JSON-RPC id counter for server-initiated requests
	nextID              int64
	lastLLMCall         time.Time
	completionsDisabled bool // runtime toggle, see setCompletionsDisabled
	// Dispatch table for JSON-RPC methods → handler functions
	handlers map[string]func(Request)
}
// ServerOptions collects configuration for NewServer to avoid long parameter
// lists. When Config or ConfigStore is provided it takes precedence; the
// remaining fields act as legacy fallbacks (see applyOptions).
type ServerOptions struct {
	LogContext            bool
	ConfigStore           *runtimeconfig.Store // live config store; snapshotted when Config is nil
	Config                *appconfig.App       // explicit config; wins over ConfigStore
	MaxTokens             int
	ContextMode           string
	WindowLines           int
	MaxContextTokens      int
	ConfigLoadOptions     appconfig.LoadOptions
	Client                llm.Client // preconstructed base LLM client
	TriggerCharacters     []string
	CodingTemperature     *float64 // nil means provider default
	ManualInvokeMinPrefix int
	CompletionDebounceMs  int
	CompletionThrottleMs  int
	// Inline/chat triggers
	InlineOpen   string
	InlineClose  string
	ChatSuffix   string
	ChatPrefixes []string
	// Prompt templates
	PromptCompSysGeneral    string
	PromptCompSysParams     string
	PromptCompSysInline     string
	PromptCompUserGeneral   string
	PromptCompUserParams    string
	PromptCompExtraHeader   string
	PromptNativeCompletion  string
	PromptChatSystem        string
	PromptRewriteSystem     string
	PromptDiagnosticsSystem string
	PromptDocumentSystem    string
	PromptRewriteUser       string
	PromptDiagnosticsUser   string
	PromptDocumentUser      string
	PromptGoTestSystem      string
	PromptGoTestUser        string
	PromptSimplifySystem    string
	PromptSimplifyUser      string
	// Custom actions
	CustomActions []CustomAction
}
// CustomAction mirrors user-defined code actions passed from config.
// Exactly one of Instruction or User is expected to drive the prompt:
// Instruction reuses the rewrite templates, while User supplies a full
// user template (optionally paired with System).
type CustomAction struct {
	ID          string
	Title       string
	Kind        string
	Scope       string // "selection" | "diagnostics"
	Instruction string // if set, use rewrite templates
	System      string // optional when User is set
	User        string // if set, use this user template
}
// NewServer constructs a Server that reads LSP messages from r and writes to
// w, applies opts, and wires the JSON-RPC method dispatch table.
func NewServer(r io.Reader, w io.Writer, logger *log.Logger, opts ServerOptions) *Server {
	srv := &Server{
		in:          bufio.NewReader(r),
		out:         w,
		logger:      logger,
		docs:        make(map[string]*document),
		logContext:  opts.LogContext,
		configStore: opts.ConfigStore,
	}
	srv.startTime = time.Now()
	srv.compCache = make(map[string]string)
	srv.pendingCompletions = make(map[string][]CompletionItem)
	srv.applyOptions(opts)
	// Dispatch table mapping JSON-RPC method names to handlers.
	srv.handlers = map[string]func(Request){
		"initialize":               srv.handleInitialize,
		"initialized":              func(_ Request) { srv.handleInitialized() },
		"shutdown":                 srv.handleShutdown,
		"exit":                     func(_ Request) { srv.handleExit() },
		"textDocument/didOpen":     srv.handleDidOpen,
		"textDocument/didChange":   srv.handleDidChange,
		"textDocument/didClose":    srv.handleDidClose,
		"textDocument/completion":  srv.handleCompletion,
		"textDocument/codeAction":  srv.handleCodeAction,
		"codeAction/resolve":       srv.handleCodeActionResolve,
		"workspace/executeCommand": srv.handleExecuteCommand,
	}
	return srv
}
// applyOptions installs a new configuration and LLM client under the write
// lock. Precedence for the config snapshot: opts.Config, then a snapshot of
// opts.ConfigStore, then a config assembled from the legacy per-field options.
// The alt-client cache is reset because it may no longer match the new config.
func (s *Server) applyOptions(opts ServerOptions) {
	s.mu.Lock()
	defer s.mu.Unlock()
	s.logContext = opts.LogContext
	s.configLoadOpts = opts.ConfigLoadOptions
	if opts.ConfigStore != nil {
		s.configStore = opts.ConfigStore
	}
	if opts.Config != nil {
		s.cfg = *opts.Config
	} else if opts.ConfigStore != nil {
		s.cfg = opts.ConfigStore.Snapshot()
	} else {
		s.cfg = appconfig.App{}
		// populate from legacy ServerOptions fields
		s.cfg.MaxTokens = opts.MaxTokens
		s.cfg.ContextMode = opts.ContextMode
		s.cfg.ContextWindowLines = opts.WindowLines
		s.cfg.MaxContextTokens = opts.MaxContextTokens
		s.cfg.TriggerCharacters = append([]string{}, opts.TriggerCharacters...)
		s.cfg.CodingTemperature = opts.CodingTemperature
		s.cfg.ManualInvokeMinPrefix = opts.ManualInvokeMinPrefix
		s.cfg.CompletionDebounceMs = opts.CompletionDebounceMs
		s.cfg.CompletionThrottleMs = opts.CompletionThrottleMs
		s.cfg.InlineOpen = opts.InlineOpen
		s.cfg.InlineClose = opts.InlineClose
		s.cfg.ChatSuffix = opts.ChatSuffix
		s.cfg.ChatPrefixes = append([]string{}, opts.ChatPrefixes...)
		s.cfg.PromptCompletionSystemGeneral = opts.PromptCompSysGeneral
		s.cfg.PromptCompletionSystemParams = opts.PromptCompSysParams
		s.cfg.PromptCompletionSystemInline = opts.PromptCompSysInline
		s.cfg.PromptCompletionUserGeneral = opts.PromptCompUserGeneral
		s.cfg.PromptCompletionUserParams = opts.PromptCompUserParams
		s.cfg.PromptCompletionExtraHeader = opts.PromptCompExtraHeader
		s.cfg.PromptNativeCompletion = opts.PromptNativeCompletion
		s.cfg.PromptChatSystem = opts.PromptChatSystem
		s.cfg.PromptCodeActionRewriteSystem = opts.PromptRewriteSystem
		s.cfg.PromptCodeActionDiagnosticsSystem = opts.PromptDiagnosticsSystem
		s.cfg.PromptCodeActionDocumentSystem = opts.PromptDocumentSystem
		s.cfg.PromptCodeActionRewriteUser = opts.PromptRewriteUser
		s.cfg.PromptCodeActionDiagnosticsUser = opts.PromptDiagnosticsUser
		s.cfg.PromptCodeActionDocumentUser = opts.PromptDocumentUser
		s.cfg.PromptCodeActionGoTestSystem = opts.PromptGoTestSystem
		s.cfg.PromptCodeActionGoTestUser = opts.PromptGoTestUser
		s.cfg.PromptCodeActionSimplifySystem = opts.PromptSimplifySystem
		s.cfg.PromptCodeActionSimplifyUser = opts.PromptSimplifyUser
		s.cfg.CustomActions = make([]appconfig.CustomAction, len(opts.CustomActions))
		for i, ca := range opts.CustomActions {
			s.cfg.CustomActions[i] = appconfig.CustomAction{
				ID:          ca.ID,
				Title:       ca.Title,
				Kind:        ca.Kind,
				Scope:       ca.Scope,
				Instruction: ca.Instruction,
				System:      ca.System,
				User:        ca.User,
			}
		}
	}
	// Derive the canonical provider name from the client when one is supplied.
	s.llmClient = opts.Client
	if opts.Client != nil {
		s.llmProvider = canonicalProvider(opts.Client.Name())
	} else {
		s.llmProvider = canonicalProvider(s.cfg.Provider)
	}
	s.altClients = make(map[string]llm.Client)
}
// ApplyOptions updates the server's configuration at runtime. It is the
// exported entry point for applyOptions and is safe to call while the
// server is running (applyOptions takes the write lock).
func (s *Server) ApplyOptions(opts ServerOptions) {
	s.applyOptions(opts)
}
// currentLLMClient returns the active base LLM client under a read lock.
func (s *Server) currentLLMClient() llm.Client {
	s.mu.RLock()
	client := s.llmClient
	s.mu.RUnlock()
	return client
}
// newClientForProvider builds an llm.Client for the given provider using the
// per-provider settings from cfg. API keys come from the environment, with
// the HEXAI_-prefixed variable taking precedence over the generic one.
func newClientForProvider(cfg appconfig.App, provider string) (llm.Client, error) {
	llmCfg := llm.Config{
		Provider:              provider,
		OpenAIBaseURL:         cfg.OpenAIBaseURL,
		OpenAIModel:           cfg.OpenAIModel,
		OpenAITemperature:     cfg.OpenAITemperature,
		OpenRouterBaseURL:     cfg.OpenRouterBaseURL,
		OpenRouterModel:       cfg.OpenRouterModel,
		OpenRouterTemperature: cfg.OpenRouterTemperature,
		OllamaBaseURL:         cfg.OllamaBaseURL,
		OllamaModel:           cfg.OllamaModel,
		OllamaTemperature:     cfg.OllamaTemperature,
		CopilotBaseURL:        cfg.CopilotBaseURL,
		CopilotModel:          cfg.CopilotModel,
		CopilotTemperature:    cfg.CopilotTemperature,
	}
	oaKey := firstEnv("HEXAI_OPENAI_API_KEY", "OPENAI_API_KEY")
	orKey := firstEnv("HEXAI_OPENROUTER_API_KEY", "OPENROUTER_API_KEY")
	cpKey := firstEnv("HEXAI_COPILOT_API_KEY", "COPILOT_API_KEY")
	return llm.NewFromConfig(llmCfg, oaKey, orKey, cpKey)
}

// firstEnv returns the first environment variable among names whose value is
// non-empty after trimming whitespace, or "" when none is set.
func firstEnv(names ...string) string {
	for _, name := range names {
		if v := strings.TrimSpace(os.Getenv(name)); v != "" {
			return v
		}
	}
	return ""
}
// clientFor resolves the llm.Client to use for a request spec. The base
// client is returned when the spec's provider matches (or is empty); cached
// alternate-provider clients are reused; otherwise a new client is built from
// the current config with the spec's model override applied, cached in
// altClients, and returned. On build failure it logs and falls back to the
// base client (or nil).
func (s *Server) clientFor(spec requestSpec) llm.Client {
	provider := canonicalProvider(spec.provider)
	s.mu.RLock()
	baseProvider := s.llmProvider
	baseClient := s.llmClient
	if baseClient != nil && strings.TrimSpace(baseProvider) == "" {
		baseProvider = canonicalProvider(baseClient.Name())
	}
	if provider == "" {
		provider = baseProvider
	}
	// Fast path: the spec wants the base provider.
	if provider == baseProvider && baseClient != nil {
		s.mu.RUnlock()
		return baseClient
	}
	// Fast path: an alternate client for this provider already exists.
	if c, ok := s.altClients[provider]; ok {
		s.mu.RUnlock()
		return c
	}
	cfg := s.cfg
	store := s.configStore
	s.mu.RUnlock()
	if store != nil {
		cfg = store.Snapshot()
	}
	cfg.Provider = provider
	// Apply the spec's model override (or fallback model) for the provider.
	modelOverride := strings.TrimSpace(spec.entry.Model)
	switch provider {
	case "openai":
		if modelOverride != "" {
			cfg.OpenAIModel = modelOverride
		} else if spec.fallbackModel != "" {
			cfg.OpenAIModel = spec.fallbackModel
		}
	case "openrouter":
		if modelOverride != "" {
			cfg.OpenRouterModel = modelOverride
		} else if spec.fallbackModel != "" {
			cfg.OpenRouterModel = spec.fallbackModel
		}
	case "copilot":
		if modelOverride != "" {
			cfg.CopilotModel = modelOverride
		} else if spec.fallbackModel != "" {
			cfg.CopilotModel = spec.fallbackModel
		}
	case "ollama":
		if modelOverride != "" {
			cfg.OllamaModel = modelOverride
		} else if spec.fallbackModel != "" {
			cfg.OllamaModel = spec.fallbackModel
		}
	}
	client, err := newClientForProvider(cfg, provider)
	if err != nil {
		logging.Logf("lsp ", "failed to build client for provider=%s: %v", provider, err)
		if baseClient != nil {
			return baseClient
		}
		return nil
	}
	// Re-check state under the write lock: another goroutine may have raced
	// us while the lock was released, or the base provider may now match.
	s.mu.Lock()
	defer s.mu.Unlock()
	if provider == s.llmProvider {
		if s.llmClient == nil {
			s.llmClient = client
			s.llmProvider = provider
		}
		return s.llmClient
	}
	if existing, ok := s.altClients[provider]; ok {
		return existing
	}
	if s.altClients == nil {
		s.altClients = make(map[string]llm.Client)
	}
	s.altClients[provider] = client
	return client
}
// currentConfig returns the live configuration: a fresh snapshot from the
// config store when one is attached, otherwise the locally cached config.
// NOTE(review): s.configStore is read here without holding mu while
// applyOptions may replace it under the lock — looks benign, but confirm.
func (s *Server) currentConfig() appconfig.App {
	if s.configStore != nil {
		return s.configStore.Snapshot()
	}
	s.mu.RLock()
	defer s.mu.RUnlock()
	return s.cfg
}
// storePendingCompletion caches a copy of items under key for later retrieval
// by takePendingCompletion. Empty item slices are ignored.
func (s *Server) storePendingCompletion(key string, items []CompletionItem) {
	if len(items) == 0 {
		return
	}
	snapshot := append([]CompletionItem(nil), items...)
	s.mu.Lock()
	defer s.mu.Unlock()
	if s.pendingCompletions == nil {
		s.pendingCompletions = make(map[string][]CompletionItem)
	}
	s.pendingCompletions[key] = snapshot
}
// setCompletionsDisabled updates the completions-disabled flag and returns
// its previous value.
func (s *Server) setCompletionsDisabled(disabled bool) bool {
	s.mu.Lock()
	defer s.mu.Unlock()
	prev := s.completionsDisabled
	s.completionsDisabled = disabled
	return prev
}
// completionDisabled reports whether completions are currently disabled.
func (s *Server) completionDisabled() bool {
	s.mu.RLock()
	disabled := s.completionsDisabled
	s.mu.RUnlock()
	return disabled
}
// takePendingCompletion removes and returns a copy of the completion items
// cached under key, or nil when nothing is pending for it.
func (s *Server) takePendingCompletion(key string) []CompletionItem {
	s.mu.Lock()
	defer s.mu.Unlock()
	// Lookup and delete are both defined no-ops on a nil map, so no guard is
	// needed for an uninitialized pendingCompletions.
	items, ok := s.pendingCompletions[key]
	if !ok {
		return nil
	}
	delete(s.pendingCompletions, key)
	return append([]CompletionItem(nil), items...)
}
// maxTokens returns the configured completion token budget, defaulting to 500
// when unset or non-positive.
func (s *Server) maxTokens() int {
	if v := s.currentConfig().MaxTokens; v > 0 {
		return v
	}
	return 500
}
// contextMode returns the configured completion context mode, defaulting to
// "file-on-new-func" when unset or blank.
func (s *Server) contextMode() string {
	if mode := strings.TrimSpace(s.currentConfig().ContextMode); mode != "" {
		return mode
	}
	return "file-on-new-func"
}
// windowLines returns the configured context window size in lines,
// defaulting to 120 when unset or non-positive.
func (s *Server) windowLines() int {
	if v := s.currentConfig().ContextWindowLines; v > 0 {
		return v
	}
	return 120
}
// maxContextTokens returns the configured context token budget, defaulting
// to 2000 when unset or non-positive.
func (s *Server) maxContextTokens() int {
	if v := s.currentConfig().MaxContextTokens; v > 0 {
		return v
	}
	return 2000
}
// triggerCharacters returns a copy of the configured completion trigger
// characters, or a built-in default set when none are configured.
func (s *Server) triggerCharacters() []string {
	chars := s.currentConfig().TriggerCharacters
	if len(chars) == 0 {
		return []string{".", ":", "/", "_", ")", "{"}
	}
	return append([]string{}, chars...)
}
// codingTemperature returns the configured coding temperature, or nil when
// the provider default should be used.
func (s *Server) codingTemperature() *float64 {
	return s.currentConfig().CodingTemperature
}
// manualInvokeMinPrefix returns the minimum typed-prefix length required for
// manually invoked completions, as configured (no default applied here).
func (s *Server) manualInvokeMinPrefix() int {
	return s.currentConfig().ManualInvokeMinPrefix
}
// completionDebounce returns the configured completion debounce duration,
// or 0 when debouncing is disabled.
func (s *Server) completionDebounce() time.Duration {
	ms := s.currentConfig().CompletionDebounceMs
	if ms <= 0 {
		return 0
	}
	return time.Duration(ms) * time.Millisecond
}
// completionThrottle returns the configured completion throttle duration,
// or 0 when throttling is disabled.
func (s *Server) completionThrottle() time.Duration {
	ms := s.currentConfig().CompletionThrottleMs
	if ms <= 0 {
		return 0
	}
	return time.Duration(ms) * time.Millisecond
}
// inlineMarkers returns the configured inline prompt markers (defaults ">!"
// for open and ">" for close) together with their first bytes, which callers
// use for cheap line scanning.
func (s *Server) inlineMarkers() (open string, close string, openChar byte, closeChar byte) {
	cfg := s.currentConfig()
	if open = strings.TrimSpace(cfg.InlineOpen); open == "" {
		open = ">!"
	}
	if close = strings.TrimSpace(cfg.InlineClose); close == "" {
		close = ">"
	}
	openChar, closeChar = '>', '>'
	if open != "" {
		openChar = open[0]
	}
	if close != "" {
		closeChar = close[0]
	}
	return open, close, openChar, closeChar
}
// chatConfig returns the chat trigger configuration: the line suffix that
// marks a chat request, the accepted chat line prefixes (default
// "?", "!", ":", ";"), and the suffix's first byte for quick checks.
// An unset suffix stays "" (feature off), while a configured but
// whitespace-only suffix falls back to ">".
func (s *Server) chatConfig() (suffix string, prefixes []string, suffixChar byte) {
	cfg := s.currentConfig()
	// The original code had a dead `else { suffix = "" }` branch; suffix is
	// already the zero value, so the branch is removed.
	if cfg.ChatSuffix != "" {
		suffix = strings.TrimSpace(cfg.ChatSuffix)
		if suffix == "" {
			suffix = ">"
		}
	}
	if len(cfg.ChatPrefixes) == 0 {
		prefixes = []string{"?", "!", ":", ";"}
	} else {
		prefixes = append([]string{}, cfg.ChatPrefixes...)
	}
	suffixChar = '>'
	if len(suffix) > 0 {
		suffixChar = suffix[0]
	}
	return suffix, prefixes, suffixChar
}
// promptSet returns the current config snapshot, which carries all prompt
// template fields used by the completion and code-action handlers.
func (s *Server) promptSet() appconfig.App {
	return s.currentConfig()
}
// customActions converts the configured user-defined code actions into the
// server's CustomAction representation, or nil when none are configured.
func (s *Server) customActions() []CustomAction {
	configured := s.currentConfig().CustomActions
	if len(configured) == 0 {
		return nil
	}
	out := make([]CustomAction, len(configured))
	for i, ca := range configured {
		out[i] = CustomAction{
			ID:          ca.ID,
			Title:       ca.Title,
			Kind:        ca.Kind,
			Scope:       ca.Scope,
			Instruction: ca.Instruction,
			System:      ca.System,
			User:        ca.User,
		}
	}
	return out
}
// Run is the server's main loop: it reads Content-Length framed JSON-RPC
// messages until EOF (clean shutdown), decodes each into a Request, and
// dispatches it on its own goroutine. Undecodable payloads and messages
// without a method (client responses) are skipped.
func (s *Server) Run() error {
	for {
		body, err := s.readMessage()
		if err == io.EOF {
			return nil
		}
		if err != nil {
			return err
		}
		var req Request
		if err := json.Unmarshal(body, &req); err != nil {
			logging.Logf("lsp ", "invalid JSON: %v", err)
			continue
		}
		if req.Method == "" {
			// A response from the client; ignore.
			continue
		}
		go s.handle(req)
		// NOTE(review): s.exited is read here without synchronization while
		// handlers run concurrently — looks like a benign shutdown race, but
		// confirm how/where exited is set.
		if s.exited {
			return nil
		}
	}
}
// Summary: LSP transport utilities to read and write JSON-RPC messages with Content-Length framing.
package lsp
import (
"encoding/json"
"fmt"
"io"
"net/textproto"
"strconv"
"strings"
"codeberg.org/snonux/hexai/internal/logging"
)
// readMessage reads one JSON-RPC message framed with MIME-style headers and a
// Content-Length, returning the raw body bytes. Header lines without a colon
// are skipped; headers other than Content-Length are ignored. A missing,
// zero, or unparsable Content-Length is an error.
func (s *Server) readMessage() ([]byte, error) {
	tp := textproto.NewReader(s.in)
	var contentLength int
	for {
		line, err := tp.ReadLine()
		if err != nil {
			return nil, err
		}
		if line == "" { // end of headers
			break
		}
		parts := strings.SplitN(line, ":", 2)
		if len(parts) != 2 {
			continue
		}
		key := strings.TrimSpace(strings.ToLower(parts[0]))
		val := strings.TrimSpace(parts[1])
		switch key {
		case "content-length":
			n, err := strconv.Atoi(val)
			if err != nil {
				return nil, fmt.Errorf("invalid Content-Length: %v", err)
			}
			contentLength = n
		}
	}
	if contentLength <= 0 {
		return nil, fmt.Errorf("missing or invalid Content-Length")
	}
	// Read exactly contentLength bytes of body.
	buf := make([]byte, contentLength)
	if _, err := io.ReadFull(s.in, buf); err != nil {
		return nil, err
	}
	return buf, nil
}
// writeMessage marshals v and writes it as a Content-Length framed JSON-RPC
// message, serializing concurrent writers via outMu. Errors are logged and
// otherwise swallowed — there is no channel to report them to the peer.
func (s *Server) writeMessage(v any) {
	s.outMu.Lock()
	defer s.outMu.Unlock()
	payload, err := json.Marshal(v)
	if err != nil {
		logging.Logf("lsp ", "marshal error: %v", err)
		return
	}
	if _, err := fmt.Fprintf(s.out, "Content-Length: %d\r\n\r\n", len(payload)); err != nil {
		logging.Logf("lsp ", "write header error: %v", err)
		return
	}
	if _, err := s.out.Write(payload); err != nil {
		logging.Logf("lsp ", "write body error: %v", err)
	}
}
package runtimeconfig
import (
"fmt"
"log"
"reflect"
"sort"
"strconv"
"strings"
"sync"
"codeberg.org/snonux/hexai/internal/appconfig"
)
// Change captures a single configuration delta as flattened key/value strings.
type Change struct {
	Key string // flattened config key (see flattenAppConfig)
	Old string // previous stringified value
	New string // new stringified value
}

// Listener receives the previous and new application configuration when updates occur.
type Listener func(old appconfig.App, new appconfig.App)

// Store holds the active runtime configuration and notifies listeners on updates.
type Store struct {
	mu        sync.RWMutex     // guards cfg, listeners, and nextID
	cfg       appconfig.App    // current configuration snapshot
	listeners map[int]Listener // registered listeners keyed by subscription id
	nextID    int              // next subscription id to hand out
}
// New creates a Store seeded with the provided configuration snapshot and an
// empty listener set.
func New(cfg appconfig.App) *Store {
	return &Store{cfg: cfg, listeners: make(map[int]Listener)}
}
// Snapshot returns the current configuration snapshot. Callers must treat it
// as read-only.
func (s *Store) Snapshot() appconfig.App {
	s.mu.RLock()
	cfg := s.cfg
	s.mu.RUnlock()
	return cfg
}
// Subscribe registers a listener invoked on every configuration change and
// returns a function that removes it. A nil listener yields a no-op remover.
func (s *Store) Subscribe(listener Listener) func() {
	if listener == nil {
		return func() {}
	}
	s.mu.Lock()
	defer s.mu.Unlock()
	id := s.nextID
	s.nextID++
	s.listeners[id] = listener
	return func() {
		s.mu.Lock()
		defer s.mu.Unlock()
		delete(s.listeners, id)
	}
}
// Set replaces the current configuration with the provided snapshot and notifies listeners.
// It returns the list of detected changes between the previous and new configuration.
// Listeners are snapshotted under the lock but invoked after it is released,
// so a listener may safely call back into the Store.
func (s *Store) Set(cfg appconfig.App) []Change {
	s.mu.Lock()
	old := s.cfg
	s.cfg = cfg
	listeners := make([]Listener, 0, len(s.listeners))
	for _, l := range s.listeners {
		listeners = append(listeners, l)
	}
	s.mu.Unlock()
	changes := Diff(old, cfg)
	for _, l := range listeners {
		l(old, cfg)
	}
	return changes
}
// Reload re-reads configuration using the supplied options, applies it when
// it validates, and logs a change summary when a logger is provided. The
// detected changes (possibly empty) are returned.
func (s *Store) Reload(logger *log.Logger, opts appconfig.LoadOptions) ([]Change, error) {
	next := appconfig.LoadWithOptions(logger, opts)
	if err := next.Validate(); err != nil {
		return nil, err
	}
	applied := s.Set(next)
	if logger != nil {
		logger.Print(FormatSummary("Reloaded config", applied))
	}
	return applied, nil
}
// Diff computes a stable, sorted list of key/value changes between two
// configuration snapshots by comparing their flattened string forms.
func Diff(oldCfg, newCfg appconfig.App) []Change {
	before := flattenAppConfig(oldCfg)
	after := flattenAppConfig(newCfg)
	// Collect the union of keys from both snapshots, deduplicated.
	seen := make(map[string]struct{}, len(before)+len(after))
	keys := make([]string, 0, len(before)+len(after))
	collect := func(m map[string]string) {
		for k := range m {
			if _, dup := seen[k]; !dup {
				seen[k] = struct{}{}
				keys = append(keys, k)
			}
		}
	}
	collect(before)
	collect(after)
	sort.Strings(keys)
	changes := make([]Change, 0, len(keys))
	for _, k := range keys {
		if before[k] != after[k] {
			changes = append(changes, Change{Key: k, Old: before[k], New: after[k]})
		}
	}
	return changes
}
// flattenAppConfig converts a config struct into a flat key → stringified
// value map using each field's toml tag as the key. Fields without a usable
// tag fall back to a small hand-maintained name mapping; anything else is
// skipped. Tag options after a comma (e.g. ",omitempty") are stripped.
func flattenAppConfig(cfg appconfig.App) map[string]string {
	result := make(map[string]string)
	val := reflect.ValueOf(cfg)
	typ := val.Type()
	for i := 0; i < typ.NumField(); i++ {
		field := typ.Field(i)
		key := strings.TrimSpace(field.Tag.Get("toml"))
		if key == "" || key == "-" {
			// Fallback names for fields that carry no toml tag.
			switch field.Name {
			case "StatsWindowMinutes":
				key = "stats_window_minutes"
			case "CompletionConfigs":
				key = "completion_configs"
			case "CodeActionConfigs":
				key = "code_action_configs"
			case "ChatConfigs":
				key = "chat_configs"
			case "CLIConfigs":
				key = "cli_configs"
			default:
				continue
			}
		}
		// Strip tag options such as ",omitempty".
		if idx := strings.Index(key, ","); idx >= 0 {
			key = key[:idx]
		}
		if key == "" || key == "-" {
			continue
		}
		result[key] = stringifyValue(val.Field(i))
	}
	return result
}
// stringifyValue renders a reflected config field value as a stable string
// for diffing: scalars via strconv, string slices comma-joined,
// SurfaceConfig slices as "provider:model@temp" segments joined with "|",
// nil pointers as "(unset)", and anything else via fmt.
func stringifyValue(v reflect.Value) string {
	if !v.IsValid() {
		return ""
	}
	switch v.Kind() {
	case reflect.String:
		return v.String()
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return strconv.FormatInt(v.Int(), 10)
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return strconv.FormatUint(v.Uint(), 10)
	case reflect.Float32:
		// Use bitSize 32 so float32 values round-trip cleanly; formatting them
		// with bitSize 64 (as before) produced artifacts like "0.100000001…".
		return strconv.FormatFloat(v.Float(), 'f', -1, 32)
	case reflect.Float64:
		return strconv.FormatFloat(v.Float(), 'f', -1, 64)
	case reflect.Bool:
		return strconv.FormatBool(v.Bool())
	case reflect.Slice:
		if v.IsNil() {
			return ""
		}
		if v.Type().Elem().Kind() == reflect.String {
			parts := make([]string, v.Len())
			for i := range parts {
				parts[i] = v.Index(i).String()
			}
			return strings.Join(parts, ",")
		}
		if v.Type().Elem() == reflect.TypeOf(appconfig.SurfaceConfig{}) {
			parts := make([]string, 0, v.Len())
			for i := 0; i < v.Len(); i++ {
				entry := v.Index(i).Interface().(appconfig.SurfaceConfig)
				segment := strings.TrimSpace(entry.Provider)
				if segment != "" {
					segment += ":"
				}
				segment += strings.TrimSpace(entry.Model)
				if entry.Temperature != nil {
					segment += fmt.Sprintf("@%.3f", *entry.Temperature)
				}
				parts = append(parts, segment)
			}
			return strings.Join(parts, "|")
		}
		return fmt.Sprint(v.Interface())
	case reflect.Ptr:
		if v.IsNil() {
			return "(unset)"
		}
		return stringifyValue(v.Elem())
	default:
		return fmt.Sprint(v.Interface())
	}
}
// FormatSummary creates a human-readable summary for configuration changes:
// a header line naming the change count followed by one "- key: old → new"
// line per change, or a "(no changes detected)" message when empty.
func FormatSummary(prefix string, changes []Change) string {
	if len(changes) == 0 {
		return fmt.Sprintf("%s (no changes detected).", prefix)
	}
	var b strings.Builder
	fmt.Fprintf(&b, "%s (%d changes):", prefix, len(changes))
	for _, ch := range changes {
		fmt.Fprintf(&b, "\n- %s: %s → %s", ch.Key, ch.Old, ch.New)
	}
	return b.String()
}
//go:build !windows
package stats
import (
"errors"
"golang.org/x/sys/unix"
)
// tryLockFile attempts a non-blocking exclusive flock on fd, translating
// EWOULDBLOCK into errLockWouldBlock so callers can detect lock contention.
func tryLockFile(fd uintptr) error {
	err := unix.Flock(int(fd), unix.LOCK_EX|unix.LOCK_NB)
	switch {
	case err == nil:
		return nil
	case errors.Is(err, unix.EWOULDBLOCK):
		return errLockWouldBlock
	default:
		return err
	}
}
// unlockFile releases the advisory flock held on fd.
func unlockFile(fd uintptr) error {
	return unix.Flock(int(fd), unix.LOCK_UN)
}
// Package stats provides a simple, process-safe, on-disk cache of Hexai LLM usage
// statistics shared across all binaries. It appends compact events (ts, provider,
// model, sent, recv) to a JSON file guarded by an advisory file lock, prunes
// entries older than the configured window (default 1h), and computes aggregated
// snapshots for display in logs and tmux status.
package stats
import (
"context"
"encoding/json"
"errors"
"fmt"
"os"
"path/filepath"
"strconv"
"sync/atomic"
"time"
)
const (
fileName = "stats.json"
lockFileName = "stats.lock"
fileVersion = 1
defaultWindow = time.Hour
)
var windowSeconds int64 = int64(defaultWindow.Seconds())
var errLockWouldBlock = errors.New("stats: lock would block")
// SetWindow sets the sliding window used for pruning and aggregation.
func SetWindow(d time.Duration) {
if d < time.Second {
d = time.Second
}
if d > 24*time.Hour {
d = 24 * time.Hour
}
atomic.StoreInt64(&windowSeconds, int64(d.Seconds()))
}
// Window returns the current sliding window.
func Window() time.Duration { return time.Duration(atomic.LoadInt64(&windowSeconds)) * time.Second }
// Event represents a single request/response with sizes.
type Event struct {
	TS       time.Time `json:"ts"`       // when the event was recorded
	Provider string    `json:"provider"` // LLM provider name
	Model    string    `json:"model"`    // model identifier
	Sent     int64     `json:"sent"`     // request payload size in bytes
	Recv     int64     `json:"recv"`     // response payload size in bytes
}

// File is the on-disk JSON structure.
type File struct {
	Version       int       `json:"version"`        // schema version; mismatches reset the file
	UpdatedAt     time.Time `json:"updated_at"`     // last write time
	WindowSeconds int       `json:"window_seconds"` // sliding window used at last write
	Events        []Event   `json:"events"`         // events within the window, append order
}
// Counters and Snapshot represent computed aggregates for the current window.
type Counters struct{ Reqs, Sent, Recv int64 } // request count and sent/received byte totals

// ProviderEntry aggregates counters for one provider, overall and per model.
type ProviderEntry struct {
	Totals Counters            // totals across all of this provider's models
	Models map[string]Counters // per-model counters, keyed by model name
}

// Snapshot is a point-in-time aggregate of all events inside the window.
type Snapshot struct {
	Global    Counters                 // totals across all providers
	Providers map[string]ProviderEntry // keyed by provider name
	RPM       float64                  // requests per minute over the window
	Window    time.Duration            // sliding window the snapshot covers
}
// Update appends one event and prunes old entries under lock.
// It serializes concurrent hexai processes via an advisory flock on a sidecar
// lock file, reads the existing stats file, appends the new event, drops
// events older than the sliding window, and rewrites the file atomically via
// temp file + rename. ctx bounds the wait for the lock.
func Update(ctx context.Context, provider, model string, sentBytes, recvBytes int) error {
	dir, err := CacheDir()
	if err != nil {
		return err
	}
	if err := os.MkdirAll(dir, 0o755); err != nil {
		return err
	}
	lockPath := filepath.Join(dir, lockFileName)
	f, err := os.OpenFile(lockPath, os.O_CREATE|os.O_RDWR, 0o600)
	if err != nil {
		return err
	}
	defer f.Close()
	unlock, err := acquireFileLock(ctx, f)
	if err != nil {
		return err
	}
	defer func() { _ = unlock() }()
	// Read existing file (if any); an unmarshal error is deliberately ignored
	// so a corrupt file is simply reset by the version check below.
	path := filepath.Join(dir, fileName)
	var sf File
	if b, rerr := os.ReadFile(path); rerr == nil {
		_ = json.Unmarshal(b, &sf)
	}
	// Fresh, corrupt, or version-mismatched files start over from scratch.
	if sf.Version != fileVersion {
		sf = File{Version: fileVersion}
	}
	now := time.Now()
	win := Window()
	sf.WindowSeconds = int(win.Seconds())
	// Append event
	sf.Events = append(sf.Events, Event{TS: now, Provider: provider, Model: model, Sent: int64(sentBytes), Recv: int64(recvBytes)})
	// Prune old: events are appended in time order, so everything before the
	// first in-window index can be dropped with a single copy.
	cutoff := now.Add(-win)
	if len(sf.Events) > 0 {
		// Find first >= cutoff
		i := 0
		for ; i < len(sf.Events); i++ {
			if !sf.Events[i].TS.Before(cutoff) {
				break
			}
		}
		if i > 0 {
			// Copy to a fresh backing array so the pruned prefix can be GC'd.
			sf.Events = append([]Event(nil), sf.Events[i:]...)
		}
	}
	sf.UpdatedAt = now
	// Write atomically: encode to a temp file in the same directory, sync,
	// close, then rename over the real file so readers never observe a
	// partially written file.
	tmp, err := os.CreateTemp(dir, fileName+".tmp.")
	if err != nil {
		return err
	}
	enc := json.NewEncoder(tmp)
	enc.SetEscapeHTML(false)
	if err := enc.Encode(&sf); err != nil {
		tmp.Close()
		os.Remove(tmp.Name())
		return err
	}
	if err := tmp.Sync(); err != nil {
		tmp.Close()
		os.Remove(tmp.Name())
		return err
	}
	if err := tmp.Close(); err != nil {
		os.Remove(tmp.Name())
		return err
	}
	if err := os.Rename(tmp.Name(), path); err != nil {
		os.Remove(tmp.Name())
		return err
	}
	return nil
}
// acquireFileLock blocks until the advisory lock on f is obtained or ctx is
// done. On success it returns a release function for the lock.
func acquireFileLock(ctx context.Context, f *os.File) (func() error, error) {
	fd := f.Fd()
	for {
		switch err := tryLockFile(fd); {
		case err == nil:
			return func() error { return unlockFile(fd) }, nil
		case !errors.Is(err, errLockWouldBlock):
			return nil, err
		}
		// Lock held elsewhere: back off briefly while honoring cancellation.
		select {
		case <-ctx.Done():
			return nil, ctx.Err()
		case <-time.After(5 * time.Millisecond):
		}
	}
}
// TakeSnapshot reads and aggregates events within the configured window.
// A missing stats file yields an empty snapshot rather than an error. The
// read is lockless: Update renames a complete temp file into place, so a
// reader sees either the old or the new file, never a partial write.
func TakeSnapshot() (Snapshot, error) {
	dir, err := CacheDir()
	if err != nil {
		return Snapshot{}, err
	}
	path := filepath.Join(dir, fileName)
	b, err := os.ReadFile(path)
	if err != nil {
		if errors.Is(err, os.ErrNotExist) {
			// No stats written yet: empty but usable snapshot.
			return Snapshot{Providers: map[string]ProviderEntry{}, Window: Window()}, nil
		}
		return Snapshot{}, err
	}
	var sf File
	if err := json.Unmarshal(b, &sf); err != nil {
		return Snapshot{}, err
	}
	win := time.Duration(sf.WindowSeconds) * time.Second
	if win <= 0 {
		win = Window()
	} else {
		SetWindow(win) // align process with file window if changed elsewhere
	}
	cutoff := time.Now().Add(-win)
	snap := Snapshot{Providers: make(map[string]ProviderEntry), Window: win}
	for _, ev := range sf.Events {
		if ev.TS.Before(cutoff) {
			continue
		}
		// Accumulate global, per-provider, and per-model counters.
		snap.Global.Reqs++
		snap.Global.Sent += ev.Sent
		snap.Global.Recv += ev.Recv
		pe := snap.Providers[ev.Provider]
		if pe.Models == nil {
			pe.Models = make(map[string]Counters)
		}
		pe.Totals.Reqs++
		pe.Totals.Sent += ev.Sent
		pe.Totals.Recv += ev.Recv
		mc := pe.Models[ev.Model]
		mc.Reqs++
		mc.Sent += ev.Sent
		mc.Recv += ev.Recv
		pe.Models[ev.Model] = mc
		snap.Providers[ev.Provider] = pe
	}
	// Requests-per-minute over the window; guard against division by zero.
	mins := win.Minutes()
	if mins <= 0 {
		mins = 0.001
	}
	snap.RPM = float64(snap.Global.Reqs) / mins
	return snap, nil
}
// CacheDir resolves the cache directory for stats: $XDG_CACHE_HOME/hexai when
// the variable is set (non-blank), otherwise ~/.cache/hexai.
func CacheDir() (string, error) {
	xdg := os.Getenv("XDG_CACHE_HOME")
	if stringsTrim(xdg) != "" {
		return filepath.Join(xdg, "hexai"), nil
	}
	home, err := os.UserHomeDir()
	if err != nil {
		return "", fmt.Errorf("cannot resolve home: %w", err)
	}
	return filepath.Join(home, ".cache", "hexai"), nil
}
// stringsTrim is a tiny helper to avoid importing strings everywhere here.
// It trims ASCII spaces, tabs, and CR/LF from both ends of s.
func stringsTrim(s string) string {
	isSpace := func(b byte) bool {
		return b == ' ' || b == '\t' || b == '\n' || b == '\r'
	}
	start, end := 0, len(s)
	for start < end && isSpace(s[start]) {
		start++
	}
	for end > start && isSpace(s[end-1]) {
		end--
	}
	return s[start:end]
}
// DebugString returns a compact single-line view of a snapshot (useful for logs).
func (s Snapshot) DebugString() string {
	reqs := strconv.FormatInt(s.Global.Reqs, 10)
	rpm := fmt.Sprintf("%.2f", s.RPM)
	return "Σ reqs=" + reqs + " rpm=" + rpm
}
package testutil
// MultilineDocBlock returns a realistic multi-line documentation block.
// The exact bytes are fixed; tests compare against them verbatim.
func MultilineDocBlock() string {
	return "// add adds two numbers\n// returns their sum"
}

// MultilineChatReply returns a multi-line assistant reply for chat tests.
func MultilineChatReply() string {
	return "Hello, world!\nThis is a multi-line reply."
}

// MultilineFunctionSuggestion returns a more realistic multi-line function body suggestion.
func MultilineFunctionSuggestion() string {
	return "(ctx context.Context, input string) (*CustData, error) {\n    // TODO: implement\n    return &CustData{}, nil\n}"
}

// MarkdownCodeFence returns a fenced markdown snippet used in post-processing tests.
func MarkdownCodeFence() string {
	return "```go\nname := value\n```"
}

// MalformedJSON returns a deliberately malformed JSON string (missing the
// closing brace) for negative-path decoding tests.
func MalformedJSON() string {
	return "{\"choices\":[{\"delta\":{\"content\":\"oops\"}}]"
}
package textutil
import "fmt"
// HumanBytes renders n in a short human-friendly form using base-1000 units.
// Examples: 999 -> 999B, 1200 -> 1.2k, 1540000 -> 1.5M
func HumanBytes(n int64) string {
if n < 1000 {
return fmt.Sprintf("%dB", n)
}
const unit = 1000.0
v := float64(n)
suffix := []string{"k", "M", "G", "T"}
i := 0
for v >= unit && i < len(suffix)-1 {
v /= unit
i++
}
s := fmt.Sprintf("%.1f%s", v, suffix[i])
// Strip trailing ".0"
if len(s) >= 3 && s[len(s)-2:] == ".0" {
s = fmt.Sprintf("%d%s", int(v), suffix[i])
}
return s
}
package textutil
import "strings"
// RenderTemplate performs simple {{var}} replacement in a template string.
// Replacements are applied sequentially per variable; an empty template or
// empty variable map is returned unchanged.
func RenderTemplate(t string, vars map[string]string) string {
	if t == "" || len(vars) == 0 {
		return t
	}
	result := t
	for name, value := range vars {
		result = strings.ReplaceAll(result, "{{"+name+"}}", value)
	}
	return result
}
// StripCodeFences removes surrounding Markdown triple-backtick fences.
// Only a fence pair that wraps the whole (whitespace-trimmed) input is
// stripped; anything else is returned trimmed but otherwise unchanged.
func StripCodeFences(s string) string {
	trimmed := strings.TrimSpace(s)
	if trimmed == "" {
		return trimmed
	}
	lines := strings.Split(trimmed, "\n")
	// Skip blank lines at both ends before inspecting the fences.
	first := 0
	for first < len(lines) && strings.TrimSpace(lines[first]) == "" {
		first++
	}
	last := len(lines) - 1
	for last >= 0 && strings.TrimSpace(lines[last]) == "" {
		last--
	}
	if first >= len(lines) || last < first {
		return trimmed
	}
	head := strings.TrimSpace(lines[first])
	tail := strings.TrimSpace(lines[last])
	// The opening fence may carry a language tag ("```go"); the closing one
	// must be a bare "```" on its own line.
	if last > first && strings.HasPrefix(head, "```") && tail == "```" {
		return strings.Join(lines[first+1:last], "\n")
	}
	return trimmed
}
// InstructionFromSelection extracts the first inline instruction and returns
// (instruction, cleanedSelection). It detects markers on the earliest position
// per line in precedence: strict ;text;, /* */, <!-- -->, //, #, --.
// When no line yields a non-blank instruction, the selection is returned as-is.
func InstructionFromSelection(sel string) (string, string) {
	lines := strings.Split(sel, "\n")
	for i := range lines {
		instr, cleaned, ok := FindFirstInstructionInLine(lines[i])
		if !ok || strings.TrimSpace(instr) == "" {
			continue
		}
		lines[i] = cleaned
		return instr, strings.Join(lines, "\n")
	}
	return "", sel
}
// FindFirstInstructionInLine returns (instruction, cleaned, ok) for a single line.
// It collects every comment-marker candidate (strict ;text;, /* */, <!-- -->,
// //, #, --) and picks the one starting earliest in the line; insertion order
// breaks ties, which realizes the documented precedence. cleaned is the line
// with the winning marker span removed and trailing spaces/tabs trimmed.
func FindFirstInstructionInLine(line string) (instr, cleaned string, ok bool) {
	// cand records one candidate: [start, end) byte span and extracted text.
	type cand struct {
		start, end int
		text       string
	}
	cands := []cand{}
	// Strict inline tag ;text; — appended first so it wins position ties.
	if t, l, r, ok := FindStrictInlineTag(line); ok {
		cands = append(cands, cand{start: l, end: r, text: t})
	}
	// C-style block comment, only when it closes on the same line.
	if i := strings.Index(line, "/*"); i >= 0 {
		if j := strings.Index(line[i+2:], "*/"); j >= 0 {
			start := i
			end := i + 2 + j + 2
			text := strings.TrimSpace(line[i+2 : i+2+j])
			cands = append(cands, cand{start: start, end: end, text: text})
		}
	}
	// HTML comment, only when it closes on the same line.
	if i := strings.Index(line, "<!--"); i >= 0 {
		if j := strings.Index(line[i+4:], "-->"); j >= 0 {
			start := i
			end := i + 4 + j + 3
			text := strings.TrimSpace(line[i+4 : i+4+j])
			cands = append(cands, cand{start: start, end: end, text: text})
		}
	}
	// Line comments consume the rest of the line: //, #, and --.
	if i := strings.Index(line, "//"); i >= 0 {
		cands = append(cands, cand{start: i, end: len(line), text: strings.TrimSpace(line[i+2:])})
	}
	if i := strings.Index(line, "#"); i >= 0 {
		cands = append(cands, cand{start: i, end: len(line), text: strings.TrimSpace(line[i+1:])})
	}
	if i := strings.Index(line, "--"); i >= 0 {
		cands = append(cands, cand{start: i, end: len(line), text: strings.TrimSpace(line[i+2:])})
	}
	if len(cands) == 0 {
		return "", line, false
	}
	// Earliest start wins; on equal starts the earlier-appended candidate
	// (higher precedence) is kept because the comparison is strict.
	best := cands[0]
	for _, c := range cands[1:] {
		if c.start >= 0 && (best.start < 0 || c.start < best.start) {
			best = c
		}
	}
	cleaned = strings.TrimRight(line[:best.start]+line[best.end:], " \t")
	return best.text, cleaned, true
}
// FindStrictInlineTag finds ;text; with no spaces after/before semicolons.
// It returns the trimmed inner text and the [left, right) byte offsets of the
// whole tag, or ("", -1, -1, false) when no valid tag exists.
func FindStrictInlineTag(line string) (text string, left, right int, ok bool) {
	n := len(line)
	for open := 0; open < n; open++ {
		if line[open] != ';' {
			continue
		}
		// An opener immediately followed by a space is not strict; skip it.
		if open+1 < n && line[open+1] == ' ' {
			continue
		}
		for closeAt := open + 1; closeAt < n; closeAt++ {
			if line[closeAt] != ';' {
				continue
			}
			// A closer immediately preceded by a space is rejected; keep
			// scanning for a later closer.
			if line[closeAt-1] == ' ' {
				continue
			}
			if inner := strings.TrimSpace(line[open+1 : closeAt]); inner != "" {
				return inner, open, closeAt + 1, true
			}
		}
	}
	return "", -1, -1, false
}
package tmux
import (
"fmt"
"os"
"os/exec"
"strconv"
"strings"
"time"
"codeberg.org/snonux/hexai/internal/textutil"
)
// baseFGToken is a placeholder inserted by status formatters wherever the
// base foreground color should be restored. The theming layer (applyTheme)
// replaces this token with a tmux color sequence matching the active theme's
// foreground, which fixes readability when a theme sets a non-default fg.
// arrowUpToken/arrowDownToken likewise mark where the up/down arrow styles
// are substituted. The \x1E (ASCII record separator) delimiters make
// collisions with real status text effectively impossible.
const (
	baseFGToken    = "\x1EHEXAI_BASE_FG\x1E"
	arrowUpToken   = "\x1EHEXAI_ARROW_UP\x1E"   // style slot for the ↑ arrow
	arrowDownToken = "\x1EHEXAI_ARROW_DOWN\x1E" // style slot for the ↓ arrow
)
// Enabled reports whether tmux status updates are enabled via env (default: on).
// An unset/blank HEXAI_TMUX_STATUS means enabled; otherwise only the truthy
// values 1/true/yes/on (case-insensitive) enable it.
func Enabled() bool {
	switch strings.ToLower(strings.TrimSpace(os.Getenv("HEXAI_TMUX_STATUS"))) {
	case "":
		return true // unset: default on
	case "1", "true", "yes", "on":
		return true
	default:
		return false
	}
}
// SetUserOption sets a global tmux user option like @hexai_status to value.
// It is a silent no-op when status updates are disabled, tmux is not on PATH,
// we are outside a tmux session, or the key is blank.
func SetUserOption(key, value string) error {
	if !Enabled() || !HasBinary() || !InSession() {
		return nil
	}
	name := strings.TrimPrefix(strings.TrimSpace(key), "@")
	if name == "" {
		return nil
	}
	// set-option -g makes the option visible to all windows.
	cmd := exec.Command("tmux", "set-option", "-g", "@"+name, value)
	return cmd.Run()
}
// SetStatus is a convenience for setting @hexai_status, applying the active
// theme to value first.
func SetStatus(value string) error {
	return SetUserOption("hexai_status", applyTheme(value))
}
// FormatLLMStatsStatus builds a compact tmux status string for LLM heartbeats.
// Example: "LLM:gpt-4.1 5r 0.8rpm in12k out34k"
func FormatLLMStatsStatus(model string, reqs int64, rpm float64, inBytes, outBytes int64) string {
	in := textutil.HumanBytes(inBytes)
	out := textutil.HumanBytes(outBytes)
	return fmt.Sprintf("LLM:%s %dr %.1frpm in%s out%s", model, reqs, rpm, in, out)
}
// FormatLLMStatsStatusColored is like FormatLLMStatsStatus but includes provider and
// tmux color segments for readability. Uses up/down arrows for bytes.
// Example (with colors): "LLM:openai:gpt-4.1 ↑12k ↓34k 0.8rpm 5r"
func FormatLLMStatsStatusColored(provider, model string, reqs int64, rpm float64, inBytes, outBytes int64) string {
in := textutil.HumanBytes(inBytes)
out := textutil.HumanBytes(outBytes)
// Keep it compact; colorize prefix and arrows; use fg resets so a themed bg can persist.
// Arrows use theme-aware styles; bytes immediately switch to base fg for contrast.
return fmt.Sprintf(
"%sLLM:%s:%s %s↑%s%s %s↓%s%s %.1frpm %dr",
baseFGToken, provider, model, arrowUpToken, baseFGToken, in, arrowDownToken, baseFGToken, out, rpm, reqs,
)
}
// FormatGlobalStatusColored renders a compact global stats heartbeat with an optional
// scoped provider:model tail. The window indicator (e.g., Σ@1h) should be composed
// by the caller if needed; this function focuses on numbers and labels.
// Example: "Σ ↑120k ↓340k 4.2rpm | openai:gpt-4.1 3.1rpm 80r"
func FormatGlobalStatusColored(globalReqs int64, globalRPM float64, globalIn, globalOut int64, scopeProvider, scopeModel string, scopeRPM float64, scopeReqs int64, window time.Duration) string {
gin := textutil.HumanBytes(globalIn)
gout := textutil.HumanBytes(globalOut)
head := fmt.Sprintf("%sΣ@%s %s↑%s%s %s↓%s%s %.1frpm", baseFGToken, humanWindow(window), arrowUpToken, baseFGToken, gin, arrowDownToken, baseFGToken, gout, globalRPM)
// Narrow modes: only show Σ head
if narrowEnabled() || stringsTrim(scopeProvider) == "" || stringsTrim(scopeModel) == "" {
return head
}
tail := fmt.Sprintf(" | %s:%s %.1frpm %dr", scopeProvider, scopeModel, scopeRPM, scopeReqs)
// Respect max length when configured: drop tail if it would overflow
if ml := maxStatusLen(); ml > 0 {
if len(head) <= ml && len(head)+len(tail) > ml {
return head
}
if len(head) > ml {
return truncateStatus(head, ml)
}
}
return head + tail
}
func humanWindow(d time.Duration) string {
if d <= 0 {
return "?"
}
mins := int(d.Minutes())
if mins%60 == 0 {
return fmt.Sprintf("%dh", mins/60)
}
if mins >= 60 {
return fmt.Sprintf("%dm", mins)
}
return fmt.Sprintf("%dm", mins)
}
// narrowEnabled returns true when HEXAI_TMUX_STATUS_NARROW is truthy (1/true/yes/on).
// Anything else — including unset/blank — is false.
func narrowEnabled() bool {
	switch strings.ToLower(stringsTrim(os.Getenv("HEXAI_TMUX_STATUS_NARROW"))) {
	case "1", "true", "yes", "on":
		return true
	}
	return false
}
// maxStatusLen returns HEXAI_TMUX_STATUS_MAXLEN parsed as int; 0 disables.
// Blank, unparsable, and non-positive values all disable the limit.
func maxStatusLen() int {
	raw := stringsTrim(os.Getenv("HEXAI_TMUX_STATUS_MAXLEN"))
	if raw == "" {
		return 0
	}
	if n, err := strconv.Atoi(raw); err == nil && n > 0 {
		return n
	}
	return 0
}
// truncateStatus shortens s to at most n characters (runes), replacing the
// final kept character with an ellipsis when content is dropped. The previous
// version sliced bytes (s[:n-1]), which could split a multi-byte UTF-8 rune —
// status strings contain "Σ", "↑", "↓" — and emit invalid UTF-8; counting
// runes is identical for pure-ASCII input.
func truncateStatus(s string, n int) string {
	if n <= 0 {
		return ""
	}
	r := []rune(s)
	if len(r) <= n {
		return s
	}
	if n == 1 {
		// No room for content plus ellipsis; keep the first character.
		return string(r[:1])
	}
	return string(r[:n-1]) + "…"
}
// stringsTrim trims ASCII spaces, tabs, and CR/LF from both ends of s.
// Delegates to strings.Trim with an explicit cutset so only those four
// byte values are removed, matching the previous hand-rolled loop.
func stringsTrim(s string) string {
	return strings.Trim(s, " \t\n\r")
}
// FormatLLMStartStatus renders a short colored heartbeat at start/initialize time.
// Example: "LLM:openai:gpt-4.1 ⏳"
func FormatLLMStartStatus(provider, model string) string {
return fmt.Sprintf("%sLLM:%s:%s #[fg=colour11]⏳%s", baseFGToken, provider, model, baseFGToken)
}
// applyTheme wraps the status string with a user-selected tmux style if requested.
// Set HEXAI_TMUX_STATUS_THEME=white-on-purple to get white-on-purple background.
// Resolution order: explicit HEXAI_TMUX_STATUS_FG/BG take priority over any
// theme preset; otherwise a recognized theme name selects fg/bg; with neither,
// only the placeholder tokens are substituted and the string stays unwrapped.
func applyTheme(s string) string {
	theme := strings.ToLower(strings.TrimSpace(os.Getenv("HEXAI_TMUX_STATUS_THEME")))
	// Allow explicit fg/bg override
	fg := strings.TrimSpace(os.Getenv("HEXAI_TMUX_STATUS_FG"))
	bg := strings.TrimSpace(os.Getenv("HEXAI_TMUX_STATUS_BG"))
	// Determine base foreground and background from env or theme presets
	baseFG := ""
	wrap := false
	if fg != "" || bg != "" { // explicit override path
		wrap = true
		if fg == "" {
			baseFG = "default"
		} else {
			baseFG = fg
		}
		// bg used as provided (may be empty)
	} else {
		switch theme {
		case "white-on-purple", "purple", "magenta", "white-on-magenta":
			baseFG, bg, wrap = "white", "magenta", true
		case "black-on-yellow", "yellow", "black-on-gold":
			baseFG, bg, wrap = "black", "yellow", true
		case "white-on-blue", "blue", "white-on-navy":
			baseFG, bg, wrap = "white", "blue", true
		}
		if baseFG == "" { // no theme selected
			baseFG = "default"
		}
	}
	// Theme-aware arrow styles
	upStyle, downStyle := "#[fg=colour3]", "#[fg=colour2]" // defaults: yellow up, green down
	if fg != "" || bg != "" { // explicit override path: match arrows to base fg, bold for visibility
		upStyle = "#[bold,fg=" + baseFG + "]"
		downStyle = upStyle
	} else {
		// Preset themes pick arrow colors that stay readable on the theme bg.
		switch theme {
		case "white-on-purple", "purple", "magenta", "white-on-magenta":
			upStyle, downStyle = "#[bold,fg=black]", "#[bold,fg=black]"
		case "black-on-yellow", "yellow", "black-on-gold":
			upStyle, downStyle = "#[bold,fg=black]", "#[bold,fg=black]"
		case "white-on-blue", "blue", "white-on-navy":
			upStyle, downStyle = "#[bold,fg=white]", "#[bold,fg=white]"
		}
	}
	// Replace base-foreground and arrow placeholders with selected styles
	if strings.Contains(s, baseFGToken) {
		s = strings.ReplaceAll(s, baseFGToken, "#[fg="+baseFG+"]")
	}
	if strings.Contains(s, arrowUpToken) {
		s = strings.ReplaceAll(s, arrowUpToken, upStyle)
	}
	if strings.Contains(s, arrowDownToken) {
		s = strings.ReplaceAll(s, arrowDownToken, downStyle)
	}
	if !wrap {
		return s
	}
	// Wrap with base fg and optional bg, then reset at the end
	prefix := "#[fg=" + baseFG
	if bg != "" {
		prefix += ",bg=" + bg
	}
	prefix += "]"
	return prefix + s + "#[fg=default,bg=default]"
}
package tmux
import (
"os"
"os/exec"
"strconv"
"strings"
)
// Available reports whether tmux is available and we appear to be in a tmux session.
func Available() bool {
	return HasBinary() && InSession()
}
// lookPath and command are indirections over os/exec so tests can stub
// binary lookup and command execution.
var (
	lookPath = exec.LookPath
	command  = exec.Command
)

// HasBinary reports whether the tmux binary is on PATH.
func HasBinary() bool { _, err := lookPath("tmux"); return err == nil }
// InSession reports whether we seem to be running inside a tmux session,
// judged by a non-blank $TMUX environment variable.
func InSession() bool {
	return strings.TrimSpace(os.Getenv("TMUX")) != ""
}
// SplitOpts controls how a new pane is created for running a command.
// The zero value splits horizontally with tmux's default size in the
// current window.
type SplitOpts struct {
	Target   string // optional pane target, e.g. ":."
	Vertical bool   // true => split vertically (-v); false => horizontally (-h)
	Percent  int    // 1..100; 0 means use tmux default
}
// SplitRun splits the current tmux window and runs argv in the new pane.
// It returns once tmux has launched the child process. An empty argv is a
// no-op.
func SplitRun(opts SplitOpts, argv []string) error {
	if len(argv) == 0 {
		return nil
	}
	direction := "-h"
	if opts.Vertical {
		direction = "-v"
	}
	args := []string{"split-window", direction}
	if opts.Percent > 0 && opts.Percent <= 100 {
		args = append(args, "-p", strconv.Itoa(opts.Percent))
	}
	if strings.TrimSpace(opts.Target) != "" {
		args = append(args, "-t", opts.Target)
	}
	// tmux takes a single command string; join argv with conservative quoting.
	args = append(args, shellJoin(argv))
	return command("tmux", args...).Run()
}
// shellJoin quotes argv elements for safe use in a single shell command string.
// It avoids shell interpretation by single-quoting anything that is not a safe
// bare word and escaping embedded single quotes.
func shellJoin(argv []string) string {
	parts := make([]string, 0, len(argv))
	for _, a := range argv {
		parts = append(parts, quoteArg(a))
	}
	return strings.Join(parts, " ")
}

// quoteArg renders one argv element: empty -> '', safe bare words unchanged,
// everything else single-quoted with ' escaped as '\'' (close, escaped
// quote, reopen).
func quoteArg(a string) string {
	switch {
	case a == "":
		return "''"
	case isSafeBare(a):
		return a
	default:
		return "'" + strings.ReplaceAll(a, "'", `'\''`) + "'"
	}
}

// isSafeBare reports whether s contains only characters safe as a bare word:
// ASCII letters, digits, and - _ . / :
func isSafeBare(s string) bool {
	for i := 0; i < len(s); i++ {
		switch b := s[i]; {
		case b >= 'a' && b <= 'z', b >= 'A' && b <= 'Z', b >= '0' && b <= '9':
		case b == '-', b == '_', b == '.', b == '/', b == ':':
		default:
			return false
		}
	}
	return true
}