summaryrefslogtreecommitdiff
path: root/cmd
diff options
context:
space:
mode:
authorPaul Buetow <paul@buetow.org>2025-08-17 08:54:20 +0300
committerPaul Buetow <paul@buetow.org>2025-08-17 08:54:20 +0300
commita61f40a4221c1586d801308da9ae1869fb6bdbb6 (patch)
tree477ecc7313b4f9e6fd5dd57ea2a727b7befc8f13 /cmd
parent17e9d33f2a5dc9305b4dbae1bc400b28f9b6c211 (diff)
cli: add hexai command-line tool and split LSP to hexai-lsp
- New cmd/hexai CLI: reads stdin/arg, prints LLM output to stdout
- Prints provider/model immediately to stderr; summary at end
- Refactor config loader to internal/appconfig
- Update Taskfile to build/install/run both binaries
- Update README with new CLI and LSP names
Diffstat (limited to 'cmd')
-rw-r--r--cmd/hexai-lsp/main.go82
-rw-r--r--cmd/hexai/main.go233
2 files changed, 158 insertions, 157 deletions
diff --git a/cmd/hexai-lsp/main.go b/cmd/hexai-lsp/main.go
new file mode 100644
index 0000000..065b6e2
--- /dev/null
+++ b/cmd/hexai-lsp/main.go
@@ -0,0 +1,82 @@
+package main
+
+import (
+ "flag"
+ "log"
+ "os"
+ "strings"
+
+ "hexai/internal"
+ "hexai/internal/appconfig"
+ "hexai/internal/llm"
+ "hexai/internal/logging"
+ "hexai/internal/lsp"
+)
+
+func main() {
+ logPath := flag.String("log", "/tmp/hexai-lsp.log", "path to log file (optional)")
+ showVersion := flag.Bool("version", false, "print version and exit")
+ flag.Parse()
+ if *showVersion {
+ log.Println(internal.Version)
+ return
+ }
+
+ // Configure logging (path flag only)
+ logger := log.New(os.Stderr, "hexai-lsp ", log.LstdFlags|log.Lmsgprefix)
+ if *logPath != "" {
+ f, err := os.OpenFile(*logPath, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644)
+ if err != nil {
+ logger.Fatalf("failed to open log file: %v", err)
+ }
+ defer f.Close()
+ logger.SetOutput(f)
+ }
+ logging.Bind(logger)
+
+ // Load config file
+ cfg := appconfig.Load(logger)
+
+ // Normalize and apply logging config
+ cfg.ContextMode = strings.ToLower(strings.TrimSpace(cfg.ContextMode))
+ if cfg.LogPreviewLimit >= 0 {
+ logging.SetLogPreviewLimit(cfg.LogPreviewLimit)
+ }
+
+ // Build LLM client from config
+ var client llm.Client
+ {
+ llmCfg := llm.Config{
+ Provider: cfg.Provider,
+ OpenAIBaseURL: cfg.OpenAIBaseURL,
+ OpenAIModel: cfg.OpenAIModel,
+ OllamaBaseURL: cfg.OllamaBaseURL,
+ OllamaModel: cfg.OllamaModel,
+ CopilotBaseURL: cfg.CopilotBaseURL,
+ CopilotModel: cfg.CopilotModel,
+ }
+ oaKey := os.Getenv("OPENAI_API_KEY")
+ cpKey := os.Getenv("COPILOT_API_KEY")
+ if c, err := llm.NewFromConfig(llmCfg, oaKey, cpKey); err != nil {
+ logging.Logf("lsp ", "llm disabled: %v", err)
+ } else {
+ client = c
+ logging.Logf("lsp ", "llm enabled provider=%s model=%s", c.Name(), c.DefaultModel())
+ }
+ }
+
+ server := lsp.NewServer(os.Stdin, os.Stdout, logger, lsp.ServerOptions{
+ LogContext: *logPath != "",
+ MaxTokens: cfg.MaxTokens,
+ ContextMode: cfg.ContextMode,
+ WindowLines: cfg.ContextWindowLines,
+ MaxContextTokens: cfg.MaxContextTokens,
+ NoDiskIO: cfg.NoDiskIO,
+ Client: client,
+ TriggerCharacters: cfg.TriggerCharacters,
+ })
+ if err := server.Run(); err != nil {
+ logger.Fatalf("server error: %v", err)
+ }
+}
+
diff --git a/cmd/hexai/main.go b/cmd/hexai/main.go
index b433b13..ad72439 100644
--- a/cmd/hexai/main.go
+++ b/cmd/hexai/main.go
@@ -1,172 +1,91 @@
package main
import (
- "encoding/json"
- "flag"
- "log"
- "os"
- "path/filepath"
- "strings"
+ "bufio"
+ "context"
+ "flag"
+ "fmt"
+ "io"
+ "os"
+ "strings"
+ "time"
- "hexai/internal"
- "hexai/internal/llm"
- "hexai/internal/logging"
- "hexai/internal/lsp"
+ "hexai/internal"
+ "hexai/internal/appconfig"
+ "hexai/internal/llm"
)
func main() {
- logPath := flag.String("log", "/tmp/hexai-lsp.log", "path to log file (optional)")
- showVersion := flag.Bool("version", false, "print version and exit")
- flag.Parse()
- if *showVersion {
- log.Println(internal.Version)
- return
- }
+ showVersion := flag.Bool("version", false, "print version and exit")
+ flag.Parse()
+ if *showVersion {
+ fmt.Fprintln(os.Stdout, internal.Version)
+ return
+ }
- // Configure logging (path flag only)
- logger := log.New(os.Stderr, "hexai-lsp ", log.LstdFlags|log.Lmsgprefix)
- if *logPath != "" {
- f, err := os.OpenFile(*logPath, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644)
- if err != nil {
- logger.Fatalf("failed to open log file: %v", err)
- }
- defer f.Close()
- logger.SetOutput(f)
- }
- logging.Bind(logger)
+ // Read stdin if present
+ var stdinData string
+ if fi, err := os.Stdin.Stat(); err == nil && (fi.Mode()&os.ModeCharDevice) == 0 {
+ b, _ := io.ReadAll(bufio.NewReader(os.Stdin))
+ stdinData = string(b)
+ }
- // Load config file
- cfg := loadConfig(logger)
+ // Read argument input (join all remaining args with space)
+ argData := strings.TrimSpace(strings.Join(flag.Args(), " "))
- // Normalize and apply logging config
- cfg.ContextMode = strings.ToLower(strings.TrimSpace(cfg.ContextMode))
- if cfg.LogPreviewLimit >= 0 {
- logging.SetLogPreviewLimit(cfg.LogPreviewLimit)
- }
+ // Combine inputs
+ var input string
+ switch {
+ case stdinData != "" && argData != "":
+ input = strings.TrimSpace(stdinData) + "\n\n" + argData
+ case stdinData != "":
+ input = strings.TrimSpace(stdinData)
+ case argData != "":
+ input = argData
+ default:
+ fmt.Fprintln(os.Stderr, "hexai: no input provided; pass text as an argument or via stdin")
+ os.Exit(2)
+ }
- // Build LLM client from config (only OPENAI_API_KEY may come from env)
- var client llm.Client
- {
- llmCfg := llm.Config{
- Provider: cfg.Provider,
- OpenAIBaseURL: cfg.OpenAIBaseURL,
- OpenAIModel: cfg.OpenAIModel,
- OllamaBaseURL: cfg.OllamaBaseURL,
- OllamaModel: cfg.OllamaModel,
- CopilotBaseURL: cfg.CopilotBaseURL,
- CopilotModel: cfg.CopilotModel,
- }
- oaKey := os.Getenv("OPENAI_API_KEY")
- cpKey := os.Getenv("COPILOT_API_KEY")
- if c, err := llm.NewFromConfig(llmCfg, oaKey, cpKey); err != nil {
- logging.Logf("lsp ", "llm disabled: %v", err)
- } else {
- client = c
- logging.Logf("lsp ", "llm enabled provider=%s model=%s", c.Name(), c.DefaultModel())
- }
- }
+ // Load config (no external logging for CLI)
+ cfg := appconfig.Load(nil)
- server := lsp.NewServer(os.Stdin, os.Stdout, logger, lsp.ServerOptions{
- LogContext: *logPath != "",
- MaxTokens: cfg.MaxTokens,
- ContextMode: cfg.ContextMode,
- WindowLines: cfg.ContextWindowLines,
- MaxContextTokens: cfg.MaxContextTokens,
- NoDiskIO: cfg.NoDiskIO,
- Client: client,
- TriggerCharacters: cfg.TriggerCharacters,
- })
- if err := server.Run(); err != nil {
- logger.Fatalf("server error: %v", err)
- }
-}
+ // Build LLM client
+ llmCfg := llm.Config{
+ Provider: cfg.Provider,
+ OpenAIBaseURL: cfg.OpenAIBaseURL,
+ OpenAIModel: cfg.OpenAIModel,
+ OllamaBaseURL: cfg.OllamaBaseURL,
+ OllamaModel: cfg.OllamaModel,
+ CopilotBaseURL: cfg.CopilotBaseURL,
+ CopilotModel: cfg.CopilotModel,
+ }
+ oaKey := os.Getenv("OPENAI_API_KEY")
+ cpKey := os.Getenv("COPILOT_API_KEY")
+ client, err := llm.NewFromConfig(llmCfg, oaKey, cpKey)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "hexai: LLM disabled: %v\n", err)
+ os.Exit(1)
+ }
-// appConfig holds user-configurable settings.
-type appConfig struct {
- MaxTokens int `json:"max_tokens"`
- ContextMode string `json:"context_mode"`
- ContextWindowLines int `json:"context_window_lines"`
- MaxContextTokens int `json:"max_context_tokens"`
- LogPreviewLimit int `json:"log_preview_limit"`
- NoDiskIO bool `json:"no_disk_io"`
- TriggerCharacters []string `json:"trigger_characters"`
- Provider string `json:"provider"`
- // Provider-specific options
- OpenAIBaseURL string `json:"openai_base_url"`
- OpenAIModel string `json:"openai_model"`
- OllamaBaseURL string `json:"ollama_base_url"`
- OllamaModel string `json:"ollama_model"`
- CopilotBaseURL string `json:"copilot_base_url"`
- CopilotModel string `json:"copilot_model"`
-}
+ // Print provider/model immediately to stderr
+ fmt.Fprintf(os.Stderr, "provider=%s model=%s\n", client.Name(), client.DefaultModel())
+
+ // Prepare and send request
+ start := time.Now()
+ msgs := []llm.Message{{Role: "user", Content: input}}
+ out, err := client.Chat(context.Background(), msgs)
+ dur := time.Since(start)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "hexai: error: %v\n", err)
+ os.Exit(1)
+ }
+
+ // Write assistant output to stdout
+ fmt.Fprint(os.Stdout, out)
-func loadConfig(logger *log.Logger) appConfig {
- // Defaults (mirror prior sensible values)
- cfg := appConfig{
- MaxTokens: 4000,
- ContextMode: "always-full",
- ContextWindowLines: 120,
- MaxContextTokens: 4000,
- LogPreviewLimit: 100,
- NoDiskIO: true,
- }
- home, err := os.UserHomeDir()
- if err != nil {
- return cfg
- }
- path := filepath.Join(home, ".config", "hexai", "config.json")
- f, err := os.Open(path)
- if err != nil {
- return cfg
- }
- defer f.Close()
- dec := json.NewDecoder(f)
- var fileCfg appConfig
- if err := dec.Decode(&fileCfg); err != nil {
- logger.Printf("invalid config file %s: %v", path, err)
- return cfg
- }
- // Merge: file overrides defaults when provided
- if fileCfg.MaxTokens > 0 {
- cfg.MaxTokens = fileCfg.MaxTokens
- }
- if strings.TrimSpace(fileCfg.ContextMode) != "" {
- cfg.ContextMode = fileCfg.ContextMode
- }
- if fileCfg.ContextWindowLines > 0 {
- cfg.ContextWindowLines = fileCfg.ContextWindowLines
- }
- if fileCfg.MaxContextTokens > 0 {
- cfg.MaxContextTokens = fileCfg.MaxContextTokens
- }
- if fileCfg.LogPreviewLimit >= 0 {
- cfg.LogPreviewLimit = fileCfg.LogPreviewLimit
- }
- cfg.NoDiskIO = fileCfg.NoDiskIO
- if len(fileCfg.TriggerCharacters) > 0 {
- cfg.TriggerCharacters = append([]string{}, fileCfg.TriggerCharacters...)
- }
- if strings.TrimSpace(fileCfg.Provider) != "" {
- cfg.Provider = fileCfg.Provider
- }
- // Provider-specific options
- if strings.TrimSpace(fileCfg.OpenAIBaseURL) != "" {
- cfg.OpenAIBaseURL = fileCfg.OpenAIBaseURL
- }
- if strings.TrimSpace(fileCfg.OpenAIModel) != "" {
- cfg.OpenAIModel = fileCfg.OpenAIModel
- }
- if strings.TrimSpace(fileCfg.OllamaBaseURL) != "" {
- cfg.OllamaBaseURL = fileCfg.OllamaBaseURL
- }
- if strings.TrimSpace(fileCfg.OllamaModel) != "" {
- cfg.OllamaModel = fileCfg.OllamaModel
- }
- if strings.TrimSpace(fileCfg.CopilotBaseURL) != "" {
- cfg.CopilotBaseURL = fileCfg.CopilotBaseURL
- }
- if strings.TrimSpace(fileCfg.CopilotModel) != "" {
- cfg.CopilotModel = fileCfg.CopilotModel
- }
- return cfg
+ // Summary to stderr
+ inSize := len(input)
+ outSize := len(out)
+ fmt.Fprintf(os.Stderr, "done provider=%s model=%s time=%s in_bytes=%d out_bytes=%d\n", client.Name(), client.DefaultModel(), dur.Round(time.Millisecond), inSize, outSize)
}