# Hexai Configuration File
#
# NOTE(review): TOML performs no variable or `~` expansion — the tilde paths
# below are literal strings; presumably the application expands them at load
# time. Verify against the config loader.

[mcp]
# Directory where MCP prompts are stored.
prompts_dir = "~/.local/hexai/data/prompts"
# Enable automatic syncing of MCP prompts to slash command files.
slashcommand_sync = true
# Directory where slash command files will be written (only relevant when
# `slashcommand_sync` above is true).
slashcommand_dir = "~/.cursor/commands"
# Default LLM provider for chat / CLI / code actions. The API key is read from
# the environment (HEXAI_OLLAMA_API_KEY, falling back to OLLAMA_API_KEY).

[provider]
# Provider identifier — presumably selects the matching provider table
# (here, [ollama] below); confirm against the consuming code.
name = "ollama"
# Connection and sampling settings for the Ollama provider.
[ollama]
base_url = "https://ollama.com"
model = "gemma4:31b-cloud"
temperature = 0.2
# In-code auto-completion uses gemma4:31b-cloud (the dense Gemma 4 hosted on
# Ollama Cloud). Latency-sensitive completions use the same model as chat.
[[models.completion]]
provider = "ollama"  # presumably refers to the [ollama] table above — confirm
model = "gemma4:31b-cloud"
# Matches the sampling temperature used for chat in [ollama].
temperature = 0.2