summary refs log tree commit diff
path: root/Rexfile
diff options
context:
space:
mode:
author Paul Buetow <paul@buetow.org> 2026-03-18 17:01:40 +0200
committer Paul Buetow <paul@buetow.org> 2026-03-18 17:01:40 +0200
commit 22d546124009c5907145318647cafd1659e2bc0b (patch)
tree 0596ea75e36ce3a7796f77698b52c0fd5e7854c2 /Rexfile
parent a34aab167ace5e851e9ee434b8b8425b800b5a73 (diff)
Move opencode config to static file; switch to vLLM-only backend
- Remove inline opencode_config_content() and configured_ollama_host() from Rexfile; config is now a plain file in opencode/opencode.json
- Remove JSON::PP dependency (no longer needed)
- Simplify home_opencode task to one-liner using ensure glob
- opencode/opencode.json: all 10 vLLM model presets from hyperstack (qwen3-coder-next, gpt-oss-20b/120b, llama-3.3-70b, qwen25-coder-32b, qwen3-coder-30b, deepseek-r1-32b, qwen3-32b, devstral, nemotron-super)
- Default model set to vLLM qwen3-coder-next (was Ollama qwen3-coder:30b)
- Ollama models removed; opencode now connects exclusively to vLLM

Co-Authored-By: Claude Sonnet 4.6 (1M context) <noreply@anthropic.com>
Diffstat (limited to 'Rexfile')
-rw-r--r-- Rexfile | 77
1 file changed, 1 insertion(+), 76 deletions(-)
diff --git a/Rexfile b/Rexfile
index 2663df8..5f818f3 100644
--- a/Rexfile
+++ b/Rexfile
@@ -1,6 +1,5 @@
use Rex -feature => [ '1.14', 'exec_autodie' ];
use Rex::Logger;
-use JSON::PP ();
our $HOME = $ENV{HOME};
@@ -39,67 +38,6 @@ sub ensure {
( $dst =~ /\/$/ ? \&ensure_dir : \&ensure_file )->( $src, $dst, $mode );
}
-sub configured_ollama_host {
- my $ollama_host = $ENV{OLLAMA_HOST};
-
- if ( !defined $ollama_host || $ollama_host eq q{} ) {
- my $fish_config = "$DOT/fish/conf.d/ai.fish";
-
- if ( open my $fh, '<', $fish_config ) {
- while ( my $line = <$fh> ) {
- if ( $line =~ /set -gx OLLAMA_HOST\s+(\S+)/ ) {
- $ollama_host = $1;
- last;
- }
- }
-
- close $fh;
- }
- }
-
- $ollama_host ||= 'http://hyperstack.wg1:11434';
- $ollama_host =~ s{/\z}{};
- $ollama_host =~ s{/v1\z}{};
-
- return $ollama_host;
-}
-
-sub opencode_config_content {
- my $base_url = configured_ollama_host() . '/v1';
-
- return JSON::PP->new->ascii->pretty->canonical->encode(
- {
- '$schema' => 'https://opencode.ai/config.json',
- 'model' => 'ollama/qwen3-coder:30b',
- 'provider' => {
- 'ollama' => {
- 'models' => {
- 'gpt-oss:120b' => {
- 'name' => 'GPT-OSS 120B'
- },
- 'gpt-oss:20b' => {
- 'name' => 'GPT-OSS 20B'
- },
- 'nemotron-3-super:latest' => {
- 'name' => 'Nemotron 3 Super'
- },
- 'qwen3-coder-next' => {
- 'name' => 'Qwen3 Coder Next'
- },
- 'qwen3-coder:30b' => {
- 'name' => 'Qwen3 Coder 30B'
- },
- },
- 'name' => 'Ollama',
- 'npm' => '@ai-sdk/openai-compatible',
- 'options' => {
- 'baseURL' => $base_url
- },
- },
- },
- }
- );
-}
desc 'Install packages on Termux';
task 'pkg_termux', sub {
@@ -212,20 +150,7 @@ desc 'Install ~/.config/lazygit';
task 'home_lazygit', sub { ensure "$DOT/lazygit/*" => "$HOME/.config/lazygit/" };
desc 'Install ~/.config/opencode';
-task 'home_opencode', sub {
- my $opencode_dir = "$HOME/.config/opencode";
-
- Rex::Logger::info( 'Deploying OpenCode config via ' . configured_ollama_host() );
-
- file $opencode_dir,
- ensure => 'directory',
- mode => '0750';
-
- file "$opencode_dir/opencode.json",
- ensure => 'present',
- content => opencode_config_content(),
- mode => '0640';
-};
+task 'home_opencode', sub { ensure "$DOT/opencode/*" => "$HOME/.config/opencode/" };
desc 'Install prompt links for AI tools';
task 'home_prompts', sub {