opencode部署

官方推荐

# YOLO
curl -fsSL https://opencode.ai/install | bash

# Package managers
npm i -g opencode-ai@latest        # or bun/pnpm/yarn
scoop install opencode             # Windows
choco install opencode             # Windows
brew install anomalyco/tap/opencode # macOS and Linux (recommended, always up to date)
brew install opencode              # macOS and Linux (official brew formula, updated less)
sudo pacman -S opencode            # Arch Linux (Stable)
paru -S opencode-bin               # Arch Linux (Latest from AUR)
mise use -g opencode               # Any OS
nix run nixpkgs#opencode           # or github:anomalyco/opencode for latest dev branch

主要docker

  • coderluii/holycode:1.0.4
    OpenCode AI coding agent with built-in web UI, Claude subscription support, 30+ dev tools, headless browser, and multi-agent orchestration. Use your existing Claude Max/Pro plan. No separate API key needed.
  • tailscale/tailscale:v1.96.5
    lets you connect your devices and users together in your own secure virtual private network. Tailscale enables encrypted point-to-point connections using the open source

mint最小部署

  • 版本号

    curl -s https://api.github.com/repos/anomalyco/opencode/releases | jq -r '.[0:5] | .[] | .tag_name + " (" + .published_at + ")"'
    v1.4.0 (2026-04-08T00:32:55Z)
    v1.3.17 (2026-04-06T07:39:42Z)
    v1.3.16 (2026-04-06T03:45:09Z)
    v1.3.15 (2026-04-04T20:54:43Z)
    v1.3.14 (2026-04-04T18:32:40Z)
  • 安装

    curl -fsSL https://opencode.ai/install | bash  # 支持升级
    opencode --version
    opencode  #启动进命令行
  • 体验

    新用户交互:执行opencode后,会提示各种商业模型是否有帐号,
    本地化:最后在它自己的操作下完成了本地openai的配置,调用本地算力成功
  • 排查方法
    • 系统运行日志:~/.local/share/opencode/log
      • tail -f $(ls -t ~/.local/share/opencode/log/*.log | head -1)
    • 对话记录:~/.local/share/opencode/opencode.db
      • sqlite3 ~/.local/share/opencode/opencode.db "SELECT * FROM part ORDER BY time_created desc LIMIT 10"
    • 配置文件:~/.config/opencode/opencode.json
    • 本地算力API权限:~/.local/share/opencode/auth.json
    • 对话内容过滤脚本:openchat.sh

openchat.sh

#!/bin/bash
# cat ~/openchat.sh
# Retrieve recent conversation history from the opencode SQLite database.
# Prints [思考] (reasoning) and [回答] (answer) entries in chronological
# order. Note: this does not capture the user's question content.
#
# Usage: openchat.sh [N]   — N is the number of conversation groups (default 5)
DB="/home/at/.local/share/opencode/opencode.db"

# Keep only a numeric argument, default to 5 groups.
# Validate before use: LIMIT is interpolated into both shell arithmetic and
# the SQL string below, so a non-numeric value would break the query (and is
# effectively an injection vector).
LIMIT="${1:-5}"
if ! [[ "$LIMIT" =~ ^[0-9]+$ ]]; then
    printf 'Usage: %s [number of conversation groups]\n' "${0##*/}" >&2
    exit 1
fi

first_entry=1

# Query the part table newest-first, over-fetch (10 rows per group is a
# heuristic), then reverse with tac so output reads oldest-to-newest.
# The while loop runs in a pipeline subshell; first_entry is only used
# inside it, so losing its value afterwards is fine.
sqlite3 "$DB" "
SELECT data FROM part
ORDER BY time_created DESC
LIMIT $((LIMIT * 10))
" | tac | while IFS= read -r line; do

    # Reasoning content
    if echo "$line" | grep -q '"type":"reasoning"'; then
        text=$(echo "$line" | python3 -c "import json,sys;o=json.load(sys.stdin);print(o.get('text',''))" 2>/dev/null)
        # Skip very short fragments (noise); print a separator before the
        # first entry only.
        if [ -n "$text" ] && [ ${#text} -gt 5 ]; then
            if [ $first_entry -eq 1 ]; then
                echo "-------------------------------------------"
                first_entry=0
            fi
            echo "[思考] $text"
        fi
    fi

    # Answer content
    if echo "$line" | grep -q '"type":"text"'; then
        text=$(echo "$line" | python3 -c "import json,sys;o=json.load(sys.stdin);print(o.get('text',''))" 2>/dev/null)
        # Skip payloads that are themselves JSON objects (tool output, not prose).
        if [ -n "$text" ] && ! echo "$text" | grep -qE '^\{'; then
            if [ $first_entry -eq 1 ]; then
                echo "-------------------------------------------"
                first_entry=0
            fi
            echo "[回答] $text"
            echo "-------------------------------------------"
        fi
    fi

done

echo "=================================="
echo " 显示最近 $LIMIT 组对话"
echo "=================================="

opencode.json

(base) at@vm-mint:~/.config/opencode$ cat opencode.json
# cat ~/.config/opencode/opencode.json
{
  "$schema": "https://opencode.ai/config.json",
  "provider": {
    "4060-lms": {
      "npm": "@ai-sdk/openai",
      "name": "4060-lms-openai",
      "options": {
        "baseURL": "https://openai1.atibm.com/v1",
        "apiKey": "sk-lm-qIaKp4M5:xxxxxxxxxxxxxxx"
      },
      "models": {
        "qwen3.5-4b-claude-4.6-opus-reasoning-distill-heretic-v3-i1": {
          "name": "Qwen3.5-4B"
        }
      }
    },
    "v100-llama": {
      "npm": "@ai-sdk/openai-compatible",
      "name": "v100-llama-openai",
      "options": {
        "baseURL": "http://192.168.1.142:8080/v1",
        "apiKey": "sk-dummy"
      },
      "models": {
        "Qwen3.5-27B-claude.Q4_K_M.gguf":{
          "name": "qwen3.5-27B-claude"
        }
      }
    }
  },
  "model": "v100-llama/Qwen3.5-27B-claude.Q4_K_M.gguf",
  "small_model": "4060-lms/qwen3.5-4b-claude-4.6-opus-reasoning-distill-heretic-v3-i1"
}

auth.json

(base) at@vm-mint:~$ cat ~/.local/share/opencode/auth.json
# cat ~/.local/share/opencode/auth.json 
{
  "providers": {
    "4060-lms": {
      "api_key": "sk-lm-qIaKp4M5:xxxxxxxxxxxxxxxx"
    }
  }
}

default.json

# cat default.json
{
  "team_mode": true,
  "offline": true,
  "workspace": "./opencode-projects",
  "git_auto_commit": true,
  "git_init_if_missing": true,   <--- 【没有仓库也自动创建】

  "models": {
    "local-api": {
      "provider": "openai",
      "base_url": "http://localhost:8000/v1",  // 改成本地 API 地址
      "api_key": "sk-anything",
      "timeout": 120
    }
  },

  "agents": {
    "architect": {
      "model": "local-api:code-model",
      "role": "architect",
      "temperature": 0.1
    },
    "tech_lead": {
      "model": "local-api:code-model",
      "role": "tech_lead",
      "temperature": 0.2
    },
    "dev1": {
      "model": "local-api:code-model",
      "role": "developer",
      "temperature": 0.3
    },
    "dev2": {
      "model": "local-api:code-model",
      "role": "developer",
      "temperature": 0.3
    },
    "tester": {
      "model": "local-api:code-model",
      "role": "tester",
      "temperature": 0.2
    }
  }
}