opencode编程

安装方式

  • 官方推荐

    # YOLO
    curl -fsSL https://opencode.ai/install | bash
    
    # Package managers
    npm i -g opencode-ai@latest        # or bun/pnpm/yarn
    scoop install opencode             # Windows
    choco install opencode             # Windows
    brew install anomalyco/tap/opencode # macOS and Linux (recommended, always up to date)
    brew install opencode              # macOS and Linux (official brew formula, updated less)
    sudo pacman -S opencode            # Arch Linux (Stable)
    paru -S opencode-bin               # Arch Linux (Latest from AUR)
    mise use -g opencode               # Any OS
    nix run nixpkgs#opencode           # or github:anomalyco/opencode for latest dev branch

mint虚拟机部署

  • 安装

    curl -fsSL https://opencode.ai/install | bash
    opencode --version
    opencode  #启动进命令行,输入下面内容让ai自己安装
    Install and configure oh-my-opencode by following the instructions here:
    https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/refs/heads/master/docs/guide/installation.md
  • 体验

    新用户交互:执行opencode后,会提示各种商业模型是否有帐号,
    本地化:最后在它自己的操作下完成了本地openai的配置,调用本地算力成功
  • 排查
    • 系统运行日志:~/.local/share/opencode/log
      • tail -f $(ls -t ~/.local/share/opencode/log/*.log | head -1)
    • 对话记录:~/.local/share/opencode/opencode.db
      • sqlite3 ~/.local/share/opencode/opencode.db "SELECT * FROM part ORDER BY time_created desc LIMIT 10"
    • 配置文件:~/.config/opencode/opencode.json
    • 本地算力API权限:~/.local/share/opencode/auth.json
    • 对话内容过滤脚本

      #!/bin/bash
      # Dump the most recent Q&A groups from the opencode conversation DB.
      # Note: user prompts are not captured — only reasoning and answer parts.
      set -u

      DB="/home/at/.local/share/opencode/opencode.db"

      # Keep only a numeric argument; default to 5 groups.
      # Validation is required because $LIMIT is interpolated into the SQL below.
      LIMIT="${1:-5}"
      [[ "$LIMIT" =~ ^[0-9]+$ ]] || LIMIT=5

      # Fail early with a clear message if the DB is missing/unreadable.
      [ -r "$DB" ] || { echo "database not readable: $DB" >&2; exit 1; }

      first_entry=1

      # Query the part table: newest rows first, then tac -> chronological order.
      # Over-fetch (LIMIT * 10 rows) so several parts per group are covered.
      sqlite3 "$DB" "
      SELECT data FROM part
      ORDER BY time_created DESC
      LIMIT $((LIMIT * 10))
      " | tac | while IFS= read -r line; do

          # Reasoning content
          if echo "$line" | grep -q '"type":"reasoning"'; then
              # Guard the substitution so a malformed JSON row never aborts the loop.
              text=$(echo "$line" | python3 -c "import json,sys;o=json.load(sys.stdin);print(o.get('text',''))" 2>/dev/null) || text=""
              # Skip trivially short fragments (< 6 chars).
              if [ -n "$text" ] && [ ${#text} -gt 5 ]; then
                  if [ $first_entry -eq 1 ]; then
                      echo "-------------------------------------------"
                      first_entry=0
                  fi
                  # printf: safe even if $text starts with '-n' or contains backslashes.
                  printf '[思考] %s\n' "$text"
              fi
          fi

          # Answer content
          if echo "$line" | grep -q '"type":"text"'; then
              text=$(echo "$line" | python3 -c "import json,sys;o=json.load(sys.stdin);print(o.get('text',''))" 2>/dev/null) || text=""
              # Skip payloads that look like raw JSON objects (tool output, not prose).
              if [ -n "$text" ] && ! printf '%s\n' "$text" | grep -qE '^\{'; then
                  if [ $first_entry -eq 1 ]; then
                      echo "-------------------------------------------"
                      first_entry=0
                  fi
                  printf '[回答] %s\n' "$text"
                  echo "-------------------------------------------"
              fi
          fi

      done

      echo "=================================="
      echo " 显示最近 $LIMIT 组对话"
      echo "=================================="

本地化

  • 本地模型接口配置:~/.config/opencode/opencode.json

    (base) at@vm-mint:~/.config/opencode$ cat opencode.json
    {
      "$schema": "https://opencode.ai/config.json",
      "provider": {
        "4060-lms": {
          "npm": "@ai-sdk/openai",
          "name": "4060-lms-openai",
          "options": {
            "baseURL": "https://openai1.atibm.com/v1",
            "apiKey": "sk-lm-qIaKp4M5:xxxxxxxxxxxxxxxx"
          },
          "models": {
            "qwen3.5-4b-claude-4.6-opus-reasoning-distill-heretic-v3-i1": {
              "name": "Qwen3.5-4B"
            }
          }
        },
        "v100-llama": {
          "npm": "@ai-sdk/openai-compatible",
          "name": "v100-llama-openai",
          "options": {
            "baseURL": "http://192.168.1.142:8080/v1",
            "apiKey": "sk-dummy"
          },
          "models": {
            "Qwen3-Coder-30B-A3B-Instruct-IQ4_NL.gguf":{
              "name": "qwen3-30B-coder"
            }
          }
        }
      },
      "model": "v100-llama/Qwen3-Coder-30B-A3B-Instruct-IQ4_NL.gguf",
      "small_model": "4060-lms/qwen3.5-4b-claude-4.6-opus-reasoning-distill-heretic-v3-i1"
    }
    
  • 本地模型接口鉴权配置:~/.local/share/opencode/auth.json

    (base) at@vm-mint:~$ cat ~/.local/share/opencode/auth.json
    {
      "providers": {
        "4060-lms": {
          "api_key": "sk-lm-qIaKp4M5:xxxxxxxxxxxxxxxx"
        }
      }
    }
    
  • 环境变量(待探索)

    # 目录文件
    export OPENCODE_CONFIG_DIR=/www/opencode/data/teams
    export OPENCODE_CONFIG=/www/opencode/data/teams/default.json
    # claude行为开关
    export OPENCODE_DISABLE_CLAUDE_CODE=1        # Disable all
    export OPENCODE_DISABLE_CLAUDE_CODE_PROMPT=1 # Disable only ~/.claude/CLAUDE.md
    export OPENCODE_DISABLE_CLAUDE_CODE_SKILLS=1 # Disable only .claude/skills

模型感受(27B待探索)

  • 商业模型:opencode可以选择几个免费的商业模型,算力很不错,但是对请求有压力限制
  • 本地模型:我测试的qwen3.5 4B 很惊艳,但技术细节有推理断路,上下文最少2万+,q4量化即可
  • 模型推理能力:4B的chat能力足以应对常见的技术和商业分析,但opencode可以提高数倍的工作质量!

ohmyopencode编程demo(待探索)

团队设置(待探索)

  • 运行模式设置

    # vi default.json
    {
      "team_mode": true,
      "offline": true,
      "workspace": "./opencode-projects",
      "git_auto_commit": true,
      "git_init_if_missing": true,   <--- 【没有仓库也自动创建】
    
      "models": {
        "local-api": {
          "provider": "openai",
          "base_url": "http://localhost:8000/v1",  // 改成本地 API 地址
          "api_key": "sk-anything",
          "timeout": 120
        }
      },
    
      "agents": {
        "architect": {
          "model": "local-api:code-model",
          "role": "architect",
          "temperature": 0.1
        },
        "tech_lead": {
          "model": "local-api:code-model",
          "role": "tech_lead",
          "temperature": 0.2
        },
        "dev1": {
          "model": "local-api:code-model",
          "role": "developer",
          "temperature": 0.3
        },
        "dev2": {
          "model": "local-api:code-model",
          "role": "developer",
          "temperature": 0.3
        },
        "tester": {
          "model": "local-api:code-model",
          "role": "tester",
          "temperature": 0.2
        }
      }
    }
    opencode					# 团队 + 离线 (默认配置)
    opencode --online			# 团队 + 在线
    opencode --no-team			# 单例 + 离线
    opencode --no-team --online # 单例 + 在线