Skip to content

Instantly share code, notes, and snippets.

@zsnmwy
Last active January 28, 2026 13:48
Show Gist options
  • Select an option

  • Save zsnmwy/457499f65a6d25c382ee78cc2ae44152 to your computer and use it in GitHub Desktop.

Select an option

Save zsnmwy/457499f65a6d25c382ee78cc2ae44152 to your computer and use it in GitHub Desktop.
~/.config/opencode/oh-my-opencode.json
{
  "$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
  "google_auth": false,
  "tmux": {
    "enabled": true,
    "layout": "main-vertical",
    "main_pane_size": 60,
    "main_pane_min_width": 120,
    "agent_pane_min_width": 40
  },
  "agents": {
    "sisyphus": {
      "model": "openai/gpt-5.2-codex",
      "variant": "xhigh"
    },
    "sisyphus-junior": {
      "model": "openai/gpt-5.2-codex",
      "variant": "xhigh"
    },
    "prometheus": {
      "disable": false,
      "model": "openai/gpt-5.2-codex",
      "variant": "xhigh"
    },
    "plan": {
      "model": "openai/gpt-5.2-codex",
      "variant": "xhigh"
    },
    "oracle": {
      "model": "openai/gpt-5.2-codex",
      "variant": "xhigh"
    },
    "atlas": {
      "model": "openai/gpt-5.2-codex",
      "variant": "xhigh"
    },
    "OpenCode-Builder": {
      "model": "openai/gpt-5.2-codex",
      "variant": "xhigh"
    },
    "metis": {
      "model": "openai/gpt-5.2-codex",
      "variant": "xhigh"
    },
    "momus": {
      "model": "openai/gpt-5.2-codex",
      "variant": "xhigh"
    },
    "librarian": {
      "model": "openai/gpt-5.2-codex",
      "variant": "medium"
    },
    "explore": {
      "model": "openai/gpt-5.2-codex",
      "variant": "low"
    },
    "frontend-ui-ux-engineer": {
      "model": "google/gemini-3-pro-preview"
    },
    "document-writer": {
      "model": "google/gemini-3-flash-preview"
    },
    "multimodal-looker": {
      "model": "google/gemini-3-flash-preview"
    },
    "General": {
      "model": "openai/gpt-5.2-codex",
      "variant": "medium"
    }
  },
  "categories": {
    "general": {
      "model": "openai/gpt-5.2-codex",
      "variant": "xhigh"
    },
    "visual-engineering": {
      "model": "google/gemini-3-pro-preview"
    },
    "artistry": {
      "model": "google/gemini-3-pro-preview"
    },
    "writing": {
      "model": "google/gemini-3-flash-preview"
    },
    "quick": {
      "model": "openai/gpt-5.2-codex",
      "variant": "none"
    },
    "unspecified-low": {
      "model": "openai/gpt-5.2-codex",
      "variant": "medium"
    },
    "unspecified-high": {
      "model": "openai/gpt-5.2-codex",
      "variant": "xhigh"
    }
  }
}
@zsnmwy
Copy link
Author

zsnmwy commented Jan 16, 2026

~/.config/opencode/opencode.json


{
  "$schema": "https://opencode.ai/config.json",
  "model": "openai/gpt-5.2",
  "tui": {
    "scroll_acceleration": {
      "enabled": true
    }
  },
  "mcp": {
    "chrome-devtools": {
      "type": "local",
      "command": [
        "npx",
        "-y",
        "chrome-devtools-mcp@latest"
      ],
      "enabled": false
    }
  },
  "provider": {
    "anthropic": {
      "options": {
        "baseURL": "https://duckcoding.com/v1"
      }
    },
    "openai": {
      "name": "OpenAI",
      "options": {
        "baseURL": "https://duckcoding.com/v1"
      },
      "models": {
        "gpt-5.2": {
          "options": {
            "store": false,
            "include": [
              "reasoning.encrypted_content"
            ]
          },
          "name": "GPT 5.2 (OAuth)",
          "limit": {
            "context": 272000,
            "output": 128000
          },
          "modalities": {
            "input": [
              "text",
              "image"
            ],
            "output": [
              "text"
            ]
          },
          "variants": {
            "none": {
              "reasoningEffort": "none",
              "reasoningSummary": "auto",
              "textVerbosity": "medium"
            },
            "low": {
              "reasoningEffort": "low",
              "reasoningSummary": "auto",
              "textVerbosity": "medium"
            },
            "medium": {
              "reasoningEffort": "medium",
              "reasoningSummary": "auto",
              "textVerbosity": "medium"
            },
            "high": {
              "reasoningEffort": "high",
              "reasoningSummary": "detailed",
              "textVerbosity": "medium"
            },
            "xhigh": {
              "reasoningEffort": "xhigh",
              "reasoningSummary": "detailed",
              "textVerbosity": "medium"
            }
          }
        },
        "gpt-5.2-codex": {
          "options": {
            "store": false,
            "include": [
              "reasoning.encrypted_content"
            ]
          },
          "name": "GPT 5.2 Codex (OAuth)",
          "limit": {
            "context": 272000,
            "output": 128000
          },
          "modalities": {
            "input": [
              "text",
              "image"
            ],
            "output": [
              "text"
            ]
          },
          "variants": {
            "low": {
              "reasoningEffort": "low",
              "reasoningSummary": "auto",
              "textVerbosity": "medium"
            },
            "medium": {
              "reasoningEffort": "medium",
              "reasoningSummary": "auto",
              "textVerbosity": "medium"
            },
            "high": {
              "reasoningEffort": "high",
              "reasoningSummary": "detailed",
              "textVerbosity": "medium"
            },
            "xhigh": {
              "reasoningEffort": "xhigh",
              "reasoningSummary": "detailed",
              "textVerbosity": "medium"
            }
          }
        },
        "gpt-5.1-codex-max": {
          "options": {
            "store": false,
            "include": [
              "reasoning.encrypted_content"
            ]
          },
          "name": "GPT 5.1 Codex Max (OAuth)",
          "limit": {
            "context": 272000,
            "output": 128000
          },
          "modalities": {
            "input": [
              "text",
              "image"
            ],
            "output": [
              "text"
            ]
          },
          "variants": {
            "low": {
              "reasoningEffort": "low",
              "reasoningSummary": "detailed",
              "textVerbosity": "medium"
            },
            "medium": {
              "reasoningEffort": "medium",
              "reasoningSummary": "detailed",
              "textVerbosity": "medium"
            },
            "high": {
              "reasoningEffort": "high",
              "reasoningSummary": "detailed",
              "textVerbosity": "medium"
            },
            "xhigh": {
              "reasoningEffort": "xhigh",
              "reasoningSummary": "detailed",
              "textVerbosity": "medium"
            }
          }
        },
        "gpt-5.2-codex-xhigh": {
          "options": {
            "store": false,
            "include": [
              "reasoning.encrypted_content"
            ]
          }
        }
      }
    },
    "google": {
      "name": "Google",
      "options": {
        "baseURL": "https://duckcoding.com/v1"
      }
    }
  },
  "plugin": [
    "oh-my-opencode@latest",
    "opencode-openai-codex-auth",
    "./plugin/codex-proxy-plugin.ts"
  ]
}

~/.config/opencode/plugin/codex-proxy-plugin.ts

import type { Plugin } from '@opencode-ai/plugin';

/**
 * Fetch wrapper that rewrites JSON request bodies before forwarding them.
 *
 * Strips `max_output_tokens` and `max_completion_tokens` (token-limit
 * parameters that may cause issues upstream) and `previous_response_id`
 * (avoids "Item not found" errors behind API pools/proxies where responses
 * may not be cached or may be routed to different API keys). Requests whose
 * body is absent, non-string, or not valid JSON pass through unchanged.
 *
 * @param input - The request URL or Request object
 * @param init - Optional request init options
 * @returns The fetch response
 */
async function codexFetch(input: Request | string | URL, init?: RequestInit): Promise<Response> {
  const rawBody = init?.body;
  if (!rawBody || typeof rawBody !== 'string') {
    // Nothing we can rewrite — forward as-is.
    return globalThis.fetch(input, init);
  }

  try {
    const payload = JSON.parse(rawBody);

    // `delete` on a missing property is a no-op, so no existence
    // checks are needed before removing any of these keys.
    for (const key of ['max_output_tokens', 'max_completion_tokens', 'previous_response_id']) {
      delete payload[key];
    }

    return globalThis.fetch(input, { ...init, body: JSON.stringify(payload) });
  } catch {
    // Body was not parseable JSON — send the original request through.
    return globalThis.fetch(input, init);
  }
}

/**
 * Codex Proxy Plugin
 *
 * Installs `codexFetch` as the custom fetch handler on the `openai`
 * provider so its request parameters are rewritten before being sent
 * to the API.
 */
export const CodexProxyPlugin: Plugin = async () => ({
  config: async (input) => {
    // Target the stock provider id; change this key if the provider is
    // registered under another name (e.g. 'openai-duckcoding').
    const openai = input.provider?.['openai'];
    if (!openai) {
      return;
    }
    if (!openai.options) {
      openai.options = {};
    }
    openai.options.fetch = codexFetch;
  },
});

export default CodexProxyPlugin;

@zsnmwy
Copy link
Author

zsnmwy commented Jan 22, 2026

  1. 直接覆盖官方的Provider
image
  2. 填入Duckcoding API Key
image

@zsnmwy
Copy link
Author

zsnmwy commented Jan 27, 2026

tmux Shell

后台任务可以单独显示在面板里面,当跑完之后,又会自己关闭。

这个能力依托于 opencode 的远程attach。

image

版本要求在 v3.1.0之后,可以将版本直接设置成 latest

https://github.com/code-yeongyu/oh-my-opencode/releases/tag/v3.1.0

~/.config/opencode/oh-my-opencode.json

{
  "tmux": {
    "enabled": true,
    "layout": "main-vertical",
    "main_pane_size": 60,
    "main_pane_min_width": 120,
    "agent_pane_min_width": 40
  }
}

下面的脚本可以加入到 .zshrc或者 .bashrc中,输入 oc 即可享受该 feature。

# Launch opencode inside a per-directory tmux session so background agent
# panes can attach to it (oh-my-opencode tmux feature, >= v3.1.0).
# Arguments are forwarded to opencode unchanged.
oc() {
    # 1. Unique, tmux-safe session name derived from the current directory.
    local session_name
    session_name=$(_oc_session_name "$PWD") || return 1

    # 2. First free TCP port in 4096-5095. The original fell through and
    #    used port 5096 without checking it; now we fail loudly instead.
    local port
    if ! port=$(_oc_find_port); then
        printf 'oc: no free port in 4096-5095\n' >&2
        return 1
    fi

    # 3. Export so child processes agree on the port.
    export OPENCODE_PORT=$port

    # 4. Inside tmux already: just run opencode in the current pane.
    if [ -n "$TMUX" ]; then
        opencode --port "$port" "$@"
        return
    fi

    # 4b. Not in tmux: build the command with %q so arguments containing
    #     spaces or shell metacharacters survive `tmux ... "$cmd"`
    #     (the original used unquoted $*, which broke on such arguments).
    local oc_cmd shell_q arg_q arg
    printf -v oc_cmd 'OPENCODE_PORT=%q opencode --port %q' "$port" "$port"
    for arg in "$@"; do
        printf -v arg_q '%q' "$arg"
        oc_cmd+=" $arg_q"
    done
    printf -v shell_q '%q' "${SHELL:-/bin/sh}"
    oc_cmd+="; exec $shell_q"

    if tmux has-session -t "$session_name" 2>/dev/null; then
        # Session exists: open a new window in it, then attach.
        tmux new-window -t "$session_name" -c "$PWD" "$oc_cmd"
        tmux attach-session -t "$session_name"
    else
        # Session does not exist yet: create it.
        tmux new-session -s "$session_name" -c "$PWD" "$oc_cmd"
    fi
}

# Print a tmux-safe session name for path $1: "<basename>-<md5 prefix>",
# e.g. "oh-my-opencode-a1b2". tmux rejects '.' and ':' in session names,
# so both are replaced with '_' (the original broke on dirs like "my.proj").
_oc_session_name() {
    local dir=$1 base hash
    base=$(basename "$dir")
    base=${base//./_}
    base=${base//:/_}
    # printf '%s\n' matches the original `echo "$PWD"` input to md5sum,
    # so existing session names are preserved for dot-free directories.
    hash=$(printf '%s\n' "$dir" | md5sum | cut -c1-4) || return 1
    printf '%s-%s\n' "$base" "$hash"
}

# Print the first TCP port in 4096-5095 not held open per lsof;
# return non-zero if every port in the range is busy.
_oc_find_port() {
    local p
    for ((p = 4096; p < 5096; p++)); do
        if ! lsof -i :"$p" >/dev/null 2>&1; then
            printf '%s\n' "$p"
            return 0
        fi
    done
    return 1
}

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment