# Unregister (delete) the existing Ubuntu-24.04 distro.
# WARNING: this permanently destroys all data inside the distro.
wsl --unregister Ubuntu-24.04
# Install a fresh Ubuntu-24.04 distro.
wsl --install -d Ubuntu-24.04
# Start a shell inside the Ubuntu-24.04 distro.
wsl -d Ubuntu-24.04
# 1. Generate an SSH key pair (if you don't already have one).
#    ed25519 is GitHub's currently recommended key type — smaller and faster
#    than rsa-4096 with equivalent practical security. Use your own e-mail
#    address as the key comment.
ssh-keygen -t ed25519 -C "your_email@example.com"
# 2. Print the public key so you can add it to GitHub.
cat ~/.ssh/id_ed25519.pub
# Then log in to GitHub → Settings → SSH and GPG keys → New SSH key, and paste the public key.
# Install OpenClaw via the official install script.
# NOTE(review): this pipes a remote script straight into bash — download and
# inspect install.sh first if you need to audit what it does.
curl -fsSL https://openclaw.ai/install.sh | bash
# Edit openclaw.json and set gateway.bind to "lan" so the gateway listens on the LAN interface.
# Look up the WSL instance's IP address:
# Print the WSL instance's IP address(es), space-separated; use the first one below.
hostname -I
# Forward Windows port 18789 to the WSL2 VM so LAN clients can reach the gateway.
# Replace wsl2_ip with the IP address obtained in the previous step.
# Run in an elevated (Administrator) cmd/PowerShell on the Windows host.
netsh interface portproxy add v4tov4 listenaddress=0.0.0.0 listenport=18789 connectaddress=wsl2_ip connectport=18789
# Configure the Windows firewall to allow inbound connections on port 18789 (elevated PowerShell):
# Allow inbound TCP traffic on port 18789 through the Windows firewall
# so LAN devices can reach the forwarded OpenClaw gateway port.
New-NetFirewallRule -DisplayName "OpenClaw WSL Access" -Direction Inbound -Protocol TCP -LocalPort 18789 -Action Allow
# Inspect the current port-proxy mappings (PowerShell):
# List all configured port-proxy mappings to verify the 18789 forward exists.
netsh interface portproxy show all
# 1. Install the QQ bot plugin.
openclaw plugins install @sliverp/qqbot@latest
# 2. Add the QQ communication channel (token format: "<AppID>:<AppSecret>").
openclaw channels add --channel qqbot --token "你的AppID:你的AppSecret"
# 3. Restart the OpenClaw gateway so the new channel configuration takes effect.
openclaw gateway restart
{
"models": {
"mode": "merge",
"providers": {
"bailian": {
"baseUrl": "https://coding.dashscope.aliyuncs.com/v1",
"apiKey": "YOUR_API_KEY",
"api": "openai-completions",
"models": [
{
"id": "qwen3.5-plus",
"name": "qwen3.5-plus",
"reasoning": false,
"input": ["text", "image"],
"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
"contextWindow": 1000000,
"maxTokens": 65536,
"compat": {
"thinkingFormat": "qwen"
}
},
{
"id": "qwen3-max-2026-01-23",
"name": "qwen3-max-2026-01-23",
"reasoning": false,
"input": ["text"],
"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
"contextWindow": 262144,
"maxTokens": 65536,
"compat": {
"thinkingFormat": "qwen"
}
},
{
"id": "qwen3-coder-next",
"name": "qwen3-coder-next",
"reasoning": false,
"input": ["text"],
"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
"contextWindow": 262144,
"maxTokens": 65536
},
{
"id": "qwen3-coder-plus",
"name": "qwen3-coder-plus",
"reasoning": false,
"input": ["text"],
"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
"contextWindow": 1000000,
"maxTokens": 65536
},
{
"id": "MiniMax-M2.5",
"name": "MiniMax-M2.5",
"reasoning": false,
"input": ["text"],
"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
"contextWindow": 196608,
"maxTokens": 32768
},
{
"id": "glm-5",
"name": "glm-5",
"reasoning": false,
"input": ["text"],
"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
"contextWindow": 202752,
"maxTokens": 16384,
"compat": {
"thinkingFormat": "qwen"
}
},
{
"id": "glm-4.7",
"name": "glm-4.7",
"reasoning": false,
"input": ["text"],
"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
"contextWindow": 202752,
"maxTokens": 16384,
"compat": {
"thinkingFormat": "qwen"
}
},
{
"id": "kimi-k2.5",
"name": "kimi-k2.5",
"reasoning": false,
"input": ["text", "image"],
"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
"contextWindow": 262144,
"maxTokens": 32768,
"compat": {
"thinkingFormat": "qwen"
}
}
]
}
}
},
"agents": {
"defaults": {
"model": {
"primary": "bailian/qwen3.5-plus"
},
"models": {
"bailian/qwen3.5-plus": {},
"bailian/qwen3-max-2026-01-23": {},
"bailian/qwen3-coder-next": {},
"bailian/qwen3-coder-plus": {},
"bailian/MiniMax-M2.5": {},
"bailian/glm-5": {},
"bailian/glm-4.7": {},
"bailian/kimi-k2.5": {}
}
}
},
"gateway": {
"mode": "local"
}
}
# Shut down all WSL instances first so the export captures a consistent snapshot.
wsl --shutdown
# Export the distro to a backup tarball. Use the registered distro name exactly
# as shown by `wsl -l` ("Ubuntu-24.04" — name matching is case-sensitive in
# some WSL versions, so don't lower-case it).
wsl --export Ubuntu-24.04 D:\wsl-backup\ubuntu-24.04-has-model.tar
# Restore syntax: wsl --import <DistroName> <InstallLocation> <BackupTarball>
wsl --import Ubuntu-24.04 D:\wsl\Ubuntu24 D:\wsl-backup\ubuntu-24.04-2026-03-18_09-02.tar
# Start the OpenClaw dashboard without auto-opening a browser; the command
# prints the dashboard URL (including its access token) to the terminal.
openclaw dashboard --no-open
# Copy the tokenized URL printed above to open the dashboard from another device.
# Expose the npm-global openclaw binary on $PATH via ~/bin.
mkdir -p ~/bin
# -sf replaces any stale link so this step is safe to re-run; $HOME avoids the
# hard-coded /home/administrator path (works for any user).
ln -sf "$HOME/.npm-global/bin/openclaw" ~/bin/openclaw
# Add ~/bin to $PATH in ~/.bashrc — but only once, so repeated runs don't
# append duplicate entries.
grep -qxF 'export PATH="$HOME/bin:$PATH"' ~/.bashrc || echo 'export PATH="$HOME/bin:$PATH"' >> ~/.bashrc
source ~/.bashrc
# Launch Chrome (on Windows) treating the plain-HTTP origin as secure, so
# browser features that require a secure context work against the LAN
# dashboard. Uses a throwaway profile so the flag doesn't affect normal use.
# NOTE(review): replace 192.168.2.116 with your Windows host's LAN IP.
chrome.exe --unsafely-treat-insecure-origin-as-secure="http://192.168.2.116:18789" --user-data-dir="C:/test-profile"
# List devices known to the OpenClaw gateway, including pending pairing requests.
openclaw devices list
# Approve a pending device-pairing request by its request id.
openclaw devices approve <requestid>
# Approve the most recent pending request (pairing requests expire after a while).
openclaw devices approve --latest
```