wsl2安装openClaw

2026-03-17 14:44:51 阅读:22 编辑

安装流程

1. 安装ubuntu-24.04

#注销Ubuntu-24.04
wsl --unregister Ubuntu-24.04
#安装Ubuntu-24.04
wsl --install -d Ubuntu-24.04
#启动Ubuntu-24.04
wsl -d Ubuntu-24.04

2. github使用SSH key

# 1. 生成SSH密钥(如果还没有)
ssh-keygen -t rsa -b 4096 -C "your_email@example.com"

# 2. 查看公钥并添加到GitHub
cat ~/.ssh/id_rsa.pub

然后登录GitHub → Settings → SSH and GPG keys → New SSH key,粘贴公钥。

3. 安装openClaw(都按默认配置选项)

curl -fsSL https://openclaw.ai/install.sh | bash

4. 修改配置,使宿主机所在局域网可以访问

修改 openclaw.json,将 gateway.bind 设置为 "lan"

查看Ubuntu的IP

hostname -I
# 请将 wsl2_ip 替换为上一步获取的 IP 地址
netsh interface portproxy add v4tov4 listenaddress=0.0.0.0 listenport=18789 connectaddress=wsl2_ip connectport=18789

配置 Windows 防火墙

New-NetFirewallRule -DisplayName "OpenClaw WSL Access" -Direction Inbound -Protocol TCP -LocalPort 18789 -Action Allow

PowerShell查看端口映射

 netsh interface portproxy show all

5.安装qq机器人

# 1. 安装QQ机器人插件
openclaw plugins install @sliverp/qqbot@latest

# 2. 添加QQ通信通道
openclaw channels add --channel qqbot --token "你的AppID:你的AppSecret"

# 3. 重启OpenClaw网关使配置生效
openclaw gateway restart

6.加阿里云百炼模型

{
  "models": {
    "mode": "merge",
    "providers": {
      "bailian": {
        "baseUrl": "https://coding.dashscope.aliyuncs.com/v1",
        "apiKey": "YOUR_API_KEY",
        "api": "openai-completions",
        "models": [
          {
            "id": "qwen3.5-plus",
            "name": "qwen3.5-plus",
            "reasoning": false,
            "input": ["text", "image"],
            "cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
            "contextWindow": 1000000,
            "maxTokens": 65536,
            "compat": {
              "thinkingFormat": "qwen"
            }
          },
          {
            "id": "qwen3-max-2026-01-23",
            "name": "qwen3-max-2026-01-23",
            "reasoning": false,
            "input": ["text"],
            "cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
            "contextWindow": 262144,
            "maxTokens": 65536,
            "compat": {
              "thinkingFormat": "qwen"
            }
          },
          {
            "id": "qwen3-coder-next",
            "name": "qwen3-coder-next",
            "reasoning": false,
            "input": ["text"],
            "cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
            "contextWindow": 262144,
            "maxTokens": 65536
          },
          {
            "id": "qwen3-coder-plus",
            "name": "qwen3-coder-plus",
            "reasoning": false,
            "input": ["text"],
            "cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
            "contextWindow": 1000000,
            "maxTokens": 65536
          },
          {
            "id": "MiniMax-M2.5",
            "name": "MiniMax-M2.5",
            "reasoning": false,
            "input": ["text"],
            "cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
            "contextWindow": 196608,
            "maxTokens": 32768
          },
          {
            "id": "glm-5",
            "name": "glm-5",
            "reasoning": false,
            "input": ["text"],
            "cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
            "contextWindow": 202752,
            "maxTokens": 16384,
            "compat": {
              "thinkingFormat": "qwen"
            }
          },
          {
            "id": "glm-4.7",
            "name": "glm-4.7",
            "reasoning": false,
            "input": ["text"],
            "cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
            "contextWindow": 202752,
            "maxTokens": 16384,
            "compat": {
              "thinkingFormat": "qwen"
            }
          },
          {
            "id": "kimi-k2.5",
            "name": "kimi-k2.5",
            "reasoning": false,
            "input": ["text", "image"],
            "cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
            "contextWindow": 262144,
            "maxTokens": 32768,
            "compat": {
              "thinkingFormat": "qwen"
            }
          }
        ]
      }
    }
  },
  "agents": {
    "defaults": {
      "model": {
        "primary": "bailian/qwen3.5-plus"
      },
      "models": {
        "bailian/qwen3.5-plus": {},
        "bailian/qwen3-max-2026-01-23": {},
        "bailian/qwen3-coder-next": {},
        "bailian/qwen3-coder-plus": {},
        "bailian/MiniMax-M2.5": {},
        "bailian/glm-5": {},
        "bailian/glm-4.7": {},
        "bailian/kimi-k2.5": {}
      }
    }
  },
  "gateway": {
    "mode": "local"
  }
}

7. 备份wsl

wsl --shutdown
wsl --export Ubuntu-24.04 D:\wsl-backup\ubuntu-24.04-has-model.tar

8. 还原wsl

wsl --import

将之前备份的 Ubuntu 系统,安装到 D 盘的 WSL/Ubuntu 文件夹下

 wsl --import Ubuntu-24.04 D:\wsl\Ubuntu24 D:\wsl-backup\ubuntu-24.04-2026-03-18_09-02.tar

注意事项:

1. 可以使用 \\wsl.localhost\Ubuntu-24.04\home\administrator\.openclaw\workspace 来访问wsl内的linux目录

2. 宿主机要访问网页

openclaw dashboard --no-open

获取带token的地址

3. 找不到openclaw命令

mkdir -p ~/bin
ln -s /home/administrator/.npm-global/bin/openclaw ~/bin/openclaw

将 ~/bin 加入到 $PATH

echo 'export PATH="$HOME/bin:$PATH"' >> ~/.bashrc
source ~/.bashrc

访问网页提示control ui requires device identity (use HTTPS or localhost secure context)

chrome.exe --unsafely-treat-insecure-origin-as-secure="http://192.168.2.116:18789" --user-data-dir="C:/test-profile"

访问网页提示pairing required

openclaw devices list

#批准待处理的设备配对请求。
openclaw devices approve <requestid>
#批准待处理的最近的配对请求(有时效性)
openclaw devices approve --latest