Skip to content

Commit e9640c9

Browse files
author
quanzhanzongshi
committed
feat: add qwen3.6-plus model support with 1M context window

- Add qwen3.6-plus to KNOWN_MODELS in install script
- Add CodingPlan model selection option 5 for qwen3.6-plus (1M context)
- Add i18n strings for qwen3.6-plus (Chinese and English)
- Update model select prompt from [1/2/3/4] to [1/2/3/4/5]
- Add qwen3.6-plus to known-models.json with 1M context window
- Add qwen3.6-plus to manager-openclaw.json.tmpl models and aliases
1 parent e913f1a commit e9640c9

File tree

3 files changed

+17
-10
lines changed

3 files changed

+17
-10
lines changed

install/hiclaw-install.sh

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -389,8 +389,10 @@ msg() {
389389
"llm.codingplan.model.kimi.en") text=" 3) kimi-k2.5 - Moonshot Kimi K2.5" ;;
390390
"llm.codingplan.model.minimax.zh") text=" 4) MiniMax-M2.5 - MiniMax M2.5" ;;
391391
"llm.codingplan.model.minimax.en") text=" 4) MiniMax-M2.5 - MiniMax M2.5" ;;
392-
"llm.codingplan.model.select.zh") text="选择模型 [1/2/3/4]" ;;
393-
"llm.codingplan.model.select.en") text="Select model [1/2/3/4]" ;;
392+
"llm.codingplan.model.qwen36plus.zh") text=" 5) qwen3.6-plus - 千问 3.6(100万上下文,适合长文档)" ;;
393+
"llm.codingplan.model.qwen36plus.en") text=" 5) qwen3.6-plus - Qwen 3.6 (1M context, ideal for long documents)" ;;
394+
"llm.codingplan.model.select.zh") text="选择模型 [1/2/3/4/5]" ;;
395+
"llm.codingplan.model.select.en") text="Select model [1/2/3/4/5]" ;;
394396
"llm.provider.selected_codingplan.zh") text=" 提供商: 阿里云百炼 CodingPlan" ;;
395397
"llm.provider.selected_codingplan.en") text=" Provider: Alibaba Cloud CodingPlan" ;;
396398
"llm.provider.selected_qwen.zh") text=" 提供商: 阿里云百炼" ;;
@@ -926,7 +928,7 @@ resolve_docker_proxy_image() {
926928
# ============================================================
927929
# Known models list — used to detect custom models during install
928930
# ============================================================
929-
KNOWN_MODELS="gpt-5.4 gpt-5.3-codex gpt-5-mini gpt-5-nano claude-opus-4-6 claude-sonnet-4-6 claude-haiku-4-5 qwen3.5-plus deepseek-chat deepseek-reasoner kimi-k2.5 glm-5 MiniMax-M2.7 MiniMax-M2.7-highspeed MiniMax-M2.5"
931+
KNOWN_MODELS="gpt-5.4 gpt-5.3-codex gpt-5-mini gpt-5-nano claude-opus-4-6 claude-sonnet-4-6 claude-haiku-4-5 qwen3.5-plus qwen3.6-plus deepseek-chat deepseek-reasoner kimi-k2.5 glm-5 MiniMax-M2.7 MiniMax-M2.7-highspeed MiniMax-M2.5"
930932

931933
is_known_model() {
932934
local model="$1"
@@ -1605,6 +1607,7 @@ step_llm() {
16051607
2|glm-5) HICLAW_DEFAULT_MODEL="glm-5" ;;
16061608
3|kimi-k2.5) HICLAW_DEFAULT_MODEL="kimi-k2.5" ;;
16071609
4|MiniMax-M2.5) HICLAW_DEFAULT_MODEL="MiniMax-M2.5" ;;
1610+
5|qwen3.6-plus) HICLAW_DEFAULT_MODEL="qwen3.6-plus" ;;
16081611
*) HICLAW_DEFAULT_MODEL="qwen3.5-plus" ;;
16091612
esac
16101613
log "$(msg llm.provider.selected_codingplan)"
@@ -1659,6 +1662,7 @@ step_llm() {
16591662
2|glm-5) HICLAW_DEFAULT_MODEL="glm-5" ;;
16601663
3|kimi-k2.5) HICLAW_DEFAULT_MODEL="kimi-k2.5" ;;
16611664
4|MiniMax-M2.5) HICLAW_DEFAULT_MODEL="MiniMax-M2.5" ;;
1665+
5|qwen3.6-plus) HICLAW_DEFAULT_MODEL="qwen3.6-plus" ;;
16621666
*) HICLAW_DEFAULT_MODEL="qwen3.5-plus" ;;
16631667
esac
16641668
log "$(msg llm.provider.selected_codingplan)"

manager/configs/known-models.json

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,11 +7,12 @@
77
{ "id": "claude-sonnet-4-6", "name": "claude-sonnet-4-6", "reasoning": true, "contextWindow": 1000000, "maxTokens": 64000, "input": ["text", "image"] },
88
{ "id": "claude-haiku-4-5", "name": "claude-haiku-4-5", "reasoning": true, "contextWindow": 200000, "maxTokens": 64000, "input": ["text", "image"] },
99
{ "id": "qwen3.5-plus", "name": "qwen3.5-plus", "reasoning": true, "contextWindow": 200000, "maxTokens": 64000, "input": ["text", "image"] },
10+
{ "id": "qwen3.6-plus", "name": "qwen3.6-plus", "reasoning": true, "contextWindow": 1000000, "maxTokens": 128000, "input": ["text"] },
1011
{ "id": "deepseek-chat", "name": "deepseek-chat", "reasoning": true, "contextWindow": 256000, "maxTokens": 128000, "input": ["text"] },
11-
{ "id": "deepseek-reasoner", "name": "deepseek-reasoner", "reasoning": true, "contextWindow": 256000, "maxTokens": 128000, "input": ["text"] },
12+
{ "id": "deepseek-reasoner", "name": "deepseek-reasoner", "reasoning": true, "contextWindow": 256000, "maxTokens": 128000, "input": ["text"] },
1213
{ "id": "kimi-k2.5", "name": "kimi-k2.5", "reasoning": true, "contextWindow": 256000, "maxTokens": 128000, "input": ["text", "image"] },
1314
{ "id": "glm-5", "name": "glm-5", "reasoning": true, "contextWindow": 200000, "maxTokens": 128000, "input": ["text"] },
1415
{ "id": "MiniMax-M2.7", "name": "MiniMax-M2.7", "reasoning": true, "contextWindow": 200000, "maxTokens": 128000, "input": ["text"] },
15-
{ "id": "MiniMax-M2.7-highspeed", "name": "MiniMax-M2.7-highspeed", "reasoning": true, "contextWindow": 200000, "maxTokens": 128000, "input": ["text"] },
16+
{ "id": "MiniMax-M2.7-highspeed", "name": "MiniMax-M2.7-highspeed", "reasoning": true, "contextWindow": 200000, "maxTokens": 128000, "input": ["text"] },
1617
{ "id": "MiniMax-M2.5", "name": "MiniMax-M2.5", "reasoning": true, "contextWindow": 200000, "maxTokens": 128000, "input": ["text"] }
1718
]

manager/configs/manager-openclaw.json.tmpl

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -49,15 +49,16 @@
4949
{ "id": "gpt-5-nano", "name": "gpt-5-nano", "reasoning": true, "contextWindow": 400000, "maxTokens": 128000, "input": ["text", "image"] },
5050
{ "id": "claude-opus-4-6", "name": "claude-opus-4-6", "reasoning": true, "contextWindow": 1000000, "maxTokens": 128000, "input": ["text", "image"] },
5151
{ "id": "claude-sonnet-4-6", "name": "claude-sonnet-4-6", "reasoning": true, "contextWindow": 1000000, "maxTokens": 64000, "input": ["text", "image"] },
52-
{ "id": "claude-haiku-4-5", "name": "claude-haiku-4-5", "reasoning": true, "contextWindow": 200000, "maxTokens": 64000, "input": ["text", "image"] },
52+
{ "id": "claude-haiku-4-5", "name": "claude-haiku-4-5", "reasoning": true, "contextWindow": 200000, "maxTokens": 64000, "input": ["text", "image"] },
5353
{ "id": "qwen3.5-plus", "name": "qwen3.5-plus", "reasoning": true, "contextWindow": 200000, "maxTokens": 64000, "input": ["text", "image"] },
54+
{ "id": "qwen3.6-plus", "name": "qwen3.6-plus", "reasoning": true, "contextWindow": 1000000, "maxTokens": 128000, "input": ["text"] },
5455
{ "id": "deepseek-chat", "name": "deepseek-chat", "reasoning": true, "contextWindow": 256000, "maxTokens": 128000, "input": ["text"] },
5556
{ "id": "deepseek-reasoner", "name": "deepseek-reasoner", "reasoning": true, "contextWindow": 256000, "maxTokens": 128000, "input": ["text"] },
56-
{ "id": "kimi-k2.5", "name": "kimi-k2.5", "reasoning": true, "contextWindow": 256000, "maxTokens": 128000, "input": ["text", "image"] },
57+
{ "id": "kimi-k2.5", "name": "kimi-k2.5", "reasoning": true, "contextWindow": 256000, "maxTokens": 128000, "input": ["text", "image"] },
5758
{ "id": "glm-5", "name": "glm-5", "reasoning": true, "contextWindow": 200000, "maxTokens": 128000, "input": ["text"] },
5859
{ "id": "MiniMax-M2.7", "name": "MiniMax-M2.7", "reasoning": true, "contextWindow": 200000, "maxTokens": 128000, "input": ["text"] },
59-
{ "id": "MiniMax-M2.7-highspeed", "name": "MiniMax-M2.7-highspeed", "reasoning": true, "contextWindow": 200000, "maxTokens": 128000, "input": ["text"] },
60-
{ "id": "MiniMax-M2.5", "name": "MiniMax-M2.5", "reasoning": true, "contextWindow": 200000, "maxTokens": 128000, "input": ["text"] }
60+
{ "id": "MiniMax-M2.7-highspeed", "name": "MiniMax-M2.7-highspeed", "reasoning": true, "contextWindow": 200000, "maxTokens": 128000, "input": ["text"] },
61+
{ "id": "MiniMax-M2.5", "name": "MiniMax-M2.5", "reasoning": true, "contextWindow": 200000, "maxTokens": 128000, "input": ["text"] }
6162
]
6263
}
6364
}
@@ -79,6 +80,7 @@
7980
"hiclaw-gateway/claude-sonnet-4-6": { "alias": "claude-sonnet-4-6" },
8081
"hiclaw-gateway/claude-haiku-4-5": { "alias": "claude-haiku-4-5" },
8182
"hiclaw-gateway/qwen3.5-plus": { "alias": "qwen3.5-plus" },
83+
"hiclaw-gateway/qwen3.6-plus": { "alias": "qwen3.6-plus" },
8284
"hiclaw-gateway/deepseek-chat": { "alias": "deepseek-chat" },
8385
"hiclaw-gateway/deepseek-reasoner": { "alias": "deepseek-reasoner" },
8486
"hiclaw-gateway/kimi-k2.5": { "alias": "kimi-k2.5" },
@@ -118,4 +120,4 @@
118120
"commands": {
119121
"restart": true
120122
}
121-
}
123+
}

0 commit comments

Comments (0)