Skip to content

Commit 1c40e45

Browse files
committed
AI执行器集成OpenClaw: 新增“openClawJobHandler”内置AI任务,与OpenClaw集成打通,支持快速开发AI类任务。
1 parent 80b2df7 commit 1c40e45

7 files changed

Lines changed: 226 additions & 11 deletions

File tree

doc/XXL-JOB官方文档.md

Lines changed: 17 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1271,6 +1271,7 @@ public void demoJobHandler() throws Exception {
12711271
- 执行器代码:xxl-job-executor-sample-springboot-ai
12721272

12731273
**执行器内置任务列表:**
1274+
12741275
- a、ollamaJobHandler: OllamaChat任务,支持自定义prompt、input等输入信息。示例任务入参如下:
12751276
```
12761277
{
@@ -1279,6 +1280,7 @@ public void demoJobHandler() throws Exception {
12791280
"model": "{模型实现,如qwen2.5:1.5b,可选信息}"
12801281
}
12811282
```
1283+
12821284
- b、difyWorkflowJobHandler:DifyWorkflow 任务,支持自定义inputs、user、baseUrl、apiKey 等输入信息,示例参数如下:
12831285
```
12841286
{
@@ -1291,6 +1293,14 @@ public void demoJobHandler() throws Exception {
12911293
}
12921294
```
12931295

1296+
- c、openClawJobHandler: OpenClaw任务,支持自定义prompt、input等输入信息。示例任务入参如下:
1297+
```
1298+
{
1299+
"input": "{输入信息,必填信息}",
1300+
"prompt": "{模型prompt,可选信息}"
1301+
}
1302+
```
1303+
12941304
- 依赖1:参考 [Ollama本地化部署大模型](https://www.xuxueli.com/blog/?blog=./notebook/13-AI/%E4%BD%BF%E7%94%A8Ollama%E6%9C%AC%E5%9C%B0%E5%8C%96%E9%83%A8%E7%BD%B2DeepSeek.md) ,执行器示例部署“qwen2.5:1.5b”模型,也可自行选择其他模型版本。
12951305
- 依赖2:参考 [使用DeepSeek与Dify搭建AI助手](https://www.xuxueli.com/blog/?blog=./notebook/13-AI/%E4%BD%BF%E7%94%A8DeepSeek%E4%B8%8EDify%E6%90%AD%E5%BB%BAAI%E5%8A%A9%E6%89%8B.md),执行器示例新建Dify DifyWork应用,并在开始节点添加“input”参数,可结合实际情况调整。
12961306
- 依赖3:启动示例 “AI执行器” 相关配置文件说明如下:
@@ -2788,15 +2798,16 @@ public void execute() {
27882798
- 10、【优化】统一项目依赖管理结构,依赖版本统一到父级pom提升可维护性;
27892799
27902800
### 7.44 版本 v3.4.0 Release Notes[ING]
2791-
- 1、【新增】调度性能提升:任务触发后分批批量更新,高频调度场景可百倍降低SQL操作合并执行,提升调度性能;
2801+
- 1、【新增】AI执行器集成OpenClaw: 新增“openClawJobHandler”内置AI任务,与OpenClaw集成打通,支持快速开发AI类任务。
2802+
- 2、【新增】调度批次写聚合提升调度性能:任务触发后分批批量更新,高频调度场景可百倍降低SQL操作合并执行,提升调度性能;
27922803
(任务触发后批量更新配置“xxl.job.schedule.batchsize”)
2793-
- 2、【调整】固定频率调度策略调整,修复小概率下触发时间偏差问题;
2794-
- 3、【调整】Docker基础镜像调整为eclipse-temurin;
2795-
- 4、【优化】父POM依赖配置优化,移除容易配置;合并PR-3926;
2796-
- 5、【升级】升级多项maven依赖至较新版本;
2804+
- 3、【调整】固定频率调度策略调整,修复小概率下触发时间偏差问题;
2805+
- 4、【调整】Docker基础镜像调整为eclipse-temurin;
2806+
- 5、【优化】父POM依赖配置优化,移除冗余配置;合并PR-3926;
27972807
- 6、【优化】调度日志优化:支持执行器维度查看调度日志;新增调度日志索引,提升查询效率;
27982808
(数据库新增索引脚本:``` create index I_jobgroup on xxl_job_log (job_group); ``` )
2799-
- 7、【TODO】调度中心OpenAPI完善,提供任务管理能力;封装Agent Skill并推送ClawHub;
2809+
- 7、【升级】升级多项maven依赖至较新版本;
2810+
- 8、【TODO】调度中心OpenAPI完善,提供任务管理能力;封装Agent Skill并推送ClawHub;
28002811
28012812
28022813
### TODO LIST

doc/db/tables_xxl_job.sql

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -157,7 +157,7 @@ INSERT INTO `xxl_job_info`(`id`, `job_group`, `job_desc`, `add_time`, `update_ti
157157
VALUES (1, 1, '示例任务01', now(), now(), 'XXL', '', 'CRON', '0 0 0 * * ? *',
158158
'DO_NOTHING', 'FIRST', 'demoJobHandler', '', 'SERIAL_EXECUTION', 0, 0, 'BEAN', '', 'GLUE代码初始化',
159159
now(), ''),
160-
(2, 2, 'Ollama示例任务01', now(), now(), 'XXL', '', 'NONE', '',
160+
(2, 2, 'Ollama示例任务', now(), now(), 'XXL', '', 'NONE', '',
161161
'DO_NOTHING', 'FIRST', 'ollamaJobHandler', '{
162162
"input": "慢SQL问题分析思路",
163163
"prompt": "你是一个研发工程师,擅长解决技术类问题。",
@@ -172,6 +172,12 @@ VALUES (1, 1, '示例任务01', now(), now(), 'XXL', '', 'CRON', '0 0 0 * * ? *'
172172
"user": "xxl-job",
173173
"baseUrl": "http://localhost/v1",
174174
"apiKey": "app-OUVgNUOQRIMokfmuJvBJoUTN"
175+
}', 'SERIAL_EXECUTION', 0, 0, 'BEAN', '', 'GLUE代码初始化',
176+
now(), ''),
177+
(4, 2, 'OpenClaw示例任务', now(), now(), 'XXL', '', 'NONE', '',
178+
'DO_NOTHING', 'FIRST', 'openClawJobHandler', '{
179+
"input": "查看下上海今天的天气,给出出游建议",
180+
"prompt": "你是一个出游助手,擅长做旅游规划"
175181
}', 'SERIAL_EXECUTION', 0, 0, 'BEAN', '', 'GLUE代码初始化',
176182
now(), '');
177183

pom.xml

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -161,12 +161,17 @@
161161
<version>${xxl-sso.version}</version>
162162
</dependency>
163163

164-
<!-- spring-ai -->
164+
<!-- spring-ai: ollama、openai -->
165165
<dependency>
166166
<groupId>org.springframework.ai</groupId>
167167
<artifactId>spring-ai-starter-model-ollama</artifactId>
168168
<version>${spring-ai.version}</version>
169169
</dependency>
170+
<dependency>
171+
<groupId>org.springframework.ai</groupId>
172+
<artifactId>spring-ai-starter-model-openai</artifactId>
173+
<version>${spring-ai.version}</version>
174+
</dependency>
170175
<!-- dify -->
171176
<dependency>
172177
<groupId>io.github.imfangs</groupId>

xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/pom.xml

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,11 +37,16 @@
3737
<artifactId>xxl-job-core</artifactId>
3838
</dependency>
3939

40-
<!-- spring-ai -->
40+
<!-- spring-ai: ollama -->
4141
<dependency>
4242
<groupId>org.springframework.ai</groupId>
4343
<artifactId>spring-ai-starter-model-ollama</artifactId>
4444
</dependency>
45+
<!-- spring-ai: openai -->
46+
<dependency>
47+
<groupId>org.springframework.ai</groupId>
48+
<artifactId>spring-ai-starter-model-openai</artifactId>
49+
</dependency>
4550
<!-- dify -->
4651
<dependency>
4752
<groupId>io.github.imfangs</groupId>

xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/java/com/xxl/job/executor/jobhandler/AIXxlJob.java

Lines changed: 90 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
import org.springframework.ai.chat.memory.MessageWindowChatMemory;
1616
import org.springframework.ai.ollama.OllamaChatModel;
1717
import org.springframework.ai.ollama.api.OllamaChatOptions;
18+
import org.springframework.ai.openai.OpenAiChatModel;
1819
import org.springframework.stereotype.Component;
1920

2021
import java.util.HashMap;
@@ -32,6 +33,8 @@ public class AIXxlJob {
3233

3334
@Resource
3435
private OllamaChatModel ollamaChatModel;
36+
@Resource
37+
private OpenAiChatModel openAiChatModel;
3538

3639
/**
3740
* 1、ollama Chat任务
@@ -252,4 +255,91 @@ public void setApiKey(String apiKey) {
252255

253256
}
254257

258+
// --------------------------------- openclaw ---------------------------------
259+
260+
/**
261+
* 3、openclaw 任务
262+
*
263+
* 参数示例:格式见 OpenClawParam
264+
* <pre>
265+
* {
266+
* "input": "{输入信息,必填信息}",
267+
* "prompt": "{模型prompt,可选信息}"
268+
* }
269+
* </pre>
270+
*/
271+
@XxlJob("openClawJobHandler")
272+
public void openClawJobHandler() {
273+
274+
// param
275+
String param = XxlJobHelper.getJobParam();
276+
if (param == null || param.trim().isEmpty()) {
277+
XxlJobHelper.log("param is empty.");
278+
279+
XxlJobHelper.handleFail();
280+
return;
281+
}
282+
283+
// openclaw param
284+
OpenClawParam openClawParam = null;
285+
try {
286+
openClawParam = GsonTool.fromJson(param, OpenClawParam.class);
287+
if (openClawParam.getPrompt()==null || openClawParam.getPrompt().isBlank()) {
288+
openClawParam.setPrompt("你是一个出游助手,擅长做旅游规划");
289+
}
290+
if (openClawParam.getInput() == null || openClawParam.getInput().isBlank()) {
291+
XxlJobHelper.log("input is empty.");
292+
293+
XxlJobHelper.handleFail();
294+
return;
295+
}
296+
} catch (Exception e) {
297+
XxlJobHelper.log(new RuntimeException("OpenClawParam parse error", e));
298+
XxlJobHelper.handleFail();
299+
return;
300+
}
301+
302+
// input
303+
XxlJobHelper.log("<br><br><b>【Input】: " + openClawParam.getInput()+ "</b><br><br>");
304+
305+
// build chat-client
306+
ChatClient openclawChatClient = ChatClient
307+
.builder(openAiChatModel)
308+
.defaultAdvisors(MessageChatMemoryAdvisor.builder(MessageWindowChatMemory.builder().build()).build())
309+
.defaultAdvisors(SimpleLoggerAdvisor.builder().build())
310+
.build();
311+
312+
// call openclaw
313+
String response = openclawChatClient
314+
.prompt(openClawParam.getPrompt())
315+
.user(openClawParam.getInput())
316+
.call()
317+
.content();
318+
319+
XxlJobHelper.log("<br><br><b>【Output】: " + response + "</b><br><br>");
320+
321+
}
322+
323+
private static class OpenClawParam {
324+
private String input;
325+
private String prompt;
326+
327+
public String getInput() {
328+
return input;
329+
}
330+
331+
public void setInput(String input) {
332+
this.input = input;
333+
}
334+
335+
public String getPrompt() {
336+
return prompt;
337+
}
338+
339+
public void setPrompt(String prompt) {
340+
this.prompt = prompt;
341+
}
342+
}
343+
344+
255345
}

xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/resources/application.properties

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,9 @@ xxl.job.executor.excludedpackage=
3535

3636

3737
### ollama
38-
spring.ai.model.chat=ollama
39-
### ollama url
4038
spring.ai.ollama.base-url=http://localhost:11434
39+
40+
### openai (for openclaw)
41+
spring.ai.openai.base-url=http://127.0.0.1:18789
42+
spring.ai.openai.api-key=xxxxxx
43+
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,95 @@
1+
package com.xxl.job.executor.test.openclaw;
2+
3+
import jakarta.annotation.Resource;
4+
import org.junit.jupiter.api.Test;
5+
import org.slf4j.Logger;
6+
import org.slf4j.LoggerFactory;
7+
import org.springframework.ai.chat.client.ChatClient;
8+
import org.springframework.ai.chat.client.advisor.MessageChatMemoryAdvisor;
9+
import org.springframework.ai.chat.client.advisor.SimpleLoggerAdvisor;
10+
import org.springframework.ai.chat.memory.MessageWindowChatMemory;
11+
import org.springframework.ai.openai.OpenAiChatModel;
12+
import org.springframework.boot.test.context.SpringBootTest;
13+
import reactor.core.publisher.Flux;
14+
15+
import java.util.Map;
16+
import java.util.concurrent.TimeUnit;
17+
18+
@SpringBootTest
19+
public class OpenClawTest {
20+
private static final Logger logger = LoggerFactory.getLogger(OpenClawTest.class);
21+
22+
@Resource
23+
private OpenAiChatModel openAiChatModel;
24+
25+
/*ChatModel chatModel = OpenAiChatModel
26+
.builder()
27+
.openAiApi(OpenAiApi
28+
.builder()
29+
.baseUrl(baseUrl)
30+
.apiKey( token)
31+
.webClientBuilder(WebClient.builder().clientConnector(
32+
new ReactorClientHttpConnector(
33+
HttpClient.create()
34+
.option(ChannelOption.CONNECT_TIMEOUT_MILLIS, 30 * 1000)
35+
.responseTimeout(Duration.ofMillis(30 * 1000))
36+
)
37+
))
38+
.build())
39+
.build();*/
40+
41+
@Test
42+
public void test() throws Exception {
43+
44+
String prompt = "你是一个出游助手,擅长做旅游规划";
45+
String input = "查看下上海今天的天气,给出出游建议";
46+
47+
// ChatClient
48+
ChatClient chatClient = ChatClient
49+
.builder(openAiChatModel)
50+
.defaultAdvisors(MessageChatMemoryAdvisor.builder(MessageWindowChatMemory.builder().build()).build())
51+
.defaultAdvisors(SimpleLoggerAdvisor.builder().build())
52+
.build();
53+
54+
// Call LLM: 同步输出
55+
String response = chatClient
56+
.prompt(prompt)
57+
.user(input)
58+
.call()
59+
.content();
60+
61+
logger.info("Input: {}", input);
62+
logger.info("Output: {}", response);
63+
}
64+
65+
@Test
66+
public void test2() throws Exception {
67+
68+
String prompt = "你是一个出游助手,擅长做旅游规划";
69+
String input = "查看下上海今天的天气,给出出游建议";
70+
71+
// ChatClient
72+
ChatClient chatClient = ChatClient
73+
.builder(openAiChatModel)
74+
.defaultAdvisors(MessageChatMemoryAdvisor.builder(MessageWindowChatMemory.builder().build()).build())
75+
.defaultAdvisors(SimpleLoggerAdvisor.builder().build())
76+
.build();
77+
78+
// Call LLM: 流式输出
79+
Flux<String> flux = chatClient
80+
.prompt(prompt)
81+
.user(user -> user.text(input).params(Map.of("stream", true)))
83+
.stream()
84+
.content();
85+
86+
flux.subscribe(
87+
data -> System.out.println("Received: " + data), // onNext 处理
88+
error -> System.err.println("Error: " + error), // onError 处理
89+
() -> System.out.println("Completed") // onComplete 处理
90+
);
91+
92+
TimeUnit.SECONDS.sleep(30);
93+
}
94+
95+
}

0 commit comments

Comments
 (0)