---初始化项目

This commit is contained in:
2025-09-19 16:14:08 +08:00
parent 902d3d7e3b
commit afee7c03ac
767 changed files with 75809 additions and 82 deletions

View File

@ -0,0 +1,18 @@
# Base image (supports amd64 & arm64), based on Ubuntu 18.04.4 LTS
FROM adoptopenjdk:8-jdk-hotspot
# Maintainer (the MAINTAINER instruction is deprecated; use a LABEL instead)
LABEL maintainer="dudiao(idudiao@163.com)"
# Time zone
ENV TZ=Asia/Shanghai
# Other environment variables
ENV APP_NAME=powerjob-worker-samples
# Placeholders for SpringBoot launch arguments and JVM options
ENV PARAMS=""
ENV JVMOPTIONS=""
# Copy the application jar into the image
COPY target/powerjob-worker-samples-*.jar /powerjob-worker-samples.jar
# Exposed ports (HTTP + AKKA)
EXPOSE 8081 27777
# Launch the application
ENTRYPOINT ["sh","-c","java $JVMOPTIONS -jar /powerjob-worker-samples.jar $PARAMS"]

View File

@ -0,0 +1,108 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>powerjob</artifactId>
        <groupId>tech.powerjob</groupId>
        <version>5.1.2</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <artifactId>powerjob-worker-samples</artifactId>
    <name>powerjob-worker-samples</name>
    <version>5.1.2</version>
    <properties>
        <springboot.version>2.7.18</springboot.version>
        <powerjob.worker.starter.version>5.1.2</powerjob.worker.starter.version>
        <fastjson.version>1.2.83</fastjson.version>
        <powerjob.official.processors.version>5.1.2</powerjob.official.processors.version>
        <!-- Skip this module during deployment -->
        <maven.deploy.skip>true</maven.deploy.skip>
        <powerjob-client.version>5.1.2</powerjob-client.version>
    </properties>
    <dependencies>
        <!-- SpringBoot -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
            <version>${springboot.version}</version>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-jpa</artifactId>
            <version>${springboot.version}</version>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <version>${springboot.version}</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>tech.powerjob</groupId>
            <artifactId>powerjob-worker-spring-boot-starter</artifactId>
            <version>${powerjob.worker.starter.version}</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/com.alibaba/fastjson -->
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>${fastjson.version}</version>
        </dependency>
        <dependency>
            <groupId>tech.powerjob</groupId>
            <artifactId>powerjob-official-processors</artifactId>
            <version>${powerjob.official.processors.version}</version>
        </dependency>
        <dependency>
            <groupId>tech.powerjob</groupId>
            <artifactId>powerjob-client</artifactId>
            <version>${powerjob-client.version}</version>
        </dependency>
        <!-- Newer JDKs removed the JavaEE packages; they must be added manually -->
        <dependency>
            <groupId>javax.xml.bind</groupId>
            <artifactId>jaxb-api</artifactId>
            <version>2.3.1</version>
        </dependency>
    </dependencies>
    <!-- SpringBoot-specific packaging plugin -->
    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
                <version>${springboot.version}</version>
                <configuration>
                    <mainClass>tech.powerjob.samples.SampleApplication</mainClass>
                </configuration>
                <executions>
                    <execution>
                        <goals>
                            <goal>repackage</goal><!-- bundles all dependencies into the generated jar -->
                        </goals>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-deploy-plugin</artifactId>
                <configuration>
                    <skip>true</skip>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>

View File

@ -0,0 +1,18 @@
package tech.powerjob.samples;
import org.springframework.stereotype.Service;
/**
 * A mysterious service used by the processor samples to demonstrate
 * that Spring-managed beans can be injected into processors.
 *
 * @author tjq
 * @since 2020/4/18
 */
@Service
public class MysteryService {

    /** Yasuo's famous quote, returned verbatim. */
    private static final String HASAKI_QUOTE = "面对疾风吧~";

    /**
     * Returns a fixed greeting string.
     *
     * @return the quote
     */
    public String hasaki() {
        return HASAKI_QUOTE;
    }
}

View File

@ -0,0 +1,44 @@
package tech.powerjob.samples;
import org.springframework.context.annotation.Configuration;
/**
 * powerjob-worker configuration.
 * Code-based configuration example (SpringBoot projects can simply use the starter
 * and complete the configuration in application.properties instead).
 *
 * @author tjq
 * @since 2020/4/17
 */
@Configuration
public class PowerJobWorkerInitializer {
    /*
    Manual configuration sample code.
    Regular SpringBoot users should just use the starter; see application.properties for the actual configuration.
    @Bean
    public PowerJobSpringWorker initPowerJobSpringWorkerByCode() {
        // Initialize the PowerJob configuration
        PowerJobWorkerConfig config = new PowerJobWorkerConfig();
        // Transport protocol; new users are advised to go straight to HTTP
        config.setProtocol(Protocol.HTTP);
        // Transport layer port
        config.setPort(28888);
        // Worker group; using the project name is recommended
        config.setAppName("powerjob-multi-worker-2");
        // Server discovery addresses (multiple IPs or an HTTP domain name are supported)
        config.setServerAddress(Lists.newArrayList("127.0.0.1:7700", "127.0.0.1:7701"));
        // Unless you need large Map/MapReduce jobs, in-memory storage is recommended to speed up computation
        config.setStoreStrategy(StoreStrategy.DISK);
        // Custom executor tag, usable to target a subset of executors. Example: in a multi-unit
        // deployment, set TAG to the unit name, then pick the unit from the console.
        config.setTag("CENTER");
        // The above are the core settings; see the inline comments or the official docs for the rest
        // Note: Spring users must use PowerJobSpringWorker instead of PowerJobWorker (the latter cannot use Spring-managed beans as processors)
        return new PowerJobSpringWorker(config);
    }
    */
}

View File

@ -0,0 +1,19 @@
package tech.powerjob.samples;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableScheduling;
/**
 * Application entry point for the PowerJob worker samples.
 *
 * @author tjq
 * @since 2020/4/17
 */
@EnableScheduling
@SpringBootApplication
public class SampleApplication {

    /**
     * Boots the Spring application context.
     *
     * @param args command-line arguments forwarded to Spring
     */
    public static void main(String[] args) {
        new SpringApplication(SampleApplication.class).run(args);
    }
}

View File

@ -0,0 +1,16 @@
package tech.powerjob.samples.anno;
import java.lang.annotation.*;
/**
 * Custom method annotation.
 * <a href="https://github.com/PowerJob/PowerJob/issues/770">custom annotations caused @PowerJobHandler to stop working</a>
 *
 * @author tjq
 * @since 2024/2/8
 */
@Target({ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@Inherited
public @interface ATestMethodAnnotation {
}

View File

@ -0,0 +1,55 @@
package tech.powerjob.samples.config;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import org.h2.Driver;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.DependsOn;
import tech.powerjob.common.utils.CommonUtils;
import tech.powerjob.official.processors.impl.sql.SpringDatasourceSqlProcessor;
import tech.powerjob.worker.PowerJobSpringWorker;
import javax.sql.DataSource;
/**
 * Wires a dedicated file-backed H2 datasource and a {@link SpringDatasourceSqlProcessor}
 * for the SQL processor samples. Only active when a {@link PowerJobSpringWorker} bean exists.
 *
 * @author Echo009
 * @since 2021/3/10
 */
@Configuration
@ConditionalOnBean(PowerJobSpringWorker.class)
public class SqlProcessorConfiguration {
    /**
     * File-backed H2 datasource used by the SQL processor; a fresh database
     * directory is created under the user's home per UUID.
     * NOTE(review): depends on a bean named "initPowerJob" declared elsewhere — confirm it exists.
     */
    @Bean
    @DependsOn({"initPowerJob"})
    public DataSource sqlProcessorDataSource() {
        String path = System.getProperty("user.home") + "/test/h2/" + CommonUtils.genUUID() + "/";
        String jdbcUrl = String.format("jdbc:h2:file:%spowerjob_sql_processor_db;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false", path);
        HikariConfig config = new HikariConfig();
        config.setDriverClassName(Driver.class.getName());
        config.setJdbcUrl(jdbcUrl);
        config.setAutoCommit(true);
        // Minimum number of idle connections kept in the pool
        config.setMinimumIdle(1);
        // Maximum number of connections in the pool
        config.setMaximumPoolSize(10);
        return new HikariDataSource(config);
    }
    /**
     * SQL processor bound to the datasource above, with two sample validators
     * and a pass-through SQL parser registered.
     */
    @Bean
    public SpringDatasourceSqlProcessor simpleSpringSqlProcessor(@Qualifier("sqlProcessorDataSource") DataSource dataSource) {
        SpringDatasourceSqlProcessor springDatasourceSqlProcessor = new SpringDatasourceSqlProcessor(dataSource);
        // do nothing
        springDatasourceSqlProcessor.registerSqlValidator("fakeSqlValidator", sql -> true);
        // Reject any SQL containing "drop" (case-insensitive)
        springDatasourceSqlProcessor.registerSqlValidator("interceptDropValidator", sql -> sql.matches("^(?i)((?!drop).)*$"));
        // do nothing
        springDatasourceSqlProcessor.setSqlParser((sql, taskContext) -> sql);
        return springDatasourceSqlProcessor;
    }
}

View File

@ -0,0 +1,80 @@
package tech.powerjob.samples.mr;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.TaskResult;
import tech.powerjob.worker.core.processor.sdk.MapReduceProcessor;
import com.google.common.collect.Lists;
import java.util.List;
/**
 * A processor that simulates a DAG; usable as a stand-in until official DAG support lands.
 *
 * ROOT -> A -> B -> REDUCE
 *           -> C
 *
 * @author tjq
 * @since 2020/5/15
 */
public class DAGSimulationProcessor implements MapReduceProcessor {

    /**
     * Dispatches / executes the simulated DAG levels based on the sub-task type.
     *
     * @param context task context; {@code getSubTask()} carries the level marker object
     * @return success/failure of the current level
     */
    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        if (isRootTask()) {
            // L1: run the root task.
            // After it completes, emit sub-task A; any data to pass on can be carried as fields of TaskA.
            TaskA taskA = new TaskA();
            try {
                map(Lists.newArrayList(taskA), "LEVEL1_TASK_A");
                return new ProcessResult(true, "map success");
            } catch (Exception e) {
                return new ProcessResult(false, "map failed");
            }
        }
        if (context.getSubTask() instanceof TaskA) {
            // L2: run task A.
            // After it completes, emit sub-tasks B and C (executed in parallel).
            TaskB taskB = new TaskB();
            TaskC taskC = new TaskC();
            try {
                map(Lists.newArrayList(taskB, taskC), "LEVEL2_TASK_BC");
                return new ProcessResult(true, "map success");
            } catch (Exception e) {
                // BUGFIX: this exception used to be silently swallowed, letting control fall
                // through to the misleading "UNKNOWN_TYPE_OF_SUB_TASK" failure below.
                // Report the map failure explicitly, consistent with the root branch.
                return new ProcessResult(false, "map failed");
            }
        }
        if (context.getSubTask() instanceof TaskB) {
            // L3: run task B
            return new ProcessResult(true, "xxx");
        }
        if (context.getSubTask() instanceof TaskC) {
            // L3: run task C
            return new ProcessResult(true, "xxx");
        }
        return new ProcessResult(false, "UNKNOWN_TYPE_OF_SUB_TASK");
    }

    /**
     * L4: the final reduce step; taskResults holds the results of all previous tasks.
     */
    @Override
    public ProcessResult reduce(TaskContext context, List<TaskResult> taskResults) {
        taskResults.forEach(taskResult -> {
            // do something...
        });
        return new ProcessResult(true, "reduce success");
    }

    /** Marker type for the level-1 sub-task. */
    private static class TaskA {
    }
    /** Marker type for one of the level-2 sub-tasks. */
    private static class TaskB {
    }
    /** Marker type for one of the level-2 sub-tasks. */
    private static class TaskC {
    }
}

View File

@ -0,0 +1,67 @@
package tech.powerjob.samples.mr;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.TaskResult;
import tech.powerjob.worker.core.processor.sdk.MapReduceProcessor;
import tech.powerjob.worker.log.OmsLogger;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import org.springframework.stereotype.Component;
import java.util.List;
import java.util.Map;
/**
 * Static sharding simulated with MapReduce.
 * A classic case of using a sledgehammer to crack a nut~
 *
 * @author tjq
 * @since 2020/5/21
 */
@Component
public class StaticSliceProcessor implements MapReduceProcessor {
    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        OmsLogger omsLogger = context.getOmsLogger();
        // The root task is responsible for dispatching the shards
        if (isRootTask()) {
            // Shard parameters come from the console as KV pairs, e.g.: 1=a&2=b&3=c
            String jobParams = context.getJobParams();
            // ROBUSTNESS: missing/empty params previously caused an NPE inside Guava's Splitter;
            // fail fast with an explicit message instead.
            if (jobParams == null || jobParams.trim().isEmpty()) {
                omsLogger.warn("[StaticSliceProcessor] job params are empty, nothing to slice!");
                return new ProcessResult(false, "EMPTY_JOB_PARAMS");
            }
            Map<String, String> paramsMap = Splitter.on("&").withKeyValueSeparator("=").split(jobParams);
            List<SubTask> subTasks = Lists.newLinkedList();
            paramsMap.forEach((k, v) -> subTasks.add(new SubTask(Integer.parseInt(k), v)));
            map(subTasks, "SLICE_TASK");
            return new ProcessResult(true, "map successfully");
        }
        Object subTask = context.getSubTask();
        if (subTask instanceof SubTask) {
            // Actual processing goes here.
            // Of course, if a subTask is still too large, it can be split and dispatched again.
            return new ProcessResult(true, "subTask:" + ((SubTask) subTask).getIndex() + " process successfully");
        }
        return new ProcessResult(false, "UNKNOWN BUG");
    }

    @Override
    public ProcessResult reduce(TaskContext context, List<TaskResult> taskResults) {
        // Do whatever aggregation the business needs... if none is needed, use a plain Map processor instead
        return new ProcessResult(true, "xxxx");
    }

    /** Shard descriptor: index plus its parameter string. Needs the no-args constructor for deserialization. */
    @Getter
    @NoArgsConstructor
    @AllArgsConstructor
    private static class SubTask {
        private int index;
        private String params;
    }
}

View File

@ -0,0 +1,56 @@
package tech.powerjob.samples.processors;
import tech.powerjob.common.utils.NetUtils;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.TaskResult;
import tech.powerjob.worker.core.processor.sdk.BroadcastProcessor;
import tech.powerjob.worker.log.OmsLogger;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.List;
/**
 * Broadcast processor example.
 *
 * @author tjq
 * @since 2020/4/17
 */
@Slf4j
@Component
public class BroadcastProcessorDemo implements BroadcastProcessor {

    /**
     * Runs once before the broadcast; fails when the console passes "rootFailed".
     */
    @Override
    public ProcessResult preProcess(TaskContext context) {
        System.out.println("===== BroadcastProcessorDemo#preProcess ======");
        context.getOmsLogger().info("BroadcastProcessorDemo#preProcess, current host: {}", NetUtils.getLocalHost4Test());
        boolean consoleRequestedFailure = "rootFailed".equals(context.getJobParams());
        return consoleRequestedFailure
                ? new ProcessResult(false, "console need failed")
                : new ProcessResult(true);
    }

    /**
     * Sleeps for the duration given in the job params (at least 1000 ms), then succeeds.
     */
    @Override
    public ProcessResult process(TaskContext taskContext) throws Exception {
        OmsLogger logger = taskContext.getOmsLogger();
        System.out.println("===== BroadcastProcessorDemo#process ======");
        logger.info("BroadcastProcessorDemo#process, current host: {}", NetUtils.getLocalHost4Test());
        long requestedSleepMs;
        try {
            requestedSleepMs = Long.parseLong(taskContext.getJobParams());
        } catch (Exception e) {
            logger.warn("[BroadcastProcessor] parse sleep time failed!", e);
            requestedSleepMs = 1000;
        }
        Thread.sleep(Math.max(requestedSleepMs, 1000));
        return new ProcessResult(true);
    }

    /**
     * Runs once after all broadcast targets have finished; logs the collected results.
     */
    @Override
    public ProcessResult postProcess(TaskContext context, List<TaskResult> taskResults) {
        System.out.println("===== BroadcastProcessorDemo#postProcess ======");
        context.getOmsLogger().info("BroadcastProcessorDemo#postProcess, current host: {}, taskResult: {}", NetUtils.getLocalHost4Test(), taskResults);
        return new ProcessResult(true, "success");
    }
}

View File

@ -0,0 +1,98 @@
package tech.powerjob.samples.processors;
import tech.powerjob.common.serialize.JsonUtils;
import tech.powerjob.samples.MysteryService;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.MapProcessor;
import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
/**
 * Map processor example: the root task fans out BATCH_NUM batches of
 * BATCH_SIZE item ids; leaf tasks process the ids of their SubTask.
 *
 * @author tjq
 * @since 2020/4/18
 */
@Component
public class MapProcessorDemo implements MapProcessor {
    @Resource
    private MysteryService mysteryService;
    /**
     * Number of items carried by each dispatched sub-task.
     */
    private static final int BATCH_SIZE = 100;
    /**
     * Number of batches (sub-tasks) to dispatch.
     */
    private static final int BATCH_NUM = 5;
    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        // NOTE(review): `log` is not declared in this class; presumably provided by the
        // MapProcessor interface (or an annotation not visible here) — confirm.
        log.info("============== MapProcessorDemo#process ==============");
        log.info("isRootTask:{}", isRootTask());
        log.info("taskContext:{}", JsonUtils.toJSONString(context));
        log.info("{}", mysteryService.hasaki());
        if (isRootTask()) {
            log.info("==== MAP ====");
            // Build BATCH_NUM sub-tasks, each carrying BATCH_SIZE consecutive item ids
            List<SubTask> subTasks = Lists.newLinkedList();
            for (int j = 0; j < BATCH_NUM; j++) {
                SubTask subTask = new SubTask();
                subTask.siteId = j;
                subTask.itemIds = Lists.newLinkedList();
                subTasks.add(subTask);
                for (int i = 0; i < BATCH_SIZE; i++) {
                    subTask.itemIds.add(i + j * 100);
                }
            }
            map(subTasks, "MAP_TEST_TASK");
            return new ProcessResult(true, "map successfully");
        } else {
            log.info("==== PROCESS ====");
            SubTask subTask = (SubTask) context.getSubTask();
            for (Integer itemId : subTask.getItemIds()) {
                if (Thread.interrupted()) {
                    // The job has been stopped/interrupted; abandon the rest of this sub-task
                    log.info("job has been stop! so stop to process subTask: {} => {}", subTask.getSiteId(), itemId);
                    break;
                }
                log.info("processing subTask: {} => {}", subTask.getSiteId(), itemId);
                int max = Integer.MAX_VALUE >> 7;
                for (int i = 0; ; i++) {
                    // Busy-loop to simulate a time-consuming operation
                    if (i > max) {
                        break;
                    }
                }
            }
            // Demonstrates appending to the workflow context from within a Map task
            context.getWorkflowContext().appendData2WfContext("Yasuo", "A sword's poor company for a long road.");
            // Randomly succeed or fail to exercise the retry path...
            boolean b = ThreadLocalRandom.current().nextBoolean();
            if (context.getCurrentRetryTimes() >= 1) {
                // ...but always succeed on retry
                b = true;
            }
            return new ProcessResult(b, "RESULT:" + b);
        }
    }

    // Sub-task payload; the no-args constructor is required for deserialization.
    @Getter
    @NoArgsConstructor
    @AllArgsConstructor
    public static class SubTask {
        private Integer siteId;
        private List<Integer> itemIds;
    }
}

View File

@ -0,0 +1,190 @@
package tech.powerjob.samples.processors;
import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import tech.powerjob.common.serialize.JsonUtils;
import tech.powerjob.common.utils.MapUtils;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.TaskResult;
import tech.powerjob.worker.core.processor.sdk.MapReduceProcessor;
import tech.powerjob.worker.log.OmsLogger;
import java.io.Serializable;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicLong;
/**
* MapReduce 处理器示例
* 控制台参数:{"batchSize": 100, "batchNum": 2}
*
* @author tjq
* @since 2020/4/17
*/
@Slf4j
@Component("demoMapReduceProcessor")
public class MapReduceProcessorDemo implements MapReduceProcessor {
@Override
public ProcessResult process(TaskContext context) throws Exception {
// PowerJob 提供的日志 API可支持在控制台指定多种日志模式在线查看 / 本地打印)。最佳实践:全部使用 OmsLogger 打印日志,开发阶段控制台配置为 在线日志方便开发;上线后调整为本地日志,与直接使用 SLF4J 无异
OmsLogger omsLogger = context.getOmsLogger();
// 是否为根任务,一般根任务进行任务的分发
boolean isRootTask = isRootTask();
// Task 名称,除了 MAP 任务其他 taskName 均由开发者自己创建,某种意义上也可以按参数理解(比如多层 MAP 的情况下taskName 可以命名为Map_Level1, Map_Level2最终按 taskName 判断层级进不同的执行分支)
String taskName = context.getTaskName();
// 任务参数,控制台任务配置中直接填写的参数
String jobParamsStr = context.getJobParams();
// 任务示例参数,运行任务时手动填写的参数(等同于 OpenAPI runJob 的携带的参数)
String instanceParamsStr = context.getInstanceParams();
omsLogger.info("[MapReduceDemo] [startExecuteNewTask] jobId:{}, instanceId:{}, taskId:{}, taskName: {}, RetryTimes: {}, isRootTask:{}, jobParams:{}, instanceParams:{}", context.getJobId(), context.getInstanceId(), context.getTaskId(), taskName, context.getCurrentRetryTimes(), isRootTask, jobParamsStr, instanceParamsStr);
// 常见写法,优先从 InstanceParams 获取参数,取不到再从 JobParams 中获取,灵活性最佳(相当于实现了实例参数重载任务参数)
String finalParams = StringUtils.isEmpty(instanceParamsStr) ? jobParamsStr : instanceParamsStr;
final JSONObject params = Optional.ofNullable(finalParams).map(JSONObject::parseObject).orElse(new JSONObject());
if (isRootTask) {
omsLogger.info("[MapReduceDemo] [RootTask] start execute root task~");
/*
* rootTask 内的核心逻辑,即为按自己的业务需求拆分子任务。比如
* - 从数据库/数仓拉一批任务出来做计算,那 MAP 任务就可以 stream 读全库,每 N 个 ID 作为一个 SubTask 对外分发
* - 需要读取几千万个文件进行解析,那么 MAP 任务就可以将 N 个文件名作为一个 SubTask 对外分发,每个子任务接收到文件名称进行文件处理
*
* eg. 现在需要从文件中读取100W个ID并处理数据库中这些ID对应的数据那么步骤如下
* 1. 根任务RootTask读取文件流式拉取100W个ID并按100个一批的大小组装成子任务进行派发
* 2. 非根任务获取子任务,完成业务逻辑的处理
*
* 以下 demo 进行该逻辑的模拟
*/
// 构造子任务
// 需要读取的文件总数
Long num = MapUtils.getLong(params, "num", 100000L);
// 每个子任务携带多少个文件ID此参数越大每个子任务就“越大”如果失败的重试成本就越高。参数越小每个子任务就越轻当相应的分片数量会提升会让 PowerJob 计算开销增大,建议按业务需求合理调配)
Long batchSize = MapUtils.getLong(params, "batchSize", 100L);
// 此处模拟从文件读取 num 个 ID每个子任务携带 batchSize 个 ID 作为一个分片
List<Long> ids = Lists.newArrayList();
for (long i = 0; i < num; i++) {
ids.add(i);
if (ids.size() >= batchSize) {
// 构造自己的子任务,自行传递所有需要的参数
SubTask subTask = new SubTask(ThreadLocalRandom.current().nextLong(), Lists.newArrayList(ids), "extra");
ids.clear();
try {
/*
第一个参数List<子任务>map 支持批量操作以减少网络 IO 提升性能,简单起见此处不再示例,开发者可自行优化性能
第二个参数:子任务名称,即后续 Task 执行时从 TaskContext#taskName 拿到的值。某种意义上也可以按参数理解(比如多层 MAP 的情况下taskName 可以命名为Map_Level1, Map_Level2最终按 taskName 判断层级进不同的执行分支)
*/
map(Lists.newArrayList(subTask), "L1_FILE_PROCESS");
} catch (Exception e) {
// 注意 MAP 操作可能抛出异常,建议进行捕获并按需处理
omsLogger.error("[MapReduceDemo] map task failed!", e);
throw e;
}
}
}
if (!ids.isEmpty()) {
map(Lists.newArrayList(new SubTask()), "L1_FILE_PROCESS");
}
// map 阶段的结果,由于前置逻辑为异常直接抛出,执行到这里一定成功,所以无脑设置为 success。开发者可自行调整逻辑
return new ProcessResult(true, "MAP_SUCCESS,totalNum:" + num);
}
// 如果是简单的二层结构ROOT - SubTASK此处一定是子 Task无需再次判断。否则可使用 TaskContext#taskName 字符串匹配 或 TaskContext#SubTask 对象内自定义参数匹配,进入目标执行分支
// 获取前置节点 map 传递过来的参数,进行业务处理
SubTask subTask = (SubTask) context.getSubTask();
log.info("[MapReduceDemo] [SubTask] taskId:{}, taskName: {}, subTask: {}", context.getTaskId(), taskName, JsonUtils.toJSONString(subTask));
Thread.sleep(MapUtils.getLong(params, "bizProcessCost", 233L));
// 模拟有成功有失败的情况,开发者按真实业务执行情况判断即可
long successRate = MapUtils.getLong(params, "successRate", 80L);
long randomNum = ThreadLocalRandom.current().nextLong(100);
if (successRate > randomNum) {
return new ProcessResult(true, "PROCESS_SUCCESS:" + randomNum);
} else {
return new ProcessResult(false, "PROCESS_FAILED:" + randomNum);
}
}
@Override
public ProcessResult reduce(TaskContext context, List<TaskResult> taskResults) {
// 子任务结果太大,上报在线日志会有 IO 问题,直接使用本地日志打
log.info("List<TaskResult>: {}", JSONObject.toJSONString(taskResults));
OmsLogger omsLogger = context.getOmsLogger();
omsLogger.info("================ MapReduceProcessorDemo#reduce ================");
// 所有 Task 执行结束后reduce 将会被执行taskResults 保存了所有子任务的执行结果。(注意 reduce 由于保存了所有子任务的执行结果,在子任务规模巨大时对内存有极大开销,超大型计算任务慎用或使用流式 reduce开发中
// 用法举例:统计执行结果
AtomicLong successCnt = new AtomicLong(0);
AtomicLong failedCnt = new AtomicLong(0);
taskResults.forEach(tr -> {
if (tr.isSuccess()) {
successCnt.incrementAndGet();
} else {
failedCnt.incrementAndGet();
}
});
double successRate = 1.0 * successCnt.get() / (successCnt.get() + failedCnt.get());
String resultMsg = String.format("succeedTaskNum:%d,failedTaskNum:%d,successRate:%f", successCnt.get(), failedCnt.get(), successRate);
omsLogger.info("[MapReduceDemo] [Reduce] {}", resultMsg);
// reduce 阶段的结果,将作为任务真正执行结果
if (successRate > 0.8) {
return new ProcessResult(true, resultMsg);
} else {
return new ProcessResult(false, resultMsg);
}
}
/**
* 自定义的子任务,按自己的业务需求定义即可
* 注意:代表子任务参数的类:一定要有无参构造方法!一定要有无参构造方法!一定要有无参构造方法!
* 最好把 GET / SET 方法也加上,减少序列化问题的概率
*/
@Data
@AllArgsConstructor
public static class SubTask implements Serializable {
/**
* 再次强调,一定要有无参构造方法
*/
public SubTask() {
}
private Long siteId;
private List<Long> idList;
private String extra;
}
}

View File

@ -0,0 +1,34 @@
package tech.powerjob.samples.processors;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
import tech.powerjob.worker.log.OmsLogger;
import java.util.Optional;
/**
 * Minimal single-machine processor whose outcome is driven by the job params:
 * params containing "CN" succeed with a Chinese message, params containing "F"
 * fail, anything else succeeds.
 *
 * @author Echo009
 * @since 2022/4/27
 */
public class SimpleProcessor implements BasicProcessor {
    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        OmsLogger logger = context.getOmsLogger();
        String jobParams = Optional.ofNullable(context.getJobParams()).orElse("S");
        logger.info("Current context:{}", context.getWorkflowContext());
        logger.info("Current job params:{}", jobParams);
        logger.info("META: {}", context.getInstanceMeta());
        // Chinese-result test case (#581)
        if (jobParams.contains("CN")) {
            return new ProcessResult(true, "任务成功啦!!!");
        }
        if (jobParams.contains("F")) {
            return new ProcessResult(false);
        }
        return new ProcessResult(true, "yeah!");
    }
}

View File

@ -0,0 +1,51 @@
package tech.powerjob.samples.processors;
import org.apache.commons.lang3.StringUtils;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
import tech.powerjob.worker.log.OmsLogger;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.Collections;
/**
 * Standalone processor example.
 *
 * @author tjq
 * @since 2020/4/17
 */
@Slf4j
@Component("testBaseProcessor")
public class StandaloneProcessorDemo implements BasicProcessor {

    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        OmsLogger omsLogger = context.getOmsLogger();
        omsLogger.info("StandaloneProcessorDemo start process,context is {}.", context);
        omsLogger.info("Notice! If you want this job process failed, your jobParams need to be 'failed'");
        omsLogger.info("Let's test the exception~");
        // Deliberately trigger an exception to exercise error logging
        // (Collections.emptyList() is immutable, so add() always throws)
        try {
            Collections.emptyList().add("277");
        } catch (Exception e) {
            omsLogger.error("oh~it seems that we have an exception~", e);
        }
        log.info("================ StandaloneProcessorDemo#process ================");
        log.info("jobParam:{}", context.getJobParams());
        log.info("instanceParams:{}", context.getInstanceParams());
        String param = resolveParam(context);
        // The job succeeds unless the resolved param is exactly "failed"
        boolean success = !"failed".equals(param);
        omsLogger.info("StandaloneProcessorDemo finished process,success: {}", success);
        omsLogger.info("anyway, we finished the job successfully~Congratulations!");
        return new ProcessResult(success, context + ": " + success);
    }

    /**
     * Resolves the effective parameter. Outside a workflow, the dynamic instance
     * params override the static job params; inside a workflow, job params win.
     */
    private static String resolveParam(TaskContext context) {
        if (context.getWorkflowContext() != null) {
            return context.getJobParams();
        }
        return StringUtils.isBlank(context.getInstanceParams()) ? context.getJobParams() : context.getInstanceParams();
    }
}

View File

@ -0,0 +1,25 @@
package tech.powerjob.samples.processors;
import lombok.extern.slf4j.Slf4j;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
import org.springframework.stereotype.Component;
/**
 * Tests timing-out jobs (interruptible): sleeps for the number of
 * milliseconds given in the job params.
 *
 * @author tjq
 * @since 2020/4/20
 */
@Component
@Slf4j
public class TimeoutProcessor implements BasicProcessor {
    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        // Parse once and reuse; the params were previously parsed a second time for
        // Thread.sleep, duplicating work and letting the logged value diverge from
        // the actual sleep duration.
        long sleepTime = Long.parseLong(context.getJobParams());
        log.info("TaskInstance({}) will sleep {} ms", context.getInstanceId(), sleepTime);
        Thread.sleep(sleepTime);
        return new ProcessResult(true, "impossible~~~~QAQ~");
    }
}

View File

@ -0,0 +1,46 @@
package tech.powerjob.samples.processors.test;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import tech.powerjob.common.utils.CommonUtils;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.TaskResult;
import tech.powerjob.worker.core.processor.sdk.MapReduceProcessor;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
/**
 * <a href="https://github.com/PowerJob/PowerJob/issues/1033">tests long-running tasks whose idle time prevented reduce from executing</a>
 *
 * @author tjq
 * @since 2024/11/21
 */
@Slf4j
@Component
public class IdleBugTestProcessor implements MapReduceProcessor {

    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        // Root task: fan out seven trivial string sub-tasks
        if (isRootTask()) {
            map(Lists.newArrayList("1", "2", "3", "4", "5", "6", "7"), "L1_TASK");
            return new ProcessResult(true, "MAP_SUCCESS");
        }
        Object subTask = context.getSubTask();
        log.info("[IdleBugTestProcessor] subTask:={}, start to process!", subTask);
        // Sleep 40-60s; adjust the idle threshold accordingly when reproducing the issue
        int sleepMs = ThreadLocalRandom.current().nextInt(40001, 60000);
        CommonUtils.easySleep(sleepMs);
        log.info("[IdleBugTestProcessor] subTask:={}, finished process", subTask);
        return new ProcessResult(true, "SUCCESS_" + subTask);
    }

    @Override
    public ProcessResult reduce(TaskContext context, List<TaskResult> taskResults) {
        // The issue manifests as this reduce never being reached
        log.info("[IdleBugTestProcessor] [REDUCE] REDUCE!!!");
        return new ProcessResult(true, "SUCCESS");
    }
}

View File

@ -0,0 +1,41 @@
package tech.powerjob.samples.processors.test;
import com.alibaba.fastjson.JSONObject;
import org.springframework.stereotype.Component;
import tech.powerjob.official.processors.util.CommonUtils;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
import tech.powerjob.worker.log.OmsLogger;
import java.util.Date;
import java.util.Optional;
/**
 * LogTestProcessor: emits {@code loopTimes} rounds of DEBUG/INFO/WARN/ERROR
 * logs (configured via JSON job params, default 1000).
 *
 * @author tjq
 * @since 2022/9/18
 */
@Component
public class LogTestProcessor implements BasicProcessor {
    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        final OmsLogger omsLogger = context.getOmsLogger();
        final String parseParams = CommonUtils.parseParams(context);
        final JSONObject config = Optional.ofNullable(JSONObject.parseObject(parseParams)).orElse(new JSONObject());
        final long loopTimes = Optional.ofNullable(config.getLong("loopTimes")).orElse(1000L);
        // Use a long counter: loopTimes is a long, so an int counter would wrap
        // negative past Integer.MAX_VALUE and loop forever.
        for (long i = 0; i < loopTimes; i++) {
            omsLogger.debug("[DEBUG] one DEBUG log in {}", new Date());
            omsLogger.info("[INFO] one INFO log in {}", new Date());
            omsLogger.warn("[WARN] one WARN log in {}", new Date());
            omsLogger.error("[ERROR] one ERROR log in {}", new Date());
        }
        return new ProcessResult(true);
    }
}

View File

@ -0,0 +1,18 @@
package tech.powerjob.samples.processors.test;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
/**
 * ZeroCostTestProcessor: returns success immediately, doing no work at all.
 *
 * @author tjq
 * @since 2023/5/7
 */
public class ZeroCostTestProcessor implements BasicProcessor {

    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        // No computation: the job finishes at (nearly) zero cost
        ProcessResult result = new ProcessResult(true, "zero cost");
        return result;
    }
}

View File

@ -0,0 +1,46 @@
package tech.powerjob.samples.tester;
import tech.powerjob.common.WorkflowContextConstant;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
import org.springframework.stereotype.Component;
import java.util.Map;
/**
 * Tests appending data to the workflow context: reads the init-params counter,
 * increments it, and writes it back. Job params equal to "0" force a failure.
 *
 * @author Echo009
 * @since 2021/2/6
 */
@Component
public class AppendWorkflowContextTester implements BasicProcessor {

    /** Job-params value that forces this processor to report failure. */
    private static final String FAIL_CODE = "0";

    @Override
    @SuppressWarnings("squid:S106")
    public ProcessResult process(TaskContext context) throws Exception {
        Map<String, String> workflowContext = context.getWorkflowContext().fetchWorkflowContext();
        String originValue = workflowContext.get(WorkflowContextConstant.CONTEXT_INIT_PARAMS_KEY);
        System.out.println("======= AppendWorkflowContextTester#start =======");
        System.out.println("current instance id : " + context.getInstanceId());
        System.out.println("current workflow context : " + workflowContext);
        System.out.println("current job param : " + context.getJobParams());
        System.out.println("initParam of workflow context : " + originValue);
        int counter = parseIntQuietly(originValue);
        context.getWorkflowContext().appendData2WfContext(WorkflowContextConstant.CONTEXT_INIT_PARAMS_KEY, counter + 1);
        System.out.println("======= AppendWorkflowContextTester#end =======");
        boolean shouldFail = FAIL_CODE.equals(context.getJobParams());
        return shouldFail ? new ProcessResult(false, "Failed!") : new ProcessResult(true, "Success!");
    }

    /** Parses the value as an int, falling back to 0 on null/garbage input. */
    private static int parseIntQuietly(String value) {
        try {
            return Integer.parseInt(value);
        } catch (Exception e) {
            return 0;
        }
    }
}

View File

@ -0,0 +1,64 @@
package tech.powerjob.samples.tester;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.Sets;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
import tech.powerjob.worker.log.OmsLogger;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
/**
 * Detects repeated execution of simple jobs.
 * Some users reported occasional duplicate runs; this processor counts them.
 *
 * @author tjq
 * @since 2023/3/7
 */
@Slf4j
@Component
public class JobRepetitiveExecutionTester implements BasicProcessor {

    /** Total number of detected repetitions across all jobs. */
    private final AtomicLong repetitions = new AtomicLong();
    /**
     * Stores jobId_instanceId identifiers for troubleshooting.
     * Test-only code; the memory growth is deliberately ignored.
     */
    private final Set<String> repetitionsInfo = Sets.newHashSet();
    /** uid -> observed execution count, bounded so the cache cannot grow without limit. */
    private final Cache<String, Integer> instanceId2Num = CacheBuilder.newBuilder().maximumSize(1024).build();

    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        // Local-only log of the current repetition state
        log.info("[SimpleJobRepetitiveExecutionTester] repetitions:{}, repetitionsInfo: {}", repetitions.get(), repetitionsInfo);
        final OmsLogger omsLogger = context.getOmsLogger();
        final Long instanceId = context.getInstanceId();
        // BUGFIX: the "jobId" placeholder was previously filled with getJobParams(),
        // producing mislabeled logs; pass the actual job id instead.
        omsLogger.info("[SimpleJobRepetitiveExecutionTester] jobId: {}, instanceId: {}, subInstanceId: {}", context.getJobId(), instanceId, context.getSubInstanceId());
        check(context);
        return new ProcessResult(true, "success: " + System.currentTimeMillis());
    }

    /**
     * Records one execution of this (instanceId, subInstanceId) pair and logs
     * loudly if the pair has been seen before. Synchronized because a shared
     * cache and counter are mutated.
     */
    private synchronized void check(TaskContext context) {
        String uid = context.getInstanceId() + "_" + Optional.ofNullable(context.getSubInstanceId()).orElse(context.getInstanceId());
        Integer numIfPresent = instanceId2Num.getIfPresent(uid);
        // With no repetition, this branch is taken 100% of the time
        if (numIfPresent == null) {
            instanceId2Num.put(uid, 1);
            return;
        }
        context.getOmsLogger().error("[Repetitions] instance[id={}] already execute {} nums!", uid, numIfPresent);
        instanceId2Num.put(uid, numIfPresent + 1);
        repetitionsInfo.add(String.format("%d_%s", context.getJobId(), uid));
        context.getOmsLogger().error("[Repetitions] current repetitions num: {}", repetitions.incrementAndGet());
        context.getOmsLogger().error("[Repetitions] current repetitionsInfo: {}", repetitionsInfo.toString());
    }
}

View File

@ -0,0 +1,52 @@
package tech.powerjob.samples.tester;
import com.alibaba.fastjson.JSONObject;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
import tech.powerjob.worker.log.OmsLogger;
import org.springframework.stereotype.Component;
/**
* 测试 Oms 在线日志的性能
*
* @author tjq
* @since 2020/5/3
*/
@Component
public class OmsLogPerformanceTester implements BasicProcessor {

    private static final int BATCH = 1000;
    /** Defaults used when job params are absent or incomplete (matches the documented example). */
    private static final long DEFAULT_NUM = 10000L;
    private static final long DEFAULT_INTERVAL = 200L;

    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        OmsLogger omsLogger = context.getOmsLogger();
        // Console params, format: {"num":10000, "interval": 200}
        JSONObject jobParams = JSONObject.parseObject(context.getJobParams());

        // Null-safe extraction: parseObject(null) returns null, and getLong returns null
        // for missing keys; the original unboxing would throw NPE in both cases.
        Long numObj = jobParams == null ? null : jobParams.getLong("num");
        Long intervalObj = jobParams == null ? null : jobParams.getLong("interval");
        long num = numObj == null ? DEFAULT_NUM : numObj;
        long interval = intervalObj == null ? DEFAULT_INTERVAL : intervalObj;

        omsLogger.info("ready to start to process, current JobParams is {}.", jobParams);
        RuntimeException re = new RuntimeException("This is a exception~~~");

        long times = (long) Math.ceil(1.0 * num / BATCH);
        for (long i = 0; i < times; i++) {
            for (long j = 0; j < BATCH; j++) {
                long index = i * BATCH + j;
                // Stop at exactly `num` lines instead of overshooting in the last batch.
                if (index >= num) {
                    break;
                }
                System.out.println("send index: " + index);
                omsLogger.info("testing omsLogger's performance, current index is {}.", index);
            }
            omsLogger.error("Oh, it seems that we have got an exception.", re);
            try {
                Thread.sleep(interval);
            } catch (InterruptedException ignored) {
                // Restore the interrupt flag and stop so the instance stays stoppable.
                Thread.currentThread().interrupt();
                break;
            }
        }

        omsLogger.info("anyway, we finished the job~configuration~");
        return new ProcessResult(true, "good job");
    }
}

View File

@ -0,0 +1,194 @@
package tech.powerjob.samples.tester;
import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import tech.powerjob.client.ClientConfig;
import tech.powerjob.client.IPowerJobClient;
import tech.powerjob.client.PowerJobClient;
import tech.powerjob.common.enums.ExecuteType;
import tech.powerjob.common.enums.ProcessorType;
import tech.powerjob.common.enums.SwitchableStatus;
import tech.powerjob.common.enums.TimeExpressionType;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.response.InstanceInfoDTO;
import tech.powerjob.common.response.JobInfoDTO;
import tech.powerjob.common.response.ResultDTO;
import tech.powerjob.official.processors.util.CommonUtils;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
import java.util.Optional;
/**
* 测试 OpenAPI
*
* @author tjq
* @since 2024/8/11
*/
@Slf4j
@Component
public class OpenApiTester implements BasicProcessor {

    /**
     * Cache of clients keyed by (appName, password, addressList).
     * Concurrent map: process() may run on several worker threads at once,
     * and computeIfAbsent on a plain HashMap is not thread-safe.
     */
    private final Map<String, IPowerJobClient> clientCache = Maps.newConcurrentMap();

    private static final String NEW_JOB_PARAMS = "{'aa':'bb'}";
    private static final String RUN_INSTANCE_PARAMS = "{'key':'value'}";

    private static final int NEW_JOB_MAX_INSTANCE_NUM = 9;

    @Override
    public ProcessResult process(TaskContext context) throws Exception {

        IPowerJobClient client = fetchClient(context);

        // Create a job via OpenAPI.
        SaveJobInfoRequest saveJobInfoRequest = buildSaveJobInfoRequest();
        context.getOmsLogger().info("[newJob] saveJobInfoRequest: {}", JSONObject.toJSONString(saveJobInfoRequest));
        ResultDTO<Long> saveJobResult = client.saveJob(saveJobInfoRequest);
        context.getOmsLogger().info("[newJob] RESPONSE: {}", JSONObject.toJSONString(saveJobResult));
        Long createdJobId = fetchResultData(saveJobResult);

        // Test export.
        ResultDTO<SaveJobInfoRequest> exportJobResult = client.exportJob(createdJobId);
        context.getOmsLogger().info("[exportJob] exportJobResult: {}", JSONObject.toJSONString(exportJobResult));
        SaveJobInfoRequest exportJobInfo = fetchResultData(exportJobResult);
        assert exportJobInfo.getJobParams().equals(saveJobInfoRequest.getJobParams());
        assert exportJobInfo.getMaxInstanceNum().equals(saveJobInfoRequest.getMaxInstanceNum());

        // Test copy.
        context.getOmsLogger().info("[copyJob] sourceJobId: {}", createdJobId);
        ResultDTO<Long> copyJobResult = client.copyJob(createdJobId);
        context.getOmsLogger().info("[copyJob] copyJobResult: {}", JSONObject.toJSONString(copyJobResult));
        Long copiedJobId = fetchResultData(copyJobResult);

        // Disable the copy, then verify both jobs' state.
        context.getOmsLogger().info("[disableJob] targetJobId: {}", copiedJobId);
        ResultDTO<Void> disableJobResult = client.disableJob(copiedJobId);
        fetchResultData(disableJobResult);
        context.getOmsLogger().info("[disableJob] disableJobResult: {}", disableJobResult);

        ResultDTO<JobInfoDTO> createdJobInfoResult = client.fetchJob(createdJobId);
        context.getOmsLogger().info("[fetchJob] createdJobInfo: {}", JSONObject.toJSONString(createdJobInfoResult));
        ResultDTO<JobInfoDTO> copiedJobInfoResult = client.fetchJob(copiedJobId);
        context.getOmsLogger().info("[fetchJob] copiedJobInfo: {}", JSONObject.toJSONString(copiedJobInfoResult));

        JobInfoDTO createdJob = fetchResultData(createdJobInfoResult);
        JobInfoDTO copiedJob = fetchResultData(copiedJobInfoResult);
        assert createdJob.getJobParams().equals(copiedJob.getJobParams());
        assert createdJob.getMaxInstanceNum().equals(copiedJob.getMaxInstanceNum());
        // NOTE(review): == assumes getStatus() yields a primitive int (or unboxes) — confirm.
        assert copiedJob.getStatus() == SwitchableStatus.DISABLE.getV();

        // Re-enable the copy and verify.
        ResultDTO<Void> enableJobResult = client.enableJob(copiedJob.getId());
        fetchResultData(enableJobResult);
        context.getOmsLogger().info("[enableJob] enableJobResult: {}", JSONObject.toJSONString(enableJobResult));

        ResultDTO<JobInfoDTO> copiedJobInfoResult2 = client.fetchJob(copiedJobId);
        context.getOmsLogger().info("[fetchJob] copiedJobInfoResult2: {}", JSONObject.toJSONString(copiedJobInfoResult2));
        JobInfoDTO copiedJob2 = fetchResultData(copiedJobInfoResult2);
        assert copiedJob2.getStatus() == SwitchableStatus.ENABLE.getV();

        // Delete the copied job.
        ResultDTO<Void> deleteJobResult = client.deleteJob(copiedJobId);
        context.getOmsLogger().info("[deleteJob] deleteJobResult: {}", JSONObject.toJSONString(deleteJobResult));
        fetchResultData(deleteJobResult);

        // Run the created job.
        ResultDTO<Long> runJobResult = client.runJob(createdJobId, RUN_INSTANCE_PARAMS, 0);
        context.getOmsLogger().info("[runJob] runJobResult: {}", JSONObject.toJSONString(runJobResult));
        Long instanceId = fetchResultData(runJobResult);

        // Wait 10s; the instance should finish by then.
        Thread.sleep(10000);

        // Query instance detail and status.
        ResultDTO<InstanceInfoDTO> fetchInstanceInfoResult = client.fetchInstanceInfo(instanceId);
        context.getOmsLogger().info("[fetchInstanceInfo] fetchInstanceInfoResult: {}", JSONObject.toJSONString(fetchInstanceInfoResult));
        InstanceInfoDTO instanceInfoDTO = fetchResultData(fetchInstanceInfoResult);

        ResultDTO<Integer> fetchInstanceStatusResult = client.fetchInstanceStatus(instanceId);
        context.getOmsLogger().info("[fetchInstanceStatus] fetchInstanceStatusResult: {}", JSONObject.toJSONString(fetchInstanceStatusResult));
        Integer instanceStatus = fetchResultData(fetchInstanceStatusResult);
        // NOTE(review): if getStatus() returns Integer this is a reference compare — confirm it unboxes to int.
        assert instanceInfoDTO.getStatus() == instanceStatus;

        // Clean up all created resources.
        ResultDTO<Void> deleteCreatedJobResult = client.deleteJob(createdJobId);
        context.getOmsLogger().info("[deleteJob] deleteCreatedJobResult: {}", JSONObject.toJSONString(deleteCreatedJobResult));
        fetchResultData(deleteCreatedJobResult);

        return new ProcessResult(true);
    }

    /**
     * Unwraps a ResultDTO, throwing when the server reported failure.
     *
     * @throws RuntimeException carrying the server-side message on failure
     */
    private static <T> T fetchResultData(ResultDTO<T> resultDTO) {
        if (resultDTO.isSuccess()) {
            return resultDTO.getData();
        }
        throw new RuntimeException(resultDTO.getMessage());
    }

    /** Builds the job-creation request used throughout the test. */
    private SaveJobInfoRequest buildSaveJobInfoRequest() {
        SaveJobInfoRequest newJobInfo = new SaveJobInfoRequest();
        newJobInfo.setJobName("JobCreateByOpenAPI");
        newJobInfo.setJobDescription("Timestamp: " + System.currentTimeMillis());
        newJobInfo.setJobParams(NEW_JOB_PARAMS);
        newJobInfo.setTimeExpressionType(TimeExpressionType.API);
        newJobInfo.setExecuteType(ExecuteType.STANDALONE);
        newJobInfo.setProcessorType(ProcessorType.BUILT_IN);
        newJobInfo.setProcessorInfo("tech.powerjob.samples.processors.StandaloneProcessorDemo");

        newJobInfo.setMaxInstanceNum(NEW_JOB_MAX_INSTANCE_NUM);

        newJobInfo.setMinCpuCores(0.01);
        newJobInfo.setMinMemorySpace(0.02);
        newJobInfo.setMinDiskSpace(0.03);

        return newJobInfo;
    }

    /**
     * Returns a cached PowerJob client built from the task params
     * (JSON Config: appName / password / addressList), falling back to local defaults.
     */
    private IPowerJobClient fetchClient(TaskContext context) {
        String params = CommonUtils.parseParams(context);
        Config clientConfig = Optional.ofNullable(params).map(x -> JSONObject.parseObject(params, Config.class)).orElse(new Config());

        String appName = Optional.ofNullable(clientConfig.getAppName()).orElse("powerjob-worker-samples");
        String password = Optional.ofNullable(clientConfig.getPassword()).orElse("powerjob123");
        List<String> addressList = Optional.ofNullable(clientConfig.getAddressList()).orElse(Lists.newArrayList("127.0.0.1:7700", "127.0.0.1:7701"));

        String key = String.format("client_%s_%s_%s", appName, password, addressList);
        return clientCache.computeIfAbsent(key, ignore -> initPowerJobClient(appName, password, addressList));
    }

    /** Creates a new client; called once per distinct configuration by the cache. */
    private IPowerJobClient initPowerJobClient(String appName, String password, List<String> addressList) {
        ClientConfig config = new ClientConfig();
        config.setAppName(appName);
        config.setPassword(password);
        config.setAddressList(addressList);
        log.info("[OpenApiTester] initPowerJobClient, config: {}", config);
        return new PowerJobClient(config);
    }

    /** Optional client configuration parsed from the task params. */
    @Data
    public static class Config implements Serializable {
        private String appName;
        private String password;
        private List<String> addressList;
    }
}

View File

@ -0,0 +1,45 @@
package tech.powerjob.samples.tester;
import org.springframework.stereotype.Component;
import tech.powerjob.samples.anno.ATestMethodAnnotation;
import tech.powerjob.worker.annotation.PowerJobHandler;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.log.OmsLogger;
@Component(value = "springMethodProcessorService")
public class SpringMethodProcessorService {

    /**
     * Processor config option 1: fully-qualified class name + "#" + method name,
     * e.g. tech.powerjob.samples.tester.SpringMethodProcessorService#testEmptyReturn
     * Processor config option 2: Spring bean name + "#" + method name,
     * e.g. springMethodProcessorService#testEmptyReturn
     *
     * @param context mandatory TaskContext parameter; the return value may be null or any
     *                other type. Returning normally means success, throwing means failure.
     */
    @PowerJobHandler(name = "testEmptyReturn")
    public void testEmptyReturn(TaskContext context) {
        context.getOmsLogger().warn("测试日志");
    }

    /** Handler that returns a plain String result. */
    @PowerJobHandler(name = "testNormalReturn")
    public String testNormalReturn(TaskContext context) {
        context.getOmsLogger().warn("测试日志");
        return "testNormalReturn";
    }

    /** Handler that always fails by throwing. */
    @PowerJobHandler(name = "testThrowException")
    public String testThrowException(TaskContext context) {
        context.getOmsLogger().warn("testThrowException");
        throw new IllegalArgumentException("test");
    }

    /** Handler additionally carrying a custom annotation. */
    @ATestMethodAnnotation
    @PowerJobHandler(name = "testNormalReturnWithCustomAnno")
    public String testNormalReturnWithCustomAnno(TaskContext context) {
        context.getOmsLogger().warn("测试自定义注解");
        return "testNormalReturnWithCustomAnno";
    }
}

View File

@ -0,0 +1,27 @@
package tech.powerjob.samples.tester;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
import org.springframework.stereotype.Component;
/**
* 测试用户反馈的无法停止实例的问题 (可中断)
* https://github.com/PowerJob/PowerJob/issues/37
*
* @author tjq
* @since 2020/7/30
*/
@Component
@SuppressWarnings("all")
public class StopInstanceTester implements BasicProcessor {

    /**
     * Loops forever, printing a counter every 10 seconds.
     * Thread.sleep keeps the loop interruptible, so the instance
     * can be stopped from the console.
     */
    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        for (int counter = 0; ; counter++) {
            System.out.println(counter);
            // interruptable
            Thread.sleep(10000L);
        }
    }
}

View File

@ -0,0 +1,30 @@
package tech.powerjob.samples.tester;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
/**
* 停止实例 (不可中断)
*
* @author Echo009
* @since 2023/1/15
*/
@Component
@Slf4j
@SuppressWarnings("all")
public class StopInstanceUninterruptibleTester implements BasicProcessor {

    /**
     * Busy-loops forever without ever sleeping or checking the interrupt flag,
     * logging roughly once per billion iterations.
     */
    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        // uninterruptible
        for (int counter = 1; ; counter++) {
            if (counter % 1000000000 == 0) {
                log.info("taskInstance({}) is running ...",context.getInstanceId());
            }
        }
    }
}

View File

@ -0,0 +1,22 @@
package tech.powerjob.samples.tester;
import org.springframework.stereotype.Component;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
/**
* 测试直接使用 BeanName 获取处理器
* 控制台可填写 powerJobTestBeanNameProcessor 作为处理器信息
*
* @author tjq
* @since 2023/3/5
*/
@Component(value = "powerJobTestBeanNameProcessor")
public class TestFindByBeanNameProcessor implements BasicProcessor {

    /** Prints a marker and succeeds; used to verify bean-name-based processor lookup. */
    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        final String marker = "======== IN =======";
        System.out.println(marker);
        return new ProcessResult(true, "Welcome to use PowerJob~");
    }
}

View File

@ -0,0 +1,36 @@
package tech.powerjob.samples.workflow;
import com.alibaba.fastjson.JSON;
import lombok.extern.slf4j.Slf4j;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
import tech.powerjob.worker.log.OmsLogger;
import org.springframework.stereotype.Component;
import java.util.Map;
/**
* 工作流测试
*
* @author tjq
* @since 2020/6/2
*/
@Component
@Slf4j
public class WorkflowStandaloneProcessor implements BasicProcessor {

    /**
     * Logs the job params and the workflow context (to both the online OMS
     * logger and the local log), then reports success.
     */
    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        final OmsLogger omsLogger = context.getOmsLogger();
        omsLogger.info("current jobParams: {}", context.getJobParams());
        omsLogger.info("current context: {}", context.getWorkflowContext());
        log.info("jobParams:{}", context.getJobParams());
        log.info("currentContext:{}", JSON.toJSONString(context));

        // Fetch the data appended by upstream workflow nodes.
        final Map<String, String> wfContext = context.getWorkflowContext().fetchWorkflowContext();
        log.info("工作流上下文数据:{}", wfContext);

        return new ProcessResult(true, context.getJobId() + " process successfully.");
    }
}

View File

@ -0,0 +1,21 @@
server.port=8081
spring.jpa.open-in-view=false
########### PowerJob-worker properties. ###########
# Whether to enable PowerJob Worker, default is true
powerjob.worker.enabled=true
# Whether to allow the worker to start without verifying the server connection (lazy connect / test mode). false (default) enforces the server check; set to true to skip it.
powerjob.worker.allow-lazy-connect-server=false
# Transport port, default is 27777
powerjob.worker.port=27777
# Application name, used for grouping applications. Recommend to set the same value as project name.
powerjob.worker.app-name=powerjob-worker-samples
# Address of PowerJob-server node(s). Ip:port or domain. Multiple addresses should be separated with comma.
powerjob.worker.server-address=127.0.0.1:7700,127.0.0.1:7701
# transport protocol between server and worker
powerjob.worker.protocol=http
# Store strategy of H2 database. disk or memory. Default value is disk.
powerjob.worker.store-strategy=disk
# Max length of result. Results that are longer than the value will be truncated.
powerjob.worker.max-result-length=4096
# Max length of appended workflow context . Appended workflow context value that is longer than the value will be ignore.
powerjob.worker.max-appended-wf-context-length=4096

View File

@ -0,0 +1,72 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<!-- 不同 worker 区分不同日志 参考参数: spring.profiles.active=local log.name=worker1 -->
<springProperty name="LOG_NAME" source="log.name" defaultValue="INFO"/>
<property name="LOG_PATH" value="${user.home}/powerjob-worker-samples/${LOG_NAME}/logs"/>
<!-- %m输出的信息,%p日志级别,%t线程名,%d日期,%c类的全名,%i索引【从数字0开始递增】,,, -->
<!-- appender是configuration的子节点是负责写日志的组件。 -->
<!-- ConsoleAppender把日志输出到控制台 -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<!-- <pattern>%red(%d{yyyy-MM-dd HH:mm:ss}) %highlight(%-5level) %green([%thread]) - %cyan(%msg%n)</pattern>-->
<pattern>%red(%d{yyyy-MM-dd HH:mm:ss}) %highlight(%-5level) - %cyan(%msg%n)</pattern>
<!-- 控制台也要使用UTF-8不要使用GBK否则会中文乱码 -->
<charset>UTF-8</charset>
</encoder>
</appender>
<springProfile name="default">
<!-- 默认环境时激活,全部输出到控制台 -->
<logger name="com.zaxxer.hikari" level="INFO">
<appender-ref ref="STDOUT"/>
</logger>
<logger name="tech.powerjob" level="DEBUG" additivity="false">
<appender-ref ref="STDOUT"/>
</logger>
<!-- 控制台输出日志级别 -->
<root level="INFO">
<appender-ref ref="STDOUT"/>
</root>
</springProfile>
<springProfile name="local">
<!-- 传入 local 的时候,全部使用本地日志 -->
<appender name="APPLICATION_APPENDER" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_PATH}/application.log</file>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- 每天滚动日志文件 -->
<fileNamePattern>${LOG_PATH}/application.log.%d{yyyy-MM-dd}.%i</fileNamePattern>
                <!-- 单个日志文件大小上限为500MB -->
                <maxFileSize>500MB</maxFileSize>
                <!-- 设置最大历史记录为7天 -->
                <maxHistory>7</maxHistory>
<!-- 设置总大小限制 -->
<totalSizeCap>3GB</totalSizeCap>
</rollingPolicy>
</appender>
<!-- 异步输出 -->
<appender name="ASYNC_APPLICATION_APPENDER" class="ch.qos.logback.classic.AsyncAppender">
<!-- 不丢失日志,默认的,如果队列的 80% 已满则会丢弃TRACT、DEBUG、INFO级别的日志 -->
<discardingThreshold>0</discardingThreshold>
<!-- 更改默认的队列的深度该值会影响性能默认值为256 -->
<queueSize>256</queueSize>
<!-- 添加附加的appender最多只能添加一个 -->
<appender-ref ref="APPLICATION_APPENDER"/>
</appender>
<!-- 控制台输出日志级别 -->
<root level="INFO">
<appender-ref ref="ASYNC_APPLICATION_APPENDER"/>
</root>
</springProfile>
</configuration>