---初始化项目

This commit is contained in:
2025-09-19 16:14:08 +08:00
parent 902d3d7e3b
commit afee7c03ac
767 changed files with 75809 additions and 82 deletions

121
.gitignore vendored
View File

@ -1,88 +1,49 @@
# ---> Java
# Compiled class file
*.class
HELP.md
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**
!**/src/test/**
# Log file
*.log
# BlueJ files
*.ctxt
# Mobile Tools for Java (J2ME)
.mtj.tmp/
# Package Files #
*.jar
*.war
*.nar
*.ear
*.zip
*.tar.gz
*.rar
# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
hs_err_pid*
replay_pid*
# ---> Eclipse
.metadata
bin/
tmp/
*.tmp
*.bak
*.swp
*~.nib
local.properties
.settings/
.loadpath
.recommenders
# External tool builders
.externalToolBuilders/
# Locally stored "Eclipse launch configurations"
*.launch
# PyDev specific (Python IDE for Eclipse)
*.pydevproject
# CDT-specific (C/C++ Development Tooling)
.cproject
# CDT- autotools
.autotools
# Java annotation processor (APT)
### STS ###
.apt_generated
.classpath
.factorypath
# PDT-specific (PHP Development Tools)
.buildpath
# sbteclipse plugin
.target
# Tern plugin
.tern-project
# TeXlipse plugin
.texlipse
# STS (Spring Tool Suite)
.project
.settings
.springBeans
.sts4-cache
# Code Recommenders
.recommenders/
### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr
# Annotation Processing
.apt_generated/
.apt_generated_test/
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
# Scala IDE specific (Scala & Java development for Eclipse)
.cache-main
.scala_dependencies
.worksheet
### VS Code ###
.vscode/
# Uncomment this line if you wish to ignore the project description file.
# Typically, this file would be tracked if it contains build/dependency configurations:
#.project
### others ###
*.jar
*.log
*/.DS_Store
.DS_Store
.phd
.txt
.trc
*/.phd
*/.txt
*/.trc
powerjob-data/
powerjob-server/powerjob-server-starter/src/main/resources/application-dev.properties
.claude/**
CLAUDE.md

201
LICENSE Normal file
View File

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [2021] [PowerJob]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -1,3 +1,64 @@
# powerjob-kingbase
# 简体中文
PowerJob原OhMyScheduler是全新一代分布式调度与计算框架能让您轻松完成作业的调度与繁杂任务的分布式计算。
### 添加kingbase数据库支持
powerjob扩展kingbase支持
# 简介
### 主要特性
* 使用简单提供前端Web界面允许开发者可视化地完成调度任务的管理增、删、改、查、任务运行状态监控和运行日志查看等功能。
* 定时策略完善支持CRON表达式、固定频率、固定延迟和API四种定时调度策略。
* 执行模式丰富支持单机、广播、Map、MapReduce四种执行模式其中Map/MapReduce处理器能使开发者寥寥数行代码便获得集群分布式计算的能力。
* DAG工作流支持支持在线配置任务依赖关系可视化地对任务进行编排同时还支持上下游任务间的数据传递
* 执行器支持广泛支持Spring Bean、内置/外置Java类、Shell、Python等处理器应用范围广。
* 运维便捷支持在线日志功能执行器产生的日志可以在前端控制台页面实时显示降低debug成本极大地提高开发效率。
* 依赖精简最小仅依赖关系型数据库MySQL/Oracle/MS SQLServer...)。
* 高可用&高性能:调度服务器经过精心设计,一改其他调度框架基于数据库锁的策略,实现了无锁化调度。部署多个调度服务器可以同时实现高可用和性能的提升(支持无限的水平扩展)。
* 故障转移与恢复:任务执行失败后,可根据配置的重试策略完成重试,只要执行器集群有足够的计算节点,任务就能顺利完成。
### 适用场景
* 有定时执行需求的业务场景:如每天凌晨全量同步数据、生成业务报表等。
* 有需要全部机器一同执行的业务场景:如使用广播执行模式清理集群日志。
* 有需要分布式处理的业务场景比如需要更新一大批数据单机执行耗时非常长可以使用Map/MapReduce处理器完成任务的分发调动整个集群加速计算。
* 有需要**延迟执行**某些任务的业务场景:比如订单过期处理等。
### 设计目标
PowerJob 的设计目标为企业级的分布式任务调度平台,即成为公司内部的**任务调度中间件**。整个公司统一部署调度中心 powerjob-server旗下所有业务线应用只需要依赖 `powerjob-worker` 即可接入调度中心获取任务调度与分布式计算能力。
### 在线试用
* [点击查看试用说明和教程](https://www.yuque.com/powerjob/guidence/trial)
### 同类产品对比
| | QuartZ | xxl-job | SchedulerX 2.0 | PowerJob |
| -------------- | ------------------------ | ---------------------------------------- | ------------------------------------------------- | ------------------------------------------------------------ |
| 定时类型 | CRON | CRON | CRON、固定频率、固定延迟、OpenAPI | **CRON、固定频率、固定延迟、OpenAPI** |
| 任务类型 | 内置Java | 内置Java、GLUE Java、Shell、Python等脚本 | 内置Java、外置JavaFatJar、Shell、Python等脚本 | **内置Java、外置Java容器、Shell、Python等脚本** |
| 分布式计算 | 无 | 静态分片 | MapReduce动态分片 | **MapReduce动态分片** |
| 在线任务治理 | 不支持 | 支持 | 支持 | **支持** |
| 日志白屏化 | 不支持 | 支持 | 不支持 | **支持** |
| 调度方式及性能 | 基于数据库锁,有性能瓶颈 | 基于数据库锁,有性能瓶颈 | 不详 | **无锁化设计,性能强劲无上限** |
| 报警监控 | 无 | 邮件 | 短信 | **WebHook、邮件、钉钉与自定义扩展** |
| 系统依赖 | JDBC支持的关系型数据库MySQL、Oracle... | MySQL | 人民币 | **任意Spring Data Jpa支持的关系型数据库MySQL、Oracle...** |
| DAG工作流 | 不支持 | 不支持 | 支持 | **支持** |
# 官方文档
**[中文文档](https://www.yuque.com/powerjob/guidence/intro)**
**[Docs](https://www.yuque.com/powerjob/en/introduce)**
# 接入登记
[点击进行接入登记,为 PowerJob 的发展贡献自己的力量!](https://github.com/PowerJob/PowerJob/issues/6)
ღ( ´・ᴗ・\` )ღ 感谢以下接入用户的大力支持 ღ( ´・ᴗ・\` )ღ
<p align="center">
<img src="https://raw.githubusercontent.com/KFCFans/PowerJob/master/others/images/user.png" alt="PowerJob User" title="PowerJob User"/>
</p>
# 其他
* 开源许可证Apache License, Version 2.0
* 欢迎共同参与本项目的贡献PR和Issue都大大滴欢迎求求了
* 觉得还不错的话可以点个Star支持一下哦 = ̄ω ̄=
* 联系方式@KFCFans -> `tengjiqi@gmail.com`
* 用户交流QQ群因广告信息泛滥加群需要验证请认真填写申请原因
* 一群已满487453839
* 二群834937813

4
SECURITY.md Normal file
View File

@ -0,0 +1,4 @@
# Security notices relating to PowerJob
Please disclose any security issues or vulnerabilities found through [Tidelift's coordinated disclosure system](https://tidelift.com/security) or to the maintainers privately (tengjiqi@gmail.com).

55
docker-compose.yml Normal file
View File

@ -0,0 +1,55 @@
# 使用说明 V4.3.1
# 1. PowerJob 根目录执行docker-compose up
# 2. 静静等待服务启动。
version: '3'
services:
powerjob-mysql:
environment:
MYSQL_ROOT_HOST: "%"
MYSQL_ROOT_PASSWORD: No1Bug2Please3!
restart: always
container_name: powerjob-mysql
image: powerjob/powerjob-mysql:latest
ports:
- "3307:3306"
volumes:
- ./powerjob-data/powerjob-mysql:/var/lib/mysql
command: --lower_case_table_names=1
powerjob-server:
container_name: powerjob-server
image: powerjob/powerjob-server:latest
restart: always
depends_on:
- powerjob-mysql
environment:
JVMOPTIONS: "-Xmx512m"
PARAMS: "--oms.mongodb.enable=false --spring.datasource.core.jdbc-url=jdbc:mysql://powerjob-mysql:3306/powerjob-daily?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai"
ports:
- "7700:7700"
- "10086:10086"
- "10010:10010"
- "10077:10077"
volumes:
- ./powerjob-data/powerjob-server:/root/powerjob/server/
powerjob-worker-samples:
container_name: powerjob-worker-samples
image: powerjob/powerjob-worker-samples:latest
restart: always
depends_on:
- powerjob-mysql
- powerjob-server
# environment:
# PARAMS: "--powerjob.worker.server-address=powerjob-server:7700"
ports:
- "8081:8081"
- "27777:27777"
volumes:
- ./powerjob-data/powerjob-worker-samples:/root/powerjob/worker
- ./others/script/wait-for-it.sh:/wait-for-it.sh
entrypoint:
- "sh"
- "-c"
- "chmod +x wait-for-it.sh && ./wait-for-it.sh powerjob-server:7700 --strict -- java -Xmx512m -jar /powerjob-worker-samples.jar --powerjob.worker.server-address=powerjob-server:7700"

11
others/Dockerfile Normal file
View File

@ -0,0 +1,11 @@
# MySQL image pre-loaded with the PowerJob schema and worker-samples data.
FROM mysql/mysql-server:8.0.30
# MAINTAINER has been deprecated since Docker 1.13; LABEL is the supported form.
LABEL maintainer="dudiao(idudaio@163.com)"
# Default the container timezone to Asia/Shanghai.
ENV TZ=Asia/Shanghai
RUN ln -sf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
# SQL files placed in /docker-entrypoint-initdb.d are executed automatically
# (in lexical order, hence the 01/02 prefixes) on first container startup.
COPY sql/01schema.sql /docker-entrypoint-initdb.d
COPY sql/02worker-samples.sql /docker-entrypoint-initdb.d

View File

@ -0,0 +1,21 @@
#!/bin/bash
# Rebuild the PowerJob local test environment from scratch:
# stop everything, wipe old data, rebuild the jars, then bring the stack up.
echo "================== 关闭全部服务 =================="
docker-compose down
echo "================== 删除历史数据 =================="
rm -rf ~/powerjob-data/
echo "================== 构建 jar =================="
# Quoted "$(dirname "$0")" (instead of unquoted backticks) so the script also
# works when its path contains spaces; bail out if the cd fails.
cd "$(dirname "$0")/../.." || exit
# mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am
# -U: force snapshot-repo check; -pl: build only the listed modules (comma separated);
# -am: also build modules they depend on (usually paired with -pl); -Pxxx: activate a profile
mvn clean package -Pdev -DskipTests
echo "================== 拷贝 jar =================="
/bin/cp -rf powerjob-server/powerjob-server-starter/target/*.jar powerjob-server/docker/powerjob-server.jar
/bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar
ls -l powerjob-server/docker/powerjob-server.jar
ls -l powerjob-worker-agent/powerjob-agent.jar
# Abort instead of running docker-compose from the wrong directory.
cd others/dev || exit
docker-compose build
docker-compose --compatibility -p powerjob_test_env up

View File

@ -0,0 +1,102 @@
# 构建 PowerJob 测试环境
version: '3.7'
services:
powerjob-mysql:
build:
context: ../
environment:
MYSQL_ROOT_HOST: "%"
MYSQL_ROOT_PASSWORD: No1Bug2Please3!
deploy:
resources:
limits:
memory: 1024M
restart: always
container_name: powerjob-mysql
image: powerjob/powerjob-mysql:test_env
ports:
- "3309:3306"
volumes:
- ~/powerjob-data/powerjob-mysql:/var/lib/mysql
command: --lower_case_table_names=1
# powerjob-mongodb:
# image: mongo:latest
# container_name: powerjob-mongodb
# restart: always
# deploy:
# resources:
# limits:
# memory: 256M
# environment:
# MONGO_INITDB_ROOT_USERNAME: "root"
# MONGO_INITDB_ROOT_PASSWORD: "No1Bug2Please3!"
# MONGO_INITDB_DATABASE: "powerjob_daily"
# ports:
# - "27017:27017"
# volumes:
# - ./testenv/init_mongodb.js:/docker-entrypoint-initdb.d/mongo-init.js:ro
# - ~/powerjob-data/powerjob-mongodb:/data/db
powerjob-server:
build:
context: ../../powerjob-server/docker
container_name: powerjob-server
image: powerjob/powerjob-server:test_env
restart: always
depends_on:
- powerjob-mysql
# - powerjob-mongodb
environment:
PARAMS: "--spring.profiles.active=daily --logging.config=classpath:logback-product.xml --spring.datasource.core.jdbc-url=jdbc:mysql://powerjob-mysql:3306/powerjob-daily?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai --oms.storage.dfs.mysql_series.url=jdbc:mysql://powerjob-mysql:3306/powerjob-daily?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai"
JVMOPTIONS: "-server -XX:+UseG1GC -Xms768m -Xmx768m -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=7 -XX:GCLogFileSize=100M -Xloggc:/root/powerjob/server/gc.log -Dpowerjob.sp-env=trial -Dpowerjob.server.test.user.accounts=powerjob"
ports:
- "7700:7700"
- "10086:10086"
- "10010:10010"
- "10077:10077"
volumes:
- ~/powerjob-data/powerjob-server:/root/powerjob/server/
- ~/.m2:/root/.m2
powerjob-worker-agent:
build:
context: ../../powerjob-worker-agent
container_name: powerjob-worker-agent
image: powerjob/powerjob-worker-agent:test_env
restart: always
depends_on:
- powerjob-mysql
- powerjob-server
ports:
- "5002:5005"
- "10002:10000"
- "27777:27777"
volumes:
- ~/powerjob-data/powerjob-worker-agent:/root
entrypoint:
- "sh"
- "-c"
- "./wait-for-it.sh powerjob-server:7700 --strict -- java -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -server -XX:+UseG1GC -Xms256m -Xmx256m -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=7 -XX:GCLogFileSize=100M -Xloggc:/root/gc.log -jar /powerjob-agent.jar --app powerjob-worker-samples --server powerjob-server:7700"
powerjob-worker-agent2:
container_name: powerjob-worker-agent2
image: powerjob/powerjob-worker-agent:test_env
restart: always
depends_on:
- powerjob-mysql
- powerjob-server
ports:
- "5003:5005"
- "10003:10000"
- "27778:27777"
volumes:
- ~/powerjob-data/powerjob-worker-agent2:/root
entrypoint:
- "sh"
- "-c"
- "./wait-for-it.sh powerjob-server:7700 --strict -- java -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -server -XX:+UseG1GC -Xms256m -Xmx256m -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=7 -XX:GCLogFileSize=100M -Xloggc:/root/gc.log -jar /powerjob-agent.jar --app powerjob-worker-samples --server powerjob-server:7700"
networks:
powerjob_test_env_network:
driver: bridge
name: powerjob_test_env_network

View File

@ -0,0 +1,124 @@
#!/bin/bash
# One-click Docker build & release script for PowerJob (server / agent / mysql /
# worker-samples images). Do not move this script: paths below are resolved
# relative to its own location.
# read -p: prompt text; -r: raw input (backslashes are not treated as escapes)
read -r -p "请输入Docker镜像版本:" version
echo "即将构建的 server 镜像powerjob-server:$version"
echo "即将构建的 agent 镜像powerjob-agent:$version"
read -r -p "任意键继续:"
# cd to the repository root; quoted so paths containing spaces still work.
cd "$(dirname "$0")/../.." || exit
read -r -p "是否进行maven构建y/n:" needmvn
if [ "$needmvn" = "y" ] || [ "$needmvn" = "Y" ]; then
    echo "================== 构建 jar =================="
    # mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am
    # -U: force snapshot-repo check; -pl: build only the listed modules (comma separated);
    # -am: also build modules they depend on (usually paired with -pl); -Pxxx: activate a profile
    mvn clean package -Pdev -DskipTests -U -e
    echo "================== 拷贝 jar =================="
    /bin/cp -rf powerjob-server/powerjob-server-starter/target/*.jar powerjob-server/docker/powerjob-server.jar
    /bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar
    ls -l powerjob-server/docker/powerjob-server.jar
    ls -l powerjob-worker-agent/powerjob-agent.jar
fi
echo "================== 关闭老应用 =================="
docker stop powerjob-server
docker stop powerjob-agent
docker stop powerjob-agent2
echo "================== 删除老容器 =================="
docker container rm powerjob-server
docker container rm powerjob-agent
docker container rm powerjob-agent2
read -r -p "是否重新构建镜像y/n:" rebuild
if [ "$rebuild" = "y" ] || [ "$rebuild" = "Y" ]; then
    echo "================== 删除旧镜像 =================="
    # "$version" is quoted everywhere below so an empty or space-containing
    # answer cannot be word-split into a malformed image reference.
    docker rmi -f "tjqq/powerjob-server:$version"
    docker rmi -f "powerjob/powerjob-server:$version"
    docker rmi -f "tjqq/powerjob-agent:$version"
    docker rmi -f "powerjob/powerjob-agent:$version"
    docker rmi -f "powerjob/powerjob-mysql:$version"
    docker rmi -f "powerjob/powerjob-worker-samples:$version"
    echo "================== 构建 powerjob-server 镜像 =================="
    docker build -t "tjqq/powerjob-server:$version" powerjob-server/docker/. || exit
    echo "================== 构建 powerjob-agent 镜像 =================="
    docker build -t "tjqq/powerjob-agent:$version" powerjob-worker-agent/. || exit
    echo "================== 构建 powerjob-mysql 镜像 =================="
    docker build -t "powerjob/powerjob-mysql:$version" others/. || exit
    echo "================== 构建 powerjob-worker-samples 镜像 =================="
    docker build -t "powerjob/powerjob-worker-samples:$version" powerjob-worker-samples/. || exit
    read -r -p "是否正式发布该镜像y/n:" needrelease
    if [ "$needrelease" = "y" ] || [ "$needrelease" = "Y" ]; then
        read -r -p "三思请确保当前处于已发布的Master分支y/n:" needrelease
        if [ "$needrelease" = "y" ] || [ "$needrelease" = "Y" ]; then
            echo "================== 正在推送 server 镜像到中央仓库 =================="
            docker push "tjqq/powerjob-server:$version"
            echo "================== 正在推送 agent 镜像到中央仓库 =================="
            docker push "tjqq/powerjob-agent:$version"
            echo "================== 正在推送 powerjob-mysql 镜像到中央仓库 =================="
            docker push "powerjob/powerjob-mysql:$version"
            echo "================== 正在推送 samples 镜像到中央仓库 =================="
            docker push "powerjob/powerjob-worker-samples:$version"
            echo "================== 双写推送 =================="
            # Dual-publish: retag the tjqq/* images under the powerjob/* namespace.
            docker tag "tjqq/powerjob-server:$version" "powerjob/powerjob-server:$version"
            docker push "powerjob/powerjob-server:$version"
            docker tag "tjqq/powerjob-agent:$version" "powerjob/powerjob-agent:$version"
            docker push "powerjob/powerjob-agent:$version"
            echo "================== 更新 LATEST 版本 =================="
            docker tag "powerjob/powerjob-server:$version" powerjob/powerjob-server:latest
            docker push powerjob/powerjob-server:latest
            docker tag "powerjob/powerjob-agent:$version" powerjob/powerjob-agent:latest
            docker push powerjob/powerjob-agent:latest
            docker tag "powerjob/powerjob-mysql:$version" powerjob/powerjob-mysql:latest
            docker push powerjob/powerjob-mysql:latest
            docker tag "powerjob/powerjob-worker-samples:$version" powerjob/powerjob-worker-samples:latest
            docker push powerjob/powerjob-worker-samples:latest
            echo "================== Docker 推送完毕 =================="
        fi
    fi
fi
read -r -p "是否启动 server & agenty/n:" startup
if [ "$startup" = "y" ] || [ "$startup" = "Y" ]; then
    # Start the applications (port mappings, data-path mounts):
    ## -d: run in the background
    ## -p: port mapping, host:container
    ## --name: container name
    ## -v/--volume: mount host_dir:container_dir (logs etc. land on the host)
    ## --net=host: share the host network stack (best performance, weakest isolation)
    echo "================== 准备启动 powerjob-server =================="
    docker run -d \
        --name powerjob-server \
        -p 7700:7700 -p 10086:10086 -p 10010:10010 -p 10077:10077 -p 5001:5005 -p 10001:10000 \
        -e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
        -e PARAMS="--spring.profiles.active=pre" \
        -e TZ="Asia/Shanghai" \
        -v ~/docker/powerjob-server:/root/powerjob-server -v ~/.m2:/root/.m2 \
        "tjqq/powerjob-server:$version"
    sleep 1
    # tail -f -n 1000 ~/docker/powerjob-server/logs/powerjob-server-application.log
    # Give the server time to finish booting before the agents try to register.
    sleep 30
    echo "================== 准备启动 powerjob-client =================="
    serverIP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' powerjob-server)
    serverAddress="$serverIP:7700"
    echo "使用的Server地址$serverAddress"
    docker run -d \
        --name powerjob-agent \
        -p 27777:27777 -p 5002:5005 -p 10002:10000 \
        -e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
        -e PARAMS="--app powerjob-agent-test --server $serverAddress" \
        -v ~/docker/powerjob-agent:/root \
        "tjqq/powerjob-agent:$version"
    docker run -d \
        --name powerjob-agent2 \
        -p 27778:27777 -p 5003:5005 -p 10003:10000 \
        -e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005" \
        -e PARAMS="--app powerjob-agent-test --server $serverAddress" \
        -v ~/docker/powerjob-agent2:/root \
        "tjqq/powerjob-agent:$version"
    tail -f -n 100 ~/docker/powerjob-agent/powerjob/logs/powerjob-agent-application.log
fi

View File

@ -0,0 +1,71 @@
#!/bin/bash
echo "A docker image release script for the Apple Silicon device."
# Multi-arch (amd64 + arm64) build & push via docker buildx. Do not move this
# script: paths below are resolved relative to its own location.
# read -p: prompt text; -r: raw input (backslashes are not treated as escapes)
read -r -p "请输入Docker镜像版本:" version
echo "即将构建的 server 镜像powerjob-server:$version"
echo "即将构建的 agent 镜像powerjob-agent:$version"
read -r -p "任意键继续:"
# cd to the repository root; quoted so paths containing spaces still work.
cd "$(dirname "$0")/../.." || exit
read -r -p "是否进行maven构建y/n:" needmvn
if [ "$needmvn" = "y" ] || [ "$needmvn" = "Y" ]; then
    echo "================== 构建 jar =================="
    # mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am
    # -U: force snapshot-repo check; -pl: build only the listed modules (comma separated);
    # -am: also build modules they depend on (usually paired with -pl); -Pxxx: activate a profile
    mvn clean package -Pdev -DskipTests -U -e
    echo "================== 拷贝 jar =================="
    /bin/cp -rf powerjob-server/powerjob-server-starter/target/*.jar powerjob-server/docker/powerjob-server.jar
    /bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar
    ls -l powerjob-server/docker/powerjob-server.jar
    ls -l powerjob-worker-agent/powerjob-agent.jar
fi
echo "================== 关闭老应用 =================="
docker stop powerjob-server
docker stop powerjob-agent
docker stop powerjob-agent2
echo "================== 删除老容器 =================="
docker container rm powerjob-server
docker container rm powerjob-agent
docker container rm powerjob-agent2
read -r -p "是否构建并发布镜像y/n:" rebuild
if [ "$rebuild" = "y" ] || [ "$rebuild" = "Y" ]; then
    echo "================== 删除旧镜像 =================="
    # "$version" is quoted everywhere below so an empty or space-containing
    # answer cannot be word-split into a malformed image reference.
    docker rmi -f "tjqq/powerjob-server:$version"
    docker rmi -f "powerjob/powerjob-server:$version"
    docker rmi -f "tjqq/powerjob-agent:$version"
    docker rmi -f "powerjob/powerjob-agent:$version"
    docker rmi -f "powerjob/powerjob-mysql:$version"
    docker rmi -f "powerjob/powerjob-worker-samples:$version"
    # buildx --push builds for both platforms and pushes in one step.
    echo "================== 构建 powerjob-server 镜像(tjqq) =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag "tjqq/powerjob-server:$version" powerjob-server/docker/. --push || exit
    echo "================== 构建 powerjob-server 镜像(powerjob) =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag "powerjob/powerjob-server:$version" powerjob-server/docker/. --push || exit
    echo "================== 构建 powerjob-agent 镜像(tjqq) =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag "tjqq/powerjob-agent:$version" powerjob-worker-agent/. --push || exit
    echo "================== 构建 powerjob-agent 镜像(powerjob) =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag "powerjob/powerjob-agent:$version" powerjob-worker-agent/. --push || exit
    echo "================== 构建 powerjob-mysql 镜像 =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag "powerjob/powerjob-mysql:$version" others/. --push || exit
    echo "================== 构建 powerjob-worker-samples 镜像 =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag "powerjob/powerjob-worker-samples:$version" powerjob-worker-samples/. --push || exit
fi
read -r -p "是否推送LATESTy/n:" push_latest
if [ "$push_latest" = "y" ] || [ "$push_latest" = "Y" ]; then
    echo "================== powerjob-server LATEST (tjqq) =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag tjqq/powerjob-server:latest powerjob-server/docker/. --push || exit
    echo "================== powerjob-server LATEST (powerjob) =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-server:latest powerjob-server/docker/. --push || exit
    echo "================== powerjob-agent LATEST (tjqq) =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag tjqq/powerjob-agent:latest powerjob-worker-agent/. --push || exit
    echo "================== powerjob-agent LATEST (powerjob) =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-agent:latest powerjob-worker-agent/. --push || exit
    echo "================== powerjob-mysql LATEST =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-mysql:latest others/. --push || exit
    echo "================== powerjob-worker-samples LATEST =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-worker-samples:latest powerjob-worker-samples/. --push || exit
fi

View File

@ -0,0 +1,12 @@
// MongoDB init script: create an application user with read/write access
// to the powerjob_daily database only.
// NOTE(review): default credentials are checked into the repository —
// change them before using this in any shared or production deployment.
db.createUser(
{
user: "zqq",
pwd: "No1Bug2Please3!",
roles: [
{
// readWrite on powerjob_daily only; no cluster-wide privileges.
role: "readWrite",
db: "powerjob_daily"
}
]
}
);

BIN
others/images/logo.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 114 KiB

BIN
others/images/user.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 209 KiB

332
others/powerjob-mysql.sql Normal file
View File

@ -0,0 +1,332 @@
/*
 NOTE: the official SQL is exported from one specific MySQL version (MySQL 8) only;
 it is not guaranteed to be compatible with other databases or other MySQL versions.
 This SQL is for reference only. If your database cannot use it, rely on
 SpringDataJPA's built-in schema generation: auto-create the tables against a
 development/test database first, then export the SQL yourself.
*/
/*
 Navicat Premium Data Transfer
 Source Server : Local@3306
 Source Server Type : MySQL
 Source Server Version : 80300 (8.3.0)
 Source Host : localhost:3306
 Source Schema : powerjob5g
 Target Server Type : MySQL
 Target Server Version : 80300 (8.3.0)
 File Encoding : 65001
 Date: 17/08/2025 21:58:30
*/
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
-- ----------------------------
-- Table structure for app_info
-- ----------------------------
-- Applications registered with PowerJob; app_name is unique.
DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_name` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`current_server` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`namespace_id` bigint DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`title` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for container_info
-- ----------------------------
-- Deployable containers, looked up by owning app_id.
DROP TABLE IF EXISTS `container_info`;
CREATE TABLE `container_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`container_name` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`last_deploy_time` datetime(6) DEFAULT NULL,
`source_info` varchar(255) DEFAULT NULL,
`source_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`version` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for instance_info
-- ----------------------------
-- One row per job-instance execution; indexed for job/app/instance status lookups
-- and for outer_key queries.
DROP TABLE IF EXISTS `instance_info`;
CREATE TABLE `instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`expected_trigger_time` bigint DEFAULT NULL,
`extend_value` varchar(255) DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_id` bigint DEFAULT NULL,
`instance_params` longtext,
`job_id` bigint DEFAULT NULL,
`job_params` longtext,
`last_report_time` bigint DEFAULT NULL,
`meta` varchar(255) DEFAULT NULL,
`outer_key` varchar(255) DEFAULT NULL,
`result` longtext,
`running_times` bigint DEFAULT NULL,
`status` int DEFAULT NULL,
`task_tracker_address` varchar(255) DEFAULT NULL,
`type` int DEFAULT NULL,
`wf_instance_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_instance_info` (`job_id`,`status`),
KEY `idx02_instance_info` (`app_id`,`status`),
KEY `idx03_instance_info` (`instance_id`,`status`),
KEY `idx04_instance_info_outer_key` (`outer_key`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for job_info
-- ----------------------------
-- Job definitions plus scheduling/dispatch configuration; the composite index
-- supports the scheduler's "due jobs per app" scan.
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`advanced_runtime_config` varchar(255) DEFAULT NULL,
`alarm_config` varchar(255) DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`concurrency` int DEFAULT NULL,
`designated_workers` varchar(255) DEFAULT NULL,
`dispatch_strategy` int DEFAULT NULL,
`dispatch_strategy_config` varchar(255) DEFAULT NULL,
`execute_type` int DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_retry_num` int DEFAULT NULL,
`instance_time_limit` bigint DEFAULT NULL,
`job_description` varchar(255) DEFAULT NULL,
`job_name` varchar(255) DEFAULT NULL,
`job_params` longtext,
`lifecycle` varchar(255) DEFAULT NULL,
`log_config` varchar(255) DEFAULT NULL,
`max_instance_num` int DEFAULT NULL,
`max_worker_count` int DEFAULT NULL,
`min_cpu_cores` double NOT NULL,
`min_disk_space` double NOT NULL,
`min_memory_space` double NOT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`processor_info` varchar(255) DEFAULT NULL,
`processor_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`tag` varchar(255) DEFAULT NULL,
`task_retry_num` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for namespace
-- ----------------------------
-- Namespaces that group applications; code is unique.
DROP TABLE IF EXISTS `namespace`;
CREATE TABLE `namespace` (
`id` bigint NOT NULL AUTO_INCREMENT,
`code` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`dept` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`token` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_namespace` (`code`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
-- Database-backed distributed lock records; lock_name is unique.
-- NOTE(review): column is spelled "ownerip" (no underscore) — presumably matches
-- the JPA entity field; verify before renaming.
DROP TABLE IF EXISTS `oms_lock`;
CREATE TABLE `oms_lock` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lock_name` varchar(255) DEFAULT NULL,
`max_lock_time` bigint DEFAULT NULL,
`ownerip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE=InnoDB AUTO_INCREMENT=6 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for pwjb_user_info
-- ----------------------------
-- Standalone username/password credential store; username is unique.
DROP TABLE IF EXISTS `pwjb_user_info`;
CREATE TABLE `pwjb_user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_username` (`username`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for server_info
-- ----------------------------
-- Known server nodes; ip is unique, gmt_modified is indexed
-- (presumably for liveness/expiry sweeps — verify against server code).
DROP TABLE IF EXISTS `server_info`;
CREATE TABLE `server_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`ip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_server_info` (`ip`),
KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for sundry
-- ----------------------------
-- Generic key/value records keyed by (pkey, skey).
DROP TABLE IF EXISTS `sundry`;
CREATE TABLE `sundry` (
`id` bigint NOT NULL AUTO_INCREMENT,
`content` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`pkey` varchar(255) DEFAULT NULL,
`skey` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_sundry` (`pkey`,`skey`)
) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_info
-- ----------------------------
-- Console user accounts; username is unique.
-- NOTE(review): `uidx02_user_info` on email is a non-unique KEY despite the
-- "uidx" prefix — confirm whether email uniqueness is intended.
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`account_type` varchar(255) DEFAULT NULL,
`email` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`nick` varchar(255) DEFAULT NULL,
`origin_username` varchar(255) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`phone` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`token_login_verify_info` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
`web_hook` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_user_name` (`username`),
KEY `uidx02_user_info` (`email`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_role
-- ----------------------------
-- User-to-role grants (role/scope/target triples per user).
-- NOTE(review): `uidx01_user_id` is a non-unique KEY despite the "uidx" prefix.
DROP TABLE IF EXISTS `user_role`;
CREATE TABLE `user_role` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`role` int DEFAULT NULL,
`scope` int DEFAULT NULL,
`target` bigint DEFAULT NULL,
`user_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `uidx01_user_id` (`user_id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
-- Workflow (DAG) definitions; index mirrors the job_info scheduler scan.
DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lifecycle` varchar(255) DEFAULT NULL,
`max_wf_instance_num` int DEFAULT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`pedag` longtext,
`status` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
`wf_description` varchar(255) DEFAULT NULL,
`wf_name` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
-- Workflow executions; wf_instance_id is unique.
DROP TABLE IF EXISTS `workflow_instance_info`;
CREATE TABLE `workflow_instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`dag` longtext,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`parent_wf_instance_id` bigint DEFAULT NULL,
`result` longtext,
`status` int DEFAULT NULL,
`wf_context` longtext,
`wf_init_params` longtext,
`wf_instance_id` bigint DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
-- Nodes belonging to a workflow DAG, fetched by (workflow_id, gmt_create).
DROP TABLE IF EXISTS `workflow_node_info`;
CREATE TABLE `workflow_node_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint NOT NULL,
`enable` bit(1) NOT NULL,
`extra` longtext,
`gmt_create` datetime(6) NOT NULL,
`gmt_modified` datetime(6) NOT NULL,
`job_id` bigint DEFAULT NULL,
`node_name` varchar(255) DEFAULT NULL,
`node_params` longtext,
`skip_when_failed` bit(1) NOT NULL,
`type` int DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
SET FOREIGN_KEY_CHECKS = 1;

View File

@ -0,0 +1,54 @@
#!/bin/bash
# Local end-to-end deployment: build the server jar, rebuild the server and
# sample-worker images, then start one powerjob-server container and two
# sample-worker containers wired to it.
# FIX: use "$(dirname "$0")" with proper quoting instead of unquoted backticks,
# so the script also works when invoked from a path containing spaces
# (ShellCheck SC2006/SC2046).
cd "$(dirname "$0")/../.." || exit
echo "================== 构建 jar =================="
mvn clean package -Pdev -DskipTests -e
echo "================== 拷贝 jar =================="
/bin/cp -rf powerjob-server/powerjob-server-starter/target/*.jar powerjob-server/docker/powerjob-server.jar
echo "================== 关闭老应用 =================="
# Stop/remove steps may fail on a clean machine; the script deliberately continues.
docker stop powerjob-server
docker stop powerjob-worker-samples
docker stop powerjob-worker-samples2
echo "================== 删除老容器 =================="
docker container rm powerjob-server
docker container rm powerjob-worker-samples
docker container rm powerjob-worker-samples2
echo "================== 删除旧镜像 =================="
docker rmi -f tjqq/powerjob-server:latest
docker rmi -f tjqq/powerjob-worker-samples:latest
echo "================== 构建 powerjob-server 镜像 =================="
docker build -t tjqq/powerjob-server:latest powerjob-server/docker/. || exit
echo "================== 构建 powerjob-worker-samples 镜像 =================="
docker build -t tjqq/powerjob-worker-samples:latest powerjob-worker-samples/. || exit
echo "================== 准备启动 powerjob-server =================="
# Exposes: 7700 web/API, 10086 worker transport, 5001->5005 remote debug, 10001->10000 JMX.
docker run -d \
--restart=always \
--name powerjob-server \
-p 7700:7700 -p 10086:10086 -p 5001:5005 -p 10001:10000 \
-e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
-e PARAMS="--oms.swagger.enable=true --spring.profiles.active=product --spring.datasource.core.jdbc-url=jdbc:mysql://remotehost:3306/powerjob-product?useUnicode=true&characterEncoding=UTF-8 --oms.mongodb.enable=false --spring.data.mongodb.uri=mongodb://remotehost:27017/powerjob-product" \
-v ~/docker/powerjob-server:/root/powerjob/server -v ~/.m2:/root/.m2 \
tjqq/powerjob-server:latest
# Give the server time to come up before the workers try to register.
sleep 60
echo "================== 准备启动 powerjob-agent =================="
# Resolve the server container's bridge-network IP so the workers can reach it.
serverIP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' powerjob-server)
serverAddress="$serverIP:7700"
echo "使用的Server地址$serverAddress"
docker run -d \
--restart=always \
--name powerjob-worker-samples \
-p 27777:27777 -p 5002:5005 -p 10002:10000 \
-e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
-e PARAMS="--powerjob.worker.server-address=$serverAddress" \
-v ~/docker/powerjob-worker-samples:/root \
tjqq/powerjob-worker-samples:latest
# Second worker instance on shifted host ports (27778 / 5003 / 10003).
docker run -d \
--restart=always \
--name powerjob-worker-samples2 \
-p 27778:27777 -p 5003:5005 -p 10003:10000 \
-e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
-e PARAMS="--powerjob.worker.server-address=$serverAddress" \
-v ~/docker/powerjob-worker-samples2:/root \
tjqq/powerjob-worker-samples:latest

View File

@ -0,0 +1,182 @@
#!/usr/bin/env bash
# Use this script to test if a given TCP host/port are available
# Typical use: block a container entrypoint until a dependency (DB, server) accepts
# TCP connections, optionally executing a follow-up command afterwards.
WAITFORIT_cmdname=${0##*/}
# Print args to stderr unless --quiet was requested.
echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
usage()
{
cat << USAGE >&2
Usage:
$WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
-h HOST | --host=HOST Host or IP under test
-p PORT | --port=PORT TCP port under test
Alternatively, you specify the host and port as host:port
-s | --strict Only execute subcommand if the test succeeds
-q | --quiet Don't output any status messages
-t TIMEOUT | --timeout=TIMEOUT
Timeout in seconds, zero for no timeout
-- COMMAND ARGS Execute command with args after the test finishes
USAGE
exit 1
}
# Poll the target once per second until it accepts a TCP connection.
# Uses nc under busybox, otherwise bash's /dev/tcp pseudo-device.
wait_for()
{
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
else
echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
fi
WAITFORIT_start_ts=$(date +%s)
while :
do
if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
nc -z $WAITFORIT_HOST $WAITFORIT_PORT
WAITFORIT_result=$?
else
(echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
WAITFORIT_result=$?
fi
if [[ $WAITFORIT_result -eq 0 ]]; then
WAITFORIT_end_ts=$(date +%s)
echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
break
fi
sleep 1
done
return $WAITFORIT_result
}
# Enforce the timeout by re-invoking this script as a --child under timeout(1).
wait_for_wrapper()
{
# In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
if [[ $WAITFORIT_QUIET -eq 1 ]]; then
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
else
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
fi
WAITFORIT_PID=$!
# Forward Ctrl-C to the whole child process group.
trap "kill -INT -$WAITFORIT_PID" INT
wait $WAITFORIT_PID
WAITFORIT_RESULT=$?
if [[ $WAITFORIT_RESULT -ne 0 ]]; then
echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
fi
return $WAITFORIT_RESULT
}
# process arguments
while [[ $# -gt 0 ]]
do
case "$1" in
*:* )
# Split "host:port" on the colon into a two-element array.
WAITFORIT_hostport=(${1//:/ })
WAITFORIT_HOST=${WAITFORIT_hostport[0]}
WAITFORIT_PORT=${WAITFORIT_hostport[1]}
shift 1
;;
--child)
WAITFORIT_CHILD=1
shift 1
;;
-q | --quiet)
WAITFORIT_QUIET=1
shift 1
;;
-s | --strict)
WAITFORIT_STRICT=1
shift 1
;;
-h)
WAITFORIT_HOST="$2"
if [[ $WAITFORIT_HOST == "" ]]; then break; fi
shift 2
;;
--host=*)
WAITFORIT_HOST="${1#*=}"
shift 1
;;
-p)
WAITFORIT_PORT="$2"
if [[ $WAITFORIT_PORT == "" ]]; then break; fi
shift 2
;;
--port=*)
WAITFORIT_PORT="${1#*=}"
shift 1
;;
-t)
WAITFORIT_TIMEOUT="$2"
if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
shift 2
;;
--timeout=*)
WAITFORIT_TIMEOUT="${1#*=}"
shift 1
;;
--)
# Everything after "--" is the command to exec once the port is up.
shift
WAITFORIT_CLI=("$@")
break
;;
--help)
usage
;;
*)
echoerr "Unknown argument: $1"
usage
;;
esac
done
if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
echoerr "Error: you need to provide a host and port to test."
usage
fi
# Defaults: 15s timeout, non-strict, parent process, verbose.
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
# Check to see if timeout is from busybox?
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
WAITFORIT_BUSYTIMEFLAG=""
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
WAITFORIT_ISBUSY=1
# Check if busybox timeout uses -t flag
# (recent Alpine versions don't support -t anymore)
if timeout &>/dev/stdout | grep -q -e '-t '; then
WAITFORIT_BUSYTIMEFLAG="-t"
fi
else
WAITFORIT_ISBUSY=0
fi
# --child runs the bare poll loop; the parent wraps it in timeout(1) when a
# timeout is set, or polls forever when timeout is zero.
if [[ $WAITFORIT_CHILD -gt 0 ]]; then
wait_for
WAITFORIT_RESULT=$?
exit $WAITFORIT_RESULT
else
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
wait_for_wrapper
WAITFORIT_RESULT=$?
else
wait_for
WAITFORIT_RESULT=$?
fi
fi
# If a follow-up command was given, exec it (unless strict mode and the wait failed).
if [[ $WAITFORIT_CLI != "" ]]; then
if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
exit $WAITFORIT_RESULT
fi
exec "${WAITFORIT_CLI[@]}"
else
exit $WAITFORIT_RESULT
fi

2
others/sql/01schema.sql Normal file
View File

@ -0,0 +1,2 @@
-- powerjob: create the daily database.
-- FIX: add IF NOT EXISTS so re-running the init scripts is idempotent,
-- matching the CREATE TABLE IF NOT EXISTS / insert-if-absent style of the
-- companion app_info bootstrap script.
create database if not exists `powerjob-daily` default character set utf8mb4 collate utf8mb4_general_ci;

View File

@ -0,0 +1,21 @@
-- Bootstrap script: create the app_info table inside the powerjob-daily database
-- (created by 01schema.sql) and register the default sample application.
-- FIX: a hyphenated identifier must be backtick-quoted — the unquoted form
-- "USE powerjob-daily;" is a MySQL syntax error (01schema.sql already quotes it).
USE `powerjob-daily`;
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
CREATE TABLE IF NOT EXISTS `app_info` (
`id` bigint NOT NULL AUTO_INCREMENT COMMENT '应用ID',
`app_name` varchar(128) not NULL COMMENT '应用名称',
`current_server` varchar(255) default null COMMENT 'Server地址,用于负责调度应用的ActorSystem地址',
`gmt_create` datetime not null COMMENT '创建时间',
`gmt_modified` datetime not null COMMENT '更新时间',
`password` varchar(255) not null COMMENT '应用密码',
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE = InnoDB AUTO_INCREMENT = 1
DEFAULT CHARSET = utf8mb4
COLLATE = utf8mb4_general_ci COMMENT ='应用表';
-- Insert the sample app only if it is not already registered (idempotent re-runs).
insert into app_info (app_name, gmt_create, gmt_modified, password) select 'powerjob-worker-samples', current_timestamp(), current_timestamp(), 'powerjob123' from dual where not exists ( select * from app_info where app_name = 'powerjob-worker-samples');
SET FOREIGN_KEY_CHECKS = 1;

View File

@ -0,0 +1,243 @@
/*
 Navicat Premium Data Transfer
 Source Server : Local@3306
 Source Server Type : MySQL
 Source Server Version : 80300 (8.3.0)
 Source Host : localhost:3306
 Source Schema : powerjob4
 Target Server Type : MySQL
 Target Server Version : 80300 (8.3.0)
 File Encoding : 65001
 Date: 02/03/2024 18:51:36
*/
/* Legacy 4.x schema export — fewer tables/columns than the 5.x schema. */
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
-- ----------------------------
-- Table structure for app_info
-- ----------------------------
-- Applications registered with PowerJob; app_name is unique.
DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_name` varchar(255) DEFAULT NULL,
`current_server` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for container_info
-- ----------------------------
-- Deployable containers, looked up by owning app_id.
DROP TABLE IF EXISTS `container_info`;
CREATE TABLE `container_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`container_name` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`last_deploy_time` datetime(6) DEFAULT NULL,
`source_info` varchar(255) DEFAULT NULL,
`source_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`version` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for instance_info
-- ----------------------------
-- One row per job-instance execution; indexed for job/app/instance status lookups.
DROP TABLE IF EXISTS `instance_info`;
CREATE TABLE `instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_id` bigint DEFAULT NULL,
`instance_params` longtext,
`job_id` bigint DEFAULT NULL,
`job_params` longtext,
`last_report_time` bigint DEFAULT NULL,
`result` longtext,
`running_times` bigint DEFAULT NULL,
`status` int DEFAULT NULL,
`task_tracker_address` varchar(255) DEFAULT NULL,
`type` int DEFAULT NULL,
`wf_instance_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_instance_info` (`job_id`,`status`),
KEY `idx02_instance_info` (`app_id`,`status`),
KEY `idx03_instance_info` (`instance_id`,`status`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for job_info
-- ----------------------------
-- Job definitions plus scheduling/dispatch configuration.
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`advanced_runtime_config` varchar(255) DEFAULT NULL,
`alarm_config` varchar(255) DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`concurrency` int DEFAULT NULL,
`designated_workers` varchar(255) DEFAULT NULL,
`dispatch_strategy` int DEFAULT NULL,
`dispatch_strategy_config` varchar(255) DEFAULT NULL,
`execute_type` int DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_retry_num` int DEFAULT NULL,
`instance_time_limit` bigint DEFAULT NULL,
`job_description` varchar(255) DEFAULT NULL,
`job_name` varchar(255) DEFAULT NULL,
`job_params` longtext,
`lifecycle` varchar(255) DEFAULT NULL,
`log_config` varchar(255) DEFAULT NULL,
`max_instance_num` int DEFAULT NULL,
`max_worker_count` int DEFAULT NULL,
`min_cpu_cores` double NOT NULL,
`min_disk_space` double NOT NULL,
`min_memory_space` double NOT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`processor_info` varchar(255) DEFAULT NULL,
`processor_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`tag` varchar(255) DEFAULT NULL,
`task_retry_num` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
-- Database-backed distributed lock records; lock_name is unique.
-- NOTE(review): column spelled "ownerip" (no underscore) — presumably matches
-- the JPA entity field; verify before renaming.
DROP TABLE IF EXISTS `oms_lock`;
CREATE TABLE `oms_lock` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lock_name` varchar(255) DEFAULT NULL,
`max_lock_time` bigint DEFAULT NULL,
`ownerip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for server_info
-- ----------------------------
-- Known server nodes; ip is unique, gmt_modified is indexed.
DROP TABLE IF EXISTS `server_info`;
CREATE TABLE `server_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`ip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_server_info` (`ip`),
KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_info
-- ----------------------------
-- Console user accounts.
-- NOTE(review): both indexes here are plain KEYs despite the "uidx" prefix
-- (the 5.x schema makes username UNIQUE) — confirm intent before relying on it.
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`email` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`phone` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
`web_hook` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `uidx01_user_info` (`username`),
KEY `uidx02_user_info` (`email`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
-- Workflow (DAG) definitions; index mirrors the job_info scheduler scan.
DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lifecycle` varchar(255) DEFAULT NULL,
`max_wf_instance_num` int DEFAULT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`pedag` longtext,
`status` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
`wf_description` varchar(255) DEFAULT NULL,
`wf_name` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
-- Workflow executions; wf_instance_id is unique.
DROP TABLE IF EXISTS `workflow_instance_info`;
CREATE TABLE `workflow_instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`dag` longtext,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`parent_wf_instance_id` bigint DEFAULT NULL,
`result` longtext,
`status` int DEFAULT NULL,
`wf_context` longtext,
`wf_init_params` longtext,
`wf_instance_id` bigint DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
-- Nodes belonging to a workflow DAG, fetched by (workflow_id, gmt_create).
DROP TABLE IF EXISTS `workflow_node_info`;
CREATE TABLE `workflow_node_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint NOT NULL,
`enable` bit(1) NOT NULL,
`extra` longtext,
`gmt_create` datetime(6) NOT NULL,
`gmt_modified` datetime(6) NOT NULL,
`job_id` bigint DEFAULT NULL,
`node_name` varchar(255) DEFAULT NULL,
`node_params` longtext,
`skip_when_failed` bit(1) NOT NULL,
`type` int DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
SET FOREIGN_KEY_CHECKS = 1;

View File

@ -0,0 +1,323 @@
/*
Navicat Premium Data Transfer
Source Server : Local@3306
Source Server Type : MySQL
Source Server Version : 80300 (8.3.0)
Source Host : localhost:3306
Source Schema : powerjob5
Target Server Type : MySQL
Target Server Version : 80300 (8.3.0)
File Encoding : 65001
Date: 16/03/2024 22:07:31
*/
SET NAMES utf8mb4;
-- FK checks disabled so DROP/CREATE order does not matter; re-enabled at the end.
SET FOREIGN_KEY_CHECKS = 0;
-- Full MySQL 8.x schema dump (Navicat export) of the PowerJob server database.
-- Tables mirror the server's JPA entities; gmt_create / gmt_modified are audit
-- timestamps — presumably filled by the application, confirm against the entities.
-- ----------------------------
-- Table structure for app_info
-- ----------------------------
DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_name` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`current_server` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`namespace_id` bigint DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`title` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for container_info
-- ----------------------------
DROP TABLE IF EXISTS `container_info`;
CREATE TABLE `container_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`container_name` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`last_deploy_time` datetime(6) DEFAULT NULL,
`source_info` varchar(255) DEFAULT NULL,
`source_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`version` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for instance_info
-- ----------------------------
-- One row per job run; `instance_id` is the externally visible id, `id` the surrogate PK.
DROP TABLE IF EXISTS `instance_info`;
CREATE TABLE `instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_id` bigint DEFAULT NULL,
`instance_params` longtext,
`job_id` bigint DEFAULT NULL,
`job_params` longtext,
`last_report_time` bigint DEFAULT NULL,
`result` longtext,
`running_times` bigint DEFAULT NULL,
`status` int DEFAULT NULL,
`task_tracker_address` varchar(255) DEFAULT NULL,
`type` int DEFAULT NULL,
`wf_instance_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_instance_info` (`job_id`,`status`),
KEY `idx02_instance_info` (`app_id`,`status`),
KEY `idx03_instance_info` (`instance_id`,`status`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for job_info
-- ----------------------------
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`advanced_runtime_config` varchar(255) DEFAULT NULL,
`alarm_config` varchar(255) DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`concurrency` int DEFAULT NULL,
`designated_workers` varchar(255) DEFAULT NULL,
`dispatch_strategy` int DEFAULT NULL,
`dispatch_strategy_config` varchar(255) DEFAULT NULL,
`execute_type` int DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_retry_num` int DEFAULT NULL,
`instance_time_limit` bigint DEFAULT NULL,
`job_description` varchar(255) DEFAULT NULL,
`job_name` varchar(255) DEFAULT NULL,
`job_params` longtext,
`lifecycle` varchar(255) DEFAULT NULL,
`log_config` varchar(255) DEFAULT NULL,
`max_instance_num` int DEFAULT NULL,
`max_worker_count` int DEFAULT NULL,
`min_cpu_cores` double NOT NULL,
`min_disk_space` double NOT NULL,
`min_memory_space` double NOT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`processor_info` varchar(255) DEFAULT NULL,
`processor_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`tag` varchar(255) DEFAULT NULL,
`task_retry_num` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
PRIMARY KEY (`id`),
-- Covering index for the scheduler's "due jobs" scan.
KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for namespace
-- ----------------------------
DROP TABLE IF EXISTS `namespace`;
CREATE TABLE `namespace` (
`id` bigint NOT NULL AUTO_INCREMENT,
`code` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`dept` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`token` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_namespace` (`code`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
-- Database-backed distributed lock; uniqueness of lock_name enforces mutual exclusion.
DROP TABLE IF EXISTS `oms_lock`;
CREATE TABLE `oms_lock` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lock_name` varchar(255) DEFAULT NULL,
`max_lock_time` bigint DEFAULT NULL,
`ownerip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for pwjb_user_info
-- ----------------------------
DROP TABLE IF EXISTS `pwjb_user_info`;
CREATE TABLE `pwjb_user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_username` (`username`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for server_info
-- ----------------------------
DROP TABLE IF EXISTS `server_info`;
CREATE TABLE `server_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`ip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_server_info` (`ip`),
KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for sundry
-- ----------------------------
-- Generic key/value store keyed by (pkey, skey).
DROP TABLE IF EXISTS `sundry`;
CREATE TABLE `sundry` (
`id` bigint NOT NULL AUTO_INCREMENT,
`content` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`pkey` varchar(255) DEFAULT NULL,
`skey` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_sundry` (`pkey`,`skey`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_info
-- ----------------------------
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`account_type` varchar(255) DEFAULT NULL,
`email` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`nick` varchar(255) DEFAULT NULL,
`origin_username` varchar(255) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`phone` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`token_login_verify_info` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
`web_hook` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_user_name` (`username`),
-- NOTE(review): named with the unique-index prefix but declared as a plain KEY —
-- email is NOT unique here. Kept as-is to match the deployed schema.
KEY `uidx02_user_info` (`email`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_role
-- ----------------------------
DROP TABLE IF EXISTS `user_role`;
CREATE TABLE `user_role` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`role` int DEFAULT NULL,
`scope` int DEFAULT NULL,
`target` bigint DEFAULT NULL,
`user_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
-- NOTE(review): `uidx` prefix but non-unique KEY (a user may hold several roles).
KEY `uidx01_user_id` (`user_id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lifecycle` varchar(255) DEFAULT NULL,
`max_wf_instance_num` int DEFAULT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`pedag` longtext,
`status` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
`wf_description` varchar(255) DEFAULT NULL,
`wf_name` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_instance_info`;
CREATE TABLE `workflow_instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`dag` longtext,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`parent_wf_instance_id` bigint DEFAULT NULL,
`result` longtext,
`status` int DEFAULT NULL,
`wf_context` longtext,
`wf_init_params` longtext,
`wf_instance_id` bigint DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_node_info`;
CREATE TABLE `workflow_node_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint NOT NULL,
`enable` bit(1) NOT NULL,
`extra` longtext,
`gmt_create` datetime(6) NOT NULL,
`gmt_modified` datetime(6) NOT NULL,
`job_id` bigint DEFAULT NULL,
`node_name` varchar(255) DEFAULT NULL,
`node_params` longtext,
`skip_when_failed` bit(1) NOT NULL,
`type` int DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
SET FOREIGN_KEY_CHECKS = 1;

View File

@ -0,0 +1,323 @@
/*
Navicat Premium Data Transfer
Source Server : Local@3306
Source Server Type : MySQL
Source Server Version : 80300 (8.3.0)
Source Host : localhost:3306
Source Schema : powerjob5
Target Server Type : MySQL
Target Server Version : 80300 (8.3.0)
File Encoding : 65001
Date: 11/08/2024 23:23:30
*/
SET NAMES utf8mb4;
-- FK checks disabled so DROP/CREATE order does not matter; re-enabled at the end.
SET FOREIGN_KEY_CHECKS = 0;
-- Full MySQL 8.x schema dump (Navicat export) of the PowerJob server database.
-- Tables mirror the server's JPA entities; gmt_create / gmt_modified are audit
-- timestamps — presumably filled by the application, confirm against the entities.
-- ----------------------------
-- Table structure for app_info
-- ----------------------------
DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_name` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`current_server` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`namespace_id` bigint DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`title` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for container_info
-- ----------------------------
DROP TABLE IF EXISTS `container_info`;
CREATE TABLE `container_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`container_name` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`last_deploy_time` datetime(6) DEFAULT NULL,
`source_info` varchar(255) DEFAULT NULL,
`source_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`version` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for instance_info
-- ----------------------------
-- One row per job run; `instance_id` is the externally visible id, `id` the surrogate PK.
DROP TABLE IF EXISTS `instance_info`;
CREATE TABLE `instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_id` bigint DEFAULT NULL,
`instance_params` longtext,
`job_id` bigint DEFAULT NULL,
`job_params` longtext,
`last_report_time` bigint DEFAULT NULL,
`result` longtext,
`running_times` bigint DEFAULT NULL,
`status` int DEFAULT NULL,
`task_tracker_address` varchar(255) DEFAULT NULL,
`type` int DEFAULT NULL,
`wf_instance_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_instance_info` (`job_id`,`status`),
KEY `idx02_instance_info` (`app_id`,`status`),
KEY `idx03_instance_info` (`instance_id`,`status`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for job_info
-- ----------------------------
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`advanced_runtime_config` varchar(255) DEFAULT NULL,
`alarm_config` varchar(255) DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`concurrency` int DEFAULT NULL,
`designated_workers` varchar(255) DEFAULT NULL,
`dispatch_strategy` int DEFAULT NULL,
`dispatch_strategy_config` varchar(255) DEFAULT NULL,
`execute_type` int DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_retry_num` int DEFAULT NULL,
`instance_time_limit` bigint DEFAULT NULL,
`job_description` varchar(255) DEFAULT NULL,
`job_name` varchar(255) DEFAULT NULL,
`job_params` longtext,
`lifecycle` varchar(255) DEFAULT NULL,
`log_config` varchar(255) DEFAULT NULL,
`max_instance_num` int DEFAULT NULL,
`max_worker_count` int DEFAULT NULL,
`min_cpu_cores` double NOT NULL,
`min_disk_space` double NOT NULL,
`min_memory_space` double NOT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`processor_info` varchar(255) DEFAULT NULL,
`processor_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`tag` varchar(255) DEFAULT NULL,
`task_retry_num` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
PRIMARY KEY (`id`),
-- Covering index for the scheduler's "due jobs" scan.
KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for namespace
-- ----------------------------
DROP TABLE IF EXISTS `namespace`;
CREATE TABLE `namespace` (
`id` bigint NOT NULL AUTO_INCREMENT,
`code` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`dept` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`token` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_namespace` (`code`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
-- Database-backed distributed lock; uniqueness of lock_name enforces mutual exclusion.
DROP TABLE IF EXISTS `oms_lock`;
CREATE TABLE `oms_lock` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lock_name` varchar(255) DEFAULT NULL,
`max_lock_time` bigint DEFAULT NULL,
`ownerip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for pwjb_user_info
-- ----------------------------
DROP TABLE IF EXISTS `pwjb_user_info`;
CREATE TABLE `pwjb_user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_username` (`username`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for server_info
-- ----------------------------
DROP TABLE IF EXISTS `server_info`;
CREATE TABLE `server_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`ip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_server_info` (`ip`),
KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for sundry
-- ----------------------------
-- Generic key/value store keyed by (pkey, skey).
DROP TABLE IF EXISTS `sundry`;
CREATE TABLE `sundry` (
`id` bigint NOT NULL AUTO_INCREMENT,
`content` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`pkey` varchar(255) DEFAULT NULL,
`skey` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_sundry` (`pkey`,`skey`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_info
-- ----------------------------
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`account_type` varchar(255) DEFAULT NULL,
`email` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`nick` varchar(255) DEFAULT NULL,
`origin_username` varchar(255) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`phone` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`token_login_verify_info` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
`web_hook` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_user_name` (`username`),
-- NOTE(review): named with the unique-index prefix but declared as a plain KEY —
-- email is NOT unique here. Kept as-is to match the deployed schema.
KEY `uidx02_user_info` (`email`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_role
-- ----------------------------
DROP TABLE IF EXISTS `user_role`;
CREATE TABLE `user_role` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`role` int DEFAULT NULL,
`scope` int DEFAULT NULL,
`target` bigint DEFAULT NULL,
`user_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
-- NOTE(review): `uidx` prefix but non-unique KEY (a user may hold several roles).
KEY `uidx01_user_id` (`user_id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lifecycle` varchar(255) DEFAULT NULL,
`max_wf_instance_num` int DEFAULT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`pedag` longtext,
`status` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
`wf_description` varchar(255) DEFAULT NULL,
`wf_name` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_instance_info`;
CREATE TABLE `workflow_instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`dag` longtext,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`parent_wf_instance_id` bigint DEFAULT NULL,
`result` longtext,
`status` int DEFAULT NULL,
`wf_context` longtext,
`wf_init_params` longtext,
`wf_instance_id` bigint DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_node_info`;
CREATE TABLE `workflow_node_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint NOT NULL,
`enable` bit(1) NOT NULL,
`extra` longtext,
`gmt_create` datetime(6) NOT NULL,
`gmt_modified` datetime(6) NOT NULL,
`job_id` bigint DEFAULT NULL,
`node_name` varchar(255) DEFAULT NULL,
`node_params` longtext,
`skip_when_failed` bit(1) NOT NULL,
`type` int DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
SET FOREIGN_KEY_CHECKS = 1;

View File

@ -0,0 +1,327 @@
/*
Navicat Premium Data Transfer
Source Server : Local@3306
Source Server Type : MySQL
Source Server Version : 80300 (8.3.0)
Source Host : localhost:3306
Source Schema : powerjob5g
Target Server Type : MySQL
Target Server Version : 80300 (8.3.0)
File Encoding : 65001
Date: 17/08/2025 21:58:30
*/
SET NAMES utf8mb4;
-- FK checks disabled so DROP/CREATE order does not matter; re-enabled at the end.
SET FOREIGN_KEY_CHECKS = 0;
-- Full MySQL 8.x schema dump (Navicat export) of the PowerJob server database.
-- Newer than the 2024 dumps in this repo: instance_info gains extend_value,
-- meta, outer_key and the idx04 index; everything else matches those dumps.
-- ----------------------------
-- Table structure for app_info
-- ----------------------------
DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_name` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`current_server` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`namespace_id` bigint DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`title` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for container_info
-- ----------------------------
DROP TABLE IF EXISTS `container_info`;
CREATE TABLE `container_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`container_name` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`last_deploy_time` datetime(6) DEFAULT NULL,
`source_info` varchar(255) DEFAULT NULL,
`source_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`version` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for instance_info
-- ----------------------------
-- One row per job run; `instance_id` is the externally visible id, `id` the surrogate PK.
DROP TABLE IF EXISTS `instance_info`;
CREATE TABLE `instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`expected_trigger_time` bigint DEFAULT NULL,
-- extend_value / meta / outer_key: columns not present in the 2024 dumps above.
-- outer_key is indexed (idx04) — presumably an external correlation key; confirm
-- against the InstanceInfo entity.
`extend_value` varchar(255) DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_id` bigint DEFAULT NULL,
`instance_params` longtext,
`job_id` bigint DEFAULT NULL,
`job_params` longtext,
`last_report_time` bigint DEFAULT NULL,
`meta` varchar(255) DEFAULT NULL,
`outer_key` varchar(255) DEFAULT NULL,
`result` longtext,
`running_times` bigint DEFAULT NULL,
`status` int DEFAULT NULL,
`task_tracker_address` varchar(255) DEFAULT NULL,
`type` int DEFAULT NULL,
`wf_instance_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_instance_info` (`job_id`,`status`),
KEY `idx02_instance_info` (`app_id`,`status`),
KEY `idx03_instance_info` (`instance_id`,`status`),
KEY `idx04_instance_info_outer_key` (`outer_key`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for job_info
-- ----------------------------
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`advanced_runtime_config` varchar(255) DEFAULT NULL,
`alarm_config` varchar(255) DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`concurrency` int DEFAULT NULL,
`designated_workers` varchar(255) DEFAULT NULL,
`dispatch_strategy` int DEFAULT NULL,
`dispatch_strategy_config` varchar(255) DEFAULT NULL,
`execute_type` int DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_retry_num` int DEFAULT NULL,
`instance_time_limit` bigint DEFAULT NULL,
`job_description` varchar(255) DEFAULT NULL,
`job_name` varchar(255) DEFAULT NULL,
`job_params` longtext,
`lifecycle` varchar(255) DEFAULT NULL,
`log_config` varchar(255) DEFAULT NULL,
`max_instance_num` int DEFAULT NULL,
`max_worker_count` int DEFAULT NULL,
`min_cpu_cores` double NOT NULL,
`min_disk_space` double NOT NULL,
`min_memory_space` double NOT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`processor_info` varchar(255) DEFAULT NULL,
`processor_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`tag` varchar(255) DEFAULT NULL,
`task_retry_num` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
PRIMARY KEY (`id`),
-- Covering index for the scheduler's "due jobs" scan.
KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for namespace
-- ----------------------------
DROP TABLE IF EXISTS `namespace`;
CREATE TABLE `namespace` (
`id` bigint NOT NULL AUTO_INCREMENT,
`code` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`dept` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`token` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_namespace` (`code`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
-- Database-backed distributed lock; uniqueness of lock_name enforces mutual exclusion.
DROP TABLE IF EXISTS `oms_lock`;
CREATE TABLE `oms_lock` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lock_name` varchar(255) DEFAULT NULL,
`max_lock_time` bigint DEFAULT NULL,
`ownerip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE=InnoDB AUTO_INCREMENT=6 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for pwjb_user_info
-- ----------------------------
DROP TABLE IF EXISTS `pwjb_user_info`;
CREATE TABLE `pwjb_user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_username` (`username`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for server_info
-- ----------------------------
DROP TABLE IF EXISTS `server_info`;
CREATE TABLE `server_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`ip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_server_info` (`ip`),
KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for sundry
-- ----------------------------
-- Generic key/value store keyed by (pkey, skey).
DROP TABLE IF EXISTS `sundry`;
CREATE TABLE `sundry` (
`id` bigint NOT NULL AUTO_INCREMENT,
`content` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`pkey` varchar(255) DEFAULT NULL,
`skey` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_sundry` (`pkey`,`skey`)
) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_info
-- ----------------------------
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`account_type` varchar(255) DEFAULT NULL,
`email` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`nick` varchar(255) DEFAULT NULL,
`origin_username` varchar(255) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`phone` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`token_login_verify_info` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
`web_hook` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_user_name` (`username`),
-- NOTE(review): named with the unique-index prefix but declared as a plain KEY —
-- email is NOT unique here. Kept as-is to match the deployed schema.
KEY `uidx02_user_info` (`email`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_role
-- ----------------------------
DROP TABLE IF EXISTS `user_role`;
CREATE TABLE `user_role` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`role` int DEFAULT NULL,
`scope` int DEFAULT NULL,
`target` bigint DEFAULT NULL,
`user_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
-- NOTE(review): `uidx` prefix but non-unique KEY (a user may hold several roles).
KEY `uidx01_user_id` (`user_id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lifecycle` varchar(255) DEFAULT NULL,
`max_wf_instance_num` int DEFAULT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`pedag` longtext,
`status` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
`wf_description` varchar(255) DEFAULT NULL,
`wf_name` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_instance_info`;
CREATE TABLE `workflow_instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`dag` longtext,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`parent_wf_instance_id` bigint DEFAULT NULL,
`result` longtext,
`status` int DEFAULT NULL,
`wf_context` longtext,
`wf_init_params` longtext,
`wf_instance_id` bigint DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_node_info`;
CREATE TABLE `workflow_node_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint NOT NULL,
`enable` bit(1) NOT NULL,
`extra` longtext,
`gmt_create` datetime(6) NOT NULL,
`gmt_modified` datetime(6) NOT NULL,
`job_id` bigint DEFAULT NULL,
`node_name` varchar(255) DEFAULT NULL,
`node_params` longtext,
`skip_when_failed` bit(1) NOT NULL,
`type` int DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
SET FOREIGN_KEY_CHECKS = 1;

View File

@ -0,0 +1,7 @@
由于存在不同数据库、不同版本的升级,官方能给出的 upgrade SQL 相对有限,大家可参考以下方式自行生成升级 SQL
- 【官方脚本】参考官方每个版本的数据库全库建表文件(项目 others - sql - schema),自行进行字段 DIFF
- 【自己动手版】导出当前您的 powerjob 数据库表结构,同时创建一个测试库,让 5.x 版本的 server 直连该测试库,自动建表。分别拿到两个版本的表结构 SQL 后,借用工具生成 update SQL 即可(Navicat 等数据库管理软件均支持结构对比)
参考文档https://www.yuque.com/powerjob/guidence/upgrade

View File

@ -0,0 +1,10 @@
-- Upgrade SQL FROM 4.0.x to 4.1.x
-- ----------------------------
-- Table change for workflow_instance_info
-- ----------------------------
-- NOTE(review): "default null null" is accepted by MySQL (DEFAULT NULL followed by the
-- NULL nullability attribute) but the doubled keyword reads like a typo — confirm intent.
alter table workflow_instance_info
add parent_wf_instance_id bigint default null null comment '上层工作流实例ID';
-- ----------------------------
-- Table change for job_info
-- ----------------------------
-- new optional column; defaults to NULL so the upgrade is backward compatible
alter table job_info add alarm_config varchar(512) comment '告警配置' default null;

View File

@ -0,0 +1,6 @@
-- Upgrade SQL FROM 4.1.x to 4.2.x
-- ----------------------------
-- Table change for job_info
-- ----------------------------
-- both columns are optional (NULL default), so existing rows stay valid
alter table job_info add tag varchar(255) comment 'TAG' default null;
alter table job_info add log_config varchar(255) comment 'logConfig' default null;

View File

@ -0,0 +1,6 @@
-- Upgrade SQL FROM 4.3.7 to 4.3.8
-- ----------------------------
-- Table change for job_info
-- ----------------------------
-- both columns are optional (NULL default), so existing rows stay valid
alter table job_info add dispatch_strategy_config varchar(255) comment 'dispatch_strategy_config' default null;
alter table job_info add advanced_runtime_config varchar(255) comment 'advanced_runtime_config' default null;

View File

@ -0,0 +1,88 @@
-- Upgrade SQL FROM 4.1.x to 4.2.x
-- NOTE(review): the header above looks copy-pasted from another script — the changes
-- below (namespace / pwjb_user_info / user_role tables, app_info.namespace_id) match
-- the auth & namespace refactor of a later major version, not 4.1.x -> 4.2.x.
-- Confirm the intended version range before shipping.
-- ----------------------------
-- Table change for app_info
-- ----------------------------
SET FOREIGN_KEY_CHECKS=0;
ALTER TABLE `app_info` ADD COLUMN `creator` bigint NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `extra` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `modifier` bigint NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `namespace_id` bigint NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `tags` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `title` varchar(255) NULL DEFAULT NULL;
-- ----------------------------
-- Table change for user_info
-- ----------------------------
ALTER TABLE `user_info` ADD COLUMN `account_type` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `user_info` ADD COLUMN `nick` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `user_info` ADD COLUMN `origin_username` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `user_info` ADD COLUMN `token_login_verify_info` varchar(255) NULL DEFAULT NULL;
-- NOTE(review): this fails if duplicate usernames already exist — deduplicate first
ALTER TABLE `user_info` ADD UNIQUE INDEX `uidx01_user_name`(`username` ASC) USING BTREE;
-- ----------------------------
-- new table 'namespace'
-- ----------------------------
CREATE TABLE `namespace` (
`id` bigint NOT NULL AUTO_INCREMENT,
`code` varchar(255) NULL DEFAULT NULL,
`creator` bigint NULL DEFAULT NULL,
`dept` varchar(255) NULL DEFAULT NULL,
`extra` varchar(255) NULL DEFAULT NULL,
`gmt_create` datetime(6) NULL DEFAULT NULL,
`gmt_modified` datetime(6) NULL DEFAULT NULL,
`modifier` bigint NULL DEFAULT NULL,
`name` varchar(255) NULL DEFAULT NULL,
`status` int NULL DEFAULT NULL,
`tags` varchar(255) NULL DEFAULT NULL,
`token` varchar(255) NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE,
UNIQUE INDEX `uidx01_namespace`(`code` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- new table 'pwjb_user_info'
-- ----------------------------
CREATE TABLE `pwjb_user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) NULL DEFAULT NULL,
`gmt_create` datetime(6) NULL DEFAULT NULL,
`gmt_modified` datetime(6) NULL DEFAULT NULL,
`password` varchar(255) NULL DEFAULT NULL,
`username` varchar(255) NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE,
UNIQUE INDEX `uidx01_username`(`username` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- new table 'sundry'
-- ----------------------------
-- generic key/value side table: (pkey, skey) -> content
CREATE TABLE `sundry` (
`id` bigint NOT NULL AUTO_INCREMENT,
`content` varchar(255) NULL DEFAULT NULL,
`extra` varchar(255) NULL DEFAULT NULL,
`gmt_create` datetime(6) NULL DEFAULT NULL,
`gmt_modified` datetime(6) NULL DEFAULT NULL,
`pkey` varchar(255) NULL DEFAULT NULL,
`skey` varchar(255) NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE,
UNIQUE INDEX `uidx01_sundry`(`pkey` ASC, `skey` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 3 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- new table 'user_role'
-- ----------------------------
CREATE TABLE `user_role` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) NULL DEFAULT NULL,
`gmt_create` datetime(6) NULL DEFAULT NULL,
`gmt_modified` datetime(6) NULL DEFAULT NULL,
`role` int NULL DEFAULT NULL,
`scope` int NULL DEFAULT NULL,
`target` bigint NULL DEFAULT NULL,
`user_id` bigint NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE,
-- NOTE(review): named with a 'uidx' prefix but declared as a plain (non-unique) INDEX —
-- confirm whether the prefix or the uniqueness is wrong.
INDEX `uidx01_user_id`(`user_id` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;

View File

@ -0,0 +1,7 @@
-- Adds three optional instance_info columns (extend_value / meta / outer_key) plus a
-- lookup index on outer_key. All columns default to NULL, so the upgrade is backward
-- compatible. NOTE(review): exact feature semantics are not visible in this script —
-- confirm against the matching release notes.
ALTER TABLE `instance_info` ADD COLUMN `extend_value` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL AFTER `expected_trigger_time`;
ALTER TABLE `instance_info` ADD COLUMN `meta` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL AFTER `last_report_time`;
ALTER TABLE `instance_info` ADD COLUMN `outer_key` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL AFTER `meta`;
ALTER TABLE `instance_info` ADD INDEX `idx04_instance_info_outer_key`(`outer_key` ASC) USING BTREE;

208
pom.xml Normal file
View File

@ -0,0 +1,208 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Root aggregator POM of the PowerJob project: declares all modules, the shared
     Lombok dependency, and the release/dev build profiles. -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>tech.powerjob</groupId>
<artifactId>powerjob</artifactId>
<version>5.1.2</version>
<packaging>pom</packaging>
<name>powerjob</name>
<url>http://www.powerjob.tech</url>
<description>Enterprise job scheduling middleware with distributed computing ability.</description>
<licenses>
<license>
<name>Apache License, Version 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0</url>
<distribution>repo</distribution>
</license>
</licenses>
<scm>
<url>https://github.com/PowerJob/PowerJob</url>
<connection>https://github.com/PowerJob/PowerJob.git</connection>
</scm>
<developers>
<developer>
<name>tengjiqi</name>
<id>tengjiqi</id>
<email>tengjiqi@gmail.com</email>
<roles>
<role>Developer</role>
</roles>
<timezone>+8</timezone>
</developer>
</developers>
<!-- Sub-modules built by the reactor -->
<modules>
<module>powerjob-worker</module>
<module>powerjob-server</module>
<module>powerjob-common</module>
<module>powerjob-client</module>
<module>powerjob-worker-agent</module>
<module>powerjob-worker-spring-boot-starter</module>
<module>powerjob-worker-samples</module>
<module>powerjob-official-processors</module>
<module>powerjob-remote</module>
</modules>
<!-- Java 8 is the compatibility baseline for all modules -->
<properties>
<java.version>1.8</java.version>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<maven-compiler-plugin.version>3.8.1</maven-compiler-plugin.version>
<maven-source-plugin.version>3.2.1</maven-source-plugin.version>
<maven-javadoc-plugin.version>3.2.0</maven-javadoc-plugin.version>
<maven-jar-plugin.version>3.2.0</maven-jar-plugin.version>
<maven-gpg-plugin.version>1.6</maven-gpg-plugin.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<lombok.version>1.18.38</lombok.version>
</properties>
<!-- Declared at the root, so every module inherits Lombok (provided scope:
     compile-time only, never packaged). -->
<dependencies>
<!-- lombok -->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>${lombok.version}</version>
<scope>provided</scope>
</dependency>
</dependencies>
<profiles>
<!-- Release profile: publish to Maven Central, enable with `mvn xxx -Prelease`.
     Adds source/javadoc jars, GPG signing and the Central publishing plugin. -->
<profile>
<id>release</id>
<build>
<plugins>
<!-- Compiler plugin -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven-compiler-plugin.version}</version>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
<testSource>${java.version}</testSource>
<testTarget>${java.version}</testTarget>
</configuration>
</plugin>
<!-- Package source codes -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>${maven-source-plugin.version}</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- Edit MANIFEST.MF -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>${maven-jar-plugin.version}</version>
<configuration>
<archive>
<manifestEntries>
<Implementation-Title>${project.artifactId}</Implementation-Title>
<Implementation-Version>${project.version}</Implementation-Version>
</manifestEntries>
</archive>
</configuration>
</plugin>
<!-- Java Doc -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<configuration>
<!-- Prevent JavaDoc error from affecting building project. -->
<failOnError>false</failOnError>
<!-- Non-strict mode -->
<additionalJOption>-Xdoclint:none</additionalJOption>
</configuration>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- GPG signing, required by Maven Central -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-gpg-plugin</artifactId>
<version>${maven-gpg-plugin.version}</version>
<executions>
<execution>
<phase>verify</phase>
<goals>
<goal>sign</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- Maven Central publishing plugin, see https://central.sonatype.org/publish/publish-portal-maven/ -->
<plugin>
<groupId>org.sonatype.central</groupId>
<artifactId>central-publishing-maven-plugin</artifactId>
<version>0.8.0</version>
<extensions>true</extensions>
<configuration>
<publishingServerId>central</publishingServerId>
</configuration>
</plugin>
</plugins>
</build>
</profile>
<!-- Local/dev profile (active by default): compile + jar only, no signing/publishing -->
<profile>
<id>dev</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<build>
<plugins>
<!-- Maven compiler plugin -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven-compiler-plugin.version}</version>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
<testSource>${java.version}</testSource>
<testTarget>${java.version}</testTarget>
</configuration>
</plugin>
<!-- Edit MANIFEST.MF -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>${maven-jar-plugin.version}</version>
<configuration>
<archive>
<manifestEntries>
<Implementation-Title>${project.artifactId}</Implementation-Title>
<Implementation-Version>${project.version}</Implementation-Version>
</manifestEntries>
</archive>
</configuration>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>

104
powerjob-client/pom.xml Normal file
View File

@ -0,0 +1,104 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- OpenAPI client module: depends only on powerjob-common + fastjson at runtime. -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId>
<version>5.1.2</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-client</artifactId>
<name>powerjob-client</name>
<version>5.1.2</version>
<packaging>jar</packaging>
<!-- Keep powerjob.common.version in lock-step with the module version above -->
<properties>
<junit.version>5.9.1</junit.version>
<logback.version>1.2.13</logback.version>
<fastjson.version>1.2.83</fastjson.version>
<powerjob.common.version>5.1.2</powerjob.common.version>
<mvn.shade.plugin.version>3.2.4</mvn.shade.plugin.version>
</properties>
<dependencies>
<!-- fastJson -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>${fastjson.version}</version>
</dependency>
<!-- oms-common -->
<dependency>
<groupId>tech.powerjob</groupId>
<artifactId>powerjob-common</artifactId>
<version>${powerjob.common.version}</version>
</dependency>
<!-- Junit tests -->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>${junit.version}</version>
<scope>test</scope>
</dependency>
<!-- log for test stage -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<!-- Shade packaging abandoned for now: a shaded jar has to be absolutely clean,
     otherwise it causes far worse problems than it solves. Kept for reference. -->
<!--
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>${mvn.shade.plugin.version}</version>
<configuration>
<createDependencyReducedPom>false</createDependencyReducedPom>
<relocations>
<relocation>
<pattern>okhttp3</pattern>
<shadedPattern>shade.powerjob.okhttp3</shadedPattern>
</relocation>
<relocation>
<pattern>okio</pattern>
<shadedPattern>shade.powerjob.okio</shadedPattern>
</relocation>
<relocation>
<pattern>com.google</pattern>
<shadedPattern>shade.powerjob.com.google</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache</pattern>
<shadedPattern>shade.powerjob.org.apache</shadedPattern>
</relocation>
<relocation>
<pattern>com.alibaba</pattern>
<shadedPattern>shade.powerjob.com.alibaba</shadedPattern>
</relocation>
</relocations>
</configuration>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
</execution>
</executions>
</plugin>
-->
</plugins>
</build>
</project>

View File

@ -0,0 +1,71 @@
package tech.powerjob.client;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import lombok.experimental.Accessors;
import tech.powerjob.client.common.Protocol;
import tech.powerjob.client.extension.ClientExtension;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
/**
 * Client configuration for {@code PowerJobClient}.
 *
 * @author 程序帕鲁
 * @since 2024/2/20
 */
@Getter
@Setter
@ToString
@Accessors(chain = true)
public class ClientConfig implements Serializable {
/**
 * AppName of the executor (the application registered on the PowerJob server).
 */
private String appName;
/**
 * Password of the application.
 */
private String password;
/**
 * Server address list. Supported formats:
 * - IP:Port, e.g. 192.168.1.1:7700
 * - domain name, e.g. powerjob.apple-inc.com
 */
private List<String> addressList;
/**
 * Communication protocol between client and server.
 */
private Protocol protocol = Protocol.HTTP;
/**
 * Connect timeout (time unit not visible here — confirm in the request-service implementation).
 */
private Integer connectionTimeout;
/**
 * Read timeout: the maximum time to wait for the server's response data, i.e. from the
 * server starting to return the response (headers and body) until the client has read it.
 */
private Integer readTimeout;
/**
 * Write timeout: the maximum time for sending data to the server, i.e. from starting to
 * send the request body (e.g. a POST payload) until it has been fully sent.
 */
private Integer writeTimeout;
/**
 * Headers attached to every request by default, so infrastructure can identify the traffic.
 */
private Map<String, String> defaultHeaders;
/**
 * Extension hooks for customizing client behavior.
 */
private ClientExtension clientExtension;
}

View File

@ -0,0 +1,85 @@
package tech.powerjob.client;
import tech.powerjob.common.request.http.RunJobRequest;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.request.http.SaveWorkflowNodeRequest;
import tech.powerjob.common.request.http.SaveWorkflowRequest;
import tech.powerjob.common.request.query.InstancePageQuery;
import tech.powerjob.common.request.query.JobInfoQuery;
import tech.powerjob.common.response.*;
import java.util.List;
/**
 * PowerJobClient, the client for OpenAPI.
 *
 * @author tjq
 * @since 2023/3/5
 */
public interface IPowerJobClient {
/* ************* Job area ************* */
/** Export one job as an importable {@link SaveJobInfoRequest}. */
ResultDTO<SaveJobInfoRequest> exportJob(Long jobId);
/** Save one job: update when the request carries an id, otherwise create. Returns the jobId. */
ResultDTO<Long> saveJob(SaveJobInfoRequest request);
/** Copy one job; returns the id of the copy. */
ResultDTO<Long> copyJob(Long jobId);
/** Fetch the meta info of one job. */
ResultDTO<JobInfoDTO> fetchJob(Long jobId);
/** Fetch all jobs of the authenticated app. */
ResultDTO<List<JobInfoDTO>> fetchAllJob();
/** Query jobs by a structured query object. */
ResultDTO<List<JobInfoDTO>> queryJob(JobInfoQuery powerQuery);
/** Disable one job. */
ResultDTO<Void> disableJob(Long jobId);
/** Enable one job. */
ResultDTO<Void> enableJob(Long jobId);
/** Delete one job. */
ResultDTO<Void> deleteJob(Long jobId);
/** Run one job once after {@code delayMS} milliseconds; returns the instanceId. */
ResultDTO<Long> runJob(Long jobId, String instanceParams, long delayMS);
/** Run one job once using the full request object (richer result type). */
PowerResultDTO<Long> runJob(RunJobRequest runJobRequest);
/* ************* Instance API list ************* */
/** Stop one job instance. */
ResultDTO<Void> stopInstance(Long instanceId);
/** Cancel a job instance that has not started running yet. */
ResultDTO<Void> cancelInstance(Long instanceId);
/** Retry one finished job instance. */
ResultDTO<Void> retryInstance(Long instanceId);
/** Fetch the status code of one job instance. */
ResultDTO<Integer> fetchInstanceStatus(Long instanceId);
/** Fetch the detail of one job instance. */
ResultDTO<InstanceInfoDTO> fetchInstanceInfo(Long instanceId);
/** Page-query job instances. */
ResultDTO<PageResult<InstanceInfoDTO>> queryInstanceInfo(InstancePageQuery instancePageQuery);
/* ************* Workflow API list ************* */
/** Save one workflow: update when the request carries an id, otherwise create. Returns the workflowId. */
ResultDTO<Long> saveWorkflow(SaveWorkflowRequest request);
/** Copy one workflow; returns the id of the copy. */
ResultDTO<Long> copyWorkflow(Long workflowId);
/** Save workflow nodes; returns the saved node infos. */
ResultDTO<List<WorkflowNodeInfoDTO>> saveWorkflowNode(List<SaveWorkflowNodeRequest> requestList);
/** Fetch the meta info of one workflow. */
ResultDTO<WorkflowInfoDTO> fetchWorkflow(Long workflowId);
/** Disable one workflow. */
ResultDTO<Void> disableWorkflow(Long workflowId);
/** Enable one workflow. */
ResultDTO<Void> enableWorkflow(Long workflowId);
/** Delete one workflow. */
ResultDTO<Void> deleteWorkflow(Long workflowId);
/** Run one workflow once after {@code delayMS} milliseconds; returns the workflow instanceId. */
ResultDTO<Long> runWorkflow(Long workflowId, String initParams, long delayMS);
/* ************* Workflow Instance API list ************* */
/** Stop one workflow instance. */
ResultDTO<Void> stopWorkflowInstance(Long wfInstanceId);
/** Retry one workflow instance. */
ResultDTO<Void> retryWorkflowInstance(Long wfInstanceId);
/** Mark one node of a workflow instance as success. */
ResultDTO<Void> markWorkflowNodeAsSuccess(Long wfInstanceId, Long nodeId);
/** Fetch the detail of one workflow instance. */
ResultDTO<WorkflowInstanceInfoDTO> fetchWorkflowInstanceInfo(Long wfInstanceId);
}

View File

@ -0,0 +1,563 @@
package tech.powerjob.client;
import com.alibaba.fastjson.JSON;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import tech.powerjob.client.module.AppAuthRequest;
import tech.powerjob.client.module.AppAuthResult;
import tech.powerjob.client.service.PowerRequestBody;
import tech.powerjob.client.service.RequestService;
import tech.powerjob.client.service.impl.ClusterRequestServiceOkHttp3Impl;
import tech.powerjob.common.OpenAPIConstant;
import tech.powerjob.common.enums.EncryptType;
import tech.powerjob.common.enums.InstanceStatus;
import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.common.request.http.RunJobRequest;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.request.http.SaveWorkflowNodeRequest;
import tech.powerjob.common.request.http.SaveWorkflowRequest;
import tech.powerjob.common.request.query.InstancePageQuery;
import tech.powerjob.common.request.query.JobInfoQuery;
import tech.powerjob.common.response.*;
import tech.powerjob.common.serialize.JsonUtils;
import tech.powerjob.common.utils.CommonUtils;
import tech.powerjob.common.utils.DigestUtils;
import java.io.Closeable;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import static tech.powerjob.client.TypeStore.*;
/**
* PowerJobClient, the client for OpenAPI.
*
* @author tjq
* @since 2020/4/15
*/
@Slf4j
public class PowerJobClient implements IPowerJobClient, Closeable {
private Long appId;
private final RequestService requestService;
/**
 * Builds a client from the full {@link ClientConfig} and immediately authenticates the
 * app against the server.
 *
 * @param config full client configuration; addressList and appName are mandatory
 * @throws PowerJobException if authentication fails or no appId could be obtained
 */
public PowerJobClient(ClientConfig config) {
List<String> addressList = config.getAddressList();
String appName = config.getAppName();
// fail fast on missing mandatory config
CommonUtils.requireNonNull(addressList, "addressList can't be null!");
CommonUtils.requireNonNull(appName, "appName can't be null");
this.requestService = new ClusterRequestServiceOkHttp3Impl(config);
// authenticate: the password is sent as an MD5 digest, not in clear text
AppAuthRequest appAuthRequest = new AppAuthRequest();
appAuthRequest.setAppName(appName);
appAuthRequest.setEncryptedPassword(DigestUtils.md5(config.getPassword()));
appAuthRequest.setEncryptType(EncryptType.MD5.getCode());
String assertResponse = requestService.request(OpenAPIConstant.AUTH_APP, PowerRequestBody.newJsonRequestBody(appAuthRequest));
if (StringUtils.isNotEmpty(assertResponse)) {
ResultDTO<AppAuthResult> resultDTO = JSON.parseObject(assertResponse, APP_AUTH_RESULT_TYPE);
if (resultDTO.isSuccess()) {
appId = resultDTO.getData().getAppId();
} else {
throw new PowerJobException(resultDTO.getMessage());
}
}
// an empty auth response also ends up here: no appId means auth never succeeded
if (appId == null) {
throw new PowerJobException("appId is null, please check your config");
}
log.info("[PowerJobClient] [INIT] {}'s PowerJobClient bootstrap successfully", appName);
}
/**
 * Creates a client that reaches the server through a single (intranet) domain name.
 *
 * @param domain   server domain, e.g. powerjob-server.apple-inc.com
 * @param appName  name of the application registered on the server
 * @param password password of the application
 */
public PowerJobClient(String domain, String appName, String password) {
    this(new ClientConfig()
            .setAddressList(Lists.newArrayList(domain))
            .setAppName(appName)
            .setPassword(password));
}
/**
 * Creates a client from an explicit server address list.
 *
 * @param addressList IP:Port address list, e.g. 192.168.1.1:7700
 * @param appName     name of the application registered on the server
 * @param password    password of the application
 */
public PowerJobClient(List<String> addressList, String appName, String password) {
    this(new ClientConfig()
            .setAddressList(addressList)
            .setAppName(appName)
            .setPassword(password));
}
/* ************* Job area ************* */

/**
 * Builds the standard form body used by most OpenAPI endpoints: the given key/value
 * pair plus the authenticated appId. Extracted because the same 3-line map setup was
 * duplicated in every form-based method.
 */
private Map<String, String> formOf(String key, Object value) {
    Map<String, String> param = Maps.newHashMap();
    param.put(key, value.toString());
    param.put("appId", appId.toString());
    return param;
}

/**
 * Posts a form-encoded request to the given OpenAPI path and returns the raw response body.
 */
private String postForm(String path, Map<String, String> param) {
    return requestService.request(path, PowerRequestBody.newFormRequestBody(param));
}

/**
 * Save one job.
 * When an id exists in SaveJobInfoRequest it is an update, otherwise a create.
 *
 * @param request job meta info
 * @return jobId
 */
@Override
public ResultDTO<Long> saveJob(SaveJobInfoRequest request) {
    request.setAppId(appId);
    String post = requestService.request(OpenAPIConstant.SAVE_JOB, PowerRequestBody.newJsonRequestBody(request));
    return JSON.parseObject(post, LONG_RESULT_TYPE);
}

/**
 * Copy one job.
 *
 * @param jobId job id
 * @return id of the job copy
 */
@Override
public ResultDTO<Long> copyJob(Long jobId) {
    return JSON.parseObject(postForm(OpenAPIConstant.COPY_JOB, formOf("jobId", jobId)), LONG_RESULT_TYPE);
}

/**
 * Export one job as an importable {@link SaveJobInfoRequest}.
 *
 * @param jobId job id
 * @return the job as a save request
 */
@Override
public ResultDTO<SaveJobInfoRequest> exportJob(Long jobId) {
    return JSON.parseObject(postForm(OpenAPIConstant.EXPORT_JOB, formOf("jobId", jobId)), SAVE_JOB_INFO_REQUEST_RESULT_TYPE);
}

/**
 * Query job meta info by jobId.
 *
 * @param jobId job id
 * @return job meta info
 */
@Override
public ResultDTO<JobInfoDTO> fetchJob(Long jobId) {
    return JSON.parseObject(postForm(OpenAPIConstant.FETCH_JOB, formOf("jobId", jobId)), JOB_RESULT_TYPE);
}

/**
 * Query all jobs of the authenticated app.
 *
 * @return all job meta infos
 */
@Override
public ResultDTO<List<JobInfoDTO>> fetchAllJob() {
    Map<String, String> param = Maps.newHashMap();
    param.put("appId", appId.toString());
    return JSON.parseObject(postForm(OpenAPIConstant.FETCH_ALL_JOB, param), LIST_JOB_RESULT_TYPE);
}

/**
 * Query jobs by a structured query; the query is always scoped to the authenticated app.
 *
 * @param powerQuery job query
 * @return matching job meta infos
 */
@Override
public ResultDTO<List<JobInfoDTO>> queryJob(JobInfoQuery powerQuery) {
    powerQuery.setAppIdEq(appId);
    String post = requestService.request(OpenAPIConstant.QUERY_JOB, PowerRequestBody.newJsonRequestBody(powerQuery));
    return JSON.parseObject(post, LIST_JOB_RESULT_TYPE);
}

/**
 * Disable one job by jobId.
 *
 * @param jobId job id
 * @return standard result object
 */
@Override
public ResultDTO<Void> disableJob(Long jobId) {
    return JSON.parseObject(postForm(OpenAPIConstant.DISABLE_JOB, formOf("jobId", jobId)), VOID_RESULT_TYPE);
}

/**
 * Enable one job by jobId.
 *
 * @param jobId job id
 * @return standard result object
 */
@Override
public ResultDTO<Void> enableJob(Long jobId) {
    return JSON.parseObject(postForm(OpenAPIConstant.ENABLE_JOB, formOf("jobId", jobId)), VOID_RESULT_TYPE);
}

/**
 * Delete one job by jobId.
 *
 * @param jobId job id
 * @return standard result object
 */
@Override
public ResultDTO<Void> deleteJob(Long jobId) {
    return JSON.parseObject(postForm(OpenAPIConstant.DELETE_JOB, formOf("jobId", jobId)), VOID_RESULT_TYPE);
}

/**
 * Run a job once.
 *
 * @param jobId          id of the job to run
 * @param instanceParams runtime parameters of the job (TaskContext#instanceParams)
 * @param delayMS        delay in milliseconds
 * @return instanceId
 */
@Override
public ResultDTO<Long> runJob(Long jobId, String instanceParams, long delayMS) {
    RunJobRequest runJobRequest = new RunJobRequest().setJobId(jobId).setInstanceParams(instanceParams).setDelay(delayMS);
    return runJob(runJobRequest);
}

/** Run a job once, immediately and without instance params. */
public ResultDTO<Long> runJob(Long jobId) {
    return runJob(jobId, null, 0);
}

/**
 * Run a job once with the full request object (richer result type).
 *
 * @param runJobRequest full run request; appId is filled in automatically
 * @return instanceId
 */
@Override
public PowerResultDTO<Long> runJob(RunJobRequest runJobRequest) {
    runJobRequest.setAppId(appId);
    String post = requestService.request(OpenAPIConstant.RUN_JOB2, PowerRequestBody.newJsonRequestBody(runJobRequest));
    return JSON.parseObject(post, LONG_POWER_RESULT_TYPE);
}
/* ************* Instance API list ************* */

/**
 * Stop one job instance.
 *
 * @param instanceId instance id
 * @return standard result object
 */
@Override
public ResultDTO<Void> stopInstance(Long instanceId) {
    Map<String, String> form = Maps.newHashMap();
    form.put("instanceId", instanceId.toString());
    form.put("appId", appId.toString());
    return JSON.parseObject(
            requestService.request(OpenAPIConstant.STOP_INSTANCE, PowerRequestBody.newFormRequestBody(form)),
            VOID_RESULT_TYPE);
}

/**
 * Cancel a job instance that has not started running yet.
 * Notice: there must be enough time between this call and the expected trigger time of
 * the instance, otherwise cancellation is not guaranteed.
 *
 * @param instanceId instance id
 * @return standard result object
 */
@Override
public ResultDTO<Void> cancelInstance(Long instanceId) {
    Map<String, String> form = Maps.newHashMap();
    form.put("instanceId", instanceId.toString());
    form.put("appId", appId.toString());
    return JSON.parseObject(
            requestService.request(OpenAPIConstant.CANCEL_INSTANCE, PowerRequestBody.newFormRequestBody(form)),
            VOID_RESULT_TYPE);
}

/**
 * Retry a failed job instance.
 * Notice: only instances in a completion state (success, failure, manually stopped,
 * cancelled) can be retried; instances inside workflows are not supported yet.
 *
 * @param instanceId instance id
 * @return standard result object
 */
@Override
public ResultDTO<Void> retryInstance(Long instanceId) {
    Map<String, String> form = Maps.newHashMap();
    form.put("instanceId", instanceId.toString());
    form.put("appId", appId.toString());
    return JSON.parseObject(
            requestService.request(OpenAPIConstant.RETRY_INSTANCE, PowerRequestBody.newFormRequestBody(form)),
            VOID_RESULT_TYPE);
}

/**
 * Query the status of one job instance.
 *
 * @param instanceId instance id
 * @return status code, see {@link InstanceStatus}
 */
@Override
public ResultDTO<Integer> fetchInstanceStatus(Long instanceId) {
    Map<String, String> form = Maps.newHashMap();
    form.put("instanceId", instanceId.toString());
    form.put("appId", appId.toString());
    return JSON.parseObject(
            requestService.request(OpenAPIConstant.FETCH_INSTANCE_STATUS, PowerRequestBody.newFormRequestBody(form)),
            INTEGER_RESULT_TYPE);
}

/**
 * Query the detail of one job instance.
 *
 * @param instanceId instance id
 * @return instance detail
 */
@Override
public ResultDTO<InstanceInfoDTO> fetchInstanceInfo(Long instanceId) {
    Map<String, String> form = Maps.newHashMap();
    form.put("instanceId", instanceId.toString());
    form.put("appId", appId.toString());
    return JSON.parseObject(
            requestService.request(OpenAPIConstant.FETCH_INSTANCE_INFO, PowerRequestBody.newFormRequestBody(form)),
            INSTANCE_RESULT_TYPE);
}

/**
 * Page-query job instances; the query is always scoped to the authenticated app.
 *
 * @param instancePageQuery page query
 * @return one page of instance details
 */
@Override
public ResultDTO<PageResult<InstanceInfoDTO>> queryInstanceInfo(InstancePageQuery instancePageQuery) {
    instancePageQuery.setAppIdEq(appId);
    return JSON.parseObject(
            requestService.request(OpenAPIConstant.QUERY_INSTANCE, PowerRequestBody.newJsonRequestBody(instancePageQuery)),
            PAGE_INSTANCE_RESULT_TYPE);
}
/* ************* Workflow API list ************* */
/**
* Save one workflow
* When an ID exists in SaveWorkflowRequest, it is an update operation. Otherwise, it is a crate operation.
*
* @param request Workflow meta info
* @return workflowId
*/
@Override
public ResultDTO<Long> saveWorkflow(SaveWorkflowRequest request) {
request.setAppId(appId);
// 中坑记录:用 FastJSON 序列化会导致 Server 接收时 pEWorkflowDAG 为 null无语.jpg
String json = JsonUtils.toJSONStringUnsafe(request);
String post = requestService.request(OpenAPIConstant.SAVE_WORKFLOW, PowerRequestBody.newJsonRequestBody(json));
return JSON.parseObject(post, LONG_RESULT_TYPE);
}
/**
* Copy one workflow
*
* @param workflowId Workflow id
* @return Id of workflow copy
*/
@Override
public ResultDTO<Long> copyWorkflow(Long workflowId) {
Map<String, String> param = Maps.newHashMap();
param.put("workflowId", workflowId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.COPY_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, LONG_RESULT_TYPE);
}
/**
* 添加工作流节点
*
* @param requestList Node info list of Workflow
* @return Standard return object
*/
@Override
public ResultDTO<List<WorkflowNodeInfoDTO>> saveWorkflowNode(List<SaveWorkflowNodeRequest> requestList) {
for (SaveWorkflowNodeRequest saveWorkflowNodeRequest : requestList) {
saveWorkflowNodeRequest.setAppId(appId);
}
String json = JsonUtils.toJSONStringUnsafe(requestList);
String post = requestService.request(OpenAPIConstant.SAVE_WORKFLOW_NODE, PowerRequestBody.newJsonRequestBody(json));
return JSON.parseObject(post, WF_NODE_LIST_RESULT_TYPE);
}
/**
* Query Workflow by workflowId
*
* @param workflowId workflowId
* @return Workflow meta info
*/
@Override
public ResultDTO<WorkflowInfoDTO> fetchWorkflow(Long workflowId) {
Map<String, String> param = Maps.newHashMap();
param.put("workflowId", workflowId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.FETCH_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, WF_RESULT_TYPE);
}
/**
* Disable Workflow by workflowId
*
* @param workflowId workflowId
* @return Standard return object
*/
@Override
public ResultDTO<Void> disableWorkflow(Long workflowId) {
Map<String, String> param = Maps.newHashMap();
param.put("workflowId", workflowId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.DISABLE_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, VOID_RESULT_TYPE);
}
/**
* Enable Workflow by workflowId
*
* @param workflowId workflowId
* @return Standard return object
*/
@Override
public ResultDTO<Void> enableWorkflow(Long workflowId) {
Map<String, String> param = Maps.newHashMap();
param.put("workflowId", workflowId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.ENABLE_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, VOID_RESULT_TYPE);
}
/**
 * Delete a workflow by workflowId.
 *
 * @param workflowId workflow id
 * @return standard result wrapper with no payload
 */
@Override
public ResultDTO<Void> deleteWorkflow(Long workflowId) {
    Map<String, String> form = Maps.newHashMap();
    form.put("appId", appId.toString());
    form.put("workflowId", workflowId.toString());
    String response = requestService.request(OpenAPIConstant.DELETE_WORKFLOW, PowerRequestBody.newFormRequestBody(form));
    return JSON.parseObject(response, VOID_RESULT_TYPE);
}
/**
 * Trigger a workflow run.
 *
 * @param workflowId workflow id
 * @param initParams workflow startup parameters; included only when non-empty
 * @param delayMS    delay before the run starts, in milliseconds
 * @return workflow instance id
 */
@Override
public ResultDTO<Long> runWorkflow(Long workflowId, String initParams, long delayMS) {
    Map<String, String> form = Maps.newHashMap();
    form.put("appId", appId.toString());
    form.put("workflowId", workflowId.toString());
    form.put("delay", String.valueOf(delayMS));
    if (StringUtils.isNotEmpty(initParams)) {
        form.put("initParams", initParams);
    }
    String response = requestService.request(OpenAPIConstant.RUN_WORKFLOW, PowerRequestBody.newFormRequestBody(form));
    return JSON.parseObject(response, LONG_RESULT_TYPE);
}
/**
 * Convenience overload: run the workflow immediately with no startup parameters.
 *
 * @param workflowId workflow id
 * @return workflow instance id
 */
public ResultDTO<Long> runWorkflow(Long workflowId) {
    return runWorkflow(workflowId, null, 0);
}
/* ************* Workflow Instance API list ************* */
/**
 * Stop a running workflow instance.
 *
 * @param wfInstanceId workflow instance id
 * @return standard result wrapper with no payload
 */
@Override
public ResultDTO<Void> stopWorkflowInstance(Long wfInstanceId) {
    Map<String, String> form = Maps.newHashMap();
    form.put("appId", appId.toString());
    form.put("wfInstanceId", wfInstanceId.toString());
    String response = requestService.request(OpenAPIConstant.STOP_WORKFLOW_INSTANCE, PowerRequestBody.newFormRequestBody(form));
    return JSON.parseObject(response, VOID_RESULT_TYPE);
}
/**
 * Retry a workflow instance.
 *
 * @param wfInstanceId workflow instance id
 * @return standard result wrapper with no payload
 */
@Override
public ResultDTO<Void> retryWorkflowInstance(Long wfInstanceId) {
    Map<String, String> form = Maps.newHashMap();
    form.put("appId", appId.toString());
    form.put("wfInstanceId", wfInstanceId.toString());
    String response = requestService.request(OpenAPIConstant.RETRY_WORKFLOW_INSTANCE, PowerRequestBody.newFormRequestBody(form));
    return JSON.parseObject(response, VOID_RESULT_TYPE);
}
/**
 * Mark the given node of a workflow instance as success.
 *
 * @param wfInstanceId workflow instance id
 * @param nodeId       id of the node to mark
 * @return standard result wrapper with no payload
 */
@Override
public ResultDTO<Void> markWorkflowNodeAsSuccess(Long wfInstanceId, Long nodeId) {
    Map<String, String> form = Maps.newHashMap();
    form.put("appId", appId.toString());
    form.put("wfInstanceId", wfInstanceId.toString());
    form.put("nodeId", nodeId.toString());
    String response = requestService.request(OpenAPIConstant.MARK_WORKFLOW_NODE_AS_SUCCESS, PowerRequestBody.newFormRequestBody(form));
    return JSON.parseObject(response, VOID_RESULT_TYPE);
}
/**
 * Query detail about a workflow instance.
 *
 * @param wfInstanceId workflow instance id
 * @return workflow instance detail
 */
@Override
public ResultDTO<WorkflowInstanceInfoDTO> fetchWorkflowInstanceInfo(Long wfInstanceId) {
    Map<String, String> form = Maps.newHashMap();
    form.put("appId", appId.toString());
    form.put("wfInstanceId", wfInstanceId.toString());
    String response = requestService.request(OpenAPIConstant.FETCH_WORKFLOW_INSTANCE_INFO, PowerRequestBody.newFormRequestBody(form));
    return JSON.parseObject(response, WF_INSTANCE_RESULT_TYPE);
}
/**
 * Release the network resources held by the underlying request service.
 *
 * @throws IOException if closing the underlying client fails
 */
@Override
public void close() throws IOException {
    requestService.close();
}
}

View File

@ -0,0 +1,44 @@
package tech.powerjob.client;
import com.alibaba.fastjson.TypeReference;
import tech.powerjob.client.module.AppAuthResult;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.response.*;
import java.util.List;
/**
 * Central store of fastjson {@link TypeReference} instances used to deserialize
 * OpenAPI responses. Sharing these constants avoids rebuilding a reference object
 * on every request.
 *
 * @author tjq
 * @since 11/7/20
 */
public class TypeStore {

    public static final TypeReference<ResultDTO<AppAuthResult>> APP_AUTH_RESULT_TYPE = new TypeReference<ResultDTO<AppAuthResult>>(){};

    public static final TypeReference<ResultDTO<Void>> VOID_RESULT_TYPE = new TypeReference<ResultDTO<Void>>(){};

    public static final TypeReference<ResultDTO<Integer>> INTEGER_RESULT_TYPE = new TypeReference<ResultDTO<Integer>>(){};

    public static final TypeReference<ResultDTO<Long>> LONG_RESULT_TYPE = new TypeReference<ResultDTO<Long>>(){};

    public static final TypeReference<PowerResultDTO<Long>> LONG_POWER_RESULT_TYPE = new TypeReference<PowerResultDTO<Long>>(){};

    public static final TypeReference<ResultDTO<JobInfoDTO>> JOB_RESULT_TYPE = new TypeReference<ResultDTO<JobInfoDTO>>(){};

    public static final TypeReference<ResultDTO<SaveJobInfoRequest>> SAVE_JOB_INFO_REQUEST_RESULT_TYPE = new TypeReference<ResultDTO<SaveJobInfoRequest>>(){};

    public static final TypeReference<ResultDTO<List<JobInfoDTO>>> LIST_JOB_RESULT_TYPE = new TypeReference<ResultDTO<List<JobInfoDTO>>>(){};

    public static final TypeReference<ResultDTO<InstanceInfoDTO>> INSTANCE_RESULT_TYPE = new TypeReference<ResultDTO<InstanceInfoDTO>>() {};

    public static final TypeReference<ResultDTO<List<InstanceInfoDTO>>> LIST_INSTANCE_RESULT_TYPE = new TypeReference<ResultDTO<List<InstanceInfoDTO>>>(){};

    public static final TypeReference<ResultDTO<PageResult<InstanceInfoDTO>>> PAGE_INSTANCE_RESULT_TYPE = new TypeReference<ResultDTO<PageResult<InstanceInfoDTO>>>(){};

    public static final TypeReference<ResultDTO<WorkflowInfoDTO>> WF_RESULT_TYPE = new TypeReference<ResultDTO<WorkflowInfoDTO>>() {};

    public static final TypeReference<ResultDTO<WorkflowInstanceInfoDTO>> WF_INSTANCE_RESULT_TYPE = new TypeReference<ResultDTO<WorkflowInstanceInfoDTO>>() {};

    public static final TypeReference<ResultDTO<List<WorkflowNodeInfoDTO>>> WF_NODE_LIST_RESULT_TYPE = new TypeReference<ResultDTO<List<WorkflowNodeInfoDTO>>> () {};

    /**
     * Constant holder, not meant to be instantiated (Effective Java, Item 4).
     */
    private TypeStore() {
    }
}

View File

@ -0,0 +1,28 @@
package tech.powerjob.client.common;
import lombok.Getter;
/**
 * Transport protocol used by the client when building request URLs.
 *
 * @author tjq
 * @since 2024/2/20
 */
@Getter
public enum Protocol {

    HTTP("http"),

    HTTPS("https");

    // Lowercase URL scheme corresponding to this protocol.
    private final String protocol;

    Protocol(String protocol) {
        this.protocol = protocol;
    }

    @Override
    public String toString() {
        return protocol;
    }
}

View File

@ -0,0 +1,19 @@
package tech.powerjob.client.extension;
import java.util.List;
/**
 * Client extension SPI.
 *
 * @author tjq
 * @since 2024/8/11
 */
public interface ClientExtension {

    /**
     * Provide server addresses dynamically; intended for deployments where the
     * server runs on an elastic/dynamic cluster.
     *
     * @param context extension context
     * @return address list, in the same format as ClientConfig#addressList
     */
    List<String> addressProvider(ExtensionContext context);
}

View File

@ -0,0 +1,10 @@
package tech.powerjob.client.extension;
/**
 * Context object passed to {@link ClientExtension} callbacks.
 * Currently carries no data.
 *
 * @author tjq
 * @since 2024/8/11
 */
public class ExtensionContext {
}

View File

@ -0,0 +1,39 @@
package tech.powerjob.client.module;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import java.io.Serializable;
import java.util.Map;
/**
 * App authentication request.
 *
 * @author tjq
 * @since 2024/2/19
 */
@Getter
@Setter
@ToString
public class AppAuthRequest implements Serializable {

    /**
     * Application name.
     */
    private String appName;

    /**
     * Password, already encrypted using {@link #encryptType}.
     */
    private String encryptedPassword;

    /**
     * Encryption type applied to the password (e.g. MD5, see AppAuthClusterRequestService).
     */
    private String encryptType;

    /**
     * Extra parameters, allowing developers to pass additional data.
     */
    private Map<String, Object> extra;
}

View File

@ -0,0 +1,30 @@
package tech.powerjob.client.module;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import java.io.Serializable;
import java.util.Map;
/**
 * App authentication response.
 *
 * @author tjq
 * @since 2024/2/21
 */
@Getter
@Setter
@ToString
public class AppAuthResult implements Serializable {

    // Server-assigned application id; sent back on every authenticated request.
    private Long appId;

    // Access token proving successful authentication.
    private String token;

    /**
     * Extra parameters; developers with additional security requirements can extend via this map.
     */
    private Map<String, Object> extra;
}

View File

@ -0,0 +1,26 @@
package tech.powerjob.client.service;
import lombok.Data;
import lombok.experimental.Accessors;
import java.io.Serializable;
import java.util.Map;
/**
 * HTTP response wrapper: status, raw body and headers.
 *
 * @author tjq
 * @since 2024/8/10
 */
@Data
@Accessors(chain = true)
public class HttpResponse implements Serializable {

    // True when the HTTP status code equals 200 (set by the transport implementation).
    private boolean success;

    // Raw HTTP status code.
    private int code;

    // Raw response body as a string.
    private String response;

    // Response headers (single value per name).
    private Map<String, String> headers;
}

View File

@ -0,0 +1,47 @@
package tech.powerjob.client.service;
import com.google.common.collect.Maps;
import lombok.Getter;
import tech.powerjob.common.enums.MIME;
import java.util.Map;
/**
 * Request body: payload plus its content type and extra headers.
 * Instances are created through the static factory methods only.
 *
 * @author tjq
 * @since 2024/8/10
 */
@Getter
public class PowerRequestBody {

    private MIME mime;

    private Object payload;

    private final Map<String, String> headers = Maps.newHashMap();

    private PowerRequestBody() {
    }

    /**
     * Build a JSON request body; the payload is serialized by the transport layer.
     */
    public static PowerRequestBody newJsonRequestBody(Object data) {
        PowerRequestBody requestBody = new PowerRequestBody();
        requestBody.mime = MIME.APPLICATION_JSON;
        requestBody.payload = data;
        return requestBody;
    }

    /**
     * Build a form-encoded request body from key/value pairs.
     */
    public static PowerRequestBody newFormRequestBody(Map<String, String> form) {
        PowerRequestBody requestBody = new PowerRequestBody();
        requestBody.mime = MIME.APPLICATION_FORM;
        requestBody.payload = form;
        return requestBody;
    }

    /**
     * Merge extra headers into this request; null or empty input is ignored.
     */
    public void addHeaders(Map<String, String> hs) {
        if (hs != null && !hs.isEmpty()) {
            this.headers.putAll(hs);
        }
    }
}

View File

@ -0,0 +1,15 @@
package tech.powerjob.client.service;
import java.io.Closeable;
/**
 * Low-level request service: sends an OpenAPI request and returns the raw response body.
 *
 * @author tjq
 * @since 2024/2/20
 */
public interface RequestService extends Closeable {

    /**
     * Perform a request against the given OpenAPI path.
     *
     * @param path             OpenAPI sub-path (relative to the OpenAPI web root)
     * @param powerRequestBody request payload plus headers
     * @return raw response body as a string
     */
    String request(String path, PowerRequestBody powerRequestBody);
}

View File

@ -0,0 +1,113 @@
package tech.powerjob.client.service.impl;
import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.Maps;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import tech.powerjob.client.ClientConfig;
import tech.powerjob.client.TypeStore;
import tech.powerjob.client.module.AppAuthRequest;
import tech.powerjob.client.module.AppAuthResult;
import tech.powerjob.client.service.HttpResponse;
import tech.powerjob.client.service.PowerRequestBody;
import tech.powerjob.common.OpenAPIConstant;
import tech.powerjob.common.enums.EncryptType;
import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.common.response.ResultDTO;
import tech.powerjob.common.utils.DigestUtils;
import tech.powerjob.common.utils.MapUtils;
import java.util.Map;
/**
 * Cluster request service with application-level authentication.
 * Lazily authenticates, attaches the appId/token headers to every request, and
 * transparently re-authenticates once when the server reports the auth as invalid.
 *
 * @author tjq
 * @since 2024/2/21
 */
@Slf4j
abstract class AppAuthClusterRequestService extends ClusterRequestService {

    /**
     * Cached auth result. Declared volatile so a refresh performed by one thread is
     * visible to concurrent callers; concurrent refreshes may still happen but are
     * benign (the last writer wins with a valid token).
     */
    protected volatile AppAuthResult appAuthResult;

    public AppAuthClusterRequestService(ClientConfig config) {
        super(config);
    }

    @Override
    public String request(String path, PowerRequestBody powerRequestBody) {
        // Authenticate lazily on the first request.
        if (appAuthResult == null) {
            refreshAppAuthResult();
        }
        HttpResponse httpResponse = doRequest(path, powerRequestBody);

        // The server reports auth validity in a response header; a passing status means
        // the response is final and can be returned as-is.
        String authStatus = MapUtils.getString(httpResponse.getHeaders(), OpenAPIConstant.RESPONSE_HEADER_AUTH_STATUS);
        // Fall back to the lower-cased header name (proxies such as Nginx may lower-case
        // custom headers by default).
        if (StringUtils.isEmpty(authStatus)) {
            authStatus = MapUtils.getString(httpResponse.getHeaders(),
                    OpenAPIConstant.RESPONSE_HEADER_AUTH_STATUS.toLowerCase());
        }
        if (Boolean.TRUE.toString().equalsIgnoreCase(authStatus)) {
            return httpResponse.getResponse();
        }

        // Auth was rejected: refresh the credentials once and retry the request.
        log.warn("[PowerJobClient] auth failed[authStatus: {}], try to refresh the auth info", authStatus);
        refreshAppAuthResult();
        httpResponse = doRequest(path, powerRequestBody);
        // Return whatever the server answered; on repeated auth failure the body carries
        // the auth error (the server guarantees a non-null response body).
        return httpResponse.getResponse();
    }

    /**
     * Send the request with auth headers attached; any non-2xx transport result is fatal.
     */
    private HttpResponse doRequest(String path, PowerRequestBody powerRequestBody) {
        // Attach the appId/token auth headers.
        Map<String, String> authHeaders = buildAuthHeader();
        powerRequestBody.addHeaders(authHeaders);

        HttpResponse httpResponse = clusterHaRequest(path, powerRequestBody);
        // Any unsuccessful transport-level response is treated as a server error.
        if (!httpResponse.isSuccess()) {
            throw new PowerJobException("REMOTE_SERVER_INNER_EXCEPTION");
        }
        return httpResponse;
    }

    private Map<String, String> buildAuthHeader() {
        Map<String, String> authHeader = Maps.newHashMap();
        authHeader.put(OpenAPIConstant.REQUEST_HEADER_APP_ID, String.valueOf(appAuthResult.getAppId()));
        authHeader.put(OpenAPIConstant.REQUEST_HEADER_ACCESS_TOKEN, appAuthResult.getToken());
        return authHeader;
    }

    /**
     * Re-authenticate against the server and cache the new appId/token pair.
     */
    @SneakyThrows
    private void refreshAppAuthResult() {
        AppAuthRequest appAuthRequest = buildAppAuthRequest();
        HttpResponse httpResponse = clusterHaRequest(OpenAPIConstant.AUTH_APP, PowerRequestBody.newJsonRequestBody(appAuthRequest));
        if (!httpResponse.isSuccess()) {
            throw new PowerJobException("AUTH_APP_EXCEPTION!");
        }
        ResultDTO<AppAuthResult> authResultDTO = JSONObject.parseObject(httpResponse.getResponse(), TypeStore.APP_AUTH_RESULT_TYPE);
        if (!authResultDTO.isSuccess()) {
            throw new PowerJobException("AUTH_FAILED_" + authResultDTO.getMessage());
        }
        log.warn("[PowerJobClient] refresh auth info successfully!");
        this.appAuthResult = authResultDTO.getData();
    }

    /**
     * Build the auth request from the configured app name and MD5-hashed password.
     */
    protected AppAuthRequest buildAppAuthRequest() {
        AppAuthRequest appAuthRequest = new AppAuthRequest();
        appAuthRequest.setAppName(config.getAppName());
        appAuthRequest.setEncryptedPassword(DigestUtils.md5(config.getPassword()));
        appAuthRequest.setEncryptType(EncryptType.MD5.getCode());
        return appAuthRequest;
    }
}

View File

@ -0,0 +1,140 @@
package tech.powerjob.client.service.impl;
import lombok.extern.slf4j.Slf4j;
import tech.powerjob.client.ClientConfig;
import tech.powerjob.client.extension.ClientExtension;
import tech.powerjob.client.extension.ExtensionContext;
import tech.powerjob.client.service.HttpResponse;
import tech.powerjob.client.service.PowerRequestBody;
import tech.powerjob.client.service.RequestService;
import tech.powerjob.common.OpenAPIConstant;
import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.common.utils.CollectionUtils;
import javax.net.ssl.X509TrustManager;
import java.io.IOException;
import java.security.cert.X509Certificate;
import java.util.List;
import java.util.Objects;
/**
 * Cluster-aware request service: sends each request to the current server address
 * and fails over to the other configured addresses on network failure.
 *
 * @author tjq
 * @since 2024/2/21
 */
@Slf4j
abstract class ClusterRequestService implements RequestService {

    protected final ClientConfig config;

    /**
     * Current server address (the address of the last successful request).
     */
    protected String currentAddress;

    /**
     * URL layout: protocol://host[:port] + OpenAPI web root + sub-path.
     */
    protected static final String URL_PATTERN = "%s://%s%s%s";

    /**
     * Default timeout (seconds), used when the config does not specify one.
     */
    protected static final Integer DEFAULT_TIMEOUT_SECONDS = 2;

    protected static final int HTTP_SUCCESS_CODE = 200;

    public ClusterRequestService(ClientConfig config) {
        this.config = config;
        // Fail fast with a clear message instead of an obscure NPE/IndexOutOfBoundsException
        // when no server address was configured.
        if (CollectionUtils.isEmpty(config.getAddressList())) {
            throw new PowerJobException("addressList can't be null or empty!");
        }
        this.currentAddress = config.getAddressList().get(0);
    }

    /**
     * Perform one concrete HTTP request.
     *
     * @param url  full request url
     * @param body request body
     * @return response
     * @throws IOException on network failure (triggers failover in {@link #clusterHaRequest})
     */
    protected abstract HttpResponse sendHttpRequest(String url, PowerRequestBody body) throws IOException;

    /**
     * Cluster request with failover: try the current address first, then every
     * other known address until one answers.
     *
     * @param path             request path
     * @param powerRequestBody request body
     * @return response
     */
    protected HttpResponse clusterHaRequest(String path, PowerRequestBody powerRequestBody) {
        // Try the last known-good address first.
        String url = getUrl(path, currentAddress);
        try {
            return sendHttpRequest(url, powerRequestBody);
        } catch (IOException e) {
            log.warn("[ClusterRequestService] request url:{} failed, reason is {}.", url, e.toString());
        }

        List<String> addressList = fetchAddressList();
        // Failover: iterate over the remaining candidate addresses.
        for (String addr : addressList) {
            if (Objects.equals(addr, currentAddress)) {
                continue;
            }
            url = getUrl(path, addr);
            try {
                HttpResponse res = sendHttpRequest(url, powerRequestBody);
                log.warn("[ClusterRequestService] server change: from({}) -> to({}).", currentAddress, addr);
                currentAddress = addr;
                return res;
            } catch (IOException e) {
                log.warn("[ClusterRequestService] request url:{} failed, reason is {}.", url, e.toString());
            }
        }

        log.error("[ClusterRequestService] do post for path: {} failed because of no server available in {}.", path, addressList);
        throw new PowerJobException("no server available when send post request");
    }

    /**
     * Resolve candidate server addresses, preferring a dynamically provided list
     * (servers deployed on an elastic cluster) over the static config.
     */
    private List<String> fetchAddressList() {
        ClientExtension clientExtension = config.getClientExtension();
        if (clientExtension != null) {
            List<String> addressList = clientExtension.addressProvider(new ExtensionContext());
            if (!CollectionUtils.isEmpty(addressList)) {
                return addressList;
            }
        }
        return config.getAddressList();
    }

    /**
     * Trust manager that performs NO certificate verification.
     * X.509 is the ITU-T standard for public-key certificates used in PKI and hence
     * in SSL/TLS (the basis of HTTPS). Skipping verification exposes the connection
     * to man-in-the-middle attacks; use only in trusted environments.
     */
    protected static class NoVerifyX509TrustManager implements X509TrustManager {

        @Override
        public void checkClientTrusted(X509Certificate[] arg0, String arg1) {
            // intentionally no verification
        }

        @Override
        public void checkServerTrusted(X509Certificate[] arg0, String arg1) {
            // intentionally no verification
        }

        @Override
        public X509Certificate[] getAcceptedIssuers() {
            return new X509Certificate[0];
        }
    }

    private String getUrl(String path, String address) {
        String protocol = config.getProtocol().getProtocol();
        return String.format(URL_PATTERN, protocol, address, OpenAPIConstant.WEB_PATH, path);
    }
}

View File

@ -0,0 +1,148 @@
package tech.powerjob.client.service.impl;
import com.google.common.collect.Maps;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import okhttp3.*;
import tech.powerjob.client.ClientConfig;
import tech.powerjob.client.common.Protocol;
import tech.powerjob.client.service.HttpResponse;
import tech.powerjob.client.service.PowerRequestBody;
import tech.powerjob.common.OmsConstant;
import tech.powerjob.common.serialize.JsonUtils;
import javax.net.ssl.*;
import java.io.IOException;
import java.security.SecureRandom;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
/**
 * OkHttp3-based implementation of the cluster request service.
 *
 * @author tjq
 * @since 2024/2/20
 */
@Slf4j
public class ClusterRequestServiceOkHttp3Impl extends AppAuthClusterRequestService {

    private final OkHttpClient okHttpClient;

    public ClusterRequestServiceOkHttp3Impl(ClientConfig config) {
        super(config);
        // Build the HTTP client; HTTPS mode skips certificate & hostname verification.
        if (Protocol.HTTPS.equals(config.getProtocol())) {
            okHttpClient = initHttpsNoVerifyClient();
        } else {
            okHttpClient = initHttpClient();
        }
    }

    @Override
    @SuppressWarnings("unchecked")
    protected HttpResponse sendHttpRequest(String url, PowerRequestBody powerRequestBody) throws IOException {
        // Attach the user-configured common headers.
        powerRequestBody.addHeaders(config.getDefaultHeaders());

        Object obj = powerRequestBody.getPayload();
        RequestBody requestBody;
        switch (powerRequestBody.getMime()) {
            case APPLICATION_JSON:
                MediaType jsonType = MediaType.parse(OmsConstant.JSON_MEDIA_TYPE);
                String body = obj instanceof String ? (String) obj : JsonUtils.toJSONStringUnsafe(obj);
                requestBody = RequestBody.create(jsonType, body);
                break;
            case APPLICATION_FORM:
                FormBody.Builder formBuilder = new FormBody.Builder();
                // Safe cast: form bodies are always built as Map<String, String> (PowerRequestBody#newFormRequestBody).
                Map<String, String> formObj = (Map<String, String>) obj;
                formObj.forEach(formBuilder::add);
                requestBody = formBuilder.build();
                break;
            default:
                // Fail fast with a clear message instead of letting OkHttp NPE on a null body
                // if a new MIME value is added without handling here.
                throw new IllegalArgumentException("unsupported mime type: " + powerRequestBody.getMime());
        }

        Request request = new Request.Builder()
                .post(requestBody)
                .headers(Headers.of(powerRequestBody.getHeaders()))
                .url(url)
                .build();

        // try-with-resources guarantees the OkHttp response (and its body) is released.
        try (Response response = okHttpClient.newCall(request).execute()) {
            int code = response.code();
            HttpResponse httpResponse = new HttpResponse()
                    .setCode(code)
                    .setSuccess(code == HTTP_SUCCESS_CODE);
            ResponseBody body = response.body();
            if (body != null) {
                httpResponse.setResponse(body.string());
            }
            Headers respHeaders = response.headers();
            Set<String> headerNames = respHeaders.names();
            Map<String, String> respHeaderMap = Maps.newHashMap();
            headerNames.forEach(hdKey -> respHeaderMap.put(hdKey, respHeaders.get(hdKey)));
            httpResponse.setHeaders(respHeaderMap);
            return httpResponse;
        }
    }

    @SneakyThrows
    private OkHttpClient initHttpClient() {
        OkHttpClient.Builder okHttpBuilder = commonOkHttpBuilder();
        return okHttpBuilder.build();
    }

    /**
     * HTTPS client that trusts any certificate and any hostname.
     * SECURITY NOTE: vulnerable to man-in-the-middle attacks; intended for
     * internal/trusted deployments only.
     */
    @SneakyThrows
    private OkHttpClient initHttpsNoVerifyClient() {
        X509TrustManager trustManager = new NoVerifyX509TrustManager();
        SSLContext sslContext = SSLContext.getInstance("TLS");
        sslContext.init(null, new TrustManager[]{trustManager}, new SecureRandom());
        SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory();
        OkHttpClient.Builder okHttpBuilder = commonOkHttpBuilder();
        // Skip certificate verification.
        okHttpBuilder.sslSocketFactory(sslSocketFactory, trustManager);
        // Skip hostname verification.
        okHttpBuilder.hostnameVerifier((String hostname, SSLSession session) -> true);
        return okHttpBuilder.build();
    }

    private OkHttpClient.Builder commonOkHttpBuilder() {
        return new OkHttpClient.Builder()
                // read timeout
                .readTimeout(Optional.ofNullable(config.getReadTimeout()).orElse(DEFAULT_TIMEOUT_SECONDS), TimeUnit.SECONDS)
                // write timeout
                .writeTimeout(Optional.ofNullable(config.getWriteTimeout()).orElse(DEFAULT_TIMEOUT_SECONDS), TimeUnit.SECONDS)
                // connect timeout
                .connectTimeout(Optional.ofNullable(config.getConnectionTimeout()).orElse(DEFAULT_TIMEOUT_SECONDS), TimeUnit.SECONDS)
                // overall call timeout; NOTE(review): reuses the connection timeout value — confirm intended
                .callTimeout(Optional.ofNullable(config.getConnectionTimeout()).orElse(DEFAULT_TIMEOUT_SECONDS), TimeUnit.SECONDS);
    }

    @Override
    public void close() throws IOException {
        // Shut down the dispatcher's executor.
        okHttpClient.dispatcher().executorService().shutdown();
        // Evict pooled connections.
        okHttpClient.connectionPool().evictAll();
        // Close the cache if one was configured.
        Cache cache = okHttpClient.cache();
        if (cache != null) {
            cache.close();
        }
    }
}

View File

@ -0,0 +1,22 @@
package tech.powerjob.client.test;
import com.google.common.collect.Lists;
import org.junit.jupiter.api.BeforeAll;
import tech.powerjob.client.IPowerJobClient;
import tech.powerjob.client.PowerJobClient;
/**
 * Initializes the shared {@link tech.powerjob.client.PowerJobClient} used by all client test cases.
 * NOTE(review): requires a PowerJob server reachable at the hard-coded addresses below.
 *
 * @author tjq
 * @since 1/16/21
 */
public class ClientInitializer {

    protected static IPowerJobClient powerJobClient;

    @BeforeAll
    public static void initClient() throws Exception {
        // appName / password must match an application registered on the test server.
        powerJobClient = new PowerJobClient(Lists.newArrayList("127.0.0.1:7700", "127.0.0.1:7701"), "powerjob-worker-samples", "powerjob123");
    }
}

View File

@ -0,0 +1,172 @@
package tech.powerjob.client.test;
import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import tech.powerjob.client.PowerJobClient;
import tech.powerjob.common.enums.ExecuteType;
import tech.powerjob.common.enums.ProcessorType;
import tech.powerjob.common.enums.TimeExpressionType;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.request.query.InstancePageQuery;
import tech.powerjob.common.response.InstanceInfoDTO;
import tech.powerjob.common.response.JobInfoDTO;
import tech.powerjob.common.response.ResultDTO;
/**
 * Test cases for {@link PowerJobClient}.
 * NOTE(review): these are integration tests — they require a running PowerJob server
 * (see ClientInitializer) and pre-existing data for the hard-coded ids used below.
 *
 * @author tjq
 * @author Echo009
 * @since 2020/4/15
 */
@Slf4j
class TestClient extends ClientInitializer {

    // Id of a job assumed to already exist on the test server.
    public static final long JOB_ID = 1L;

    @Test
    void testSaveJob() {

        SaveJobInfoRequest newJobInfo = new SaveJobInfoRequest();
        // Saving with an existing id updates that job in place.
        newJobInfo.setId(JOB_ID);
        newJobInfo.setJobName("omsOpenAPIJobccccc" + System.currentTimeMillis());
        newJobInfo.setJobDescription("test OpenAPI" + System.currentTimeMillis());
        newJobInfo.setJobParams("{'aa':'bb'}");
        newJobInfo.setTimeExpressionType(TimeExpressionType.CRON);
        newJobInfo.setTimeExpression("0 0 * * * ? ");
        newJobInfo.setExecuteType(ExecuteType.STANDALONE);
        newJobInfo.setProcessorType(ProcessorType.BUILT_IN);
        newJobInfo.setProcessorInfo("tech.powerjob.samples.processors.StandaloneProcessorDemo");
        newJobInfo.setDesignatedWorkers("");

        newJobInfo.setMinCpuCores(1.1);
        newJobInfo.setMinMemorySpace(1.2);
        newJobInfo.setMinDiskSpace(1.3);

        log.info("[TestClient] [testSaveJob] SaveJobInfoRequest: {}", JSONObject.toJSONString(newJobInfo));

        ResultDTO<Long> resultDTO = powerJobClient.saveJob(newJobInfo);
        log.info("[TestClient] [testSaveJob] result: {}", JSONObject.toJSONString(resultDTO));
        Assertions.assertNotNull(resultDTO);
    }

    @Test
    void testCopyJob() {
        ResultDTO<Long> copyJobRes = powerJobClient.copyJob(JOB_ID);
        System.out.println(JSONObject.toJSONString(copyJobRes));
        Assertions.assertNotNull(copyJobRes);
    }

    @Test
    void testExportJob() {
        ResultDTO<SaveJobInfoRequest> exportJobRes = powerJobClient.exportJob(JOB_ID);
        System.out.println(JSONObject.toJSONString(exportJobRes));
    }

    @Test
    void testFetchJob() {
        ResultDTO<JobInfoDTO> fetchJob = powerJobClient.fetchJob(JOB_ID);
        System.out.println(JSONObject.toJSONString(fetchJob));
        Assertions.assertNotNull(fetchJob);
    }

    @Test
    void testDisableJob() {
        ResultDTO<Void> res = powerJobClient.disableJob(JOB_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testEnableJob() {
        ResultDTO<Void> res = powerJobClient.enableJob(JOB_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testDeleteJob() {
        ResultDTO<Void> res = powerJobClient.deleteJob(JOB_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testRun() {
        // Run immediately (no instance params, no delay).
        ResultDTO<Long> res = powerJobClient.runJob(JOB_ID, null, 0);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testRunJobDelay() {
        // Run with instance params after a 60s delay.
        ResultDTO<Long> res = powerJobClient.runJob(JOB_ID, "this is instanceParams", 60000);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testFetchInstanceInfo() {
        // NOTE(review): hard-coded instance id; assumes this instance exists on the test server.
        ResultDTO<InstanceInfoDTO> res = powerJobClient.fetchInstanceInfo(702482902331424832L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testQueryInstanceInfo() {
        InstancePageQuery instancePageQuery = new InstancePageQuery();
        instancePageQuery.setJobIdEq(11L);
        instancePageQuery.setSortBy("actualTriggerTime");
        instancePageQuery.setAsc(true);
        instancePageQuery.setPageSize(3);
        instancePageQuery.setStatusIn(Lists.newArrayList(1,2,5));

        TestUtils.output(powerJobClient.queryInstanceInfo(instancePageQuery));
    }

    @Test
    void testStopInstance() {
        ResultDTO<Void> res = powerJobClient.stopInstance(702482902331424832L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testFetchInstanceStatus() {
        ResultDTO<Integer> res = powerJobClient.fetchInstanceStatus(702482902331424832L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testCancelInstanceInTimeWheel() {
        // Start with a 20s delay so the instance is still in the time wheel when cancelled.
        ResultDTO<Long> startRes = powerJobClient.runJob(JOB_ID, "start by OhMyClient", 20000);
        System.out.println("runJob result: " + JSONObject.toJSONString(startRes));
        ResultDTO<Void> cancelRes = powerJobClient.cancelInstance(startRes.getData());
        System.out.println("cancelJob result: " + JSONObject.toJSONString(cancelRes));
        Assertions.assertTrue(cancelRes.isSuccess());
    }

    // Manual test kept for reference: requires restarting the server mid-test,
    // so it cannot run unattended.
    //    @Test
    //    @SneakyThrows
    //    void testCancelInstanceInDatabase() {
    //        ResultDTO<Long> startRes = powerJobClient.runJob(15L, "start by OhMyClient", 2000000);
    //        System.out.println("runJob result: " + JSONObject.toJSONString(startRes));
    //
    //        // Restart server manually and clear all the data in time wheeler.
    //        TimeUnit.MINUTES.sleep(1);
    //
    //        ResultDTO<Void> cancelRes = powerJobClient.cancelInstance(startRes.getData());
    //        System.out.println("cancelJob result: " + JSONObject.toJSONString(cancelRes));
    //        Assertions.assertTrue(cancelRes.isSuccess());
    //    }

    @Test
    void testRetryInstance() {
        // NOTE(review): hard-coded instance id; assumes this instance exists on the test server.
        ResultDTO<Void> res = powerJobClient.retryInstance(169557545206153344L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
}

View File

@ -0,0 +1,35 @@
package tech.powerjob.client.test;
import com.alibaba.fastjson.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Test;
import tech.powerjob.common.response.JobInfoDTO;
import tech.powerjob.common.response.ResultDTO;
import tech.powerjob.common.utils.CommonUtils;
/**
 * Manual test for cluster failover (HA): run it, then start/stop servers by hand
 * and verify that requests keep succeeding thanks to client-side failover.
 *
 * @author tjq
 * @since 2024/8/11
 */
@Slf4j
public class TestClusterHA extends ClientInitializer {

    @Test
    void testHa() {

        // Start and stop servers manually while this loop is running.
        for (int i = 0; i < 1000000; i++) {
            CommonUtils.easySleep(100);
            ResultDTO<JobInfoDTO> jobInfoDTOResultDTO = powerJobClient.fetchJob(1L);
            log.info("[TestClusterHA] response: {}", JSONObject.toJSONString(jobInfoDTOResultDTO));
            // Any failed request means failover did not work; abort loudly.
            if (!jobInfoDTOResultDTO.isSuccess()) {
                throw new RuntimeException("request failed!");
            }
        }
    }
}

View File

@ -0,0 +1,45 @@
package tech.powerjob.client.test;
import tech.powerjob.common.enums.ExecuteType;
import tech.powerjob.common.enums.ProcessorType;
import tech.powerjob.common.enums.TimeExpressionType;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.response.ResultDTO;
import org.junit.jupiter.api.Test;
import java.util.concurrent.ForkJoinPool;
/**
 * Exercises server-side concurrency control (maxInstanceNum = 1) by firing many
 * runJob requests in parallel; the outcome must be inspected manually in the output.
 *
 * @author tjq
 * @since 1/16/21
 */
class TestConcurrencyControl extends ClientInitializer {

    @Test
    void testRunJobConcurrencyControl() {

        SaveJobInfoRequest saveJobInfoRequest = new SaveJobInfoRequest();
        saveJobInfoRequest.setJobName("test concurrency control job");
        saveJobInfoRequest.setProcessorType(ProcessorType.SHELL);
        saveJobInfoRequest.setProcessorInfo("pwd");
        saveJobInfoRequest.setExecuteType(ExecuteType.STANDALONE);
        saveJobInfoRequest.setTimeExpressionType(TimeExpressionType.API);
        // At most one instance may run at a time; extra concurrent triggers should be rejected.
        saveJobInfoRequest.setMaxInstanceNum(1);

        Long jobId = powerJobClient.saveJob(saveJobInfoRequest).getData();

        System.out.println("jobId: " + jobId);

        ForkJoinPool pool = new ForkJoinPool(32);

        for (int i = 0; i < 100; i++) {
            String params = "index-" + i;
            pool.execute(() -> {
                ResultDTO<Long> res = powerJobClient.runJob(jobId, params, 0);
                System.out.println(params + ": " + res);
            });
        }
        // NOTE(review): the test returns without awaiting the pool, so some submissions may
        // still be in flight when JUnit tears down — confirm whether an awaitQuiescence is desired.
    }
}

View File

@ -0,0 +1,44 @@
package tech.powerjob.client.test;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Test;
import tech.powerjob.common.request.http.RunJobRequest;
import tech.powerjob.common.request.query.InstancePageQuery;
import tech.powerjob.common.response.InstanceInfoDTO;
import tech.powerjob.common.response.PageResult;
import tech.powerjob.common.response.PowerResultDTO;
import tech.powerjob.common.response.ResultDTO;
import tech.powerjob.common.serialize.JsonUtils;
/**
 * Tests for instance-related OpenAPI: verifies the outerKey / extendValue round-trip
 * (run a job with a unique outerKey, then query the instance back by that key).
 *
 * @author tjq
 * @since 2025/8/17
 */
@Slf4j
public class TestInstance extends ClientInitializer {

    // Id of a job assumed to already exist on the test server.
    private static final Long jobId = 1L;

    @Test
    void testOuterKeyAndExtendValue() {

        // Unique business key so the later query matches exactly this run.
        String outerKey = "ok_" + System.currentTimeMillis();

        RunJobRequest runJobRequest = new RunJobRequest()
                .setJobId(jobId)
                .setOuterKey(outerKey).setExtendValue("TEST_EXT_VALUE")
                .setInstanceParams("OpenAPI-Params")
                .setDelay(1000L);
        PowerResultDTO<Long> runJobResult = powerJobClient.runJob(runJobRequest);
        log.info("[TestInstance] runJobResult: {}", runJobResult);

        Long instanceId = runJobResult.getData();

        InstancePageQuery instancePageQuery = new InstancePageQuery();
        instancePageQuery.setOuterKeyEq(outerKey);

        ResultDTO<PageResult<InstanceInfoDTO>> pageResultResultDTO = powerJobClient.queryInstanceInfo(instancePageQuery);
        log.info("[TestInstance] queryInstanceInfo: {}", JsonUtils.toJSONString(pageResultResultDTO));

        // NOTE(review): a bare `assert` only fires when the JVM runs with -ea;
        // consider JUnit Assertions for an always-on check.
        assert pageResultResultDTO.getData().getData().get(0).getInstanceId().equals(instanceId);
    }
}

View File

@ -0,0 +1,48 @@
package tech.powerjob.client.test;
import com.alibaba.fastjson.JSON;
import tech.powerjob.common.request.query.JobInfoQuery;
import tech.powerjob.common.enums.ExecuteType;
import tech.powerjob.common.enums.ProcessorType;
import tech.powerjob.common.response.JobInfoDTO;
import tech.powerjob.common.response.ResultDTO;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.time.DateUtils;
import org.junit.jupiter.api.Test;
import java.util.Date;
import java.util.List;
/**
 * Tests for the job query OpenAPI.
 *
 * @author tjq
 * @since 1/16/21
 */
@Slf4j
class TestQuery extends ClientInitializer {

    @Test
    void testFetchAllJob() {
        ResultDTO<List<JobInfoDTO>> allJobRes = powerJobClient.fetchAllJob();
        System.out.println(JSON.toJSONString(allJobRes));
    }

    @Test
    void testQueryJob() {
        // Compound query exercising id ranges, LIKE filters on name/processor,
        // create/modify time-range filters and IN filters on execute/processor type.
        JobInfoQuery jobInfoQuery = new JobInfoQuery()
                .setIdGt(-1L)
                .setIdLt(10086L)
                .setJobNameLike("DAG")
                .setGmtModifiedGt(DateUtils.addYears(new Date(), -10))
                .setGmtCreateLt(DateUtils.addDays(new Date(), 10))
                .setExecuteTypeIn(Lists.newArrayList(ExecuteType.STANDALONE.getV(), ExecuteType.BROADCAST.getV(), ExecuteType.MAP_REDUCE.getV()))
                .setProcessorTypeIn(Lists.newArrayList(ProcessorType.BUILT_IN.getV(), ProcessorType.SHELL.getV(), ProcessorType.EXTERNAL.getV()))
                .setProcessorInfoLike("tech.powerjob");

        ResultDTO<List<JobInfoDTO>> jobQueryResult = powerJobClient.queryJob(jobInfoQuery);
        System.out.println(JSON.toJSONString(jobQueryResult));
        System.out.println(jobQueryResult.getData().size());
    }
}

View File

@ -0,0 +1,17 @@
package tech.powerjob.client.test;
import com.alibaba.fastjson.JSONObject;
/**
 * Shared helpers for client test cases.
 *
 * @author tjq
 * @since 2024/11/21
 */
public class TestUtils {

    /**
     * Serialize the given value to JSON and print it to stdout.
     *
     * @param v any value serializable by fastjson
     */
    public static void output(Object v) {
        System.out.println(JSONObject.toJSONString(v));
    }
}

View File

@ -0,0 +1,191 @@
package tech.powerjob.client.test;
import com.alibaba.fastjson.JSONObject;
import tech.powerjob.client.PowerJobClient;
import tech.powerjob.common.enums.ExecuteType;
import tech.powerjob.common.enums.ProcessorType;
import tech.powerjob.common.enums.TimeExpressionType;
import tech.powerjob.common.enums.WorkflowNodeType;
import tech.powerjob.common.model.PEWorkflowDAG;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.request.http.SaveWorkflowNodeRequest;
import tech.powerjob.common.request.http.SaveWorkflowRequest;
import tech.powerjob.common.response.ResultDTO;
import tech.powerjob.common.response.WorkflowInfoDTO;
import tech.powerjob.common.response.WorkflowInstanceInfoDTO;
import tech.powerjob.common.response.WorkflowNodeInfoDTO;
import com.google.common.collect.Lists;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.List;
/**
* Test cases for {@link PowerJobClient} workflow.
*
* @author tjq
* @author Echo009
* @since 2020/6/2
*/
class TestWorkflow extends ClientInitializer {

    /**
     * Workflow id under test.
     * NOTE(review): assumes a workflow with this id already exists on the target server.
     */
    private static final long WF_ID = 2;

    /**
     * Seeds five standalone jobs (DAG-Node-0 .. DAG-Node-4) that the other
     * tests wire into a workflow DAG.
     */
    @Test
    void initTestData() {
        SaveJobInfoRequest base = new SaveJobInfoRequest();
        base.setJobName("DAG-Node-");
        base.setTimeExpressionType(TimeExpressionType.WORKFLOW);
        base.setExecuteType(ExecuteType.STANDALONE);
        base.setProcessorType(ProcessorType.BUILT_IN);
        base.setProcessorInfo("tech.powerjob.samples.workflow.WorkflowStandaloneProcessor");
        for (int i = 0; i < 5; i++) {
            // deep-copy the template via a JSON round-trip, then give each job a unique name
            SaveJobInfoRequest request = JSONObject.parseObject(JSONObject.toJSONBytes(base), SaveJobInfoRequest.class);
            request.setJobName(request.getJobName() + i);
            ResultDTO<Long> res = powerJobClient.saveJob(request);
            System.out.println(res);
            Assertions.assertNotNull(res);
        }
    }

    /**
     * Creates a workflow, adds three job nodes, wires them into a linear DAG
     * (node0 -> node1 -> node2) and re-saves the complete definition.
     */
    @Test
    void testSaveWorkflow() {
        SaveWorkflowRequest req = new SaveWorkflowRequest();
        req.setWfName("workflow-by-client");
        req.setWfDescription("created by client");
        req.setEnable(true);
        req.setTimeExpressionType(TimeExpressionType.API);
        System.out.println("req ->" + JSONObject.toJSON(req));
        ResultDTO<Long> res = powerJobClient.saveWorkflow(req);
        System.out.println(res);
        Assertions.assertNotNull(res);
        req.setId(res.getData());
        // create the workflow nodes (all backed by job id 1)
        SaveWorkflowNodeRequest saveWorkflowNodeRequest1 = new SaveWorkflowNodeRequest();
        saveWorkflowNodeRequest1.setJobId(1L);
        saveWorkflowNodeRequest1.setNodeName("DAG-Node-1");
        saveWorkflowNodeRequest1.setType(WorkflowNodeType.JOB.getCode());
        SaveWorkflowNodeRequest saveWorkflowNodeRequest2 = new SaveWorkflowNodeRequest();
        saveWorkflowNodeRequest2.setJobId(1L);
        saveWorkflowNodeRequest2.setNodeName("DAG-Node-2");
        saveWorkflowNodeRequest2.setType(WorkflowNodeType.JOB.getCode());
        SaveWorkflowNodeRequest saveWorkflowNodeRequest3 = new SaveWorkflowNodeRequest();
        saveWorkflowNodeRequest3.setJobId(1L);
        saveWorkflowNodeRequest3.setNodeName("DAG-Node-3");
        saveWorkflowNodeRequest3.setType(WorkflowNodeType.JOB.getCode());
        List<WorkflowNodeInfoDTO> nodeList = powerJobClient.saveWorkflowNode(Lists.newArrayList(saveWorkflowNodeRequest1,saveWorkflowNodeRequest2,saveWorkflowNodeRequest3)).getData();
        System.out.println(nodeList);
        Assertions.assertNotNull(nodeList);
        // build the DAG: three vertices, edges 0->1 and 1->2
        List<PEWorkflowDAG.Node> nodes = Lists.newLinkedList();
        List<PEWorkflowDAG.Edge> edges = Lists.newLinkedList();
        nodes.add(new PEWorkflowDAG.Node(nodeList.get(0).getId()));
        nodes.add(new PEWorkflowDAG.Node(nodeList.get(1).getId()));
        nodes.add(new PEWorkflowDAG.Node(nodeList.get(2).getId()));
        edges.add(new PEWorkflowDAG.Edge(nodeList.get(0).getId(), nodeList.get(1).getId()));
        edges.add(new PEWorkflowDAG.Edge(nodeList.get(1).getId(), nodeList.get(2).getId()));
        PEWorkflowDAG peWorkflowDAG = new PEWorkflowDAG(nodes, edges);
        // save the complete workflow definition (now including the DAG)
        req.setDag(peWorkflowDAG);
        res = powerJobClient.saveWorkflow(req);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    /** Copies workflow {@link #WF_ID}; the response carries the new workflow id. */
    @Test
    void testCopyWorkflow() {
        ResultDTO<Long> res = powerJobClient.copyWorkflow(WF_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    /** Disables workflow {@link #WF_ID}. */
    @Test
    void testDisableWorkflow() {
        ResultDTO<Void> res = powerJobClient.disableWorkflow(WF_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    /** Deletes workflow {@link #WF_ID}. */
    @Test
    void testDeleteWorkflow() {
        ResultDTO<Void> res = powerJobClient.deleteWorkflow(WF_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    /** Re-enables workflow {@link #WF_ID}. */
    @Test
    void testEnableWorkflow() {
        ResultDTO<Void> res = powerJobClient.enableWorkflow(WF_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    /** Fetches the definition of workflow {@link #WF_ID}. */
    @Test
    void testFetchWorkflowInfo() {
        ResultDTO<WorkflowInfoDTO> res = powerJobClient.fetchWorkflow(WF_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    /** Triggers workflow {@link #WF_ID} immediately (no init params, no delay). */
    @Test
    void testRunWorkflow() {
        ResultDTO<Long> res = powerJobClient.runWorkflow(WF_ID, null, 0);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    /** Stops a workflow instance. NOTE(review): hard-coded instance id from a previous run. */
    @Test
    void testStopWorkflowInstance() {
        ResultDTO<Void> res = powerJobClient.stopWorkflowInstance(149962433421639744L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    /** Retries a (failed) workflow instance; same hard-coded instance id. */
    @Test
    void testRetryWorkflowInstance() {
        ResultDTO<Void> res = powerJobClient.retryWorkflowInstance(149962433421639744L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    /** Marks node 1 of the workflow instance as successful. */
    @Test
    void testMarkWorkflowNodeAsSuccess() {
        ResultDTO<Void> res = powerJobClient.markWorkflowNodeAsSuccess(149962433421639744L, 1L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    /** Fetches runtime info of a workflow instance; same hard-coded instance id. */
    @Test
    void testFetchWfInstanceInfo() {
        ResultDTO<WorkflowInstanceInfoDTO> res = powerJobClient.fetchWorkflowInstanceInfo(149962433421639744L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    /** Triggers workflow {@link #WF_ID} with init params and a 90s delay. */
    @Test
    void testRunWorkflowPlus() {
        ResultDTO<Long> res = powerJobClient.runWorkflow(WF_ID, "this is init Params 2", 90000);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
}

98
powerjob-common/pom.xml Normal file
View File

@ -0,0 +1,98 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId>
<version>5.1.2</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-common</artifactId>
<name>powerjob-common</name>
<version>5.1.2</version>
<packaging>jar</packaging>
<properties>
<slf4j.version>1.7.36</slf4j.version>
<commons.lang.version>3.12.0</commons.lang.version>
<commons.io.version>2.11.0</commons.io.version>
<guava.version>31.1-jre</guava.version>
<okhttp.version>3.14.9</okhttp.version>
<kryo.version>5.3.0</kryo.version>
<jackson.version>2.14.3</jackson.version>
<junit.version>5.9.0</junit.version>
</properties>
<dependencies>
<!-- slf4j -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-lang3 -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>${commons.lang.version}</version>
</dependency>
<!-- guava -->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<!-- OKHttp -->
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
<version>${okhttp.version}</version>
</dependency>
<!-- commons-io -->
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>${commons.io.version}</version>
</dependency>
<!-- Kryo: ultra-high-performance serialization framework -->
<dependency>
<groupId>com.esotericsoftware.kryo</groupId>
<artifactId>kryo5</artifactId>
<version>${kryo.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
</dependency>
<!-- 解决 Java8 data/time 类型处理问题 #869 -->
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jsr310</artifactId>
<version>${jackson.version}</version>
</dependency>
<!-- Junit tests -->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>${junit.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,24 @@
package tech.powerjob.common;
/**
* Container constants.
*
* @author tjq
* @since 2020/5/15
*/
public class ContainerConstant {

    /**
     * Constant holder — never instantiated. Private constructor added for
     * consistency with the project's other constant classes (e.g. ones that
     * already declare one) and to prevent accidental instantiation.
     */
    private ContainerConstant() {
    }

    /**
     * Spring-context configuration file name of the container.
     */
    public static final String SPRING_CONTEXT_FILE_NAME = "oms-worker-container-spring-context.xml";

    /**
     * Property file name of the container.
     */
    public static final String CONTAINER_PROPERTIES_FILE_NAME = "oms-worker-container.properties";

    /**
     * Key (in the container properties file) whose value is the container's package name.
     */
    public static final String CONTAINER_PACKAGE_NAME_KEY = "PACKAGE_NAME";
}

View File

@ -0,0 +1,35 @@
package tech.powerjob.common;
/**
* Common constants.
*
* @author tjq
* @since 2020/5/31
*/
public class OmsConstant {

    /**
     * Constant holder — never instantiated (utility-class convention).
     */
    private OmsConstant() {
    }

    /**
     * Root package name of the project.
     */
    public static final String PACKAGE = "tech.powerjob";

    /** Default server-side port for the AKKA transport. */
    public static final int SERVER_DEFAULT_AKKA_PORT = 10086;

    /** Default server-side port for the HTTP transport. */
    public static final int SERVER_DEFAULT_HTTP_PORT = 10010;

    /** Second-precision timestamp pattern. */
    public static final String TIME_PATTERN = "yyyy-MM-dd HH:mm:ss";

    /** Millisecond-precision timestamp pattern. */
    public static final String TIME_PATTERN_PLUS = "yyyy-MM-dd HH:mm:ss.SSS";

    /** Placeholder shown when a value is not available. */
    public static final String NONE = "N/A";

    public static final String COMMA = ",";

    public static final String AND = "&";

    public static final String EQUAL = "=";

    /** CRLF — fixed line separator, deliberately platform-independent. */
    public static final String LINE_SEPARATOR = "\r\n";

    public static final String HTTP_HEADER_CONTENT_TYPE = "Content-Type";

    public static final String JSON_MEDIA_TYPE = "application/json; charset=utf-8";

    /** The literal text "null". */
    public static final String NULL = "null";
}

View File

@ -0,0 +1,70 @@
package tech.powerjob.common;
/**
* OpenAPI 常量
*
* @author tjq
* @since 2020/4/15
*/
public class OpenAPIConstant {

    // constant holder — not instantiable
    private OpenAPIConstant(){
    }

    /** Base path of all OpenAPI endpoints. */
    public static final String WEB_PATH = "/openApi";

    public static final String ASSERT = "/assert";

    public static final String AUTH_APP = "/authApp";

    /* ************* Job section ************* */

    public static final String SAVE_JOB = "/saveJob";

    public static final String COPY_JOB = "/copyJob";

    public static final String EXPORT_JOB = "/exportJob";

    public static final String FETCH_JOB = "/fetchJob";

    public static final String FETCH_ALL_JOB = "/fetchAllJob";

    public static final String QUERY_JOB = "/queryJob";

    public static final String DISABLE_JOB = "/disableJob";

    public static final String ENABLE_JOB = "/enableJob";

    public static final String DELETE_JOB = "/deleteJob";

    public static final String RUN_JOB = "/runJob";

    public static final String RUN_JOB2 = "/runJob2";

    /* ************* Instance section ************* */

    public static final String STOP_INSTANCE = "/stopInstance";

    public static final String CANCEL_INSTANCE = "/cancelInstance";

    public static final String RETRY_INSTANCE = "/retryInstance";

    public static final String FETCH_INSTANCE_STATUS = "/fetchInstanceStatus";

    public static final String FETCH_INSTANCE_INFO = "/fetchInstanceInfo";

    public static final String QUERY_INSTANCE = "/queryInstance";

    /* ************* Workflow section ************* */

    public static final String SAVE_WORKFLOW = "/saveWorkflow";

    public static final String COPY_WORKFLOW = "/copyWorkflow";

    public static final String FETCH_WORKFLOW = "/fetchWorkflow";

    public static final String DISABLE_WORKFLOW = "/disableWorkflow";

    public static final String ENABLE_WORKFLOW = "/enableWorkflow";

    public static final String DELETE_WORKFLOW = "/deleteWorkflow";

    public static final String RUN_WORKFLOW = "/runWorkflow";

    // NOTE: constant name says SAVE but the path is "/addWorkflowNode" — path is the wire contract, left as-is
    public static final String SAVE_WORKFLOW_NODE = "/addWorkflowNode";

    /* ************* WorkflowInstance section ************* */

    public static final String STOP_WORKFLOW_INSTANCE = "/stopWfInstance";

    public static final String RETRY_WORKFLOW_INSTANCE = "/retryWfInstance";

    public static final String FETCH_WORKFLOW_INSTANCE_INFO = "/fetchWfInstanceInfo";

    public static final String MARK_WORKFLOW_NODE_AS_SUCCESS = "/markWorkflowNodeAsSuccess";

    /* ************* Authentication section ************* */

    /** Request header carrying the OpenAPI access token. */
    public static final String REQUEST_HEADER_ACCESS_TOKEN = "X-POWERJOB-ACCESS-TOKEN";

    /** Request header carrying the app id. */
    public static final String REQUEST_HEADER_APP_ID = "X-POWERJOB-APP-ID";

    /** Response header reporting whether authentication passed. */
    public static final String RESPONSE_HEADER_AUTH_STATUS = "X-POWERJOB-AUTH-PASSED";
}

View File

@ -0,0 +1,80 @@
package tech.powerjob.common;
import java.net.NetworkInterface;
/**
* 通过 JVM 启动参数传入的配置信息
*
* @author tjq
* @since 2020/8/8
*/
public class PowerJobDKey {

    /**
     * Constant holder for JVM -D property keys — never instantiated.
     */
    private PowerJobDKey() {
    }

    /**
     * The property name for {@link NetworkInterface#getDisplayName() the name of network interface} that the PowerJob application prefers
     */
    public static final String PREFERRED_NETWORK_INTERFACE = "powerjob.network.interface.preferred";

    /**
     * Address to bind; usually the local NIC address.
     */
    public static final String BIND_LOCAL_ADDRESS = "powerjob.network.local.address";

    /**
     * External address, optional; defaults to the bind address. Pass a separate
     * external address to enable communication across NAT and similar setups.
     */
    public static final String NT_EXTERNAL_ADDRESS = "powerjob.network.external.address";

    public static final String NT_EXTERNAL_PORT = "powerjob.network.external.port";

    /**
     * Java regular expressions for network interfaces that will be ignored.
     */
    public static final String IGNORED_NETWORK_INTERFACE_REGEX = "powerjob.network.interface.ignored";

    /**
     * Enables compression during data transfer, such as gzip under the HTTP protocol. default value is 'false'
     * Note that enabling compression reduces network usage, but increases CPU consumption
     */
    public static final String TRANSPORTER_USE_COMPRESSING = "powerjob.transporter.compression.enabled";

    /**
     * keep-alive connection timeout(in seconds), value <= 0 means disable keepalive. default value is 75
     */
    public static final String TRANSPORTER_KEEP_ALIVE_TIMEOUT = "powerjob.transporter.keepalive.timeout";

    public static final String WORKER_STATUS_CHECK_PERIOD = "powerjob.worker.status-check.normal.period";

    /**
     * allowed PowerJob to invoke Thread#stop to kill a thread when PowerJob can't interrupt the thread
     * <a href="https://stackoverflow.com/questions/16504140/thread-stop-deprecated">It's VERY dangerous</a>
     */
    public static final String WORKER_ALLOWED_FORCE_STOP_THREAD = "powerjob.worker.allowed-force-stop-thread";

    public static final String WORKER_WORK_SPACE = "powerjob.worker.workspace";

    /**
     * Unit: milliseconds.
     */
    public static final String FREQUENCY_JOB_MAX_INTERVAL = "powerjob.server.frequency-job.max-interval";

    /* ******************* system-level parameters, rarely used ******************* */

    /**
     * Manually override the number of available CPU cores.
     */
    public static final String SYS_AVAILABLE_PROCESSORS = "powerjob.system.available-processors";

    /* ******************* parameters unlikely to be used externally, mainly for internal testing ******************* */

    /**
     * Max number of active tasks; the excess is swapped to disk to improve performance.
     */
    public static final String WORKER_RUNTIME_SWAP_MAX_ACTIVE_TASK_NUM = "powerjob.worker.swap.max-active-task-num";

    public static final String WORKER_RUNTIME_SWAP_TASK_SCHEDULE_INTERVAL_MS = "powerjob.worker.swap.scan-interval";

    public static final String SERVER_TEST_ACCOUNT_USERNAME = "powerjob.server.test-accounts";

    /**
     * Special environments: 'test' for the test environment, 'trial' for the trial environment.
     */
    public static final String SP_ENV = "powerjob.sp-env";
}

View File

@ -0,0 +1,41 @@
package tech.powerjob.common;
import lombok.Getter;
import lombok.Setter;
/**
* PowerJob Query interface
*
* @author tjq
* @since 2021/1/15
*/
@Getter
@Setter
public abstract class PowerQuery {

    // Operator suffixes appended to query-field names. Declared `final`:
    // they are constants of the query DSL and must not be reassigned at runtime
    // (they were previously mutable public statics, a correctness hazard).
    // NOTE(review): presumably matched against query field names by reflection
    // on the server side — confirm against the query converter before renaming.

    /** Suffix for an equality condition. */
    public static final String EQUAL = "Eq";

    /** Suffix for an inequality condition. */
    public static final String NOT_EQUAL = "NotEq";

    /** Suffix for a LIKE condition. */
    public static final String LIKE = "Like";

    /** Suffix for a NOT LIKE condition. */
    public static final String NOT_LIKE = "NotLike";

    /** Suffix for a less-than condition. */
    public static final String LESS_THAN = "Lt";

    /** Suffix for a less-than-or-equal condition. */
    public static final String LESS_THAN_EQUAL = "LtEq";

    /** Suffix for a greater-than condition. */
    public static final String GREATER_THAN = "Gt";

    /** Suffix for a greater-than-or-equal condition. */
    public static final String GREATER_THAN_EQUAL = "GtEq";

    /** Suffix for an IN condition. */
    public static final String IN = "In";

    /** Suffix for a NOT IN condition. */
    public static final String NOT_IN = "NotIn";

    /** Suffix for an IS NULL condition. */
    public static final String IS_NULL = "IsNull";

    /** Suffix for an IS NOT NULL condition. */
    public static final String IS_NOT_NULL = "IsNotNull";

    // appId equality filter shared by all concrete queries
    private Long appIdEq;
}

View File

@ -0,0 +1,12 @@
package tech.powerjob.common;
import java.io.Serializable;
/**
* PowerJob serializable interface.
*
* @author tjq
* @since 2020/4/16
*/
public interface PowerSerializable extends Serializable {
    // Marker interface: declares no members. Types implementing it are tagged
    // as serializable within PowerJob — presumably consumed by the remote
    // transport layer; verify usage at call sites.
}

View File

@ -0,0 +1,104 @@
package tech.powerjob.common;
/**
* RemoteConstant
*
* @author tjq
* @since 2020/3/17
*/
public class RemoteConstant {

    /**
     * Constant holder — never instantiated (utility-class convention).
     */
    private RemoteConstant() {
    }

    /* ************************ AKKA WORKER ************************ */

    public static final int DEFAULT_WORKER_PORT = 27777;

    /* ************************ OTHERS ************************ */

    public static final String EMPTY_ADDRESS = "N/A";

    public static final long DEFAULT_TIMEOUT_MS = 5000;

    /* ************************ SERVER-self_side (s4s == server for server side) ************************ */

    public static final String S4S_PATH = "friend";

    /**
     * Heartbeat handling between servers of the cluster.
     */
    public static final String S4S_HANDLER_PING = "ping";

    /**
     * Handles execution requests from other servers.
     */
    public static final String S4S_HANDLER_PROCESS = "process";

    /* ************************ SERVER-worker_side (s4w == server for worker side) ************************ */

    public static final String S4W_PATH = "server";

    /**
     * Server-side handling of online logs.
     */
    public static final String S4W_HANDLER_REPORT_LOG = "reportLog";

    /**
     * Server-side handling of worker heartbeats.
     */
    public static final String S4W_HANDLER_WORKER_HEARTBEAT = "workerHeartbeat";

    /**
     * Server-side handling of instance status reported by a TaskTracker.
     */
    public static final String S4W_HANDLER_REPORT_INSTANCE_STATUS = "reportInstanceStatus";

    /**
     * Server-side query for the cluster a job may run on.
     */
    public static final String S4W_HANDLER_QUERY_JOB_CLUSTER = "queryJobCluster";

    /**
     * Server-side handling of a worker's container-deployment request.
     */
    public static final String S4W_HANDLER_WORKER_NEED_DEPLOY_CONTAINER = "queryContainer";

    /* ************************ Worker-TaskTracker ************************ */

    public static final String WTT_PATH = "taskTracker";

    /**
     * Server command: run a job.
     */
    public static final String WTT_HANDLER_RUN_JOB = "runJob";

    /**
     * Server command: stop a job instance.
     */
    public static final String WTT_HANDLER_STOP_INSTANCE = "stopInstance";

    /**
     * Server query: instance status.
     */
    public static final String WTT_HANDLER_QUERY_INSTANCE_STATUS = "queryInstanceStatus";

    /**
     * ProcessorTracker reports task status, including the execution result.
     */
    public static final String WTT_HANDLER_REPORT_TASK_STATUS = "reportTaskStatus";

    /**
     * ProcessorTracker reports its own status.
     */
    public static final String WTT_HANDLER_REPORT_PROCESSOR_TRACKER_STATUS = "reportProcessorTrackerStatus";

    /**
     * Map task.
     */
    public static final String WTT_HANDLER_MAP_TASK = "mapTask";

    /* ************************ Worker-ProcessorTracker ************************ */

    public static final String WPT_PATH = "processorTracker";

    public static final String WPT_HANDLER_START_TASK = "startTask";

    public static final String WPT_HANDLER_STOP_INSTANCE = "stopInstance";

    /* ************************ Worker-NORMAL ************************ */

    public static final String WORKER_PATH = "worker";

    public static final String WORKER_HANDLER_DEPLOY_CONTAINER = "deployContainer";

    public static final String WORKER_HANDLER_DESTROY_CONTAINER = "destroyContainer";
}

View File

@ -0,0 +1,91 @@
package tech.powerjob.common;
/**
* 系统生成的任务实例运行结果
*
* @author tjq
* @since 2020/4/11
*/
public class SystemInstanceResult {
private SystemInstanceResult() {
}
/* *********** 普通instance 专用 *********** */
/**
* 同时运行的任务实例数过多
*/
public static final String TOO_MANY_INSTANCES = "too many instances(%d>%d)";
/**
* 无可用worker
*/
public static final String NO_WORKER_AVAILABLE = "no worker available";
/**
* 任务执行超时
*/
public static final String INSTANCE_EXECUTE_TIMEOUT = "instance execute timeout";
/**
* 任务执行超时,成功打断任务
*/
public static final String INSTANCE_EXECUTE_TIMEOUT_INTERRUPTED = "instance execute timeout,interrupted success";
/**
* 任务执行超时,强制终止任务
*/
public static final String INSTANCE_EXECUTE_TIMEOUT_FORCE_STOP= "instance execute timeout,force stop success";
/**
* 用户手动停止任务,成功打断任务
*/
public static final String USER_STOP_INSTANCE_INTERRUPTED= "user stop instance,interrupted success";
/**
* 用户手动停止任务,被系统强制终止
*/
public static final String USER_STOP_INSTANCE_FORCE_STOP= "user stop instance,force stop success";
/**
* 创建根任务失败
*/
public static final String TASK_INIT_FAILED = "create root task failed";
/**
* 未知错误
*/
public static final String UNKNOWN_BUG = "unknown bug";
/**
* TaskTracker 长时间未上报
*/
public static final String REPORT_TIMEOUT = "worker report timeout, maybe TaskTracker down";
public static final String CAN_NOT_FIND_JOB_INFO = "can't find job info";
/* *********** workflow 专用 *********** */
public static final String MIDDLE_JOB_FAILED = "middle job failed";
public static final String MIDDLE_JOB_STOPPED = "middle job stopped by user";
public static final String CAN_NOT_FIND_JOB = "can't find some job";
public static final String CAN_NOT_FIND_NODE = "can't find some node";
public static final String ILLEGAL_NODE = "illegal node info";
/**
* 没有启用的节点
*/
public static final String NO_ENABLED_NODES = "no enabled nodes";
/**
* 被用户手动停止
*/
public static final String STOPPED_BY_USER = "stopped by user";
public static final String CANCELED_BY_USER = "canceled by user";
/**
* 无效 DAG
*/
public static final String INVALID_DAG = "invalid dag";
/**
* 被禁用的节点
*/
public static final String DISABLE_NODE = "disable node";
/**
* 标记为成功的节点
*/
public static final String MARK_AS_SUCCESSFUL_NODE = "mark as successful node";
}

View File

@ -0,0 +1,18 @@
package tech.powerjob.common;
/**
* 工作流上下文相关常量
*
* @author Echo009
* @since 2021/2/3
*/
public final class WorkflowContextConstant {

    /**
     * Constant holder — never instantiated (utility-class convention).
     */
    private WorkflowContextConstant() {
    }

    /**
     * Key of the workflow context's initial parameters.
     */
    public static final String CONTEXT_INIT_PARAMS_KEY = "initParams";
}

View File

@ -0,0 +1,26 @@
package tech.powerjob.common.enhance;
import lombok.extern.slf4j.Slf4j;
import java.util.concurrent.ScheduledExecutorService;
/**
* 安全的 runnable可防止因抛出异常导致周期性任务终止
* 使用 {@link ScheduledExecutorService} 执行任务时,推荐继承此类捕获并打印异常,避免因为抛出异常导致周期性任务终止
*
* @author songyinyin
* @since 2023/9/20 15:52
*/
@Slf4j
public abstract class SafeRunnable implements Runnable{

    /**
     * Runs {@link #run0()} and logs (rather than propagates) any {@code Exception},
     * so that a periodic task scheduled on a ScheduledExecutorService is not
     * silently cancelled by an uncaught exception.
     * NOTE: only {@code Exception} is caught — {@code Error}s still propagate.
     */
    @Override
    public void run() {
        try {
            run0();
        } catch (Exception e) {
            log.error("[SafeRunnable] run failed", e);
        }
    }

    /**
     * The actual task body; subclasses implement their logic here.
     */
    protected abstract void run0();
}

View File

@ -0,0 +1,30 @@
package tech.powerjob.common.enhance;
import lombok.extern.slf4j.Slf4j;
import java.util.concurrent.ScheduledExecutorService;
/**
* 使用 {@link ScheduledExecutorService} 执行任务时,推荐使用此对象包装一层,避免因为抛出异常导致周期性任务终止
*
* @author songyinyin
* @since 2023/9/20 16:04
*/
@Slf4j
public class SafeRunnableWrapper implements Runnable {

    /** The wrapped task; guaranteed non-null after construction. */
    private final Runnable runnable;

    /**
     * Wraps the given task.
     *
     * @param runnable the task to wrap
     * @throws NullPointerException if {@code runnable} is null (fail fast at
     *         construction instead of failing on the first scheduled run)
     */
    public SafeRunnableWrapper(Runnable runnable) {
        if (runnable == null) {
            throw new NullPointerException("runnable must not be null");
        }
        this.runnable = runnable;
    }

    /**
     * Runs the wrapped task and logs (rather than propagates) any Exception,
     * so a periodic task on a ScheduledExecutorService is not silently
     * cancelled by an uncaught exception. Errors still propagate.
     */
    @Override
    public void run() {
        try {
            runnable.run();
        } catch (Exception e) {
            log.error("[SafeRunnableWrapper] run failed", e);
        }
    }
}

View File

@ -0,0 +1,43 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
* DispatchStrategy
*
* @author tjq
* @since 2021/2/22
*/
@Getter
@AllArgsConstructor
public enum DispatchStrategy {

    /**
     * Prefer the healthiest worker.
     */
    HEALTH_FIRST(1),
    /**
     * Pick a worker at random.
     */
    RANDOM(2),
    /**
     * Dispatch to an explicitly designated worker.
     */
    SPECIFY(11);

    private final int v;

    /**
     * Resolves a strategy from its stored value.
     * A null value maps to the default, {@link #HEALTH_FIRST}.
     *
     * @throws IllegalArgumentException when no constant carries the value
     */
    public static DispatchStrategy of(Integer v) {
        if (v == null) {
            return HEALTH_FIRST;
        }
        DispatchStrategy[] all = values();
        for (int i = 0; i < all.length; i++) {
            if (v.equals(all[i].v)) {
                return all[i];
            }
        }
        throw new IllegalArgumentException("unknown DispatchStrategy of " + v);
    }
}

View File

@ -0,0 +1,22 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
* 加密类型
*
* @author tjq
* @since 2024/8/10
*/
@Getter
@AllArgsConstructor
public enum EncryptType {

    /** No encryption applied. */
    NONE("none"),
    /** MD5 digest. */
    MD5("md5")
    ;

    // wire/storage code of the encryption type
    private final String code;
}

View File

@ -0,0 +1,22 @@
package tech.powerjob.common.enums;
/**
* Environment Enum class.
*
* @author tjq
* @since 2020/5/3
*/
public enum Env {
    /**
     * Development or test environment.
     */
    DAILY,
    /**
     * Pre-release environment.
     */
    PRE,
    /**
     * Production environment.
     */
    PRODUCT
}

View File

@ -0,0 +1,75 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
* 鉴权错误信息
*
* @author tjq
* @since 2024/2/11
*/
@Getter
@AllArgsConstructor
public enum ErrorCodes {

    // NOTE(review): msg casing is inconsistent (CamelCase vs UPPER_SNAKE);
    // these strings may be part of the API surface, so they are left untouched.

    USER_NOT_LOGIN("-100", "UserNotLoggedIn"),
    USER_NOT_EXIST("-101", "UserNotExist"),
    USER_AUTH_FAILED("-102", "UserAuthFailed"),
    /**
     * Account has been disabled.
     */
    USER_DISABLED("-103", "UserDisabled"),
    NO_PERMISSION("-200", "NoPermission"),
    /**
     * Invalid request, usually a parameter problem.
     */
    INVALID_REQUEST("-300", "INVALID_REQUEST"),
    INCORRECT_PASSWORD("-400", "INCORRECT_PASSWORD"),
    /**
     * Invalid token.
     */
    INVALID_TOKEN("-401", "INVALID_TOKEN"),
    /**
     * Invalid app: the app cannot be found.
     */
    INVALID_APP("-402", "INVALID_APP"),
    /**
     * Token has expired.
     */
    TOKEN_EXPIRED("-403", "TOKEN_EXPIRED"),
    /**
     * Internal system error.
     */
    SYSTEM_UNKNOWN_ERROR("-500", "SYS_UNKNOWN_ERROR"),
    /**
     * Illegal arguments.
     */
    ILLEGAL_ARGS_ERROR("-501", "ILLEGAL_ARGS_ERROR"),
    /**
     * Operation not permitted.
     */
    OPERATION_NOT_PERMITTED("-502", "OPERATION_NOT_PERMITTED"),
    /**
     * OpenAPI error-code range: -10XX.
     */
    OPEN_API_AUTH_FAILED("-1002", "OPEN_API_AUTH_FAILED"),
    /**
     * PowerJobClient error-code range.
     */
    CLIENT_HTTP_REQUEST_FAILED("-2001", "CLIENT_HTTP_REQUEST_FAILED"),
    ;

    // stable error code (string form of a negative number)
    private final String code;

    // short machine-readable message
    private final String msg;
}

View File

@ -0,0 +1,40 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
* Execution type.
*
* @author tjq
* @since 2020/3/17
*/
@Getter
@AllArgsConstructor
public enum ExecuteType {

    /**
     * Standalone: runs on exactly one worker.
     */
    STANDALONE(1, "单机执行"),
    /**
     * Broadcast: runs on every worker in the cluster.
     */
    BROADCAST(2, "广播执行"),
    /**
     * MapReduce: distributed map + reduce phases.
     */
    MAP_REDUCE(3, "MapReduce"),
    /**
     * Map: distributed map phase only.
     */
    MAP(4, "Map");

    private final int v;

    private final String des;

    /**
     * Resolves an ExecuteType from its stored value.
     *
     * @throws IllegalArgumentException when no constant carries the value
     */
    public static ExecuteType of(int v) {
        ExecuteType[] all = values();
        for (int i = 0; i < all.length; i++) {
            if (all[i].v == v) {
                return all[i];
            }
        }
        throw new IllegalArgumentException("unknown ExecuteType of " + v);
    }
}

View File

@ -0,0 +1,49 @@
package tech.powerjob.common.enums;
import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Getter;
import java.util.List;
/**
* Status of the job instance
*
* @author tjq
* @since 2020/3/17
*/
@Getter
@AllArgsConstructor
public enum InstanceStatus {

    WAITING_DISPATCH(1, "等待派发"),
    WAITING_WORKER_RECEIVE(2, "等待Worker接收"),
    RUNNING(3, "运行中"),
    FAILED(4, "失败"),
    SUCCEED(5, "成功"),
    CANCELED(9, "取消"),
    STOPPED(10, "手动停止");

    private final int v;

    private final String des;

    /**
     * Statuses that count as "running" in the broad sense (not yet finished).
     */
    public static final List<Integer> GENERALIZED_RUNNING_STATUS = Lists.newArrayList(WAITING_DISPATCH.v, WAITING_WORKER_RECEIVE.v, RUNNING.v);

    /**
     * Terminal statuses.
     */
    public static final List<Integer> FINISHED_STATUS = Lists.newArrayList(FAILED.v, SUCCEED.v, CANCELED.v, STOPPED.v);

    /**
     * Resolves an InstanceStatus from its stored value.
     *
     * @throws IllegalArgumentException when no constant carries the value
     */
    public static InstanceStatus of(int v) {
        InstanceStatus[] all = values();
        for (int i = 0; i < all.length; i++) {
            if (all[i].v == v) {
                return all[i];
            }
        }
        throw new IllegalArgumentException("InstanceStatus has no item for value " + v);
    }
}

View File

@ -0,0 +1,35 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
import java.util.Objects;
/**
* 日志级别
*
* @author tjq
* @since 12/20/20
*/
@Getter
@AllArgsConstructor
public enum LogLevel {

    DEBUG(1),
    INFO(2),
    WARN(3),
    ERROR(4),
    OFF(99);

    private final int v;

    /**
     * Maps a stored level value to its enum name, or "UNKNOWN" when the value
     * (possibly null) matches no level.
     */
    public static String genLogLevelString(Integer v) {
        if (v != null) {
            for (LogLevel level : values()) {
                if (v.intValue() == level.v) {
                    return level.name();
                }
            }
        }
        return "UNKNOWN";
    }
}

View File

@ -0,0 +1,37 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
* LogType
*
* @author tjq
* @since 2022/10/3
*/
@Getter
@AllArgsConstructor
public enum LogType {

    ONLINE(1),
    LOCAL(2),
    STDOUT(3),
    LOCAL_AND_ONLINE(4),
    NULL(999);

    private final Integer v;

    /**
     * Resolves a LogType from its stored value; null or an unrecognized value
     * falls back to the default, {@link #ONLINE}.
     */
    public static LogType of(Integer type) {
        if (type != null) {
            for (LogType candidate : values()) {
                if (candidate.v.equals(type)) {
                    return candidate;
                }
            }
        }
        return ONLINE;
    }
}

View File

@ -0,0 +1,22 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
* <a href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types">消息内容类型</a>
*
* @author tjq
* @since 2024/8/10
*/
@Getter
@AllArgsConstructor
public enum MIME {

    /** JSON body, UTF-8 encoded. */
    APPLICATION_JSON("application/json; charset=utf-8"),
    /** HTML-form-encoded body. */
    APPLICATION_FORM("application/x-www-form-urlencoded")
    ;

    // the Content-Type header value
    private final String code;
}

View File

@ -0,0 +1,35 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
* Task Processor Type
*
* @author tjq
* @since 2020/3/23
*/
@Getter
@AllArgsConstructor
public enum ProcessorType {

    /**
     * Built-in processor (a class on the worker's classpath).
     */
    BUILT_IN(1, "内建处理器"),
    /**
     * External processor, dynamically loaded.
     */
    EXTERNAL(4, "外部处理器(动态加载)"),
    @Deprecated
    SHELL(2, "SHELL脚本"),
    @Deprecated
    PYTHON(3, "Python脚本");

    private final int v;

    private final String des;

    /**
     * Resolves a ProcessorType from its stored value.
     *
     * @throws IllegalArgumentException when no constant carries the value
     */
    public static ProcessorType of(int v) {
        ProcessorType[] all = values();
        for (int i = 0; i < all.length; i++) {
            if (all[i].v == v) {
                return all[i];
            }
        }
        throw new IllegalArgumentException("unknown ProcessorType of " + v);
    }
}

View File

@ -0,0 +1,30 @@
package tech.powerjob.common.enums;
import lombok.Getter;
import org.apache.commons.lang3.StringUtils;
/**
* transport protocol between server and worker
*
* @author tjq
* @since 2021/2/7
*/
public enum Protocol {

    AKKA,
    HTTP,
    MU;

    /**
     * Parses a protocol name, case-insensitively.
     * Null, empty, or unrecognized input falls back to {@link #AKKA} for
     * compatibility with older versions.
     *
     * Changes vs. previous revision: removed the useless {@code @Getter} on a
     * field-less enum, replaced the commons-lang {@code StringUtils.isEmpty}
     * with the equivalent stdlib check, and narrowed the broad
     * {@code catch (Exception)} to the only exception {@code valueOf} throws here.
     *
     * @param protocol protocol name, e.g. "http" (may be null)
     * @return the matching protocol, or AKKA as the legacy default
     */
    public static Protocol of(String protocol) {
        if (protocol == null || protocol.isEmpty()) {
            return AKKA;
        }
        try {
            return Protocol.valueOf(protocol.toUpperCase());
        } catch (IllegalArgumentException ignored) {
            // unrecognized name: fall through to the legacy default
        }
        // For compatibility with older versions, the AKKA protocol is used by default
        return AKKA;
    }
}

View File

@ -0,0 +1,27 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
* 特殊环境
*
* @author tjq
* @since 2025/8/17
*/
@Getter
@AllArgsConstructor
public enum SpEnv {

    /**
     * Test environment.
     */
    TEST("test"),
    /**
     * Trial environment.
     */
    TRIAL("trial")
    ;

    // value passed via the powerjob.sp-env JVM property
    private final String code;
}

View File

@ -0,0 +1,38 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
* 支持开/关的状态,如 任务状态JobStatus和工作流状态WorkflowStatus
*
* @author tjq
* @since 2020/4/6
*/
@Getter
@AllArgsConstructor
public enum SwitchableStatus {

    /**
     * Enabled.
     */
    ENABLE(1),
    /**
     * Disabled.
     */
    DISABLE(2),
    /**
     * Soft-deleted.
     */
    DELETED(99);

    private final int v;

    /**
     * Resolves a SwitchableStatus from its stored value.
     *
     * @throws IllegalArgumentException when no constant carries the value
     */
    public static SwitchableStatus of(int v) {
        SwitchableStatus[] all = values();
        for (int i = 0; i < all.length; i++) {
            if (all[i].v == v) {
                return all[i];
            }
        }
        throw new IllegalArgumentException("unknown SwitchableStatus of " + v);
    }
}

View File

@ -0,0 +1,42 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
* TaskTracker 行为枚举
*
* @author tjq
* @since 2024/2/24
*/
@Getter
@AllArgsConstructor
public enum TaskTrackerBehavior {

    /**
     * Normal: no special treatment — the TaskTracker also takes part in the
     * cluster's computation, which gives it a higher load than regular nodes.
     * Suitable when the cluster is small and tasks are not too heavy.
     */
    NORMAL(1),
    /**
     * Paddling: only coordinates, does not compute — best stability. Suitable
     * for very large clusters, trading one compute node for stability.
     */
    PADDLING(11)
    ;

    private final Integer v;

    /**
     * Resolves a behavior from its stored value; null or an unrecognized value
     * falls back to the default, {@link #NORMAL}.
     */
    public static TaskTrackerBehavior of(Integer type) {
        if (type != null) {
            for (TaskTrackerBehavior candidate : values()) {
                if (candidate.v.equals(type)) {
                    return candidate;
                }
            }
        }
        return NORMAL;
    }
}

View File

@ -0,0 +1,46 @@
package tech.powerjob.common.enums;
import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.ToString;
import java.util.Collections;
import java.util.List;
/**
* Scheduling time strategies
*
* @author tjq
* @since 2020/3/30
*/
@Getter
@AllArgsConstructor
@ToString
public enum TimeExpressionType {

    API(1),
    CRON(2),
    FIXED_RATE(3),
    FIXED_DELAY(4),
    WORKFLOW(5),
    DAILY_TIME_INTERVAL(11);

    private final int v;

    /**
     * Types that fire repeatedly at a fixed cadence.
     */
    public static final List<Integer> FREQUENT_TYPES = Collections.unmodifiableList(Lists.newArrayList(FIXED_RATE.v, FIXED_DELAY.v));

    /**
     * Types whose first trigger-time computation must yield a valid instant.
     */
    public static final List<Integer> INSPECT_TYPES = Collections.unmodifiableList(Lists.newArrayList(CRON.v, DAILY_TIME_INTERVAL.v));

    /**
     * Resolves a TimeExpressionType from its stored value.
     *
     * @throws IllegalArgumentException when no constant carries the value
     */
    public static TimeExpressionType of(int v) {
        TimeExpressionType[] all = values();
        for (int i = 0; i < all.length; i++) {
            if (all[i].v == v) {
                return all[i];
            }
        }
        throw new IllegalArgumentException("unknown TimeExpressionType of " + v);
    }
}

View File

@ -0,0 +1,49 @@
package tech.powerjob.common.enums;
import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Getter;
import java.util.Collections;
import java.util.List;
/**
* Workflow 任务运行状态
*
* @author tjq
* @since 2020/5/26
*/
@Getter
@AllArgsConstructor
public enum WorkflowInstanceStatus {

    /**
     * Initial status: waiting to be scheduled.
     */
    WAITING(1, "等待调度"),
    RUNNING(2, "运行中"),
    FAILED(3, "失败"),
    SUCCEED(4, "成功"),
    STOPPED(10, "手动停止");

    /**
     * Statuses that count as "running" in the broad sense.
     */
    public static final List<Integer> GENERALIZED_RUNNING_STATUS = Collections.unmodifiableList(Lists.newArrayList(WAITING.v, RUNNING.v));

    /**
     * Terminal statuses.
     */
    public static final List<Integer> FINISHED_STATUS = Collections.unmodifiableList(Lists.newArrayList(FAILED.v, SUCCEED.v, STOPPED.v));

    private final int v;

    private final String des;

    /**
     * Resolves a WorkflowInstanceStatus from its stored value.
     *
     * @throws IllegalArgumentException when no constant carries the value
     */
    public static WorkflowInstanceStatus of(int v) {
        WorkflowInstanceStatus[] all = values();
        for (int i = 0; i < all.length; i++) {
            if (all[i].v == v) {
                return all[i];
            }
        }
        throw new IllegalArgumentException("WorkflowInstanceStatus has no item for value " + v);
    }
}

View File

@ -0,0 +1,47 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
 * Types of workflow DAG nodes.
 *
 * @author Echo009
 * @since 2021/3/7
 */
@Getter
@AllArgsConstructor
public enum WorkflowNodeType {

    /**
     * Job node.
     */
    JOB(1, false),

    /**
     * Decision node.
     */
    DECISION(2, true),

    /**
     * Nested workflow node.
     */
    NESTED_WORKFLOW(3, false);

    private final int code;

    /**
     * Whether this node steers the flow (control node) rather than running a job.
     */
    private final boolean controlNode;

    /**
     * Resolves a node type from its numeric code.
     *
     * @throws IllegalArgumentException if the code matches no node type
     */
    public static WorkflowNodeType of(int code) {
        for (WorkflowNodeType candidate : values()) {
            if (candidate.code == code) {
                return candidate;
            }
        }
        throw new IllegalArgumentException("unknown WorkflowNodeType of " + code);
    }
}

View File

@ -0,0 +1,10 @@
package tech.powerjob.common.exception;
/**
 * Thrown from code paths that are believed to be unreachable; reaching one
 * indicates a programming error rather than a recoverable condition.
 *
 * @author tjq
 * @since 2023/7/12
 */
public class ImpossibleException extends RuntimeException {

    public ImpossibleException() {
    }

    /**
     * @param message description of the supposedly impossible situation
     */
    public ImpossibleException(String message) {
        super(message);
    }

    /**
     * @param message description of the supposedly impossible situation
     * @param cause   the unexpected underlying cause, preserved for diagnostics
     */
    public ImpossibleException(String message, Throwable cause) {
        super(message, cause);
    }
}

View File

@ -0,0 +1,29 @@
package tech.powerjob.common.exception;
/**
 * Checked PowerJob exception: callers are required to handle it explicitly.
 * Mirrors the five standard {@link Exception} constructors.
 *
 * @author KFC·D·Fans
 * @since 2021/3/21
 */
public class PowerJobCheckedException extends Exception {

    public PowerJobCheckedException() {
    }

    public PowerJobCheckedException(String message) {
        super(message);
    }

    public PowerJobCheckedException(String message, Throwable cause) {
        super(message, cause);
    }

    public PowerJobCheckedException(Throwable cause) {
        super(cause);
    }

    public PowerJobCheckedException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }
}

View File

@ -0,0 +1,42 @@
package tech.powerjob.common.exception;
import lombok.Getter;
import lombok.Setter;
import tech.powerjob.common.enums.ErrorCodes;
/**
 * Base runtime exception of PowerJob, optionally carrying a machine-readable error code.
 *
 * @author tjq
 * @since 2020/5/26
 */
@Setter
@Getter
public class PowerJobException extends RuntimeException {

    /**
     * Machine-readable error code; populated only by the {@link ErrorCodes}-based
     * constructor, {@code null} otherwise.
     */
    protected String code;

    public PowerJobException() {
    }

    public PowerJobException(String message) {
        super(message);
    }

    /**
     * Builds the message as "&lt;code msg&gt;:&lt;extraMsg&gt;" (just the code's message
     * when extraMsg is null) and records the error code.
     */
    public PowerJobException(ErrorCodes errorCode, String extraMsg) {
        super(extraMsg == null ? errorCode.getMsg() : errorCode.getMsg().concat(":").concat(extraMsg));
        this.code = errorCode.getCode();
    }

    public PowerJobException(String message, Throwable cause) {
        super(message, cause);
    }

    public PowerJobException(Throwable cause) {
        super(cause);
    }

    public PowerJobException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }
}

View File

@ -0,0 +1,17 @@
package tech.powerjob.common.exception;
import tech.powerjob.common.enums.ErrorCodes;
/**
 * Helper whose constructor unconditionally throws a {@link PowerJobException} built
 * from the given error code and message — instantiation never completes.
 * NOTE(review): presumably used as a one-liner to raise coded exceptions in
 * expression positions; confirm call sites before changing this pattern.
 *
 * @author tjq
 * @since 2024/11/22
 */
public class PowerJobExceptionLauncher {

    public PowerJobExceptionLauncher(ErrorCodes errorCode, String message) {
        throw new PowerJobException(errorCode, message);
    }
}

View File

@ -0,0 +1,28 @@
package tech.powerjob.common.model;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Alarm configuration.
 *
 * @author Echo009
 * @since 2022/1/25
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class AlarmConfig {

    /**
     * Threshold that triggers an alarm.
     */
    private Integer alertThreshold;

    /**
     * Length of the statistics window, in seconds.
     */
    private Integer statisticWindowLen;

    /**
     * Length of the silence (mute) window, in seconds.
     */
    private Integer silenceWindowLen;
}

View File

@ -0,0 +1,35 @@
package tech.powerjob.common.model;
import tech.powerjob.common.PowerSerializable;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Deployed Container Information.
 *
 * @author tjq
 * @since 2020/5/18
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class DeployedContainerInfo implements PowerSerializable {

    /**
     * Id of the container.
     */
    private Long containerId;

    /**
     * Version of the container.
     */
    private String version;

    /**
     * Deploy timestamp.
     */
    private long deployedTime;

    /**
     * Address of the worker hosting the container; not reported to the server.
     */
    private String workerAddress;
}

View File

@ -0,0 +1,29 @@
package tech.powerjob.common.model;
import lombok.Data;
/**
 * The class for Git repository info.
 *
 * @author tjq
 * @since 2020/5/17
 */
@Data
public class GitRepoInfo {

    /**
     * Address of the Git repository.
     */
    private String repo;

    /**
     * Name of the branch.
     */
    private String branch;

    /**
     * Username for Git authentication.
     */
    private String username;

    /**
     * Password for Git authentication.
     */
    private String password;
}

View File

@ -0,0 +1,121 @@
package tech.powerjob.common.model;
import lombok.Data;
import lombok.NoArgsConstructor;
import tech.powerjob.common.PowerSerializable;
import java.util.List;
/**
 * Detailed info of job instances.
 *
 * @author tjq
 * @since 2020/4/11
 */
@Data
@NoArgsConstructor
public class InstanceDetail implements PowerSerializable {

    /**
     * Expected trigger time.
     */
    private Long expectedTriggerTime;

    /**
     * Actual trigger time of an instance.
     */
    private Long actualTriggerTime;

    /**
     * Finish time of an instance, which may be null.
     */
    private Long finishedTime;

    /**
     * Status of the task instance.
     */
    private Integer status;

    /**
     * Execution result, which may be null.
     */
    private String result;

    /**
     * Task tracker address.
     */
    private String taskTrackerAddress;

    /**
     * Job parameters.
     */
    private String jobParams;

    /**
     * Param string that is passed to an instance when it is initialized.
     */
    private String instanceParams;

    /**
     * "Foreign key" for OPENAPI scenarios: binds a business entity to a PowerJob instance.
     */
    private String outerKey;

    /**
     * Extended attribute for passing context parameters through in OPENAPI scenarios.
     */
    private String extendValue;

    /**
     * Task detail, used by MapReduce or Broadcast tasks.
     * NOTE: slightly misnamed — this actually holds per-task statistics.
     */
    private TaskDetail taskDetail;

    /**
     * Detailed task results fetched by query.
     */
    private List<TaskDetailInfo> queriedTaskDetailInfoList;

    /**
     * Sub instance details, used by frequent tasks.
     */
    private List<SubInstanceDetail> subInstanceDetails;

    /**
     * Running times.
     */
    private Long runningTimes;

    /**
     * Extended fields. Middlewares are not supposed to update frequently.
     * Changes in PowerJob-common may lead to incompatible versions.
     * PowerJob-common packages should not be modified if not necessary.
     */
    private String extra;

    /**
     * Extra info for frequent tasks, returned as List&lt;SubInstanceDetail&gt;.
     */
    @Data
    @NoArgsConstructor
    public static class SubInstanceDetail implements PowerSerializable {
        private long subInstanceId;
        private Long startTime;
        private Long finishedTime;
        private String result;
        private int status;
    }

    /**
     * Extra info of {@code MapReduce} or {@code Broadcast} type of tasks.
     */
    @Data
    @NoArgsConstructor
    public static class TaskDetail implements PowerSerializable {
        private long totalTaskNum;
        private long succeedTaskNum;
        private long failedTaskNum;
        // Waiting to be dispatched (exists only in the TaskTracker database).
        protected Long waitingDispatchTaskNum;
        // Dispatched but not acknowledged by the ProcessorTracker: the request may have
        // been lost due to a network error, or the ProcessorTracker thread pool may be
        // full and rejecting work.
        protected Long workerUnreceivedTaskNum;
        // Acknowledged by the ProcessorTracker; queued in its thread pool awaiting execution.
        protected Long receivedTaskNum;
        // Currently running on the ProcessorTracker.
        protected Long runningTaskNum;
    }
}

View File

@ -0,0 +1,36 @@
package tech.powerjob.common.model;
import tech.powerjob.common.PowerSerializable;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Log instance model: one log line reported by an instance.
 *
 * @author tjq
 * @since 2020/4/21
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class InstanceLogContent implements PowerSerializable {

    /**
     * Id of the instance that produced the log.
     */
    private long instanceId;

    /**
     * Submitted time of the log.
     */
    private long logTime;

    /**
     * Level of the log.
     */
    private int logLevel;

    /**
     * Content of the log.
     */
    private String logContent;
}

View File

@ -0,0 +1,23 @@
package tech.powerjob.common.model;
import lombok.Data;
import tech.powerjob.common.PowerSerializable;
/**
 * Scheduling metadata of an instance.
 *
 * @author tjq
 * @since 2025/8/17
 */
@Data
public class InstanceMeta implements PowerSerializable {

    /**
     * expectTriggerTime — the original expected trigger time, preserved unchanged
     * no matter how many times the instance is retried.
     */
    private Long ett;

    public InstanceMeta() {
    }
}

View File

@ -0,0 +1,25 @@
package tech.powerjob.common.model;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import lombok.experimental.Accessors;
/**
 * Advanced runtime configuration of a job.
 *
 * @author tjq
 * @since 2024/2/24
 */
@Getter
@Setter
@ToString
@Accessors(chain = true)
public class JobAdvancedRuntimeConfig {

    /**
     * MapReduce-only parameter: TaskTracker behavior,
     * see {@link tech.powerjob.common.enums.TaskTrackerBehavior}.
     */
    private Integer taskTrackerBehavior;
}

View File

@ -0,0 +1,28 @@
package tech.powerjob.common.model;
import lombok.Data;
import tech.powerjob.common.serialize.JsonUtils;
/**
 * Life cycle (validity window) of a job.
 * NOTE(review): units of start/end are not shown here — presumably epoch millis; confirm.
 *
 * @author Echo009
 * @since 2022/3/22
 */
@Data
public class LifeCycle {

    // Shared fallback returned when parsing fails.
    // NOTE(review): this instance is mutable (@Data generates setters) and shared —
    // callers must not modify it.
    public static final LifeCycle EMPTY_LIFE_CYCLE = new LifeCycle();

    private Long start;

    private Long end;

    /**
     * Parses a LifeCycle from JSON. Deliberately best-effort: any parse failure
     * yields {@link #EMPTY_LIFE_CYCLE} instead of throwing.
     */
    public static LifeCycle parse(String lifeCycle){
        try {
            return JsonUtils.parseObject(lifeCycle,LifeCycle.class);
        }catch (Exception e){
            // ignore: fall back to the shared empty instance
            return EMPTY_LIFE_CYCLE;
        }
    }
}

View File

@ -0,0 +1,32 @@
package tech.powerjob.common.model;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import lombok.experimental.Accessors;
/**
 * Job logging configuration.
 *
 * @author yhz
 * @since 2022/9/16
 */
@Getter
@Setter
@ToString
@Accessors(chain = true)
public class LogConfig {

    /**
     * Log type, see {@link tech.powerjob.common.enums.LogType}.
     */
    private Integer type;

    /**
     * Log level, see {@link tech.powerjob.common.enums.LogLevel}.
     */
    private Integer level;

    /**
     * Name of the target logger.
     */
    private String loggerName;
}

View File

@ -0,0 +1,140 @@
package tech.powerjob.common.model;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
import com.google.common.collect.Lists;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import tech.powerjob.common.enums.WorkflowNodeType;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.Serializable;
import java.util.List;
/**
 * Points & edges for DAG, making it easier to describe or transfer.
 *
 * @author tjq
 * @since 2020/5/26
 */
@Data
@NoArgsConstructor
public class PEWorkflowDAG implements Serializable {

    /**
     * Nodes of DAG diagram.
     */
    private List<Node> nodes;

    /**
     * Edges of DAG diagram.
     */
    private List<Edge> edges;

    /**
     * Point: a single workflow node.
     */
    @Data
    @Accessors(chain = true)
    @NoArgsConstructor
    public static class Node implements Serializable {

        /**
         * node id
         *
         * @since 20210128
         */
        private Long nodeId;

        /* Instance running param, which is not required by DAG. */

        /**
         * node type
         *
         * @see WorkflowNodeType
         * @since 20210316
         */
        private Integer nodeType;

        /**
         * job id or workflow id (if this Node type is a nested workflow)
         *
         * @see WorkflowNodeType#NESTED_WORKFLOW
         */
        private Long jobId;

        /**
         * node name
         */
        private String nodeName;

        @JsonSerialize(using = ToStringSerializer.class)
        private Long instanceId;

        /**
         * for a decision node, this is JavaScript code
         */
        private String nodeParams;

        private Integer status;

        /**
         * for a decision node, this can only be "true" or "false"
         */
        private String result;

        /**
         * instanceId will be null if the node is disabled.
         */
        private Boolean enable;

        /**
         * marks a node that was disabled by a control node.
         */
        private Boolean disableByControlNode;

        private Boolean skipWhenFailed;

        private String startTime;

        private String finishedTime;

        public Node(Long nodeId) {
            this.nodeId = nodeId;
            this.nodeType = WorkflowNodeType.JOB.getCode();
        }

        public Node(Long nodeId, Integer nodeType) {
            this.nodeId = nodeId;
            this.nodeType = nodeType;
        }
    }

    /**
     * Edge formed by two node ids.
     */
    @Data
    @NoArgsConstructor
    public static class Edge implements Serializable {

        private Long from;

        private Long to;

        /**
         * property, supporting complex flow control;
         * for a decision node it can be "true" or "false"
         */
        private String property;

        private Boolean enable;

        public Edge(long from, long to) {
            this.from = from;
            this.to = to;
        }

        public Edge(long from, long to, String property) {
            this.from = from;
            this.to = to;
            this.property = property;
        }
    }

    /**
     * Builds a DAG; a null edge list is normalized to an empty mutable list.
     */
    public PEWorkflowDAG(@Nonnull List<Node> nodes, @Nullable List<Edge> edges) {
        this.nodes = nodes;
        this.edges = edges == null ? Lists.newLinkedList() : edges;
    }
}

View File

@ -0,0 +1,118 @@
package tech.powerjob.common.model;
import tech.powerjob.common.PowerSerializable;
import lombok.Data;
/**
 * Class for system metrics reported by a worker.
 *
 * @author tjq
 * @since 2020/3/25
 */
@Data
public class SystemMetrics implements PowerSerializable, Comparable<SystemMetrics> {

    /**
     * CPU processor num.
     */
    private int cpuProcessors;

    /**
     * Percent of CPU load.
     */
    private double cpuLoad;

    /**
     * Memory that is used by JVM, in GB.
     */
    private double jvmUsedMemory;

    /**
     * Max memory that JVM can use, in GB.
     */
    private double jvmMaxMemory;

    /**
     * Ratio of memory that JVM uses to total memory, 0.X,
     * the value is between 0 and 1.
     */
    private double jvmMemoryUsage;

    /**
     * Total used disk space, in GB.
     */
    private double diskUsed;

    /**
     * Total disk space, in GB.
     */
    private double diskTotal;

    /**
     * Used disk ratio.
     */
    private double diskUsage;

    /**
     * user-customized system metrics collector, eg. GPU usage
     * implement SystemMetricsCollector to set the value in worker side
     * implement WorkerFilter to filter the worker in server side
     */
    private String extra;

    /**
     * Cached score; 0 means "not yet calculated".
     */
    private int score;

    /**
     * Compares by score in descending order (higher score sorts first).
     *
     * @param that the metrics that is to be compared with current.
     * @return {@code int}
     */
    @Override
    public int compareTo(SystemMetrics that) {
        // Integer.compare avoids the overflow risk of comparing via plain subtraction.
        return Integer.compare(that.calculateScore(), this.calculateScore());
    }

    /**
     * Calculate score, based on CPU and memory info. The result is cached:
     * once a positive score has been computed, later calls reuse it.
     *
     * @return score
     */
    public int calculateScore() {
        if (score > 0) {
            return score;
        }
        // Memory is vital to TaskTracker, so we set the multiplier factor as 2.
        double memScore = (jvmMaxMemory - jvmUsedMemory) * 2;
        // Calculate the remaining load of CPU. Multiplier is set as 1.
        double cpuScore = cpuProcessors - cpuLoad;
        // Windows can not fetch CPU load (cpuLoad < 0), so cap cpuScore at 1.
        if (cpuScore > cpuProcessors) {
            cpuScore = 1;
        }
        score = (int) (memScore + cpuScore);
        return score;
    }

    /**
     * Judge if the machine is available.
     *
     * @param minCPUCores    Minimum available CPU cores.
     * @param minMemorySpace Minimum available memory size
     * @param minDiskSpace   Minimum disk space
     * @return {@code boolean} whether the machine is available.
     */
    public boolean available(double minCPUCores, double minMemorySpace, double minDiskSpace) {
        double availableMemory = jvmMaxMemory - jvmUsedMemory;
        double availableDisk = diskTotal - diskUsed;
        if (availableMemory < minMemorySpace || availableDisk < minDiskSpace) {
            return false;
        }
        // 0 indicates the CPU is free, which is the optimal condition.
        // Negative number means being unable to fetch CPU info, return true.
        if (cpuLoad <= 0 || minCPUCores <= 0) {
            return true;
        }
        return minCPUCores < (cpuProcessors - cpuLoad);
    }
}

View File

@ -0,0 +1,43 @@
package tech.powerjob.common.model;
import lombok.Data;
import lombok.experimental.Accessors;
import tech.powerjob.common.PowerSerializable;
/**
 * Detailed information of a single task.
 *
 * @author tjq
 * @since 2024/2/25
 */
@Data
@Accessors(chain = true)
public class TaskDetailInfo implements PowerSerializable {

    private String taskId;

    private String taskName;

    /**
     * Task payload: the subTask produced by map.
     */
    private String taskContent;

    /**
     * Address of the processor executing the task.
     */
    private String processorAddress;

    private Integer status;

    private String statusStr;

    private String result;

    private Integer failedCnt;

    /**
     * Creation time.
     */
    private Long createdTime;

    /**
     * Last modified time.
     */
    private Long lastModifiedTime;

    /**
     * Last time the ProcessorTracker reported this task.
     */
    private Long lastReportTime;
}

View File

@ -0,0 +1,24 @@
package tech.powerjob.common.model;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import java.io.Serializable;
/**
 * Application info as seen by a worker.
 *
 * @author tjq
 * @since 2023/9/2
 */
@Data
@NoArgsConstructor
@Accessors(chain = true)
public class WorkerAppInfo implements Serializable {

    /**
     * Unique application ID.
     */
    private Long appId;
}

View File

@ -0,0 +1,35 @@
package tech.powerjob.common.request;
import tech.powerjob.common.PowerSerializable;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Request asking a worker to deploy a container.
 *
 * @author tjq
 * @since 2020/5/16
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class ServerDeployContainerRequest implements PowerSerializable {

    /**
     * Container ID.
     */
    private Long containerId;

    /**
     * Container name.
     */
    private String containerName;

    /**
     * MD5 of the file, used both for version checking and file download.
     */
    private String version;

    /**
     * Download URL.
     */
    private String downloadURL;
}

Some files were not shown because too many files have changed in this diff Show More