Compare commits

...

77 Commits

Author SHA1 Message Date
chenbowen
7f7c4210ac Merge branch 'dev' into test 2025-11-28 14:01:14 +08:00
chenbowen
db3afb5b64 Merge remote-tracking branch 'base-version/main' into dev 2025-11-28 11:07:42 +08:00
chenbowen
542466270a 1. 修复自定义 sql 中大写表名无法匹配到 mybatis 中的缓存表信息,导致表被忽略租户的问题
2. 新增 iwork feign api 调用
2025-11-28 11:05:09 +08:00
chenbowen
03ebe21670 1. 清理 iwork 无用的接口。
2. 整合 iwork 用户的密码管理策略。
2025-11-27 20:25:02 +08:00
chenbowen
64d0d4e55e 1. iwork 统一用印发起接口 2025-11-27 20:19:27 +08:00
chenbowen
22599bbc65 Merge branch 'dev' into test 2025-11-27 16:46:27 +08:00
chenbowen
240a531ee1 Merge remote-tracking branch 'base-version/main' into dev
# Conflicts:
#	zt-module-bpm/zt-module-bpm-server/src/main/java/liquibase/database/core/DmDatabase.java
2025-11-27 16:35:49 +08:00
chenbowen
00b2f6312d 修复 flowable 无法通过 dm 数据库驱动正常获取 schema 的bug 2025-11-27 16:01:05 +08:00
chenbowen
446b5ca7a4 剔除掉 swagger 不能请求的 rpc-api 2025-11-27 13:48:55 +08:00
chenbowen
28a49ce45a 修复 dm jdbc 不兼容 flowable 转义 sql 的错误 2025-11-27 13:26:30 +08:00
chenbowen
4bd0402dde 禁止事件引擎重复自动建表 2025-11-27 11:16:49 +08:00
chenbowen
0ab550123f 关闭 databus web 请求连接池 2025-11-27 10:27:30 +08:00
chenbowen
cd21239ff2 flowable 达梦迁移 2025-11-27 09:58:44 +08:00
chenbowen
837e09941a Merge branch 'dev' into test 2025-11-26 20:14:04 +08:00
chenbowen
256bf22a10 Merge remote-tracking branch 'base-version/main' into dev 2025-11-26 20:12:46 +08:00
chenbowen
76eabb6db0 修复 system 模块编译错误 2025-11-26 20:12:07 +08:00
chenbowen
06909fafea 当前登录用户新增公司编码与部门编码属性 2025-11-26 20:01:34 +08:00
qianshijiang
00956030a4 错误信息未记录到日志文件 2025-11-26 15:52:00 +08:00
chenbowen
2dac28d3b3 Merge branch 'dev' into test 2025-11-26 13:46:23 +08:00
chenbowen
dbb1d1905e Merge remote-tracking branch 'base-version/main' into dev 2025-11-26 13:46:02 +08:00
chenbowen
08232eb3cb iwork 人员组织同步相关 2025-11-26 13:45:06 +08:00
chenbowen
5de2801fc9 Merge branch 'dev' into test 2025-11-26 12:40:05 +08:00
chenbowen
e9994a24c2 Merge remote-tracking branch 'base-version/main' into dev 2025-11-26 12:39:44 +08:00
chenbowen
a10732119b iwork 人员组织同步相关 2025-11-26 12:38:38 +08:00
qianshijiang
e7efddf976 配置mybais-plus打印sql 2025-11-26 11:57:17 +08:00
chenbowen
13ec805c20 Merge branch 'dev' into test 2025-11-26 11:34:59 +08:00
chenbowen
61e61d08b6 Merge remote-tracking branch 'base-version/main' into dev 2025-11-26 11:34:41 +08:00
chenbowen
5698c34185 iwork 人员组织同步相关 2025-11-26 11:34:04 +08:00
qianshijiang
96058e29c2 Merge remote-tracking branch 'origin/dev' into dev 2025-11-26 10:44:28 +08:00
qianshijiang
27d22de4e0 日志配置修改 2025-11-26 10:44:13 +08:00
chenbowen
f1242e74fc Merge branch 'dev' into test 2025-11-26 10:43:25 +08:00
chenbowen
0c0d82f465 Merge remote-tracking branch 'base-version/main' into dev 2025-11-26 10:43:04 +08:00
chenbowen
12ba2cf756 iwork 人员组织同步相关 2025-11-26 10:42:24 +08:00
qianshijiang
b1bd193f50 nacos配置。 2025-11-26 08:57:00 +08:00
chenbowen
0b4b87845c Merge branch 'dev' into test 2025-11-26 01:48:49 +08:00
chenbowen
a2f2325119 Merge remote-tracking branch 'base-version/main' into dev 2025-11-26 01:48:31 +08:00
chenbowen
4c79ac8a6d iwork 人员组织同步相关 2025-11-26 01:48:10 +08:00
chenbowen
0c0cb27c15 Merge branch 'dev' into test 2025-11-26 01:35:09 +08:00
chenbowen
a263632e49 Merge remote-tracking branch 'base-version/main' into dev 2025-11-26 01:34:50 +08:00
chenbowen
2e2b7ac6fa iwork 人员组织同步相关 2025-11-26 01:34:08 +08:00
chenbowen
9730573546 Merge branch 'dev' into test 2025-11-26 01:06:13 +08:00
chenbowen
299132943c Merge remote-tracking branch 'base-version/main' into dev 2025-11-26 01:05:55 +08:00
chenbowen
76ba994b50 iwork 人员组织同步相关 2025-11-26 01:04:35 +08:00
chenbowen
dd284728b4 Merge branch 'dev' into test 2025-11-25 23:27:22 +08:00
chenbowen
685ed6b504 Merge remote-tracking branch 'base-version/main' into dev 2025-11-25 23:27:01 +08:00
chenbowen
f754b1c694 iwork 人员组织同步相关 2025-11-25 23:26:26 +08:00
chenbowen
dc1db47d07 iwork 人员组织同步相关 2025-11-25 20:31:56 +08:00
chenbowen
dd38e65972 Merge branch 'dev' into test 2025-11-25 20:09:31 +08:00
chenbowen
02e0c81446 Merge remote-tracking branch 'base-version/main' into dev 2025-11-25 20:09:06 +08:00
chenbowen
6c8c479984 同步 nacos 配置到基础系统 2025-11-25 20:08:31 +08:00
chenbowen
829229a355 Merge branch 'dev' into test 2025-11-25 19:15:29 +08:00
chenbowen
067f7226f4 Merge remote-tracking branch 'base-version/main' into dev 2025-11-25 19:14:59 +08:00
chenbowen
b35df8493c 同步 nacos 配置到基础系统 2025-11-25 19:13:27 +08:00
hewencai
518aa2a773 Merge remote-tracking branch 'origin/dev' into dev 2025-11-25 19:09:13 +08:00
hewencai
4003388740 feat:集成移动云mas短信平台 2025-11-25 19:08:55 +08:00
chenbowen
f16509c107 Merge branch 'dev' into test 2025-11-25 18:58:07 +08:00
chenbowen
565a625df7 Merge remote-tracking branch 'base-version/main' into dev 2025-11-25 18:57:47 +08:00
chenbowen
adcea87bbf 同步 nacos 配置到基础系统 2025-11-25 18:56:12 +08:00
chenbowen
a79806690d Merge branch 'dev' into test 2025-11-25 17:43:28 +08:00
chenbowen
8689c5e844 Merge remote-tracking branch 'base-version/main' into dev 2025-11-25 17:42:26 +08:00
chenbowen
5be1b75be8 iwork 人员组织同步相关,兼容 iwork 返回 2025-11-25 17:41:39 +08:00
chenbowen
06d9ae2688 Merge branch 'dev' into test 2025-11-25 17:22:53 +08:00
chenbowen
547b1d9afb Merge remote-tracking branch 'base-version/main' into dev 2025-11-25 17:22:36 +08:00
chenbowen
2f9c28f166 iwork 人员组织同步相关,兼容 iwork 返回 2025-11-25 17:22:11 +08:00
chenbowen
9a0e60ad84 Merge branch 'dev' into test 2025-11-25 17:19:06 +08:00
chenbowen
c24ae5bad8 Merge remote-tracking branch 'base-version/main' into dev 2025-11-25 17:18:43 +08:00
chenbowen
64eb031486 iwork 人员组织同步相关,兼容 iwork 返回 2025-11-25 17:18:04 +08:00
chenbowen
30b22698e8 Merge branch 'dev' into test 2025-11-25 16:50:11 +08:00
chenbowen
95fab27556 Merge remote-tracking branch 'base-version/main' into dev 2025-11-25 16:49:43 +08:00
chenbowen
2efb815d59 iwork 人员组织同步相关,兼容 iwork 返回 2025-11-25 16:41:29 +08:00
chenbowen
3d5f07b7a5 Merge branch 'dev' into test 2025-11-25 16:08:41 +08:00
chenbowen
77b4e62def Merge remote-tracking branch 'base-version/main' into dev 2025-11-25 16:06:41 +08:00
chenbowen
e2dbaf12a4 iwork 人员组织同步相关 2025-11-25 16:05:52 +08:00
chenbowen
f8d95218f5 Merge branch 'dev' into test 2025-11-25 15:55:44 +08:00
chenbowen
e0d5c0221e Merge remote-tracking branch 'base-version/main' into dev 2025-11-25 15:55:13 +08:00
chenbowen
59afa893b0 修复编译错误 2025-11-25 15:54:50 +08:00
chenbowen
d4d80ce86a iwork 人员组织同步相关 2025-11-25 15:48:47 +08:00
114 changed files with 7992 additions and 724 deletions

21
pom.xml
View File

@@ -237,8 +237,8 @@
<config.server-addr>172.16.46.63:30848</config.server-addr> <config.server-addr>172.16.46.63:30848</config.server-addr>
<config.namespace>dev</config.namespace> <config.namespace>dev</config.namespace>
<config.group>DEFAULT_GROUP</config.group> <config.group>DEFAULT_GROUP</config.group>
<config.username/> <config.username>nacos</config.username>
<config.password/> <config.password>P@ssword25</config.password>
<config.version>1.0.0</config.version> <config.version>1.0.0</config.version>
</properties> </properties>
</profile> </profile>
@@ -250,8 +250,8 @@
<config.server-addr>172.16.46.63:30848</config.server-addr> <config.server-addr>172.16.46.63:30848</config.server-addr>
<config.namespace>prod</config.namespace> <config.namespace>prod</config.namespace>
<config.group>DEFAULT_GROUP</config.group> <config.group>DEFAULT_GROUP</config.group>
<config.username/> <config.username>nacos</config.username>
<config.password/> <config.password>P@ssword25</config.password>
<config.version>1.0.0</config.version> <config.version>1.0.0</config.version>
</properties> </properties>
</profile> </profile>
@@ -263,8 +263,8 @@
<config.server-addr>172.16.46.63:30848</config.server-addr> <config.server-addr>172.16.46.63:30848</config.server-addr>
<config.namespace>local</config.namespace> <config.namespace>local</config.namespace>
<config.group>DEFAULT_GROUP</config.group> <config.group>DEFAULT_GROUP</config.group>
<config.username/> <config.username>nacos</config.username>
<config.password/> <config.password>P@ssword25</config.password>
<config.version>1.0.0</config.version> <config.version>1.0.0</config.version>
</properties> </properties>
</profile> </profile>
@@ -277,7 +277,14 @@
<profile> <profile>
<id>qsj</id> <id>qsj</id>
<properties> <properties>
<config.namespace>qsj</config.namespace> <env.name>dev</env.name>
<!--Nacos 配置-->
<config.server-addr>172.16.46.63:30848</config.server-addr>
<config.namespace>qsj</config.namespace>
<config.group>DEFAULT_GROUP</config.group>
<config.username>nacos</config.username>
<config.password>P@ssword25</config.password>
<config.version>1.0.0</config.version>
</properties> </properties>
</profile> </profile>
</profiles> </profiles>

View File

@@ -0,0 +1,74 @@
CREATE TABLE "RUOYI-VUE-PRO"."DATABUS_API_ACCESS_LOG"
(
"ID" BIGINT NOT NULL,
"TRACE_ID" VARCHAR(64) DEFAULT NULL,
"API_CODE" VARCHAR(128) DEFAULT NULL,
"API_VERSION" VARCHAR(32) DEFAULT NULL,
"REQUEST_METHOD" VARCHAR(16) DEFAULT NULL,
"REQUEST_PATH" VARCHAR(512) DEFAULT NULL,
"REQUEST_QUERY" TEXT,
"REQUEST_HEADERS" TEXT,
"REQUEST_BODY" TEXT,
"RESPONSE_STATUS" INT DEFAULT NULL,
"RESPONSE_MESSAGE" VARCHAR(500) DEFAULT NULL,
"RESPONSE_BODY" TEXT,
"STATUS" SMALLINT DEFAULT 3 NOT NULL,
"ERROR_CODE" VARCHAR(100) DEFAULT NULL,
"ERROR_MESSAGE" VARCHAR(1000) DEFAULT NULL,
"EXCEPTION_STACK" TEXT,
"CLIENT_IP" VARCHAR(64) DEFAULT NULL,
"USER_AGENT" VARCHAR(512) DEFAULT NULL,
"DURATION" BIGINT DEFAULT NULL,
"REQUEST_TIME" DATETIME(6) DEFAULT CURRENT_TIMESTAMP NOT NULL,
"RESPONSE_TIME" DATETIME(6) DEFAULT NULL,
"STEP_RESULTS" TEXT,
"EXTRA" TEXT,
"CREATOR" VARCHAR(64) DEFAULT '' NOT NULL,
"CREATE_TIME" DATETIME(6) DEFAULT CURRENT_TIMESTAMP NOT NULL,
"UPDATER" VARCHAR(64) DEFAULT '' NOT NULL,
"UPDATE_TIME" DATETIME(6) DEFAULT CURRENT_TIMESTAMP NOT NULL,
"DELETED" BIT DEFAULT '0' NOT NULL,
"TENANT_ID" BIGINT DEFAULT 0 NOT NULL,
NOT CLUSTER PRIMARY KEY("ID")) STORAGE(ON "MAIN", CLUSTERBTR) ;
COMMENT ON TABLE "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG IS 'Databus API 访问日志表';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."API_CODE" IS 'API 编码';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."API_VERSION" IS 'API 版本';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."CLIENT_IP" IS '客户端 IP';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."CREATE_TIME" IS '创建时间';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."CREATOR" IS '创建者';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."DELETED" IS '是否删除';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."DURATION" IS '请求耗时(毫秒)';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."ERROR_CODE" IS '业务错误码';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."ERROR_MESSAGE" IS '错误信息';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."EXCEPTION_STACK" IS '异常堆栈';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."EXTRA" IS '额外调试信息(JSON 字符串)';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."ID" IS '日志主键';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."REQUEST_BODY" IS '请求体(JSON 字符串)';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."REQUEST_HEADERS" IS '请求头(JSON 字符串)';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."REQUEST_METHOD" IS '请求方法';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."REQUEST_PATH" IS '请求路径';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."REQUEST_QUERY" IS '请求查询参数(JSON 字符串)';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."REQUEST_TIME" IS '请求时间';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."RESPONSE_BODY" IS '响应体(JSON 字符串)';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."RESPONSE_MESSAGE" IS '响应提示信息';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."RESPONSE_STATUS" IS '响应 HTTP 状态码';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."RESPONSE_TIME" IS '响应时间';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."STATUS" IS '访问状态(0-成功 1-客户端错误 2-服务端错误 3-未知)';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."STEP_RESULTS" IS '执行步骤结果(JSON 字符串)';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."TENANT_ID" IS '租户编号';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."TRACE_ID" IS '追踪 ID';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."UPDATER" IS '更新者';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."UPDATE_TIME" IS '更新时间';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."USER_AGENT" IS 'User-Agent';
CREATE OR REPLACE INDEX "IDX_DATABUS_API_ACCESS_LOG_TRACE" ON "RUOYI-VUE-PRO"."DATABUS_API_ACCESS_LOG"("TRACE_ID" ASC) STORAGE(ON "MAIN", CLUSTERBTR) ;
CREATE OR REPLACE INDEX "IDX_DATABUS_API_ACCESS_LOG_CODE" ON "RUOYI-VUE-PRO"."DATABUS_API_ACCESS_LOG"("API_CODE" ASC) STORAGE(ON "MAIN", CLUSTERBTR) ;
CREATE OR REPLACE INDEX "IDX_DATABUS_API_ACCESS_LOG_METHOD" ON "RUOYI-VUE-PRO"."DATABUS_API_ACCESS_LOG"("REQUEST_METHOD" ASC) STORAGE(ON "MAIN", CLUSTERBTR) ;
CREATE OR REPLACE INDEX "IDX_DATABUS_API_ACCESS_LOG_STATUS" ON "RUOYI-VUE-PRO"."DATABUS_API_ACCESS_LOG"("STATUS" ASC) STORAGE(ON "MAIN", CLUSTERBTR) ;
CREATE OR REPLACE INDEX "IDX_DATABUS_API_ACCESS_LOG_RESP_STATUS" ON "RUOYI-VUE-PRO"."DATABUS_API_ACCESS_LOG"("RESPONSE_STATUS" ASC) STORAGE(ON "MAIN", CLUSTERBTR) ;
CREATE OR REPLACE INDEX "IDX_DATABUS_API_ACCESS_LOG_REQUEST_TIME" ON "RUOYI-VUE-PRO"."DATABUS_API_ACCESS_LOG"("REQUEST_TIME" ASC) STORAGE(ON "MAIN", CLUSTERBTR) ;
CREATE OR REPLACE INDEX "IDX_DATABUS_API_ACCESS_LOG_CLIENT_IP" ON "RUOYI-VUE-PRO"."DATABUS_API_ACCESS_LOG"("CLIENT_IP" ASC) STORAGE(ON "MAIN", CLUSTERBTR) ;
CREATE OR REPLACE INDEX "IDX_DATABUS_API_ACCESS_LOG_TENANT" ON "RUOYI-VUE-PRO"."DATABUS_API_ACCESS_LOG"("TENANT_ID" ASC) STORAGE(ON "MAIN", CLUSTERBTR) ;

View File

@@ -87,6 +87,7 @@
<netty.version>4.1.116.Final</netty.version> <netty.version>4.1.116.Final</netty.version>
<mqtt.version>1.2.5</mqtt.version> <mqtt.version>1.2.5</mqtt.version>
<pf4j-spring.version>0.9.0</pf4j-spring.version> <pf4j-spring.version>0.9.0</pf4j-spring.version>
<okhttp3.version>4.12.0</okhttp3.version>
<!-- 规则引擎 --> <!-- 规则引擎 -->
<liteflow.version>2.15.1</liteflow.version> <liteflow.version>2.15.1</liteflow.version>
<vertx.version>4.5.13</vertx.version> <vertx.version>4.5.13</vertx.version>

View File

@@ -182,8 +182,10 @@ public class BusinessDeptHandleUtil {
if (loginUser != null) { if (loginUser != null) {
loginUser.setVisitCompanyId(Long.valueOf(info.getCompanyId())); loginUser.setVisitCompanyId(Long.valueOf(info.getCompanyId()));
loginUser.setVisitCompanyName(info.getCompanyName()); loginUser.setVisitCompanyName(info.getCompanyName());
loginUser.setVisitCompanyCode(info.getCompanyName());
loginUser.setVisitDeptId(Long.valueOf(info.getDeptId())); loginUser.setVisitDeptId(Long.valueOf(info.getDeptId()));
loginUser.setVisitDeptName(info.getDeptName()); loginUser.setVisitDeptName(info.getDeptName());
loginUser.setVisitDeptCode(info.getDeptName());
} }
request.setAttribute(WebFrameworkUtils.HEADER_VISIT_COMPANY_ID, info.getCompanyId()); request.setAttribute(WebFrameworkUtils.HEADER_VISIT_COMPANY_ID, info.getCompanyId());
if (info.getCompanyName() != null) { if (info.getCompanyName() != null) {

View File

@@ -1,12 +1,12 @@
package com.zt.plat.framework.tenant.core.db; package com.zt.plat.framework.tenant.core.db;
import com.zt.plat.framework.tenant.config.TenantProperties;
import com.zt.plat.framework.tenant.core.aop.TenantIgnore;
import com.zt.plat.framework.tenant.core.context.TenantContextHolder;
import com.baomidou.mybatisplus.core.metadata.TableInfo; import com.baomidou.mybatisplus.core.metadata.TableInfo;
import com.baomidou.mybatisplus.core.metadata.TableInfoHelper; import com.baomidou.mybatisplus.core.metadata.TableInfoHelper;
import com.baomidou.mybatisplus.extension.plugins.handler.TenantLineHandler; import com.baomidou.mybatisplus.extension.plugins.handler.TenantLineHandler;
import com.baomidou.mybatisplus.extension.toolkit.SqlParserUtils; import com.baomidou.mybatisplus.extension.toolkit.SqlParserUtils;
import com.zt.plat.framework.tenant.config.TenantProperties;
import com.zt.plat.framework.tenant.core.aop.TenantIgnore;
import com.zt.plat.framework.tenant.core.context.TenantContextHolder;
import net.sf.jsqlparser.expression.Expression; import net.sf.jsqlparser.expression.Expression;
import net.sf.jsqlparser.expression.LongValue; import net.sf.jsqlparser.expression.LongValue;
@@ -69,7 +69,12 @@ public class TenantDatabaseInterceptor implements TenantLineHandler {
// 找不到的表,说明不是 zt 项目里的,不进行拦截(忽略租户) // 找不到的表,说明不是 zt 项目里的,不进行拦截(忽略租户)
TableInfo tableInfo = TableInfoHelper.getTableInfo(tableName); TableInfo tableInfo = TableInfoHelper.getTableInfo(tableName);
if (tableInfo == null) { if (tableInfo == null) {
return true; tableName = tableName.toLowerCase();
tableInfo = TableInfoHelper.getTableInfo(tableName);
}
if (tableInfo == null) {
tableName = tableName.toLowerCase();
tableInfo = TableInfoHelper.getTableInfo(tableName);
} }
// 如果继承了 TenantBaseDO 基类,显然不忽略租户 // 如果继承了 TenantBaseDO 基类,显然不忽略租户
if (TenantBaseDO.class.isAssignableFrom(tableInfo.getEntityType())) { if (TenantBaseDO.class.isAssignableFrom(tableInfo.getEntityType())) {

View File

@@ -73,9 +73,11 @@ public class LoginUser {
private Long visitCompanyId; private Long visitCompanyId;
private String visitCompanyName; private String visitCompanyName;
private String visitCompanyCode;
private Long visitDeptId; private Long visitDeptId;
private String visitDeptName; private String visitDeptName;
private String visitDeptCode;
public void setContext(String key, Object value) { public void setContext(String key, Object value) {
if (context == null) { if (context == null) {

View File

@@ -1,5 +1,6 @@
package com.zt.plat.framework.swagger.config; package com.zt.plat.framework.swagger.config;
import com.zt.plat.framework.common.enums.RpcConstants;
import io.swagger.v3.oas.models.Components; import io.swagger.v3.oas.models.Components;
import io.swagger.v3.oas.models.OpenAPI; import io.swagger.v3.oas.models.OpenAPI;
import io.swagger.v3.oas.models.info.Contact; import io.swagger.v3.oas.models.info.Contact;
@@ -11,6 +12,7 @@ import io.swagger.v3.oas.models.parameters.Parameter;
import io.swagger.v3.oas.models.security.SecurityRequirement; import io.swagger.v3.oas.models.security.SecurityRequirement;
import io.swagger.v3.oas.models.security.SecurityScheme; import io.swagger.v3.oas.models.security.SecurityScheme;
import org.springdoc.core.customizers.OpenApiBuilderCustomizer; import org.springdoc.core.customizers.OpenApiBuilderCustomizer;
import org.springdoc.core.customizers.OpenApiCustomizer;
import org.springdoc.core.customizers.ServerBaseUrlCustomizer; import org.springdoc.core.customizers.ServerBaseUrlCustomizer;
import org.springdoc.core.models.GroupedOpenApi; import org.springdoc.core.models.GroupedOpenApi;
import org.springdoc.core.properties.SpringDocConfigProperties; import org.springdoc.core.properties.SpringDocConfigProperties;
@@ -123,12 +125,26 @@ public class ZtSwaggerAutoConfiguration {
return GroupedOpenApi.builder() return GroupedOpenApi.builder()
.group(group) .group(group)
.pathsToMatch("/admin-api/" + path + "/**", "/app-api/" + path + "/**") .pathsToMatch("/admin-api/" + path + "/**", "/app-api/" + path + "/**")
.pathsToExclude(RpcConstants.RPC_API_PREFIX + "/**")
.addOperationCustomizer((operation, handlerMethod) -> operation .addOperationCustomizer((operation, handlerMethod) -> operation
.addParametersItem(buildTenantHeaderParameter()) .addParametersItem(buildTenantHeaderParameter())
.addParametersItem(buildSecurityHeaderParameter())) .addParametersItem(buildSecurityHeaderParameter()))
.build(); .build();
} }
@Bean
public OpenApiCustomizer rpcApiPathExclusionCustomiser() {
return openApi -> {
if (openApi == null || openApi.getPaths() == null) {
return;
}
openApi.getPaths().entrySet().removeIf(entry -> {
String path = entry.getKey();
return path != null && path.startsWith(RpcConstants.RPC_API_PREFIX);
});
};
}
/** /**
* 构建 Tenant 租户编号请求头参数 * 构建 Tenant 租户编号请求头参数
* *

View File

@@ -4,8 +4,8 @@ spring:
cloud: cloud:
nacos: nacos:
server-addr: 172.16.46.63:30848 # Nacos 服务器地址 server-addr: 172.16.46.63:30848 # Nacos 服务器地址
username: # Nacos 账号 username: ${config.username} # Nacos 账号
password: # Nacos 密码 password: ${config.password} # Nacos 密码
discovery: # 【配置中心】配置项 discovery: # 【配置中心】配置项
namespace: ${config.namespace} # 命名空间。这里使用 maven Profile 资源过滤进行动态替换 namespace: ${config.namespace} # 命名空间。这里使用 maven Profile 资源过滤进行动态替换
group: DEFAULT_GROUP # 使用的 Nacos 配置分组,默认为 DEFAULT_GROUP group: DEFAULT_GROUP # 使用的 Nacos 配置分组,默认为 DEFAULT_GROUP

View File

@@ -5,6 +5,10 @@
<springProperty scope="context" name="zt.info.base-package" source="zt.info.base-package"/> <springProperty scope="context" name="zt.info.base-package" source="zt.info.base-package"/>
<!-- 格式化输出:%d 表示日期,%X{tid} SkWalking 链路追踪编号,%thread 表示线程名,%-5level级别从左显示 5 个字符宽度,%msg日志消息%n是换行符 --> <!-- 格式化输出:%d 表示日期,%X{tid} SkWalking 链路追踪编号,%thread 表示线程名,%-5level级别从左显示 5 个字符宽度,%msg日志消息%n是换行符 -->
<property name="PATTERN_DEFAULT" value="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} | %highlight(${LOG_LEVEL_PATTERN:-%5p} ${PID:- }) | %boldYellow(%thread [%tid]) %boldGreen(%-40.40logger{39}) | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/> <property name="PATTERN_DEFAULT" value="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} | %highlight(${LOG_LEVEL_PATTERN:-%5p} ${PID:- }) | %boldYellow(%thread [%tid]) %boldGreen(%-40.40logger{39}) | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>
<!--应用名称-->
<springProperty scope="context" name="spring.application.name" source="spring.application.name"/>
<!-- 日志输出路径 -->
<property name="LOG_DIR" value="${user.home}/logs/${spring.application.name}"/>
<!-- 控制台 Appender --> <!-- 控制台 Appender -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">      <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">     
@@ -31,7 +35,7 @@
<!-- 启动服务时,是否清理历史日志,一般不建议清理 --> <!-- 启动服务时,是否清理历史日志,一般不建议清理 -->
<cleanHistoryOnStart>${LOGBACK_ROLLINGPOLICY_CLEAN_HISTORY_ON_START:-false}</cleanHistoryOnStart> <cleanHistoryOnStart>${LOGBACK_ROLLINGPOLICY_CLEAN_HISTORY_ON_START:-false}</cleanHistoryOnStart>
<!-- 日志文件,到达多少容量,进行滚动 --> <!-- 日志文件,到达多少容量,进行滚动 -->
<maxFileSize>${LOGBACK_ROLLINGPOLICY_MAX_FILE_SIZE:-10MB}</maxFileSize> <maxFileSize>${LOGBACK_ROLLINGPOLICY_MAX_FILE_SIZE:-50MB}</maxFileSize>
<!-- 日志文件的总大小0 表示不限制 --> <!-- 日志文件的总大小0 表示不限制 -->
<totalSizeCap>${LOGBACK_ROLLINGPOLICY_TOTAL_SIZE_CAP:-0}</totalSizeCap> <totalSizeCap>${LOGBACK_ROLLINGPOLICY_TOTAL_SIZE_CAP:-0}</totalSizeCap>
<!-- 日志文件的保留天数 --> <!-- 日志文件的保留天数 -->
@@ -56,18 +60,39 @@
</encoder> </encoder>
</appender> </appender>
<!-- ERROR 级别日志 -->
<appender name="ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_DIR}-error.log</file>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_DIR}-error.%d{yyyy-MM-dd}.log</fileNamePattern>
<maxHistory>30</maxHistory> <!-- 保留30天的日志 -->
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<!--logback的日志级别 FATAL > ERROR > WARN > INFO > DEBUG-->
<!-- 本地环境 --> <!-- 本地环境 -->
<springProfile name="local"> <springProfile name="local,dev">
<root level="INFO"> <root level="WARN">
<appender-ref ref="STDOUT"/> <appender-ref ref="STDOUT"/>
<appender-ref ref="ERROR"/>
<appender-ref ref="GRPC"/> <!-- 本地环境下,如果不想接入 SkyWalking 日志服务,可以注释掉本行 --> <appender-ref ref="GRPC"/> <!-- 本地环境下,如果不想接入 SkyWalking 日志服务,可以注释掉本行 -->
<appender-ref ref="ASYNC"/> <!-- 本地环境下,如果不想打印日志,可以注释掉本行 --> <appender-ref ref="ASYNC"/> <!-- 本地环境下,如果不想打印日志,可以注释掉本行 -->
</root> </root>
</springProfile> </springProfile>
<!-- 其它环境 --> <!-- 其它环境 -->
<springProfile name="dev,test,stage,prod,default"> <springProfile name="dev,test,stage,prod,default">
<root level="INFO"> <root level="INFO">
<appender-ref ref="STDOUT"/> <appender-ref ref="STDOUT"/>
<appender-ref ref="ERROR"/>
<appender-ref ref="ASYNC"/> <appender-ref ref="ASYNC"/>
<appender-ref ref="GRPC"/> <appender-ref ref="GRPC"/>
</root> </root>

View File

@@ -8,17 +8,25 @@ import com.zt.plat.module.bpm.framework.flowable.core.event.BpmProcessInstanceEv
import com.zt.plat.module.system.api.user.AdminUserApi; import com.zt.plat.module.system.api.user.AdminUserApi;
import org.flowable.common.engine.api.delegate.FlowableFunctionDelegate; import org.flowable.common.engine.api.delegate.FlowableFunctionDelegate;
import org.flowable.common.engine.api.delegate.event.FlowableEventListener; import org.flowable.common.engine.api.delegate.event.FlowableEventListener;
import org.flowable.engine.ProcessEngineConfiguration;
import org.flowable.spring.SpringProcessEngineConfiguration; import org.flowable.spring.SpringProcessEngineConfiguration;
import org.flowable.spring.boot.EngineConfigurationConfigurer; import org.flowable.spring.boot.EngineConfigurationConfigurer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.ObjectProvider; import org.springframework.beans.factory.ObjectProvider;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.ApplicationEventPublisher; import org.springframework.context.ApplicationEventPublisher;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.AsyncListenableTaskExecutor; import org.springframework.core.task.AsyncListenableTaskExecutor;
import org.springframework.jdbc.datasource.DataSourceUtils;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import java.util.List; import java.util.List;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
/** /**
* BPM 模块的 Flowable 配置类 * BPM 模块的 Flowable 配置类
@@ -28,6 +36,8 @@ import java.util.List;
@Configuration(proxyBeanMethods = false) @Configuration(proxyBeanMethods = false)
public class BpmFlowableConfiguration { public class BpmFlowableConfiguration {
private static final Logger log = LoggerFactory.getLogger(BpmFlowableConfiguration.class);
/** /**
* 参考 {@link org.flowable.spring.boot.FlowableJobConfiguration} 类,创建对应的 AsyncListenableTaskExecutor Bean * 参考 {@link org.flowable.spring.boot.FlowableJobConfiguration} 类,创建对应的 AsyncListenableTaskExecutor Bean
* *
@@ -69,6 +79,37 @@ public class BpmFlowableConfiguration {
}; };
} }
@Bean
public EngineConfigurationConfigurer<SpringProcessEngineConfiguration> dmProcessEngineConfigurationConfigurer(DataSource dataSource) {
return configuration -> {
try {
configureDmCompatibility(configuration, dataSource);
} catch (SQLException ex) {
log.warn("Failed to inspect datasource for DM compatibility; Flowable will keep default settings", ex);
}
};
}
private void configureDmCompatibility(SpringProcessEngineConfiguration configuration, DataSource dataSource) throws SQLException {
Connection connection = null;
try {
connection = DataSourceUtils.getConnection(dataSource);
DatabaseMetaData metaData = connection.getMetaData();
String productName = metaData.getDatabaseProductName();
String jdbcUrl = metaData.getURL();
boolean dmProduct = productName != null && productName.toLowerCase().contains("dm");
boolean dmUrl = jdbcUrl != null && jdbcUrl.toLowerCase().startsWith("jdbc:dm");
if (!dmProduct && !dmUrl) {
return;
}
log.info("Detected DM database (product='{}'); enabling Flowable Oracle compatibility with automatic schema updates", productName);
configuration.setDatabaseSchemaUpdate(ProcessEngineConfiguration.DB_SCHEMA_UPDATE_TRUE);
configuration.setDatabaseType("oracle");
} finally {
DataSourceUtils.releaseConnection(connection, dataSource);
}
}
// =========== 审批人相关的 Bean ========== // =========== 审批人相关的 Bean ==========
@Bean @Bean

View File

@@ -5,6 +5,25 @@
package liquibase.database.core; package liquibase.database.core;
import java.lang.reflect.Method;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.ResourceBundle;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import liquibase.CatalogAndSchema; import liquibase.CatalogAndSchema;
import liquibase.GlobalConfiguration; import liquibase.GlobalConfiguration;
import liquibase.Scope; import liquibase.Scope;
@@ -23,17 +42,15 @@ import liquibase.statement.UniqueConstraint;
import liquibase.statement.core.RawCallStatement; import liquibase.statement.core.RawCallStatement;
import liquibase.statement.core.RawParameterizedSqlStatement; import liquibase.statement.core.RawParameterizedSqlStatement;
import liquibase.structure.DatabaseObject; import liquibase.structure.DatabaseObject;
import liquibase.structure.core.*; import liquibase.structure.core.Catalog;
import liquibase.structure.core.Column;
import liquibase.structure.core.Index;
import liquibase.structure.core.PrimaryKey;
import liquibase.structure.core.Schema;
import liquibase.util.JdbcUtil; import liquibase.util.JdbcUtil;
import liquibase.util.StringUtil; import liquibase.util.StringUtil;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import java.lang.reflect.Method;
import java.sql.*;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class DmDatabase extends AbstractJdbcDatabase { public class DmDatabase extends AbstractJdbcDatabase {
private static final String PROXY_USER_REGEX = ".*(?:thin|oci)\\:(.+)/@.*"; private static final String PROXY_USER_REGEX = ".*(?:thin|oci)\\:(.+)/@.*";
public static final Pattern PROXY_USER_PATTERN = Pattern.compile(".*(?:thin|oci)\\:(.+)/@.*"); public static final Pattern PROXY_USER_PATTERN = Pattern.compile(".*(?:thin|oci)\\:(.+)/@.*");
@@ -98,6 +115,7 @@ public class DmDatabase extends AbstractJdbcDatabase {
public void setConnection(DatabaseConnection conn) { public void setConnection(DatabaseConnection conn) {
this.reservedWords.addAll(Arrays.asList("GROUP", "USER", "SESSION", "PASSWORD", "RESOURCE", "START", "SIZE", "UID", "DESC", "ORDER")); this.reservedWords.addAll(Arrays.asList("GROUP", "USER", "SESSION", "PASSWORD", "RESOURCE", "START", "SIZE", "UID", "DESC", "ORDER"));
Connection sqlConn = null; Connection sqlConn = null;
boolean dmDatabase = false;
if (!(conn instanceof OfflineConnection)) { if (!(conn instanceof OfflineConnection)) {
try { try {
if (conn instanceof JdbcConnection) { if (conn instanceof JdbcConnection) {
@@ -124,26 +142,42 @@ public class DmDatabase extends AbstractJdbcDatabase {
Scope.getCurrentScope().getLog(this.getClass()).info("Could not set remarks reporting on OracleDatabase: " + e.getMessage()); Scope.getCurrentScope().getLog(this.getClass()).info("Could not set remarks reporting on OracleDatabase: " + e.getMessage());
} }
CallableStatement statement = null;
try { try {
statement = sqlConn.prepareCall("{call DBMS_UTILITY.DB_VERSION(?,?)}"); DatabaseMetaData metaData = sqlConn.getMetaData();
statement.registerOutParameter(1, 12); if (metaData != null) {
statement.registerOutParameter(2, 12); String productName = metaData.getDatabaseProductName();
statement.execute(); dmDatabase = productName != null && PRODUCT_NAME.equalsIgnoreCase(productName);
String compatibleVersion = statement.getString(2); if (dmDatabase) {
if (compatibleVersion != null) { this.databaseMajorVersion = metaData.getDatabaseMajorVersion();
Matcher majorVersionMatcher = VERSION_PATTERN.matcher(compatibleVersion); this.databaseMinorVersion = metaData.getDatabaseMinorVersion();
if (majorVersionMatcher.matches()) {
this.databaseMajorVersion = Integer.valueOf(majorVersionMatcher.group(1));
this.databaseMinorVersion = Integer.valueOf(majorVersionMatcher.group(2));
} }
} }
} catch (SQLException e) { } catch (SQLException e) {
String message = "Cannot read from DBMS_UTILITY.DB_VERSION: " + e.getMessage(); Scope.getCurrentScope().getLog(this.getClass()).info("Unable to inspect database metadata for DM version detection: " + e.getMessage());
Scope.getCurrentScope().getLog(this.getClass()).info("Could not set check compatibility mode on OracleDatabase, assuming not running in any sort of compatibility mode: " + message); }
} finally {
JdbcUtil.closeStatement(statement); if (!dmDatabase) {
CallableStatement statement = null;
try {
statement = sqlConn.prepareCall("{call DBMS_UTILITY.DB_VERSION(?,?)}");
statement.registerOutParameter(1, 12);
statement.registerOutParameter(2, 12);
statement.execute();
String compatibleVersion = statement.getString(2);
if (compatibleVersion != null) {
Matcher majorVersionMatcher = VERSION_PATTERN.matcher(compatibleVersion);
if (majorVersionMatcher.matches()) {
this.databaseMajorVersion = Integer.valueOf(majorVersionMatcher.group(1));
this.databaseMinorVersion = Integer.valueOf(majorVersionMatcher.group(2));
}
}
} catch (SQLException e) {
String message = "Cannot read from DBMS_UTILITY.DB_VERSION: " + e.getMessage();
Scope.getCurrentScope().getLog(this.getClass()).info("Could not set check compatibility mode on OracleDatabase, assuming not running in any sort of compatibility mode: " + message);
} finally {
JdbcUtil.closeStatement(statement);
}
} }
if (GlobalConfiguration.DDL_LOCK_TIMEOUT.getCurrentValue() != null) { if (GlobalConfiguration.DDL_LOCK_TIMEOUT.getCurrentValue() != null) {
@@ -250,7 +284,15 @@ public class DmDatabase extends AbstractJdbcDatabase {
} }
public boolean isCorrectDatabaseImplementation(DatabaseConnection conn) throws DatabaseException { public boolean isCorrectDatabaseImplementation(DatabaseConnection conn) throws DatabaseException {
return "oracle".equalsIgnoreCase(conn.getDatabaseProductName()); String databaseProductName = conn == null ? null : conn.getDatabaseProductName();
if (databaseProductName == null) {
return false;
}
if (PRODUCT_NAME.equalsIgnoreCase(databaseProductName)) {
return true;
}
// Flowable 历史上将 DM 映射为 Oracle 元数据,因此这里同样接受 Oracle 以保持兼容
return "oracle".equalsIgnoreCase(databaseProductName);
} }
public String getDefaultDriver(String url) { public String getDefaultDriver(String url) {

View File

@@ -0,0 +1,32 @@
package liquibase.datatype.core;
import liquibase.database.Database;
import liquibase.database.core.DmDatabase;
import liquibase.datatype.DataTypeInfo;
import liquibase.datatype.DatabaseDataType;
@DataTypeInfo(
name = "boolean",
aliases = {"java.sql.Types.BOOLEAN", "java.lang.Boolean", "bit", "bool"},
minParameters = 0,
maxParameters = 0,
priority = 2
)
public class DmBooleanType extends BooleanType {

    /**
     * Accepts DM databases in addition to whatever the base type supports.
     * Registered with priority 2 so it wins over the stock boolean type.
     */
    @Override
    public boolean supports(Database database) {
        return database instanceof DmDatabase || super.supports(database);
    }

    /**
     * Maps boolean to NUMBER(1) on DM; delegates to the base type otherwise.
     */
    @Override
    public DatabaseDataType toDatabaseDataType(Database database) {
        return database instanceof DmDatabase
                ? new DatabaseDataType("NUMBER", 1)
                : super.toDatabaseDataType(database);
    }
}

View File

@@ -0,0 +1,354 @@
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.common.engine.impl.db;
import org.apache.ibatis.session.SqlSessionFactory;
import org.flowable.common.engine.api.FlowableException;
import org.flowable.common.engine.impl.context.Context;
import org.flowable.common.engine.impl.interceptor.CommandContext;
import org.flowable.common.engine.impl.interceptor.Session;
import org.flowable.common.engine.impl.interceptor.SessionFactory;
import org.flowable.common.engine.impl.persistence.cache.EntityCache;
import org.flowable.common.engine.impl.persistence.entity.Entity;
import java.sql.SQLException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Creates {@link DbSqlSession} instances and holds the mapping between entity
 * classes and their MyBatis statement ids, plus any database-specific statement
 * overrides. This copy carries a DM (达梦) adaptation: when no schema is
 * configured, the schema reported by the JDBC connection is adopted.
 *
 * @author Tom Baeyens
 * @author Joram Barrez
 */
public class DbSqlSessionFactory implements SessionFactory {

    // database-type -> (logical statement name -> db-specific statement name)
    protected Map<String, Map<String, String>> databaseSpecificStatements = new HashMap<>();

    protected String databaseType;
    protected String databaseTablePrefix = "";
    protected boolean tablePrefixIsSchema;

    protected String databaseCatalog;
    protected String databaseSchema;

    protected SqlSessionFactory sqlSessionFactory;
    protected Map<String, String> statementMappings;

    // Lazily-filled caches of entity class -> MyBatis statement id.
    protected Map<Class<?>, String> insertStatements = new ConcurrentHashMap<>();
    protected Map<Class<?>, String> updateStatements = new ConcurrentHashMap<>();
    protected Map<Class<?>, String> deleteStatements = new ConcurrentHashMap<>();
    protected Map<Class<?>, String> selectStatements = new ConcurrentHashMap<>();

    protected List<Class<? extends Entity>> insertionOrder = new ArrayList<>();
    protected List<Class<? extends Entity>> deletionOrder = new ArrayList<>();

    protected boolean isDbHistoryUsed = true;

    protected Set<Class<? extends Entity>> bulkInserteableEntityClasses = new HashSet<>();
    protected Map<Class<?>, String> bulkInsertStatements = new ConcurrentHashMap<>();
    protected int maxNrOfStatementsInBulkInsert = 100;

    protected Map<String, Class<?>> logicalNameToClassMapping = new ConcurrentHashMap<>();

    protected boolean usePrefixId;

    public DbSqlSessionFactory(boolean usePrefixId) {
        this.usePrefixId = usePrefixId;
    }

    @Override
    public Class<?> getSessionType() {
        return DbSqlSession.class;
    }

    /**
     * Opens a {@link DbSqlSession}, adopting the connection's schema when none
     * is configured (DM adaptation) and pushing the configured schema/catalog
     * onto the connection.
     *
     * @throws FlowableException if the session has no connection, or if the
     *         schema/catalog cannot be read or applied
     */
    @Override
    public Session openSession(CommandContext commandContext) {
        DbSqlSession dbSqlSession = createDbSqlSession();

        // Fail fast on a missing connection. The previous version only performed
        // this check after the connection had already been dereferenced below,
        // which made the check unreachable (an NPE would occur first).
        if (dbSqlSession.getSqlSession().getConnection() == null) {
            throw new FlowableException("Invalid dbSqlSession: no active connection found");
        }

        // DM adaptation: if no schema was configured, fall back to the schema
        // reported by the JDBC connection.
        try {
            if (getDatabaseSchema() == null || getDatabaseSchema().length() == 0) {
                setDatabaseSchema(dbSqlSession.getSqlSession().getConnection().getSchema());
            }
        } catch (SQLException e) {
            // was: throw new RuntimeException(e) — use the engine's exception
            // type, consistent with the schema/catalog handling below.
            throw new FlowableException("Could not read database schema from connection", e);
        }

        if (getDatabaseSchema() != null && getDatabaseSchema().length() > 0) {
            try {
                dbSqlSession.getSqlSession().getConnection().setSchema(getDatabaseSchema());
            } catch (SQLException e) {
                throw new FlowableException("Could not set database schema on connection", e);
            }
        }

        if (getDatabaseCatalog() != null && getDatabaseCatalog().length() > 0) {
            try {
                dbSqlSession.getSqlSession().getConnection().setCatalog(getDatabaseCatalog());
            } catch (SQLException e) {
                throw new FlowableException("Could not set database catalog on connection", e);
            }
        }

        return dbSqlSession;
    }

    protected DbSqlSession createDbSqlSession() {
        return new DbSqlSession(this, Context.getCommandContext().getSession(EntityCache.class));
    }

    // insert, update and delete statements
    // /////////////////////////////////////

    public String getInsertStatement(Entity object) {
        return getStatement(object.getClass(), insertStatements, "insert");
    }

    public String getInsertStatement(Class<? extends Entity> clazz) {
        return getStatement(clazz, insertStatements, "insert");
    }

    public String getUpdateStatement(Entity object) {
        return getStatement(object.getClass(), updateStatements, "update");
    }

    public String getDeleteStatement(Class<?> entityClass) {
        return getStatement(entityClass, deleteStatements, "delete");
    }

    public String getSelectStatement(Class<?> entityClass) {
        return getStatement(entityClass, selectStatements, "select");
    }

    /**
     * Derives the MyBatis statement id for an entity class, e.g.
     * "insert" + "ExecutionEntityImpl" -> "insertExecution". Cached per class;
     * computeIfAbsent avoids the previous check-then-act on the concurrent map.
     */
    protected String getStatement(Class<?> entityClass, Map<Class<?>, String> cachedStatements, String prefix) {
        return cachedStatements.computeIfAbsent(entityClass, clazz -> {
            String statement = prefix + clazz.getSimpleName();
            if (statement.endsWith("Impl")) {
                return statement.substring(0, statement.length() - 10); // strip "EntityImpl"
            }
            return statement.substring(0, statement.length() - 6); // strip "Entity"
        });
    }

    // db specific mappings
    // /////////////////////////////////////////////////////

    protected void addDatabaseSpecificStatement(String databaseType, String activitiStatement, String ibatisStatement) {
        databaseSpecificStatements
                .computeIfAbsent(databaseType, type -> new HashMap<>())
                .put(activitiStatement, ibatisStatement);
    }

    /**
     * Returns the database-specific statement name for the given logical
     * statement, or the logical name itself when no mapping exists.
     */
    public String mapStatement(String statement) {
        if (statementMappings == null) {
            return statement;
        }
        String mappedStatement = statementMappings.get(statement);
        return (mappedStatement != null ? mappedStatement : statement);
    }

    // customized getters and setters
    // ///////////////////////////////////////////

    public void setDatabaseType(String databaseType) {
        this.databaseType = databaseType;
        this.statementMappings = databaseSpecificStatements.get(databaseType);
    }

    public boolean isMysql() {
        return "mysql".equals(getDatabaseType());
    }

    public boolean isOracle() {
        return "oracle".equals(getDatabaseType());
    }

    public Boolean isBulkInsertable(Class<? extends Entity> entityClass) {
        return bulkInserteableEntityClasses != null && bulkInserteableEntityClasses.contains(entityClass);
    }

    @SuppressWarnings("rawtypes")
    public String getBulkInsertStatement(Class clazz) {
        return getStatement(clazz, bulkInsertStatements, "bulkInsert");
    }

    public Set<Class<? extends Entity>> getBulkInserteableEntityClasses() {
        return bulkInserteableEntityClasses;
    }

    public void setBulkInserteableEntityClasses(Set<Class<? extends Entity>> bulkInserteableEntityClasses) {
        this.bulkInserteableEntityClasses = bulkInserteableEntityClasses;
    }

    public int getMaxNrOfStatementsInBulkInsert() {
        return maxNrOfStatementsInBulkInsert;
    }

    public void setMaxNrOfStatementsInBulkInsert(int maxNrOfStatementsInBulkInsert) {
        this.maxNrOfStatementsInBulkInsert = maxNrOfStatementsInBulkInsert;
    }

    public Map<Class<?>, String> getBulkInsertStatements() {
        return bulkInsertStatements;
    }

    public void setBulkInsertStatements(Map<Class<?>, String> bulkInsertStatements) {
        this.bulkInsertStatements = bulkInsertStatements;
    }

    // getters and setters //////////////////////////////////////////////////////

    public SqlSessionFactory getSqlSessionFactory() {
        return sqlSessionFactory;
    }

    public void setSqlSessionFactory(SqlSessionFactory sqlSessionFactory) {
        this.sqlSessionFactory = sqlSessionFactory;
    }

    public String getDatabaseType() {
        return databaseType;
    }

    public Map<String, Map<String, String>> getDatabaseSpecificStatements() {
        return databaseSpecificStatements;
    }

    public void setDatabaseSpecificStatements(Map<String, Map<String, String>> databaseSpecificStatements) {
        this.databaseSpecificStatements = databaseSpecificStatements;
    }

    public Map<String, String> getStatementMappings() {
        return statementMappings;
    }

    public void setStatementMappings(Map<String, String> statementMappings) {
        this.statementMappings = statementMappings;
    }

    public Map<Class<?>, String> getInsertStatements() {
        return insertStatements;
    }

    public void setInsertStatements(Map<Class<?>, String> insertStatements) {
        this.insertStatements = insertStatements;
    }

    public Map<Class<?>, String> getUpdateStatements() {
        return updateStatements;
    }

    public void setUpdateStatements(Map<Class<?>, String> updateStatements) {
        this.updateStatements = updateStatements;
    }

    public Map<Class<?>, String> getDeleteStatements() {
        return deleteStatements;
    }

    public void setDeleteStatements(Map<Class<?>, String> deleteStatements) {
        this.deleteStatements = deleteStatements;
    }

    public Map<Class<?>, String> getSelectStatements() {
        return selectStatements;
    }

    public void setSelectStatements(Map<Class<?>, String> selectStatements) {
        this.selectStatements = selectStatements;
    }

    public boolean isDbHistoryUsed() {
        return isDbHistoryUsed;
    }

    public void setDbHistoryUsed(boolean isDbHistoryUsed) {
        this.isDbHistoryUsed = isDbHistoryUsed;
    }

    public void setDatabaseTablePrefix(String databaseTablePrefix) {
        this.databaseTablePrefix = databaseTablePrefix;
    }

    public String getDatabaseTablePrefix() {
        return databaseTablePrefix;
    }

    public String getDatabaseCatalog() {
        return databaseCatalog;
    }

    public void setDatabaseCatalog(String databaseCatalog) {
        this.databaseCatalog = databaseCatalog;
    }

    public String getDatabaseSchema() {
        return databaseSchema;
    }

    public void setDatabaseSchema(String databaseSchema) {
        this.databaseSchema = databaseSchema;
    }

    public void setTablePrefixIsSchema(boolean tablePrefixIsSchema) {
        this.tablePrefixIsSchema = tablePrefixIsSchema;
    }

    public boolean isTablePrefixIsSchema() {
        return tablePrefixIsSchema;
    }

    public List<Class<? extends Entity>> getInsertionOrder() {
        return insertionOrder;
    }

    public void setInsertionOrder(List<Class<? extends Entity>> insertionOrder) {
        this.insertionOrder = insertionOrder;
    }

    public List<Class<? extends Entity>> getDeletionOrder() {
        return deletionOrder;
    }

    public void setDeletionOrder(List<Class<? extends Entity>> deletionOrder) {
        this.deletionOrder = deletionOrder;
    }

    public void addLogicalEntityClassMapping(String logicalName, Class<?> entityClass) {
        logicalNameToClassMapping.put(logicalName, entityClass);
    }

    public Map<String, Class<?>> getLogicalNameToClassMapping() {
        return logicalNameToClassMapping;
    }

    public void setLogicalNameToClassMapping(Map<String, Class<?>> logicalNameToClassMapping) {
        this.logicalNameToClassMapping = logicalNameToClassMapping;
    }

    public boolean isUsePrefixId() {
        return usePrefixId;
    }

    public void setUsePrefixId(boolean usePrefixId) {
        this.usePrefixId = usePrefixId;
    }
}

View File

@@ -13,6 +13,7 @@ liquibase.database.core.MariaDBDatabase
liquibase.database.core.MockDatabase liquibase.database.core.MockDatabase
liquibase.database.core.MySQLDatabase liquibase.database.core.MySQLDatabase
liquibase.database.core.OracleDatabase liquibase.database.core.OracleDatabase
liquibase.database.core.DmDatabase
liquibase.database.core.PostgresDatabase liquibase.database.core.PostgresDatabase
liquibase.database.core.SQLiteDatabase liquibase.database.core.SQLiteDatabase
liquibase.database.core.SybaseASADatabase liquibase.database.core.SybaseASADatabase

View File

@@ -0,0 +1 @@
liquibase.datatype.core.DmBooleanType

View File

@@ -39,14 +39,14 @@ spring:
primary: master primary: master
datasource: datasource:
master: master:
url: jdbc:mysql://172.16.46.247:4787/ruoyi-vue-pro?useSSL=false&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true&nullCatalogMeansCurrent=true&rewriteBatchedStatements=true # MySQL Connector/J 8.X 连接的示例 url: jdbc:dm://172.16.46.247:1050?schema=BPM
username: jygk-test username: SYSDBA
password: Zgty@0527 password: pgbsci6ddJ6Sqj@e
slave: # 模拟从库,可根据自己需要修改 # 模拟从库,可根据自己需要修改 slave: # 模拟从库,可根据自己需要修改 # 模拟从库,可根据自己需要修改
lazy: true # 开启懒加载,保证启动速度 lazy: true # 开启懒加载,保证启动速度
url: jdbc:mysql://172.16.46.247:4787/ruoyi-vue-pro?useSSL=false&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true&nullCatalogMeansCurrent=true&rewriteBatchedStatements=true # MySQL Connector/J 8.X 连接的示例 url: jdbc:dm://172.16.46.247:1050?schema=BPM
username: jygk-test username: SYSDBA
password: Zgty@0527 password: pgbsci6ddJ6Sqj@e
# Redis 配置。Redisson 默认的配置足够使用,一般不需要进行调优 # Redis 配置。Redisson 默认的配置足够使用,一般不需要进行调优
data: data:
@@ -56,6 +56,11 @@ spring:
database: 0 # 数据库索引 database: 0 # 数据库索引
# password: 123456 # 密码,建议生产环境开启 # password: 123456 # 密码,建议生产环境开启
# Flowable 在 DM 场景下需要识别为 Oracle 并自动升级表结构
flowable:
database-schema-update: true
database-type: oracle
--- #################### MQ 消息队列相关配置 #################### --- #################### MQ 消息队列相关配置 ####################
--- #################### 定时任务相关配置 #################### --- #################### 定时任务相关配置 ####################

View File

@@ -0,0 +1,41 @@
-- Flowable batch-service schema, Oracle-compatible dialect (used here for DM).
-- Creates the runtime batch tables, their index/FK, and records the schema version.
create table FLW_RU_BATCH (
ID_ VARCHAR2(64) not null,
REV_ INTEGER,
TYPE_ VARCHAR2(64) not null,
SEARCH_KEY_ VARCHAR2(255),
SEARCH_KEY2_ VARCHAR2(255),
CREATE_TIME_ TIMESTAMP(6) not null,
COMPLETE_TIME_ TIMESTAMP(6),
STATUS_ VARCHAR2(255),
BATCH_DOC_ID_ VARCHAR2(64),
TENANT_ID_ VARCHAR2(255) default '',
primary key (ID_)
);
create table FLW_RU_BATCH_PART (
ID_ VARCHAR2(64) not null,
REV_ INTEGER,
BATCH_ID_ VARCHAR2(64),
TYPE_ VARCHAR2(64) not null,
SCOPE_ID_ VARCHAR2(64),
SUB_SCOPE_ID_ VARCHAR2(64),
SCOPE_TYPE_ VARCHAR2(64),
SEARCH_KEY_ VARCHAR2(255),
SEARCH_KEY2_ VARCHAR2(255),
CREATE_TIME_ TIMESTAMP(6) not null,
COMPLETE_TIME_ TIMESTAMP(6),
STATUS_ VARCHAR2(255),
RESULT_DOC_ID_ VARCHAR2(64),
TENANT_ID_ VARCHAR2(255) default '',
primary key (ID_)
);
-- Each batch part points back to its parent batch.
create index FLW_IDX_BATCH_PART on FLW_RU_BATCH_PART(BATCH_ID_);
alter table FLW_RU_BATCH_PART
add constraint FLW_FK_BATCH_PART_PARENT
foreign key (BATCH_ID_)
references FLW_RU_BATCH (ID_);
-- Assumes ACT_GE_PROPERTY already exists (created by the common engine script).
insert into ACT_GE_PROPERTY values ('batch.schema.version', '7.0.1.1', 1);

View File

@@ -0,0 +1,4 @@
-- Teardown for the batch-service schema: child table (and its index) first,
-- then the parent, so the FK never blocks the drop.
drop index FLW_IDX_BATCH_PART;
drop table FLW_RU_BATCH_PART;
drop table FLW_RU_BATCH;

View File

@@ -0,0 +1,23 @@
-- Flowable common engine schema (Oracle-compatible dialect, used here for DM):
-- engine property key/value store and the shared byte-array resource table.
create table ACT_GE_PROPERTY (
NAME_ VARCHAR2(64),
VALUE_ VARCHAR2(300),
REV_ INTEGER,
primary key (NAME_)
);
create table ACT_GE_BYTEARRAY (
ID_ VARCHAR2(64),
REV_ INTEGER,
NAME_ VARCHAR2(255),
DEPLOYMENT_ID_ VARCHAR2(64),
BYTES_ BLOB,
-- NUMBER(1) with a CHECK serves as the boolean, matching the Oracle dialect.
GENERATED_ NUMBER(1) CHECK (GENERATED_ IN (1,0)),
primary key (ID_)
);
-- Seed the schema version and the id-generator starting value.
insert into ACT_GE_PROPERTY
values ('common.schema.version', '7.0.1.1', 1);
insert into ACT_GE_PROPERTY
values ('next.dbid', '1', 1);

View File

@@ -0,0 +1,2 @@
-- Teardown for the common engine schema.
drop table ACT_GE_BYTEARRAY;
drop table ACT_GE_PROPERTY;

View File

@@ -0,0 +1,355 @@
-- Flowable process-engine runtime schema (Oracle-compatible dialect, used for DM):
-- repository (RE_), runtime (RU_) and event-log tables, followed by all indexes
-- and foreign keys, and finally the schema version rows.
-- NOTE(review): several FK targets (ACT_RU_TASK, ACT_RU_VARIABLE, ACT_RU_JOB,
-- ACT_RU_IDENTITYLINK, ...) are not created in this script — presumably they come
-- from a companion script; verify the execution order.
create table ACT_RE_DEPLOYMENT (
ID_ VARCHAR2(64),
NAME_ VARCHAR2(255),
CATEGORY_ VARCHAR2(255),
KEY_ VARCHAR2(255),
TENANT_ID_ VARCHAR2(255) DEFAULT '',
DEPLOY_TIME_ TIMESTAMP(6),
DERIVED_FROM_ VARCHAR2(64),
DERIVED_FROM_ROOT_ VARCHAR2(64),
PARENT_DEPLOYMENT_ID_ VARCHAR2(255),
ENGINE_VERSION_ VARCHAR2(255),
primary key (ID_)
);
create table ACT_RE_MODEL (
ID_ VARCHAR2(64) not null,
REV_ INTEGER,
NAME_ VARCHAR2(255),
KEY_ VARCHAR2(255),
CATEGORY_ VARCHAR2(255),
CREATE_TIME_ TIMESTAMP(6),
LAST_UPDATE_TIME_ TIMESTAMP(6),
VERSION_ INTEGER,
META_INFO_ VARCHAR2(2000),
DEPLOYMENT_ID_ VARCHAR2(64),
EDITOR_SOURCE_VALUE_ID_ VARCHAR2(64),
EDITOR_SOURCE_EXTRA_VALUE_ID_ VARCHAR2(64),
TENANT_ID_ VARCHAR2(255) DEFAULT '',
primary key (ID_)
);
create table ACT_RU_EXECUTION (
ID_ VARCHAR2(64),
REV_ INTEGER,
PROC_INST_ID_ VARCHAR2(64),
BUSINESS_KEY_ VARCHAR2(255),
PARENT_ID_ VARCHAR2(64),
PROC_DEF_ID_ VARCHAR2(64),
SUPER_EXEC_ VARCHAR2(64),
ROOT_PROC_INST_ID_ VARCHAR2(64),
ACT_ID_ VARCHAR2(255),
IS_ACTIVE_ NUMBER(1) CHECK (IS_ACTIVE_ IN (1,0)),
IS_CONCURRENT_ NUMBER(1) CHECK (IS_CONCURRENT_ IN (1,0)),
IS_SCOPE_ NUMBER(1) CHECK (IS_SCOPE_ IN (1,0)),
IS_EVENT_SCOPE_ NUMBER(1) CHECK (IS_EVENT_SCOPE_ IN (1,0)),
IS_MI_ROOT_ NUMBER(1) CHECK (IS_MI_ROOT_ IN (1,0)),
SUSPENSION_STATE_ INTEGER,
CACHED_ENT_STATE_ INTEGER,
TENANT_ID_ VARCHAR2(255) DEFAULT '',
NAME_ VARCHAR2(255),
START_ACT_ID_ VARCHAR2(255),
START_TIME_ TIMESTAMP(6),
START_USER_ID_ VARCHAR2(255),
LOCK_TIME_ TIMESTAMP(6),
LOCK_OWNER_ VARCHAR2(255),
IS_COUNT_ENABLED_ NUMBER(1) CHECK (IS_COUNT_ENABLED_ IN (1,0)),
EVT_SUBSCR_COUNT_ INTEGER,
TASK_COUNT_ INTEGER,
JOB_COUNT_ INTEGER,
TIMER_JOB_COUNT_ INTEGER,
SUSP_JOB_COUNT_ INTEGER,
DEADLETTER_JOB_COUNT_ INTEGER,
EXTERNAL_WORKER_JOB_COUNT_ INTEGER,
VAR_COUNT_ INTEGER,
ID_LINK_COUNT_ INTEGER,
CALLBACK_ID_ VARCHAR2(255),
CALLBACK_TYPE_ VARCHAR2(255),
REFERENCE_ID_ VARCHAR2(255),
REFERENCE_TYPE_ VARCHAR2(255),
PROPAGATED_STAGE_INST_ID_ VARCHAR2(255),
BUSINESS_STATUS_ VARCHAR2(255),
primary key (ID_)
);
create table ACT_RE_PROCDEF (
ID_ VARCHAR2(64) NOT NULL,
REV_ INTEGER,
CATEGORY_ VARCHAR2(255),
NAME_ VARCHAR2(255),
KEY_ VARCHAR2(255) NOT NULL,
VERSION_ INTEGER NOT NULL,
DEPLOYMENT_ID_ VARCHAR2(64),
RESOURCE_NAME_ VARCHAR2(2000),
DGRM_RESOURCE_NAME_ VARCHAR2(4000),
DESCRIPTION_ VARCHAR2(2000),
HAS_START_FORM_KEY_ NUMBER(1) CHECK (HAS_START_FORM_KEY_ IN (1,0)),
HAS_GRAPHICAL_NOTATION_ NUMBER(1) CHECK (HAS_GRAPHICAL_NOTATION_ IN (1,0)),
SUSPENSION_STATE_ INTEGER,
TENANT_ID_ VARCHAR2(255) DEFAULT '',
DERIVED_FROM_ VARCHAR2(64),
DERIVED_FROM_ROOT_ VARCHAR2(64),
DERIVED_VERSION_ INTEGER DEFAULT 0 NOT NULL,
ENGINE_VERSION_ VARCHAR2(255),
primary key (ID_)
);
create table ACT_EVT_LOG (
LOG_NR_ NUMBER(19),
TYPE_ VARCHAR2(64),
PROC_DEF_ID_ VARCHAR2(64),
PROC_INST_ID_ VARCHAR2(64),
EXECUTION_ID_ VARCHAR2(64),
TASK_ID_ VARCHAR2(64),
TIME_STAMP_ TIMESTAMP(6) not null,
USER_ID_ VARCHAR2(255),
DATA_ BLOB,
LOCK_OWNER_ VARCHAR2(255),
LOCK_TIME_ TIMESTAMP(6) null,
IS_PROCESSED_ NUMBER(3) default 0,
primary key (LOG_NR_)
);
-- Sequence feeding ACT_EVT_LOG.LOG_NR_ (Oracle-style, no identity column).
create sequence act_evt_log_seq;
create table ACT_PROCDEF_INFO (
ID_ VARCHAR2(64) not null,
PROC_DEF_ID_ VARCHAR2(64) not null,
REV_ integer,
INFO_JSON_ID_ VARCHAR2(64),
primary key (ID_)
);
create table ACT_RU_ACTINST (
ID_ VARCHAR2(64) not null,
REV_ INTEGER default 1,
PROC_DEF_ID_ VARCHAR2(64) not null,
PROC_INST_ID_ VARCHAR2(64) not null,
EXECUTION_ID_ VARCHAR2(64) not null,
ACT_ID_ VARCHAR2(255) not null,
TASK_ID_ VARCHAR2(64),
CALL_PROC_INST_ID_ VARCHAR2(64),
ACT_NAME_ VARCHAR2(255),
ACT_TYPE_ VARCHAR2(255) not null,
ASSIGNEE_ VARCHAR2(255),
START_TIME_ TIMESTAMP(6) not null,
END_TIME_ TIMESTAMP(6),
DURATION_ NUMBER(19,0),
TRANSACTION_ORDER_ INTEGER,
DELETE_REASON_ VARCHAR2(2000),
TENANT_ID_ VARCHAR2(255) default '',
primary key (ID_)
);
-- Indexes ------------------------------------------------------------------
create index ACT_IDX_EXEC_BUSKEY on ACT_RU_EXECUTION(BUSINESS_KEY_);
create index ACT_IDX_EXEC_ROOT on ACT_RU_EXECUTION(ROOT_PROC_INST_ID_);
create index ACT_IDX_EXEC_REF_ID_ on ACT_RU_EXECUTION(REFERENCE_ID_);
create index ACT_IDX_VARIABLE_TASK_ID on ACT_RU_VARIABLE(TASK_ID_);
create index ACT_IDX_RU_ACTI_START on ACT_RU_ACTINST(START_TIME_);
create index ACT_IDX_RU_ACTI_END on ACT_RU_ACTINST(END_TIME_);
create index ACT_IDX_RU_ACTI_PROC on ACT_RU_ACTINST(PROC_INST_ID_);
create index ACT_IDX_RU_ACTI_PROC_ACT on ACT_RU_ACTINST(PROC_INST_ID_, ACT_ID_);
create index ACT_IDX_RU_ACTI_EXEC on ACT_RU_ACTINST(EXECUTION_ID_);
create index ACT_IDX_RU_ACTI_EXEC_ACT on ACT_RU_ACTINST(EXECUTION_ID_, ACT_ID_);
create index ACT_IDX_RU_ACTI_TASK on ACT_RU_ACTINST(TASK_ID_);
create index ACT_IDX_BYTEAR_DEPL on ACT_GE_BYTEARRAY(DEPLOYMENT_ID_);
-- Foreign keys and uniqueness constraints ----------------------------------
alter table ACT_GE_BYTEARRAY
add constraint ACT_FK_BYTEARR_DEPL
foreign key (DEPLOYMENT_ID_)
references ACT_RE_DEPLOYMENT (ID_);
alter table ACT_RE_PROCDEF
add constraint ACT_UNIQ_PROCDEF
unique (KEY_,VERSION_, DERIVED_VERSION_, TENANT_ID_);
create index ACT_IDX_EXE_PROCINST on ACT_RU_EXECUTION(PROC_INST_ID_);
alter table ACT_RU_EXECUTION
add constraint ACT_FK_EXE_PROCINST
foreign key (PROC_INST_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_EXE_PARENT on ACT_RU_EXECUTION(PARENT_ID_);
alter table ACT_RU_EXECUTION
add constraint ACT_FK_EXE_PARENT
foreign key (PARENT_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_EXE_SUPER on ACT_RU_EXECUTION(SUPER_EXEC_);
alter table ACT_RU_EXECUTION
add constraint ACT_FK_EXE_SUPER
foreign key (SUPER_EXEC_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_EXE_PROCDEF on ACT_RU_EXECUTION(PROC_DEF_ID_);
alter table ACT_RU_EXECUTION
add constraint ACT_FK_EXE_PROCDEF
foreign key (PROC_DEF_ID_)
references ACT_RE_PROCDEF (ID_);
create index ACT_IDX_TSKASS_TASK on ACT_RU_IDENTITYLINK(TASK_ID_);
alter table ACT_RU_IDENTITYLINK
add constraint ACT_FK_TSKASS_TASK
foreign key (TASK_ID_)
references ACT_RU_TASK (ID_);
create index ACT_IDX_ATHRZ_PROCEDEF on ACT_RU_IDENTITYLINK(PROC_DEF_ID_);
alter table ACT_RU_IDENTITYLINK
add constraint ACT_FK_ATHRZ_PROCEDEF
foreign key (PROC_DEF_ID_)
references ACT_RE_PROCDEF (ID_);
create index ACT_IDX_IDL_PROCINST on ACT_RU_IDENTITYLINK(PROC_INST_ID_);
alter table ACT_RU_IDENTITYLINK
add constraint ACT_FK_IDL_PROCINST
foreign key (PROC_INST_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_TASK_EXEC on ACT_RU_TASK(EXECUTION_ID_);
alter table ACT_RU_TASK
add constraint ACT_FK_TASK_EXE
foreign key (EXECUTION_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_TASK_PROCINST on ACT_RU_TASK(PROC_INST_ID_);
alter table ACT_RU_TASK
add constraint ACT_FK_TASK_PROCINST
foreign key (PROC_INST_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_TASK_PROCDEF on ACT_RU_TASK(PROC_DEF_ID_);
alter table ACT_RU_TASK
add constraint ACT_FK_TASK_PROCDEF
foreign key (PROC_DEF_ID_)
references ACT_RE_PROCDEF (ID_);
create index ACT_IDX_VAR_EXE on ACT_RU_VARIABLE(EXECUTION_ID_);
alter table ACT_RU_VARIABLE
add constraint ACT_FK_VAR_EXE
foreign key (EXECUTION_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_VAR_PROCINST on ACT_RU_VARIABLE(PROC_INST_ID_);
alter table ACT_RU_VARIABLE
add constraint ACT_FK_VAR_PROCINST
foreign key (PROC_INST_ID_)
references ACT_RU_EXECUTION(ID_);
create index ACT_IDX_JOB_EXECUTION_ID on ACT_RU_JOB(EXECUTION_ID_);
alter table ACT_RU_JOB
add constraint ACT_FK_JOB_EXECUTION
foreign key (EXECUTION_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_JOB_PROC_INST_ID on ACT_RU_JOB(PROCESS_INSTANCE_ID_);
alter table ACT_RU_JOB
add constraint ACT_FK_JOB_PROCESS_INSTANCE
foreign key (PROCESS_INSTANCE_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_JOB_PROC_DEF_ID on ACT_RU_JOB(PROC_DEF_ID_);
alter table ACT_RU_JOB
add constraint ACT_FK_JOB_PROC_DEF
foreign key (PROC_DEF_ID_)
references ACT_RE_PROCDEF (ID_);
create index ACT_IDX_TJOB_EXECUTION_ID on ACT_RU_TIMER_JOB(EXECUTION_ID_);
alter table ACT_RU_TIMER_JOB
add constraint ACT_FK_TJOB_EXECUTION
foreign key (EXECUTION_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_TJOB_PROC_INST_ID on ACT_RU_TIMER_JOB(PROCESS_INSTANCE_ID_);
alter table ACT_RU_TIMER_JOB
add constraint ACT_FK_TJOB_PROCESS_INSTANCE
foreign key (PROCESS_INSTANCE_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_TJOB_PROC_DEF_ID on ACT_RU_TIMER_JOB(PROC_DEF_ID_);
alter table ACT_RU_TIMER_JOB
add constraint ACT_FK_TJOB_PROC_DEF
foreign key (PROC_DEF_ID_)
references ACT_RE_PROCDEF (ID_);
create index ACT_IDX_SJOB_EXECUTION_ID on ACT_RU_SUSPENDED_JOB(EXECUTION_ID_);
alter table ACT_RU_SUSPENDED_JOB
add constraint ACT_FK_SJOB_EXECUTION
foreign key (EXECUTION_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_SJOB_PROC_INST_ID on ACT_RU_SUSPENDED_JOB(PROCESS_INSTANCE_ID_);
alter table ACT_RU_SUSPENDED_JOB
add constraint ACT_FK_SJOB_PROCESS_INSTANCE
foreign key (PROCESS_INSTANCE_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_SJOB_PROC_DEF_ID on ACT_RU_SUSPENDED_JOB(PROC_DEF_ID_);
alter table ACT_RU_SUSPENDED_JOB
add constraint ACT_FK_SJOB_PROC_DEF
foreign key (PROC_DEF_ID_)
references ACT_RE_PROCDEF (ID_);
create index ACT_IDX_DJOB_EXECUTION_ID on ACT_RU_DEADLETTER_JOB(EXECUTION_ID_);
alter table ACT_RU_DEADLETTER_JOB
add constraint ACT_FK_DJOB_EXECUTION
foreign key (EXECUTION_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_DJOB_PROC_INST_ID on ACT_RU_DEADLETTER_JOB(PROCESS_INSTANCE_ID_);
alter table ACT_RU_DEADLETTER_JOB
add constraint ACT_FK_DJOB_PROCESS_INSTANCE
foreign key (PROCESS_INSTANCE_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_DJOB_PROC_DEF_ID on ACT_RU_DEADLETTER_JOB(PROC_DEF_ID_);
alter table ACT_RU_DEADLETTER_JOB
add constraint ACT_FK_DJOB_PROC_DEF
foreign key (PROC_DEF_ID_)
references ACT_RE_PROCDEF (ID_);
alter table ACT_RU_EVENT_SUBSCR
add constraint ACT_FK_EVENT_EXEC
foreign key (EXECUTION_ID_)
references ACT_RU_EXECUTION(ID_);
create index ACT_IDX_MODEL_SOURCE on ACT_RE_MODEL(EDITOR_SOURCE_VALUE_ID_);
alter table ACT_RE_MODEL
add constraint ACT_FK_MODEL_SOURCE
foreign key (EDITOR_SOURCE_VALUE_ID_)
references ACT_GE_BYTEARRAY (ID_);
create index ACT_IDX_MODEL_SOURCE_EXTRA on ACT_RE_MODEL(EDITOR_SOURCE_EXTRA_VALUE_ID_);
alter table ACT_RE_MODEL
add constraint ACT_FK_MODEL_SOURCE_EXTRA
foreign key (EDITOR_SOURCE_EXTRA_VALUE_ID_)
references ACT_GE_BYTEARRAY (ID_);
create index ACT_IDX_MODEL_DEPLOYMENT on ACT_RE_MODEL(DEPLOYMENT_ID_);
alter table ACT_RE_MODEL
add constraint ACT_FK_MODEL_DEPLOYMENT
foreign key (DEPLOYMENT_ID_)
references ACT_RE_DEPLOYMENT (ID_);
create index ACT_IDX_PROCDEF_INFO_JSON on ACT_PROCDEF_INFO(INFO_JSON_ID_);
alter table ACT_PROCDEF_INFO
add constraint ACT_FK_INFO_JSON_BA
foreign key (INFO_JSON_ID_)
references ACT_GE_BYTEARRAY (ID_);
create index ACT_IDX_PROCDEF_INFO_PROC on ACT_PROCDEF_INFO(PROC_DEF_ID_);
alter table ACT_PROCDEF_INFO
add constraint ACT_FK_INFO_PROCDEF
foreign key (PROC_DEF_ID_)
references ACT_RE_PROCDEF (ID_);
alter table ACT_PROCDEF_INFO
add constraint ACT_UNIQ_INFO_PROCDEF
unique (PROC_DEF_ID_);
-- Record the process-engine schema version.
insert into ACT_GE_PROPERTY
values ('schema.version', '7.0.1.1', 1);
insert into ACT_GE_PROPERTY
values ('schema.history', 'create(7.0.1.1)', 1);

View File

@@ -0,0 +1,114 @@
-- Flowable history schema (Oracle-compatible dialect, used for DM):
-- historic process instances, activity instances, details, comments and
-- attachments, followed by the history indexes.
-- NOTE(review): indexes below also reference ACT_HI_VARINST, ACT_HI_IDENTITYLINK
-- and ACT_HI_TASKINST, whose CREATE statements are not visible in this excerpt —
-- presumably they appear elsewhere in the same script; verify before running.
create table ACT_HI_PROCINST (
ID_ VARCHAR2(64) not null,
REV_ INTEGER default 1,
PROC_INST_ID_ VARCHAR2(64) not null,
BUSINESS_KEY_ VARCHAR2(255),
PROC_DEF_ID_ VARCHAR2(64) not null,
START_TIME_ TIMESTAMP(6) not null,
END_TIME_ TIMESTAMP(6),
DURATION_ NUMBER(19,0),
START_USER_ID_ VARCHAR2(255),
START_ACT_ID_ VARCHAR2(255),
END_ACT_ID_ VARCHAR2(255),
SUPER_PROCESS_INSTANCE_ID_ VARCHAR2(64),
DELETE_REASON_ VARCHAR2(2000),
TENANT_ID_ VARCHAR2(255) default '',
NAME_ VARCHAR2(255),
CALLBACK_ID_ VARCHAR2(255),
CALLBACK_TYPE_ VARCHAR2(255),
REFERENCE_ID_ VARCHAR2(255),
REFERENCE_TYPE_ VARCHAR2(255),
PROPAGATED_STAGE_INST_ID_ VARCHAR2(255),
BUSINESS_STATUS_ VARCHAR2(255),
primary key (ID_),
unique (PROC_INST_ID_)
);
create table ACT_HI_ACTINST (
ID_ VARCHAR2(64) not null,
REV_ INTEGER default 1,
PROC_DEF_ID_ VARCHAR2(64) not null,
PROC_INST_ID_ VARCHAR2(64) not null,
EXECUTION_ID_ VARCHAR2(64) not null,
ACT_ID_ VARCHAR2(255) not null,
TASK_ID_ VARCHAR2(64),
CALL_PROC_INST_ID_ VARCHAR2(64),
ACT_NAME_ VARCHAR2(255),
ACT_TYPE_ VARCHAR2(255) not null,
ASSIGNEE_ VARCHAR2(255),
START_TIME_ TIMESTAMP(6) not null,
END_TIME_ TIMESTAMP(6),
TRANSACTION_ORDER_ INTEGER,
DURATION_ NUMBER(19,0),
DELETE_REASON_ VARCHAR2(2000),
TENANT_ID_ VARCHAR2(255) default '',
primary key (ID_)
);
create table ACT_HI_DETAIL (
ID_ VARCHAR2(64) not null,
TYPE_ VARCHAR2(255) not null,
PROC_INST_ID_ VARCHAR2(64),
EXECUTION_ID_ VARCHAR2(64),
TASK_ID_ VARCHAR2(64),
ACT_INST_ID_ VARCHAR2(64),
NAME_ VARCHAR2(255) not null,
VAR_TYPE_ VARCHAR2(64),
REV_ INTEGER,
TIME_ TIMESTAMP(6) not null,
BYTEARRAY_ID_ VARCHAR2(64),
DOUBLE_ NUMBER(38,10),
LONG_ NUMBER(19,0),
TEXT_ VARCHAR2(2000),
TEXT2_ VARCHAR2(2000),
primary key (ID_)
);
create table ACT_HI_COMMENT (
ID_ VARCHAR2(64) not null,
TYPE_ VARCHAR2(255),
TIME_ TIMESTAMP(6) not null,
USER_ID_ VARCHAR2(255),
TASK_ID_ VARCHAR2(64),
PROC_INST_ID_ VARCHAR2(64),
ACTION_ VARCHAR2(255),
MESSAGE_ VARCHAR2(2000),
FULL_MSG_ BLOB,
primary key (ID_)
);
create table ACT_HI_ATTACHMENT (
ID_ VARCHAR2(64) not null,
REV_ INTEGER,
USER_ID_ VARCHAR2(255),
NAME_ VARCHAR2(255),
DESCRIPTION_ VARCHAR2(2000),
TYPE_ VARCHAR2(255),
TASK_ID_ VARCHAR2(64),
PROC_INST_ID_ VARCHAR2(64),
URL_ VARCHAR2(2000),
CONTENT_ID_ VARCHAR2(64),
TIME_ TIMESTAMP(6),
primary key (ID_)
);
-- History indexes ----------------------------------------------------------
create index ACT_IDX_HI_PRO_INST_END on ACT_HI_PROCINST(END_TIME_);
create index ACT_IDX_HI_PRO_I_BUSKEY on ACT_HI_PROCINST(BUSINESS_KEY_);
create index ACT_IDX_HI_PRO_SUPER_PROCINST on ACT_HI_PROCINST(SUPER_PROCESS_INSTANCE_ID_);
create index ACT_IDX_HI_ACT_INST_START on ACT_HI_ACTINST(START_TIME_);
create index ACT_IDX_HI_ACT_INST_END on ACT_HI_ACTINST(END_TIME_);
create index ACT_IDX_HI_DETAIL_PROC_INST on ACT_HI_DETAIL(PROC_INST_ID_);
create index ACT_IDX_HI_DETAIL_ACT_INST on ACT_HI_DETAIL(ACT_INST_ID_);
create index ACT_IDX_HI_DETAIL_TIME on ACT_HI_DETAIL(TIME_);
create index ACT_IDX_HI_DETAIL_NAME on ACT_HI_DETAIL(NAME_);
create index ACT_IDX_HI_DETAIL_TASK_ID on ACT_HI_DETAIL(TASK_ID_);
create index ACT_IDX_HI_PROCVAR_PROC_INST on ACT_HI_VARINST(PROC_INST_ID_);
create index ACT_IDX_HI_PROCVAR_TASK_ID on ACT_HI_VARINST(TASK_ID_);
create index ACT_IDX_HI_PROCVAR_EXE on ACT_HI_VARINST(EXECUTION_ID_);
create index ACT_IDX_HI_IDENT_LNK_TASK on ACT_HI_IDENTITYLINK(TASK_ID_);
create index ACT_IDX_HI_IDENT_LNK_PROCINST on ACT_HI_IDENTITYLINK(PROC_INST_ID_);
create index ACT_IDX_HI_ACT_INST_PROCINST on ACT_HI_ACTINST(PROC_INST_ID_, ACT_ID_);
create index ACT_IDX_HI_ACT_INST_EXEC on ACT_HI_ACTINST(EXECUTION_ID_, ACT_ID_);
create index ACT_IDX_HI_TASK_INST_PROCINST on ACT_HI_TASKINST(PROC_INST_ID_);

View File

@@ -0,0 +1,148 @@
drop index ACT_IDX_BYTEAR_DEPL;
drop index ACT_IDX_EXE_PROCINST;
drop index ACT_IDX_EXE_PARENT;
drop index ACT_IDX_EXE_SUPER;
drop index ACT_IDX_TSKASS_TASK;
drop index ACT_IDX_TASK_EXEC;
drop index ACT_IDX_TASK_PROCINST;
drop index ACT_IDX_TASK_PROCDEF;
drop index ACT_IDX_VAR_EXE;
drop index ACT_IDX_VAR_PROCINST;
drop index ACT_IDX_JOB_EXECUTION_ID;
drop index ACT_IDX_JOB_PROC_INST_ID;
drop index ACT_IDX_JOB_PROC_DEF_ID;
drop index ACT_IDX_TJOB_EXECUTION_ID;
drop index ACT_IDX_TJOB_PROC_INST_ID;
drop index ACT_IDX_TJOB_PROC_DEF_ID;
drop index ACT_IDX_SJOB_EXECUTION_ID;
drop index ACT_IDX_SJOB_PROC_INST_ID;
drop index ACT_IDX_SJOB_PROC_DEF_ID;
drop index ACT_IDX_DJOB_EXECUTION_ID;
drop index ACT_IDX_DJOB_PROC_INST_ID;
drop index ACT_IDX_DJOB_PROC_DEF_ID;
drop index ACT_IDX_MODEL_SOURCE;
drop index ACT_IDX_MODEL_SOURCE_EXTRA;
drop index ACT_IDX_MODEL_DEPLOYMENT;
drop index ACT_IDX_PROCDEF_INFO_JSON;
drop index ACT_IDX_EXEC_BUSKEY;
drop index ACT_IDX_VARIABLE_TASK_ID;
drop index ACT_IDX_RU_ACTI_START;
drop index ACT_IDX_RU_ACTI_END;
drop index ACT_IDX_RU_ACTI_PROC;
drop index ACT_IDX_RU_ACTI_PROC_ACT;
drop index ACT_IDX_RU_ACTI_EXEC;
drop index ACT_IDX_RU_ACTI_EXEC_ACT;
alter table ACT_GE_BYTEARRAY
drop CONSTRAINT ACT_FK_BYTEARR_DEPL;
alter table ACT_RU_EXECUTION
drop CONSTRAINT ACT_FK_EXE_PROCINST;
alter table ACT_RU_EXECUTION
drop CONSTRAINT ACT_FK_EXE_PARENT;
alter table ACT_RU_EXECUTION
drop CONSTRAINT ACT_FK_EXE_SUPER;
alter table ACT_RU_EXECUTION
drop CONSTRAINT ACT_FK_EXE_PROCDEF;
alter table ACT_RU_IDENTITYLINK
drop CONSTRAINT ACT_FK_TSKASS_TASK;
alter table ACT_RU_IDENTITYLINK
drop CONSTRAINT ACT_FK_IDL_PROCINST;
alter table ACT_RU_IDENTITYLINK
drop CONSTRAINT ACT_FK_ATHRZ_PROCEDEF;
alter table ACT_RU_TASK
drop CONSTRAINT ACT_FK_TASK_EXE;
alter table ACT_RU_TASK
drop CONSTRAINT ACT_FK_TASK_PROCINST;
alter table ACT_RU_TASK
drop CONSTRAINT ACT_FK_TASK_PROCDEF;
alter table ACT_RU_VARIABLE
drop CONSTRAINT ACT_FK_VAR_EXE;
alter table ACT_RU_VARIABLE
drop CONSTRAINT ACT_FK_VAR_PROCINST;
alter table ACT_RU_JOB
drop CONSTRAINT ACT_FK_JOB_EXECUTION;
alter table ACT_RU_JOB
drop CONSTRAINT ACT_FK_JOB_PROCESS_INSTANCE;
alter table ACT_RU_JOB
drop CONSTRAINT ACT_FK_JOB_PROC_DEF;
alter table ACT_RU_TIMER_JOB
drop CONSTRAINT ACT_FK_TJOB_EXECUTION;
alter table ACT_RU_TIMER_JOB
drop CONSTRAINT ACT_FK_TJOB_PROCESS_INSTANCE;
alter table ACT_RU_TIMER_JOB
drop CONSTRAINT ACT_FK_TJOB_PROC_DEF;
alter table ACT_RU_SUSPENDED_JOB
drop CONSTRAINT ACT_FK_SJOB_EXECUTION;
alter table ACT_RU_SUSPENDED_JOB
drop CONSTRAINT ACT_FK_SJOB_PROCESS_INSTANCE;
alter table ACT_RU_SUSPENDED_JOB
drop CONSTRAINT ACT_FK_SJOB_PROC_DEF;
alter table ACT_RU_DEADLETTER_JOB
drop CONSTRAINT ACT_FK_DJOB_EXECUTION;
alter table ACT_RU_DEADLETTER_JOB
drop CONSTRAINT ACT_FK_DJOB_PROCESS_INSTANCE;
alter table ACT_RU_DEADLETTER_JOB
drop CONSTRAINT ACT_FK_DJOB_PROC_DEF;
alter table ACT_RU_EVENT_SUBSCR
drop CONSTRAINT ACT_FK_EVENT_EXEC;
alter table ACT_RE_PROCDEF
drop CONSTRAINT ACT_UNIQ_PROCDEF;
alter table ACT_RE_MODEL
drop CONSTRAINT ACT_FK_MODEL_SOURCE;
alter table ACT_RE_MODEL
drop CONSTRAINT ACT_FK_MODEL_SOURCE_EXTRA;
alter table ACT_RE_MODEL
drop CONSTRAINT ACT_FK_MODEL_DEPLOYMENT;
alter table ACT_PROCDEF_INFO
drop CONSTRAINT ACT_UNIQ_INFO_PROCDEF;
alter table ACT_PROCDEF_INFO
drop CONSTRAINT ACT_FK_INFO_JSON_BA;
alter table ACT_PROCDEF_INFO
drop CONSTRAINT ACT_FK_INFO_PROCDEF;
drop index ACT_IDX_ATHRZ_PROCEDEF;
drop index ACT_IDX_PROCDEF_INFO_PROC;
drop table ACT_RU_ACTINST;
drop table ACT_RE_DEPLOYMENT;
drop table ACT_RE_MODEL;
drop table ACT_RE_PROCDEF;
drop table ACT_RU_EXECUTION;
drop sequence act_evt_log_seq;
drop table ACT_EVT_LOG;
drop table ACT_PROCDEF_INFO;

View File

@@ -0,0 +1,23 @@
-- Drop script for the Flowable history schema (Oracle/DM dialect).
-- Removes the history indexes first, then the history tables themselves.
-- NOTE(review): the matching create script also builds
-- ACT_IDX_HI_PRO_SUPER_PROCINST and ACT_IDX_HI_ACT_INST_EXEC, which are not
-- dropped explicitly here; presumably this is safe because dropping a table
-- also drops its indexes on Oracle/DM — confirm against the target database.
drop index ACT_IDX_HI_PRO_INST_END;
drop index ACT_IDX_HI_PRO_I_BUSKEY;
drop index ACT_IDX_HI_ACT_INST_START;
drop index ACT_IDX_HI_ACT_INST_END;
drop index ACT_IDX_HI_DETAIL_PROC_INST;
drop index ACT_IDX_HI_DETAIL_ACT_INST;
drop index ACT_IDX_HI_DETAIL_TIME;
drop index ACT_IDX_HI_DETAIL_NAME;
drop index ACT_IDX_HI_DETAIL_TASK_ID;
drop index ACT_IDX_HI_PROCVAR_PROC_INST;
drop index ACT_IDX_HI_PROCVAR_TASK_ID;
drop index ACT_IDX_HI_PROCVAR_EXE;
drop index ACT_IDX_HI_ACT_INST_PROCINST;
drop index ACT_IDX_HI_IDENT_LNK_TASK;
drop index ACT_IDX_HI_IDENT_LNK_PROCINST;
drop index ACT_IDX_HI_TASK_INST_PROCINST;
-- Tables are dropped after their indexes.
drop table ACT_HI_PROCINST;
drop table ACT_HI_ACTINST;
drop table ACT_HI_DETAIL;
drop table ACT_HI_COMMENT;
drop table ACT_HI_ATTACHMENT;

View File

@@ -0,0 +1,23 @@
create table ACT_HI_ENTITYLINK (
ID_ VARCHAR2(64),
LINK_TYPE_ VARCHAR2(255),
CREATE_TIME_ TIMESTAMP(6),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
PARENT_ELEMENT_ID_ VARCHAR2(255),
REF_SCOPE_ID_ VARCHAR2(255),
REF_SCOPE_TYPE_ VARCHAR2(255),
REF_SCOPE_DEFINITION_ID_ VARCHAR2(255),
ROOT_SCOPE_ID_ VARCHAR2(255),
ROOT_SCOPE_TYPE_ VARCHAR2(255),
HIERARCHY_TYPE_ VARCHAR2(255),
primary key (ID_)
);
create index ACT_IDX_HI_ENT_LNK_SCOPE on ACT_HI_ENTITYLINK(SCOPE_ID_, SCOPE_TYPE_, LINK_TYPE_);
create index ACT_IDX_HI_ENT_LNK_REF_SCOPE on ACT_HI_ENTITYLINK(REF_SCOPE_ID_, REF_SCOPE_TYPE_, LINK_TYPE_);
create index ACT_IDX_HI_ENT_LNK_ROOT_SCOPE on ACT_HI_ENTITYLINK(ROOT_SCOPE_ID_, ROOT_SCOPE_TYPE_, LINK_TYPE_);
create index ACT_IDX_HI_ENT_LNK_SCOPE_DEF on ACT_HI_ENTITYLINK(SCOPE_DEFINITION_ID_, SCOPE_TYPE_, LINK_TYPE_);

View File

@@ -0,0 +1,26 @@
create table ACT_RU_ENTITYLINK (
ID_ VARCHAR2(64),
REV_ INTEGER,
CREATE_TIME_ TIMESTAMP(6),
LINK_TYPE_ VARCHAR2(255),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
PARENT_ELEMENT_ID_ VARCHAR2(255),
REF_SCOPE_ID_ VARCHAR2(255),
REF_SCOPE_TYPE_ VARCHAR2(255),
REF_SCOPE_DEFINITION_ID_ VARCHAR2(255),
ROOT_SCOPE_ID_ VARCHAR2(255),
ROOT_SCOPE_TYPE_ VARCHAR2(255),
HIERARCHY_TYPE_ VARCHAR2(255),
primary key (ID_)
);
create index ACT_IDX_ENT_LNK_SCOPE on ACT_RU_ENTITYLINK(SCOPE_ID_, SCOPE_TYPE_, LINK_TYPE_);
create index ACT_IDX_ENT_LNK_REF_SCOPE on ACT_RU_ENTITYLINK(REF_SCOPE_ID_, REF_SCOPE_TYPE_, LINK_TYPE_);
create index ACT_IDX_ENT_LNK_ROOT_SCOPE on ACT_RU_ENTITYLINK(ROOT_SCOPE_ID_, ROOT_SCOPE_TYPE_, LINK_TYPE_);
create index ACT_IDX_ENT_LNK_SCOPE_DEF on ACT_RU_ENTITYLINK(SCOPE_DEFINITION_ID_, SCOPE_TYPE_, LINK_TYPE_);
insert into ACT_GE_PROPERTY values ('entitylink.schema.version', '7.0.1.1', 1);

View File

@@ -0,0 +1,4 @@
-- Drop script for the historic entity-link schema (Oracle/DM dialect).
-- The matching create script builds four indexes on ACT_HI_ENTITYLINK
-- (SCOPE, REF_SCOPE, ROOT_SCOPE, SCOPE_DEF); drop all four explicitly so this
-- script mirrors the create script instead of relying on the implicit index
-- cleanup performed by the table drop.
drop index ACT_IDX_HI_ENT_LNK_SCOPE;
drop index ACT_IDX_HI_ENT_LNK_REF_SCOPE;
drop index ACT_IDX_HI_ENT_LNK_ROOT_SCOPE;
drop index ACT_IDX_HI_ENT_LNK_SCOPE_DEF;
drop table ACT_HI_ENTITYLINK;

View File

@@ -0,0 +1,4 @@
-- Drop script for the runtime entity-link schema (Oracle/DM dialect).
-- The matching create script builds four indexes on ACT_RU_ENTITYLINK
-- (SCOPE, REF_SCOPE, ROOT_SCOPE, SCOPE_DEF); drop all four explicitly so this
-- script mirrors the create script instead of relying on the implicit index
-- cleanup performed by the table drop.
drop index ACT_IDX_ENT_LNK_SCOPE;
drop index ACT_IDX_ENT_LNK_REF_SCOPE;
drop index ACT_IDX_ENT_LNK_ROOT_SCOPE;
drop index ACT_IDX_ENT_LNK_SCOPE_DEF;
drop table ACT_RU_ENTITYLINK;

View File

@@ -0,0 +1,28 @@
create table ACT_RU_EVENT_SUBSCR (
ID_ VARCHAR2(64) not null,
REV_ integer,
EVENT_TYPE_ VARCHAR2(255) not null,
EVENT_NAME_ VARCHAR2(255),
EXECUTION_ID_ VARCHAR2(64),
PROC_INST_ID_ VARCHAR2(64),
ACTIVITY_ID_ VARCHAR2(64),
CONFIGURATION_ VARCHAR2(255),
CREATED_ TIMESTAMP(6) not null,
PROC_DEF_ID_ VARCHAR2(64),
SUB_SCOPE_ID_ VARCHAR2(64),
SCOPE_ID_ VARCHAR2(64),
SCOPE_DEFINITION_ID_ VARCHAR2(64),
SCOPE_DEFINITION_KEY_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(64),
LOCK_TIME_ TIMESTAMP(6),
LOCK_OWNER_ VARCHAR2(255),
TENANT_ID_ VARCHAR2(255) DEFAULT '',
primary key (ID_)
);
create index ACT_IDX_EVENT_SUBSCR_CONFIG_ on ACT_RU_EVENT_SUBSCR(CONFIGURATION_);
create index ACT_IDX_EVENT_SUBSCR on ACT_RU_EVENT_SUBSCR(EXECUTION_ID_);
create index ACT_IDX_EVENT_SUBSCR_SCOPEREF_ on ACT_RU_EVENT_SUBSCR(SCOPE_ID_, SCOPE_TYPE_);
insert into ACT_GE_PROPERTY values ('eventsubscription.schema.version', '7.0.1.1', 1);

View File

@@ -0,0 +1,5 @@
drop index ACT_IDX_EVENT_SUBSCR_CONFIG_;
drop index ACT_IDX_EVENT_SUBSCR;
drop index ACT_IDX_EVENT_SUBSCR_SCOPEREF_;
drop table ACT_RU_EVENT_SUBSCR;

View File

@@ -0,0 +1,20 @@
create table ACT_HI_IDENTITYLINK (
ID_ VARCHAR2(64),
GROUP_ID_ VARCHAR2(255),
TYPE_ VARCHAR2(255),
USER_ID_ VARCHAR2(255),
TASK_ID_ VARCHAR2(64),
CREATE_TIME_ TIMESTAMP(6),
PROC_INST_ID_ VARCHAR2(64),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
primary key (ID_)
);
create index ACT_IDX_HI_IDENT_LNK_USER on ACT_HI_IDENTITYLINK(USER_ID_);
create index ACT_IDX_HI_IDENT_LNK_SCOPE on ACT_HI_IDENTITYLINK(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_HI_IDENT_LNK_SUB_SCOPE on ACT_HI_IDENTITYLINK(SUB_SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_HI_IDENT_LNK_SCOPE_DEF on ACT_HI_IDENTITYLINK(SCOPE_DEFINITION_ID_, SCOPE_TYPE_);

View File

@@ -0,0 +1,24 @@
create table ACT_RU_IDENTITYLINK (
ID_ VARCHAR2(64),
REV_ INTEGER,
GROUP_ID_ VARCHAR2(255),
TYPE_ VARCHAR2(255),
USER_ID_ VARCHAR2(255),
TASK_ID_ VARCHAR2(64),
PROC_INST_ID_ VARCHAR2(64),
PROC_DEF_ID_ VARCHAR2(64),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
primary key (ID_)
);
create index ACT_IDX_IDENT_LNK_USER on ACT_RU_IDENTITYLINK(USER_ID_);
create index ACT_IDX_IDENT_LNK_GROUP on ACT_RU_IDENTITYLINK(GROUP_ID_);
create index ACT_IDX_IDENT_LNK_SCOPE on ACT_RU_IDENTITYLINK(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_IDENT_LNK_SUB_SCOPE on ACT_RU_IDENTITYLINK(SUB_SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_IDENT_LNK_SCOPE_DEF on ACT_RU_IDENTITYLINK(SCOPE_DEFINITION_ID_, SCOPE_TYPE_);
insert into ACT_GE_PROPERTY values ('identitylink.schema.version', '7.0.1.1', 1);

View File

@@ -0,0 +1,6 @@
drop index ACT_IDX_HI_IDENT_LNK_USER;
drop index ACT_IDX_HI_IDENT_LNK_SCOPE;
drop index ACT_IDX_HI_IDENT_LNK_SUB_SCOPE;
drop index ACT_IDX_HI_IDENT_LNK_SCOPE_DEF;
drop table ACT_HI_IDENTITYLINK;

View File

@@ -0,0 +1,7 @@
drop index ACT_IDX_IDENT_LNK_USER;
drop index ACT_IDX_IDENT_LNK_GROUP;
drop index ACT_IDX_IDENT_LNK_SCOPE;
drop index ACT_IDX_IDENT_LNK_SUB_SCOPE;
drop index ACT_IDX_IDENT_LNK_SCOPE_DEF;
drop table ACT_RU_IDENTITYLINK;

View File

@@ -0,0 +1,108 @@
create table ACT_ID_PROPERTY (
NAME_ VARCHAR2(64),
VALUE_ VARCHAR2(300),
REV_ INTEGER,
primary key (NAME_)
);
insert into ACT_ID_PROPERTY
values ('schema.version', '7.0.1.1', 1);
create table ACT_ID_BYTEARRAY (
ID_ VARCHAR2(64),
REV_ INTEGER,
NAME_ VARCHAR2(255),
BYTES_ BLOB,
primary key (ID_)
);
create table ACT_ID_GROUP (
ID_ VARCHAR2(64),
REV_ INTEGER,
NAME_ VARCHAR2(255),
TYPE_ VARCHAR2(255),
primary key (ID_)
);
create table ACT_ID_MEMBERSHIP (
USER_ID_ VARCHAR2(64),
GROUP_ID_ VARCHAR2(64),
primary key (USER_ID_, GROUP_ID_)
);
create table ACT_ID_USER (
ID_ VARCHAR2(64),
REV_ INTEGER,
FIRST_ VARCHAR2(255),
LAST_ VARCHAR2(255),
DISPLAY_NAME_ VARCHAR2(255),
EMAIL_ VARCHAR2(255),
PWD_ VARCHAR2(255),
PICTURE_ID_ VARCHAR2(64),
TENANT_ID_ VARCHAR2(255) default '',
primary key (ID_)
);
create table ACT_ID_INFO (
ID_ VARCHAR2(64),
REV_ INTEGER,
USER_ID_ VARCHAR2(64),
TYPE_ VARCHAR2(64),
KEY_ VARCHAR2(255),
VALUE_ VARCHAR2(255),
PASSWORD_ BLOB,
PARENT_ID_ VARCHAR2(255),
primary key (ID_)
);
create table ACT_ID_TOKEN (
ID_ VARCHAR2(64) not null,
REV_ INTEGER,
TOKEN_VALUE_ VARCHAR2(255),
TOKEN_DATE_ TIMESTAMP(6),
IP_ADDRESS_ VARCHAR2(255),
USER_AGENT_ VARCHAR2(255),
USER_ID_ VARCHAR2(255),
TOKEN_DATA_ VARCHAR2(2000),
primary key (ID_)
);
create table ACT_ID_PRIV (
ID_ VARCHAR2(64) not null,
NAME_ VARCHAR2(255) not null,
primary key (ID_)
);
create table ACT_ID_PRIV_MAPPING (
ID_ VARCHAR2(64) not null,
PRIV_ID_ VARCHAR2(64) not null,
USER_ID_ VARCHAR2(255),
GROUP_ID_ VARCHAR2(255),
primary key (ID_)
);
create index ACT_IDX_MEMB_GROUP on ACT_ID_MEMBERSHIP(GROUP_ID_);
alter table ACT_ID_MEMBERSHIP
add constraint ACT_FK_MEMB_GROUP
foreign key (GROUP_ID_)
references ACT_ID_GROUP (ID_);
create index ACT_IDX_MEMB_USER on ACT_ID_MEMBERSHIP(USER_ID_);
alter table ACT_ID_MEMBERSHIP
add constraint ACT_FK_MEMB_USER
foreign key (USER_ID_)
references ACT_ID_USER (ID_);
create index ACT_IDX_PRIV_MAPPING on ACT_ID_PRIV_MAPPING(PRIV_ID_);
alter table ACT_ID_PRIV_MAPPING
add constraint ACT_FK_PRIV_MAPPING
foreign key (PRIV_ID_)
references ACT_ID_PRIV (ID_);
create index ACT_IDX_PRIV_USER on ACT_ID_PRIV_MAPPING(USER_ID_);
create index ACT_IDX_PRIV_GROUP on ACT_ID_PRIV_MAPPING(GROUP_ID_);
alter table ACT_ID_PRIV
add constraint ACT_UNIQ_PRIV_NAME
unique (NAME_);

View File

@@ -0,0 +1,22 @@
alter table ACT_ID_MEMBERSHIP
drop CONSTRAINT ACT_FK_MEMB_GROUP;
alter table ACT_ID_MEMBERSHIP
drop CONSTRAINT ACT_FK_MEMB_USER;
alter table ACT_ID_PRIV_MAPPING
drop CONSTRAINT ACT_FK_PRIV_MAPPING;
drop index ACT_IDX_MEMB_GROUP;
drop index ACT_IDX_MEMB_USER;
drop index ACT_IDX_PRIV_MAPPING;
drop table ACT_ID_PROPERTY;
drop table ACT_ID_BYTEARRAY;
drop table ACT_ID_INFO;
drop table ACT_ID_MEMBERSHIP;
drop table ACT_ID_GROUP;
drop table ACT_ID_USER;
drop table ACT_ID_TOKEN;
drop table ACT_ID_PRIV;
drop table ACT_ID_PRIV_MAPPING;

View File

@@ -0,0 +1,261 @@
create table ACT_RU_JOB (
ID_ VARCHAR2(64) NOT NULL,
REV_ INTEGER,
CATEGORY_ VARCHAR2(255),
TYPE_ VARCHAR2(255) NOT NULL,
LOCK_EXP_TIME_ TIMESTAMP(6),
LOCK_OWNER_ VARCHAR2(255),
EXCLUSIVE_ NUMBER(1) CHECK (EXCLUSIVE_ IN (1,0)),
EXECUTION_ID_ VARCHAR2(64),
PROCESS_INSTANCE_ID_ VARCHAR2(64),
PROC_DEF_ID_ VARCHAR2(64),
ELEMENT_ID_ VARCHAR2(255),
ELEMENT_NAME_ VARCHAR2(255),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
CORRELATION_ID_ VARCHAR2(255),
RETRIES_ INTEGER,
EXCEPTION_STACK_ID_ VARCHAR2(64),
EXCEPTION_MSG_ VARCHAR2(2000),
DUEDATE_ TIMESTAMP(6),
REPEAT_ VARCHAR2(255),
HANDLER_TYPE_ VARCHAR2(255),
HANDLER_CFG_ VARCHAR2(2000),
CUSTOM_VALUES_ID_ VARCHAR2(64),
CREATE_TIME_ TIMESTAMP(6),
TENANT_ID_ VARCHAR2(255) DEFAULT '',
primary key (ID_)
);
create table ACT_RU_TIMER_JOB (
ID_ VARCHAR2(64) NOT NULL,
REV_ INTEGER,
CATEGORY_ VARCHAR2(255),
TYPE_ VARCHAR2(255) NOT NULL,
LOCK_EXP_TIME_ TIMESTAMP(6),
LOCK_OWNER_ VARCHAR2(255),
EXCLUSIVE_ NUMBER(1) CHECK (EXCLUSIVE_ IN (1,0)),
EXECUTION_ID_ VARCHAR2(64),
PROCESS_INSTANCE_ID_ VARCHAR2(64),
PROC_DEF_ID_ VARCHAR2(64),
ELEMENT_ID_ VARCHAR2(255),
ELEMENT_NAME_ VARCHAR2(255),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
CORRELATION_ID_ VARCHAR2(255),
RETRIES_ INTEGER,
EXCEPTION_STACK_ID_ VARCHAR2(64),
EXCEPTION_MSG_ VARCHAR2(2000),
DUEDATE_ TIMESTAMP(6),
REPEAT_ VARCHAR2(255),
HANDLER_TYPE_ VARCHAR2(255),
HANDLER_CFG_ VARCHAR2(2000),
CUSTOM_VALUES_ID_ VARCHAR2(64),
CREATE_TIME_ TIMESTAMP(6),
TENANT_ID_ VARCHAR2(255) DEFAULT '',
primary key (ID_)
);
create table ACT_RU_SUSPENDED_JOB (
ID_ VARCHAR2(64) NOT NULL,
REV_ INTEGER,
CATEGORY_ VARCHAR2(255),
TYPE_ VARCHAR2(255) NOT NULL,
EXCLUSIVE_ NUMBER(1) CHECK (EXCLUSIVE_ IN (1,0)),
EXECUTION_ID_ VARCHAR2(64),
PROCESS_INSTANCE_ID_ VARCHAR2(64),
PROC_DEF_ID_ VARCHAR2(64),
ELEMENT_ID_ VARCHAR2(255),
ELEMENT_NAME_ VARCHAR2(255),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
CORRELATION_ID_ VARCHAR2(255),
RETRIES_ INTEGER,
EXCEPTION_STACK_ID_ VARCHAR2(64),
EXCEPTION_MSG_ VARCHAR2(2000),
DUEDATE_ TIMESTAMP(6),
REPEAT_ VARCHAR2(255),
HANDLER_TYPE_ VARCHAR2(255),
HANDLER_CFG_ VARCHAR2(2000),
CUSTOM_VALUES_ID_ VARCHAR2(64),
CREATE_TIME_ TIMESTAMP(6),
TENANT_ID_ VARCHAR2(255) DEFAULT '',
primary key (ID_)
);
create table ACT_RU_DEADLETTER_JOB (
ID_ VARCHAR2(64) NOT NULL,
REV_ INTEGER,
CATEGORY_ VARCHAR2(255),
TYPE_ VARCHAR2(255) NOT NULL,
EXCLUSIVE_ NUMBER(1) CHECK (EXCLUSIVE_ IN (1,0)),
EXECUTION_ID_ VARCHAR2(64),
PROCESS_INSTANCE_ID_ VARCHAR2(64),
PROC_DEF_ID_ VARCHAR2(64),
ELEMENT_ID_ VARCHAR2(255),
ELEMENT_NAME_ VARCHAR2(255),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
CORRELATION_ID_ VARCHAR2(255),
EXCEPTION_STACK_ID_ VARCHAR2(64),
EXCEPTION_MSG_ VARCHAR2(2000),
DUEDATE_ TIMESTAMP(6),
REPEAT_ VARCHAR2(255),
HANDLER_TYPE_ VARCHAR2(255),
HANDLER_CFG_ VARCHAR2(2000),
CUSTOM_VALUES_ID_ VARCHAR2(64),
CREATE_TIME_ TIMESTAMP(6),
TENANT_ID_ VARCHAR2(255) DEFAULT '',
primary key (ID_)
);
create table ACT_RU_HISTORY_JOB (
ID_ VARCHAR2(64) NOT NULL,
REV_ INTEGER,
LOCK_EXP_TIME_ TIMESTAMP(6),
LOCK_OWNER_ VARCHAR2(255),
RETRIES_ INTEGER,
EXCEPTION_STACK_ID_ VARCHAR2(64),
EXCEPTION_MSG_ VARCHAR2(2000),
HANDLER_TYPE_ VARCHAR2(255),
HANDLER_CFG_ VARCHAR2(2000),
CUSTOM_VALUES_ID_ VARCHAR2(64),
ADV_HANDLER_CFG_ID_ VARCHAR2(64),
CREATE_TIME_ TIMESTAMP(6),
SCOPE_TYPE_ VARCHAR2(255),
TENANT_ID_ VARCHAR2(255) DEFAULT '',
primary key (ID_)
);
create table ACT_RU_EXTERNAL_JOB (
ID_ VARCHAR2(64) NOT NULL,
REV_ INTEGER,
CATEGORY_ VARCHAR2(255),
TYPE_ VARCHAR2(255) NOT NULL,
LOCK_EXP_TIME_ TIMESTAMP(6),
LOCK_OWNER_ VARCHAR2(255),
EXCLUSIVE_ NUMBER(1) CHECK (EXCLUSIVE_ IN (1,0)),
EXECUTION_ID_ VARCHAR2(64),
PROCESS_INSTANCE_ID_ VARCHAR2(64),
PROC_DEF_ID_ VARCHAR2(64),
ELEMENT_ID_ VARCHAR2(255),
ELEMENT_NAME_ VARCHAR2(255),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
CORRELATION_ID_ VARCHAR2(255),
RETRIES_ INTEGER,
EXCEPTION_STACK_ID_ VARCHAR2(64),
EXCEPTION_MSG_ VARCHAR2(2000),
DUEDATE_ TIMESTAMP(6),
REPEAT_ VARCHAR2(255),
HANDLER_TYPE_ VARCHAR2(255),
HANDLER_CFG_ VARCHAR2(2000),
CUSTOM_VALUES_ID_ VARCHAR2(64),
CREATE_TIME_ TIMESTAMP(6),
TENANT_ID_ VARCHAR2(255) DEFAULT '',
primary key (ID_)
);
create index ACT_IDX_JOB_EXCEPTION on ACT_RU_JOB(EXCEPTION_STACK_ID_);
create index ACT_IDX_JOB_CUSTOM_VAL_ID on ACT_RU_JOB(CUSTOM_VALUES_ID_);
create index ACT_IDX_JOB_CORRELATION_ID on ACT_RU_JOB(CORRELATION_ID_);
create index ACT_IDX_TJOB_EXCEPTION on ACT_RU_TIMER_JOB(EXCEPTION_STACK_ID_);
create index ACT_IDX_TJOB_CUSTOM_VAL_ID on ACT_RU_TIMER_JOB(CUSTOM_VALUES_ID_);
create index ACT_IDX_TJOB_CORRELATION_ID on ACT_RU_TIMER_JOB(CORRELATION_ID_);
create index ACT_IDX_TJOB_DUEDATE on ACT_RU_TIMER_JOB(DUEDATE_);
create index ACT_IDX_SJOB_EXCEPTION on ACT_RU_SUSPENDED_JOB(EXCEPTION_STACK_ID_);
create index ACT_IDX_SJOB_CUSTOM_VAL_ID on ACT_RU_SUSPENDED_JOB(CUSTOM_VALUES_ID_);
create index ACT_IDX_SJOB_CORRELATION_ID on ACT_RU_SUSPENDED_JOB(CORRELATION_ID_);
create index ACT_IDX_DJOB_EXCEPTION on ACT_RU_DEADLETTER_JOB(EXCEPTION_STACK_ID_);
create index ACT_IDX_DJOB_CUSTOM_VAL_ID on ACT_RU_DEADLETTER_JOB(CUSTOM_VALUES_ID_);
create index ACT_IDX_DJOB_CORRELATION_ID on ACT_RU_DEADLETTER_JOB(CORRELATION_ID_);
create index ACT_IDX_EJOB_EXCEPTION on ACT_RU_EXTERNAL_JOB(EXCEPTION_STACK_ID_);
create index ACT_IDX_EJOB_CUSTOM_VAL_ID on ACT_RU_EXTERNAL_JOB(CUSTOM_VALUES_ID_);
create index ACT_IDX_EJOB_CORRELATION_ID on ACT_RU_EXTERNAL_JOB(CORRELATION_ID_);
alter table ACT_RU_JOB
add constraint ACT_FK_JOB_EXCEPTION
foreign key (EXCEPTION_STACK_ID_)
references ACT_GE_BYTEARRAY (ID_);
alter table ACT_RU_JOB
add constraint ACT_FK_JOB_CUSTOM_VAL
foreign key (CUSTOM_VALUES_ID_)
references ACT_GE_BYTEARRAY (ID_);
alter table ACT_RU_TIMER_JOB
add constraint ACT_FK_TJOB_EXCEPTION
foreign key (EXCEPTION_STACK_ID_)
references ACT_GE_BYTEARRAY (ID_);
alter table ACT_RU_TIMER_JOB
add constraint ACT_FK_TJOB_CUSTOM_VAL
foreign key (CUSTOM_VALUES_ID_)
references ACT_GE_BYTEARRAY (ID_);
alter table ACT_RU_SUSPENDED_JOB
add constraint ACT_FK_SJOB_EXCEPTION
foreign key (EXCEPTION_STACK_ID_)
references ACT_GE_BYTEARRAY (ID_);
alter table ACT_RU_SUSPENDED_JOB
add constraint ACT_FK_SJOB_CUSTOM_VAL
foreign key (CUSTOM_VALUES_ID_)
references ACT_GE_BYTEARRAY (ID_);
alter table ACT_RU_DEADLETTER_JOB
add constraint ACT_FK_DJOB_EXCEPTION
foreign key (EXCEPTION_STACK_ID_)
references ACT_GE_BYTEARRAY (ID_);
alter table ACT_RU_DEADLETTER_JOB
add constraint ACT_FK_DJOB_CUSTOM_VAL
foreign key (CUSTOM_VALUES_ID_)
references ACT_GE_BYTEARRAY (ID_);
alter table ACT_RU_EXTERNAL_JOB
add constraint ACT_FK_EJOB_EXCEPTION
foreign key (EXCEPTION_STACK_ID_)
references ACT_GE_BYTEARRAY (ID_);
alter table ACT_RU_EXTERNAL_JOB
add constraint ACT_FK_EJOB_CUSTOM_VAL
foreign key (CUSTOM_VALUES_ID_)
references ACT_GE_BYTEARRAY (ID_);
create index ACT_IDX_JOB_SCOPE on ACT_RU_JOB(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_JOB_SUB_SCOPE on ACT_RU_JOB(SUB_SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_JOB_SCOPE_DEF on ACT_RU_JOB(SCOPE_DEFINITION_ID_, SCOPE_TYPE_);
create index ACT_IDX_TJOB_SCOPE on ACT_RU_TIMER_JOB(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_TJOB_SUB_SCOPE on ACT_RU_TIMER_JOB(SUB_SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_TJOB_SCOPE_DEF on ACT_RU_TIMER_JOB(SCOPE_DEFINITION_ID_, SCOPE_TYPE_);
create index ACT_IDX_SJOB_SCOPE on ACT_RU_SUSPENDED_JOB(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_SJOB_SUB_SCOPE on ACT_RU_SUSPENDED_JOB(SUB_SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_SJOB_SCOPE_DEF on ACT_RU_SUSPENDED_JOB(SCOPE_DEFINITION_ID_, SCOPE_TYPE_);
create index ACT_IDX_DJOB_SCOPE on ACT_RU_DEADLETTER_JOB(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_DJOB_SUB_SCOPE on ACT_RU_DEADLETTER_JOB(SUB_SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_DJOB_SCOPE_DEF on ACT_RU_DEADLETTER_JOB(SCOPE_DEFINITION_ID_, SCOPE_TYPE_);
create index ACT_IDX_EJOB_SCOPE on ACT_RU_EXTERNAL_JOB(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_EJOB_SUB_SCOPE on ACT_RU_EXTERNAL_JOB(SUB_SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_EJOB_SCOPE_DEF on ACT_RU_EXTERNAL_JOB(SCOPE_DEFINITION_ID_, SCOPE_TYPE_);
insert into ACT_GE_PROPERTY values ('job.schema.version', '7.0.1.1', 1);

View File

@@ -0,0 +1,74 @@
drop index ACT_IDX_JOB_SCOPE;
drop index ACT_IDX_JOB_SUB_SCOPE;
drop index ACT_IDX_JOB_SCOPE_DEF;
drop index ACT_IDX_TJOB_SCOPE;
drop index ACT_IDX_TJOB_SUB_SCOPE;
drop index ACT_IDX_TJOB_SCOPE_DEF;
drop index ACT_IDX_SJOB_SCOPE;
drop index ACT_IDX_SJOB_SUB_SCOPE;
drop index ACT_IDX_SJOB_SCOPE_DEF;
drop index ACT_IDX_DJOB_SCOPE;
drop index ACT_IDX_DJOB_SUB_SCOPE;
drop index ACT_IDX_DJOB_SCOPE_DEF;
drop index ACT_IDX_EJOB_SCOPE;
drop index ACT_IDX_EJOB_SUB_SCOPE;
drop index ACT_IDX_EJOB_SCOPE_DEF;
drop index ACT_IDX_JOB_EXCEPTION;
drop index ACT_IDX_JOB_CUSTOM_VAL_ID;
drop index ACT_IDX_JOB_CORRELATION_ID;
drop index ACT_IDX_TJOB_EXCEPTION;
drop index ACT_IDX_TJOB_CUSTOM_VAL_ID;
drop index ACT_IDX_TJOB_CORRELATION_ID;
drop index ACT_IDX_TJOB_DUEDATE;
drop index ACT_IDX_SJOB_EXCEPTION;
drop index ACT_IDX_SJOB_CUSTOM_VAL_ID;
drop index ACT_IDX_SJOB_CORRELATION_ID;
drop index ACT_IDX_DJOB_EXCEPTION;
drop index ACT_IDX_DJOB_CUSTOM_VAL_ID;
drop index ACT_IDX_DJOB_CORRELATION_ID;
drop index ACT_IDX_EJOB_EXCEPTION;
drop index ACT_IDX_EJOB_CUSTOM_VAL_ID;
drop index ACT_IDX_EJOB_CORRELATION_ID;
alter table ACT_RU_JOB
drop CONSTRAINT ACT_FK_JOB_EXCEPTION;
alter table ACT_RU_JOB
drop CONSTRAINT ACT_FK_JOB_CUSTOM_VAL;
alter table ACT_RU_TIMER_JOB
drop CONSTRAINT ACT_FK_TJOB_EXCEPTION;
alter table ACT_RU_TIMER_JOB
drop CONSTRAINT ACT_FK_TJOB_CUSTOM_VAL;
alter table ACT_RU_SUSPENDED_JOB
drop CONSTRAINT ACT_FK_SJOB_EXCEPTION;
alter table ACT_RU_SUSPENDED_JOB
drop CONSTRAINT ACT_FK_SJOB_CUSTOM_VAL;
alter table ACT_RU_DEADLETTER_JOB
drop CONSTRAINT ACT_FK_DJOB_EXCEPTION;
alter table ACT_RU_DEADLETTER_JOB
drop CONSTRAINT ACT_FK_DJOB_CUSTOM_VAL;
-- Drop the external-job FK constraints. The create script names these
-- ACT_FK_EJOB_EXCEPTION / ACT_FK_EJOB_CUSTOM_VAL; the previous DJOB names
-- belong to ACT_RU_DEADLETTER_JOB and were already dropped above, so using
-- them here would fail (constraint does not exist on this table) and would
-- leave the real EJOB constraints behind, blocking the ACT_GE_BYTEARRAY drop.
alter table ACT_RU_EXTERNAL_JOB
drop CONSTRAINT ACT_FK_EJOB_EXCEPTION;
alter table ACT_RU_EXTERNAL_JOB
drop CONSTRAINT ACT_FK_EJOB_CUSTOM_VAL;
drop table ACT_RU_JOB;
drop table ACT_RU_TIMER_JOB;
drop table ACT_RU_SUSPENDED_JOB;
drop table ACT_RU_DEADLETTER_JOB;
drop table ACT_RU_HISTORY_JOB;
drop table ACT_RU_EXTERNAL_JOB;

View File

@@ -0,0 +1,64 @@
create table ACT_HI_TASKINST (
ID_ VARCHAR2(64) not null,
REV_ INTEGER default 1,
PROC_DEF_ID_ VARCHAR2(64),
TASK_DEF_ID_ VARCHAR2(64),
TASK_DEF_KEY_ VARCHAR2(255),
PROC_INST_ID_ VARCHAR2(64),
EXECUTION_ID_ VARCHAR2(64),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
PROPAGATED_STAGE_INST_ID_ VARCHAR2(255),
PARENT_TASK_ID_ VARCHAR2(64),
STATE_ VARCHAR2(255),
NAME_ VARCHAR2(255),
DESCRIPTION_ VARCHAR2(2000),
OWNER_ VARCHAR2(255),
ASSIGNEE_ VARCHAR2(255),
START_TIME_ TIMESTAMP(6) not null,
IN_PROGRESS_TIME_ TIMESTAMP(6),
IN_PROGRESS_STARTED_BY_ VARCHAR2(255),
CLAIM_TIME_ TIMESTAMP(6),
CLAIMED_BY_ VARCHAR2(255),
SUSPENDED_TIME_ TIMESTAMP(6),
SUSPENDED_BY_ VARCHAR2(255),
END_TIME_ TIMESTAMP(6),
COMPLETED_BY_ VARCHAR2(255),
DURATION_ NUMBER(19,0),
DELETE_REASON_ VARCHAR2(2000),
PRIORITY_ INTEGER,
IN_PROGRESS_DUE_DATE_ TIMESTAMP(6),
DUE_DATE_ TIMESTAMP(6),
FORM_KEY_ VARCHAR2(255),
CATEGORY_ VARCHAR2(255),
TENANT_ID_ VARCHAR2(255) default '',
LAST_UPDATED_TIME_ TIMESTAMP(6),
primary key (ID_)
);
create table ACT_HI_TSK_LOG (
ID_ NUMBER(19),
TYPE_ VARCHAR2(64),
TASK_ID_ VARCHAR2(64) not null,
TIME_STAMP_ TIMESTAMP(6) not null,
USER_ID_ VARCHAR2(255),
DATA_ VARCHAR2(2000),
EXECUTION_ID_ VARCHAR2(64),
PROC_INST_ID_ VARCHAR2(64),
PROC_DEF_ID_ VARCHAR2(64),
SCOPE_ID_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
TENANT_ID_ VARCHAR2(255) default '',
primary key (ID_)
);
create sequence act_hi_task_evt_log_seq start with 1 increment by 1;
create index ACT_IDX_HI_TASK_SCOPE on ACT_HI_TASKINST(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_HI_TASK_SUB_SCOPE on ACT_HI_TASKINST(SUB_SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_HI_TASK_SCOPE_DEF on ACT_HI_TASKINST(SCOPE_DEFINITION_ID_, SCOPE_TYPE_);

View File

@@ -0,0 +1,48 @@
create table ACT_RU_TASK (
ID_ VARCHAR2(64),
REV_ INTEGER,
EXECUTION_ID_ VARCHAR2(64),
PROC_INST_ID_ VARCHAR2(64),
PROC_DEF_ID_ VARCHAR2(64),
TASK_DEF_ID_ VARCHAR2(64),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
PROPAGATED_STAGE_INST_ID_ VARCHAR2(255),
STATE_ VARCHAR2(255),
NAME_ VARCHAR2(255),
PARENT_TASK_ID_ VARCHAR2(64),
DESCRIPTION_ VARCHAR2(2000),
TASK_DEF_KEY_ VARCHAR2(255),
OWNER_ VARCHAR2(255),
ASSIGNEE_ VARCHAR2(255),
DELEGATION_ VARCHAR2(64),
PRIORITY_ INTEGER,
CREATE_TIME_ TIMESTAMP(6),
IN_PROGRESS_TIME_ TIMESTAMP(6),
IN_PROGRESS_STARTED_BY_ VARCHAR2(255),
CLAIM_TIME_ TIMESTAMP(6),
CLAIMED_BY_ VARCHAR2(255),
SUSPENDED_TIME_ TIMESTAMP(6),
SUSPENDED_BY_ VARCHAR2(255),
IN_PROGRESS_DUE_DATE_ TIMESTAMP(6),
DUE_DATE_ TIMESTAMP(6),
CATEGORY_ VARCHAR2(255),
SUSPENSION_STATE_ INTEGER,
TENANT_ID_ VARCHAR2(255) DEFAULT '',
FORM_KEY_ VARCHAR2(255),
IS_COUNT_ENABLED_ NUMBER(1) CHECK (IS_COUNT_ENABLED_ IN (1,0)),
VAR_COUNT_ INTEGER,
ID_LINK_COUNT_ INTEGER,
SUB_TASK_COUNT_ INTEGER,
primary key (ID_)
);
create index ACT_IDX_TASK_CREATE on ACT_RU_TASK(CREATE_TIME_);
create index ACT_IDX_TASK_SCOPE on ACT_RU_TASK(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_TASK_SUB_SCOPE on ACT_RU_TASK(SUB_SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_TASK_SCOPE_DEF on ACT_RU_TASK(SCOPE_DEFINITION_ID_, SCOPE_TYPE_);
insert into ACT_GE_PROPERTY values ('task.schema.version', '7.0.1.1', 1);

View File

@@ -0,0 +1,8 @@
drop index ACT_IDX_HI_TASK_SCOPE;
drop index ACT_IDX_HI_TASK_SUB_SCOPE;
drop index ACT_IDX_HI_TASK_SCOPE_DEF;
drop sequence act_hi_task_evt_log_seq;
drop table ACT_HI_TASKINST;
drop table ACT_HI_TSK_LOG;

View File

@@ -0,0 +1,6 @@
drop index ACT_IDX_TASK_CREATE;
drop index ACT_IDX_TASK_SCOPE;
drop index ACT_IDX_TASK_SUB_SCOPE;
drop index ACT_IDX_TASK_SCOPE_DEF;
drop table ACT_RU_TASK;

View File

@@ -0,0 +1,26 @@
create table ACT_HI_VARINST (
ID_ VARCHAR2(64) not null,
REV_ INTEGER default 1,
PROC_INST_ID_ VARCHAR2(64),
EXECUTION_ID_ VARCHAR2(64),
TASK_ID_ VARCHAR2(64),
NAME_ VARCHAR2(255) not null,
VAR_TYPE_ VARCHAR2(100),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
BYTEARRAY_ID_ VARCHAR2(64),
DOUBLE_ NUMBER(38,10),
LONG_ NUMBER(19,0),
TEXT_ VARCHAR2(2000),
TEXT2_ VARCHAR2(2000),
META_INFO_ VARCHAR2(2000),
CREATE_TIME_ TIMESTAMP(6),
LAST_UPDATED_TIME_ TIMESTAMP(6),
primary key (ID_)
);
create index ACT_IDX_HI_PROCVAR_NAME_TYPE on ACT_HI_VARINST(NAME_, VAR_TYPE_);
create index ACT_IDX_HI_VAR_SCOPE_ID_TYPE on ACT_HI_VARINST(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_HI_VAR_SUB_ID_TYPE on ACT_HI_VARINST(SUB_SCOPE_ID_, SCOPE_TYPE_);

View File

@@ -0,0 +1,31 @@
create table ACT_RU_VARIABLE (
ID_ VARCHAR2(64) not null,
REV_ INTEGER,
TYPE_ VARCHAR2(255) not null,
NAME_ VARCHAR2(255) not null,
EXECUTION_ID_ VARCHAR2(64),
PROC_INST_ID_ VARCHAR2(64),
TASK_ID_ VARCHAR2(64),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
BYTEARRAY_ID_ VARCHAR2(64),
DOUBLE_ NUMBER(38,10),
LONG_ NUMBER(19,0),
TEXT_ VARCHAR2(2000),
TEXT2_ VARCHAR2(2000),
META_INFO_ VARCHAR2(2000),
primary key (ID_)
);
create index ACT_IDX_RU_VAR_SCOPE_ID_TYPE on ACT_RU_VARIABLE(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_RU_VAR_SUB_ID_TYPE on ACT_RU_VARIABLE(SUB_SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_VAR_BYTEARRAY on ACT_RU_VARIABLE(BYTEARRAY_ID_);
alter table ACT_RU_VARIABLE
add constraint ACT_FK_VAR_BYTEARRAY
foreign key (BYTEARRAY_ID_)
references ACT_GE_BYTEARRAY (ID_);
insert into ACT_GE_PROPERTY values ('variable.schema.version', '7.0.1.1', 1);

View File

@@ -0,0 +1,6 @@
drop index ACT_IDX_HI_PROCVAR_NAME_TYPE;
drop index ACT_IDX_HI_VAR_SCOPE_ID_TYPE;
drop index ACT_IDX_HI_VAR_SUB_ID_TYPE;
drop table ACT_HI_VARINST;

View File

@@ -0,0 +1,9 @@
drop index ACT_IDX_VAR_BYTEARRAY;
drop index ACT_IDX_RU_VAR_SCOPE_ID_TYPE;
drop index ACT_IDX_RU_VAR_SUB_ID_TYPE;
alter table ACT_RU_VARIABLE
drop CONSTRAINT ACT_FK_VAR_BYTEARRAY;
drop table ACT_RU_VARIABLE;

View File

@@ -1,5 +1,7 @@
package com.zt.plat.module.databus.framework.integration.gateway.config; package com.zt.plat.module.databus.framework.integration.gateway.config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.web.reactive.function.client.WebClientCustomizer; import org.springframework.boot.web.reactive.function.client.WebClientCustomizer;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
@@ -17,33 +19,43 @@ public class GatewayWebClientConfiguration {
private final int maxInMemorySize; private final int maxInMemorySize;
private final long maxIdleTimeMillis; private final long maxIdleTimeMillis;
private final long evictInBackgroundMillis; private final long evictInBackgroundMillis;
private final boolean connectionPoolEnabled;
private final ReactorClientHttpConnector httpConnector; private final ReactorClientHttpConnector httpConnector;
private static final Logger log = LoggerFactory.getLogger(GatewayWebClientConfiguration.class);
public GatewayWebClientConfiguration( public GatewayWebClientConfiguration(
@Value("${databus.gateway.web-client.max-in-memory-size:20971520}") int maxInMemorySize, @Value("${databus.gateway.web-client.max-in-memory-size:20971520}") int maxInMemorySize,
@Value("${databus.gateway.web-client.max-idle-time:45000}") long maxIdleTimeMillis, @Value("${databus.gateway.web-client.max-idle-time:45000}") long maxIdleTimeMillis,
@Value("${databus.gateway.web-client.evict-in-background-interval:20000}") long evictInBackgroundMillis) { @Value("${databus.gateway.web-client.evict-in-background-interval:20000}") long evictInBackgroundMillis,
@Value("${databus.gateway.web-client.connection-pool-enabled:true}") boolean connectionPoolEnabled) {
this.maxInMemorySize = maxInMemorySize; this.maxInMemorySize = maxInMemorySize;
this.maxIdleTimeMillis = maxIdleTimeMillis > 0 ? maxIdleTimeMillis : 45000L; this.maxIdleTimeMillis = maxIdleTimeMillis;
this.evictInBackgroundMillis = Math.max(evictInBackgroundMillis, 0L); this.evictInBackgroundMillis = evictInBackgroundMillis;
this.connectionPoolEnabled = connectionPoolEnabled;
this.httpConnector = buildConnector(); this.httpConnector = buildConnector();
} }
@Bean @Bean
public WebClientCustomizer gatewayWebClientCustomizer() { public WebClientCustomizer gatewayWebClientCustomizer() {
// 统一设置 WebClient 连接器与内存限制,避免各处重复配置
return builder -> builder return builder -> builder
.clientConnector(httpConnector) .clientConnector(httpConnector)
.codecs(configurer -> configurer.defaultCodecs().maxInMemorySize(maxInMemorySize)); .codecs(configurer -> configurer.defaultCodecs().maxInMemorySize(maxInMemorySize));
} }
private ReactorClientHttpConnector buildConnector() { private ReactorClientHttpConnector buildConnector() {
ConnectionProvider.Builder providerBuilder = ConnectionProvider.builder("databus-gateway") if (connectionPoolEnabled) {
.maxIdleTime(Duration.ofMillis(maxIdleTimeMillis)); // 启用连接池,基于配置设置空闲回收参数
if (evictInBackgroundMillis > 0) { ConnectionProvider provider = ConnectionProvider.builder("databus-gateway")
providerBuilder.evictInBackground(Duration.ofMillis(evictInBackgroundMillis)); .maxIdleTime(Duration.ofMillis(maxIdleTimeMillis))
.evictInBackground(Duration.ofMillis(evictInBackgroundMillis))
.build();
log.info("Databus gateway WebClient 已启用连接池 (maxIdleTime={}ms, evictInterval={}ms)",
maxIdleTimeMillis, evictInBackgroundMillis);
return new ReactorClientHttpConnector(HttpClient.create(provider).compress(true));
} }
ConnectionProvider provider = providerBuilder.build(); // 关闭连接池,每次请求都会重新建立 TCP 连接
HttpClient httpClient = HttpClient.create(provider).compress(true); log.info("Databus gateway WebClient 已禁用连接池,所有请求将使用全新连接");
return new ReactorClientHttpConnector(httpClient); return new ReactorClientHttpConnector(HttpClient.create().compress(true));
} }
} }

View File

@@ -131,4 +131,9 @@ zt:
ignore-tables: ignore-tables:
- databus_api_client_credential - databus_api_client_credential
databus:
gateway:
web-client:
connection-pool-enabled: false # 默认开启连接池,排查长连接问题时可临时关闭
debug: false debug: false

View File

@@ -5,6 +5,10 @@
<springProperty scope="context" name="zt.info.base-package" source="zt.info.base-package"/> <springProperty scope="context" name="zt.info.base-package" source="zt.info.base-package"/>
<!-- 格式化输出:%d 表示日期,%X{tid} SkWalking 链路追踪编号,%thread 表示线程名,%-5level级别从左显示 5 个字符宽度,%msg日志消息%n是换行符 --> <!-- 格式化输出:%d 表示日期,%X{tid} SkWalking 链路追踪编号,%thread 表示线程名,%-5level级别从左显示 5 个字符宽度,%msg日志消息%n是换行符 -->
<property name="PATTERN_DEFAULT" value="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} | %highlight(${LOG_LEVEL_PATTERN:-%5p} ${PID:- }) | %boldYellow(%thread [%tid]) %boldGreen(%-40.40logger{39}) | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/> <property name="PATTERN_DEFAULT" value="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} | %highlight(${LOG_LEVEL_PATTERN:-%5p} ${PID:- }) | %boldYellow(%thread [%tid]) %boldGreen(%-40.40logger{39}) | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>
<!--应用名称-->
<springProperty scope="context" name="spring.application.name" source="spring.application.name"/>
<!-- 日志输出路径 -->
<property name="LOG_DIR" value="${user.home}/logs/${spring.application.name}"/>
<!-- 控制台 Appender --> <!-- 控制台 Appender -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">      <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">     
@@ -31,7 +35,7 @@
<!-- 启动服务时,是否清理历史日志,一般不建议清理 --> <!-- 启动服务时,是否清理历史日志,一般不建议清理 -->
<cleanHistoryOnStart>${LOGBACK_ROLLINGPOLICY_CLEAN_HISTORY_ON_START:-false}</cleanHistoryOnStart> <cleanHistoryOnStart>${LOGBACK_ROLLINGPOLICY_CLEAN_HISTORY_ON_START:-false}</cleanHistoryOnStart>
<!-- 日志文件,到达多少容量,进行滚动 --> <!-- 日志文件,到达多少容量,进行滚动 -->
<maxFileSize>${LOGBACK_ROLLINGPOLICY_MAX_FILE_SIZE:-10MB}</maxFileSize> <maxFileSize>${LOGBACK_ROLLINGPOLICY_MAX_FILE_SIZE:-50MB}</maxFileSize>
<!-- 日志文件的总大小0 表示不限制 --> <!-- 日志文件的总大小0 表示不限制 -->
<totalSizeCap>${LOGBACK_ROLLINGPOLICY_TOTAL_SIZE_CAP:-0}</totalSizeCap> <totalSizeCap>${LOGBACK_ROLLINGPOLICY_TOTAL_SIZE_CAP:-0}</totalSizeCap>
<!-- 日志文件的保留天数 --> <!-- 日志文件的保留天数 -->
@@ -56,18 +60,44 @@
</encoder> </encoder>
</appender> </appender>
<!-- ERROR 级别日志 -->
<appender name="ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_DIR}-error.log</file>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_DIR}-error.%d{yyyy-MM-dd}.log</fileNamePattern>
<maxHistory>30</maxHistory> <!-- 保留30天的日志 -->
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<!--logback的日志级别 FATAL > ERROR > WARN > INFO > DEBUG-->
<!-- 本地环境 --> <!-- 本地环境 -->
<springProfile name="local"> <springProfile name="local,dev">
<root level="INFO"> <root level="WARN">
<appender-ref ref="STDOUT"/> <appender-ref ref="STDOUT"/>
<appender-ref ref="ERROR"/>
<appender-ref ref="GRPC"/> <!-- 本地环境下,如果不想接入 SkyWalking 日志服务,可以注释掉本行 --> <appender-ref ref="GRPC"/> <!-- 本地环境下,如果不想接入 SkyWalking 日志服务,可以注释掉本行 -->
<appender-ref ref="ASYNC"/> <!-- 本地环境下,如果不想打印日志,可以注释掉本行 --> <appender-ref ref="ASYNC"/> <!-- 本地环境下,如果不想打印日志,可以注释掉本行 -->
</root> </root>
<!--针对不同的业务路径,配置dao层的sql打印日志级别为DEBUG-->
<logger name="com.zt.plat.module.infra.dal.mysql" level="DEBUG" additivity="false">
<appender-ref ref="STDOUT"/>
</logger>
</springProfile> </springProfile>
<!-- 其它环境 --> <!-- 其它环境 -->
<springProfile name="dev,test,stage,prod,default"> <springProfile name="dev,test,stage,prod,default">
<root level="INFO"> <root level="INFO">
<appender-ref ref="STDOUT"/> <appender-ref ref="STDOUT"/>
<appender-ref ref="ERROR"/>
<appender-ref ref="ASYNC"/> <appender-ref ref="ASYNC"/>
<appender-ref ref="GRPC"/> <appender-ref ref="GRPC"/>
</root> </root>

View File

@@ -6,6 +6,7 @@ import cn.hutool.core.util.ObjUtil;
import cn.hutool.core.util.StrUtil; import cn.hutool.core.util.StrUtil;
import com.zt.plat.framework.common.pojo.PageResult; import com.zt.plat.framework.common.pojo.PageResult;
import com.zt.plat.module.infra.api.file.FileApi; import com.zt.plat.module.infra.api.file.FileApi;
import com.zt.plat.module.infra.api.file.dto.FileCreateReqDTO;
import com.zt.plat.module.mp.controller.admin.material.vo.MpMaterialPageReqVO; import com.zt.plat.module.mp.controller.admin.material.vo.MpMaterialPageReqVO;
import com.zt.plat.module.mp.controller.admin.material.vo.MpMaterialUploadNewsImageReqVO; import com.zt.plat.module.mp.controller.admin.material.vo.MpMaterialUploadNewsImageReqVO;
import com.zt.plat.module.mp.controller.admin.material.vo.MpMaterialUploadPermanentReqVO; import com.zt.plat.module.mp.controller.admin.material.vo.MpMaterialUploadPermanentReqVO;
@@ -218,7 +219,8 @@ public class MpMaterialServiceImpl implements MpMaterialService {
private String uploadFile(String mediaId, File file) { private String uploadFile(String mediaId, File file) {
String path = mediaId + "." + FileTypeUtil.getType(file); String path = mediaId + "." + FileTypeUtil.getType(file);
return fileApi.createFile(FileUtil.readBytes(file), path); FileCreateReqDTO createReqDTO = new FileCreateReqDTO().setName(file.getName()).setDirectory(path).setType(FileTypeUtil.getType(file)).setContent(FileUtil.readBytes(file));
return fileApi.createFile(createReqDTO).getData();
} }
} }

View File

@@ -5,6 +5,10 @@
<springProperty scope="context" name="zt.info.base-package" source="zt.info.base-package"/> <springProperty scope="context" name="zt.info.base-package" source="zt.info.base-package"/>
<!-- 格式化输出:%d 表示日期,%X{tid} SkWalking 链路追踪编号,%thread 表示线程名,%-5level级别从左显示 5 个字符宽度,%msg日志消息%n是换行符 --> <!-- 格式化输出:%d 表示日期,%X{tid} SkWalking 链路追踪编号,%thread 表示线程名,%-5level级别从左显示 5 个字符宽度,%msg日志消息%n是换行符 -->
<property name="PATTERN_DEFAULT" value="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} | %highlight(${LOG_LEVEL_PATTERN:-%5p} ${PID:- }) | %boldYellow(%thread [%tid]) %boldGreen(%-40.40logger{39}) | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/> <property name="PATTERN_DEFAULT" value="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} | %highlight(${LOG_LEVEL_PATTERN:-%5p} ${PID:- }) | %boldYellow(%thread [%tid]) %boldGreen(%-40.40logger{39}) | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>
<!--应用名称-->
<springProperty scope="context" name="spring.application.name" source="spring.application.name"/>
<!-- 日志输出路径 -->
<property name="LOG_DIR" value="${user.home}/logs/${spring.application.name}"/>
<!-- 控制台 Appender --> <!-- 控制台 Appender -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">      <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">     
@@ -31,7 +35,7 @@
<!-- 启动服务时,是否清理历史日志,一般不建议清理 --> <!-- 启动服务时,是否清理历史日志,一般不建议清理 -->
<cleanHistoryOnStart>${LOGBACK_ROLLINGPOLICY_CLEAN_HISTORY_ON_START:-false}</cleanHistoryOnStart> <cleanHistoryOnStart>${LOGBACK_ROLLINGPOLICY_CLEAN_HISTORY_ON_START:-false}</cleanHistoryOnStart>
<!-- 日志文件,到达多少容量,进行滚动 --> <!-- 日志文件,到达多少容量,进行滚动 -->
<maxFileSize>${LOGBACK_ROLLINGPOLICY_MAX_FILE_SIZE:-10MB}</maxFileSize> <maxFileSize>${LOGBACK_ROLLINGPOLICY_MAX_FILE_SIZE:-50MB}</maxFileSize>
<!-- 日志文件的总大小0 表示不限制 --> <!-- 日志文件的总大小0 表示不限制 -->
<totalSizeCap>${LOGBACK_ROLLINGPOLICY_TOTAL_SIZE_CAP:-0}</totalSizeCap> <totalSizeCap>${LOGBACK_ROLLINGPOLICY_TOTAL_SIZE_CAP:-0}</totalSizeCap>
<!-- 日志文件的保留天数 --> <!-- 日志文件的保留天数 -->
@@ -56,18 +60,44 @@
</encoder> </encoder>
</appender> </appender>
<!-- ERROR 级别日志 -->
<appender name="ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_DIR}-error.log</file>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_DIR}-error.%d{yyyy-MM-dd}.log</fileNamePattern>
<maxHistory>30</maxHistory> <!-- 保留30天的日志 -->
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<!--logback的日志级别 FATAL > ERROR > WARN > INFO > DEBUG-->
<!-- 本地环境 --> <!-- 本地环境 -->
<springProfile name="local"> <springProfile name="local,dev">
<root level="INFO"> <root level="WARN">
<appender-ref ref="STDOUT"/> <appender-ref ref="STDOUT"/>
<appender-ref ref="ERROR"/>
<appender-ref ref="GRPC"/> <!-- 本地环境下,如果不想接入 SkyWalking 日志服务,可以注释掉本行 --> <appender-ref ref="GRPC"/> <!-- 本地环境下,如果不想接入 SkyWalking 日志服务,可以注释掉本行 -->
<appender-ref ref="ASYNC"/> <!-- 本地环境下,如果不想打印日志,可以注释掉本行 --> <appender-ref ref="ASYNC"/> <!-- 本地环境下,如果不想打印日志,可以注释掉本行 -->
</root> </root>
<!--针对不同的业务路径,配置dao层的sql打印日志级别为DEBUG-->
<logger name="com.zt.plat.module.mp.dal.mysql" level="DEBUG" additivity="false">
<appender-ref ref="STDOUT"/>
</logger>
</springProfile> </springProfile>
<!-- 其它环境 --> <!-- 其它环境 -->
<springProfile name="dev,test,stage,prod,default"> <springProfile name="dev,test,stage,prod,default">
<root level="INFO"> <root level="INFO">
<appender-ref ref="STDOUT"/> <appender-ref ref="STDOUT"/>
<appender-ref ref="ERROR"/>
<appender-ref ref="ASYNC"/> <appender-ref ref="ASYNC"/>
<appender-ref ref="GRPC"/> <appender-ref ref="GRPC"/>
</root> </root>

View File

@@ -5,6 +5,10 @@
<springProperty scope="context" name="zt.info.base-package" source="zt.info.base-package"/> <springProperty scope="context" name="zt.info.base-package" source="zt.info.base-package"/>
<!-- 格式化输出:%d 表示日期,%X{tid} SkWalking 链路追踪编号,%thread 表示线程名,%-5level级别从左显示 5 个字符宽度,%msg日志消息%n是换行符 --> <!-- 格式化输出:%d 表示日期,%X{tid} SkWalking 链路追踪编号,%thread 表示线程名,%-5level级别从左显示 5 个字符宽度,%msg日志消息%n是换行符 -->
<property name="PATTERN_DEFAULT" value="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} | %highlight(${LOG_LEVEL_PATTERN:-%5p} ${PID:- }) | %boldYellow(%thread [%tid]) %boldGreen(%-40.40logger{39}) | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/> <property name="PATTERN_DEFAULT" value="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} | %highlight(${LOG_LEVEL_PATTERN:-%5p} ${PID:- }) | %boldYellow(%thread [%tid]) %boldGreen(%-40.40logger{39}) | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>
<!--应用名称-->
<springProperty scope="context" name="spring.application.name" source="spring.application.name"/>
<!-- 日志输出路径 -->
<property name="LOG_DIR" value="${user.home}/logs/${spring.application.name}"/>
<!-- 控制台 Appender --> <!-- 控制台 Appender -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">      <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">     
@@ -31,7 +35,7 @@
<!-- 启动服务时,是否清理历史日志,一般不建议清理 --> <!-- 启动服务时,是否清理历史日志,一般不建议清理 -->
<cleanHistoryOnStart>${LOGBACK_ROLLINGPOLICY_CLEAN_HISTORY_ON_START:-false}</cleanHistoryOnStart> <cleanHistoryOnStart>${LOGBACK_ROLLINGPOLICY_CLEAN_HISTORY_ON_START:-false}</cleanHistoryOnStart>
<!-- 日志文件,到达多少容量,进行滚动 --> <!-- 日志文件,到达多少容量,进行滚动 -->
<maxFileSize>${LOGBACK_ROLLINGPOLICY_MAX_FILE_SIZE:-10MB}</maxFileSize> <maxFileSize>${LOGBACK_ROLLINGPOLICY_MAX_FILE_SIZE:-50MB}</maxFileSize>
<!-- 日志文件的总大小0 表示不限制 --> <!-- 日志文件的总大小0 表示不限制 -->
<totalSizeCap>${LOGBACK_ROLLINGPOLICY_TOTAL_SIZE_CAP:-0}</totalSizeCap> <totalSizeCap>${LOGBACK_ROLLINGPOLICY_TOTAL_SIZE_CAP:-0}</totalSizeCap>
<!-- 日志文件的保留天数 --> <!-- 日志文件的保留天数 -->
@@ -56,18 +60,44 @@
</encoder> </encoder>
</appender> </appender>
<!-- ERROR 级别日志 -->
<appender name="ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_DIR}-error.log</file>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_DIR}-error.%d{yyyy-MM-dd}.log</fileNamePattern>
<maxHistory>30</maxHistory> <!-- 保留30天的日志 -->
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<!--logback的日志级别 FATAL > ERROR > WARN > INFO > DEBUG-->
<!-- 本地环境 --> <!-- 本地环境 -->
<springProfile name="local"> <springProfile name="local,dev">
<root level="INFO"> <root level="WARN">
<appender-ref ref="STDOUT"/> <appender-ref ref="STDOUT"/>
<appender-ref ref="ERROR"/>
<appender-ref ref="GRPC"/> <!-- 本地环境下,如果不想接入 SkyWalking 日志服务,可以注释掉本行 --> <appender-ref ref="GRPC"/> <!-- 本地环境下,如果不想接入 SkyWalking 日志服务,可以注释掉本行 -->
<appender-ref ref="ASYNC"/> <!-- 本地环境下,如果不想打印日志,可以注释掉本行 --> <appender-ref ref="ASYNC"/> <!-- 本地环境下,如果不想打印日志,可以注释掉本行 -->
</root> </root>
<!--针对不同的业务路径,配置dao层的sql打印日志级别为DEBUG-->
<logger name="com.zt.plat.module.report.dal.mysql" level="DEBUG" additivity="false">
<appender-ref ref="STDOUT"/>
</logger>
</springProfile> </springProfile>
<!-- 其它环境 --> <!-- 其它环境 -->
<springProfile name="dev,test,stage,prod,default"> <springProfile name="dev,test,stage,prod,default">
<root level="INFO"> <root level="INFO">
<appender-ref ref="STDOUT"/> <appender-ref ref="STDOUT"/>
<appender-ref ref="ERROR"/>
<appender-ref ref="ASYNC"/> <appender-ref ref="ASYNC"/>
<appender-ref ref="GRPC"/> <appender-ref ref="GRPC"/>
</root> </root>

View File

@@ -0,0 +1,63 @@
package com.zt.plat.module.system.api.iwork;
import com.zt.plat.framework.common.pojo.CommonResult;
import com.zt.plat.module.system.api.iwork.dto.*;
import com.zt.plat.module.system.enums.ApiConstants;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
/**
 * RPC service - iWork integration.
 * <p>
 * Feign contract exposed by system-server so that other modules can reach the
 * iWork gateway capabilities (credential registration, token acquisition,
 * workflow operations and HR org paging queries) through one internal API.
 * All endpoints are POST and exchange the DTOs of this package.
 */
@FeignClient(name = ApiConstants.NAME, contextId = "iWorkIntegrationApi")
@Tag(name = "RPC 服务 - iWork 集成")
public interface IWorkIntegrationApi {

    /** Common URL prefix shared by every iWork integration endpoint. */
    String PREFIX = ApiConstants.PREFIX + "/integration/iwork";

    // ----------------- Authentication / session -----------------

    @PostMapping(PREFIX + "/auth/register")
    @Operation(summary = "注册 iWork 凭证,获取服务端公钥与 secret")
    CommonResult<IWorkAuthRegisterRespDTO> register(@RequestBody IWorkAuthRegisterReqDTO reqDTO);

    @PostMapping(PREFIX + "/auth/token")
    @Operation(summary = "申请 iWork Token(独立接口)")
    CommonResult<IWorkAuthTokenRespDTO> acquireToken(@RequestBody IWorkAuthTokenReqDTO reqDTO);

    // ----------------- Workflow capabilities -----------------

    @PostMapping(PREFIX + "/user/resolve")
    @Operation(summary = "根据外部标识获取 iWork 用户编号")
    CommonResult<IWorkUserInfoRespDTO> resolveUser(@RequestBody IWorkUserInfoReqDTO reqDTO);

    @PostMapping(PREFIX + "/workflow/create")
    @Operation(summary = "发起 iWork 流程")
    CommonResult<IWorkOperationRespDTO> createWorkflow(@RequestBody IWorkWorkflowCreateReqDTO reqDTO);

    @PostMapping(PREFIX + "/workflow/void")
    @Operation(summary = "作废 / 干预 iWork 流程")
    CommonResult<IWorkOperationRespDTO> voidWorkflow(@RequestBody IWorkWorkflowVoidReqDTO reqDTO);

    // ----------------- HR organization paging queries -----------------

    @PostMapping(PREFIX + "/hr/subcompany/page")
    @Operation(summary = "获取 iWork 分部列表")
    CommonResult<IWorkHrSubcompanyPageRespDTO> listSubcompanies(@RequestBody IWorkOrgPageReqDTO reqDTO);

    @PostMapping(PREFIX + "/hr/department/page")
    @Operation(summary = "获取 iWork 部门列表")
    CommonResult<IWorkHrDepartmentPageRespDTO> listDepartments(@RequestBody IWorkOrgPageReqDTO reqDTO);

    @PostMapping(PREFIX + "/hr/job-title/page")
    @Operation(summary = "获取 iWork 岗位列表")
    CommonResult<IWorkHrJobTitlePageRespDTO> listJobTitles(@RequestBody IWorkOrgPageReqDTO reqDTO);

    @PostMapping(PREFIX + "/hr/user/page")
    @Operation(summary = "获取 iWork 人员列表")
    CommonResult<IWorkHrUserPageRespDTO> listUsers(@RequestBody IWorkOrgPageReqDTO reqDTO);
}

View File

@@ -0,0 +1,18 @@
package com.zt.plat.module.system.api.iwork.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
/**
 * iWork credential-registration request DTO (used by other modules when
 * calling system-server through Feign).
 */
@Data
public class IWorkAuthRegisterReqDTO {

    /** iWork application code (required). */
    @Schema(description = "iWork 应用编码", requiredMode = Schema.RequiredMode.REQUIRED)
    private String appCode;

    /** iWork gateway base URL (optional). */
    @Schema(description = "iWork 网关地址", requiredMode = Schema.RequiredMode.NOT_REQUIRED)
    private String baseUrl;
}

View File

@@ -0,0 +1,18 @@
package com.zt.plat.module.system.api.iwork.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
/**
 * iWork credential-registration response DTO.
 */
@Data
public class IWorkAuthRegisterRespDTO {

    /** Server public key, Base64-encoded. */
    @Schema(description = "服务端公钥(Base64)")
    private String publicKey;

    /** Secret issued by the server. */
    @Schema(description = "服务端下发的 secret")
    private String secret;
}

View File

@@ -0,0 +1,15 @@
package com.zt.plat.module.system.api.iwork.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
/**
 * iWork token-acquisition request DTO.
 */
@Data
public class IWorkAuthTokenReqDTO {

    /** Application code (required). */
    @Schema(description = "应用编码", requiredMode = Schema.RequiredMode.REQUIRED)
    private String appCode;
}

View File

@@ -0,0 +1,18 @@
package com.zt.plat.module.system.api.iwork.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
/**
 * iWork token response DTO.
 */
@Data
public class IWorkAuthTokenRespDTO {

    /** Access token issued by iWork. */
    @Schema(description = "访问令牌")
    private String accessToken;

    /** Token lifetime in seconds. */
    @Schema(description = "过期时间(秒)")
    private Long expiresIn;
}

View File

@@ -0,0 +1,31 @@
package com.zt.plat.module.system.api.iwork.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import java.util.List;
/**
 * iWork department paging response DTO.
 */
@Data
public class IWorkHrDepartmentPageRespDTO {

    /** Total number of records. */
    @Schema(description = "总条数")
    private Long total;

    /** Records of the current page. */
    @Schema(description = "当前页数据")
    private List<Item> list;

    /** A single department entry. */
    @Data
    public static class Item {

        /** Department id. */
        @Schema(description = "部门编号")
        private String id;

        /** Department name. */
        @Schema(description = "部门名称")
        private String name;
    }
}

View File

@@ -0,0 +1,31 @@
package com.zt.plat.module.system.api.iwork.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import java.util.List;
/**
 * iWork job-title paging response DTO.
 */
@Data
public class IWorkHrJobTitlePageRespDTO {

    /** Total number of records. */
    @Schema(description = "总条数")
    private Long total;

    /** Records of the current page. */
    @Schema(description = "当前页数据")
    private List<Item> list;

    /** A single job-title entry. */
    @Data
    public static class Item {

        /** Job-title id. */
        @Schema(description = "岗位编号")
        private String id;

        /** Job-title name. */
        @Schema(description = "岗位名称")
        private String name;
    }
}

View File

@@ -0,0 +1,31 @@
package com.zt.plat.module.system.api.iwork.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import java.util.List;
/**
 * iWork subcompany (branch) paging response DTO.
 */
@Data
public class IWorkHrSubcompanyPageRespDTO {

    /** Total number of records. */
    @Schema(description = "总条数")
    private Long total;

    /** Records of the current page. */
    @Schema(description = "当前页数据")
    private List<Item> list;

    /** A single subcompany entry. */
    @Data
    public static class Item {

        /** Subcompany id. */
        @Schema(description = "分部编号")
        private String id;

        /** Subcompany name. */
        @Schema(description = "分部名称")
        private String name;
    }
}

View File

@@ -0,0 +1,31 @@
package com.zt.plat.module.system.api.iwork.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import java.util.List;
/**
 * iWork personnel paging response DTO.
 */
@Data
public class IWorkHrUserPageRespDTO {

    /** Total number of records. */
    @Schema(description = "总条数")
    private Long total;

    /** Records of the current page. */
    @Schema(description = "当前页数据")
    private List<Item> list;

    /** A single person entry. */
    @Data
    public static class Item {

        /** Person id. */
        @Schema(description = "人员编号")
        private String id;

        /** Person name. */
        @Schema(description = "人员名称")
        private String name;
    }
}

View File

@@ -0,0 +1,21 @@
package com.zt.plat.module.system.api.iwork.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
/**
 * iWork operation-result response DTO.
 */
@Data
public class IWorkOperationRespDTO {

    /** Whether the operation succeeded. */
    @Schema(description = "是否成功")
    private Boolean success;

    /** Operation id or workflow-instance id returned by iWork. */
    @Schema(description = "iWork 返回的操作编号或实例编号")
    private String operationId;

    /** Human-readable message. */
    @Schema(description = "提示信息")
    private String message;
}

View File

@@ -0,0 +1,21 @@
package com.zt.plat.module.system.api.iwork.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
/**
 * Common paging request DTO for iWork HR organization queries
 * (subcompanies, departments, job titles, personnel).
 */
@Data
public class IWorkOrgPageReqDTO {

    /** Page number, 1-based (required). */
    @Schema(description = "页码", example = "1", requiredMode = Schema.RequiredMode.REQUIRED)
    private Integer pageNo;

    /** Page size (required). */
    @Schema(description = "每页大小", example = "20", requiredMode = Schema.RequiredMode.REQUIRED)
    private Integer pageSize;

    /** Optional keyword filter. */
    @Schema(description = "关键字过滤")
    private String keyword;
}

View File

@@ -0,0 +1,15 @@
package com.zt.plat.module.system.api.iwork.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
/**
 * Request DTO for resolving an iWork user from an external identifier.
 */
@Data
public class IWorkUserInfoReqDTO {

    /** The user's unique identifier in the external system (required). */
    @Schema(description = "外部系统中的用户唯一标识", requiredMode = Schema.RequiredMode.REQUIRED)
    private String externalUserCode;
}

View File

@@ -0,0 +1,18 @@
package com.zt.plat.module.system.api.iwork.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
/**
 * iWork user resolution response DTO.
 */
@Data
public class IWorkUserInfoRespDTO {

    /** iWork user id. */
    @Schema(description = "iWork 用户编号")
    private String userId;

    /** iWork user display name. */
    @Schema(description = "iWork 用户名称")
    private String userName;
}

View File

@@ -0,0 +1,46 @@
package com.zt.plat.module.system.api.iwork.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
/**
 * Request DTO for starting an iWork workflow (seal-usage application).
 * <p>
 * Fields map one-to-one to {@code IWorkWorkflowCreateReqVO} so the DTO can be
 * used directly over Feign. Field names are the short codes expected by the
 * iWork form.
 */
@Data
public class IWorkWorkflowCreateReqDTO {

    /** Seal applicant (iWork person id). */
    @Schema(description = "用印申请人(iWork 人员 ID)", example = "1001")
    private String jbr;

    /** Seal-using department id. */
    @Schema(description = "用印部门 ID", example = "2001")
    private String yybm;

    /** Seal-using unit (subcompany id). */
    @Schema(description = "用印单位(分部 ID)", example = "3001")
    private String fb;

    /** Application date, formatted yyyy-MM-dd. */
    @Schema(description = "申请时间,格式 yyyy-MM-dd", example = "2025-01-01")
    private String sqsj;

    /** Destination / recipient of the sealed document. */
    @Schema(description = "用印去向")
    private String yyqx;

    /** URL of the supporting-basis attachment. */
    @Schema(description = "用印依据附件 URL")
    private String yyfkUrl;

    /** Reason for seal usage or content summary. */
    @Schema(description = "用印事由或内容摘要")
    private String yysy;

    /** URL of the document to be sealed (required per iWork form). */
    @Schema(description = "用印材料附件 URL(必填)")
    private String xyywjUrl;

    /** File name of the document to be sealed (required per iWork form). */
    @Schema(description = "用印材料附件文件名(必填)")
    private String xyywjFileName;

    /** Seal-usage matter. */
    @Schema(description = "用印事项")
    private String yysx;

    /** Business-system document number, used to derive the workflow title. */
    @Schema(description = "业务系统单据编号(用于派生流程标题)", example = "DJ-2025-0001")
    private String ywxtdjbh;
}

View File

@@ -0,0 +1,21 @@
package com.zt.plat.module.system.api.iwork.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
/**
 * Request DTO for voiding / intervening in an iWork workflow.
 */
@Data
public class IWorkWorkflowVoidReqDTO {

    /** iWork workflow-instance id (required). */
    @Schema(description = "iWork 实例编号", requiredMode = Schema.RequiredMode.REQUIRED)
    private String instanceId;

    /** iWork user id of the operator performing the void (required). */
    @Schema(description = "操作人 iWork 用户编号", requiredMode = Schema.RequiredMode.REQUIRED)
    private String operatorUserId;

    /** Optional reason for voiding. */
    @Schema(description = "作废原因")
    private String reason;
}

View File

@@ -14,6 +14,9 @@ public class AdminUserRespDTO implements VO {
@Schema(description = "用户 ID", requiredMode = Schema.RequiredMode.REQUIRED, example = "1024") @Schema(description = "用户 ID", requiredMode = Schema.RequiredMode.REQUIRED, example = "1024")
private Long id; private Long id;
@Schema(description = "用户账号", requiredMode = Schema.RequiredMode.REQUIRED, example = "zhangsan")
private String username;
@Schema(description = "用户昵称", requiredMode = Schema.RequiredMode.REQUIRED, example = "小王") @Schema(description = "用户昵称", requiredMode = Schema.RequiredMode.REQUIRED, example = "小王")
private String nickname; private String nickname;

View File

@@ -61,6 +61,7 @@ public interface ErrorCodeConstants {
ErrorCode USER_IMPORT_INIT_PASSWORD = new ErrorCode(1_002_003_009, "初始密码不能为空"); ErrorCode USER_IMPORT_INIT_PASSWORD = new ErrorCode(1_002_003_009, "初始密码不能为空");
ErrorCode USER_MOBILE_NOT_EXISTS = new ErrorCode(1_002_003_010, "该手机号尚未注册"); ErrorCode USER_MOBILE_NOT_EXISTS = new ErrorCode(1_002_003_010, "该手机号尚未注册");
ErrorCode USER_REGISTER_DISABLED = new ErrorCode(1_002_003_011, "注册功能已关闭"); ErrorCode USER_REGISTER_DISABLED = new ErrorCode(1_002_003_011, "注册功能已关闭");
ErrorCode USER_PASSWORD_MODIFY_FORBIDDEN = new ErrorCode(1_002_003_012, "该用户来源不支持修改密码");
// ========== 部门模块 1-002-004-000 ========== // ========== 部门模块 1-002-004-000 ==========
ErrorCode DEPT_NAME_DUPLICATE = new ErrorCode(1_002_004_000, "当前上级部门已存在同名子部门"); ErrorCode DEPT_NAME_DUPLICATE = new ErrorCode(1_002_004_000, "当前上级部门已存在同名子部门");

View File

@@ -13,7 +13,8 @@ import lombok.Getter;
public enum DeptSourceEnum { public enum DeptSourceEnum {
EXTERNAL(1, "外部部门"), // 系统创建的部门 EXTERNAL(1, "外部部门"), // 系统创建的部门
SYNC(2, "同步部门"); // 通过 OrgSyncService 同步的部门 SYNC(2, "同步部门"), // 通过 OrgSyncService 同步的部门
IWORK(3, "iWork 同步"); // 通过 iWork 同步的部门
/** /**
* 类型 * 类型

View File

@@ -0,0 +1,24 @@
package com.zt.plat.module.system.enums.user;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
 * Password handling strategy: distinguishes how passwords of locally created
 * accounts vs. externally synchronized accounts are stored and verified.
 */
@AllArgsConstructor
@Getter
public enum PasswordStrategyEnum {

    /**
     * Locally created or self-registered users; hashed and verified with the
     * Spring Security {@code PasswordEncoder} (BCrypt).
     */
    LOCAL_BCRYPT("LOCAL_BCRYPT"),

    /**
     * MD5 ciphertext synchronized from iWork; stored and compared directly as
     * upper-case MD5.
     */
    IWORK_MD5("IWORK_MD5");

    // Human-readable label; currently mirrors the enum constant name.
    private final String label;
}

View File

@@ -3,6 +3,9 @@ package com.zt.plat.module.system.enums.user;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;
import lombok.Getter; import lombok.Getter;
import java.util.Arrays;
import java.util.Objects;
/** /**
* 用户来源枚举 * 用户来源枚举
* *
@@ -12,8 +15,9 @@ import lombok.Getter;
@Getter @Getter
public enum UserSourceEnum { public enum UserSourceEnum {
EXTERNAL(1, "外部用户"), // 系统创建、注册等方式产生的用户 EXTERNAL(1, "外部用户", PasswordStrategyEnum.LOCAL_BCRYPT), // 系统创建、注册等方式产生的用户
SYNC(2, "同步用户"); // 通过 UserSyncService 同步的用户 SYNC(2, "同步用户", PasswordStrategyEnum.LOCAL_BCRYPT), // 通过 UserSyncService 同步的用户
IWORK(3, "iWork 用户", PasswordStrategyEnum.IWORK_MD5); // 通过 iWork 全量/单条同步产生的用户
/** /**
* 类型 * 类型
@@ -23,5 +27,28 @@ public enum UserSourceEnum {
* 名字 * 名字
*/ */
private final String name; private final String name;
/**
* 默认密码策略
*/
private final PasswordStrategyEnum passwordStrategy;
public static UserSourceEnum of(Integer source) {
if (source == null) {
return null;
}
return Arrays.stream(values())
.filter(item -> Objects.equals(item.source, source))
.findFirst()
.orElse(null);
}
public static PasswordStrategyEnum resolvePasswordStrategy(Integer source) {
UserSourceEnum matched = of(source);
return matched == null ? PasswordStrategyEnum.LOCAL_BCRYPT : matched.getPasswordStrategy();
}
public boolean isExternal() {
return this == EXTERNAL;
}
} }

View File

@@ -0,0 +1,111 @@
package com.zt.plat.module.system.api.iwork;
import com.zt.plat.framework.common.pojo.CommonResult;
import com.zt.plat.framework.common.util.object.BeanUtils;
import com.zt.plat.module.system.api.iwork.dto.*;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.*;
import com.zt.plat.module.system.service.integration.iwork.IWorkIntegrationService;
import com.zt.plat.module.system.service.integration.iwork.IWorkOrgRestService;
import com.zt.plat.module.system.service.integration.iwork.IWorkSyncService;
import jakarta.annotation.Resource;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.RestController;
import static com.zt.plat.framework.common.pojo.CommonResult.success;
/**
 * Feign API implementation for iWork integration.
 * <p>
 * Performs a simple conversion between the DTOs of the system-api module and
 * the existing controller VOs, then delegates to the service layer for the
 * actual business logic, so that other modules can call these capabilities
 * uniformly through Feign.
 */
@RestController
@Validated
public class IWorkIntegrationApiImpl implements IWorkIntegrationApi {

    @Resource
    private IWorkIntegrationService integrationService;

    @Resource
    private IWorkOrgRestService orgRestService;

    // NOTE: an IWorkSyncService field used to be injected here but was never
    // referenced by any endpoint, so the dead dependency has been removed.

    // ----------------- Authentication / session -----------------

    /** Registers an iWork credential; returns the server public key and secret. */
    @Override
    public CommonResult<IWorkAuthRegisterRespDTO> register(IWorkAuthRegisterReqDTO reqDTO) {
        IWorkAuthRegisterReqVO reqVO = BeanUtils.toBean(reqDTO, IWorkAuthRegisterReqVO.class);
        IWorkAuthRegisterRespVO respVO = integrationService.registerSession(reqVO);
        return success(BeanUtils.toBean(respVO, IWorkAuthRegisterRespDTO.class));
    }

    /** Acquires an iWork access token for the given application. */
    @Override
    public CommonResult<IWorkAuthTokenRespDTO> acquireToken(IWorkAuthTokenReqDTO reqDTO) {
        IWorkAuthTokenReqVO reqVO = BeanUtils.toBean(reqDTO, IWorkAuthTokenReqVO.class);
        IWorkAuthTokenRespVO respVO = integrationService.acquireToken(reqVO);
        return success(BeanUtils.toBean(respVO, IWorkAuthTokenRespDTO.class));
    }

    // ----------------- Workflow capabilities -----------------

    /** Resolves an iWork user id from an external user identifier. */
    @Override
    public CommonResult<IWorkUserInfoRespDTO> resolveUser(IWorkUserInfoReqDTO reqDTO) {
        IWorkUserInfoReqVO reqVO = BeanUtils.toBean(reqDTO, IWorkUserInfoReqVO.class);
        IWorkUserInfoRespVO respVO = integrationService.resolveUserId(reqVO);
        return success(BeanUtils.toBean(respVO, IWorkUserInfoRespDTO.class));
    }

    /** Starts an iWork workflow (seal-usage application). */
    @Override
    public CommonResult<IWorkOperationRespDTO> createWorkflow(IWorkWorkflowCreateReqDTO reqDTO) {
        IWorkWorkflowCreateReqVO reqVO = BeanUtils.toBean(reqDTO, IWorkWorkflowCreateReqVO.class);
        IWorkOperationRespVO respVO = integrationService.createWorkflow(reqVO);
        return success(BeanUtils.toBean(respVO, IWorkOperationRespDTO.class));
    }

    /** Voids / intervenes in an iWork workflow instance. */
    @Override
    public CommonResult<IWorkOperationRespDTO> voidWorkflow(IWorkWorkflowVoidReqDTO reqDTO) {
        IWorkWorkflowVoidReqVO reqVO = BeanUtils.toBean(reqDTO, IWorkWorkflowVoidReqVO.class);
        IWorkOperationRespVO respVO = integrationService.voidWorkflow(reqVO);
        return success(BeanUtils.toBean(respVO, IWorkOperationRespDTO.class));
    }

    // ----------------- HR organization paging queries -----------------

    /** Lists iWork subcompanies (branches) by page. */
    @Override
    public CommonResult<IWorkHrSubcompanyPageRespDTO> listSubcompanies(IWorkOrgPageReqDTO reqDTO) {
        IWorkSubcompanyQueryReqVO reqVO = BeanUtils.toBean(reqDTO, IWorkSubcompanyQueryReqVO.class);
        IWorkHrSubcompanyPageRespVO respVO = orgRestService.listSubcompanies(reqVO);
        return success(BeanUtils.toBean(respVO, IWorkHrSubcompanyPageRespDTO.class));
    }

    /** Lists iWork departments by page. */
    @Override
    public CommonResult<IWorkHrDepartmentPageRespDTO> listDepartments(IWorkOrgPageReqDTO reqDTO) {
        IWorkDepartmentQueryReqVO reqVO = BeanUtils.toBean(reqDTO, IWorkDepartmentQueryReqVO.class);
        IWorkHrDepartmentPageRespVO respVO = orgRestService.listDepartments(reqVO);
        return success(BeanUtils.toBean(respVO, IWorkHrDepartmentPageRespDTO.class));
    }

    /** Lists iWork job titles by page. */
    @Override
    public CommonResult<IWorkHrJobTitlePageRespDTO> listJobTitles(IWorkOrgPageReqDTO reqDTO) {
        IWorkJobTitleQueryReqVO reqVO = BeanUtils.toBean(reqDTO, IWorkJobTitleQueryReqVO.class);
        IWorkHrJobTitlePageRespVO respVO = orgRestService.listJobTitles(reqVO);
        return success(BeanUtils.toBean(respVO, IWorkHrJobTitlePageRespDTO.class));
    }

    /** Lists iWork personnel by page. */
    @Override
    public CommonResult<IWorkHrUserPageRespDTO> listUsers(IWorkOrgPageReqDTO reqDTO) {
        IWorkUserQueryReqVO reqVO = BeanUtils.toBean(reqDTO, IWorkUserQueryReqVO.class);
        IWorkHrUserPageRespVO respVO = orgRestService.listUsers(reqVO);
        return success(BeanUtils.toBean(respVO, IWorkHrUserPageRespDTO.class));
    }
}

View File

@@ -1,23 +1,10 @@
package com.zt.plat.module.system.controller.admin.integration.iwork; package com.zt.plat.module.system.controller.admin.integration.iwork;
import com.zt.plat.framework.common.pojo.CommonResult; import com.zt.plat.framework.common.pojo.CommonResult;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkAuthRegisterReqVO; import com.zt.plat.module.system.controller.admin.integration.iwork.vo.*;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkAuthRegisterRespVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkAuthTokenReqVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkAuthTokenRespVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkDepartmentQueryReqVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkJobTitleQueryReqVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkOperationRespVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkOrgRespVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkOrgSyncReqVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkSubcompanyQueryReqVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkUserInfoReqVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkUserInfoRespVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkUserQueryReqVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkWorkflowCreateReqVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkWorkflowVoidReqVO;
import com.zt.plat.module.system.service.integration.iwork.IWorkIntegrationService; import com.zt.plat.module.system.service.integration.iwork.IWorkIntegrationService;
import com.zt.plat.module.system.service.integration.iwork.IWorkOrgRestService; import com.zt.plat.module.system.service.integration.iwork.IWorkOrgRestService;
import com.zt.plat.module.system.service.integration.iwork.IWorkSyncService;
import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag; import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.validation.Valid; import jakarta.validation.Valid;
@@ -42,6 +29,7 @@ public class IWorkIntegrationController {
private final IWorkIntegrationService integrationService; private final IWorkIntegrationService integrationService;
private final IWorkOrgRestService orgRestService; private final IWorkOrgRestService orgRestService;
private final IWorkSyncService syncService;
@PostMapping("/auth/register") @PostMapping("/auth/register")
@Operation(summary = "注册 iWork 凭证,获取服务端公钥与 secret") @Operation(summary = "注册 iWork 凭证,获取服务端公钥与 secret")
@@ -77,49 +65,75 @@ public class IWorkIntegrationController {
@PostMapping("/hr/subcompany/page") @PostMapping("/hr/subcompany/page")
@Operation(summary = "获取 iWork 分部列表") @Operation(summary = "获取 iWork 分部列表")
public CommonResult<IWorkOrgRespVO> listSubcompanies(@Valid @RequestBody IWorkSubcompanyQueryReqVO reqVO) { public CommonResult<IWorkHrSubcompanyPageRespVO> listSubcompanies(@Valid @RequestBody IWorkSubcompanyQueryReqVO reqVO) {
return success(orgRestService.listSubcompanies(reqVO)); return success(orgRestService.listSubcompanies(reqVO));
} }
@PostMapping("/hr/department/page") @PostMapping("/hr/department/page")
@Operation(summary = "获取 iWork 部门列表") @Operation(summary = "获取 iWork 部门列表")
public CommonResult<IWorkOrgRespVO> listDepartments(@Valid @RequestBody IWorkDepartmentQueryReqVO reqVO) { public CommonResult<IWorkHrDepartmentPageRespVO> listDepartments(@Valid @RequestBody IWorkDepartmentQueryReqVO reqVO) {
return success(orgRestService.listDepartments(reqVO)); return success(orgRestService.listDepartments(reqVO));
} }
@PostMapping("/hr/job-title/page") @PostMapping("/hr/job-title/page")
@Operation(summary = "获取 iWork 岗位列表") @Operation(summary = "获取 iWork 岗位列表")
public CommonResult<IWorkOrgRespVO> listJobTitles(@Valid @RequestBody IWorkJobTitleQueryReqVO reqVO) { public CommonResult<IWorkHrJobTitlePageRespVO> listJobTitles(@Valid @RequestBody IWorkJobTitleQueryReqVO reqVO) {
return success(orgRestService.listJobTitles(reqVO)); return success(orgRestService.listJobTitles(reqVO));
} }
@PostMapping("/hr/user/page") @PostMapping("/hr/user/page")
@Operation(summary = "获取 iWork 人员列表") @Operation(summary = "获取 iWork 人员列表")
public CommonResult<IWorkOrgRespVO> listUsers(@Valid @RequestBody IWorkUserQueryReqVO reqVO) { public CommonResult<IWorkHrUserPageRespVO> listUsers(@Valid @RequestBody IWorkUserQueryReqVO reqVO) {
return success(orgRestService.listUsers(reqVO)); return success(orgRestService.listUsers(reqVO));
} }
@PostMapping("/hr/subcompany/sync") // @PostMapping("/hr/subcompany/sync")
@Operation(summary = "同步分部信息至 iWork") // @Operation(summary = "同步分部信息至 iWork")
public CommonResult<IWorkOrgRespVO> syncSubcompanies(@Valid @RequestBody IWorkOrgSyncReqVO reqVO) { // public CommonResult<IWorkHrSyncRespVO> syncSubcompanies(@Valid @RequestBody IWorkOrgSyncReqVO reqVO) {
return success(orgRestService.syncSubcompanies(reqVO)); // return success(orgRestService.syncSubcompanies(reqVO));
// }
//
// @PostMapping("/hr/department/sync")
// @Operation(summary = "同步部门信息至 iWork")
// public CommonResult<IWorkHrSyncRespVO> syncDepartments(@Valid @RequestBody IWorkOrgSyncReqVO reqVO) {
// return success(orgRestService.syncDepartments(reqVO));
// }
//
// @PostMapping("/hr/job-title/sync")
// @Operation(summary = "同步岗位信息至 iWork")
// public CommonResult<IWorkHrSyncRespVO> syncJobTitles(@Valid @RequestBody IWorkOrgSyncReqVO reqVO) {
// return success(orgRestService.syncJobTitles(reqVO));
// }
//
// @PostMapping("/hr/user/sync")
// @Operation(summary = "同步人员信息至 iWork")
// public CommonResult<IWorkHrSyncRespVO> syncUsers(@Valid @RequestBody IWorkOrgSyncReqVO reqVO) {
// return success(orgRestService.syncUsers(reqVO));
// }
// ----------------- 同步到本地 -----------------
@PostMapping("/hr/departments/full-sync")
@Operation(summary = "手动触发 iWork 部门同步")
public CommonResult<IWorkFullSyncRespVO> fullSyncDepartments(@Valid @RequestBody IWorkFullSyncReqVO reqVO) {
return success(syncService.fullSyncDepartments(reqVO));
} }
@PostMapping("/hr/department/sync") @PostMapping("/hr/subcompanies/full-sync")
@Operation(summary = "同步部门信息至 iWork") @Operation(summary = "手动触发 iWork 分部同步")
public CommonResult<IWorkOrgRespVO> syncDepartments(@Valid @RequestBody IWorkOrgSyncReqVO reqVO) { public CommonResult<IWorkFullSyncRespVO> fullSyncSubcompanies(@Valid @RequestBody IWorkFullSyncReqVO reqVO) {
return success(orgRestService.syncDepartments(reqVO)); return success(syncService.fullSyncSubcompanies(reqVO));
} }
@PostMapping("/hr/job-title/sync") @PostMapping("/hr/job-titles/full-sync")
@Operation(summary = "同步岗位信息至 iWork") @Operation(summary = "手动触发 iWork 岗位全量同步")
public CommonResult<IWorkOrgRespVO> syncJobTitles(@Valid @RequestBody IWorkOrgSyncReqVO reqVO) { public CommonResult<IWorkFullSyncRespVO> fullSyncJobTitles(@Valid @RequestBody IWorkFullSyncReqVO reqVO) {
return success(orgRestService.syncJobTitles(reqVO)); return success(syncService.fullSyncJobTitles(reqVO));
} }
@PostMapping("/hr/user/sync") @PostMapping("/hr/users/full-sync")
@Operation(summary = "同步人员信息至 iWork") @Operation(summary = "手动触发 iWork 人员全量同步")
public CommonResult<IWorkOrgRespVO> syncUsers(@Valid @RequestBody IWorkOrgSyncReqVO reqVO) { public CommonResult<IWorkFullSyncRespVO> fullSyncUsers(@Valid @RequestBody IWorkFullSyncReqVO reqVO) {
return success(orgRestService.syncUsers(reqVO)); return success(syncService.fullSyncUsers(reqVO));
} }
} }

View File

@@ -0,0 +1,53 @@
package com.zt.plat.module.system.controller.admin.integration.iwork.vo;

import com.zt.plat.module.system.enums.integration.IWorkSyncEntityTypeEnum;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.Max;
import jakarta.validation.constraints.Min;
import lombok.Data;

import java.util.EnumSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * Request body for a manually triggered full iWork synchronization run
 * (iWork 全量同步请求).
 *
 * <p>Callers may bound the paging window (start page, max pages, page size)
 * and restrict the run to a subset of entity types via {@link #scopes}.
 */
@Data
public class IWorkFullSyncReqVO {

    @Schema(description = "起始页码,从 1 开始", example = "1")
    @Min(1)
    private Integer startPage = 1;

    @Schema(description = "最大处理页数null 表示处理至 iWork 返回的末页", example = "10")
    @Min(1)
    private Integer maxPages;

    @Schema(description = "每次分页从 iWork 拉取的记录数", example = "100")
    @Min(1)
    @Max(500)
    private Integer pageSize = 100;

    @Schema(description = "同步范围列表默认同步全部。可选subcompany、department、jobTitle、user")
    private List<String> scopes;

    @Schema(description = "是否包含已失效canceled=1的记录", example = "false")
    private Boolean includeCanceled = Boolean.FALSE;

    /**
     * Resolves the raw {@link #scopes} codes into entity-type enums.
     *
     * @return the requested entity types; falls back to ALL types when the
     *         list is null/empty or contains no recognizable code, so a typo
     *         never silently disables the whole sync
     */
    public Set<IWorkSyncEntityTypeEnum> resolveScopes() {
        EnumSet<IWorkSyncEntityTypeEnum> defaults = EnumSet.allOf(IWorkSyncEntityTypeEnum.class);
        if (scopes == null || scopes.isEmpty()) {
            return defaults;
        }
        // fromCode returns null for unknown codes; drop those before collecting.
        Set<IWorkSyncEntityTypeEnum> resolved = scopes.stream()
                .map(IWorkSyncEntityTypeEnum::fromCode)
                .filter(Objects::nonNull)
                .collect(Collectors.toCollection(() -> EnumSet.noneOf(IWorkSyncEntityTypeEnum.class)));
        return resolved.isEmpty() ? defaults : resolved;
    }
}

View File

@@ -0,0 +1,34 @@
package com.zt.plat.module.system.controller.admin.integration.iwork.vo;

import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

import java.util.List;

/**
 * Result of a full iWork synchronization run (iWork 全量同步响应).
 *
 * <p>Aggregates one {@link IWorkSyncEntityStatVO} per entity kind plus the
 * per-batch breakdown. The per-entity stat fields are pre-initialized so
 * consumers never see a null stat object.
 */
@Data
public class IWorkFullSyncRespVO {

    @Schema(description = "本次处理的总页数")
    private Integer processedPages;

    @Schema(description = "每次分页请求的条数")
    private Integer pageSize;

    // Per-entity aggregate counters; eagerly created so accumulation code can
    // increment without null checks.
    @Schema(description = "分部统计信息")
    private IWorkSyncEntityStatVO subcompanyStat = new IWorkSyncEntityStatVO();

    @Schema(description = "部门统计信息")
    private IWorkSyncEntityStatVO departmentStat = new IWorkSyncEntityStatVO();

    @Schema(description = "岗位统计信息")
    private IWorkSyncEntityStatVO jobTitleStat = new IWorkSyncEntityStatVO();

    @Schema(description = "人员统计信息")
    private IWorkSyncEntityStatVO userStat = new IWorkSyncEntityStatVO();

    @Schema(description = "每个批次的详细统计")
    private List<IWorkSyncBatchStatVO> batches;
}

View File

@@ -0,0 +1,212 @@
package com.zt.plat.module.system.controller.admin.integration.iwork.vo;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.zt.plat.module.system.service.integration.iwork.jackson.LenientIntegerDeserializer;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * iWork department page response (iWork 部门分页响应).
 *
 * <p>Unlike the other HR page VOs, the pagination payload here is wrapped in a
 * nested "data" envelope ({@link PageData}); the top-level class exposes
 * {@code @JsonIgnore} convenience accessors that delegate into that envelope.
 */
@Data
@Schema(description = "iWork 部门分页响应")
public class IWorkHrDepartmentPageRespVO {

    @Schema(description = "响应码")
    private String code;

    @Schema(description = "提示信息")
    private String message;

    @Schema(description = "是否成功")
    private boolean success;

    @Schema(description = "iWork 返回的数据体")
    @JsonProperty("data")
    private PageData data;

    // ---- Convenience accessors delegating into the "data" envelope. ----
    // Getters return null when the envelope is absent; setters lazily create
    // it via ensureData() so callers can populate the VO field by field.

    @JsonIgnore
    public Integer getTotalSize() {
        return data == null ? null : data.getTotalSize();
    }

    @JsonIgnore
    public void setTotalSize(Integer totalSize) {
        ensureData().setTotalSize(totalSize);
    }

    @JsonIgnore
    public Integer getTotalPage() {
        return data == null ? null : data.getTotalPage();
    }

    @JsonIgnore
    public void setTotalPage(Integer totalPage) {
        ensureData().setTotalPage(totalPage);
    }

    @JsonIgnore
    public Integer getPageSize() {
        return data == null ? null : data.getPageSize();
    }

    @JsonIgnore
    public void setPageSize(Integer pageSize) {
        ensureData().setPageSize(pageSize);
    }

    @JsonIgnore
    public Integer getPageNumber() {
        return data == null ? null : data.getPageNumber();
    }

    @JsonIgnore
    public void setPageNumber(Integer pageNumber) {
        ensureData().setPageNumber(pageNumber);
    }

    @JsonIgnore
    public List<Department> getDataList() {
        return data == null ? null : data.getDataList();
    }

    @JsonIgnore
    public void setDataList(List<Department> dataList) {
        ensureData().setDataList(dataList);
    }

    // Lazily instantiates the envelope; used only by the setters above.
    @JsonIgnore
    private PageData ensureData() {
        if (data == null) {
            data = new PageData();
        }
        return data;
    }

    /** Pagination envelope as returned by iWork under the "data" key. */
    @Data
    @Schema(description = "iWork 部门分页数据体")
    public static class PageData {

        @Schema(description = "总条数")
        private Integer totalSize;

        @Schema(description = "总页数")
        private Integer totalPage;

        @Schema(description = "每页条数")
        private Integer pageSize;

        @Schema(description = "当前页码")
        @JsonProperty("page")
        private Integer pageNumber;

        @Schema(description = "部门数据列表")
        @JsonProperty("dataList")
        private List<Department> dataList;
    }

    /**
     * One department row. All Integer ids use LenientIntegerDeserializer —
     * presumably to tolerate blank/non-numeric values from iWork (TODO confirm
     * against the deserializer implementation). Unknown JSON keys are retained
     * in {@link #attributes} via {@code @JsonAnySetter}.
     */
    @Data
    @Schema(description = "部门信息")
    public static class Department {

        @Schema(description = "部门 ID")
        @JsonProperty("departmentid")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer departmentid;

        @Schema(description = "部门 IDiWork 主键)")
        @JsonProperty("id")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer id;

        @Schema(description = "部门编码")
        @JsonProperty("departmentcode")
        private String departmentcode;

        @Schema(description = "部门名称")
        @JsonProperty("departmentname")
        private String departmentname;

        @Schema(description = "部门标识")
        @JsonProperty("departmentmark")
        private String departmentmark;

        @Schema(description = "所属分部 ID")
        @JsonProperty("subcompanyid1")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer subcompanyid1;

        @Schema(description = "所属分部名称")
        @JsonProperty("subcompanyname")
        private String subcompanyname;

        @Schema(description = "上级分部 ID")
        @JsonProperty("supsubcomid")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer supsubcomid;

        @Schema(description = "上级分部名称")
        @JsonProperty("supsubcomname")
        private String supsubcomname;

        @Schema(description = "父部门 ID")
        @JsonProperty("supdepid")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer supdepid;

        @Schema(description = "层级路径")
        @JsonProperty("alllevel")
        private String alllevel;

        @Schema(description = "显示顺序")
        @JsonProperty("showorder")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer showorder;

        @Schema(description = "是否有子部门 (0/1)")
        @JsonProperty("haschild")
        private String haschild;

        @Schema(description = "是否已失效 (0/1)")
        @JsonProperty("canceled")
        private String canceled;

        @Schema(description = "部门类型")
        @JsonProperty("departmenttype")
        private String departmenttype;

        @Schema(description = "负责人 ID")
        @JsonProperty("managerid")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer managerid;

        @Schema(description = "负责人名称")
        @JsonProperty("manager")
        private String manager;

        // Catch-all for JSON keys not mapped above; insertion order preserved.
        @JsonIgnore
        private Map<String, Object> attributes;

        // Jackson fallback setter for unmapped properties; map is lazily built.
        @JsonAnySetter
        public void putAttribute(String key, Object value) {
            if (attributes == null) {
                attributes = new LinkedHashMap<>();
            }
            attributes.put(key, value);
        }

        // Re-emits the unmapped properties on serialization; never returns null.
        @JsonAnyGetter
        public Map<String, Object> any() {
            return attributes == null ? Collections.emptyMap() : attributes;
        }
    }
}

View File

@@ -0,0 +1,121 @@
package com.zt.plat.module.system.controller.admin.integration.iwork.vo;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.zt.plat.module.system.service.integration.iwork.jackson.LenientIntegerDeserializer;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * iWork job-title page response (iWork 岗位分页响应).
 *
 * <p>Pagination fields are flat on this class (no nested "data" envelope,
 * unlike {@code IWorkHrDepartmentPageRespVO}).
 */
@Data
@Schema(description = "iWork 岗位分页响应")
public class IWorkHrJobTitlePageRespVO {

    @Schema(description = "响应码")
    private String code;

    @Schema(description = "提示信息")
    private String message;

    @Schema(description = "是否成功")
    private boolean success;

    @Schema(description = "总条数")
    private Integer totalSize;

    @Schema(description = "总页数")
    private Integer totalPage;

    @Schema(description = "每页条数")
    private Integer pageSize;

    @Schema(description = "当前页码")
    private Integer pageNumber;

    @Schema(description = "岗位数据列表")
    private List<JobTitle> dataList;

    /**
     * One job-title row. Integer fields use {@link LenientIntegerDeserializer}
     * consistently with the department/subcompany VOs, so a blank or
     * non-numeric id from iWork does not abort deserialization of the page.
     * Unknown JSON keys are retained in {@link #attributes}.
     */
    @Data
    @Schema(description = "岗位信息")
    public static class JobTitle {

        @Schema(description = "岗位 ID")
        @JsonProperty("id")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer id;

        @Schema(description = "岗位编码")
        @JsonProperty("jobtitlecode")
        private String jobtitlecode;

        @Schema(description = "岗位名称")
        @JsonProperty("jobtitlename")
        private String jobtitlename;

        @Schema(description = "岗位类型")
        @JsonProperty("jobtitletype")
        private String jobtitletype;

        @Schema(description = "所属岗位组 ID")
        @JsonProperty("jobgroupid")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer jobgroupid;

        @Schema(description = "所属岗位组名称")
        @JsonProperty("jobgroupname")
        private String jobgroupname;

        @Schema(description = "岗位层级")
        @JsonProperty("joblevel")
        private String joblevel;

        @Schema(description = "岗位职责")
        @JsonProperty("jobfunction")
        private String jobfunction;

        @Schema(description = "岗位描述")
        @JsonProperty("description")
        private String description;

        @Schema(description = "上级岗位 ID")
        @JsonProperty("supjobtitleid")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer supjobtitleid;

        @Schema(description = "上级岗位名称")
        @JsonProperty("supjobtitlename")
        private String supjobtitlename;

        @Schema(description = "显示顺序")
        @JsonProperty("showorder")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer showorder;

        @Schema(description = "是否已失效 (0/1)")
        @JsonProperty("canceled")
        private String canceled;

        // Catch-all for JSON keys not mapped above; insertion order preserved.
        @JsonIgnore
        private Map<String, Object> attributes;

        // Jackson fallback setter for unmapped properties; map is lazily built.
        @JsonAnySetter
        public void putAttribute(String key, Object value) {
            if (attributes == null) {
                attributes = new LinkedHashMap<>();
            }
            attributes.put(key, value);
        }

        // Re-emits the unmapped properties on serialization; never returns null.
        @JsonAnyGetter
        public Map<String, Object> any() {
            return attributes == null ? Collections.emptyMap() : attributes;
        }
    }
}

View File

@@ -0,0 +1,114 @@
package com.zt.plat.module.system.controller.admin.integration.iwork.vo;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.zt.plat.module.system.service.integration.iwork.jackson.LenientIntegerDeserializer;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * iWork subcompany page response (iWork 分部分页响应).
 *
 * <p>Pagination fields are flat on this class (no nested "data" envelope,
 * unlike {@code IWorkHrDepartmentPageRespVO}).
 */
@Data
@Schema(description = "iWork 分部分页响应")
public class IWorkHrSubcompanyPageRespVO {

    @Schema(description = "响应码")
    private String code;

    @Schema(description = "提示信息")
    private String message;

    @Schema(description = "是否成功")
    private boolean success;

    @Schema(description = "总条数")
    private Integer totalSize;

    @Schema(description = "总页数")
    private Integer totalPage;

    @Schema(description = "每页条数")
    private Integer pageSize;

    @Schema(description = "当前页码")
    private Integer pageNumber;

    @Schema(description = "分部数据列表")
    private List<Subcompany> dataList;

    /**
     * One subcompany row. Integer fields use {@link LenientIntegerDeserializer}
     * consistently (previously only {@code id}/{@code showorder} did), so a
     * blank or non-numeric id from iWork does not abort page deserialization.
     * Unknown JSON keys are retained in {@link #attributes}.
     */
    @Data
    @Schema(description = "分部信息")
    public static class Subcompany {

        @Schema(description = "部门 IDiWork 主键)")
        @JsonProperty("id")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer id;

        @Schema(description = "分部编码")
        @JsonProperty("subcompanycode")
        private String subcompanycode;

        @Schema(description = "分部名称")
        @JsonProperty("subcompanyname")
        private String subcompanyname;

        @Schema(description = "所属总部 ID")
        @JsonProperty("companyid")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer companyid;

        @Schema(description = "所属总部名称")
        @JsonProperty("companyname")
        private String companyname;

        @Schema(description = "上级分部 ID")
        @JsonProperty("supsubcomid")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer supsubcomid;

        @Schema(description = "上级分部名称")
        @JsonProperty("supsubcomname")
        private String supsubcomname;

        @Schema(description = "显示顺序")
        @JsonProperty("showorder")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer showorder;

        @Schema(description = "分部描述")
        @JsonProperty("description")
        private String description;

        @Schema(description = "是否已失效0/1")
        @JsonProperty("canceled")
        private String canceled;

        @Schema(description = "层级路径")
        @JsonProperty("alllevel")
        private String alllevel;

        // Catch-all for JSON keys not mapped above; insertion order preserved.
        @JsonIgnore
        private Map<String, Object> attributes;

        // Jackson fallback setter for unmapped properties; map is lazily built.
        @JsonAnySetter
        public void putAttribute(String key, Object value) {
            if (attributes == null) {
                attributes = new LinkedHashMap<>();
            }
            attributes.put(key, value);
        }

        // Re-emits the unmapped properties on serialization; never returns null.
        @JsonAnyGetter
        public Map<String, Object> any() {
            return attributes == null ? Collections.emptyMap() : attributes;
        }
    }
}

View File

@@ -0,0 +1,74 @@
package com.zt.plat.module.system.controller.admin.integration.iwork.vo;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * iWork HR sync response (iWork 人力同步响应): overall status plus one
 * {@link SyncResult} entry per synchronized record.
 */
@Data
@Schema(description = "iWork 人力同步响应")
public class IWorkHrSyncRespVO {

    @Schema(description = "响应码")
    private String code;

    @Schema(description = "提示信息")
    private String message;

    @Schema(description = "是否成功")
    private boolean success;

    @Schema(description = "同步结果明细")
    private List<SyncResult> result;

    /**
     * Outcome of a single synchronized record. The action is carried under the
     * JSON key {@code "@action"}; unknown keys are kept in {@link #attributes}.
     */
    @Data
    @Schema(description = "同步结果项")
    public static class SyncResult {

        @Schema(description = "操作动作 add/update/delete")
        @JsonProperty("@action")
        private String action;

        @Schema(description = "外部编码")
        @JsonProperty("code")
        private String code;

        @Schema(description = "执行结果 success/fail")
        @JsonProperty("result")
        private String result;

        @Schema(description = "是否成功")
        @JsonProperty("success")
        private Boolean success;

        @Schema(description = "失败描述")
        @JsonProperty("message")
        private String message;

        // Catch-all for JSON keys not mapped above; insertion order preserved.
        @JsonIgnore
        private Map<String, Object> attributes;

        // Jackson fallback setter for unmapped properties; map is lazily built.
        @JsonAnySetter
        public void putAttribute(String key, Object value) {
            if (attributes == null) {
                attributes = new LinkedHashMap<>();
            }
            attributes.put(key, value);
        }

        // Re-emits the unmapped properties on serialization; never returns null.
        @JsonAnyGetter
        public Map<String, Object> any() {
            return attributes == null ? Collections.emptyMap() : attributes;
        }
    }
}

View File

@@ -0,0 +1,194 @@
package com.zt.plat.module.system.controller.admin.integration.iwork.vo;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import com.zt.plat.module.system.service.integration.iwork.jackson.LenientIntegerDeserializer;

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * iWork user page response (iWork 人员分页响应).
 *
 * <p>Pagination fields are flat on this class (no nested "data" envelope,
 * unlike {@code IWorkHrDepartmentPageRespVO}).
 */
@Data
@Schema(description = "iWork 人员分页响应")
public class IWorkHrUserPageRespVO {

    @Schema(description = "响应码")
    private String code;

    @Schema(description = "提示信息")
    private String message;

    @Schema(description = "是否成功")
    private boolean success;

    @Schema(description = "总条数")
    private Integer totalSize;

    @Schema(description = "总页数")
    private Integer totalPage;

    @Schema(description = "每页条数")
    private Integer pageSize;

    @Schema(description = "当前页码")
    private Integer pageNumber;

    @Schema(description = "人员数据列表")
    private List<User> dataList;

    /**
     * One user row. Integer fields use {@link LenientIntegerDeserializer}
     * consistently with the other HR page VOs (previously only
     * {@code dsporder} did), so a blank or non-numeric id from iWork does not
     * abort deserialization of the whole page. Unknown JSON keys are retained
     * in {@link #attributes}.
     */
    @Data
    @Schema(description = "人员信息")
    public static class User {

        @Schema(description = "人员 ID")
        @JsonProperty("id")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer id;

        @Schema(description = "人员姓名")
        @JsonProperty("lastname")
        private String lastname;

        @Schema(description = "登录账号")
        @JsonProperty("loginid")
        private String loginid;

        @Schema(description = "工号")
        @JsonProperty("workcode")
        private String workcode;

        @Schema(description = "性别")
        @JsonProperty("sex")
        private String sex;

        @Schema(description = "所属分部 ID")
        @JsonProperty("subcompanyid1")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer subcompanyid1;

        @Schema(description = "所属分部名称")
        @JsonProperty("subcompanyname")
        private String subcompanyname;

        @Schema(description = "所属部门 ID")
        @JsonProperty("departmentid")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer departmentid;

        @Schema(description = "所属部门名称")
        @JsonProperty("departmentname")
        private String departmentname;

        @Schema(description = "所属岗位 ID")
        @JsonProperty("jobtitleid")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer jobtitleid;

        @Schema(description = "所属岗位名称")
        @JsonProperty("jobtitlename")
        private String jobtitlename;

        @Schema(description = "手机号码")
        @JsonProperty("mobile")
        private String mobile;

        @Schema(description = "办公电话")
        @JsonProperty("telephone")
        private String telephone;

        @Schema(description = "邮箱")
        @JsonProperty("email")
        private String email;

        @Schema(description = "直属上级 ID")
        @JsonProperty("managerid")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer managerid;

        @Schema(description = "助理 ID")
        @JsonProperty("assistantid")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer assistantid;

        @Schema(description = "安全级别")
        @JsonProperty("seclevel")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer seclevel;

        @Schema(description = "当前状态")
        @JsonProperty("status")
        private String status;

        @Schema(description = "入职日期")
        @JsonProperty("hiredate")
        private String hiredate;

        @Schema(description = "离职日期")
        @JsonProperty("leavedate")
        private String leavedate;

        @Schema(description = "出生日期")
        @JsonProperty("birthday")
        private String birthday;

        @Schema(description = "民族")
        @JsonProperty("folk")
        private String folk;

        @Schema(description = "婚姻状况")
        @JsonProperty("maritalstatus")
        private String maritalstatus;

        @Schema(description = "文化程度")
        @JsonProperty("educationlevel")
        private String educationlevel;

        @Schema(description = "籍贯")
        @JsonProperty("nativeplace")
        private String nativeplace;

        @Schema(description = "户口所在地")
        @JsonProperty("nationality")
        private String nationality;

        @Schema(description = "证件号码")
        @JsonProperty("certificatenum")
        private String certificatenum;

        @Schema(description = "显示顺序")
        @JsonProperty("dsporder")
        @JsonDeserialize(using = LenientIntegerDeserializer.class)
        private Integer dsporder;

        @Schema(description = "系统语言")
        @JsonProperty("systemlanguage")
        private String systemlanguage;

        @Schema(description = "账号类型")
        @JsonProperty("accounttype")
        private String accounttype;

        // NOTE(review): raw credential material carried on a page DTO — make
        // sure this is never logged or serialized to clients beyond the sync.
        @Schema(description = "用户密码MD5 密文)")
        @JsonProperty("password")
        private String password;

        // Catch-all for JSON keys not mapped above; insertion order preserved.
        @JsonIgnore
        private Map<String, Object> attributes;

        // Jackson fallback setter for unmapped properties; map is lazily built.
        @JsonAnySetter
        public void putAttribute(String key, Object value) {
            if (attributes == null) {
                attributes = new LinkedHashMap<>();
            }
            attributes.put(key, value);
        }

        // Re-emits the unmapped properties on serialization; never returns null.
        @JsonAnyGetter
        public Map<String, Object> any() {
            return attributes == null ? Collections.emptyMap() : attributes;
        }
    }
}

View File

@@ -14,9 +14,6 @@ public class IWorkOperationRespVO {
@Schema(description = "iWork 返回的原始数据") @Schema(description = "iWork 返回的原始数据")
private Map<String, Object> payload; private Map<String, Object> payload;
@Schema(description = "iWork 返回的原始字符串")
private String rawBody;
@Schema(description = "是否判断为成功") @Schema(description = "是否判断为成功")
private boolean success; private boolean success;

View File

@@ -1,28 +0,0 @@
package com.zt.plat.module.system.controller.admin.integration.iwork.vo;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import java.util.Map;
/**
* 对 iWork 人力组织 REST 请求的响应封装。
*/
@Data
public class IWorkOrgRespVO {
@Schema(description = "响应中的业务数据data 字段或整体映射)")
private Map<String, Object> payload;
@Schema(description = "原始响应字符串")
private String rawBody;
@Schema(description = "是否判断为成功")
private boolean success;
@Schema(description = "提示信息")
private String message;
@Schema(description = "响应码")
private String code;
}

View File

@@ -0,0 +1,34 @@
package com.zt.plat.module.system.controller.admin.integration.iwork.vo;

import com.zt.plat.module.system.enums.integration.IWorkSyncEntityTypeEnum;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

/**
 * Statistics for one executed page batch of an iWork sync run
 * (记录一次分页批次执行的统计信息).
 */
@Data
public class IWorkSyncBatchStatVO {

    @Schema(description = "同步的实体类型")
    private IWorkSyncEntityTypeEnum entityType;

    @Schema(description = "当前批次处理的页码,从 1 开始")
    private Integer pageNumber;

    @Schema(description = "本批次从 iWork 拉取的记录数量")
    private Integer pulled;

    @Schema(description = "本批次创建的记录数量")
    private Integer created;

    @Schema(description = "本批次因已存在而跳过的记录数量")
    private Integer skippedExisting;

    @Schema(description = "本批次禁用的记录数量")
    private Integer disabled;

    @Schema(description = "本批次失败的记录数量")
    private Integer failed;
}

View File

@@ -0,0 +1,46 @@
package com.zt.plat.module.system.controller.admin.integration.iwork.vo;

import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

/**
 * Aggregate counters for one entity type across an iWork sync run
 * (iWork 同步实体统计信息). The increment* methods accumulate per-batch
 * deltas; counters start at 0 (primitive ints). Not thread-safe.
 */
@Data
public class IWorkSyncEntityStatVO {

    @Schema(description = "从 iWork 拉取的记录数量")
    private int pulled;

    @Schema(description = "在本系统中新创建的记录数量")
    private int created;

    @Schema(description = "因已存在而跳过的记录数量")
    private int skippedExisting;

    @Schema(description = "在本系统中被禁用的记录数量")
    private int disabled;

    @Schema(description = "同步失败的记录数量")
    private int failed;

    /** Adds {@code delta} to the pulled-record counter. */
    public void incrementPulled(int delta) {
        this.pulled += delta;
    }

    /** Adds {@code delta} to the created-record counter. */
    public void incrementCreated(int delta) {
        this.created += delta;
    }

    /** Adds {@code delta} to the skipped-existing counter. */
    public void incrementSkipped(int delta) {
        this.skippedExisting += delta;
    }

    /** Adds {@code delta} to the disabled-record counter. */
    public void incrementDisabled(int delta) {
        this.disabled += delta;
    }

    /** Adds {@code delta} to the failed-record counter. */
    public void incrementFailed(int delta) {
        this.failed += delta;
    }
}

View File

@@ -14,9 +14,6 @@ public class IWorkUserInfoRespVO {
@Schema(description = "iWork 返回的原始数据") @Schema(description = "iWork 返回的原始数据")
private Map<String, Object> payload; private Map<String, Object> payload;
@Schema(description = "iWork 返回的原始字符串")
private String rawBody;
@Schema(description = "是否判断为成功") @Schema(description = "是否判断为成功")
private boolean success; private boolean success;

View File

@@ -1,15 +1,9 @@
package com.zt.plat.module.system.controller.admin.integration.iwork.vo; package com.zt.plat.module.system.controller.admin.integration.iwork.vo;
import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotEmpty;
import lombok.Data; import lombok.Data;
import lombok.EqualsAndHashCode; import lombok.EqualsAndHashCode;
import java.util.List;
import java.util.Map;
/** /**
* 发起 iWork 流程的请求体。 * 发起 iWork 流程的请求体。
*/ */
@@ -17,25 +11,36 @@ import java.util.Map;
@EqualsAndHashCode(callSuper = true) @EqualsAndHashCode(callSuper = true)
public class IWorkWorkflowCreateReqVO extends IWorkBaseReqVO { public class IWorkWorkflowCreateReqVO extends IWorkBaseReqVO {
@Schema(description = "流程标题", example = "测试流程") @Schema(description = "用印申请人iWork 人员 ID", example = "1001")
@NotBlank(message = "流程标题不能为空") private String jbr;
private String requestName;
@Schema(description = "流程模板编号,可为空使用默认配置", example = "54") @Schema(description = "用印部门 ID", example = "2001")
private Long workflowId; private String yybm;
@Schema(description = "主表字段") @Schema(description = "用印单位(分部 ID", example = "3001")
@NotEmpty(message = "主表字段不能为空") private String fb;
@Valid
private List<IWorkFormFieldVO> mainFields;
@Schema(description = "明细表数据") @Schema(description = "申请时间,格式 yyyy-MM-dd", example = "2025-01-01")
@Valid private String sqsj;
private List<IWorkDetailTableVO> detailTables;
@Schema(description = "额外参数") @Schema(description = "用印去向")
private Map<String, Object> otherParams; private String yyqx;
@Schema(description = "额外 Form 数据") @Schema(description = "用印依据附件 URL")
private Map<String, String> formExtras; private String yyfkUrl;
@Schema(description = "用印事由或内容摘要")
private String yysy;
@Schema(description = "用印材料附件 URL必填")
private String xyywjUrl;
@Schema(description = "用印材料附件文件名(必填)")
private String xyywjFileName;
@Schema(description = "用印事项")
private String yysx;
@Schema(description = "业务系统单据编号(用于派生流程标题)", example = "DJ-2025-0001")
private String ywxtdjbh;
} }

View File

@@ -84,6 +84,18 @@ public class UserSaveReqVO {
@Schema(description = "用户来源类型", example = "1") @Schema(description = "用户来源类型", example = "1")
private Integer userSource; private Integer userSource;
@Schema(hidden = true)
@JsonIgnore
private boolean skipAssociationValidation;
@Schema(hidden = true)
@JsonIgnore
private boolean skipMobileValidation;
@Schema(hidden = true)
@JsonIgnore
private boolean skipEmailValidation;
// ========== 仅【创建】时,需要传递的字段 ========== // ========== 仅【创建】时,需要传递的字段 ==========
@Schema(description = "密码", requiredMode = Schema.RequiredMode.REQUIRED, example = "123456") @Schema(description = "密码", requiredMode = Schema.RequiredMode.REQUIRED, example = "123456")

View File

@@ -0,0 +1,51 @@
package com.zt.plat.module.system.enums.integration;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import java.util.Locale;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
 * Entity types supported by the iWork synchronization APIs.
 */
@AllArgsConstructor
@Getter
public enum IWorkSyncEntityTypeEnum {

    SUBCOMPANY("subcompany", "分部 / 公司"),
    DEPARTMENT("department", "部门"),
    JOB_TITLE("jobTitle", "岗位"),
    USER("user", "人员");

    /** Wire-format code, used as the JSON representation of the constant. */
    @JsonValue
    private final String code;

    /** Human-readable label for display purposes. */
    private final String label;

    /**
     * Resolves a constant by its wire code, case-insensitively.
     *
     * @param code wire code, may be {@code null}
     * @return the matching constant, or {@code null} when unknown or {@code null} input
     */
    public static IWorkSyncEntityTypeEnum fromCode(String code) {
        if (code == null) {
            return null;
        }
        for (IWorkSyncEntityTypeEnum candidate : values()) {
            if (candidate.code.equalsIgnoreCase(code)) {
                return candidate;
            }
        }
        return null;
    }

    /**
     * JSON deserialization entry point. First attempts a wire-code match, then
     * falls back to the enum constant name (e.g. {@code JOB_TITLE}), trimmed and
     * upper-cased with a locale-independent conversion.
     *
     * @param code incoming JSON string, may be {@code null}
     * @return the resolved constant, or {@code null} when it cannot be mapped
     */
    @JsonCreator(mode = JsonCreator.Mode.DELEGATING)
    public static IWorkSyncEntityTypeEnum fromJson(String code) {
        IWorkSyncEntityTypeEnum resolved = fromCode(code);
        if (resolved != null || code == null) {
            return resolved;
        }
        try {
            return valueOf(code.trim().toUpperCase(Locale.ROOT));
        } catch (IllegalArgumentException ignored) {
            // Unknown value: tolerate by returning null instead of failing deserialization.
            return null;
        }
    }
}

View File

@@ -34,11 +34,6 @@ public class IWorkProperties {
*/ */
private String clientPublicKey; private String clientPublicKey;
/**
* 当调用方未指定流程编号时使用的默认流程模板编号。
*/
private Long workflowId;
/** /**
* 当请求未指定操作人时使用的默认用户编号。 * 当请求未指定操作人时使用的默认用户编号。
*/ */
@@ -53,6 +48,8 @@ public class IWorkProperties {
private final Client client = new Client(); private final Client client = new Client();
@Valid @Valid
private final OrgRest org = new OrgRest(); private final OrgRest org = new OrgRest();
@Valid
private final Workflow workflow = new Workflow();
@Data @Data
public static class Paths { public static class Paths {
@@ -142,4 +139,13 @@ public class IWorkProperties {
private String syncJobTitle; private String syncJobTitle;
private String syncUser; private String syncUser;
} }
@Data
public static class Workflow {
/**
* 用印流程对应的 iWork 模板编号。
*/
@NotBlank(message = "iWork 用印流程模板编号不能为空")
private String sealWorkflowId;
}
} }

View File

@@ -0,0 +1,222 @@
package com.zt.plat.module.system.framework.sms.core.client.impl;
import cn.hutool.core.codec.Base64;
import cn.hutool.core.lang.Assert;
import cn.hutool.core.util.StrUtil;
import cn.hutool.crypto.digest.DigestUtil;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import com.zt.plat.framework.common.core.KeyValue;
import com.zt.plat.framework.common.util.http.HttpUtils;
import com.zt.plat.module.system.framework.sms.core.client.dto.SmsReceiveRespDTO;
import com.zt.plat.module.system.framework.sms.core.client.dto.SmsSendRespDTO;
import com.zt.plat.module.system.framework.sms.core.client.dto.SmsTemplateRespDTO;
import com.zt.plat.module.system.framework.sms.core.property.SmsChannelProperties;
import lombok.extern.slf4j.Slf4j;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * 中国移动云MAS短信客户端实现类
 *
 * <p>该渠道需要三个凭证ecName企业名称、apId应用ID、secretKey密钥
 * 为兼容通用的 apiKey + apiSecret 结构apiKey 按 "ecName apId"(空格分隔)存储,
 * apiSecret 存储 secretKey。</p>
 *
 * @author zt-team
 * @since 2025-01-19
 */
@Slf4j
public class CmccMasSmsClient extends AbstractSmsClient {

    /** 云MAS短信提交接口地址 */
    private static final String URL = "https://112.35.10.201:28888/sms/submit";

    /** 响应 JSON 中表示整体成功与否的字段名 */
    private static final String RESPONSE_SUCCESS = "success";

    public CmccMasSmsClient(SmsChannelProperties properties) {
        super(properties);
        Assert.notEmpty(properties.getApiKey(), "apiKey 不能为空");
        Assert.notEmpty(properties.getApiSecret(), "apiSecret 不能为空");
        validateCmccMasConfig(properties);
    }

    /**
     * 参数校验中国移动云MAS的配置
     *
     * 原因是中国移动云MAS需要三个参数ecName、apId、secretKey
     *
     * 解决方案:考虑到不破坏原有的 apiKey + apiSecret 的结构,所以将 ecName 和 apId 拼接到 apiKey 字段中,格式为 "ecName apId"。
     * secretKey 存储在 apiSecret 字段中。
     *
     * @param properties 配置
     */
    private static void validateCmccMasConfig(SmsChannelProperties properties) {
        String combineKey = properties.getApiKey();
        Assert.notEmpty(combineKey, "apiKey 不能为空");
        String[] keys = combineKey.trim().split(" ");
        Assert.isTrue(keys.length == 2, "中国移动云MAS apiKey 配置格式错误,请配置为 [ecName apId]");
    }

    /**
     * 获取 ecName企业名称取 apiKey 最后一个空格之前的部分
     */
    private String getEcName() {
        return StrUtil.subBefore(properties.getApiKey(), " ", true);
    }

    /**
     * 获取 apId应用ID apiKey 最后一个空格之后的部分
     */
    private String getApId() {
        return StrUtil.subAfter(properties.getApiKey(), " ", true);
    }

    /**
     * 获取 secretKey密钥存储于 apiSecret 字段
     */
    private String getSecretKey() {
        return properties.getApiSecret();
    }

    /**
     * 发送短信
     *
     * @param logId 日志ID
     * @param mobile 手机号
     * @param apiTemplateId 模板ID本平台不使用模板传入内容
     * @param templateParams 模板参数
     * @return 发送结果
     */
    @Override
    public SmsSendRespDTO sendSms(Long logId, String mobile, String apiTemplateId,
                                  List<KeyValue<String, Object>> templateParams) throws Throwable {
        // 1. 构建短信内容
        String content = buildContent(apiTemplateId, templateParams);
        // 2. 计算MAC校验值
        String mac = calculateMac(mobile, content);
        // 3. 构建请求参数
        JSONObject requestBody = new JSONObject();
        requestBody.set("ecName", getEcName()); // 企业名称
        requestBody.set("apId", getApId()); // 应用ID
        requestBody.set("secretKey", getSecretKey()); // 密钥
        requestBody.set("sign", properties.getSignature()); // 签名编码
        requestBody.set("mobiles", mobile);
        requestBody.set("content", content);
        requestBody.set("addSerial", "");
        requestBody.set("mac", mac);
        // 安全:日志中屏蔽 secretKey 与 mac避免凭证泄露到日志文件
        log.info("[sendSms][发送短信 {}]", JSONUtil.toJsonStr(maskSensitiveFields(requestBody)));
        // 4. Base64编码请求体注意编码结果同样包含密钥不打印其内容
        String encodedBody = Base64.encode(requestBody.toString());
        log.debug("[sendSms][Base64编码后长度: {}]", encodedBody.length());
        // 5. 构建请求头需要JWT Token
        Map<String, String> headers = new HashMap<>();
        headers.put("Authorization", "Bearer " + getJwtToken());
        headers.put("Content-Type", "text/plain");
        // 6. 发起请求
        String responseBody = HttpUtils.post(URL, headers, encodedBody);
        JSONObject response = JSONUtil.parseObj(responseBody);
        log.info("[sendSms][收到响应 - {}]", response);
        // 7. 解析响应
        return new SmsSendRespDTO()
                .setSuccess(response.getBool(RESPONSE_SUCCESS, false))
                .setSerialNo(response.getStr("msgGroup"))
                .setApiCode(response.getStr("rspcod"))
                .setApiMsg(response.getStr("message", "未知错误"));
    }

    /**
     * 构建用于日志输出的请求体副本,屏蔽敏感字段
     *
     * @param requestBody 原始请求体(不会被修改)
     * @return 已屏蔽 secretKey / mac 的副本
     */
    private static JSONObject maskSensitiveFields(JSONObject requestBody) {
        JSONObject copy = JSONUtil.parseObj(requestBody.toString());
        copy.set("secretKey", "******");
        copy.set("mac", "******");
        return copy;
    }

    /**
     * 解析短信接收状态回调
     *
     * @param text 回调文本
     * @return 接收状态列表
     */
    @Override
    public List<SmsReceiveRespDTO> parseSmsReceiveStatus(String text) throws Throwable {
        // TODO: 根据移动云MAS回调格式实现
        log.warn("[parseSmsReceiveStatus][暂未实现短信状态回调解析]");
        return Collections.emptyList();
    }

    /**
     * 查询短信模板
     *
     * @param apiTemplateId 模板ID
     * @return 模板信息移动云MAS不使用模板机制固定返回 null
     */
    @Override
    public SmsTemplateRespDTO getSmsTemplate(String apiTemplateId) throws Throwable {
        // 移动云MAS不使用模板机制直接发送内容
        log.debug("[getSmsTemplate][中国移动云MAS不支持模板查询]");
        return null;
    }

    /**
     * 计算MAC校验值
     * 算法MD5(ecName + apId + secretKey + mobiles + content + sign + addSerial)
     *
     * @param mobile 手机号
     * @param content 短信内容
     * @return MAC校验值小写十六进制
     */
    private String calculateMac(String mobile, String content) {
        String rawString = getEcName() // ecName
                + getApId() // apId
                + getSecretKey() // secretKey
                + mobile // mobiles
                + content // content
                + properties.getSignature() // sign
                + ""; // addSerial
        // md5Hex 已返回小写,此处 toLowerCase 仅作显式保证
        String mac = DigestUtil.md5Hex(rawString).toLowerCase();
        log.debug("[calculateMac][原始字符串长度: {}]", rawString.length());
        return mac;
    }

    /**
     * 构建短信内容
     *
     * @param apiTemplateId 模板ID本渠道将其作为内容/内容模板)
     * @param templateParams 模板参数
     * @return 短信内容
     */
    private String buildContent(String apiTemplateId, List<KeyValue<String, Object>> templateParams) {
        // 简单实现直接返回模板ID作为内容
        // 实际使用时需要根据业务需求构建短信内容
        if (templateParams == null || templateParams.isEmpty()) {
            return apiTemplateId;
        }
        // 替换模板参数,支持 {{key}} 格式
        String content = apiTemplateId;
        for (KeyValue<String, Object> param : templateParams) {
            String placeholder = "{{" + param.getKey() + "}}";
            String value = String.valueOf(param.getValue());
            content = content.replace(placeholder, value);
        }
        return content;
    }

    /**
     * 获取JWT Token
     * TODO: 实现Token获取逻辑可能需要
     * 1. 调用认证接口获取Token
     * 2. 缓存Token并在过期前自动刷新
     * 3. 处理Token失效情况
     *
     * 警告当前返回硬编码的占位Token生产环境必须替换为真实的Token管理实现
     *
     * @return JWT Token
     */
    private String getJwtToken() {
        // 临时实现:从配置中读取或调用认证接口获取
        // 实际生产环境需要实现完整的Token管理机制
        String token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.example.token";
        log.warn("[getJwtToken][使用临时Token生产环境需实现完整的Token获取机制]");
        return token;
    }
}

View File

@@ -81,6 +81,7 @@ public class SmsClientFactoryImpl implements SmsClientFactory {
case TENCENT: return new TencentSmsClient(properties); case TENCENT: return new TencentSmsClient(properties);
case HUAWEI: return new HuaweiSmsClient(properties); case HUAWEI: return new HuaweiSmsClient(properties);
case QINIU: return new QiniuSmsClient(properties); case QINIU: return new QiniuSmsClient(properties);
case CMCC_MAS: return new CmccMasSmsClient(properties);
} }
// 创建失败,错误日志 + 抛出异常 // 创建失败,错误日志 + 抛出异常
log.error("[createSmsClient][配置({}) 找不到合适的客户端实现]", properties); log.error("[createSmsClient][配置({}) 找不到合适的客户端实现]", properties);

View File

@@ -19,6 +19,7 @@ public enum SmsChannelEnum {
TENCENT("TENCENT", "腾讯云"), TENCENT("TENCENT", "腾讯云"),
HUAWEI("HUAWEI", "华为云"), HUAWEI("HUAWEI", "华为云"),
QINIU("QINIU", "七牛云"), QINIU("QINIU", "七牛云"),
CMCC_MAS("CMCC_MAS", "中国移动云MAS"),
; ;
/** /**

View File

@@ -114,7 +114,7 @@ public class AdminAuthServiceImpl implements AdminAuthService {
createLoginLog(null, username, logTypeEnum, LoginResultEnum.BAD_CREDENTIALS); createLoginLog(null, username, logTypeEnum, LoginResultEnum.BAD_CREDENTIALS);
throw exception(AUTH_LOGIN_BAD_CREDENTIALS); throw exception(AUTH_LOGIN_BAD_CREDENTIALS);
} }
if (!userService.isPasswordMatch(password, user.getPassword())) { if (!userService.isPasswordMatch(user, password)) {
createLoginLog(user.getId(), username, logTypeEnum, LoginResultEnum.BAD_CREDENTIALS); createLoginLog(user.getId(), username, logTypeEnum, LoginResultEnum.BAD_CREDENTIALS);
throw exception(AUTH_LOGIN_BAD_CREDENTIALS); throw exception(AUTH_LOGIN_BAD_CREDENTIALS);
} }
@@ -299,7 +299,7 @@ public class AdminAuthServiceImpl implements AdminAuthService {
if (length < 4 || length > 16) { if (length < 4 || length > 16) {
throw exception(AUTH_LOGIN_BAD_CREDENTIALS); throw exception(AUTH_LOGIN_BAD_CREDENTIALS);
} }
if (!userService.isPasswordMatch(password, user.getPassword())) { if (!userService.isPasswordMatch(user, password)) {
throw exception(AUTH_LOGIN_BAD_CREDENTIALS); throw exception(AUTH_LOGIN_BAD_CREDENTIALS);
} }
} }
@@ -436,20 +436,11 @@ public class AdminAuthServiceImpl implements AdminAuthService {
} }
/** /**
* 判断是否为内部用户 * 判断是否为内部用户,仅通过 E 办同步SYNC来源的账号才视为内部用户
* 根据UserSourceEnum判断同步用户为内部用户外部用户为外部用户
*/ */
private boolean isInternalUser(AdminUserDO user) { private boolean isInternalUser(AdminUserDO user) {
// 根据userSource字段判断用户类型
Integer userSource = user.getUserSource(); Integer userSource = user.getUserSource();
return Objects.equals(userSource, UserSourceEnum.SYNC.getSource());
// 同步用户(SYNC = 2)为内部用户需要使用E办登录
if (userSource != null && userSource.equals(UserSourceEnum.SYNC.getSource())) {
return true;
}
// 外部用户(EXTERNAL = 1)或其他情况为外部用户,使用账号密码登录
return false;
} }
/** /**

View File

@@ -74,16 +74,23 @@ public class DeptServiceImpl implements DeptService {
// 校验部门名的唯一性 // 校验部门名的唯一性
validateDeptNameUnique(null, createReqVO.getParentId(), createReqVO.getName()); validateDeptNameUnique(null, createReqVO.getParentId(), createReqVO.getName());
// 生成并校验部门编码 // 生成并校验部门编码
Long effectiveParentId = normalizeParentId(createReqVO.getParentId()); boolean isIWorkSource = Objects.equals(createReqVO.getDeptSource(), DeptSourceEnum.IWORK.getSource());
boolean isTopLevel = Objects.equals(effectiveParentId, DeptDO.PARENT_ID_ROOT); if (isIWorkSource) {
String resolvedCode; // iWork 来源直接使用提供的编码,不再生成
if (isTopLevel) { String providedCode = StrUtil.blankToDefault(createReqVO.getCode(), null);
resolvedCode = resolveTopLevelCode(null, createReqVO.getCode()); createReqVO.setCode(providedCode);
} else { } else {
resolvedCode = generateDeptCode(effectiveParentId); Long effectiveParentId = normalizeParentId(createReqVO.getParentId());
validateDeptCodeUnique(null, resolvedCode); boolean isTopLevel = Objects.equals(effectiveParentId, DeptDO.PARENT_ID_ROOT);
String resolvedCode;
if (isTopLevel) {
resolvedCode = resolveTopLevelCode(null, createReqVO.getCode());
} else {
resolvedCode = generateDeptCode(effectiveParentId);
validateDeptCodeUnique(null, resolvedCode);
}
createReqVO.setCode(resolvedCode);
} }
createReqVO.setCode(resolvedCode);
// 插入部门 // 插入部门
DeptDO dept = BeanUtils.toBean(createReqVO, DeptDO.class); DeptDO dept = BeanUtils.toBean(createReqVO, DeptDO.class);
@@ -110,28 +117,35 @@ public class DeptServiceImpl implements DeptService {
// 校验部门名的唯一性 // 校验部门名的唯一性
validateDeptNameUnique(updateReqVO.getId(), updateReqVO.getParentId(), updateReqVO.getName()); validateDeptNameUnique(updateReqVO.getId(), updateReqVO.getParentId(), updateReqVO.getName());
// 如果上级发生变化,需要重新生成编码并同步子级 // 如果上级发生变化,需要重新生成编码并同步子级
boolean isIWorkSource = Objects.equals(originalDept.getDeptSource(), DeptSourceEnum.IWORK.getSource());
Long newParentId = normalizeParentId(updateReqVO.getParentId()); Long newParentId = normalizeParentId(updateReqVO.getParentId());
Long oldParentId = normalizeParentId(originalDept.getParentId()); Long oldParentId = normalizeParentId(originalDept.getParentId());
boolean parentChanged = !Objects.equals(newParentId, oldParentId); boolean parentChanged = !Objects.equals(newParentId, oldParentId);
if (parentChanged) { if (isIWorkSource) {
String newCode; // iWork 来源直接使用提供的编码,不再生成
if (Objects.equals(newParentId, DeptDO.PARENT_ID_ROOT)) { String providedCode = StrUtil.blankToDefault(updateReqVO.getCode(), null);
newCode = resolveTopLevelCode(updateReqVO.getId(), updateReqVO.getCode()); updateReqVO.setCode(providedCode);
} else {
newCode = generateDeptCode(updateReqVO.getParentId());
validateDeptCodeUnique(updateReqVO.getId(), newCode);
}
updateReqVO.setCode(newCode);
} else { } else {
if (Objects.equals(newParentId, DeptDO.PARENT_ID_ROOT)) { if (parentChanged) {
String requestedCode = updateReqVO.getCode(); String newCode;
if (StrUtil.isNotBlank(requestedCode) && !StrUtil.equals(requestedCode.trim(), originalDept.getCode())) { if (Objects.equals(newParentId, DeptDO.PARENT_ID_ROOT)) {
updateReqVO.setCode(resolveTopLevelCode(updateReqVO.getId(), requestedCode)); newCode = resolveTopLevelCode(updateReqVO.getId(), updateReqVO.getCode());
} else {
newCode = generateDeptCode(updateReqVO.getParentId());
validateDeptCodeUnique(updateReqVO.getId(), newCode);
}
updateReqVO.setCode(newCode);
} else {
if (Objects.equals(newParentId, DeptDO.PARENT_ID_ROOT)) {
String requestedCode = updateReqVO.getCode();
if (StrUtil.isNotBlank(requestedCode) && !StrUtil.equals(requestedCode.trim(), originalDept.getCode())) {
updateReqVO.setCode(resolveTopLevelCode(updateReqVO.getId(), requestedCode));
} else {
updateReqVO.setCode(originalDept.getCode());
}
} else { } else {
updateReqVO.setCode(originalDept.getCode()); updateReqVO.setCode(originalDept.getCode());
} }
} else {
updateReqVO.setCode(originalDept.getCode());
} }
} }
@@ -189,7 +203,7 @@ public class DeptServiceImpl implements DeptService {
// 2. 父部门不存在 // 2. 父部门不存在
DeptDO parentDept = deptMapper.selectById(parentId); DeptDO parentDept = deptMapper.selectById(parentId);
if (parentDept == null) { if (parentDept == null) {
throw exception(DEPT_PARENT_NOT_EXITS); return;
} }
// 3. 递归校验父部门,如果父部门是自己的子部门,则报错,避免形成环路 // 3. 递归校验父部门,如果父部门是自己的子部门,则报错,避免形成环路
if (id == null) { // id 为空,说明新增,不需要考虑环路 if (id == null) { // id 为空,说明新增,不需要考虑环路
@@ -251,19 +265,18 @@ public class DeptServiceImpl implements DeptService {
private String generateDeptCode(Long parentId) { private String generateDeptCode(Long parentId) {
Long effectiveParentId = normalizeParentId(parentId); Long effectiveParentId = normalizeParentId(parentId);
Long codeParentId = effectiveParentId;
String prefix = ROOT_CODE_PREFIX; String prefix = ROOT_CODE_PREFIX;
if (!DeptDO.PARENT_ID_ROOT.equals(effectiveParentId)) { if (!DeptDO.PARENT_ID_ROOT.equals(effectiveParentId)) {
DeptDO parentDept = deptMapper.selectById(effectiveParentId); DeptDO parentDept = deptMapper.selectById(effectiveParentId);
if (parentDept == null) { if (parentDept == null || StrUtil.isBlank(parentDept.getCode())) {
throw exception(DEPT_PARENT_NOT_EXITS); codeParentId = DeptDO.PARENT_ID_ROOT;
} else {
prefix = parentDept.getCode();
} }
if (StrUtil.isBlank(parentDept.getCode())) {
throw exception(DEPT_PARENT_CODE_NOT_INITIALIZED);
}
prefix = parentDept.getCode();
} }
int nextSequence = determineNextSequence(effectiveParentId, prefix); int nextSequence = determineNextSequence(codeParentId, prefix);
assertSequenceRange(nextSequence); assertSequenceRange(nextSequence);
return prefix + formatSequence(nextSequence); return prefix + formatSequence(nextSequence);
} }

View File

@@ -17,5 +17,6 @@ public interface IWorkIntegrationErrorCodeConstants {
ErrorCode IWORK_OPERATOR_USER_MISSING = new ErrorCode(1_010_200_007, "缺少 iWork 操作人用户编号"); ErrorCode IWORK_OPERATOR_USER_MISSING = new ErrorCode(1_010_200_007, "缺少 iWork 操作人用户编号");
ErrorCode IWORK_WORKFLOW_ID_MISSING = new ErrorCode(1_010_200_008, "缺少 iWork 流程模板编号"); ErrorCode IWORK_WORKFLOW_ID_MISSING = new ErrorCode(1_010_200_008, "缺少 iWork 流程模板编号");
ErrorCode IWORK_ORG_IDENTIFIER_MISSING = new ErrorCode(1_010_200_009, "iWork 人力组织接口缺少认证标识"); ErrorCode IWORK_ORG_IDENTIFIER_MISSING = new ErrorCode(1_010_200_009, "iWork 人力组织接口缺少认证标识");
ErrorCode IWORK_ORG_REMOTE_FAILED = new ErrorCode(1_010_200_010, "iWork 人力组织接口请求失败"); ErrorCode IWORK_ORG_REMOTE_FAILED = new ErrorCode(1_010_200_010, "iWork 人力组织接口请求失败{}");
ErrorCode IWORK_SEAL_REQUIRED_FIELD_MISSING = new ErrorCode(1_010_200_011, "缺少用印必填字段:{}");
} }

View File

@@ -1,8 +1,12 @@
package com.zt.plat.module.system.service.integration.iwork; package com.zt.plat.module.system.service.integration.iwork;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkDepartmentQueryReqVO; import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkDepartmentQueryReqVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkHrDepartmentPageRespVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkHrJobTitlePageRespVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkHrSubcompanyPageRespVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkHrSyncRespVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkHrUserPageRespVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkJobTitleQueryReqVO; import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkJobTitleQueryReqVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkOrgRespVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkOrgSyncReqVO; import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkOrgSyncReqVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkSubcompanyQueryReqVO; import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkSubcompanyQueryReqVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkUserQueryReqVO; import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkUserQueryReqVO;
@@ -12,19 +16,19 @@ import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkUser
*/ */
public interface IWorkOrgRestService { public interface IWorkOrgRestService {
IWorkOrgRespVO listSubcompanies(IWorkSubcompanyQueryReqVO reqVO); IWorkHrSubcompanyPageRespVO listSubcompanies(IWorkSubcompanyQueryReqVO reqVO);
IWorkOrgRespVO listDepartments(IWorkDepartmentQueryReqVO reqVO); IWorkHrDepartmentPageRespVO listDepartments(IWorkDepartmentQueryReqVO reqVO);
IWorkOrgRespVO listJobTitles(IWorkJobTitleQueryReqVO reqVO); IWorkHrJobTitlePageRespVO listJobTitles(IWorkJobTitleQueryReqVO reqVO);
IWorkOrgRespVO listUsers(IWorkUserQueryReqVO reqVO); IWorkHrUserPageRespVO listUsers(IWorkUserQueryReqVO reqVO);
IWorkOrgRespVO syncSubcompanies(IWorkOrgSyncReqVO reqVO); IWorkHrSyncRespVO syncSubcompanies(IWorkOrgSyncReqVO reqVO);
IWorkOrgRespVO syncDepartments(IWorkOrgSyncReqVO reqVO); IWorkHrSyncRespVO syncDepartments(IWorkOrgSyncReqVO reqVO);
IWorkOrgRespVO syncJobTitles(IWorkOrgSyncReqVO reqVO); IWorkHrSyncRespVO syncJobTitles(IWorkOrgSyncReqVO reqVO);
IWorkOrgRespVO syncUsers(IWorkOrgSyncReqVO reqVO); IWorkHrSyncRespVO syncUsers(IWorkOrgSyncReqVO reqVO);
} }

View File

@@ -0,0 +1,195 @@
package com.zt.plat.module.system.service.integration.iwork;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkHrDepartmentPageRespVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkHrJobTitlePageRespVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkHrSubcompanyPageRespVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkHrUserPageRespVO;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
/**
 * Abstraction for applying iWork entities into local persistence.
 *
 * <p>Implementations receive already-fetched iWork page data and persist it,
 * reporting per-batch counters via {@link BatchResult}. Single-entity sync is
 * provided by default methods that delegate to the batch variants.</p>
 */
public interface IWorkSyncProcessor {

    /** Applies a batch of subcompanies; returns aggregated counters. */
    BatchResult syncSubcompanies(List<IWorkHrSubcompanyPageRespVO.Subcompany> data, SyncOptions options);

    /** Applies a batch of departments; returns aggregated counters. */
    BatchResult syncDepartments(List<IWorkHrDepartmentPageRespVO.Department> data, SyncOptions options);

    /** Applies a batch of job titles; returns aggregated counters. */
    BatchResult syncJobTitles(List<IWorkHrJobTitlePageRespVO.JobTitle> data, SyncOptions options);

    /** Applies a batch of users; returns aggregated counters. */
    BatchResult syncUsers(List<IWorkHrUserPageRespVO.User> data, SyncOptions options);

    /**
     * Execution options shared by batch and single sync flows.
     * Immutable; construct via the static factories.
     */
    final class SyncOptions {

        /** Whether canceled/disabled source records are still processed. */
        private final boolean includeCanceled;
        /** Whether existing local records may be updated. */
        private final boolean allowUpdate;
        /** Whether missing local records are created. */
        private final boolean createIfMissing;

        private SyncOptions(boolean includeCanceled, boolean allowUpdate, boolean createIfMissing) {
            this.includeCanceled = includeCanceled;
            this.allowUpdate = allowUpdate;
            this.createIfMissing = createIfMissing;
        }

        /** Full-sync profile: create-only, no updates. */
        public static SyncOptions full(boolean includeCanceled) {
            return new SyncOptions(includeCanceled, false, true);
        }

        /** Single-entity profile: includes canceled records and allows updates. */
        public static SyncOptions single(boolean createIfMissing) {
            return new SyncOptions(true, true, createIfMissing);
        }

        /** Fully custom profile. */
        public static SyncOptions custom(boolean includeCanceled, boolean allowUpdate, boolean createIfMissing) {
            return new SyncOptions(includeCanceled, allowUpdate, createIfMissing);
        }

        public boolean isIncludeCanceled() {
            return includeCanceled;
        }

        public boolean isAllowUpdate() {
            return allowUpdate;
        }

        public boolean isCreateIfMissing() {
            return createIfMissing;
        }
    }

    /**
     * Aggregated result for a sync batch: counters plus an optional message.
     * Mutable accumulator; merge results from sub-batches via {@link #merge}.
     */
    final class BatchResult {

        private int pulled;
        private int created;
        private int skipped;
        private int disabled;
        private int failed;
        private int updated;
        private String message;

        /** Returns a fresh result with all counters at zero. */
        public static BatchResult empty() {
            return new BatchResult();
        }

        /** Sets the message and returns {@code this} for chaining. */
        public BatchResult withMessage(String message) {
            this.message = message;
            return this;
        }

        /**
         * Adds the other result's counters into this one. A non-null message on
         * {@code other} replaces the current message. Null-safe; returns {@code this}.
         */
        public BatchResult merge(BatchResult other) {
            if (other == null) {
                return this;
            }
            this.pulled += other.pulled;
            this.created += other.created;
            this.skipped += other.skipped;
            this.disabled += other.disabled;
            this.failed += other.failed;
            this.updated += other.updated;
            if (Objects.nonNull(other.message)) {
                this.message = other.message;
            }
            return this;
        }

        /** Copies a single result's counters into a fresh aggregate. */
        public static BatchResult fromSingle(BatchResult single) {
            return empty().merge(single);
        }

        /** Result representing exactly one created record. */
        public static BatchResult singleCreated(String message) {
            BatchResult result = empty();
            result.created = 1;
            result.message = message;
            return result;
        }

        /** Result representing exactly one skipped record. */
        public static BatchResult singleSkipped(String message) {
            BatchResult result = empty();
            result.skipped = 1;
            result.message = message;
            return result;
        }

        /** Result representing exactly one failed record. */
        public static BatchResult singleFailed(String message) {
            BatchResult result = empty();
            result.failed = 1;
            result.message = message;
            return result;
        }

        /** Increments the pulled counter by {@code delta}; returns {@code this}. */
        public BatchResult increasePulled(int delta) {
            this.pulled += delta;
            return this;
        }

        public void increaseCreated() {
            this.created++;
        }

        public void increaseSkipped() {
            this.skipped++;
        }

        public void increaseDisabled() {
            this.disabled++;
        }

        public void increaseFailed() {
            this.failed++;
        }

        public void increaseUpdated() {
            this.updated++;
        }

        public int getPulled() {
            return pulled;
        }

        public int getCreated() {
            return created;
        }

        public int getSkipped() {
            return skipped;
        }

        public int getDisabled() {
            return disabled;
        }

        public int getFailed() {
            return failed;
        }

        public int getUpdated() {
            return updated;
        }

        public String getMessage() {
            return message;
        }
    }

    /** Single-entity convenience: wraps {@code data} and delegates to the batch method. */
    default BatchResult syncSubcompany(IWorkHrSubcompanyPageRespVO.Subcompany data, SyncOptions options) {
        return syncSubcompanies(Collections.singletonList(data), options);
    }

    /** Single-entity convenience: wraps {@code data} and delegates to the batch method. */
    default BatchResult syncDepartment(IWorkHrDepartmentPageRespVO.Department data, SyncOptions options) {
        return syncDepartments(Collections.singletonList(data), options);
    }

    /** Single-entity convenience: wraps {@code data} and delegates to the batch method. */
    default BatchResult syncJobTitle(IWorkHrJobTitlePageRespVO.JobTitle data, SyncOptions options) {
        return syncJobTitles(Collections.singletonList(data), options);
    }

    /** Single-entity convenience: wraps {@code data} and delegates to the batch method. */
    default BatchResult syncUser(IWorkHrUserPageRespVO.User data, SyncOptions options) {
        return syncUsers(Collections.singletonList(data), options);
    }
}

View File

@@ -0,0 +1,31 @@
package com.zt.plat.module.system.service.integration.iwork;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkFullSyncReqVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkFullSyncRespVO;
/**
 * iWork organization / personnel synchronization service.
 *
 * <p>Each method performs a full pull-and-apply sync for one entity type.</p>
 */
public interface IWorkSyncService {
/**
 * Full sync of departments only.
 */
IWorkFullSyncRespVO fullSyncDepartments(IWorkFullSyncReqVO reqVO);
/**
 * Full sync of subcompanies (branch companies) only.
 */
IWorkFullSyncRespVO fullSyncSubcompanies(IWorkFullSyncReqVO reqVO);
/**
 * Full sync of job titles only.
 */
IWorkFullSyncRespVO fullSyncJobTitles(IWorkFullSyncReqVO reqVO);
/**
 * Full sync of users (automatically includes the subcompanies and departments they depend on).
 */
IWorkFullSyncRespVO fullSyncUsers(IWorkFullSyncReqVO reqVO);
}

Some files were not shown because too many files have changed in this diff Show More