Compare commits

...

18 Commits

Author SHA1 Message Date
chenbowen
7f7c4210ac Merge branch 'dev' into test 2025-11-28 14:01:14 +08:00
chenbowen
db3afb5b64 Merge remote-tracking branch 'base-version/main' into dev 2025-11-28 11:07:42 +08:00
chenbowen
542466270a 1. 修复自定义 sql 中大写表名无法匹配到 mybatis 中的缓存表信息,导致表被忽略租户的问题
2. 新增 iwork feign api 调用
2025-11-28 11:05:09 +08:00
chenbowen
03ebe21670 1. 清理 iwork 无用的接口。
2. 整合 iwork 用户的密码管理策略。
2025-11-27 20:25:02 +08:00
chenbowen
64d0d4e55e 1. iwork 统一用印发起接口 2025-11-27 20:19:27 +08:00
chenbowen
22599bbc65 Merge branch 'dev' into test 2025-11-27 16:46:27 +08:00
chenbowen
240a531ee1 Merge remote-tracking branch 'base-version/main' into dev
# Conflicts:
#	zt-module-bpm/zt-module-bpm-server/src/main/java/liquibase/database/core/DmDatabase.java
2025-11-27 16:35:49 +08:00
chenbowen
00b2f6312d 修复 flowable 无法通过 dm 数据库驱动正常获取 schema 的bug 2025-11-27 16:01:05 +08:00
chenbowen
446b5ca7a4 剔除掉 swagger 不能请求的 rpc-api 2025-11-27 13:48:55 +08:00
chenbowen
28a49ce45a 修复 dm jdbc 不兼容 flowable 转义 sql 的错误 2025-11-27 13:26:30 +08:00
chenbowen
4bd0402dde 禁止事件引擎重复自动建表 2025-11-27 11:16:49 +08:00
chenbowen
0ab550123f 关闭 databus web 请求连接池 2025-11-27 10:27:30 +08:00
chenbowen
cd21239ff2 flowable 达梦迁移 2025-11-27 09:58:44 +08:00
chenbowen
837e09941a Merge branch 'dev' into test 2025-11-26 20:14:04 +08:00
chenbowen
256bf22a10 Merge remote-tracking branch 'base-version/main' into dev 2025-11-26 20:12:46 +08:00
chenbowen
76eabb6db0 修复 system 模块编译错误 2025-11-26 20:12:07 +08:00
chenbowen
06909fafea 当前登录用户新增公司编码与部门编码属性 2025-11-26 20:01:34 +08:00
qianshijiang
00956030a4 错误信息未记录到日志文件 2025-11-26 15:52:00 +08:00
91 changed files with 5225 additions and 754 deletions

View File

@@ -0,0 +1,74 @@
-- DM (Dameng) DDL: Databus API access-log table under schema "RUOYI-VUE-PRO".
-- NOTE: STORAGE(ON "MAIN", CLUSTERBTR) and CREATE OR REPLACE INDEX are DM-specific
-- syntax; this script is not portable to other engines.
-- Fix: several COMMENT literals below had lost their opening '(' (extraction damage);
-- restored to match the surviving closing ')'.
CREATE TABLE "RUOYI-VUE-PRO"."DATABUS_API_ACCESS_LOG"
(
"ID" BIGINT NOT NULL,
"TRACE_ID" VARCHAR(64) DEFAULT NULL,
"API_CODE" VARCHAR(128) DEFAULT NULL,
"API_VERSION" VARCHAR(32) DEFAULT NULL,
"REQUEST_METHOD" VARCHAR(16) DEFAULT NULL,
"REQUEST_PATH" VARCHAR(512) DEFAULT NULL,
"REQUEST_QUERY" TEXT,
"REQUEST_HEADERS" TEXT,
"REQUEST_BODY" TEXT,
"RESPONSE_STATUS" INT DEFAULT NULL,
"RESPONSE_MESSAGE" VARCHAR(500) DEFAULT NULL,
"RESPONSE_BODY" TEXT,
"STATUS" SMALLINT DEFAULT 3 NOT NULL,
"ERROR_CODE" VARCHAR(100) DEFAULT NULL,
"ERROR_MESSAGE" VARCHAR(1000) DEFAULT NULL,
"EXCEPTION_STACK" TEXT,
"CLIENT_IP" VARCHAR(64) DEFAULT NULL,
"USER_AGENT" VARCHAR(512) DEFAULT NULL,
"DURATION" BIGINT DEFAULT NULL,
"REQUEST_TIME" DATETIME(6) DEFAULT CURRENT_TIMESTAMP NOT NULL,
"RESPONSE_TIME" DATETIME(6) DEFAULT NULL,
"STEP_RESULTS" TEXT,
"EXTRA" TEXT,
"CREATOR" VARCHAR(64) DEFAULT '' NOT NULL,
"CREATE_TIME" DATETIME(6) DEFAULT CURRENT_TIMESTAMP NOT NULL,
"UPDATER" VARCHAR(64) DEFAULT '' NOT NULL,
"UPDATE_TIME" DATETIME(6) DEFAULT CURRENT_TIMESTAMP NOT NULL,
"DELETED" BIT DEFAULT '0' NOT NULL,
"TENANT_ID" BIGINT DEFAULT 0 NOT NULL,
NOT CLUSTER PRIMARY KEY("ID")) STORAGE(ON "MAIN", CLUSTERBTR) ;
COMMENT ON TABLE "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG IS 'Databus API 访问日志表';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."API_CODE" IS 'API 编码';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."API_VERSION" IS 'API 版本';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."CLIENT_IP" IS '客户端 IP';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."CREATE_TIME" IS '创建时间';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."CREATOR" IS '创建者';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."DELETED" IS '是否删除';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."DURATION" IS '请求耗时(毫秒)';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."ERROR_CODE" IS '业务错误码';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."ERROR_MESSAGE" IS '错误信息';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."EXCEPTION_STACK" IS '异常堆栈';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."EXTRA" IS '额外调试信息(JSON 字符串)';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."ID" IS '日志主键';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."REQUEST_BODY" IS '请求体(JSON 字符串)';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."REQUEST_HEADERS" IS '请求头(JSON 字符串)';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."REQUEST_METHOD" IS '请求方法';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."REQUEST_PATH" IS '请求路径';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."REQUEST_QUERY" IS '请求查询参数(JSON 字符串)';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."REQUEST_TIME" IS '请求时间';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."RESPONSE_BODY" IS '响应体(JSON 字符串)';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."RESPONSE_MESSAGE" IS '响应提示信息';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."RESPONSE_STATUS" IS '响应 HTTP 状态码';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."RESPONSE_TIME" IS '响应时间';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."STATUS" IS '访问状态0-成功 1-客户端错误 2-服务端错误 3-未知';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."STEP_RESULTS" IS '执行步骤结果(JSON 字符串)';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."TENANT_ID" IS '租户编号';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."TRACE_ID" IS '追踪 ID';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."UPDATER" IS '更新者';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."UPDATE_TIME" IS '更新时间';
COMMENT ON COLUMN "RUOYI-VUE-PRO".DATABUS_API_ACCESS_LOG."USER_AGENT" IS 'User-Agent';
-- Secondary indexes for the common query axes (trace, api, method, status, time, ip, tenant).
CREATE OR REPLACE INDEX "IDX_DATABUS_API_ACCESS_LOG_TRACE" ON "RUOYI-VUE-PRO"."DATABUS_API_ACCESS_LOG"("TRACE_ID" ASC) STORAGE(ON "MAIN", CLUSTERBTR) ;
CREATE OR REPLACE INDEX "IDX_DATABUS_API_ACCESS_LOG_CODE" ON "RUOYI-VUE-PRO"."DATABUS_API_ACCESS_LOG"("API_CODE" ASC) STORAGE(ON "MAIN", CLUSTERBTR) ;
CREATE OR REPLACE INDEX "IDX_DATABUS_API_ACCESS_LOG_METHOD" ON "RUOYI-VUE-PRO"."DATABUS_API_ACCESS_LOG"("REQUEST_METHOD" ASC) STORAGE(ON "MAIN", CLUSTERBTR) ;
CREATE OR REPLACE INDEX "IDX_DATABUS_API_ACCESS_LOG_STATUS" ON "RUOYI-VUE-PRO"."DATABUS_API_ACCESS_LOG"("STATUS" ASC) STORAGE(ON "MAIN", CLUSTERBTR) ;
CREATE OR REPLACE INDEX "IDX_DATABUS_API_ACCESS_LOG_RESP_STATUS" ON "RUOYI-VUE-PRO"."DATABUS_API_ACCESS_LOG"("RESPONSE_STATUS" ASC) STORAGE(ON "MAIN", CLUSTERBTR) ;
CREATE OR REPLACE INDEX "IDX_DATABUS_API_ACCESS_LOG_REQUEST_TIME" ON "RUOYI-VUE-PRO"."DATABUS_API_ACCESS_LOG"("REQUEST_TIME" ASC) STORAGE(ON "MAIN", CLUSTERBTR) ;
CREATE OR REPLACE INDEX "IDX_DATABUS_API_ACCESS_LOG_CLIENT_IP" ON "RUOYI-VUE-PRO"."DATABUS_API_ACCESS_LOG"("CLIENT_IP" ASC) STORAGE(ON "MAIN", CLUSTERBTR) ;
CREATE OR REPLACE INDEX "IDX_DATABUS_API_ACCESS_LOG_TENANT" ON "RUOYI-VUE-PRO"."DATABUS_API_ACCESS_LOG"("TENANT_ID" ASC) STORAGE(ON "MAIN", CLUSTERBTR) ;

View File

@@ -182,8 +182,10 @@ public class BusinessDeptHandleUtil {
if (loginUser != null) { if (loginUser != null) {
loginUser.setVisitCompanyId(Long.valueOf(info.getCompanyId())); loginUser.setVisitCompanyId(Long.valueOf(info.getCompanyId()));
loginUser.setVisitCompanyName(info.getCompanyName()); loginUser.setVisitCompanyName(info.getCompanyName());
loginUser.setVisitCompanyCode(info.getCompanyName());
loginUser.setVisitDeptId(Long.valueOf(info.getDeptId())); loginUser.setVisitDeptId(Long.valueOf(info.getDeptId()));
loginUser.setVisitDeptName(info.getDeptName()); loginUser.setVisitDeptName(info.getDeptName());
loginUser.setVisitDeptCode(info.getDeptName());
} }
request.setAttribute(WebFrameworkUtils.HEADER_VISIT_COMPANY_ID, info.getCompanyId()); request.setAttribute(WebFrameworkUtils.HEADER_VISIT_COMPANY_ID, info.getCompanyId());
if (info.getCompanyName() != null) { if (info.getCompanyName() != null) {

View File

@@ -1,12 +1,12 @@
package com.zt.plat.framework.tenant.core.db; package com.zt.plat.framework.tenant.core.db;
import com.zt.plat.framework.tenant.config.TenantProperties;
import com.zt.plat.framework.tenant.core.aop.TenantIgnore;
import com.zt.plat.framework.tenant.core.context.TenantContextHolder;
import com.baomidou.mybatisplus.core.metadata.TableInfo; import com.baomidou.mybatisplus.core.metadata.TableInfo;
import com.baomidou.mybatisplus.core.metadata.TableInfoHelper; import com.baomidou.mybatisplus.core.metadata.TableInfoHelper;
import com.baomidou.mybatisplus.extension.plugins.handler.TenantLineHandler; import com.baomidou.mybatisplus.extension.plugins.handler.TenantLineHandler;
import com.baomidou.mybatisplus.extension.toolkit.SqlParserUtils; import com.baomidou.mybatisplus.extension.toolkit.SqlParserUtils;
import com.zt.plat.framework.tenant.config.TenantProperties;
import com.zt.plat.framework.tenant.core.aop.TenantIgnore;
import com.zt.plat.framework.tenant.core.context.TenantContextHolder;
import net.sf.jsqlparser.expression.Expression; import net.sf.jsqlparser.expression.Expression;
import net.sf.jsqlparser.expression.LongValue; import net.sf.jsqlparser.expression.LongValue;
@@ -69,7 +69,12 @@ public class TenantDatabaseInterceptor implements TenantLineHandler {
// 找不到的表,说明不是 zt 项目里的,不进行拦截(忽略租户) // 找不到的表,说明不是 zt 项目里的,不进行拦截(忽略租户)
TableInfo tableInfo = TableInfoHelper.getTableInfo(tableName); TableInfo tableInfo = TableInfoHelper.getTableInfo(tableName);
if (tableInfo == null) { if (tableInfo == null) {
return true; tableName = tableName.toLowerCase();
tableInfo = TableInfoHelper.getTableInfo(tableName);
}
if (tableInfo == null) {
tableName = tableName.toLowerCase();
tableInfo = TableInfoHelper.getTableInfo(tableName);
} }
// 如果继承了 TenantBaseDO 基类,显然不忽略租户 // 如果继承了 TenantBaseDO 基类,显然不忽略租户
if (TenantBaseDO.class.isAssignableFrom(tableInfo.getEntityType())) { if (TenantBaseDO.class.isAssignableFrom(tableInfo.getEntityType())) {

View File

@@ -73,9 +73,11 @@ public class LoginUser {
private Long visitCompanyId; private Long visitCompanyId;
private String visitCompanyName; private String visitCompanyName;
private String visitCompanyCode;
private Long visitDeptId; private Long visitDeptId;
private String visitDeptName; private String visitDeptName;
private String visitDeptCode;
public void setContext(String key, Object value) { public void setContext(String key, Object value) {
if (context == null) { if (context == null) {

View File

@@ -1,5 +1,6 @@
package com.zt.plat.framework.swagger.config; package com.zt.plat.framework.swagger.config;
import com.zt.plat.framework.common.enums.RpcConstants;
import io.swagger.v3.oas.models.Components; import io.swagger.v3.oas.models.Components;
import io.swagger.v3.oas.models.OpenAPI; import io.swagger.v3.oas.models.OpenAPI;
import io.swagger.v3.oas.models.info.Contact; import io.swagger.v3.oas.models.info.Contact;
@@ -11,6 +12,7 @@ import io.swagger.v3.oas.models.parameters.Parameter;
import io.swagger.v3.oas.models.security.SecurityRequirement; import io.swagger.v3.oas.models.security.SecurityRequirement;
import io.swagger.v3.oas.models.security.SecurityScheme; import io.swagger.v3.oas.models.security.SecurityScheme;
import org.springdoc.core.customizers.OpenApiBuilderCustomizer; import org.springdoc.core.customizers.OpenApiBuilderCustomizer;
import org.springdoc.core.customizers.OpenApiCustomizer;
import org.springdoc.core.customizers.ServerBaseUrlCustomizer; import org.springdoc.core.customizers.ServerBaseUrlCustomizer;
import org.springdoc.core.models.GroupedOpenApi; import org.springdoc.core.models.GroupedOpenApi;
import org.springdoc.core.properties.SpringDocConfigProperties; import org.springdoc.core.properties.SpringDocConfigProperties;
@@ -123,12 +125,26 @@ public class ZtSwaggerAutoConfiguration {
return GroupedOpenApi.builder() return GroupedOpenApi.builder()
.group(group) .group(group)
.pathsToMatch("/admin-api/" + path + "/**", "/app-api/" + path + "/**") .pathsToMatch("/admin-api/" + path + "/**", "/app-api/" + path + "/**")
.pathsToExclude(RpcConstants.RPC_API_PREFIX + "/**")
.addOperationCustomizer((operation, handlerMethod) -> operation .addOperationCustomizer((operation, handlerMethod) -> operation
.addParametersItem(buildTenantHeaderParameter()) .addParametersItem(buildTenantHeaderParameter())
.addParametersItem(buildSecurityHeaderParameter())) .addParametersItem(buildSecurityHeaderParameter()))
.build(); .build();
} }
@Bean
public OpenApiCustomizer rpcApiPathExclusionCustomiser() {
return openApi -> {
if (openApi == null || openApi.getPaths() == null) {
return;
}
openApi.getPaths().entrySet().removeIf(entry -> {
String path = entry.getKey();
return path != null && path.startsWith(RpcConstants.RPC_API_PREFIX);
});
};
}
/** /**
* 构建 Tenant 租户编号请求头参数 * 构建 Tenant 租户编号请求头参数
* *

View File

@@ -5,6 +5,10 @@
<springProperty scope="context" name="zt.info.base-package" source="zt.info.base-package"/> <springProperty scope="context" name="zt.info.base-package" source="zt.info.base-package"/>
<!-- 格式化输出:%d 表示日期,%X{tid} SkWalking 链路追踪编号,%thread 表示线程名,%-5level级别从左显示 5 个字符宽度,%msg日志消息%n是换行符 --> <!-- 格式化输出:%d 表示日期,%X{tid} SkWalking 链路追踪编号,%thread 表示线程名,%-5level级别从左显示 5 个字符宽度,%msg日志消息%n是换行符 -->
<property name="PATTERN_DEFAULT" value="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} | %highlight(${LOG_LEVEL_PATTERN:-%5p} ${PID:- }) | %boldYellow(%thread [%tid]) %boldGreen(%-40.40logger{39}) | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/> <property name="PATTERN_DEFAULT" value="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} | %highlight(${LOG_LEVEL_PATTERN:-%5p} ${PID:- }) | %boldYellow(%thread [%tid]) %boldGreen(%-40.40logger{39}) | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>
<!--应用名称-->
<springProperty scope="context" name="spring.application.name" source="spring.application.name"/>
<!-- 日志输出路径 -->
<property name="LOG_DIR" value="${user.home}/logs/${spring.application.name}"/>
<!-- 控制台 Appender --> <!-- 控制台 Appender -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">      <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">     
@@ -56,11 +60,29 @@
</encoder> </encoder>
</appender> </appender>
<!-- ERROR 级别日志 -->
<appender name="ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_DIR}-error.log</file>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_DIR}-error.%d{yyyy-MM-dd}.log</fileNamePattern>
<maxHistory>30</maxHistory> <!-- 保留30天的日志 -->
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<!--logback的日志级别 FATAL > ERROR > WARN > INFO > DEBUG--> <!--logback的日志级别 FATAL > ERROR > WARN > INFO > DEBUG-->
<!-- 本地环境 --> <!-- 本地环境 -->
<springProfile name="local,dev"> <springProfile name="local,dev">
<root level="WARN"> <root level="WARN">
<appender-ref ref="STDOUT"/> <appender-ref ref="STDOUT"/>
<appender-ref ref="ERROR"/>
<appender-ref ref="GRPC"/> <!-- 本地环境下,如果不想接入 SkyWalking 日志服务,可以注释掉本行 --> <appender-ref ref="GRPC"/> <!-- 本地环境下,如果不想接入 SkyWalking 日志服务,可以注释掉本行 -->
<appender-ref ref="ASYNC"/> <!-- 本地环境下,如果不想打印日志,可以注释掉本行 --> <appender-ref ref="ASYNC"/> <!-- 本地环境下,如果不想打印日志,可以注释掉本行 -->
</root> </root>
@@ -70,6 +92,7 @@
<springProfile name="dev,test,stage,prod,default"> <springProfile name="dev,test,stage,prod,default">
<root level="INFO"> <root level="INFO">
<appender-ref ref="STDOUT"/> <appender-ref ref="STDOUT"/>
<appender-ref ref="ERROR"/>
<appender-ref ref="ASYNC"/> <appender-ref ref="ASYNC"/>
<appender-ref ref="GRPC"/> <appender-ref ref="GRPC"/>
</root> </root>

View File

@@ -8,17 +8,25 @@ import com.zt.plat.module.bpm.framework.flowable.core.event.BpmProcessInstanceEv
import com.zt.plat.module.system.api.user.AdminUserApi; import com.zt.plat.module.system.api.user.AdminUserApi;
import org.flowable.common.engine.api.delegate.FlowableFunctionDelegate; import org.flowable.common.engine.api.delegate.FlowableFunctionDelegate;
import org.flowable.common.engine.api.delegate.event.FlowableEventListener; import org.flowable.common.engine.api.delegate.event.FlowableEventListener;
import org.flowable.engine.ProcessEngineConfiguration;
import org.flowable.spring.SpringProcessEngineConfiguration; import org.flowable.spring.SpringProcessEngineConfiguration;
import org.flowable.spring.boot.EngineConfigurationConfigurer; import org.flowable.spring.boot.EngineConfigurationConfigurer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.ObjectProvider; import org.springframework.beans.factory.ObjectProvider;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.ApplicationEventPublisher; import org.springframework.context.ApplicationEventPublisher;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.AsyncListenableTaskExecutor; import org.springframework.core.task.AsyncListenableTaskExecutor;
import org.springframework.jdbc.datasource.DataSourceUtils;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import java.util.List; import java.util.List;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
/** /**
* BPM 模块的 Flowable 配置类 * BPM 模块的 Flowable 配置类
@@ -28,6 +36,8 @@ import java.util.List;
@Configuration(proxyBeanMethods = false) @Configuration(proxyBeanMethods = false)
public class BpmFlowableConfiguration { public class BpmFlowableConfiguration {
private static final Logger log = LoggerFactory.getLogger(BpmFlowableConfiguration.class);
/** /**
* 参考 {@link org.flowable.spring.boot.FlowableJobConfiguration} 类,创建对应的 AsyncListenableTaskExecutor Bean * 参考 {@link org.flowable.spring.boot.FlowableJobConfiguration} 类,创建对应的 AsyncListenableTaskExecutor Bean
* *
@@ -69,6 +79,37 @@ public class BpmFlowableConfiguration {
}; };
} }
@Bean
public EngineConfigurationConfigurer<SpringProcessEngineConfiguration> dmProcessEngineConfigurationConfigurer(DataSource dataSource) {
return configuration -> {
try {
configureDmCompatibility(configuration, dataSource);
} catch (SQLException ex) {
log.warn("Failed to inspect datasource for DM compatibility; Flowable will keep default settings", ex);
}
};
}
private void configureDmCompatibility(SpringProcessEngineConfiguration configuration, DataSource dataSource) throws SQLException {
Connection connection = null;
try {
connection = DataSourceUtils.getConnection(dataSource);
DatabaseMetaData metaData = connection.getMetaData();
String productName = metaData.getDatabaseProductName();
String jdbcUrl = metaData.getURL();
boolean dmProduct = productName != null && productName.toLowerCase().contains("dm");
boolean dmUrl = jdbcUrl != null && jdbcUrl.toLowerCase().startsWith("jdbc:dm");
if (!dmProduct && !dmUrl) {
return;
}
log.info("Detected DM database (product='{}'); enabling Flowable Oracle compatibility with automatic schema updates", productName);
configuration.setDatabaseSchemaUpdate(ProcessEngineConfiguration.DB_SCHEMA_UPDATE_TRUE);
configuration.setDatabaseType("oracle");
} finally {
DataSourceUtils.releaseConnection(connection, dataSource);
}
}
// =========== 审批人相关的 Bean ========== // =========== 审批人相关的 Bean ==========
@Bean @Bean

View File

@@ -5,6 +5,25 @@
package liquibase.database.core; package liquibase.database.core;
import java.lang.reflect.Method;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.ResourceBundle;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import liquibase.CatalogAndSchema; import liquibase.CatalogAndSchema;
import liquibase.GlobalConfiguration; import liquibase.GlobalConfiguration;
import liquibase.Scope; import liquibase.Scope;
@@ -23,17 +42,15 @@ import liquibase.statement.UniqueConstraint;
import liquibase.statement.core.RawCallStatement; import liquibase.statement.core.RawCallStatement;
import liquibase.statement.core.RawParameterizedSqlStatement; import liquibase.statement.core.RawParameterizedSqlStatement;
import liquibase.structure.DatabaseObject; import liquibase.structure.DatabaseObject;
import liquibase.structure.core.*; import liquibase.structure.core.Catalog;
import liquibase.structure.core.Column;
import liquibase.structure.core.Index;
import liquibase.structure.core.PrimaryKey;
import liquibase.structure.core.Schema;
import liquibase.util.JdbcUtil; import liquibase.util.JdbcUtil;
import liquibase.util.StringUtil; import liquibase.util.StringUtil;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import java.lang.reflect.Method;
import java.sql.*;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class DmDatabase extends AbstractJdbcDatabase { public class DmDatabase extends AbstractJdbcDatabase {
private static final String PROXY_USER_REGEX = ".*(?:thin|oci)\\:(.+)/@.*"; private static final String PROXY_USER_REGEX = ".*(?:thin|oci)\\:(.+)/@.*";
public static final Pattern PROXY_USER_PATTERN = Pattern.compile(".*(?:thin|oci)\\:(.+)/@.*"); public static final Pattern PROXY_USER_PATTERN = Pattern.compile(".*(?:thin|oci)\\:(.+)/@.*");
@@ -98,6 +115,7 @@ public class DmDatabase extends AbstractJdbcDatabase {
public void setConnection(DatabaseConnection conn) { public void setConnection(DatabaseConnection conn) {
this.reservedWords.addAll(Arrays.asList("GROUP", "USER", "SESSION", "PASSWORD", "RESOURCE", "START", "SIZE", "UID", "DESC", "ORDER")); this.reservedWords.addAll(Arrays.asList("GROUP", "USER", "SESSION", "PASSWORD", "RESOURCE", "START", "SIZE", "UID", "DESC", "ORDER"));
Connection sqlConn = null; Connection sqlConn = null;
boolean dmDatabase = false;
if (!(conn instanceof OfflineConnection)) { if (!(conn instanceof OfflineConnection)) {
try { try {
if (conn instanceof JdbcConnection) { if (conn instanceof JdbcConnection) {
@@ -124,26 +142,42 @@ public class DmDatabase extends AbstractJdbcDatabase {
Scope.getCurrentScope().getLog(this.getClass()).info("Could not set remarks reporting on OracleDatabase: " + e.getMessage()); Scope.getCurrentScope().getLog(this.getClass()).info("Could not set remarks reporting on OracleDatabase: " + e.getMessage());
} }
CallableStatement statement = null;
try { try {
statement = sqlConn.prepareCall("{call DBMS_UTILITY.DB_VERSION(?,?)}"); DatabaseMetaData metaData = sqlConn.getMetaData();
statement.registerOutParameter(1, 12); if (metaData != null) {
statement.registerOutParameter(2, 12); String productName = metaData.getDatabaseProductName();
statement.execute(); dmDatabase = productName != null && PRODUCT_NAME.equalsIgnoreCase(productName);
String compatibleVersion = statement.getString(2); if (dmDatabase) {
if (compatibleVersion != null) { this.databaseMajorVersion = metaData.getDatabaseMajorVersion();
Matcher majorVersionMatcher = VERSION_PATTERN.matcher(compatibleVersion); this.databaseMinorVersion = metaData.getDatabaseMinorVersion();
if (majorVersionMatcher.matches()) {
this.databaseMajorVersion = Integer.valueOf(majorVersionMatcher.group(1));
this.databaseMinorVersion = Integer.valueOf(majorVersionMatcher.group(2));
} }
} }
} catch (SQLException e) { } catch (SQLException e) {
String message = "Cannot read from DBMS_UTILITY.DB_VERSION: " + e.getMessage(); Scope.getCurrentScope().getLog(this.getClass()).info("Unable to inspect database metadata for DM version detection: " + e.getMessage());
Scope.getCurrentScope().getLog(this.getClass()).info("Could not set check compatibility mode on OracleDatabase, assuming not running in any sort of compatibility mode: " + message); }
} finally {
JdbcUtil.closeStatement(statement); if (!dmDatabase) {
CallableStatement statement = null;
try {
statement = sqlConn.prepareCall("{call DBMS_UTILITY.DB_VERSION(?,?)}");
statement.registerOutParameter(1, 12);
statement.registerOutParameter(2, 12);
statement.execute();
String compatibleVersion = statement.getString(2);
if (compatibleVersion != null) {
Matcher majorVersionMatcher = VERSION_PATTERN.matcher(compatibleVersion);
if (majorVersionMatcher.matches()) {
this.databaseMajorVersion = Integer.valueOf(majorVersionMatcher.group(1));
this.databaseMinorVersion = Integer.valueOf(majorVersionMatcher.group(2));
}
}
} catch (SQLException e) {
String message = "Cannot read from DBMS_UTILITY.DB_VERSION: " + e.getMessage();
Scope.getCurrentScope().getLog(this.getClass()).info("Could not set check compatibility mode on OracleDatabase, assuming not running in any sort of compatibility mode: " + message);
} finally {
JdbcUtil.closeStatement(statement);
}
} }
if (GlobalConfiguration.DDL_LOCK_TIMEOUT.getCurrentValue() != null) { if (GlobalConfiguration.DDL_LOCK_TIMEOUT.getCurrentValue() != null) {
@@ -250,7 +284,15 @@ public class DmDatabase extends AbstractJdbcDatabase {
} }
public boolean isCorrectDatabaseImplementation(DatabaseConnection conn) throws DatabaseException { public boolean isCorrectDatabaseImplementation(DatabaseConnection conn) throws DatabaseException {
return "oracle".equalsIgnoreCase(conn.getDatabaseProductName()); String databaseProductName = conn == null ? null : conn.getDatabaseProductName();
if (databaseProductName == null) {
return false;
}
if (PRODUCT_NAME.equalsIgnoreCase(databaseProductName)) {
return true;
}
// Flowable 历史上将 DM 映射为 Oracle 元数据,因此这里同样接受 Oracle 以保持兼容
return "oracle".equalsIgnoreCase(databaseProductName);
} }
public String getDefaultDriver(String url) { public String getDefaultDriver(String url) {

View File

@@ -0,0 +1,32 @@
package liquibase.datatype.core;
import liquibase.database.Database;
import liquibase.database.core.DmDatabase;
import liquibase.datatype.DataTypeInfo;
import liquibase.datatype.DatabaseDataType;
/**
 * Liquibase boolean type override for the DM (Dameng) database.
 *
 * DM is declared as supported in addition to whatever the parent
 * {@code BooleanType} supports, and booleans are mapped to NUMBER(1)
 * on DM (the Oracle-style convention); every other engine keeps the
 * parent mapping.
 */
@DataTypeInfo(
name = "boolean",
aliases = {"java.sql.Types.BOOLEAN", "java.lang.Boolean", "bit", "bool"},
minParameters = 0,
maxParameters = 0,
priority = 2
)
public class DmBooleanType extends BooleanType {

    @Override
    public boolean supports(Database database) {
        // DM is always supported; defer all other engines to the parent type.
        return database instanceof DmDatabase || super.supports(database);
    }

    @Override
    public DatabaseDataType toDatabaseDataType(Database database) {
        // DM has no native BOOLEAN column type: store as NUMBER(1).
        return database instanceof DmDatabase
                ? new DatabaseDataType("NUMBER", 1)
                : super.toDatabaseDataType(database);
    }
}

View File

@@ -0,0 +1,354 @@
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.common.engine.impl.db;
import org.apache.ibatis.session.SqlSessionFactory;
import org.flowable.common.engine.api.FlowableException;
import org.flowable.common.engine.impl.context.Context;
import org.flowable.common.engine.impl.interceptor.CommandContext;
import org.flowable.common.engine.impl.interceptor.Session;
import org.flowable.common.engine.impl.interceptor.SessionFactory;
import org.flowable.common.engine.impl.persistence.cache.EntityCache;
import org.flowable.common.engine.impl.persistence.entity.Entity;
import java.sql.SQLException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
/**
* @author Tom Baeyens
* @author Joram Barrez
*/
public class DbSqlSessionFactory implements SessionFactory {
protected Map<String, Map<String, String>> databaseSpecificStatements = new HashMap<>();
protected String databaseType;
protected String databaseTablePrefix = "";
protected boolean tablePrefixIsSchema;
protected String databaseCatalog;
protected String databaseSchema;
protected SqlSessionFactory sqlSessionFactory;
protected Map<String, String> statementMappings;
protected Map<Class<?>, String> insertStatements = new ConcurrentHashMap<>();
protected Map<Class<?>, String> updateStatements = new ConcurrentHashMap<>();
protected Map<Class<?>, String> deleteStatements = new ConcurrentHashMap<>();
protected Map<Class<?>, String> selectStatements = new ConcurrentHashMap<>();
protected List<Class<? extends Entity>> insertionOrder = new ArrayList<>();
protected List<Class<? extends Entity>> deletionOrder = new ArrayList<>();
protected boolean isDbHistoryUsed = true;
protected Set<Class<? extends Entity>> bulkInserteableEntityClasses = new HashSet<>();
protected Map<Class<?>, String> bulkInsertStatements = new ConcurrentHashMap<>();
protected int maxNrOfStatementsInBulkInsert = 100;
protected Map<String, Class<?>> logicalNameToClassMapping = new ConcurrentHashMap<>();
protected boolean usePrefixId;
/**
 * @param usePrefixId whether entity ids should be generated with a type prefix
 *                    (stored as-is; consumed outside this view — confirm usage)
 */
public DbSqlSessionFactory(boolean usePrefixId) {
this.usePrefixId = usePrefixId;
}
/** Identifies the session class this factory produces: {@link DbSqlSession}. */
@Override
public Class<?> getSessionType() {
return DbSqlSession.class;
}
/**
 * Opens a new {@link DbSqlSession} and applies the configured schema/catalog
 * to its JDBC connection.
 *
 * DM (Dameng) compatibility: when no schema has been configured, the schema
 * of the underlying connection is adopted so that Flowable's metadata lookups
 * resolve against the correct owner.
 *
 * @throws FlowableException if the schema/catalog cannot be read or applied
 */
@Override
public Session openSession(CommandContext commandContext) {
    DbSqlSession dbSqlSession = createDbSqlSession();
    try {
        if (getDatabaseSchema() == null || getDatabaseSchema().length() == 0) {
            setDatabaseSchema(dbSqlSession.getSqlSession().getConnection().getSchema());
        }
        // Fix: removed a stray, no-op getConnection().getSchema() call whose
        // result was discarded.
    } catch (SQLException e) {
        // Fix: was a bare RuntimeException; use FlowableException for consistency
        // with the handlers below (FlowableException is a RuntimeException, so
        // existing callers are unaffected).
        throw new FlowableException("Could not read database schema from connection", e);
    }
    if (getDatabaseSchema() != null && getDatabaseSchema().length() > 0) {
        try {
            dbSqlSession.getSqlSession().getConnection().setSchema(getDatabaseSchema());
        } catch (SQLException e) {
            throw new FlowableException("Could not set database schema on connection", e);
        }
    }
    if (getDatabaseCatalog() != null && getDatabaseCatalog().length() > 0) {
        try {
            dbSqlSession.getSqlSession().getConnection().setCatalog(getDatabaseCatalog());
        } catch (SQLException e) {
            throw new FlowableException("Could not set database catalog on connection", e);
        }
    }
    // NOTE(review): getConnection() has already been dereferenced above, so this
    // guard is purely defensive; kept to preserve the original contract.
    if (dbSqlSession.getSqlSession().getConnection() == null) {
        throw new FlowableException("Invalid dbSqlSession: no active connection found");
    }
    return dbSqlSession;
}
/** Creates a DbSqlSession bound to this factory and the command context's entity cache. */
protected DbSqlSession createDbSqlSession() {
return new DbSqlSession(this, Context.getCommandContext().getSession(EntityCache.class));
}
// insert, update and delete statements
// /////////////////////////////////////
// CRUD statement-name lookups. Each delegates to getStatement(), which derives
// a MyBatis statement id of the form "<prefix><EntityName-without-suffix>",
// e.g. "insertTask" for TaskEntityImpl, and caches the result per entity class.
public String getInsertStatement(Entity object) {
return getStatement(object.getClass(), insertStatements, "insert");
}
public String getInsertStatement(Class<? extends Entity> clazz) {
return getStatement(clazz, insertStatements, "insert");
}
public String getUpdateStatement(Entity object) {
return getStatement(object.getClass(), updateStatements, "update");
}
public String getDeleteStatement(Class<?> entityClass) {
return getStatement(entityClass, deleteStatements, "delete");
}
public String getSelectStatement(Class<?> entityClass) {
return getStatement(entityClass, selectStatements, "select");
}
/**
 * Derives and caches the MyBatis statement id for an entity class.
 *
 * The id is {@code prefix + simpleName} with the trailing "EntityImpl"
 * (10 chars) or "Entity" (6 chars) suffix stripped, e.g.
 * {@code insert + TaskEntityImpl -> "insertTask"}.
 *
 * @param entityClass      entity class the statement targets
 * @param cachedStatements per-prefix cache (ConcurrentHashMap) of derived ids
 * @param prefix           operation prefix: "insert", "update", "delete", "select", ...
 * @return the cached or freshly derived statement id
 */
protected String getStatement(Class<?> entityClass, Map<Class<?>, String> cachedStatements, String prefix) {
    return cachedStatements.computeIfAbsent(entityClass, clazz -> {
        String raw = prefix + clazz.getSimpleName();
        // Strip "EntityImpl" when the name ends with "Impl", else strip "Entity".
        int suffixLength = raw.endsWith("Impl") ? 10 : 6;
        return raw.substring(0, raw.length() - suffixLength);
    });
}
// db specific mappings
// /////////////////////////////////////////////////////

/**
 * Registers a database-specific override: when {@code databaseType} is the
 * active database, the engine statement {@code activitiStatement} is executed
 * through {@code ibatisStatement} instead. The per-database override map is
 * created lazily on first registration.
 */
protected void addDatabaseSpecificStatement(String databaseType, String activitiStatement, String ibatisStatement) {
    Map<String, String> overrides = databaseSpecificStatements.get(databaseType);
    if (overrides == null) {
        overrides = new HashMap<>();
        databaseSpecificStatements.put(databaseType, overrides);
    }
    overrides.put(activitiStatement, ibatisStatement);
}
/**
 * Translates a logical statement name through the active database-specific
 * mapping table; the name is returned unchanged when no mappings are loaded
 * or no override exists for it.
 */
public String mapStatement(String statement) {
    if (statementMappings == null) {
        return statement;
    }
    String override = statementMappings.get(statement);
    if (override == null) {
        return statement;
    }
    return override;
}
// customized getters and setters
// ///////////////////////////////////////////
// Switches the active database type and, as a side effect, swaps in the
// statement-name overrides registered for that type via
// addDatabaseSpecificStatement (statementMappings becomes null when none exist).
public void setDatabaseType(String databaseType) {
this.databaseType = databaseType;
this.statementMappings = databaseSpecificStatements.get(databaseType);
}
// Constant-first equals keeps these checks null-safe when no database type
// has been configured yet.
public boolean isMysql() {
return "mysql".equals(getDatabaseType());
}
public boolean isOracle() {
return "oracle".equals(getDatabaseType());
}
// True only when a bulk-insertable set is configured AND contains the class.
public Boolean isBulkInsertable(Class<? extends Entity> entityClass) {
return bulkInserteableEntityClasses != null && bulkInserteableEntityClasses.contains(entityClass);
}
// Resolves (and caches) the MyBatis bulk-insert statement id for the class.
@SuppressWarnings("rawtypes")
public String getBulkInsertStatement(Class clazz) {
return getStatement(clazz, bulkInsertStatements, "bulkInsert");
}
// Bulk-insert configuration accessors: which entity classes may be batched,
// the per-batch statement cap, and the cached bulk-insert statement ids.
public Set<Class<? extends Entity>> getBulkInserteableEntityClasses() {
return bulkInserteableEntityClasses;
}
public void setBulkInserteableEntityClasses(Set<Class<? extends Entity>> bulkInserteableEntityClasses) {
this.bulkInserteableEntityClasses = bulkInserteableEntityClasses;
}
public int getMaxNrOfStatementsInBulkInsert() {
return maxNrOfStatementsInBulkInsert;
}
public void setMaxNrOfStatementsInBulkInsert(int maxNrOfStatementsInBulkInsert) {
this.maxNrOfStatementsInBulkInsert = maxNrOfStatementsInBulkInsert;
}
public Map<Class<?>, String> getBulkInsertStatements() {
return bulkInsertStatements;
}
public void setBulkInsertStatements(Map<Class<?>, String> bulkInsertStatements) {
this.bulkInsertStatements = bulkInsertStatements;
}
// getters and setters //////////////////////////////////////////////////////
// Plain JavaBean accessors for the factory's configuration. The collection
// setters replace the backing maps wholesale; presumably these are only
// called during engine bootstrap, before sessions are created — TODO confirm.
public SqlSessionFactory getSqlSessionFactory() {
return sqlSessionFactory;
}
public void setSqlSessionFactory(SqlSessionFactory sqlSessionFactory) {
this.sqlSessionFactory = sqlSessionFactory;
}
public String getDatabaseType() {
return databaseType;
}
public Map<String, Map<String, String>> getDatabaseSpecificStatements() {
return databaseSpecificStatements;
}
public void setDatabaseSpecificStatements(Map<String, Map<String, String>> databaseSpecificStatements) {
this.databaseSpecificStatements = databaseSpecificStatements;
}
public Map<String, String> getStatementMappings() {
return statementMappings;
}
public void setStatementMappings(Map<String, String> statementMappings) {
this.statementMappings = statementMappings;
}
public Map<Class<?>, String> getInsertStatements() {
return insertStatements;
}
public void setInsertStatements(Map<Class<?>, String> insertStatements) {
this.insertStatements = insertStatements;
}
public Map<Class<?>, String> getUpdateStatements() {
return updateStatements;
}
public void setUpdateStatements(Map<Class<?>, String> updateStatements) {
this.updateStatements = updateStatements;
}
public Map<Class<?>, String> getDeleteStatements() {
return deleteStatements;
}
public void setDeleteStatements(Map<Class<?>, String> deleteStatements) {
this.deleteStatements = deleteStatements;
}
public Map<Class<?>, String> getSelectStatements() {
return selectStatements;
}
public void setSelectStatements(Map<Class<?>, String> selectStatements) {
this.selectStatements = selectStatements;
}
public boolean isDbHistoryUsed() {
return isDbHistoryUsed;
}
public void setDbHistoryUsed(boolean isDbHistoryUsed) {
this.isDbHistoryUsed = isDbHistoryUsed;
}
public void setDatabaseTablePrefix(String databaseTablePrefix) {
this.databaseTablePrefix = databaseTablePrefix;
}
public String getDatabaseTablePrefix() {
return databaseTablePrefix;
}
public String getDatabaseCatalog() {
return databaseCatalog;
}
public void setDatabaseCatalog(String databaseCatalog) {
this.databaseCatalog = databaseCatalog;
}
public String getDatabaseSchema() {
return databaseSchema;
}
public void setDatabaseSchema(String databaseSchema) {
this.databaseSchema = databaseSchema;
}
public void setTablePrefixIsSchema(boolean tablePrefixIsSchema) {
this.tablePrefixIsSchema = tablePrefixIsSchema;
}
public boolean isTablePrefixIsSchema() {
return tablePrefixIsSchema;
}
public List<Class<? extends Entity>> getInsertionOrder() {
return insertionOrder;
}
public void setInsertionOrder(List<Class<? extends Entity>> insertionOrder) {
this.insertionOrder = insertionOrder;
}
public List<Class<? extends Entity>> getDeletionOrder() {
return deletionOrder;
}
public void setDeletionOrder(List<Class<? extends Entity>> deletionOrder) {
this.deletionOrder = deletionOrder;
}
// Registers one logical-name -> entity-class mapping (additive, unlike the
// bulk setter below).
public void addLogicalEntityClassMapping(String logicalName, Class<?> entityClass) {
logicalNameToClassMapping.put(logicalName, entityClass);
}
public Map<String, Class<?>> getLogicalNameToClassMapping() {
return logicalNameToClassMapping;
}
public void setLogicalNameToClassMapping(Map<String, Class<?>> logicalNameToClassMapping) {
this.logicalNameToClassMapping = logicalNameToClassMapping;
}
public boolean isUsePrefixId() {
return usePrefixId;
}
public void setUsePrefixId(boolean usePrefixId) {
this.usePrefixId = usePrefixId;
}
}

View File

@@ -13,6 +13,7 @@ liquibase.database.core.MariaDBDatabase
liquibase.database.core.MockDatabase liquibase.database.core.MockDatabase
liquibase.database.core.MySQLDatabase liquibase.database.core.MySQLDatabase
liquibase.database.core.OracleDatabase liquibase.database.core.OracleDatabase
liquibase.database.core.DmDatabase
liquibase.database.core.PostgresDatabase liquibase.database.core.PostgresDatabase
liquibase.database.core.SQLiteDatabase liquibase.database.core.SQLiteDatabase
liquibase.database.core.SybaseASADatabase liquibase.database.core.SybaseASADatabase

View File

@@ -0,0 +1 @@
liquibase.datatype.core.DmBooleanType

View File

@@ -39,14 +39,14 @@ spring:
primary: master primary: master
datasource: datasource:
master: master:
url: jdbc:mysql://172.16.46.247:4787/ruoyi-vue-pro?useSSL=false&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true&nullCatalogMeansCurrent=true&rewriteBatchedStatements=true # MySQL Connector/J 8.X 连接的示例 url: jdbc:dm://172.16.46.247:1050?schema=BPM
username: jygk-test username: SYSDBA
password: Zgty@0527 password: pgbsci6ddJ6Sqj@e
slave: # 模拟从库,可根据自己需要修改 # 模拟从库,可根据自己需要修改 slave: # 模拟从库,可根据自己需要修改 # 模拟从库,可根据自己需要修改
lazy: true # 开启懒加载,保证启动速度 lazy: true # 开启懒加载,保证启动速度
url: jdbc:mysql://172.16.46.247:4787/ruoyi-vue-pro?useSSL=false&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true&nullCatalogMeansCurrent=true&rewriteBatchedStatements=true # MySQL Connector/J 8.X 连接的示例 url: jdbc:dm://172.16.46.247:1050?schema=BPM
username: jygk-test username: SYSDBA
password: Zgty@0527 password: pgbsci6ddJ6Sqj@e
# Redis 配置。Redisson 默认的配置足够使用,一般不需要进行调优 # Redis 配置。Redisson 默认的配置足够使用,一般不需要进行调优
data: data:
@@ -56,6 +56,11 @@ spring:
database: 0 # 数据库索引 database: 0 # 数据库索引
# password: 123456 # 密码,建议生产环境开启 # password: 123456 # 密码,建议生产环境开启
# Flowable 在 DM 场景下需要识别为 Oracle 并自动升级表结构
flowable:
database-schema-update: true
database-type: oracle
--- #################### MQ 消息队列相关配置 #################### --- #################### MQ 消息队列相关配置 ####################
--- #################### 定时任务相关配置 #################### --- #################### 定时任务相关配置 ####################

View File

@@ -0,0 +1,41 @@
-- Flowable batch-service schema, Oracle/DM dialect: runtime batch and
-- batch-part tables, FK linking parts to their parent batch, and the
-- batch schema-version marker row.
create table FLW_RU_BATCH (
ID_ VARCHAR2(64) not null,
REV_ INTEGER,
TYPE_ VARCHAR2(64) not null,
SEARCH_KEY_ VARCHAR2(255),
SEARCH_KEY2_ VARCHAR2(255),
CREATE_TIME_ TIMESTAMP(6) not null,
COMPLETE_TIME_ TIMESTAMP(6),
STATUS_ VARCHAR2(255),
BATCH_DOC_ID_ VARCHAR2(64),
TENANT_ID_ VARCHAR2(255) default '',
primary key (ID_)
);
create table FLW_RU_BATCH_PART (
ID_ VARCHAR2(64) not null,
REV_ INTEGER,
BATCH_ID_ VARCHAR2(64),
TYPE_ VARCHAR2(64) not null,
SCOPE_ID_ VARCHAR2(64),
SUB_SCOPE_ID_ VARCHAR2(64),
SCOPE_TYPE_ VARCHAR2(64),
SEARCH_KEY_ VARCHAR2(255),
SEARCH_KEY2_ VARCHAR2(255),
CREATE_TIME_ TIMESTAMP(6) not null,
COMPLETE_TIME_ TIMESTAMP(6),
STATUS_ VARCHAR2(255),
RESULT_DOC_ID_ VARCHAR2(64),
TENANT_ID_ VARCHAR2(255) default '',
primary key (ID_)
);
create index FLW_IDX_BATCH_PART on FLW_RU_BATCH_PART(BATCH_ID_);
alter table FLW_RU_BATCH_PART
add constraint FLW_FK_BATCH_PART_PARENT
foreign key (BATCH_ID_)
references FLW_RU_BATCH (ID_);
-- Marker row read by the engine's schema-version check.
insert into ACT_GE_PROPERTY values ('batch.schema.version', '7.0.1.1', 1);

View File

@@ -0,0 +1,4 @@
-- Tear-down of the batch-service schema: child table (and its FK to the
-- parent) goes first so FLW_RU_BATCH can be dropped cleanly.
drop index FLW_IDX_BATCH_PART;
drop table FLW_RU_BATCH_PART;
drop table FLW_RU_BATCH;

View File

@@ -0,0 +1,23 @@
-- Flowable common schema, Oracle/DM dialect: engine property key/value store
-- and the shared byte-array (resource/blob) table, seeded with the common
-- schema version and the id-generator start value.
create table ACT_GE_PROPERTY (
NAME_ VARCHAR2(64),
VALUE_ VARCHAR2(300),
REV_ INTEGER,
primary key (NAME_)
);
create table ACT_GE_BYTEARRAY (
ID_ VARCHAR2(64),
REV_ INTEGER,
NAME_ VARCHAR2(255),
DEPLOYMENT_ID_ VARCHAR2(64),
BYTES_ BLOB,
-- Boolean emulation: DM/Oracle have no BOOLEAN column type here.
GENERATED_ NUMBER(1) CHECK (GENERATED_ IN (1,0)),
primary key (ID_)
);
insert into ACT_GE_PROPERTY
values ('common.schema.version', '7.0.1.1', 1);
-- Seed for the engine's database id generator.
insert into ACT_GE_PROPERTY
values ('next.dbid', '1', 1);

View File

@@ -0,0 +1,2 @@
-- Tear-down of the common schema (reverse of the create script).
drop table ACT_GE_BYTEARRAY;
drop table ACT_GE_PROPERTY;

View File

@@ -0,0 +1,355 @@
-- Flowable process-engine runtime/repository schema, Oracle/DM dialect:
-- deployment, model, process-definition, execution, event-log and runtime
-- activity tables, followed by their indexes and referential constraints.
-- NOTE(review): the index/constraint section below also references tables
-- created by sibling scripts (ACT_RU_VARIABLE, ACT_RU_TASK, ACT_RU_JOB,
-- ACT_RU_IDENTITYLINK, ACT_RU_EVENT_SUBSCR, ...) — script run order matters.
create table ACT_RE_DEPLOYMENT (
ID_ VARCHAR2(64),
NAME_ VARCHAR2(255),
CATEGORY_ VARCHAR2(255),
KEY_ VARCHAR2(255),
TENANT_ID_ VARCHAR2(255) DEFAULT '',
DEPLOY_TIME_ TIMESTAMP(6),
DERIVED_FROM_ VARCHAR2(64),
DERIVED_FROM_ROOT_ VARCHAR2(64),
PARENT_DEPLOYMENT_ID_ VARCHAR2(255),
ENGINE_VERSION_ VARCHAR2(255),
primary key (ID_)
);
create table ACT_RE_MODEL (
ID_ VARCHAR2(64) not null,
REV_ INTEGER,
NAME_ VARCHAR2(255),
KEY_ VARCHAR2(255),
CATEGORY_ VARCHAR2(255),
CREATE_TIME_ TIMESTAMP(6),
LAST_UPDATE_TIME_ TIMESTAMP(6),
VERSION_ INTEGER,
META_INFO_ VARCHAR2(2000),
DEPLOYMENT_ID_ VARCHAR2(64),
EDITOR_SOURCE_VALUE_ID_ VARCHAR2(64),
EDITOR_SOURCE_EXTRA_VALUE_ID_ VARCHAR2(64),
TENANT_ID_ VARCHAR2(255) DEFAULT '',
primary key (ID_)
);
create table ACT_RU_EXECUTION (
ID_ VARCHAR2(64),
REV_ INTEGER,
PROC_INST_ID_ VARCHAR2(64),
BUSINESS_KEY_ VARCHAR2(255),
PARENT_ID_ VARCHAR2(64),
PROC_DEF_ID_ VARCHAR2(64),
SUPER_EXEC_ VARCHAR2(64),
ROOT_PROC_INST_ID_ VARCHAR2(64),
ACT_ID_ VARCHAR2(255),
IS_ACTIVE_ NUMBER(1) CHECK (IS_ACTIVE_ IN (1,0)),
IS_CONCURRENT_ NUMBER(1) CHECK (IS_CONCURRENT_ IN (1,0)),
IS_SCOPE_ NUMBER(1) CHECK (IS_SCOPE_ IN (1,0)),
IS_EVENT_SCOPE_ NUMBER(1) CHECK (IS_EVENT_SCOPE_ IN (1,0)),
IS_MI_ROOT_ NUMBER(1) CHECK (IS_MI_ROOT_ IN (1,0)),
SUSPENSION_STATE_ INTEGER,
CACHED_ENT_STATE_ INTEGER,
TENANT_ID_ VARCHAR2(255) DEFAULT '',
NAME_ VARCHAR2(255),
START_ACT_ID_ VARCHAR2(255),
START_TIME_ TIMESTAMP(6),
START_USER_ID_ VARCHAR2(255),
LOCK_TIME_ TIMESTAMP(6),
LOCK_OWNER_ VARCHAR2(255),
IS_COUNT_ENABLED_ NUMBER(1) CHECK (IS_COUNT_ENABLED_ IN (1,0)),
EVT_SUBSCR_COUNT_ INTEGER,
TASK_COUNT_ INTEGER,
JOB_COUNT_ INTEGER,
TIMER_JOB_COUNT_ INTEGER,
SUSP_JOB_COUNT_ INTEGER,
DEADLETTER_JOB_COUNT_ INTEGER,
EXTERNAL_WORKER_JOB_COUNT_ INTEGER,
VAR_COUNT_ INTEGER,
ID_LINK_COUNT_ INTEGER,
CALLBACK_ID_ VARCHAR2(255),
CALLBACK_TYPE_ VARCHAR2(255),
REFERENCE_ID_ VARCHAR2(255),
REFERENCE_TYPE_ VARCHAR2(255),
PROPAGATED_STAGE_INST_ID_ VARCHAR2(255),
BUSINESS_STATUS_ VARCHAR2(255),
primary key (ID_)
);
create table ACT_RE_PROCDEF (
ID_ VARCHAR2(64) NOT NULL,
REV_ INTEGER,
CATEGORY_ VARCHAR2(255),
NAME_ VARCHAR2(255),
KEY_ VARCHAR2(255) NOT NULL,
VERSION_ INTEGER NOT NULL,
DEPLOYMENT_ID_ VARCHAR2(64),
RESOURCE_NAME_ VARCHAR2(2000),
DGRM_RESOURCE_NAME_ VARCHAR2(4000),
DESCRIPTION_ VARCHAR2(2000),
HAS_START_FORM_KEY_ NUMBER(1) CHECK (HAS_START_FORM_KEY_ IN (1,0)),
HAS_GRAPHICAL_NOTATION_ NUMBER(1) CHECK (HAS_GRAPHICAL_NOTATION_ IN (1,0)),
SUSPENSION_STATE_ INTEGER,
TENANT_ID_ VARCHAR2(255) DEFAULT '',
DERIVED_FROM_ VARCHAR2(64),
DERIVED_FROM_ROOT_ VARCHAR2(64),
DERIVED_VERSION_ INTEGER DEFAULT 0 NOT NULL,
ENGINE_VERSION_ VARCHAR2(255),
primary key (ID_)
);
create table ACT_EVT_LOG (
LOG_NR_ NUMBER(19),
TYPE_ VARCHAR2(64),
PROC_DEF_ID_ VARCHAR2(64),
PROC_INST_ID_ VARCHAR2(64),
EXECUTION_ID_ VARCHAR2(64),
TASK_ID_ VARCHAR2(64),
TIME_STAMP_ TIMESTAMP(6) not null,
USER_ID_ VARCHAR2(255),
DATA_ BLOB,
LOCK_OWNER_ VARCHAR2(255),
LOCK_TIME_ TIMESTAMP(6) null,
IS_PROCESSED_ NUMBER(3) default 0,
primary key (LOG_NR_)
);
-- Supplies LOG_NR_ values for ACT_EVT_LOG (no identity column in this dialect).
create sequence act_evt_log_seq;
create table ACT_PROCDEF_INFO (
ID_ VARCHAR2(64) not null,
PROC_DEF_ID_ VARCHAR2(64) not null,
REV_ integer,
INFO_JSON_ID_ VARCHAR2(64),
primary key (ID_)
);
create table ACT_RU_ACTINST (
ID_ VARCHAR2(64) not null,
REV_ INTEGER default 1,
PROC_DEF_ID_ VARCHAR2(64) not null,
PROC_INST_ID_ VARCHAR2(64) not null,
EXECUTION_ID_ VARCHAR2(64) not null,
ACT_ID_ VARCHAR2(255) not null,
TASK_ID_ VARCHAR2(64),
CALL_PROC_INST_ID_ VARCHAR2(64),
ACT_NAME_ VARCHAR2(255),
ACT_TYPE_ VARCHAR2(255) not null,
ASSIGNEE_ VARCHAR2(255),
START_TIME_ TIMESTAMP(6) not null,
END_TIME_ TIMESTAMP(6),
DURATION_ NUMBER(19,0),
TRANSACTION_ORDER_ INTEGER,
DELETE_REASON_ VARCHAR2(2000),
TENANT_ID_ VARCHAR2(255) default '',
primary key (ID_)
);
-- Secondary indexes and referential constraints.
create index ACT_IDX_EXEC_BUSKEY on ACT_RU_EXECUTION(BUSINESS_KEY_);
create index ACT_IDX_EXEC_ROOT on ACT_RU_EXECUTION(ROOT_PROC_INST_ID_);
create index ACT_IDX_EXEC_REF_ID_ on ACT_RU_EXECUTION(REFERENCE_ID_);
create index ACT_IDX_VARIABLE_TASK_ID on ACT_RU_VARIABLE(TASK_ID_);
create index ACT_IDX_RU_ACTI_START on ACT_RU_ACTINST(START_TIME_);
create index ACT_IDX_RU_ACTI_END on ACT_RU_ACTINST(END_TIME_);
create index ACT_IDX_RU_ACTI_PROC on ACT_RU_ACTINST(PROC_INST_ID_);
create index ACT_IDX_RU_ACTI_PROC_ACT on ACT_RU_ACTINST(PROC_INST_ID_, ACT_ID_);
create index ACT_IDX_RU_ACTI_EXEC on ACT_RU_ACTINST(EXECUTION_ID_);
create index ACT_IDX_RU_ACTI_EXEC_ACT on ACT_RU_ACTINST(EXECUTION_ID_, ACT_ID_);
create index ACT_IDX_RU_ACTI_TASK on ACT_RU_ACTINST(TASK_ID_);
create index ACT_IDX_BYTEAR_DEPL on ACT_GE_BYTEARRAY(DEPLOYMENT_ID_);
alter table ACT_GE_BYTEARRAY
add constraint ACT_FK_BYTEARR_DEPL
foreign key (DEPLOYMENT_ID_)
references ACT_RE_DEPLOYMENT (ID_);
alter table ACT_RE_PROCDEF
add constraint ACT_UNIQ_PROCDEF
unique (KEY_,VERSION_, DERIVED_VERSION_, TENANT_ID_);
create index ACT_IDX_EXE_PROCINST on ACT_RU_EXECUTION(PROC_INST_ID_);
-- Self-referencing FKs: executions form a tree under their process instance.
alter table ACT_RU_EXECUTION
add constraint ACT_FK_EXE_PROCINST
foreign key (PROC_INST_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_EXE_PARENT on ACT_RU_EXECUTION(PARENT_ID_);
alter table ACT_RU_EXECUTION
add constraint ACT_FK_EXE_PARENT
foreign key (PARENT_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_EXE_SUPER on ACT_RU_EXECUTION(SUPER_EXEC_);
alter table ACT_RU_EXECUTION
add constraint ACT_FK_EXE_SUPER
foreign key (SUPER_EXEC_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_EXE_PROCDEF on ACT_RU_EXECUTION(PROC_DEF_ID_);
alter table ACT_RU_EXECUTION
add constraint ACT_FK_EXE_PROCDEF
foreign key (PROC_DEF_ID_)
references ACT_RE_PROCDEF (ID_);
create index ACT_IDX_TSKASS_TASK on ACT_RU_IDENTITYLINK(TASK_ID_);
alter table ACT_RU_IDENTITYLINK
add constraint ACT_FK_TSKASS_TASK
foreign key (TASK_ID_)
references ACT_RU_TASK (ID_);
create index ACT_IDX_ATHRZ_PROCEDEF on ACT_RU_IDENTITYLINK(PROC_DEF_ID_);
alter table ACT_RU_IDENTITYLINK
add constraint ACT_FK_ATHRZ_PROCEDEF
foreign key (PROC_DEF_ID_)
references ACT_RE_PROCDEF (ID_);
create index ACT_IDX_IDL_PROCINST on ACT_RU_IDENTITYLINK(PROC_INST_ID_);
alter table ACT_RU_IDENTITYLINK
add constraint ACT_FK_IDL_PROCINST
foreign key (PROC_INST_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_TASK_EXEC on ACT_RU_TASK(EXECUTION_ID_);
alter table ACT_RU_TASK
add constraint ACT_FK_TASK_EXE
foreign key (EXECUTION_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_TASK_PROCINST on ACT_RU_TASK(PROC_INST_ID_);
alter table ACT_RU_TASK
add constraint ACT_FK_TASK_PROCINST
foreign key (PROC_INST_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_TASK_PROCDEF on ACT_RU_TASK(PROC_DEF_ID_);
alter table ACT_RU_TASK
add constraint ACT_FK_TASK_PROCDEF
foreign key (PROC_DEF_ID_)
references ACT_RE_PROCDEF (ID_);
create index ACT_IDX_VAR_EXE on ACT_RU_VARIABLE(EXECUTION_ID_);
alter table ACT_RU_VARIABLE
add constraint ACT_FK_VAR_EXE
foreign key (EXECUTION_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_VAR_PROCINST on ACT_RU_VARIABLE(PROC_INST_ID_);
alter table ACT_RU_VARIABLE
add constraint ACT_FK_VAR_PROCINST
foreign key (PROC_INST_ID_)
references ACT_RU_EXECUTION(ID_);
create index ACT_IDX_JOB_EXECUTION_ID on ACT_RU_JOB(EXECUTION_ID_);
alter table ACT_RU_JOB
add constraint ACT_FK_JOB_EXECUTION
foreign key (EXECUTION_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_JOB_PROC_INST_ID on ACT_RU_JOB(PROCESS_INSTANCE_ID_);
alter table ACT_RU_JOB
add constraint ACT_FK_JOB_PROCESS_INSTANCE
foreign key (PROCESS_INSTANCE_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_JOB_PROC_DEF_ID on ACT_RU_JOB(PROC_DEF_ID_);
alter table ACT_RU_JOB
add constraint ACT_FK_JOB_PROC_DEF
foreign key (PROC_DEF_ID_)
references ACT_RE_PROCDEF (ID_);
create index ACT_IDX_TJOB_EXECUTION_ID on ACT_RU_TIMER_JOB(EXECUTION_ID_);
alter table ACT_RU_TIMER_JOB
add constraint ACT_FK_TJOB_EXECUTION
foreign key (EXECUTION_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_TJOB_PROC_INST_ID on ACT_RU_TIMER_JOB(PROCESS_INSTANCE_ID_);
alter table ACT_RU_TIMER_JOB
add constraint ACT_FK_TJOB_PROCESS_INSTANCE
foreign key (PROCESS_INSTANCE_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_TJOB_PROC_DEF_ID on ACT_RU_TIMER_JOB(PROC_DEF_ID_);
alter table ACT_RU_TIMER_JOB
add constraint ACT_FK_TJOB_PROC_DEF
foreign key (PROC_DEF_ID_)
references ACT_RE_PROCDEF (ID_);
create index ACT_IDX_SJOB_EXECUTION_ID on ACT_RU_SUSPENDED_JOB(EXECUTION_ID_);
alter table ACT_RU_SUSPENDED_JOB
add constraint ACT_FK_SJOB_EXECUTION
foreign key (EXECUTION_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_SJOB_PROC_INST_ID on ACT_RU_SUSPENDED_JOB(PROCESS_INSTANCE_ID_);
alter table ACT_RU_SUSPENDED_JOB
add constraint ACT_FK_SJOB_PROCESS_INSTANCE
foreign key (PROCESS_INSTANCE_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_SJOB_PROC_DEF_ID on ACT_RU_SUSPENDED_JOB(PROC_DEF_ID_);
alter table ACT_RU_SUSPENDED_JOB
add constraint ACT_FK_SJOB_PROC_DEF
foreign key (PROC_DEF_ID_)
references ACT_RE_PROCDEF (ID_);
create index ACT_IDX_DJOB_EXECUTION_ID on ACT_RU_DEADLETTER_JOB(EXECUTION_ID_);
alter table ACT_RU_DEADLETTER_JOB
add constraint ACT_FK_DJOB_EXECUTION
foreign key (EXECUTION_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_DJOB_PROC_INST_ID on ACT_RU_DEADLETTER_JOB(PROCESS_INSTANCE_ID_);
alter table ACT_RU_DEADLETTER_JOB
add constraint ACT_FK_DJOB_PROCESS_INSTANCE
foreign key (PROCESS_INSTANCE_ID_)
references ACT_RU_EXECUTION (ID_);
create index ACT_IDX_DJOB_PROC_DEF_ID on ACT_RU_DEADLETTER_JOB(PROC_DEF_ID_);
alter table ACT_RU_DEADLETTER_JOB
add constraint ACT_FK_DJOB_PROC_DEF
foreign key (PROC_DEF_ID_)
references ACT_RE_PROCDEF (ID_);
alter table ACT_RU_EVENT_SUBSCR
add constraint ACT_FK_EVENT_EXEC
foreign key (EXECUTION_ID_)
references ACT_RU_EXECUTION(ID_);
create index ACT_IDX_MODEL_SOURCE on ACT_RE_MODEL(EDITOR_SOURCE_VALUE_ID_);
alter table ACT_RE_MODEL
add constraint ACT_FK_MODEL_SOURCE
foreign key (EDITOR_SOURCE_VALUE_ID_)
references ACT_GE_BYTEARRAY (ID_);
create index ACT_IDX_MODEL_SOURCE_EXTRA on ACT_RE_MODEL(EDITOR_SOURCE_EXTRA_VALUE_ID_);
alter table ACT_RE_MODEL
add constraint ACT_FK_MODEL_SOURCE_EXTRA
foreign key (EDITOR_SOURCE_EXTRA_VALUE_ID_)
references ACT_GE_BYTEARRAY (ID_);
create index ACT_IDX_MODEL_DEPLOYMENT on ACT_RE_MODEL(DEPLOYMENT_ID_);
alter table ACT_RE_MODEL
add constraint ACT_FK_MODEL_DEPLOYMENT
foreign key (DEPLOYMENT_ID_)
references ACT_RE_DEPLOYMENT (ID_);
create index ACT_IDX_PROCDEF_INFO_JSON on ACT_PROCDEF_INFO(INFO_JSON_ID_);
alter table ACT_PROCDEF_INFO
add constraint ACT_FK_INFO_JSON_BA
foreign key (INFO_JSON_ID_)
references ACT_GE_BYTEARRAY (ID_);
create index ACT_IDX_PROCDEF_INFO_PROC on ACT_PROCDEF_INFO(PROC_DEF_ID_);
alter table ACT_PROCDEF_INFO
add constraint ACT_FK_INFO_PROCDEF
foreign key (PROC_DEF_ID_)
references ACT_RE_PROCDEF (ID_);
alter table ACT_PROCDEF_INFO
add constraint ACT_UNIQ_INFO_PROCDEF
unique (PROC_DEF_ID_);
-- Engine schema version/history marker rows.
insert into ACT_GE_PROPERTY
values ('schema.version', '7.0.1.1', 1);
insert into ACT_GE_PROPERTY
values ('schema.history', 'create(7.0.1.1)', 1);

View File

@@ -0,0 +1,114 @@
-- Flowable history schema, Oracle/DM dialect: historic process instances,
-- activity instances, variable details, comments and attachments, plus their
-- query indexes.
-- NOTE(review): the index section also references ACT_HI_VARINST,
-- ACT_HI_IDENTITYLINK and ACT_HI_TASKINST, which are created by sibling
-- scripts — run order matters.
create table ACT_HI_PROCINST (
ID_ VARCHAR2(64) not null,
REV_ INTEGER default 1,
PROC_INST_ID_ VARCHAR2(64) not null,
BUSINESS_KEY_ VARCHAR2(255),
PROC_DEF_ID_ VARCHAR2(64) not null,
START_TIME_ TIMESTAMP(6) not null,
END_TIME_ TIMESTAMP(6),
DURATION_ NUMBER(19,0),
START_USER_ID_ VARCHAR2(255),
START_ACT_ID_ VARCHAR2(255),
END_ACT_ID_ VARCHAR2(255),
SUPER_PROCESS_INSTANCE_ID_ VARCHAR2(64),
DELETE_REASON_ VARCHAR2(2000),
TENANT_ID_ VARCHAR2(255) default '',
NAME_ VARCHAR2(255),
CALLBACK_ID_ VARCHAR2(255),
CALLBACK_TYPE_ VARCHAR2(255),
REFERENCE_ID_ VARCHAR2(255),
REFERENCE_TYPE_ VARCHAR2(255),
PROPAGATED_STAGE_INST_ID_ VARCHAR2(255),
BUSINESS_STATUS_ VARCHAR2(255),
primary key (ID_),
unique (PROC_INST_ID_)
);
create table ACT_HI_ACTINST (
ID_ VARCHAR2(64) not null,
REV_ INTEGER default 1,
PROC_DEF_ID_ VARCHAR2(64) not null,
PROC_INST_ID_ VARCHAR2(64) not null,
EXECUTION_ID_ VARCHAR2(64) not null,
ACT_ID_ VARCHAR2(255) not null,
TASK_ID_ VARCHAR2(64),
CALL_PROC_INST_ID_ VARCHAR2(64),
ACT_NAME_ VARCHAR2(255),
ACT_TYPE_ VARCHAR2(255) not null,
ASSIGNEE_ VARCHAR2(255),
START_TIME_ TIMESTAMP(6) not null,
END_TIME_ TIMESTAMP(6),
TRANSACTION_ORDER_ INTEGER,
DURATION_ NUMBER(19,0),
DELETE_REASON_ VARCHAR2(2000),
TENANT_ID_ VARCHAR2(255) default '',
primary key (ID_)
);
create table ACT_HI_DETAIL (
ID_ VARCHAR2(64) not null,
TYPE_ VARCHAR2(255) not null,
PROC_INST_ID_ VARCHAR2(64),
EXECUTION_ID_ VARCHAR2(64),
TASK_ID_ VARCHAR2(64),
ACT_INST_ID_ VARCHAR2(64),
NAME_ VARCHAR2(255) not null,
VAR_TYPE_ VARCHAR2(64),
REV_ INTEGER,
TIME_ TIMESTAMP(6) not null,
BYTEARRAY_ID_ VARCHAR2(64),
DOUBLE_ NUMBER(38,10),
LONG_ NUMBER(19,0),
TEXT_ VARCHAR2(2000),
TEXT2_ VARCHAR2(2000),
primary key (ID_)
);
create table ACT_HI_COMMENT (
ID_ VARCHAR2(64) not null,
TYPE_ VARCHAR2(255),
TIME_ TIMESTAMP(6) not null,
USER_ID_ VARCHAR2(255),
TASK_ID_ VARCHAR2(64),
PROC_INST_ID_ VARCHAR2(64),
ACTION_ VARCHAR2(255),
MESSAGE_ VARCHAR2(2000),
FULL_MSG_ BLOB,
primary key (ID_)
);
create table ACT_HI_ATTACHMENT (
ID_ VARCHAR2(64) not null,
REV_ INTEGER,
USER_ID_ VARCHAR2(255),
NAME_ VARCHAR2(255),
DESCRIPTION_ VARCHAR2(2000),
TYPE_ VARCHAR2(255),
TASK_ID_ VARCHAR2(64),
PROC_INST_ID_ VARCHAR2(64),
URL_ VARCHAR2(2000),
CONTENT_ID_ VARCHAR2(64),
TIME_ TIMESTAMP(6),
primary key (ID_)
);
-- Query indexes for the history tables.
create index ACT_IDX_HI_PRO_INST_END on ACT_HI_PROCINST(END_TIME_);
create index ACT_IDX_HI_PRO_I_BUSKEY on ACT_HI_PROCINST(BUSINESS_KEY_);
create index ACT_IDX_HI_PRO_SUPER_PROCINST on ACT_HI_PROCINST(SUPER_PROCESS_INSTANCE_ID_);
create index ACT_IDX_HI_ACT_INST_START on ACT_HI_ACTINST(START_TIME_);
create index ACT_IDX_HI_ACT_INST_END on ACT_HI_ACTINST(END_TIME_);
create index ACT_IDX_HI_DETAIL_PROC_INST on ACT_HI_DETAIL(PROC_INST_ID_);
create index ACT_IDX_HI_DETAIL_ACT_INST on ACT_HI_DETAIL(ACT_INST_ID_);
create index ACT_IDX_HI_DETAIL_TIME on ACT_HI_DETAIL(TIME_);
create index ACT_IDX_HI_DETAIL_NAME on ACT_HI_DETAIL(NAME_);
create index ACT_IDX_HI_DETAIL_TASK_ID on ACT_HI_DETAIL(TASK_ID_);
create index ACT_IDX_HI_PROCVAR_PROC_INST on ACT_HI_VARINST(PROC_INST_ID_);
create index ACT_IDX_HI_PROCVAR_TASK_ID on ACT_HI_VARINST(TASK_ID_);
create index ACT_IDX_HI_PROCVAR_EXE on ACT_HI_VARINST(EXECUTION_ID_);
create index ACT_IDX_HI_IDENT_LNK_TASK on ACT_HI_IDENTITYLINK(TASK_ID_);
create index ACT_IDX_HI_IDENT_LNK_PROCINST on ACT_HI_IDENTITYLINK(PROC_INST_ID_);
create index ACT_IDX_HI_ACT_INST_PROCINST on ACT_HI_ACTINST(PROC_INST_ID_, ACT_ID_);
create index ACT_IDX_HI_ACT_INST_EXEC on ACT_HI_ACTINST(EXECUTION_ID_, ACT_ID_);
create index ACT_IDX_HI_TASK_INST_PROCINST on ACT_HI_TASKINST(PROC_INST_ID_);

View File

@@ -0,0 +1,148 @@
-- Tear-down of the process-engine schema: indexes first, then FK/unique
-- constraints, then tables and the event-log sequence.
-- NOTE(review): ACT_IDX_EXEC_ROOT and ACT_IDX_EXEC_REF_ID_ (created in the
-- create script) are not dropped explicitly here; dropping ACT_RU_EXECUTION
-- removes its indexes on Oracle-compatible databases — confirm DM matches.
drop index ACT_IDX_BYTEAR_DEPL;
drop index ACT_IDX_EXE_PROCINST;
drop index ACT_IDX_EXE_PARENT;
drop index ACT_IDX_EXE_SUPER;
drop index ACT_IDX_TSKASS_TASK;
drop index ACT_IDX_TASK_EXEC;
drop index ACT_IDX_TASK_PROCINST;
drop index ACT_IDX_TASK_PROCDEF;
drop index ACT_IDX_VAR_EXE;
drop index ACT_IDX_VAR_PROCINST;
drop index ACT_IDX_JOB_EXECUTION_ID;
drop index ACT_IDX_JOB_PROC_INST_ID;
drop index ACT_IDX_JOB_PROC_DEF_ID;
drop index ACT_IDX_TJOB_EXECUTION_ID;
drop index ACT_IDX_TJOB_PROC_INST_ID;
drop index ACT_IDX_TJOB_PROC_DEF_ID;
drop index ACT_IDX_SJOB_EXECUTION_ID;
drop index ACT_IDX_SJOB_PROC_INST_ID;
drop index ACT_IDX_SJOB_PROC_DEF_ID;
drop index ACT_IDX_DJOB_EXECUTION_ID;
drop index ACT_IDX_DJOB_PROC_INST_ID;
drop index ACT_IDX_DJOB_PROC_DEF_ID;
drop index ACT_IDX_MODEL_SOURCE;
drop index ACT_IDX_MODEL_SOURCE_EXTRA;
drop index ACT_IDX_MODEL_DEPLOYMENT;
drop index ACT_IDX_PROCDEF_INFO_JSON;
drop index ACT_IDX_EXEC_BUSKEY;
drop index ACT_IDX_VARIABLE_TASK_ID;
drop index ACT_IDX_RU_ACTI_START;
drop index ACT_IDX_RU_ACTI_END;
drop index ACT_IDX_RU_ACTI_PROC;
drop index ACT_IDX_RU_ACTI_PROC_ACT;
drop index ACT_IDX_RU_ACTI_EXEC;
drop index ACT_IDX_RU_ACTI_EXEC_ACT;
-- Drop referential constraints before their target tables.
alter table ACT_GE_BYTEARRAY
drop CONSTRAINT ACT_FK_BYTEARR_DEPL;
alter table ACT_RU_EXECUTION
drop CONSTRAINT ACT_FK_EXE_PROCINST;
alter table ACT_RU_EXECUTION
drop CONSTRAINT ACT_FK_EXE_PARENT;
alter table ACT_RU_EXECUTION
drop CONSTRAINT ACT_FK_EXE_SUPER;
alter table ACT_RU_EXECUTION
drop CONSTRAINT ACT_FK_EXE_PROCDEF;
alter table ACT_RU_IDENTITYLINK
drop CONSTRAINT ACT_FK_TSKASS_TASK;
alter table ACT_RU_IDENTITYLINK
drop CONSTRAINT ACT_FK_IDL_PROCINST;
alter table ACT_RU_IDENTITYLINK
drop CONSTRAINT ACT_FK_ATHRZ_PROCEDEF;
alter table ACT_RU_TASK
drop CONSTRAINT ACT_FK_TASK_EXE;
alter table ACT_RU_TASK
drop CONSTRAINT ACT_FK_TASK_PROCINST;
alter table ACT_RU_TASK
drop CONSTRAINT ACT_FK_TASK_PROCDEF;
alter table ACT_RU_VARIABLE
drop CONSTRAINT ACT_FK_VAR_EXE;
alter table ACT_RU_VARIABLE
drop CONSTRAINT ACT_FK_VAR_PROCINST;
alter table ACT_RU_JOB
drop CONSTRAINT ACT_FK_JOB_EXECUTION;
alter table ACT_RU_JOB
drop CONSTRAINT ACT_FK_JOB_PROCESS_INSTANCE;
alter table ACT_RU_JOB
drop CONSTRAINT ACT_FK_JOB_PROC_DEF;
alter table ACT_RU_TIMER_JOB
drop CONSTRAINT ACT_FK_TJOB_EXECUTION;
alter table ACT_RU_TIMER_JOB
drop CONSTRAINT ACT_FK_TJOB_PROCESS_INSTANCE;
alter table ACT_RU_TIMER_JOB
drop CONSTRAINT ACT_FK_TJOB_PROC_DEF;
alter table ACT_RU_SUSPENDED_JOB
drop CONSTRAINT ACT_FK_SJOB_EXECUTION;
alter table ACT_RU_SUSPENDED_JOB
drop CONSTRAINT ACT_FK_SJOB_PROCESS_INSTANCE;
alter table ACT_RU_SUSPENDED_JOB
drop CONSTRAINT ACT_FK_SJOB_PROC_DEF;
alter table ACT_RU_DEADLETTER_JOB
drop CONSTRAINT ACT_FK_DJOB_EXECUTION;
alter table ACT_RU_DEADLETTER_JOB
drop CONSTRAINT ACT_FK_DJOB_PROCESS_INSTANCE;
alter table ACT_RU_DEADLETTER_JOB
drop CONSTRAINT ACT_FK_DJOB_PROC_DEF;
alter table ACT_RU_EVENT_SUBSCR
drop CONSTRAINT ACT_FK_EVENT_EXEC;
alter table ACT_RE_PROCDEF
drop CONSTRAINT ACT_UNIQ_PROCDEF;
alter table ACT_RE_MODEL
drop CONSTRAINT ACT_FK_MODEL_SOURCE;
alter table ACT_RE_MODEL
drop CONSTRAINT ACT_FK_MODEL_SOURCE_EXTRA;
alter table ACT_RE_MODEL
drop CONSTRAINT ACT_FK_MODEL_DEPLOYMENT;
alter table ACT_PROCDEF_INFO
drop CONSTRAINT ACT_UNIQ_INFO_PROCDEF;
alter table ACT_PROCDEF_INFO
drop CONSTRAINT ACT_FK_INFO_JSON_BA;
alter table ACT_PROCDEF_INFO
drop CONSTRAINT ACT_FK_INFO_PROCDEF;
drop index ACT_IDX_ATHRZ_PROCEDEF;
drop index ACT_IDX_PROCDEF_INFO_PROC;
drop table ACT_RU_ACTINST;
drop table ACT_RE_DEPLOYMENT;
drop table ACT_RE_MODEL;
drop table ACT_RE_PROCDEF;
drop table ACT_RU_EXECUTION;
drop sequence act_evt_log_seq;
drop table ACT_EVT_LOG;
drop table ACT_PROCDEF_INFO;

View File

@@ -0,0 +1,23 @@
-- Tear-down of the history schema: indexes first, then tables.
-- NOTE(review): ACT_IDX_HI_PRO_SUPER_PROCINST and ACT_IDX_HI_ACT_INST_EXEC
-- (created in the create script) are not dropped explicitly; dropping the
-- owning tables removes them on Oracle-compatible databases — confirm DM.
drop index ACT_IDX_HI_PRO_INST_END;
drop index ACT_IDX_HI_PRO_I_BUSKEY;
drop index ACT_IDX_HI_ACT_INST_START;
drop index ACT_IDX_HI_ACT_INST_END;
drop index ACT_IDX_HI_DETAIL_PROC_INST;
drop index ACT_IDX_HI_DETAIL_ACT_INST;
drop index ACT_IDX_HI_DETAIL_TIME;
drop index ACT_IDX_HI_DETAIL_NAME;
drop index ACT_IDX_HI_DETAIL_TASK_ID;
drop index ACT_IDX_HI_PROCVAR_PROC_INST;
drop index ACT_IDX_HI_PROCVAR_TASK_ID;
drop index ACT_IDX_HI_PROCVAR_EXE;
drop index ACT_IDX_HI_ACT_INST_PROCINST;
drop index ACT_IDX_HI_IDENT_LNK_TASK;
drop index ACT_IDX_HI_IDENT_LNK_PROCINST;
drop index ACT_IDX_HI_TASK_INST_PROCINST;
drop table ACT_HI_PROCINST;
drop table ACT_HI_ACTINST;
drop table ACT_HI_DETAIL;
drop table ACT_HI_COMMENT;
drop table ACT_HI_ATTACHMENT;

View File

@@ -0,0 +1,23 @@
-- Flowable entity-link history table, Oracle/DM dialect, with scope lookup
-- indexes.
create table ACT_HI_ENTITYLINK (
ID_ VARCHAR2(64),
LINK_TYPE_ VARCHAR2(255),
CREATE_TIME_ TIMESTAMP(6),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
PARENT_ELEMENT_ID_ VARCHAR2(255),
REF_SCOPE_ID_ VARCHAR2(255),
REF_SCOPE_TYPE_ VARCHAR2(255),
REF_SCOPE_DEFINITION_ID_ VARCHAR2(255),
ROOT_SCOPE_ID_ VARCHAR2(255),
ROOT_SCOPE_TYPE_ VARCHAR2(255),
HIERARCHY_TYPE_ VARCHAR2(255),
primary key (ID_)
);
create index ACT_IDX_HI_ENT_LNK_SCOPE on ACT_HI_ENTITYLINK(SCOPE_ID_, SCOPE_TYPE_, LINK_TYPE_);
create index ACT_IDX_HI_ENT_LNK_REF_SCOPE on ACT_HI_ENTITYLINK(REF_SCOPE_ID_, REF_SCOPE_TYPE_, LINK_TYPE_);
create index ACT_IDX_HI_ENT_LNK_ROOT_SCOPE on ACT_HI_ENTITYLINK(ROOT_SCOPE_ID_, ROOT_SCOPE_TYPE_, LINK_TYPE_);
create index ACT_IDX_HI_ENT_LNK_SCOPE_DEF on ACT_HI_ENTITYLINK(SCOPE_DEFINITION_ID_, SCOPE_TYPE_, LINK_TYPE_);

View File

@@ -0,0 +1,26 @@
-- Flowable runtime entity-link table, Oracle/DM dialect, with scope lookup
-- indexes and the entity-link schema-version marker row.
create table ACT_RU_ENTITYLINK (
ID_ VARCHAR2(64),
REV_ INTEGER,
CREATE_TIME_ TIMESTAMP(6),
LINK_TYPE_ VARCHAR2(255),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
PARENT_ELEMENT_ID_ VARCHAR2(255),
REF_SCOPE_ID_ VARCHAR2(255),
REF_SCOPE_TYPE_ VARCHAR2(255),
REF_SCOPE_DEFINITION_ID_ VARCHAR2(255),
ROOT_SCOPE_ID_ VARCHAR2(255),
ROOT_SCOPE_TYPE_ VARCHAR2(255),
HIERARCHY_TYPE_ VARCHAR2(255),
primary key (ID_)
);
create index ACT_IDX_ENT_LNK_SCOPE on ACT_RU_ENTITYLINK(SCOPE_ID_, SCOPE_TYPE_, LINK_TYPE_);
create index ACT_IDX_ENT_LNK_REF_SCOPE on ACT_RU_ENTITYLINK(REF_SCOPE_ID_, REF_SCOPE_TYPE_, LINK_TYPE_);
create index ACT_IDX_ENT_LNK_ROOT_SCOPE on ACT_RU_ENTITYLINK(ROOT_SCOPE_ID_, ROOT_SCOPE_TYPE_, LINK_TYPE_);
create index ACT_IDX_ENT_LNK_SCOPE_DEF on ACT_RU_ENTITYLINK(SCOPE_DEFINITION_ID_, SCOPE_TYPE_, LINK_TYPE_);
insert into ACT_GE_PROPERTY values ('entitylink.schema.version', '7.0.1.1', 1);

View File

@@ -0,0 +1,4 @@
-- Tear-down of the entity-link history table.
-- NOTE(review): the REF_SCOPE and ROOT_SCOPE indexes are not dropped
-- explicitly; dropping the table removes them on Oracle-compatible
-- databases — confirm DM behaves the same.
drop index ACT_IDX_HI_ENT_LNK_SCOPE;
drop index ACT_IDX_HI_ENT_LNK_SCOPE_DEF;
drop table ACT_HI_ENTITYLINK;

View File

@@ -0,0 +1,4 @@
-- Tear-down of the runtime entity-link table.
-- NOTE(review): the REF_SCOPE and ROOT_SCOPE indexes are not dropped
-- explicitly; dropping the table removes them on Oracle-compatible
-- databases — confirm DM behaves the same.
drop index ACT_IDX_ENT_LNK_SCOPE;
drop index ACT_IDX_ENT_LNK_SCOPE_DEF;
drop table ACT_RU_ENTITYLINK;

View File

@@ -0,0 +1,28 @@
-- Flowable runtime event-subscription table, Oracle/DM dialect, with lookup
-- indexes and the event-subscription schema-version marker row.
create table ACT_RU_EVENT_SUBSCR (
ID_ VARCHAR2(64) not null,
REV_ integer,
EVENT_TYPE_ VARCHAR2(255) not null,
EVENT_NAME_ VARCHAR2(255),
EXECUTION_ID_ VARCHAR2(64),
PROC_INST_ID_ VARCHAR2(64),
ACTIVITY_ID_ VARCHAR2(64),
CONFIGURATION_ VARCHAR2(255),
CREATED_ TIMESTAMP(6) not null,
PROC_DEF_ID_ VARCHAR2(64),
SUB_SCOPE_ID_ VARCHAR2(64),
SCOPE_ID_ VARCHAR2(64),
SCOPE_DEFINITION_ID_ VARCHAR2(64),
SCOPE_DEFINITION_KEY_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(64),
LOCK_TIME_ TIMESTAMP(6),
LOCK_OWNER_ VARCHAR2(255),
TENANT_ID_ VARCHAR2(255) DEFAULT '',
primary key (ID_)
);
create index ACT_IDX_EVENT_SUBSCR_CONFIG_ on ACT_RU_EVENT_SUBSCR(CONFIGURATION_);
create index ACT_IDX_EVENT_SUBSCR on ACT_RU_EVENT_SUBSCR(EXECUTION_ID_);
create index ACT_IDX_EVENT_SUBSCR_SCOPEREF_ on ACT_RU_EVENT_SUBSCR(SCOPE_ID_, SCOPE_TYPE_);
insert into ACT_GE_PROPERTY values ('eventsubscription.schema.version', '7.0.1.1', 1);

View File

@@ -0,0 +1,5 @@
-- Tear-down of the runtime event-subscription table (reverse of create).
drop index ACT_IDX_EVENT_SUBSCR_CONFIG_;
drop index ACT_IDX_EVENT_SUBSCR;
drop index ACT_IDX_EVENT_SUBSCR_SCOPEREF_;
drop table ACT_RU_EVENT_SUBSCR;

View File

@@ -0,0 +1,20 @@
-- Historic identity links (user/group involvement in completed tasks/scopes).
create table ACT_HI_IDENTITYLINK (
ID_ VARCHAR2(64),
GROUP_ID_ VARCHAR2(255),
TYPE_ VARCHAR2(255),
USER_ID_ VARCHAR2(255),
TASK_ID_ VARCHAR2(64),
CREATE_TIME_ TIMESTAMP(6),
PROC_INST_ID_ VARCHAR2(64),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
primary key (ID_)
);
-- Query paths: by user, and by each scope-id variant combined with scope type.
create index ACT_IDX_HI_IDENT_LNK_USER on ACT_HI_IDENTITYLINK(USER_ID_);
create index ACT_IDX_HI_IDENT_LNK_SCOPE on ACT_HI_IDENTITYLINK(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_HI_IDENT_LNK_SUB_SCOPE on ACT_HI_IDENTITYLINK(SUB_SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_HI_IDENT_LNK_SCOPE_DEF on ACT_HI_IDENTITYLINK(SCOPE_DEFINITION_ID_, SCOPE_TYPE_);

View File

@@ -0,0 +1,24 @@
-- Runtime identity links (candidate/assignee/owner relations for live work).
create table ACT_RU_IDENTITYLINK (
ID_ VARCHAR2(64),
REV_ INTEGER,
GROUP_ID_ VARCHAR2(255),
TYPE_ VARCHAR2(255),
USER_ID_ VARCHAR2(255),
TASK_ID_ VARCHAR2(64),
PROC_INST_ID_ VARCHAR2(64),
PROC_DEF_ID_ VARCHAR2(64),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
primary key (ID_)
);
-- Query paths: by user, by group, and by each scope-id variant + scope type.
create index ACT_IDX_IDENT_LNK_USER on ACT_RU_IDENTITYLINK(USER_ID_);
create index ACT_IDX_IDENT_LNK_GROUP on ACT_RU_IDENTITYLINK(GROUP_ID_);
create index ACT_IDX_IDENT_LNK_SCOPE on ACT_RU_IDENTITYLINK(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_IDENT_LNK_SUB_SCOPE on ACT_RU_IDENTITYLINK(SUB_SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_IDENT_LNK_SCOPE_DEF on ACT_RU_IDENTITYLINK(SCOPE_DEFINITION_ID_, SCOPE_TYPE_);
-- Record the identity-link component's schema version (REV_ = 1).
insert into ACT_GE_PROPERTY values ('identitylink.schema.version', '7.0.1.1', 1);

View File

@@ -0,0 +1,6 @@
-- Tear-down of the historic identity-link table: indexes first, then table.
drop index ACT_IDX_HI_IDENT_LNK_USER;
drop index ACT_IDX_HI_IDENT_LNK_SCOPE;
drop index ACT_IDX_HI_IDENT_LNK_SUB_SCOPE;
drop index ACT_IDX_HI_IDENT_LNK_SCOPE_DEF;
drop table ACT_HI_IDENTITYLINK;

View File

@@ -0,0 +1,7 @@
-- Tear-down of the runtime identity-link table: indexes first, then table.
drop index ACT_IDX_IDENT_LNK_USER;
drop index ACT_IDX_IDENT_LNK_GROUP;
drop index ACT_IDX_IDENT_LNK_SCOPE;
drop index ACT_IDX_IDENT_LNK_SUB_SCOPE;
drop index ACT_IDX_IDENT_LNK_SCOPE_DEF;
drop table ACT_RU_IDENTITYLINK;

View File

@@ -0,0 +1,108 @@
-- Key/value property store for the identity component, seeded with its
-- schema version so upgrades can detect the installed revision.
create table ACT_ID_PROPERTY (
NAME_ VARCHAR2(64),
VALUE_ VARCHAR2(300),
REV_ INTEGER,
primary key (NAME_)
);
insert into ACT_ID_PROPERTY
values ('schema.version', '7.0.1.1', 1);
-- Binary payloads belonging to identity entities (e.g. pictures).
create table ACT_ID_BYTEARRAY (
ID_ VARCHAR2(64),
REV_ INTEGER,
NAME_ VARCHAR2(255),
BYTES_ BLOB,
primary key (ID_)
);
-- Groups a user can belong to.
create table ACT_ID_GROUP (
ID_ VARCHAR2(64),
REV_ INTEGER,
NAME_ VARCHAR2(255),
TYPE_ VARCHAR2(255),
primary key (ID_)
);
-- Many-to-many join between users and groups; composite PK prevents duplicates.
create table ACT_ID_MEMBERSHIP (
USER_ID_ VARCHAR2(64),
GROUP_ID_ VARCHAR2(64),
primary key (USER_ID_, GROUP_ID_)
);
-- User accounts; TENANT_ID_ defaults to '' (no tenant).
create table ACT_ID_USER (
ID_ VARCHAR2(64),
REV_ INTEGER,
FIRST_ VARCHAR2(255),
LAST_ VARCHAR2(255),
DISPLAY_NAME_ VARCHAR2(255),
EMAIL_ VARCHAR2(255),
PWD_ VARCHAR2(255),
PICTURE_ID_ VARCHAR2(64),
TENANT_ID_ VARCHAR2(255) default '',
primary key (ID_)
);
-- Free-form per-user info records (TYPE_/KEY_/VALUE_, optionally nested
-- via PARENT_ID_, with an optional binary PASSWORD_ payload).
create table ACT_ID_INFO (
ID_ VARCHAR2(64),
REV_ INTEGER,
USER_ID_ VARCHAR2(64),
TYPE_ VARCHAR2(64),
KEY_ VARCHAR2(255),
VALUE_ VARCHAR2(255),
PASSWORD_ BLOB,
PARENT_ID_ VARCHAR2(255),
primary key (ID_)
);
-- Login tokens (remember-me style), including client IP and user agent.
create table ACT_ID_TOKEN (
ID_ VARCHAR2(64) not null,
REV_ INTEGER,
TOKEN_VALUE_ VARCHAR2(255),
TOKEN_DATE_ TIMESTAMP(6),
IP_ADDRESS_ VARCHAR2(255),
USER_AGENT_ VARCHAR2(255),
USER_ID_ VARCHAR2(255),
TOKEN_DATA_ VARCHAR2(2000),
primary key (ID_)
);
-- Named privileges, and their assignment to users and/or groups.
create table ACT_ID_PRIV (
ID_ VARCHAR2(64) not null,
NAME_ VARCHAR2(255) not null,
primary key (ID_)
);
create table ACT_ID_PRIV_MAPPING (
ID_ VARCHAR2(64) not null,
PRIV_ID_ VARCHAR2(64) not null,
USER_ID_ VARCHAR2(255),
GROUP_ID_ VARCHAR2(255),
primary key (ID_)
);
-- Referential integrity and lookup indexes for the identity tables.
-- Each FK index is created before the constraint that uses the column.
create index ACT_IDX_MEMB_GROUP on ACT_ID_MEMBERSHIP(GROUP_ID_);
alter table ACT_ID_MEMBERSHIP
add constraint ACT_FK_MEMB_GROUP
foreign key (GROUP_ID_)
references ACT_ID_GROUP (ID_);
create index ACT_IDX_MEMB_USER on ACT_ID_MEMBERSHIP(USER_ID_);
alter table ACT_ID_MEMBERSHIP
add constraint ACT_FK_MEMB_USER
foreign key (USER_ID_)
references ACT_ID_USER (ID_);
create index ACT_IDX_PRIV_MAPPING on ACT_ID_PRIV_MAPPING(PRIV_ID_);
alter table ACT_ID_PRIV_MAPPING
add constraint ACT_FK_PRIV_MAPPING
foreign key (PRIV_ID_)
references ACT_ID_PRIV (ID_);
create index ACT_IDX_PRIV_USER on ACT_ID_PRIV_MAPPING(USER_ID_);
create index ACT_IDX_PRIV_GROUP on ACT_ID_PRIV_MAPPING(GROUP_ID_);
-- Privilege names must be unique across the schema.
alter table ACT_ID_PRIV
add constraint ACT_UNIQ_PRIV_NAME
unique (NAME_);

View File

@@ -0,0 +1,22 @@
-- Tear-down of the identity schema. FK constraints are dropped first so the
-- referenced tables (GROUP, USER, PRIV) can be dropped afterwards.
alter table ACT_ID_MEMBERSHIP
drop CONSTRAINT ACT_FK_MEMB_GROUP;
alter table ACT_ID_MEMBERSHIP
drop CONSTRAINT ACT_FK_MEMB_USER;
alter table ACT_ID_PRIV_MAPPING
drop CONSTRAINT ACT_FK_PRIV_MAPPING;
drop index ACT_IDX_MEMB_GROUP;
drop index ACT_IDX_MEMB_USER;
drop index ACT_IDX_PRIV_MAPPING;
drop table ACT_ID_PROPERTY;
drop table ACT_ID_BYTEARRAY;
drop table ACT_ID_INFO;
drop table ACT_ID_MEMBERSHIP;
drop table ACT_ID_GROUP;
drop table ACT_ID_USER;
drop table ACT_ID_TOKEN;
drop table ACT_ID_PRIV;
drop table ACT_ID_PRIV_MAPPING;

View File

@@ -0,0 +1,261 @@
-- Executable (async) jobs. EXCLUSIVE_ is a boolean emulated as NUMBER(1)
-- with a CHECK constraint; LOCK_* columns implement optimistic job locking.
create table ACT_RU_JOB (
ID_ VARCHAR2(64) NOT NULL,
REV_ INTEGER,
CATEGORY_ VARCHAR2(255),
TYPE_ VARCHAR2(255) NOT NULL,
LOCK_EXP_TIME_ TIMESTAMP(6),
LOCK_OWNER_ VARCHAR2(255),
EXCLUSIVE_ NUMBER(1) CHECK (EXCLUSIVE_ IN (1,0)),
EXECUTION_ID_ VARCHAR2(64),
PROCESS_INSTANCE_ID_ VARCHAR2(64),
PROC_DEF_ID_ VARCHAR2(64),
ELEMENT_ID_ VARCHAR2(255),
ELEMENT_NAME_ VARCHAR2(255),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
CORRELATION_ID_ VARCHAR2(255),
RETRIES_ INTEGER,
EXCEPTION_STACK_ID_ VARCHAR2(64),
EXCEPTION_MSG_ VARCHAR2(2000),
DUEDATE_ TIMESTAMP(6),
REPEAT_ VARCHAR2(255),
HANDLER_TYPE_ VARCHAR2(255),
HANDLER_CFG_ VARCHAR2(2000),
CUSTOM_VALUES_ID_ VARCHAR2(64),
CREATE_TIME_ TIMESTAMP(6),
TENANT_ID_ VARCHAR2(255) DEFAULT '',
primary key (ID_)
);
-- Timer jobs waiting for their DUEDATE_; same column layout as ACT_RU_JOB.
create table ACT_RU_TIMER_JOB (
ID_ VARCHAR2(64) NOT NULL,
REV_ INTEGER,
CATEGORY_ VARCHAR2(255),
TYPE_ VARCHAR2(255) NOT NULL,
LOCK_EXP_TIME_ TIMESTAMP(6),
LOCK_OWNER_ VARCHAR2(255),
EXCLUSIVE_ NUMBER(1) CHECK (EXCLUSIVE_ IN (1,0)),
EXECUTION_ID_ VARCHAR2(64),
PROCESS_INSTANCE_ID_ VARCHAR2(64),
PROC_DEF_ID_ VARCHAR2(64),
ELEMENT_ID_ VARCHAR2(255),
ELEMENT_NAME_ VARCHAR2(255),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
CORRELATION_ID_ VARCHAR2(255),
RETRIES_ INTEGER,
EXCEPTION_STACK_ID_ VARCHAR2(64),
EXCEPTION_MSG_ VARCHAR2(2000),
DUEDATE_ TIMESTAMP(6),
REPEAT_ VARCHAR2(255),
HANDLER_TYPE_ VARCHAR2(255),
HANDLER_CFG_ VARCHAR2(2000),
CUSTOM_VALUES_ID_ VARCHAR2(64),
CREATE_TIME_ TIMESTAMP(6),
TENANT_ID_ VARCHAR2(255) DEFAULT '',
primary key (ID_)
);
-- Jobs parked while their process/scope is suspended; no LOCK_* columns
-- because suspended jobs are never acquired by the async executor.
create table ACT_RU_SUSPENDED_JOB (
ID_ VARCHAR2(64) NOT NULL,
REV_ INTEGER,
CATEGORY_ VARCHAR2(255),
TYPE_ VARCHAR2(255) NOT NULL,
EXCLUSIVE_ NUMBER(1) CHECK (EXCLUSIVE_ IN (1,0)),
EXECUTION_ID_ VARCHAR2(64),
PROCESS_INSTANCE_ID_ VARCHAR2(64),
PROC_DEF_ID_ VARCHAR2(64),
ELEMENT_ID_ VARCHAR2(255),
ELEMENT_NAME_ VARCHAR2(255),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
CORRELATION_ID_ VARCHAR2(255),
RETRIES_ INTEGER,
EXCEPTION_STACK_ID_ VARCHAR2(64),
EXCEPTION_MSG_ VARCHAR2(2000),
DUEDATE_ TIMESTAMP(6),
REPEAT_ VARCHAR2(255),
HANDLER_TYPE_ VARCHAR2(255),
HANDLER_CFG_ VARCHAR2(2000),
CUSTOM_VALUES_ID_ VARCHAR2(64),
CREATE_TIME_ TIMESTAMP(6),
TENANT_ID_ VARCHAR2(255) DEFAULT '',
primary key (ID_)
);
-- Jobs moved here after exhausting retries; note there is no RETRIES_ or
-- LOCK_* column — dead-letter jobs are inert until manually re-queued.
create table ACT_RU_DEADLETTER_JOB (
ID_ VARCHAR2(64) NOT NULL,
REV_ INTEGER,
CATEGORY_ VARCHAR2(255),
TYPE_ VARCHAR2(255) NOT NULL,
EXCLUSIVE_ NUMBER(1) CHECK (EXCLUSIVE_ IN (1,0)),
EXECUTION_ID_ VARCHAR2(64),
PROCESS_INSTANCE_ID_ VARCHAR2(64),
PROC_DEF_ID_ VARCHAR2(64),
ELEMENT_ID_ VARCHAR2(255),
ELEMENT_NAME_ VARCHAR2(255),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
CORRELATION_ID_ VARCHAR2(255),
EXCEPTION_STACK_ID_ VARCHAR2(64),
EXCEPTION_MSG_ VARCHAR2(2000),
DUEDATE_ TIMESTAMP(6),
REPEAT_ VARCHAR2(255),
HANDLER_TYPE_ VARCHAR2(255),
HANDLER_CFG_ VARCHAR2(2000),
CUSTOM_VALUES_ID_ VARCHAR2(64),
CREATE_TIME_ TIMESTAMP(6),
TENANT_ID_ VARCHAR2(255) DEFAULT '',
primary key (ID_)
);
-- Asynchronous history jobs (deferred writes of audit data).
create table ACT_RU_HISTORY_JOB (
ID_ VARCHAR2(64) NOT NULL,
REV_ INTEGER,
LOCK_EXP_TIME_ TIMESTAMP(6),
LOCK_OWNER_ VARCHAR2(255),
RETRIES_ INTEGER,
EXCEPTION_STACK_ID_ VARCHAR2(64),
EXCEPTION_MSG_ VARCHAR2(2000),
HANDLER_TYPE_ VARCHAR2(255),
HANDLER_CFG_ VARCHAR2(2000),
CUSTOM_VALUES_ID_ VARCHAR2(64),
ADV_HANDLER_CFG_ID_ VARCHAR2(64),
CREATE_TIME_ TIMESTAMP(6),
SCOPE_TYPE_ VARCHAR2(255),
TENANT_ID_ VARCHAR2(255) DEFAULT '',
primary key (ID_)
);
-- External-worker jobs, acquired and completed by clients outside the engine.
create table ACT_RU_EXTERNAL_JOB (
ID_ VARCHAR2(64) NOT NULL,
REV_ INTEGER,
CATEGORY_ VARCHAR2(255),
TYPE_ VARCHAR2(255) NOT NULL,
LOCK_EXP_TIME_ TIMESTAMP(6),
LOCK_OWNER_ VARCHAR2(255),
EXCLUSIVE_ NUMBER(1) CHECK (EXCLUSIVE_ IN (1,0)),
EXECUTION_ID_ VARCHAR2(64),
PROCESS_INSTANCE_ID_ VARCHAR2(64),
PROC_DEF_ID_ VARCHAR2(64),
ELEMENT_ID_ VARCHAR2(255),
ELEMENT_NAME_ VARCHAR2(255),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
CORRELATION_ID_ VARCHAR2(255),
RETRIES_ INTEGER,
EXCEPTION_STACK_ID_ VARCHAR2(64),
EXCEPTION_MSG_ VARCHAR2(2000),
DUEDATE_ TIMESTAMP(6),
REPEAT_ VARCHAR2(255),
HANDLER_TYPE_ VARCHAR2(255),
HANDLER_CFG_ VARCHAR2(2000),
CUSTOM_VALUES_ID_ VARCHAR2(64),
CREATE_TIME_ TIMESTAMP(6),
TENANT_ID_ VARCHAR2(255) DEFAULT '',
primary key (ID_)
);
-- Indexes and FK constraints for the job tables. Naming scheme per table:
-- JOB=async, TJOB=timer, SJOB=suspended, DJOB=dead-letter, EJOB=external.
create index ACT_IDX_JOB_EXCEPTION on ACT_RU_JOB(EXCEPTION_STACK_ID_);
create index ACT_IDX_JOB_CUSTOM_VAL_ID on ACT_RU_JOB(CUSTOM_VALUES_ID_);
create index ACT_IDX_JOB_CORRELATION_ID on ACT_RU_JOB(CORRELATION_ID_);
create index ACT_IDX_TJOB_EXCEPTION on ACT_RU_TIMER_JOB(EXCEPTION_STACK_ID_);
create index ACT_IDX_TJOB_CUSTOM_VAL_ID on ACT_RU_TIMER_JOB(CUSTOM_VALUES_ID_);
create index ACT_IDX_TJOB_CORRELATION_ID on ACT_RU_TIMER_JOB(CORRELATION_ID_);
create index ACT_IDX_TJOB_DUEDATE on ACT_RU_TIMER_JOB(DUEDATE_);
create index ACT_IDX_SJOB_EXCEPTION on ACT_RU_SUSPENDED_JOB(EXCEPTION_STACK_ID_);
create index ACT_IDX_SJOB_CUSTOM_VAL_ID on ACT_RU_SUSPENDED_JOB(CUSTOM_VALUES_ID_);
create index ACT_IDX_SJOB_CORRELATION_ID on ACT_RU_SUSPENDED_JOB(CORRELATION_ID_);
create index ACT_IDX_DJOB_EXCEPTION on ACT_RU_DEADLETTER_JOB(EXCEPTION_STACK_ID_);
create index ACT_IDX_DJOB_CUSTOM_VAL_ID on ACT_RU_DEADLETTER_JOB(CUSTOM_VALUES_ID_);
create index ACT_IDX_DJOB_CORRELATION_ID on ACT_RU_DEADLETTER_JOB(CORRELATION_ID_);
create index ACT_IDX_EJOB_EXCEPTION on ACT_RU_EXTERNAL_JOB(EXCEPTION_STACK_ID_);
create index ACT_IDX_EJOB_CUSTOM_VAL_ID on ACT_RU_EXTERNAL_JOB(CUSTOM_VALUES_ID_);
create index ACT_IDX_EJOB_CORRELATION_ID on ACT_RU_EXTERNAL_JOB(CORRELATION_ID_);
-- Exception stacks and custom values live in ACT_GE_BYTEARRAY.
alter table ACT_RU_JOB
add constraint ACT_FK_JOB_EXCEPTION
foreign key (EXCEPTION_STACK_ID_)
references ACT_GE_BYTEARRAY (ID_);
alter table ACT_RU_JOB
add constraint ACT_FK_JOB_CUSTOM_VAL
foreign key (CUSTOM_VALUES_ID_)
references ACT_GE_BYTEARRAY (ID_);
alter table ACT_RU_TIMER_JOB
add constraint ACT_FK_TJOB_EXCEPTION
foreign key (EXCEPTION_STACK_ID_)
references ACT_GE_BYTEARRAY (ID_);
alter table ACT_RU_TIMER_JOB
add constraint ACT_FK_TJOB_CUSTOM_VAL
foreign key (CUSTOM_VALUES_ID_)
references ACT_GE_BYTEARRAY (ID_);
alter table ACT_RU_SUSPENDED_JOB
add constraint ACT_FK_SJOB_EXCEPTION
foreign key (EXCEPTION_STACK_ID_)
references ACT_GE_BYTEARRAY (ID_);
alter table ACT_RU_SUSPENDED_JOB
add constraint ACT_FK_SJOB_CUSTOM_VAL
foreign key (CUSTOM_VALUES_ID_)
references ACT_GE_BYTEARRAY (ID_);
alter table ACT_RU_DEADLETTER_JOB
add constraint ACT_FK_DJOB_EXCEPTION
foreign key (EXCEPTION_STACK_ID_)
references ACT_GE_BYTEARRAY (ID_);
alter table ACT_RU_DEADLETTER_JOB
add constraint ACT_FK_DJOB_CUSTOM_VAL
foreign key (CUSTOM_VALUES_ID_)
references ACT_GE_BYTEARRAY (ID_);
alter table ACT_RU_EXTERNAL_JOB
add constraint ACT_FK_EJOB_EXCEPTION
foreign key (EXCEPTION_STACK_ID_)
references ACT_GE_BYTEARRAY (ID_);
alter table ACT_RU_EXTERNAL_JOB
add constraint ACT_FK_EJOB_CUSTOM_VAL
foreign key (CUSTOM_VALUES_ID_)
references ACT_GE_BYTEARRAY (ID_);
-- Scope lookup indexes, one triple per job table.
create index ACT_IDX_JOB_SCOPE on ACT_RU_JOB(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_JOB_SUB_SCOPE on ACT_RU_JOB(SUB_SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_JOB_SCOPE_DEF on ACT_RU_JOB(SCOPE_DEFINITION_ID_, SCOPE_TYPE_);
create index ACT_IDX_TJOB_SCOPE on ACT_RU_TIMER_JOB(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_TJOB_SUB_SCOPE on ACT_RU_TIMER_JOB(SUB_SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_TJOB_SCOPE_DEF on ACT_RU_TIMER_JOB(SCOPE_DEFINITION_ID_, SCOPE_TYPE_);
create index ACT_IDX_SJOB_SCOPE on ACT_RU_SUSPENDED_JOB(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_SJOB_SUB_SCOPE on ACT_RU_SUSPENDED_JOB(SUB_SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_SJOB_SCOPE_DEF on ACT_RU_SUSPENDED_JOB(SCOPE_DEFINITION_ID_, SCOPE_TYPE_);
create index ACT_IDX_DJOB_SCOPE on ACT_RU_DEADLETTER_JOB(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_DJOB_SUB_SCOPE on ACT_RU_DEADLETTER_JOB(SUB_SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_DJOB_SCOPE_DEF on ACT_RU_DEADLETTER_JOB(SCOPE_DEFINITION_ID_, SCOPE_TYPE_);
create index ACT_IDX_EJOB_SCOPE on ACT_RU_EXTERNAL_JOB(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_EJOB_SUB_SCOPE on ACT_RU_EXTERNAL_JOB(SUB_SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_EJOB_SCOPE_DEF on ACT_RU_EXTERNAL_JOB(SCOPE_DEFINITION_ID_, SCOPE_TYPE_);
-- Record the job component's schema version (REV_ = 1).
insert into ACT_GE_PROPERTY values ('job.schema.version', '7.0.1.1', 1);

View File

@@ -0,0 +1,74 @@
-- Tear-down of the job-service schema (mirror of the create script).
-- Statement order matters: indexes and FK constraints are removed before
-- the tables that own them.
drop index ACT_IDX_JOB_SCOPE;
drop index ACT_IDX_JOB_SUB_SCOPE;
drop index ACT_IDX_JOB_SCOPE_DEF;
drop index ACT_IDX_TJOB_SCOPE;
drop index ACT_IDX_TJOB_SUB_SCOPE;
drop index ACT_IDX_TJOB_SCOPE_DEF;
drop index ACT_IDX_SJOB_SCOPE;
drop index ACT_IDX_SJOB_SUB_SCOPE;
drop index ACT_IDX_SJOB_SCOPE_DEF;
drop index ACT_IDX_DJOB_SCOPE;
drop index ACT_IDX_DJOB_SUB_SCOPE;
drop index ACT_IDX_DJOB_SCOPE_DEF;
drop index ACT_IDX_EJOB_SCOPE;
drop index ACT_IDX_EJOB_SUB_SCOPE;
drop index ACT_IDX_EJOB_SCOPE_DEF;
drop index ACT_IDX_JOB_EXCEPTION;
drop index ACT_IDX_JOB_CUSTOM_VAL_ID;
drop index ACT_IDX_JOB_CORRELATION_ID;
drop index ACT_IDX_TJOB_EXCEPTION;
drop index ACT_IDX_TJOB_CUSTOM_VAL_ID;
drop index ACT_IDX_TJOB_CORRELATION_ID;
drop index ACT_IDX_TJOB_DUEDATE;
drop index ACT_IDX_SJOB_EXCEPTION;
drop index ACT_IDX_SJOB_CUSTOM_VAL_ID;
drop index ACT_IDX_SJOB_CORRELATION_ID;
drop index ACT_IDX_DJOB_EXCEPTION;
drop index ACT_IDX_DJOB_CUSTOM_VAL_ID;
drop index ACT_IDX_DJOB_CORRELATION_ID;
drop index ACT_IDX_EJOB_EXCEPTION;
drop index ACT_IDX_EJOB_CUSTOM_VAL_ID;
drop index ACT_IDX_EJOB_CORRELATION_ID;
alter table ACT_RU_JOB
drop CONSTRAINT ACT_FK_JOB_EXCEPTION;
alter table ACT_RU_JOB
drop CONSTRAINT ACT_FK_JOB_CUSTOM_VAL;
alter table ACT_RU_TIMER_JOB
drop CONSTRAINT ACT_FK_TJOB_EXCEPTION;
alter table ACT_RU_TIMER_JOB
drop CONSTRAINT ACT_FK_TJOB_CUSTOM_VAL;
alter table ACT_RU_SUSPENDED_JOB
drop CONSTRAINT ACT_FK_SJOB_EXCEPTION;
alter table ACT_RU_SUSPENDED_JOB
drop CONSTRAINT ACT_FK_SJOB_CUSTOM_VAL;
alter table ACT_RU_DEADLETTER_JOB
drop CONSTRAINT ACT_FK_DJOB_EXCEPTION;
alter table ACT_RU_DEADLETTER_JOB
drop CONSTRAINT ACT_FK_DJOB_CUSTOM_VAL;
-- BUGFIX: the external-job FK constraints are named ACT_FK_EJOB_* in the
-- create script; the previous ACT_FK_DJOB_* names here belong to the
-- dead-letter table, so both statements would fail (no such constraint on
-- ACT_RU_EXTERNAL_JOB).
alter table ACT_RU_EXTERNAL_JOB
drop CONSTRAINT ACT_FK_EJOB_EXCEPTION;
alter table ACT_RU_EXTERNAL_JOB
drop CONSTRAINT ACT_FK_EJOB_CUSTOM_VAL;
drop table ACT_RU_JOB;
drop table ACT_RU_TIMER_JOB;
drop table ACT_RU_SUSPENDED_JOB;
drop table ACT_RU_DEADLETTER_JOB;
drop table ACT_RU_HISTORY_JOB;
drop table ACT_RU_EXTERNAL_JOB;

View File

@@ -0,0 +1,64 @@
-- Historic task instances: one row per task, updated through its lifecycle
-- (start, claim, suspend, complete). DURATION_ is in milliseconds per the
-- NUMBER(19,0) long mapping — presumably; confirm against engine mapping.
create table ACT_HI_TASKINST (
ID_ VARCHAR2(64) not null,
REV_ INTEGER default 1,
PROC_DEF_ID_ VARCHAR2(64),
TASK_DEF_ID_ VARCHAR2(64),
TASK_DEF_KEY_ VARCHAR2(255),
PROC_INST_ID_ VARCHAR2(64),
EXECUTION_ID_ VARCHAR2(64),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
PROPAGATED_STAGE_INST_ID_ VARCHAR2(255),
PARENT_TASK_ID_ VARCHAR2(64),
STATE_ VARCHAR2(255),
NAME_ VARCHAR2(255),
DESCRIPTION_ VARCHAR2(2000),
OWNER_ VARCHAR2(255),
ASSIGNEE_ VARCHAR2(255),
START_TIME_ TIMESTAMP(6) not null,
IN_PROGRESS_TIME_ TIMESTAMP(6),
IN_PROGRESS_STARTED_BY_ VARCHAR2(255),
CLAIM_TIME_ TIMESTAMP(6),
CLAIMED_BY_ VARCHAR2(255),
SUSPENDED_TIME_ TIMESTAMP(6),
SUSPENDED_BY_ VARCHAR2(255),
END_TIME_ TIMESTAMP(6),
COMPLETED_BY_ VARCHAR2(255),
DURATION_ NUMBER(19,0),
DELETE_REASON_ VARCHAR2(2000),
PRIORITY_ INTEGER,
IN_PROGRESS_DUE_DATE_ TIMESTAMP(6),
DUE_DATE_ TIMESTAMP(6),
FORM_KEY_ VARCHAR2(255),
CATEGORY_ VARCHAR2(255),
TENANT_ID_ VARCHAR2(255) default '',
LAST_UPDATED_TIME_ TIMESTAMP(6),
primary key (ID_)
);
-- Historic task event log; IDs come from the dedicated sequence below
-- (Oracle/DM have no auto-increment columns in this dialect).
create table ACT_HI_TSK_LOG (
ID_ NUMBER(19),
TYPE_ VARCHAR2(64),
TASK_ID_ VARCHAR2(64) not null,
TIME_STAMP_ TIMESTAMP(6) not null,
USER_ID_ VARCHAR2(255),
DATA_ VARCHAR2(2000),
EXECUTION_ID_ VARCHAR2(64),
PROC_INST_ID_ VARCHAR2(64),
PROC_DEF_ID_ VARCHAR2(64),
SCOPE_ID_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
TENANT_ID_ VARCHAR2(255) default '',
primary key (ID_)
);
create sequence act_hi_task_evt_log_seq start with 1 increment by 1;
-- Scope-based lookup indexes for historic task queries.
create index ACT_IDX_HI_TASK_SCOPE on ACT_HI_TASKINST(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_HI_TASK_SUB_SCOPE on ACT_HI_TASKINST(SUB_SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_HI_TASK_SCOPE_DEF on ACT_HI_TASKINST(SCOPE_DEFINITION_ID_, SCOPE_TYPE_);

View File

@@ -0,0 +1,48 @@
-- Runtime (open) tasks. IS_COUNT_ENABLED_ plus the *_COUNT_ columns cache
-- related-entity counts to avoid count queries; boolean emulated as NUMBER(1).
create table ACT_RU_TASK (
ID_ VARCHAR2(64),
REV_ INTEGER,
EXECUTION_ID_ VARCHAR2(64),
PROC_INST_ID_ VARCHAR2(64),
PROC_DEF_ID_ VARCHAR2(64),
TASK_DEF_ID_ VARCHAR2(64),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
SCOPE_DEFINITION_ID_ VARCHAR2(255),
PROPAGATED_STAGE_INST_ID_ VARCHAR2(255),
STATE_ VARCHAR2(255),
NAME_ VARCHAR2(255),
PARENT_TASK_ID_ VARCHAR2(64),
DESCRIPTION_ VARCHAR2(2000),
TASK_DEF_KEY_ VARCHAR2(255),
OWNER_ VARCHAR2(255),
ASSIGNEE_ VARCHAR2(255),
DELEGATION_ VARCHAR2(64),
PRIORITY_ INTEGER,
CREATE_TIME_ TIMESTAMP(6),
IN_PROGRESS_TIME_ TIMESTAMP(6),
IN_PROGRESS_STARTED_BY_ VARCHAR2(255),
CLAIM_TIME_ TIMESTAMP(6),
CLAIMED_BY_ VARCHAR2(255),
SUSPENDED_TIME_ TIMESTAMP(6),
SUSPENDED_BY_ VARCHAR2(255),
IN_PROGRESS_DUE_DATE_ TIMESTAMP(6),
DUE_DATE_ TIMESTAMP(6),
CATEGORY_ VARCHAR2(255),
SUSPENSION_STATE_ INTEGER,
TENANT_ID_ VARCHAR2(255) DEFAULT '',
FORM_KEY_ VARCHAR2(255),
IS_COUNT_ENABLED_ NUMBER(1) CHECK (IS_COUNT_ENABLED_ IN (1,0)),
VAR_COUNT_ INTEGER,
ID_LINK_COUNT_ INTEGER,
SUB_TASK_COUNT_ INTEGER,
primary key (ID_)
);
-- Ordering/filtering indexes for task list queries.
create index ACT_IDX_TASK_CREATE on ACT_RU_TASK(CREATE_TIME_);
create index ACT_IDX_TASK_SCOPE on ACT_RU_TASK(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_TASK_SUB_SCOPE on ACT_RU_TASK(SUB_SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_TASK_SCOPE_DEF on ACT_RU_TASK(SCOPE_DEFINITION_ID_, SCOPE_TYPE_);
-- Record the task component's schema version (REV_ = 1).
insert into ACT_GE_PROPERTY values ('task.schema.version', '7.0.1.1', 1);

View File

@@ -0,0 +1,8 @@
-- Tear-down of historic task tables: indexes, then the log-id sequence,
-- then both tables.
drop index ACT_IDX_HI_TASK_SCOPE;
drop index ACT_IDX_HI_TASK_SUB_SCOPE;
drop index ACT_IDX_HI_TASK_SCOPE_DEF;
drop sequence act_hi_task_evt_log_seq;
drop table ACT_HI_TASKINST;
drop table ACT_HI_TSK_LOG;

View File

@@ -0,0 +1,6 @@
-- Tear-down of the runtime task table: indexes first, then the table.
drop index ACT_IDX_TASK_CREATE;
drop index ACT_IDX_TASK_SCOPE;
drop index ACT_IDX_TASK_SUB_SCOPE;
drop index ACT_IDX_TASK_SCOPE_DEF;
drop table ACT_RU_TASK;

View File

@@ -0,0 +1,26 @@
-- Historic variable instances. A value is stored in exactly one of:
-- DOUBLE_/LONG_ (numeric), TEXT_/TEXT2_ (string/ref), or BYTEARRAY_ID_
-- (serialized payload), selected by VAR_TYPE_.
create table ACT_HI_VARINST (
ID_ VARCHAR2(64) not null,
REV_ INTEGER default 1,
PROC_INST_ID_ VARCHAR2(64),
EXECUTION_ID_ VARCHAR2(64),
TASK_ID_ VARCHAR2(64),
NAME_ VARCHAR2(255) not null,
VAR_TYPE_ VARCHAR2(100),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
BYTEARRAY_ID_ VARCHAR2(64),
DOUBLE_ NUMBER(38,10),
LONG_ NUMBER(19,0),
TEXT_ VARCHAR2(2000),
TEXT2_ VARCHAR2(2000),
META_INFO_ VARCHAR2(2000),
CREATE_TIME_ TIMESTAMP(6),
LAST_UPDATED_TIME_ TIMESTAMP(6),
primary key (ID_)
);
-- Lookup by (name, type) and by scope ids.
create index ACT_IDX_HI_PROCVAR_NAME_TYPE on ACT_HI_VARINST(NAME_, VAR_TYPE_);
create index ACT_IDX_HI_VAR_SCOPE_ID_TYPE on ACT_HI_VARINST(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_HI_VAR_SUB_ID_TYPE on ACT_HI_VARINST(SUB_SCOPE_ID_, SCOPE_TYPE_);

View File

@@ -0,0 +1,31 @@
-- Runtime variable instances; same one-of storage scheme as the historic
-- table (DOUBLE_/LONG_ vs TEXT_/TEXT2_ vs BYTEARRAY_ID_, chosen by TYPE_).
create table ACT_RU_VARIABLE (
ID_ VARCHAR2(64) not null,
REV_ INTEGER,
TYPE_ VARCHAR2(255) not null,
NAME_ VARCHAR2(255) not null,
EXECUTION_ID_ VARCHAR2(64),
PROC_INST_ID_ VARCHAR2(64),
TASK_ID_ VARCHAR2(64),
SCOPE_ID_ VARCHAR2(255),
SUB_SCOPE_ID_ VARCHAR2(255),
SCOPE_TYPE_ VARCHAR2(255),
BYTEARRAY_ID_ VARCHAR2(64),
DOUBLE_ NUMBER(38,10),
LONG_ NUMBER(19,0),
TEXT_ VARCHAR2(2000),
TEXT2_ VARCHAR2(2000),
META_INFO_ VARCHAR2(2000),
primary key (ID_)
);
create index ACT_IDX_RU_VAR_SCOPE_ID_TYPE on ACT_RU_VARIABLE(SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_RU_VAR_SUB_ID_TYPE on ACT_RU_VARIABLE(SUB_SCOPE_ID_, SCOPE_TYPE_);
create index ACT_IDX_VAR_BYTEARRAY on ACT_RU_VARIABLE(BYTEARRAY_ID_);
-- Serialized values live in ACT_GE_BYTEARRAY.
alter table ACT_RU_VARIABLE
add constraint ACT_FK_VAR_BYTEARRAY
foreign key (BYTEARRAY_ID_)
references ACT_GE_BYTEARRAY (ID_);
-- Record the variable component's schema version (REV_ = 1).
insert into ACT_GE_PROPERTY values ('variable.schema.version', '7.0.1.1', 1);

View File

@@ -0,0 +1,6 @@
-- Tear-down of the historic variable table: indexes first, then the table.
drop index ACT_IDX_HI_PROCVAR_NAME_TYPE;
drop index ACT_IDX_HI_VAR_SCOPE_ID_TYPE;
drop index ACT_IDX_HI_VAR_SUB_ID_TYPE;
drop table ACT_HI_VARINST;

View File

@@ -0,0 +1,9 @@
-- Tear-down of the runtime variable table: indexes and the FK to
-- ACT_GE_BYTEARRAY are removed before the table itself.
drop index ACT_IDX_VAR_BYTEARRAY;
drop index ACT_IDX_RU_VAR_SCOPE_ID_TYPE;
drop index ACT_IDX_RU_VAR_SUB_ID_TYPE;
alter table ACT_RU_VARIABLE
drop CONSTRAINT ACT_FK_VAR_BYTEARRAY;
drop table ACT_RU_VARIABLE;

View File

@@ -1,5 +1,7 @@
package com.zt.plat.module.databus.framework.integration.gateway.config; package com.zt.plat.module.databus.framework.integration.gateway.config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.web.reactive.function.client.WebClientCustomizer; import org.springframework.boot.web.reactive.function.client.WebClientCustomizer;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
@@ -17,33 +19,43 @@ public class GatewayWebClientConfiguration {
private final int maxInMemorySize; private final int maxInMemorySize;
private final long maxIdleTimeMillis; private final long maxIdleTimeMillis;
private final long evictInBackgroundMillis; private final long evictInBackgroundMillis;
private final boolean connectionPoolEnabled;
private final ReactorClientHttpConnector httpConnector; private final ReactorClientHttpConnector httpConnector;
private static final Logger log = LoggerFactory.getLogger(GatewayWebClientConfiguration.class);
public GatewayWebClientConfiguration( public GatewayWebClientConfiguration(
@Value("${databus.gateway.web-client.max-in-memory-size:20971520}") int maxInMemorySize, @Value("${databus.gateway.web-client.max-in-memory-size:20971520}") int maxInMemorySize,
@Value("${databus.gateway.web-client.max-idle-time:45000}") long maxIdleTimeMillis, @Value("${databus.gateway.web-client.max-idle-time:45000}") long maxIdleTimeMillis,
@Value("${databus.gateway.web-client.evict-in-background-interval:20000}") long evictInBackgroundMillis) { @Value("${databus.gateway.web-client.evict-in-background-interval:20000}") long evictInBackgroundMillis,
@Value("${databus.gateway.web-client.connection-pool-enabled:true}") boolean connectionPoolEnabled) {
this.maxInMemorySize = maxInMemorySize; this.maxInMemorySize = maxInMemorySize;
this.maxIdleTimeMillis = maxIdleTimeMillis > 0 ? maxIdleTimeMillis : 45000L; this.maxIdleTimeMillis = maxIdleTimeMillis;
this.evictInBackgroundMillis = Math.max(evictInBackgroundMillis, 0L); this.evictInBackgroundMillis = evictInBackgroundMillis;
this.connectionPoolEnabled = connectionPoolEnabled;
this.httpConnector = buildConnector(); this.httpConnector = buildConnector();
} }
@Bean @Bean
public WebClientCustomizer gatewayWebClientCustomizer() { public WebClientCustomizer gatewayWebClientCustomizer() {
// 统一设置 WebClient 连接器与内存限制,避免各处重复配置
return builder -> builder return builder -> builder
.clientConnector(httpConnector) .clientConnector(httpConnector)
.codecs(configurer -> configurer.defaultCodecs().maxInMemorySize(maxInMemorySize)); .codecs(configurer -> configurer.defaultCodecs().maxInMemorySize(maxInMemorySize));
} }
private ReactorClientHttpConnector buildConnector() { private ReactorClientHttpConnector buildConnector() {
ConnectionProvider.Builder providerBuilder = ConnectionProvider.builder("databus-gateway") if (connectionPoolEnabled) {
.maxIdleTime(Duration.ofMillis(maxIdleTimeMillis)); // 启用连接池,基于配置设置空闲回收参数
if (evictInBackgroundMillis > 0) { ConnectionProvider provider = ConnectionProvider.builder("databus-gateway")
providerBuilder.evictInBackground(Duration.ofMillis(evictInBackgroundMillis)); .maxIdleTime(Duration.ofMillis(maxIdleTimeMillis))
.evictInBackground(Duration.ofMillis(evictInBackgroundMillis))
.build();
log.info("Databus gateway WebClient 已启用连接池 (maxIdleTime={}ms, evictInterval={}ms)",
maxIdleTimeMillis, evictInBackgroundMillis);
return new ReactorClientHttpConnector(HttpClient.create(provider).compress(true));
} }
ConnectionProvider provider = providerBuilder.build(); // 关闭连接池,每次请求都会重新建立 TCP 连接
HttpClient httpClient = HttpClient.create(provider).compress(true); log.info("Databus gateway WebClient 已禁用连接池,所有请求将使用全新连接");
return new ReactorClientHttpConnector(httpClient); return new ReactorClientHttpConnector(HttpClient.create().compress(true));
} }
} }

View File

@@ -131,4 +131,9 @@ zt:
ignore-tables: ignore-tables:
- databus_api_client_credential - databus_api_client_credential
databus:
gateway:
web-client:
connection-pool-enabled: false # 默认开启连接池,排查长连接问题时可临时关闭
debug: false debug: false

View File

@@ -5,6 +5,10 @@
<springProperty scope="context" name="zt.info.base-package" source="zt.info.base-package"/> <springProperty scope="context" name="zt.info.base-package" source="zt.info.base-package"/>
<!-- 格式化输出:%d 表示日期,%X{tid} SkWalking 链路追踪编号,%thread 表示线程名,%-5level级别从左显示 5 个字符宽度,%msg日志消息%n是换行符 --> <!-- 格式化输出:%d 表示日期,%X{tid} SkWalking 链路追踪编号,%thread 表示线程名,%-5level级别从左显示 5 个字符宽度,%msg日志消息%n是换行符 -->
<property name="PATTERN_DEFAULT" value="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} | %highlight(${LOG_LEVEL_PATTERN:-%5p} ${PID:- }) | %boldYellow(%thread [%tid]) %boldGreen(%-40.40logger{39}) | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/> <property name="PATTERN_DEFAULT" value="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} | %highlight(${LOG_LEVEL_PATTERN:-%5p} ${PID:- }) | %boldYellow(%thread [%tid]) %boldGreen(%-40.40logger{39}) | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>
<!--应用名称-->
<springProperty scope="context" name="spring.application.name" source="spring.application.name"/>
<!-- 日志输出路径 -->
<property name="LOG_DIR" value="${user.home}/logs/${spring.application.name}"/>
<!-- 控制台 Appender --> <!-- 控制台 Appender -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">      <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">     
@@ -56,11 +60,29 @@
</encoder> </encoder>
</appender> </appender>
<!-- ERROR 级别日志 -->
<appender name="ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_DIR}-error.log</file>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_DIR}-error.%d{yyyy-MM-dd}.log</fileNamePattern>
<maxHistory>30</maxHistory> <!-- 保留30天的日志 -->
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<!--logback的日志级别 FATAL > ERROR > WARN > INFO > DEBUG--> <!--logback的日志级别 FATAL > ERROR > WARN > INFO > DEBUG-->
<!-- 本地环境 --> <!-- 本地环境 -->
<springProfile name="local,dev"> <springProfile name="local,dev">
<root level="WARN"> <root level="WARN">
<appender-ref ref="STDOUT"/> <appender-ref ref="STDOUT"/>
<appender-ref ref="ERROR"/>
<appender-ref ref="GRPC"/> <!-- 本地环境下,如果不想接入 SkyWalking 日志服务,可以注释掉本行 --> <appender-ref ref="GRPC"/> <!-- 本地环境下,如果不想接入 SkyWalking 日志服务,可以注释掉本行 -->
<appender-ref ref="ASYNC"/> <!-- 本地环境下,如果不想打印日志,可以注释掉本行 --> <appender-ref ref="ASYNC"/> <!-- 本地环境下,如果不想打印日志,可以注释掉本行 -->
</root> </root>
@@ -75,6 +97,7 @@
<springProfile name="dev,test,stage,prod,default"> <springProfile name="dev,test,stage,prod,default">
<root level="INFO"> <root level="INFO">
<appender-ref ref="STDOUT"/> <appender-ref ref="STDOUT"/>
<appender-ref ref="ERROR"/>
<appender-ref ref="ASYNC"/> <appender-ref ref="ASYNC"/>
<appender-ref ref="GRPC"/> <appender-ref ref="GRPC"/>
</root> </root>

View File

@@ -5,6 +5,10 @@
<springProperty scope="context" name="zt.info.base-package" source="zt.info.base-package"/> <springProperty scope="context" name="zt.info.base-package" source="zt.info.base-package"/>
<!-- 格式化输出:%d 表示日期,%X{tid} SkWalking 链路追踪编号,%thread 表示线程名,%-5level级别从左显示 5 个字符宽度,%msg日志消息%n是换行符 --> <!-- 格式化输出:%d 表示日期,%X{tid} SkWalking 链路追踪编号,%thread 表示线程名,%-5level级别从左显示 5 个字符宽度,%msg日志消息%n是换行符 -->
<property name="PATTERN_DEFAULT" value="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} | %highlight(${LOG_LEVEL_PATTERN:-%5p} ${PID:- }) | %boldYellow(%thread [%tid]) %boldGreen(%-40.40logger{39}) | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/> <property name="PATTERN_DEFAULT" value="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} | %highlight(${LOG_LEVEL_PATTERN:-%5p} ${PID:- }) | %boldYellow(%thread [%tid]) %boldGreen(%-40.40logger{39}) | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>
<!--应用名称-->
<springProperty scope="context" name="spring.application.name" source="spring.application.name"/>
<!-- 日志输出路径 -->
<property name="LOG_DIR" value="${user.home}/logs/${spring.application.name}"/>
<!-- 控制台 Appender --> <!-- 控制台 Appender -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">      <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">     
@@ -56,11 +60,29 @@
</encoder> </encoder>
</appender> </appender>
<!-- ERROR 级别日志 -->
<appender name="ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_DIR}-error.log</file>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_DIR}-error.%d{yyyy-MM-dd}.log</fileNamePattern>
<maxHistory>30</maxHistory> <!-- 保留30天的日志 -->
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<!--logback的日志级别 FATAL > ERROR > WARN > INFO > DEBUG--> <!--logback的日志级别 FATAL > ERROR > WARN > INFO > DEBUG-->
<!-- 本地环境 --> <!-- 本地环境 -->
<springProfile name="local,dev"> <springProfile name="local,dev">
<root level="WARN"> <root level="WARN">
<appender-ref ref="STDOUT"/> <appender-ref ref="STDOUT"/>
<appender-ref ref="ERROR"/>
<appender-ref ref="GRPC"/> <!-- 本地环境下,如果不想接入 SkyWalking 日志服务,可以注释掉本行 --> <appender-ref ref="GRPC"/> <!-- 本地环境下,如果不想接入 SkyWalking 日志服务,可以注释掉本行 -->
<appender-ref ref="ASYNC"/> <!-- 本地环境下,如果不想打印日志,可以注释掉本行 --> <appender-ref ref="ASYNC"/> <!-- 本地环境下,如果不想打印日志,可以注释掉本行 -->
</root> </root>
@@ -75,6 +97,7 @@
<springProfile name="dev,test,stage,prod,default"> <springProfile name="dev,test,stage,prod,default">
<root level="INFO"> <root level="INFO">
<appender-ref ref="STDOUT"/> <appender-ref ref="STDOUT"/>
<appender-ref ref="ERROR"/>
<appender-ref ref="ASYNC"/> <appender-ref ref="ASYNC"/>
<appender-ref ref="GRPC"/> <appender-ref ref="GRPC"/>
</root> </root>

View File

@@ -5,6 +5,10 @@
<springProperty scope="context" name="zt.info.base-package" source="zt.info.base-package"/> <springProperty scope="context" name="zt.info.base-package" source="zt.info.base-package"/>
<!-- 格式化输出:%d 表示日期,%X{tid} SkWalking 链路追踪编号,%thread 表示线程名,%-5level级别从左显示 5 个字符宽度,%msg日志消息%n是换行符 --> <!-- 格式化输出:%d 表示日期,%X{tid} SkWalking 链路追踪编号,%thread 表示线程名,%-5level级别从左显示 5 个字符宽度,%msg日志消息%n是换行符 -->
<property name="PATTERN_DEFAULT" value="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} | %highlight(${LOG_LEVEL_PATTERN:-%5p} ${PID:- }) | %boldYellow(%thread [%tid]) %boldGreen(%-40.40logger{39}) | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/> <property name="PATTERN_DEFAULT" value="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} | %highlight(${LOG_LEVEL_PATTERN:-%5p} ${PID:- }) | %boldYellow(%thread [%tid]) %boldGreen(%-40.40logger{39}) | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>
<!--应用名称-->
<springProperty scope="context" name="spring.application.name" source="spring.application.name"/>
<!-- 日志输出路径 -->
<property name="LOG_DIR" value="${user.home}/logs/${spring.application.name}"/>
<!-- 控制台 Appender --> <!-- 控制台 Appender -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">      <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">     
@@ -56,11 +60,29 @@
</encoder> </encoder>
</appender> </appender>
<!-- ERROR 级别日志 -->
<appender name="ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_DIR}-error.log</file>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_DIR}-error.%d{yyyy-MM-dd}.log</fileNamePattern>
<maxHistory>30</maxHistory> <!-- 保留30天的日志 -->
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<!--logback的日志级别 FATAL > ERROR > WARN > INFO > DEBUG--> <!--logback的日志级别 FATAL > ERROR > WARN > INFO > DEBUG-->
<!-- 本地环境 --> <!-- 本地环境 -->
<springProfile name="local,dev"> <springProfile name="local,dev">
<root level="WARN"> <root level="WARN">
<appender-ref ref="STDOUT"/> <appender-ref ref="STDOUT"/>
<appender-ref ref="ERROR"/>
<appender-ref ref="GRPC"/> <!-- 本地环境下,如果不想接入 SkyWalking 日志服务,可以注释掉本行 --> <appender-ref ref="GRPC"/> <!-- 本地环境下,如果不想接入 SkyWalking 日志服务,可以注释掉本行 -->
<appender-ref ref="ASYNC"/> <!-- 本地环境下,如果不想打印日志,可以注释掉本行 --> <appender-ref ref="ASYNC"/> <!-- 本地环境下,如果不想打印日志,可以注释掉本行 -->
</root> </root>
@@ -75,6 +97,7 @@
<springProfile name="dev,test,stage,prod,default"> <springProfile name="dev,test,stage,prod,default">
<root level="INFO"> <root level="INFO">
<appender-ref ref="STDOUT"/> <appender-ref ref="STDOUT"/>
<appender-ref ref="ERROR"/>
<appender-ref ref="ASYNC"/> <appender-ref ref="ASYNC"/>
<appender-ref ref="GRPC"/> <appender-ref ref="GRPC"/>
</root> </root>

View File

@@ -0,0 +1,63 @@
package com.zt.plat.module.system.api.iwork;

import com.zt.plat.framework.common.pojo.CommonResult;
import com.zt.plat.module.system.api.iwork.dto.*;
import com.zt.plat.module.system.enums.ApiConstants;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;

/**
 * RPC service - iWork integration.
 * <p>
 * Feign client that exposes the system-server iWork integration endpoints
 * (credential registration, token acquisition, workflow operations and
 * HR/organization paging queries) to other modules. Every endpoint is a POST
 * exchanging the DTOs under {@code com.zt.plat.module.system.api.iwork.dto}.
 */
@FeignClient(name = ApiConstants.NAME, contextId = "iWorkIntegrationApi")
@Tag(name = "RPC 服务 - iWork 集成")
public interface IWorkIntegrationApi {

    // Common URL prefix shared by all iWork integration endpoints.
    String PREFIX = ApiConstants.PREFIX + "/integration/iwork";

    // ----------------- Authentication / session -----------------

    @PostMapping(PREFIX + "/auth/register")
    @Operation(summary = "注册 iWork 凭证,获取服务端公钥与 secret")
    CommonResult<IWorkAuthRegisterRespDTO> register(@RequestBody IWorkAuthRegisterReqDTO reqDTO);

    @PostMapping(PREFIX + "/auth/token")
    @Operation(summary = "申请 iWork Token独立接口")
    CommonResult<IWorkAuthTokenRespDTO> acquireToken(@RequestBody IWorkAuthTokenReqDTO reqDTO);

    // ----------------- Workflow capabilities -----------------

    @PostMapping(PREFIX + "/user/resolve")
    @Operation(summary = "根据外部标识获取 iWork 用户编号")
    CommonResult<IWorkUserInfoRespDTO> resolveUser(@RequestBody IWorkUserInfoReqDTO reqDTO);

    @PostMapping(PREFIX + "/workflow/create")
    @Operation(summary = "发起 iWork 流程")
    CommonResult<IWorkOperationRespDTO> createWorkflow(@RequestBody IWorkWorkflowCreateReqDTO reqDTO);

    @PostMapping(PREFIX + "/workflow/void")
    @Operation(summary = "作废 / 干预 iWork 流程")
    CommonResult<IWorkOperationRespDTO> voidWorkflow(@RequestBody IWorkWorkflowVoidReqDTO reqDTO);

    // ----------------- HR / organization paging -----------------

    @PostMapping(PREFIX + "/hr/subcompany/page")
    @Operation(summary = "获取 iWork 分部列表")
    CommonResult<IWorkHrSubcompanyPageRespDTO> listSubcompanies(@RequestBody IWorkOrgPageReqDTO reqDTO);

    @PostMapping(PREFIX + "/hr/department/page")
    @Operation(summary = "获取 iWork 部门列表")
    CommonResult<IWorkHrDepartmentPageRespDTO> listDepartments(@RequestBody IWorkOrgPageReqDTO reqDTO);

    @PostMapping(PREFIX + "/hr/job-title/page")
    @Operation(summary = "获取 iWork 岗位列表")
    CommonResult<IWorkHrJobTitlePageRespDTO> listJobTitles(@RequestBody IWorkOrgPageReqDTO reqDTO);

    @PostMapping(PREFIX + "/hr/user/page")
    @Operation(summary = "获取 iWork 人员列表")
    CommonResult<IWorkHrUserPageRespDTO> listUsers(@RequestBody IWorkOrgPageReqDTO reqDTO);
}

View File

@@ -0,0 +1,18 @@
package com.zt.plat.module.system.api.iwork.dto;

import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

/**
 * iWork registration/authorization request DTO (used by other modules when
 * calling system-server through Feign).
 */
@Data
public class IWorkAuthRegisterReqDTO {

    // iWork application code identifying the caller; required.
    @Schema(description = "iWork 应用编码", requiredMode = Schema.RequiredMode.REQUIRED)
    private String appCode;

    // iWork gateway base URL; optional override.
    @Schema(description = "iWork 网关地址", requiredMode = Schema.RequiredMode.NOT_REQUIRED)
    private String baseUrl;
}

View File

@@ -0,0 +1,18 @@
package com.zt.plat.module.system.api.iwork.dto;

import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

/**
 * iWork registration/authorization response DTO.
 */
@Data
public class IWorkAuthRegisterRespDTO {

    // Server public key, Base64 encoded.
    @Schema(description = "服务端公钥(Base64)")
    private String publicKey;

    // Secret issued by the server for subsequent calls.
    @Schema(description = "服务端下发的 secret")
    private String secret;
}

View File

@@ -0,0 +1,15 @@
package com.zt.plat.module.system.api.iwork.dto;

import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

/**
 * iWork token acquisition request DTO.
 */
@Data
public class IWorkAuthTokenReqDTO {

    // Application code the token is requested for; required.
    @Schema(description = "应用编码", requiredMode = Schema.RequiredMode.REQUIRED)
    private String appCode;
}

View File

@@ -0,0 +1,18 @@
package com.zt.plat.module.system.api.iwork.dto;

import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

/**
 * iWork token response DTO.
 */
@Data
public class IWorkAuthTokenRespDTO {

    // Access token issued by iWork.
    @Schema(description = "访问令牌")
    private String accessToken;

    // Token lifetime in seconds.
    @Schema(description = "过期时间(秒)")
    private Long expiresIn;
}

View File

@@ -0,0 +1,31 @@
package com.zt.plat.module.system.api.iwork.dto;

import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

import java.util.List;

/**
 * iWork department paging response DTO.
 */
@Data
public class IWorkHrDepartmentPageRespDTO {

    // Total record count across all pages.
    @Schema(description = "总条数")
    private Long total;

    // Records of the current page.
    @Schema(description = "当前页数据")
    private List<Item> list;

    /** A single department row. */
    @Data
    public static class Item {

        // Department id (iWork side).
        @Schema(description = "部门编号")
        private String id;

        // Department name.
        @Schema(description = "部门名称")
        private String name;
    }
}

View File

@@ -0,0 +1,31 @@
package com.zt.plat.module.system.api.iwork.dto;

import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

import java.util.List;

/**
 * iWork job-title paging response DTO.
 */
@Data
public class IWorkHrJobTitlePageRespDTO {

    // Total record count across all pages.
    @Schema(description = "总条数")
    private Long total;

    // Records of the current page.
    @Schema(description = "当前页数据")
    private List<Item> list;

    /** A single job-title row. */
    @Data
    public static class Item {

        // Job-title id (iWork side).
        @Schema(description = "岗位编号")
        private String id;

        // Job-title name.
        @Schema(description = "岗位名称")
        private String name;
    }
}

View File

@@ -0,0 +1,31 @@
package com.zt.plat.module.system.api.iwork.dto;

import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

import java.util.List;

/**
 * iWork subcompany (branch) paging response DTO.
 */
@Data
public class IWorkHrSubcompanyPageRespDTO {

    // Total record count across all pages.
    @Schema(description = "总条数")
    private Long total;

    // Records of the current page.
    @Schema(description = "当前页数据")
    private List<Item> list;

    /** A single subcompany row. */
    @Data
    public static class Item {

        // Subcompany id (iWork side).
        @Schema(description = "分部编号")
        private String id;

        // Subcompany name.
        @Schema(description = "分部名称")
        private String name;
    }
}

View File

@@ -0,0 +1,31 @@
package com.zt.plat.module.system.api.iwork.dto;

import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

import java.util.List;

/**
 * iWork user (personnel) paging response DTO.
 */
@Data
public class IWorkHrUserPageRespDTO {

    // Total record count across all pages.
    @Schema(description = "总条数")
    private Long total;

    // Records of the current page.
    @Schema(description = "当前页数据")
    private List<Item> list;

    /** A single user row. */
    @Data
    public static class Item {

        // User id (iWork side).
        @Schema(description = "人员编号")
        private String id;

        // User display name.
        @Schema(description = "人员名称")
        private String name;
    }
}

View File

@@ -0,0 +1,21 @@
package com.zt.plat.module.system.api.iwork.dto;

import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

/**
 * iWork operation result response DTO (workflow create/void outcomes).
 */
@Data
public class IWorkOperationRespDTO {

    // Whether iWork judged the operation successful.
    @Schema(description = "是否成功")
    private Boolean success;

    // Operation or workflow-instance id returned by iWork.
    @Schema(description = "iWork 返回的操作编号或实例编号")
    private String operationId;

    // Human-readable message from iWork.
    @Schema(description = "提示信息")
    private String message;
}

View File

@@ -0,0 +1,21 @@
package com.zt.plat.module.system.api.iwork.dto;

import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

/**
 * Common paging request DTO for the iWork HR/organization queries
 * (subcompany, department, job-title and user lists).
 */
@Data
public class IWorkOrgPageReqDTO {

    // 1-based page number; required.
    @Schema(description = "页码", example = "1", requiredMode = Schema.RequiredMode.REQUIRED)
    private Integer pageNo;

    // Page size; required.
    @Schema(description = "每页大小", example = "20", requiredMode = Schema.RequiredMode.REQUIRED)
    private Integer pageSize;

    // Optional keyword filter.
    @Schema(description = "关键字过滤")
    private String keyword;
}

View File

@@ -0,0 +1,15 @@
package com.zt.plat.module.system.api.iwork.dto;

import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

/**
 * Request DTO for resolving an iWork user by an external identifier.
 */
@Data
public class IWorkUserInfoReqDTO {

    // Unique user identifier in the external system; required.
    @Schema(description = "外部系统中的用户唯一标识", requiredMode = Schema.RequiredMode.REQUIRED)
    private String externalUserCode;
}

View File

@@ -0,0 +1,18 @@
package com.zt.plat.module.system.api.iwork.dto;

import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

/**
 * iWork user resolution response DTO.
 */
@Data
public class IWorkUserInfoRespDTO {

    // Resolved iWork user id.
    @Schema(description = "iWork 用户编号")
    private String userId;

    // Resolved iWork user name.
    @Schema(description = "iWork 用户名称")
    private String userName;
}

View File

@@ -0,0 +1,46 @@
package com.zt.plat.module.system.api.iwork.dto;

import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

/**
 * Request DTO for initiating an iWork (seal/用印) workflow.
 * <p>
 * Fields correspond one-to-one with {@code IWorkWorkflowCreateReqVO} so the
 * Feign call can be mapped with a plain bean copy. Field names mirror the
 * iWork form field codes (jbr, yybm, fb, ...).
 */
@Data
public class IWorkWorkflowCreateReqDTO {

    // Seal applicant (iWork person id).
    @Schema(description = "用印申请人iWork 人员 ID", example = "1001")
    private String jbr;

    // Seal-using department id.
    @Schema(description = "用印部门 ID", example = "2001")
    private String yybm;

    // Seal-using unit (subcompany id).
    @Schema(description = "用印单位(分部 ID", example = "3001")
    private String fb;

    // Application date, formatted yyyy-MM-dd.
    @Schema(description = "申请时间,格式 yyyy-MM-dd", example = "2025-01-01")
    private String sqsj;

    // Destination of the sealed document.
    @Schema(description = "用印去向")
    private String yyqx;

    // Attachment URL for the seal basis document.
    @Schema(description = "用印依据附件 URL")
    private String yyfkUrl;

    // Reason / content summary for using the seal.
    @Schema(description = "用印事由或内容摘要")
    private String yysy;

    // Attachment URL of the material to be sealed (required by iWork).
    @Schema(description = "用印材料附件 URL必填")
    private String xyywjUrl;

    // File name of the material attachment (required by iWork).
    @Schema(description = "用印材料附件文件名(必填)")
    private String xyywjFileName;

    // Seal matter.
    @Schema(description = "用印事项")
    private String yysx;

    // Business-system document number (used to derive the workflow title).
    @Schema(description = "业务系统单据编号(用于派生流程标题)", example = "DJ-2025-0001")
    private String ywxtdjbh;
}

View File

@@ -0,0 +1,21 @@
package com.zt.plat.module.system.api.iwork.dto;

import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

/**
 * Request DTO for voiding / intervening in an iWork workflow.
 */
@Data
public class IWorkWorkflowVoidReqDTO {

    // iWork workflow instance id; required.
    @Schema(description = "iWork 实例编号", requiredMode = Schema.RequiredMode.REQUIRED)
    private String instanceId;

    // iWork user id of the operator; required.
    @Schema(description = "操作人 iWork 用户编号", requiredMode = Schema.RequiredMode.REQUIRED)
    private String operatorUserId;

    // Optional reason for voiding.
    @Schema(description = "作废原因")
    private String reason;
}

View File

@@ -61,6 +61,7 @@ public interface ErrorCodeConstants {
ErrorCode USER_IMPORT_INIT_PASSWORD = new ErrorCode(1_002_003_009, "初始密码不能为空"); ErrorCode USER_IMPORT_INIT_PASSWORD = new ErrorCode(1_002_003_009, "初始密码不能为空");
ErrorCode USER_MOBILE_NOT_EXISTS = new ErrorCode(1_002_003_010, "该手机号尚未注册"); ErrorCode USER_MOBILE_NOT_EXISTS = new ErrorCode(1_002_003_010, "该手机号尚未注册");
ErrorCode USER_REGISTER_DISABLED = new ErrorCode(1_002_003_011, "注册功能已关闭"); ErrorCode USER_REGISTER_DISABLED = new ErrorCode(1_002_003_011, "注册功能已关闭");
ErrorCode USER_PASSWORD_MODIFY_FORBIDDEN = new ErrorCode(1_002_003_012, "该用户来源不支持修改密码");
// ========== 部门模块 1-002-004-000 ========== // ========== 部门模块 1-002-004-000 ==========
ErrorCode DEPT_NAME_DUPLICATE = new ErrorCode(1_002_004_000, "当前上级部门已存在同名子部门"); ErrorCode DEPT_NAME_DUPLICATE = new ErrorCode(1_002_004_000, "当前上级部门已存在同名子部门");

View File

@@ -0,0 +1,24 @@
package com.zt.plat.module.system.enums.user;

import lombok.AllArgsConstructor;
import lombok.Getter;

/**
 * Password handling strategy, used to distinguish how passwords are stored
 * and verified for locally managed accounts versus accounts synchronized
 * from external systems.
 */
@AllArgsConstructor
@Getter
public enum PasswordStrategyEnum {

    /**
     * Users created or registered locally; verified via the Spring Security
     * {@code PasswordEncoder} (BCrypt).
     */
    LOCAL_BCRYPT("LOCAL_BCRYPT"),

    /**
     * MD5 ciphertext synchronized from iWork; stored and compared directly
     * as upper-case MD5.
     */
    IWORK_MD5("IWORK_MD5");

    // Label string (mirrors the constant name).
    private final String label;
}

View File

@@ -3,6 +3,9 @@ package com.zt.plat.module.system.enums.user;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;
import lombok.Getter; import lombok.Getter;
import java.util.Arrays;
import java.util.Objects;
/** /**
* 用户来源枚举 * 用户来源枚举
* *
@@ -12,9 +15,9 @@ import lombok.Getter;
@Getter @Getter
public enum UserSourceEnum { public enum UserSourceEnum {
EXTERNAL(1, "外部用户"), // 系统创建、注册等方式产生的用户 EXTERNAL(1, "外部用户", PasswordStrategyEnum.LOCAL_BCRYPT), // 系统创建、注册等方式产生的用户
SYNC(2, "同步用户"), // 通过 UserSyncService 同步的用户 SYNC(2, "同步用户", PasswordStrategyEnum.LOCAL_BCRYPT), // 通过 UserSyncService 同步的用户
IWORK(3, "iWork 用户"); // 通过 iWork 全量/单条同步产生的用户 IWORK(3, "iWork 用户", PasswordStrategyEnum.IWORK_MD5); // 通过 iWork 全量/单条同步产生的用户
/** /**
* 类型 * 类型
@@ -24,5 +27,28 @@ public enum UserSourceEnum {
* 名字 * 名字
*/ */
private final String name; private final String name;
/**
* 默认密码策略
*/
private final PasswordStrategyEnum passwordStrategy;
public static UserSourceEnum of(Integer source) {
if (source == null) {
return null;
}
return Arrays.stream(values())
.filter(item -> Objects.equals(item.source, source))
.findFirst()
.orElse(null);
}
public static PasswordStrategyEnum resolvePasswordStrategy(Integer source) {
UserSourceEnum matched = of(source);
return matched == null ? PasswordStrategyEnum.LOCAL_BCRYPT : matched.getPasswordStrategy();
}
public boolean isExternal() {
return this == EXTERNAL;
}
} }

View File

@@ -0,0 +1,111 @@
package com.zt.plat.module.system.api.iwork;

import com.zt.plat.framework.common.pojo.CommonResult;
import com.zt.plat.framework.common.util.object.BeanUtils;
import com.zt.plat.module.system.api.iwork.dto.*;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.*;
import com.zt.plat.module.system.service.integration.iwork.IWorkIntegrationService;
import com.zt.plat.module.system.service.integration.iwork.IWorkOrgRestService;
import com.zt.plat.module.system.service.integration.iwork.IWorkSyncService;
import jakarta.annotation.Resource;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.RestController;

import static com.zt.plat.framework.common.pojo.CommonResult.success;

/**
 * Feign API implementation for the iWork integration.
 * <p>
 * Each endpoint is a thin adapter: it copies the incoming system-api DTO into
 * the matching controller VO, delegates to the service layer, and copies the
 * service result back into the response DTO so that other modules can call
 * the capability uniformly over Feign.
 */
@RestController
@Validated
public class IWorkIntegrationApiImpl implements IWorkIntegrationApi {

    @Resource
    private IWorkIntegrationService integrationService;
    @Resource
    private IWorkOrgRestService orgRestService;
    @Resource
    private IWorkSyncService syncService;

    // ----------------- Authentication / session -----------------

    @Override
    public CommonResult<IWorkAuthRegisterRespDTO> register(IWorkAuthRegisterReqDTO reqDTO) {
        IWorkAuthRegisterRespVO result =
                integrationService.registerSession(BeanUtils.toBean(reqDTO, IWorkAuthRegisterReqVO.class));
        return success(BeanUtils.toBean(result, IWorkAuthRegisterRespDTO.class));
    }

    @Override
    public CommonResult<IWorkAuthTokenRespDTO> acquireToken(IWorkAuthTokenReqDTO reqDTO) {
        IWorkAuthTokenRespVO result =
                integrationService.acquireToken(BeanUtils.toBean(reqDTO, IWorkAuthTokenReqVO.class));
        return success(BeanUtils.toBean(result, IWorkAuthTokenRespDTO.class));
    }

    // ----------------- Workflow capabilities -----------------

    @Override
    public CommonResult<IWorkUserInfoRespDTO> resolveUser(IWorkUserInfoReqDTO reqDTO) {
        IWorkUserInfoRespVO result =
                integrationService.resolveUserId(BeanUtils.toBean(reqDTO, IWorkUserInfoReqVO.class));
        return success(BeanUtils.toBean(result, IWorkUserInfoRespDTO.class));
    }

    @Override
    public CommonResult<IWorkOperationRespDTO> createWorkflow(IWorkWorkflowCreateReqDTO reqDTO) {
        IWorkOperationRespVO result =
                integrationService.createWorkflow(BeanUtils.toBean(reqDTO, IWorkWorkflowCreateReqVO.class));
        return success(BeanUtils.toBean(result, IWorkOperationRespDTO.class));
    }

    @Override
    public CommonResult<IWorkOperationRespDTO> voidWorkflow(IWorkWorkflowVoidReqDTO reqDTO) {
        IWorkOperationRespVO result =
                integrationService.voidWorkflow(BeanUtils.toBean(reqDTO, IWorkWorkflowVoidReqVO.class));
        return success(BeanUtils.toBean(result, IWorkOperationRespDTO.class));
    }

    // ----------------- HR / organization paging -----------------

    @Override
    public CommonResult<IWorkHrSubcompanyPageRespDTO> listSubcompanies(IWorkOrgPageReqDTO reqDTO) {
        IWorkHrSubcompanyPageRespVO result =
                orgRestService.listSubcompanies(BeanUtils.toBean(reqDTO, IWorkSubcompanyQueryReqVO.class));
        return success(BeanUtils.toBean(result, IWorkHrSubcompanyPageRespDTO.class));
    }

    @Override
    public CommonResult<IWorkHrDepartmentPageRespDTO> listDepartments(IWorkOrgPageReqDTO reqDTO) {
        IWorkHrDepartmentPageRespVO result =
                orgRestService.listDepartments(BeanUtils.toBean(reqDTO, IWorkDepartmentQueryReqVO.class));
        return success(BeanUtils.toBean(result, IWorkHrDepartmentPageRespDTO.class));
    }

    @Override
    public CommonResult<IWorkHrJobTitlePageRespDTO> listJobTitles(IWorkOrgPageReqDTO reqDTO) {
        IWorkHrJobTitlePageRespVO result =
                orgRestService.listJobTitles(BeanUtils.toBean(reqDTO, IWorkJobTitleQueryReqVO.class));
        return success(BeanUtils.toBean(result, IWorkHrJobTitlePageRespDTO.class));
    }

    @Override
    public CommonResult<IWorkHrUserPageRespDTO> listUsers(IWorkOrgPageReqDTO reqDTO) {
        IWorkHrUserPageRespVO result =
                orgRestService.listUsers(BeanUtils.toBean(reqDTO, IWorkUserQueryReqVO.class));
        return success(BeanUtils.toBean(result, IWorkHrUserPageRespDTO.class));
    }
}

View File

@@ -113,12 +113,6 @@ public class IWorkIntegrationController {
// ----------------- 同步到本地 ----------------- // ----------------- 同步到本地 -----------------
@PostMapping("/hr/full-sync")
@Operation(summary = "手动触发 iWork 组织/人员同步")
public CommonResult<IWorkFullSyncRespVO> fullSync(@Valid @RequestBody IWorkFullSyncReqVO reqVO) {
return success(syncService.fullSync(reqVO));
}
@PostMapping("/hr/departments/full-sync") @PostMapping("/hr/departments/full-sync")
@Operation(summary = "手动触发 iWork 部门同步") @Operation(summary = "手动触发 iWork 部门同步")
public CommonResult<IWorkFullSyncRespVO> fullSyncDepartments(@Valid @RequestBody IWorkFullSyncReqVO reqVO) { public CommonResult<IWorkFullSyncRespVO> fullSyncDepartments(@Valid @RequestBody IWorkFullSyncReqVO reqVO) {
@@ -142,10 +136,4 @@ public class IWorkIntegrationController {
public CommonResult<IWorkFullSyncRespVO> fullSyncUsers(@Valid @RequestBody IWorkFullSyncReqVO reqVO) { public CommonResult<IWorkFullSyncRespVO> fullSyncUsers(@Valid @RequestBody IWorkFullSyncReqVO reqVO) {
return success(syncService.fullSyncUsers(reqVO)); return success(syncService.fullSyncUsers(reqVO));
} }
@PostMapping("/hr/single-sync")
@Operation(summary = "按 iWork ID 同步单条组织/人员")
public CommonResult<IWorkSingleSyncRespVO> singleSync(@Valid @RequestBody IWorkSingleSyncReqVO reqVO) {
return success(syncService.syncSingle(reqVO));
}
} }

View File

@@ -171,6 +171,10 @@ public class IWorkHrUserPageRespVO {
@JsonProperty("accounttype") @JsonProperty("accounttype")
private String accounttype; private String accounttype;
@Schema(description = "用户密码MD5 密文)")
@JsonProperty("password")
private String password;
@JsonIgnore @JsonIgnore
private Map<String, Object> attributes; private Map<String, Object> attributes;

View File

@@ -14,9 +14,6 @@ public class IWorkOperationRespVO {
@Schema(description = "iWork 返回的原始数据") @Schema(description = "iWork 返回的原始数据")
private Map<String, Object> payload; private Map<String, Object> payload;
@Schema(description = "iWork 返回的原始字符串")
private String rawBody;
@Schema(description = "是否判断为成功") @Schema(description = "是否判断为成功")
private boolean success; private boolean success;

View File

@@ -1,15 +0,0 @@
package com.zt.plat.module.system.controller.admin.integration.iwork.vo;
import io.swagger.v3.oas.annotations.media.Schema;
/**
* @deprecated 请改用强类型的 IWorkHr*RespVO避免再引用该占位类。
*/
@Deprecated(forRemoval = true)
@Schema(description = "已废弃,占位用")
public final class IWorkOrgRespVO {
private IWorkOrgRespVO() {
throw new UnsupportedOperationException("Use IWorkHr*RespVO instead");
}
}

View File

@@ -1,26 +0,0 @@
package com.zt.plat.module.system.controller.admin.integration.iwork.vo;
import com.zt.plat.module.system.enums.integration.IWorkSyncEntityTypeEnum;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.Min;
import jakarta.validation.constraints.NotNull;
import lombok.Data;
/**
* iWork 单条同步请求
*/
@Data
public class IWorkSingleSyncReqVO {
@Schema(description = "同步的实体类型", requiredMode = Schema.RequiredMode.REQUIRED, example = "user")
@NotNull(message = "实体类型不能为空")
private IWorkSyncEntityTypeEnum entityType;
@Schema(description = "iWork 提供的实体主键 ID", requiredMode = Schema.RequiredMode.REQUIRED, example = "10001")
@NotNull(message = "实体 ID 不能为空")
@Min(1)
private Long entityId;
@Schema(description = "缺失时是否自动创建", example = "true")
private Boolean createIfMissing = Boolean.TRUE;
}

View File

@@ -1,27 +0,0 @@
package com.zt.plat.module.system.controller.admin.integration.iwork.vo;
import com.zt.plat.module.system.enums.integration.IWorkSyncEntityTypeEnum;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
/**
* iWork 单条同步响应
*/
@Data
public class IWorkSingleSyncRespVO {
@Schema(description = "同步的实体类型")
private IWorkSyncEntityTypeEnum entityType;
@Schema(description = "实体 ID")
private Long entityId;
@Schema(description = "是否创建了新的记录")
private boolean created;
@Schema(description = "是否对已有记录进行了更新")
private boolean updated;
@Schema(description = "提示信息")
private String message;
}

View File

@@ -14,9 +14,6 @@ public class IWorkUserInfoRespVO {
@Schema(description = "iWork 返回的原始数据") @Schema(description = "iWork 返回的原始数据")
private Map<String, Object> payload; private Map<String, Object> payload;
@Schema(description = "iWork 返回的原始字符串")
private String rawBody;
@Schema(description = "是否判断为成功") @Schema(description = "是否判断为成功")
private boolean success; private boolean success;

View File

@@ -1,15 +1,9 @@
package com.zt.plat.module.system.controller.admin.integration.iwork.vo; package com.zt.plat.module.system.controller.admin.integration.iwork.vo;
import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotEmpty;
import lombok.Data; import lombok.Data;
import lombok.EqualsAndHashCode; import lombok.EqualsAndHashCode;
import java.util.List;
import java.util.Map;
/** /**
* 发起 iWork 流程的请求体。 * 发起 iWork 流程的请求体。
*/ */
@@ -17,25 +11,36 @@ import java.util.Map;
@EqualsAndHashCode(callSuper = true) @EqualsAndHashCode(callSuper = true)
public class IWorkWorkflowCreateReqVO extends IWorkBaseReqVO { public class IWorkWorkflowCreateReqVO extends IWorkBaseReqVO {
@Schema(description = "流程标题", example = "测试流程") @Schema(description = "用印申请人iWork 人员 ID", example = "1001")
@NotBlank(message = "流程标题不能为空") private String jbr;
private String requestName;
@Schema(description = "流程模板编号,可为空使用默认配置", example = "54") @Schema(description = "用印部门 ID", example = "2001")
private Long workflowId; private String yybm;
@Schema(description = "主表字段") @Schema(description = "用印单位(分部 ID", example = "3001")
@NotEmpty(message = "主表字段不能为空") private String fb;
@Valid
private List<IWorkFormFieldVO> mainFields;
@Schema(description = "明细表数据") @Schema(description = "申请时间,格式 yyyy-MM-dd", example = "2025-01-01")
@Valid private String sqsj;
private List<IWorkDetailTableVO> detailTables;
@Schema(description = "额外参数") @Schema(description = "用印去向")
private Map<String, Object> otherParams; private String yyqx;
@Schema(description = "额外 Form 数据") @Schema(description = "用印依据附件 URL")
private Map<String, String> formExtras; private String yyfkUrl;
@Schema(description = "用印事由或内容摘要")
private String yysy;
@Schema(description = "用印材料附件 URL必填")
private String xyywjUrl;
@Schema(description = "用印材料附件文件名(必填)")
private String xyywjFileName;
@Schema(description = "用印事项")
private String yysx;
@Schema(description = "业务系统单据编号(用于派生流程标题)", example = "DJ-2025-0001")
private String ywxtdjbh;
} }

View File

@@ -34,11 +34,6 @@ public class IWorkProperties {
*/ */
private String clientPublicKey; private String clientPublicKey;
/**
* 当调用方未指定流程编号时使用的默认流程模板编号。
*/
private Long workflowId;
/** /**
* 当请求未指定操作人时使用的默认用户编号。 * 当请求未指定操作人时使用的默认用户编号。
*/ */
@@ -53,6 +48,8 @@ public class IWorkProperties {
private final Client client = new Client(); private final Client client = new Client();
@Valid @Valid
private final OrgRest org = new OrgRest(); private final OrgRest org = new OrgRest();
@Valid
private final Workflow workflow = new Workflow();
@Data @Data
public static class Paths { public static class Paths {
@@ -142,4 +139,13 @@ public class IWorkProperties {
private String syncJobTitle; private String syncJobTitle;
private String syncUser; private String syncUser;
} }
@Data
public static class Workflow {

    /**
     * iWork template id of the seal (用印) workflow.
     */
    @NotBlank(message = "iWork 用印流程模板编号不能为空")
    private String sealWorkflowId;
}
} }

View File

@@ -114,7 +114,7 @@ public class AdminAuthServiceImpl implements AdminAuthService {
createLoginLog(null, username, logTypeEnum, LoginResultEnum.BAD_CREDENTIALS); createLoginLog(null, username, logTypeEnum, LoginResultEnum.BAD_CREDENTIALS);
throw exception(AUTH_LOGIN_BAD_CREDENTIALS); throw exception(AUTH_LOGIN_BAD_CREDENTIALS);
} }
if (!userService.isPasswordMatch(password, user.getPassword())) { if (!userService.isPasswordMatch(user, password)) {
createLoginLog(user.getId(), username, logTypeEnum, LoginResultEnum.BAD_CREDENTIALS); createLoginLog(user.getId(), username, logTypeEnum, LoginResultEnum.BAD_CREDENTIALS);
throw exception(AUTH_LOGIN_BAD_CREDENTIALS); throw exception(AUTH_LOGIN_BAD_CREDENTIALS);
} }
@@ -299,7 +299,7 @@ public class AdminAuthServiceImpl implements AdminAuthService {
if (length < 4 || length > 16) { if (length < 4 || length > 16) {
throw exception(AUTH_LOGIN_BAD_CREDENTIALS); throw exception(AUTH_LOGIN_BAD_CREDENTIALS);
} }
if (!userService.isPasswordMatch(password, user.getPassword())) { if (!userService.isPasswordMatch(user, password)) {
throw exception(AUTH_LOGIN_BAD_CREDENTIALS); throw exception(AUTH_LOGIN_BAD_CREDENTIALS);
} }
} }
@@ -436,22 +436,11 @@ public class AdminAuthServiceImpl implements AdminAuthService {
} }
/** /**
* 判断是否为内部用户 * 判断是否为内部用户,仅通过 E 办同步SYNC来源的账号才视为内部用户
* 根据UserSourceEnum判断同步用户为内部用户外部用户为外部用户
*/ */
private boolean isInternalUser(AdminUserDO user) { private boolean isInternalUser(AdminUserDO user) {
// 根据userSource字段判断用户类型
Integer userSource = user.getUserSource(); Integer userSource = user.getUserSource();
return Objects.equals(userSource, UserSourceEnum.SYNC.getSource());
// 同步用户(SYNC = 2)为内部用户需要使用E办登录
if (userSource != null &&
(userSource.equals(UserSourceEnum.SYNC.getSource()) ||
userSource.equals(UserSourceEnum.IWORK.getSource()))) {
return true;
}
// 外部用户(EXTERNAL = 1)或其他情况为外部用户,使用账号密码登录
return false;
} }
/** /**

View File

@@ -18,4 +18,5 @@ public interface IWorkIntegrationErrorCodeConstants {
ErrorCode IWORK_WORKFLOW_ID_MISSING = new ErrorCode(1_010_200_008, "缺少 iWork 流程模板编号"); ErrorCode IWORK_WORKFLOW_ID_MISSING = new ErrorCode(1_010_200_008, "缺少 iWork 流程模板编号");
ErrorCode IWORK_ORG_IDENTIFIER_MISSING = new ErrorCode(1_010_200_009, "iWork 人力组织接口缺少认证标识"); ErrorCode IWORK_ORG_IDENTIFIER_MISSING = new ErrorCode(1_010_200_009, "iWork 人力组织接口缺少认证标识");
ErrorCode IWORK_ORG_REMOTE_FAILED = new ErrorCode(1_010_200_010, "iWork 人力组织接口请求失败{}"); ErrorCode IWORK_ORG_REMOTE_FAILED = new ErrorCode(1_010_200_010, "iWork 人力组织接口请求失败{}");
ErrorCode IWORK_SEAL_REQUIRED_FIELD_MISSING = new ErrorCode(1_010_200_011, "缺少用印必填字段:{}");
} }

View File

@@ -2,19 +2,12 @@ package com.zt.plat.module.system.service.integration.iwork;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkFullSyncReqVO; import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkFullSyncReqVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkFullSyncRespVO; import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkFullSyncRespVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkSingleSyncReqVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkSingleSyncRespVO;
/** /**
* iWork 组织/人员同步服务 * iWork 组织/人员同步服务
*/ */
public interface IWorkSyncService { public interface IWorkSyncService {
/**
* 发起全量分批同步
*/
IWorkFullSyncRespVO fullSync(IWorkFullSyncReqVO reqVO);
/** /**
* 仅同步部门 * 仅同步部门
*/ */
@@ -35,8 +28,4 @@ public interface IWorkSyncService {
*/ */
IWorkFullSyncRespVO fullSyncUsers(IWorkFullSyncReqVO reqVO); IWorkFullSyncRespVO fullSyncUsers(IWorkFullSyncReqVO reqVO);
/**
* 根据 iWork ID 进行单条同步
*/
IWorkSingleSyncRespVO syncSingle(IWorkSingleSyncReqVO reqVO);
} }

View File

@@ -128,7 +128,7 @@ public class IWorkIntegrationServiceImpl implements IWorkIntegrationService {
IWorkSession session = ensureSession(appId, clientKeyPair, operatorUserId, Boolean.TRUE.equals(reqVO.getForceRefreshToken())); IWorkSession session = ensureSession(appId, clientKeyPair, operatorUserId, Boolean.TRUE.equals(reqVO.getForceRefreshToken()));
Map<String, Object> payload = buildCreatePayload(reqVO); Map<String, Object> payload = buildCreatePayload(reqVO);
String responseBody = executeJsonRequest(properties.getPaths().getCreateWorkflow(), null, appId, session, payload); String responseBody = executeFormRequest(properties.getPaths().getCreateWorkflow(), null, appId, session, payload);
return buildOperationResponse(responseBody); return buildOperationResponse(responseBody);
} }
@@ -324,6 +324,39 @@ public class IWorkIntegrationServiceImpl implements IWorkIntegrationService {
return executeRequest(request, IWORK_REMOTE_REQUEST_FAILED); return executeRequest(request, IWORK_REMOTE_REQUEST_FAILED);
} }
private String executeFormRequest(String path,
Map<String, Object> queryParams,
String appId,
IWorkSession session,
Map<String, Object> formFields) {
HttpUrl baseUrl = HttpUrl.parse(resolveUrl(path));
if (baseUrl == null) {
throw ServiceExceptionUtil.exception(IWORK_REMOTE_REQUEST_FAILED, "非法的 URL");
}
HttpUrl.Builder urlBuilder = baseUrl.newBuilder();
if (queryParams != null) {
queryParams.forEach((key, value) -> {
if (value != null) {
urlBuilder.addQueryParameter(key, String.valueOf(value));
}
});
}
FormBody.Builder bodyBuilder = new FormBody.Builder();
if (formFields != null) {
formFields.forEach((key, value) -> {
if (StringUtils.hasText(key) && value != null) {
bodyBuilder.add(key, toFormValue(value));
}
});
}
Request request = new Request.Builder()
.url(urlBuilder.build())
.headers(authHeaders(appId, session).build())
.post(bodyBuilder.build())
.build();
return executeRequest(request, IWORK_REMOTE_REQUEST_FAILED);
}
private Headers.Builder authHeaders(String appId, IWorkSession session) { private Headers.Builder authHeaders(String appId, IWorkSession session) {
return new Headers.Builder() return new Headers.Builder()
.set(properties.getHeaders().getAppId(), appId) .set(properties.getHeaders().getAppId(), appId)
@@ -331,6 +364,19 @@ public class IWorkIntegrationServiceImpl implements IWorkIntegrationService {
.set(properties.getHeaders().getUserId(), session.getEncryptedUserId()); .set(properties.getHeaders().getUserId(), session.getEncryptedUserId());
} }
private String toFormValue(Object value) {
if (value == null) {
return "";
}
if (value instanceof CharSequence || value instanceof Number || value instanceof Boolean) {
return String.valueOf(value);
}
if (value.getClass().isArray() || value instanceof Collection<?> || value instanceof Map<?, ?>) {
return toJsonString(value);
}
return value.toString();
}
private Map<String, Object> buildUserPayload(IWorkUserInfoReqVO reqVO) { private Map<String, Object> buildUserPayload(IWorkUserInfoReqVO reqVO) {
Map<String, Object> payload = new HashMap<>(); Map<String, Object> payload = new HashMap<>();
if (reqVO.getPayload() != null) { if (reqVO.getPayload() != null) {
@@ -342,27 +388,125 @@ public class IWorkIntegrationServiceImpl implements IWorkIntegrationService {
private Map<String, Object> buildCreatePayload(IWorkWorkflowCreateReqVO reqVO) { private Map<String, Object> buildCreatePayload(IWorkWorkflowCreateReqVO reqVO) {
Map<String, Object> payload = new LinkedHashMap<>(); Map<String, Object> payload = new LinkedHashMap<>();
payload.put("requestName", reqVO.getRequestName()); SealRequestFields fields = resolveSealFields(reqVO);
payload.put("workflowId", resolveWorkflowId(reqVO.getWorkflowId())); payload.put("requestName", buildRequestName(fields.ywxtdjbh()));
payload.put("mainData", convertFormFields(reqVO.getMainFields())); payload.put("workflowId", parseWorkflowId(fields.workflowId()));
if (reqVO.getDetailTables() != null && !reqVO.getDetailTables().isEmpty()) { payload.put("mainData", buildSealMainData(fields));
payload.put("detailData", convertDetailTables(reqVO.getDetailTables()));
}
if (reqVO.getOtherParams() != null && !reqVO.getOtherParams().isEmpty()) {
payload.put("otherParams", reqVO.getOtherParams());
}
appendPayloadExtras(payload, reqVO.getFormExtras());
return payload; return payload;
} }
private long resolveWorkflowId(Long requestWorkflowId) { private String buildRequestName(String billNo) {
if (requestWorkflowId != null) { return "用印-" + billNo;
return requestWorkflowId; }
private long parseWorkflowId(String workflowId) {
try {
return Long.parseLong(workflowId);
} catch (NumberFormatException ex) {
throw ServiceExceptionUtil.exception(IWORK_SEAL_REQUIRED_FIELD_MISSING, "workflowId");
} }
if (properties.getWorkflowId() != null) { }
return properties.getWorkflowId();
private List<Map<String, Object>> buildSealMainData(SealRequestFields fields) {
List<Map<String, Object>> main = new ArrayList<>();
addField(main, "jbr", fields.jbr());
addField(main, "yybm", fields.yybm());
addField(main, "fb", fields.fb());
addField(main, "sqsj", fields.sqsj());
addField(main, "yyqx", fields.yyqx());
addField(main, "yyfk", fields.yyfkUrl());
addField(main, "yysy", fields.yysy());
// xyywj 需要是一个数组结构 [{fileName,filePath}, ...]
addJsonField(main, "xyywj", buildSealAttachmentValue(fields.xyywjUrl()));
addField(main, "yysx", fields.yysx());
addField(main, "lclx", SealRequestFields.DEFAULT_FLOW_TYPE);
addField(main, "qsdz", SealRequestFields.DEFAULT_SIGN_ACTION);
addField(main, "ywxtdjbh", fields.ywxtdjbh());
return main;
}
private void addField(List<Map<String, Object>> target, String name, String value) {
if (!StringUtils.hasText(value)) {
return;
} }
throw ServiceExceptionUtil.exception(IWORK_WORKFLOW_ID_MISSING); Map<String, Object> map = new HashMap<>(2);
map.put("fieldName", name);
map.put("fieldValue", value);
target.add(map);
}
private void addJsonField(List<Map<String, Object>> target, String name, Object value) {
if (value == null) {
return;
}
Map<String, Object> map = new HashMap<>(2);
map.put("fieldName", name);
map.put("fieldValue", value);
target.add(map);
}
/**
* 将单个附件 URL 封装成 iWork 需要的数组结构:
* [ {"fileName": "xxx", "filePath": "url"} ]
*/
private List<Map<String, Object>> buildSealAttachmentValue(String xyywjUrl) {
String url = trimToNull(xyywjUrl);
if (url == null) {
return null;
}
Map<String, Object> file = new LinkedHashMap<>(2);
// 这里简单从 URL 截取文件名,调用方也可以直接传入已经带文件名的 URL
String fileName = extractFileNameFromUrl(url);
file.put("fileName", fileName);
file.put("filePath", url);
List<Map<String, Object>> list = new ArrayList<>(1);
list.add(file);
return list;
}
private String extractFileNameFromUrl(String url) {
String trimmed = trimToNull(url);
if (trimmed == null) {
return null;
}
int queryIndex = trimmed.indexOf('?');
String pathPart = queryIndex >= 0 ? trimmed.substring(0, queryIndex) : trimmed;
int slashIndex = pathPart.lastIndexOf('/');
if (slashIndex >= 0 && slashIndex < pathPart.length() - 1) {
return pathPart.substring(slashIndex + 1);
}
return pathPart;
}
private SealRequestFields resolveSealFields(IWorkWorkflowCreateReqVO reqVO) {
String jbr = requireSealField(reqVO.getJbr(), "jbr");
String yybm = requireSealField(reqVO.getYybm(), "yybm");
String fb = requireSealField(reqVO.getFb(), "fb");
String sqsj = requireSealField(reqVO.getSqsj(), "sqsj");
String yyqx = requireSealField(reqVO.getYyqx(), "yyqx");
String xyywjUrl = requireSealField(reqVO.getXyywjUrl(), "xyywjUrl");
String yysx = requireSealField(reqVO.getYysx(), "yysx");
String billNo = requireSealField(reqVO.getYwxtdjbh(), "ywxtdjbh");
String yyfkUrl = trimToNull(reqVO.getYyfkUrl());
String yysy = trimToNull(reqVO.getYysy());
String workflowId = requireSealField(properties.getWorkflow().getSealWorkflowId(), "workflowId");
return new SealRequestFields(jbr, yybm, fb, sqsj, yyqx, yyfkUrl, yysy, xyywjUrl, yysx, billNo, workflowId);
}
private String requireSealField(String value, String fieldName) {
String trimmed = trimToNull(value);
if (trimmed == null) {
throw ServiceExceptionUtil.exception(IWORK_SEAL_REQUIRED_FIELD_MISSING, fieldName);
}
return trimmed;
}
private String trimToNull(String value) {
if (!StringUtils.hasText(value)) {
return null;
}
String trimmed = value.trim();
return trimmed.isEmpty() ? null : trimmed;
} }
private Map<String, Object> buildVoidPayload(IWorkWorkflowVoidReqVO reqVO) { private Map<String, Object> buildVoidPayload(IWorkWorkflowVoidReqVO reqVO) {
@@ -389,35 +533,8 @@ public class IWorkIntegrationServiceImpl implements IWorkIntegrationService {
}); });
} }
private List<Map<String, Object>> convertFormFields(List<IWorkFormFieldVO> fields) {
return fields.stream().map(field -> {
Map<String, Object> map = new HashMap<>(2);
map.put("fieldName", field.getFieldName());
map.put("fieldValue", field.getFieldValue());
return map;
}).toList();
}
private List<Map<String, Object>> convertDetailTables(List<IWorkDetailTableVO> tables) {
return tables.stream().map(table -> {
Map<String, Object> tableMap = new HashMap<>(2);
tableMap.put("tableDBName", table.getTableDBName());
List<Map<String, Object>> records = table.getRecords().stream().map(record -> {
Map<String, Object> recordMap = new HashMap<>(2);
if (record.getRecordOrder() != null) {
recordMap.put("recordOrder", record.getRecordOrder());
}
recordMap.put("workflowRequestTableFields", convertFormFields(record.getFields()));
return recordMap;
}).toList();
tableMap.put("workflowRequestTableRecords", records);
return tableMap;
}).toList();
}
private IWorkUserInfoRespVO buildUserInfoResponse(String responseBody) { private IWorkUserInfoRespVO buildUserInfoResponse(String responseBody) {
IWorkUserInfoRespVO respVO = new IWorkUserInfoRespVO(); IWorkUserInfoRespVO respVO = new IWorkUserInfoRespVO();
respVO.setRawBody(responseBody);
if (!StringUtils.hasText(responseBody)) { if (!StringUtils.hasText(responseBody)) {
return respVO; return respVO;
} }
@@ -432,7 +549,6 @@ public class IWorkIntegrationServiceImpl implements IWorkIntegrationService {
private IWorkOperationRespVO buildOperationResponse(String responseBody) { private IWorkOperationRespVO buildOperationResponse(String responseBody) {
IWorkOperationRespVO respVO = new IWorkOperationRespVO(); IWorkOperationRespVO respVO = new IWorkOperationRespVO();
respVO.setRawBody(responseBody);
if (!StringUtils.hasText(responseBody)) { if (!StringUtils.hasText(responseBody)) {
return respVO; return respVO;
} }
@@ -666,6 +782,21 @@ public class IWorkIntegrationServiceImpl implements IWorkIntegrationService {
return value.replace("'", "'\"'\"'"); return value.replace("'", "'\"'\"'");
} }
private record SealRequestFields(String jbr,
String yybm,
String fb,
String sqsj,
String yyqx,
String yyfkUrl,
String yysy,
String xyywjUrl,
String yysx,
String ywxtdjbh,
String workflowId) {
private static final String DEFAULT_FLOW_TYPE = "2979600781334966993";
private static final String DEFAULT_SIGN_ACTION = "CORPORATE";
}
private record RegistrationState(String secret, String spk, ClientKeyPair clientKeyPair) { private record RegistrationState(String secret, String spk, ClientKeyPair clientKeyPair) {
} }

View File

@@ -37,7 +37,6 @@ import java.util.concurrent.ConcurrentHashMap;
public class IWorkSyncProcessorImpl implements IWorkSyncProcessor { public class IWorkSyncProcessorImpl implements IWorkSyncProcessor {
private static final String JOB_CODE_PREFIX = "IWORK_JOB_"; private static final String JOB_CODE_PREFIX = "IWORK_JOB_";
private static final String DEFAULT_USER_PASSWORD = "Zgty@9527";
private static final int DEFAULT_SORT = 999; private static final int DEFAULT_SORT = 999;
private final DeptService deptService; private final DeptService deptService;
@@ -234,6 +233,8 @@ public class IWorkSyncProcessorImpl implements IWorkSyncProcessor {
} }
Long postId = resolveUserPostId(user); Long postId = resolveUserPostId(user);
CommonStatusEnum status = inactive ? CommonStatusEnum.DISABLE : CommonStatusEnum.ENABLE; CommonStatusEnum status = inactive ? CommonStatusEnum.DISABLE : CommonStatusEnum.ENABLE;
// 直接沿用 iWork 原始密码,避免重复格式化造成校验偏差
String externalPassword = trimToNull(user.getPassword());
AdminUserDO existing = adminUserMapper.selectByUsername(username); AdminUserDO existing = adminUserMapper.selectByUsername(username);
UserSyncOutcome outcome; UserSyncOutcome outcome;
if (existing == null) { if (existing == null) {
@@ -242,7 +243,12 @@ public class IWorkSyncProcessorImpl implements IWorkSyncProcessor {
result.increaseSkipped(); result.increaseSkipped();
continue; continue;
} }
outcome = createUser(user, username, deptId, postId, status); if (StrUtil.isBlank(externalPassword)) {
log.warn("[iWork] 人员缺少密码信息无法创建id={} username={}", user.getId(), username);
result.increaseFailed();
continue;
}
outcome = createUser(user, username, deptId, postId, status, externalPassword);
} else { } else {
if (!Objects.equals(existing.getUserSource(), UserSourceEnum.IWORK.getSource())) { if (!Objects.equals(existing.getUserSource(), UserSourceEnum.IWORK.getSource())) {
logSkip("人员", existing.getId(), "非 iWork 来源用户,保持原状"); logSkip("人员", existing.getId(), "非 iWork 来源用户,保持原状");
@@ -254,7 +260,7 @@ public class IWorkSyncProcessorImpl implements IWorkSyncProcessor {
result.increaseSkipped(); result.increaseSkipped();
continue; continue;
} }
outcome = updateUser(existing, user, username, deptId, postId, status); outcome = updateUser(existing, user, username, deptId, postId, status, externalPassword);
} }
applyUserOutcome(result, outcome, user.getLastname(), username); applyUserOutcome(result, outcome, user.getLastname(), username);
} catch (Exception ex) { } catch (Exception ex) {
@@ -357,13 +363,14 @@ public class IWorkSyncProcessorImpl implements IWorkSyncProcessor {
String username, String username,
Long deptId, Long deptId,
Long postId, Long postId,
CommonStatusEnum status) { CommonStatusEnum status,
String externalPassword) {
UserSaveReqVO req = buildUserSaveReq(source, username, deptId, postId, status); UserSaveReqVO req = buildUserSaveReq(source, username, deptId, postId, status);
Long desiredUserId = source.getId() == null ? null : source.getId().longValue(); Long desiredUserId = source.getId() == null ? null : source.getId().longValue();
if (desiredUserId != null) { if (desiredUserId != null) {
req.setId(desiredUserId); req.setId(desiredUserId);
} }
req.setPassword(DEFAULT_USER_PASSWORD); req.setPassword(externalPassword);
req.setUserSource(UserSourceEnum.IWORK.getSource()); req.setUserSource(UserSourceEnum.IWORK.getSource());
Long userId = adminUserService.createUser(req); Long userId = adminUserService.createUser(req);
Long effectiveUserId = desiredUserId != null ? desiredUserId : userId; Long effectiveUserId = desiredUserId != null ? desiredUserId : userId;
@@ -375,11 +382,13 @@ public class IWorkSyncProcessorImpl implements IWorkSyncProcessor {
String username, String username,
Long deptId, Long deptId,
Long postId, Long postId,
CommonStatusEnum status) { CommonStatusEnum status,
String externalPassword) {
UserSaveReqVO req = buildUserSaveReq(source, username, deptId, postId, status); UserSaveReqVO req = buildUserSaveReq(source, username, deptId, postId, status);
req.setId(existing.getId()); req.setId(existing.getId());
boolean disabledChanged = CommonStatusEnum.isDisable(status.getStatus()) && CommonStatusEnum.isEnable(existing.getStatus()); boolean disabledChanged = CommonStatusEnum.isDisable(status.getStatus()) && CommonStatusEnum.isEnable(existing.getStatus());
adminUserService.updateUser(req); adminUserService.updateUser(req);
syncPassword(existing, externalPassword);
return new UserSyncOutcome(SyncAction.UPDATED, disabledChanged, existing.getId()); return new UserSyncOutcome(SyncAction.UPDATED, disabledChanged, existing.getId());
} }
@@ -491,6 +500,7 @@ public class IWorkSyncProcessorImpl implements IWorkSyncProcessor {
return post; return post;
} }
// 优先匹配部门ID若缺失则回退使用分部ID
private Long resolveUserDeptId(IWorkHrUserPageRespVO.User user) { private Long resolveUserDeptId(IWorkHrUserPageRespVO.User user) {
Long deptId = toLong(user.getDepartmentid()); Long deptId = toLong(user.getDepartmentid());
if (deptId != null) { if (deptId != null) {
@@ -503,6 +513,7 @@ public class IWorkSyncProcessorImpl implements IWorkSyncProcessor {
return null; return null;
} }
// 通过岗位编码命中缓存,否则才按名称自动建档
private Long resolveUserPostId(IWorkHrUserPageRespVO.User user) { private Long resolveUserPostId(IWorkHrUserPageRespVO.User user) {
if (user.getJobtitleid() == null) { if (user.getJobtitleid() == null) {
return null; return null;
@@ -552,6 +563,22 @@ public class IWorkSyncProcessorImpl implements IWorkSyncProcessor {
StrUtil.blankToDefault(displayName, username), describeAction(outcome.action()))); StrUtil.blankToDefault(displayName, username), describeAction(outcome.action())));
} }
/**
* 仅在密码发生变化时才写库,避免多余更新
*/
private void syncPassword(AdminUserDO existing, String externalPassword) {
if (existing == null || StrUtil.isBlank(externalPassword)) {
return;
}
if (StrUtil.equals(externalPassword, existing.getPassword())) {
return;
}
AdminUserDO updateObj = new AdminUserDO();
updateObj.setId(existing.getId());
updateObj.setPassword(externalPassword);
adminUserMapper.updateById(updateObj);
}
private void incrementByAction(BatchResult result, SyncAction action) { private void incrementByAction(BatchResult result, SyncAction action) {
if (action == null) { if (action == null) {
return; return;
@@ -624,23 +651,17 @@ public class IWorkSyncProcessorImpl implements IWorkSyncProcessor {
return value == null ? null : value.longValue(); return value == null ? null : value.longValue();
} }
/**
* 工号优先、登录账号兜底,确保账号体系与 iWork 一致
*/
private String resolveUsername(IWorkHrUserPageRespVO.User user) { private String resolveUsername(IWorkHrUserPageRespVO.User user) {
String candidate = sanitizeUsername(user.getWorkcode()); if (StrUtil.isNotBlank(user.getWorkcode())) {
if (candidate == null) { return user.getWorkcode().trim();
candidate = sanitizeUsername(user.getLoginid());
} }
return candidate; if (StrUtil.isNotBlank(user.getLoginid())) {
} return user.getLoginid().trim();
private String sanitizeUsername(String raw) {
if (StrUtil.isBlank(raw)) {
return null;
} }
String normalized = raw.replaceAll("[^A-Za-z0-9]", ""); return null;
if (StrUtil.isBlank(normalized)) {
return null;
}
return normalized.length() > 30 ? normalized.substring(0, 30) : normalized;
} }
private Set<Long> singletonSet(Long value) { private Set<Long> singletonSet(Long value) {

View File

@@ -12,7 +12,10 @@ import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import java.util.*; import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
import static com.zt.plat.module.system.service.integration.iwork.IWorkIntegrationErrorCodeConstants.IWORK_ORG_REMOTE_FAILED; import static com.zt.plat.module.system.service.integration.iwork.IWorkIntegrationErrorCodeConstants.IWORK_ORG_REMOTE_FAILED;
@@ -27,11 +30,6 @@ public class IWorkSyncServiceImpl implements IWorkSyncService {
private final IWorkOrgRestService orgRestService; private final IWorkOrgRestService orgRestService;
private final IWorkSyncProcessor syncProcessor; private final IWorkSyncProcessor syncProcessor;
@Override
public IWorkFullSyncRespVO fullSync(IWorkFullSyncReqVO reqVO) {
return runFullSync(reqVO, reqVO.resolveScopes());
}
@Override @Override
public IWorkFullSyncRespVO fullSyncDepartments(IWorkFullSyncReqVO reqVO) { public IWorkFullSyncRespVO fullSyncDepartments(IWorkFullSyncReqVO reqVO) {
return runFullSync(reqVO, EnumSet.of(IWorkSyncEntityTypeEnum.DEPARTMENT)); return runFullSync(reqVO, EnumSet.of(IWorkSyncEntityTypeEnum.DEPARTMENT));
@@ -80,21 +78,6 @@ public class IWorkSyncServiceImpl implements IWorkSyncService {
return respVO; return respVO;
} }
@Override
public IWorkSingleSyncRespVO syncSingle(IWorkSingleSyncReqVO reqVO) {
IWorkSingleSyncRespVO respVO = new IWorkSingleSyncRespVO();
respVO.setEntityType(reqVO.getEntityType());
respVO.setEntityId(reqVO.getEntityId());
switch (reqVO.getEntityType()) {
case SUBCOMPANY -> processSingleSubcompany(reqVO, respVO);
case DEPARTMENT -> processSingleDepartment(reqVO, respVO);
case JOB_TITLE -> processSingleJob(reqVO, respVO);
case USER -> processSingleUser(reqVO, respVO);
default -> throw new IllegalArgumentException("不支持的实体类型: " + reqVO.getEntityType());
}
return respVO;
}
private int executeSubcompanyFullSync(IWorkFullSyncReqVO reqVO, private int executeSubcompanyFullSync(IWorkFullSyncReqVO reqVO,
IWorkSyncProcessor.SyncOptions options, IWorkSyncProcessor.SyncOptions options,
IWorkSyncEntityStatVO stat, IWorkSyncEntityStatVO stat,
@@ -163,46 +146,6 @@ public class IWorkSyncServiceImpl implements IWorkSyncService {
}); });
} }
private void processSingleSubcompany(IWorkSingleSyncReqVO reqVO, IWorkSingleSyncRespVO respVO) {
IWorkHrSubcompanyPageRespVO.Subcompany data = fetchSingleSubcompany(reqVO.getEntityId());
if (data == null) {
markNotFound(respVO, "分部");
return;
}
IWorkSyncProcessor.BatchResult result = syncProcessor.syncSubcompany(data, buildSingleOptions(reqVO));
populateSingleResult(respVO, result);
}
private void processSingleDepartment(IWorkSingleSyncReqVO reqVO, IWorkSingleSyncRespVO respVO) {
IWorkHrDepartmentPageRespVO.Department data = fetchSingleDepartment(reqVO.getEntityId());
if (data == null) {
markNotFound(respVO, "部门");
return;
}
IWorkSyncProcessor.BatchResult result = syncProcessor.syncDepartment(data, buildSingleOptions(reqVO));
populateSingleResult(respVO, result);
}
private void processSingleJob(IWorkSingleSyncReqVO reqVO, IWorkSingleSyncRespVO respVO) {
IWorkHrJobTitlePageRespVO.JobTitle data = fetchSingleJob(reqVO.getEntityId());
if (data == null) {
markNotFound(respVO, "岗位");
return;
}
IWorkSyncProcessor.BatchResult result = syncProcessor.syncJobTitle(data, buildSingleOptions(reqVO));
populateSingleResult(respVO, result);
}
private void processSingleUser(IWorkSingleSyncReqVO reqVO, IWorkSingleSyncRespVO respVO) {
IWorkHrUserPageRespVO.User data = fetchSingleUser(reqVO.getEntityId().toString());
if (data == null) {
markNotFound(respVO, "人员");
return;
}
IWorkSyncProcessor.BatchResult result = syncProcessor.syncUser(data, buildSingleOptions(reqVO));
populateSingleResult(respVO, result);
}
private int executePaged(IWorkFullSyncReqVO reqVO, private int executePaged(IWorkFullSyncReqVO reqVO,
IWorkSyncEntityTypeEnum type, IWorkSyncEntityTypeEnum type,
List<IWorkSyncBatchStatVO> batches, List<IWorkSyncBatchStatVO> batches,
@@ -238,66 +181,10 @@ public class IWorkSyncServiceImpl implements IWorkSyncService {
stat.incrementFailed(result.getFailed()); stat.incrementFailed(result.getFailed());
} }
private void populateSingleResult(IWorkSingleSyncRespVO respVO, IWorkSyncProcessor.BatchResult result) {
respVO.setCreated(result.getCreated() > 0);
respVO.setUpdated(result.getUpdated() > 0);
respVO.setMessage(result.getMessage());
}
private void markNotFound(IWorkSingleSyncRespVO respVO, String entityName) {
respVO.setCreated(false);
respVO.setUpdated(false);
respVO.setMessage(StrUtil.format("未在 iWork 中找到{}(ID={})", entityName, respVO.getEntityId()));
}
private IWorkSyncProcessor.SyncOptions buildFullSyncOptions(IWorkFullSyncReqVO reqVO) { private IWorkSyncProcessor.SyncOptions buildFullSyncOptions(IWorkFullSyncReqVO reqVO) {
return IWorkSyncProcessor.SyncOptions.full(Boolean.TRUE.equals(reqVO.getIncludeCanceled())); return IWorkSyncProcessor.SyncOptions.full(Boolean.TRUE.equals(reqVO.getIncludeCanceled()));
} }
private IWorkSyncProcessor.SyncOptions buildSingleOptions(IWorkSingleSyncReqVO reqVO) {
return IWorkSyncProcessor.SyncOptions.single(Boolean.TRUE.equals(reqVO.getCreateIfMissing()));
}
private IWorkHrSubcompanyPageRespVO.Subcompany fetchSingleSubcompany(Long entityId) {
IWorkSubcompanyQueryReqVO query = new IWorkSubcompanyQueryReqVO();
query.setCurpage(1);
query.setPagesize(1);
query.setParams(Collections.singletonMap("subcompanyid", entityId));
IWorkHrSubcompanyPageRespVO pageResp = orgRestService.listSubcompanies(query);
ensureIWorkSuccess("获取分部详情", pageResp.isSuccess(), pageResp.getMessage());
return CollUtil.getFirst(pageResp.getDataList());
}
private IWorkHrDepartmentPageRespVO.Department fetchSingleDepartment(Long entityId) {
IWorkDepartmentQueryReqVO query = new IWorkDepartmentQueryReqVO();
query.setCurpage(1);
query.setPagesize(1);
query.setParams(Collections.singletonMap("departmentid", entityId));
IWorkHrDepartmentPageRespVO pageResp = orgRestService.listDepartments(query);
ensureIWorkSuccess("获取部门详情", pageResp.isSuccess(), pageResp.getMessage());
return CollUtil.getFirst(pageResp.getDataList());
}
private IWorkHrJobTitlePageRespVO.JobTitle fetchSingleJob(Long entityId) {
IWorkJobTitleQueryReqVO query = new IWorkJobTitleQueryReqVO();
query.setCurpage(1);
query.setPagesize(1);
query.setParams(Collections.singletonMap("jobtitleid", entityId));
IWorkHrJobTitlePageRespVO pageResp = orgRestService.listJobTitles(query);
ensureIWorkSuccess("获取岗位详情", pageResp.isSuccess(), pageResp.getMessage());
return CollUtil.getFirst(pageResp.getDataList());
}
private IWorkHrUserPageRespVO.User fetchSingleUser(String entityId) {
IWorkUserQueryReqVO query = new IWorkUserQueryReqVO();
query.setCurpage(1);
query.setPagesize(1);
query.setParams(Collections.singletonMap("id", entityId));
IWorkHrUserPageRespVO pageResp = orgRestService.listUsers(query);
ensureIWorkSuccess("获取人员详情", pageResp.isSuccess(), pageResp.getMessage());
return CollUtil.getFirst(pageResp.getDataList());
}
private void ensureIWorkSuccess(String action, boolean success, String remoteMessage) { private void ensureIWorkSuccess(String action, boolean success, String remoteMessage) {
if (success) { if (success) {
return; return;

View File

@@ -190,10 +190,10 @@ public interface AdminUserService {
/** /**
* 判断密码是否匹配 * 判断密码是否匹配
* *
* @param user 用户信息(用于决策密码策略)
* @param rawPassword 未加密的密码 * @param rawPassword 未加密的密码
* @param encodedPassword 加密后的密码
* @return 是否匹配 * @return 是否匹配
*/ */
boolean isPasswordMatch(String rawPassword, String encodedPassword); boolean isPasswordMatch(AdminUserDO user, String rawPassword);
} }

View File

@@ -27,6 +27,7 @@ import com.zt.plat.module.system.dal.dataobject.user.AdminUserDO;
import com.zt.plat.module.system.dal.dataobject.userdept.UserDeptDO; import com.zt.plat.module.system.dal.dataobject.userdept.UserDeptDO;
import com.zt.plat.module.system.dal.mysql.dept.UserPostMapper; import com.zt.plat.module.system.dal.mysql.dept.UserPostMapper;
import com.zt.plat.module.system.dal.mysql.user.AdminUserMapper; import com.zt.plat.module.system.dal.mysql.user.AdminUserMapper;
import com.zt.plat.module.system.enums.user.PasswordStrategyEnum;
import com.zt.plat.module.system.enums.user.UserSourceEnum; import com.zt.plat.module.system.enums.user.UserSourceEnum;
import com.zt.plat.module.system.service.dept.DeptService; import com.zt.plat.module.system.service.dept.DeptService;
import com.zt.plat.module.system.service.dept.PostService; import com.zt.plat.module.system.service.dept.PostService;
@@ -40,9 +41,12 @@ import org.springframework.context.annotation.Lazy;
import org.springframework.security.crypto.password.PasswordEncoder; import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.DigestUtils;
import java.nio.charset.StandardCharsets;
import java.time.LocalDateTime; import java.time.LocalDateTime;
import java.util.*; import java.util.*;
import java.util.Locale;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import static com.zt.plat.framework.common.exception.util.ServiceExceptionUtil.exception; import static com.zt.plat.framework.common.exception.util.ServiceExceptionUtil.exception;
@@ -122,8 +126,9 @@ public class AdminUserServiceImpl implements AdminUserService {
if (user.getUserSource() == null) { if (user.getUserSource() == null) {
user.setUserSource(UserSourceEnum.EXTERNAL.getSource()); user.setUserSource(UserSourceEnum.EXTERNAL.getSource());
} }
PasswordStrategyEnum passwordStrategy = determinePasswordStrategy(user.getUserSource());
user.setAvatar(normalizeAvatarValue(createReqVO.getAvatar())); user.setAvatar(normalizeAvatarValue(createReqVO.getAvatar()));
user.setPassword(encodePassword(createReqVO.getPassword())); user.setPassword(encodePassword(createReqVO.getPassword(), passwordStrategy));
userMapper.insert(user); userMapper.insert(user);
// 2.2 插入关联部门 // 2.2 插入关联部门
if (CollectionUtil.isNotEmpty(user.getDeptIds())) { if (CollectionUtil.isNotEmpty(user.getDeptIds())) {
@@ -161,7 +166,7 @@ public class AdminUserServiceImpl implements AdminUserService {
AdminUserDO user = BeanUtils.toBean(registerReqVO, AdminUserDO.class); AdminUserDO user = BeanUtils.toBean(registerReqVO, AdminUserDO.class);
user.setStatus(CommonStatusEnum.ENABLE.getStatus()); // 默认开启 user.setStatus(CommonStatusEnum.ENABLE.getStatus()); // 默认开启
user.setUserSource(UserSourceEnum.EXTERNAL.getSource()); // 注册用户设为外部用户 user.setUserSource(UserSourceEnum.EXTERNAL.getSource()); // 注册用户设为外部用户
user.setPassword(encodePassword(registerReqVO.getPassword())); // 加密密码 user.setPassword(encodePassword(registerReqVO.getPassword(), PasswordStrategyEnum.LOCAL_BCRYPT)); // 加密密码
userMapper.insert(user); userMapper.insert(user);
return user.getId(); return user.getId();
} }
@@ -268,30 +273,23 @@ public class AdminUserServiceImpl implements AdminUserService {
@Override @Override
public void updateUserPassword(Long id, UserProfileUpdatePasswordReqVO reqVO) { public void updateUserPassword(Long id, UserProfileUpdatePasswordReqVO reqVO) {
// 校验旧密码密码 AdminUserDO user = validateUserExists(id);
validateOldPassword(id, reqVO.getOldPassword()); ensurePasswordCanBeModified(user);
// 执行更新 validateOldPassword(user, reqVO.getOldPassword());
AdminUserDO updateObj = new AdminUserDO().setId(id); applyLocalPassword(user, reqVO.getNewPassword());
updateObj.setPassword(encodePassword(reqVO.getNewPassword())); // 加密密码
userMapper.updateById(updateObj);
} }
@Override @Override
@LogRecord(type = SYSTEM_USER_TYPE, subType = SYSTEM_USER_UPDATE_PASSWORD_SUB_TYPE, bizNo = "{{#id}}", @LogRecord(type = SYSTEM_USER_TYPE, subType = SYSTEM_USER_UPDATE_PASSWORD_SUB_TYPE, bizNo = "{{#id}}",
success = SYSTEM_USER_UPDATE_PASSWORD_SUCCESS) success = SYSTEM_USER_UPDATE_PASSWORD_SUCCESS)
public void updateUserPassword(Long id, String password) { public void updateUserPassword(Long id, String password) {
// 1. 校验用户存在
AdminUserDO user = validateUserExists(id); AdminUserDO user = validateUserExists(id);
ensurePasswordCanBeModified(user);
// 2. 更新密码 String encoded = applyLocalPassword(user, password);
AdminUserDO updateObj = new AdminUserDO();
updateObj.setId(id);
updateObj.setPassword(encodePassword(password)); // 加密密码
userMapper.updateById(updateObj);
// 3. 记录操作日志上下文 // 3. 记录操作日志上下文
LogRecordContext.putVariable("user", user); LogRecordContext.putVariable("user", user);
LogRecordContext.putVariable("newPassword", updateObj.getPassword()); LogRecordContext.putVariable("newPassword", encoded);
} }
@Override @Override
@@ -611,15 +609,24 @@ public class AdminUserServiceImpl implements AdminUserService {
*/ */
@VisibleForTesting @VisibleForTesting
void validateOldPassword(Long id, String oldPassword) { void validateOldPassword(Long id, String oldPassword) {
AdminUserDO user = userMapper.selectById(id); AdminUserDO user = validateUserExists(id);
if (user == null) { validateOldPassword(user, oldPassword);
throw exception(USER_NOT_EXISTS); }
}
if (!isPasswordMatch(oldPassword, user.getPassword())) { private void validateOldPassword(AdminUserDO user, String oldPassword) {
if (!isPasswordMatch(user, oldPassword)) {
throw exception(USER_PASSWORD_FAILED); throw exception(USER_PASSWORD_FAILED);
} }
} }
private void ensurePasswordCanBeModified(AdminUserDO user) {
UserSourceEnum sourceEnum = UserSourceEnum.of(user.getUserSource());
if (sourceEnum == null || sourceEnum.isExternal()) {
return;
}
throw exception(USER_PASSWORD_MODIFY_FORBIDDEN);
}
@Override @Override
public List<AdminUserDO> getUserListByStatus(Integer status) { public List<AdminUserDO> getUserListByStatus(Integer status) {
List<AdminUserDO> users = userMapper.selectListByStatus(status); List<AdminUserDO> users = userMapper.selectListByStatus(status);
@@ -628,18 +635,58 @@ public class AdminUserServiceImpl implements AdminUserService {
} }
@Override @Override
public boolean isPasswordMatch(String rawPassword, String encodedPassword) { public boolean isPasswordMatch(AdminUserDO user, String rawPassword) {
return passwordEncoder.matches(rawPassword, encodedPassword); if (user == null) {
return false;
}
PasswordStrategyEnum strategy = determinePasswordStrategy(user.getUserSource());
if (strategy == PasswordStrategyEnum.IWORK_MD5) {
String stored = user.getPassword();
if (isBcryptFormat(stored)) {
return passwordEncoder.matches(rawPassword, stored);
}
String hashed = md5Upper(rawPassword);
return StrUtil.isNotBlank(hashed) && hashed.equals(StrUtil.nullToDefault(stored, ""));
}
return passwordEncoder.matches(rawPassword, user.getPassword());
} }
/** private String applyLocalPassword(AdminUserDO user, String password) {
* 对密码进行加密 AdminUserDO updateObj = new AdminUserDO();
* updateObj.setId(user.getId());
* @param password 密码 String encoded = encodePassword(password, PasswordStrategyEnum.LOCAL_BCRYPT);
* @return 加密后的密码 updateObj.setPassword(encoded);
*/ userMapper.updateById(updateObj);
private String encodePassword(String password) { return encoded;
}
private PasswordStrategyEnum determinePasswordStrategy(Integer userSource) {
return UserSourceEnum.resolvePasswordStrategy(userSource);
}
private String encodePassword(String password, PasswordStrategyEnum strategy) {
if (strategy == PasswordStrategyEnum.IWORK_MD5) {
return normalizeMd5(password);
}
return passwordEncoder.encode(password); return passwordEncoder.encode(password);
} }
private String normalizeMd5(String password) {
if (StrUtil.isBlank(password)) {
return null;
}
return password.trim().toUpperCase(Locale.ROOT);
}
private String md5Upper(String rawPassword) {
if (StrUtil.isBlank(rawPassword)) {
return null;
}
return DigestUtils.md5DigestAsHex(rawPassword.getBytes(StandardCharsets.UTF_8)).toUpperCase(Locale.ROOT);
}
private boolean isBcryptFormat(String value) {
return StrUtil.isNotBlank(value) && value.startsWith("$2");
}
} }

View File

@@ -109,9 +109,7 @@ iwork:
base-url: http://172.16.36.233:8080 base-url: http://172.16.36.233:8080
# app-id: f47ac10b-58cc-4372-a567-0e02b2c3d479 # app-id: f47ac10b-58cc-4372-a567-0e02b2c3d479
app-id: f47ac10b-58cc-4372-a567-0e02b2c3d479 app-id: f47ac10b-58cc-4372-a567-0e02b2c3d479
client-public-key:
user-id: 9869 user-id: 9869
workflow-id: 1753
paths: paths:
register: /api/ec/dev/auth/regist register: /api/ec/dev/auth/regist
apply-token: /api/ec/dev/auth/applytoken apply-token: /api/ec/dev/auth/applytoken
@@ -135,6 +133,8 @@ iwork:
sync-department: /api/hrm/resful/synDepartment sync-department: /api/hrm/resful/synDepartment
sync-job-title: /api/hrm/resful/synJobtitle sync-job-title: /api/hrm/resful/synJobtitle
sync-user: /api/hrm/resful/synHrmresource sync-user: /api/hrm/resful/synHrmresource
workflow:
seal-workflow-id: "1753"
--- #################### RPC 远程调用相关配置 #################### --- #################### RPC 远程调用相关配置 ####################

View File

@@ -5,6 +5,10 @@
<springProperty scope="context" name="zt.info.base-package" source="zt.info.base-package"/> <springProperty scope="context" name="zt.info.base-package" source="zt.info.base-package"/>
<!-- 格式化输出:%d 表示日期,%X{tid} SkWalking 链路追踪编号,%thread 表示线程名,%-5level级别从左显示 5 个字符宽度,%msg日志消息%n是换行符 --> <!-- 格式化输出:%d 表示日期,%X{tid} SkWalking 链路追踪编号,%thread 表示线程名,%-5level级别从左显示 5 个字符宽度,%msg日志消息%n是换行符 -->
<property name="PATTERN_DEFAULT" value="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} | %highlight(${LOG_LEVEL_PATTERN:-%5p} ${PID:- }) | %boldYellow(%thread [%tid]) %boldGreen(%-40.40logger{39}) | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/> <property name="PATTERN_DEFAULT" value="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} | %highlight(${LOG_LEVEL_PATTERN:-%5p} ${PID:- }) | %boldYellow(%thread [%tid]) %boldGreen(%-40.40logger{39}) | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>
<!--应用名称-->
<springProperty scope="context" name="spring.application.name" source="spring.application.name"/>
<!-- 日志输出路径 -->
<property name="LOG_DIR" value="${user.home}/logs/${spring.application.name}"/>
<!-- 控制台 Appender --> <!-- 控制台 Appender -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">      <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">     
@@ -56,11 +60,29 @@
</encoder> </encoder>
</appender> </appender>
<!-- ERROR 级别日志 -->
<appender name="ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_DIR}-error.log</file>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_DIR}-error.%d{yyyy-MM-dd}.log</fileNamePattern>
<maxHistory>30</maxHistory> <!-- 保留30天的日志 -->
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<!--logback的日志级别 FATAL > ERROR > WARN > INFO > DEBUG--> <!--logback的日志级别 FATAL > ERROR > WARN > INFO > DEBUG-->
<!-- 本地环境 --> <!-- 本地环境 -->
<springProfile name="local,dev"> <springProfile name="local,dev">
<root level="WARN"> <root level="WARN">
<appender-ref ref="STDOUT"/> <appender-ref ref="STDOUT"/>
<appender-ref ref="ERROR"/>
<appender-ref ref="GRPC"/> <!-- 本地环境下,如果不想接入 SkyWalking 日志服务,可以注释掉本行 --> <appender-ref ref="GRPC"/> <!-- 本地环境下,如果不想接入 SkyWalking 日志服务,可以注释掉本行 -->
<appender-ref ref="ASYNC"/> <!-- 本地环境下,如果不想打印日志,可以注释掉本行 --> <appender-ref ref="ASYNC"/> <!-- 本地环境下,如果不想打印日志,可以注释掉本行 -->
</root> </root>
@@ -75,6 +97,7 @@
<springProfile name="dev,test,stage,prod,default"> <springProfile name="dev,test,stage,prod,default">
<root level="INFO"> <root level="INFO">
<appender-ref ref="STDOUT"/> <appender-ref ref="STDOUT"/>
<appender-ref ref="ERROR"/>
<appender-ref ref="ASYNC"/> <appender-ref ref="ASYNC"/>
<appender-ref ref="GRPC"/> <appender-ref ref="GRPC"/>
</root> </root>

View File

@@ -16,7 +16,11 @@ import com.zt.plat.module.system.enums.logger.LoginLogTypeEnum;
import com.zt.plat.module.system.enums.logger.LoginResultEnum; import com.zt.plat.module.system.enums.logger.LoginResultEnum;
import com.zt.plat.module.system.enums.sms.SmsSceneEnum; import com.zt.plat.module.system.enums.sms.SmsSceneEnum;
import com.zt.plat.module.system.enums.social.SocialTypeEnum; import com.zt.plat.module.system.enums.social.SocialTypeEnum;
import com.zt.plat.module.system.enums.user.UserSourceEnum;
import com.zt.plat.module.system.service.logger.LoginLogService; import com.zt.plat.module.system.service.logger.LoginLogService;
import com.zt.plat.module.system.service.member.MemberService;
import com.zt.plat.module.system.service.oauth2.EbanOAuth2Service;
import com.zt.plat.module.system.service.oauth2.EbanTokenService;
import com.zt.plat.module.system.service.oauth2.OAuth2TokenService; import com.zt.plat.module.system.service.oauth2.OAuth2TokenService;
import com.zt.plat.module.system.service.social.SocialUserService; import com.zt.plat.module.system.service.social.SocialUserService;
import com.zt.plat.module.system.service.user.AdminUserService; import com.zt.plat.module.system.service.user.AdminUserService;
@@ -57,6 +61,12 @@ public class AdminAuthServiceImplTest extends BaseDbUnitTest {
private SmsCodeApi smsCodeApi; private SmsCodeApi smsCodeApi;
@MockBean @MockBean
private OAuth2TokenService oauth2TokenService; private OAuth2TokenService oauth2TokenService;
@MockBean
private MemberService memberService;
@MockBean
private EbanOAuth2Service ebanOAuth2Service;
@MockBean
private EbanTokenService ebanTokenService;
@MockBean @MockBean
private Validator validator; private Validator validator;
@@ -78,7 +88,7 @@ public class AdminAuthServiceImplTest extends BaseDbUnitTest {
.setPassword(password).setStatus(CommonStatusEnum.ENABLE.getStatus())); .setPassword(password).setStatus(CommonStatusEnum.ENABLE.getStatus()));
when(userService.getUserByUsername(eq(username))).thenReturn(user); when(userService.getUserByUsername(eq(username))).thenReturn(user);
// mock password 匹配 // mock password 匹配
when(userService.isPasswordMatch(eq(password), eq(user.getPassword()))).thenReturn(true); when(userService.isPasswordMatch(eq(user), eq(password))).thenReturn(true);
// 调用 // 调用
AdminUserDO loginUser = authService.authenticate(username, password); AdminUserDO loginUser = authService.authenticate(username, password);
@@ -132,7 +142,7 @@ public class AdminAuthServiceImplTest extends BaseDbUnitTest {
.setPassword(password).setStatus(CommonStatusEnum.DISABLE.getStatus())); .setPassword(password).setStatus(CommonStatusEnum.DISABLE.getStatus()));
when(userService.getUserByUsername(eq(username))).thenReturn(user); when(userService.getUserByUsername(eq(username))).thenReturn(user);
// mock password 匹配 // mock password 匹配
when(userService.isPasswordMatch(eq(password), eq(user.getPassword()))).thenReturn(true); when(userService.isPasswordMatch(eq(user), eq(password))).thenReturn(true);
// 调用, 并断言异常 // 调用, 并断言异常
assertServiceException(() -> authService.authenticate(username, password), assertServiceException(() -> authService.authenticate(username, password),
@@ -158,7 +168,7 @@ public class AdminAuthServiceImplTest extends BaseDbUnitTest {
.setPassword("test_password").setStatus(CommonStatusEnum.ENABLE.getStatus())); .setPassword("test_password").setStatus(CommonStatusEnum.ENABLE.getStatus()));
when(userService.getUserByUsername(eq("test_username"))).thenReturn(user); when(userService.getUserByUsername(eq("test_username"))).thenReturn(user);
// mock password 匹配 // mock password 匹配
when(userService.isPasswordMatch(eq("test_password"), eq(user.getPassword()))).thenReturn(true); when(userService.isPasswordMatch(eq(user), eq("test_password"))).thenReturn(true);
// mock 缓存登录用户到 Redis // mock 缓存登录用户到 Redis
OAuth2AccessTokenDO accessTokenDO = randomPojo(OAuth2AccessTokenDO.class, o -> o.setUserId(1L) OAuth2AccessTokenDO accessTokenDO = randomPojo(OAuth2AccessTokenDO.class, o -> o.setUserId(1L)
.setUserType(UserTypeEnum.ADMIN.getValue())); .setUserType(UserTypeEnum.ADMIN.getValue()));
@@ -179,6 +189,49 @@ public class AdminAuthServiceImplTest extends BaseDbUnitTest {
reqVO.getSocialType(), reqVO.getSocialCode(), reqVO.getSocialState()))); reqVO.getSocialType(), reqVO.getSocialCode(), reqVO.getSocialState())));
} }
@Test
public void testLogin_internalUserBlocked() {
AuthLoginReqVO reqVO = randomPojo(AuthLoginReqVO.class, o -> {
o.setUsername("sync_user");
o.setPassword("Pass@123");
o.setSocialType(null);
});
authService.setCaptchaEnable(false);
AdminUserDO user = randomPojo(AdminUserDO.class, o -> o.setId(10L)
.setUsername("sync_user")
.setPassword("bcrypt")
.setStatus(CommonStatusEnum.ENABLE.getStatus())
.setUserSource(UserSourceEnum.SYNC.getSource()));
when(userService.getUserByUsername(eq("sync_user"))).thenReturn(user);
when(userService.isPasswordMatch(eq(user), eq("Pass@123"))).thenReturn(true);
assertServiceException(() -> authService.login(reqVO), AUTH_LOGIN_INTERNAL_USER_PASSWORD_NOT_ALLOWED);
}
@Test
public void testLogin_iWorkUserAllowed() {
AuthLoginReqVO reqVO = randomPojo(AuthLoginReqVO.class, o -> {
o.setUsername("iwork_user");
o.setPassword("Password1!");
o.setSocialType(null);
});
authService.setCaptchaEnable(false);
AdminUserDO user = randomPojo(AdminUserDO.class, o -> o.setId(20L)
.setUsername("iwork_user")
.setPassword("md5")
.setStatus(CommonStatusEnum.ENABLE.getStatus())
.setUserSource(UserSourceEnum.IWORK.getSource()));
when(userService.getUserByUsername(eq("iwork_user"))).thenReturn(user);
when(userService.isPasswordMatch(eq(user), eq("Password1!"))).thenReturn(true);
OAuth2AccessTokenDO accessTokenDO = randomPojo(OAuth2AccessTokenDO.class, o -> o.setUserId(20L)
.setUserType(UserTypeEnum.ADMIN.getValue()));
when(oauth2TokenService.createAccessToken(eq(20L), eq(UserTypeEnum.ADMIN.getValue()), eq("default"), isNull()))
.thenReturn(accessTokenDO);
AuthLoginRespVO respVO = authService.login(reqVO);
assertPojoEquals(accessTokenDO, respVO);
}
@Test @Test
public void testSendSmsCode() { public void testSendSmsCode() {
// 准备参数 // 准备参数

View File

@@ -1,228 +0,0 @@
package com.zt.plat.module.system.service.integration.iwork.impl;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkDetailRecordVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkDetailTableVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkFormFieldVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkOperationRespVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkUserInfoReqVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkUserInfoRespVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkWorkflowCreateReqVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkWorkflowVoidReqVO;
import com.zt.plat.module.system.framework.integration.iwork.config.IWorkProperties;
import com.zt.plat.module.system.service.integration.iwork.IWorkIntegrationService;
import com.fasterxml.jackson.databind.ObjectMapper;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import okhttp3.mockwebserver.RecordedRequest;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.web.reactive.function.client.WebClient;
import javax.crypto.Cipher;
import java.nio.charset.StandardCharsets;
import java.security.KeyFactory;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.spec.X509EncodedKeySpec;
import java.time.Duration;
import java.util.Base64;
import java.util.List;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
class IWorkIntegrationServiceImplTest {
private static KeyPair serverKeyPair;
private static String serverPublicKeyBase64;
private static String clientPublicKeyBase64;
private MockWebServer mockWebServer;
private IWorkIntegrationService integrationService;
private IWorkProperties properties;
@BeforeAll
static void initKeys() throws Exception {
KeyPairGenerator generator = KeyPairGenerator.getInstance("RSA");
generator.initialize(1024);
serverKeyPair = generator.generateKeyPair();
serverPublicKeyBase64 = Base64.getEncoder().encodeToString(serverKeyPair.getPublic().getEncoded());
KeyPair clientKeyPair = generator.generateKeyPair();
clientPublicKeyBase64 = Base64.getEncoder().encodeToString(clientKeyPair.getPublic().getEncoded());
}
@BeforeEach
void setUp() throws Exception {
mockWebServer = new MockWebServer();
mockWebServer.start();
properties = buildProperties();
WebClient.Builder builder = WebClient.builder();
ObjectMapper objectMapper = new ObjectMapper();
integrationService = new IWorkIntegrationServiceImpl(properties, objectMapper, builder);
}
@AfterEach
void tearDown() throws Exception {
mockWebServer.shutdown();
}
@Test
void testWorkflowLifecycle() throws Exception {
enqueueRegisterResponse();
enqueueApplyTokenResponse();
enqueueJsonResponse("{\"code\":1,\"userid\":\"1001\",\"msg\":\"OK\"}");
enqueueJsonResponse("{\"code\":\"1\",\"requestid\":\"REQ-001\",\"msg\":\"created\"}");
enqueueJsonResponse("{\"code\":\"1\",\"msg\":\"voided\"}");
IWorkUserInfoReqVO userReq = new IWorkUserInfoReqVO();
userReq.setIdentifierKey("loginid");
userReq.setIdentifierValue("zhangsan");
IWorkUserInfoRespVO userResp = integrationService.resolveUserId(userReq);
assertThat(userResp.isSuccess()).isTrue();
assertThat(userResp.getUserId()).isEqualTo("1001");
IWorkWorkflowCreateReqVO createReq = buildCreateRequest();
IWorkOperationRespVO createResp = integrationService.createWorkflow(createReq);
assertThat(createResp.isSuccess()).isTrue();
assertThat(createResp.getPayload().get("requestid")).isEqualTo("REQ-001");
IWorkWorkflowVoidReqVO voidReq = new IWorkWorkflowVoidReqVO();
voidReq.setRequestId("REQ-001");
voidReq.setReason("testing void");
IWorkOperationRespVO voidResp = integrationService.voidWorkflow(voidReq);
assertThat(voidResp.isSuccess()).isTrue();
verifyRegisterRequest(mockWebServer.takeRequest());
verifyApplyTokenRequest(mockWebServer.takeRequest());
verifyUserInfoRequest(mockWebServer.takeRequest());
verifyCreateRequest(mockWebServer.takeRequest());
verifyVoidRequest(mockWebServer.takeRequest());
assertThat(mockWebServer.getRequestCount()).isEqualTo(5);
}
private IWorkProperties buildProperties() {
IWorkProperties properties = new IWorkProperties();
properties.setEnabled(true);
properties.setBaseUrl(mockWebServer.url("/").toString());
properties.setAppId("test-app");
properties.setClientPublicKey(clientPublicKeyBase64);
properties.setUserId("1");
properties.setWorkflowId(54L);
properties.getToken().setTtlSeconds(3600L);
properties.getToken().setRefreshAheadSeconds(30L);
properties.getClient().setResponseTimeout(Duration.ofSeconds(5));
properties.getPaths().setRegister("/api/ec/dev/auth/regist");
properties.getPaths().setApplyToken("/api/ec/dev/auth/applytoken");
properties.getPaths().setUserInfo("/api/workflow/paService/getUserInfo");
properties.getPaths().setCreateWorkflow("/api/workflow/paService/doCreateRequest");
properties.getPaths().setVoidWorkflow("/api/workflow/paService/doCancelRequest");
properties.getClient().setConnectTimeout(Duration.ofSeconds(5));
return properties;
}
private void verifyRegisterRequest(RecordedRequest request) {
assertThat(request.getPath()).isEqualTo("/api/ec/dev/auth/regist");
assertThat(request.getHeader(properties.getHeaders().getAppId())).isEqualTo("test-app");
assertThat(request.getHeader(properties.getHeaders().getClientPublicKey())).isEqualTo(clientPublicKeyBase64);
}
private void verifyApplyTokenRequest(RecordedRequest request) throws Exception {
assertThat(request.getPath()).isEqualTo("/api/ec/dev/auth/applytoken");
assertThat(request.getHeader(properties.getHeaders().getAppId())).isEqualTo("test-app");
assertThat(request.getHeader(properties.getHeaders().getTime())).isEqualTo("3600");
String decryptedSecret = decryptHeader(request.getHeader(properties.getHeaders().getSecret()));
assertThat(decryptedSecret).isEqualTo("plain-secret");
}
private void verifyUserInfoRequest(RecordedRequest request) throws Exception {
assertThat(request.getPath()).isEqualTo("/api/workflow/paService/getUserInfo");
assertThat(request.getHeader(properties.getHeaders().getToken())).isEqualTo("token-123");
String decryptedUserId = decryptHeader(request.getHeader(properties.getHeaders().getUserId()));
assertThat(decryptedUserId).isEqualTo("1");
String body = request.getBody().readUtf8();
assertThat(body).contains("loginid");
assertThat(body).contains("zhangsan");
}
private void verifyCreateRequest(RecordedRequest request) throws Exception {
assertThat(request.getPath()).isEqualTo("/api/workflow/paService/doCreateRequest");
assertThat(request.getHeader(properties.getHeaders().getToken())).isEqualTo("token-123");
String decryptedUserId = decryptHeader(request.getHeader(properties.getHeaders().getUserId()));
assertThat(decryptedUserId).isEqualTo("1");
String body = request.getBody().readUtf8();
assertThat(body).contains("requestName=测试流程");
assertThat(body).contains("workflowId=54");
assertThat(body).contains("mainData=%5B");
}
private void verifyVoidRequest(RecordedRequest request) throws Exception {
assertThat(request.getPath()).isEqualTo("/api/workflow/paService/doCancelRequest");
assertThat(request.getHeader(properties.getHeaders().getToken())).isEqualTo("token-123");
String decryptedUserId = decryptHeader(request.getHeader(properties.getHeaders().getUserId()));
assertThat(decryptedUserId).isEqualTo("1");
String body = request.getBody().readUtf8();
assertThat(body).contains("requestId=REQ-001");
assertThat(body).contains("remark=testing+void");
}
private void enqueueRegisterResponse() {
enqueueJsonResponse("{" +
"\"secret\":\"plain-secret\"," +
"\"spk\":\"" + serverPublicKeyBase64 + "\"}");
}
private void enqueueApplyTokenResponse() {
enqueueJsonResponse("{\"token\":\"token-123\",\"expire\":3600}");
}
private void enqueueJsonResponse(String body) {
mockWebServer.enqueue(new MockResponse()
.setHeader("Content-Type", "application/json")
.setBody(body));
}
private IWorkWorkflowCreateReqVO buildCreateRequest() {
IWorkFormFieldVO field1 = new IWorkFormFieldVO();
field1.setFieldName("sqr");
field1.setFieldValue("张三");
IWorkFormFieldVO field2 = new IWorkFormFieldVO();
field2.setFieldName("sqrq");
field2.setFieldValue("2023-11-02");
IWorkDetailRecordVO detailRecord = new IWorkDetailRecordVO();
detailRecord.setRecordOrder(0);
IWorkFormFieldVO detailField = new IWorkFormFieldVO();
detailField.setFieldName("ddh");
detailField.setFieldValue("100010");
detailRecord.setFields(List.of(detailField));
IWorkDetailTableVO detailTable = new IWorkDetailTableVO();
detailTable.setTableDBName("formtable_main_26_dt1");
detailTable.setRecords(List.of(detailRecord));
IWorkWorkflowCreateReqVO req = new IWorkWorkflowCreateReqVO();
req.setRequestName("测试流程");
req.setMainFields(List.of(field1, field2));
req.setDetailTables(List.of(detailTable));
req.setOtherParams(Map.of("isnextflow", "0"));
return req;
}
private String decryptHeader(String headerValue) throws Exception {
Cipher cipher = Cipher.getInstance("RSA/ECB/PKCS1Padding");
cipher.init(Cipher.DECRYPT_MODE, serverKeyPair.getPrivate());
byte[] decrypted = cipher.doFinal(Base64.getDecoder().decode(headerValue));
return new String(decrypted, StandardCharsets.UTF_8);
}
}

View File

@@ -1,121 +0,0 @@
package com.zt.plat.module.system.service.integration.iwork.impl;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkHrSubcompanyPageRespVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkHrSyncRespVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkOrgSyncReqVO;
import com.zt.plat.module.system.controller.admin.integration.iwork.vo.IWorkSubcompanyQueryReqVO;
import com.zt.plat.module.system.framework.integration.iwork.config.IWorkProperties;
import com.zt.plat.module.system.service.integration.iwork.IWorkOrgRestService;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import okhttp3.mockwebserver.RecordedRequest;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.util.DigestUtils;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.time.Clock;
import java.time.Duration;
import java.time.Instant;
import java.time.ZoneOffset;
import java.util.List;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
class IWorkOrgRestServiceImplTest {
private MockWebServer mockWebServer;
private IWorkOrgRestService service;
private IWorkProperties properties;
private Clock fixedClock;
private ObjectMapper objectMapper;
@BeforeEach
void setUp() throws Exception {
mockWebServer = new MockWebServer();
mockWebServer.start();
properties = buildProperties();
fixedClock = Clock.fixed(Instant.ofEpochMilli(1_672_531_200_000L), ZoneOffset.UTC);
objectMapper = new ObjectMapper();
service = new IWorkOrgRestServiceImpl(properties, objectMapper, fixedClock);
}
@AfterEach
void tearDown() throws Exception {
mockWebServer.shutdown();
}
@Test
void shouldListSubcompanies() throws Exception {
mockWebServer.enqueue(jsonResponse("{\"code\":\"1\",\"data\":{\"totalSize\":1,\"totalPage\":1,\"pageSize\":10,\"pageNumber\":1,\"dataList\":[{\"subcompanyid1\":4,\"subcompanyname\":\"总部\"}]}}"));
IWorkSubcompanyQueryReqVO reqVO = new IWorkSubcompanyQueryReqVO();
reqVO.setParams(Map.of("curpage", 1));
IWorkHrSubcompanyPageRespVO respVO = service.listSubcompanies(reqVO);
assertThat(respVO.isSuccess()).isTrue();
assertThat(respVO.getTotalSize()).isEqualTo(1);
assertThat(respVO.getDataList()).hasSize(1);
assertThat(respVO.getDataList().get(0).getSubcompanyname()).isEqualTo("总部");
RecordedRequest request = mockWebServer.takeRequest();
assertThat(request.getPath()).isEqualTo(properties.getOrg().getPaths().getSubcompanyPage());
String decoded = URLDecoder.decode(request.getBody().readUtf8(), StandardCharsets.UTF_8);
JsonNode bodyNode = objectMapper.readTree(decoded);
assertThat(bodyNode.path("params").path("curpage").asInt()).isEqualTo(1);
JsonNode tokenNode = bodyNode.path("token");
assertThat(tokenNode.path("ts").asText()).isEqualTo("1672531200000");
String expectedKey = DigestUtils.md5DigestAsHex("test-seed1672531200000".getBytes(StandardCharsets.UTF_8)).toUpperCase();
assertThat(tokenNode.path("key").asText()).isEqualTo(expectedKey);
}
@Test
void shouldSyncDepartments() throws Exception {
mockWebServer.enqueue(jsonResponse("{\"code\":\"1\",\"result\":[{\"@action\":\"add\",\"code\":\"demo\",\"result\":\"success\"}]}"));
IWorkOrgSyncReqVO reqVO = new IWorkOrgSyncReqVO();
reqVO.setData(List.of(Map.of("@action", "add", "code", "demo")));
IWorkHrSyncRespVO respVO = service.syncDepartments(reqVO);
assertThat(respVO.isSuccess()).isTrue();
assertThat(respVO.getResult()).hasSize(1);
assertThat(respVO.getResult().get(0).getCode()).isEqualTo("demo");
RecordedRequest request = mockWebServer.takeRequest();
assertThat(request.getPath()).isEqualTo(properties.getOrg().getPaths().getSyncDepartment());
String decoded = URLDecoder.decode(request.getBody().readUtf8(), StandardCharsets.UTF_8);
JsonNode bodyNode = objectMapper.readTree(decoded);
assertThat(bodyNode.path("data").isArray()).isTrue();
assertThat(bodyNode.path("data").get(0).path("code").asText()).isEqualTo("demo");
}
private MockResponse jsonResponse(String body) {
return new MockResponse()
.setHeader("Content-Type", "application/json")
.setBody(body);
}
private IWorkProperties buildProperties() {
IWorkProperties properties = new IWorkProperties();
properties.setBaseUrl(mockWebServer.url("/").toString());
properties.getClient().setConnectTimeout(Duration.ofSeconds(5));
properties.getClient().setResponseTimeout(Duration.ofSeconds(5));
properties.getOrg().setTokenSeed("test-seed");
IWorkProperties.OrgPaths paths = properties.getOrg().getPaths();
paths.setSubcompanyPage("/api/hrm/resful/getHrmsubcompanyWithPage");
paths.setDepartmentPage("/api/hrm/resful/getHrmdepartmentWithPage");
paths.setJobTitlePage("/api/hrm/resful/getJobtitleInfoWithPage");
paths.setUserPage("/api/hrm/resful/getHrmUserInfoWithPage");
paths.setSyncSubcompany("/api/hrm/resful/synSubcompany");
paths.setSyncDepartment("/api/hrm/resful/synDepartment");
paths.setSyncJobTitle("/api/hrm/resful/synJobtitle");
paths.setSyncUser("/api/hrm/resful/synHrmresource");
return properties;
}
}

View File

@@ -23,6 +23,7 @@ import com.zt.plat.module.system.dal.mysql.dept.UserPostMapper;
import com.zt.plat.module.system.dal.mysql.user.AdminUserMapper; import com.zt.plat.module.system.dal.mysql.user.AdminUserMapper;
import com.zt.plat.module.system.dal.mysql.userdept.UserDeptMapper; import com.zt.plat.module.system.dal.mysql.userdept.UserDeptMapper;
import com.zt.plat.module.system.enums.common.SexEnum; import com.zt.plat.module.system.enums.common.SexEnum;
import com.zt.plat.module.system.enums.user.UserSourceEnum;
import com.zt.plat.module.system.service.dept.DeptServiceImpl; import com.zt.plat.module.system.service.dept.DeptServiceImpl;
import com.zt.plat.module.system.service.dept.PostService; import com.zt.plat.module.system.service.dept.PostService;
import com.zt.plat.module.system.service.permission.PermissionService; import com.zt.plat.module.system.service.permission.PermissionService;
@@ -35,7 +36,9 @@ import org.mockito.stubbing.Answer;
import org.springframework.boot.test.mock.mockito.MockBean; import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Import;
import org.springframework.security.crypto.password.PasswordEncoder; import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.util.DigestUtils;
import java.nio.charset.StandardCharsets;
import java.util.*; import java.util.*;
import java.util.function.Consumer; import java.util.function.Consumer;
@@ -267,6 +270,32 @@ public class AdminUserServiceImplTest extends BaseDbUnitTest {
assertEquals("encode:" + password, user.getPassword()); assertEquals("encode:" + password, user.getPassword());
} }
@Test
public void testUpdateUserPassword_forbiddenForIWork() {
AdminUserDO dbUser = randomAdminUserDO(o -> o.setUserSource(UserSourceEnum.IWORK.getSource()));
userMapper.insert(dbUser);
Long userId = dbUser.getId();
UserProfileUpdatePasswordReqVO reqVO = randomPojo(UserProfileUpdatePasswordReqVO.class, o -> {
o.setOldPassword("oldPwd");
o.setNewPassword("newPwd");
});
assertServiceException(() -> userService.updateUserPassword(userId, reqVO), USER_PASSWORD_MODIFY_FORBIDDEN);
assertServiceException(() -> userService.updateUserPassword(userId, "anotherPwd"), USER_PASSWORD_MODIFY_FORBIDDEN);
}
@Test
public void testIsPasswordMatch_iWorkMd5() {
String rawPassword = "Abc12345";
String md5 = DigestUtils.md5DigestAsHex(rawPassword.getBytes(StandardCharsets.UTF_8)).toUpperCase(Locale.ROOT);
AdminUserDO user = randomAdminUserDO(o -> {
o.setUserSource(UserSourceEnum.IWORK.getSource());
o.setPassword(md5);
});
assertTrue(userService.isPasswordMatch(user, rawPassword));
}
@Test @Test
public void testUpdateUserStatus() { public void testUpdateUserStatus() {
// mock 数据 // mock 数据