Update the private repository URL and move the seata-dm project over from dsc

ranke
2026-01-15 10:07:17 +08:00
parent 287d24fc7f
commit 26ab7dac24
4 changed files with 360 additions and 16 deletions

pom.xml

@@ -204,25 +204,11 @@
 <repository>
     <id>ZT</id>
     <name>中铜 ZStack 私服</name>
-    <url>http://172.16.46.63:30708/repository/test/</url>
+    <url>http://172.16.46.63:30708/repository/zt-cloud/</url>
     <releases>
         <updatePolicy>always</updatePolicy>
         <checksumPolicy>warn</checksumPolicy>
     </releases>
-    <snapshots>
-        <enabled>false</enabled>
-        <updatePolicy>always</updatePolicy>
-    </snapshots>
-</repository>
-<repository>
-    <id>ZT-snap</id>
-    <name>中铜 ZStack 私服</name>
-    <url>http://172.16.46.63:30708/repository/test-snap/</url>
-    <releases>
-        <enabled>false</enabled>
-        <updatePolicy>always</updatePolicy>
-        <checksumPolicy>warn</checksumPolicy>
-    </releases>
     <snapshots>
         <enabled>true</enabled>
         <updatePolicy>always</updatePolicy>


@@ -25,7 +25,7 @@
 <module>zt-spring-boot-starter-job</module>
 <module>zt-spring-boot-starter-mq</module>
 <module>zt-spring-boot-starter-rpc</module>
+<module>zt-spring-boot-starter-seata-dm</module>
 <module>zt-spring-boot-starter-excel</module>
 <module>zt-spring-boot-starter-test</module>


@@ -0,0 +1,32 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>zt-framework</artifactId>
<groupId>com.zt.plat</groupId>
<version>${revision}</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<packaging>jar</packaging>
<artifactId>zt-spring-boot-starter-seata-dm</artifactId>
<name>${project.artifactId}</name>
<description>
Seata patch module for the DM (Dameng) database.
Fixes the dirty undo log rollback failure caused by inconsistent DmdbTimestamp time zone formatting.
Patch source: https://github.com/apache/incubator-seata/pull/7538
This module can be removed once Seata 2.6.0 is released.
</description>
<dependencies>
<!-- Needed at compile time only; provided at runtime by seata-spring-boot-starter -->
<dependency>
<groupId>org.apache.seata</groupId>
<artifactId>seata-spring-boot-starter</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
</project>
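
A consuming service only needs to declare the starter; a minimal sketch, assuming the version is resolved through the parent's ${revision} (declare an explicit <version> when depending on it from outside this reactor):

<dependency>
    <groupId>com.zt.plat</groupId>
    <artifactId>zt-spring-boot-starter-seata-dm</artifactId>
</dependency>

The patch relies on class shadowing: the source file below is published under Seata's own package and class name, so the copy in this jar replaces org.apache.seata.rm.datasource.DataCompareUtils from seata-rm-datasource when it is loaded first.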


@@ -0,0 +1,326 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.seata.rm.datasource;
import org.apache.seata.common.util.CollectionUtils;
import org.apache.seata.common.util.StringUtils;
import org.apache.seata.core.model.Result;
import org.apache.seata.rm.datasource.sql.struct.Field;
import org.apache.seata.rm.datasource.sql.struct.Row;
import org.apache.seata.rm.datasource.sql.struct.TableRecords;
import org.apache.seata.rm.datasource.undo.AbstractUndoLogManager;
import org.apache.seata.rm.datasource.undo.parser.FastjsonUndoLogParser;
import org.apache.seata.rm.datasource.undo.parser.JacksonUndoLogParser;
import org.apache.seata.sqlparser.struct.TableMeta;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.Method;
import java.math.BigDecimal;
import java.sql.Date;
import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.time.Instant;
import java.time.LocalDateTime;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
/**
* DataCompareUtils - patched copy that handles the DM (Dameng) database DmdbTimestamp time zone issue.
* <p>
* This class shadows Seata's original DataCompareUtils and adds special handling for the DM
* DmdbTimestamp type. Both values are converted to UTC Instants before comparison, which resolves
* the dirty undo log problem caused by inconsistent time zone formatting.
* <p>
* Background:
* - The DM DmdbTimestamp type carries inconsistent time zone formats after serialization/deserialization.
* - For example, the beforeImage holds "2025-12-25 09:38:54.077811 +08:00"
*   while the afterImage holds "2025-12-25 09:38:54.077811".
* - As a result, the dirty undo log check fails when Seata AT mode rolls back.
* <p>
* Fix:
* - When both values are DmdbTimestamp instances, convert them to UTC Instants and compare those.
* - This ignores the time zone formatting difference and compares only the actual points in time.
* <p>
* Patch source: https://github.com/apache/incubator-seata/pull/7538
* Related issue: https://github.com/apache/incubator-seata/issues/7453
* The fix has been merged into the Seata 2.x branch and is expected in Seata 2.6.0, after which this module can be removed.
*
* @author Seata Community (PR #7538)
*/
public class DataCompareUtils {
private static final Logger LOGGER = LoggerFactory.getLogger(DataCompareUtils.class);
/**
* Marker flag indicating that the patched class has been loaded.
*/
private static final boolean PATCHED;
static {
PATCHED = true;
LOGGER.info("[zt-spring-boot-starter-seata-dm] patched DataCompareUtils loaded; works around the DM DmdbTimestamp time zone issue");
}
}
private DataCompareUtils() {}
/**
* Is field equals result.
*
* @param f0 the f 0
* @param f1 the f 1
* @return the result
*/
public static Result<Boolean> isFieldEquals(Field f0, Field f1) {
if (f0 == null) {
return Result.build(f1 == null);
} else {
if (f1 == null) {
return Result.build(false);
} else {
if (StringUtils.equalsIgnoreCase(f0.getName(), f1.getName()) && f0.getType() == f1.getType()) {
if (f0.getValue() == null) {
return Result.build(f1.getValue() == null);
} else {
if (f1.getValue() == null) {
return Result.buildWithParams(
false, "Field not equals, name {}, new value is null", f0.getName());
} else {
String currentSerializer = AbstractUndoLogManager.getCurrentSerializer();
if (StringUtils.equals(currentSerializer, FastjsonUndoLogParser.NAME)) {
convertType(f0, f1);
}
// DM (Dameng) DmdbTimestamp time zone patch (PR #7538)
if (StringUtils.equals(currentSerializer, JacksonUndoLogParser.NAME)) {
Object v0 = f0.getValue();
Object v1 = f1.getValue();
if (isDmdbTimestamp(v0) && isDmdbTimestamp(v1)) {
Instant i0 = toInstant(v0);
Instant i1 = toInstant(v1);
boolean equals = Objects.equals(i0, i1);
LOGGER.info("[zt-seata-dm-patch] DmdbTimestamp 字段比较: field={}, equals={}", f0.getName(), equals);
return equals
? Result.ok()
: Result.buildWithParams(
false,
"Field not equals (DmdbTimestamp), name {}, old value {}, new value {}",
f0.getName(),
v0,
v1);
}
}
boolean result = Objects.deepEquals(f0.getValue(), f1.getValue());
if (result) {
return Result.ok();
} else {
return Result.buildWithParams(
false,
"Field not equals, name {}, old value {}, new value {}",
f0.getName(),
f0.getValue(),
f1.getValue());
}
}
}
} else {
return Result.buildWithParams(
false,
"Field not equals, old name {} type {}, new name {} type {}",
f0.getName(),
f0.getType(),
f1.getName(),
f1.getType());
}
}
}
}
private static void convertType(Field f0, Field f1) {
int f0Type = f0.getType();
int f1Type = f1.getType();
if (f0Type == Types.DATE && f0.getValue().getClass().equals(String.class)) {
String[] strings = f0.getValue().toString().split(" ");
f0.setValue(Date.valueOf(strings[0]));
}
if (f1Type == Types.DATE && f1.getValue().getClass().equals(String.class)) {
String[] strings = f1.getValue().toString().split(" ");
f1.setValue(Date.valueOf(strings[0]));
}
if (f0Type == Types.TIME && f0.getValue().getClass().equals(String.class)) {
f0.setValue(Time.valueOf(f0.getValue().toString()));
}
if (f1Type == Types.TIME && f1.getValue().getClass().equals(String.class)) {
f1.setValue(Time.valueOf(f1.getValue().toString()));
}
if (f0Type == Types.TIMESTAMP && f0.getValue().getClass().equals(String.class)) {
if (f1.getValue().getClass().equals(LocalDateTime.class)) {
f0.setValue(LocalDateTime.parse(f0.getValue().toString()));
} else {
f0.setValue(Timestamp.valueOf(f0.getValue().toString()));
}
}
if (f1Type == Types.TIMESTAMP && f1.getValue().getClass().equals(String.class)) {
f1.setValue(Timestamp.valueOf(f1.getValue().toString()));
}
if (f0Type == Types.DECIMAL && f0.getValue().getClass().equals(Integer.class)) {
f0.setValue(new BigDecimal(f0.getValue().toString()));
}
if (f1Type == Types.DECIMAL && f1.getValue().getClass().equals(Integer.class)) {
f1.setValue(new BigDecimal(f1.getValue().toString()));
}
if (f0Type == Types.BIGINT && f0.getValue().getClass().equals(Integer.class)) {
f0.setValue(Long.parseLong(f0.getValue().toString()));
}
if (f1Type == Types.BIGINT && f1.getValue().getClass().equals(Integer.class)) {
f1.setValue(Long.parseLong(f1.getValue().toString()));
}
}
/**
* Is records equals result.
*
* @param beforeImage the before image
* @param afterImage the after image
* @return the result
*/
public static Result<Boolean> isRecordsEquals(TableRecords beforeImage, TableRecords afterImage) {
if (beforeImage == null) {
return Result.build(afterImage == null, null);
} else {
if (afterImage == null) {
return Result.build(false, null);
}
if (beforeImage.getTableName().equalsIgnoreCase(afterImage.getTableName())
&& CollectionUtils.isSizeEquals(beforeImage.getRows(), afterImage.getRows())) {
// when image is EmptyTableRecords, getTableMeta will throw an exception
if (CollectionUtils.isEmpty(beforeImage.getRows())) {
return Result.ok();
}
return compareRows(beforeImage.getTableMeta(), beforeImage.getRows(), afterImage.getRows());
} else {
return Result.build(false, null);
}
}
}
/**
* Is rows equals result.
*
* @param tableMetaData the table meta data
* @param oldRows the old rows
* @param newRows the new rows
* @return the result
*/
public static Result<Boolean> isRowsEquals(TableMeta tableMetaData, List<Row> oldRows, List<Row> newRows) {
if (!CollectionUtils.isSizeEquals(oldRows, newRows)) {
return Result.build(false, null);
}
return compareRows(tableMetaData, oldRows, newRows);
}
private static Result<Boolean> compareRows(TableMeta tableMetaData, List<Row> oldRows, List<Row> newRows) {
// old row to map
Map<String, Map<String, Field>> oldRowsMap = rowListToMap(oldRows, tableMetaData.getPrimaryKeyOnlyName());
// new row to map
Map<String, Map<String, Field>> newRowsMap = rowListToMap(newRows, tableMetaData.getPrimaryKeyOnlyName());
// compare data
for (Map.Entry<String, Map<String, Field>> oldEntry : oldRowsMap.entrySet()) {
String key = oldEntry.getKey();
Map<String, Field> oldRow = oldEntry.getValue();
Map<String, Field> newRow = newRowsMap.get(key);
if (newRow == null) {
return Result.buildWithParams(false, "compare row failed, rowKey {}, reason [newRow is null]", key);
}
for (Map.Entry<String, Field> oldRowEntry : oldRow.entrySet()) {
String fieldName = oldRowEntry.getKey();
Field oldField = oldRowEntry.getValue();
Field newField = newRow.get(fieldName);
if (newField == null) {
return Result.buildWithParams(
false,
"compare row failed, rowKey {}, fieldName {}, reason [newField is null]",
key,
fieldName);
}
Result<Boolean> oldEqualsNewFieldResult = isFieldEquals(oldField, newField);
if (!oldEqualsNewFieldResult.getResult()) {
return oldEqualsNewFieldResult;
}
}
}
return Result.ok();
}
/**
* Row list to map map.
*
* @param rowList the row list
* @param primaryKeyList the primary key list
* @return the map
*/
public static Map<String, Map<String, Field>> rowListToMap(List<Row> rowList, List<String> primaryKeyList) {
// {value of primaryKey, value of all columns}
Map<String, Map<String, Field>> rowMap = new HashMap<>();
for (Row row : rowList) {
// ensure the order of column
List<Field> rowFieldList = row.getFields().stream()
.sorted(Comparator.comparing(Field::getName))
.collect(Collectors.toList());
// {uppercase fieldName : field}
Map<String, Field> colsMap = new HashMap<>();
StringBuilder rowKey = new StringBuilder();
boolean firstUnderline = false;
for (int j = 0; j < rowFieldList.size(); j++) {
Field field = rowFieldList.get(j);
if (primaryKeyList.stream().anyMatch(e -> field.getName().equals(e))) {
if (firstUnderline && j > 0) {
rowKey.append("_");
}
rowKey.append(String.valueOf(field.getValue()));
firstUnderline = true;
}
colsMap.put(field.getName().trim().toUpperCase(), field);
}
rowMap.put(rowKey.toString(), colsMap);
}
return rowMap;
}
/**
* Checks whether the given object is a DM (Dameng) DmdbTimestamp.
*/
private static boolean isDmdbTimestamp(Object obj) {
return obj != null
&& "dm.jdbc.driver.DmdbTimestamp".equals(obj.getClass().getName());
}
/**
* Converts a DmdbTimestamp to an Instant via its toInstant() method. Reflection is used so that
* this module does not need a compile-time dependency on the DM JDBC driver.
*/
private static Instant toInstant(Object dmdbTimestamp) {
try {
Method toInstantMethod = dmdbTimestamp.getClass().getMethod("toInstant");
return (Instant) toInstantMethod.invoke(dmdbTimestamp);
} catch (Exception e) {
throw new RuntimeException("Failed to convert DmdbTimestamp to Instant", e);
}
}
}
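
To make the comparison principle concrete without the DM driver, the following is a hypothetical, self-contained sketch that uses java.time stand-ins instead of dm.jdbc.driver.DmdbTimestamp (which would require the DM JDBC driver on the classpath). The two renderings of the same wall-clock time differ in their serialized form, which is what trips Objects.deepEquals, but they map to the same UTC Instant, which is the comparison isFieldEquals falls back to for DmdbTimestamp values.

import java.time.Instant;
import java.time.LocalDateTime;
import java.time.OffsetDateTime;
import java.time.ZoneId;
import java.util.Objects;

// Hypothetical demo of the patch's comparison principle; not part of the module.
// It compares timestamps as UTC Instants rather than by their serialized forms.
public class DmdbTimestampCompareDemo {
    public static void main(String[] args) {
        // beforeImage-style value: explicit offset, as in "2025-12-25 09:38:54.077811 +08:00"
        OffsetDateTime withOffset = OffsetDateTime.parse("2025-12-25T09:38:54.077811+08:00");
        // afterImage-style value: no offset, interpreted in the session zone (+08:00 here)
        LocalDateTime withoutOffset = LocalDateTime.parse("2025-12-25T09:38:54.077811");

        // Comparing the serialized forms fails; this is what produced the dirty undo log.
        System.out.println(withOffset.toString().equals(withoutOffset.toString())); // false

        // Comparing as UTC Instants succeeds: both values describe the same point in time.
        Instant i0 = withOffset.toInstant();
        Instant i1 = withoutOffset.atZone(ZoneId.of("Asia/Shanghai")).toInstant();
        System.out.println(Objects.equals(i0, i1)); // true
    }
}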