Merge remote-tracking branch 'origin/ddd'

# Conflicts:
#	dynamic-datasource-creator/src/main/java/com/baomidou/dynamic/datasource/toolkit/DsStrUtils.java
#	dynamic-datasource-spring/src/main/java/com/baomidou/dynamic/datasource/tx/TransactionContext.java
#	dynamic-datasource-spring/src/main/java/com/baomidou/dynamic/datasource/tx/TransactionalTemplate.java
huayanYu 2023-10-23 21:38:53 +08:00
commit 15d7447d55
7 changed files with 127 additions and 109 deletions

View File

@@ -18,14 +18,14 @@ package com.baomidou.dynamic.datasource.creator.druid;
import com.alibaba.druid.pool.DruidDataSource;
import com.baomidou.dynamic.datasource.toolkit.DsConfigUtil;
import lombok.NonNull;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import java.beans.BeanInfo;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Method;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.*;
/**
* Druid configuration utility class
@@ -44,36 +44,29 @@ public final class DruidConfigUtil {
private static final Map<String, PropertyDescriptor> CONFIG_DESCRIPTOR_MAP = DsConfigUtil.getPropertyDescriptorMap(DruidConfig.class);
private static final Map<String, PropertyDescriptor> DATASOURCE_DESCRIPTOR_MAP = DsConfigUtil.getPropertyDescriptorMap(DruidDataSource.class);
private static final Class<?> CLAZZ = DruidDataSource.class;
/**
* Convert the combination of the global and local configuration into Properties
*
* @param g global configuration
* @param c current configuration
* @param config current configuration
* @return Druid configuration
*/
public static Properties mergeConfig(DruidConfig g, @NonNull DruidConfig c) {
public static Properties toProperties(@NonNull DruidConfig config) {
Properties properties = new Properties();
for (Map.Entry<String, PropertyDescriptor> entry : CONFIG_DESCRIPTOR_MAP.entrySet()) {
String key = entry.getKey();
PropertyDescriptor descriptor = entry.getValue();
Method readMethod = descriptor.getReadMethod();
Class<?> returnType = readMethod.getReturnType();
if (List.class.isAssignableFrom(returnType) || Set.class.isAssignableFrom(returnType) || Map.class.isAssignableFrom(returnType) || Properties.class.isAssignableFrom(returnType)) {
if (List.class.isAssignableFrom(returnType)
|| Set.class.isAssignableFrom(returnType)
|| Map.class.isAssignableFrom(returnType)
|| Properties.class.isAssignableFrom(returnType)) {
continue;
}
try {
Object cValue = readMethod.invoke(c);
Object cValue = readMethod.invoke(config);
if (cValue != null) {
properties.setProperty("druid." + key, String.valueOf(cValue));
continue;
}
if (g != null) {
Object gValue = readMethod.invoke(g);
if (gValue != null) {
properties.setProperty("druid." + key, String.valueOf(gValue));
}
}
} catch (Exception e) {
log.warn("druid current could not set [" + key + " ]", e);
@@ -81,78 +74,34 @@ public final class DruidConfigUtil {
}
//filters are handled separately; the stat filter is the default
String filters = getValue(g, c, "filters");
String filters = config.getFilters();
if (filters == null) {
filters = STAT_STR;
}
String publicKey = getValue(g, c, "publicKey");
boolean configFilterExist = publicKey != null && publicKey.length() > 0;
if (publicKey != null && publicKey.length() > 0 && !filters.contains(CONFIG_STR)) {
String publicKey = config.getPublicKey();
boolean configFilterExist = publicKey != null && !publicKey.isEmpty();
if (publicKey != null && !publicKey.isEmpty() && !filters.contains(CONFIG_STR)) {
filters += "," + CONFIG_STR;
}
properties.setProperty(FILTERS, filters);
Properties connectProperties = new Properties();
Properties cConnectionProperties = c.getConnectionProperties();
if (g != null) {
Properties gConnectionProperties = g.getConnectionProperties();
if (gConnectionProperties != null) {
connectProperties.putAll(gConnectionProperties);
}
}
if (cConnectionProperties != null) {
connectProperties.putAll(cConnectionProperties);
}
Properties connectProperties = config.getConnectionProperties();
if (configFilterExist) {
connectProperties.setProperty("config.decrypt", Boolean.TRUE.toString());
connectProperties.setProperty("config.decrypt.key", publicKey);
}
c.setConnectionProperties(connectProperties);
config.setConnectionProperties(connectProperties);
return properties;
}
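
For orientation, a brief usage sketch of the refactored toProperties: with no filters set, the stat filter is used by default, and a non-empty publicKey appends the config filter and switches on connection-property based decryption. Setter names below are the usual Lombok-generated DruidConfig accessors and are assumed here, not taken from the diff:

```java
// Hedged sketch of the new single-config flow (setter names assumed).
DruidConfig config = new DruidConfig();
config.setMaxActive(20);
config.setPublicKey("MFwwDQYJKoZI...");            // public key for password decryption
config.setConnectionProperties(new Properties());  // set explicitly; toProperties writes into it

Properties props = DruidConfigUtil.toProperties(config);
// Expected (roughly): druid.maxActive=20, druid.publicKey=..., druid.filters=stat,config,
// plus config.decrypt=true / config.decrypt.key=... in config.getConnectionProperties().
```
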
/**
* @param g global configuration
* @param c current configuration
* @param field field name
* @return field value
*/
public static String getValue(DruidConfig g, @NonNull DruidConfig c, String field) {
PropertyDescriptor propertyDescriptor = CONFIG_DESCRIPTOR_MAP.get(field);
if (propertyDescriptor == null) {
return null;
}
Method method = propertyDescriptor.getReadMethod();
if (method == null) {
return null;
}
try {
Object value = method.invoke(c);
if (value != null) {
return String.valueOf(value);
}
if (g != null) {
value = method.invoke(g);
if (value != null) {
return String.valueOf(value);
}
}
} catch (Exception e) {
// do nothing
}
return null;
}
/**
* Set a value on the DruidDataSource
*
* @param dataSource DruidDataSource
* @param field field name
* @param g global configuration
* @param c current configuration
*/
public static void setValue(DruidDataSource dataSource, String field, DruidConfig g, DruidConfig c) {
public static void setValue(DruidDataSource dataSource, String field, DruidConfig c) {
PropertyDescriptor descriptor = DATASOURCE_DESCRIPTOR_MAP.get(field);
if (descriptor == null) {
log.warn("druid current not support [" + field + " ]");
@@ -168,16 +117,94 @@ public final class DruidConfigUtil {
Object value = configReadMethod.invoke(c);
if (value != null) {
writeMethod.invoke(dataSource, value);
return;
}
if (g != null) {
value = configReadMethod.invoke(g);
if (value != null) {
writeMethod.invoke(dataSource, value);
}
}
} catch (Exception e) {
log.warn("druid current set [" + field + " ] error");
}
}
@SneakyThrows
public static void merge(DruidConfig global, DruidConfig item) {
if (global == null) {
return;
}
BeanInfo beanInfo = Introspector.getBeanInfo(DruidConfig.class, Object.class);
PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
for (PropertyDescriptor pd : propertyDescriptors) {
Class<?> propertyType = pd.getPropertyType();
if (Properties.class == propertyType) {
mergeProperties(global, item, pd);
} else if (List.class == propertyType) {
mergeList(global, item, pd);
} else if (Map.class == propertyType) {
mergeMap(global, item, pd);
} else {
mergeBasic(global, item, pd);
}
}
}
@SneakyThrows
private static void mergeList(DruidConfig global, DruidConfig item, PropertyDescriptor pd) {
Method readMethod = pd.getReadMethod();
Method writeMethod = pd.getWriteMethod();
List<Object> result = new ArrayList<>();
List<Object> itemValue = (List) readMethod.invoke(item);
List<Object> globalValue = (List) readMethod.invoke(global);
if (globalValue != null) {
result.addAll(globalValue);
}
if (itemValue != null) {
result.addAll(itemValue);
}
writeMethod.invoke(item, result);
}
@SneakyThrows
private static void mergeMap(DruidConfig global, DruidConfig item, PropertyDescriptor pd) {
Method readMethod = pd.getReadMethod();
Method writeMethod = pd.getWriteMethod();
Map result = new HashMap();
Map itemValue = (Map) readMethod.invoke(item);
Map globalValue = (Map) readMethod.invoke(global);
if (globalValue != null) {
result.putAll(globalValue);
}
if (itemValue != null) {
result.putAll(itemValue);
}
writeMethod.invoke(item, result);
}
@SneakyThrows
private static void mergeProperties(DruidConfig global, DruidConfig item, PropertyDescriptor pd) {
Method readMethod = pd.getReadMethod();
Method writeMethod = pd.getWriteMethod();
Properties itemValue = (Properties) readMethod.invoke(item);
Properties globalValue = (Properties) readMethod.invoke(global);
Properties properties = new Properties();
if (globalValue != null) {
properties.putAll(globalValue);
}
if (itemValue != null) {
properties.putAll(itemValue);
}
if (!properties.isEmpty()) {
writeMethod.invoke(item, properties);
}
}
@SneakyThrows
private static void mergeBasic(DruidConfig global, DruidConfig item, PropertyDescriptor pd) {
Method readMethod = pd.getReadMethod();
Method writeMethod = pd.getWriteMethod();
Object itemValue = readMethod.invoke(item);
if (itemValue == null) {
Object globalValue = readMethod.invoke(global);
if (globalValue != null) {
writeMethod.invoke(item, globalValue);
}
}
}
}
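
The per-field fallback to the global config that mergeConfig/getValue used to perform is now done once, up front, by merge(global, item): basic properties are copied from global only when the item leaves them null, while Properties, List and Map fields are combined with item entries taking precedence. A minimal sketch, assuming the usual Lombok-generated accessors on DruidConfig:

```java
DruidConfig global = new DruidConfig();
global.setInitialSize(5);   // scalar: used only because the item leaves it null
global.setMaxActive(50);    // scalar: overridden by the item below

DruidConfig item = new DruidConfig();
item.setMaxActive(20);

DruidConfigUtil.merge(global, item);
// item.getInitialSize() == 5, item.getMaxActive() == 20;
// Properties/List/Map fields are combined, item entries winning on key clashes.
```
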

View File

@@ -111,9 +111,11 @@ public class DruidDataSourceCreator implements DataSourceCreator {
dataSource.setDriverClassName(driverClassName);
}
DruidConfig config = dataSourceProperty.getDruid();
Properties properties = DruidConfigUtil.mergeConfig(gConfig, config);
DruidConfigUtil.merge(gConfig, config);
Properties properties = DruidConfigUtil.toProperties(config);
List<Filter> proxyFilters = this.initFilters(dataSourceProperty, properties.getProperty("druid.filters"));
String configFilters = properties.getProperty("druid.filters");
List<Filter> proxyFilters = this.initFilters(config, configFilters);
dataSource.setProxyFilters(proxyFilters);
try {
configMethod.invoke(dataSource, properties);
@@ -124,7 +126,7 @@ public class DruidDataSourceCreator implements DataSourceCreator {
dataSource.setConnectProperties(config.getConnectionProperties());
//set the parameters that druid's built-in properties mechanism does not support
for (String param : PARAMS) {
DruidConfigUtil.setValue(dataSource, param, gConfig, config);
DruidConfigUtil.setValue(dataSource, param, config);
}
if (Boolean.FALSE.equals(dataSourceProperty.getLazy())) {
@@ -137,40 +139,35 @@ public class DruidDataSourceCreator implements DataSourceCreator {
return dataSource;
}
private List<Filter> initFilters(DataSourceProperty dataSourceProperty, String filters) {
private List<Filter> initFilters(DruidConfig config, String filters) {
List<Filter> proxyFilters = new ArrayList<>(2);
DruidConfig druid = dataSourceProperty.getDruid();
if (DsStrUtils.hasText(filters)) {
String[] filterItems = filters.split(",");
for (String filter : filterItems) {
switch (filter) {
case "stat":
proxyFilters.add(DruidStatConfigUtil.toStatFilter(druid.getStat(), gConfig.getStat()));
proxyFilters.add(DruidStatConfigUtil.toStatFilter(config.getStat()));
break;
case "wall":
Map<String, Object> configWall = gConfig.getWall();
Map<String, Object> globalWall = druid.getWall();
WallConfig wallConfig = DruidWallConfigUtil.toWallConfig(globalWall, configWall);
Map<String, Object> configWall = config.getWall();
WallConfig wallConfig = DruidWallConfigUtil.toWallConfig(configWall);
WallFilter wallFilter = new WallFilter();
wallFilter.setConfig(wallConfig);
String dbType = (String) configWall.get("db-type");
if (!DsStrUtils.hasText(dbType)) {
dbType = (String) globalWall.get("db-type");
}
wallFilter.setDbType(dbType);
proxyFilters.add(wallFilter);
break;
case "slf4j":
proxyFilters.add(DruidLogConfigUtil.initFilter(Slf4jLogFilter.class, druid.getSlf4j(), gConfig.getSlf4j()));
proxyFilters.add(DruidLogConfigUtil.initFilter(Slf4jLogFilter.class, config.getSlf4j()));
break;
case "commons-log":
proxyFilters.add(DruidLogConfigUtil.initFilter(CommonsLogFilter.class, druid.getCommonsLog(), gConfig.getCommonsLog()));
proxyFilters.add(DruidLogConfigUtil.initFilter(CommonsLogFilter.class, config.getCommonsLog()));
break;
case "log4j":
proxyFilters.add(DruidLogConfigUtil.initFilter(Log4jFilter.class, druid.getLog4j(), gConfig.getLog4j()));
proxyFilters.add(DruidLogConfigUtil.initFilter(Log4jFilter.class, config.getLog4j()));
break;
case "log4j2":
proxyFilters.add(DruidLogConfigUtil.initFilter(Log4j2Filter.class, druid.getLog4j2(), gConfig.getLog4j2()));
proxyFilters.add(DruidLogConfigUtil.initFilter(Log4j2Filter.class, config.getLog4j2()));
break;
default:
log.warn("dynamic-datasource current not support [{}]", filter);

View File

@@ -34,18 +34,16 @@ public final class DruidLogConfigUtil {
private static final Map<String, Method> METHODS = DsConfigUtil.getSetterMethods(LogFilter.class);
/**
* Generate the druid log filter from the current and global configuration
* Generate the druid log filter from the current configuration
*
* @param clazz log filter class
* @param c current configuration
* @param g global configuration
* @param map configuration
* @return log filter
*/
public static LogFilter initFilter(Class<? extends LogFilter> clazz, Map<String, Object> c, Map<String, Object> g) {
public static LogFilter initFilter(Class<? extends LogFilter> clazz, Map<String, Object> map) {
try {
LogFilter filter = clazz.getDeclaredConstructor().newInstance();
Map<String, Object> params = DsConfigUtil.mergeMap(c, g);
for (Map.Entry<String, Object> item : params.entrySet()) {
for (Map.Entry<String, Object> item : map.entrySet()) {
String key = DsConfigUtil.lineToUpper(item.getKey());
Method method = METHODS.get(key);
if (method != null) {

View File

@@ -41,15 +41,13 @@ public final class DruidStatConfigUtil {
}
/**
* Generate the druid stat filter from the current and global configuration
* Generate the druid stat filter from the current configuration
*
* @param c current configuration
* @param g global configuration
* @param map configuration
* @return StatFilter
*/
public static StatFilter toStatFilter(Map<String, Object> c, Map<String, Object> g) {
public static StatFilter toStatFilter(Map<String, Object> map) {
StatFilter filter = new StatFilter();
Map<String, Object> map = DsConfigUtil.mergeMap(c, g);
for (Map.Entry<String, Object> item : map.entrySet()) {
String key = DsConfigUtil.lineToUpper(item.getKey());
Method method = METHODS.get(key);

View File

@@ -36,13 +36,11 @@ public final class DruidWallConfigUtil {
/**
* Generate the druid wall (firewall) configuration from the current and global configuration
*
* @param c current configuration
* @param g global configuration
* @param map current configuration
* @return 防火墙配置
*/
public static WallConfig toWallConfig(Map<String, Object> c, Map<String, Object> g) {
public static WallConfig toWallConfig(Map<String, Object> map) {
WallConfig wallConfig = new WallConfig();
Map<String, Object> map = DsConfigUtil.mergeMap(c, g);
Object dir = map.get("dir");
if (dir != null) {
wallConfig.loadConfig(String.valueOf(dir));
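
A short, hedged usage sketch of the single-map variant; the "dir" key shown is an assumption (Druid ships wall rule directories such as META-INF/druid/wall/mysql inside its jar), and any remaining entries are presumably applied through the generated setters, mirroring the stat filter handling above:

```java
// Hypothetical wall section as it would arrive from configuration binding.
Map<String, Object> wall = new HashMap<>();
wall.put("dir", "META-INF/druid/wall/mysql");   // assumed built-in rule directory

WallConfig wallConfig = DruidWallConfigUtil.toWallConfig(wall);
```
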

View File

@@ -37,7 +37,7 @@ public class TransactionContext {
*/
public static String getXID() {
String xid = CONTEXT_HOLDER.get();
if (DsStrUtils.hasText(xid)) {
if (!DsStrUtils.isEmpty(xid)) {
return xid;
}
return null;
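
The guard switches from hasText to !isEmpty. Assuming DsStrUtils follows the usual Spring-style semantics (isEmpty only checks for null or zero length, while hasText additionally requires a non-whitespace character), the practical difference is that a whitespace-only XID is now returned instead of being treated as absent:

```java
// Hedged comparison, assuming Spring-like semantics for DsStrUtils:
DsStrUtils.isEmpty(null);   // true
DsStrUtils.isEmpty("");     // true
DsStrUtils.isEmpty("   ");  // false -> getXID() now returns the whitespace-only xid
DsStrUtils.hasText("   ");  // false -> the old check would have fallen through to return null
```
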

View File

@@ -51,7 +51,7 @@
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>1.8</java.version>
<spring-boot-dependencies.version>2.7.15</spring-boot-dependencies.version>
<spring-boot-dependencies.version>2.7.16</spring-boot-dependencies.version>
<mybatis.plus.version>3.5.3.2</mybatis.plus.version>
<hikaricp.version>2.4.13</hikaricp.version>
<druid.version>1.2.20</druid.version>