HuaYan 2023-10-16 14:26:58 +08:00
parent d9d3b58cfc
commit a6dbcf8249
15 changed files with 149 additions and 126 deletions

View File

@ -18,14 +18,14 @@ package com.baomidou.dynamic.datasource.creator.druid;
import com.alibaba.druid.pool.DruidDataSource;
import com.baomidou.dynamic.datasource.toolkit.DsConfigUtil;
import lombok.NonNull;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import java.beans.BeanInfo;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Method;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.*;
/**
* Druid configuration utility class
@ -44,36 +44,29 @@ public final class DruidConfigUtil {
private static final Map<String, PropertyDescriptor> CONFIG_DESCRIPTOR_MAP = DsConfigUtil.getPropertyDescriptorMap(DruidConfig.class);
private static final Map<String, PropertyDescriptor> DATASOURCE_DESCRIPTOR_MAP = DsConfigUtil.getPropertyDescriptorMap(DruidDataSource.class);
private static final Class<?> CLAZZ = DruidDataSource.class;
/**
* Convert the merged global and local configuration into Properties
*
* @param g global configuration
* @param c current configuration
* @param config current configuration
* @return the Druid configuration properties
*/
public static Properties mergeConfig(DruidConfig g, @NonNull DruidConfig c) {
public static Properties toProperties(@NonNull DruidConfig config) {
Properties properties = new Properties();
for (Map.Entry<String, PropertyDescriptor> entry : CONFIG_DESCRIPTOR_MAP.entrySet()) {
String key = entry.getKey();
PropertyDescriptor descriptor = entry.getValue();
Method readMethod = descriptor.getReadMethod();
Class<?> returnType = readMethod.getReturnType();
if (List.class.isAssignableFrom(returnType) || Set.class.isAssignableFrom(returnType) || Map.class.isAssignableFrom(returnType) || Properties.class.isAssignableFrom(returnType)) {
if (List.class.isAssignableFrom(returnType)
|| Set.class.isAssignableFrom(returnType)
|| Map.class.isAssignableFrom(returnType)
|| Properties.class.isAssignableFrom(returnType)) {
continue;
}
try {
Object cValue = readMethod.invoke(c);
Object cValue = readMethod.invoke(config);
if (cValue != null) {
properties.setProperty("druid." + key, String.valueOf(cValue));
continue;
}
if (g != null) {
Object gValue = readMethod.invoke(g);
if (gValue != null) {
properties.setProperty("druid." + key, String.valueOf(gValue));
}
}
} catch (Exception e) {
log.warn("druid could not set [" + key + "]", e);
@ -81,78 +74,34 @@ public final class DruidConfigUtil {
}
// filters are handled separately; "stat" is enabled by default
String filters = getValue(g, c, "filters");
String filters = config.getFilters();
if (filters == null) {
filters = STAT_STR;
}
String publicKey = getValue(g, c, "publicKey");
boolean configFilterExist = publicKey != null && publicKey.length() > 0;
if (publicKey != null && publicKey.length() > 0 && !filters.contains(CONFIG_STR)) {
String publicKey = config.getPublicKey();
boolean configFilterExist = publicKey != null && !publicKey.isEmpty();
if (publicKey != null && !publicKey.isEmpty() && !filters.contains(CONFIG_STR)) {
filters += "," + CONFIG_STR;
}
properties.setProperty(FILTERS, filters);
Properties connectProperties = new Properties();
Properties cConnectionProperties = c.getConnectionProperties();
if (g != null) {
Properties gConnectionProperties = g.getConnectionProperties();
if (gConnectionProperties != null) {
connectProperties.putAll(gConnectionProperties);
}
}
if (cConnectionProperties != null) {
connectProperties.putAll(cConnectionProperties);
}
Properties connectProperties = config.getConnectionProperties();
if (configFilterExist) {
connectProperties.setProperty("config.decrypt", Boolean.TRUE.toString());
connectProperties.setProperty("config.decrypt.key", publicKey);
}
c.setConnectionProperties(connectProperties);
config.setConnectionProperties(connectProperties);
return properties;
}
/**
* @param g global configuration
* @param c current configuration
* @param field field name
* @return the field value
*/
public static String getValue(DruidConfig g, @NonNull DruidConfig c, String field) {
PropertyDescriptor propertyDescriptor = CONFIG_DESCRIPTOR_MAP.get(field);
if (propertyDescriptor == null) {
return null;
}
Method method = propertyDescriptor.getReadMethod();
if (method == null) {
return null;
}
try {
Object value = method.invoke(c);
if (value != null) {
return String.valueOf(value);
}
if (g != null) {
value = method.invoke(g);
if (value != null) {
return String.valueOf(value);
}
}
} catch (Exception e) {
// do nothing
}
return null;
}
/**
* Set a value on the DruidDataSource
*
* @param dataSource DruidDataSource
* @param field field name
* @param g global configuration
* @param c current configuration
*/
public static void setValue(DruidDataSource dataSource, String field, DruidConfig g, DruidConfig c) {
public static void setValue(DruidDataSource dataSource, String field, DruidConfig c) {
PropertyDescriptor descriptor = DATASOURCE_DESCRIPTOR_MAP.get(field);
if (descriptor == null) {
log.warn("druid currently does not support [" + field + "]");
@ -168,16 +117,94 @@ public final class DruidConfigUtil {
Object value = configReadMethod.invoke(c);
if (value != null) {
writeMethod.invoke(dataSource, value);
return;
}
if (g != null) {
value = configReadMethod.invoke(g);
if (value != null) {
writeMethod.invoke(dataSource, value);
}
}
} catch (Exception e) {
log.warn("druid failed to set [" + field + "]");
}
}
@SneakyThrows
public static void merge(DruidConfig global, DruidConfig item) {
if (global == null) {
return;
}
BeanInfo beanInfo = Introspector.getBeanInfo(DruidConfig.class, Object.class);
PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
for (PropertyDescriptor pd : propertyDescriptors) {
Class<?> propertyType = pd.getPropertyType();
if (Properties.class == propertyType) {
mergeProperties(global, item, pd);
} else if (List.class == propertyType) {
mergeList(global, item, pd);
} else if (Map.class == propertyType) {
mergeMap(global, item, pd);
} else {
mergeBasic(global, item, pd);
}
}
}
@SneakyThrows
private static void mergeList(DruidConfig global, DruidConfig item, PropertyDescriptor pd) {
Method readMethod = pd.getReadMethod();
Method writeMethod = pd.getWriteMethod();
List<Object> result = new ArrayList<>();
List<Object> itemValue = (List) readMethod.invoke(item);
List<Object> globalValue = (List) readMethod.invoke(global);
if (globalValue != null) {
result.addAll(globalValue);
}
if (itemValue != null) {
result.addAll(itemValue);
}
writeMethod.invoke(item, result);
}
@SneakyThrows
private static void mergeMap(DruidConfig global, DruidConfig item, PropertyDescriptor pd) {
Method readMethod = pd.getReadMethod();
Method writeMethod = pd.getWriteMethod();
Map result = new HashMap();
Map itemValue = (Map) readMethod.invoke(item);
Map globalValue = (Map) readMethod.invoke(global);
if (globalValue != null) {
result.putAll(globalValue);
}
if (itemValue != null) {
result.putAll(itemValue);
}
writeMethod.invoke(item, result);
}
@SneakyThrows
private static void mergeProperties(DruidConfig global, DruidConfig item, PropertyDescriptor pd) {
Method readMethod = pd.getReadMethod();
Method writeMethod = pd.getWriteMethod();
Properties itemValue = (Properties) readMethod.invoke(item);
Properties globalValue = (Properties) readMethod.invoke(global);
Properties properties = new Properties();
if (globalValue != null) {
properties.putAll(globalValue);
}
if (itemValue != null) {
properties.putAll(itemValue);
}
if (!properties.isEmpty()) {
writeMethod.invoke(item, properties);
}
}
@SneakyThrows
private static void mergeBasic(DruidConfig global, DruidConfig item, PropertyDescriptor pd) {
Method readMethod = pd.getReadMethod();
Method writeMethod = pd.getWriteMethod();
Object itemValue = readMethod.invoke(item);
if (itemValue == null) {
Object globalValue = readMethod.invoke(global);
if (globalValue != null) {
writeMethod.invoke(item, globalValue);
}
}
}
}

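For orientation, a minimal sketch of how the refactored pair is consumed, assuming a global DruidConfig gConfig, an item-level DruidConfig config, and the creator's PARAMS list as they appear in the DruidDataSourceCreator hunk below (the reflective Properties call on the DruidDataSource itself is omitted):

// merge() fills the item config from the global one: lists are concatenated, maps and
// Properties are merged with item entries winning, and scalar fields are copied only when
// the item value is null; toProperties() then flattens the result into "druid."-prefixed keys.
DruidConfigUtil.merge(gConfig, config);
Properties properties = DruidConfigUtil.toProperties(config);
// fields that Druid's Properties-based configuration cannot express are applied reflectively
for (String param : PARAMS) {
    DruidConfigUtil.setValue(dataSource, param, config);
}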
View File

@ -111,9 +111,11 @@ public class DruidDataSourceCreator implements DataSourceCreator {
dataSource.setDriverClassName(driverClassName);
}
DruidConfig config = dataSourceProperty.getDruid();
Properties properties = DruidConfigUtil.mergeConfig(gConfig, config);
DruidConfigUtil.merge(gConfig, config);
Properties properties = DruidConfigUtil.toProperties(config);
List<Filter> proxyFilters = this.initFilters(dataSourceProperty, properties.getProperty("druid.filters"));
String configFilters = properties.getProperty("druid.filters");
List<Filter> proxyFilters = this.initFilters(config, configFilters);
dataSource.setProxyFilters(proxyFilters);
try {
configMethod.invoke(dataSource, properties);
@ -124,7 +126,7 @@ public class DruidDataSourceCreator implements DataSourceCreator {
dataSource.setConnectProperties(config.getConnectionProperties());
// set parameters that Druid's built-in Properties configuration does not support
for (String param : PARAMS) {
DruidConfigUtil.setValue(dataSource, param, gConfig, config);
DruidConfigUtil.setValue(dataSource, param, config);
}
if (Boolean.FALSE.equals(dataSourceProperty.getLazy())) {
@ -137,40 +139,35 @@ public class DruidDataSourceCreator implements DataSourceCreator {
return dataSource;
}
private List<Filter> initFilters(DataSourceProperty dataSourceProperty, String filters) {
private List<Filter> initFilters(DruidConfig config, String filters) {
List<Filter> proxyFilters = new ArrayList<>(2);
DruidConfig druid = dataSourceProperty.getDruid();
if (DsStrUtils.hasText(filters)) {
String[] filterItems = filters.split(",");
for (String filter : filterItems) {
switch (filter) {
case "stat":
proxyFilters.add(DruidStatConfigUtil.toStatFilter(druid.getStat(), gConfig.getStat()));
proxyFilters.add(DruidStatConfigUtil.toStatFilter(config.getStat()));
break;
case "wall":
Map<String, Object> configWall = gConfig.getWall();
Map<String, Object> globalWall = druid.getWall();
WallConfig wallConfig = DruidWallConfigUtil.toWallConfig(globalWall, configWall);
Map<String, Object> configWall = config.getWall();
WallConfig wallConfig = DruidWallConfigUtil.toWallConfig(configWall);
WallFilter wallFilter = new WallFilter();
wallFilter.setConfig(wallConfig);
String dbType = (String) configWall.get("db-type");
if (!DsStrUtils.hasText(dbType)) {
dbType = (String) globalWall.get("db-type");
}
wallFilter.setDbType(dbType);
proxyFilters.add(wallFilter);
break;
case "slf4j":
proxyFilters.add(DruidLogConfigUtil.initFilter(Slf4jLogFilter.class, druid.getSlf4j(), gConfig.getSlf4j()));
proxyFilters.add(DruidLogConfigUtil.initFilter(Slf4jLogFilter.class, config.getSlf4j()));
break;
case "commons-log":
proxyFilters.add(DruidLogConfigUtil.initFilter(CommonsLogFilter.class, druid.getCommonsLog(), gConfig.getCommonsLog()));
proxyFilters.add(DruidLogConfigUtil.initFilter(CommonsLogFilter.class, config.getCommonsLog()));
break;
case "log4j":
proxyFilters.add(DruidLogConfigUtil.initFilter(Log4jFilter.class, druid.getLog4j(), gConfig.getLog4j()));
proxyFilters.add(DruidLogConfigUtil.initFilter(Log4jFilter.class, config.getLog4j()));
break;
case "log4j2":
proxyFilters.add(DruidLogConfigUtil.initFilter(Log4j2Filter.class, druid.getLog4j2(), gConfig.getLog4j2()));
proxyFilters.add(DruidLogConfigUtil.initFilter(Log4j2Filter.class, config.getLog4j2()));
break;
default:
log.warn("dynamic-datasource currently does not support [{}]", filter);

View File

@ -34,18 +34,16 @@ public final class DruidLogConfigUtil {
private static final Map<String, Method> METHODS = DsConfigUtil.getSetterMethods(LogFilter.class);
/**
* Generate the Druid log filter from the current and the global configuration
* Generate the Druid log filter from the current configuration
*
* @param clazz log filter class
* @param c current configuration
* @param g global configuration
* @param map configuration
* @return the log filter
*/
public static LogFilter initFilter(Class<? extends LogFilter> clazz, Map<String, Object> c, Map<String, Object> g) {
public static LogFilter initFilter(Class<? extends LogFilter> clazz, Map<String, Object> map) {
try {
LogFilter filter = clazz.getDeclaredConstructor().newInstance();
Map<String, Object> params = DsConfigUtil.mergeMap(c, g);
for (Map.Entry<String, Object> item : params.entrySet()) {
for (Map.Entry<String, Object> item : map.entrySet()) {
String key = DsConfigUtil.lineToUpper(item.getKey());
Method method = METHODS.get(key);
if (method != null) {

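For reference, the simplified call sites from the creator hunk above; each filter name parsed out of druid.filters selects a Druid LogFilter subclass and is paired with the matching, already merged, item-level map:

// the map entries are applied to the filter reflectively through the cached LogFilter setters
LogFilter slf4jFilter = DruidLogConfigUtil.initFilter(Slf4jLogFilter.class, config.getSlf4j());
LogFilter log4j2Filter = DruidLogConfigUtil.initFilter(Log4j2Filter.class, config.getLog4j2());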
View File

@ -41,15 +41,13 @@ public final class DruidStatConfigUtil {
}
/**
* Generate the Druid stat filter from the current and the global configuration
* Generate the Druid stat filter from the current configuration
*
* @param c current configuration
* @param g global configuration
* @param map configuration
* @return StatFilter
*/
public static StatFilter toStatFilter(Map<String, Object> c, Map<String, Object> g) {
public static StatFilter toStatFilter(Map<String, Object> map) {
StatFilter filter = new StatFilter();
Map<String, Object> map = DsConfigUtil.mergeMap(c, g);
for (Map.Entry<String, Object> item : map.entrySet()) {
String key = DsConfigUtil.lineToUpper(item.getKey());
Method method = METHODS.get(key);

View File

@ -36,13 +36,11 @@ public final class DruidWallConfigUtil {
/**
* Generate the Druid firewall configuration from the given configuration
*
* @param c current configuration
* @param g global configuration
* @param map current configuration
* @return the firewall configuration
*/
public static WallConfig toWallConfig(Map<String, Object> c, Map<String, Object> g) {
public static WallConfig toWallConfig(Map<String, Object> map) {
WallConfig wallConfig = new WallConfig();
Map<String, Object> map = DsConfigUtil.mergeMap(c, g);
Object dir = map.get("dir");
if (dir != null) {
wallConfig.loadConfig(String.valueOf(dir));

View File

@ -22,6 +22,10 @@ package com.baomidou.dynamic.datasource.toolkit;
*/
public abstract class DsStrUtils {
public static boolean isEmpty(Object str) {
return str == null || "".equals(str);
}
/**
* Check whether the string is empty
*

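The new helper replaces org.springframework.util.StringUtils.isEmpty at every call site touched by the rest of this commit; a quick sketch of the expected behaviour, given the two-line implementation above:

DsStrUtils.isEmpty(null);     // true
DsStrUtils.isEmpty("");       // true
DsStrUtils.isEmpty("master"); // false, so calls such as getDataSource("master") resolve by name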
View File

@ -22,6 +22,7 @@ import com.baomidou.dynamic.datasource.exception.CannotFindDataSourceException;
import com.baomidou.dynamic.datasource.provider.DynamicDataSourceProvider;
import com.baomidou.dynamic.datasource.strategy.DynamicDataSourceStrategy;
import com.baomidou.dynamic.datasource.strategy.LoadBalanceDynamicDataSourceStrategy;
import com.baomidou.dynamic.datasource.toolkit.DsStrUtils;
import com.baomidou.dynamic.datasource.toolkit.DynamicDataSourceContextHolder;
import com.p6spy.engine.spy.P6DataSource;
import io.seata.rm.datasource.DataSourceProxy;
@ -126,7 +127,7 @@ public class DynamicRoutingDataSource extends AbstractRoutingDataSource implemen
* @return the data source
*/
public DataSource getDataSource(String ds) {
if (StringUtils.isEmpty(ds)) {
if (DsStrUtils.isEmpty(ds)) {
return determinePrimaryDataSource();
} else if (!groupDataSources.isEmpty() && groupDataSources.containsKey(ds)) {
log.debug("dynamic-datasource switch to the datasource named [{}]", ds);

View File

@ -20,6 +20,7 @@ import com.baomidou.dynamic.datasource.enums.SeataMode;
import com.baomidou.dynamic.datasource.event.DataSourceInitEvent;
import com.baomidou.dynamic.datasource.support.ScriptRunner;
import com.baomidou.dynamic.datasource.toolkit.CryptoUtils;
import com.baomidou.dynamic.datasource.toolkit.DsStrUtils;
import com.p6spy.engine.spy.P6DataSource;
import io.seata.rm.datasource.DataSourceProxy;
import io.seata.rm.datasource.xa.DataSourceProxyXA;
@ -84,7 +85,7 @@ public class DefaultDataSourceCreator {
throw new IllegalStateException("creator must not be null, please check the DataSourceCreator");
}
String propertyPublicKey = dataSourceProperty.getPublicKey();
if (StringUtils.isEmpty(propertyPublicKey)) {
if (DsStrUtils.isEmpty(propertyPublicKey)) {
dataSourceProperty.setPublicKey(publicKey);
}
Boolean propertyLazy = dataSourceProperty.getLazy();

View File

@ -15,11 +15,11 @@
*/
package com.baomidou.dynamic.datasource.ds;
import com.baomidou.dynamic.datasource.toolkit.DsStrUtils;
import com.baomidou.dynamic.datasource.toolkit.DynamicDataSourceContextHolder;
import com.baomidou.dynamic.datasource.tx.ConnectionFactory;
import com.baomidou.dynamic.datasource.tx.ConnectionProxy;
import com.baomidou.dynamic.datasource.tx.TransactionContext;
import org.springframework.util.StringUtils;
import javax.sql.DataSource;
import java.sql.Connection;
@ -50,11 +50,11 @@ public abstract class AbstractRoutingDataSource extends AbstractDataSource {
@Override
public Connection getConnection() throws SQLException {
String xid = TransactionContext.getXID();
if (StringUtils.isEmpty(xid)) {
if (DsStrUtils.isEmpty(xid)) {
return determineDataSource().getConnection();
} else {
String ds = DynamicDataSourceContextHolder.peek();
ds = StringUtils.isEmpty(ds) ? getPrimary() : ds;
ds = DsStrUtils.isEmpty(ds) ? getPrimary() : ds;
ConnectionProxy connection = ConnectionFactory.getConnection(xid, ds);
return connection == null ? getConnectionProxy(xid, ds, determineDataSource().getConnection()) : connection;
}
@ -63,11 +63,11 @@ public abstract class AbstractRoutingDataSource extends AbstractDataSource {
@Override
public Connection getConnection(String username, String password) throws SQLException {
String xid = TransactionContext.getXID();
if (StringUtils.isEmpty(xid)) {
if (DsStrUtils.isEmpty(xid)) {
return determineDataSource().getConnection(username, password);
} else {
String ds = DynamicDataSourceContextHolder.peek();
ds = StringUtils.isEmpty(ds) ? getPrimary() : ds;
ds = DsStrUtils.isEmpty(ds) ? getPrimary() : ds;
ConnectionProxy connection = ConnectionFactory.getConnection(xid, ds);
return connection == null ? getConnectionProxy(xid, ds, determineDataSource().getConnection(username, password))
: connection;

View File

@ -17,8 +17,8 @@ package com.baomidou.dynamic.datasource.provider;
import com.baomidou.dynamic.datasource.creator.DataSourceProperty;
import com.baomidou.dynamic.datasource.creator.DefaultDataSourceCreator;
import com.baomidou.dynamic.datasource.toolkit.DsStrUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.util.StringUtils;
import javax.sql.DataSource;
import java.sql.Connection;
@ -106,7 +106,7 @@ public abstract class AbstractJdbcDataSourceProvider extends AbstractDataSourceP
try {
// With SPI support, drivers no longer need to be loaded explicitly,
// but the driver is still loaded proactively when the user configures it explicitly
if (!StringUtils.isEmpty(driverClassName)) {
if (!DsStrUtils.isEmpty(driverClassName)) {
Class.forName(driverClassName);
log.info("Database driver loaded successfully");
}

View File

@ -16,7 +16,6 @@
package com.baomidou.dynamic.datasource.toolkit;
import org.springframework.core.NamedThreadLocal;
import org.springframework.util.StringUtils;
import java.util.ArrayDeque;
import java.util.Deque;
@ -66,7 +65,7 @@ public final class DynamicDataSourceContextHolder {
* @return the data source name
*/
public static String push(String ds) {
String dataSourceStr = StringUtils.isEmpty(ds) ? "" : ds;
String dataSourceStr = DsStrUtils.isEmpty(ds) ? "" : ds;
LOOKUP_KEY_HOLDER.get().push(dataSourceStr);
return dataSourceStr;
}

View File

@ -15,8 +15,8 @@
*/
package com.baomidou.dynamic.datasource.tx;
import com.baomidou.dynamic.datasource.toolkit.DsStrUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.util.StringUtils;
import java.security.SecureRandom;
import java.util.UUID;
@ -75,7 +75,7 @@ public final class LocalTxUtil {
*/
public static String startTransaction() {
String xid = TransactionContext.getXID();
if (!StringUtils.isEmpty(xid)) {
if (!DsStrUtils.isEmpty(xid)) {
log.debug("dynamic-datasource exist local tx [{}]", xid);
} else {
xid = randomUUID().toString();

View File

@ -15,7 +15,7 @@
*/
package com.baomidou.dynamic.datasource.tx;
import org.springframework.util.StringUtils;
import com.baomidou.dynamic.datasource.toolkit.DsStrUtils;
/**
* @author funkye
@ -31,7 +31,7 @@ public class TransactionContext {
*/
public static String getXID() {
String xid = CONTEXT_HOLDER.get();
if (!StringUtils.isEmpty(xid)) {
if (!DsStrUtils.isEmpty(xid)) {
return xid;
}
return null;

View File

@ -16,8 +16,8 @@
package com.baomidou.dynamic.datasource.tx;
import com.baomidou.dynamic.datasource.exception.TransactionException;
import com.baomidou.dynamic.datasource.toolkit.DsStrUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.util.StringUtils;
import java.util.Objects;
@ -106,7 +106,7 @@ public class TransactionalTemplate {
private Object doExecute(TransactionalExecutor transactionalExecutor) throws Throwable {
TransactionalInfo transactionInfo = transactionalExecutor.getTransactionInfo();
DsPropagation propagation = transactionInfo.propagation;
if (!StringUtils.isEmpty(TransactionContext.getXID()) && !propagation.equals(DsPropagation.NESTED)) {
if (!DsStrUtils.isEmpty(TransactionContext.getXID()) && !propagation.equals(DsPropagation.NESTED)) {
return transactionalExecutor.execute();
}
boolean state = true;
@ -209,7 +209,7 @@ public class TransactionalTemplate {
* @return whether a transaction already exists
*/
public boolean existingTransaction() {
return !StringUtils.isEmpty(TransactionContext.getXID());
return !DsStrUtils.isEmpty(TransactionContext.getXID());
}
/**

View File

@ -51,7 +51,7 @@
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>1.8</java.version>
<spring-boot-dependencies.version>2.7.15</spring-boot-dependencies.version>
<spring-boot-dependencies.version>2.7.16</spring-boot-dependencies.version>
<mybatis.plus.version>3.5.3.2</mybatis.plus.version>
<hikaricp.version>2.4.13</hikaricp.version>
<druid.version>1.2.19</druid.version>