17 changed files with 427 additions and 558 deletions
@ -1,151 +0,0 @@
@@ -1,151 +0,0 @@
|
||||
package com.cloud.kicc.common.data.entity; |
||||
|
||||
import java.util.HashMap; |
||||
import java.util.Map; |
||||
|
||||
/**
 * <p>
 * Redis INFO response entry: a key/value pair reported by the Redis
 * {@code INFO} command together with a human-readable (Chinese) description
 * looked up from a static dictionary.
 * </p>
 *
 * @Author: entfrm开发团队-王翔
 * @Date: 2022/2/18
 */
public class RedisInfo {

    /** Static dictionary mapping Redis INFO field names to display descriptions. */
    private static final Map<String, String> map = new HashMap<>();

    static {
        map.put("redis_version", "Redis 服务器版本");
        map.put("redis_git_sha1", "Git SHA1");
        map.put("redis_git_dirty", "Git dirty flag");
        map.put("os", "Redis 服务器的宿主操作系统");
        map.put("arch_bits", " 架构(32 或 64 位)");
        map.put("multiplexing_api", "Redis 所使用的事件处理机制");
        map.put("gcc_version", "编译 Redis 时所使用的 GCC 版本");
        map.put("process_id", "服务器进程的 PID");
        map.put("run_id", "Redis 服务器的随机标识符(用于 Sentinel 和集群)");
        map.put("tcp_port", "TCP/IP 监听端口");
        map.put("uptime_in_seconds", "自 Redis 服务器启动以来,经过的秒数");
        map.put("uptime_in_days", "自 Redis 服务器启动以来,经过的天数");
        map.put("lru_clock", " 以分钟为单位进行自增的时钟,用于 LRU 管理");
        map.put("connected_clients", "已连接客户端的数量(不包括通过从属服务器连接的客户端)");
        map.put("client_longest_output_list", "当前连接的客户端当中,最长的输出列表");
        map.put("client_longest_input_buf", "当前连接的客户端当中,最大输入缓存");
        map.put("blocked_clients", "正在等待阻塞命令(BLPOP、BRPOP、BRPOPLPUSH)的客户端的数量");
        map.put("used_memory", "由 Redis 分配器分配的内存总量,以字节(byte)为单位");
        map.put("used_memory_human", "以人类可读的格式返回 Redis 分配的内存总量");
        map.put("used_memory_rss", "从操作系统的角度,返回 Redis 已分配的内存总量(俗称常驻集大小)。这个值和 top 、 ps 等命令的输出一致");
        map.put("used_memory_peak", " Redis 的内存消耗峰值(以字节为单位)");
        map.put("used_memory_peak_human", "以人类可读的格式返回 Redis 的内存消耗峰值");
        map.put("used_memory_lua", "Lua 引擎所使用的内存大小(以字节为单位)");
        map.put("mem_fragmentation_ratio", "sed_memory_rss 和 used_memory 之间的比率");
        map.put("mem_allocator", "在编译时指定的, Redis 所使用的内存分配器。可以是 libc 、 jemalloc 或者 tcmalloc");

        map.put("redis_build_id", "redis_build_id");
        map.put("redis_mode", "运行模式,单机(standalone)或者集群(cluster)");
        map.put("atomicvar_api", "atomicvar_api");
        map.put("hz", "redis内部调度(进行关闭timeout的客户端,删除过期key等等)频率,程序规定serverCron每秒运行10次。");
        map.put("executable", "server脚本目录");
        map.put("config_file", "配置文件目录");
        map.put("client_biggest_input_buf", "当前连接的客户端当中,最大输入缓存,用client list命令观察qbuf和qbuf-free两个字段最大值");
        map.put("used_memory_rss_human", "以人类可读的方式返回 Redis 已分配的内存总量");
        map.put("used_memory_peak_perc", "内存使用率峰值");
        map.put("total_system_memory", "系统总内存");
        map.put("total_system_memory_human", "以人类可读的方式返回系统总内存");
        map.put("used_memory_lua_human", "以人类可读的方式返回Lua 引擎所使用的内存大小");
        map.put("maxmemory", "最大内存限制,0表示无限制");
        map.put("maxmemory_human", "以人类可读的方式返回最大限制内存");
        map.put("maxmemory_policy", "超过内存限制后的处理策略");
        map.put("loading", "服务器是否正在载入持久化文件");
        map.put("rdb_changes_since_last_save", "离最近一次成功生成rdb文件,写入命令的个数,即有多少个写入命令没有持久化");
        map.put("rdb_bgsave_in_progress", "服务器是否正在创建rdb文件");
        map.put("rdb_last_save_time", "离最近一次成功创建rdb文件的时间戳。当前时间戳 - rdb_last_save_time=多少秒未成功生成rdb文件");
        map.put("rdb_last_bgsave_status", "最近一次rdb持久化是否成功");
        map.put("rdb_last_bgsave_time_sec", "最近一次成功生成rdb文件耗时秒数");
        map.put("rdb_current_bgsave_time_sec", "如果服务器正在创建rdb文件,那么这个域记录的就是当前的创建操作已经耗费的秒数");
        map.put("aof_enabled", "是否开启了aof");
        map.put("aof_rewrite_in_progress", "标识aof的rewrite操作是否在进行中");
        map.put("aof_rewrite_scheduled",
                "rewrite任务计划,当客户端发送bgrewriteaof指令,如果当前rewrite子进程正在执行,那么将客户端请求的bgrewriteaof变为计划任务,待aof子进程结束后执行rewrite ");

        map.put("aof_last_rewrite_time_sec", "最近一次aof rewrite耗费的时长");
        map.put("aof_current_rewrite_time_sec", "如果rewrite操作正在进行,则记录所使用的时间,单位秒");
        map.put("aof_last_bgrewrite_status", "上次bgrewrite aof操作的状态");
        map.put("aof_last_write_status", "上次aof写入状态");

        map.put("total_commands_processed", "redis处理的命令数");
        map.put("total_connections_received", "新创建连接个数,如果新创建连接过多,过度地创建和销毁连接对性能有影响,说明短连接严重或连接池使用有问题,需调研代码的连接设置");
        map.put("instantaneous_ops_per_sec", "redis当前的qps,redis内部较实时的每秒执行的命令数");
        map.put("total_net_input_bytes", "redis网络入口流量字节数");
        map.put("total_net_output_bytes", "redis网络出口流量字节数");

        map.put("instantaneous_input_kbps", "redis网络入口kps");
        map.put("instantaneous_output_kbps", "redis网络出口kps");
        map.put("rejected_connections", "拒绝的连接个数,redis连接个数达到maxclients限制,拒绝新连接的个数");
        map.put("sync_full", "主从完全同步成功次数");

        map.put("sync_partial_ok", "主从部分同步成功次数");
        map.put("sync_partial_err", "主从部分同步失败次数");
        map.put("expired_keys", "运行以来过期的key的数量");
        map.put("evicted_keys", "运行以来剔除(超过了maxmemory后)的key的数量");
        map.put("keyspace_hits", "命中次数");
        map.put("keyspace_misses", "没命中次数");
        map.put("pubsub_channels", "当前使用中的频道数量");
        map.put("pubsub_patterns", "当前使用的模式的数量");
        map.put("latest_fork_usec", "最近一次fork操作阻塞redis进程的耗时数,单位微秒");
        map.put("role", "实例的角色,是master or slave");
        map.put("connected_slaves", "连接的slave实例个数");
        map.put("master_repl_offset", "主从同步偏移量,此值如果和上面的offset相同说明主从一致没延迟");
        map.put("repl_backlog_active", "复制积压缓冲区是否开启");
        map.put("repl_backlog_size", "复制积压缓冲大小");
        map.put("repl_backlog_first_byte_offset", "复制缓冲区里偏移量的大小");
        map.put("repl_backlog_histlen",
                "此值等于 master_repl_offset - repl_backlog_first_byte_offset,该值不会超过repl_backlog_size的大小");
        map.put("used_cpu_sys", "将所有redis主进程在核心态所占用的CPU时求和累计起来");
        map.put("used_cpu_user", "将所有redis主进程在用户态所占用的CPU时求和累计起来");
        map.put("used_cpu_sys_children", "将后台进程在核心态所占用的CPU时求和累计起来");
        map.put("used_cpu_user_children", "将后台进程在用户态所占用的CPU时求和累计起来");
        map.put("cluster_enabled", "实例是否启用集群模式");
        map.put("db0", "db0的key的数量,以及带有生存期的key的数,平均存活时间");

    }

    /** Redis INFO field name. */
    private String key;
    /** Raw value reported by Redis for {@link #key}. */
    private String value;
    /** Human-readable description resolved from the static dictionary. */
    private String description;

    public String getKey() {
        return key;
    }

    /**
     * Sets the INFO field name and eagerly resolves its description from
     * the dictionary (null when the key is unknown).
     */
    public void setKey(String key) {
        this.key = key;
        this.description = map.get(this.key);
    }

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    @Override
    public String toString() {
        // Fix: label previously read "desctiption" (typo).
        return "RedisInfo{" +
                "key='" + key + '\'' +
                ", value='" + value + '\'' +
                ", description='" + description + '\'' +
                '}';
    }
}
@ -1,160 +0,0 @@
@@ -1,160 +0,0 @@
|
||||
/* |
||||
package com.cloud.kicc.common.data.plugins; |
||||
|
||||
import cn.hutool.core.collection.CollectionUtil; |
||||
import com.baomidou.mybatisplus.core.toolkit.ExceptionUtils; |
||||
import com.baomidou.mybatisplus.core.toolkit.PluginUtils; |
||||
import com.baomidou.mybatisplus.extension.plugins.inner.InnerInterceptor; |
||||
import net.sf.jsqlparser.JSQLParserException; |
||||
import net.sf.jsqlparser.parser.CCJSqlParserUtil; |
||||
import net.sf.jsqlparser.statement.Statement; |
||||
import org.apache.ibatis.executor.Executor; |
||||
import org.apache.ibatis.mapping.BoundSql; |
||||
import org.apache.ibatis.mapping.MappedStatement; |
||||
import org.apache.ibatis.session.ResultHandler; |
||||
import org.apache.ibatis.session.RowBounds; |
||||
import org.springframework.jdbc.core.JdbcTemplate; |
||||
import org.springframework.security.core.token.TokenService; |
||||
|
||||
import java.sql.SQLException; |
||||
import java.util.List; |
||||
|
||||
*/ |
||||
/** |
||||
*<p> |
||||
* 数据过滤拦截器 |
||||
* 目前没有数据过滤需求,后面有了在做 |
||||
* 这里思路写个数据过滤拦截器,通过在mybatis中注册,跟分页那种直接全局拦截拼接sql、 |
||||
* 不过这边拼接部分放在注解切面中做,只有加了注解的才会有拼接过滤的sql |
||||
*</p> |
||||
* |
||||
* @Author: entfrm开发团队-王翔 |
||||
* @Date: 2022/4/2 |
||||
*//*
|
||||
|
||||
public class KiccDataScopeInnerInterceptor implements InnerInterceptor { |
||||
|
||||
*/ |
||||
/** |
||||
* 全部数据权限 |
||||
*//*
|
||||
|
||||
public static final String DATA_SCOPE_ALL = "1"; |
||||
|
||||
*/ |
||||
/** |
||||
* 自定数据权限 |
||||
*//*
|
||||
|
||||
public static final String DATA_SCOPE_CUSTOM = "2"; |
||||
|
||||
*/ |
||||
/** |
||||
* 部门数据权限 |
||||
*//*
|
||||
|
||||
public static final String DATA_SCOPE_DEPT = "3"; |
||||
|
||||
*/ |
||||
/** |
||||
* 部门及以下数据权限 |
||||
*//*
|
||||
|
||||
public static final String DATA_SCOPE_DEPT_AND_CHILD = "4"; |
||||
|
||||
*/ |
||||
/** |
||||
* 仅本人数据权限 |
||||
*//*
|
||||
|
||||
public static final String DATA_SCOPE_SELF = "5"; |
||||
|
||||
@Override |
||||
public void beforeQuery(Executor executor, MappedStatement ms, Object parameter, RowBounds rowBounds, ResultHandler resultHandler, BoundSql boundSql) throws SQLException { |
||||
|
||||
@Override |
||||
public void beforeQuery(Executor executor, MappedStatement ms, Object parameter, RowBounds rowBounds, |
||||
ResultHandler resultHandler, BoundSql boundSql) { |
||||
|
||||
PluginUtils.MPBoundSql mpBs = PluginUtils.mpBoundSql(boundSql); |
||||
String originalSql = boundSql.getSql(); |
||||
Object parameterObject = boundSql.getParameterObject(); |
||||
|
||||
// 查找参数中包含DataScope类型的参数
|
||||
DataScope dataScope = findDataScopeObject(parameterObject); |
||||
if (dataScope == null) { |
||||
return; |
||||
} |
||||
|
||||
LoginUser loginUser = SpringUtils.getBean(TokenService.class).getLoginUser(ServletUtils.getRequest()); |
||||
if (StringUtils.isNotNull(loginUser)) { |
||||
SysUser currentUser = loginUser.getUser(); |
||||
// 如果是超级管理员,则不过滤数据
|
||||
if (StringUtils.isNotNull(currentUser) && !currentUser.isAdmin()) { |
||||
String scopeName = dataScope.getScopeName(); |
||||
List<String> deptIds = dataScope.getDeptIds(); |
||||
|
||||
dataScopeFilter(currentUser, deptIds); |
||||
if (deptIds.isEmpty()) { |
||||
originalSql = String.format("SELECT %s FROM (%s) temp_data_scope WHERE 1 = 2", |
||||
dataScope.getFunc().getType(), originalSql); |
||||
} else { |
||||
String join = CollectionUtil.join(deptIds, ","); |
||||
originalSql = String.format("SELECT %s FROM (%s) temp_data_scope WHERE temp_data_scope.%s IN (%s)", |
||||
dataScope.getFunc().getType(), originalSql, scopeName, join); |
||||
} |
||||
} |
||||
} |
||||
mpBs.sql(originalSql); |
||||
} |
||||
|
||||
*/ |
||||
/** |
||||
* 查找参数是否包括DataScope对象 |
||||
* |
||||
* @param parameterObj 参数列表 |
||||
* @return DataScope |
||||
*//*
|
||||
|
||||
private DataScope findDataScopeObject(Object parameterObj) { |
||||
if (parameterObj instanceof DataScope) { |
||||
return (DataScope) parameterObj; |
||||
} else if (parameterObj instanceof Map) { |
||||
for (Object val : ((Map<?, ?>) parameterObj).values()) { |
||||
if (val instanceof DataScope) { |
||||
return (DataScope) val; |
||||
} |
||||
} |
||||
} |
||||
return null; |
||||
} |
||||
|
||||
*/ |
||||
/** |
||||
* 数据范围 |
||||
* |
||||
* @return |
||||
*//*
|
||||
|
||||
private void dataScopeFilter(SysUser user, List<String> deptList) { |
||||
|
||||
for (SysRole role : user.getRoles()) { |
||||
String roleScope = role.getDataScope(); |
||||
if (DataScopeTypeEnum.ALL.getType().equals(roleScope)) { |
||||
return; |
||||
} |
||||
if (DataScopeTypeEnum.CUSTOM.getType().equals(roleScope)) { |
||||
// 获取自定义
|
||||
} |
||||
if (DataScopeTypeEnum.OWN_CHILD_LEVEL.getType().equals(roleScope)) { |
||||
// 获取子集
|
||||
} |
||||
if (DataScopeTypeEnum.OWN_LEVEL.getType().equals(roleScope)) { |
||||
deptList.add(user.getDeptId().toString()); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
} |
||||
*/ |
@ -1,38 +0,0 @@
@@ -1,38 +0,0 @@
|
||||
package com.cloud.kicc.common.datasource; |
||||
|
||||
import com.baomidou.dynamic.datasource.processor.DsProcessor; |
||||
import com.baomidou.dynamic.datasource.provider.DynamicDataSourceProvider; |
||||
import com.cloud.kicc.common.datasource.config.DataSourceProperties; |
||||
import com.cloud.kicc.common.datasource.config.JdbcDynamicDataSourceProvider; |
||||
import com.cloud.kicc.common.datasource.config.LastParamDsProcessor; |
||||
import org.jasypt.encryption.StringEncryptor; |
||||
import org.springframework.boot.autoconfigure.AutoConfigureAfter; |
||||
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration; |
||||
import org.springframework.boot.context.properties.EnableConfigurationProperties; |
||||
import org.springframework.context.annotation.Bean; |
||||
import org.springframework.context.annotation.Configuration; |
||||
|
||||
/**
 * <p>
 * Dynamic datasource switching auto-configuration.
 * Runs after Spring Boot's {@code DataSourceAutoConfiguration} and wires the
 * JDBC-backed dynamic datasource provider plus the {@code #last}-parameter
 * datasource processor.
 * </p>
 *
 * @Author: entfrm开发团队-王翔
 * @Date: 2022/2/19
 */
@Configuration(proxyBeanMethods = false)
@AutoConfigureAfter(DataSourceAutoConfiguration.class)
@EnableConfigurationProperties(DataSourceProperties.class)
public class DynamicDataSourceAutoConfiguration {

    /**
     * Provider that loads additional datasource definitions from the database,
     * decrypting stored passwords with the given {@link StringEncryptor}.
     */
    @Bean
    public DynamicDataSourceProvider dynamicDataSourceProvider(StringEncryptor stringEncryptor, DataSourceProperties properties) {
        return new JdbcDynamicDataSourceProvider(stringEncryptor, properties);
    }

    /**
     * Processor resolving {@code @DS("#last")} to the value of a method's
     * last argument.
     */
    @Bean
    public DsProcessor dsProcessor() {
        return new LastParamDsProcessor();
    }

}
@ -0,0 +1,55 @@
@@ -0,0 +1,55 @@
|
||||
package com.cloud.kicc.common.datasource; |
||||
|
||||
import com.baomidou.dynamic.datasource.DynamicRoutingDataSource; |
||||
import com.baomidou.dynamic.datasource.creator.DefaultDataSourceCreator; |
||||
import com.baomidou.dynamic.datasource.provider.DynamicDataSourceProvider; |
||||
import com.baomidou.dynamic.datasource.spring.boot.autoconfigure.DataSourceProperty; |
||||
import com.baomidou.dynamic.datasource.spring.boot.autoconfigure.DynamicDataSourceProperties; |
||||
import com.cloud.kicc.common.datasource.dynamic.DynamicDataSourceJdbcProvider; |
||||
import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties; |
||||
import org.springframework.context.annotation.Bean; |
||||
import org.springframework.context.annotation.Configuration; |
||||
|
||||
import javax.sql.DataSource; |
||||
|
||||
/** |
||||
*<p> |
||||
* 动态数据源切换配置 |
||||
*</p> |
||||
* |
||||
* @Author: entfrm开发团队-王翔 |
||||
* @Date: 2022/2/19 |
||||
*/ |
||||
@Configuration(proxyBeanMethods = false) |
||||
public class DynamicDataSourceConfiguration { |
||||
|
||||
@Bean |
||||
public DynamicDataSourceProvider dynamicDataSourceProvider(DataSourceProperties dataSourceProperties, |
||||
DynamicDataSourceProperties dynamicDataSourceProperties, |
||||
DefaultDataSourceCreator defaultDataSourceCreator) { |
||||
String driverClassName = dataSourceProperties.getDriverClassName(); |
||||
String url = dataSourceProperties.getUrl(); |
||||
String username = dataSourceProperties.getUsername(); |
||||
String password = dataSourceProperties.getPassword(); |
||||
DataSourceProperty master = dynamicDataSourceProperties.getDatasource().get(dynamicDataSourceProperties.getPrimary()); |
||||
if (master != null) { |
||||
driverClassName = master.getDriverClassName(); |
||||
url = master.getUrl(); |
||||
username = master.getUsername(); |
||||
password = master.getPassword(); |
||||
} |
||||
return new DynamicDataSourceJdbcProvider(dynamicDataSourceProperties, driverClassName, url, username, password); |
||||
} |
||||
|
||||
@Bean |
||||
public DataSource dataSource(DynamicDataSourceProperties dynamicDataSourceProperties) { |
||||
DynamicRoutingDataSource dataSource = new DynamicRoutingDataSource(); |
||||
dataSource.setPrimary(dynamicDataSourceProperties.getPrimary()); |
||||
dataSource.setStrict(dynamicDataSourceProperties.getStrict()); |
||||
dataSource.setStrategy(dynamicDataSourceProperties.getStrategy()); |
||||
dataSource.setP6spy(dynamicDataSourceProperties.getP6spy()); |
||||
dataSource.setSeata(dynamicDataSourceProperties.getSeata()); |
||||
return dataSource; |
||||
} |
||||
|
||||
} |
@ -1,43 +0,0 @@
@@ -1,43 +0,0 @@
|
||||
package com.cloud.kicc.common.datasource.config; |
||||
|
||||
import lombok.Data; |
||||
import org.springframework.boot.context.properties.ConfigurationProperties; |
||||
|
||||
/**
 * <p>
 * Multi-datasource configuration properties bound from {@code spring.datasource}.
 * </p>
 *
 * @Author: entfrm开发团队-王翔
 * @Date: 2022/2/19
 */
@Data
@ConfigurationProperties("spring.datasource")
public class DataSourceProperties {

    /**
     * Database username.
     */
    private String username;

    /**
     * Database password.
     */
    private String password;

    /**
     * JDBC URL.
     */
    private String url;

    /**
     * JDBC driver class name.
     */
    private String driverClassName;

    /**
     * SQL used to load additional datasource definitions from the database.
     */
    private String queryDsSql = "select * from gen_datasource_conf where del_flag = 0";

}
@ -1,69 +0,0 @@
@@ -1,69 +0,0 @@
|
||||
package com.cloud.kicc.common.datasource.config; |
||||
|
||||
import com.baomidou.dynamic.datasource.provider.AbstractJdbcDataSourceProvider; |
||||
import com.baomidou.dynamic.datasource.spring.boot.autoconfigure.DataSourceProperty; |
||||
import com.cloud.kicc.common.datasource.support.DataSourceConstants; |
||||
import org.jasypt.encryption.StringEncryptor; |
||||
|
||||
import java.sql.ResultSet; |
||||
import java.sql.SQLException; |
||||
import java.sql.Statement; |
||||
import java.util.HashMap; |
||||
import java.util.Map; |
||||
|
||||
/** |
||||
*<p> |
||||
* 初始化多数据源 |
||||
* 从数据源中获取 配置信息 |
||||
*</p> |
||||
* |
||||
* @Author: entfrm开发团队-王翔 |
||||
* @Date: 2022/2/19 |
||||
*/ |
||||
public class JdbcDynamicDataSourceProvider extends AbstractJdbcDataSourceProvider { |
||||
|
||||
private final DataSourceProperties properties; |
||||
|
||||
private final StringEncryptor stringEncryptor; |
||||
|
||||
public JdbcDynamicDataSourceProvider(StringEncryptor stringEncryptor, DataSourceProperties properties) { |
||||
super(properties.getDriverClassName(), properties.getUrl(), properties.getUsername(), properties.getPassword()); |
||||
this.stringEncryptor = stringEncryptor; |
||||
this.properties = properties; |
||||
} |
||||
|
||||
/** |
||||
* 执行语句获得数据源参数 |
||||
* @param statement 语句 |
||||
* @return 数据源参数 |
||||
* @throws SQLException sql异常 |
||||
*/ |
||||
@Override |
||||
protected Map<String, DataSourceProperty> executeStmt(Statement statement) throws SQLException { |
||||
ResultSet rs = statement.executeQuery(properties.getQueryDsSql()); |
||||
|
||||
Map<String, DataSourceProperty> map = new HashMap<>(8); |
||||
while (rs.next()) { |
||||
String name = rs.getString(DataSourceConstants.DS_NAME); |
||||
String username = rs.getString(DataSourceConstants.DS_USER_NAME); |
||||
String password = rs.getString(DataSourceConstants.DS_USER_PWD); |
||||
String url = rs.getString(DataSourceConstants.DS_JDBC_URL); |
||||
DataSourceProperty property = new DataSourceProperty(); |
||||
property.setUsername(username); |
||||
property.setLazy(true); |
||||
property.setPassword(stringEncryptor.decrypt(password)); |
||||
property.setUrl(url); |
||||
map.put(name, property); |
||||
} |
||||
|
||||
// 添加默认主数据源
|
||||
DataSourceProperty property = new DataSourceProperty(); |
||||
property.setUsername(properties.getUsername()); |
||||
property.setPassword(properties.getPassword()); |
||||
property.setUrl(properties.getUrl()); |
||||
property.setLazy(true); |
||||
map.put(DataSourceConstants.DS_MASTER, property); |
||||
return map; |
||||
} |
||||
|
||||
} |
@ -1,48 +0,0 @@
@@ -1,48 +0,0 @@
|
||||
package com.cloud.kicc.common.datasource.config; |
||||
|
||||
import com.baomidou.dynamic.datasource.processor.DsProcessor; |
||||
import com.baomidou.dynamic.datasource.toolkit.DynamicDataSourceContextHolder; |
||||
import org.aopalliance.intercept.MethodInvocation; |
||||
|
||||
/**
 * <p>
 * Fixes an upstream issue with parameter-based datasource resolution for
 * {@code @DS("#last")}: the value of the current method's last argument is
 * used as the datasource name.
 * </p>
 *
 * @Author: entfrm开发团队-王翔
 * @Date: 2022/2/19
 */
public class LastParamDsProcessor extends DsProcessor {

    private static final String LAST_PREFIX = "#last";

    /**
     * Matching condition: when the key matches, this processor handles it;
     * otherwise the next processor in the chain is consulted.
     * @param key the content of the {@code @DS} annotation
     * @return whether this processor handles the key
     */
    @Override
    public boolean matches(String key) {
        if (key.startsWith(LAST_PREFIX)) {
            // Clear any datasource pushed earlier on this thread first; see
            // https://github.com/baomidou/dynamic-datasource-spring-boot-starter/issues/213
            DynamicDataSourceContextHolder.clear();
            return true;
        }
        return false;
    }

    /**
     * Determines the final datasource name.
     * @param invocation the intercepted method invocation
     * @param key the content of the {@code @DS} annotation
     * @return the last argument's value, converted to a string
     */
    @Override
    public String doDetermineDatasource(MethodInvocation invocation, String key) {
        Object[] arguments = invocation.getArguments();
        // NOTE(review): assumes the annotated method has at least one argument;
        // an empty argument list would throw ArrayIndexOutOfBoundsException.
        return String.valueOf(arguments[arguments.length - 1]);
    }

}
@ -0,0 +1,45 @@
@@ -0,0 +1,45 @@
|
||||
package com.cloud.kicc.common.datasource.dynamic; |
||||
|
||||
import lombok.Data; |
||||
import lombok.EqualsAndHashCode; |
||||
import lombok.experimental.Accessors; |
||||
|
||||
/**
 * <p>
 * Dynamic datasource definition: the connection settings for one
 * runtime-registered datasource.
 * </p>
 *
 * @Author: wangxiang4
 * @since: 2023/7/3
 */
@Data
@Accessors(chain = true)
@EqualsAndHashCode(callSuper = false)
public class DynamicDataSource {

    /**
     * Datasource ID (used as the routing key).
     */
    private String id;

    /**
     * JDBC driver class name.
     */
    private String driverClass;

    /**
     * JDBC connection URL.
     */
    private String url;

    /**
     * Database username.
     */
    private String username;

    /**
     * Database password.
     */
    private String password;

}
@ -0,0 +1,91 @@
@@ -0,0 +1,91 @@
|
||||
package com.cloud.kicc.common.datasource.dynamic; |
||||
|
||||
import cn.hutool.core.util.StrUtil; |
||||
import com.baomidou.dynamic.datasource.provider.AbstractJdbcDataSourceProvider; |
||||
import com.baomidou.dynamic.datasource.spring.boot.autoconfigure.DataSourceProperty; |
||||
import com.baomidou.dynamic.datasource.spring.boot.autoconfigure.DynamicDataSourceProperties; |
||||
import com.cloud.kicc.common.datasource.support.DynamicDataSourceConstant; |
||||
import com.cloud.kicc.common.datasource.util.ConnUtil; |
||||
|
||||
import java.sql.ResultSet; |
||||
import java.sql.SQLException; |
||||
import java.sql.Statement; |
||||
import java.util.HashMap; |
||||
import java.util.Map; |
||||
|
||||
/** |
||||
*<p> |
||||
* 动态数据源初始加载 |
||||
*</p> |
||||
* |
||||
* @Author: wangxiang4 |
||||
* @since: 2023/7/3 |
||||
*/ |
||||
public class DynamicDataSourceJdbcProvider extends AbstractJdbcDataSourceProvider { |
||||
|
||||
private final String driverClassName; |
||||
private final String url; |
||||
private final String username; |
||||
private final String password; |
||||
private final DynamicDataSourceProperties dynamicDataSourceProperties; |
||||
|
||||
public DynamicDataSourceJdbcProvider(DynamicDataSourceProperties dynamicDataSourceProperties, |
||||
String driverClassName, |
||||
String url, |
||||
String username, |
||||
String password) { |
||||
super(driverClassName, url, username, password); |
||||
this.dynamicDataSourceProperties = dynamicDataSourceProperties; |
||||
this.driverClassName = driverClassName; |
||||
this.url = url; |
||||
this.username = username; |
||||
this.password = password; |
||||
} |
||||
|
||||
@Override |
||||
protected Map<String, DataSourceProperty> executeStmt(Statement statement) throws SQLException { |
||||
// 构建数据源集合
|
||||
Map<String, DataSourceProperty> map = new HashMap<>(16); |
||||
// 构建主数据源
|
||||
DataSourceProperty masterProperty = new DataSourceProperty(); |
||||
masterProperty.setDriverClassName(driverClassName); |
||||
masterProperty.setUrl(url); |
||||
masterProperty.setUsername(username); |
||||
masterProperty.setPassword(password); |
||||
map.put(dynamicDataSourceProperties.getPrimary(), masterProperty); |
||||
// 构建yml数据源
|
||||
Map<String, DataSourceProperty> datasource = dynamicDataSourceProperties.getDatasource(); |
||||
if (datasource.size() > 0) { |
||||
datasource.remove(dynamicDataSourceProperties.getPrimary()); |
||||
map.putAll(datasource); |
||||
} |
||||
// 构建动态数据源
|
||||
ResultSet rs = statement.executeQuery(DynamicDataSourceConstant.DYNAMIC_DATASOURCE_GROUP_STATEMENT); |
||||
while (rs.next()) { |
||||
String id = rs.getString("id"); |
||||
String name = rs.getString("name"); |
||||
String driver = rs.getString("driverClass"); |
||||
String url = rs.getString("url"); |
||||
String username = rs.getString("username"); |
||||
String password = rs.getString("password"); |
||||
try { |
||||
if (StrUtil.isAllNotBlank(id, driver, url, username, password)) { |
||||
// 测试链接是否生效
|
||||
Boolean result = ConnUtil.dbTest(driver, url, username, password); |
||||
if (result) { |
||||
DataSourceProperty jdbcProperty = new DataSourceProperty(); |
||||
// 设置SQL链接
|
||||
jdbcProperty.setDriverClassName(driver); |
||||
jdbcProperty.setUrl(url); |
||||
jdbcProperty.setUsername(username); |
||||
jdbcProperty.setPassword(password); |
||||
map.put(id, jdbcProperty); |
||||
} |
||||
} |
||||
} catch (Exception e) { |
||||
System.err.printf("数据源:"+ name + "初始化失败!"); |
||||
} |
||||
} |
||||
return map; |
||||
} |
||||
} |
@ -1,43 +0,0 @@
@@ -1,43 +0,0 @@
|
||||
package com.cloud.kicc.common.datasource.support; |
||||
|
||||
/**
 * <p>
 * Multi-datasource related constants: column names of the datasource
 * configuration table and the default datasource key.
 * </p>
 *
 * @Author: entfrm开发团队-王翔
 * @Date: 2022/2/19
 */
public interface DataSourceConstants {

    /**
     * Datasource name column.
     */
    String DS_NAME = "name";

    /**
     * Default datasource key (master).
     */
    String DS_MASTER = "master";

    /**
     * JDBC URL column.
     */
    String DS_JDBC_URL = "url";

    /**
     * Username column.
     */
    String DS_USER_NAME = "username";

    /**
     * Password column.
     */
    String DS_USER_PWD = "password";

    /**
     * Driver class name column.
     */
    String DS_DRIVER_CLASS_NAME = "driver_class_name";

}
@ -0,0 +1,48 @@
@@ -0,0 +1,48 @@
|
||||
package com.cloud.kicc.common.datasource.support; |
||||
|
||||
/**
 * <p>
 * Dynamic datasource constants: lookup SQL against {@code sys_datasource}
 * and connection-validation statements.
 * </p>
 *
 * @Author: wangxiang4
 * @since: 2023/7/3
 */
public interface DynamicDataSourceConstant {

    /**
     * Base SELECT for datasource lookups.
     */
    String DYNAMIC_DATASOURCE_BASE_STATEMENT = "SELECT id, name, driver_class as driverClass, url, username, password FROM sys_datasource";

    /**
     * Lookup of a single (non-deleted) datasource by id.
     */
    String DYNAMIC_DATASOURCE_SINGLE_STATEMENT = DYNAMIC_DATASOURCE_BASE_STATEMENT + " WHERE del_flag = 0 AND id = ?";

    /**
     * Lookup of all non-deleted datasources.
     */
    String DYNAMIC_DATASOURCE_GROUP_STATEMENT = DYNAMIC_DATASOURCE_BASE_STATEMENT + " WHERE del_flag = 0";

    /**
     * Error message for invalid datasource configuration.
     */
    String DYNAMIC_DATASOURCE_NOT_FOUND = "数据源信息有误,数据加载失败";

    /**
     * Oracle driver class.
     */
    String ORACLE_DRIVER_CLASS = "oracle.jdbc.OracleDriver";

    /**
     * Oracle connection-validation query.
     */
    String ORACLE_VALIDATE_STATEMENT = "select 1 from dual";

    /**
     * Generic connection-validation query.
     */
    String COMMON_VALIDATE_STATEMENT = "select 1";

}
@ -0,0 +1,49 @@
@@ -0,0 +1,49 @@
|
||||
package com.cloud.kicc.common.datasource.util; |
||||
|
||||
import java.sql.Connection; |
||||
import java.sql.DriverManager; |
||||
import java.sql.SQLException; |
||||
|
||||
/**
 * <p>
 * Database connection utility.
 * </p>
 *
 * @Author: wangxiang4
 * @since: 2023/7/3
 */
public class ConnUtil {

    /** Utility class — not instantiable. */
    private ConnUtil() {
    }

    /**
     * Tests that a JDBC connection can be established with the given settings.
     *
     * @param driverClass fully-qualified JDBC driver class name
     * @param url JDBC URL
     * @param username database username
     * @param password database password
     * @return {@code true} when the connection succeeds
     * @throws Exception when the driver is missing or the connection fails
     */
    public static Boolean dbTest(String driverClass, String url, String username, String password) throws Exception {
        // Verify the driver class is on the classpath.
        Class.forName(driverClass);
        // try-with-resources guarantees the probe connection is closed
        // (fix: replaces the manual close helper that swallowed close errors).
        try (Connection conn = DriverManager.getConnection(url, username, password)) {
            // Disable auto-commit so the probe cannot commit anything.
            conn.setAutoCommit(Boolean.FALSE);
            return true;
        }
    }

}
@ -0,0 +1,112 @@
@@ -0,0 +1,112 @@
|
||||
package com.cloud.kicc.common.datasource.util; |
||||
|
||||
import com.baomidou.dynamic.datasource.DynamicRoutingDataSource; |
||||
import com.baomidou.dynamic.datasource.creator.DefaultDataSourceCreator; |
||||
import com.baomidou.dynamic.datasource.spring.boot.autoconfigure.DataSourceProperty; |
||||
import com.baomidou.dynamic.datasource.spring.boot.autoconfigure.DynamicDataSourceProperties; |
||||
import com.baomidou.dynamic.datasource.toolkit.DynamicDataSourceContextHolder; |
||||
import com.cloud.kicc.common.datasource.dynamic.DynamicDataSource; |
||||
import lombok.Setter; |
||||
import lombok.extern.slf4j.Slf4j; |
||||
import org.apache.commons.collections4.map.LRUMap; |
||||
import org.springframework.beans.BeanUtils; |
||||
import org.springframework.stereotype.Component; |
||||
|
||||
import javax.sql.DataSource; |
||||
import java.sql.Connection; |
||||
import java.sql.SQLException; |
||||
|
||||
|
||||
/** |
||||
*<p> |
||||
* 动态数据源核心处理工具 |
||||
*</p> |
||||
* |
||||
* @Author: wangxiang4 |
||||
* @since: 2023/7/3 |
||||
*/ |
||||
@Setter |
||||
@Slf4j |
||||
@Component |
||||
public class DynamicDataSourceUtil { |
||||
|
||||
public static DynamicRoutingDataSource dynamicRoutingDataSource; |
||||
public static DynamicDataSourceProperties dynamicDataSourceProperties; |
||||
private static DefaultDataSourceCreator defaultDataSourceCreator; |
||||
private static int MAX_DATASOURCE_COUNT = 300; |
||||
// 最多保存三百个数据源,按使用率淘汰
|
||||
private static LRUMap<String, DynamicDataSource> linksProperties = new LRUMap(16, MAX_DATASOURCE_COUNT); |
||||
|
||||
public DynamicDataSourceUtil(DataSource dynamicRoutingDataSource, |
||||
DynamicDataSourceProperties dynamicDataSourceProperties, |
||||
DefaultDataSourceCreator defaultDataSourceCreator) { |
||||
DynamicDataSourceUtil.dynamicRoutingDataSource = (DynamicRoutingDataSource) dynamicRoutingDataSource; |
||||
DynamicDataSourceUtil.dynamicDataSourceProperties = dynamicDataSourceProperties; |
||||
DynamicDataSourceUtil.defaultDataSourceCreator = defaultDataSourceCreator; |
||||
} |
||||
|
||||
/** |
||||
* 创建并切换至远程数据源 |
||||
* @param dynamicDataSource 切换数据源 |
||||
*/ |
||||
public static void switchToDataSource(DynamicDataSource dynamicDataSource) { |
||||
String dbKey = dynamicDataSource.getId(); |
||||
String removeKey = null; |
||||
boolean insert = true; |
||||
if (dynamicRoutingDataSource.getDataSources().containsKey(dynamicDataSource.getId())) { |
||||
synchronized (linksProperties) { |
||||
if (linksProperties.get(dbKey).equals(dynamicDataSource)) { |
||||
insert = false; |
||||
} |
||||
} |
||||
} |
||||
if (insert) { |
||||
// 创建数据源配置
|
||||
DataSourceProperty dataSourceProperty = new DataSourceProperty(); |
||||
// 拷贝数据源配置
|
||||
BeanUtils.copyProperties(dynamicDataSource, dataSourceProperty); |
||||
// 创建动态数据源
|
||||
DataSource dataSource = defaultDataSourceCreator.createDataSource(dataSourceProperty); |
||||
// 添加最新数据源
|
||||
dynamicRoutingDataSource.addDataSource(dbKey, dataSource); |
||||
synchronized (linksProperties) { |
||||
if (linksProperties.size() == MAX_DATASOURCE_COUNT) { |
||||
removeKey = linksProperties.firstKey(); |
||||
} |
||||
linksProperties.put(dbKey, dynamicDataSource); |
||||
} |
||||
} |
||||
// 切换数据源
|
||||
DynamicDataSourceContextHolder.push(dbKey); |
||||
if (removeKey != null) { |
||||
try { |
||||
dynamicRoutingDataSource.removeDataSource(removeKey); |
||||
} catch (Exception e) { |
||||
log.error("移除数据源失败:{}", e.getMessage()); |
||||
} |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* 移除当前设置的远程数据源,清除上次清除之后切换的所有数据源 |
||||
* 需要先调用 switchToDataSource切换数据源 |
||||
*/ |
||||
public static void clearSwitchDataSource() { |
||||
DynamicDataSourceContextHolder.poll(); |
||||
} |
||||
|
||||
/** |
||||
* 获取当前数据源的数据链接(切库后的) |
||||
* 用完之后一定要关闭 |
||||
* @return |
||||
* @throws SQLException |
||||
*/ |
||||
public static Connection getCurrentConnection() throws SQLException { |
||||
return dynamicRoutingDataSource.getConnection(); |
||||
} |
||||
|
||||
public static boolean containsLink(String key) { |
||||
return linksProperties.containsKey(key); |
||||
} |
||||
|
||||
} |
Loading…
Reference in new issue