Spring Boot Integration with Nacos for Dynamic Data Source Refresh
Preface
The project needs to be able to change the data source while the application is running (that is, hot-reload the data source). This article uses com.alibaba.druid.pool.DruidDataSource as the example.
Step 1: Override the DruidAbstractDataSource class
Why this class has to be overridden: once a DruidDataSource has been initialized, it no longer allows the database url and username to be changed.
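A minimal sketch of the behavior that motivates the override (the demo class and connection values are placeholders, not from the original article): once init() has run, the stock setters reject new values.

import com.alibaba.druid.pool.DruidDataSource;
import java.sql.SQLException;

public class DruidInitedDemo {                                // hypothetical demo class
    public static void main(String[] args) throws SQLException {
        DruidDataSource ds = new DruidDataSource();
        ds.setUrl("jdbc:mysql://localhost:3306/demo");        // placeholder connection values
        ds.setUsername("demo_user");
        ds.setPassword("demo_pass");
        ds.init();                                            // the pool is now marked as inited
        // Without the override below, the next call throws UnsupportedOperationException,
        // because the stock setUrl() rejects changes once inited is true.
        ds.setUrl("jdbc:mysql://localhost:3306/other");
    }
}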
package com.alibaba.druid.pool;

...

public abstract class DruidAbstractDataSource extends WrapperAdapter implements DruidAbstractDataSourceMBean, DataSource, DataSourceProxy, Serializable {

    ...

    public void setUrl(String jdbcUrl) {
        if (StringUtils.equals(this.jdbcUrl, jdbcUrl)) {
            return;
        }
        // When overriding, this check must be commented out; otherwise changing the url
        // after initialization throws UnsupportedOperationException
        // if (inited) {
        //     throw new UnsupportedOperationException();
        // }
        if (jdbcUrl != null) {
            jdbcUrl = jdbcUrl.trim();
        }
        this.jdbcUrl = jdbcUrl;
        // if (jdbcUrl.startsWith(ConfigFilter.URL_PREFIX)) {
        //     this.filters.add(new ConfigFilter());
        // }
    }

    public void setUsername(String username) {
        if (StringUtils.equals(this.username, username)) {
            return;
        }
        // When overriding, this check must be commented out; otherwise changing the username
        // after initialization throws UnsupportedOperationException
        // if (inited) {
        //     throw new UnsupportedOperationException();
        // }
        this.username = username;
    }
}
Keep the package path unchanged when overriding the class (i.e., place it in your own project under src/main/java/com/alibaba/druid/pool/); only then will the overridden class be loaded in preference to the one bundled in the Druid jar.
Step 2: Configure dynamic retrieval of the Nacos configuration
DruidConfiguration.java
import com.alibaba.druid.pool.DruidDataSource;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
 * Description: if the DataSource is not built manually in code like this, the SQL monitoring
 * page of the Druid console will show no data.
 */
@Slf4j
@Configuration
@RefreshScope
@Data
public class DruidConfiguration {

    @Value("${spring.datasource.url}")
    private String dbUrl;

    @Value("${spring.datasource.username}")
    private String username;

    @Value("${spring.datasource.password}")
    private String password;

    @Value("${spring.datasource.driver-class-name}")
    private String driverClassName;

    // @Value("${spring.datasource.initialSize}")
    // private int initialSize;
    //
    // @Value("${spring.datasource.minIdle}")
    // private int minIdle;
    //
    // @Value("${spring.datasource.maxActive}")
    // private int maxActive;
    //
    // @Value("${spring.datasource.maxWait}")
    // private int maxWait;
    //
    // @Value("${spring.datasource.timeBetweenEvictionRunsMillis}")
    // private int timeBetweenEvictionRunsMillis;
    //
    // @Value("${spring.datasource.minEvictableIdleTimeMillis}")
    // private int minEvictableIdleTimeMillis;
    //
    // @Value("${spring.datasource.validationQuery}")
    // private String validationQuery;
    //
    // @Value("${spring.datasource.testWhileIdle}")
    // private boolean testWhileIdle;
    //
    // @Value("${spring.datasource.testOnBorrow}")
    // private boolean testOnBorrow;
    //
    // @Value("${spring.datasource.testOnReturn}")
    // private boolean testOnReturn;
    //
    // @Value("${spring.datasource.poolPreparedStatements}")
    // private boolean poolPreparedStatements;
    //
    // @Value("${spring.datasource.maxPoolPreparedStatementPerConnectionSize}")
    // private int maxPoolPreparedStatementPerConnectionSize;
    //
    // @Value("${spring.datasource.filters}")
    // private String filters;
    //
    // @Value("${spring.datasource.connectionProperties}")
    // private String connectionProperties;
    //
    // @Value("${spring.datasource.useGlobalDataSourceStat}")
    // private boolean useGlobalDataSourceStat;

    @Bean
    @RefreshScope
    public DruidDataSource dataSource() {
        DruidDataSource datasource = new DruidDataSource();
        datasource.setUrl(this.dbUrl);
        datasource.setUsername(username);
        datasource.setPassword(password);
        datasource.setDriverClassName(driverClassName);
        // Optional pool configuration
        // datasource.setInitialSize(initialSize);
        // datasource.setMinIdle(minIdle);
        // datasource.setMaxActive(maxActive);
        // datasource.setMaxWait(maxWait);
        // datasource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
        // datasource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        // datasource.setValidationQuery(validationQuery);
        // datasource.setTestWhileIdle(testWhileIdle);
        // datasource.setTestOnBorrow(testOnBorrow);
        // datasource.setTestOnReturn(testOnReturn);
        // datasource.setPoolPreparedStatements(poolPreparedStatements);
        // datasource.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
        // datasource.setUseGlobalDataSourceStat(useGlobalDataSourceStat);
        // try {
        //     datasource.setFilters(filters);
        // } catch (SQLException e) {
        //     log.error("druid configuration initialization filter: " + e);
        // }
        // datasource.setConnectionProperties(connectionProperties);
        return datasource;
    }
}
Note that the @RefreshScope annotation must be added here; it is what causes the bean to be rebuilt with re-resolved @Value properties after a Nacos configuration refresh.
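For reference, the configuration published to Nacos only needs to supply the keys read by the @Value placeholders above. A minimal sketch, assuming a properties-format data id; the data id name, driver class, and connection values are placeholders:

# Nacos data id, e.g. demo-service.properties (name is a placeholder)
spring.datasource.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.url=jdbc:mysql://localhost:3306/demo?useUnicode=true&characterEncoding=utf8
spring.datasource.username=demo_user
spring.datasource.password=demo_pass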
Step 3: Manually refresh the data source
@GetMapping("/refresh")
public String refresh() throws SQLException {
    // Fetch the live pool by bean name and push in the values that Nacos has already
    // refreshed into the DruidConfiguration bean
    DruidDataSource master = SpringUtils.getBean("dataSource");
    master.setUrl(druidConfiguration.getDbUrl());
    master.setUsername(druidConfiguration.getUsername());
    master.setPassword(druidConfiguration.getPassword());
    master.setDriverClassName(druidConfiguration.getDriverClassName());
    // Close existing connections and re-initialize the pool with the new settings
    master.restart();
    return master.getUsername() + "<>" + master.getUrl() + "----------" + druidConfiguration.getDbUrl();
}
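The original article shows only the handler method; the controller it lives in is not shown, so the wrapper below is an assumption (the class name, @RestController, and the @Autowired DruidConfiguration field are illustrative):

import com.alibaba.druid.pool.DruidDataSource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

import java.sql.SQLException;

@RestController
public class DataSourceRefreshController {   // hypothetical class name

    // The @RefreshScope DruidConfiguration bean already holds the latest values from Nacos
    @Autowired
    private DruidConfiguration druidConfiguration;

    // place the refresh() handler shown above inside this class
}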
SpringUtils.java
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;

import java.util.Map;

/**
 * Helper for looking up Spring beans by name or type; the ApplicationContext is captured
 * once at startup via ApplicationContextAware.
 */
@Component
public class SpringUtils implements ApplicationContextAware {

    private static ApplicationContext applicationContext;

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        SpringUtils.applicationContext = applicationContext;
    }

    @SuppressWarnings("unchecked")
    public static <T> T getBean(String beanName) {
        if (applicationContext.containsBean(beanName)) {
            return (T) applicationContext.getBean(beanName);
        } else {
            return null;
        }
    }

    public static <T> Map<String, T> getBeansOfType(Class<T> baseType) {
        return applicationContext.getBeansOfType(baseType);
    }

    public static ApplicationContext getApplicationContext() {
        return applicationContext;
    }
}
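The article triggers the update through the manual /refresh endpoint. If the pool should instead be updated automatically whenever Nacos pushes a change, a listener along these lines could work; this is only a sketch, assuming the refresh scope has already been rebuilt when Spring Cloud publishes RefreshScopeRefreshedEvent (the class name and wiring are illustrative, not part of the original article):

import com.alibaba.druid.pool.DruidDataSource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.cloud.context.scope.refresh.RefreshScopeRefreshedEvent;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Component;

import java.sql.SQLException;

// Hypothetical component: applies the refreshed spring.datasource.* values to the running
// pool as soon as the refresh scope has been rebuilt after a Nacos configuration change.
@Slf4j
@Component
public class DataSourceRefreshListener {

    private final DruidConfiguration druidConfiguration;

    public DataSourceRefreshListener(DruidConfiguration druidConfiguration) {
        this.druidConfiguration = druidConfiguration;
    }

    @EventListener(RefreshScopeRefreshedEvent.class)
    public void onRefresh() throws SQLException {
        DruidDataSource master = SpringUtils.getBean("dataSource");
        master.setUrl(druidConfiguration.getDbUrl());
        master.setUsername(druidConfiguration.getUsername());
        master.setPassword(druidConfiguration.getPassword());
        master.setDriverClassName(druidConfiguration.getDriverClassName());
        // Same as the manual endpoint: close existing connections and re-initialize the pool
        master.restart();
        log.info("DruidDataSource restarted with url {}", druidConfiguration.getDbUrl());
    }
}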