Once the Base project has been created, the next step is to integrate the database layer so that the application can connect to multiple databases and switch to a specified one at runtime.
This is exactly what Spring's AbstractRoutingDataSource is for.
The connection pool is Druid:
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
<version>1.1.8</version>
</dependency>
The database is PostgreSQL:
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
</dependency>
The remaining dependencies come from the Base project.
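Besides Druid and the PostgreSQL driver, the configuration below also relies on JPA/Hibernate. If the Base project does not already pull them in, they typically come via spring-boot-starter-data-jpa (an assumed addition here; the version is managed by the Spring Boot parent):
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>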
1. Create an application-dev.properties file under resources.
2. Add spring.profiles.active=dev to application.properties; this keeps the configuration for different environments separate.
3. Add the following data source configuration to application-dev.properties:
# Application context path
server.servlet.contextPath=/control
server.port=8088
# serviceNode is a custom property used to distinguish service nodes; it can be ignored for now
server.serviceNode=control
# Database access configuration
spring.datasource.type=com.alibaba.druid.pool.DruidDataSource
spring.datasource.filters=stat
spring.datasource.logSlowSql=false
spring.hibernate.dialect=org.hibernate.dialect.PostgreSQL9Dialect
spring.hibernate.hbm2ddl.auto=none
spring.hibernate.show_sql=false
spring.hibernate.format_sql=false
# PostgreSQL: master is the primary (and default) data source
spring.datasource.master.url=jdbc:postgresql://127.0.0.1:5432/control?currentSchema=control
spring.datasource.master.username=root
spring.datasource.master.password=root
spring.datasource.master.driverClassName=org.postgresql.Driver
spring.datasource.master.initialSize=5
spring.datasource.master.minIdle=5
spring.datasource.master.maxActive=20
spring.datasource.master.maxWait=60000
spring.datasource.master.timeBetweenEvictionRunsMillis=60000
spring.datasource.master.minEvictableIdleTimeMillis=300000
spring.datasource.master.validationQuery=SELECT 1
spring.datasource.master.testWhileIdle=true
spring.datasource.master.testOnBorrow=false
spring.datasource.master.testOnReturn=false
spring.datasource.master.filters=stat
# nodes lists the additional data source nodes, comma-separated: node1,node2,node3,...
spring.datasource.nodes=node1
spring.datasource.node1.url=jdbc:postgresql://127.0.0.1:5432/ccs?currentSchema=ccs
spring.datasource.node1.username=root
spring.datasource.node1.password=root
spring.datasource.node1.driverClassName=org.postgresql.Driver
spring.datasource.node1.initialSize=5
spring.datasource.node1.minIdle=5
spring.datasource.node1.maxActive=20
spring.datasource.node1.maxWait=60000
spring.datasource.node1.timeBetweenEvictionRunsMillis=60000
spring.datasource.node1.minEvictableIdleTimeMillis=300000
spring.datasource.node1.validationQuery=SELECT 1
spring.datasource.node1.testWhileIdle=true
spring.datasource.node1.testOnBorrow=false
spring.datasource.node1.testOnReturn=false
spring.datasource.node1.filters=stat
4. With the data source configuration in place, the data sources need to be initialized and the corresponding DataSource beans created.
The full initialization code is shown below:
package com.cloud.base.server.config;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.boot.web.servlet.ServletRegistrationBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.transaction.PlatformTransactionManager;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.support.http.StatViewServlet;
import com.alibaba.druid.support.http.WebStatFilter;
@Configuration
public class DruidConfig implements ApplicationContextAware {
@Autowired
private Environment env;
@Value("${spring.datasource.type}")
private Class<? extends DruidDataSource> dataSourceType;
@Value("${spring.datasource.logSlowSql}")
private String logSlowSql;
@Value("${spring.hibernate.dialect}")
private String dialect;
@Value("${spring.hibernate.hbm2ddl.auto}")
private String hbm2ddl;
@Value("${spring.hibernate.show_sql}")
private String show_sql;
@Value("${spring.hibernate.format_sql}")
private String format_sql;
@Value("${spring.datasource.nodes}")
private String nodes;
private ConfigurableApplicationContext app;
@Bean(name = "masterDataSource", destroyMethod = "close", initMethod = "init")
@ConfigurationProperties(prefix = "spring.datasource.master")
public DruidDataSource masterDataSource() {
return DataSourceBuilder.create().type(dataSourceType).build();
}
@Bean(name = "dataSource")
public DynamicDataSource dataSource() {
ApplicationConfig.serviceNode = env.getProperty("server.serviceNode");
DefaultListableBeanFactory acf = (DefaultListableBeanFactory) app.getAutowireCapableBeanFactory();
BeanDefinitionBuilder bdb;
DynamicDataSource dynamicDataSource = new DynamicDataSource();
Map<Object, Object> targetDataSources = new HashMap<>();
targetDataSources.put("master", masterDataSource());
DynamicDataSourceContextHolder.dataSourceIds.add("master");
if (!StringUtils.isEmpty(nodes)) {
for (int i = 0; i < nodes.split(",").length; i++) {
String beanKey = nodes.split(",")[i];
final String prefix = "spring.datasource." + beanKey;
bdb = BeanDefinitionBuilder.rootBeanDefinition(DruidDataSource.class);
bdb.setDestroyMethodName("close");
bdb.setInitMethodName("init");
bdb.getBeanDefinition().setAttribute("id", beanKey);
bdb.addPropertyValue("driverClassName", env.getProperty(prefix + ".driverClassName"));
bdb.addPropertyValue("url", env.getProperty(prefix + ".url"));
bdb.addPropertyValue("username", env.getProperty(prefix + ".username"));
bdb.addPropertyValue("password", env.getProperty(prefix + ".password"));
bdb.addPropertyValue("initialSize", env.getProperty(prefix + ".initialSize"));
bdb.addPropertyValue("minIdle", env.getProperty(prefix + ".minIdle"));
bdb.addPropertyValue("maxActive", env.getProperty(prefix + ".maxActive"));
bdb.addPropertyValue("maxWait", env.getProperty(prefix + ".maxWait"));
bdb.addPropertyValue("timeBetweenEvictionRunsMillis", env.getProperty(prefix + ".timeBetweenEvictionRunsMillis"));
bdb.addPropertyValue("minEvictableIdleTimeMillis", env.getProperty(prefix + ".minEvictableIdleTimeMillis"));
bdb.addPropertyValue("validationQuery", env.getProperty(prefix + ".validationQuery"));
bdb.addPropertyValue("testWhileIdle", env.getProperty(prefix + ".testWhileIdle"));
bdb.addPropertyValue("testOnBorrow", env.getProperty(prefix + ".testOnBorrow"));
bdb.addPropertyValue("testOnReturn", env.getProperty(prefix + ".testOnReturn"));
bdb.addPropertyValue("filters", env.getProperty(prefix + ".filters"));
/** Register the definition with Spring, using the node name as the bean ID */
acf.registerBeanDefinition(beanKey, bdb.getBeanDefinition());
targetDataSources.put(beanKey, app.getBean(beanKey));
DynamicDataSourceContextHolder.dataSourceIds.add(beanKey);
}
}
dynamicDataSource.setTargetDataSources(targetDataSources);
/** Use master as the default data source */
dynamicDataSource.setDefaultTargetDataSource(masterDataSource());
dynamicDataSource.afterPropertiesSet();
return dynamicDataSource;
}
@Bean
public ServletRegistrationBean druidServlet() {
ServletRegistrationBean reg = new ServletRegistrationBean();
reg.setServlet(new StatViewServlet());
reg.addUrlMappings("/druid/*");
reg.addInitParameter("loginUsername", "admin");
reg.addInitParameter("loginPassword", "admin");
reg.addInitParameter("logSlowSql", logSlowSql);
return reg;
}
@Bean
public FilterRegistrationBean filterRegistrationBean() {
FilterRegistrationBean filterRegistrationBean = new FilterRegistrationBean();
filterRegistrationBean.setFilter(new WebStatFilter());
filterRegistrationBean.addUrlPatterns("/*");
filterRegistrationBean.addInitParameter("exclusions", "*.js,*.gif,*.jpg,*.png,*.css,*.ico,/druid/*");
filterRegistrationBean.addInitParameter("profileEnable", "true");
return filterRegistrationBean;
}
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean factory = new LocalContainerEntityManagerFactoryBean();
factory.setDataSource(dataSource());
factory.setPersistenceProviderClass(HibernatePersistenceProvider.class);
/** Adjust to the actual project package; it must cover the Entity and Service classes */
factory.setPackagesToScan("com.cloud");
Map<String, String> jpaProperties = new HashMap<String, String>();
jpaProperties.put("hibernate.dialect", dialect);
jpaProperties.put("hibernate.hbm2ddl.auto", hbm2ddl);
jpaProperties.put("hibernate.show_sql", show_sql);
jpaProperties.put("hibernate.format_sql", format_sql);
factory.setJpaPropertyMap(jpaProperties);
factory.afterPropertiesSet();
return factory;
}
/** Point the JpaTransactionManager at this EntityManagerFactory; otherwise transactions will not take effect after the data source is switched */
@Bean
public PlatformTransactionManager transactionManager() {
JpaTransactionManager transactionManager = new JpaTransactionManager();
transactionManager.setEntityManagerFactory(entityManagerFactory().getObject());
return transactionManager;
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
app = (ConfigurableApplicationContext) applicationContext;
}
}
package com.cloud.base.server.config;
import java.util.ArrayList;
import java.util.List;
public class DynamicDataSourceContextHolder {
/** Beware of a pitfall here: thread pools are used almost everywhere, and ThreadLocal data is bound to the thread.
The routing key must therefore be cleared after every request is handled; otherwise, when the thread is reused, the wrong DataSource will be picked.
One way to handle this is to implement ServletRequestListener's requestDestroyed method (see the sketch after this class).
*/
private static final ThreadLocal<String> contextHolder = new ThreadLocal<String>();
public static List<String> dataSourceIds = new ArrayList<String>();
public static void setDataSource(String dataSource) {
contextHolder.set(dataSource);
}
public static String getDataSource() {
return contextHolder.get();
}
public static void clearDataSource() {
contextHolder.remove();
}
public static boolean containsDataSource(String dataSourceId) {
return dataSourceIds.contains(dataSourceId);
}
}
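As the comment above notes, the routing key lives in a ThreadLocal, so it must be cleared at the end of every request when the container reuses threads. Below is a minimal sketch of the ServletRequestListener approach mentioned in that comment; the class name DataSourceCleanupListener is my own choice, and it is declared as a @Component so that Spring Boot registers it with the embedded servlet container:
package com.cloud.base.server.config;
import javax.servlet.ServletRequestEvent;
import javax.servlet.ServletRequestListener;
import org.springframework.stereotype.Component;
/** Clears the ThreadLocal routing key at the end of each request so pooled threads start clean. */
@Component
public class DataSourceCleanupListener implements ServletRequestListener {
    @Override
    public void requestDestroyed(ServletRequestEvent sre) {
        DynamicDataSourceContextHolder.clearDataSource();
    }
    @Override
    public void requestInitialized(ServletRequestEvent sre) {
        // nothing to set up when the request starts
    }
}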
package com.cloud.base.server.config;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
public class DynamicDataSource extends AbstractRoutingDataSource {
@Override
protected Object determineCurrentLookupKey() {
// Return the routing key; AbstractRoutingDataSource uses it to pick the target DataSource (null falls back to the default, i.e. master)
return DynamicDataSourceContextHolder.getDataSource();
}
}
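With these pieces in place, switching databases is just a matter of setting the routing key before a connection is obtained and clearing it afterwards. The helper below is a minimal sketch of that pattern, not part of the original project; the DataSourceRouting class and runWith method names are illustrative:
package com.cloud.base.server.config;
import java.util.function.Supplier;
/** Illustrative helper: runs a unit of work against a named data source and always restores routing. */
public final class DataSourceRouting {
    private DataSourceRouting() {
    }
    public static <T> T runWith(String dataSourceId, Supplier<T> work) {
        if (!DynamicDataSourceContextHolder.containsDataSource(dataSourceId)) {
            throw new IllegalArgumentException("Unknown data source: " + dataSourceId);
        }
        // The key has to be set before a connection/transaction is opened,
        // because AbstractRoutingDataSource picks the target when the connection is obtained.
        DynamicDataSourceContextHolder.setDataSource(dataSourceId);
        try {
            return work.get();
        } finally {
            // Clear the key so a reused pool thread falls back to the default (master).
            DynamicDataSourceContextHolder.clearDataSource();
        }
    }
}
A caller would wrap its data access in something like DataSourceRouting.runWith("node1", () -> someRepository.findAll()). Because JpaTransactionManager binds the connection when the transaction begins, the key must be set before entering a @Transactional method, so this wrapper belongs outside the transactional boundary.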