Configuring Multiple Data Sources in Java
First, add the Druid connection pool dependency; here we use Alibaba's Druid connection pool.
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>druid-spring-boot-starter</artifactId>
    <version>1.2.8</version>
</dependency>
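The later sections also rely on MyBatis (SqlSessionFactoryBean) and the MySQL driver, so the project needs those dependencies as well. A minimal sketch, assuming MyBatis Spring Boot and MySQL Connector/J; the version numbers are only examples:
<!-- MyBatis Spring Boot starter, provides SqlSessionFactoryBean (version is an example) -->
<dependency>
    <groupId>org.mybatis.spring.boot</groupId>
    <artifactId>mybatis-spring-boot-starter</artifactId>
    <version>2.2.2</version>
</dependency>
<!-- MySQL JDBC driver (version is an example) -->
<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
    <version>8.0.28</version>
</dependency>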
Configure the data source details; multiple data sources can be defined.
spring:
  datasource:
    druid:
      # First database connection (local); the name "local" is arbitrary
      local:
        type: com.alibaba.druid.pool.DruidDataSource
        username: root
        password: mima
        driver-class-name: com.mysql.cj.jdbc.Driver
        url: jdbc:mysql://ip:3306/database_name?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true
      # Second database connection (cloud)
      cloud:
        type: com.alibaba.druid.pool.DruidDataSource
        username: root
        password: mima
        driver-class-name: com.mysql.cj.jdbc.Driver
        url: jdbc:mysql://ip:3306/database_name?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true
Create a configuration class DataSourceConfig and inject the configuration into it.
import com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceBuilder;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import javax.sql.DataSource;


@Slf4j
@Configuration
public class DataSourceConfig {

    /**
     * Bind the "cloud" connection properties to cloudDataSource
     */
    @Bean(name = "cloudDataSource")
    @ConfigurationProperties(prefix = "spring.datasource.druid.cloud")
    public DataSource cloudDataSource(){
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * Bind the "local" connection properties to localDataSource
     */
    @Bean(name = "localDataSource")
    @ConfigurationProperties(prefix = "spring.datasource.druid.local")
    public DataSource localDataSource(){
        return DruidDataSourceBuilder.create().build();
    }
}
Create a DataSourceNames interface that names the data sources, making them easier to reference.
public interface DataSourceNames {
    String cloudDataSource = "CLOUDDATASOURCE";
    String localDataSource = "LOCALDATASOURCE";
}
Define a custom @DataSource annotation; the default data source is localDataSource.
import java.lang.annotation.*;

@Documented
@Target({ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
public @interface DataSource {
    String value() default DataSourceNames.localDataSource;
}
Create a DynamicDataSource class that extends AbstractRoutingDataSource and overrides the determineCurrentLookupKey method.
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

import javax.sql.DataSource;
import java.util.Map;

public class DynamicDataSource extends AbstractRoutingDataSource {
    private static final ThreadLocal<String> CONTEXT_HOLDER = new ThreadLocal<>();

    /**
     * Configure the data sources; defaultTargetDataSource is the default (primary) database
     */
    public DynamicDataSource(DataSource defaultTargetDataSource, Map<Object, Object> targetDataSources) {
        super.setDefaultTargetDataSource(defaultTargetDataSource);
        super.setTargetDataSources(targetDataSources);
        super.afterPropertiesSet();
    }

    @Override
    protected Object determineCurrentLookupKey() {
        return getDataSource();
    }

    public static void setDataSource(String dataSource) {
        CONTEXT_HOLDER.set(dataSource);
    }

    public static String getDataSource() {
        return CONTEXT_HOLDER.get();
    }

    public static void clearDataSource() {
        CONTEXT_HOLDER.remove();
    }
}
Create the SqlSessionConfig class.
import lombok.extern.slf4j.Slf4j;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;

import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;


@Slf4j
@Configuration
public class SqlSessionConfig {
    @Bean(name = "dataSource")
    @Primary
    DataSource dataSource(@Qualifier("cloudDataSource") DataSource cloudDataSource, @Qualifier("localDataSource") DataSource localDataSource) {
        Map<Object, Object> targetDataSources = new HashMap<>(2);
        targetDataSources.put(DataSourceNames.cloudDataSource, cloudDataSource);
        targetDataSources.put(DataSourceNames.localDataSource, localDataSource);
        log.info("DataSource: {}", targetDataSources);
        return new DynamicDataSource(localDataSource, targetDataSources);
    }

    @Bean
    @Primary
    public SqlSessionFactory sqlSessionFactory(DataSource dataSource) throws Exception {
        SqlSessionFactoryBean bean = new SqlSessionFactoryBean();
        bean.setDataSource(dataSource);
        bean.setMapperLocations(new PathMatchingResourcePatternResolver().getResources("classpath*:mapper/*.xml"));
        return bean.getObject();
    }
}
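If you also use Spring's declarative transactions (@Transactional), a transaction manager should be bound to the routing "dataSource" bean. This is not part of the original configuration; a minimal sketch under that assumption, with TransactionConfig as an illustrative class name:
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;

import javax.sql.DataSource;

@Configuration
public class TransactionConfig {

    /**
     * Bind the transaction manager to the routing "dataSource" bean so that
     * transactional methods run against whichever target data source the
     * DynamicDataSource currently selects. (Assumption: added for completeness,
     * not shown in the original article.)
     */
    @Bean
    @Primary
    public DataSourceTransactionManager transactionManager(@Qualifier("dataSource") DataSource dataSource) {
        return new DataSourceTransactionManager(dataSource);
    }
}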
Configure an aspect so that when a method is annotated with the custom @DataSource annotation, the specified data source is switched in.
import lombok.extern.slf4j.Slf4j;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.springframework.stereotype.Component;

import java.lang.reflect.Method;

@Component
@Aspect
@Slf4j
public class DataSourceAspect {
    // Use the fully qualified name of your own @DataSource annotation here;
    // com.example.config is only a placeholder package.
    @Pointcut("@annotation(com.example.config.DataSource)")
    public void pointCut(){}

    @Around("pointCut()")
    public Object around(ProceedingJoinPoint point) throws Throwable {
        MethodSignature signature = (MethodSignature) point.getSignature();
        Method method = signature.getMethod();
        DataSource dataSource = method.getAnnotation(DataSource.class);
        DynamicDataSource.setDataSource(dataSource.value());
        log.info("set dataSource is {}", dataSource.value());
        try {
            return point.proceed();
        } finally {
            DynamicDataSource.clearDataSource();
            log.info("clearDataSource");
        }
    }
}
Specify the data source when accessing the database:
@Service
public class CloudTaskServiceImpl implements CloudTaskService {

    @Autowired
    private TaskMapper taskMapper;

    @Override
    @DataSource(value = DataSourceNames.cloudDataSource)
    public int insertTask(Task task) {
        return taskMapper.insert(task);
    }
}
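Methods without the @DataSource annotation never trigger the aspect, so the DynamicDataSource falls back to its defaultTargetDataSource, which SqlSessionConfig sets to localDataSource. A minimal sketch of that case, using a hypothetical LocalTaskServiceImpl for illustration:
@Service
public class LocalTaskServiceImpl implements LocalTaskService { // hypothetical class for illustration

    @Autowired
    private TaskMapper taskMapper;

    // No @DataSource annotation: the aspect does not run, so this call goes to
    // the default target data source (localDataSource).
    @Override
    public int insertTask(Task task) {
        return taskMapper.insert(task);
    }
}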
