dynamic-datasource Source Code Analysis


1. Introduction

This article records a walkthrough of the dynamic-datasource (dynamic data source) source code, split into the following parts:

  1. Parsing the data source configuration and creating the data sources
  2. How data source switching works
  3. How local transactions work

Project repository: gitee.com/baomidou/dy…

1.1. Versions

  • dynamic-datasource version: 4.3.1
  • Spring Boot version: 3.3.0

1.2. Configuration files

  • Project pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>3.3.0</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
    <groupId>cn.yuwen</groupId>
    <artifactId>dy-ds-study-test</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>dy-ds-study-test</name>
    <description>dy-ds-study-test</description>
    <url/>
    <licenses>
        <license/>
    </licenses>
    <developers>
        <developer/>
    </developers>
    <scm>
        <connection/>
        <developerConnection/>
        <tag/>
        <url/>
    </scm>
    <properties>
        <java.version>17</java.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-jdbc</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>

        <dependency>
            <groupId>com.mysql</groupId>
            <artifactId>mysql-connector-j</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>


        <dependency>
            <groupId>com.baomidou</groupId>
            <artifactId>dynamic-datasource-spring-boot3-starter</artifactId>
            <version>4.3.1</version>
        </dependency>

        <dependency>
            <groupId>com.baomidou</groupId>
            <artifactId>mybatis-plus-spring-boot3-starter</artifactId>
            <version>3.5.7</version>
        </dependency>

        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
        </dependency>


    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>

</project>
  • application.yml
spring:
  datasource:
    type: com.zaxxer.hikari.HikariDataSource
    dynamic:
      public-key: MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAKQLq0BiO/3mtBj/ZMlOka/r+/ZhI8W6Ck7rLW4SmYbj+RZll/dQeYfAWXGshNWKYMJS+uuVEMDQapjGm0b2+wMCAwEAAQ==
      p6spy: false
      primary: master
      strict: true
      datasource:
        master:
          type: ${spring.datasource.type}
          driverClassName: com.mysql.cj.jdbc.Driver
          url: ENC(jMfI+CNzo+8KElX4ZSrTlSt/NoyWMl66cX9V8II+wdi1r/NN1nmimB8O1dv9R+TzoIgoAxpKLzf1ODJZyV+NnQfEHSc/NPOigQUI0ZhuXxZW5CKBR20bTvd6+gqLm2V63UvJDCKBxDlk/Px46h6U7MT3YLrTab9ewg8Vze7NYr08b4T2+vMQpY/8ptv6U2B6t6KioaWTQte/regHYLJxzIrLOsUsGv3Jf+alsniVEwFZm447j8zUtYvQWcjdHV3ykMfnDoOITWhP/MIkh0/tTPj/RSWPWJlP0EgvGPBsEydrNfmZ1U205+rpfD7hThOj77W1EnwJQ8G2+hgx8+cAUoomtv4TiXWRHI80zI+7lOEbHqRF8GLluPNCQ04cONOnn12eLeQMBB+6/AA9sqfLxizblLNr9zRvLbTsNJDDc1M=)
          username: ENC(nkvSbrTELdNLcowzxIpRae4mosVyD/OHFu7uyUhzQHO+r+8ehAKwy3Fba7OUwHpGvD4uhLqCpwp/Lni6OdliMw==)
          password: ENC(bGQvrcgKt53iMJHRmKVbEJktXUKYvoibaDqNUcak1VWGcvSKoTyDWTxWnGGTJ8V3x+P0nvMzd1nYJCpaXQGJiQ==)
      #        slave:
      #          lazy: true
      #          type: ${spring.datasource.type}
      #          driverClassName: com.mysql.cj.jdbc.Driver
      #          url: jdbc:mysql://localhost:3306/ry-flowable-plus?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8&autoReconnect=true&rewriteBatchedStatements=true&nullCatalogMeansCurrent=true
      #          username:
      #          password:
      #        oracle:
      #          type: ${spring.datasource.type}
      #          driverClassName: oracle.jdbc.OracleDriver
      #          url: jdbc:oracle:thin:@//localhost:1521/XE
      #          username: ROOT
      #          password: root
      #          hikari:
      #            connectionTestQuery: SELECT 1 FROM DUAL
      #        postgres:
      #          type: ${spring.datasource.type}
      #          driverClassName: org.postgresql.Driver
      #          url: jdbc:postgresql://localhost:5432/postgres?useUnicode=true&characterEncoding=utf8&useSSL=true&autoReconnect=true&reWriteBatchedInserts=true
      #          username: root
      #          password: root
      #        sqlserver:
      #          type: ${spring.datasource.type}
      #          driverClassName: com.microsoft.sqlserver.jdbc.SQLServerDriver
      #          url: jdbc:sqlserver://localhost:1433;DatabaseName=tempdb;SelectMethod=cursor;encrypt=false;rewriteBatchedStatements=true
      #          username: SA
      #          password: root
      hikari:
        # Maximum connection pool size
        maxPoolSize: 20
        # Minimum number of idle connections
        minIdle: 10
        # Maximum time to wait for a connection from the pool
        connectionTimeout: 30000
        # Validation timeout
        validationTimeout: 5000
        # Maximum idle time for a connection, default 10 minutes
        idleTimeout: 600000
        # Maximum lifetime of a connection in the pool; 0 means unlimited, default 30 minutes
        maxLifetime: 1800000
        # Connection test query (used to check that a connection is still valid)
        connectionTestQuery: SELECT 1
        # How often to check the liveness of a connection
        keepaliveTime: 30000
server:
  port: 9999

2. Source Code Analysis

2.1. Entry point

  • Since the project uses Spring Boot 3, as usual we start from the starter package
  • Look at the META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports file inside the dynamic-datasource-spring-boot3-starter jar; this is the file Spring Boot 3 reads to register auto-configuration classes

  • It registers a single dynamic data source auto-configuration class: com.baomidou.dynamic.datasource.spring.boot.autoconfigure.DynamicDataSourceAutoConfiguration

2.2. The auto-configuration class

This is the core class of the auto-configuration. It sets up the following:

Configuration

  1. The @AutoConfigureBefore annotation makes this configuration class run before Spring Boot's own DataSource auto-configuration
  2. The @Import annotation pulls in further configuration classes:
    1. DruidDynamicDataSourceConfiguration: Druid dynamic data source configuration
    2. DynamicDataSourceCreatorAutoConfiguration: registers the creator objects for the supported connection pools
    3. DynamicDataSourceAopConfiguration: AOP support for the @DS and @DSTransactional annotations
    4. DynamicDataSourceAssistConfiguration: assist configuration, registers the beans used for creating data sources, decrypting properties, and so on
  3. The @ConditionalOnProperty annotation means the class only applies when spring.datasource.dynamic.enabled is set to true or is not declared at all

Registered beans

  1. A DynamicRoutingDataSource bean is registered in the IoC container. This class is the heart of dynamic-datasource; it is a DataSource implementation, so every database connection is obtained through it

Initialization

  1. It implements InitializingBean, so afterPropertiesSet is called during bean initialization; it invokes customize on every DynamicDataSourcePropertiesCustomizer, which lets you adjust the data source properties (a sketch follows)
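As a rough sketch of such a customizer (the class name and the forced lazy setting are made up for illustration; I am assuming DynamicDataSourcePropertiesCustomizer lives in the same autoconfigure package as DynamicDataSourceAutoConfiguration):

import com.baomidou.dynamic.datasource.spring.boot.autoconfigure.DynamicDataSourceProperties;
import com.baomidou.dynamic.datasource.spring.boot.autoconfigure.DynamicDataSourcePropertiesCustomizer;
import org.springframework.stereotype.Component;

@Component
public class ForceLazyCustomizer implements DynamicDataSourcePropertiesCustomizer {

    @Override
    public void customize(DynamicDataSourceProperties properties) {
        // Illustrative only: force lazy initialization for every configured data source
        properties.getDatasource().values().forEach(ds -> ds.setLazy(true));
    }
}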
/**
 * Core auto-configuration class for the dynamic data source
 *
 * @author TaoYu Kanyuxia
 * @since 1.0.0
 */
@Slf4j
@Configuration(proxyBeanMethods = false)
// This configuration class must run before the built-in DataSource auto-configuration
@AutoConfigureBefore(
        value = DataSourceAutoConfiguration.class,
        name = {
                "com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceAutoConfigure",
                "com.alibaba.druid.spring.boot3.autoconfigure.DruidDataSourceAutoConfigure"
        })
@Import({DruidDynamicDataSourceConfiguration.class, DynamicDataSourceCreatorAutoConfiguration.class, DynamicDataSourceAopConfiguration.class, DynamicDataSourceAssistConfiguration.class})
@ConditionalOnProperty(prefix = DynamicDataSourceProperties.PREFIX, name = "enabled", havingValue = "true", matchIfMissing = true)
public class DynamicDataSourceAutoConfiguration implements InitializingBean {

    private final DynamicDataSourceProperties properties;

    private final List<DynamicDataSourcePropertiesCustomizer> dataSourcePropertiesCustomizers;

    public DynamicDataSourceAutoConfiguration(
            DynamicDataSourceProperties properties,
            ObjectProvider<List<DynamicDataSourcePropertiesCustomizer>> dataSourcePropertiesCustomizers) {
        this.properties = properties;
        this.dataSourcePropertiesCustomizers = dataSourcePropertiesCustomizers.getIfAvailable();
    }

    @Bean
    @ConditionalOnMissingBean
    public DataSource dataSource(List<DynamicDataSourceProvider> providers) {
        // Register the dynamic routing data source (all database connections are obtained from this object)
        DynamicRoutingDataSource dataSource = new DynamicRoutingDataSource(providers);
        dataSource.setPrimary(properties.getPrimary());
        dataSource.setStrict(properties.getStrict());
        dataSource.setStrategy(properties.getStrategy());
        dataSource.setP6spy(properties.getP6spy());
        dataSource.setSeata(properties.getSeata());
        dataSource.setGraceDestroy(properties.getGraceDestroy());
        return dataSource;
    }

    @Override
    public void afterPropertiesSet() {
        if (!CollectionUtils.isEmpty(dataSourcePropertiesCustomizers)) {
            // If property customizers exist, pass the configured properties to them for customization
            for (DynamicDataSourcePropertiesCustomizer customizer : dataSourcePropertiesCustomizers) {
                customizer.customize(properties);
            }
        }
    }

}

2.3. Key registered classes

2.3.1. DynamicDataSourceCreatorAutoConfiguration

This class registers the creator beans for the supported connection pools:

  1. BasicDataSourceCreator: creates basic data sources
  2. JndiDataSourceCreator: creates JNDI data sources
  3. DruidDataSourceCreator: creates Druid data sources
  4. HikariDataSourceCreator: creates HikariCP data sources
  5. BeeCpDataSourceCreator: creates BeeCP data sources
  6. Dbcp2DataSourceCreator: creates DBCP2 data sources
  7. AtomikosDataSourceCreator: creates Atomikos (XA) data sources
@Configuration
public class DynamicDataSourceCreatorAutoConfiguration {

    public static final int JNDI_ORDER = 1000;
    public static final int DRUID_ORDER = 2000;
    public static final int HIKARI_ORDER = 3000;
    public static final int BEECP_ORDER = 4000;
    public static final int DBCP2_ORDER = 5000;
    public static final int ATOMIKOS_ORDER = 6000;
    public static final int DEFAULT_ORDER = 7000;

    @Bean
    @Order(DEFAULT_ORDER)
    public BasicDataSourceCreator basicDataSourceCreator() {
        return new BasicDataSourceCreator();
    }

    @Bean
    @Order(JNDI_ORDER)
    public JndiDataSourceCreator jndiDataSourceCreator() {
        return new JndiDataSourceCreator();
    }

    /**
     * Register the creator when the Druid data source is on the classpath
     */
    @ConditionalOnClass(DruidDataSource.class)
    @Configuration
    @Slf4j
    static class DruidDataSourceCreatorConfiguration {

        @Autowired(required = false)
        private ApplicationContext applicationContext;

        @Bean
        @Order(DRUID_ORDER)
        public DruidDataSourceCreator druidDataSourceCreator(DynamicDataSourceProperties properties) {
            DruidConfig druid = properties.getDruid();
            return new DruidDataSourceCreator(druid, proxyFilters -> {
                List<Filter> filters = new ArrayList<>();
                if (applicationContext != null && DsStrUtils.hasText(proxyFilters)) {
                    for (String filterId : proxyFilters.split(",")) {
                        try {
                            filters.add(applicationContext.getBean(filterId, Filter.class));
                        } catch (Exception e) {
                            log.warn("dynamic-datasource cannot load druid filter with name [{}], will be ignored", filterId);
                        }
                    }
                }
                return filters;
            });
        }
    }

    /**
     * Register the creator when the Hikari data source is on the classpath
     */
    @ConditionalOnClass(HikariDataSource.class)
    @Configuration
    static class HikariDataSourceCreatorConfiguration {
        @Bean
        @Order(HIKARI_ORDER)
        public HikariDataSourceCreator hikariDataSourceCreator(DynamicDataSourceProperties properties) {
            return new HikariDataSourceCreator(properties.getHikari());
        }
    }

    /**
     * Register the creator when the BeeCp data source is on the classpath
     */
    @ConditionalOnClass(BeeDataSource.class)
    @Configuration
    static class BeeCpDataSourceCreatorConfiguration {

        @Bean
        @Order(BEECP_ORDER)
        public BeeCpDataSourceCreator beeCpDataSourceCreator(DynamicDataSourceProperties properties) {
            return new BeeCpDataSourceCreator(properties.getBeecp());
        }
    }

    /**
     * Register the creator when the Dbcp2 data source is on the classpath
     */
    @ConditionalOnClass(BasicDataSource.class)
    @Configuration
    static class Dbcp2DataSourceCreatorConfiguration {

        @Bean
        @Order(DBCP2_ORDER)
        public Dbcp2DataSourceCreator dbcp2DataSourceCreator(DynamicDataSourceProperties properties) {
            return new Dbcp2DataSourceCreator(properties.getDbcp2());
        }

    }

    /**
     * Register the creator when the Atomikos data source is on the classpath
     */
    @ConditionalOnClass({AtomikosDataSourceBean.class, TransactionFactory.class})
    @Configuration
    static class AtomikosDataSourceCreatorConfiguration {

        @Bean
        @Order(ATOMIKOS_ORDER)
        public AtomikosDataSourceCreator atomikosDataSourceCreator(DynamicDataSourceProperties properties) {
            return new AtomikosDataSourceCreator(properties.getAtomikos());
        }

        @Bean
        public TransactionFactory atomikosTransactionFactory() {
            return new AtomikosTransactionFactory();
        }

    }
}
2.3.1.1. DataSourceCreator

The DataSourceCreator interface defines two methods:

  1. createDataSource: creates a data source from the given properties
  2. support: whether this creator can handle the given properties

public interface DataSourceCreator {

    /**
     * Create a data source from the given properties
     *
     * @param dataSourceProperty data source properties
     * @return the created data source
     */
    DataSource createDataSource(DataSourceProperty dataSourceProperty);

    /**
     * Whether this creator supports creating a data source from the given properties
     *
     * @param dataSourceProperty data source properties
     * @return whether it is supported
     */
    boolean support(DataSourceProperty dataSourceProperty);
}

2.3.2. DynamicDataSourceAopConfiguration

This class creates the AOP support for the @DSTransactional and @DS annotations:

  1. A DynamicDataSourceAnnotationAdvisor supporting the @DS annotation
  2. A second DynamicDataSourceAnnotationAdvisor supporting the @DSTransactional annotation
  3. DsProcessor: a chain of responsibility that resolves the data source name from the request header, the session, or a SpEL expression

/**
 * Core AOP configuration class for the dynamic data source
 *
 * @author TaoYu Kanyuxia
 * @see DynamicDataSourceProvider
 * @see DynamicDataSourceStrategy
 * @see DynamicRoutingDataSource
 * @since 1.0.0
 */
@Role(BeanDefinition.ROLE_INFRASTRUCTURE)
@Configuration(proxyBeanMethods = false)
public class DynamicDataSourceAopConfiguration {

    private final DynamicDataSourceProperties properties;

    public DynamicDataSourceAopConfiguration(DynamicDataSourceProperties properties) {
        this.properties = properties;
    }

    @Role(BeanDefinition.ROLE_INFRASTRUCTURE)
    @Bean
    public static DynamicDataSourceProperties dynamicDataSourceProperties() {
        return new DynamicDataSourceProperties();
    }

    @Role(BeanDefinition.ROLE_INFRASTRUCTURE)
    @Bean
    @ConditionalOnMissingBean
    public DsProcessor dsProcessor(BeanFactory beanFactory) {
        // Data source processors, chain of responsibility pattern
        DsProcessor headerProcessor = new DsJakartaHeaderProcessor();
        DsProcessor sessionProcessor = new DsJakartaSessionProcessor();
        DsSpelExpressionProcessor spelExpressionProcessor = new DsSpelExpressionProcessor();
        spelExpressionProcessor.setBeanResolver(new BeanFactoryResolver(beanFactory));
        headerProcessor.setNextProcessor(sessionProcessor);
        sessionProcessor.setNextProcessor(spelExpressionProcessor);
        return headerProcessor;
    }


    @Role(BeanDefinition.ROLE_INFRASTRUCTURE)
    @Bean
    @ConditionalOnProperty(prefix = DynamicDataSourceProperties.PREFIX + ".aop", name = "enabled", havingValue = "true", matchIfMissing = true)
    public Advisor dynamicDatasourceAnnotationAdvisor(DsProcessor dsProcessor) {
        DynamicDatasourceAopProperties aopProperties = properties.getAop();
        DynamicDataSourceAnnotationInterceptor interceptor = new DynamicDataSourceAnnotationInterceptor(aopProperties.getAllowedPublicOnly(), dsProcessor);
        // Advisor supporting the @DS annotation
        DynamicDataSourceAnnotationAdvisor advisor = new DynamicDataSourceAnnotationAdvisor(interceptor, DS.class);
        advisor.setOrder(aopProperties.getOrder());
        return advisor;
    }

    @Role(BeanDefinition.ROLE_INFRASTRUCTURE)
    @Bean
    @ConditionalOnProperty(prefix = DynamicDataSourceProperties.PREFIX, name = "seata", havingValue = "false", matchIfMissing = true)
    public Advisor dynamicTransactionAdvisor() {
        DynamicDatasourceAopProperties aopProperties = properties.getAop();
        DynamicLocalTransactionInterceptor interceptor = new DynamicLocalTransactionInterceptor(aopProperties.getAllowedPublicOnly());
        // Advisor supporting the @DSTransactional annotation
        return new DynamicDataSourceAnnotationAdvisor(interceptor, DSTransactional.class);
    }

}
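Because dsProcessor is registered with @ConditionalOnMissingBean, the default chain can be replaced by defining your own DsProcessor bean. A minimal, hypothetical sketch (the #tenant prefix and the ThreadLocal are invented; I am assuming DsProcessor sits in com.baomidou.dynamic.datasource.processor):

import com.baomidou.dynamic.datasource.processor.DsProcessor;
import org.aopalliance.intercept.MethodInvocation;

public class TenantDsProcessor extends DsProcessor {

    /**
     * Hypothetical holder for the current tenant's data source name
     */
    public static final ThreadLocal<String> TENANT_DS = new ThreadLocal<>();

    private static final String TENANT_PREFIX = "#tenant";

    @Override
    public boolean matches(String key) {
        // Only handle @DS values that start with #tenant
        return key.startsWith(TENANT_PREFIX);
    }

    @Override
    public String doDetermineDatasource(MethodInvocation invocation, String key) {
        // Returning null lets the next processor in the chain try
        return TENANT_DS.get();
    }
}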

2.3.3. DynamicDataSourceAssistConfiguration

The assist configuration class does the following:

  1. Registers the YmlDynamicDataSourceProvider bean, which loads the data sources
  2. Registers the DataSourceInitEvent bean, which decrypts the database url, username and password
  3. Registers the DefaultDataSourceCreator bean and wires every DataSourceCreator into it; this is the entry point for creating data sources
  4. Registers the DsTxEventListenerFactory bean, which handles the @DsTxEventListener annotation (I have not yet found where it is used) and listens to transaction events such as commit and rollback
@Configuration
@RequiredArgsConstructor
public class DynamicDataSourceAssistConfiguration {

    private final DynamicDataSourceProperties properties;

    @Bean
    @Order(0)
    public DynamicDataSourceProvider ymlDynamicDataSourceProvider(DefaultDataSourceCreator defaultDataSourceCreator) {
        // Loads the data sources
        return new YmlDynamicDataSourceProvider(defaultDataSourceCreator, properties.getDatasource());
    }

    @Bean
    @ConditionalOnMissingBean
    public DataSourceInitEvent dataSourceInitEvent() {
        return new EncDataSourceInitEvent();
    }

    @Bean
    @ConditionalOnMissingBean
    public DefaultDataSourceCreator dataSourceCreator(List<DataSourceCreator> dataSourceCreators, DataSourceInitEvent dataSourceInitEvent) {
        DefaultDataSourceCreator creator = new DefaultDataSourceCreator();
        creator.setCreators(dataSourceCreators);
        creator.setDataSourceInitEvent(dataSourceInitEvent);
        creator.setPublicKey(properties.getPublicKey());
        creator.setLazy(properties.getLazy());
        creator.setP6spy(properties.getP6spy());
        creator.setSeata(properties.getSeata());
        creator.setSeataMode(properties.getSeataMode());
        return creator;
    }

    @Configuration
    static class DsTxEventListenerFactoryConfiguration {
        @Bean
        @ConditionalOnMissingBean
        public DsTxEventListenerFactory dsTxEventListenerFactory() {
            // Transaction event listener factory, listens to transaction-related events
            return new DsTxEventListenerFactory();
        }
    }
}

2.4. Data source registration

Data source registration is implemented in DynamicRoutingDataSource

  • It implements InitializingBean; the data sources are initialized in the corresponding afterPropertiesSet method
  • It implements DisposableBean; cleanup is done in the corresponding destroy method
  • Analysis

afterPropertiesSet works as follows

  1. Check the environment, i.e. verify that the required classes exist; for example, if p6spy is enabled, check that com.p6spy.engine.spy.P6DataSource is on the classpath
  2. Iterate over every DynamicDataSourceProvider (by default only YmlDynamicDataSourceProvider) and load the data sources
  3. Add the created data sources
  4. Check whether the primary data source is set; this only logs a message, it does not fail when it is missing
/**
 * All data sources
 */
private final Map<String, DataSource> dataSourceMap = new ConcurrentHashMap<>();
/**
 * Grouped data sources
 */
private final Map<String, GroupDataSource> groupDataSources = new ConcurrentHashMap<>();

@Override
public void afterPropertiesSet() {
    // Check for features that are enabled but missing their dependency
    checkEnv();
    // Add and group the data sources
    Map<String, DataSource> dataSources = new HashMap<>(16);
    for (DynamicDataSourceProvider provider : providers) {
        // Load the data sources
        Map<String, DataSource> dsMap = provider.loadDataSources();
        if (dsMap != null) {
            dataSources.putAll(dsMap);
        }
    }
    // Add the data sources to the dataSourceMap field and group them
    for (Map.Entry<String, DataSource> dsItem : dataSources.entrySet()) {
        addDataSource(dsItem.getKey(), dsItem.getValue());
    }
    // Check whether the primary data source is set
    if (groupDataSources.containsKey(primary)) {
        log.info("dynamic-datasource initial loaded [{}] datasource,primary group datasource named [{}]", dataSources.size(), primary);
    } else if (dataSourceMap.containsKey(primary)) {
        log.info("dynamic-datasource initial loaded [{}] datasource,primary datasource named [{}]", dataSources.size(), primary);
    } else {
        log.warn("dynamic-datasource initial loaded [{}] datasource,Please add your primary datasource or check your configuration", dataSources.size());
    }
}

private void checkEnv() {
    if (p6spy) {
        try {
            Class.forName("com.p6spy.engine.spy.P6DataSource");
            log.info("dynamic-datasource detect P6SPY plugin and enabled it");
        } catch (Exception e) {
            throw new RuntimeException("dynamic-datasource enabled P6SPY ,however without p6spy dependency", e);
        }
    }
    if (seata) {
        try {
            Class.forName("io.seata.rm.datasource.DataSourceProxy");
            log.info("dynamic-datasource detect ALIBABA SEATA and enabled it");
        } catch (Exception e) {
            throw new RuntimeException("dynamic-datasource enabled ALIBABA SEATA,however without seata dependency", e);
        }
    }
}

2.4.1. Loading the data sources

loadDataSources delegates to the parent class AbstractDataSourceProvider

public class YmlDynamicDataSourceProvider extends AbstractDataSourceProvider {

    /**
     * All data source properties
     */
    private final Map<String, DataSourceProperty> dataSourcePropertiesMap;

    /**
     * Constructor
     *
     * @param defaultDataSourceCreator default data source creator
     * @param dataSourcePropertiesMap  data source properties
     */
    public YmlDynamicDataSourceProvider(DefaultDataSourceCreator defaultDataSourceCreator, Map<String, DataSourceProperty> dataSourcePropertiesMap) {
        super(defaultDataSourceCreator);
        this.dataSourcePropertiesMap = dataSourcePropertiesMap;
    }


    @Override
    public Map<String, DataSource> loadDataSources() {
        return createDataSourceMap(dataSourcePropertiesMap);
    }
}

It takes all of the data source properties and creates each data source through the DefaultDataSourceCreator

@Slf4j
@AllArgsConstructor
public abstract class AbstractDataSourceProvider implements DynamicDataSourceProvider {

    private final DefaultDataSourceCreator defaultDataSourceCreator;

    /**
     * Create the data sources
     *
     * @param dataSourcePropertiesMap map of data source properties
     * @return map of data sources
     */
    protected Map<String, DataSource> createDataSourceMap(
            Map<String, DataSourceProperty> dataSourcePropertiesMap) {
        Map<String, DataSource> dataSourceMap = new HashMap<>(dataSourcePropertiesMap.size() * 2);
        for (Map.Entry<String, DataSourceProperty> item : dataSourcePropertiesMap.entrySet()) {
            String dsName = item.getKey();
            DataSourceProperty dataSourceProperty = item.getValue();
            String poolName = dataSourceProperty.getPoolName();
            if (poolName == null || "".equals(poolName)) {
                poolName = dsName;
            }
            dataSourceProperty.setPoolName(poolName);
            // Create the data source
            dataSourceMap.put(dsName, defaultDataSourceCreator.createDataSource(dataSourceProperty));
        }
        return dataSourceMap;
    }
}
  • Creating a data source

DefaultDataSourceCreator#createDataSource works as follows

  1. Find a DataSourceCreator whose support method accepts the given properties
  2. Decrypt the database properties
  3. Create the data source
  4. Run the SQL scripts (schema script and data script)
public DataSource createDataSource(DataSourceProperty dataSourceProperty) {
    DataSourceCreator dataSourceCreator = null;
    for (DataSourceCreator creator : this.creators) {
        // Check whether this creator supports the given properties
        if (creator.support(dataSourceProperty)) {
            dataSourceCreator = creator;
            break;
        }
    }
    if (dataSourceCreator == null) {
        throw new IllegalStateException("creator must not be null,please check the DataSourceCreator");
    }

    // If the properties do not set a public key, fall back to the global public key
    String propertyPublicKey = dataSourceProperty.getPublicKey();
    if (DsStrUtils.isEmpty(propertyPublicKey)) {
        dataSourceProperty.setPublicKey(publicKey);
    }
    // If the properties do not set the lazy flag, fall back to the global lazy setting
    Boolean propertyLazy = dataSourceProperty.getLazy();
    if (propertyLazy == null) {
        dataSourceProperty.setLazy(lazy);
    }
    // Decrypt the database properties
    if (dataSourceInitEvent != null) {
        dataSourceInitEvent.beforeCreate(dataSourceProperty);
    }
    // Create the data source
    DataSource dataSource = dataSourceCreator.createDataSource(dataSourceProperty);
    if (dataSourceInitEvent != null) {
        dataSourceInitEvent.afterCreate(dataSource);
    }
    // Run the SQL scripts: schema script and data script
    this.runScrip(dataSource, dataSourceProperty);
    // Return a wrapped data source
    return wrapDataSource(dataSource, dataSourceProperty);
}
2.4.1.1. Finding the matching creator

I am using the HikariCP connection pool, so the matching creator is HikariDataSourceCreator; look at its support method

It simply checks whether the configured data source type is com.zaxxer.hikari.HikariDataSource (or no type is configured at all)

@Override
public boolean support(DataSourceProperty dataSourceProperty) {
    Class<? extends DataSource> type = dataSourceProperty.getType();
    return type == null || DdConstants.HIKARI_DATASOURCE.equals(type.getName());
}
2.4.1.2. Decrypting the database properties

The url, username and password are decrypted; a value is only decrypted when it is wrapped in ENC() (e.g. ENC(XXXXXX)) and a public key is configured
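To produce such ENC() values, the library ships a CryptoUtils toolkit (its decrypt method is used below). A rough sketch of generating a key pair and an encrypted value, assuming CryptoUtils also exposes genKeyPair(int) and encrypt(privateKey, plainText) as described in the project documentation:

import com.baomidou.dynamic.datasource.toolkit.CryptoUtils;

// Sketch only; assumes CryptoUtils#genKeyPair and #encrypt exist with these signatures.
public class EncValueGenerator {
    public static void main(String[] args) throws Exception {
        String[] keyPair = CryptoUtils.genKeyPair(512);
        System.out.println("privateKey: " + keyPair[0]);
        System.out.println("publicKey : " + keyPair[1]); // goes into spring.datasource.dynamic.public-key
        // Wrap the output in ENC(...) before putting it into url/username/password
        System.out.println("password  : ENC(" + CryptoUtils.encrypt(keyPair[0], "root") + ")");
    }
}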

public class EncDataSourceInitEvent implements DataSourceInitEvent {

    /**
     * Pattern for ENC() encrypted values
     */
    private static final Pattern ENC_PATTERN = Pattern.compile("^ENC\\((.*)\\)$");

    @Override
    public void beforeCreate(DataSourceProperty dataSourceProperty) {
        String publicKey = dataSourceProperty.getPublicKey();
        if (DsStrUtils.hasText(publicKey)) {
            dataSourceProperty.setUrl(decrypt(publicKey, dataSourceProperty.getUrl()));
            dataSourceProperty.setUsername(decrypt(publicKey, dataSourceProperty.getUsername()));
            dataSourceProperty.setPassword(decrypt(publicKey, dataSourceProperty.getPassword()));
        }
    }

    @Override
    public void afterCreate(DataSource dataSource) {

    }

    /**
     * Decrypt a string
     */
    private String decrypt(String publicKey, String cipherText) {
        if (DsStrUtils.hasText(cipherText)) {
            Matcher matcher = ENC_PATTERN.matcher(cipherText);
            if (matcher.find()) {
                try {
                    return CryptoUtils.decrypt(publicKey, matcher.group(1));
                } catch (Exception e) {
                    log.error("DynamicDataSourceProperties.decrypt error ", e);
                }
            }
        }
        return cipherText;
    }
}
2.4.1.3. Creating the data source

A HikariConfig object is created and all of the properties from DataSourceProperty.hikari are copied into it

  • If lazy initialization is off, a HikariDataSource pool is created from the config right away
  • If lazy initialization is on, an empty HikariDataSource is created and copyState or copyStateTo is invoked to clone the config into it, so the pool is only started on first use
public DataSource createDataSource(DataSourceProperty dataSourceProperty) {
    HikariConfig config = MERGE_CREATOR.create(gConfig, dataSourceProperty.getHikari());
    config.setUsername(dataSourceProperty.getUsername());
    config.setPassword(dataSourceProperty.getPassword());
    config.setJdbcUrl(dataSourceProperty.getUrl());
    config.setPoolName(dataSourceProperty.getPoolName());
    String driverClassName = dataSourceProperty.getDriverClassName();
    if (DsStrUtils.hasText(driverClassName)) {
        config.setDriverClassName(driverClassName);
    }
    // Not lazy: build the pool immediately
    if (Boolean.FALSE.equals(dataSourceProperty.getLazy())) {
        return new HikariDataSource(config);
    }
    config.validate();
    HikariDataSource dataSource = new HikariDataSource();
    try {
        configCopyMethod.invoke(config, dataSource);
    } catch (IllegalAccessException | InvocationTargetException e) {
        throw new RuntimeException("HikariConfig failed to copy to HikariDataSource", e);
    }
    return dataSource;
}
  • When is copyState or copyStateTo called?

fetchMethod runs in a static initializer the first time the class is loaded; it checks whether HikariConfig declares copyState or copyStateTo and uses whichever exists, to stay compatible with different HikariCP versions

static {
    fetchMethod();
}

private static void fetchMethod() {
    Class<HikariConfig> hikariConfigClass = HikariConfig.class;
    try {
        configCopyMethod = hikariConfigClass.getMethod("copyState", hikariConfigClass);
        return;
    } catch (NoSuchMethodException ignored) {
    }

    try {
        configCopyMethod = hikariConfigClass.getMethod("copyStateTo", hikariConfigClass);
        return;
    } catch (NoSuchMethodException ignored) {
    }
    throw new RuntimeException("HikariConfig does not has 'copyState' or 'copyStateTo' method!");
}
2.4.1.4. Running SQL scripts

The SQL scripts here are the schema (table creation) script and the data (seed data) script; they are configured through the schema and data properties under init of each data source entry below spring.datasource.dynamic, for example:
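A hypothetical example (the database and script file names are made up; only the init block is specific to script execution):

spring:
  datasource:
    dynamic:
      datasource:
        master:
          url: jdbc:mysql://localhost:3306/demo
          username: root
          password: root
          init:
            # table creation script
            schema: classpath:db/schema.sql
            # seed data script
            data: classpath:db/data.sql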

private void runScrip(DataSource dataSource, DataSourceProperty dataSourceProperty) {
    DatasourceInitProperties initProperty = dataSourceProperty.getInit();
    String schema = initProperty.getSchema();
    String data = initProperty.getData();
    if (DsStrUtils.hasText(schema) || DsStrUtils.hasText(data)) {
        // Create a SQL script runner
        ScriptRunner scriptRunner = new ScriptRunner(initProperty.isContinueOnError(), initProperty.getSeparator());
        // A schema script is configured
        if (DsStrUtils.hasText(schema)) {
            scriptRunner.runScript(dataSource, schema);
        }
        // A data script is configured
        if (DsStrUtils.hasText(data)) {
            scriptRunner.runScript(dataSource, data);
        }
    }
}
2.4.1.4.1. Running a script
  • First a ResourceDatabasePopulator is created; it executes the SQL
  • A ResourcePatternResolver is created so that resource locations with wildcards are supported
  • Finally the script is executed
public void runScript(DataSource dataSource, String location) {
    if (DsStrUtils.hasText(location)) {
        // Create a resource database populator
        ResourceDatabasePopulator populator = new ResourceDatabasePopulator();
        populator.setContinueOnError(continueOnError);
        populator.setSeparator(separator);
        try {
            // Resource path resolver, supports wildcard patterns
            ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
            // Add the resolved script resources
            populator.addScripts(resolver.getResources(location));
            // Execute
            DatabasePopulatorUtils.execute(populator, dataSource);
        } catch (DataAccessException e) {
            log.warn("execute sql error", e);
        } catch (Exception e1) {
            log.warn("failed to initialize dataSource from schema file {} ", location, e1);
        }
    }
}
  • Executing the script
public static void execute(DatabasePopulator populator, DataSource dataSource) throws DataAccessException {
    Assert.notNull(populator, "DatabasePopulator must not be null");
    Assert.notNull(dataSource, "DataSource must not be null");
    try {
        Connection connection = DataSourceUtils.getConnection(dataSource);
        try {
            // Run the SQL
            populator.populate(connection);
            // If auto-commit is off and the connection is not transactional, commit explicitly
            if (!connection.getAutoCommit() && !DataSourceUtils.isConnectionTransactional(connection, dataSource)) {
                connection.commit();
            }
        }
        finally {
            // Release the connection
            DataSourceUtils.releaseConnection(connection, dataSource);
        }
    }
    catch (ScriptException ex) {
        throw ex;
    }
    catch (Throwable ex) {
        throw new UncategorizedScriptException("Failed to execute database script", ex);
    }
}
2.4.1.4.1.1. Populating (executing the SQL statements)

It loops over all script resources and executes each one

@Override
public void populate(Connection connection) throws ScriptException {
    Assert.notNull(connection, "'connection' must not be null");
    for (Resource script : this.scripts) {
        EncodedResource encodedScript = new EncodedResource(script, this.sqlScriptEncoding);
        // Execute the script
        ScriptUtils.executeSqlScript(connection, encodedScript, this.continueOnError, this.ignoreFailedDrops,
                this.commentPrefixes, this.separator, this.blockCommentStartDelimiter, this.blockCommentEndDelimiter);
    }
}
  • Executing a single script

The main logic of this method is

  1. Read the script content
  2. Split it into statements on the separator (;)
  3. Execute each statement
public static void executeSqlScript(Connection connection, EncodedResource resource, boolean continueOnError,
        boolean ignoreFailedDrops, String[] commentPrefixes, @Nullable String separator,
        String blockCommentStartDelimiter, String blockCommentEndDelimiter) throws ScriptException {

    try {
        if (logger.isDebugEnabled()) {
            logger.debug("Executing SQL script from " + resource);
        }
        long startTime = System.currentTimeMillis();

        String script;
        try {
            // Read the script content
            script = readScript(resource, separator, commentPrefixes, blockCommentEndDelimiter);
        }
        catch (IOException ex) {
            throw new CannotReadScriptException(resource, ex);
        }

        if (separator == null) {
            separator = DEFAULT_STATEMENT_SEPARATOR;
        }
        if (!EOF_STATEMENT_SEPARATOR.equals(separator) &&
                !containsStatementSeparator(resource, script, separator, commentPrefixes,
                    blockCommentStartDelimiter, blockCommentEndDelimiter)) {
            separator = FALLBACK_STATEMENT_SEPARATOR;
        }

        List<String> statements = new ArrayList<>();
        // Split the script into statements on the separator
        splitSqlScript(resource, script, separator, commentPrefixes, blockCommentStartDelimiter,
                blockCommentEndDelimiter, statements);

        int stmtNumber = 0;
        Statement stmt = connection.createStatement();
        try {
            for (String statement : statements) {
                stmtNumber++;
                try {
                    // Execute the SQL statement
                    stmt.execute(statement);
                    int rowsAffected = stmt.getUpdateCount();
                    if (logger.isDebugEnabled()) {
                        logger.debug(rowsAffected + " returned as update count for SQL: " + statement);
                        SQLWarning warningToLog = stmt.getWarnings();
                        while (warningToLog != null) {
                            logger.debug("SQLWarning ignored: SQL state '" + warningToLog.getSQLState() +
                                    "', error code '" + warningToLog.getErrorCode() +
                                    "', message [" + warningToLog.getMessage() + "]");
                            warningToLog = warningToLog.getNextWarning();
                        }
                    }
                }
                catch (SQLException ex) {
                    boolean dropStatement = StringUtils.startsWithIgnoreCase(statement.trim(), "drop");
                    if (continueOnError || (dropStatement && ignoreFailedDrops)) {
                        if (logger.isDebugEnabled()) {
                            logger.debug(ScriptStatementFailedException.buildErrorMessage(statement, stmtNumber, resource), ex);
                        }
                    }
                    else {
                        throw new ScriptStatementFailedException(statement, stmtNumber, resource, ex);
                    }
                }
            }
        }
        finally {
            try {
                stmt.close();
            }
            catch (Throwable ex) {
                logger.trace("Could not close JDBC Statement", ex);
            }
        }

        long elapsedTime = System.currentTimeMillis() - startTime;
        if (logger.isDebugEnabled()) {
            logger.debug("Executed SQL script from " + resource + " in " + elapsedTime + " ms.");
        }
    }
    catch (Exception ex) {
        if (ex instanceof ScriptException scriptException) {
            throw scriptException;
        }
        throw new UncategorizedScriptException(
            "Failed to execute database script from resource [" + resource + "]", ex);
    }
}
2.4.1.5. Wrapping the data source

The data source is wrapped in order to support p6spy and Seata

  • If p6spy is enabled it is wrapped in a P6DataSource
  • If Seata is enabled it is wrapped in a DataSourceProxyXA or DataSourceProxy
  • Finally it is always wrapped in an ItemDataSource, which holds the raw data source, the target (wrapped) data source, and flags such as whether p6spy and Seata are enabled
private DataSource wrapDataSource(DataSource dataSource, DataSourceProperty dataSourceProperty) {
    String name = dataSourceProperty.getPoolName();
    DataSource targetDataSource = dataSource;

    // p6spy enabled: wrap the data source in a P6DataSource
    Boolean enabledP6spy = p6spy && dataSourceProperty.getP6spy();
    if (enabledP6spy) {
        targetDataSource = new P6DataSource(dataSource);
        log.debug("dynamic-datasource [{}] wrap p6spy plugin", name);
    }
    // Seata enabled: wrap the data source in a DataSourceProxyXA or DataSourceProxy
    Boolean enabledSeata = seata && dataSourceProperty.getSeata();
    if (enabledSeata) {
        if (SeataMode.XA == seataMode) {
            targetDataSource = new DataSourceProxyXA(targetDataSource);
        } else {
            targetDataSource = new DataSourceProxy(targetDataSource);
        }
        log.debug("dynamic-datasource [{}] wrap seata plugin transaction mode ", name);
    }
    // Wrap everything in an ItemDataSource
    return new ItemDataSource(name, dataSource, targetDataSource, enabledP6spy, enabledSeata, seataMode);
}

2.4.2. Adding the created data sources

Every data source is first added to the dataSourceMap field and then grouped

/**
 * All data sources
 */
private final Map<String, DataSource> dataSourceMap = new ConcurrentHashMap<>();
/**
 * Grouped data sources
 */
private final Map<String, GroupDataSource> groupDataSources = new ConcurrentHashMap<>();

public synchronized void addDataSource(String ds, DataSource dataSource) {
    DataSource oldDataSource = dataSourceMap.put(ds, dataSource);
    // Add the new data source to its group
    this.addGroupDataSource(ds, dataSource);
    // Close the old data source if one was replaced
    if (oldDataSource != null) {
        closeDataSource(ds, oldDataSource, graceDestroy);
    }
    log.info("dynamic-datasource - add a datasource named [{}] success", ds);
}
2.4.2.1. Data source grouping

If a data source name contains an underscore (_), it is grouped: for example slave_1 joins group slave, and later lookups by the group name pick one member via a random or load-balancing strategy

private static final String UNDERLINE = "_";

private void addGroupDataSource(String ds, DataSource dataSource) {
    if (ds.contains(UNDERLINE)) {
        String group = ds.split(UNDERLINE)[0];
        GroupDataSource groupDataSource = groupDataSources.get(group);
        if (groupDataSource == null) {
            try {
                groupDataSource = new GroupDataSource(group, strategy.getDeclaredConstructor().newInstance());
                groupDataSources.put(group, groupDataSource);
            } catch (Exception e) {
                throw new RuntimeException("dynamic-datasource - add the datasource named " + ds + " error", e);
            }
        }
        groupDataSource.addDatasource(ds, dataSource);
    }
}

2.5. Obtaining a database connection

From the previous sections we know that DynamicDataSourceAutoConfiguration registers a DynamicRoutingDataSource bean. It is a DataSource, and it is the only DataSource in the Spring context, so every database connection is obtained from DynamicRoutingDataSource#getConnection. First, a quick look at the class hierarchy

  • Getting a connection

There are two cases: one where a (local multi-datasource) transaction is active, and one where a connection is obtained directly; the transactional case is covered later

@Override
public Connection getConnection() throws SQLException {
    // Get the transaction id
    String xid = TransactionContext.getXID();
    if (DsStrUtils.isEmpty(xid)) {
        // No transaction: determine the data source and get a connection from it
        return determineDataSource().getConnection();
    } else {
        String ds = DynamicDataSourceContextHolder.peek();
        // Empty means the primary data source
        ds = DsStrUtils.isEmpty(ds) ? getPrimary() : ds;
        ConnectionProxy connection = ConnectionFactory.getConnection(xid, ds);
        // Return a connection proxy
        return connection == null ? getConnectionProxy(xid, ds, determineDataSource().getConnection()) : connection;
    }
}

2.5.1. Determining the data source

  • The data source name is read from DynamicDataSourceContextHolder; it may be empty
  • The data source is then looked up by that name
@Override
public DataSource determineDataSource() {
    String dsKey = DynamicDataSourceContextHolder.peek();
    return getDataSource(dsKey);
}
2.5.1.1. DynamicDataSourceContextHolder (the data source context holder)
  • The name is read from the thread-local LOOKUP_KEY_HOLDER, a deque used as a stack, which is what makes nested data source switching possible (a nesting sketch follows the code below)
  • peek returns the most recently pushed data source name without removing it; it only looks at the top of the stack

/**
 * Why a linked structure (strictly speaking, a stack) is used
 * <pre>
 * To support nested switching: say services A, B and C each use a different data source.
 * A business method in A calls B, and B calls C; switching level by level forms a chain.
 * A plain "one value per thread" approach cannot express this, so a last-in-first-out stack is required.
 * </pre>
 */
private static final ThreadLocal<Deque<String>> LOOKUP_KEY_HOLDER = new NamedThreadLocal<Deque<String>>("dynamic-datasource") {
    @Override
    protected Deque<String> initialValue() {
        return new ArrayDeque<>();
    }
};

/**
 * Get the current thread's data source
 *
 * @return the data source name
 */
public static String peek() {
    return LOOKUP_KEY_HOLDER.get().peek();
}
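A small sketch of the stack semantics (the data source names are made up; push and poll are the same methods the @DS interceptor uses later):

// Nested switching, conceptually:
DynamicDataSourceContextHolder.push("master");   // service A switches to master
DynamicDataSourceContextHolder.push("slave_1");  // A calls B, which uses slave_1
DynamicDataSourceContextHolder.peek();           // "slave_1" -> connections now route to slave_1
DynamicDataSourceContextHolder.poll();           // B returns, top of the stack is "master" again
DynamicDataSourceContextHolder.peek();           // "master"
DynamicDataSourceContextHolder.poll();           // A returns, the stack is empty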
2.5.1.2. Looking up the data source
  • If the name is empty, fall back to the primary data source
  • If groups exist and the name matches a group, pick a member from the group via the configured strategy
  • Otherwise look the name up in the data source map
public DataSource getDataSource(String ds) {
    if (DsStrUtils.isEmpty(ds)) {
        // Name is empty: fall back to the primary data source
        return determinePrimaryDataSource();
    // Groups exist and the name matches a group: pick a member via the strategy
    } else if (!groupDataSources.isEmpty() && groupDataSources.containsKey(ds)) {
        log.debug("dynamic-datasource switch to the datasource named [{}]", ds);
        return groupDataSources.get(ds).determineDataSource();
    // Look the name up in the data source map
    } else if (dataSourceMap.containsKey(ds)) {
        log.debug("dynamic-datasource switch to the datasource named [{}]", ds);
        return dataSourceMap.get(ds);
    }
    if (strict) {
        throw new CannotFindDataSourceException("dynamic-datasource could not find a datasource named " + ds);
    }
    return determinePrimaryDataSource();
}
2.5.1.3. Falling back to the primary data source

The default name of the primary data source is master

  • First look it up in the data source map; if found, return it
  • Otherwise look it up in the group map
  • If neither contains it, throw an exception
private String primary = "master";

private DataSource determinePrimaryDataSource() {
    log.debug("dynamic-datasource switch to the primary datasource");
    // Look up in the data source map
    DataSource dataSource = dataSourceMap.get(primary);
    if (dataSource != null) {
        return dataSource;
    }
    // Look up in the group map
    GroupDataSource groupDataSource = groupDataSources.get(primary);
    if (groupDataSource != null) {
        return groupDataSource.determineDataSource();
    }
    throw new CannotFindDataSourceException("dynamic-datasource can not find primary datasource");
}
2.5.1.4. Picking a data source from a group

The key is determined by the dynamicDataSourceStrategy (dynamic data source strategy); the default is LoadBalanceDynamicDataSourceStrategy and it can be changed with spring.datasource.dynamic.strategy in the configuration file

There are two built-in strategies

  1. LoadBalanceDynamicDataSourceStrategy: the default, round-robin
  2. RandomDynamicDataSourceStrategy: random

To extend this, implement the DynamicDataSourceStrategy interface and its determineKey method, then point spring.datasource.dynamic.strategy at your class, for example:
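A hypothetical strategy that always pins a group to its first member (the class name is made up; I am assuming the interface lives in com.baomidou.dynamic.datasource.strategy):

import java.util.List;

import com.baomidou.dynamic.datasource.strategy.DynamicDataSourceStrategy;

public class FirstDataSourceStrategy implements DynamicDataSourceStrategy {

    @Override
    public String determineKey(List<String> dsNames) {
        // Always pick the first member of the group (illustrative only)
        return dsNames.get(0);
    }
}

It would then be enabled with spring.datasource.dynamic.strategy: com.example.FirstDataSourceStrategy.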

private Map<String, DataSource> dataSourceMap = new ConcurrentHashMap<>();
private DynamicDataSourceStrategy dynamicDataSourceStrategy;


/**
 * Determine the data source
 *
 * @return the datasource
 */
public DataSource determineDataSource() {
    return dataSourceMap.get(determineDsKey());
}

/**
 * Determine the data source key
 *
 * @return the name of the datasource
 */
public String determineDsKey() {
    return dynamicDataSourceStrategy.determineKey(new ArrayList<>(dataSourceMap.keySet()));
}
2.5.1.4.1.1. LoadBalanceDynamicDataSourceStrategy (load-balancing strategy)

/**
 * LoadBalance strategy to switch a database
 *
 * @author TaoYu Kanyuxia
 * @since 1.0.0
 */
public class LoadBalanceDynamicDataSourceStrategy implements DynamicDataSourceStrategy {

    /**
     * Load-balancing counter
     */
    private final AtomicInteger index = new AtomicInteger(0);

    @Override
    public String determineKey(List<String> dsNames) {
        return dsNames.get(Math.abs(index.getAndAdd(1) % dsNames.size()));
    }
}

2.6. Data source switching

Data sources can be switched in two ways

  1. Manually, by calling DynamicDataSourceContextHolder#push to set the value yourself (and poll to restore it when done)
  2. Automatically, via the @DS annotation and its AOP aspect

The rest of this section analyzes how switching with @DS works
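Before diving in, a minimal usage sketch (class and data source names are made up; slave refers to a group such as slave_1/slave_2):

import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.dynamic.datasource.toolkit.DynamicDataSourceContextHolder;
import org.springframework.stereotype.Service;

@Service
@DS("slave")                       // class level: all methods default to the slave group
public class OrderQueryService {

    @DS("master")                  // method level overrides class level
    public void readFromMaster() { /* ... queries here run against master ... */ }

    public void manualSwitch() {
        // Manual switching, equivalent to what the @DS interceptor does
        DynamicDataSourceContextHolder.push("master");
        try {
            // ... queries here run against master ...
        } finally {
            DynamicDataSourceContextHolder.poll();
        }
    }
}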

2.6.1. How @DS switching works

DynamicDataSourceAutoConfiguration imports DynamicDataSourceAopConfiguration (the dynamic data source AOP configuration class) via @Import, and that class registers the following beans

  • A DynamicDataSourceAnnotationAdvisor supporting the @DS annotation

  • A second DynamicDataSourceAnnotationAdvisor supporting the @DSTransactional annotation (not covered here)

We therefore analyze the DynamicDataSourceAnnotationAdvisor implementation, where

  • DynamicDataSourceAnnotationInterceptor is the advice applied by the aspect; see its invoke implementation
  • DynamicDataSourceAnnotationAdvisor is the advisor; it builds a pointcut that decides which methods are advised
2.6.1.1. DynamicDataSourceAnnotationAdvisor (the advisor)

This class backs the AOP support; during proxy creation Spring consults its pointcut, so the interesting part is how the pointcut is built in the constructor

public DynamicDataSourceAnnotationAdvisor(@NonNull MethodInterceptor advice,
                                          @NonNull Class<? extends Annotation> annotation) {
    this.advice = advice;
    this.annotation = annotation;
    // Build the pointcut
    this.pointcut = buildPointcut();
}
2.6.1.1.1. Building the pointcut
  • AnnotationMatchingPointcut: provided by Spring, checks whether the annotation is present on the class
  • AnnotationMethodPoint: implemented by dynamic-datasource, checks whether the annotation is present on the method
  • ComposablePointcut: a composite pointcut that unions the two, so having the annotation on either the class or the method is enough
private Pointcut buildPointcut() {
    Pointcut cpc = new AnnotationMatchingPointcut(annotation, true);
    Pointcut mpc = new AnnotationMethodPoint(annotation);
    return new ComposablePointcut(cpc).union(mpc);
}
  • The AnnotationMethodPoint implementation
  • AnnotationMethodPoint implements the Pointcut interface; during AOP matching, the MethodMatcher returned by its getMethodMatcher method is asked, via matches, whether a method qualifies

The main logic of matches is

  1. Check whether the annotation is present on the current method; if so, it matches
  2. If the target class is a proxy class, it does not match, because the annotation cannot be present on a proxy class
  3. Resolve the most specific target method (the current method may be declared on an interface) and check whether the annotation is present on it

Annotation lookup uses AnnotatedElementUtils#hasAnnotation; when applied to a class it searches not only the class itself but also its superclasses and interfaces

private static class AnnotationMethodPoint implements Pointcut {

    private final Class<? extends Annotation> annotationType;

    public AnnotationMethodPoint(Class<? extends Annotation> annotationType) {
        Assert.notNull(annotationType, "Annotation type must not be null");
        this.annotationType = annotationType;
    }

    @Override
    public ClassFilter getClassFilter() {
        return ClassFilter.TRUE;
    }

    @Override
    public MethodMatcher getMethodMatcher() {
        return new AnnotationMethodMatcher(annotationType);
    }

    private static class AnnotationMethodMatcher extends StaticMethodMatcher {
        private final Class<? extends Annotation> annotationType;

        public AnnotationMethodMatcher(Class<? extends Annotation> annotationType) {
            this.annotationType = annotationType;
        }

        @Override
        public boolean matches(Method method, Class<?> targetClass) {
            // Check whether the annotation is present on the current method
            if (matchesMethod(method)) {
                return true;
            }
            // Proxy classes are not supported
            if (Proxy.isProxyClass(targetClass)) {
                return false;
            }
            // Resolve the target method; the current method may be declared on an interface
            Method specificMethod = AopUtils.getMostSpecificMethod(method, targetClass);
            return (specificMethod != method && matchesMethod(specificMethod));
        }

        private boolean matchesMethod(Method method) {
            return AnnotatedElementUtils.hasAnnotation(method, this.annotationType);
        }
    }
}
2.6.1.2. Resolving the data source name
  • If you have read the Spring AOP internals, you know that any class or method matched by the pointcut ends up behind a proxy (JDK or CGLIB). When the proxy is invoked, it runs the interceptors collected from the advisors, and since DynamicDataSourceAnnotationInterceptor implements MethodInterceptor, every call to an advised method goes through DynamicDataSourceAnnotationInterceptor#invoke
  • This method does the following
    1. Determine the data source key
    2. Push the key onto the dynamic data source context holder
    3. Execute the target method
    4. Finally remove the key again
@Override
public Object invoke(MethodInvocation invocation) throws Throwable {
    // Determine the data source key
    String dsKey = determineDatasourceKey(invocation);
    // Push the key onto the dynamic data source context holder
    DynamicDataSourceContextHolder.push(dsKey);
    try {
        // Execute the target method
        return invocation.proceed();
    } finally {
        // Remove the data source key
        DynamicDataSourceContextHolder.poll();
    }
}
2.6.1.2.1. Determining the data source key

The method below has two steps

  1. Get the data source name, which in essence means finding the @DS annotation and reading its value
  2. Resolve the data source (only when the value starts with #)
private static final String DYNAMIC_PREFIX = "#";

private String determineDatasourceKey(MethodInvocation invocation) {
    // Get the data source name, i.e. find the @DS annotation and read its value
    String key = dataSourceClassResolver.findKey(invocation.getMethod(), invocation.getThis(), DS.class);
    // Resolve the data source
    return key.startsWith(DYNAMIC_PREFIX) ? dsProcessor.determineDatasource(invocation, key) : key;
}
2.6.1.2.1.1. Finding the data source name
  • If the declaring class is Object, return immediately
  • Try the cache; on a hit, return the cached value
  • Otherwise compute the data source key
public String findKey(Method method, Object targetObject, Class<? extends Annotation> annotation) {
    if (method.getDeclaringClass() == Object.class) {
        return "";
    }
    // Look in the cache first
    Object cacheKey = new MethodClassKey(method, targetObject.getClass());
    String ds = this.dsCache.get(cacheKey);
    if (ds == null) {
        // Compute the data source key
        BasicAttribute<String> dsOperation = computeDatasource(method, targetObject, annotation);
        if (dsOperation == null) {
            ds = "";
        } else {
            ds = dsOperation.getDataOperation();
        }
        this.dsCache.put(cacheKey, ds);
    }
    return ds;
}
  • Computing the data source key

The lookup order is

  1. The annotation on the current method
  2. The annotation on the bridged method
  3. The annotation on the current class and its superclasses
  4. mybatis-plus / mybatis-spring mapper support
/**
 * Annotation lookup order
 * 1. the current method
 * 2. the bridged method
 * 3. the current class, walking up to Object
 * 4. mybatis-plus / mybatis-spring support
 *
 * @param method       the method
 * @param targetObject the target object
 * @return ds
 */
private <T> BasicAttribute<T> computeDatasource(Method method, Object targetObject, Class<? extends Annotation> annotation) {
    if (allowedPublicOnly && !Modifier.isPublic(method.getModifiers())) {
        return null;
    }
    //1. Look on the current (possibly interface) method
    BasicAttribute<T> dsAttr = findDataSourceAttribute(method, annotation);
    if (dsAttr != null) {
        return dsAttr;
    }
    Class<?> targetClass = targetObject.getClass();
    Class<?> userClass = ClassUtils.getUserClass(targetClass);
    // With a JDK proxy, resolve the implementing class's declaration. method: interface method, specificMethod: implementation method
    Method specificMethod = ClassUtils.getMostSpecificMethod(method, userClass);

    specificMethod = BridgeMethodResolver.findBridgedMethod(specificMethod);
    //2. Look on the implementing class's method
    dsAttr = findDataSourceAttribute(specificMethod, annotation);
    if (dsAttr != null) {
        return dsAttr;
    }
    // Look on the class that declares the method
    dsAttr = findDataSourceAttribute(userClass, annotation);
    if (dsAttr != null && ClassUtils.isUserLevelMethod(method)) {
        return dsAttr;
    }
    //since 3.4.1 look on the interfaces, first match wins
    for (Class<?> interfaceClazz : ClassUtils.getAllInterfacesForClassAsSet(userClass)) {
        dsAttr = findDataSourceAttribute(interfaceClazz, annotation);
        if (dsAttr != null) {
            return dsAttr;
        }
    }
    // If a bridged method exists
    if (specificMethod != method) {
        // Look on the bridged method
        dsAttr = findDataSourceAttribute(method, annotation);
        if (dsAttr != null) {
            return dsAttr;
        }
        // Look on the class declaring the bridged method
        dsAttr = findDataSourceAttribute(method.getDeclaringClass(), annotation);
        if (dsAttr != null && ClassUtils.isUserLevelMethod(method)) {
            return dsAttr;
        }
    }
    // Fall back to the default data source lookup
    return getDefaultDataSourceAttr(targetObject, annotation);
}
    • Looking up the default data source

The main flow of this method is

  1. If the target class is not a proxy class, keep looking for the annotation up the superclass chain and return it when found
  2. The mybatis-plus / mybatis-spring lookup (I have not fully worked out how this part works)
private <T> BasicAttribute<T> getDefaultDataSourceAttr(Object targetObject, Class<? extends Annotation> annotation) {
    Class<?> targetClass = targetObject.getClass();
    // If not a proxy class, start from the current class and keep walking up the superclasses
    if (!Proxy.isProxyClass(targetClass)) {
        Class<?> currentClass = targetClass;
        while (currentClass != Object.class) {
            BasicAttribute<T> datasourceAttr = findDataSourceAttribute(currentClass, annotation);
            if (datasourceAttr != null) {
                return datasourceAttr;
            }
            // Move to the superclass
            currentClass = currentClass.getSuperclass();
        }
    }
    // mybatis-plus / mybatis-spring lookup
    if (mpEnabled) {
        final Class<?> clazz = getMapperInterfaceClass(targetObject);
        if (clazz != null) {
            BasicAttribute<T> datasourceAttr = findDataSourceAttribute(clazz, annotation);
            if (datasourceAttr != null) {
                return datasourceAttr;
            }
            // Try the parent interface
            return findDataSourceAttribute(clazz.getSuperclass(), annotation);
        }
    }
    return null;
}
2.6.1.2.1.2. Resolving dynamic data source names
  • The @DS value may not be a literal data source name; it may need to be resolved from the request header, the session or a SpEL expression, so any value starting with # is resolved further, otherwise it is returned as-is
  • Resolution is done by DsProcessor#determineDatasource, a chain of responsibility with the following implementations
    • DsJakartaHeaderProcessor: resolves the data source name from a request header
    • DsJakartaSessionProcessor: resolves it from the session
    • DsSpelExpressionProcessor: resolves it from a SpEL expression
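A few hedged usage sketches of such dynamic keys (the header, session attribute and argument names are made up; the SpEL case assumes parameter names are available at runtime):

import com.baomidou.dynamic.datasource.annotation.DS;

public class TenantOrderService {

    // Resolved from the request header named "tenantDs" (DsJakartaHeaderProcessor)
    @DS("#header.tenantDs")
    public void byHeader() { /* ... */ }

    // Resolved from the session attribute named "tenantDs" (DsJakartaSessionProcessor)
    @DS("#session.tenantDs")
    public void bySession() { /* ... */ }

    // Resolved from a SpEL expression over the method arguments (DsSpelExpressionProcessor)
    @DS("#tenant.dsName")
    public void bySpel(Tenant tenant) { /* ... */ }

    // Hypothetical argument type used by the SpEL example
    public record Tenant(String dsName) { }
}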
public String determineDatasource(MethodInvocation invocation, String key) {
    // Does this processor match the key?
    if (matches(key)) {
        // Resolve the data source
        String datasource = doDetermineDatasource(invocation, key);
        if (datasource == null && nextProcessor != null) {
            return nextProcessor.determineDatasource(invocation, key);
        }
        return datasource;
    }
    // Otherwise let the next processor try
    if (nextProcessor != null) {
        return nextProcessor.determineDatasource(invocation, key);
    }
    return null;
}
  • DsJakartaHeaderProcessor

It reads the data source name from the request header

public class DsJakartaHeaderProcessor extends DsProcessor {

    /**
     * header prefix
     */
    private static final String HEADER_PREFIX = "#header";

    @Override
    public boolean matches(String key) {
        return key.startsWith(HEADER_PREFIX);
    }

    @Override
    public String doDetermineDatasource(MethodInvocation invocation, String key) {
        HttpServletRequest request = ((ServletRequestAttributes) RequestContextHolder.getRequestAttributes()).getRequest();
        return request.getHeader(key.substring(8));
    }
}

2.7. Local transactions

  • dynamic-datasource supports local multi-datasource transactions; the core idea is that if an exception occurs, every participating data source rolls back, otherwise all of them commit
  • A local transaction is started with @DSTransactional; being an annotation, it is of course implemented with AOP. The advisor is again DynamicDataSourceAnnotationAdvisor, which was analyzed above, so we go straight to the advice DynamicLocalTransactionInterceptor#invoke
  • In terms of ordering, the transaction aspect runs first, and database connections are obtained afterwards
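A minimal, hypothetical usage sketch (the service classes and data source names are made up):

import com.baomidou.dynamic.datasource.annotation.DSTransactional;
import org.springframework.stereotype.Service;

@Service
public class TransferService {

    private final OrderService orderService;   // its methods are annotated with @DS("order")
    private final StockService stockService;   // its methods are annotated with @DS("stock")

    public TransferService(OrderService orderService, StockService stockService) {
        this.orderService = orderService;
        this.stockService = stockService;
    }

    @DSTransactional
    public void placeOrder() {
        orderService.createOrder();   // runs on the "order" data source
        stockService.reduceStock();   // runs on the "stock" data source
        // If either call throws, the connections on both data sources are rolled back
    }
}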

2.7.1. The aspect

  • Go straight to the advice method DynamicLocalTransactionInterceptor#invoke
  • The transaction is executed through TransactionalExecutor#execute, driven by transactionalTemplate.execute
@Override
public Object invoke(final MethodInvocation methodInvocation) throws Throwable {
    final Method method = methodInvocation.getMethod();

    TransactionalExecutor transactionalExecutor = new TransactionalExecutor() {
        @Override
        public Object execute() throws Throwable {
            // Execute the target method
            return methodInvocation.proceed();
        }

        @Override
        public TransactionalInfo getTransactionInfo() {
            // Get the transaction info
            return dataSourceClassResolver.findTransactionalInfo(method, methodInvocation.getThis(), DSTransactional.class);
        }
    };
    // Execute
    return transactionalTemplate.execute(transactionalExecutor);
}
  • Execution

This method is a transaction execution template that supports the propagation behaviors; the most common ones are

  1. REQUIRES_NEW: start a new transaction; if one already exists, suspend it, run in the new transaction, and resume the original one after the target method completes
  2. REQUIRED: the default; join the current transaction, or start a new one if none exists. This is the most common choice
  3. NESTED: if a transaction exists, run inside a nested transaction (via a savepoint); otherwise start a new transaction
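For example, assuming @DSTransactional exposes a propagation attribute of type DsPropagation (which the transactionInfo.propagation read in the code below suggests; the class and its package are my assumption), a method could opt into REQUIRES_NEW like this:

import com.baomidou.dynamic.datasource.annotation.DSTransactional;
import com.baomidou.dynamic.datasource.tx.DsPropagation;

public class AuditService {

    // Sketch: run the audit log in its own transaction so that it commits
    // even if the caller's surrounding @DSTransactional rolls back.
    @DSTransactional(propagation = DsPropagation.REQUIRES_NEW)
    public void writeAuditLog(String message) {
        // ... insert into the audit table ...
    }
}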
public Object execute(TransactionalExecutor transactionalExecutor) throws Throwable {
    TransactionalInfo transactionInfo = transactionalExecutor.getTransactionInfo();
    DsPropagation propagation = transactionInfo.propagation;
    SuspendedResourcesHolder suspendedResourcesHolder = null;
    try {
        switch (propagation) {
            case NOT_SUPPORTED:
                // Run non-transactionally
                if (existingTransaction()) {
                    suspendedResourcesHolder = suspend();
                }
                return transactionalExecutor.execute();
            case REQUIRES_NEW:
                // Start a new transaction; if one already exists, suspend it first
                if (existingTransaction()) {
                    suspendedResourcesHolder = suspend();
                }
                // Continue and execute with new transaction
                break;
            case SUPPORTS:
                // Join the current transaction; if there is none, run non-transactionally
                if (!existingTransaction()) {
                    return transactionalExecutor.execute();
                }
                // Continue and execute with new transaction
                break;
            case REQUIRED:
                // Default: join the current transaction, or start a new one if none exists. The most common choice.
                break;
            case NEVER:
                // Run non-transactionally; if a transaction exists, throw an exception.
                if (existingTransaction()) {
                    throw new TransactionException("Existing transaction found for transaction marked with propagation never");
                } else {
                    // Execute without transaction and return.
                    return transactionalExecutor.execute();
                }
            case MANDATORY:
                // Join the current transaction; if there is none, throw an exception.
                if (!existingTransaction()) {
                    throw new TransactionException("No existing transaction found for transaction marked with propagation 'mandatory'");
                }
                // Continue and execute with current transaction.
                break;
            case NESTED:
                // If a transaction exists, run inside a nested transaction (savepoint); otherwise start a new one.
                if (existingTransaction()) {
                    ConnectionFactory.createSavepoint(TransactionContext.getXID());
                }
                // Continue and execute with current transaction.
                break;
            default:
                throw new TransactionException("Not Supported Propagation:" + propagation);
        }
        // Run the actual transactional execution
        return doExecute(transactionalExecutor);
    } finally {
        resume(suspendedResourcesHolder);
    }
}
2.7.1.1. Executing the transaction (doExecute)

The main flow of this method is:

  1. If a transaction already exists and the propagation is not NESTED, join it by invoking the target method directly
  2. Otherwise start a transaction
  3. If an exception occurs and its type requires rollback, roll back
  4. Otherwise commit the transaction
  5. In addition, the TransactionSynchronization callbacks are invoked to report the transaction's state
private Object doExecute(TransactionalExecutor transactionalExecutor) throws Throwable {
    TransactionalInfo transactionInfo = transactionalExecutor.getTransactionInfo();
    DsPropagation propagation = transactionInfo.propagation;
    // A transaction already exists and the propagation is not NESTED: join the existing transaction
    if (!DsStrUtils.isEmpty(TransactionContext.getXID()) && !propagation.equals(DsPropagation.NESTED)) {
        return transactionalExecutor.execute();
    }
    boolean state = true;
    Object o;
    // Start the transaction (bind an xid to the current thread)
    String xid = LocalTxUtil.startTransaction();
    boolean shouldInvokeAction = TransactionContext.getSynchronizations().isEmpty();
    try {
        o = transactionalExecutor.execute();
    } catch (Exception e) {
        // Mark for rollback if the exception type requires it
        state = !isRollback(e, transactionInfo);
        throw e;
    } finally {
        invokeBeforeCompletion(shouldInvokeAction);
        if (state) {
            invokeBeforeCommit(shouldInvokeAction);
            // Commit the transaction
            LocalTxUtil.commit(xid);
            invokeAfterCommit(shouldInvokeAction);
            invokeAfterCompletion(TransactionSynchronization.STATUS_COMMITTED, shouldInvokeAction);
        } else {
            // Roll back the transaction
            LocalTxUtil.rollback(xid);
            invokeAfterCompletion(TransactionSynchronization.STATUS_ROLLED_BACK, shouldInvokeAction);
        }
    }
    return o;
}
2.7.1.1.1. Suspending a transaction

The current transaction id is read; if one exists, it is unbound from the current thread and stored in a SuspendedResourcesHolder so it can be restored later.

public SuspendedResourcesHolder suspend() {
    String xid = TransactionContext.getXID();
    if (xid != null) {
        if (log.isInfoEnabled()) {
            log.info("Suspending current transaction, xid = {}", xid);
        }
        // Unbind the transaction id from the current thread
        TransactionContext.unbind(xid);
        // Keep the transaction id so it can be resumed later
        return new SuspendedResourcesHolder(xid);
    } else {
        return null;
    }
}
2.7.1.1.2. Starting a transaction

The current transaction id is fetched from TransactionContext; if there is none, a new one is generated and then bound to TransactionContext.

public static String startTransaction() {
    String xid = TransactionContext.getXID();
    if (!DsStrUtils.isEmpty(xid)) {
        log.debug("dynamic-datasource exist local tx [{}]", xid);
    } else {
        // Generate a new transaction id and bind it to the current thread
        xid = randomUUID().toString();
        TransactionContext.bind(xid);
        log.debug("dynamic-datasource start local tx [{}]", xid);
    }
    return xid;
}
2.7.1.1.2.1. Obtaining the transaction id

TransactionContext keeps its state in ThreadLocals, so the transaction id is bound to the current thread:

  1. CONTEXT_HOLDER: holds the current thread's transaction id
  2. SYNCHRONIZATION_HOLDER: holds the current thread's transaction synchronizations
// Transaction id of the current thread
private static final ThreadLocal<String> CONTEXT_HOLDER = new ThreadLocal<>();
// Transaction synchronizations of the current thread
private static final ThreadLocal<Set<TransactionSynchronization>> SYNCHRONIZATION_HOLDER =
        ThreadLocal.withInitial(LinkedHashSet::new);

/**
 * Gets xid.
 *
 * @return the transaction id
 */
public static String getXID() {
    String xid = CONTEXT_HOLDER.get();
    if (!DsStrUtils.isEmpty(xid)) {
        return xid;
    }
    return null;
}

public static String bind(String xid) {
    CONTEXT_HOLDER.set(xid);
    return xid;
}
2.7.1.1.3. Committing / rolling back the transaction

Both commit and rollback are implemented through ConnectionFactory#notify.

public static void commit(String xid) throws Exception {
    boolean hasSavepoint = ConnectionFactory.hasSavepoint(xid);
    try {
        ConnectionFactory.notify(xid, true);
    } finally {
        if (!hasSavepoint) {
            log.debug("dynamic-datasource commit local tx [{}]", TransactionContext.getXID());
            TransactionContext.remove();
        }
    }
}

From the source below we can see:

  1. If savepoints exist (a nested transaction):
    1. On commit, all savepoints are released
    2. On rollback, all savepoints are rolled back
  2. If no savepoints exist, every connection is committed or rolled back according to the state flag
/**
 * Connections held per thread; the outer key is the transaction id, the inner map's key is the data source name
 */
private static final ThreadLocal<Map<String, Map<String, ConnectionProxy>>> CONNECTION_HOLDER =
        new ThreadLocal<Map<String, Map<String, ConnectionProxy>>>() {
            @Override
            protected Map<String, Map<String, ConnectionProxy>> initialValue() {
                return new ConcurrentHashMap<>();
            }
        };
/**
 * Savepoints held per thread, keyed by transaction id
 */
private static final ThreadLocal<Map<String, List<SavePointHolder>>> SAVEPOINT_CONNECTION_HOLDER =
        new ThreadLocal<Map<String, List<SavePointHolder>>>() {
            @Override
            protected Map<String, List<SavePointHolder>> initialValue() {
                return new ConcurrentHashMap<>();
            }
        };

public static void notify(String xid, Boolean state) throws Exception {
    Exception exception = null;
    Map<String, Map<String, ConnectionProxy>> concurrentHashMap = CONNECTION_HOLDER.get();
    Map<String, List<SavePointHolder>> savePointMap = SAVEPOINT_CONNECTION_HOLDER.get();
    if (CollectionUtils.isEmpty(concurrentHashMap)) {
        return;
    }
    boolean hasSavepoint = hasSavepoint(xid);
    // All savepoints registered for this transaction id on the current thread
    List<SavePointHolder> savePointHolders = savePointMap.get(xid);
    // All connection proxies participating in this transaction
    Map<String, ConnectionProxy> connectionProxyMap = concurrentHashMap.get(xid);
    try {
        // Savepoints present means a nested transaction.
        if (hasSavepoint) {
            try {
                if (state) {
                    // Commit: release all savepoints
                    Iterator<SavePointHolder> iterator = savePointHolders.iterator();
                    while (iterator.hasNext()) {
                        SavePointHolder savePointHolder = iterator.next();
                        if (savePointHolder.releaseSavepoint()) {
                            iterator.remove();
                        }
                    }
                } else {
                    // Rollback: roll back every savepoint
                    List<ConnectionProxy> markedConnectionProxy = new ArrayList<>();
                    Iterator<SavePointHolder> iterator = savePointHolders.iterator();
                    while (iterator.hasNext()) {
                        SavePointHolder savePointHolder = iterator.next();
                        ConnectionProxy connectionProxy = savePointHolder.getConnectionProxy();
                        markedConnectionProxy.add(connectionProxy);
                        // Roll back this savepoint
                        if (savePointHolder.rollbackSavePoint()) {
                            iterator.remove();
                        }
                    }

                    // Walk all connection proxies of this thread; any proxy not covered by a savepoint is rolled back directly
                    Iterator<Map.Entry<String, ConnectionProxy>> entryIterator = connectionProxyMap.entrySet().iterator();
                    while (entryIterator.hasNext()) {
                        Map.Entry<String, ConnectionProxy> connectionProxyEntry = entryIterator.next();
                        ConnectionProxy value = connectionProxyEntry.getValue();
                        if (!markedConnectionProxy.contains(value)) {
                            value.rollback();
                            entryIterator.remove();
                        }
                    }
                }
            } catch (SQLException e) {
                exception = e;
            }
        } else {
            // No savepoints: commit or roll back every connection according to state
            for (ConnectionProxy connectionProxy : connectionProxyMap.values()) {
                try {
                    if (connectionProxy != null) {
                        connectionProxy.notify(state);
                    }
                } catch (SQLException e) {
                    exception = e;
                }

            }
        }
    } finally {
        // Clear the data bound to this transaction id on the current thread
        if (!hasSavepoint) {
            concurrentHashMap.remove(xid);
            savePointMap.remove(xid);
        }
        if (exception != null) {
            throw exception;
        }
    }
}
2.7.1.1.3.1. Commit or rollback (ConnectionProxy#notify)
public void notify(Boolean commit) throws SQLException {
    try {
        // Commit
        if (commit) {
            connection.commit();
        } else {
            // Rollback
            connection.rollback();
        }
    } catch (SQLException e) {
        log.error(e.getLocalizedMessage(), e);
        throw e;
    } finally {
        try {
            connection.close();
        } catch (SQLException e2) {
            log.error("db connection close failed", e2);
        }
    }
}
2.7.1.1.4. Resuming a transaction

If a transaction was suspended, the previous transaction is resumed after execution completes: its id is taken from the SuspendedResourcesHolder and re-bound to the transaction context.

private void resume(SuspendedResourcesHolder suspendedResourcesHolder) {
    if (suspendedResourcesHolder != null) {
        String xid = suspendedResourcesHolder.getXid();
        // Re-bind the suspended transaction id
        TransactionContext.bind(xid);
    }
}

2.7.2. Obtaining the database connection

  • As we know, the essence of a transaction is turning off auto-commit, routing every operation through the same connection, and finally committing or rolling back. So this again comes down to obtaining the connection in AbstractRoutingDataSource#getConnection (a plain-JDBC sketch follows the method below)
  • The transaction id is fetched first; if it exists, the connection is looked up in the connection factory, and if none is found there a connection proxy is created and registered in the factory
public Connection getConnection() throws SQLException {
    // Fetch the transaction id of the current thread
    String xid = TransactionContext.getXID();
    if (DsStrUtils.isEmpty(xid)) {
        return determineDataSource().getConnection();
    } else {
        String ds = DynamicDataSourceContextHolder.peek();
        ds = DsStrUtils.isEmpty(ds) ? getPrimary() : ds;
        // Look up an existing connection in the connection factory
        ConnectionProxy connection = ConnectionFactory.getConnection(xid, ds);
        // If none exists, determine the target data source and wrap its connection in a proxy
        return connection == null ? getConnectionProxy(xid, ds, determineDataSource().getConnection()) : connection;
    }
}
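To make the "one shared connection with auto-commit off" idea concrete, this is roughly what the registered ConnectionProxy amounts to in plain JDBC terms (a simplified sketch, not the library's code):
import java.sql.Connection;
import javax.sql.DataSource;

public void manualTransaction(DataSource targetDataSource) throws Exception {
    Connection connection = targetDataSource.getConnection();
    connection.setAutoCommit(false);   // done in ConnectionFactory.putConnection when the proxy is registered
    try {
        // ... every statement of the transaction reuses this single connection ...
        connection.commit();           // LocalTxUtil.commit -> ConnectionProxy.notify(true)
    } catch (Exception e) {
        connection.rollback();         // LocalTxUtil.rollback -> ConnectionProxy.notify(false)
        throw e;
    } finally {
        connection.close();            // ConnectionProxy.notify closes the real connection afterwards
    }
}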
2.7.2.1. Fetching the transaction id
public static String getXID() {
    String xid = CONTEXT_HOLDER.get();
    if (!DsStrUtils.isEmpty(xid)) {
        return xid;
    }
    return null;
}
2.7.2.2. Fetching the connection proxy from the connection factory

The connection proxy is looked up on the current thread by transaction id and data source name.

private static final ThreadLocal<Map<String, Map<String, ConnectionProxy>>> CONNECTION_HOLDER =
            new ThreadLocal<Map<String, Map<String, ConnectionProxy>>>() {
                @Override
                protected Map<String, Map<String, ConnectionProxy>> initialValue() {
                    return new ConcurrentHashMap<>();
                }
            };

public static ConnectionProxy getConnection(String xid, String ds) {
    Map<String, Map<String, ConnectionProxy>> concurrentHashMap = CONNECTION_HOLDER.get();
    Map<String, ConnectionProxy> connectionProxyMap = concurrentHashMap.get(xid);
    if (CollectionUtils.isEmpty(connectionProxyMap)) {
        return null;
    }
    return connectionProxyMap.get(ds);
}
2.7.2.3. Creating the connection proxy

A connection proxy is created first and then registered in the connection factory.

private Connection getConnectionProxy(String xid, String ds, Connection connection) {
    ConnectionProxy connectionProxy = new ConnectionProxy(connection, ds);
    // Register the proxy in the connection factory
    ConnectionFactory.putConnection(xid, ds, connectionProxy);
    return connectionProxy;
}
2.7.2.3.1.1. Registering the connection proxy in the connection factory

The connection proxy is stored in the connection factory, and auto-commit is disabled on it, which is what starts the transaction.

public static void putConnection(String xid, String ds, ConnectionProxy connection) {
    Map<String, Map<String, ConnectionProxy>> concurrentHashMap = CONNECTION_HOLDER.get();
    Map<String, ConnectionProxy> connectionProxyMap = concurrentHashMap.get(xid);
    if (connectionProxyMap == null) {
        connectionProxyMap = new ConcurrentHashMap<>();
        concurrentHashMap.put(xid, connectionProxyMap);
    }
    if (!connectionProxyMap.containsKey(ds)) {
        try {
            // Disable auto-commit to start the transaction
            connection.setAutoCommit(false);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        connectionProxyMap.put(ds, connection);
    }
}

2.8. Destroying the data sources

  • Destruction again involves the DynamicRoutingDataSource class: because it implements the DisposableBean interface, Spring calls its destroy method when the context shuts down
  • Internally it iterates over all data sources and closes each of them through a DefaultDataSourceDestroyer
@Override
public void destroy() {
    log.info("dynamic-datasource start closing ....");
    for (Map.Entry<String, DataSource> item : dataSourceMap.entrySet()) {
        // Close this data source
        closeDataSource(item.getKey(), item.getValue(), false);
    }
    log.info("dynamic-datasource all closed success,bye");
}


private void closeDataSource(String ds, DataSource dataSource, boolean graceDestroy) {
    try {
        DataSource realDataSource = null;
        if (dataSource instanceof ItemDataSource) {
            realDataSource = ((ItemDataSource) dataSource).getRealDataSource();
        } else {
            if (seata) {
                if (dataSource instanceof DataSourceProxy) {
                    DataSourceProxy dataSourceProxy = (DataSourceProxy) dataSource;
                    realDataSource = dataSourceProxy.getTargetDataSource();
                }
            }
            if (p6spy) {
                if (dataSource instanceof P6DataSource) {
                    Field realDataSourceField = P6DataSource.class.getDeclaredField("realDataSource");
                    realDataSourceField.setAccessible(true);
                    realDataSource = (DataSource) realDataSourceField.get(dataSource);
                }
            }
        }

        if (null == realDataSource) {
            realDataSource = dataSource;
        }
        if (null != realDataSource) {
            DataSourceDestroyer destroyer = new DefaultDataSourceDestroyer();
            if (graceDestroy) {
                destroyer.asyncDestroy(ds, realDataSource);
            } else {
                destroyer.destroy(ds, realDataSource);
            }
        }
    } catch (Exception e) {
        log.warn("dynamic-datasource closed datasource named [{}] failed", ds, e);
    }
}

2.8.1. Closing a data source

Closing comes in two flavors: synchronous and asynchronous.

2.8.1.1. Synchronous close

A synchronous close simply invokes the data source's close method (found via reflection).

public void destroy(String name, DataSource realDataSource) {
    Class<? extends DataSource> clazz = realDataSource.getClass();
    try {
        Method closeMethod = ReflectionUtils.findMethod(clazz, "close");
        if (closeMethod != null) {
            closeMethod.invoke(realDataSource);
            log.info("dynamic-datasource close the datasource named [{}] success,", name);
        }
    } catch (IllegalAccessException | InvocationTargetException e) {
        log.warn("dynamic-datasource close the datasource named [{}] failed,", name, e);
    }
}
2.8.1.2. Asynchronous close

A single-thread executor is created, and graceDestroy is run on it to perform the destruction.

public void asyncDestroy(String name, DataSource dataSource) {
    log.info("dynamic-datasource start asynchronous task to close the datasource named [{}],", name);
    ExecutorService executor = Executors.newSingleThreadExecutor(r -> {
        Thread thread = new Thread(r);
        thread.setName(THREAD_NAME);
        return thread;
    });
    // Close asynchronously
    executor.execute(() -> graceDestroy(name, dataSource));
    executor.shutdown();
}
2.8.1.2.1. Graceful close

  • The detector matching the data source is obtained; while there are still active connections, the thread sleeps until activity stops or a timeout is reached, and only then is the data source destroyed
  • Data source activity detectors (DataSourceActiveDetector):
    • HikariDataSourceActiveDetector: detects activity on Hikari data sources
    • DruidDataSourceActiveDetector: detects activity on Druid data sources
    • Dbcp2DataSourceActiveDetector: detects activity on DBCP2 data sources
private void graceDestroy(String name, DataSource dataSource) {
    try {
        // Find the activity detector that supports this data source
        DataSourceActiveDetector detector = detectors.stream()
                .filter(x -> x.support(dataSource))
                .findFirst()
                .orElse(null);
        long start = System.currentTimeMillis();
        // No detector found, or active connections still exist: keep waiting
        while (detector == null || detector.containsActiveConnection(dataSource)) {
            // make sure the datasource close
            if (System.currentTimeMillis() - start > TIMEOUT_CLOSE) {
                break;
            }
            try {
                Thread.sleep(100L);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
    } catch (Exception e) {
        log.warn("dynamic-datasource check the datasource named [{}] contains active connection failed,", name, e);
    }
    // Destroy the data source
    destroy(name, dataSource);
}
2.8.1.2.1.1. HikariDataSourceActiveDetector

Let's look at how the Hikari detector checks for active connections. The class has two methods:

  1. support: whether this detector supports the given data source
  2. containsActiveConnection: whether the data source still has active connections
  • support

The check is simply based on the fully qualified class name of the data source:

@Override
public boolean support(DataSource dataSource) {
    return "com.zaxxer.hikari.HikariDataSource".equals(dataSource.getClass().getName());
}
  • containsActiveConnection

The data source's getHikariPoolMXBean method is invoked reflectively to obtain the pool; if a pool exists, its getActiveConnections method is then called to see whether any connections are active:

@Override
@SneakyThrows(ReflectiveOperationException.class)
public boolean containsActiveConnection(DataSource dataSource) {
    Object hikariPoolMXBean = dataSource.getClass().getMethod("getHikariPoolMXBean").invoke(dataSource);
    int activeCount = null == hikariPoolMXBean
            ? 0
            : (int) hikariPoolMXBean.getClass().getMethod("getActiveConnections").invoke(hikariPoolMXBean);
    return activeCount != 0;
}
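Other pools follow the same two-method pattern. A hypothetical detector for some custom pool could look like the sketch below; the pool class com.example.pool.MyPoolDataSource and its getActiveCount() accessor are assumptions, not part of the library:
import javax.sql.DataSource;

public class MyPoolDataSourceActiveDetector implements DataSourceActiveDetector {

    @Override
    public boolean support(DataSource dataSource) {
        // Match by class name, so the detector has no hard dependency on the pool.
        return "com.example.pool.MyPoolDataSource".equals(dataSource.getClass().getName());
    }

    @Override
    public boolean containsActiveConnection(DataSource dataSource) {
        // Hypothetical accessor; use whatever active-connection metric your pool exposes.
        try {
            Object count = dataSource.getClass().getMethod("getActiveCount").invoke(dataSource);
            return ((Number) count).intValue() != 0;
        } catch (ReflectiveOperationException e) {
            return false;
        }
    }
}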

3. Summary

3.1. Difference between dynamic-datasource and an AbstractRoutingDataSource implementation

  • With AbstractRoutingDataSource, a subclass overrides determineCurrentLookupKey to tell Spring which data source to use, usually paired with a ThreadLocal (see the sketch below)
  • dynamic-datasource instead ships its own DataSource that switches internally: every connection is obtained through DynamicRoutingDataSource#getConnection, which routes to the matching underlying data source, so it can be seen as an extra wrapping layer
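For contrast, a minimal sketch of the first approach using Spring's own AbstractRoutingDataSource; the ThreadLocal holder and its methods are hypothetical glue code, not part of either library:
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

public class SimpleRoutingDataSource extends AbstractRoutingDataSource {

    // Hypothetical holder: callers choose the target data source name before running a query.
    private static final ThreadLocal<String> CURRENT = new ThreadLocal<>();

    public static void use(String dataSourceName) {
        CURRENT.set(dataSourceName);
    }

    public static void clear() {
        CURRENT.remove();
    }

    @Override
    protected Object determineCurrentLookupKey() {
        // null falls back to the default target data source configured on this bean.
        return CURRENT.get();
    }
}

The target data sources would still be registered through setTargetDataSources and setDefaultTargetDataSource when the bean is defined.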

3.2. How to support custom data source name resolution

From the earlier analysis, the data source name is resolved in DynamicDataSourceAnnotationInterceptor#determineDatasourceKey, which calls DsProcessor#determineDatasource. So in your own project you can redefine the DsProcessor bean: create a subclass of DsProcessor and implement its abstract methods, as in the sketch below.
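A hedged sketch of such an extension, assuming DsProcessor's abstract methods are matches(String key) and doDetermineDatasource(MethodInvocation invocation, String key) as in the built-in processors; the #tenant prefix and the TenantContext lookup are hypothetical:
import com.baomidou.dynamic.datasource.processor.DsProcessor;
import org.aopalliance.intercept.MethodInvocation;

public class TenantDsProcessor extends DsProcessor {

    private static final String TENANT_PREFIX = "#tenant";

    @Override
    public boolean matches(String key) {
        // Only handle @DS values that start with the custom prefix, e.g. @DS("#tenant").
        return key.startsWith(TENANT_PREFIX);
    }

    @Override
    public String doDetermineDatasource(MethodInvocation invocation, String key) {
        // Hypothetical lookup: map the current tenant to a data source name.
        return TenantContext.currentTenantDataSource();
    }
}

Registering it as the DsProcessor bean, as described above, makes @DS values with the custom prefix resolve through it.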

3.3. How to add / remove data sources at runtime

3.3.1. Adding a data source

  • From the earlier source analysis, data source initialization happens in DynamicRoutingDataSource#afterPropertiesSet, which creates the data sources and registers them through DynamicRoutingDataSource#addDataSource. So we can obtain the DynamicRoutingDataSource bean ourselves and call addDataSource
  • For example, to dynamically add a Hikari data source, the code looks like this. This first variant does not name a specific pool type; internally all data source creators are consulted and the matching one creates the data source
@Autowired
private DataSource dataSource;                     // the DynamicRoutingDataSource bean
@Autowired
private DefaultDataSourceCreator dataSourceCreator;

public Set<String> add(@Validated @RequestBody DataSourceDTO dto) {
    DataSourceProperty dataSourceProperty = new DataSourceProperty();
    BeanUtils.copyProperties(dto, dataSourceProperty);
    DynamicRoutingDataSource ds = (DynamicRoutingDataSource) dataSource;
    DataSource dataSource = dataSourceCreator.createDataSource(dataSourceProperty);
    ds.addDataSource(dto.getPoolName(), dataSource);
    return ds.getDataSources().keySet();
}
  • Option 2: use a specific creator (here HikariDataSourceCreator) directly
@Autowired
private HikariDataSourceCreator hikariDataSourceCreator;   // creator dedicated to Hikari pools

public Set<String> addHikariCP(@Validated @RequestBody DataSourceDTO dto) {
    DataSourceProperty dataSourceProperty = new DataSourceProperty();
    BeanUtils.copyProperties(dto, dataSourceProperty);
    dataSourceProperty.setLazy(true);
    DynamicRoutingDataSource ds = (DynamicRoutingDataSource) dataSource;
    DataSource dataSource = hikariDataSourceCreator.createDataSource(dataSourceProperty);
    ds.addDataSource(dto.getPoolName(), dataSource);
    return ds.getDataSources().keySet();
}

3.3.2. Removing a data source

public String remove(String name) {
    DynamicRoutingDataSource ds = (DynamicRoutingDataSource) dataSource;
    ds.removeDataSource(name);
    return "删除成功";
}

3.4. Can dynamic-datasource local transactions be mixed with Spring's own transactions?

It is not recommended unless you understand the transaction semantics very well. For example, under Spring's own transaction management every method in the transaction uses the same connection, so the data source cannot be switched inside it.

3.5. Besides yml, how else can data sources be defined?

You can write a class that implements the DynamicDataSourceProvider interface, implement its loadDataSources method, and register it as a bean in the IoC container; this lets you customize how data sources are loaded. If they need to be loaded from a database, extend AbstractJdbcDataSourceProvider.
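A minimal sketch of such a provider, assuming the interface's single loadDataSources() method returns a map from data source name to DataSource; the Hikari settings below are placeholders:
import com.baomidou.dynamic.datasource.provider.DynamicDataSourceProvider;
import com.zaxxer.hikari.HikariDataSource;

import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;

public class InMemoryDataSourceProvider implements DynamicDataSourceProvider {

    @Override
    public Map<String, DataSource> loadDataSources() {
        // Placeholder connection details; in practice they could come from anywhere,
        // e.g. a configuration table (see AbstractJdbcDataSourceProvider).
        HikariDataSource master = new HikariDataSource();
        master.setJdbcUrl("jdbc:mysql://localhost:3306/demo");
        master.setUsername("root");
        master.setPassword("******");
        master.setDriverClassName("com.mysql.cj.jdbc.Driver");

        Map<String, DataSource> dataSources = new HashMap<>();
        dataSources.put("master", master);
        return dataSources;
    }
}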

3.6. Key components of dynamic-datasource

  1. DynamicDataSourceAutoConfiguration: auto-configuration class that adapts the library to Spring

    1. DynamicRoutingDataSource: the dynamic routing DataSource, which routes to the relevant data source whenever a connection is obtained
  2. DynamicDataSourceCreatorAutoConfiguration: auto-configuration for the data source creators

    1. BasicDataSourceCreator: creates basic data sources

    2. JndiDataSourceCreator: creates JNDI data sources

    3. DruidDataSourceCreatorConfiguration

      1. DruidDataSourceCreator: creates Druid data sources
    4. HikariDataSourceCreatorConfiguration

      1. HikariDataSourceCreator: creates Hikari data sources
  3. DynamicDataSourceAopConfiguration: AOP configuration class that registers the advisors backing the @DS and @DSTransactional annotations

    1. DynamicDataSourceAnnotationInterceptor: advice for @DS; see its invoke method
    2. DynamicLocalTransactionInterceptor: advice for @DSTransactional; see its invoke method
  4. DynamicDataSourceAssistConfiguration: assist configuration class that wires up parameter decryption, the default data source creator, the transaction listener, and the data source provider

    1. YmlDynamicDataSourceProvider: yml-based data source provider; to extend it, add a class implementing the DynamicDataSourceProvider interface

    2. EncDataSourceInitEvent: decrypts encrypted database parameters

    3. DefaultDataSourceCreator: collects all DataSourceCreators and picks the matching one to create a data source

    4. DsTxEventListenerFactoryConfiguration

      1. DsTxEventListenerFactory: factory for data source transaction event listeners
  5. DynamicDataSourceProperties: property class bound to the Spring yml configuration, prefix spring.datasource.dynamic

  6. TransactionalTemplate: transaction template that runs the transactional method according to its propagation behavior

  7. DynamicDataSourceContextHolder: dynamic data source context holder, which keeps the current data source name

  8. DataSourceClassResolver: data source class resolver, which looks up the @DS and @DSTransactional annotations on classes and methods

  9. DsProcessor: supports resolving the data source name from request headers, the session, or SpEL expressions (see the sketch below)
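For example, with the built-in processors the @DS value can be resolved at runtime; a hedged sketch assuming the #header / #session prefixes documented for these processors (the tenantName attribute is a placeholder):
import com.baomidou.dynamic.datasource.annotation.DS;
import org.springframework.stereotype.Service;

@Service
public class TenantAwareQueryService {

    // Resolved by the header processor from the current HTTP request header "tenantName".
    @DS("#header.tenantName")
    public void queryByHeaderRouting() {
        // ...
    }

    // Resolved by the session processor from the HTTP session attribute "tenantName".
    @DS("#session.tenantName")
    public void queryBySessionRouting() {
        // ...
    }
}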