Spring Boot: configuring multiple data sources

 

1. First, enable auto-configuration when creating the application class

 

package com;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class Application {

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }
}

 

2. Next, the configuration file (application.properties)

######primary#############
datasource.primary.url=jdbc:sqlserver://xx.xx.xx.xx:1433;DatabaseName=PlayNowLog
datasource.primary.username=sa
datasource.primary.password=xxxxxx
datasource.primary.driverClassName=com.microsoft.sqlserver.jdbc.SQLServerDriver  

######secondary#############
datasource.secondary.url=jdbc:sqlserver://xx.xx.xx.xx:1433;DatabaseName=PlayNow_New
datasource.secondary.username=sa
datasource.secondary.password=xxxxxx
datasource.secondary.driverClassName=com.microsoft.sqlserver.jdbc.SQLServerDriver  

3. Then create the data source beans

import javax.sql.DataSource;

import org.springframework.boot.autoconfigure.jdbc.DataSourceBuilder; // org.springframework.boot.jdbc.DataSourceBuilder in Spring Boot 2.x
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;

@Configuration
public class GlobalDataConfiguration {
    @Bean(name="primaryDataSource")
    @Primary
    @ConfigurationProperties(prefix="datasource.primary")
    public DataSource primaryDataSource() {
        System.out.println("-------------------- primaryDataSource init ---------------------");
        return DataSourceBuilder.create().build();
    }  

    @Bean(name="secondaryDataSource")
    @ConfigurationProperties(prefix="datasource.secondary")
    public DataSource secondaryDataSource() {
        System.out.println("-------------------- secondaryDataSource init ---------------------");
        return DataSourceBuilder.create().build();
    }
} 
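
Once both DataSource beans are registered, any other bean can pick one explicitly with @Qualifier. As a minimal sketch that is not part of the original post (it assumes spring-boot-starter-jdbc, or an equivalent JDBC starter, is on the classpath), each data source could also be exposed through its own JdbcTemplate:

```
import javax.sql.DataSource;

import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.jdbc.core.JdbcTemplate;

@Configuration
public class JdbcTemplateConfiguration {

    // backed by the primary (PlayNowLog) data source
    @Bean(name = "primaryJdbcTemplate")
    @Primary
    public JdbcTemplate primaryJdbcTemplate(@Qualifier("primaryDataSource") DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }

    // backed by the secondary (PlayNow_New) data source
    @Bean(name = "secondaryJdbcTemplate")
    public JdbcTemplate secondaryJdbcTemplate(@Qualifier("secondaryDataSource") DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }
}
```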

4. Use the data sources in the DAO layer. UserDaoImpl below injects the secondary data source into the generic MyBatisBaseDao base class, whose source follows it.

@Component
public class UserDaoImpl<T extends com.sonychina.backend.entity.statistic.SysUser> extends MyBatisBaseDao<SysUser> implements UserDao {  

    @Autowired
    public UserDaoImpl(@Qualifier("secondaryDataSource") DataSource dataSource) {
        super(dataSource);
    }
}  
import java.util.List;
import java.util.Map;  

import javax.sql.DataSource;  

import org.apache.ibatis.session.SqlSession;  

import com.sonychina.backend.global.Constants;
import com.sonychina.backend.utility.GenericsUtils;
import com.sonychina.backend.utility.MapResultHandler;
import com.sonychina.backend.utility.MyBatisUtil;
import com.sonychina.backend.utility.PageView;  

public class MyBatisBaseDao<T>{
    private Class<T> type;
    private SqlSession session;  

    @SuppressWarnings("unchecked")
    public MyBatisBaseDao(DataSource dataSource){
        type = (Class<T>) GenericsUtils.getActualReflectArgumentClass(this.getClass());
        System.out.println("------------- BaseMybatisDao initialize--------------------------");
        System.out.println("------------- T:" + type.toString());
        try {
            MyBatisUtil myBatisUtil = MyBatisUtil.getInstance(dataSource);
            session = myBatisUtil.getSession();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }  

    private String getMethodPath(String methodType){
        return getMethodPath(methodType, "");
    }  

    private String getMethodPath(String methodType, String methodSuffix){
        return Constants.MYBATIS_MAPPER_PRIX + methodType + type.getSimpleName() + methodSuffix;
    }  

    public void save(T obj) {
        session.insert(getMethodPath("save"), obj);
    }  

    public void delete(T obj) {
        session.delete(getMethodPath("delete"), obj);
    }  

    public void update(T obj) {
        session.update(getMethodPath("update"), obj);
        //HashMap<String,Object> map = null;
    }  

    public T get(Integer id) {
        return session.selectOne(getMethodPath("get"),id);
    }  

    public List<T> getList(T entity){
        return session.selectList(getMethodPath("get", "List"), entity);
    }  

    public List<T> getListByAnyObject(Object entity){
        return session.selectList(getMethodPath("get", "List"), entity);
    }  

    /**
     *
     * @param entity
     * @param selectId id of the <select> tag in the mapper XML file
     * @return
     */
    public List<T> getList(T entity, String selectId){
        return session.selectList(selectId, entity);
    }  

    public List<T> getListByAnyObject(Object entity, String selectId){
        return session.selectList(selectId, entity);
    }  

    public List<Map<String, Object>> getMapList(Map<String, Object> map){
        MapResultHandler mh = new MapResultHandler();
        session.select(getMethodPath("get", "MapList"), map, mh);
        return mh.getMappedResults();
    }  

    /**
     *
     * @param map
     * @param selectId id of the <select> tag in the mapper XML file
     * @return List<Map<String, Object>>
     */
    public List<Map<String, Object>> getMapList(Map<String, Object> map, String selectId){
        MapResultHandler mh = new MapResultHandler();
        session.select(selectId, map, mh);
        return mh.getMappedResults();
    }  

    public List<Map<String, Object>> getMapList(T entity){
        MapResultHandler mh = new MapResultHandler();
        session.select(getMethodPath("get", "MapList"), entity, mh);
        return mh.getMappedResults();
    }  

    public List<Map<String, Object>> getMapList(T entity,String queryName){
        MapResultHandler mh = new MapResultHandler();
        session.select(queryName, entity, mh);
        return mh.getMappedResults();
    }  

    public Long getCount(Map<String, Object> pm){
        MapResultHandler mh = new MapResultHandler();
        session.select(getMethodPath("get", "Count"),pm, mh);
        return mh.getCount();
    }  

    /**
     *
     * @param pm
     * @param selectId id of the <select> tag in the mapper XML file
     * @return Long
     */
    public Long getCount(Map<String,Object> pm, String selectId){
        MapResultHandler mh = new MapResultHandler();
        session.select(selectId,pm, mh);
        return mh.getCount();
    }  

    /**
     * The map must contain a non-null "currentPageNum" key; if the page size is not the default 10,
     * it must also contain a non-null "pageShowCnt" key.
     * @param map
     * @return PageView
     */
    public PageView getPageList(Map<String, Object> map){  

        if(map == null || map.get("currentPageNum") == null){
            return null;
        } else{
            PageView page = null;
            Integer pageNum = Integer.valueOf(map.get("currentPageNum").toString());
            if(map.get("pageShowCnt") == null){
                page = new PageView(pageNum);
            } else {
                Integer showCnt = Integer.valueOf(map.get("pageShowCnt").toString());
                page = new PageView(pageNum, showCnt);
            }
            map.put("start", page.getStart());
            map.put("end", page.getCurrentMaxCnt());
            //System.out.println("-----------start:" + map.get("start"));
            //System.out.println("-----------start:" + map.get("maxCnt"));
            MapResultHandler mh = new MapResultHandler();
            page.setTotalRecord(this.getCount(map));
            session.select(getMethodPath("get", "MapPageList"), map, mh);
            page.setResultList(mh.getMappedResults());  

            return page;
        }
    }  

    /**
     * The map must contain a non-null "currentPageNum" key; if the page size is not the default 10,
     * it must also contain a non-null "pageShowCnt" key.
     * @param map
     * @param selectConutId id of the <select> tag in the mapper XML that counts the total records
     * @param selectPageListId id of the <select> tag in the mapper XML that queries the paged result list
     * @return
     */
    public PageView getPageList(Map<String, Object> map, String selectConutId, String selectPageListId){  

        if(map == null || map.get("currentPageNum") == null){
            return null;
        } else{
            PageView page = null;
            Integer pageNum = Integer.valueOf(map.get("currentPageNum").toString());
            if(map.get("pageShowCnt") == null){
                page = new PageView(pageNum);
            } else {
                Integer showCnt = Integer.valueOf(map.get("pageShowCnt").toString());
                page = new PageView(pageNum, showCnt);
            }
            map.put("start", page.getStart());
            map.put("end", page.getCurrentMaxCnt());
            //System.out.println("-----------start:" + map.get("start"));
            //System.out.println("-----------start:" + map.get("maxCnt"));
            MapResultHandler mh = new MapResultHandler();
            page.setTotalRecord(this.getCount(map, selectConutId));
            session.select(selectPageListId, map, mh);
            page.setResultList(mh.getMappedResults());  

            return page;
        }
    }  

    /**
     * The map must contain a non-null "currentPageNum" key; if the page size is not the default 10,
     * it must also contain a non-null "pageShowCnt" key.
     * @param map
     * @param selectConutId id of the <select> tag in the mapper XML that counts the total records
     * @param selectPageListId id of the <select> tag in the mapper XML that queries the paged result list
     * @return
     */
    public PageView getEntityPageList(Map<String, Object> map, String selectConutId, String selectPageListId){  

        if(map == null || map.get("currentPageNum") == null){
            return null;
        } else{
            PageView page = null;
            Integer pageNum = Integer.valueOf(map.get("currentPageNum").toString());
            if(map.get("pageShowCnt") == null){
                page = new PageView(pageNum);
            } else {
                Integer showCnt = Integer.valueOf(map.get("pageShowCnt").toString());
                page = new PageView(pageNum, showCnt);
            }
            map.put("start", page.getStart());
            map.put("end", page.getCurrentMaxCnt());
            //System.out.println("-----------start:" + map.get("start"));
            //System.out.println("-----------start:" + map.get("maxCnt"));
            page.setTotalRecord(this.getCount(map, selectConutId));
            page.setResultList(session.selectList(selectPageListId, map));  

            return page;
        }
    }  

    /**
     * The map must contain a non-null "currentPageNum" key; if the page size is not the default 10,
     * it must also contain a non-null "pageShowCnt" key.
     * @param map
     * @return PageView
     */
    public PageView getEntityPageList(Map<String, Object> map){  

        if(map == null || map.get("currentPageNum") == null){
            return null;
        } else{
            PageView page = null;
            Integer pageNum = Integer.valueOf(map.get("currentPageNum").toString());
            if(map.get("pageShowCnt") == null){
                page = new PageView(pageNum);
            } else {
                Integer showCnt = Integer.valueOf(map.get("pageShowCnt").toString());
                page = new PageView(pageNum, showCnt);
            }
            map.put("start", page.getStart());
            map.put("end", page.getCurrentMaxCnt());
            //System.out.println("-----------start:" + map.get("start"));
            //System.out.println("-----------start:" + map.get("maxCnt"));
            page.setTotalRecord(this.getCount(map));
            page.setResultList(session.selectList(getMethodPath("get", "PageList"), map));  

            return page;
        }
    }
}  
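
The helper classes referenced above (Constants, GenericsUtils, MapResultHandler, PageView and MyBatisUtil) are not included in the original post. For orientation only, here is a rough sketch of what MyBatisUtil might look like, inferred from the calls MyBatisUtil.getInstance(dataSource) and getSession(); the real version would also have to register the mapper XML files and would likely cache one instance per data source:

```
import javax.sql.DataSource;

import org.apache.ibatis.mapping.Environment;
import org.apache.ibatis.session.Configuration;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory;

public class MyBatisUtil {

    private final SqlSessionFactory sqlSessionFactory;

    private MyBatisUtil(DataSource dataSource) {
        // build a MyBatis environment directly on top of the injected DataSource
        Environment environment = new Environment("default", new JdbcTransactionFactory(), dataSource);
        Configuration configuration = new Configuration(environment);
        // the mapper XML files would have to be added to the configuration here (omitted)
        this.sqlSessionFactory = new SqlSessionFactoryBuilder().build(configuration);
    }

    public static MyBatisUtil getInstance(DataSource dataSource) {
        // simplified: a real implementation would probably cache one instance per data source
        return new MyBatisUtil(dataSource);
    }

    public SqlSession getSession() {
        // auto-commit session, matching the way MyBatisBaseDao uses it without explicit commits
        return sqlSessionFactory.openSession(true);
    }
}
```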

 

http://blog.csdn.net/ba5189tsl/article/details/47277737

 

Spring Boot with the Alibaba Druid connection pool and multiple data sources

Add the dependency (the configuration classes below also rely on MyBatis-Spring and the PageHelper pagination plugin, whose dependencies are not shown here)

<!-- Alibaba Druid connection pool -->
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>druid</artifactId>
    <version>1.0.29</version>
</dependency>

Data source configuration

  • 1. Data sources

    # master data source
    spring.datasource.master.url=jdbc:mysql://localhost:3306/SpringBootMaster
    spring.datasource.master.username=root
    spring.datasource.master.password=1
    spring.datasource.master.driver-class-name=com.mysql.jdbc.Driver
    spring.datasource.master.masterMapperLocations=classpath:mapper/master/*.xml
    
    # cluster data source
    spring.datasource.cluster.url=jdbc:mysql://localhost:3306/SpringBootCluster
    spring.datasource.cluster.username=root
    spring.datasource.cluster.password=1
    spring.datasource.cluster.driver-class-name=com.mysql.jdbc.Driver
    spring.datasource.cluster.clusterMapperLocations=classpath:mapper/cluster/*.xml
    
    # cluster1 data source
    spring.datasource.cluster1.url=jdbc:mysql://localhost:3306/SpringBootCluster1
    spring.datasource.cluster1.username=root
    spring.datasource.cluster1.password=1
    spring.datasource.cluster1.driver-class-name=com.mysql.jdbc.Driver
    spring.datasource.cluster1.clusterMapperLocations=classpath:mapper/cluster1/*.xml

    The master data source configuration class:

    @Configuration
    @MapperScan(basePackages = {"com.lc.springBoot.druid.mapper.master"},
            sqlSessionFactoryRef = "masterSqlSessionFactory")
    public class MasterDruidDataSourceConfig {
    
        @Value("${spring.datasource.master.masterMapperLocations}")
        private String masterMapperLocations;
    
        @ConfigurationProperties(prefix = "spring.datasource.master")
        @Bean(name = "masterDataSource")
        @Primary
        public DataSource masterDataSource() {
            return new DruidDataSource();
        }
    
        /**
         * SqlSessionFactory configuration
         *
         * @return
         * @throws Exception
         */
        @Bean(name = "masterSqlSessionFactory")
        @Primary
        public SqlSessionFactory masterSqlSessionFactory(
                @Qualifier("masterDataSource") DataSource dataSource
        ) throws Exception {
            SqlSessionFactoryBean sqlSessionFactoryBean = new SqlSessionFactoryBean();
            sqlSessionFactoryBean.setDataSource(dataSource);
    
            PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
            // configure the mapper XML file locations
            sqlSessionFactoryBean.setMapperLocations(resolver.getResources(masterMapperLocations));
    
            // configure the PageHelper pagination plugin
            PageHelper pageHelper = new PageHelper();
            Properties properties = new Properties();
            properties.setProperty("reasonable", "true");
            properties.setProperty("supportMethodsArguments", "true");
            properties.setProperty("returnPageInfo", "check");
            properties.setProperty("params", "count=countSql");
            pageHelper.setProperties(properties);
    
            // register the plugin
            sqlSessionFactoryBean.setPlugins(new Interceptor[]{pageHelper});
            return sqlSessionFactoryBean.getObject();
        }
    
        /**
         * Configure the transaction manager
         *
         * @return
         */
        @Bean(name = "masterTransactionManager")
        @Primary
        public DataSourceTransactionManager masterTransactionManager(
                @Qualifier("masterDataSource") DataSource dataSource
        ) {
            DataSourceTransactionManager dataSourceTransactionManager = new DataSourceTransactionManager();
            dataSourceTransactionManager.setDataSource(dataSource);
            return dataSourceTransactionManager;
        }
    }

     

    The cluster data source configuration class:

    @Configuration
    @MapperScan(basePackages = {"com.lc.springBoot.druid.mapper.cluster"},
            sqlSessionFactoryRef = "clusterSqlSessionFactory")
    public class ClusterDruidDataSourceConfig {
    
        @Value("${spring.datasource.cluster.clusterMapperLocations}")
        private String clusterMapperLocations;
    
        @ConfigurationProperties(prefix = "spring.datasource.cluster")
        @Bean(name = "clusterDataSource")
        public DataSource clusterDataSource() {
            return new DruidDataSource();
        }
    
        /**
         * SqlSessionFactory configuration
         *
         * @return
         * @throws Exception
         */
        @Bean(name = "clusterSqlSessionFactory")
        public SqlSessionFactory clusterSqlSessionFactory(
                @Qualifier("clusterDataSource") DataSource dataSource
        ) throws Exception {
            SqlSessionFactoryBean sqlSessionFactoryBean = new SqlSessionFactoryBean();
            sqlSessionFactoryBean.setDataSource(dataSource);
    
            PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
            // configure the mapper XML file locations
            sqlSessionFactoryBean.setMapperLocations(resolver.getResources(clusterMapperLocations));
    
            // configure the PageHelper pagination plugin
            PageHelper pageHelper = new PageHelper();
            Properties properties = new Properties();
            properties.setProperty("reasonable", "true");
            properties.setProperty("supportMethodsArguments", "true");
            properties.setProperty("returnPageInfo", "check");
            properties.setProperty("params", "count=countSql");
            pageHelper.setProperties(properties);
    
            // register the plugin
            sqlSessionFactoryBean.setPlugins(new Interceptor[]{pageHelper});
            return sqlSessionFactoryBean.getObject();
        }
    
        /**
         * Configure the transaction manager
         *
         * @return
         */
        @Bean(name = "clusterTransactionManager")
        public DataSourceTransactionManager clusterTransactionManager(
                @Qualifier("clusterDataSource") DataSource dataSource
        ) {
            DataSourceTransactionManager dataSourceTransactionManager = new DataSourceTransactionManager();
            dataSourceTransactionManager.setDataSource(dataSource);
            return dataSourceTransactionManager;
        }
    }

     

    The cluster1 data source configuration class:

    @Configuration
    @MapperScan(basePackages = {"com.lc.springBoot.druid.mapper.cluster1"},
            sqlSessionFactoryRef = "cluster1SqlSessionFactory")
    public class Cluster1DruidDataSourceConfig {
    
        @Value("${spring.datasource.cluster1.clusterMapperLocations}")
        private String cluster1MapperLocations;
    
        @ConfigurationProperties(prefix = "spring.datasource.cluster1")
        @Bean(name = "cluster1DataSource")
        public DataSource cluster1DataSource() {
            return new DruidDataSource();
        }
    
        /**
         * SqlSessionFactory configuration
         *
         * @return
         * @throws Exception
         */
        @Bean(name = "cluster1SqlSessionFactory")
        public SqlSessionFactory cluster1SqlSessionFactory(
                @Qualifier("cluster1DataSource") DataSource dataSource
        ) throws Exception {
            SqlSessionFactoryBean sqlSessionFactoryBean = new SqlSessionFactoryBean();
            sqlSessionFactoryBean.setDataSource(dataSource);
    
            PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
            // configure the mapper XML file locations
            sqlSessionFactoryBean.setMapperLocations(resolver.getResources(cluster1MapperLocations));
    
            // configure the PageHelper pagination plugin
            PageHelper pageHelper = new PageHelper();
            Properties properties = new Properties();
            properties.setProperty("reasonable", "true");
            properties.setProperty("supportMethodsArguments", "true");
            properties.setProperty("returnPageInfo", "check");
            properties.setProperty("params", "count=countSql");
            pageHelper.setProperties(properties);
    
            // register the plugin
            sqlSessionFactoryBean.setPlugins(new Interceptor[]{pageHelper});
            return sqlSessionFactoryBean.getObject();
        }
    
        /**
         * Configure the transaction manager
         *
         * @return
         */
        @Bean(name = "cluster1TransactionManager")
        public DataSourceTransactionManager cluster1TransactionManager(
                @Qualifier("cluster1DataSource") DataSource dataSource
        ) {
            DataSourceTransactionManager dataSourceTransactionManager = new DataSourceTransactionManager();
            dataSourceTransactionManager.setDataSource(dataSource);
            return dataSourceTransactionManager;
        }
    }

     

  • 2. Notes on the annotations used

    @MapperScan: the basePackages attribute lists the packages of MyBatis mapper interfaces to scan, and the sqlSessionFactoryRef attribute specifies which SqlSessionFactory those mappers should use.

    @ConfigurationProperties: reads the values configured in application.properties and binds them to the bean.

    @Primary: when several beans of the same type exist, this annotation marks the one preferred for injection. Note that when configuring multiple data sources, exactly one of them must carry @Primary.

Implementing read/write splitting
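
The application.properties entries for the write and read data sources are not shown in the original post; presumably they mirror the master/cluster entries above, along these lines (database names here are assumptions):

```
# write data source (assumed, mirrors the master/cluster entries above)
spring.datasource.write.url=jdbc:mysql://localhost:3306/SpringBootWrite
spring.datasource.write.username=root
spring.datasource.write.password=1
spring.datasource.write.driver-class-name=com.mysql.jdbc.Driver
spring.datasource.write.writeMapperLocations=classpath:mapper/write/*.xml

# read1 data source (assumed)
spring.datasource.read1.url=jdbc:mysql://localhost:3306/SpringBootRead1
spring.datasource.read1.username=root
spring.datasource.read1.password=1
spring.datasource.read1.driver-class-name=com.mysql.jdbc.Driver
spring.datasource.read1.read1MapperLocations=classpath:mapper/read1/*.xml
```

The routing configuration further below also injects a read2DataSource bean, so a matching spring.datasource.read2.* block and a configuration class analogous to Read1DruidDataSourceConfig would be needed as well.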

Read data source configuration

```
@Configuration
@MapperScan(basePackages = {"com.lc.springBoot.druid.mapper.read1"},
        sqlSessionFactoryRef = "read1SqlSessionFactory")
public class Read1DruidDataSourceConfig {

    @Value("${spring.datasource.read1.read1MapperLocations}")
    private String read1MapperLocations;

    @ConfigurationProperties(prefix = "spring.datasource.read1")
    @Bean(name = "read1DataSource")
    public DataSource read1DataSource() {
        return new DruidDataSource();
    }

    /**
     * SqlSessionFactory configuration
     *
     * @return
     * @throws Exception
     */
    @Bean(name = "read1SqlSessionFactory")
    public SqlSessionFactory read1SqlSessionFactory(
            @Qualifier("read1DataSource") DataSource dataSource
    ) throws Exception {
        SqlSessionFactoryBean sqlSessionFactoryBean = new SqlSessionFactoryBean();
        sqlSessionFactoryBean.setDataSource(dataSource);

        PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
        // configure the mapper XML file locations
        sqlSessionFactoryBean.setMapperLocations(resolver.getResources(read1MapperLocations));

        // configure the PageHelper pagination plugin
        PageHelper pageHelper = new PageHelper();
        Properties properties = new Properties();
        properties.setProperty("reasonable", "true");
        properties.setProperty("supportMethodsArguments", "true");
        properties.setProperty("returnPageInfo", "check");
        properties.setProperty("params", "count=countSql");
        pageHelper.setProperties(properties);

        // register the plugin
        sqlSessionFactoryBean.setPlugins(new Interceptor[]{pageHelper});
        return sqlSessionFactoryBean.getObject();
    }

    /**
     * Configure the transaction manager
     *
     * @return
     */
    @Bean(name = "read1TransactionManager")
    public DataSourceTransactionManager read1TransactionManager(
            @Qualifier("read1DataSource") DataSource dataSource
    ) {
        DataSourceTransactionManager dataSourceTransactionManager = new DataSourceTransactionManager();
        dataSourceTransactionManager.setDataSource(dataSource);
        return dataSourceTransactionManager;
    }
}

```

 

Write data source configuration

```
@Configuration
@MapperScan(basePackages = {"com.lc.springBoot.druid.mapper.write"},
        sqlSessionFactoryRef = "writeSqlSessionFactory")
public class WriteDruidDataSourceConfig {

    @Value("${spring.datasource.write.writeMapperLocations}")
    private String writeMapperLocations;

    @ConfigurationProperties(prefix = "spring.datasource.write")
    @Bean(name = "writeDataSource")
    public DataSource writeDataSource() {
        return new DruidDataSource();
    }

    /**
     * SqlSessionFactory configuration
     *
     * @return
     * @throws Exception
     */
    @Bean(name = "writeSqlSessionFactory")
    public SqlSessionFactory writeSqlSessionFactory(
            @Qualifier("writeDataSource") DataSource dataSource
    ) throws Exception {
        SqlSessionFactoryBean sqlSessionFactoryBean = new SqlSessionFactoryBean();
        sqlSessionFactoryBean.setDataSource(dataSource);

        PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
        // configure the mapper XML file locations
        sqlSessionFactoryBean.setMapperLocations(resolver.getResources(writeMapperLocations));
        return sqlSessionFactoryBean.getObject();
    }

    /**
     * Configure the transaction manager
     *
     * @return
     */
    @Bean(name = "writeTransactionManager")
    public DataSourceTransactionManager writeTransactionManager(
            @Qualifier("writeDataSource") DataSource dataSource
    ) {
        DataSourceTransactionManager dataSourceTransactionManager = new DataSourceTransactionManager();
        dataSourceTransactionManager.setDataSource(dataSource);
        return dataSourceTransactionManager;
    }
}
```

 

Register a custom AbstractRoutingDataSource

```
@Configuration
public class DataSourceConfig {

    private final static String WRITE_DATASOURCE_KEY = "writeDataSource";
    private final static String READ1_DATASOURCE_KEY = "read1DataSource";
    private final static String READ2_DATASOURCE_KEY = "read2DataSource";

    @Bean
    public AbstractRoutingDataSource routingDataSource(
            @Qualifier("writeDataSource") DataSource  writeDataSource,
            @Qualifier("read1DataSource") DataSource  read1DataSource,
            @Qualifier("read2DataSource") DataSource  read2DataSource
    ) {
        DynamicDataSource dataSource = new DynamicDataSource();
        Map<Object, Object> targetDataSources = new HashMap<>();
        targetDataSources.put(WRITE_DATASOURCE_KEY, writeDataSource);
        targetDataSources.put(READ1_DATASOURCE_KEY, read1DataSource);
        targetDataSources.put(READ2_DATASOURCE_KEY, read2DataSource);
        dataSource.setTargetDataSources(targetDataSources);
        dataSource.setDefaultTargetDataSource(writeDataSource);
        return dataSource;
    }
}
```
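
The original post does not show how routingDataSource is consumed. For the @TargetDataSource switching below to take effect, the SqlSessionFactory behind the service's mappers has to sit on top of the routing data source (in which case the per-data-source @MapperScan configurations above would typically be dropped). A hedged sketch, with class and bean names that are assumptions:

```
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

@Configuration
@MapperScan(basePackages = {"com.lc.springBoot.druid.mapper"},
        sqlSessionFactoryRef = "routingSqlSessionFactory")
public class RoutingSqlSessionFactoryConfig {

    @Bean(name = "routingSqlSessionFactory")
    public SqlSessionFactory routingSqlSessionFactory(AbstractRoutingDataSource routingDataSource) throws Exception {
        SqlSessionFactoryBean sqlSessionFactoryBean = new SqlSessionFactoryBean();
        // every mapper call goes through the routing data source, which picks
        // write/read1/read2 per invocation via DynamicDataSourceHolder
        sqlSessionFactoryBean.setDataSource(routingDataSource);
        sqlSessionFactoryBean.setMapperLocations(
                new PathMatchingResourcePatternResolver().getResources("classpath:mapper/**/*.xml"));
        return sqlSessionFactoryBean.getObject();
    }

    @Bean(name = "routingTransactionManager")
    public DataSourceTransactionManager routingTransactionManager(AbstractRoutingDataSource routingDataSource) {
        return new DataSourceTransactionManager(routingDataSource);
    }
}
```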

 

Custom annotation

```
@Target({ElementType.METHOD, ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface TargetDataSource {
    String dataSource() default ""; // name of the target data source bean
}

```

 

Bind the data source to the current thread with ThreadLocal

```
public class DynamicDataSourceHolder {
    // a ThreadLocal binds the data source name to the current thread
    private static final ThreadLocal<String> dataSources = new ThreadLocal<String>();

    public static void setDataSource(String dataSourceName) {
        dataSources.set(dataSourceName);
    }

    public static String getDataSource() {
        return dataSources.get();
    }

    public static void clearDataSource() {
        dataSources.remove();
    }
}
```

The routing data source then resolves the key set above:

```
public class DynamicDataSource extends AbstractRoutingDataSource {
    @Override
    protected Object determineCurrentLookupKey() {

        // a simple load-balancing strategy could be applied here (see the sketch after this block)
        String lookupKey = DynamicDataSourceHolder.getDataSource();
        System.out.println("------------lookupKey---------"+lookupKey);

        return lookupKey;
    }
}

```
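
As the comment above hints, a simple load-balancing strategy can be layered on top of the lookup key, for example letting callers ask for a generic "read" source and picking one of the two replicas at random. A sketch of that variant (the key strings match the bean names registered in DataSourceConfig):

```
import java.util.concurrent.ThreadLocalRandom;

import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

public class DynamicDataSource extends AbstractRoutingDataSource {

    @Override
    protected Object determineCurrentLookupKey() {
        String lookupKey = DynamicDataSourceHolder.getDataSource();
        if (lookupKey == null) {
            // no explicit choice: fall back to the default (write) data source
            return null;
        }
        if ("read".equals(lookupKey)) {
            // spread generic read requests randomly across the two replicas
            return ThreadLocalRandom.current().nextBoolean() ? "read1DataSource" : "read2DataSource";
        }
        return lookupKey;
    }
}
```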

 

Define the aspect

```
@Aspect
@Component
public class DynamicDataSourceAspect {
    @Around("execution(public * com.lc.springBoot.druid.service..*.*(..))")
    public Object around(ProceedingJoinPoint pjp) throws Throwable {
        MethodSignature methodSignature = (MethodSignature) pjp.getSignature();
        Method targetMethod = methodSignature.getMethod();
        if (targetMethod.isAnnotationPresent(TargetDataSource.class)) {
            String targetDataSource = targetMethod.getAnnotation(TargetDataSource.class).dataSource();
            System.out.println("----------数据源是:" + targetDataSource + "------");
            DynamicDataSourceHolder.setDataSource(targetDataSource);
        }
        Object result = pjp.proceed(); // invoke the target method
        DynamicDataSourceHolder.clearDataSource();

        return result;
    }
}

```
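
One caveat the original post does not mention: when a method carries both @Transactional and @TargetDataSource, the lookup key must be set before Spring's transaction advice borrows a connection, so the aspect should run with a higher precedence than the transaction interceptor. A variant of the aspect above with an explicit @Order and a try/finally so the key is always cleared:

```
import java.lang.reflect.Method;

import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.reflect.MethodSignature;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;

@Aspect
@Order(Ordered.HIGHEST_PRECEDENCE) // run before the @Transactional interceptor
@Component
public class DynamicDataSourceAspect {

    @Around("execution(public * com.lc.springBoot.druid.service..*.*(..))")
    public Object around(ProceedingJoinPoint pjp) throws Throwable {
        MethodSignature methodSignature = (MethodSignature) pjp.getSignature();
        Method targetMethod = methodSignature.getMethod();
        if (targetMethod.isAnnotationPresent(TargetDataSource.class)) {
            DynamicDataSourceHolder.setDataSource(targetMethod.getAnnotation(TargetDataSource.class).dataSource());
        }
        try {
            return pjp.proceed(); // invoke the target method
        } finally {
            DynamicDataSourceHolder.clearDataSource(); // always clear, even when the method throws
        }
    }
}
```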

 

Using the annotation

```
@Service
public class StudentService {

    @Autowired
    private StudentMapper studentMapper;

    @Transactional
    @TargetDataSource(dataSource = "writeDataSource")
    public boolean createUser(Student student) {
        studentMapper.insert(student);

        // transaction rollback test
//        int i = 1 / 0;
        return true;
    }

    @TargetDataSource(dataSource = "read1DataSource")
    public List<Student> getByPage(int page, int rows) {
        Page<Student> studentPage = PageHelper.startPage(page, rows, true);
        List<Student> students = studentMapper.getBypage();
        System.out.println("-------------------" + studentPage.toString() + "-----------");
        return students;
    }
}

```  
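
The StudentMapper interface and Student entity used above are not included in the original post. A minimal sketch of the mapper, with method names taken from the service (the actual SQL would live in the mapper XML files under classpath:mapper/write and classpath:mapper/read1):

```
import java.util.List;

// picked up by the @MapperScan declarations in the data source configuration classes;
// the matching <insert>/<select> statements live in the mapper XML files
public interface StudentMapper {

    int insert(Student student);

    List<Student> getBypage();
}
```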

 

Configuring the Druid monitoring features

Configure the filter

```
/**
 * Configure the filter: which URLs to intercept, which resources to ignore, init parameters, etc.
 *
 * @author lsj <lishuijun1992@gmail.com>
 * @date 17-4-7
 */
@WebFilter(filterName = "druidStatFilter", // filter name
        urlPatterns = "/*", // URLs to intercept
        initParams = { // filter init parameters
                // resources to ignore
                @WebInitParam(name = "exclusions", value = "*.js,*.gif,*.jpg," +
                        "*.bmp,*.png,*.css,*.ico,/druid/*"),
                @WebInitParam(name = "sessionStatEnable", value = "true"),
                @WebInitParam(name = "profileEnable", value = "true")})
public class DruidStatFilter extends WebStatFilter {
}

```

 

Configure the servlet

// this registers the Druid stat view servlet
@WebServlet(urlPatterns = "/druid/*", // URL used to reach the console
        initParams = {
                @WebInitParam(name = "loginUsername", value = "lengchuan"), // username
                @WebInitParam(name = "loginPassword", value = "123456"), // password
                @WebInitParam(name = "resetEnable", value = "true"), // whether statistics can be reset
                // @WebInitParam(name = "allow",value = "127.0.0.1"), // allow list (whitelist)
                // @WebInitParam(name = "deny",value = "192.168.1.1") // deny list (blacklist)
        })
public class DruidStatViewServlet extends StatViewServlet {
}
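
Because the filter and servlet above are registered with @WebFilter/@WebServlet, Spring Boot only picks them up when servlet component scanning is enabled on the application class (not shown in the original post):

```
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.web.servlet.ServletComponentScan;

@SpringBootApplication
@ServletComponentScan // registers @WebFilter/@WebServlet classes in the embedded container
public class Application {

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }
}
```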

 

Configure Spring method monitoring

@Configuration
public class MyDruidStatInterceptor {

    private static final String[] patterns = new String[]{"com.lc.springBoot.druid.service.*"};

    @Bean
    public DruidStatInterceptor druidStatInterceptor() {
        return new DruidStatInterceptor();
    }

    /**
     * Pointcut
     * @return
     */
    @Bean
    public JdkRegexpMethodPointcut druidStatPointcut() {
        JdkRegexpMethodPointcut druidStatPointcut = new JdkRegexpMethodPointcut();
        druidStatPointcut.setPatterns(patterns);
        return druidStatPointcut;
    }

    /**
     * Configure the AOP advisor
     * @return
     */
    @Bean
    public Advisor druidStatAdvisor() {
        return new DefaultPointcutAdvisor(druidStatPointcut(), druidStatInterceptor());
    }
}

 

Access the monitoring page

After the configuration is complete, start the project and open http://localhost:8080/druid/ to see the monitoring page. The username configured above is lengchuan and the password is 123456. Note that data source information only shows up after at least one database operation has been executed.

https://github.com/Lengchuan/SpringBoot-Study/tree/master/SpringBoot-Druid

https://github.com/helloworldtang/springboot-multi-datasource
