Spring Batch Multi-DataSource Configuration


spring:
  batch:
    job:
      enabled: false            # run defined Jobs automatically on startup; defaults to true
    initialize-schema: never    # do not create the BATCH_* metadata tables automatically
  datasource:
    name: payReportBatch
    type: com.alibaba.druid.pool.DruidDataSource
    driver-class-name: com.mysql.jdbc.Driver
    jdbc-url: jdbc:xx
    username: root
    password: X18
    initial-size: 10            # initial pool size
    min-idle: 10                # minimum idle connections
    max-active: 10              # maximum active connections
    max-wait: 10000             # connection wait timeout (ms)
    time-between-eviction-runs-millis: 60000    # interval between idle-connection eviction runs (ms)
    min-evictable-idle-time-millis: 300000      # minimum time a connection stays idle in the pool before eviction (ms)
    validation-query: select "1"
    test-while-idle: true
    test-on-borrow: false
    test-on-return: false
    # enable PSCache and set its size per connection
    pool-prepared-statements: true
    max-open-prepared-statements: 50
    max-pool-prepared-statement-per-connection-size: 50
    filter:
      stat:
        merge-sql: true
        log-slow-sql: true      # slow-query logging
        slow-sql-millis: 5000   # SQL taking longer than 5 seconds is highlighted in the Druid console
      wall:
        enabled: false
      commons-log:
        enabled: false
      log4j:
        connection-log-enabled: false
      slf4j:
        statement-log-enabled: false
      log4j2:
        statement-log-enabled: false
    # filters intercepted for monitoring/statistics; removing "stat" disables SQL statistics in the console,
    # "wall" enables the SQL firewall. If the project uses logback, drop the log4j filter.
    filters: stat,wall
    # Druid Spring AOP monitoring pointcuts
    # aop-patterns: com.xy.pay.monitor.service.*,com.xy.pay.monitor.schedule.*,com.xy.pay.monitor.controller.*
    # DruidStatFilter
    web-stat-filter:
      url-pattern: "/*"
      exclusions: "*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*,/uploal/*"
    # DruidStatViewServlet
    stat-view-servlet:
      url-pattern: "/druid/*"
      # IP whitelist (allows all access if missing or empty)
      # allow: 127.0.0.1,192.168.163.1
      # IP blacklist (deny takes precedence over allow when both match)
      # deny: 192.168.1.73
      reset-enable: true        # "Reset All" button on the console page; true keeps it enabled
      login-username: admin
      login-password: kj
    use-global-data-source-stat: false
    clear-filters-enable: true

pay:
  datasource:
    name: payTrade
    driver-class-name: com.mysql.jdbc.Driver
    type: com.alibaba.druid.pool.DruidDataSource
    jdbc-url: jdbc:xx
    username: root
    password: t2018
    initial-size: 10            # initial pool size
    min-idle: 10                # minimum idle connections
    max-active: 10              # maximum active connections
    max-wait: 10000             # connection wait timeout (ms)
    time-between-eviction-runs-millis: 60000    # interval between idle-connection eviction runs (ms)
    min-evictable-idle-time-millis: 300000      # minimum time a connection stays idle in the pool before eviction (ms)
    validation-query: select "1"
    test-while-idle: true
    test-on-borrow: false
    test-on-return: false
    # enable PSCache and set its size per connection
    pool-prepared-statements: true
    max-open-prepared-statements: 50
    max-pool-prepared-statement-per-connection-size: 50
    filter:
      stat:
        merge-sql: true
        log-slow-sql: true      # slow-query logging
        slow-sql-millis: 5000   # SQL taking longer than 5 seconds is highlighted in the Druid console
      wall:
        enabled: false
      commons-log:
        enabled: false
      log4j:
        connection-log-enabled: false
      slf4j:
        statement-log-enabled: false
      log4j2:
        statement-log-enabled: false
    # filters intercepted for monitoring/statistics; removing "stat" disables SQL statistics in the console,
    # "wall" enables the SQL firewall. If the project uses logback, drop the log4j filter.
    filters: stat,wall
    # Druid Spring AOP monitoring pointcuts
    # aop-patterns: com.xy.pay.monitor.service.*,com.xy.pay.monitor.schedule.*,com.xy.pay.monitor.controller.*
    # DruidStatFilter
    web-stat-filter:
      url-pattern: "/*"
      exclusions: "*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*,/uploal/*"
    # DruidStatViewServlet
    stat-view-servlet:
      url-pattern: "/druid/*"
      # IP whitelist (allows all access if missing or empty)
      # allow: 127.0.0.1,192.168.163.1
      # IP blacklist (deny takes precedence over allow when both match)
      # deny: 192.168.1.73
      reset-enable: true        # "Reset All" button on the console page; true keeps it enabled
      login-username: admin
      login-password: xsj
    use-global-data-source-stat: false
    clear-filters-enable: true
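
Because spring.batch.job.enabled is false, no job runs automatically at startup, and because initialize-schema is never, Spring Boot will not create the BATCH_* metadata tables; they must already exist in the payReportBatch database. Below is a minimal sketch of one way to create them once from code, assuming the schema-mysql.sql script that ships inside spring-batch-core; the helper class and method name are illustrative, not from the original post.

package com.xy.pay.report.batch.config;

import javax.sql.DataSource;

import org.springframework.core.io.ClassPathResource;
import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator;

public class BatchSchemaInitializer {

    /** Runs the Spring Batch DDL script against the given (primary) DataSource. */
    public static void createBatchTables(DataSource dataSource) {
        ResourceDatabasePopulator populator = new ResourceDatabasePopulator();
        // schema-mysql.sql is bundled in the spring-batch-core jar
        populator.addScript(new ClassPathResource("org/springframework/batch/core/schema-mysql.sql"));
        populator.execute(dataSource);
    }
}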

BatchDataSourceConfigurer

package com.xy.pay.report.batch.config;

import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;

import javax.sql.DataSource;

/**
 * Spring Batch datasource configuration (the primary datasource, bound to spring.datasource).
 *
 * @author Canaan
 * @date 2020/2/27 18:38
 */
@Configuration
@MapperScan(basePackages = "com.xy.pay.dao.batch", sqlSessionTemplateRef = "sqlSessionTemplate")
@EnableTransactionManagement(proxyTargetClass = true)
public class BatchDataSourceConfigurer {

    @Value("classpath:config/mybatis-config.xml")
    private Resource configLocation;

    @Bean
    @Primary
    @ConfigurationProperties(prefix = "spring.datasource")
    public DataSource dataSource() {
        return DataSourceBuilder.create().build();
    }

    @Bean
    @Primary
    public SqlSessionFactory sqlSessionFactory(@Qualifier("dataSource") DataSource dataSource) throws Exception {
        SqlSessionFactoryBean bean = new SqlSessionFactoryBean();
        bean.setDataSource(dataSource);
        bean.setConfigLocation(configLocation);
        bean.setMapperLocations(new PathMatchingResourcePatternResolver()
                .getResources("classpath:com/xy/pay/dao/batch/mapper/**/*.xml"));
        return bean.getObject();
    }

    @Bean
    @Primary
    public DataSourceTransactionManager transactionManager(@Qualifier("dataSource") DataSource dataSource) {
        return new DataSourceTransactionManager(dataSource);
    }

    @Bean
    @Primary
    public SqlSessionTemplate sqlSessionTemplate(@Qualifier("sqlSessionFactory") SqlSessionFactory sqlSessionFactory) throws Exception {
        return new SqlSessionTemplate(sqlSessionFactory, ExecutorType.BATCH);
    }
}
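
Since sqlSessionTemplate is created with ExecutorType.BATCH, it pairs naturally with the batch item writer from mybatis-spring. The following is a minimal sketch of such a writer bound to this template; the config class, the item type, and the statement id com.xy.pay.dao.batch.PayReportMapper.insertReport are hypothetical placeholders, not part of the original post.

package com.xy.pay.report.batch.config;

import java.util.Map;

import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.batch.MyBatisBatchItemWriter;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class PayReportWriterConfig {

    @Bean
    public MyBatisBatchItemWriter<Map<String, Object>> payReportWriter(
            @Qualifier("sqlSessionTemplate") SqlSessionTemplate sqlSessionTemplate) {
        MyBatisBatchItemWriter<Map<String, Object>> writer = new MyBatisBatchItemWriter<>();
        // Reuses the BATCH-mode template defined above, so inserts are flushed per chunk.
        writer.setSqlSessionTemplate(sqlSessionTemplate);
        // Hypothetical insert statement in a mapper XML under com.xy.pay.dao.batch
        writer.setStatementId("com.xy.pay.dao.batch.PayReportMapper.insertReport");
        return writer;
    }
}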

RDSDataSourceConfigurer

package com.xy.pay.report.batch.config;

import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;

import javax.sql.DataSource;

@Configuration
@MapperScan(basePackages = "com.xy.pay.dao.core", sqlSessionTemplateRef = "rdsSqlSessionTemplate")
@EnableTransactionManagement(proxyTargetClass = true)
public class RDSDataSourceConfigurer {

    @Value("classpath:config/mybatis-config.xml")
    private Resource configLocation;

    @Bean
    @Qualifier("rds")
    @ConfigurationProperties(prefix = "pay.datasource")
    public DataSource rdsDataSource() {
        return DataSourceBuilder.create().build();
    }

    @Bean
    @Qualifier("rds")
    public SqlSessionFactory rdsSqlSessionFactory(@Qualifier("rdsDataSource") DataSource dataSource) throws Exception {
        SqlSessionFactoryBean bean = new SqlSessionFactoryBean();
        bean.setDataSource(dataSource);
        bean.setConfigLocation(configLocation);
        bean.setMapperLocations(new PathMatchingResourcePatternResolver()
                .getResources("classpath:com/xy/pay/dao/core/mapper/**/*.xml"));
        return bean.getObject();
    }

    @Bean
    @Qualifier("rds")
    public DataSourceTransactionManager rdsTransactionManager(@Qualifier("rdsDataSource") DataSource dataSource) {
        return new DataSourceTransactionManager(dataSource);
    }

    @Bean
    @Qualifier("rds")
    public SqlSessionTemplate rdsSqlSessionTemplate(@Qualifier("rdsSqlSessionFactory") SqlSessionFactory sqlSessionFactory) throws Exception {
        return new SqlSessionTemplate(sqlSessionFactory, ExecutorType.BATCH);
    }
}
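
The rds beans are typically consumed by readers that pull source data from the payTrade database. Below is a minimal sketch using MyBatisPagingItemReader from mybatis-spring, bound to rdsSqlSessionFactory; the config class, the filter parameter, and the statement id com.xy.pay.dao.core.PayTradeMapper.selectByStatus are hypothetical (the mapped SQL would be expected to page via the _skiprows/_pagesize parameters the reader supplies).

package com.xy.pay.report.batch.config;

import java.util.HashMap;
import java.util.Map;

import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.batch.MyBatisPagingItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class PayTradeReaderConfig {

    @Bean
    public MyBatisPagingItemReader<Map<String, Object>> payTradeReader(
            @Qualifier("rdsSqlSessionFactory") SqlSessionFactory rdsSqlSessionFactory) {
        Map<String, Object> params = new HashMap<>();
        params.put("status", "SUCCESS"); // hypothetical filter parameter

        MyBatisPagingItemReader<Map<String, Object>> reader = new MyBatisPagingItemReader<>();
        reader.setSqlSessionFactory(rdsSqlSessionFactory);
        // Hypothetical paged select statement in a mapper XML under com.xy.pay.dao.core
        reader.setQueryId("com.xy.pay.dao.core.PayTradeMapper.selectByStatus");
        reader.setParameterValues(params);
        reader.setPageSize(500);
        return reader;
    }
}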

SpringBatchConfigurer

package com.xy.pay.report.batch.config;

import org.springframework.batch.core.configuration.ListableJobLocator;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.launch.JobOperator;
import org.springframework.batch.core.launch.support.SimpleJobLauncher;
import org.springframework.batch.core.launch.support.SimpleJobOperator;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.ImportResource;
import org.springframework.core.task.TaskExecutor;

@Configuration
@EnableBatchProcessing
@ImportResource("classpath:batch/spring-batch.xml")
public class SpringBatchConfigurer {

    @Autowired
    private ListableJobLocator jobRegistry;

    @Autowired
    private JobLauncher jobLauncher;

    @Autowired
    private JobRepository jobRepository;

    @Autowired
    private JobExplorer jobExplorer;

    @Autowired
    private TaskExecutor batchExecutor;

    @Bean
    public JobOperator jobOperator() {
        SimpleJobOperator jobOperator = new SimpleJobOperator();
        jobOperator.setJobExplorer(this.jobExplorer);
        jobOperator.setJobRepository(this.jobRepository);
        jobOperator.setJobLauncher(this.jobLauncher);
        jobOperator.setJobRegistry(this.jobRegistry);
        return jobOperator;
    }

    /**
     * Asynchronous job launcher for batch processing.
     *
     * @author Canaan
     * @date 2020/2/20 9:08
     */
    @Bean
    @Qualifier("async")
    public JobLauncher asyncJobLauncher() throws Exception {
        SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
        jobLauncher.setJobRepository(this.jobRepository);
        jobLauncher.setTaskExecutor(batchExecutor);
        jobLauncher.afterPropertiesSet();
        return jobLauncher;
    }

    // Optional overrides for customizing the JobRepository / JobExplorer when extending
    // DefaultBatchConfigurer (kept commented out in the original post):
    //
    //@Override
    //protected JobRepository createJobRepository() throws Exception {
    //    JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
    //    factory.setDataSource(dataSource);
    //    factory.setTransactionManager(getTransactionManager());
    //    factory.setIsolationLevelForCreate("ISOLATION_SERIALIZABLE"); // or ISOLATION_REPEATABLE_READ
    //    factory.setTablePrefix("BATCH_");
    //    factory.setMaxVarCharLength(1000);
    //    factory.afterPropertiesSet();
    //    return factory.getObject();
    //}
    //
    //@Override
    //protected JobExplorer createJobExplorer() throws Exception {
    //    JobExplorerFactoryBean jobExplorerFactoryBean = new JobExplorerFactoryBean();
    //    jobExplorerFactoryBean.setDataSource(this.dataSource);
    //    jobExplorerFactoryBean.setTablePrefix("BATCH_");
    //    jobExplorerFactoryBean.afterPropertiesSet();
    //    return jobExplorerFactoryBean.getObject();
    //}
}
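
With automatic job execution disabled in the YAML, the jobs defined in batch/spring-batch.xml have to be launched explicitly, for example from a scheduler or a controller. The following is a minimal sketch that uses the @Qualifier("async") launcher above so the call returns immediately; the trigger class, the job bean name payReportJob, and the parameter names are hypothetical.

package com.xy.pay.report.batch.schedule;

import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;

@Component
public class PayReportJobTrigger {

    private final JobLauncher asyncJobLauncher;
    private final Job payReportJob;

    public PayReportJobTrigger(@Qualifier("async") JobLauncher asyncJobLauncher,
                               @Qualifier("payReportJob") Job payReportJob) {
        this.asyncJobLauncher = asyncJobLauncher;
        this.payReportJob = payReportJob;
    }

    /** Submits the job to batchExecutor and returns without waiting for completion. */
    public void run(String bizDate) throws Exception {
        JobParameters params = new JobParametersBuilder()
                .addString("bizDate", bizDate)              // business date for this run
                .addLong("ts", System.currentTimeMillis())  // makes each run's parameters unique
                .toJobParameters();
        asyncJobLauncher.run(payReportJob, params);
    }
}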

The above is the full content of Spring Batch multi-datasource configuration. Source: utcz.com/z/514196.html
