yidao620
2018-02-26 19:05:53 +08:00
commit 4d57c022c6
534 changed files with 96924 additions and 0 deletions

View File

@ -0,0 +1,13 @@
package com.enzhico.trans;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class Application {
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
}

View File

@ -0,0 +1,44 @@
package com.enzhico.trans.config;
import com.alibaba.druid.pool.DruidDataSource;
import com.baomidou.mybatisplus.plugins.PaginationInterceptor;
import com.enzhico.trans.config.properties.DruidProperties;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.annotation.Resource;
/**
* MyBatis-Plus configuration
*
* @author xiongneng
* @since 2017/5/20 21:58
*/
@Configuration
@EnableTransactionManagement(order = 2)
@MapperScan(basePackages = {"com.enzhico.trans.dao.repository"})
public class MybatisPlusConfig {
@Resource
private DruidProperties druidProperties;
/**
* Single data source connection pool configuration
*/
@Bean
public DruidDataSource singleDatasource() {
DruidDataSource dataSource = new DruidDataSource();
druidProperties.config(dataSource);
return dataSource;
}
/**
* MyBatis-Plus pagination plugin
*/
@Bean
public PaginationInterceptor paginationInterceptor() {
return new PaginationInterceptor();
}
}

View File

@ -0,0 +1,88 @@
package com.enzhico.trans.config.properties;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
/**
* Custom configuration properties
*
* @author xiongneng
* @since 2017-05-21 11:18
*/
@Component
@ConfigurationProperties(prefix = "common")
public class CommonProperties {
/**
* Directory of the CSV files; file names follow the pattern "table_name.csv"
*/
private String csvDir;
private String csvVtoll;
private String csvCanton;
private String csvExeOffice;
private String csvApp;
private String csvLog;
/**
* Where the CSV files are located: 1 = file system, 2 = classpath
*/
private Integer location;
public String getCsvDir() {
return csvDir;
}
public void setCsvDir(String csvDir) {
this.csvDir = csvDir;
}
public String getCsvExeOffice() {
return csvExeOffice;
}
public void setCsvExeOffice(String csvExeOffice) {
this.csvExeOffice = csvExeOffice;
}
public String getCsvVtoll() {
return csvVtoll;
}
public void setCsvVtoll(String csvVtoll) {
this.csvVtoll = csvVtoll;
}
public String getCsvApp() {
return csvApp;
}
public void setCsvApp(String csvApp) {
this.csvApp = csvApp;
}
public String getCsvLog() {
return csvLog;
}
public void setCsvLog(String csvLog) {
this.csvLog = csvLog;
}
public String getCsvCanton() {
return csvCanton;
}
public void setCsvCanton(String csvCanton) {
this.csvCanton = csvCanton;
}
public Integer getLocation() {
return location;
}
public void setLocation(Integer location) {
this.location = location;
}
}

View File

@ -0,0 +1,249 @@
package com.enzhico.trans.config.properties;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.util.JdbcConstants;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
import java.sql.SQLException;
/**
* <p>Database data source configuration.</p>
* <p>Note: this class ships with a number of defaults. If they suit your environment you can leave them alone; if not, override them in "application.yml" rather than editing this class.</p>
*
* @author xiongneng
* @since 2017-05-21 11:18
*/
@Component
@ConfigurationProperties(prefix = "spring.datasource")
public class DruidProperties {
private String url;
private String username;
private String password;
private String driverClassName;
private Integer initialSize = 10;
private Integer minIdle = 3;
private Integer maxActive = 60;
private Integer maxWait = 60000;
private Boolean removeAbandoned = true;
private Integer removeAbandonedTimeout = 180;
private Integer timeBetweenEvictionRunsMillis = 60000;
private Integer minEvictableIdleTimeMillis = 300000;
private String validationQuery = "SELECT 1 from dual";
private Boolean testWhileIdle = true;
private Boolean testOnBorrow = false;
private Boolean testOnReturn = false;
private Boolean poolPreparedStatements = true;
private Integer maxPoolPreparedStatementPerConnectionSize = 50;
private String filters = "stat";
public void config(DruidDataSource dataSource) {
dataSource.setDbType(JdbcConstants.ORACLE);
dataSource.setUrl(url);
dataSource.setUsername(username);
dataSource.setPassword(password);
dataSource.setDriverClassName(driverClassName);
dataSource.setInitialSize(initialSize); // initial number of connections
dataSource.setMinIdle(minIdle); // minimum number of idle connections
dataSource.setMaxActive(maxActive); // maximum number of connections
dataSource.setMaxWait(maxWait); // maximum wait time when acquiring a connection
dataSource.setRemoveAbandoned(removeAbandoned); // whether to reclaim connections held past the time limit
dataSource.setRemoveAbandonedTimeout(removeAbandonedTimeout); // the time limit after which a connection counts as abandoned
// interval, in milliseconds, between checks for idle connections that need to be closed
dataSource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
// minimum time, in milliseconds, a connection must stay in the pool before it can be evicted
dataSource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
// SQL used to check whether a connection is still valid; must be a query statement
dataSource.setValidationQuery(validationQuery);
// validate connections that have been idle too long when they are requested
dataSource.setTestWhileIdle(testWhileIdle);
// run validationQuery when a connection is borrowed; setting this to true hurts performance
dataSource.setTestOnBorrow(testOnBorrow);
// run validationQuery when a connection is returned; setting this to true hurts performance
dataSource.setTestOnReturn(testOnReturn);
// enable PSCache and set the PSCache size per connection
dataSource.setPoolPreparedStatements(poolPreparedStatements);
dataSource.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
// string of comma-separated aliases that enables extension filters; common filters are:
// stat  - monitoring statistics
// log4j - logging
// wall  - SQL injection protection
try {
dataSource.setFilters(filters);
} catch (SQLException e) {
e.printStackTrace();
}
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public String getDriverClassName() {
return driverClassName;
}
public void setDriverClassName(String driverClassName) {
this.driverClassName = driverClassName;
}
public Integer getInitialSize() {
return initialSize;
}
public void setInitialSize(Integer initialSize) {
this.initialSize = initialSize;
}
public Integer getMinIdle() {
return minIdle;
}
public void setMinIdle(Integer minIdle) {
this.minIdle = minIdle;
}
public Integer getMaxActive() {
return maxActive;
}
public void setMaxActive(Integer maxActive) {
this.maxActive = maxActive;
}
public Integer getMaxWait() {
return maxWait;
}
public void setMaxWait(Integer maxWait) {
this.maxWait = maxWait;
}
public Integer getTimeBetweenEvictionRunsMillis() {
return timeBetweenEvictionRunsMillis;
}
public void setTimeBetweenEvictionRunsMillis(Integer timeBetweenEvictionRunsMillis) {
this.timeBetweenEvictionRunsMillis = timeBetweenEvictionRunsMillis;
}
public Integer getMinEvictableIdleTimeMillis() {
return minEvictableIdleTimeMillis;
}
public void setMinEvictableIdleTimeMillis(Integer minEvictableIdleTimeMillis) {
this.minEvictableIdleTimeMillis = minEvictableIdleTimeMillis;
}
public String getValidationQuery() {
return validationQuery;
}
public void setValidationQuery(String validationQuery) {
this.validationQuery = validationQuery;
}
public Boolean getTestWhileIdle() {
return testWhileIdle;
}
public void setTestWhileIdle(Boolean testWhileIdle) {
this.testWhileIdle = testWhileIdle;
}
public Boolean getTestOnBorrow() {
return testOnBorrow;
}
public void setTestOnBorrow(Boolean testOnBorrow) {
this.testOnBorrow = testOnBorrow;
}
public Boolean getTestOnReturn() {
return testOnReturn;
}
public void setTestOnReturn(Boolean testOnReturn) {
this.testOnReturn = testOnReturn;
}
public Boolean getPoolPreparedStatements() {
return poolPreparedStatements;
}
public void setPoolPreparedStatements(Boolean poolPreparedStatements) {
this.poolPreparedStatements = poolPreparedStatements;
}
public Integer getMaxPoolPreparedStatementPerConnectionSize() {
return maxPoolPreparedStatementPerConnectionSize;
}
public void setMaxPoolPreparedStatementPerConnectionSize(Integer maxPoolPreparedStatementPerConnectionSize) {
this.maxPoolPreparedStatementPerConnectionSize = maxPoolPreparedStatementPerConnectionSize;
}
public String getFilters() {
return filters;
}
public void setFilters(String filters) {
this.filters = filters;
}
public Boolean getRemoveAbandoned() {
return removeAbandoned;
}
public void setRemoveAbandoned(Boolean removeAbandoned) {
this.removeAbandoned = removeAbandoned;
}
public Integer getRemoveAbandonedTimeout() {
return removeAbandonedTimeout;
}
public void setRemoveAbandonedTimeout(Integer removeAbandonedTimeout) {
this.removeAbandonedTimeout = removeAbandonedTimeout;
}
}

View File

@ -0,0 +1,59 @@
package com.enzhico.trans.dao.entity;
import com.baomidou.mybatisplus.activerecord.Model;
import com.baomidou.mybatisplus.annotations.TableId;
import com.baomidou.mybatisplus.annotations.TableName;
import com.baomidou.mybatisplus.enums.IdType;
import java.io.Serializable;
@TableName(value = "t_user")
public class User extends Model<User> {
/**
* Primary key ID
*/
@TableId(value = "id", type = IdType.INPUT)
private Integer id;
private String username;
private String password;
public User() {
}
public User(Integer id, String username, String password) {
this.id = id;
this.username = username;
this.password = password;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
@Override
protected Serializable pkVal() {
return this.id;
}
}

View File

@ -0,0 +1,7 @@
package com.enzhico.trans.dao.repository;
import com.baomidou.mybatisplus.mapper.BaseMapper;
import com.enzhico.trans.dao.entity.User;
public interface UserMapper extends BaseMapper<User> {
}
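
As a quick illustration of how this mapper might be used, the sketch below injects UserMapper into a hypothetical service and calls two of the CRUD methods inherited from MyBatis-Plus's BaseMapper (selectById and insert). The UserService class is illustrative only and is not part of this commit.

package com.enzhico.trans.service;
import com.enzhico.trans.dao.entity.User;
import com.enzhico.trans.dao.repository.UserMapper;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
/**
 * Hypothetical example (not in this commit): calling the CRUD methods
 * that UserMapper inherits from BaseMapper<User>.
 */
@Service
public class UserService {
    @Resource
    private UserMapper userMapper;
    public User findById(Integer id) {
        // selectById comes from BaseMapper<User>
        return userMapper.selectById(id);
    }
    public void register(Integer id, String username, String password) {
        // insert comes from BaseMapper<User>
        userMapper.insert(new User(id, username, password));
    }
}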

View File

@ -0,0 +1,75 @@
package com.enzhico.trans.modules;
import com.alibaba.druid.pool.DruidDataSource;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.launch.support.SimpleJobLauncher;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean;
import org.springframework.batch.support.DatabaseType;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.annotation.Order;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.transaction.PlatformTransactionManager;
/**
* Common configuration class for the batch jobs
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/5
*/
@Configuration
@EnableBatchProcessing
@Order(3)
public class MyBatchConfig {
@Bean
public ThreadPoolTaskExecutor taskExecutor() {
ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
taskExecutor.setCorePoolSize(5);
taskExecutor.setMaxPoolSize(10);
taskExecutor.setQueueCapacity(200);
return taskExecutor;
}
/**
* JobRepository: the container in which Jobs are registered.
* Defining the jobRepository requires a dataSource and a transactionManager; Spring Boot has
* already auto-configured both, and Spring can inject the existing beans via method parameters.
*
* @param dataSource
* @param transactionManager
* @return
* @throws Exception
*/
@Bean
public JobRepository jobRepository(DruidDataSource dataSource, PlatformTransactionManager transactionManager) throws Exception {
JobRepositoryFactoryBean jobRepositoryFactoryBean = new JobRepositoryFactoryBean();
jobRepositoryFactoryBean.setDataSource(dataSource);
jobRepositoryFactoryBean.setTransactionManager(transactionManager);
jobRepositoryFactoryBean.setDatabaseType(String.valueOf(DatabaseType.ORACLE));
jobRepositoryFactoryBean.setMaxVarCharLength(5000);
// the transaction isolation level below is configured specifically for Oracle
jobRepositoryFactoryBean.setIsolationLevelForCreate("ISOLATION_READ_COMMITTED");
jobRepositoryFactoryBean.afterPropertiesSet();
return jobRepositoryFactoryBean.getObject();
}
/**
* JobLauncher: the interface used to launch Jobs
*
* @param dataSource
* @param transactionManager
* @return
* @throws Exception
*/
@Bean
public SimpleJobLauncher jobLauncher(ThreadPoolTaskExecutor taskExecutor, DruidDataSource dataSource,
PlatformTransactionManager transactionManager) throws Exception {
SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
jobLauncher.setTaskExecutor(taskExecutor);
jobLauncher.setJobRepository(jobRepository(dataSource, transactionManager));
return jobLauncher;
}
}
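
To show how this launcher ties in with the job configurations below, here is a minimal, hypothetical runner that supplies the job parameters the readers expect (input.file.name, plus input.job.name for MyJobListener). The class, the chosen job and the file path are placeholders and are not part of this commit.

package com.enzhico.trans.modules;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.launch.support.SimpleJobLauncher;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
/**
 * Hypothetical example (not in this commit): launches the canton import job
 * with the jobParameters that CantonConfig's reader and MyJobListener read.
 */
@Component
public class CantonJobRunner implements CommandLineRunner {
    @Resource
    private SimpleJobLauncher jobLauncher;
    @Resource(name = "cantonJob")
    private Job cantonJob;
    @Override
    public void run(String... args) throws Exception {
        JobParameters params = new JobParametersBuilder()
                .addString("input.job.name", "cantonJob")                    // logged by MyJobListener
                .addString("input.file.name", "/data/csv/NT_BSC_CANTON.csv") // placeholder path
                .addLong("run.id", System.currentTimeMillis())               // makes every launch unique
                .toJobParameters();
        jobLauncher.run(cantonJob, params);
    }
}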

View File

@ -0,0 +1,45 @@
package com.enzhico.trans.modules;
import org.springframework.batch.item.validator.ValidationException;
import org.springframework.batch.item.validator.Validator;
import org.springframework.beans.factory.InitializingBean;
import javax.validation.ConstraintViolation;
import javax.validation.Validation;
import javax.validation.ValidatorFactory;
import java.util.Set;
/**
* MyBeanValidator
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/3
*/
public class MyBeanValidator<T> implements Validator<T>, InitializingBean {
private javax.validation.Validator validator;
@Override
public void validate(T value) throws ValidationException {
/*
* Validate the item with the JSR-303 Validator's validate method
*/
Set<ConstraintViolation<T>> constraintViolations = validator.validate(value);
if (constraintViolations.size() > 0) {
StringBuilder message = new StringBuilder();
for (ConstraintViolation<T> constraintViolation : constraintViolations) {
message.append(constraintViolation.getMessage()).append("\n");
}
throw new ValidationException(message.toString());
}
}
/**
* The data is validated with a JSR-303 Validator; the Validator is initialized here.
*/
@Override
public void afterPropertiesSet() {
ValidatorFactory validatorFactory = Validation.buildDefaultValidatorFactory();
validator = validatorFactory.usingContext().getValidator();
}
}

View File

@ -0,0 +1,44 @@
package com.enzhico.trans.modules;
import com.baomidou.mybatisplus.toolkit.StringUtils;
import com.enzhico.trans.modules.common.DateUtil;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.validation.DataBinder;
import java.beans.PropertyEditorSupport;
import java.sql.Timestamp;
/**
* MyBeanWrapperFieldSetMapper
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/5
*/
public class MyBeanWrapperFieldSetMapper<T> extends BeanWrapperFieldSetMapper<T> {
@Override
protected void initBinder(DataBinder binder) {
binder.registerCustomEditor(Timestamp.class, new PropertyEditorSupport() {
@Override
public void setAsText(String text) throws IllegalArgumentException {
if (StringUtils.isNotEmpty(text)) {
setValue(DateUtil.parseTimestamp(text));
} else {
setValue(null);
}
}
@Override
public String getAsText() throws IllegalArgumentException {
Object date = getValue();
if (date != null) {
return DateUtil.formatTimestamp((Timestamp) date);
} else {
return "";
}
}
});
}
}

View File

@ -0,0 +1,33 @@
package com.enzhico.trans.modules;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobExecutionListener;
/**
* MyJobListener
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/3
*/
public class MyJobListener implements JobExecutionListener {
private Logger logger = LoggerFactory.getLogger(this.getClass());
private long startTime;
private long endTime;
@Override
public void beforeJob(JobExecution jobExecution) {
startTime = System.currentTimeMillis();
String jobName = jobExecution.getJobParameters().getString("input.job.name");
logger.info("任务-{}处理开始", jobName);
}
@Override
public void afterJob(JobExecution jobExecution) {
endTime = System.currentTimeMillis();
String jobName = jobExecution.getJobParameters().getString("input.job.name");
logger.info("任务-{}处理结束,总耗时=" + (endTime - startTime) + "ms", jobName);
}
}

View File

@ -0,0 +1,173 @@
package com.enzhico.trans.modules.canton;
/**
* Canton
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/5
*/
public class Canton {
private String id;
private String code;
private String name;
private String parentid;
private String financial;
private String contactman;
private String tel;
private String email;
private String cantonlev;
private String taxorgcode;
private String memo;
private String using;
private String usingdate;
private Integer level;
private String end;
private String qrcantonid;
private String declare;
private String declareisend;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getCode() {
return code;
}
public void setCode(String code) {
this.code = code;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getParentid() {
return parentid;
}
public void setParentid(String parentid) {
this.parentid = parentid;
}
public String getFinancial() {
return financial;
}
public void setFinancial(String financial) {
this.financial = financial;
}
public String getContactman() {
return contactman;
}
public void setContactman(String contactman) {
this.contactman = contactman;
}
public String getTel() {
return tel;
}
public void setTel(String tel) {
this.tel = tel;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public String getCantonlev() {
return cantonlev;
}
public void setCantonlev(String cantonlev) {
this.cantonlev = cantonlev;
}
public String getTaxorgcode() {
return taxorgcode;
}
public void setTaxorgcode(String taxorgcode) {
this.taxorgcode = taxorgcode;
}
public String getMemo() {
return memo;
}
public void setMemo(String memo) {
this.memo = memo;
}
public String getUsing() {
return using;
}
public void setUsing(String using) {
this.using = using;
}
public String getUsingdate() {
return usingdate;
}
public void setUsingdate(String usingdate) {
this.usingdate = usingdate;
}
public Integer getLevel() {
return level;
}
public void setLevel(Integer level) {
this.level = level;
}
public String getEnd() {
return end;
}
public void setEnd(String end) {
this.end = end;
}
public String getQrcantonid() {
return qrcantonid;
}
public void setQrcantonid(String qrcantonid) {
this.qrcantonid = qrcantonid;
}
public String getDeclare() {
return declare;
}
public void setDeclare(String declare) {
this.declare = declare;
}
public String getDeclareisend() {
return declareisend;
}
public void setDeclareisend(String declareisend) {
this.declareisend = declareisend;
}
}

View File

@ -0,0 +1,178 @@
package com.enzhico.trans.modules.canton;
import com.alibaba.druid.pool.DruidDataSource;
import com.enzhico.trans.modules.MyBeanValidator;
import com.enzhico.trans.modules.MyJobListener;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.ParseException;
import org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.batch.item.validator.ValidatingItemProcessor;
import org.springframework.batch.item.validator.ValidationException;
import org.springframework.batch.item.validator.Validator;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.FileSystemResource;
/**
* CantonConfig - batch configuration for the canton CSV import
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/3
*/
@Configuration
public class CantonConfig {
/**
* ItemReader definition, used to read the data:
* 1. use FlatFileItemReader to read the file,
* 2. set the CSV file path via FlatFileItemReader's setResource method,
* 3. map the CSV columns onto the domain model class.
*
* @return FlatFileItemReader
*/
@Bean(name = "cantonReader")
@StepScope
public FlatFileItemReader<Canton> reader(@Value("#{jobParameters['input.file.name']}") String pathToFile) {
FlatFileItemReader<Canton> reader = new FlatFileItemReader<>();
// reader.setResource(new ClassPathResource(pathToFile));
reader.setResource(new FileSystemResource(pathToFile));
reader.setLineMapper(new DefaultLineMapper<Canton>() {
{
setLineTokenizer(new DelimitedLineTokenizer(",") {
{
setNames(new String[]{
"id", "code", "name", "parentid", "financial", "contactman", "tel", "email",
"cantonlev", "taxorgcode", "memo", "using", "usingdate", "level", "end",
"qrcantonid", "declare", "declareisend"
});
}
});
setFieldSetMapper(new BeanWrapperFieldSetMapper<Canton>() {{
setTargetType(Canton.class);
}});
}
});
// skip the header line if the file contains one
reader.setLinesToSkip(1);
return reader;
}
/**
* ItemProcessor definition, used to process the data
*
* @return
*/
@Bean(name = "cantonProcessor")
public ItemProcessor<Canton, Canton> processor() {
//use a ValidatingItemProcessor as our custom ItemProcessor implementation
ValidatingItemProcessor<Canton> processor = new ValidatingItemProcessor<Canton>() {
@Override
public Canton process(Canton item) throws ValidationException {
/*
* super.process(item) must be called so that the custom validator runs
*/
super.process(item);
/*
* simple processing and conversion of the data goes here (TODO)
*/
return item;
}
};
//set csvBeanValidator() as the processor's validator
processor.setValidator(csvBeanValidator());
return processor;
}
/**
* ItemWriter definition, used to write the data out.
* Spring injects beans already in the container as method parameters; Spring Boot has already defined the dataSource.
*
* @param dataSource
* @return
*/
@Bean(name = "cantonWriter")
public ItemWriter<Canton> writer(DruidDataSource dataSource) {
JdbcBatchItemWriter<Canton> writer = new JdbcBatchItemWriter<>();
//use JdbcBatchItemWriter, which writes to the database with JDBC batching
writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>());
String sql = "insert into nt_bsc_Canton " + " (f_id,f_code,f_name,f_parentid,f_financial,f_contactman,f_tel,f_email,f_cantonlev,f_taxorgcode,f_memo,f_using,f_usingdate,f_level,f_end,f_qrcantonid,f_declare,f_declareisend) "
+ " values(:id,:code,:name,:parentid,:financial,:contactman,:tel,:email,:cantonlev,:taxorgcode,:memo,:using,:usingdate,:level,:end,:qrcantonid,:declare,:declareisend)";
//set the SQL statement the batch will execute
writer.setSql(sql);
writer.setDataSource(dataSource);
return writer;
}
/**
* Job definition: the task we actually run, made up of one or more Steps.
*
* @param jobBuilderFactory
* @param s1
* @return
*/
@Bean(name = "cantonJob")
public Job cantonJob(JobBuilderFactory jobBuilderFactory, @Qualifier("cantonStep1") Step s1) {
return jobBuilderFactory.get("cantonJob")
.incrementer(new RunIdIncrementer())
.flow(s1)//assign the Step to the Job
.end()
.listener(new MyJobListener())//attach the MyJobListener job listener
.build();
}
/**
* Step definition: combines the ItemReader, ItemProcessor and ItemWriter.
*
* @param stepBuilderFactory
* @param reader
* @param writer
* @param processor
* @return
*/
@Bean(name = "cantonStep1")
public Step cantonStep1(StepBuilderFactory stepBuilderFactory,
@Qualifier("cantonReader") ItemReader<Canton> reader,
@Qualifier("cantonWriter") ItemWriter<Canton> writer,
@Qualifier("cantonProcessor") ItemProcessor<Canton, Canton> processor) {
return stepBuilderFactory
.get("cantonStep1")
.<Canton, Canton>chunk(5000)//commit every 5000 records
.reader(reader)//bind the reader to the step
.processor(processor)//bind the processor to the step
.writer(writer)//bind the writer to the step
.faultTolerant()
.retry(Exception.class) // retry on exception
.noRetry(ParseException.class)
.retryLimit(1) //retry each record once
.skip(Exception.class)
.skipLimit(200) //allow at most 200 records to be skipped
// .taskExecutor(new SimpleAsyncTaskExecutor()) //run the step concurrently; a single job is usually better run serially
// .throttleLimit(10) //10 concurrent tasks (the default is 4)
.build();
}
@Bean
public Validator<Canton> csvBeanValidator() {
return new MyBeanValidator<>();
}
}

View File

@ -0,0 +1,191 @@
package com.enzhico.trans.modules.common;
import com.alibaba.druid.pool.DruidDataSource;
import com.enzhico.trans.config.properties.CommonProperties;
import com.enzhico.trans.modules.MyBeanValidator;
import com.enzhico.trans.modules.MyBeanWrapperFieldSetMapper;
import com.enzhico.trans.modules.MyJobListener;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.ParseException;
import org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.batch.item.validator.ValidatingItemProcessor;
import org.springframework.batch.item.validator.ValidationException;
import org.springframework.batch.item.validator.Validator;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.FileSystemResource;
import javax.annotation.Resource;
/**
* Generic configuration: the file, target class, columns and SQL all come from job parameters
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/3
*/
@Configuration
public class CommonConfig {
@Resource
private CommonProperties p;
/**
* ItemReader definition, used to read the data:
* 1. use FlatFileItemReader to read the file,
* 2. set the CSV file path via FlatFileItemReader's setResource method,
* 3. map the CSV columns onto the domain model class.
*
* @return FlatFileItemReader
*/
@Bean(name = "commonReader")
@StepScope
public FlatFileItemReader reader(@Value("#{jobParameters['input.file.name']}") String pathToFile,
@Value("#{jobParameters['input.vo.name']}") String voClass,
@Value("#{jobParameters['input.columns']}") String columns) {
FlatFileItemReader reader = new FlatFileItemReader<>();
if (p.getLocation() == 1) {
reader.setResource(new FileSystemResource(pathToFile));
} else {
reader.setResource(new ClassPathResource(pathToFile));
}
reader.setLineMapper(new DefaultLineMapper() {
{
setLineTokenizer(new DelimitedLineTokenizer("|") {
{
setNames(columns.split(","));
setQuoteCharacter('^');
}
});
setFieldSetMapper(new MyBeanWrapperFieldSetMapper() {{
try {
setTargetType(Class.forName(voClass));
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
}});
}
});
// skip the header line if the file contains one
reader.setLinesToSkip(1);
return reader;
}
/**
* ItemProcessor definition, used to process the data
*
* @return
*/
@Bean(name = "commonProcessor")
public ItemProcessor processor() {
//use a ValidatingItemProcessor as our custom ItemProcessor implementation
ValidatingItemProcessor processor = new ValidatingItemProcessor() {
public Object process(Object item) throws ValidationException {
/*
* super.process(item) must be called so that the custom validator runs
*/
super.process(item);
/*
* simple processing and conversion of the data goes here (TODO)
*/
return item;
}
};
//set csvBeanValidator() as the processor's validator
processor.setValidator(csvBeanValidator());
return processor;
}
/**
* ItemWriter definition, used to write the data out.
* Spring injects beans already in the container as method parameters; Spring Boot has already defined the dataSource.
*
* @param dataSource
* @return
*/
@Bean(name = "commonWriter")
@StepScope
public ItemWriter writer(DruidDataSource dataSource,
@Value("#{jobParameters['input.sql']}") String sql) {
JdbcBatchItemWriter writer = new JdbcBatchItemWriter<>();
//use JdbcBatchItemWriter, which writes to the database with JDBC batching
writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>());
//set the SQL statement the batch will execute
writer.setSql(sql);
writer.setDataSource(dataSource);
return writer;
}
/**
* Job definition: the task we actually run, made up of one or more Steps.
*
* @param jobBuilderFactory
* @param s1
* @return
*/
@Bean(name = "commonJob")
public Job commonJob(JobBuilderFactory jobBuilderFactory,
@Qualifier("commonStep1") Step s1) {
return jobBuilderFactory.get("commonJob")
.incrementer(new RunIdIncrementer())
.flow(s1)//assign the Step to the Job
.end()
.listener(new MyJobListener())//attach the MyJobListener job listener
.build();
}
/**
* Step definition: combines the ItemReader, ItemProcessor and ItemWriter.
*
* @param stepBuilderFactory
* @param reader
* @param writer
* @param processor
* @return
*/
@Bean(name = "commonStep1")
public Step commonStep1(StepBuilderFactory stepBuilderFactory,
@Qualifier("commonReader") ItemReader reader,
@Qualifier("commonWriter") ItemWriter writer,
@Qualifier("commonProcessor") ItemProcessor processor) {
return stepBuilderFactory
.get("commonStep1")
.chunk(5000)//commit every 5000 records
.reader(reader)//bind the reader to the step
.processor(processor)//bind the processor to the step
.writer(writer)//bind the writer to the step
// .faultTolerant()
// .retry(Exception.class) // retry on exception
// .noRetry(ParseException.class)
// .retryLimit(1) //retry each record once
// .skip(Exception.class)
// .skipLimit(100) //allow at most 100 records to be skipped
// .taskExecutor(new SimpleAsyncTaskExecutor()) //run the step concurrently; a single job is usually better run serially
// .throttleLimit(10) //10 concurrent tasks (the default is 4)
.build();
}
@Bean
public Validator csvBeanValidator() {
return new MyBeanValidator<>();
}
}
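
Since everything in this generic configuration is driven by job parameters, launching commonJob means supplying the file, the fully qualified VO class, the column names and the insert SQL. Below is a hypothetical sketch, not part of this commit (the component, the file path and the truncated column list are placeholders); the parameter names match the @Value expressions above, and the :F_XXX named parameters are resolved from the VO's properties by BeanPropertyItemSqlParameterSourceProvider.

package com.enzhico.trans.modules.common;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.launch.support.SimpleJobLauncher;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
/**
 * Hypothetical example (not in this commit): launching commonJob with the
 * parameters that CommonConfig's reader and writer expect.
 */
@Component
public class CommonJobLauncherExample {
    @Resource
    private SimpleJobLauncher jobLauncher;
    @Resource(name = "commonJob")
    private Job commonJob;
    public void importCantonCsv() throws Exception {
        JobParameters params = new JobParametersBuilder()
                .addString("input.job.name", "commonJob")
                .addString("input.file.name", "NT_BSC_CANTON.csv")           // placeholder file
                .addString("input.vo.name", "com.enzhico.trans.modules.common.vo.BscCanton")
                .addString("input.columns", "F_ID,F_CODE,F_NAME,F_PARENTID") // column list truncated for brevity
                .addString("input.sql", "insert into NT_BSC_CANTON (F_ID,F_CODE,F_NAME,F_PARENTID) "
                        + "values (:F_ID,:F_CODE,:F_NAME,:F_PARENTID)")
                .addLong("run.id", System.currentTimeMillis())
                .toJobParameters();
        jobLauncher.run(commonJob, params);
    }
}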

View File

@ -0,0 +1,49 @@
package com.enzhico.trans.modules.common;
import java.sql.Timestamp;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
/**
* DateUtil
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/5
*/
public class DateUtil {
private static final SimpleDateFormat sdf = new SimpleDateFormat("dd-M月-y hh.mm.ss.S a", Locale.CHINA);
private static final SimpleDateFormat sdf2 = new SimpleDateFormat("dd-M月 -y hh.mm.ss.S a", Locale.CHINA);
public static synchronized Date parseDatetime(String dateStr) {
try {
return sdf.parse(dateStr);
} catch (ParseException e) {
return new Date();
}
}
public static synchronized Timestamp parseTimestamp(String dateStr) {
try {
return new Timestamp(sdf.parse(dateStr).getTime());
} catch (ParseException e) {
try {
return new Timestamp(sdf2.parse(dateStr).getTime());
} catch (ParseException ee) {
return new Timestamp(System.currentTimeMillis());
}
}
}
public static synchronized String formatTimestamp(Timestamp date) {
return sdf.format(date);
}
public static void main(String[] args) {
Timestamp t = parseTimestamp("08-12月-17 05.38.07.859000 下午");
System.out.println(t);
System.out.println(formatTimestamp(t));
}
}

View File

@ -0,0 +1,20 @@
package com.enzhico.trans.modules.common.anno;
import java.lang.annotation.*;
/**
* Table name annotation
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/6
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@Documented
public @interface TableName {
/**
* The physical table name
*/
String value();
}
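
Nothing in this commit shows how @TableName is consumed, but one plausible use is generating the input.sql parameter for CommonConfig by reflecting over an annotated VO, since the VO field names mirror the column names. The helper below is a hypothetical sketch under that assumption, not part of this commit; for example, InsertSqlBuilder.buildInsertSql(BscCanton.class) would yield an insert statement for NT_BSC_CANTON.

package com.enzhico.trans.modules.common.anno;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;
/**
 * Hypothetical helper (not in this commit): builds the insert SQL for
 * CommonConfig from a VO class annotated with @TableName.
 */
public final class InsertSqlBuilder {
    private InsertSqlBuilder() {
    }
    public static String buildInsertSql(Class<?> voClass) {
        TableName tableName = voClass.getAnnotation(TableName.class);
        if (tableName == null) {
            throw new IllegalArgumentException(voClass + " is not annotated with @TableName");
        }
        List<String> columns = new ArrayList<>();
        List<String> params = new ArrayList<>();
        for (Field field : voClass.getDeclaredFields()) {
            columns.add(field.getName());      // VO field names mirror the column names (F_ID, F_CODE, ...)
            params.add(":" + field.getName()); // named parameters resolved by BeanPropertyItemSqlParameterSourceProvider
        }
        return "insert into " + tableName.value()
                + " (" + String.join(",", columns) + ") values (" + String.join(",", params) + ")";
    }
}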

View File

@ -0,0 +1,176 @@
package com.enzhico.trans.modules.common.vo;
import com.enzhico.trans.modules.common.anno.TableName;
/**
* BscCanton
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/10
*/
@TableName("NT_BSC_CANTON")
public class BscCanton {
private String F_ID;
private String F_CODE;
private String F_NAME;
private String F_PARENTID;
private String F_FINANCIAL;
private String F_CONTACTMAN;
private String F_TEL;
private String F_EMAIL;
private String F_CANTONLEV;
private String F_TAXORGCODE;
private String F_MEMO;
private String F_USING;
private String F_USINGDATE;
private Integer F_LEVEL;
private String F_END;
private String F_QRCANTONID;
private String F_DECLARE;
private String F_DECLAREISEND;
public String getF_ID() {
return F_ID;
}
public void setF_ID(String f_ID) {
F_ID = f_ID;
}
public String getF_CODE() {
return F_CODE;
}
public void setF_CODE(String f_CODE) {
F_CODE = f_CODE;
}
public String getF_NAME() {
return F_NAME;
}
public void setF_NAME(String f_NAME) {
F_NAME = f_NAME;
}
public String getF_PARENTID() {
return F_PARENTID;
}
public void setF_PARENTID(String f_PARENTID) {
F_PARENTID = f_PARENTID;
}
public String getF_FINANCIAL() {
return F_FINANCIAL;
}
public void setF_FINANCIAL(String f_FINANCIAL) {
F_FINANCIAL = f_FINANCIAL;
}
public String getF_CONTACTMAN() {
return F_CONTACTMAN;
}
public void setF_CONTACTMAN(String f_CONTACTMAN) {
F_CONTACTMAN = f_CONTACTMAN;
}
public String getF_TEL() {
return F_TEL;
}
public void setF_TEL(String f_TEL) {
F_TEL = f_TEL;
}
public String getF_EMAIL() {
return F_EMAIL;
}
public void setF_EMAIL(String f_EMAIL) {
F_EMAIL = f_EMAIL;
}
public String getF_CANTONLEV() {
return F_CANTONLEV;
}
public void setF_CANTONLEV(String f_CANTONLEV) {
F_CANTONLEV = f_CANTONLEV;
}
public String getF_TAXORGCODE() {
return F_TAXORGCODE;
}
public void setF_TAXORGCODE(String f_TAXORGCODE) {
F_TAXORGCODE = f_TAXORGCODE;
}
public String getF_MEMO() {
return F_MEMO;
}
public void setF_MEMO(String f_MEMO) {
F_MEMO = f_MEMO;
}
public String getF_USING() {
return F_USING;
}
public void setF_USING(String f_USING) {
F_USING = f_USING;
}
public String getF_USINGDATE() {
return F_USINGDATE;
}
public void setF_USINGDATE(String f_USINGDATE) {
F_USINGDATE = f_USINGDATE;
}
public Integer getF_LEVEL() {
return F_LEVEL;
}
public void setF_LEVEL(Integer f_LEVEL) {
F_LEVEL = f_LEVEL;
}
public String getF_END() {
return F_END;
}
public void setF_END(String f_END) {
F_END = f_END;
}
public String getF_QRCANTONID() {
return F_QRCANTONID;
}
public void setF_QRCANTONID(String f_QRCANTONID) {
F_QRCANTONID = f_QRCANTONID;
}
public String getF_DECLARE() {
return F_DECLARE;
}
public void setF_DECLARE(String f_DECLARE) {
F_DECLARE = f_DECLARE;
}
public String getF_DECLAREISEND() {
return F_DECLAREISEND;
}
public void setF_DECLAREISEND(String f_DECLAREISEND) {
F_DECLAREISEND = f_DECLAREISEND;
}
}

View File

@ -0,0 +1,242 @@
package com.enzhico.trans.modules.common.vo;
import com.enzhico.trans.modules.common.anno.TableName;
import org.springframework.format.annotation.DateTimeFormat;
import java.sql.Timestamp;
/**
* BscExeOffice
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/5
*/
@TableName("NT_BSC_EXEOFFICE")
public class BscExeOffice {
private String F_ID;
private String F_CANTONID;
private String F_CODE;
private String F_NAME;
private String F_MEMCODE;
private String F_SUPDEPTID;
private String F_COMDEPTID;
private String F_CONTACTMAN;
private String F_TEL;
private String F_MOBIL;
private String F_EMAIL;
private String F_BGOFFICEID;
private String F_INFOMOBIL;
private String F_INFOMAN;
private String F_LOGPASS;
private String F_STARTDATE;
private String F_STOPDATE;
private String F_STATUS;
private String F_MEMO;
private String F_AUDITER;
private String F_AUDITTIME;
private String F_ISAUDIT;
private Timestamp F_EDITTIME;
private Integer F_PLATFORM_ID;
private String F_ISPRINTBILL;
public String getF_ID() {
return F_ID;
}
public void setF_ID(String f_ID) {
F_ID = f_ID;
}
public String getF_CANTONID() {
return F_CANTONID;
}
public void setF_CANTONID(String f_CANTONID) {
F_CANTONID = f_CANTONID;
}
public String getF_CODE() {
return F_CODE;
}
public void setF_CODE(String f_CODE) {
F_CODE = f_CODE;
}
public String getF_NAME() {
return F_NAME;
}
public void setF_NAME(String f_NAME) {
F_NAME = f_NAME;
}
public String getF_MEMCODE() {
return F_MEMCODE;
}
public void setF_MEMCODE(String f_MEMCODE) {
F_MEMCODE = f_MEMCODE;
}
public String getF_SUPDEPTID() {
return F_SUPDEPTID;
}
public void setF_SUPDEPTID(String f_SUPDEPTID) {
F_SUPDEPTID = f_SUPDEPTID;
}
public String getF_COMDEPTID() {
return F_COMDEPTID;
}
public void setF_COMDEPTID(String f_COMDEPTID) {
F_COMDEPTID = f_COMDEPTID;
}
public String getF_CONTACTMAN() {
return F_CONTACTMAN;
}
public void setF_CONTACTMAN(String f_CONTACTMAN) {
F_CONTACTMAN = f_CONTACTMAN;
}
public String getF_TEL() {
return F_TEL;
}
public void setF_TEL(String f_TEL) {
F_TEL = f_TEL;
}
public String getF_MOBIL() {
return F_MOBIL;
}
public void setF_MOBIL(String f_MOBIL) {
F_MOBIL = f_MOBIL;
}
public String getF_EMAIL() {
return F_EMAIL;
}
public void setF_EMAIL(String f_EMAIL) {
F_EMAIL = f_EMAIL;
}
public String getF_BGOFFICEID() {
return F_BGOFFICEID;
}
public void setF_BGOFFICEID(String f_BGOFFICEID) {
F_BGOFFICEID = f_BGOFFICEID;
}
public String getF_INFOMOBIL() {
return F_INFOMOBIL;
}
public void setF_INFOMOBIL(String f_INFOMOBIL) {
F_INFOMOBIL = f_INFOMOBIL;
}
public String getF_INFOMAN() {
return F_INFOMAN;
}
public void setF_INFOMAN(String f_INFOMAN) {
F_INFOMAN = f_INFOMAN;
}
public String getF_LOGPASS() {
return F_LOGPASS;
}
public void setF_LOGPASS(String f_LOGPASS) {
F_LOGPASS = f_LOGPASS;
}
public String getF_STARTDATE() {
return F_STARTDATE;
}
public void setF_STARTDATE(String f_STARTDATE) {
F_STARTDATE = f_STARTDATE;
}
public String getF_STOPDATE() {
return F_STOPDATE;
}
public void setF_STOPDATE(String f_STOPDATE) {
F_STOPDATE = f_STOPDATE;
}
public String getF_STATUS() {
return F_STATUS;
}
public void setF_STATUS(String f_STATUS) {
F_STATUS = f_STATUS;
}
public String getF_MEMO() {
return F_MEMO;
}
public void setF_MEMO(String f_MEMO) {
F_MEMO = f_MEMO;
}
public String getF_AUDITER() {
return F_AUDITER;
}
public void setF_AUDITER(String f_AUDITER) {
F_AUDITER = f_AUDITER;
}
public String getF_AUDITTIME() {
return F_AUDITTIME;
}
public void setF_AUDITTIME(String f_AUDITTIME) {
F_AUDITTIME = f_AUDITTIME;
}
public String getF_ISAUDIT() {
return F_ISAUDIT;
}
public void setF_ISAUDIT(String f_ISAUDIT) {
F_ISAUDIT = f_ISAUDIT;
}
public Timestamp getF_EDITTIME() {
return F_EDITTIME;
}
public void setF_EDITTIME(Timestamp f_EDITTIME) {
F_EDITTIME = f_EDITTIME;
}
public Integer getF_PLATFORM_ID() {
return F_PLATFORM_ID;
}
public void setF_PLATFORM_ID(Integer f_PLATFORM_ID) {
F_PLATFORM_ID = f_PLATFORM_ID;
}
public String getF_ISPRINTBILL() {
return F_ISPRINTBILL;
}
public void setF_ISPRINTBILL(String f_ISPRINTBILL) {
F_ISPRINTBILL = f_ISPRINTBILL;
}
}

View File

@ -0,0 +1,95 @@
package com.enzhico.trans.modules.common.vo;
import com.enzhico.trans.modules.common.anno.TableName;
/**
* BscOfficeExeItem
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/6
*/
@TableName("NT_BSC_OFFICEEXEITEM")
public class BscOfficeExeItem {
private String F_ID;
private String F_CANTONID;
private String F_OFFICEID;
private String F_TOLLID;
private String F_TOLLCODE;
private String F_START;
private String F_END;
private String F_STATUS;
private String F_VERSION;
public String getF_ID() {
return F_ID;
}
public void setF_ID(String f_ID) {
F_ID = f_ID;
}
public String getF_CANTONID() {
return F_CANTONID;
}
public void setF_CANTONID(String f_CANTONID) {
F_CANTONID = f_CANTONID;
}
public String getF_OFFICEID() {
return F_OFFICEID;
}
public void setF_OFFICEID(String f_OFFICEID) {
F_OFFICEID = f_OFFICEID;
}
public String getF_TOLLID() {
return F_TOLLID;
}
public void setF_TOLLID(String f_TOLLID) {
F_TOLLID = f_TOLLID;
}
public String getF_TOLLCODE() {
return F_TOLLCODE;
}
public void setF_TOLLCODE(String f_TOLLCODE) {
F_TOLLCODE = f_TOLLCODE;
}
public String getF_START() {
return F_START;
}
public void setF_START(String f_START) {
F_START = f_START;
}
public String getF_END() {
return F_END;
}
public void setF_END(String f_END) {
F_END = f_END;
}
public String getF_STATUS() {
return F_STATUS;
}
public void setF_STATUS(String f_STATUS) {
F_STATUS = f_STATUS;
}
public String getF_VERSION() {
return F_VERSION;
}
public void setF_VERSION(String f_VERSION) {
F_VERSION = f_VERSION;
}
}

View File

@ -0,0 +1,305 @@
package com.enzhico.trans.modules.common.vo;
import com.enzhico.trans.modules.common.anno.TableName;
import java.math.BigDecimal;
import java.sql.Timestamp;
/**
* BscTollItem
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/6
*/
@TableName("NT_BSC_TOLLITEM")
public class BscTollItem {
private String F_ID;
private String F_CANTONID;
private String F_CODE;
private String F_NAME;
private String F_MEMCODE;
private String F_UNICODE;
private String F_UNIT;
private String F_ISRNGSTD;
private String F_APRLEVEL;
private String F_ISSHARE;
private String F_SHAREMODE;
private String F_ENDCANTONID;
private String F_FUNDSORT;
private String F_TOLLSORT;
private String F_STARTDATE;
private String F_ENDDATE;
private String F_TAX;
private String F_ISPUB;
private String F_ALLOWADD;
private String F_MEMO;
private String F_LEVEL;
private String F_END;
private String F_SHAREDIREC;
private String F_HISTORYID;
private String F_STATUS;
private String F_VERSION;
private String F_ISSTD;
private Timestamp F_EDITTIME;
private String F_PLATFORM_ID;
private String F_ISENTERPRISES;
private String F_ISCLEAR;
private String F_PARENTID;
public String getF_ID() {
return F_ID;
}
public void setF_ID(String f_ID) {
F_ID = f_ID;
}
public String getF_CANTONID() {
return F_CANTONID;
}
public void setF_CANTONID(String f_CANTONID) {
F_CANTONID = f_CANTONID;
}
public String getF_CODE() {
return F_CODE;
}
public void setF_CODE(String f_CODE) {
F_CODE = f_CODE;
}
public String getF_NAME() {
return F_NAME;
}
public void setF_NAME(String f_NAME) {
F_NAME = f_NAME;
}
public String getF_MEMCODE() {
return F_MEMCODE;
}
public void setF_MEMCODE(String f_MEMCODE) {
F_MEMCODE = f_MEMCODE;
}
public String getF_UNICODE() {
return F_UNICODE;
}
public void setF_UNICODE(String f_UNICODE) {
F_UNICODE = f_UNICODE;
}
public String getF_UNIT() {
return F_UNIT;
}
public void setF_UNIT(String f_UNIT) {
F_UNIT = f_UNIT;
}
public String getF_ISRNGSTD() {
return F_ISRNGSTD;
}
public void setF_ISRNGSTD(String f_ISRNGSTD) {
F_ISRNGSTD = f_ISRNGSTD;
}
public String getF_APRLEVEL() {
return F_APRLEVEL;
}
public void setF_APRLEVEL(String f_APRLEVEL) {
F_APRLEVEL = f_APRLEVEL;
}
public String getF_ISSHARE() {
return F_ISSHARE;
}
public void setF_ISSHARE(String f_ISSHARE) {
F_ISSHARE = f_ISSHARE;
}
public String getF_SHAREMODE() {
return F_SHAREMODE;
}
public void setF_SHAREMODE(String f_SHAREMODE) {
F_SHAREMODE = f_SHAREMODE;
}
public String getF_ENDCANTONID() {
return F_ENDCANTONID;
}
public void setF_ENDCANTONID(String f_ENDCANTONID) {
F_ENDCANTONID = f_ENDCANTONID;
}
public String getF_FUNDSORT() {
return F_FUNDSORT;
}
public void setF_FUNDSORT(String f_FUNDSORT) {
F_FUNDSORT = f_FUNDSORT;
}
public String getF_TOLLSORT() {
return F_TOLLSORT;
}
public void setF_TOLLSORT(String f_TOLLSORT) {
F_TOLLSORT = f_TOLLSORT;
}
public String getF_STARTDATE() {
return F_STARTDATE;
}
public void setF_STARTDATE(String f_STARTDATE) {
F_STARTDATE = f_STARTDATE;
}
public String getF_ENDDATE() {
return F_ENDDATE;
}
public void setF_ENDDATE(String f_ENDDATE) {
F_ENDDATE = f_ENDDATE;
}
public String getF_TAX() {
return F_TAX;
}
public void setF_TAX(String f_TAX) {
F_TAX = f_TAX;
}
public String getF_ISPUB() {
return F_ISPUB;
}
public void setF_ISPUB(String f_ISPUB) {
F_ISPUB = f_ISPUB;
}
public String getF_ALLOWADD() {
return F_ALLOWADD;
}
public void setF_ALLOWADD(String f_ALLOWADD) {
F_ALLOWADD = f_ALLOWADD;
}
public String getF_MEMO() {
return F_MEMO;
}
public void setF_MEMO(String f_MEMO) {
F_MEMO = f_MEMO;
}
public String getF_LEVEL() {
return F_LEVEL;
}
public void setF_LEVEL(String f_LEVEL) {
F_LEVEL = f_LEVEL;
}
public String getF_END() {
return F_END;
}
public void setF_END(String f_END) {
F_END = f_END;
}
public String getF_SHAREDIREC() {
return F_SHAREDIREC;
}
public void setF_SHAREDIREC(String f_SHAREDIREC) {
F_SHAREDIREC = f_SHAREDIREC;
}
public String getF_HISTORYID() {
return F_HISTORYID;
}
public void setF_HISTORYID(String f_HISTORYID) {
F_HISTORYID = f_HISTORYID;
}
public String getF_STATUS() {
return F_STATUS;
}
public void setF_STATUS(String f_STATUS) {
F_STATUS = f_STATUS;
}
public String getF_VERSION() {
return F_VERSION;
}
public void setF_VERSION(String f_VERSION) {
F_VERSION = f_VERSION;
}
public String getF_ISSTD() {
return F_ISSTD;
}
public void setF_ISSTD(String f_ISSTD) {
F_ISSTD = f_ISSTD;
}
public Timestamp getF_EDITTIME() {
return F_EDITTIME;
}
public void setF_EDITTIME(Timestamp f_EDITTIME) {
F_EDITTIME = f_EDITTIME;
}
public String getF_PLATFORM_ID() {
return F_PLATFORM_ID;
}
public void setF_PLATFORM_ID(String f_PLATFORM_ID) {
F_PLATFORM_ID = f_PLATFORM_ID;
}
public String getF_ISENTERPRISES() {
return F_ISENTERPRISES;
}
public void setF_ISENTERPRISES(String f_ISENTERPRISES) {
F_ISENTERPRISES = f_ISENTERPRISES;
}
public String getF_ISCLEAR() {
return F_ISCLEAR;
}
public void setF_ISCLEAR(String f_ISCLEAR) {
F_ISCLEAR = f_ISCLEAR;
}
public String getF_PARENTID() {
return F_PARENTID;
}
public void setF_PARENTID(String f_PARENTID) {
F_PARENTID = f_PARENTID;
}
}

View File

@ -0,0 +1,205 @@
package com.enzhico.trans.modules.common.vo;
import com.enzhico.trans.modules.common.anno.TableName;
import java.math.BigDecimal;
/**
* BscTollSpecialShare
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/6
*/
@TableName("NT_BSC_TOLLSPECIALSHARE")
public class BscTollSpecialShare {
private String F_ID;
private String F_SUBID;
private String F_CANTONID;
private String F_OFFICEID;
private String F_TOLLID;
private Integer F_ORDER;
private BigDecimal F_SHARE;
private String F_ISRATION;
private String F_INCANTONID;
private String F_INOFFICEID;
private Integer F_INACCOUNTTYPE;
private String F_INACCOUNTID;
private String F_STARTDATE;
private String F_ENDDATE;
private String F_MEMO;
private String F_STATUS;
private String F_ISAUDIT;
private String F_AUDITER;
private String F_AUDITTIME;
private String F_VERSION;
private String F_BUDGETCODE;
public String getF_ID() {
return F_ID;
}
public void setF_ID(String f_ID) {
F_ID = f_ID;
}
public String getF_SUBID() {
return F_SUBID;
}
public void setF_SUBID(String f_SUBID) {
F_SUBID = f_SUBID;
}
public String getF_CANTONID() {
return F_CANTONID;
}
public void setF_CANTONID(String f_CANTONID) {
F_CANTONID = f_CANTONID;
}
public String getF_OFFICEID() {
return F_OFFICEID;
}
public void setF_OFFICEID(String f_OFFICEID) {
F_OFFICEID = f_OFFICEID;
}
public String getF_TOLLID() {
return F_TOLLID;
}
public void setF_TOLLID(String f_TOLLID) {
F_TOLLID = f_TOLLID;
}
public Integer getF_ORDER() {
return F_ORDER;
}
public void setF_ORDER(Integer f_ORDER) {
F_ORDER = f_ORDER;
}
public BigDecimal getF_SHARE() {
return F_SHARE;
}
public void setF_SHARE(BigDecimal f_SHARE) {
F_SHARE = f_SHARE;
}
public String getF_ISRATION() {
return F_ISRATION;
}
public void setF_ISRATION(String f_ISRATION) {
F_ISRATION = f_ISRATION;
}
public String getF_INCANTONID() {
return F_INCANTONID;
}
public void setF_INCANTONID(String f_INCANTONID) {
F_INCANTONID = f_INCANTONID;
}
public String getF_INOFFICEID() {
return F_INOFFICEID;
}
public void setF_INOFFICEID(String f_INOFFICEID) {
F_INOFFICEID = f_INOFFICEID;
}
public Integer getF_INACCOUNTTYPE() {
return F_INACCOUNTTYPE;
}
public void setF_INACCOUNTTYPE(Integer f_INACCOUNTTYPE) {
F_INACCOUNTTYPE = f_INACCOUNTTYPE;
}
public String getF_INACCOUNTID() {
return F_INACCOUNTID;
}
public void setF_INACCOUNTID(String f_INACCOUNTID) {
F_INACCOUNTID = f_INACCOUNTID;
}
public String getF_STARTDATE() {
return F_STARTDATE;
}
public void setF_STARTDATE(String f_STARTDATE) {
F_STARTDATE = f_STARTDATE;
}
public String getF_ENDDATE() {
return F_ENDDATE;
}
public void setF_ENDDATE(String f_ENDDATE) {
F_ENDDATE = f_ENDDATE;
}
public String getF_MEMO() {
return F_MEMO;
}
public void setF_MEMO(String f_MEMO) {
F_MEMO = f_MEMO;
}
public String getF_STATUS() {
return F_STATUS;
}
public void setF_STATUS(String f_STATUS) {
F_STATUS = f_STATUS;
}
public String getF_ISAUDIT() {
return F_ISAUDIT;
}
public void setF_ISAUDIT(String f_ISAUDIT) {
F_ISAUDIT = f_ISAUDIT;
}
public String getF_AUDITER() {
return F_AUDITER;
}
public void setF_AUDITER(String f_AUDITER) {
F_AUDITER = f_AUDITER;
}
public String getF_AUDITTIME() {
return F_AUDITTIME;
}
public void setF_AUDITTIME(String f_AUDITTIME) {
F_AUDITTIME = f_AUDITTIME;
}
public String getF_VERSION() {
return F_VERSION;
}
public void setF_VERSION(String f_VERSION) {
F_VERSION = f_VERSION;
}
public String getF_BUDGETCODE() {
return F_BUDGETCODE;
}
public void setF_BUDGETCODE(String f_BUDGETCODE) {
F_BUDGETCODE = f_BUDGETCODE;
}
}

View File

@ -0,0 +1,152 @@
package com.enzhico.trans.modules.vtoll;
import javax.validation.constraints.Size;
/**
* BudgetVtoll
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/3
*/
public class BudgetVtoll {
private String id;
private String year;
private String tollid;
private String budgetid;
private String cbudgetid;
private String version;
/**
* Validated with a JSR-303 annotation
*/
@Size(max = 100)
private String auditmsg;
private String trialstatus;
private String firauditer;
private String firaudittime;
private String finauditer;
private String finaudittime;
private String edittime;
private String startdate;
private String enddate;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getYear() {
return year;
}
public void setYear(String year) {
this.year = year;
}
public String getTollid() {
return tollid;
}
public void setTollid(String tollid) {
this.tollid = tollid;
}
public String getBudgetid() {
return budgetid;
}
public void setBudgetid(String budgetid) {
this.budgetid = budgetid;
}
public String getCbudgetid() {
return cbudgetid;
}
public void setCbudgetid(String cbudgetid) {
this.cbudgetid = cbudgetid;
}
public String getVersion() {
return version;
}
public void setVersion(String version) {
this.version = version;
}
public String getAuditmsg() {
return auditmsg;
}
public void setAuditmsg(String auditmsg) {
this.auditmsg = auditmsg;
}
public String getTrialstatus() {
return trialstatus;
}
public void setTrialstatus(String trialstatus) {
this.trialstatus = trialstatus;
}
public String getFirauditer() {
return firauditer;
}
public void setFirauditer(String firauditer) {
this.firauditer = firauditer;
}
public String getFiraudittime() {
return firaudittime;
}
public void setFiraudittime(String firaudittime) {
this.firaudittime = firaudittime;
}
public String getFinauditer() {
return finauditer;
}
public void setFinauditer(String finauditer) {
this.finauditer = finauditer;
}
public String getFinaudittime() {
return finaudittime;
}
public void setFinaudittime(String finaudittime) {
this.finaudittime = finaudittime;
}
public String getEdittime() {
return edittime;
}
public void setEdittime(String edittime) {
this.edittime = edittime;
}
public String getStartdate() {
return startdate;
}
public void setStartdate(String startdate) {
this.startdate = startdate;
}
public String getEnddate() {
return enddate;
}
public void setEnddate(String enddate) {
this.enddate = enddate;
}
}

View File

@ -0,0 +1,178 @@
package com.enzhico.trans.modules.vtoll;
import com.alibaba.druid.pool.DruidDataSource;
import com.enzhico.trans.modules.MyBeanValidator;
import com.enzhico.trans.modules.MyJobListener;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.ParseException;
import org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.batch.item.validator.ValidatingItemProcessor;
import org.springframework.batch.item.validator.ValidationException;
import org.springframework.batch.item.validator.Validator;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.task.SimpleAsyncTaskExecutor;
/**
* BudgetVtollConfig - batch configuration for the BudgetVtoll CSV import
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/3
*/
@Configuration
public class BudgetVtollConfig {
/**
* ItemReader definition, used to read the data:
* 1. use FlatFileItemReader to read the file,
* 2. set the CSV file path via FlatFileItemReader's setResource method,
* 3. map the CSV columns onto the domain model class.
*
* @return FlatFileItemReader
*/
@Bean(name = "vtollReader")
@StepScope
public FlatFileItemReader<BudgetVtoll> reader(@Value("#{jobParameters['input.file.name']}") String pathToFile) {
FlatFileItemReader<BudgetVtoll> reader = new FlatFileItemReader<>();
// reader.setResource(new ClassPathResource(pathToFile));
reader.setResource(new FileSystemResource(pathToFile));
reader.setLineMapper(new DefaultLineMapper<BudgetVtoll>() {
{
setLineTokenizer(new DelimitedLineTokenizer(",") {
{
setNames(new String[]{
"id", "year", "tollid", "budgetid", "cbudgetid", "version", "auditmsg", "trialstatus",
"firauditer", "firaudittime", "finauditer", "finaudittime", "edittime", "startdate", "enddate"
});
}
});
setFieldSetMapper(new BeanWrapperFieldSetMapper<BudgetVtoll>() {{
setTargetType(BudgetVtoll.class);
}});
}
});
// skip the header line if the file contains one
reader.setLinesToSkip(1);
return reader;
}
/**
* ItemProcessor definition, used to process the data
*
* @return
*/
@Bean(name = "vtollProcessor")
public ItemProcessor<BudgetVtoll, BudgetVtoll> processor() {
//use a ValidatingItemProcessor as our custom ItemProcessor implementation
ValidatingItemProcessor<BudgetVtoll> processor = new ValidatingItemProcessor<BudgetVtoll>() {
@Override
public BudgetVtoll process(BudgetVtoll item) throws ValidationException {
/*
* super.process(item) must be called so that the custom validator runs
*/
super.process(item);
/*
* simple processing and conversion of the data goes here (TODO)
*/
return item;
}
};
//set csvBeanValidator() as the processor's validator
processor.setValidator(csvBeanValidator());
return processor;
}
/**
* ItemWriter definition, used to write the data out.
* Spring injects beans already in the container as method parameters; Spring Boot has already defined the dataSource.
*
* @param dataSource
* @return
*/
@Bean(name = "vtollWriter")
public ItemWriter<BudgetVtoll> writer(DruidDataSource dataSource) {
JdbcBatchItemWriter<BudgetVtoll> writer = new JdbcBatchItemWriter<>();
//use JdbcBatchItemWriter, which writes to the database with JDBC batching
writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>());
String sql = "insert into nt_bsc_BudgetVtoll " + " (f_id,f_year,f_tollid,f_budgetid,f_cbudgetid,f_version,f_auditmsg,f_trialstatus,f_firauditer,f_firaudittime,f_finauditer,f_finaudittime,f_edittime,f_startdate,f_enddate) "
+ " values(:id,:year,:tollid,:budgetid,:cbudgetid,:version,:auditmsg,:trialstatus,:firauditer,:firaudittime,:finauditer,:finaudittime,:edittime,:startdate,:enddate)";
//set the SQL statement the batch will execute
writer.setSql(sql);
writer.setDataSource(dataSource);
return writer;
}
/**
* Job definition: the task we actually run, made up of one or more Steps.
*
* @param jobBuilderFactory
* @param s1
* @return
*/
@Bean(name = "vtollJob")
public Job vtollJob(JobBuilderFactory jobBuilderFactory, @Qualifier("vtollStep1") Step s1) {
return jobBuilderFactory.get("vtollJob")
.incrementer(new RunIdIncrementer())
.flow(s1)//assign the Step to the Job
.end()
.listener(new MyJobListener())//attach the MyJobListener job listener
.build();
}
/**
* Step definition: combines the ItemReader, ItemProcessor and ItemWriter.
*
* @param stepBuilderFactory
* @param reader
* @param writer
* @param processor
* @return
*/
@Bean(name = "vtollStep1")
public Step vtollStep1(StepBuilderFactory stepBuilderFactory,
@Qualifier("vtollReader") ItemReader<BudgetVtoll> reader,
@Qualifier("vtollWriter") ItemWriter<BudgetVtoll> writer,
@Qualifier("vtollProcessor") ItemProcessor<BudgetVtoll, BudgetVtoll> processor) {
return stepBuilderFactory
.get("vtollStep1")
.<BudgetVtoll, BudgetVtoll>chunk(5000)//commit every 5000 records
.reader(reader)//bind the reader to the step
.processor(processor)//bind the processor to the step
.writer(writer)//bind the writer to the step
.faultTolerant()
.retry(Exception.class) // retry on exception
.noRetry(ParseException.class)
.retryLimit(1) //retry each record once
.skip(Exception.class)
.skipLimit(200) //allow at most 200 records to be skipped
// .taskExecutor(new SimpleAsyncTaskExecutor()) //run the step concurrently; a single job is usually better run serially
// .throttleLimit(10) //10 concurrent tasks (the default is 4)
.build();
}
@Bean
public Validator<BudgetVtoll> csvBeanValidator() {
return new MyBeanValidator<>();
}
}

View File

@ -0,0 +1,41 @@
package com.enzhico.trans.modules.zapp;
import com.enzhico.trans.modules.common.anno.TableName;
/**
* App
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/5
*/
@TableName("Z_TEST_APP")
public class App {
private int appid;
private String zname;
private String flag;
public int getAppid() {
return appid;
}
public void setAppid(int appid) {
this.appid = appid;
}
public String getZname() {
return zname;
}
public void setZname(String zname) {
this.zname = zname;
}
public String getFlag() {
return flag;
}
public void setFlag(String flag) {
this.flag = flag;
}
}

View File

@ -0,0 +1,175 @@
package com.enzhico.trans.modules.zapp;
import com.alibaba.druid.pool.DruidDataSource;
import com.enzhico.trans.modules.MyBeanValidator;
import com.enzhico.trans.modules.MyJobListener;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.ParseException;
import org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.batch.item.validator.ValidatingItemProcessor;
import org.springframework.batch.item.validator.ValidationException;
import org.springframework.batch.item.validator.Validator;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.FileSystemResource;
/**
* AppConfig: Spring Batch configuration for importing the App CSV file
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/3
*/
@Configuration
public class AppConfig {
/**
* ItemReader definition, used to read the data:
* 1. use a FlatFileItemReader to read the file
* 2. call FlatFileItemReader#setResource to point at the CSV file
* 3. map the CSV columns onto the domain model class
*
* @return FlatFileItemReader
*/
@Bean(name = "appReader")
@StepScope
public FlatFileItemReader<App> reader(@Value("#{jobParameters['input.file.name']}") String pathToFile) {
FlatFileItemReader<App> reader = new FlatFileItemReader<>();
// reader.setResource(new ClassPathResource(pathToFile));
reader.setResource(new FileSystemResource(pathToFile));
reader.setLineMapper(new DefaultLineMapper<App>() {
{
setLineTokenizer(new DelimitedLineTokenizer("|") {
{
setNames(new String[]{
"appid", "zname", "flag"
});
}
});
setFieldSetMapper(new BeanWrapperFieldSetMapper<App>() {{
setTargetType(App.class);
}});
}
});
// If the file contained a header it would need to be skipped; this file has none, so skip 0 lines
reader.setLinesToSkip(0);
return reader;
}
/**
* ItemProcessor definition, used to process (and validate) the data.
*
* @return the ItemProcessor
*/
@Bean(name = "appProcessor")
public ItemProcessor<App, App> processor() {
// Use a ValidatingItemProcessor so that our custom validator runs for every item
ValidatingItemProcessor<App> processor = new ValidatingItemProcessor<App>() {
@Override
public App process(App item) throws ValidationException {
/*
* super.process(item) must be called, otherwise the custom validator is never invoked
*/
super.process(item);
/*
* TODO: apply any simple processing or conversion of the item here
*/
return item;
}
};
// Register csvBeanValidator() as the validator for this processor
processor.setValidator(csvBeanValidator());
return processor;
}
/**
* ItemWriter definition, used to write the data out.
* Spring injects beans that already exist in the container as method parameters; Spring Boot has already defined the dataSource for us.
*
* @param dataSource the data source to write to
* @return the ItemWriter
*/
@Bean(name = "appWriter")
public ItemWriter<App> writer(DruidDataSource dataSource) {
JdbcBatchItemWriter<App> writer = new JdbcBatchItemWriter<>();
// Use JdbcBatchItemWriter (JDBC batch processing) to write the data to the database
writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>());
String sql = "insert into z_test_App (appid, zname, flag) values(:appid, :zname, :flag)";
// Set the SQL statement that the batch writer will execute
writer.setSql(sql);
writer.setDataSource(dataSource);
return writer;
}
/**
* Job definition: the task we actually execute, made up of one or more Steps.
*
* @param jobBuilderFactory the job builder factory
* @param s1 the step to run
* @return the configured Job
*/
@Bean(name = "zappJob")
public Job zappJob(JobBuilderFactory jobBuilderFactory, @Qualifier("zappStep1") Step s1) {
return jobBuilderFactory.get("zappJob")
.incrementer(new RunIdIncrementer())
.flow(s1)//assign the Step to this Job
.end()
.listener(new MyJobListener())//attach the MyJobListener job listener
.build();
}
/**
* Step definition: wires together the ItemReader, ItemProcessor and ItemWriter.
*
* @param stepBuilderFactory the step builder factory
* @param reader the item reader
* @param writer the item writer
* @param processor the item processor
* @return the configured Step
*/
@Bean(name = "zappStep1")
public Step zappStep1(StepBuilderFactory stepBuilderFactory,
@Qualifier("appReader") ItemReader<App> reader,
@Qualifier("appWriter") ItemWriter<App> writer,
@Qualifier("appProcessor") ItemProcessor<App, App> processor) {
return stepBuilderFactory
.get("zappStep1")
.<App, App>chunk(5000)//commit every 5000 records per batch
.reader(reader)//bind the reader to this step
.processor(processor)//bind the processor to this step
.writer(writer)//bind the writer to this step
.faultTolerant()
.retry(Exception.class) // retry on exceptions
.noRetry(ParseException.class)
.retryLimit(1) //retry each record at most once
.skip(Exception.class)
.skipLimit(200) //allow at most 200 records to be skipped on exceptions
// .taskExecutor(new SimpleAsyncTaskExecutor()) //run the job concurrently; in general a single job is better run serially
// .throttleLimit(10) //concurrency level of 10 (default is 4)
.build();
}
@Bean
public Validator<App> csvBeanValidator() {
return new MyBeanValidator<>();
}
}

View File

@ -0,0 +1,38 @@
package com.enzhico.trans.modules.zlog;
/**
* Log
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/5
*/
public class Log {
private int logid;
private String msg;
private String logtime;
public int getLogid() {
return logid;
}
public void setLogid(int logid) {
this.logid = logid;
}
public String getMsg() {
return msg;
}
public void setMsg(String msg) {
this.msg = msg;
}
public String getLogtime() {
return logtime;
}
public void setLogtime(String logtime) {
this.logtime = logtime;
}
}

View File

@ -0,0 +1,174 @@
package com.enzhico.trans.modules.zlog;
import com.alibaba.druid.pool.DruidDataSource;
import com.enzhico.trans.modules.MyBeanValidator;
import com.enzhico.trans.modules.MyJobListener;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.ParseException;
import org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.batch.item.validator.ValidatingItemProcessor;
import org.springframework.batch.item.validator.ValidationException;
import org.springframework.batch.item.validator.Validator;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.FileSystemResource;
/**
* LogConfig: Spring Batch configuration for importing the Log CSV file
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/3
*/
@Configuration
public class LogConfig {
/**
* ItemReader definition, used to read the data:
* 1. use a FlatFileItemReader to read the file
* 2. call FlatFileItemReader#setResource to point at the CSV file
* 3. map the CSV columns onto the domain model class
*
* @return FlatFileItemReader
*/
@Bean(name = "logReader")
@StepScope
public FlatFileItemReader<Log> reader(@Value("#{jobParameters['input.file.name']}") String pathToFile) {
FlatFileItemReader<Log> reader = new FlatFileItemReader<>();
// reader.setResource(new ClassPathResource(pathToFile));
reader.setResource(new FileSystemResource(pathToFile));
reader.setLineMapper(new DefaultLineMapper<Log>() {
{
setLineTokenizer(new DelimitedLineTokenizer("|") {
{
setNames(new String[]{
"logid", "msg", "logtime"
});
}
});
setFieldSetMapper(new BeanWrapperFieldSetMapper<Log>() {{
setTargetType(Log.class);
}});
}
});
// Skip the header line if the file includes one
reader.setLinesToSkip(1);
return reader;
}
/**
* ItemProcessor definition, used to process (and validate) the data.
*
* @return the ItemProcessor
*/
@Bean(name = "logProcessor")
public ItemProcessor<Log, Log> processor() {
// Use a ValidatingItemProcessor so that our custom validator runs for every item
ValidatingItemProcessor<Log> processor = new ValidatingItemProcessor<Log>() {
@Override
public Log process(Log item) throws ValidationException {
/*
* super.process(item) must be called, otherwise the custom validator is never invoked
*/
super.process(item);
/*
* TODO: apply any simple processing or conversion of the item here
*/
return item;
}
};
// Register csvBeanValidator() as the validator for this processor
processor.setValidator(csvBeanValidator());
return processor;
}
/**
* ItemWriter definition, used to write the data out.
* Spring injects beans that already exist in the container as method parameters; Spring Boot has already defined the dataSource for us.
*
* @param dataSource the data source to write to
* @return the ItemWriter
*/
@Bean(name = "logWriter")
public ItemWriter<Log> writer(DruidDataSource dataSource) {
JdbcBatchItemWriter<Log> writer = new JdbcBatchItemWriter<>();
// Use JdbcBatchItemWriter (JDBC batch processing) to write the data to the database
writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>());
String sql = "insert into z_test_Log (logid, msg, logtime) values(:logid, :msg, :logtime)";
// Set the SQL statement that the batch writer will execute
writer.setSql(sql);
writer.setDataSource(dataSource);
return writer;
}
/**
* Job definition: the task we actually execute, made up of one or more Steps.
*
* @param jobBuilderFactory the job builder factory
* @param s1 the step to run
* @return the configured Job
*/
@Bean(name = "zlogJob")
public Job zlogJob(JobBuilderFactory jobBuilderFactory, @Qualifier("logStep1") Step s1) {
return jobBuilderFactory.get("zlogJob")
.incrementer(new RunIdIncrementer())
.flow(s1)//assign the Step to this Job
.end()
.listener(new MyJobListener())//attach the MyJobListener job listener
.build();
}
/**
* Step definition: wires together the ItemReader, ItemProcessor and ItemWriter.
*
* @param stepBuilderFactory the step builder factory
* @param reader the item reader
* @param writer the item writer
* @param processor the item processor
* @return the configured Step
*/
@Bean(name = "logStep1")
public Step logStep1(StepBuilderFactory stepBuilderFactory,
@Qualifier("logReader") ItemReader<Log> reader,
@Qualifier("logWriter") ItemWriter<Log> writer,
@Qualifier("logProcessor") ItemProcessor<Log, Log> processor) {
return stepBuilderFactory
.get("logStep1")
.<Log, Log>chunk(5000)//commit every 5000 records per batch
.reader(reader)//bind the reader to this step
.processor(processor)//bind the processor to this step
.writer(writer)//bind the writer to this step
.faultTolerant()
.retry(Exception.class) // retry on exceptions
.noRetry(ParseException.class)
.retryLimit(1) //retry each record at most once
.skip(Exception.class)
.skipLimit(200) //allow at most 200 records to be skipped on exceptions
// .taskExecutor(new SimpleAsyncTaskExecutor()) //run the job concurrently; in general a single job is better run serially
// .throttleLimit(10) //concurrency level of 10 (default is 4)
.build();
}
@Bean
public Validator<Log> csvBeanValidator() {
return new MyBeanValidator<>();
}
}
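Each job above registers a MyJobListener, which is also not included in this excerpt. As a minimal, hedged sketch of what such a JobExecutionListener typically does (logging job start, final status and elapsed time); the real class may do more or less:

package com.enzhico.trans.modules;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobExecutionListener;

/**
 * Hypothetical sketch: logs when a job starts and how long it took to finish.
 * The real MyJobListener in this repository may differ.
 */
public class MyJobListener implements JobExecutionListener {

    private static final Logger LOGGER = LoggerFactory.getLogger(MyJobListener.class);

    private long startTime;

    @Override
    public void beforeJob(JobExecution jobExecution) {
        startTime = System.currentTimeMillis();
        LOGGER.info("Job [{}] starting", jobExecution.getJobInstance().getJobName());
    }

    @Override
    public void afterJob(JobExecution jobExecution) {
        long elapsed = System.currentTimeMillis() - startTime;
        LOGGER.info("Job [{}] finished with status {} in {} ms",
                jobExecution.getJobInstance().getJobName(),
                jobExecution.getStatus(),
                elapsed);
    }
}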

View File

@ -0,0 +1,85 @@
package com.enzhico.trans.service;
import com.enzhico.trans.config.properties.CommonProperties;
import com.enzhico.trans.modules.common.anno.TableName;
import com.enzhico.trans.modules.common.vo.BscCanton;
import com.enzhico.trans.modules.common.vo.BscExeOffice;
import com.enzhico.trans.modules.common.vo.BscOfficeExeItem;
import com.enzhico.trans.modules.common.vo.BscTollItem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;
@Service
public class CsvService {
private Logger logger = LoggerFactory.getLogger(this.getClass());
@Resource
private CommonProperties p;
@Resource
private JobLauncher jobLauncher;
@Resource
@Qualifier("commonJob")
private Job commonJob;
private static final String KEY_JOB_NAME = "input.job.name";
private static final String KEY_FILE_NAME = "input.file.name";
private static final String KEY_VO_NAME = "input.vo.name";
private static final String KEY_COLUMNS = "input.columns";
private static final String KEY_SQL = "input.sql";
/**
* Import the CSV data into the database tables
* @throws Exception on any import failure
*/
public void importTables() throws Exception {
runTask(BscCanton.class);
runTask(BscOfficeExeItem.class);
runTask(BscExeOffice.class);
runTask(BscTollItem.class);
}
/**
* Build and run the import job for the given class via reflection
*
* @param c the VO class annotated with @TableName
*/
public void runTask(Class c) throws Exception {
TableName a = (TableName) c.getAnnotation(TableName.class);
String tableName = a.value();
Field[] fields = c.getDeclaredFields();
List<String> fieldNames = new ArrayList<>();
List<String> paramNames = new ArrayList<>();
for (Field f : fields) {
fieldNames.add(f.getName());
paramNames.add(":" + f.getName());
}
String columnsStr = String.join(",", fieldNames);
String paramsStr = String.join(",", paramNames);
String csvFileName;
if (p.getLocation() == 1) {
csvFileName = p.getCsvDir() + tableName + ".csv";
} else {
csvFileName = tableName + ".csv";
}
JobParameters jobParameters1 = new JobParametersBuilder()
.addLong("time", System.currentTimeMillis())
.addString(KEY_JOB_NAME, tableName)
.addString(KEY_FILE_NAME, csvFileName)
.addString(KEY_VO_NAME, c.getCanonicalName())
.addString(KEY_COLUMNS, String.join(",", fieldNames))
.addString(KEY_SQL, "insert into " + tableName + " (" + columnsStr + ")" + " values(" + paramsStr + ")")
.toJobParameters();
jobLauncher.run(commonJob, jobParameters1);
}
}
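runTask resolves the target table through the custom @TableName annotation imported from com.enzhico.trans.modules.common.anno, reading it reflectively via c.getAnnotation(TableName.class) and a.value(). The annotation's source is not shown here; the following is a minimal sketch of what it presumably looks like, an assumption consistent with that usage rather than the actual file.

package com.enzhico.trans.modules.common.anno;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Hypothetical sketch: marks a VO class with the database table it maps to.
 * Retention must be RUNTIME so CsvService can read it reflectively.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface TableName {

    /** The table name (which, by convention here, is also the CSV file base name). */
    String value();
}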

View File

@ -0,0 +1,30 @@
package com.enzhico.trans.start;
import com.enzhico.trans.service.CsvService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
/**
* Startup runner for the intranet (internal network) service
*
* @author XiongNeng
* @version 1.0
* @since 2018/1/27
*/
@Component
public class StartRunner implements CommandLineRunner {
private Logger logger = LoggerFactory.getLogger(this.getClass());
@Resource
private CsvService csvService;
@Override
public void run(String... args) throws Exception {
logger.info("导入数据主进程启动啦啦啦...");
csvService.importTables();
logger.info("导入数据主进程完成啦啦啦...");
}
}

View File

@ -0,0 +1,90 @@
##########################################################
############ Configuration shared by all profiles ########
##########################################################
################### Custom settings ###################
common:
csvVtoll: /var/csv/
csvCanton: /var/csv/
csvExeOffice: /var/csv/
csvApp: /var/csv/
csvLog: /var/csv/
################### Spring settings ###################
spring:
profiles:
active: test
batch:
job:
enabled: false
initializer:
enabled: false
################### mybatis-plus settings ###################
mybatis-plus:
mapper-locations: classpath*:com/enzhico/trans/dao/repository/mapping/*.xml
typeAliasesPackage: com.enzhico.trans.dao.entity
global-config:
id-type: 1 # 0: database auto-increment  1: user-supplied id  2: globally unique id (IdWorker)  3: globally unique ID (uuid)
db-column-underline: false
refresh-mapper: true
configuration:
jdbcTypeForNull: NULL
map-underscore-to-camel-case: true
cache-enabled: true #global switch for the MyBatis cache
lazyLoadingEnabled: true #switch for lazy loading
multipleResultSetsEnabled: true #when enabled, lazily loading one property loads all of the object's properties; otherwise properties are loaded on demand
logging:
level:
org.springframework.web.servlet: ERROR
---
#####################################################################
######################## Development profile #######################
#####################################################################
spring:
profiles: dev
datasource:
driver-class-name: oracle.jdbc.driver.OracleDriver
url: jdbc:oracle:thin:@111.230.194.170:1521:orcl11g
username: adm_real
password: adm_real
common:
location: 1
csvDir: E:/
logging:
level:
ROOT: INFO
com:
enzhico: DEBUG
file: /var/logs/batch.log
---
#####################################################################
########################## Test profile ############################
#####################################################################
spring:
profiles: test
datasource:
driver-class-name: oracle.jdbc.driver.OracleDriver
url: jdbc:oracle:thin:@127.0.0.1:1521:orcl11g
username: adm_123
password: adm_123
common:
csvDir: /var/csv/
location: 2
logging:
level:
ROOT: INFO
com:
enzhico: DEBUG
file: /var/logs/batch.log

View File

@ -0,0 +1,12 @@
-- Autogenerated: do not edit this file
DROP TABLE BATCH_STEP_EXECUTION_CONTEXT ;
DROP TABLE BATCH_JOB_EXECUTION_CONTEXT ;
DROP TABLE BATCH_STEP_EXECUTION ;
DROP TABLE BATCH_JOB_EXECUTION_PARAMS ;
DROP TABLE BATCH_JOB_EXECUTION ;
DROP TABLE BATCH_JOB_INSTANCE ;
DROP SEQUENCE BATCH_STEP_EXECUTION_SEQ ;
DROP SEQUENCE BATCH_JOB_EXECUTION_SEQ ;
DROP SEQUENCE BATCH_JOB_SEQ ;

View File

@ -0,0 +1,81 @@
-- Autogenerated: do not edit this file
CREATE TABLE BATCH_JOB_INSTANCE (
JOB_INSTANCE_ID NUMBER(19,0) NOT NULL PRIMARY KEY ,
VERSION NUMBER(19,0) ,
JOB_NAME VARCHAR2(100) NOT NULL,
JOB_KEY VARCHAR2(32) NOT NULL,
constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY)
) ;
CREATE TABLE BATCH_JOB_EXECUTION (
JOB_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY ,
VERSION NUMBER(19,0) ,
JOB_INSTANCE_ID NUMBER(19,0) NOT NULL,
CREATE_TIME TIMESTAMP NOT NULL,
START_TIME TIMESTAMP DEFAULT NULL ,
END_TIME TIMESTAMP DEFAULT NULL ,
STATUS VARCHAR2(10) ,
EXIT_CODE VARCHAR2(3600) ,
EXIT_MESSAGE VARCHAR2(3600) ,
LAST_UPDATED TIMESTAMP,
JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,
constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)
references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)
) ;
CREATE TABLE BATCH_JOB_EXECUTION_PARAMS (
JOB_EXECUTION_ID NUMBER(19,0) NOT NULL ,
TYPE_CD VARCHAR2(6) NOT NULL ,
KEY_NAME VARCHAR2(200) NOT NULL ,
STRING_VAL VARCHAR2(3000) ,
DATE_VAL TIMESTAMP DEFAULT NULL ,
LONG_VAL NUMBER(19,0) ,
DOUBLE_VAL NUMBER ,
IDENTIFYING CHAR(1) NOT NULL ,
constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)
references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)
) ;
CREATE TABLE BATCH_STEP_EXECUTION (
STEP_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY ,
VERSION NUMBER(19,0) NOT NULL,
STEP_NAME VARCHAR2(100) NOT NULL,
JOB_EXECUTION_ID NUMBER(19,0) NOT NULL,
START_TIME TIMESTAMP NOT NULL ,
END_TIME TIMESTAMP DEFAULT NULL ,
STATUS VARCHAR2(10) ,
COMMIT_COUNT NUMBER(19,0) ,
READ_COUNT NUMBER(19,0) ,
FILTER_COUNT NUMBER(19,0) ,
WRITE_COUNT NUMBER(19,0) ,
READ_SKIP_COUNT NUMBER(19,0) ,
WRITE_SKIP_COUNT NUMBER(19,0) ,
PROCESS_SKIP_COUNT NUMBER(19,0) ,
ROLLBACK_COUNT NUMBER(19,0) ,
EXIT_CODE VARCHAR2(3600) ,
EXIT_MESSAGE VARCHAR2(3600) ,
LAST_UPDATED TIMESTAMP,
constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)
references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)
) ;
CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT (
STEP_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY,
SHORT_CONTEXT VARCHAR2(3600) NOT NULL,
SERIALIZED_CONTEXT CLOB ,
constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)
references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)
) ;
CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT (
JOB_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY,
SHORT_CONTEXT VARCHAR2(3600) NOT NULL,
SERIALIZED_CONTEXT CLOB ,
constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)
references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)
) ;
CREATE SEQUENCE BATCH_STEP_EXECUTION_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NOCYCLE;
CREATE SEQUENCE BATCH_JOB_EXECUTION_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NOCYCLE;
CREATE SEQUENCE BATCH_JOB_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NOCYCLE;

View File

@ -0,0 +1,222 @@
package com.enzhico.service;
import com.enzhico.trans.Application;
import com.enzhico.trans.config.properties.CommonProperties;
import com.enzhico.trans.modules.common.anno.TableName;
import com.enzhico.trans.modules.common.vo.*;
import com.enzhico.trans.service.CsvService;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import javax.annotation.Resource;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
/**
* BatchServiceTest
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/2
*/
@RunWith(SpringRunner.class)
@SpringBootTest(classes = Application.class)
public class BatchServiceTest {
private Logger logger = LoggerFactory.getLogger(this.getClass());
@Autowired
private CommonProperties p;
@Autowired
private JobLauncher jobLauncher;
@Autowired
@Qualifier("commonJob")
private Job commonJob;
@Autowired
@Qualifier("vtollJob")
private Job vtollJob;
@Autowired
@Qualifier("cantonJob")
private Job cantonJob;
@Autowired
@Qualifier("zappJob")
private Job zappJob;
@Autowired
@Qualifier("zlogJob")
private Job zlogJob;
@Resource
private CsvService csvService;
private static final String KEY_JOB_NAME = "input.job.name";
private static final String KEY_FILE_NAME = "input.file.name";
private static final String KEY_VO_NAME = "input.vo.name";
private static final String KEY_COLUMNS = "input.columns";
private static final String KEY_SQL = "input.sql";
@Test
public void testBudgetVtoll() throws Exception {
JobParameters jobParameters = new JobParametersBuilder()
.addLong("time", System.currentTimeMillis())
.addString("input.file.name", p.getCsvVtoll())
.toJobParameters();
jobLauncher.run(vtollJob, jobParameters);
logger.info("Main线程执行完成");
while (true) {
Thread.sleep(2000000L);
}
}
@Test
public void testCanton() throws Exception {
JobParameters jobParameters = new JobParametersBuilder()
.addLong("time", System.currentTimeMillis())
.addString("input.file.name", p.getCsvCanton())
.toJobParameters();
jobLauncher.run(cantonJob, jobParameters);
logger.info("Main线程执行完成");
while (true) {
Thread.sleep(2000000L);
}
}
/**
* Test that a single generic config class can run multiple jobs in one run
* @throws Exception on failure
*/
@Test
public void testCommonJobs() throws Exception {
JobParameters jobParameters1 = new JobParametersBuilder()
.addLong("time",System.currentTimeMillis())
.addString(KEY_JOB_NAME, "App")
.addString(KEY_FILE_NAME, p.getCsvApp())
.addString(KEY_VO_NAME, "com.enzhico.trans.modules.zapp.App")
.addString(KEY_COLUMNS, String.join(",", new String[]{
"appid", "zname", "flag"
}))
.addString(KEY_SQL, "insert into z_test_App (appid, zname, flag) values(:appid, :zname, :flag)")
.toJobParameters();
jobLauncher.run(commonJob, jobParameters1);
JobParameters jobParameters2 = new JobParametersBuilder()
.addLong("time",System.currentTimeMillis())
.addString(KEY_JOB_NAME, "Log")
.addString(KEY_FILE_NAME, p.getCsvLog())
.addString(KEY_VO_NAME, "com.enzhico.trans.modules.zlog.Log")
.addString(KEY_COLUMNS, String.join(",", new String[]{
"logid", "msg", "logtime"
}))
.addString(KEY_SQL, "insert into z_test_Log (logid, msg, logtime) values(:logid, :msg, :logtime)")
.toJobParameters();
jobLauncher.run(commonJob, jobParameters2);
logger.info("Main线程执行完成");
while (true) {
Thread.sleep(2000000L);
}
}
/**
* Test importing the 4 CSV files in one go
* @throws Exception on failure
*/
@Test
public void testImportCsv4() throws Exception {
JobParameters jobParameters1 = new JobParametersBuilder()
.addLong("time",System.currentTimeMillis())
.addString(KEY_JOB_NAME, "BscExeOffice")
.addString(KEY_FILE_NAME, p.getCsvExeOffice())
.addString(KEY_VO_NAME, "com.enzhico.trans.modules.common.vo.BscExeOffice")
.addString(KEY_COLUMNS, String.join(",", new String[]{
"id","cantonid","code","name","memcode","supdeptid","comdeptid","contactman","tel","mobil","email","bgofficeid","infomobil","infoman","logpass","startdate","stopdate","status","memo","auditer","audittime","isaudit","edittime","platform_id","isprintbill"
}))
.addString(KEY_SQL, "insert into NT_BSC_EXEOFFICE (F_ID,F_CANTONID,F_CODE,F_NAME,F_MEMCODE,F_SUPDEPTID,F_COMDEPTID,F_CONTACTMAN,F_TEL,F_MOBIL,F_EMAIL,F_BGOFFICEID,F_INFOMOBIL,F_INFOMAN,F_LOGPASS,F_STARTDATE,F_STOPDATE,F_STATUS,F_MEMO,F_AUDITER,F_AUDITTIME,F_ISAUDIT,F_EDITTIME,F_PLATFORM_ID,F_ISPRINTBILL)" +
" values(:id, :cantonid, :code, :name, :memcode, :supdeptid, :comdeptid, :contactman, :tel, :mobil, :email, :bgofficeid, :infomobil, :infoman, :logpass, :startdate, :stopdate, :status, :memo, :auditer, :audittime, :isaudit, :edittime, :platform_id, :isprintbill)")
.toJobParameters();
jobLauncher.run(commonJob, jobParameters1);
// JobParameters jobParameters2 = new JobParametersBuilder()
// .addLong("time",System.currentTimeMillis())
// .addString(KEY_JOB_NAME, "Log")
// .addString(KEY_FILE_NAME, p.getCsvLog())
// .addString(KEY_VO_NAME, "com.enzhico.trans.modules.zlog.Log")
// .addString(KEY_COLUMNS, String.join(",", new String[]{
// "logid", "msg", "logtime"
// }))
// .addString(KEY_SQL, "insert into z_test_Log (logid, msg, logtime) values(:logid, :msg, :logtime)")
// .toJobParameters();
// jobLauncher.run(commonJob, jobParameters2);
logger.info("Main线程执行完成");
while (true) {
Thread.sleep(2000000L);
}
}
/**
CREATE TABLE Z_TEST_APP (
appid INT,
zname VARCHAR2 (20),
flag VARCHAR2 (2),
CONSTRAINT app_pk PRIMARY KEY (appid)
);
CREATE TABLE Z_TEST_LOG (
logid INT,
msg VARCHAR2 (20),
logtime VARCHAR2 (8),
CONSTRAINT log_pk PRIMARY KEY (logid)
);
* @throws Exception
*/
@Test
public void testTwoJobs() throws Exception {
JobParameters jobParameters1 = new JobParametersBuilder()
.addLong("time", System.currentTimeMillis())
.addString("input.file.name", p.getCsvApp())
.toJobParameters();
jobLauncher.run(zappJob, jobParameters1);
JobParameters jobParameters2 = new JobParametersBuilder()
.addLong("time", System.currentTimeMillis())
.addString("input.file.name", p.getCsvLog())
.toJobParameters();
jobLauncher.run(zlogJob, jobParameters2);
logger.info("Main线程执行完成");
while (true) {
Thread.sleep(2000000L);
}
}
@Test
public void testRunSimple() throws Exception {
csvService.runTask(BscCanton.class);
csvService.runTask(BscOfficeExeItem.class);
csvService.runTask(BscExeOffice.class);
csvService.runTask(BscTollItem.class);
while (true) {
Thread.sleep(200000L);
}
}
}
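The commonJob exercised by several of these tests is defined in a configuration class that is not part of this excerpt; the tests only show that it is driven entirely by JobParameters (input.file.name, input.vo.name, input.columns, input.sql). Purely as a hedged illustration, a parameter-driven reader/writer pair could be wired roughly as sketched below; every name here (CommonConfigSketch, the bean names, the pipe delimiter) is an assumption, not the project's actual commonJob configuration.

package com.enzhico.trans.modules.common;

import com.alibaba.druid.pool.DruidDataSource;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.FileSystemResource;

/**
 * Hypothetical sketch of a JobParameters-driven reader/writer pair for a generic import job.
 * The project's real commonJob configuration may differ in names, delimiter and structure.
 */
@Configuration
public class CommonConfigSketch {

    @Bean(name = "commonReaderSketch")
    @StepScope
    public FlatFileItemReader<Object> commonReader(
            @Value("#{jobParameters['input.file.name']}") String pathToFile,
            @Value("#{jobParameters['input.vo.name']}") String voClassName,
            @Value("#{jobParameters['input.columns']}") String columns) throws ClassNotFoundException {
        // Resolve the VO class named in the job parameters, e.g. com.enzhico.trans.modules.zapp.App
        Class<?> voClass = Class.forName(voClassName);
        FlatFileItemReader<Object> reader = new FlatFileItemReader<>();
        reader.setResource(new FileSystemResource(pathToFile));
        // Delimiter is assumed here; the column order comes straight from the input.columns parameter
        DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer("|");
        tokenizer.setNames(columns.split(","));
        BeanWrapperFieldSetMapper<Object> fieldSetMapper = new BeanWrapperFieldSetMapper<>();
        fieldSetMapper.setTargetType(voClass);
        DefaultLineMapper<Object> lineMapper = new DefaultLineMapper<>();
        lineMapper.setLineTokenizer(tokenizer);
        lineMapper.setFieldSetMapper(fieldSetMapper);
        reader.setLineMapper(lineMapper);
        return reader;
    }

    @Bean(name = "commonWriterSketch")
    @StepScope
    public ItemWriter<Object> commonWriter(DruidDataSource dataSource,
            @Value("#{jobParameters['input.sql']}") String sql) {
        // The whole insert statement is passed in via the input.sql parameter
        JdbcBatchItemWriter<Object> writer = new JdbcBatchItemWriter<>();
        writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>());
        writer.setSql(sql);
        writer.setDataSource(dataSource);
        return writer;
    }
}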

View File

@ -0,0 +1,28 @@
package com.enzhico.service;
import org.junit.Test;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.Locale;
/**
* SimpleTest
*
* @author XiongNeng
* @version 1.0
* @since 2018/2/5
*/
public class SimpleTest {
@Test
public void test() throws Exception {
System.out.println(Arrays.toString(new String[]{"11", "22"}));
System.out.println(String.join(",", new String[]{"11", "22"}));
String d = "08-12月-17 05.38.07.812000 下午";
Locale locale = Locale.CHINA;
Date dd = new SimpleDateFormat("dd-M月-y hh.mm.ss.S a", locale).parse(d);
System.out.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(dd));
}
}