Spring Boot + Spring Batch + Hibernate + Quartz: a simple batch example that reads a file and writes to the database

This sample integrates Spring Boot, Spring Batch, Spring Data JPA, Hibernate, Quartz, and H2. The complete code is shared on GitHub at https://github.com/birdstudiocn/spring-sample

The program reads records from a text file in batch and saves them to the database. A Quartz scheduled task launches the batch job every 20 seconds. The functionality is deliberately simple; it only serves to show how the framework pieces fit together.
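
The input is a pipe-delimited text file, user.txt, on the classpath, with one record per line. The field layout follows the reader configuration shown further down (name, then gender); the sample values here are made up:

Sam|male
Lucy|female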

First, the main class, QuartzApplication.java:

package cn.birdstudio;

import org.quartz.CronScheduleBuilder;
import org.quartz.JobBuilder;
import org.quartz.JobDetail;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;

/**
 * @author Sam Zhang
 */
@SpringBootApplication
public class QuartzApplication {

	public static void main(String[] args) {
		SpringApplication.run(QuartzApplication.class, args);
	}

	// Durable JobDetail so it can be registered independently of its trigger
	@Bean
	public JobDetail simpleJobDetail() {
		return JobBuilder.newJob(QuartzJob.class).withIdentity("simpleJob").storeDurably().build();
	}

	// Cron trigger: fire every 20 seconds
	@Bean
	public Trigger simpleJobTrigger() {
		CronScheduleBuilder scheduleBuilder = CronScheduleBuilder.cronSchedule("0/20 * * * * ?");

		return TriggerBuilder.newTrigger().forJob(simpleJobDetail()).withIdentity("simpleTrigger")
				.withSchedule(scheduleBuilder).build();
	}

}

The Quartz job class, QuartzJob.java:

package cn.birdstudio;

import javax.annotation.Resource;

import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.scheduling.quartz.QuartzJobBean;

import cn.birdstudio.service.UserService;

/**
 * @author Sam Zhang
 */
public class QuartzJob extends QuartzJobBean {
	private static final Logger logger = LoggerFactory.getLogger(QuartzJob.class);

	@Resource
	JobLauncher jobLauncher;
	@Resource(name = "importJob")
	Job job;

	@Resource
	private UserService qdParaWayinfoService;

	/**
	 * Launch the batch job that loads the file data into the database
	 */
	@Override
	protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
		logger.info("start COMS daemon");
		try {
			JobParametersBuilder jobParametersBuilder = new JobParametersBuilder();
			jobParametersBuilder.addLong("run.id", System.currentTimeMillis());
			JobParameters jobParameters = jobParametersBuilder.toJobParameters();
			jobLauncher.run(job, jobParameters);
		} catch (JobExecutionAlreadyRunningException | JobRestartException | JobInstanceAlreadyCompleteException
				| JobParametersInvalidException e) {
			logger.error("failed to launch batch job", e);
		}
	}
}

The batch configuration class, BatchConfiguration.java, reads a file and writes its field data to the database. It consists of three parts: reading the data, processing the data, and writing the data.

package cn.birdstudio.batch;

import javax.persistence.EntityManagerFactory;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecutionListener;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.database.JpaItemWriter;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;

import cn.birdstudio.domain.User;

/**
 * Loads the data into the database in three parts: read, process, and write
 * 
 * @author Sam Zhang
 */
@Configuration
@EnableBatchProcessing
@EnableAutoConfiguration
public class BatchConfiguration {
	@Autowired
	private JobBuilderFactory jobs;

	@Autowired
	private StepBuilderFactory steps;

	private static final Logger logger = LoggerFactory.getLogger(BatchConfiguration.class);

	/**
	 * 1. Read the data from user.txt
	 */
	@Bean(name = "reader1")
	@StepScope
	public ItemReader<User> reader() {
		logger.info("read txt");
		ClassPathResource pathResource = new ClassPathResource("user.txt");
		FlatFileItemReader<User> reader = new FlatFileItemReader<>();
		reader.setResource(pathResource);
		reader.setLineMapper(new DefaultLineMapper<User>() {
			{
				setLineTokenizer(new DelimitedLineTokenizer("|") {
					{
						setNames(new String[] { "name", "gender" });
					}
				});
				setFieldSetMapper(new BeanWrapperFieldSetMapper<User>() {
					{
						setTargetType(User.class);
					}
				});
			}
		});
		reader.open(new ExecutionContext());
		return reader;
	}

	/**
	 * 2. Process the data
	 */
	@Bean(name = "processor1")
	@StepScope
	public SampleItemProcessor processor() {
		return new SampleItemProcessor();
	}

	/**
	 * 3. Write the data to the database via JPA
	 * 
	 * @param entityManagerFactory
	 */
	@Bean(name = "writer1")
	@StepScope
	public ItemWriter<User> writer(EntityManagerFactory entityManagerFactory) {
		logger.info("write data in database");
		JpaItemWriter<User> writer = new JpaItemWriter<>();
		writer.setEntityManagerFactory(entityManagerFactory);
		return writer;
	}

	@Bean
	public Job importJob(@Qualifier("step1") Step s1, JobExecutionListener listener) {
		return jobs.get("importJob").incrementer(new RunIdIncrementer()).listener(listener).flow(s1).end().build();
	}

	@Bean
	public Step step1(@Qualifier("reader1") ItemReader<User> reader, @Qualifier("writer1") ItemWriter<User> writer,
			@Qualifier("processor1") ItemProcessor<User, User> processor, JobExecutionListener listener) {
		return steps.get("step1").<User, User>chunk(10).reader(reader).processor(processor).writer(writer).build();
	}

}
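
The SampleItemProcessor used by the processor1 bean is not listed in this post; the real implementation is in the GitHub repository. A minimal sketch of such a processor, assuming a simple pass-through with a log line (not necessarily what the repository actually does):

package cn.birdstudio.batch;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemProcessor;

import cn.birdstudio.domain.User;

public class SampleItemProcessor implements ItemProcessor<User, User> {
	private static final Logger logger = LoggerFactory.getLogger(SampleItemProcessor.class);

	@Override
	public User process(User user) throws Exception {
		// Transform or filter the record here; returning null would drop it from the chunk
		logger.info("processing user {}", user.getName());
		return user;
	}
}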

 
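Two further classes the configuration depends on are the User entity (mapped by the reader and persisted by the JpaItemWriter) and the JobExecutionListener injected into importJob. Both live in the repository; the sketches below are assumptions, with only the name and gender fields taken from the tokenizer configuration:

package cn.birdstudio.domain;

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;

@Entity
public class User {
	@Id
	@GeneratedValue
	private Long id;
	private String name;
	private String gender;

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public String getGender() {
		return gender;
	}

	public void setGender(String gender) {
		this.gender = gender;
	}
}

A listener bean satisfying the JobExecutionListener parameter could be as small as the following (the class name here is made up):

package cn.birdstudio.batch;

import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.listener.JobExecutionListenerSupport;
import org.springframework.stereotype.Component;

@Component
public class JobCompletionNotificationListener extends JobExecutionListenerSupport {
	@Override
	public void afterJob(JobExecution jobExecution) {
		if (jobExecution.getStatus() == BatchStatus.COMPLETED) {
			// e.g. log completion or verify the imported rows here
		}
	}
}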

posted on 2017-07-11 09:56 by 力奋