Spring-boot+Spring-batch+hibernate+Quartz简单批量读文件写数据用例
时间: 2023-06-28 21:11:27 浏览: 224
这里是一个简单的 Spring-boot+Spring-batch+hibernate+Quartz 的批量读文件写数据的例子。
首先,需要在 pom.xml 文件中添加以下依赖:
```xml
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-batch</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-quartz</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>
```
在 application.yml 文件中配置数据源和 Quartz:
```yaml
# Spring Boot application configuration (YAML nesting restored — the original
# snippet had lost all indentation, which makes it invalid YAML).
spring:
  datasource:
    url: jdbc:mysql://localhost:3306/test?useUnicode=true&characterEncoding=utf-8&useSSL=false&allowPublicKeyRetrieval=true
    username: root
    password: root
  jpa:
    hibernate:
      # Auto-evolve the schema from the entities; fine for a demo, use
      # migrations (Flyway/Liquibase) in production.
      ddl-auto: update
    show-sql: true
  quartz:
    # Persist Quartz triggers/jobs in the database instead of in memory.
    job-store-type: jdbc
    jdbc:
      # Recreate the Quartz tables on every startup; demo-only setting.
      initialize-schema: always
```
接下来,定义实体类 FileData:
```java
/**
 * JPA entity mapping one line of an imported file to the {@code file_data} table.
 * The original listing only had a "// getter and setter" placeholder; the
 * accessors are required for the processor/writer code to compile.
 */
@Entity
@Table(name = "file_data")
public class FileData {

    /** Surrogate primary key, generated by the database (AUTO_INCREMENT). */
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    /** Name of the source file this row came from. */
    @Column(name = "file_name")
    private String fileName;

    /** Line number within the source file. */
    @Column(name = "line_number")
    private Integer lineNumber;

    /** Raw text content of the line. */
    @Column(name = "line_data")
    private String lineData;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getFileName() {
        return fileName;
    }

    public void setFileName(String fileName) {
        this.fileName = fileName;
    }

    public Integer getLineNumber() {
        return lineNumber;
    }

    public void setLineNumber(Integer lineNumber) {
        this.lineNumber = lineNumber;
    }

    public String getLineData() {
        return lineData;
    }

    public void setLineData(String lineData) {
        this.lineData = lineData;
    }
}
```
定义读取文件的 ItemReader:
```java
/**
 * Step-scoped {@link ItemReader} that streams a text file line by line.
 * The file path is supplied through the {@code file} job parameter.
 *
 * Fixes over the original:
 * - reads with an explicit UTF-8 charset ({@code new FileReader(file)} used the
 *   platform default charset, which breaks non-ASCII content);
 * - closes the reader in an {@code @AfterStep} callback so it is released even
 *   when the step fails mid-read (the original only closed it on clean EOF).
 */
@Component
@StepScope
public class FileItemReader implements ItemReader<String> {

    private static final Logger LOGGER = LoggerFactory.getLogger(FileItemReader.class);

    /** Absolute or relative path of the file to import. */
    private String file;

    private BufferedReader reader;

    @Value("#{jobParameters['file']}")
    public void setFile(String file) {
        this.file = file;
    }

    /** Opens the file before the step starts. */
    @BeforeStep
    public void beforeStep(StepExecution stepExecution) throws Exception {
        LOGGER.info("Starting to read file: {}", file);
        reader = Files.newBufferedReader(Paths.get(file), StandardCharsets.UTF_8);
    }

    /**
     * Returns the next line, or {@code null} at end of file — the null return
     * is Spring Batch's signal that the input is exhausted.
     */
    @Override
    public String read() throws Exception {
        String line = reader.readLine();
        if (line != null) {
            LOGGER.debug("Read line: {}", line);
        } else {
            LOGGER.info("Finished reading file: {}", file);
        }
        return line;
    }

    /** Releases the reader whether the step completed or failed. */
    @AfterStep
    public void afterStep(StepExecution stepExecution) throws Exception {
        if (reader != null) {
            reader.close();
            reader = null;
        }
    }
}
```
定义处理数据的 ItemProcessor:
```java
/**
 * Converts one CSV-ish line ({@code fileName,lineNumber,lineData}) into a
 * {@link FileData} entity.
 *
 * Fixes over the original:
 * - malformed lines (fewer than 3 fields, or a non-numeric line number) no
 *   longer blow up the whole step with ArrayIndexOutOfBoundsException /
 *   NumberFormatException — they are logged and filtered by returning
 *   {@code null}, which Spring Batch treats as "skip this item";
 * - {@code split(",", 3)} keeps any commas inside the data field (the original
 *   unbounded split silently truncated {@code lineData} at the next comma).
 */
@Component
public class FileItemProcessor implements ItemProcessor<String, FileData> {

    private static final Logger LOGGER = LoggerFactory.getLogger(FileItemProcessor.class);

    @Override
    public FileData process(String line) throws Exception {
        LOGGER.debug("Processing line: {}", line);
        // Limit 3: everything after the second comma belongs to lineData.
        String[] parts = line.split(",", 3);
        if (parts.length < 3) {
            LOGGER.warn("Skipping malformed line (expected 3 fields): {}", line);
            return null; // null filters the item out of the chunk
        }
        FileData fileData = new FileData();
        fileData.setFileName(parts[0]);
        try {
            fileData.setLineNumber(Integer.parseInt(parts[1].trim()));
        } catch (NumberFormatException e) {
            LOGGER.warn("Skipping line with invalid line number '{}': {}", parts[1], line);
            return null;
        }
        fileData.setLineData(parts[2]);
        return fileData;
    }
}
```
定义写数据的 ItemWriter:
```java
/**
 * Persists a chunk of {@link FileData} items through JPA.
 *
 * Fix over the original: the {@link EntityManager} is injected with
 * {@code @PersistenceContext} instead of {@code @Autowired}. Only
 * {@code @PersistenceContext} yields the transaction-bound, thread-safe shared
 * proxy; autowiring a raw EntityManager is not safe for concurrent steps.
 */
@Component
public class FileItemWriter implements ItemWriter<FileData> {

    private static final Logger LOGGER = LoggerFactory.getLogger(FileItemWriter.class);

    @PersistenceContext
    private EntityManager entityManager;

    // NOTE(review): the chunk-oriented step already wraps write() in a
    // transaction; @Transactional (propagation REQUIRED) simply joins it.
    @Override
    @Transactional
    public void write(List<? extends FileData> items) throws Exception {
        LOGGER.info("Writing {} items", items.size());
        for (FileData item : items) {
            entityManager.persist(item);
        }
        entityManager.flush();
    }
}
```
定义 Job:
```java
/**
 * Wires the chunk-oriented batch job: read lines from a file, convert them to
 * {@link FileData}, and write them to the database in chunks of 10.
 *
 * Improvement over the original: constructor injection replaces five
 * {@code @Autowired} fields — dependencies are final, explicit, and the class
 * is testable without a Spring context.
 */
@Configuration
@EnableBatchProcessing
public class BatchConfiguration {

    private final JobBuilderFactory jobBuilderFactory;
    private final StepBuilderFactory stepBuilderFactory;
    private final FileItemReader fileItemReader;
    private final FileItemProcessor fileItemProcessor;
    private final FileItemWriter fileItemWriter;

    public BatchConfiguration(JobBuilderFactory jobBuilderFactory,
                              StepBuilderFactory stepBuilderFactory,
                              FileItemReader fileItemReader,
                              FileItemProcessor fileItemProcessor,
                              FileItemWriter fileItemWriter) {
        this.jobBuilderFactory = jobBuilderFactory;
        this.stepBuilderFactory = stepBuilderFactory;
        this.fileItemReader = fileItemReader;
        this.fileItemProcessor = fileItemProcessor;
        this.fileItemWriter = fileItemWriter;
    }

    /** The batch job; RunIdIncrementer lets the same parameters re-run. */
    @Bean
    public Job fileToDatabaseJob() {
        return jobBuilderFactory.get("fileToDatabaseJob")
                .incrementer(new RunIdIncrementer())
                .start(step1())
                .build();
    }

    /** Single chunk-oriented step: 10 items per transaction. */
    @Bean
    public Step step1() {
        return stepBuilderFactory.get("step1")
                .<String, FileData>chunk(10)
                .reader(fileItemReader)
                .processor(fileItemProcessor)
                .writer(fileItemWriter)
                .build();
    }
}
```
定义 Quartz 定时任务:
```java
/**
 * Registers the file-to-database Quartz trigger at startup.
 *
 * Fix over the original: spring-boot-starter-quartz auto-configures a
 * {@link Scheduler} bean (already started and bound to the JDBC job store) —
 * there is no {@code SchedulerFactory} bean to autowire, so the original
 * injection failed at startup. Inject the Scheduler directly.
 *
 * NOTE(review): Boot also auto-registers JobDetail/Trigger beans with the
 * scheduler, so this manual registration may duplicate the trigger — verify
 * against spring.quartz auto-configuration before keeping both.
 */
@Component
public class FileToDatabaseJobScheduler {

    @Autowired
    private Scheduler scheduler;

    @Autowired
    private JobDetail fileToDatabaseJobDetail;

    @Autowired
    private CronTriggerFactoryBean fileToDatabaseJobTrigger;

    @PostConstruct
    public void scheduleFileToDatabaseJob() throws SchedulerException {
        scheduler.scheduleJob(fileToDatabaseJobDetail, fileToDatabaseJobTrigger.getObject());
        scheduler.start();
    }
}
```
最后,在启动类中注册 Quartz 的 JobDetail 和触发器;文件路径通过 jobParameters 的 `file` 参数传入 StepScope 的读取器:
```java
@SpringBootApplication
public class Application {
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
@Bean
@StepScope
public FileItemReader fileItemReader(@Value("#{jobParameters['file']}") String file) {
FileItemReader reader = new FileItemReader();
reader.setFile(file);
return reader;
}
@Bean
public JobDetail fileToDatabaseJobDetail() {
return JobBuilder.newJob(BatchConfiguration.class)
.withIdentity("fileToDatabaseJob")
.storeDurably()
.build();
}
@Bean
public CronTriggerFactoryBean fileToDatabaseJobTrigger(@Autowired JobDetail fileToDatabaseJobDetail) {
CronTriggerFactoryBean trigger = new CronTriggerFactoryBean();
trigger.setJobDetail(fileToDatabaseJobDetail);
trigger.setCronExpression("0 0/1 * 1/1 * ? *"); // 每分钟执行一次
return trigger;
}
}
```
以上就是一个简单的 Spring-boot+Spring-batch+hibernate+Quartz 的批量读文件写数据的例子。
阅读全文