I have a Spring Batch job working with a MongoDB database to feed a MySQL database. The program handles about half of my data, yet there are only something like 200 errors in my logs.
The BATCH_STEP_EXECUTION table tells me that the process completed successfully (status COMPLETED) and shows a READ_COUNT of 5692, although I have 11800 documents in the collection.
Did I miss something in the configuration that prevents it from reading the entire database?
Here is my configuration class:
@Configuration @EnableBatchProcessing @Import(PersistenceConfig.class) public class BatchConfiguration { @Autowired MongoTemplate mongoTemplate; @Autowired SessionFactory sessionFactory; @Bean @StepScope public ItemReader<CourseData> reader() { MongoItemReader<CourseData> mongoItemReader = new MongoItemReader<>(); mongoItemReader.setTemplate(mongoTemplate); mongoItemReader.setCollection("foo"); mongoItemReader.setQuery("{}"); mongoItemReader.setTargetType(CourseData.class); Map<String, Sort.Direction> sort = new HashMap<>(); sort.put("_id", Sort.Direction.ASC); mongoItemReader.setSort(sort); return mongoItemReader; } @Bean public ItemProcessor<CourseData, MatrixOne> processor() { return new CourseDataMatrixOneProcessor(); } @Bean public ItemWriter<MatrixOne> writer() { HibernateItemWriter writer = new HibernateItemWriter(); writer.setSessionFactory(sessionFactory); System.out.println("writing stuff"); return writer; } @Bean public Job importUserJob(JobBuilderFactory jobs, Step s1) { return jobs.get("importRawCourseJob") .incrementer(new RunIdIncrementer()) .flow(s1) .end() .build(); } @Bean @Transactional public Step step1(StepBuilderFactory stepBuilderFactory, ItemReader<CourseData> reader, ItemWriter<MatrixOne> writer, ItemProcessor<CourseData, MatrixOne> processor) { return stepBuilderFactory.get("step1") .<CourseData, MatrixOne>chunk(10) .reader(reader) .processor(processor) .writer(writer) .build(); } }
source share