I'm using a Partitioner. The job works fine, but when I try to use stepExecutionContext to read data put there by the Partitioner class, it is null in the ItemReader:
@Bean
@StepScope
public FlatFileItemReader<ClassName> Reader(
@Value("#{stepExecutionContext['fileResource']}") String file) {
FlatFileItemReader<ClassName> fileReader=new FlatFileItemReader<ClassName>();
fileReader.setResource(new ClassPathResource(file));
return fileReader;
}
The Partitioner class looks like this:
public class PartitionerClass implements Partitioner {
private String rep;
@Override
public Map<String, ExecutionContext> partition(int gridSize) {
Map<String, ExecutionContext> partitionMap = new HashMap<String, ExecutionContext>();
File dir = new File(rep);
if (dir.isDirectory()) {
File[] files = dir.listFiles();
for (File file : files) {
if(file.isFile()) {
ExecutionContext context = new ExecutionContext();
context.putString("fileResource", file.getName());
partitionMap.put(file.getName(), context);
}
}
}
return partitionMap;
}
public String getRep() {
return rep;
}
public void setRep(String rep) {
this.rep = rep;
}
}
Declaring the partitioner bean:
@Bean
public PartitionerClass partitioner() {
PartitionerClass partitioner = new PartitionerClass();
partitioner.setRep("MyDirectory");
return partitioner;
}
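For context, stepExecutionContext is only populated for the worker steps of a partitioned step, so the partitioner has to be wired into a master step. Below is a minimal sketch of that wiring; the step names, chunk size, injected StepBuilderFactory and writer() bean are assumptions for illustration, not taken from the question:

@Autowired
private StepBuilderFactory stepBuilderFactory;

@Bean
public Step masterStep() {
    // Each partition produced by PartitionerClass becomes one execution of workerStep;
    // the ExecutionContext it builds (with 'fileResource') is what stepExecutionContext exposes there.
    return stepBuilderFactory.get("masterStep")
            .partitioner("workerStep", partitioner())
            .step(workerStep())
            .gridSize(4)
            .taskExecutor(new SimpleAsyncTaskExecutor())
            .build();
}

@Bean
public Step workerStep() {
    return stepBuilderFactory.get("workerStep")
            .<ClassName, ClassName>chunk(100)
            .reader(Reader(null)) // step-scoped proxy; 'fileResource' is resolved per partition at runtime
            .writer(writer())     // writer() stands in for your own writer bean
            .build();
}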
I am building a Spring Batch application that fetches data from an external API and loads it into a database. I make a REST call to the external API, which returns a JSON or CSV string depending on the request parameter (json/csv).
The application currently works fine for a CSV file saved on the filesystem. I am trying to get rid of creating a new file and using that file as input every time; I want to achieve the loading without creating any file on disk.
I have googled and tried different solutions, and I have replaced FlatFileItemReader with JsonItemReader.
Can you please help me here?
ReportService.java
@Slf4j
@Service
public class ReportService {
public static final String OUTPUT_FILE_FORMAT = "csv";
@Autowired
ProxyRestClient proxyRestClient;
@Autowired
FileOperations fileWriter;
@Autowired
CommonUtility commonUtil;
@Autowired
ReportMapping reportMapping;
@Autowired
ReportConfig reportConfig;
@Autowired
JobMapping jobMap;
@Autowired
DataIntegrationResponseBuilder responseBuilder;
@Autowired
ImportJobLauncher batchJob;
@Autowired
WFitsPricingParser wFitsPricingParser;
public ResponseEntity<DataIntegrationResponse> getReport(String jobName) throws Exception {
HashMap<String, String> fitsParamMap = new HashMap<String, String>();
fitsParamMap = getFitsParameters(jobName);
ResponseEntity<String> response;
final String filePath = Path.of("").toAbsolutePath().toString() + "\\";
final String baseUrl = reportConfig.getBaseURL();
String reportURL = baseUrl + fitsParamMap.get("reportId") + "?format=" + OUTPUT_FILE_FORMAT;
String reportName = fitsParamMap.get("reportName");
log.info("Fetching report from FITS API.");
log.info("FITS API URL: " + reportURL + ".");
response = proxyRestClient.callFitsApi(reportURL, commonUtil.encodedCredentials());
/*
 * ObjectMapper mapper = new ObjectMapper();
 * WFitsVendor[] jsonObj = mapper.readValue(response.getBody(), WFitsVendor[].class);
 */
if (response != null) {
if (response.getStatusCodeValue() == 200 && response.hasBody()) {
fileWriter.writeToFile(response.getBody(), filePath + reportName + "." + OUTPUT_FILE_FORMAT);
if (jobName.equals("vendor")) {
BatchJobResponse batchJobResponse = batchJob.importFitsVendor(filePath + reportName + "." + OUTPUT_FILE_FORMAT, response.getBody());
return new ResponseEntity<>(responseBuilder.buildResponse(response.getStatusCode(),
reportName + "." + OUTPUT_FILE_FORMAT, batchJobResponse.getJobName(), batchJobResponse.getJobStatus(), batchJobResponse.getJobId()),
HttpStatus.CREATED);
}
}
}
return new ResponseEntity<>(responseBuilder.buildResponse(HttpStatus.NOT_FOUND, "", "", "", 0L),
HttpStatus.NOT_FOUND);
}
private HashMap<String, String> getFitsParameters(String jobName) {
HashMap<String, String> fitsParamMap = new HashMap<String, String>();
fitsParamMap.put("reportName", reportMapping.getMappings().getOrDefault(jobMap.getMappings().get(jobName), ""));
fitsParamMap.put("reportId", jobMap.getMappings().getOrDefault(jobName, ""));
return fitsParamMap;
}
}
ProxyRestClient.java
@Service
public class ProxyRestClient {
@Autowired
@Qualifier("externalRestTemplate")
RestTemplate restTemplate;
public ResponseEntity<String> callFitsApi(
String reportURL, String encodedCredentials)
throws JsonMappingException, JsonProcessingException {
HttpHeaders headers = new HttpHeaders();
headers.add("Authorization", "Basic " + encodedCredentials);
HttpEntity<String> request = new HttpEntity<>(headers);
ResponseEntity<String> response = null;
ResponseEntity<String> responseWfits = null;
// response = restTemplate.exchange(reportURL, HttpMethod.GET, request,
// String.class);
responseWfits = restTemplate.exchange("https://fitsonline.trgrp.com/msmdsqa/api/report/user/5010296?format=json", HttpMethod.GET, request, String.class);
return responseWfits;
}
}
VendorJob.java
@Configuration
@EnableBatchProcessing
@AllArgsConstructor
public class VendorJob {
private JobBuilderFactory jobBuilderFactory;
private StepBuilderFactory stepBuilderFactory;
private static final String DROP_SCRIPT = "TRUNCATE TABLE MDIA.WFITS_VENDOR";
@Autowired
private HikariDataSource dataSource;
/*
 * This is for CSV File
 *
 * @Bean
 * @StepScope
 * public FlatFileItemReader<WFitsVendor> VendorReader(@Value("#{jobParameters['filePath']}") String filePath,
 *         @Value("#{jobParameters['jsonObj']}") String jsonObj) {
 *     FlatFileItemReader<WFitsVendor> itemReader = new FlatFileItemReader<>();
 *     itemReader.setResource(new FileSystemResource(filePath));
 *     itemReader.setName("csvReader");
 *     itemReader.setLinesToSkip(1);
 *     itemReader.setLineMapper(lineMapper());
 *     itemReader.setRecordSeparatorPolicy(new ReaderPolicy());
 *     return itemReader;
 * }
 */
@Bean
@StepScope
public JsonItemReader<WFitsVendor> jsonItemReader(@Value("#{jobParameters['filePath']}") String filePath,
@Value("#{jobParameters['jsonObj']}") String jsonObj) {
ObjectMapper objectMapper = new ObjectMapper();
// configure the objectMapper as required
JacksonJsonObjectReader<WFitsVendor> jsonObjectReader =
new JacksonJsonObjectReader<>(WFitsVendor.class);
jsonObjectReader.setMapper(objectMapper);
return new JsonItemReaderBuilder<WFitsVendor>()
.jsonObjectReader(jsonObjectReader)
.resource(new ByteArrayResource(jsonObj.getBytes()))
.name("jsonItemReader")
.build();
}
private LineMapper<WFitsVendor> lineMapper() {
WFitsVendorLineMapper<WFitsVendor> lineMapper = new WFitsVendorLineMapper<>();
DelimitedLineTokenizer lineTokenizer = new DelimitedLineTokenizer();
lineTokenizer.setDelimiter(",");
lineTokenizer.setNames("VENDORNAME", "Type", "Notes");
lineTokenizer.setStrict(true);
lineTokenizer.setIncludedFields(0, 1, 2);
BeanWrapperFieldSetMapper<WFitsVendor> fieldSetMapper = new BeanWrapperFieldSetMapper<>();
fieldSetMapper.setTargetType(WFitsVendor.class);
lineMapper.setLineTokenizer(lineTokenizer);
lineMapper.setFieldSetMapper(fieldSetMapper);
return lineMapper;
}
@Bean
public VendorProcessor VendorProcessor() {
return new VendorProcessor();
}
@Bean
public JdbcBatchItemWriter<WFitsVendor> VendorWriter() {
JdbcBatchItemWriter<WFitsVendor> databaseItemWriter = new JdbcBatchItemWriter<>();
databaseItemWriter.setDataSource(dataSource);
databaseItemWriter.setSql(
"INSERT INTO MDIA.WFITS_VENDOR(VENDOR, VENDOR_INFO, VENDOR_TYPE, CREATED_BY) VALUES (?, ?, ?, ?)");
ItemPreparedStatementSetter<WFitsVendor> valueSetter = new WFitsVendorPreparedStatementSetter();
databaseItemWriter.setItemPreparedStatementSetter(valueSetter);
return databaseItemWriter;
}
@Bean
public Step loadVendorTable() {
return stepBuilderFactory.get("load-wfitsvendor-table").<WFitsVendor, WFitsVendor> chunk(10000)
.reader(jsonItemReader(null, null)).writer(VendorWriter()).processor(VendorProcessor()).faultTolerant()
.taskExecutor(VendortaskExecutor()).build();
}
@Bean
public Step truncateVendorTable() {
return stepBuilderFactory.get("truncate-wfitsvendor-table").tasklet(truncateTableTasklet()).build();
}
public Tasklet truncateTableTasklet() {
return (contribution, chunkContext) -> {
new JdbcTemplate(dataSource).execute(DROP_SCRIPT);
return RepeatStatus.FINISHED;
};
}
@Bean
@Qualifier("VendorJob")
public Job runVendorJob() {
return jobBuilderFactory.get("VendorJob").listener(new JobCompletionListener()).start(truncateVendorTable())
.next(loadVendorTable()).build();
}
@Bean
public TaskExecutor VendortaskExecutor() {
return new ConcurrentTaskExecutor(Executors.newCachedThreadPool());
}
}
VendorLineMapper.java
public class VendorLineMapper<T> implements LineMapper<WFitsVendor> , InitializingBean {
private LineTokenizer tokenizer;
private FieldSetMapper<WFitsVendor> fieldSetMapper;
@Override
public WFitsVendor mapLine(String line, int lineNumber) throws Exception {
WFitsVendor vr = fieldSetMapper.mapFieldSet(tokenizer.tokenize(line));
//System.out.println(line);
vr.setLineNo(lineNumber);
return vr;
}
public void setLineTokenizer(LineTokenizer tokenizer) {
this.tokenizer = tokenizer;
}
public void setFieldSetMapper(FieldSetMapper<WFitsVendor> fieldSetMapper) {
this.fieldSetMapper = fieldSetMapper;
}
@Override
public void afterPropertiesSet() {
Assert.notNull(tokenizer, "The LineTokenizer must be set");
Assert.notNull(fieldSetMapper, "The FieldSetMapper must be set");
}
}
ReaderPolicy.java
public class ReaderPolicy extends DefaultRecordSeparatorPolicy {
@Override
public boolean isEndOfRecord(final String line) {
return line.trim().length() != 0 && super.isEndOfRecord(line);
}
@Override
public String postProcess(final String record) {
if (record == null || record.trim().length() == 0) {
return null;
}
return super.postProcess(record);
}
}
JSON Returned from API:
You can use a URLResource and point your JSON item reader to it, something like:
@Bean(destroyMethod = "close")
public InputStream urlResource() throws IOException {
URL url = new URL("https://path.to.your.resource");
URLConnection urlConnection = url.openConnection();
// urlConnection.setRequestProperty("", ""); // set auth headers if necessary
return urlConnection.getInputStream();
}
@Bean
public JsonItemReader<Pojo> itemReader() throws IOException {
return new JsonItemReaderBuilder<Pojo>()
.name("restReader")
.resource(new InputStreamResource(urlResource()))
.strict(true)
.jsonObjectReader(new JacksonJsonObjectReader<>(Pojo.class))
.build();
}
I have read about partitioning in Spring Batch and found an example which demonstrates partitioning. The example reads persons from CSV files, does some processing and inserts the data into the database. In that example 1 partition = 1 file, so the partitioner implementation looks like this:
public class MultiResourcePartitioner implements Partitioner {
private final Logger logger = LoggerFactory.getLogger(MultiResourcePartitioner.class);
public static final String FILE_PATH = "filePath";
private static final String PARTITION_KEY = "partition";
private final Collection<Resource> resources;
public MultiResourcePartitioner(Collection<Resource> resources) {
this.resources = resources;
}
@Override
public Map<String, ExecutionContext> partition(int gridSize) {
Map<String, ExecutionContext> map = new HashMap<>(gridSize);
int i = 0;
for (Resource resource : resources) {
ExecutionContext context = new ExecutionContext();
context.putString(FILE_PATH, getPath(resource)); //Depends on what logic you want to use to split
map.put(PARTITION_KEY + i++, context);
}
return map;
}
private String getPath(Resource resource) {
try {
return resource.getFile().getPath();
} catch (IOException e) {
logger.warn("Can't get file from from resource {}", resource);
throw new RuntimeException(e);
}
}
}
But what if I have a single 10TB file? Does Spring Batch allow partitioning it in some way?
Update:
I tried the following approach to achieve what I want:
make 2 steps - the first step divides the file into pieces and the second step processes the pieces produced by the first step:
@Configuration
public class SingleFilePartitionedJob {
@Autowired
private JobBuilderFactory jobBuilderFactory;
@Autowired
private StepBuilderFactory stepBuilderFactory;
@Autowired
private ToLowerCasePersonProcessor toLowerCasePersonProcessor;
@Autowired
private DbPersonWriter dbPersonWriter;
@Autowired
private ResourcePatternResolver resourcePatternResolver;
@Value("${app.file-to-split}")
private Resource resource;
@Bean
public Job splitFileProcessingJob() throws IOException {
return jobBuilderFactory.get("splitFileProcessingJob")
.incrementer(new RunIdIncrementer())
.flow(splitFileIntoPiecesStep())
.next(csvToDbLowercaseMasterStep())
.end()
.build();
}
private Step splitFileIntoPiecesStep() throws IOException {
return stepBuilderFactory.get("splitFile")
.tasklet(new FileSplitterTasklet(resource.getFile()))
.build();
}
@Bean
public Step csvToDbLowercaseMasterStep() throws IOException {
MultiResourcePartitioner partitioner = new MultiResourcePartitioner();
partitioner.setResources(resourcePatternResolver.getResources("split/*.csv"));
return stepBuilderFactory.get("csvReaderMasterStep")
.partitioner("csvReaderMasterStep", partitioner)
.gridSize(10)
.step(csvToDataBaseSlaveStep())
.taskExecutor(jobTaskExecutorSplitted())
.build();
}
@Bean
public Step csvToDataBaseSlaveStep() throws MalformedURLException {
return stepBuilderFactory.get("csvToDatabaseStep")
.<Person, Person>chunk(50)
.reader(csvPersonReaderSplitted(null))
.processor(toLowerCasePersonProcessor)
.writer(dbPersonWriter)
.build();
}
@Bean
@StepScope
public FlatFileItemReader csvPersonReaderSplitted(@Value("#{stepExecutionContext[fileName]}") String fileName) throws MalformedURLException {
return new FlatFileItemReaderBuilder()
.name("csvPersonReaderSplitted")
.resource(new UrlResource(fileName))
.delimited()
.names(new String[]{"firstName", "lastName"})
.fieldSetMapper(new BeanWrapperFieldSetMapper<Person>() {{
setTargetType(Person.class);
}})
.build();
}
@Bean
public TaskExecutor jobTaskExecutorSplitted() {
ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
taskExecutor.setMaxPoolSize(30);
taskExecutor.setCorePoolSize(25);
taskExecutor.setThreadNamePrefix("cust-job-exec2-");
taskExecutor.afterPropertiesSet();
return taskExecutor;
}
}
tasklet:
public class FileSplitterTasklet implements Tasklet {
private final Logger logger = LoggerFactory.getLogger(FileSplitterTasklet.class);
private File file;
public FileSplitterTasklet(File file) {
this.file = file;
}
@Override
public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
int count = FileSplitter.splitTextFiles(file, 100);
logger.info("File was split on {} files", count);
return RepeatStatus.FINISHED;
}
}
Logic for splitting the file:
public static int splitTextFiles(File bigFile, int maxRows) throws IOException {
int fileCount = 1;
try (BufferedReader reader = Files.newBufferedReader(Paths.get(bigFile.getPath()))) {
String line = null;
int lineNum = 1;
Path splitFile = Paths.get(bigFile.getParent() + "/" + fileCount + "split.txt");
BufferedWriter writer = Files.newBufferedWriter(splitFile, StandardOpenOption.CREATE);
while ((line = reader.readLine()) != null) {
if (lineNum > maxRows) {
writer.close();
lineNum = 1;
fileCount++;
splitFile = Paths.get("split/" + fileCount + "split.txt");
writer = Files.newBufferedWriter(splitFile, StandardOpenOption.CREATE);
}
writer.append(line);
writer.newLine();
lineNum++;
}
writer.close();
}
return fileCount;
}
So I put all the file pieces into a dedicated directory.
But this doesn't work, because at the moment of context initialization the /split folder does not exist yet.
Update:
I've come up with a workaround which works:
public class MultiResourcePartitionerWrapper implements Partitioner {
private final MultiResourcePartitioner multiResourcePartitioner = new MultiResourcePartitioner();
private final ResourcePatternResolver resourcePatternResolver;
private final String pathPattern;
public MultiResourcePartitionerWrapper(ResourcePatternResolver resourcePatternResolver, String pathPattern) {
this.resourcePatternResolver = resourcePatternResolver;
this.pathPattern = pathPattern;
}
@Override
public Map<String, ExecutionContext> partition(int gridSize) {
try {
Resource[] resources = resourcePatternResolver.getResources(pathPattern);
multiResourcePartitioner.setResources(resources);
return multiResourcePartitioner.partition(gridSize);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
But it looks ugly. Is it a correct solution?
Spring Batch allows you to partition, but it's up to you how to do it.
You can simply split your 10TB file in the partitioner class (by number of files or by max rows), and have each partition read one split file. You can find a lot of examples of how to split a large file in Java:
split very large text file by max rows
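Alternatively, here is a sketch of a partitioner that avoids physically splitting the file by handing each partition a line range; the key names, the way the total line count is obtained, and the hard-coded file path are assumptions for illustration only. The step-scoped reader then sets linesToSkip and maxItemCount from stepExecutionContext:

public class LineRangePartitioner implements Partitioner {
    private final long totalLines;       // e.g. counted once up front, or known from metadata
    private final int linesPerPartition;
    public LineRangePartitioner(long totalLines, int linesPerPartition) {
        this.totalLines = totalLines;
        this.linesPerPartition = linesPerPartition;
    }
    @Override
    public Map<String, ExecutionContext> partition(int gridSize) {
        Map<String, ExecutionContext> map = new HashMap<>();
        int partitionNumber = 0;
        for (long start = 0; start < totalLines; start += linesPerPartition) {
            ExecutionContext context = new ExecutionContext();
            context.putLong("linesToSkip", start); // lines before this partition's range
            context.putLong("maxItemCount", Math.min(linesPerPartition, totalLines - start));
            map.put("partition" + partitionNumber++, context);
        }
        return map;
    }
}

The matching reader bean would open the same physical file in every partition but only consume its own range, for example:

@Bean
@StepScope
public FlatFileItemReader<Person> rangeReader(
        @Value("#{stepExecutionContext['linesToSkip']}") Long linesToSkip,
        @Value("#{stepExecutionContext['maxItemCount']}") Long maxItemCount) {
    FlatFileItemReader<Person> reader = new FlatFileItemReader<>();
    reader.setResource(new FileSystemResource("big-file.csv")); // path is an assumption
    reader.setLinesToSkip(linesToSkip.intValue());
    reader.setMaxItemCount(maxItemCount.intValue());
    // lineMapper configuration omitted for brevity
    return reader;
}

Note the trade-off: each partition still scans and skips the lines before its range, so this avoids writing split files at the cost of redundant skipping on very large inputs.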
I have written a controller which defaults to MotorUploadService (for Motor upload), but I need to make a factory design so that,
based on parentPkId, it calls HealUploadService, TempUploadService, PersonalUploadService etc., each of which has separate file processing stages.
The controller is below.
@RequestMapping(value = "/csvUpload", method = RequestMethod.POST)
public List<String> csvUpload(@RequestParam String parentPkId, @RequestParam List<MultipartFile> files)
throws IOException, InterruptedException, ExecutionException, TimeoutException {
log.info("Entered method csvUpload() of DaoController.class");
List<String> response = new ArrayList<String>();
ExecutorService executor = Executors.newFixedThreadPool(10);
CompletionService<String> compService = new ExecutorCompletionService<String>(executor);
List< Future<String> > futureList = new ArrayList<Future<String>>();
for (MultipartFile f : files) {
compService.submit(new ProcessMutlipartFile(f ,parentPkId,uploadService));
futureList.add(compService.take());
}
for (Future<String> f : futureList) {
long timeout = 0;
System.out.println(f.get(timeout, TimeUnit.SECONDS));
response.add(f.get());
}
executor.shutdown();
return response;
}
Here is the ProcessMutlipartFile class, which implements the Callable interface; CompletionService's compService.submit() invokes this class, which in turn executes the call() method, which processes a file.
public class ProcessMutlipartFile implements Callable<String>
{
private MultipartFile file;
private String temp;
private MotorUploadService motUploadService;
public ProcessMutlipartFile(MultipartFile file,String temp, MotorUploadService motUploadService )
{
this.file=file;
this.temp=temp;
this.motUploadService=motUploadService;
}
public String call() throws Exception
{
return motUploadService.csvUpload(temp, file);
}
}
Below is the MotorUploadService class, where I process the uploaded CSV file line by line and then call the validateCsvData() method to validate the data,
which returns an ErrorObject holding the line number and the errors associated with it.
If csvErrorRecords is null, the line is error-free and I proceed with saving to the DB;
otherwise I save the errorList to the DB and return an upload failure.
@Component
public class MotorUploadService {
#Value("${external.resource.folder}")
String resourceFolder;
public String csvUpload(String parentPkId, MultipartFile file) {
String OUT_PATH = resourceFolder;
try {
DateFormat df = new SimpleDateFormat("yyyyMMddhhmmss");
String fileName = file.getOriginalFilename().split("\\.")[0] + df.format(new Date()) + file.getOriginalFilename().split("\\.")[1];
Path path = Paths.get(OUT_PATH, fileName);
Files.copy(file.getInputStream(), path, StandardCopyOption.REPLACE_EXISTING);
}
catch(IOException e){
e.printStackTrace();
return "Failed to Upload File...try Again";
}
List<TxnMpMotSlaveRaw> txnMpMotSlvRawlist = new ArrayList<TxnMpMotSlaveRaw>();
try {
BufferedReader br = new BufferedReader(new InputStreamReader(file.getInputStream()));
String line = "";
int header = 0;
int lineNum = 1;
TxnMpSlaveErrorNew txnMpSlaveErrorNew = new TxnMpSlaveErrorNew();
List<CSVErrorRecords> errList = new ArrayList<CSVErrorRecords>();
while ((line = br.readLine()) != null) {
// TO SKIP HEADER
if (header == 0) {
header++;
continue;
}
lineNum++;
header++;
// Use Comma As Separator
String[] csvDataSet = line.split(",");
CSVErrorRecords csvErrorRecords = validateCsvData(lineNum, csvDataSet);
System.out.println("Errors from csvErrorRecords is " + csvErrorRecords);
if (csvErrorRecords == null || csvErrorRecords.getRecordNo() == 0) {
//Function to Save to Db
} else {
// add to errList
continue;
}
}
if (txnMpSlaveErrorNew.getErrRecord().size() == 0) {
//save all
return "Successfully Uploaded " + file.getOriginalFilename();
}
else {
// save the error in db;
return "Failure as it contains Faulty Information" + file.getOriginalFilename();
}
} catch (IOException ex) {
ex.printStackTrace();
return "Failure Uploaded " + file.getOriginalFilename();
}
}
private TxnMpMotSlaveRaw saveCsvData(String[] csvDataSet, String parentPkId) {
/*
Mapping csvDataSet to PoJo
returning Mapped Pojo;
*/
}
private CSVErrorRecords validateCsvData(int lineNum, String[] csvDataSet) {
/*
Logic for Validation goes here
*/
}
}
How do I turn this into a factory design pattern from the controller,
so that
parentPkId='Motor' calls MotorUploadService, and
parentPkId='Heal' calls HealUploadService?
I'm not very familiar with the factory design pattern, please help me out.
Thanks in advance.
If I understood the question, in essence you would create an interface, and then return a specific implementation based upon the desired type.
So
public interface UploadService {
String csvUpload(String temp, MultipartFile file) throws IOException;
}
The particular implementations
public class MotorUploadService implements UploadService
{
public String csvUpload(String temp, MultipartFile file) {
...
}
}
public class HealUploadService implements UploadService
{
public String csvUpload(String temp, MultipartFile file) {
...
}
}
Then a factory
public class UploadServiceFactory {
public UploadService getService(String type) {
if ("Motor".equals(type)) {
return new MotorUploadService();
}
else if ("Heal".equals(type)) {
return new HealUploadService();
}
throw new IllegalArgumentException("Unknown upload type: " + type);
}
}
The factory might cache the particular implementations. One can also use an abstract class rather than an interface if appropriate.
I think you currently have a class UploadService but that is really the MotorUploadService if I followed your code, so I would rename it to be specific.
Then in the controller, presumably having used injection for the UploadServiceFactory
...
for (MultipartFile f : files) {
UploadService uploadSrvc = uploadServiceFactory.getService(parentPkId);
compService.submit(new ProcessMutlipartFile(f, parentPkId, uploadSrvc));
futureList.add(compService.take());
}
So with some additional reading in your classes:
public class ProcessMutlipartFile implements Callable<String>
{
private MultipartFile file;
private String temp;
private UploadService uploadService;
// change to take the interface UploadService
public ProcessMutlipartFile(MultipartFile file,String temp, UploadService uploadService )
{
this.file=file;
this.temp=temp;
this.uploadService=uploadService;
}
public String call() throws Exception
{
return uploadService.csvUpload(temp, file);
}
}
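As a side note, if the implementations are Spring beans, a common variant (a sketch, assuming the default bean names shown below) is to let Spring inject all UploadService beans into the factory as a map keyed by bean name, so new implementations don't require editing an if/else chain:

@Component
public class UploadServiceFactory {
    // Spring populates this map with every UploadService bean, keyed by bean name (e.g. "motorUploadService")
    private final Map<String, UploadService> services;
    public UploadServiceFactory(Map<String, UploadService> services) {
        this.services = services;
    }
    public UploadService getService(String type) {
        UploadService service = services.get(type.toLowerCase() + "UploadService");
        if (service == null) {
            throw new IllegalArgumentException("Unknown upload type: " + type);
        }
        return service;
    }
}

With that wiring, adding PersonalUploadService only requires annotating it with @Component (or @Service); the factory picks it up automatically.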
I am trying to write some integration tests for methods that need to extract data from MongoDB. In detail, I am using the embedded Mongo provided by the Spring Data project; under the hood the embedded Mongo is provided by Flapdoodle.
I need to import some JSON files into the embedded Mongo. I have looked at the tests provided with Flapdoodle, but I am not able to understand how they integrate with the magic provided by Spring Data + Spring Boot.
Can anyone post some clarifying snippets?
You can create a JUnit rule (ExternalResource) which runs before and after each test. Check the MongoEmbeddedRule class below to get an idea of the implementation details.
Integration test:
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = RANDOM_PORT)
public abstract class TestRunner {
@Autowired
protected MongoTemplate mongoTemplate;
@Rule
public MongoEmbeddedRule mongoEmbeddedRule = new MongoEmbeddedRule(this);
}
ExternalResource Rule:
public class MongoEmbeddedRule extends ExternalResource {
private final Object testClassInstance;
private final Map<String, Path> mongoCollectionDataPaths;
private final String fieldName;
private final String getterName;
public MongoEmbeddedRule(final Object testClassInstance) {
this(testClassInstance, "mongoTemplate", "getMongoTemplate");
}
protected MongoEmbeddedRule(final Object testClassInstance, final String fieldName, final String getterName) {
this.fieldName = fieldName;
this.getterName = getterName;
this.testClassInstance = testClassInstance;
this.mongoCollectionDataPaths = mongoExtendedJsonFilesLookup();
}
@Override
protected void before() {
dropCollections();
createAndPopulateCollections();
}
@Override
protected void after() {
}
protected Set<String> getMongoCollectionNames() {
return mongoCollectionDataPaths.keySet();
}
public void dropCollections() {
getMongoCollectionNames().forEach(collectionName -> getMongoTemplate().dropCollection(collectionName));
}
protected void createAndPopulateCollections() {
mongoCollectionDataPaths.forEach((key, value) -> insertDocumentsFromMongoExtendedJsonFile(value, key));
}
protected MongoTemplate getMongoTemplate() {
try {
Object value = ReflectionTestUtils.getField(testClassInstance, fieldName);
if (value instanceof MongoTemplate) {
return (MongoTemplate) value;
}
value = ReflectionTestUtils.invokeGetterMethod(testClassInstance, getterName);
if (value instanceof MongoTemplate) {
return (MongoTemplate) value;
}
} catch (final IllegalArgumentException e) {
// throw exception with dedicated message at the end
}
throw new IllegalArgumentException(
String.format(
"%s expects either field '%s' or method '%s' in order to access the required MongoTemmplate",
this.getClass().getSimpleName(), fieldName, getterName));
}
private Map<String, Path> mongoExtendedJsonFilesLookup() {
Map<String, Path> collections = new HashMap<>();
try {
Files.walk(Paths.get("src","test","resources","mongo"))
.filter(Files::isRegularFile)
.forEach(filePath -> collections.put(
filePath.getFileName().toString().replace(".json", ""),
filePath));
} catch (IOException e) {
e.printStackTrace();
}
return collections;
}
private void insertDocumentsFromMongoExtendedJsonFile(Path path, String collectionName) {
try {
List<Document> documents = new ArrayList<>();
Files.readAllLines(path).forEach(l -> documents.add(Document.parse(l)));
getMongoTemplate().getCollection(collectionName).insertMany(documents);
System.out.println(documents.size() + " documents loaded for " + collectionName + " collection.");
} catch (IOException e) {
e.printStackTrace();
}
}
}
A JSON file (names.json) in MongoDB Extended JSON format, where every document is on one line and the collection name is the filename without the extension:
{ "_id" : ObjectId("594d324d5b49b78da8ce2f28"), "someId" : NumberLong(1), "name" : "Some Name 1", "lastModified" : ISODate("1970-01-01T00:00:00Z")}
{ "_id" : ObjectId("594d324d5b49b78da8ce2f29"), "someId" : NumberLong(2), "name" : "Some Name 2", "lastModified" : ISODate("1970-01-01T00:00:00Z")}
You can have a look at the following test class, provided by "flapdoodle". The test shows how to import a JSON file containing the collection dataset:
MongoImportExecutableTest.java
You could theoretically also import a whole dump of a database. (using MongoDB restore):
MongoRestoreExecutableTest.java
You can create an abstract class and have setup logic to start mongod and mongoimport process.
AbstractMongoDBTest.java
public abstract class AbstractMongoDBTest {
private MongodProcess mongodProcess;
private MongoImportProcess mongoImportProcess;
private MongoTemplate mongoTemplate;
void setup(String dbName, String collection, String jsonFile) throws Exception {
String ip = "localhost";
int port = 12345;
IMongodConfig mongodConfig = new MongodConfigBuilder().version(Version.Main.PRODUCTION)
.net(new Net(ip, port, Network.localhostIsIPv6()))
.build();
MongodStarter starter = MongodStarter.getDefaultInstance();
MongodExecutable mongodExecutable = starter.prepare(mongodConfig);
File dataFile = new File(Thread.currentThread().getContextClassLoader().getResource(jsonFile).getFile());
MongoImportExecutable mongoImportExecutable = mongoImportExecutable(port, dbName,
collection, dataFile.getAbsolutePath()
, true, true, true);
mongodProcess = mongodExecutable.start();
mongoImportProcess = mongoImportExecutable.start();
mongoTemplate = new MongoTemplate(new MongoClient(ip, port), dbName);
}
private MongoImportExecutable mongoImportExecutable(int port, String dbName, String collection, String jsonFile,
Boolean jsonArray, Boolean upsert, Boolean drop) throws
IOException {
IMongoImportConfig mongoImportConfig = new MongoImportConfigBuilder()
.version(Version.Main.PRODUCTION)
.net(new Net(port, Network.localhostIsIPv6()))
.db(dbName)
.collection(collection)
.upsert(upsert)
.dropCollection(drop)
.jsonArray(jsonArray)
.importFile(jsonFile)
.build();
return MongoImportStarter.getDefaultInstance().prepare(mongoImportConfig);
}
@AfterEach
void clean() {
mongoImportProcess.stop();
mongodProcess.stop();
}
public MongoTemplate getMongoTemplate(){
return mongoTemplate;
}
}
YourTestClass.java
public class YourTestClass extends AbstractMongoDBTest{
@BeforeEach
void setup() throws Exception {
super.setup("db", "collection", "jsonfile");
}
@Test
void test() throws Exception {
}
}
Currently I am working on Spring annotation based dependency injection for the Activity Worker and the Workflow Worker, as per the documentation. I have defined my beans inside my Spring Boot application. Each worker is defined in a separate Maven module. The issue I am facing is that while my ActivityWorker Spring Boot module stays active and starts polling for activities, the Workflow Worker stops immediately after starting the module with the message:
Unregistering JMX-exposed beans on shutdown
My implementation is as follows:
@Activities(version = "2.2")
@ActivityRegistrationOptions(defaultTaskScheduleToStartTimeoutSeconds = 300, defaultTaskStartToCloseTimeoutSeconds = 100)
public interface TempActivities {
public GreetWrapper getName();
public void say(String what);
/* public Integer doProcess();
public int sum(Integer num);*/
}
public class TempActivitiesImpl implements TempActivities {
GreetWrapper greetObj = new GreetWrapper();
public TempActivitiesImpl() {
// TODO Auto-generated constructor stub
}
@Override
public GreetWrapper getName() {
greetObj.setGreet("World");
return greetObj;
}
@Override
public void say(String what) {
System.out.println(what);
}
}
@Workflow(dataConverter = GreetWrapper.class)
@WorkflowRegistrationOptions(defaultExecutionStartToCloseTimeoutSeconds = 3600)
public interface TempWorkflow {
@Execute(name = "TempWorkflow", version = "2.2")
public void greet();
}
public class TempWorkflowImpl implements TempWorkflow {
private TempActivitiesClient activitiesClientImpl = new TempActivitiesClientImpl();
private DecisionContextProvider contextProvider = new DecisionContextProviderImpl();
private WorkflowClock clock = contextProvider.getDecisionContext().getWorkflowClock();
@Override
public void greet() {
greet1(0);
}
public void greet1(int count, Promise<?>... waitFor) {
if (count == 3) {
return;
}
Promise<GreetWrapper> name = activitiesClientImpl.getName();
Promise<String> greeting = getGreeting(name);
activitiesClientImpl.say(greeting);
Promise<Void> timer = clock.createTimer(30);
greet1(count + 1, timer);
}
@Asynchronous
public Promise<String> getGreeting(Promise<GreetWrapper> name) {
String greeting = "Hello " + name.get().getGreet();
System.out.println("Greeting: " + greeting);
return Promise.asPromise(greeting);
}
}
Here are my Activity Worker beans:
@Configuration
public class AppConfig {
public String getActivityTasklistName() {
return "HelloWorldTaskList";
}
public String getDomainName() {
return "helloWorldWalkthrough2";
}
public String getWorkflowTasklistName() {
return "HelloWorldWorkflow";
}
public String getEndPoint() {
String endPoint = "https://swf.us-east-1.amazonaws.com";
return endPoint;
}
String swfAccessId = System.getenv("AWS_ACCESS_KEY_ID");
String swfSecretKey = System.getenv("AWS_SECRET_ACCESS_KEY");
/*@Autowired
TempActivities tempActivitiesImpl;
@Autowired
TempWorkflow tempWorkflowImpl; */
@Bean
public ClientConfiguration clientConfiguration() {
ClientConfiguration config = new ClientConfiguration();
config.withSocketTimeout(70 * 1000);
return config;
}
@Bean
public AWSCredentials basicAWSCredentials() {
BasicAWSCredentials basicAWSCredentials = new BasicAWSCredentials(swfAccessId, swfSecretKey);
return basicAWSCredentials;
}
@Bean
public AmazonSimpleWorkflow amazonSimpleWorkflowClient() {
AmazonSimpleWorkflow amazonSimpleWorkflowClient = new AmazonSimpleWorkflowClient(basicAWSCredentials(), clientConfiguration());
amazonSimpleWorkflowClient.setEndpoint(getEndPoint());
return amazonSimpleWorkflowClient;
}
@Bean
public TempActivitiesClient tempActivitiesClient() {
TempActivitiesClient tempActivitiesClient = new TempActivitiesClientImpl();
return tempActivitiesClient;
}
@Bean
public SpringActivityWorker springActivityWorker() throws InstantiationException, IllegalAccessException, SecurityException, NoSuchMethodException {
SpringActivityWorker activityWorker = new SpringActivityWorker(amazonSimpleWorkflowClient(), getDomainName(), getWorkflowTasklistName());
activityWorker.addActivitiesImplementation(new TempActivitiesImpl());
return activityWorker;
}
}
Here are my workflow worker beans:
public class WorkFlowAppConfig {
public String getActivityTasklistName() {
return "HelloWorldTaskList";
}
public String getDomainName() {
return "helloWorldWalkthrough2";
}
public String getWorkflowTasklistName() {
return "HelloWorldWorkflow";
}
public String getEndPoint() {
String endPoint = "https://swf.us-east-1.amazonaws.com";
return endPoint;
}
String swfAccessId = System.getenv("AWS_ACCESS_KEY_ID");
String swfSecretKey = System.getenv("AWS_SECRET_ACCESS_KEY");
/*@Autowired
TempActivities tempActivitiesImpl;*/
@Autowired
TempWorkflow tempWorkflowImpl;
@Bean
@Scope("workflow")
public ClientConfiguration clientConfiguration() {
ClientConfiguration config = new ClientConfiguration();
config.withSocketTimeout(70 * 1000);
return config;
}
@Bean
@Scope("workflow")
public AWSCredentials basicAWSCredentials() {
BasicAWSCredentials basicAWSCredentials = new BasicAWSCredentials(swfAccessId, swfSecretKey);
return basicAWSCredentials;
}
@Bean
@Scope("workflow")
public AmazonSimpleWorkflow amazonSimpleWorkflowClient() {
AmazonSimpleWorkflow amazonSimpleWorkflowClient = new AmazonSimpleWorkflowClient(basicAWSCredentials(), clientConfiguration());
amazonSimpleWorkflowClient.setEndpoint(getEndPoint());
return amazonSimpleWorkflowClient;
}
@Bean
@Scope("workflow")
public TempActivitiesClient activitiesClientImpl() {
return new TempActivitiesClientImpl();
}
@Bean
@Scope("workflow")
public SpringWorkflowWorker springWorkflowWorker() throws InstantiationException, IllegalAccessException {
SpringWorkflowWorker workflowWorker = new SpringWorkflowWorker(amazonSimpleWorkflowClient(), getDomainName(), getWorkflowTasklistName());
workflowWorker.addWorkflowImplementation(tempWorkflowImpl);
workflowWorker.setRegisterDomain(true);
// workflowWorker.setDomainRetentionPeriodInDays(1);
return workflowWorker;
}
@Bean
public CustomScopeConfigurer customScope() {
CustomScopeConfigurer configurer = new CustomScopeConfigurer();
Map<String, Object> workflowScope = new HashMap<String, Object>();
workflowScope.put("workflow", new WorkflowScope());
configurer.setScopes(workflowScope);
return configurer;
}
}