Import JSON file into MongoDB using Spring Data Embedded Mongo - java

I am trying to write some integration tests for methods that need to extract data from MongoDB. In detail, I am using the embedded Mongo provided by the Spring Data project; the embedded Mongo itself is provided by Flapdoodle.
I need to import some JSON files into the embedded Mongo. I have looked at the tests that ship with Flapdoodle, but I am not able to understand how they integrate with the magic provided by Spring Data + Spring Boot.
Can anyone post some clarifying snippets?

You can create a JUnit rule (ExternalResource) which runs before and after each test. Check the MongoEmbeddedRule class below to get an idea of the implementation details.
Integration test:
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = RANDOM_PORT)
public abstract class TestRunner {
@Autowired
protected MongoTemplate mongoTemplate;
@Rule
public MongoEmbeddedRule mongoEmbeddedRule = new MongoEmbeddedRule(this);
}
ExternalResource Rule:
public class MongoEmbeddedRule extends ExternalResource {
private final Object testClassInstance;
private final Map<String, Path> mongoCollectionDataPaths;
private final String fieldName;
private final String getterName;
public MongoEmbeddedRule(final Object testClassInstance) {
this(testClassInstance, "mongoTemplate", "getMongoTemplate");
}
protected MongoEmbeddedRule(final Object testClassInstance, final String fieldName, final String getterName) {
this.fieldName = fieldName;
this.getterName = getterName;
this.testClassInstance = testClassInstance;
this.mongoCollectionDataPaths = mongoExtendedJsonFilesLookup();
}
@Override
protected void before() {
dropCollections();
createAndPopulateCollections();
}
@Override
protected void after() {
}
protected Set<String> getMongoCollectionNames() {
return mongoCollectionDataPaths.keySet();
}
public void dropCollections() {
getMongoCollectionNames().forEach(collectionName -> getMongoTemplate().dropCollection(collectionName));
}
protected void createAndPopulateCollections() {
mongoCollectionDataPaths.forEach((key, value) -> insertDocumentsFromMongoExtendedJsonFile(value, key));
}
protected MongoTemplate getMongoTemplate() {
try {
Object value = ReflectionTestUtils.getField(testClassInstance, fieldName);
if (value instanceof MongoTemplate) {
return (MongoTemplate) value;
}
value = ReflectionTestUtils.invokeGetterMethod(testClassInstance, getterName);
if (value instanceof MongoTemplate) {
return (MongoTemplate) value;
}
} catch (final IllegalArgumentException e) {
// throw exception with dedicated message at the end
}
throw new IllegalArgumentException(
String.format(
"%s expects either field '%s' or method '%s' in order to access the required MongoTemmplate",
this.getClass().getSimpleName(), fieldName, getterName));
}
private Map<String, Path> mongoExtendedJsonFilesLookup() {
Map<String, Path> collections = new HashMap<>();
try {
Files.walk(Paths.get("src","test","resources","mongo"))
.filter(Files::isRegularFile)
.forEach(filePath -> collections.put(
filePath.getFileName().toString().replace(".json", ""),
filePath));
} catch (IOException e) {
e.printStackTrace();
}
return collections;
}
private void insertDocumentsFromMongoExtendedJsonFile(Path path, String collectionName) {
try {
List<Document> documents = new ArrayList<>();
Files.readAllLines(path).forEach(l -> documents.add(Document.parse(l)));
getMongoTemplate().getCollection(collectionName).insertMany(documents);
System.out.println(documents.size() + " documents loaded for " + collectionName + " collection.");
} catch (IOException e) {
e.printStackTrace();
}
}
}
JSON file (names.json) in MongoDB Extended JSON format, where every document is on one line and the collection name is the filename without the extension:
{ "_id" : ObjectId("594d324d5b49b78da8ce2f28"), "someId" : NumberLong(1), "name" : "Some Name 1", "lastModified" : ISODate("1970-01-01T00:00:00Z")}
{ "_id" : ObjectId("594d324d5b49b78da8ce2f29"), "someId" : NumberLong(2), "name" : "Some Name 2", "lastModified" : ISODate("1970-01-01T00:00:00Z")}

You can have a look at the following test class, provided by flapdoodle. The test shows how to import a JSON file containing the collection dataset:
MongoImportExecutableTest.java
You could theoretically also import a whole dump of a database (using mongorestore):
MongoRestoreExecutableTest.java

You can create an abstract class with setup logic that starts the mongod and mongoimport processes.
AbstractMongoDBTest.java
public abstract class AbstractMongoDBTest {
private MongodProcess mongodProcess;
private MongoImportProcess mongoImportProcess;
private MongoTemplate mongoTemplate;
void setup(String dbName, String collection, String jsonFile) throws Exception {
String ip = "localhost";
int port = 12345; // fixed port for the embedded instance; must be free on the host
IMongodConfig mongodConfig = new MongodConfigBuilder().version(Version.Main.PRODUCTION)
.net(new Net(ip, port, Network.localhostIsIPv6()))
.build();
MongodStarter starter = MongodStarter.getDefaultInstance();
MongodExecutable mongodExecutable = starter.prepare(mongodConfig);
File dataFile = new File(Thread.currentThread().getContextClassLoader().getResource(jsonFile).getFile());
MongoImportExecutable mongoImportExecutable = mongoImportExecutable(port, dbName,
collection, dataFile.getAbsolutePath()
, true, true, true);
mongodProcess = mongodExecutable.start();
mongoImportProcess = mongoImportExecutable.start();
mongoTemplate = new MongoTemplate(new MongoClient(ip, port), dbName);
}
private MongoImportExecutable mongoImportExecutable(int port, String dbName, String collection, String jsonFile,
Boolean jsonArray, Boolean upsert, Boolean drop) throws
IOException {
IMongoImportConfig mongoImportConfig = new MongoImportConfigBuilder()
.version(Version.Main.PRODUCTION)
.net(new Net(port, Network.localhostIsIPv6()))
.db(dbName)
.collection(collection)
.upsert(upsert)
.dropCollection(drop)
.jsonArray(jsonArray)
.importFile(jsonFile)
.build();
return MongoImportStarter.getDefaultInstance().prepare(mongoImportConfig);
}
@AfterEach
void clean() {
mongoImportProcess.stop();
mongodProcess.stop();
}
public MongoTemplate getMongoTemplate(){
return mongoTemplate;
}
}
YourTestClass.java
public class YourTestClass extends AbstractMongoDBTest{
@BeforeEach
void setup() throws Exception {
super.setup("db", "collection", "jsonfile");
}
@Test
void test() throws Exception {
}
}
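Once the data is imported, a test inside YourTestClass can verify it through the MongoTemplate exposed by the base class. An illustrative sketch (the collection name must match what was passed to setup(); assumes org.bson.Document, java.util.List and JUnit 5 assertions are imported):
@Test
void importedDocumentsArePresent() {
// "collection" must match the collection name passed to super.setup(...)
List<Document> docs = getMongoTemplate().findAll(Document.class, "collection");
assertFalse(docs.isEmpty(), "expected mongoimport to have loaded at least one document");
}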

Related

Repository Returning Null while attempting Horizontal Scaling of a Service Class

[ISSUE] The repo always returns null when I call repo methods; while stepping through, it throws a null pointer exception, and the front end then receives
500: Http failure response for http://localhost:4200/api/aiprollout/updatecsv: 500 Internal Server Error
[HAVE TRIED] Adjusting @Autowired and component and service annotations.
[QUESTIONS]
1- Does every repo method need its own service and controller method?
2- Is it okay to create a new service that uses an existing controller?
3- If this new service uses SuperCsv and I create custom CsvCellProcessors, can these cell processors also call the repo? Should these cell processors perform logic, or should that be done elsewhere? What class annotations should these cell processor classes have? @Component?
Any advice is greatly appreciated; I feel a little lost at this point, not even sure what to do.
[CODE]
Controller:
@RestController
@EnableConfigurationProperties({SpoofingConfigurationProperties.class})
@RequestMapping(value = "")
public class AipRolloutController {
private final Logger logger = LoggerFactory.getLogger(AipRolloutController.class); // "some logger" in the original
private final AipRolloutService aipRolloutService;
private final CsvParserService csvParserService;
@Autowired
public AipRolloutController(AipRolloutService aipRolloutService, CsvParserService csvParserService) {
this.aipRolloutService = aipRolloutService;
this.csvParserService = csvParserService;
}
@PostMapping(value = "/updatecsv", produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public ResponseEntity<?> processCsv(@RequestParam("csvFile") MultipartFile csvFile) throws IOException {
if (csvFile.isEmpty()) return new ResponseEntity(
responceJson("please select a file!"),
HttpStatus.NO_CONTENT
);
csvParserService.parseCsvFile(csvFile);
return new ResponseEntity(
responceJson("Successfully uploaded - " + csvFile.getOriginalFilename()),
new HttpHeaders(),
HttpStatus.CREATED
);
}
Service:
@Service
public class AipRolloutService {
private static final Logger logger = LoggerFactory.getLogger(AipRolloutService.class); // "some logger" in the original
@Autowired
private AIPRolloutRepository aipRolloutRepository;
New Csv parser Service
@Service
public class CsvParserService {
@Autowired private AipRolloutService aipRolloutService;
public CsvParserService(AipRolloutService aipRolloutService) {
this.aipRolloutService = aipRolloutService;
}
public void parseCsvFile(MultipartFile csvFile) throws IOException {
CsvMapReader csvMapReader = new CsvMapReader(new InputStreamReader(csvFile.getInputStream()), CsvPreference.STANDARD_PREFERENCE);
parseCsv(csvMapReader);
csvMapReader.close();
}
private void parseCsv(CsvMapReader csvMapReader) throws IOException {
String[] header = csvMapReader.getHeader(true);
List<String> headers = Arrays.asList(header);
verifySourceColumn(headers);
verifyPovColumn(headers);
final CellProcessor[] processors = getProcessors(headers);
Map<String, Object> csvImportMap = null;
while ((csvImportMap = csvMapReader.read(header, processors)) != null) {
CsvImportDTO csvImportDto = new CsvImportDTO(csvImportMap);
if ( activationTypeP(csvImportDto) ){
int mssValue = Integer.parseInt(csvImportDto.getMssValue());
aipRolloutService.updateAipRollout(csvImportDto.getSource(),
csvImportDto.getPov(),
csvImportDto.getActivationType(),
mssValue);
}
}
}
private CellProcessor[] getProcessors(List<String> headers) {
CellProcessor[] processors = new CellProcessor[headers.size()];
int index = 0;
for (String header : headers) {
if (header.contains(SOURCE_ID)) {
processors[index++] = new CsvSourceIdCellParser();
} else if (header.contains(POV)) {
processors[index++] = new CsvPovCellParser();
} else if (header.contains(ACTIVATION_TYPE)) {
processors[index++] = new CsvActivationTypeCellParser();
} else if (header.contains(ACTIVATION_DATE)) {
processors[index++] = new Optional();
} else if (header.contains(DEACTIVATION_DATE)) {
processors[index++] = new Optional();
} else if (header.contains(MSS_VALUE)) {
processors[index++] = new CsvMssValueCellParser();
} else {
processors[index++] = null; // throw exception? wrong header info instead of allowing null?
}
}
return processors;
}
Custom Cell Processor that calls repo and returns null
public class CsvSourceIdCellParser extends CellProcessorAdaptor {
@Autowired AIPRolloutRepository aipRolloutRepository;
public CsvSourceIdCellParser(){ super(); }
// this constructor allows other processors to be chained
public CsvSourceIdCellParser(CellProcessor next){ super(next); }
@Override
public Object execute(Object value, CsvContext csvContext) {
// throws an Exception if the input is null
validateInputNotNull(value, csvContext);
// get rid of description only need first 3 #'s
value = value.toString().substring(0,3);
// check if WH exists
if( aipRolloutRepository.dcExistsInDatabase(value.toString()) )
return value;
else
throw new RuntimeException("Check Warehouse Value, Value Not Found "
+ "Row number: " + csvContext.getRowNumber()
+ " Column number: " + csvContext.getColumnNumber());
}
}
Repository
@Repository
public class AIPRolloutRepository {
private static final Logger logger = LoggerFactory.getLogger(AIPRolloutRepository.class);
@Autowired
JdbcTemplate jdbcTemplate;
public AIPRolloutRepository() {
}
public boolean dcExistsInDatabase(String dc){
// Query for a count saves time and memory; query for distinct saves time and memory on execution
boolean hasRecord =
jdbcTemplate
.query( "select count (distinct '" + dc + "') " +
"from xxcus.XX_AIP_ROLLOUT " +
"where DC = '" + dc + "';",
new Object[] { dc },
(ResultSet rs) -> {
if (rs.next()) {
return true;
}
return false;
}
);
return hasRecord;
}
}
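As an aside, the query above concatenates dc into the SQL while also passing it in the bind-parameter array, even though the statement contains no ? placeholder. A parameterized sketch of the same existence check (illustrative only, not from the post):
public boolean dcExistsInDatabase(String dc) {
// Bind dc instead of concatenating it: avoids SQL injection and the
// parameter-count mismatch in the original query.
Integer count = jdbcTemplate.queryForObject(
"select count(*) from xxcus.XX_AIP_ROLLOUT where DC = ?",
Integer.class,
dc);
return count != null && count > 0;
}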

Persist data to the DB with message queue

We have a gRPC server that inserts the data into CockroachDB, and the data comes from a Spring Boot micro-service.
This is my code to persist in the CRDB database:
@Service
@Transactional(propagation = Propagation.REQUIRED, rollbackFor = Exception.class)
public class CockroachPersister {
private static final String X_AMZN_REQUESTID = "x-amzn-RequestId";
private static final String X_AMZN_RESPONSE = "x-amzn-Response";
private static final String PUTITEM = "PutItem";
private static final String GETITEM = "GetItem";
private static final String DELETEITEM = "DeleteItem";
private static final String UPDATEITEM = "UpdateItem";
public <T extends Message> T save(final String requestBody, final String action, final String tableName) {
T t = null;
try {
List<GRPCMapper> lGRPCMapper = ServiceMapper.getServices(action,tableName);
for (GRPCMapper grpcMapper : lGRPCMapper) {
System.out.println("grpcMapper.getClassName() ==> "+grpcMapper.getClassName());
Class<?> className = Class.forName(grpcMapper.getClassName());
Class<?> implementedClassType = Class.forName(grpcMapper.getImplementedClass());
Method userMethod = implementedClassType.getDeclaredMethod(grpcMapper.getServiceName(), className);
System.out.println("userMethod\t" + userMethod.getName());
t = (T) userMethod.invoke(null, ProtoUtil.getInstance(requestBody, grpcMapper.getProtoType()));
System.out.printf("Service => %s row(s) Inserted \n", t.getAllFields().toString());
}
} catch (Exception e) {
e.printStackTrace();
}
return t;
}
}
If the initial insertion fails, I would like to retry at least 3 TIMES before we log the error. How do I implement that?
A solution that uses a message queue would also be acceptable.
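For reference, one common approach (not part of the original post) is Spring Retry, assuming spring-retry is on the classpath and @EnableRetry is declared on a configuration class. Note that the existing save() swallows exceptions in its try/catch; for any retry mechanism to kick in, the exception must be allowed to propagate. A minimal sketch wrapping the existing service:
@Service
public class RetryingPersister {
private static final Logger log = LoggerFactory.getLogger(RetryingPersister.class);
private final CockroachPersister delegate; // the existing service from the post
public RetryingPersister(CockroachPersister delegate) {
this.delegate = delegate;
}
// Retries up to 3 times, pausing 1s between attempts.
@Retryable(value = Exception.class, maxAttempts = 3, backoff = @Backoff(delay = 1000))
public <T extends Message> T save(String requestBody, String action, String tableName) {
return delegate.save(requestBody, action, tableName);
}
// Invoked only after the final attempt has failed.
@Recover
public <T extends Message> T logFailure(Exception e, String requestBody, String action, String tableName) {
log.error("Insert failed after 3 attempts for action {} on table {}", action, tableName, e);
return null;
}
}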

Springboot Project use AbstractRoutingDataSource question

My project uses Spring Boot + Spring Data JPA + Shiro.
Because my server database uses the master/slave method, my code needs to connect to the two databases, so I designed it with the AbstractRoutingDataSource + AOP method. Now I have a problem that I think may be caused by Shiro.
I know that the connection switching is performed by the getConnection() method of AbstractRoutingDataSource, and I cannot manually control this method. The problem is that getConnection() is executed at most twice in one interface request. Let me post my code and describe it:
@Order(0)
@Aspect
@Component
public class RoutingAopAspect {
@Around("@annotation(targetDataSource)")
public Object routingWithDataSource(ProceedingJoinPoint joinPoint, TargetDataSource targetDataSource) throws Throwable {
try {
DynamicRoutingDataSourceContext.setRoutingDataSource(targetDataSource.value());
return joinPoint.proceed();
} finally {
DynamicRoutingDataSourceContext.removeRoutingDataSource();
}
}
}
public class DynamicRoutingDataSourceContext {
public static final String MASTER = "master";
public static final String SLAVE = "slave";
private static final ThreadLocal<Object> threadLocalDataSource = new ThreadLocal<>();
public static void setRoutingDataSource(Object dataSource) {
if (dataSource == null) {
throw new NullPointerException();
}
threadLocalDataSource.set(dataSource);
// System.err.println(Thread.currentThread().getName()+" set RoutingDataSource : " + dataSource);
}
public static Object getRoutingDataSource() {
Object dataSourceType = threadLocalDataSource.get();
if (dataSourceType == null) {
threadLocalDataSource.set(DynamicRoutingDataSourceContext.MASTER);
return getRoutingDataSource();
}
// System.err.println(Thread.currentThread().getName()+" get RoutingDataSource : " + dataSourceType);
return dataSourceType;
}
public static void removeRoutingDataSource() {
threadLocalDataSource.remove();
// System.err.println(Thread.currentThread().getName()+" remove RoutingDataSource");
}
}
@EnableTransactionManagement
@Configuration
public class DataSourceConfig {
#Value("${datasource.master.url}")
private String masterUrl;
#Value("${datasource.master.username}")
private String masterUsername;
#Value("${datasource.master.password}")
private String masterPassword;
#Value("${dataSource.driverClass}")
private String masterDriverClassName;
#Value("${datasource.slave.url}")
private String slaveUrl;
#Value("${datasource.slave.username}")
private String slaveUsername;
#Value("${datasource.slave.password}")
private String slavePassword;
#Value("${dataSource.driverClass}")
private String slaveDriverClassName;
#Bean(name = "masterDataSource")
public DataSource masterDataSource(){
DruidDataSource datasource = new DruidDataSource();
datasource.setUrl(masterUrl);
datasource.setUsername(masterUsername);
datasource.setPassword(masterPassword);
datasource.setDriverClassName(masterDriverClassName);
return datasource;
}
#Bean(name = "slaveDataSource")
public DataSource slaveDataSource(){
DruidDataSource datasource = new DruidDataSource();
datasource.setUrl(slaveUrl);
datasource.setUsername(slaveUsername);
datasource.setPassword(slavePassword);
datasource.setDriverClassName(slaveDriverClassName);
return datasource;
}
@Primary
@Bean
public DynamicRoutingDataSource dynamicDataSource(@Qualifier(value = "masterDataSource") DataSource masterDataSource,
@Qualifier(value = "slaveDataSource") DataSource slaveDataSource) {
Map<Object, Object> targetDataSources = new HashMap<>(2);
targetDataSources.put(DynamicRoutingDataSourceContext.MASTER, masterDataSource);
targetDataSources.put(DynamicRoutingDataSourceContext.SLAVE, slaveDataSource);
DynamicRoutingDataSource dynamicRoutingDataSource = new DynamicRoutingDataSource();
dynamicRoutingDataSource.setTargetDataSources(targetDataSources);
dynamicRoutingDataSource.setDefaultTargetDataSource(masterDataSource);
dynamicRoutingDataSource.afterPropertiesSet();
return dynamicRoutingDataSource;
}
}
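The DynamicRoutingDataSource referenced above is not shown in the post; presumably it is the usual minimal subclass of AbstractRoutingDataSource that routes lookups to the ThreadLocal context, something like:
public class DynamicRoutingDataSource extends AbstractRoutingDataSource {
@Override
protected Object determineCurrentLookupKey() {
// Returns "master" or "slave" as stored by the AOP aspect
return DynamicRoutingDataSourceContext.getRoutingDataSource();
}
}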
This is the relevant basic configuration of AbstractRoutingDataSource.
I defined an aspect to read the parameter of @TargetDataSource on a method. This parameter is the data source that needs to be used for the current execution. I think there is no problem with my configuration.
Then I use @TargetDataSource on my service methods, and I use Shiro: Shiro's doGetAuthorizationInfo() and doGetAuthenticationInfo() methods are executed before my service, and both methods need to call my UserService.
The problem now is that after the doGetAuthorizationInfo() and doGetAuthenticationInfo() methods are called, they automatically execute the getConnection() method of AbstractRoutingDataSource to switch the data source; when execution then reaches my own service, getConnection() is not executed again. This is what I meant by getConnection() being executed at most twice in one interface request.
@Slf4j
@Component
public class ShiroRealm extends AuthorizingRealm {
@Autowired
@Lazy
private UserService userService;
@Autowired
CacheUtil cacheUtil;
@Override
public boolean supports(AuthenticationToken token) {
return token instanceof JwtToken;
}
@Override
protected AuthorizationInfo doGetAuthorizationInfo(PrincipalCollection principals) {
String username = JwtUtil.getClaim(principals.toString(), "username");
User user = userService.getUserByUsername(username);
SimpleAuthorizationInfo simpleAuthorizationInfo = new SimpleAuthorizationInfo();
simpleAuthorizationInfo.addRole(user.getRole());
return simpleAuthorizationInfo;
}
@Override
protected AuthenticationInfo doGetAuthenticationInfo(AuthenticationToken auth) {
String token = (String) auth.getCredentials();
String username = JwtUtil.getClaim(token, "username");
if (username == null) {
throw new AuthenticationException("token invalid");
}
User user = userService.getUserByUsername(username);
if (user == null) {
throw new AuthenticationException("User didn't existed!");
}
if (JwtUtil.verify(token, username, user.getPassword(), TokenType.ACCESS_TOKEN) &&
cacheUtil.hasKey(CacheKey.ACCESS_TOKEN_KEY + token)
) {
return new SimpleAuthenticationInfo(token, token, "userRealm");
}
throw new AuthenticationException("Token expired or incorrect");
}
}
@Service
public class PageServiceImpl implements PageService {
@Autowired
PageRepository pageRepository;
@Override
@TargetDataSource("slave")
@Transactional(rollbackFor = Exception.class)
public List<Page> adminFindAll() {
List<Page> pageList = pageRepository.findAll();
if (pageList.isEmpty()) {
throw new CustomNotFoundException("page list not found");
}
return pageList;
}
}
I don’t know if my description is clear. If it is not clear, please ask questions. I hope to get your help, thanks very much!

How to Implement Factory Design Pattern for CsvProcessing based on Key

I have written a controller which is a default for MotorUploadService (for Motor upload), but I need to make a factory design so that,
based on parentPkId, it calls HealUploadService, TempUploadService, PersonalUploadService, etc., each of which has separate file processing stages.
The controller is below.
#RequestMapping(value = "/csvUpload", method = RequestMethod.POST)
public List<String> csvUpload(#RequestParam String parentPkId, #RequestParam List<MultipartFile> files)
throws IOException, InterruptedException, ExecutionException, TimeoutException {
log.info("Entered method csvUpload() of DaoController.class");
List<String> response = new ArrayList<String>();
ExecutorService executor = Executors.newFixedThreadPool(10);
CompletionService<String> compService = new ExecutorCompletionService<String>(executor);
List< Future<String> > futureList = new ArrayList<Future<String>>();
for (MultipartFile f : files) {
compService.submit(new ProcessMutlipartFile(f ,parentPkId,uploadService));
futureList.add(compService.take());
}
for (Future<String> f : futureList) {
long timeout = 0;
System.out.println(f.get(timeout, TimeUnit.SECONDS));
response.add(f.get());
}
executor.shutdown();
return response;
}
Here is the ProcessMutlipartFile class, which implements the Callable interface; CompletionService's compService.submit() invokes this class, which in turn executes the call() method, which processes a file.
public class ProcessMutlipartFile implements Callable<String>
{
private MultipartFile file;
private String temp;
private MotorUploadService motUploadService;
public ProcessMutlipartFile(MultipartFile file,String temp, MotorUploadService motUploadService )
{
this.file=file;
this.temp=temp;
this.motUploadService=motUploadService;
}
public String call() throws Exception
{
return motUploadService.csvUpload(temp, file);
}
}
Below is the MotorUploadService class, where I'm processing the uploaded CSV file line by line and then calling the validateCsvData() method to validate the data,
which returns an error object (CSVErrorRecords) holding the line number and the errors associated with it.
If csvErrorRecords is null, the line is error-free and I proceed with saving to the DB;
otherwise I save the errorList to the DB and return an upload failure.
@Component
public class MotorUploadService {
@Value("${external.resource.folder}")
String resourceFolder;
public String csvUpload(String parentPkId, MultipartFile file) {
String OUT_PATH = resourceFolder;
try {
DateFormat df = new SimpleDateFormat("yyyyMMddhhmmss");
// split() takes a regex, so the dot must be escaped
String fileName = file.getOriginalFilename().split("\\.")[0] + df.format(new Date()) + file.getOriginalFilename().split("\\.")[1];
Path path = Paths.get(OUT_PATH, fileName);
Files.copy(file.getInputStream(), path, StandardCopyOption.REPLACE_EXISTING);
}
catch(IOException e){
e.printStackTrace();
return "Failed to Upload File...try Again";
}
List<TxnMpMotSlaveRaw> txnMpMotSlvRawlist = new ArrayList<TxnMpMotSlaveRaw>();
try {
BufferedReader br = new BufferedReader(new InputStreamReader(file.getInputStream()));
String line = "";
int header = 0;
int lineNum = 1;
TxnMpSlaveErrorNew txnMpSlaveErrorNew = new TxnMpSlaveErrorNew();
List<CSVErrorRecords> errList = new ArrayList<CSVErrorRecords>();
while ((line = br.readLine()) != null) {
// TO SKIP HEADER
if (header == 0) {
header++;
continue;
}
lineNum++;
header++;
// Use Comma As Separator
String[] csvDataSet = line.split(",");
CSVErrorRecords csvErrorRecords = validateCsvData(lineNum, csvDataSet);
System.out.println("Errors from csvErrorRecords is " + csvErrorRecords);
if (csvErrorRecords == null || csvErrorRecords.getRecordNo() == 0) {
//Function to Save to Db
} else {
// add to errList
continue;
}
}
if (txnMpSlaveErrorNew.getErrRecord().size() == 0) {
//save all
return "Successfully Uploaded " + file.getOriginalFilename();
}
else {
// save the error in db;
return "Failure as it contains Faulty Information" + file.getOriginalFilename();
}
} catch (IOException ex) {
ex.printStackTrace();
return "Failure Uploaded " + file.getOriginalFilename();
}
}
private TxnMpMotSlaveRaw saveCsvData(String[] csvDataSet, String parentPkId) {
/*
Mapping csvDataSet to PoJo
returning Mapped Pojo;
*/
}
private CSVErrorRecords validateCsvData(int lineNum, String[] csvDataSet) {
/*
Logic for Validation goes here
*/
}
}
How can I turn this into a factory design pattern from the controller, so that
parentPkId='Motor' calls MotorUploadService and
parentPkId='Heal' calls HealUploadService?
I'm not that familiar with the factory design pattern, please help me out.
Thanks in advance.
If I understood the question, in essence you would create an interface and then return a specific implementation based upon the desired type.
So:
public interface UploadService {
String csvUpload(String temp, MultipartFile file) throws IOException;
}
The particular implementations
public class MotorUploadService implements UploadService
{
public String csvUpload(String temp, MultipartFile file) {
...
}
}
public class HealUploadService implements UploadService
{
public String csvUpload(String temp, MultipartFile file) {
...
}
}
Then a factory
public class UploadServiceFactory {
public UploadService getService(String type) {
if ("Motor".equals(type)) {
return new MotorUploadService();
}
else if ("Heal".equals(type)) {
return new HealUploadService();
}
throw new IllegalArgumentException("Unknown upload type: " + type);
}
}
The factory might cache the particular implementations. One can also use an abstract class rather than an interface if appropriate.
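For instance, a minimal sketch of a caching, Spring-managed factory (the @Component and constructor injection here are an assumption, not part of the original answer):
@Component
public class UploadServiceFactory {
private final Map<String, UploadService> cache = new HashMap<>();
// Spring injects the concrete implementations once; lookups are then map hits.
public UploadServiceFactory(MotorUploadService motor, HealUploadService heal) {
cache.put("Motor", motor);
cache.put("Heal", heal);
}
public UploadService getService(String type) {
UploadService service = cache.get(type);
if (service == null) {
throw new IllegalArgumentException("Unknown upload type: " + type);
}
return service;
}
}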
I think you currently have a class UploadService, but that is really the MotorUploadService if I followed your code, so I would rename it to be specific.
Then in the controller, presumably having used injection for the UploadServiceFactory:
...
for (MultipartFile f : files) {
UploadService uploadSrvc = uploadServiceFactory.getService(parentPkId);
compService.submit(new ProcessMutlipartFile(f, parentPkId, uploadSrvc));
futureList.add(compService.take());
}
So with some additional changes in your classes:
public class ProcessMutlipartFile implements Callable<String>
{
private MultipartFile file;
private String temp;
private UploadService uploadService;
// change to take the interface UploadService
public ProcessMutlipartFile(MultipartFile file,String temp, UploadService uploadService )
{
this.file=file;
this.temp=temp;
this.uploadService=uploadService;
}
public String call() throws Exception
{
return uploadService.csvUpload(temp, file);
}
}

How to make a Spring Controller return CSV from a POJO? [duplicate]

This question already has answers here:
Configuring Spring MVC controller to send file to client
(2 answers)
Closed 8 years ago.
Given a simple Java Object:
public class Pojo {
private String x;
private String y;
private String z;
//... getters/setters ...
}
Is there some lib that I can put in my project that will make a controller like this:
#RequestMapping(value="/csv", method=RequestMethod.GET, produces= MediaType.TEXT_PLAIN)
#ResponseBody
public List<Pojo> csv() {
//Some code to get a list of Pojo objects
//...
return myListOfPojos;
}
To produce a CSV file of my Pojos? For a JSON result, I use the Jackson lib. I need another lib for CSV results.
As a simple variant, you can generate the CSV any way you want and return it as a String.
Something like this:
#RequestMapping(value="/csv", method=RequestMethod.GET)
#ResponseBody
public String csv() {
//Some code to get a list of Pojo objects
//...
StringBuilder sb = new StringBuilder();
for (Pojo pojo: myListOfPojos){
sb.append(pojo.getX());
sb.append(",");
sb.append(pojo.getY());
sb.append(",");
sb.append(pojo.getZ());
sb.append("\n");
}
return sb.toString();
}
That should work.
Autogenerating these strings by reflection looks like simple work too.
Based on another question, I made my own HttpMessageConverter for TSV responses.
TsvMessageConverter.java
public class TsvMessageConverter extends AbstractHttpMessageConverter<TsvResponse> {
public static final MediaType MEDIA_TYPE = new MediaType("text", "tsv", Charset.forName("utf-8"));
private static final Logger logger = LoggerFactory.getLogger(TsvMessageConverter.class);
public TsvMessageConverter() {
super(MEDIA_TYPE);
}
protected boolean supports(Class<?> clazz) {
return TsvResponse.class.equals(clazz);
}
@Override
protected TsvResponse readInternal(Class<? extends TsvResponse> clazz, HttpInputMessage inputMessage) throws IOException, HttpMessageNotReadableException {
return null;
}
protected void writeInternal(TsvResponse tsvResponse, HttpOutputMessage output) throws IOException, HttpMessageNotWritableException {
output.getHeaders().setContentType(MEDIA_TYPE);
output.getHeaders().set("Content-Disposition", "attachment; filename=\"" + tsvResponse.getFilename() + "\"");
final OutputStream out = output.getBody();
writeColumnTitles(tsvResponse, out);
if (tsvResponse.getRecords() != null && tsvResponse.getRecords().size() != 0) {
writeRecords(tsvResponse, out);
}
out.close();
}
private void writeRecords(TsvResponse response, OutputStream out) throws IOException {
List<String> getters = getObjectGetters(response);
for (final Object record : response.getRecords()) {
for (String getter : getters) {
try {
Method method = ReflectionUtils.findMethod(record.getClass(), getter);
out.write(method.invoke(record).toString().getBytes(Charset.forName("utf-8")));
out.write('\t');
} catch (IllegalAccessException | InvocationTargetException e) {
logger.error("Erro ao transformar em CSV", e);
}
}
out.write('\n');
}
}
private List<String> getObjectGetters(TsvResponse response) {
List<String> getters = new ArrayList<>();
for (Method method : ReflectionUtils.getAllDeclaredMethods(response.getRecords().get(0).getClass())) {
String methodName = method.getName();
if (methodName.startsWith("get") && !methodName.equals("getClass")) {
getters.add(methodName);
}
}
sort(getters);
return getters;
}
private void writeColumnTitles(TsvResponse response, OutputStream out) throws IOException {
for (String columnTitle : response.getColumnTitles()) {
out.write(columnTitle.getBytes());
out.write('\t');
}
out.write('\n');
}
}
TsvResponse.java
public class TsvResponse {
private final String filename;
private final List records;
private final String[] columnTitles;
public TsvResponse(List records, String filename, String ... columnTitles) {
this.records = records;
this.filename = filename;
this.columnTitles = columnTitles;
}
public String getFilename() {
return filename;
}
public List getRecords() {
return records;
}
public String[] getColumnTitles() {
return columnTitles;
}
}
And in SpringContext.xml add the following:
<mvc:annotation-driven>
<mvc:message-converters register-defaults="true">
<bean class="com.mypackage.TsvMessageConverter"/>
</mvc:message-converters>
</mvc:annotation-driven>
So, you can use it in your controller like this:
#RequestMapping(value="/tsv", method= RequestMethod.GET, produces = "text/tsv")
#ResponseBody
public TsvResponse tsv() {
return new TsvResponse(myListOfPojos, "fileName.tsv",
"Name", "Email", "Phone", "Mobile");
}
