Elasticsearch retrieves data from the wrong index - java

I do not know why Elasticsearch does not use the appropriate values for the index and type. It should retrieve data from index = auctions and type = auctions, as defined in the model:
AuctionIndex.java:
@Document(indexName = "auctions", type = "auctions")
public class AuctionIndex {
@Id
private String id;
private Long cat;
private Long tcat;
private String curr;
private Long price;
private Long start_date;
private Long end_date;
private String title;
private String pow;
private String woj;
private String loc;
private String cat_name;
private Long catdec;
private Long uid;
private Long qty;
...getters and setters...
}
This code works correctly when fetching data as follows:
public Map searchByIndexParams(AuctionIndexSearchParams searchParams, Pageable pageable) {
Map response = new HashMap();
NativeSearchQuery searchQuery = this.getSearchQuery(searchParams, pageable).build();
final FacetedPage<AuctionIndex> search = auctionIndexRepository.search(searchQuery);
List<AuctionIndex> content = search.getContent();
response.put("content", content.stream().map(row -> {
return Auction.builder()
.cat(row.getCat())
.item(Long.parseLong(row.getId()))
.endts(row.getEnd_date())
.startts(row.getStart_date())
.loc(row.getLoc())
.pow(row.getPow())
.woj(row.getWoj())
.price(row.getPrice())
.qty(row.getQty())
.title(row.getTitle())
.user(row.getUid())
.catName(row.getCat_name())
.build();
}).collect(Collectors.toList()));
response.put("first", search.isFirst());
response.put("last", search.isLast());
response.put("number", search.getNumber());
response.put("numberOfElements", search.getNumberOfElements());
response.put("size", search.getSize());
response.put("sort", search.getSort());
response.put("totalElements", search.getTotalElements());
response.put("totalPages", search.getTotalPages());
return response;
}
But when fetching all the records this way:
public Map findAllByIndexParams(AuctionIndexSearchParams searchParams, Pageable pageable) {
List rows = new ArrayList();
Map response = new HashMap();
final List<FilterBuilder> filters = Lists.newArrayList();
final NativeSearchQueryBuilder searchQuery = new NativeSearchQueryBuilder().withQuery(matchAllQuery());
Optional.ofNullable(searchParams.getCategoryId()).ifPresent(v -> filters.add(boolFilter().must(termFilter("cat", v))));
Optional.ofNullable(searchParams.getCurrency()).ifPresent(v -> filters.add(boolFilter().must(termFilter("curr", v))));
Optional.ofNullable(searchParams.getTreeCategoryId()).ifPresent(v -> filters.add(boolFilter().must(termFilter("tcat", v))));
Optional.ofNullable(searchParams.getUid()).ifPresent(v -> filters.add(boolFilter().must(termFilter("uid", v))));
final BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
//access for many uids
if (searchParams.getUids() != null) {
if (searchParams.getItemId() != null || searchParams.getTitle() != null) {
Optional.ofNullable(searchParams.getUids().split(",")).ifPresent(v -> {
filters.add(boolFilter().must(termsFilter("uid", v)));
});
} else {
for (String user : searchParams.getUids().split(",")) {
boolQueryBuilder.should(queryStringQuery(user).field("uid"));
}
}
}
//access for many categories
if (searchParams.getCategories() != null) {
Optional.ofNullable(searchParams.getCategories().split(",")).ifPresent(v -> {
filters.add(boolFilter().must(termsFilter("cat", v)));
});
}
if (searchParams.getItemId() != null) {
boolQueryBuilder.must(queryStringQuery(searchParams.getItemId()).field("_id"));
}
if (Optional.ofNullable(searchParams.getTitle()).isPresent()) {
boolQueryBuilder.must(queryStringQuery(searchParams.getTitle()).analyzeWildcard(true).field("title"));
}
if (searchParams.getLoc() != null) {
filters.add(boolFilter().must(termsFilter("loc", searchParams.getLoc())));
// boolQueryBuilder.must(queryStringQuery(searchParams.getLoc()).field("loc"));
}
if (searchParams.getPow() != null) {
filters.add(boolFilter().must(termsFilter("pow", searchParams.getPow())));
// boolQueryBuilder.must(queryStringQuery(searchParams.getPow()).field("pow"));
}
if (searchParams.getWoj() != null) {
filters.add(boolFilter().must(termsFilter("woj", searchParams.getWoj())));
// boolQueryBuilder.must(queryStringQuery(searchParams.getWoj()).field("woj"));
}
if (searchParams.getCatdec() != null) {
boolQueryBuilder.must(queryStringQuery(String.valueOf(searchParams.getCatdec())).field("catdec"));
}
if (Optional.ofNullable(searchParams.getStartDateFrom()).isPresent()
|| Optional.ofNullable(searchParams.getStartDateTo()).isPresent()) {
filters.add(rangeFilter("start_date").from(searchParams.getStartDateFrom()).to(searchParams.getStartDateTo()));
}
if (Optional.ofNullable(searchParams.getEndDateFrom()).isPresent()
|| Optional.ofNullable(searchParams.getEndDateTo()).isPresent()) {
filters.add(rangeFilter("end_date").from(searchParams.getEndDateFrom()).to(searchParams.getEndDateTo()));
}
if (Optional.ofNullable(searchParams.getPriceFrom()).isPresent()
|| Optional.ofNullable(searchParams.getPriceTo()).isPresent()) {
filters.add(rangeFilter("price").from(searchParams.getPriceFrom()).to(searchParams.getPriceTo()));
}
searchQuery.withQuery(boolQueryBuilder);
FilterBuilder[] filterArr = new FilterBuilder[filters.size()];
filterArr = filters.toArray(filterArr);
searchQuery.withFilter(andFilter(filterArr));
if (searchParams.getOrderBy() != null && searchParams.getOrderDir() != null) {
if (searchParams.getOrderDir().toLowerCase().equals("asc")) {
searchQuery.withSort(SortBuilders.fieldSort(searchParams.getOrderBy()).order(SortOrder.ASC));
} else {
searchQuery.withSort(SortBuilders.fieldSort(searchParams.getOrderBy()).order(SortOrder.DESC));
}
}
String scrollId = searchTemplate.scan(searchQuery.build(), 100000, false);
System.out.println(scrollId);
Page<AuctionIndex> page = searchTemplate.scroll(scrollId, 500000, AuctionIndex.class);
System.out.println(page.getTotalElements());
if (page.hasContent()) {
while (true) {
for (AuctionIndex hit : page.getContent()) {
Auction row = Auction.builder()
.cat(hit.getCat())
.item(Long.parseLong(hit.getId()))
.endts(hit.getEnd_date())
.startts(hit.getStart_date())
.loc(hit.getLoc())
.pow(hit.getPow())
.woj(hit.getWoj())
.price(hit.getPrice())
.qty(hit.getQty())
.title(hit.getTitle())
.user(hit.getUid())
.catName(hit.getCat_name())
.build();
rows.add(row);
}
page = searchTemplate.scroll(scrollId, 500000, AuctionIndex.class);
if (page.hasContent() == false) {
break;
}
}
}
response.put("content", rows);
return response;
}
AuctionService.java:
private AuctionRepository auctionRepository;
private AuctionIndexRepository auctionIndexRepository;
@Autowired
public AuctionService(AuctionRepository auctionRepository, AuctionIndexRepository auctionIndexRepository) {
this.auctionRepository = auctionRepository;
this.auctionIndexRepository = auctionIndexRepository;
}
@Autowired
private ElasticsearchTemplate searchTemplate;
AuctionIndexRepository.java:
public interface AuctionIndexRepository extends ElasticsearchRepository<AuctionIndex, Integer> {
}
AuctionRepository.java:
@Repository
public class AuctionRepository {
private final AerospikeClient aerospikeClient;
@Autowired
public AuctionRepository(AerospikeClient aerospikeClient) {
this.aerospikeClient = aerospikeClient;
}
/**
*
* @param auctionId
* @param transactionIndexId
* @return
*/
public Map findTransactionAuctionById(Long auctionId, String transactionIndexId) {
final Statement stmt = new Statement();
stmt.setNamespace(NAMESPACE_ALLEK);
stmt.setSetName(SET_U);
final Map<String, Object> aMap = findAuctionUserInSetA(auctionId);
final Key uKey = new Key(NAMESPACE_ALLEK, SET_U, aMap.get("u") + "_" + aMap.get("p"));
final Object uRecord = aerospikeClient.execute(null, uKey, NAMESPACE_ALLEK, FUN_FIND_U_ITEM, Value.get(auctionId));
return parseTransactionAuction((HashMap) uRecord, auctionId, transactionIndexId);
}
/**
*
* @param r
* @return
*/
private Map parseTransactionAuction(HashMap r, Long auctionId, String transactionIndexId) {
return parseTransactionAuction(new Record(r, 0, 0), auctionId, transactionIndexId);
}
/**
*
* @param r record from Aerospike
* @return
*/
private Map parseTransactionAuction(Record r, Long auctionId, String transactionIndexId) {
Map response = new HashMap();
final Object recordTrans = r.getValue("t");
final ArrayList<HashMap> trans = Optional.ofNullable(recordTrans).isPresent() ? (ArrayList<HashMap>) recordTrans : new ArrayList<>();
Object qty = 0;
Object price = 0;
for (HashMap hit : trans) {
if (transactionIndexId.equals(auctionId + "_" + hit.get("buyer") + "_" + hit.get("ts"))) {
qty = hit.get("qty");
price = hit.get("price");
break;
}
}
response.put("qty", qty);
response.put("price", price);
response.put("startts", r.getLong("startts"));
response.put("endts", r.getLong("endts"));
response.put("qty_auction", r.getLong("qty"));
return response;
}
public AuctionRaw findAuctionRawById(Long auctionId) {
final Statement stmt = new Statement();
stmt.setNamespace(NAMESPACE_ALLEK);
stmt.setSetName(SET_U);
final Map<String, Object> aMap = findAuctionUserInSetA(auctionId);
final Key uKey = new Key(NAMESPACE_ALLEK, SET_U, aMap.get("u") + "_" + aMap.get("p"));
final Object uRecord = aerospikeClient.execute(null, uKey, NAMESPACE_ALLEK, FUN_FIND_U_ITEM, Value.get(auctionId));
return parseAuctionRaw((HashMap) uRecord);
}
private AuctionRaw parseAuctionRaw(HashMap r) {
return parseAuctionRaw(new Record(r, 0, 0));
}
private AuctionRaw parseAuctionRaw(Record r) {
return AuctionRaw.builder()
.cat(r.getLong("cat"))
.len(r.getInt("len"))
.start(r.getLong("start"))
.build();
}
public Auction findAuctionById(Long auctionId) {
final Statement stmt = new Statement();
stmt.setNamespace(NAMESPACE_ALLEK);
stmt.setSetName(SET_U);
final Map<String, Object> aMap = findAuctionUserInSetA(auctionId);
final Key uKey = new Key(NAMESPACE_ALLEK, SET_U, aMap.get("u") + "_" + aMap.get("p"));
final Object uRecord = aerospikeClient.execute(null, uKey, NAMESPACE_ALLEK, FUN_FIND_U_ITEM, Value.get(auctionId));
return parseAuction((HashMap) uRecord);
}
public Map<String, Object> findAuctionUserInSetA(Long auctionId) {
final Statement stmt = new Statement();
stmt.setNamespace(NAMESPACE_ALLEK);
stmt.setSetName(SET_U);
final Key aKey = new Key(NAMESPACE_ALLEK, SET_A, Value.get(auctionId / 1024));
final Map<String, Object> aMap = (Map<String, Object>) aerospikeClient.execute(null, aKey, NAMESPACE_ALLEK, FUN_FIND_A_ITEM, Value.get(auctionId));
return aMap;
}
public List<Auction> findAuctionByUserId(Long userId) {
final Statement stmt = new Statement();
stmt.setNamespace(NAMESPACE_ALLEK);
stmt.setSetName(SET_U);
stmt.setFilters(Filter.equal("u", userId));
final RecordSet records = aerospikeClient.query(null, stmt);
return StreamSupport.stream(records.spliterator(), true)
.flatMap(l -> {
final ArrayList<HashMap> auctionsFromRecord = (ArrayList<HashMap>) l.record.getValue("v");
return Optional.ofNullable(auctionsFromRecord).isPresent() ? auctionsFromRecord.stream() : Stream.<HashMap>empty();
})
.map(r -> parseAuction(r))
.collect(Collectors.toList());
}
private Auction parseAuction(HashMap r) {
return parseAuction(new Record(r, 0, 0));
}
private Auction parseAuction(Record r) {
// final Object recordTrans = r.getValue("t");
// final ArrayList<HashMap> trans = Optional.ofNullable(recordTrans).isPresent() ? (ArrayList<HashMap>) recordTrans : new ArrayList<>();
// final List<Transaction> transactions = trans.stream()
// .map(m -> {
// HashMap recordComment = (HashMap) m.get("c");
// Comment comment = null;
// if (recordComment != null && recordComment.size() > 0) {
// comment = Comment.builder()
// .id((Long) recordComment.get("id"))
// .ts((Long) recordComment.get("ts"))
// .text((String) recordComment.get("text"))
// .type((Long) recordComment.get("type"))
// .build();
// }
// return Transaction.builder()
// .ts((Long) m.get("ts"))
// .qty((Long) m.get("qty"))
// .price((Long) m.get("price"))
// .c(comment)
// .buyer((Long) m.get("buyer"))
// .build();
// })
// .collect(Collectors.toList());
return Auction.builder()
.item(r.getLong("item"))
.startts(r.getLong("startts"))
.endts(r.getLong("endts"))
.user(r.getLong("user"))
.qty(r.getLong("qty"))
.price(r.getLong("price"))
.title(r.getString("title"))
.cat(r.getLong("cat"))
// .tcat(r.getLong("tcat"))
// .curr(r.getString("curr"))
.loc(r.getString("loc"))
.woj(r.getString("woj"))
.pow(r.getString("pow"))
.catName(r.getString("cat_name"))
// .t(transactions)
// .len(r.getInt("len"))
// .detSt(r.getLong("det_st"))
// .detLen(r.getLong("det_len"))
// .start(r.getLong("start"))
.build();
}
}
I do not know why, but the scroll retrieves data from the old index = allek and type = auctions.
How do I know it is the old index? The result from the old index is about 16k records (it simply contains more data and different fields than the new index), while the new index contains only about 400 records.
My question is: why is this happening? What should I change so that the scroll uses index = auctions and type = auctions?
Any help would be appreciated; I have no idea why this is happening.
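One thing worth checking (a sketch, assuming a spring-data-elasticsearch version where NativeSearchQueryBuilder exposes withIndices/withTypes, not a confirmed fix): when the query is built by hand and no entity class is passed to scan(), the index and type from the @Document annotation are not necessarily applied, so the scroll may run against whatever indices the template resolves. Pinning the query explicitly to the target index and type should rule that out:
final NativeSearchQueryBuilder searchQuery = new NativeSearchQueryBuilder()
    .withQuery(matchAllQuery())
    .withIndices("auctions")   // force the new index instead of the old "allek" one
    .withTypes("auctions");    // and the mapping type
// ...filters, sorts and the scan/scroll calls stay the same...
String scrollId = searchTemplate.scan(searchQuery.build(), 100000, false);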

Related

How can I simplify if-else using a Java 8 approach

public Pair<String, String> getSalesChannelDisplayData(DiscountRule rule, List<SalesChannelDto> allSalesChannels) {
String salesChannelDisplayNames = "";
String salesChannelDefaultCountryCodes = "";
Set<String> storeCodes = new HashSet<>();
if(rule.getConditions() != null) {
for (Condition condition : rule.getConditions()) {
if (condition instanceof ValueCondition) {
if (((ValueCondition) condition).getField() == Field.SALES_CHANNEL) {
Set<String> salesChannelIds = new HashSet<>();
if(((ValueCondition) condition).getOperator().equals(Operator.IN)){
salesChannelIds = ((ValueCondition) condition).getValues();
}else if (((ValueCondition) condition).getOperator().equals(Operator.NOT_IN)) {
salesChannelIds = allSalesChannels.stream().map(SalesChannelDto::getId).collect(Collectors.toSet());
salesChannelIds.removeAll(((ValueCondition) condition).getValues());
}
for (String salesChannelId : salesChannelIds) {
SalesChannelDto salesChannel = Beans.find(allSalesChannels, s-> s.getId().equals(salesChannelId));
salesChannelDisplayNames += salesChannel.getDisplayName() + ", ";
storeCodes.add(salesChannel.getDefaultCountryCode());
}
}
}
}
if (salesChannelDisplayNames.length()>1) {
salesChannelDisplayNames = salesChannelDisplayNames.substring(0,salesChannelDisplayNames.length()-2);
salesChannelDefaultCountryCodes = Joiner.on(", ").join(storeCodes);
}
return new Pair<>(salesChannelDisplayNames, salesChannelDefaultCountryCodes);
}
I want to simplify the above code using the Java Stream API. Is it possible to replace the if / else if with a Java 8 approach?
The Stream API is not a good choice for simplifying your code, but there are some parts of it that you can improve.
1- There is no need to check rule.getConditions() for null:
if(rule.getConditions() != null) {...}
2- Don't repeat yourself with ((ValueCondition) condition); instead, define a variable for it and reuse it:
ValueCondition vCondition = (ValueCondition) condition;
3- Instead of concatenating salesChannelDisplayNames, declare a List<String> salesChannelNames = new ArrayList<>(); and add each channel name to it:
salesChannelNames.add(salesChannel.getDisplayName());
At the end, use String.join(", ", salesChannelNames) to add the ", " delimiter between them.
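A minimal sketch of points 2 and 3 applied to the body of the SALES_CHANNEL branch, reusing the same helpers (Beans.find) and getters from the original code, and assuming a single SALES_CHANNEL condition (salesChannelNames would otherwise be declared at method scope):
ValueCondition vCondition = (ValueCondition) condition;            // point 2: cast once, reuse
Set<String> salesChannelIds = new HashSet<>();
if (vCondition.getOperator().equals(Operator.IN)) {
    salesChannelIds = vCondition.getValues();
} else if (vCondition.getOperator().equals(Operator.NOT_IN)) {
    salesChannelIds = allSalesChannels.stream().map(SalesChannelDto::getId).collect(Collectors.toSet());
    salesChannelIds.removeAll(vCondition.getValues());
}
List<String> salesChannelNames = new ArrayList<>();                // point 3: collect, then join
for (String salesChannelId : salesChannelIds) {
    SalesChannelDto salesChannel = Beans.find(allSalesChannels, s -> s.getId().equals(salesChannelId));
    salesChannelNames.add(salesChannel.getDisplayName());
    storeCodes.add(salesChannel.getDefaultCountryCode());
}
salesChannelDisplayNames = String.join(", ", salesChannelNames);   // no trailing separator to trim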
This is a sample you can try out. I have tried to completely eliminate if-else.
public class FunctionalIfElse {
public static void main(String[] args) {
Product product1 = new Product(1, "Audi A8");
String category1 = "car";
System.out.println(ProductProxy.getEnrichedProduct.apply(product1, category1).toString());
Product product2 = new Product(2, "OnePlus 8 Pro");
String category2 = "mobile";
System.out.println(ProductProxy.getEnrichedProduct.apply(product2, category2).toString());
Product product3 = new Product(3, "Macbook Pro");
String category3 = "laptop";
System.out.println(ProductProxy.getEnrichedProduct.apply(product3, category3).toString());
Product product4 = new Product(4, "Emaar Palm Heights");
String category4 = "home";
System.out.println(ProductProxy.getEnrichedProduct.apply(product4, category4).toString());
}
}
@AllArgsConstructor
@Data
class Product {
private int productId;
private String productName;
}
class ProductProxy {
static BiFunction<Product, String, Product> getEnrichedProduct = (inputProduct, category) -> {
AtomicReference<Product> outputProduct = new AtomicReference<>();
Objects.requireNonNull(category, "The category is null");
Predicate<String> checkIsCar = productCategory -> productCategory.equalsIgnoreCase("car") ? true : false;
Predicate<String> checkIsMobile = productCategory -> productCategory.equalsIgnoreCase("mobile") ? true : false;
Predicate<String> checkIsLaptop = productCategory -> productCategory.equalsIgnoreCase("laptop") ? true : false;
Optional.ofNullable(category).filter(checkIsCar).map(input -> ProductService.enrichProductForCar.apply(inputProduct)).map(Optional::of).ifPresent(returnedProduct -> outputProduct.set(returnedProduct.get()));
Optional.ofNullable(category).filter(checkIsMobile).map(input -> ProductService.enrichProductForMobile.apply(inputProduct)).map(Optional::of).ifPresent(returnedProduct -> outputProduct.set(returnedProduct.get()));
Optional.ofNullable(category).filter(checkIsLaptop).map(input -> ProductService.enrichProductForLaptop.apply(inputProduct)).map(Optional::of).ifPresent(returnedProduct -> outputProduct.set(returnedProduct.get()));
Optional.ofNullable(outputProduct.get()).orElseThrow(() -> new RuntimeException("This is not a valid category"));
return outputProduct.get();
};
}
class ProductService {
static Function<Product, Product> enrichProductForCar = inputProduct -> {
inputProduct.setProductName(inputProduct.getProductName() + ":Car");
return inputProduct;
};
static Function<Product, Product> enrichProductForMobile = inputProduct -> {
inputProduct.setProductName(inputProduct.getProductName() + ":Mobile");
return inputProduct;
};
static Function<Product, Product> enrichProductForLaptop = inputProduct -> {
inputProduct.setProductName(inputProduct.getProductName() + ":Laptop");
return inputProduct;
};
}

How to retrieve tables which exists in a pdf using AWS Textract in java

I found the article below on how to do it in Python.
https://docs.aws.amazon.com/textract/latest/dg/examples-export-table-csv.html
I also used the article below to extract text.
https://docs.aws.amazon.com/textract/latest/dg/detecting-document-text.html
However, the above article only helped me get the text. I also used the block.getBlockType() method of Block, but none of the blocks returned its type as "CELL" even though there are tables in the image/PDF.
Please help me find a Java library similar to boto3 that can extract all the tables.
What I did: I created models for each dataset in the JSON response, and I can use these models to build a table view in JSF.
public static List<TableModel> getTablesFromTextract(TextractModel textractModel) {
List<TableModel> tables = null;
try {
if (textractModel != null) {
tables = new ArrayList<>();
List<BlockModel> tableBlocks = new ArrayList<>();
Map<String, BlockModel> blockMap = new HashMap<>();
for (BlockModel block : textractModel.getBlocks()) {
if (block.getBlockType().equals("TABLE")) {
tableBlocks.add(block);
}
blockMap.put(block.getId(), block);
}
for (BlockModel blockModel : tableBlocks) {
Map<Long, Map<Long, String>> rowMap = new HashMap<>();
for (RelationshipModel relationship : blockModel.getRelationships()) {
if (relationship.getType().equals("CHILD")) {
for (String id : relationship.getIds()) {
BlockModel cell = blockMap.get(id);
if (cell.getBlockType().equals("CELL")) {
long rowIndex = cell.getRowIndex();
long columnIndex = cell.getColumnIndex();
if (!rowMap.containsKey(rowIndex)) {
rowMap.put(rowIndex, new HashMap<>());
}
Map<Long, String> columnMap = rowMap.get(rowIndex);
columnMap.put(columnIndex, getCellText(cell, blockMap));
}
}
}
}
tables.add(new TableModel(blockModel, rowMap));
}
System.out.println("row Map " + tables.toString());
}
} catch (Exception e) {
LOG.error("Could not get table from textract model", e);
}
return tables;
}
private static String getCellText(BlockModel cell, Map<String, BlockModel> blockMap) {
String text = "";
try {
if (cell != null
&& CollectionUtils.isNotEmpty(cell.getRelationships())) {
for (RelationshipModel relationship : cell.getRelationships()) {
if (relationship.getType().equals("CHILD")) {
for (String id : relationship.getIds()) {
BlockModel word = blockMap.get(id);
if (word.getBlockType().equals("WORD")) {
text += word.getText() + " ";
} else if (word.getBlockType().equals("SELECTION_ELEMENT")) {
if (word.getSelectionStatus().equals("SELECTED")) {
text += "X ";
}
}
}
}
}
}
} catch (Exception e) {
LOG.error("Could not get cell text of table", e);
}
return text;
}
TableModel to create the view from:
public class TableModel {
private BlockModel table;
private Map<Long, Map<Long, String>> rowMap;
public TableModel(BlockModel table, Map<Long, Map<Long, String>> rowMap) {
this.table = table;
this.rowMap = rowMap;
}
public BlockModel getTable() {
return table;
}
public void setTable(BlockModel table) {
this.table = table;
}
public Map<Long, Map<Long, String>> getRowMap() {
return rowMap;
}
public void setRowMap(Map<Long, Map<Long, String>> rowMap) {
this.rowMap = rowMap;
}
@Override
public String toString() {
return table.getId() + " - " + rowMap.toString();
}
}
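The TextractModel, BlockModel and RelationshipModel classes referenced above are custom wrappers around the Textract JSON and are not shown. A minimal sketch of the accessors the code assumes they expose (field names inferred from the calls above, Lombok @Data used for brevity):
@Data
public class TextractModel {
private List<BlockModel> blocks;
}
@Data
public class BlockModel {
private String id;
private String blockType;        // "TABLE", "CELL", "WORD", "SELECTION_ELEMENT"
private String text;
private String selectionStatus;  // "SELECTED" for checked selection elements
private long rowIndex;
private long columnIndex;
private List<RelationshipModel> relationships;
}
@Data
public class RelationshipModel {
private String type;             // e.g. "CHILD"
private List<String> ids;        // ids of the child blocks
}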
I have something similar:
public class AnalyzeDocument {
public DocumentModel startProcess(byte[] content) {
Region region = Region.EU_WEST_2;
TextractClient textractClient = TextractClient.builder().region(region)
.credentialsProvider(EnvironmentVariableCredentialsProvider.create()).build();
return analyzeDoc(textractClient, content);
}
public DocumentModel analyzeDoc(TextractClient textractClient, byte[] content) {
try {
SdkBytes sourceBytes = SdkBytes.fromByteArray(content);
Util util = new Util();
Document myDoc = Document.builder().bytes(sourceBytes).build();
List<FeatureType> featureTypes = new ArrayList<FeatureType>();
featureTypes.add(FeatureType.FORMS);
featureTypes.add(FeatureType.TABLES);
AnalyzeDocumentRequest analyzeDocumentRequest = AnalyzeDocumentRequest.builder().featureTypes(featureTypes)
.document(myDoc).build();
AnalyzeDocumentResponse analyzeDocument = textractClient.analyzeDocument(analyzeDocumentRequest);
List<Block> docInfo = analyzeDocument.blocks();
// util.displayBlockInfo(docInfo);
PageModel pageModel = util.getTableResults(docInfo);
DocumentModel documentModel = new DocumentModel();
documentModel.getPages().add(pageModel);
Iterator<Block> blockIterator = docInfo.iterator();
while (blockIterator.hasNext()) {
Block block = blockIterator.next();
log.debug("The block type is " + block.blockType().toString());
}
return documentModel;
} catch (TextractException e) {
System.err.println(e.getMessage());
}
return null;
}
}
and this is the util file:
public PageModel getTableResults(List<Block> blocks) {
List<Block> tableBlocks = new ArrayList<>();
Map<String, Block> blockMap = new HashMap<>();
for (Block block : blocks) {
blockMap.put(block.id(), block);
if (block.blockType().equals(BlockType.TABLE)) {
tableBlocks.add(block);
log.debug("added table: " + block.text());
}
}
PageModel page = new PageModel();
if (tableBlocks.size() == 0) {
return null;
}
int i = 0;
for (Block table : tableBlocks) {
page.getTables().add(generateTable(table, blockMap, i++));
}
return page;
}
private TableModel generateTable(Block table, Map<String, Block> blockMap, int index) {
TableModel model = new TableModel();
Map<Integer, Map<Integer, String>> rows = getRowsColumnsMap(table, blockMap);
model.setTableId("Table_" + index);
for (Map.Entry<Integer, Map<Integer, String>> entry : rows.entrySet()) {
RowModel rowModel = new RowModel();
Map<Integer, String> value = entry.getValue();
for (int i = 0; i < value.size(); i++) {
rowModel.getCells().add(value.get(i));
}
model.getRows().add(rowModel);
}
return model;
}
private Map<Integer, Map<Integer, String>> getRowsColumnsMap(Block block, Map<String, Block> blockMap) {
Map<Integer, Map<Integer, String>> rows = new HashMap<>();
for (Relationship relationship : block.relationships()) {
if (relationship.type().equals(RelationshipType.CHILD)) {
for (String childId : relationship.ids()) {
Block cell = blockMap.get(childId);
if (cell != null) {
int rowIndex = cell.rowIndex();
int colIndex = cell.columnIndex();
if (rows.get(rowIndex) == null) {
Map<Integer, String> row = new HashMap<>();
rows.put(rowIndex, row);
}
rows.get(rowIndex).put(colIndex, getText(cell, blockMap));
}
}
}
}
return rows;
}
public String getText(Block block, Map<String, Block> blockMap) {
String text = "";
if (block.relationships() != null && block.relationships().size() > 0) {
for (Relationship relationship : block.relationships()) {
if (relationship.type().equals(RelationshipType.CHILD)) {
for (String childId : relationship.ids()) {
Block wordBlock = blockMap.get(childId);
if (wordBlock != null && wordBlock.blockType() != null) {
if (wordBlock.blockType().equals(BlockType.WORD)) {
text += wordBlock.text() + " ";
}
}
}
}
}
}
return text;
}
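The PageModel, TableModel and RowModel classes used by the util above are not shown either. A minimal sketch of what they might look like, assuming plain POJOs whose lists are pre-initialised so that getTables().add(...) and getRows().add(...) work as called above:
public class PageModel {
private final List<TableModel> tables = new ArrayList<>();
public List<TableModel> getTables() { return tables; }
}
public class TableModel {
private String tableId;
private final List<RowModel> rows = new ArrayList<>();
public void setTableId(String tableId) { this.tableId = tableId; }
public String getTableId() { return tableId; }
public List<RowModel> getRows() { return rows; }
}
public class RowModel {
private final List<String> cells = new ArrayList<>();
public List<String> getCells() { return cells; }
}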

Getting an error while recursing through a list of objects that contains another list

I have a class in Java which has a field
List<GroupNavigationItemSRO> children
Every GroupNavigationItemSRO in turn has the same list:
List<GroupNavigationItemSRO> children
Now I want to iterate through every GroupNavigationItemSRO and populate a List of Strings. Currently I am trying to do that like this:
void getNavItems(List<GroupNavigationItemSRO> items,List<String> nitems){
System.out.println("PRINT");
for(GroupNavigationItemSRO item : items) {
nitems.add(item.getUrl());
System.out.println(item.getUrl());
// g.add(item.getUrl());
System.out.println("PRINT");
List<GroupNavigationItemSRO> nextItem = item.getChildren();
if (nextItem != null && nextItem.size()>0) {
getNavItems(nextItem,nitems);
}
}
}
When I only print the objects it doesn't give any errors, but as soon as I try to add to the list, the recursion stops at
nitems.add(item.getUrl())
Why is this happening? Here's the entire Java file in case it's helpful:
@Service("labelSearchService")
public class LabelSearchServiceImpl extends AbstractSearchService {
private static final String version = SearchVersion.VERSION_2.getValue();
private static final Logger LOG = LoggerFactory.getLogger(LabelSearchServiceImpl.class);
private static final String EXPIRY_SET = "expiry";
private static final String DATA_SET = "data";
@Autowired
@Qualifier("searchServiceFactory")
private ISearchServiceFactory searchServiceFactory;
@Autowired
IAerospikeTopSellingBrandsCacheService topSellingBrandsCacheService;
@Autowired
private LabelSearchCacheServiceImplFactory labelSearchCacheServiceImplFactory;
@Autowired
AerospikeGuidedResponse aerospikeGuidedResponse;
List<String> g = null;
@Override
public SearchSRO getSolrResponse(KeyGenerator keyGenerator, String queryFromBrowser, String searchTerm, Integer productCategoryId, int start, int number, String sortBy,
String userZone, String vertical, String clickSrc, boolean isSpellCheckEnabled, String categoryURL, boolean isNested) throws SearchException, ShardNotFoundException, IllegalAccessException {
String originalKeyword = searchTerm;
searchTerm = SearchUtils.modifySearchTerm(searchTerm);
SearchSRO sro = new SearchSRO();
boolean isPartialSearch = SearchUtils.isPartialSearchEnabled();
keyGenerator.setPartialSearch(SearchUtils.isPartialSearchEnabled());
LabelNodeSRO labelNode = SearchUtils.getLabelNodeByNodePath(categoryURL);
// for 'ALL' categories labelNode would be null.
LOG.info("categoryURL : " + categoryURL);
if (ALL.equals(categoryURL)) {
if (number == 0 && !CacheManager.getInstance().getCache(SearchConfigurationCache.class).getBooleanProperty(SearchProperty.ALLOW_ZERO_RESULT_REQUESTS)) {
return new SearchSRO();
}
sro = labelSearchCacheServiceImplFactory.getSearchCacheServiceImpl().getSearchBinResultsForAllLabels(keyGenerator, queryFromBrowser, searchTerm, labelNode, start, number, sortBy, userZone, vertical,
isPartialSearch, isSpellCheckEnabled, originalKeyword, false, isNested);
} else if (labelNode != null) {
sro = getSearchProducts(keyGenerator, queryFromBrowser, searchTerm, null, null, labelNode, start, number, sortBy, userZone, isPartialSearch, isSpellCheckEnabled,
originalKeyword, isNested, false,categoryURL);
} else {
throw new SearchException("Search was hit without selecting any category");
}
// this is the minimum number to results that should match for results to be shown on 'people who search this bought this widget'
SearchConfigurationCache cache = CacheManager.getInstance().getCache(SearchConfigurationCache.class);
if ((ClickSourceType.PWSTBT_WIDGET.getValue()).equalsIgnoreCase(clickSrc) && sro.getNoOfMatches() < cache.getIntegerProperty(SearchProperty.BEST_SELLER_MINIMUM_RESULTS)) {
LOG.info("The minimum number of results to match for PWSTBT widget are " + cache.getIntegerProperty(SearchProperty.BEST_SELLER_MINIMUM_RESULTS)
+ " but number of matched results are " + sro.getNoOfMatches());
sro = new SearchSRO();
}
return sro;
}
@Override
public SearchSRO getSolrResponseForMobile(KeyGenerator keyGenerator, String queryFromBrowser, String searchTerm, Integer productCategoryId, int start, int number,
String sortBy, String userZone, String vertical, String clickSrc, boolean isBinSearch, int noOfResultsPerBin, boolean isSpellCheckEnabled, boolean isPartialSearch,
String categoryURL) throws SearchException, ShardNotFoundException, IllegalAccessException {
String originalKeyword = searchTerm;
searchTerm = SearchUtils.modifySearchTerm(searchTerm);
SearchSRO sro = new SearchSRO();
isPartialSearch = isPartialSearch && SearchUtils.isPartialSearchEnabled();
// this is to disable partial search in case of PWSTBT
if (ClickSourceType.PWSTBT_WIDGET.getValue().equalsIgnoreCase(clickSrc)) {
isPartialSearch = false;
}
LabelNodeSRO labelNode = SearchUtils.getLabelNodeByNodePath(categoryURL);
// for 'ALL' categories labelNode would be null
if (ALL.equals(categoryURL)) {
if (number == 0 && !CacheManager.getInstance().getCache(SearchConfigurationCache.class).getBooleanProperty(SearchProperty.ALLOW_ZERO_RESULT_REQUESTS)) {
return new SearchSRO();
}
// Response for Search result page in mobile - same as web.
sro = labelSearchCacheServiceImplFactory.getSearchCacheServiceImpl().getSearchBinResultsForAllLabels(keyGenerator, queryFromBrowser, searchTerm, labelNode, start, number, sortBy, userZone, vertical,
isPartialSearch, isSpellCheckEnabled, originalKeyword, true, false);
} else if (labelNode != null) {
sro = getSearchProducts(keyGenerator, queryFromBrowser, searchTerm, null, null, labelNode, start, number, sortBy, userZone, isPartialSearch, isSpellCheckEnabled,
originalKeyword, false, true,categoryURL);
} else {
throw new SearchException("Search was hit without selecting any category");
}
// this is the minimum number to results that should match for results to be shown on 'people who search this bought this widget'
SearchConfigurationCache cache = CacheManager.getInstance().getCache(SearchConfigurationCache.class);
if ((ClickSourceType.PWSTBT_WIDGET.getValue()).equalsIgnoreCase(clickSrc) && sro.getNoOfMatches() < cache.getIntegerProperty(SearchProperty.BEST_SELLER_MINIMUM_RESULTS)) {
LOG.info("The minimum number of results to match for PWSTBT widget are " + cache.getIntegerProperty(SearchProperty.BEST_SELLER_MINIMUM_RESULTS)
+ " but number of matched results are " + sro.getNoOfMatches());
sro = new SearchSRO();
}
return sro;
}
@Autowired
private IUserPersonaSegmentService personaSegmentService;
@Autowired
private IContextHolder<SearchRequestContext> ctxProvider;
private boolean isClientPersonaEnabled(SearchRequestContext ctx) {
SearchConfigurationCache cache = CacheManager.getInstance().getCache(SearchConfigurationCache.class);
String clientsEnabled = cache.getProperty(SearchProperty.PERSONA_CLIENTS_ENABLED);
String client = ctx.req.getContextSRO().getAppIdent();
return !StringUtils.isEmpty(client) && !StringUtils.isEmpty(clientsEnabled) && Pattern.matches("(?i).*\\b" + client + "\\b.*", clientsEnabled);
}
protected UserSegmentDTO setupPersonalizationContext(LabelNodeSRO labelNode, String searchTerm, String sortBy) {
SearchRequestContext ctx = ctxProvider.getContext();
if (ctx == null || ctx.req == null) {
LOG.warn("No Request Context found");
return null;
}
SearchConfigurationCache cache = CacheManager.getInstance().getCache(SearchConfigurationCache.class);
// check if Personalization is enabled
if (labelNode == null || !cache.getBooleanProperty(SearchProperty.PERSONA_SEARCH_ENABLED) || StringUtils.isEmpty(searchTerm)
|| !SolrSortCategory.RELEVANCY.getValue().equalsIgnoreCase(sortBy) || !isClientPersonaEnabled(ctx)) {
LOG.debug("Personalization not enabled");
return null;
}
LOG.info("Trying to set up personalization context");
// setup the context for later use
ctx.personaSegments = personaSegmentService.getUserSegments(ctx.req.getUserTrackingId(), labelNode.getNodePath());
return ctx.personaSegments;
}
@Override
public SearchSRO getSearchProducts(KeyGenerator keyGenerator, String queryFromBrowser, String searchTerm, Integer campaignId, ProductCategorySRO pc, LabelNodeSRO labelNode,
int start, int number, String sortBy, String userZone, boolean isPartialSearch, boolean isSpellCheckEnabled, String originalKeyword, boolean isNested, boolean isMobile,String categoryURL)
throws SearchException, ShardNotFoundException, IllegalAccessException {
LOG.info("------------------Product category page---------------");
// build cache key considering campaign id
keyGenerator.setCampaignId(String.valueOf(campaignId));
// Search results will vary based on isNested flag even for exact same keywords hence, when we cache
// we cache both results with different key
keyGenerator.setNested(isNested);
SearchConfigurationCache cache = CacheManager.getInstance().getCache(SearchConfigurationCache.class);
LOG.info("sortBy : " + sortBy + ", personalization Enabled : " + cache.getBooleanProperty(SearchProperty.PERSONA_SEARCH_ENABLED) + ", labelNode : " + labelNode);
// try to set persona context
keyGenerator.setPersonaSegment(setupPersonalizationContext(labelNode, searchTerm, sortBy));
SearchSRO searchSRO = labelSearchCacheServiceImplFactory.getSearchCacheServiceImpl().getSearchProducts(keyGenerator, queryFromBrowser, searchTerm, campaignId, pc, labelNode, start, number, sortBy, userZone, isPartialSearch,
isSpellCheckEnabled, originalKeyword, isNested, isMobile,categoryURL);
/*SearchCoreContext coreContext = CoreContextHolderThreadLocal.getContext();
if (coreContext != null) {
if (coreContext.getCategoryUrlUsed().equalsIgnoreCase("ALL")) {
String cacheKey = keyGenerator.buildKey();
try {
final AerospikeClient aClient = AerospikeClientFactory.getInstance();
LOG.info("Clearing Cache as Category redirected was ambiguous so redirected to ALL and removing key " + cacheKey);
aClient.delete(null, new Key("search", EXPIRY_SET, cacheKey));
aClient.delete(null, new Key("search", DATA_SET, cacheKey));
} catch (AerospikeException e) {
e.printStackTrace();
}
}
}*/
return searchSRO;
}
@Override
public FilterListSRO getFiltersForProducts(Integer categoryId, Integer campaignId, String q, String keyword, boolean partialSearch, boolean isBrand, String categoryUrl,
String userZone, HyperlocalCriteria hyperlocalCriteria, Set<Integer> pinCodes, GetFiltersRequest request) throws SearchException, ShardNotFoundException {
String key = new KeyGenerator(String.valueOf(categoryId), String.valueOf(campaignId), q, keyword, partialSearch, null, categoryUrl, version, userZone, hyperlocalCriteria, pinCodes).buildFilterKey();
if (campaignId != null) {
LOG.info("Get Filters for Campaign Products wrt : " + key);
}
FilterListSRO filterListSRO = labelSearchCacheServiceImplFactory.getSearchCacheServiceImpl().getFiltersForProducts(key, categoryId, campaignId, q, keyword, partialSearch, isBrand, categoryUrl, userZone, hyperlocalCriteria, pinCodes,request);
return filterListSRO;
}
@Override
public FilterListSRO getFiltersForProducts(Integer categoryId, Integer campaignId, String q, String keyword,
boolean partialSearch, boolean isBrand, String categoryUrl, String userZone,
HyperlocalCriteria hyperlocalCriteria, Set<Integer> pinCodes)
throws SearchException, ShardNotFoundException {
return getFiltersForProducts(categoryId, campaignId, q, keyword, partialSearch, isBrand, categoryUrl, userZone, hyperlocalCriteria, pinCodes,null);
}
@Override
public FilterListSRO getFilterValuesForFilter(String categoryId, String campaignId, String q, String keyword, boolean partialSearch, String filterName, String fullQ,
String categoryUrl, String[] filtersToFetch, String userZone, HyperlocalCriteria hyperlocalCriteria, Set<Integer> pinCodes) throws SearchException, NumberFormatException, ShardNotFoundException {
String uniqueFilterKey = new KeyGenerator(categoryId, campaignId, q, keyword, partialSearch, filterName, categoryUrl, version, userZone, hyperlocalCriteria, pinCodes).buildFilterKey();
String[] filterNames = filterName.split(",");
FilterListSRO filterListSRO = labelSearchCacheServiceImplFactory.getSearchCacheServiceImpl().getFilterValuesForFilter(uniqueFilterKey, categoryId, campaignId, q, keyword, partialSearch, filterNames, categoryUrl, filtersToFetch,
userZone, hyperlocalCriteria, pinCodes);
/*SearchCoreContext coreContext = CoreContextHolderThreadLocal.getContext();
if (coreContext != null) {
if (coreContext.getCategoryUrlUsed().equalsIgnoreCase("ALL")) {
String cacheKey = uniqueFilterKey.concat(".FilterSRO");
try {
final AerospikeClient aClient = AerospikeClientFactory.getInstance();
LOG.info("Clearing Cache as Category redirected was ambiguous so redirected to ALL and removing key " + cacheKey);
aClient.delete(null, new Key("search", EXPIRY_SET, cacheKey));
aClient.delete(null, new Key("search", DATA_SET, cacheKey));
} catch (AerospikeException e) {
e.printStackTrace();
}
}
}*/
return filterListSRO;
}
@Override
public GroupNavigationSRO getGroupNavigation(KeyGenerator keyGenerator, String keyword, String q, String categoryUrl, Integer campaignId, boolean isSpellCheck, String userZone) throws IllegalAccessException {
GroupNavigationSRO sro = new GroupNavigationSRO();
try {
ProductCategoryCache categoryCache = CacheManager.getInstance().getCache(ProductCategoryCache.class);
LabelNodeSRO labelNode = ALL.equals(categoryUrl) ? null : categoryCache.getLabelForLabelPath(categoryUrl);
if (!ALL.equals(categoryUrl) && labelNode == null) {
LOG.error("Invalid label : " + categoryUrl);
return null;
}
// try to setup persona context - using sort to relevancy since group left nav doesn't change with sortBy
keyGenerator.setPersonaSegment(setupPersonalizationContext(labelNode, keyword, SolrSortCategory.RELEVANCY.getValue()));
sro = labelSearchCacheServiceImplFactory.getSearchCacheServiceImpl().getGroupNavigation(keyGenerator, keyword, q, categoryUrl, campaignId, isSpellCheck, userZone);
/*SearchCoreContext coreContext = CoreContextHolderThreadLocal.getContext();
if (coreContext != null) {
if (coreContext.getCategoryUrlUsed().equalsIgnoreCase("ALL")) {
String cacheKey = keyGenerator.buildKey().concat(".GroupNavigationSRO");
try {
final AerospikeClient aClient = AerospikeClientFactory.getInstance();
LOG.info("Clearing Cache as Category redirected was ambiguous so redirected to ALL and removing key " + cacheKey);
aClient.delete(null, new Key("search", EXPIRY_SET, cacheKey));
aClient.delete(null, new Key("search", DATA_SET, cacheKey));
} catch (AerospikeException e) {
e.printStackTrace();
}
}
}*/
} catch (SearchException e) {
LOG.error("Error in fetching GroupSRO: ", e);
}
return sro;
}
@Override
public QueryResponse setCategoryFilterQueryAndExecute(SearchCriteria sc, Integer id) throws SearchException {
labelSearchCacheServiceImplFactory.getSearchCacheServiceImpl().setCategoryFilterQuery(sc, id);
return labelSearchCacheServiceImplFactory.getSearchCacheServiceImpl().executeQuery(sc.buildQuery(), id);
}
@Override
public Long getProductCategoryCount(Integer categoryId, String categoryUrl) {
Long count = CacheManager.getInstance().getCache(ProductCategoryCache.class).getCategoryCountByUrl(categoryUrl);
if (count == null) {
count = new Long(0);
}
LOG.info("Product Category Counts for CategoryUrl: " + categoryUrl + " = " + count);
return count;
}
@Override
public List<TopSellingProductCategorySRO> getTopSellingProductsforCategories(List<Integer> categoryIds, List<String> categoryUrls) {
List<TopSellingProductCategorySRO> topSellingProductCategorySROs = new ArrayList<TopSellingProductCategorySRO>();
for (String categoryUrl : categoryUrls) {
try {
TopSellingProductCategorySRO topSellingProductCategorySRO = null;
Integer searchId = ShardResolverService.getSearchIdByLabel(categoryUrl);
QueryResponse rsp = searchServiceFactory.getSearchService(SearchVersion.VERSION_2.getValue()).getTopProductsInfoById(searchId,
CacheManager.getInstance().getCache(SearchConfigurationCache.class).getIntegerProperty(SearchProperty.MAX_TOP_SELLING_PRODUCTS_PER_CATEGORY));
List<Long> pogIds = SearchUtils.extractTopProductsByCategoryId(rsp);
if (pogIds != null && !pogIds.isEmpty()) {
topSellingProductCategorySRO = new TopSellingProductCategorySRO(categoryUrl, pogIds);
topSellingProductCategorySROs.add(topSellingProductCategorySRO);
}
} catch (Exception e) {
LOG.error("Unable to get Top Selling Products for categoryId: " + categoryUrl + ", Exception:" + e.getMessage());
}
}
return topSellingProductCategorySROs;
}
@Override
public List<TopSellingBrandSRO> getTopSellingBrandsforCategories(List<Integer> categoryIds, List<String> categoryUrls) {
List<TopSellingBrandSRO> topSellingBrandSROs = new ArrayList<TopSellingBrandSRO>();
for (String categoryUrl : categoryUrls) {
TopSellingBrandSRO topSellingBrandSRO = topSellingBrandsCacheService.getTopSellingBrandsByUrl(categoryUrl);
if (topSellingBrandSRO != null) {
topSellingBrandSROs.add(topSellingBrandSRO);
}
}
return topSellingBrandSROs;
}
@Override
public List<TopSellingBrandSRO> getAllTopSellingProducts(){
List<TopSellingBrandSRO> topSellingBrandSROs = topSellingBrandsCacheService.getAllTopSellingProducts();
return topSellingBrandSROs;
}
@Override
public FacetSRO getFacets(String cachekey, String keyword, String queryFieldName, String[] facetFields, Map<String, List<String>> filterMap, int number) throws SearchException {
// update values for mainCategoryXpath & categoryXpath fields
/*if(SolrFields.CATEGORY_XPATH.equals(queryFieldName) || SolrFields.MAIN_CATEGORY_XPATH.equals(queryFieldName)) {
String labelPath = SearchUtils.getLabelPathByUrl(keyword);
keyword = String.valueOf(ShardResolverService.getSearchIdByLabel(labelPath));
}*/
for (String filterField : filterMap.keySet()) {
if (SolrFields.CATEGORY_XPATH.equals(filterField) || SolrFields.MAIN_CATEGORY_XPATH.equals(filterField)) {
List<String> searchIds = new ArrayList<String>();
for (String val : filterMap.get(filterField)) {
String labelPath = SearchUtils.getLabelPathByUrl(val);
searchIds.add(String.valueOf(ShardResolverService.getSearchIdByLabel(labelPath)));
}
filterMap.put(filterField, searchIds);
}
}
return labelSearchCacheServiceImplFactory.getSearchCacheServiceImpl().getFacets(cachekey, keyword, queryFieldName, facetFields, filterMap, number);
}
@Override
public FilterListSRO getSRPFilters(KeyGenerator keyGenerator, String q, String keyword, boolean partialSearch, String categoryUrl, String userZone, HyperlocalCriteria hyperlocalCriteria, Set<Integer> pinCodes) throws SearchException {
if (StringUtils.isEmpty(keyword)) {
LOG.error("Invalid parameters.");
return null;
}
keyword = SearchUtils.modifySearchTerm(keyword);
if (StringUtils.isEmpty(keyword)) {
LOG.info(" Returning empty filters for empty keyword.");
return new FilterListSRO();
}
return labelSearchCacheServiceImplFactory.getSearchCacheServiceImpl().getSRPFilters(keyGenerator.buildKey(), q, keyword, partialSearch, categoryUrl, userZone, hyperlocalCriteria, pinCodes);
}
@Override
public SearchSRO getSearchProducts(KeyGenerator keyGenerator, String queryFromBrowser, String searchTerm,
Integer campaignId, ProductCategorySRO pc, LabelNodeSRO labelNode, int start, int number, String sortBy,
String userZone, boolean isPartialSearch, boolean isSpellCheckEnabled, String originalKeyword,
boolean isNested, boolean isMobile) throws SearchException, ShardNotFoundException, IllegalAccessException {
// TODO Auto-generated method stub
return getSearchProducts(keyGenerator, queryFromBrowser, searchTerm, campaignId, pc, labelNode, start, number, sortBy, userZone, isPartialSearch, isSpellCheckEnabled, originalKeyword, isNested, isMobile, null);
}
@Override
public String getmodelSearch(String query, String type) throws SearchException {
return labelSearchCacheServiceImplFactory.getSearchCacheServiceImpl().getmodelSearch(query, type);
}
@Override
public String classifierResponse(String query, String type) throws SearchException {
try {
return labelSearchCacheServiceImplFactory.getSearchCacheServiceImpl().getClassifierResponse(query, type);
} catch (JsonGenerationException e) {
return e.getMessage();
} catch (JsonMappingException e) {
return e.getMessage();
} catch (IOException e) {
return e.getMessage();
}
}
public GetGuidedSearchResponse getGuides(String query, String url) {
if(!StringUtils.isEmpty(query) && !StringUtils.isEmpty(url) && url.equalsIgnoreCase("ALL"))
{
KeyGenerator keyGenerator = new KeyGenerator();
keyGenerator.setQ("sNapDeAl.sEarcH.getGuides=" +"##"+ query+ "##"+ url);
return labelSearchCacheServiceImplFactory.getSearchCacheServiceImpl().getGuides(keyGenerator, query, url);
}
return null;
}
public GetGuidedSearchResponse getFilteredGuides(String query ,GetGroupLeftNavResponse leftNavBarResponse) {
g=null;
GroupNavigationSRO groups = leftNavBarResponse.getGroups();
List<GroupNavigationItemSRO> items = groups.getItems() ;
// List<String> nitems = getNavItems(items);
List<String> nitems = null;
getNavItems(items,nitems);
System.out.println("SIZE" + nitems.size());
List<String> navItems = new ArrayList<String>();
System.out.println("GETTING GUIDED FILE FROM AEROSPIKE");
List<String> guideItems = aerospikeGuidedResponse.getGuides(query);
//HashMap<String,String> nodeUrlMapping = new HashMap<String,String>();
if(guideItems.isEmpty())
{
System.out.println("\n\n\n\n" + "EMPTY GUIDED" + " \n\n\n\n\n");
}
guideItems.set(0, guideItems.get(0).trim());
System.out.println("GUIDED RESPONSE");
for(int i=0 ; i < guideItems.size() ;i ++)
{
System.out.println(guideItems.get(i));
}
/*for (int i =0 ;i < items.size() ;i++) {
List<GroupNavigationItemSRO> children_items = items.get(i).getChildren();
String s = items.get(i).getNodePath();
String m = items.get(i).getUrl();
System.out.println(s + " " + m);
navItems.add(m);
//nodeUrlMapping.put(s,m);
for (int j=0;j<children_items.size();j++) {
String r = children_items.get(j).getNodePath();
String n = children_items.get(j).getUrl();
System.out.println(r +" " + n);
// nodeUrlMapping.put(r,n);
navItems.add(n);
}
}*/
System.out.println("ITEM RESPONSE");
//navItems = g ;
for(int i=0 ; i < navItems.size() ;i ++)
{
System.out.println(navItems.get(i));
}
List<String> filteredGuides = new ArrayList<String>();
for(int i=0 ; i < guideItems.size() ;i++)
{
if(navItems.contains(guideItems.get(i)))
filteredGuides.add(guideItems.get(i));
else {
}
}
System.out.println("NAV ITEMS" + navItems.size() + navItems.toString());
System.out.println("GUIDE ITEMS" + filteredGuides.size() + filteredGuides.toString());
List<WidgetEntity> entities = new ArrayList<WidgetEntity>();
/* Iterator it = nodeUrlMapping.entrySet().iterator();
while (it.hasNext()) {
Map.Entry pair = (Map.Entry)it.next();
System.out.println(pair.getKey() + " = " + pair.getValue());
}*/
for(int i=0;i<filteredGuides.size();i++)
{
String guide = filteredGuides.get(i);
guide = guide.trim();
System.out.println(guide);
LabelNodeSRO labelSRO = getLableSRO(guide);
System.out.println(labelSRO.toString() + guide);
WidgetEntity entity = new WidgetEntity();
entity.setId(labelSRO.getUrl());
entity.setName(labelSRO.getDisplayName());
entity.setType("category");
entities.add(entity);
}
System.out.println("ENTITIES DEtails" );
GetGuidedSearchResponse response = new GetGuidedSearchResponse();
for(int i =0 ;i<entities.size();i++)
{
System.out.println(entities.get(i).getId() + entities.get(i).getName() + entities.get(i).getType());
// System.out.println(nodeUrlMapping.get(entities.get(i).getId()));
}
response.setEntities(entities);
return response;
}
LabelNodeSRO getLableSRO(String guide)
{
System.out.println(guide + "GET");
LabelNodeSRO label =SearchUtils.getLabelNodeByNodePath(guide);
return label;
}
void getNavItems(List<GroupNavigationItemSRO> items,List<String> nitems){
System.out.println("PRINT");
for(GroupNavigationItemSRO item : items) {
nitems.add(item.getUrl());
System.out.println(item.getUrl());
// g.add(item.getUrl());
System.out.println("PRINT");
List<GroupNavigationItemSRO> nextItem = item.getChildren();
if (nextItem != null && nextItem.size()>0) {
getNavItems(nextItem,nitems);
}
}
}
}
You could try something like this, where you return a list with all the Strings; when there are no more elements the recursion stops and an empty list is returned:
List<String> getNavItems(List<GroupNavigationItemSRO> items){
List<String> results = new ArrayList<>();
System.out.println("PRINT");
if(items != null && !items.isEmpty()){
for(GroupNavigationItemSRO item : items) {
results.add(item.getUrl());
System.out.println(item.getUrl());
// g.add(item.getUrl());
System.out.println("PRINT");
results.addAll(getNavItems(item.getChildren()));
}
}
return results;
}
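With this version the call site in getFilteredGuides() no longer needs to pass a list in at all, for example:
List<String> nitems = getNavItems(items);
System.out.println("SIZE" + nitems.size());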
In the getFilteredGuides() method you're passing nitems as null, which will cause a NullPointerException.
Just pass it in as follows:
List<String> nitems = new ArrayList<String>();
getNavItems(items,nitems);
Or you can add a check for null inside getNavItems() method and initialize it accordingly:
void getNavItems(List<GroupNavigationItemSRO> items,List<String> nitems){
if(nitems == null)
{
nitems = new ArrayList<String>();
}
System.out.println("PRINT");
for(GroupNavigationItemSRO item : items) {
nitems.add(item.getUrl());
System.out.println(item.getUrl());
// g.add(item.getUrl());
System.out.println("PRINT");
List<GroupNavigationItemSRO> nextItem = item.getChildren();
if (nextItem != null && nextItem.size()>0) {
getNavItems(nextItem,nitems);
}
}
}

myBATIS foreach hitting limit of 1000

Here's what myBATIS has in its own documentation for foreach.
<select id="selectPostIn" resultType="domain.blog.Post">
SELECT *
FROM POST P
WHERE ID in
<foreach item="item" index="index" collection="list"
open="(" separator="," close=")">
#{item}
</foreach>
</select>
However, if list contains over 1000 items and you're using Oracle DB, you get this exception:
java.sql.SQLSyntaxErrorException: ORA-01795: maximum number of expressions in a list is 1000
What can I do to fix this so it works with more than 1000 elements?
I'm not sure if this is the most elegant solution or not, but here's what I did:
<select id="selectPostIn" resultType="domain.blog.Post">
SELECT *
FROM POST P
WHERE ID in
<trim suffixOverrides=" OR ID IN ()">
<foreach item="item" index="index" collection="list"
open="(" close=")">
<if test="index != 0">
<choose>
<when test="index % 1000 == 999">) OR ID IN (</when>
<otherwise>,</otherwise>
</choose>
</if>
#{item}
</foreach>
</trim>
</select>
Explanation
Let's start with the foreach. We want to surround it in ( and ). Between most elements we want commas, except that after every thousand elements we want to close the list and OR it with another one. That's what the choose/when/otherwise construct handles. We don't want either of those before the first element, hence the if that the choose is inside of. Finally, the foreach ends by actually inserting the #{item}.
The outer trim is just so that if we have exactly 1000 elements, for example, we don't end with OR ID IN () which would be invalid ((), specifically, is the invalid part. That's a syntax error in SQL, not an empty list like I hoped it would be.)
We have tried a DELETE query with an IN clause of more than 1000 records, based on the above reference.
The following query is working:
<delete id="delete" parameterType="Map">
DELETE FROM Employee
where
emp_id = #{empId}
<foreach item="deptId" index= "index" collection="ids" open="AND DEPT_ID NOT IN (" close=")" >
<if test="index != 0">
<choose>
<when test="index % 1000 == 999">) AND DEPT_ID NOT IN (</when>
<otherwise>,</otherwise>
</choose>
</if>
#{deptId}
</foreach>
</delete>
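If changing the SQL is not an option, the same limit can also be handled on the Java side by splitting the id list before it reaches the mapper (the plugin in the next answer automates this idea). A minimal sketch, assuming Guava's Lists.partition is available and a hypothetical postMapper.selectPostIn(List<Long> ids) method bound to the <select> above:
// minimal sketch: split the ids into chunks of at most 1000 and merge the results
public List<Post> selectPostsIn(List<Long> ids) {
    List<Post> result = new ArrayList<>();
    for (List<Long> chunk : Lists.partition(ids, 1000)) {
        result.addAll(postMapper.selectPostIn(chunk)); // hypothetical mapper method taking the chunk
    }
    return result;
}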
A MyBatis plugin that runs the query once per partition of the oversized params and then combines the results:
@Intercepts({
@Signature(type = Executor.class, method = "query", args = {MappedStatement.class, Object.class, RowBounds.class, ResultHandler.class}),
@Signature(type = Executor.class, method = "query", args = {MappedStatement.class, Object.class, RowBounds.class, ResultHandler.class, CacheKey.class, BoundSql.class})}
)
public class BigSizeParamQueryPlugin implements Interceptor {
private final int singleBatchSize;
private static final HeavyParamContext NO_BIG_PARAM = new HeavyParamContext();
public BigSizeParamQueryPlugin() {
this.singleBatchSize = 1000;
}
public BigSizeParamQueryPlugin(Integer singleBatchSize) {
if (singleBatchSize < 500) {
throw new IllegalArgumentException("batch size less than 500 is not recommended");
}
this.singleBatchSize = singleBatchSize;
}
@Override
public Object intercept(Invocation invocation) throws Throwable {
Object[] args = invocation.getArgs();
Object parameter = args[1];
if (parameter instanceof MapperMethod.ParamMap && RowBounds.DEFAULT == args[2]) {
MapperMethod.ParamMap paramMap = (MapperMethod.ParamMap) parameter;
if (MapUtils.isNotEmpty(paramMap)) {
try {
HeavyParamContext context = findHeavyParam(paramMap);
if (context.hasHeavyParam()) {
QueryExecutor queryExecutor = new QueryExecutor(invocation, context);
return queryExecutor.query();
}
} catch (Throwable e) {
log.warn("BigSizeParamQueryPlugin process error", e);
return invocation.proceed();
}
}
}
return invocation.proceed();
}
private class QueryExecutor {
private final MappedStatement ms;
private final Map<String, Object> paramMap;
private final RowBounds rowBounds;
private final ResultHandler resultHandler;
private final Executor executor;
private final List<Object> finalResult;
private final Iterator<HeavyParam> heavyKeyIter;
public QueryExecutor(Invocation invocation, HeavyParamContext context) {
Object[] args = invocation.getArgs();
this.ms = (MappedStatement) args[0];
this.paramMap = context.getParameter();
this.rowBounds = (RowBounds) args[2];
this.resultHandler = (ResultHandler) args[3];
this.executor = (Executor) invocation.getTarget();
List<HeavyParam> heavyParams = context.getHeavyParams();
this.finalResult = new ArrayList<>(heavyParams.size() * singleBatchSize);
this.heavyKeyIter = heavyParams.iterator();
}
public Object query() throws SQLException {
while (heavyKeyIter.hasNext()) {
HeavyParam currKey = heavyKeyIter.next();
List<List<Object>> param = partitionParam(currKey.getParam());
doQuery(currKey, param);
}
return finalResult;
}
private void doQuery(HeavyParam currKey, List<List<Object>> param) throws SQLException {
if (!heavyKeyIter.hasNext()) {
for (List<Object> currentParam : param) {
updateParamMap(currKey, currentParam);
List<Object> oneBatchResult = executor.query(ms, paramMap, rowBounds, resultHandler);
finalResult.addAll(oneBatchResult);
}
return;
} else {
HeavyParam nextKey = heavyKeyIter.next();
log.warn("get mutil heavy key [{}], batchSize[{}]", nextKey.shadowHeavyKeys, nextKey.getParam().size());
List<List<Object>> nextParam = partitionParam(nextKey.getParam());
for (List<Object> currParam : param) {
updateParamMap(currKey, currParam);
doQuery(nextKey, nextParam);
}
}
}
private void updateParamMap(HeavyParam currKey, List<Object> param) {
for (String shadowKey : currKey.getShadowHeavyKeys()) {
paramMap.put(shadowKey, param);
}
}
}
private HeavyParamContext findHeavyParam(Map<String, Object> parameterMap) {
List<Map.Entry<String, Object>> heavyKeys = doFindHeavyParam(parameterMap);
if (heavyKeys == null) {
return BigSizeParamQueryPlugin.NO_BIG_PARAM;
} else {
HeavyParamContext result = new HeavyParamContext();
List<HeavyParam> heavyParams;
if (heavyKeys.size() == 1) {
heavyParams = buildSingleHeavyParam(heavyKeys);
} else {
heavyParams = buildMultiHeavyParam(heavyKeys);
}
result.setHeavyParams(heavyParams);
result.setParameter(new HashMap<>(parameterMap));
return result;
}
}
private List<HeavyParam> buildSingleHeavyParam(List<Map.Entry<String, Object>> heavyKeys) {
Map.Entry<String, Object> single = heavyKeys.get(0);
return Collections.singletonList(new HeavyParam((Collection) single.getValue(), Collections.singletonList(single.getKey())));
}
private List<List<Object>> partitionParam(Object o) {
Collection c = (Collection) o;
List res;
if (c instanceof List) {
res = (List) c.stream().distinct().collect(Collectors.toList());
} else {
res = new ArrayList(c);
}
return Lists.partition(res, singleBatchSize);
}
private List<HeavyParam> buildMultiHeavyParam(List<Map.Entry<String, Object>> heavyKeys) {
//when heavy keys used multi time in xml, its name will be different.
TreeMap<Collection, List<String>> params = new TreeMap<>(new Comparator<Collection>() {
@Override
public int compare(Collection o1, Collection o2) {
//fixme workable but have corner case.
return CollectionUtils.isEqualCollection(o1, o2) == true ? 0 : o1.hashCode() - o2.hashCode();
}
});
for (Map.Entry<String, Object> keyEntry : heavyKeys) {
String key = keyEntry.getKey();
List<String> keys = params.computeIfAbsent((Collection) keyEntry.getValue(), k -> new ArrayList<>(1));
keys.add(key);
}
List<HeavyParam> hps = new ArrayList<>(params.size());
for (Map.Entry<Collection, List<String>> heavyEntry : params.entrySet()) {
List<String> shadowKeys = heavyEntry.getValue();
hps.add(new HeavyParam(heavyEntry.getKey(), shadowKeys));
}
return hps;
}
private List<Map.Entry<String, Object>> doFindHeavyParam(Map<String, Object> parameterMap) {
List<Map.Entry<String, Object>> result = null;
for (Map.Entry<String, Object> p : parameterMap.entrySet()) {
if (p != null) {
Object value = p.getValue();
if (value != null && value instanceof Collection) {
int size = CollectionUtils.size(value);
if (size > singleBatchSize) {
if (result == null) {
result = new ArrayList<>(1);
}
result.add(p);
}
}
}
}
return result;
}
@Getter
@Setter
private static class HeavyParamContext {
private Boolean hasHeavyParam;
private List<HeavyParam> heavyParams;
private Map<String, Object> parameter;
public Boolean hasHeavyParam() {
return heavyParams != null;
}
}
@Data
@AllArgsConstructor
@NoArgsConstructor
private class HeavyParam {
private Collection param;
private List<String> shadowHeavyKeys;
}
@Override
public Object plugin(Object o) {
return Plugin.wrap(o, this);
}
@Override
public void setProperties(Properties properties) {
}
}
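Like any MyBatis interceptor, the plugin only takes effect once it is registered. Registration is usually done in the <plugins> section of mybatis-config.xml; a minimal programmatic sketch, assuming the SqlSessionFactory is built by hand and the config file name is just an example:
InputStream config = Resources.getResourceAsStream("mybatis-config.xml"); // assumed config location
SqlSessionFactory factory = new SqlSessionFactoryBuilder().build(config);
factory.getConfiguration().addInterceptor(new BigSizeParamQueryPlugin(1000));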

How to isolate an instance of a class with SuperCsv?

I have a class ArticleModele where I store the content of the columns of the .csv file, but I don't know how to access a particular instance of the class which corresponds to a particular row in the .csv. Here is my code:
public static ArticleModele readWithCsvDozerBeanReader() throws Exception {
final CellProcessor[] processors = new CellProcessor[] {
new Optional(),
new Optional(),
new Optional()
};
ICsvDozerBeanReader beanReader = null;
try {
beanReader = new CsvDozerBeanReader(new FileReader(CSV_FILENAME), CsvPreference.EXCEL_NORTH_EUROPE_PREFERENCE);
beanReader.getHeader(true); // ignore the header
beanReader.configureBeanMapping(ArticleModele.class, FIELD_MAPPING);
ArticleModele articleModele;
while( (articleModele = beanReader.read(ArticleModele.class, processors)) != null ) {
System.out.println(String.format(" %s", articleModele));}
return articleModele;
}
finally {
if( beanReader != null ) {
beanReader.close();
}
}
}
}
And here is the class:
public class ArticleModele {
public String titre;
public String contenu;
public String attachement;
public ArticleModele(){}
public ArticleModele(String titre, String contenu, String attachement){
this.titre=titre;
this.contenu=contenu;
this.attachement=attachement;
}
public String getTitre(){
return titre;
}
public void setTitre(String titre){
this.titre=titre;
}
public String getContenu(){
return contenu;
}
public void setContenu(String contenu){
this.contenu=contenu;
}
public String getAttachement(){
return attachement;
}
public void setAttachement(String attachement){
this.attachement=attachement;
}
public String toString() {
return String.format("ArticleModele [titre=%s, content=%s, attachement=%s]", titre, contenu, attachement);
}
}
The code returns only the last result, as it overwrites articleModele on each iteration.
ArticleModele articleModele;
while( (articleModele = beanReader.read(ArticleModele.class, processors))
!= null) {
System.out.println(articleModele);
}
return articleModele;
So collect a list.
public static List<ArticleModele> readWithCsvDozerBeanReader() throws Exception {
List<ArticleModele> articleModeles = new ArrayList<>();
ArticleModele articleModele;
while( (articleModele = beanReader.read(ArticleModele.class, processors))
!= null) {
System.out.println(articleModele);
articleModeles.add(articleModele);
}
return articleModeles;
If this works, you can get the i-th article, and walk the articles:
for (ArticleModele articleModele : articleModeles) { ...
For example, if you want to fetch a row by title, you can have something like:
public static Map<String, ArticleModele> readWithCsvDozerBeanReader() throws Exception {
final CellProcessor[] processors = new CellProcessor[] {
new Optional(),
new Optional(),
new Optional()
};
Map<String, ArticleModele> map = new HashMap<String,ArticleModele>();
ICsvDozerBeanReader beanReader = null;
try {
beanReader = new CsvDozerBeanReader(new FileReader(CSV_FILENAME), CsvPreference.EXCEL_NORTH_EUROPE_PREFERENCE);
beanReader.getHeader(true); // ignore the header
beanReader.configureBeanMapping(ArticleModele.class, FIELD_MAPPING);
ArticleModele articleModele;
while( (articleModele = beanReader.read(ArticleModele.class, processors)) != null ) {
System.out.println(String.format(" %s", articleModele));
map.put(articleModele.getTitre(), articleModele);
}
return map;
}
finally {
if( beanReader != null ) {
beanReader.close();
}
}
}
}
and get whatever articleModele you want using:
map.get("titre");
