NamedList<Object> Throws Class cast Exception - java

I want to form an Object of NamedList as this :
response={numFound=57279026,start=0,docs=[SolrDocument{timestamp_update=Thu Jan 01 01:00:00 CET 1970}]}}
When I do that I am getting an exception thrown when my code tries to access the result: SolrDocumentList results = response.getResults();
**java.lang.ClassCastException: org.apache.solr.common.util.SimpleOrderedMap cannot be cast to org.apache.solr.common.SolrDocumentList**
How should I create the NamedList so that it doesn't throw an exception?
Here is my way of doing it:
// QueryResponse.setResponse() casts the value stored under the "response" key
// directly to SolrDocumentList (this._results = (SolrDocumentList) res.getVal(i)),
// so the docs must be put into a SolrDocumentList, NOT into a SimpleOrderedMap —
// that wrapping is what caused the ClassCastException.
Map<String, Object> solrDocumentMap = new HashMap<>();
solrDocumentMap.put("timestamp_update", TIMESTAMP_UPDATE);
SolrDocument solrDocument = new SolrDocument(solrDocumentMap);
SolrDocumentList solrDocumentList = new SolrDocumentList();
// numFound/start are properties of the document list itself, not sibling entries.
solrDocumentList.setNumFound(57279026L);
solrDocumentList.setStart(0L);
solrDocumentList.add(solrDocument);
// The top-level NamedList maps "response" straight to the SolrDocumentList.
NamedList<Object> nl1 = new NamedList<>(Collections.singletonMap("response", solrDocumentList));
response.setResponse(nl1);
Here is the built-in QueryResponse class, which casts the "response" entry to SolrDocumentList:
/**
 * Copies the raw Solr response entries into the strongly-typed fields of this
 * QueryResponse. Each top-level key is matched by name and its value is cast
 * to the expected type — in particular the "response" entry must already be a
 * SolrDocumentList, or a ClassCastException is thrown here.
 */
public void setResponse(NamedList<Object> res) {
    super.setResponse(res);
    for (int i = 0; i < res.size(); ++i) {
        String key = res.getName(i);
        Object val = res.getVal(i);
        if (key == null) {
            // An unnamed entry never matched any equals() check in the original chain.
            continue;
        }
        switch (key) {
            case "responseHeader":
                this._header = (NamedList) val;
                break;
            case "response":
                this._results = (SolrDocumentList) val;
                break;
            case "sort_values":
                this._sortvalues = (NamedList) val;
                break;
            case "facet_counts":
                // Facet extraction is deferred until after the loop below.
                this._facetInfo = (NamedList) val;
                break;
            case "debug":
                this._debugInfo = (NamedList) val;
                this.extractDebugInfo(this._debugInfo);
                break;
            case "grouped":
                this._groupedInfo = (NamedList) val;
                this.extractGroupedInfo(this._groupedInfo);
                break;
            case "expanded":
                this._expandedResults = ((NamedList) val).asMap(1);
                break;
            case "highlighting":
                this._highlightingInfo = (NamedList) val;
                this.extractHighlightingInfo(this._highlightingInfo);
                break;
            case "spellcheck":
                this._spellInfo = (NamedList) val;
                this.extractSpellCheckInfo(this._spellInfo);
                break;
            case "clusters":
                this._clusterInfo = (ArrayList) val;
                this.extractClusteringInfo(this._clusterInfo);
                break;
            case "suggest":
                this._suggestInfo = (Map) val;
                this.extractSuggesterInfo(this._suggestInfo);
                break;
            case "stats":
                this._statsInfo = (NamedList) val;
                this.extractStatsInfo(this._statsInfo);
                break;
            case "terms":
                this._termsInfo = (NamedList) val;
                this.extractTermsInfo(this._termsInfo);
                break;
            case "moreLikeThis":
                this._moreLikeThisInfo = (NamedList) val;
                break;
            case "nextCursorMark":
                this._cursorMarkNext = (String) val;
                break;
            default:
                break;
        }
    }
    if (this._facetInfo != null) {
        this.extractFacetInfo(this._facetInfo);
    }
}

I have finally found a solution; I hope this helps everyone. I got the response from Solr in XML format, read the file, and parsed it with the XMLResponseParser. Somehow the JSON parser is not working for Solr, and if you use Java deserialization there is an existing incompatibility bug in Solr.
This works well with the internal typecasting of query Response class as well.
/**
 * Builds a QueryResponse from a Solr XML response stored in a file.
 *
 * Fix: the original opened a FileInputStream and never closed it — the stream
 * is now managed by try-with-resources.
 *
 * @param fileName file (relative to resDir) containing the raw XML response
 * @return a QueryResponse populated via XMLResponseParser
 * @throws IOException if the file cannot be opened or read
 */
protected QueryResponse getResponse(String fileName) throws IOException {
    Path path = Paths.get(resDir + "/" + fileName);
    try (InputStream body = new FileInputStream(path.toFile())) {
        NamedList<Object> result = processResponse(body, null);
        QueryResponse response = new QueryResponse();
        response.setResponse(result);
        return response;
    }
}
/**
 * Parses a raw Solr XML response stream into a NamedList using the SolrJ
 * XMLResponseParser with UTF-8 encoding.
 *
 * Note: the second parameter is unused; it is kept for signature compatibility.
 */
private NamedList<Object> processResponse(InputStream body, Object o) {
    return new XMLResponseParser().processResponse(body, "UTF-8");
}

Related

How to prevent swapping of multi threaded response in Concurrent Future Java?

Check the below code. There is a problem in multi threading where results from each thread are swapped
We have tried using withWheelColor.isDone and withWheelColor.isCancelled and also surrounded it with withWheelColor.get() but no luck
// Runs two product searches in parallel: one WITH the wheel-color filter and
// (when more filters are present) one WITHOUT it, to collect the available colors.
final Map<String, Object> result = new HashMap<>();
final ExecutorService executor = Executors.newFixedThreadPool(2);
ProductSearchPageData<SearchStateData, ProductData> searchPageData = null;
Future<ProductSearchPageData<SearchStateData, ProductData>> withWheelColor= null;
Future<ProductSearchPageData<SearchStateData, ProductData>> withoutWheelColor = null;
// NOTE(review): sortedParams, searchState and searchQueryData below are SHARED
// MUTABLE objects captured by BOTH callables. The second task mutates
// sortedParams (remove of WHEELCOLOR) and both tasks call buildWithParams on the
// same searchState/searchQueryData concurrently — depending on scheduling, the
// first task can end up searching with the already-modified state. This is the
// likely cause of the "swapped" results; give each task its own deep copies.
final LinkedHashMap<String, String> sortedParams = getSortedParams(form.getParams(), form.getLastParam());
final PageableData pageableData = createPageableData(0,
getSearchPageSize(), null, ShowMode.Page);
final SearchStateData searchState = new SearchStateData();
searchState.setPdpFlow(Boolean.TRUE);
final SearchQueryData searchQueryData = new SearchQueryData();
String productCode = null;
try {
// Task 1: search with ALL parameters, including the wheel color.
Callable<ProductSearchPageData<SearchStateData, ProductData>> withWheelColorThread = new DTCallable<ProductSearchPageData<SearchStateData, ProductData>>(
null) {
#Override
public ProductSearchPageData<SearchStateData, ProductData> callMe(
String pk) throws Exception {
buildWithParams(sortedParams, searchState,
searchQueryData, form);
return productSearchFacade.textSearch(searchState,
pageableData);
}
};
withWheelColor = executor.submit(withWheelColorThread);
if (sortedParams.containsKey(WheelProductModel.WHEELCOLOR)
&& sortedParams.size() > 1) {
// Task 2: same search minus the wheel color, used only to read the
// color facet values.
Callable<ProductSearchPageData<SearchStateData, ProductData>> withoutWheelColorThread = new DTCallable<ProductSearchPageData<SearchStateData, ProductData>>(
null) {
#Override
public ProductSearchPageData<SearchStateData, ProductData> callMe(
String pk) throws Exception {
// NOTE(review): this removes the key from the SAME map Task 1 reads —
// a race with Task 1's buildWithParams call above. TODO confirm and
// pass a copy instead.
sortedParams.remove(WheelProductModel.WHEELCOLOR);//Call without wheelColor
buildWithParams(sortedParams, searchState,
searchQueryData, form);
return productSearchFacade.textSearch(searchState,
pageableData);
}
};
withoutWheelColor = executor.submit(withoutWheelColorThread);
}
// Stop accepting tasks and wait (up to 2 minutes) for both searches.
executor.shutdown();
if (!executor.awaitTermination(120, TimeUnit.SECONDS)) {
if(LOG.isDebugEnabled()) {
LOG.debug("No Result Found!");
}
}
// Harvest the wheel-color facet from the "without" search, if it ran.
if (null != withoutWheelColor
&& null != withoutWheelColor.get()
&& CollectionUtils.isNotEmpty(withoutWheelColor.get()
.getFacets())) {
for (final FacetData<SearchStateData> facet : withoutWheelColor
.get().getFacets()) {
if (null != facet.getCode() && facet.getCode()
.equals(WheelProductModel.WHEELCOLOR)) {
result.put("availableColors", facet.getValues());
}
}
}
// The page data shown to the user must come from the WITH-wheel-color search.
searchPageData = withWheelColor.get();
} catch (final Exception e) {
LOG.error("Error getting PDP results : " + e.getMessage());
}
processResult(request, result, searchPageData, productCode);
return result;
Expected is withWheelColor only to be in searchPageData and not withoutWheelColor. What is missing in the above to prevent swapping?

Netflix Zuul Pre Filter for Cache is not working for a small number of compressed responses

I'd like to use zuul to cache some requests. The Cache is stored in a Redis as a POJO and contains plaintext (not gzip compressed data).
For normal tests and integration tests, everything works pretty well. With a jmeter load test, some of the requests fails with
java.util.zip.ZipException: Not in GZIP format (from jmeter)
We figure out, that at this point, zuul is returning an empty response.
My PreFilter:
public class CachePreFilter extends CacheBaseFilter {
private static DynamicIntProperty INITIAL_STREAM_BUFFER_SIZE = DynamicPropertyFactory.getInstance().getIntProperty(ZuulConstants.ZUUL_INITIAL_STREAM_BUFFER_SIZE, 8192);
#Autowired
CounterService counterService;
public CachePreFilter(RedisCacheManager redisCacheManager, Properties properties) {
super(redisCacheManager, properties);
}
#Override
public Object run() {
RequestContext ctx = RequestContext.getCurrentContext();
CachedResponse data = getFromCache(ctx);
if (null != data) {
counterService.increment("counter.cached");
HttpServletResponse response = ctx.getResponse();
response.addHeader("X-Cache", "HIT");
if (null != data.getContentType()) {
response.setContentType(data.getContentType());
}
if (null != data.getHeaders()) {
for (Entry<String, String> header : data.getHeaders().entrySet()) {
if (!response.containsHeader(header.getKey())) {
response.addHeader(header.getKey(), header.getValue());
}
}
}
OutputStream outStream = null;
try {
outStream = response.getOutputStream();
boolean isGzipRequested = ctx.isGzipRequested();
if (null != data.getBody()) {
final String requestEncoding = ctx.getRequest().getHeader(ZuulHeaders.ACCEPT_ENCODING);
if (requestEncoding != null && HTTPRequestUtils.getInstance().isGzipped(requestEncoding)) {
isGzipRequested = true;
}
ByteArrayOutputStream byteArrayOutputStream = null;
ByteArrayInputStream is = null;
try {
if (isGzipRequested) {
byteArrayOutputStream = new ByteArrayOutputStream();
GZIPOutputStream gzipOutputStream = new GZIPOutputStream(byteArrayOutputStream);
gzipOutputStream.write(data.getBody().getBytes(StandardCharsets.UTF_8));
gzipOutputStream.flush();
gzipOutputStream.close();
ctx.setResponseGZipped(true);
is = new ByteArrayInputStream(byteArrayOutputStream.toByteArray());
logger.debug(String.format("Send gzip content %s", data.getBody()));
response.setHeader(ZuulHeaders.CONTENT_ENCODING, "gzip");
} else {
logger.debug(String.format("Send content %s", data.getBody()));
is = new ByteArrayInputStream(data.getBody().getBytes(StandardCharsets.UTF_8));
}
writeResponse(is, outStream);
} catch (Exception e) {
logger.error("Error at sending response " + e.getMessage(), e);
throw new RuntimeException("Failed to send content", e);
} finally {
if (null != byteArrayOutputStream) {
byteArrayOutputStream.close();
}
if (null != is) {
is.close();
}
}
}
ctx.setSendZuulResponse(false);
} catch (IOException e) {
logger.error("Cannot read from Stream " + e.getMessage(), e.getMessage());
} finally {
// don't close the outputstream
}
ctx.set(CACHE_HIT, true);
return data;
} else {
counterService.increment("counter.notcached");
}
ctx.set(CACHE_HIT, false);
return null;
}
private ThreadLocal<byte[]> buffers = new ThreadLocal<byte[]>() {
#Override
protected byte[] initialValue() {
return new byte[INITIAL_STREAM_BUFFER_SIZE.get()];
}
};
private void writeResponse(InputStream zin, OutputStream out) throws Exception {
byte[] bytes = buffers.get();
int bytesRead = -1;
while ((bytesRead = zin.read(bytes)) != -1) {
out.write(bytes, 0, bytesRead);
}
}
#Override
public int filterOrder() {
return 99;
}
#Override
public String filterType() {
return "pre";
}
}
My Post Filter
public class CachePostFilter extends CacheBaseFilter {
// Delegates the cache manager and configuration to the shared base filter.
public CachePostFilter(RedisCacheManager redisCacheManager, Properties properties) {
super(redisCacheManager, properties);
}
#Override
public boolean shouldFilter() {
RequestContext ctx = RequestContext.getCurrentContext();
return super.shouldFilter() && !ctx.getBoolean(CACHE_HIT);
}
#Override
public Object run() {
RequestContext ctx = RequestContext.getCurrentContext();
HttpServletRequest req = ctx.getRequest();
HttpServletResponse res = ctx.getResponse();
if (isSuccess(res, ctx.getOriginResponseHeaders())) {
// Store only successful responses
String cacheKey = cacheKey(req);
if (cacheKey != null) {
String body = null;
if (null != ctx.getResponseBody()) {
body = ctx.getResponseBody();
} else if (null != ctx.getResponseDataStream()) {
InputStream is = null;
try {
is = ctx.getResponseDataStream();
final Long len = ctx.getOriginContentLength();
if (len == null || len > 0) {
if (ctx.getResponseGZipped()) {
is = new GZIPInputStream(is);
}
StringWriter writer = new StringWriter();
IOUtils.copy(is, writer, "UTF-8");
body = writer.toString();
if (null != body && !body.isEmpty()) {
ctx.setResponseDataStream(new ByteArrayInputStream(body.getBytes()));
ctx.setResponseGZipped(false);
ctx.setOriginContentLength(String.valueOf(body.getBytes().length));
} else {
ctx.setResponseBody("{}");
}
}
} catch (IOException e) {
logger.error("Cannot read body " + e.getMessage(), e);
} finally {
if (null != is) {
try {
is.close();
} catch (IOException e) {
}
}
}
saveToCache(ctx, cacheKey, body);
}
}
}
return null;
}
#Override
public int filterOrder() {
return 1;
}
#Override
public String filterType() {
return "post";
}
/**
 * A response is cacheable only when its status is below 300 AND the origin
 * explicitly opted in by sending an "X-CACHEABLE: 1" header.
 */
private boolean isSuccess(HttpServletResponse res, List<Pair<String, String>> originHeaders) {
    if (res == null || res.getStatus() >= 300 || originHeaders == null) {
        return false;
    }
    for (Pair<String, String> header : originHeaders) {
        boolean optedIn = header.first().equals("X-CACHEABLE") && header.second().equals("1");
        if (optedIn) {
            return true;
        }
    }
    return false;
}
We tested it without Redis (just storing the response in a local variable) and the behavior is still the same. We always logged the response taken from the cache (before gzip) and everything looks good.
(Posted on behalf of the question author).
Solution
We refactor our PostFilter and don't change so much in the Response for zuul. After this change, we don't see any problems any more:
Working Post Filter
public class CachePostFilter extends CacheBaseFilter {
public CachePostFilter(RedisCacheManager redisCacheManager, Properties properties) {
super(redisCacheManager, properties);
}
#Override
public boolean shouldFilter() {
RequestContext ctx = RequestContext.getCurrentContext();
return super.shouldFilter() && !ctx.getBoolean(CACHE_HIT);
}
#Override
public Object run() {
RequestContext ctx = RequestContext.getCurrentContext();
HttpServletRequest req = ctx.getRequest();
HttpServletResponse res = ctx.getResponse();
if (isSuccess(res, ctx.getOriginResponseHeaders())) {
// Store only successful responses
String cacheKey = cacheKey(req);
if (cacheKey != null) {
String body = null;
if (null != ctx.getResponseBody()) {
body = ctx.getResponseBody();
} else if (null != ctx.getResponseDataStream()) {
InputStream rawInputStream = null;
InputStream gzipByteArrayInputStream = null;
try {
rawInputStream = ctx.getResponseDataStream();
gzipByteArrayInputStream = null;
// If origin tell it's GZipped but the content is ZERO
// bytes,
// don't try to uncompress
final Long len = ctx.getOriginContentLength();
if (len == null || len > 0) {
byte[] rawData = IOUtils.toByteArray(rawInputStream);
ctx.setResponseDataStream(new ByteArrayInputStream(rawData));
if (ctx.getResponseGZipped()) {
gzipByteArrayInputStream = new GZIPInputStream(new ByteArrayInputStream(rawData));
} else {
gzipByteArrayInputStream = new ByteArrayInputStream(rawData);
}
StringWriter writer = new StringWriter();
IOUtils.copy(gzipByteArrayInputStream, writer, "UTF-8");
body = writer.toString();
}
} catch (IOException e) {
logger.error("Cannot read body " + e.getMessage(), e);
} finally {
if (null != rawInputStream) {
try {
rawInputStream.close();
} catch (IOException e) {
}
}
if (null != gzipByteArrayInputStream) {
try {
gzipByteArrayInputStream.close();
} catch (IOException e) {
}
}
}
// if we read from the stream, the other filter cannot read
// and they dont' deliver any response
// ctx.setResponseBody(body);
// ctx.setResponseGZipped(false);
saveToCache(ctx, cacheKey, body);
}
}
}
return null;
}
#Override
public int filterOrder() {
return 1;
}
#Override
public String filterType() {
return "post";
}
private boolean isSuccess(HttpServletResponse res, List<Pair<String, String>> originHeaders) {
if (res != null && res.getStatus() == 200) {
if (null != originHeaders) {
for (Pair<String, String> header : originHeaders) {
if (header.first().equals("X-CACHEABLE") && header.second().equals("1")) {
return true;
}
}
}
}
return false;
}
}

An Error While parsing data using Jsoup with google app engine

I start using google app engine but I'm a beginner.
I created a web application and I added the library Jsoup.
I'm trying to parse a lot of data from a web site but when I deploy the application I get this error:
Error: Server Error
The server encountered an error and could not complete your request.
Please try again in 30 seconds.
here is my code :
/**
 * Crawls a restaurant-listing site with Jsoup and streams the results back as
 * a JSON document of the form {"Restaurant":[ {...}, {...} ]}.
 *
 * Fix: the comma logic used "i != listObj.size()", which is true for every
 * loop index, so a trailing comma was emitted after the last element and the
 * JSON was malformed. The correct check is against size() - 1.
 *
 * NOTE(review): this crawls many pages synchronously inside a single request;
 * on Google App Engine that can exceed the request deadline, which matches the
 * generic "Server Error ... try again in 30 seconds" page — consider Task
 * Queues / batching. Confirm against the GAE logs.
 */
public void doGet(HttpServletRequest req, HttpServletResponse resp)
        throws IOException {
    resp.setContentType("text/plain");
    resp.getWriter().println("{\"Restaurant\":[");
    listPages = new ArrayList<>();
    listRestaurant = new ArrayList<>();
    listUrlRestaurant = new ArrayList<>();
    listObj = new ArrayList<>();
    try {
        doc = Jsoup.connect(url).userAgent("Mozilla").timeout(60000).get();
        int i = 1;
        // Follow the "SUIV" (next) pagination links, collecting every page URL.
        while (doc.select("strong.next.page-numbers").text().contains("SUIV")) {
            listPages.add(url);
            url = url.replace("page/" + i + "/", "page/" + (i + 1) + "/");
            i = i + 1;
            doc = Jsoup.connect(url).userAgent("Mozilla").timeout(60000).get();
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    try {
        // Collect every restaurant detail-page URL from each listing page.
        for (int i = 0; i < listPages.size(); i++) {
            doc2 = Jsoup.connect(listPages.get(i)).userAgent("Mozilla").timeout(60000).get();
            restaurants = doc2.select("div.listing_img > a");
            for (Element element : restaurants) {
                listUrlRestaurant.add(element.attr("href"));
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    for (int i = 0; i < listUrlRestaurant.size(); i++) {
        ParsingRestaurant(listUrlRestaurant.get(i), resp, doc3, listObj);
    }
    for (int i = 0; i < listObj.size(); i++) {
        // Comma after every element except the last (was: i != listObj.size(),
        // which always held and produced a trailing comma).
        if (i != listObj.size() - 1) {
            resp.getWriter().println(listObj.get(i) + ",");
        } else {
            resp.getWriter().println(listObj.get(i));
        }
    }
    resp.getWriter().println("]}");
}
/**
 * Fetches one restaurant detail page, extracts its fields into a Restaurant
 * bean, and appends the bean's JSON form to {@code listObj}.
 *
 * Fixes: the "time == null" branch wrote to setPhone instead of setTime
 * (copy-paste bug), and a missing map link now skips the coordinates instead
 * of throwing a NullPointerException.
 *
 * NOTE(review): select(...).first() may also return null for name/adress on
 * unexpected markup — those would still NPE; confirm the site structure.
 */
private void ParsingRestaurant(String url, HttpServletResponse resp, Document doc, List<String> listObj) {
    Gson gson = new GsonBuilder().setPrettyPrinting().create();
    Restaurant obj = new Restaurant();
    try {
        doc = Jsoup.connect(url).userAgent("Mozilla").timeout(60000).get();
        name = doc.select("h1.entry-title").first();
        obj.setName(name.text());
        adress = doc.select("span#frontend_address").first();
        obj.setAdress(adress.text());
        facebook = doc.select("a#facebook").first();
        if (facebook == null) {
            obj.setFacebook("empty");
        } else {
            obj.setFacebook(facebook.attr("href"));
        }
        phone = doc.select("span.entry-phone.frontend_phone.listing_custom").first();
        if (phone == null) {
            obj.setPhone("empty");
        } else {
            obj.setPhone(phone.text());
        }
        time = doc.select("span.entry-listing_timing.frontend_listing_timing.listing_custom").first();
        if (time == null) {
            // was setPhone("empty"): overwrote the phone and left time unset
            obj.setTime("empty");
        } else {
            obj.setTime(time.text());
        }
        map = doc.select("div.google-map-directory > a ").first();
        if (map != null) { // guard: pages without a map link used to NPE here
            String location = map.attr("href");
            location = location.replace("http://www.google.com/maps/dir/Current+Location/", "");
            String[] output = location.split(",");
            obj.setLongitude(output[0]);
            obj.setLatitude(output[1]);
        }
        images = doc.select("a.listing_img.galerie_listing");
        for (Element e : images) {
            obj.images.add(e.attr("href"));
        }
        details = doc.select("div#listing_apercu > div");
        for (Element e : details) {
            obj.titles.add(e.select("label").text());
            // Insert commas between run-together values: replace the character
            // before each inner capital letter with ','.
            String x = e.select("p > span").text();
            for (int j = 1; j < x.length(); j++) {
                if (Character.isUpperCase(x.charAt(j))) {
                    x = changeCharInPosition(j - 1, ',', x);
                }
            }
            obj.details.add(x);
        }
        String json = gson.toJson(obj);
        listObj.add(json);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
/**
 * Returns a copy of {@code str} with the character at index {@code position}
 * replaced by {@code ch}. The input string itself is never modified.
 *
 * @throws ArrayIndexOutOfBoundsException if {@code position} is out of range
 */
public String changeCharInPosition(int position, char ch, String str){
    final char[] chars = str.toCharArray();
    chars[position] = ch;
    return String.valueOf(chars);
}
}
any idea about the problem?!

Java output JSON to CSV file

I'm not too familiar with how to output files back to the client with Java. I am trying to create a CSV file to be sent back to the client and opened in Excel.
I found this tool for the server side creation. I'm not sure exactly how to use it to return the actual file though. Here is a sample of code I have used to return a txt file that I think I can use parts of the response for, but I'm not fetching a file anymore since I'm creating this CSV so I'm not sure what I can use.
In the code below my biggest question is what do I have to return with the controller and how do I accomplish that? I'm not sure what I need to be returning between that and also from the CSV writer to the controller. Any help would be appreciated.
Here's my code so far:
Controller:
#RequestMapping(value = "/web/csvexport", method = RequestMethod.POST)
protected void processCSV(HttpServletRequest request, HttpServletResponse response, #RequestBody String jsonRequest)
throws ServletException, IOException {
response.setContentType("text/html;charset=UTF-8");
try {
CSVWriter csvWriter = new CSVWriter();
JsonFlattener jsonFlattener = new JsonFlattener();
String fileName = "StandardQuery";
csvWriter.writeAsCSV(jsonFlattener.parseJson(jsonRequest), fileName);
} catch (Exception e) {
System.out.println("Exception: " + e);
}
}
CVS Writer:
/**
 * Serializes flattened JSON (a list of header-to-value maps) into a CSV file.
 *
 * Improvements: row assembly uses a StringBuilder instead of O(n^2) String
 * concatenation; the file is written in explicit UTF-8 instead of the platform
 * default charset; and String.join replaces the commons-lang StringUtils.join
 * dependency.
 */
public class CSVWriter {

    /**
     * Writes {@code flatJson} as CSV to {@code fileName}: one sorted header
     * row, then one row per map. Commas inside values are stripped.
     */
    public void writeAsCSV(List<Map<String, String>> flatJson, String fileName) throws FileNotFoundException {
        Set<String> headers = collectHeaders(flatJson);
        StringBuilder output = new StringBuilder();
        output.append(String.join(",", headers)).append("\n");
        for (Map<String, String> map : flatJson) {
            output.append(getCommaSeperatedRow(headers, map)).append("\n");
        }
        writeToFile(output.toString(), fileName);
    }

    /** Writes the prepared CSV text to disk in UTF-8. */
    private void writeToFile(String output, String fileName) throws FileNotFoundException {
        BufferedWriter writer = null;
        try {
            // Explicit UTF-8 instead of FileWriter's platform-default charset.
            writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8));
            writer.write(output);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            close(writer);
        }
    }

    /** Quietly closes the writer (flushes buffered content). */
    private void close(BufferedWriter writer) {
        try {
            if (writer != null) {
                writer.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Builds one CSV row; missing keys become empty cells, commas are stripped. */
    private String getCommaSeperatedRow(Set<String> headers, Map<String, String> map) {
        List<String> items = new ArrayList<String>();
        for (String header : headers) {
            String value = map.get(header) == null ? "" : map.get(header).replace(",", "");
            items.add(value);
        }
        return String.join(",", items);
    }

    /** Union of all keys across all rows, sorted (TreeSet) for a stable column order. */
    private Set<String> collectHeaders(List<Map<String, String>> flatJson) {
        Set<String> headers = new TreeSet<String>();
        for (Map<String, String> map : flatJson) {
            headers.addAll(map.keySet());
        }
        return headers;
    }
}
Json Flattener:
// Flattens arbitrarily nested JSON (org.json objects/arrays) into a single
// flat map of string keys to string values.
public class JsonFlattener {
// Flattens one JSON object into a fresh map (top-level keys get no prefix).
public Map<String, String> parse(JSONObject jsonObject) {
Map<String, String> flatJson = new HashMap<String, String>();
flatten(jsonObject, flatJson, "");
return flatJson;
}
// Flattens a JSON array of objects: one flat map per array element.
public List<Map<String, String>> parse(JSONArray jsonArray) {
List<Map<String, String>> flatJson = new ArrayList<Map<String, String>>();
int length = jsonArray.length();
for (int i = 0; i < length; i++) {
JSONObject jsonObject = jsonArray.getJSONObject(i);
Map<String, String> stringMap = parse(jsonObject);
flatJson.add(stringMap);
}
return flatJson;
}
// Entry point: tries to parse the raw string as an object first; if that
// fails (JSONException), falls back to treating it as an array.
public List<Map<String, String>> parseJson(String json) throws Exception {
List<Map<String, String>> flatJson = null;
try {
JSONObject jsonObject = new JSONObject(json);
flatJson = new ArrayList<Map<String, String>>();
flatJson.add(parse(jsonObject));
} catch (JSONException je) {
flatJson = handleAsArray(json);
}
return flatJson;
}
// Array fallback for parseJson; wraps any parse failure in a generic Exception.
private List<Map<String, String>> handleAsArray(String json) throws Exception {
List<Map<String, String>> flatJson = null;
try {
JSONArray jsonArray = new JSONArray(json);
flatJson = parse(jsonArray);
} catch (Exception e) {
throw new Exception("Json might be malformed");
}
return flatJson;
}
// Recursively flattens an array. Note the asymmetry: nested arrays extend the
// prefix with the 0-based index i, while scalars/objects use the 1-based i+1.
private void flatten(JSONArray obj, Map<String, String> flatJson, String prefix) {
int length = obj.length();
for (int i = 0; i < length; i++) {
if (obj.get(i).getClass() == JSONArray.class) {
JSONArray jsonArray = (JSONArray) obj.get(i);
if (jsonArray.length() < 1) continue;
flatten(jsonArray, flatJson, prefix + i);
} else if (obj.get(i).getClass() == JSONObject.class) {
JSONObject jsonObject = (JSONObject) obj.get(i);
flatten(jsonObject, flatJson, prefix + (i + 1));
} else {
String value = obj.getString(i);
if (value != null)
flatJson.put(prefix + (i + 1), value);
}
}
}
// Recursively flattens an object. Scalar values are stored as prefix + key;
// string "null" values are dropped.
// NOTE(review): nested objects are recursed with the UNCHANGED prefix (the key
// is not appended), so sibling keys from different nested objects can collide
// and overwrite each other in flatJson — confirm this is intended.
private void flatten(JSONObject obj, Map<String, String> flatJson, String prefix) {
// NOTE(review): raw Iterator — keys() is untyped in org.json, hence toString below.
Iterator iterator = obj.keys();
while (iterator.hasNext()) {
String key = iterator.next().toString();
if (obj.get(key).getClass() == JSONObject.class) {
JSONObject jsonObject = (JSONObject) obj.get(key);
flatten(jsonObject, flatJson, prefix);
} else if (obj.get(key).getClass() == JSONArray.class) {
JSONArray jsonArray = (JSONArray) obj.get(key);
if (jsonArray.length() < 1) continue;
flatten(jsonArray, flatJson, key);
} else {
String value = obj.getString(key);
if (value != null && !value.equals("null"))
flatJson.put(prefix + key, value);
}
}
}
}
Here's the service that I'm calling the controller from. I used this to return a .txt file before so I'm not sure how usable it is, but I think if I stream the file back it will handle it...:
getFile: function(jsonObj, fileName) {
var _defer = $q.defer();
$http.post("/web/csvexport/", jsonObj).success(function(data, status, headers) {
var octetStreamMime = "application/octet-stream";
// Get the headers
headers = headers();
// Get the filename from the x-filename header or default to "download.bin"
//var filename = headers["x-filename"] || "logfile.log";
var filename = fileName;
// Determine the content type from the header or default to "application/octet-stream"
var contentType = headers["content-type"] || octetStreamMime;
if(navigator.msSaveBlob)
{
// Save blob is supported, so get the blob as it's contentType and call save.
var blob = new Blob([data], { type: contentType });
navigator.msSaveBlob(blob, filename);
console.log("SaveBlob Success");
}
else
{
// Get the blob url creator
var urlCreator = window.URL || window.webkitURL || window.mozURL || window.msURL;
if(urlCreator)
{
// Try to use a download link
var link = document.createElement("a");
if("download" in link)
{
// Prepare a blob URL
var blob = new Blob([data], { type: contentType });
var url = urlCreator.createObjectURL(blob);
link.setAttribute("href", url);
// Set the download attribute (Supported in Chrome 14+ / Firefox 20+)
link.setAttribute("download", filename);
// Simulate clicking the download link
var event = document.createEvent('MouseEvents');
event.initMouseEvent('click', true, true, window, 1, 0, 0, 0, 0, false, false, false, false, 0, null);
link.dispatchEvent(event);
console.log("Download link Success");
} else {
// Prepare a blob URL
// Use application/octet-stream when using window.location to force download
var blob = new Blob([data], { type: octetStreamMime });
var url = urlCreator.createObjectURL(blob);
window.location = url;
console.log("window.location Success");
}
} else {
console.log("Not supported");
}
}
Firstly, why don't you use the CSV MIME type instead of HTML?
replace
response.setContentType("text/html;charset=UTF-8");
by
response.setContentType("text/csv");
And do you know that Jackson, Java JSON API handle CSV ? see
https://github.com/FasterXML/jackson-dataformat-csv
Finally, in the controller you need to use the PrintWriter from the response to write the CSV.
Don't forget to prefer streaming or buffered writing to handle large files and get better performance.

How do I get all the attachments from a .nsf(lotus notes) file using java

Steps followed :
Took a back of my lotus notes as sample.nsf
And then tried to read the attachments from the sample.nsf
Code snippet :
// Open the local backup replica of the mail file directly by path.
Database db = session.getDatabase("","C:\\Projects\\NotesToJava\\sample.nsf");
DocumentCollection dc = db.getAllDocuments();
Document doc = dc.getFirstDocument();
// Walk every document and probe its Body rich-text item for one attachment name.
while (doc != null) {
// NOTE(review): getFirstItem("Body") returns null when a document has no Body
// item, which would NPE on the next line — confirm all documents have one.
RichTextItem body = (RichTextItem) doc.getFirstItem("Body");
if (body.getEmbeddedObject("Request.xlsx") != null)
System.out.println("Found BPM_Dev_Access_Request.xlsx in " + doc.getItemValueString("Subject"));
doc = dc.getNextDocument();
}
No need to use evaluate, look up the extractFile in the Lotus Designer Help
From the Lotus help:
import lotus.domino.*;
import java.util.Vector;
import java.util.Enumeration;
// Lotus Designer Help sample: a Notes agent that walks every document in the
// current database, extracts each file attachment from the Body rich-text
// item to c:\extracts\, removes the attachment from the document, and saves
// the document if anything was removed.
public class JavaAgent extends AgentBase {
public void NotesMain() {
try {
Session session = getSession();
AgentContext agentContext = session.getAgentContext();
// (Your code goes here)
Database db = agentContext.getCurrentDatabase();
DocumentCollection dc = db.getAllDocuments();
Document doc = dc.getFirstDocument();
// Tracks whether the current document was modified and must be re-saved.
boolean saveFlag = false;
while (doc != null) {
RichTextItem body =
(RichTextItem)doc.getFirstItem("Body");
System.out.println(doc.getItemValueString("Subject"));
// Embedded objects include attachments, OLE objects and links;
// only EMBED_ATTACHMENT entries are extracted below.
Vector v = body.getEmbeddedObjects();
Enumeration e = v.elements();
while (e.hasMoreElements()) {
EmbeddedObject eo = (EmbeddedObject)e.nextElement();
if (eo.getType() == EmbeddedObject.EMBED_ATTACHMENT) {
// Extract under the attachment's original file name.
eo.extractFile("c:\\extracts\\" + eo.getSource());
eo.remove();
saveFlag = true;
}
}
if (saveFlag) {
// Persist the removal of the extracted attachments.
doc.save(true, true);
saveFlag = false;
}
doc = dc.getNextDocument();
}
} catch(NotesException e) {
System.out.println(e.id + " " + e.text);
e.printStackTrace();
}
}
}
You need to get the attachments out of each document, as opposed to the EmbeddedObjects. Something like this:
import java.util.Iterator;
import lotus.domino.*;
// Iterates over the attachment NAMES of a single Notes document, using Notes
// formula evaluation rather than walking embedded objects.
// NOTE(review): implements the raw Iterator interface; Iterator<String> would
// avoid the Object returns below.
public final class DocAttachmentParser implements Iterator {
private Session s;
private Document doc;
// Number of attachments, as returned by the formula (Notes evaluate yields Double).
private Double count ;
private Iterator attIterator = null;
public Double getCount() {
return count;
}
// Evaluates the attachment-count and attachment-name formulas against doc.
// NOTE(review): the Notes formula language functions are @Attachments and
// @AttachmentNames; the '#' in these literals looks like a markdown-escaping
// artifact of the paste — confirm before running.
public DocAttachmentParser(Session s, Document doc) throws NotesException {
this.s = s;
this.doc = doc;
if (s!=null && doc !=null){
this.count = (Double) s.evaluate("#Attachments", doc).elementAt(0);
if (count.intValue() > 0){
attIterator = s.evaluate("#AttachmentNames", doc).iterator();
}
}
}
public boolean hasNext() {
// attIterator is only initialized when count > 0, hence the guard.
return count.intValue() > 0 ? attIterator.hasNext(): false;
}
public Object next() {
return count.intValue() > 0 ? attIterator.next(): null;
}
// Like next(), but returns the attachment name as a String.
private String nextAttName(){
return count.intValue() > 0 ? attIterator.next().toString(): null;
}
public void remove() {
if (count.intValue() > 0) attIterator.remove();
}
// Concatenates all attachment names into one string (no separator).
public String getAll(){
StringBuilder sb = new StringBuilder();
if (count.intValue()>0){
while (hasNext()) {
sb.append(nextAttName());
}
}
return sb.toString();
}
}
To get all attachments from a Notes document, I wrote this method (part of my class).
This method takes the document and extracts the attachments (rich-text fields) from the Notes document. It also deduplicates: if two documents contain the same attachment, it is extracted only once.
Here you just have to set "filePath" to the directory where your attachments should be extracted.
public boolean export(Document doc ) throws NotesException {
if (doc.hasEmbedded()) {
Vector<Item> allItems;
allItems = doc.getItems();
HashSet<String> attNames = new HashSet<String>();
for (int i = 0; i < allItems.size(); i++) {
Item item = allItems.get(i);
if (item.getType() == Item.RICHTEXT) {
RichTextItem riItem = (RichTextItem) item;
Vector emb = riItem.getEmbeddedObjects();
String[] doublette = new String[emb.size()];
Set<String> atts = new HashSet<String>();
for (int j = 0; j < emb.size(); j++) {
EmbeddedObject embObj = (EmbeddedObject) emb.get(j);
if (!attNames.contains(embObj.getName())) {
attNames.add(embObj.getName());
StringBuffer test = new StringBuffer(
embObj.getSource());
test.append('-');
test.append(embObj.getName());
test.append('-');
test.append(embObj.getFileSize());
String attDesc = test.toString();
if (atts.contains(attDesc)) {
doublette[j] = attDesc;
} else {
doublette[j] = "";
atts.add(attDesc);
}
}
}
for (int j = 0; j < emb.size(); j++) {
try {
EmbeddedObject embObj = (EmbeddedObject) emb.get(j);
String itemName = riItem.getName();
bOk = extractFile(embObj, itemName);
embObj.recycle();
} catch (NotesException e) {
bOk = false;
if (!"".equals(doublette[j])) {
bOk = true;
System.out.println(" duplicated attachment:")
log.append(doublette[j]);
}
}
}
}
}
}
return bOk;
}
/**
 * Extracts a single embedded attachment into {@code this.filesPath}, deleting
 * any stale file of the same name first. Non-attachment embedded objects are
 * ignored and reported as success.
 *
 * @return false only when the pre-existing file could not be removed
 */
private boolean extractFile(EmbeddedObject embObj, String itemName)
        throws NotesException {
    if (embObj.getType() != EmbeddedObject.EMBED_ATTACHMENT) {
        return true;
    }
    final String targetPath = this.filesPath + embObj.getName();
    // Remove any stale copy before extracting; abort on failure.
    if (!FileUtils.killFile(targetPath, false, true, true)) {
        System.out.println(", error in kill: " + targetPath);
        return false;
    }
    embObj.extractFile(targetPath);
    return true;
}
Easy way to get all the attachments from Lotus Notes using Java.
// Iterate over the attachment names reported by the @AttachmentNames formula.
Document doc = dc.getFirstDocument();
// NOTE(review): 'var' requires Java 10+; classic Notes agents may not compile this.
for(var att :session.evaluate("#AttachmentNames", doc)){
if (att == null || att.toString().isEmpty()) {
continue;
}
EmbeddedObject eb = doc.getAttachment(att.toString());
System.out.println(eb.getName());
System.out.println(eb.getFileSize());
// NOTE(review): every attachment is extracted to the SAME file "a.txt",
// overwriting the previous one — use eb.getName() (or another unique name)
// to keep all attachments.
eb.extractFile("a.txt");// give file name what u want
}

Categories