Unable to get all JSONObjects using for loop in Java - java

First, I have two lists:
two ArrayLists of type JSONObject — jsonArrayResponse and jsonArraySubResponse.
Note: both lists have the same size.
Here is my code to get place data and insert it into an array:
for (int i = 0; i<jsonArrayResponse.size() - 1; i++) {
try {
JSONObject objectDictionary = jsonArrayResponse.get(i);
JSONObject objectSubDictionary = jsonArraySubResponse.get(i);
PlaceObject placeObject = new PlaceObject();
placeObject.setPlace_id(objectDictionary.getString("place_id"));
placeObject.setLat(objectDictionary.getJSONObject("geometry").getJSONObject("location").getDouble("lat"));
placeObject.setLng(objectDictionary.getJSONObject("geometry").getJSONObject("location").getDouble("lng"));
placeObject.setName(objectDictionary.getString("name"));
if (objectDictionary.has("photos")) {
JSONObject photoReferenceObject = objectDictionary.getJSONArray("photos").getJSONObject(0);
if (photoReferenceObject.has("photo_reference")) {
placeObject.setPhotoReference(photoReferenceObject.getString("photo_reference"));
}
}
if (objectSubDictionary.has("vicinity")) {
placeObject.setVicinity(objectSubDictionary.getString("vicinity"));
}
if (objectSubDictionary.has("formatted_address")) {
placeObject.setFormatted_address(objectSubDictionary.getString("formatted_address"));
}
if (objectSubDictionary.has("formatted_phone_number")) {
placeObject.setFormatted_phone_number(objectSubDictionary.getString("formatted_phone_number"));
}
if (objectSubDictionary.has("international_phone_number")) {
placeObject.setInternational_phone_number(objectSubDictionary.getString("international_phone_number"));
}
if (objectSubDictionary.has("url")) {
placeObject.setUrl(objectSubDictionary.getString("url"));
}
if (objectSubDictionary.has("website")) {
placeObject.setWebsite(objectSubDictionary.getString("website"));
}
if (objectSubDictionary.has("reviews")) {
ArrayList<Reviews> reviews = new ArrayList<Reviews>();
for (int j = 0; j<objectSubDictionary.getJSONArray("reviews").length(); j++) {
Reviews reviewObject = new Reviews();
JSONObject review = objectSubDictionary.getJSONArray("reviews").getJSONObject(j);
reviewObject.setAuthor_name(review.getString("author_name"));
if (review.has("rating")) {
reviewObject.setRating(review.getString("rating"));
}
reviewObject.setText(review.getString("text"));
if (review.has("type")) {
reviewObject.setType(review.getString("type"));
}
reviews.add(reviewObject);
}
placeObject.setReviews(reviews);
}
if (objectDictionary.has("opening_hours")) {
ArrayList<String> days = new ArrayList<String>();
for (int z = 0; z<objectDictionary.getJSONObject("opening_hours").getJSONArray("reviews").length(); z++) {
String day = objectDictionary.getJSONObject("opening_hours").getJSONArray("reviews").getString(z);
days.add(day);
}
placeObject.setWeekday_text(days);
}
if (objectDictionary.has("opening_hours")) {
if (objectDictionary.getJSONObject("opening_hours").has("open_now")) {
placeObject.setOpen_now(objectDictionary.getJSONObject("opening_hours").getBoolean("open_now"));
}
}
Float distanceInKilometers = distance(objectDictionary.getJSONObject("geometry").getJSONObject("location").getDouble("lat"), objectDictionary.getJSONObject("geometry").getJSONObject("location").getDouble("lng"), Global.loc.getLatitude(), Global.loc.getLongitude()) / 1000;
placeObject.setDistance(distanceInKilometers.doubleValue());
if (objectDictionary.has("rating")) {
placeObject.setRating(objectDictionary.getDouble("rating"));
}
if (objectDictionary.has("price_level")) {
placeObject.setPrice_level(objectDictionary.getInt("price_level"));
}
placeObjects.add(placeObject);
if (i == jsonArrayResponse.size() - 1) {
this.placeObjectsResopones = placeObjects;
this.placeObjects = placeObjects;
}
} catch (JSONException e) {
e.printStackTrace();
}
}
Sample Json data:
1) Places Search request:
{
"html_attributions" : [],
"results" : [
{
"geometry" : {
"location" : {
"lat" : -33.870775,
"lng" : 151.199025
}
},
"icon" : "http://maps.gstatic.com/mapfiles/place_api/icons/travel_agent-71.png",
"id" : "21a0b251c9b8392186142c798263e289fe45b4aa",
"name" : "Rhythmboat Cruises",
"opening_hours" : {
"open_now" : true
},
"photos" : [
{
"height" : 270,
"html_attributions" : [],
"photo_reference" : "CnRnAAAAF-LjFR1ZV93eawe1cU_3QNMCNmaGkowY7CnOf-kcNmPhNnPEG9W979jOuJJ1sGr75rhD5hqKzjD8vbMbSsRnq_Ni3ZIGfY6hKWmsOf3qHKJInkm4h55lzvLAXJVc-Rr4kI9O1tmIblblUpg2oqoq8RIQRMQJhFsTr5s9haxQ07EQHxoUO0ICubVFGYfJiMUPor1GnIWb5i8",
"width" : 519
}
],
"place_id" : "ChIJyWEHuEmuEmsRm9hTkapTCrk",
"scope" : "GOOGLE",
"alt_ids" : [
{
"place_id" : "D9iJyWEHuEmuEmsRm9hTkapTCrk",
"scope" : "APP"
}
],
"reference" : "CoQBdQAAAFSiijw5-cAV68xdf2O18pKIZ0seJh03u9h9wk_lEdG-cP1dWvp_QGS4SNCBMk_fB06YRsfMrNkINtPez22p5lRIlj5ty_HmcNwcl6GZXbD2RdXsVfLYlQwnZQcnu7ihkjZp_2gk1-fWXql3GQ8-1BEGwgCxG-eaSnIJIBPuIpihEhAY1WYdxPvOWsPnb2-nGb6QGhTipN0lgaLpQTnkcMeAIEvCsSa0Ww",
"types" : [ "travel_agency", "restaurant", "food", "establishment" ],
"vicinity" : "Pyrmont Bay Wharf Darling Dr, Sydney"
},...
],
"status" : "OK"
}
2) Place Details request:
{
"html_attributions" : [],
"result" : {
"address_components" : [
{
"long_name" : "48",
"short_name" : "48",
"types" : [ "street_number" ]
},
{
"long_name" : "Pirrama Road",
"short_name" : "Pirrama Road",
"types" : [ "route" ]
},
{
"long_name" : "Pyrmont",
"short_name" : "Pyrmont",
"types" : [ "locality", "political" ]
},
{
"long_name" : "NSW",
"short_name" : "NSW",
"types" : [ "administrative_area_level_1", "political" ]
},
{
"long_name" : "AU",
"short_name" : "AU",
"types" : [ "country", "political" ]
},
{
"long_name" : "2009",
"short_name" : "2009",
"types" : [ "postal_code" ]
}
],
"formatted_address" : "48 Pirrama Road, Pyrmont NSW, Australia",
"formatted_phone_number" : "(02) 9374 4000",
"geometry" : {
"location" : {
"lat" : -33.8669710,
"lng" : 151.1958750
},
"viewport" : {
"northeast" : {
"lat" : -33.8665053,
"lng" : 151.1960371
},
"southwest" : {
"lat" : -33.8669293,
"lng" : 151.1952183
}
}
},
"icon" : "http://maps.gstatic.com/mapfiles/place_api/icons/generic_business-71.png",
"id" : "4f89212bf76dde31f092cfc14d7506555d85b5c7",
"international_phone_number" : "+61 2 9374 4000",
"name" : "Google Sydney",
"place_id" : "ChIJN1t_tDeuEmsRUsoyG83frY4",
"scope" : "GOOGLE",
"alt_ids" : [
{
"place_id" : "D9iJyWEHuEmuEmsRm9hTkapTCrk",
"scope" : "APP"
}
],
"rating" : 4.70,
"reference" : "CnRsAAAA98C4wD-VFvzGq-KHVEFhlHuy1TD1W6UYZw7KjuvfVsKMRZkbCVBVDxXFOOCM108n9PuJMJxeAxix3WB6B16c1p2bY1ZQyOrcu1d9247xQhUmPgYjN37JMo5QBsWipTsnoIZA9yAzA-0pnxFM6yAcDhIQbU0z05f3xD3m9NQnhEDjvBoUw-BdcocVpXzKFcnMXUpf-nkyF1w",
"reviews" : [
{
"aspects" : [
{
"rating" : 3,
"type" : "quality"
}
],
"author_name" : "Simon Bengtsson",
"author_url" : "https://plus.google.com/104675092887960962573",
"language" : "en",
"rating" : 5,
"text" : "Just went inside to have a look at Google. Amazing.",
"time" : 1338440552869
},
{
"aspects" : [
{
"rating" : 3,
"type" : "quality"
}
],
"author_name" : "Felix Rauch Valenti",
"author_url" : "https://plus.google.com/103291556674373289857",
"language" : "en",
"rating" : 5,
"text" : "Best place to work :-)",
"time" : 1338411244325
},
{
"aspects" : [
{
"rating" : 3,
"type" : "quality"
}
],
"author_name" : "Chris",
"language" : "en",
"rating" : 5,
"text" : "Great place to work, always lots of free food!",
"time" : 1330467089039
}
],
"types" : [ "establishment" ],
"url" : "http://maps.google.com/maps/place?cid=10281119596374313554",
"vicinity" : "48 Pirrama Road, Pyrmont",
"website" : "http://www.google.com.au/"
},
"status" : "OK"
}
My problem is that I get missing data, and I think entries get skipped if the data takes time to process. How can I improve this to make sure I get all the data for each place object?

I note that
inside your loop this condition will not happen
if (i == jsonArrayResponse.size() - 1) {
...
}
because your loop never reaches i == jsonArrayResponse.size() - 1.
Try adding = to the loop condition, like this:
for (int i = 0; i <= jsonArrayResponse.size() - 1; i++) { ...}
Maybe this is the reason for the missing last element of the array.

Please provide sample data. I will try using GSON.

Related

What is meant by processedWithError in the report task manager?

I already ingested the file into Druid, and it reports that the ingestion succeeded. However, when I checked the ingestion report, every row was processed with an error, yet the datasource is displayed in the "Datasource" tab.
I have tried to minimise the rows from 20M to 20 rows only. Here is my configuration file:
"type" : "index",
"spec" : {
"ioConfig" : {
"type" : "index",
"firehose" : {
"type" : "local",
"baseDir" : "/home/data/Salutica",
"filter" : "outDashboard2RawV3.csv"
}
},
"dataSchema" : {
"dataSource": "DaTRUE2_Dashboard_V3",
"granularitySpec" : {
"type" : "uniform",
"segmentGranularity" : "WEEK",
"queryGranularity" : "none",
"intervals" : ["2017-05-08/2019-05-17"],
"rollup" : false
},
"parser" : {
"type" : "string",
"parseSpec": {
"format" : "csv",
"timestampSpec" : {
"column" : "Date_Time",
"format" : "auto"
},
"columns" : [
"Main_ID","Parameter_ID","Date_Time","Serial_Number","Status","Station_ID",
"Station_Type","Parameter_Name","Failed_Date_Time","Failed_Measurement",
"Database_Name","Date_Time_Year","Date_Time_Month",
"Date_Time_Day","Date_Time_Hour","Date_Time_Weekday","Status_New"
],
"dimensionsSpec" : {
"dimensions" : [
"Date_Time","Serial_Number","Status","Station_ID",
"Station_Type","Parameter_Name","Failed_Date_Time",
"Failed_Measurement","Database_Name","Status_New",
{
"name" : "Main_ID",
"type" : "long"
},
{
"name" : "Parameter_ID",
"type" : "long"
},
{
"name" : "Date_Time_Year",
"type" : "long"
},
{
"name" : "Date_Time_Month",
"type" : "long"
},
{
"name" : "Date_Time_Day",
"type" : "long"
},
{
"name" : "Date_Time_Hour",
"type" : "long"
},
{
"name" : "Date_Time_Weekday",
"type" : "long"
}
]
}
}
},
"metricsSpec" : [
{
"name" : "count",
"type" : "count"
}
]
},
"tuningConfig" : {
"type" : "index",
"partitionsSpec" : {
"type" : "hashed",
"targetPartitionSize" : 5000000
},
"jobProperties" : {}
}
}
}
Report:
{"ingestionStatsAndErrors":{"taskId":"index_DaTRUE2_Dashboard_V3_2019-09-10T01:16:47.113Z","payload":{"ingestionState":"COMPLETED","unparseableEvents":{},"rowStats":{"determinePartitions":{"processed":0,"processedWithError":0,"thrownAway":0,"unparseable":0},"buildSegments":{"processed":0,"processedWithError":20606701,"thrownAway":0,"unparseable":1}},"errorMsg":null},"type":"ingestionStatsAndErrors"}}
I'm expecting this:
{"processed":20606701,"processedWithError":0,"thrownAway":0,"unparseable":1}},"errorMsg":null},"type":"ingestionStatsAndErrors"}}
instead of this:
{"processed":0,"processedWithError":20606701,"thrownAway":0,"unparseable":1}},"errorMsg":null},"type":"ingestionStatsAndErrors"}}
Below is my input data from csv;
"Main_ID","Parameter_ID","Date_Time","Serial_Number","Status","Station_ID","Station_Type","Parameter_Name","Failed_Date_Time","Failed_Measurement","Database_Name","Date_Time_Year","Date_Time_Month","Date_Time_Day","Date_Time_Hour","Date_Time_Weekday","Status_New"
1,3,"2018-10-05 15:00:55","1840SDF00038","Passed","ST1","BLTBoard","1.8V","","","DaTRUE2Left",2018,10,5,15,"Friday","Passed"
1,4,"2018-10-05 15:00:55","1840SDF00038","Passed","ST1","BLTBoard","1.35V","","","DaTRUE2Left",2018,10,5,15,"Friday","Passed"
1,5,"2018-10-05 15:00:55","1840SDF00038","Passed","ST1","BLTBoard","Isc_VChrg","","","DaTRUE2Left",2018,10,5,15,"Friday","Passed"
1,6,"2018-10-05 15:00:55","1840SDF00038","Passed","ST1","BLTBoard","Isc_VBAT","","","DaTRUE2Left",2018,10,5,15,"Friday","Passed"

Elasticsearch geo search strange behavior

A few days ago I faced with the strange behavior of geo search in Elasticsearch.
I use AWS managed ES 5.5, obviously over REST interface.
Assume we have 200k objects with location info represented as the point only. I use geo search to find the points within multiple polygons. They are shown on the image below. Coordinates were extracted from final request to the ES.
The request is built using official Java High-level REST client. The request query will be attached below.
I want to search for all objects within at least one polygon.
Here is the query (real fields names and values were replaced by stub, Except location and locationPoint.coordinates)
{
"size" : 20,
"query" : {
"constant_score" : {
"filter" : {
"bool" : {
"must" : [
{
"terms" : {
"field1" : [
"a",
"b",
"c",
"d",
"e",
"f"
],
"boost" : 1.0
}
},
{
"term" : {
"field2" : {
"value" : "q",
"boost" : 1.0
}
}
},
{
"range" : {
"field3" : {
"from" : "10",
"to" : null,
"include_lower" : true,
"include_upper" : true,
"boost" : 1.0
}
}
},
{
"range" : {
"field4" : {
"from" : "10",
"to" : null,
"include_lower" : true,
"include_upper" : true,
"boost" : 1.0
}
}
},
{
"geo_shape" : {
"location" : {
"shape" : {
"type" : "geometrycollection",
"geometries" : [
{
"type" : "multipolygon",
"orientation" : "right",
"coordinates" : [
[
// coords here
]
]
},
{
"type" : "polygon",
"orientation" : "right",
"coordinates" : [
[
// coords here
]
]
},
{
"type" : "polygon",
"orientation" : "right",
"coordinates" : [
[
// coords here
]
]
},
{
"type" : "polygon",
"orientation" : "right",
"coordinates" : [
[
// coords here
]
]
}
]
},
"relation" : "intersects"
},
"ignore_unmapped" : false,
"boost" : 1.0
}
}
]
}
},
"boost" : 1.0
}
},
"_source" : {
"includes" : [
"field1",
"field2",
"field3",
"field4",
"field8"
],
"excludes" : [ ]
},
"sort" : [
{
"field1" : {
"order" : "desc"
}
}
],
"aggregations" : {
"agg1" : {
"terms" : {
"field" : "field1",
"size" : 10000,
"min_doc_count" : 1,
"shard_min_doc_count" : 0,
"show_term_doc_count_error" : false,
"order" : [
{
"_count" : "desc"
},
{
"_term" : "asc"
}
]
}
},
"agg2" : {
"terms" : {
"field" : "field2",
"size" : 10000,
"min_doc_count" : 1,
"shard_min_doc_count" : 0,
"show_term_doc_count_error" : false,
"order" : [
{
"_count" : "desc"
},
{
"_term" : "asc"
}
]
}
},
"agg3" : {
"terms" : {
"field" : "field3",
"size" : 10000,
"min_doc_count" : 1,
"shard_min_doc_count" : 0,
"show_term_doc_count_error" : false,
"order" : [
{
"_count" : "desc"
},
{
"_term" : "asc"
}
]
}
},
"agg4" : {
"terms" : {
"field" : "field4",
"size" : 10000,
"min_doc_count" : 1,
"shard_min_doc_count" : 0,
"show_term_doc_count_error" : false,
"order" : [
{
"_count" : "desc"
},
{
"_term" : "asc"
}
]
}
},
"agg5" : {
"terms" : {
"field" : "field5",
"size" : 10000,
"min_doc_count" : 1,
"shard_min_doc_count" : 0,
"show_term_doc_count_error" : false,
"order" : [
{
"_count" : "desc"
},
{
"_term" : "asc"
}
]
}
},
"agg6" : {
"terms" : {
"field" : "field6",
"size" : 10000,
"min_doc_count" : 1,
"shard_min_doc_count" : 0,
"show_term_doc_count_error" : false,
"order" : [
{
"_count" : "desc"
},
{
"_term" : "asc"
}
]
}
},
"agg7" : {
"terms" : {
"field" : "field7",
"size" : 10000,
"min_doc_count" : 1,
"shard_min_doc_count" : 0,
"show_term_doc_count_error" : false,
"order" : [
{
"_count" : "desc"
},
{
"_term" : "asc"
}
]
}
},
"agg8" : {
"terms" : {
"field" : "field8",
"size" : 10000,
"min_doc_count" : 1,
"shard_min_doc_count" : 0,
"show_term_doc_count_error" : false,
"order" : [
{
"_count" : "desc"
},
{
"_term" : "asc"
}
]
}
},
"map_center" : {
"geo_centroid" : {
"field" : "locationPoint.coordinates"
}
},
"map_bound" : {
"geo_bounds" : {
"field" : "locationPoint.coordinates",
"wrap_longitude" : true
}
}
}
}
Note, that field location is mapped as geo_shape and field location.coordinates is mapped as geo_point.
So the problem is next. Below the results (hits count) of requests are presented. Only polygons are changing.
# Polygons Hits count
1) 1,2,3,4 5565
2) 1 4897
3) 3,4 75
4) 2 9
5) 1,3,4 5543
6) 1,2 5466
7) 2,3,4 84
So, if I add results of polygon 1st with 2,3,4 polygons I will not obtain the number as it was in full request.
For example, #1 != #2 + #7, also #1 != #5 + #4, but #7 == #4 + #3
I cannot understand whether it is the issue in this request or expected behavior or even bug in ES.
Can anyone help me to understand the logic of such ES behavior or point to the solution?
Thanks!
After a short conversation with Elasticsearch team member, we come up to AWS.
Build hashes of AWS and pure ES is not equal so, ES is modified by AWS team and we do not know exact changes. There can be some changes that might affect search in posted question.
Need to reproduce this behavior on pure ES cluster before we will continue our conversation.

Google direction api skipping information about small turns

Google direction api skipping information about small turns .
for example following information about this turn is skipping
i am using this code
https://maps.googleapis.com/maps/api/directions/json?origin=30.6545095,76.8163058&destination=paras%20,downtown%20zirakpur&key=mykey&optimize:true
{
"geocoded_waypoints" : [
{
"geocoder_status" : "OK",
"place_id" : "ChIJb4RRbjbrDzkRJ6rnllrRaHo",
"types" : [ "political", "sublocality", "sublocality_level_1" ]
},
{
"geocoder_status" : "OK",
"partial_match" : true,
"place_id" : "ChIJBQuF-jDrDzkR49h1v8UzbpM",
"types" : [ "premise" ]
}
],
"routes" : [
{
"bounds" : {
"northeast" : {
"lat" : 30.6545368,
"lng" : 76.8166615
},
"southwest" : {
"lat" : 30.6482275,
"lng" : 76.8120363
}
},
"copyrights" : "Map data ©2016 Google",
"legs" : [
{
"distance" : {
"text" : "1.4 km",
"value" : 1415
},
"duration" : {
"text" : "6 mins",
"value" : 371
},
"end_address" : "Paras Down Town Square mall, Green Enclave Rd, Badal Colony, Zirakpur, Punjab 140603, India",
"end_location" : {
"lat" : 30.6483638,
"lng" : 76.8166615
},
"start_address" : "Utrathiya, Zirakpur, Punjab, India",
"start_location" : {
"lat" : 30.6545368,
"lng" : 76.8162957
},
"steps" : [
{
"distance" : {
"text" : "0.2 km",
"value" : 153
},
"duration" : {
"text" : "1 min",
"value" : 25
},
"end_location" : {
"lat" : 30.6535831,
"lng" : 76.8151584
},
"html_instructions" : "Head \u003cb\u003esouthwest\u003c/b\u003e",
"polyline" : {
"points" : "{ebzD{dzsM#DDJNTfDzD"
},
"start_location" : {
"lat" : 30.6545368,
"lng" : 76.8162957
},
"travel_mode" : "DRIVING"
},
{
"distance" : {
"text" : "0.2 km",
"value" : 204
},
"duration" : {
"text" : "1 min",
"value" : 76
},
"end_location" : {
"lat" : 30.6522304,
"lng" : 76.8165913
},
"html_instructions" : "Turn \u003cb\u003eleft\u003c/b\u003e toward \u003cb\u003eNH5\u003c/b\u003e/\u003cb\u003eNH7\u003c/b\u003e\u003cdiv style=\"font-size:0.9em\"\u003ePass by Patanjali Retail Store (on the left)\u003c/div\u003e",
"maneuver" : "turn-left",
"polyline" : {
"points" : "{_bzDw}ysMn#s#hAkA`#c#NQ~AeB#A"
},
"start_location" : {
"lat" : 30.6535831,
"lng" : 76.8151584
},
"travel_mode" : "DRIVING"
},
{
"distance" : {
"text" : "0.5 km",
"value" : 547
},
"duration" : {
"text" : "1 min",
"value" : 81
},
"end_location" : {
"lat" : 30.6492641,
"lng" : 76.8120363
},
"html_instructions" : "Turn \u003cb\u003eright\u003c/b\u003e at Anoopam Dhaba onto \u003cb\u003eNH5\u003c/b\u003e/\u003cb\u003eNH7\u003c/b\u003e\u003cdiv style=\"font-size:0.9em\"\u003ePass by Gurdwara Singh Sabha (on the right)\u003c/div\u003e",
"maneuver" : "turn-right",
"polyline" : {
"points" : "mwazDufzsMz#|Az#zA\\h#bAjBHL^n#zB`Eb#z#jArB`#t#`#p#\\d#"
},
"start_location" : {
"lat" : 30.6522304,
"lng" : 76.8165913
},
"travel_mode" : "DRIVING"
},
{
"distance" : {
"text" : "0.1 km",
"value" : 137
},
"duration" : {
"text" : "1 min",
"value" : 41
},
"end_location" : {
"lat" : 30.6482275,
"lng" : 76.8128078
},
"html_instructions" : "Turn \u003cb\u003eleft\u003c/b\u003e at Bir Real Estates, Zirakpur, Punjab onto \u003cb\u003eLohgarh Rd\u003c/b\u003e\u003cdiv style=\"font-size:0.9em\"\u003ePass by Web Design Courses In Zirakpur (on the right)\u003c/div\u003e",
"maneuver" : "turn-left",
"polyline" : {
"points" : "{dazDgjysMfBiAdBoA"
},
"start_location" : {
"lat" : 30.6492641,
"lng" : 76.8120363
},
"travel_mode" : "DRIVING"
},
{
"distance" : {
"text" : "0.4 km",
"value" : 374
},
"duration" : {
"text" : "2 mins",
"value" : 148
},
"end_location" : {
"lat" : 30.6483638,
"lng" : 76.8166615
},
"html_instructions" : "Turn \u003cb\u003eleft\u003c/b\u003e at Garg Property Consultant onto \u003cb\u003eGreen Enclave Rd\u003c/b\u003e\u003cdiv style=\"font-size:0.9em\"\u003ePass by Happy Electronics (on the left)\u003c/div\u003e\u003cdiv style=\"font-size:0.9em\"\u003eDestination will be on the right\u003c/div\u003e",
"maneuver" : "turn-left",
"polyline" : {
"points" : "m~`zDaoysM?A_#s#?SAoE#{C#eA?qA#y#?wA#a#"
},
"start_location" : {
"lat" : 30.6482275,
"lng" : 76.8128078
},
"travel_mode" : "DRIVING"
}
],
"traffic_speed_entry" : [],
"via_waypoint" : []
}
],
"overview_polyline" : {
"points" : "{ebzD{dzsMFPvDpEjDuD`BgBvBxDjCrElGfL~#vAlEyC_#u#AcFDmJ#yB"
},
"summary" : "NH5/NH7 and Green Enclave Rd",
"warnings" : [],
"waypoint_order" : []
}
],
"status" : "OK"
}
So how can this problem be solved?
This seems to be a data issue.
In this case you can use the "Report a problem" link on the bottom right corner of the maps.google.com.
https://support.google.com/maps/answer/162873
You can also try to use mapmaker.google.com to make the edits yourself and it will be reviewed by the community. You can read about the Map Maker tool here
https://support.google.com/mapmaker#topic=3180752
The URL for your route in Map Maker is
https://mapmaker.google.com/mapmaker?saddr=30.6545095,76.8163058&daddr=paras,+downtown+zirakpur&dirflg=d&gw=56&ll=30.655489,76.818573&spn=0.005002,0.008465&z=17&lyt=large_map_v3
Hope it helps!

elasticsearch disable term frequency scoring

I want to change the scoring system in elasticsearch to get rid of counting multiple appearances of a term. For example, I want:
"texas texas texas"
and
"texas"
to come out as the same score. I had found this mapping that elasticsearch said would disable term frequency counting but my searches do not come out as the same score:
"mappings":{
"business": {
"properties" : {
"name" : {
"type" : "string",
"index_options" : "docs",
"norms" : { "enabled": false}}
}
}
}
}
Any help will be appreciated, I have not been able to find a lot of information on this.
I am adding my search code and what gets returned when I use explain.
My search code:
Settings settings = ImmutableSettings.settingsBuilder().put("cluster.name", "escluster").build();
Client client = new TransportClient(settings)
.addTransportAddress(new InetSocketTransportAddress("127.0.0.1", 9300));
SearchRequest request = Requests.searchRequest("businesses")
.source(SearchSourceBuilder.searchSource().query(QueryBuilders.boolQuery()
.should(QueryBuilders.matchQuery("name", "Texas")
.minimumShouldMatch("1")))).searchType(SearchType.DFS_QUERY_THEN_FETCH);
ExplainRequest request2 = client.prepareIndex("businesses", "business")
and when I search with explain I get:
"took" : 14,
"timed_out" : false,
"_shards" : {
"total" : 3,
"successful" : 3,
"failed" : 0
},
"hits" : {
"total" : 2,
"max_score" : 1.0,
"hits" : [ {
"_shard" : 1,
"_node" : "BTqBPVDET5Kr83r-CYPqfA",
"_index" : "businesses",
"_type" : "business",
"_id" : "AU9U5KBks4zEorv9YI4n",
"_score" : 1.0,
"_source":{
"name" : "texas"
}
,
"_explanation" : {
"value" : 1.0,
"description" : "weight(_all:texas in 0) [PerFieldSimilarity], result of:",
"details" : [ {
"value" : 1.0,
"description" : "fieldWeight in 0, product of:",
"details" : [ {
"value" : 1.0,
"description" : "tf(freq=1.0), with freq of:",
"details" : [ {
"value" : 1.0,
"description" : "termFreq=1.0"
} ]
}, {
"value" : 1.0,
"description" : "idf(docFreq=2, maxDocs=3)"
}, {
"value" : 1.0,
"description" : "fieldNorm(doc=0)"
} ]
} ]
}
}, {
"_shard" : 1,
"_node" : "BTqBPVDET5Kr83r-CYPqfA",
"_index" : "businesses",
"_type" : "business",
"_id" : "AU9U5K6Ks4zEorv9YI4o",
"_score" : 0.8660254,
"_source":{
"name" : "texas texas texas"
}
,
"_explanation" : {
"value" : 0.8660254,
"description" : "weight(_all:texas in 0) [PerFieldSimilarity], result of:",
"details" : [ {
"value" : 0.8660254,
"description" : "fieldWeight in 0, product of:",
"details" : [ {
"value" : 1.7320508,
"description" : "tf(freq=3.0), with freq of:",
"details" : [ {
"value" : 3.0,
"description" : "termFreq=3.0"
} ]
}, {
"value" : 1.0,
"description" : "idf(docFreq=2, maxDocs=3)"
}, {
"value" : 0.5,
"description" : "fieldNorm(doc=0)"
} ]
} ]
}
} ]
}
It looks like it is still considering frequency and doc frequency. Any ideas? Sorry for the bad formatting I don't know why it is appearing so grotesque.
My code from the browser search http://localhost:9200/businesses/business/_search?pretty=true&qname=texas
is:
{
"took" : 2,
"timed_out" : false,
"_shards" : {
"total" : 3,
"successful" : 3,
"failed" : 0
},
"hits" : {
"total" : 4,
"max_score" : 1.0,
"hits" : [ {
"_index" : "businesses",
"_type" : "business",
"_id" : "AU9YcCKjKvtg8NgyozGK",
"_score" : 1.0,
"_source":{"business" : {
"name" : "texas texas texas texas" }
}
}, {
"_index" : "businesses",
"_type" : "business",
"_id" : "AU9YateBKvtg8Ngyoy-p",
"_score" : 1.0,
"_source":{
"name" : "texas" }
}, {
"_index" : "businesses",
"_type" : "business",
"_id" : "AU9YavVnKvtg8Ngyoy-4",
"_score" : 1.0,
"_source":{
"name" : "texas texas texas" }
}, {
"_index" : "businesses",
"_type" : "business",
"_id" : "AU9Yb7NgKvtg8NgyozFf",
"_score" : 1.0,
"_source":{"business" : {
"name" : "texas texas texas" }
}
} ]
}
}
It finds all 4 objects I have in there and has them all the same score.
When I run my java API search with explain I get:
{
"took" : 2,
"timed_out" : false,
"_shards" : {
"total" : 3,
"successful" : 3,
"failed" : 0
},
"hits" : {
"total" : 2,
"max_score" : 1.287682,
"hits" : [ {
"_shard" : 1,
"_node" : "BTqBPVDET5Kr83r-CYPqfA",
"_index" : "businesses",
"_type" : "business",
"_id" : "AU9YateBKvtg8Ngyoy-p",
"_score" : 1.287682,
"_source":{
"name" : "texas" }
,
"_explanation" : {
"value" : 1.287682,
"description" : "weight(name:texas in 0) [PerFieldSimilarity], result of:",
"details" : [ {
"value" : 1.287682,
"description" : "fieldWeight in 0, product of:",
"details" : [ {
"value" : 1.0,
"description" : "tf(freq=1.0), with freq of:",
"details" : [ {
"value" : 1.0,
"description" : "termFreq=1.0"
} ]
}, {
"value" : 1.287682,
"description" : "idf(docFreq=2, maxDocs=4)"
}, {
"value" : 1.0,
"description" : "fieldNorm(doc=0)"
} ]
} ]
}
}, {
"_shard" : 1,
"_node" : "BTqBPVDET5Kr83r-CYPqfA",
"_index" : "businesses",
"_type" : "business",
"_id" : "AU9YavVnKvtg8Ngyoy-4",
"_score" : 1.1151654,
"_source":{
"name" : "texas texas texas" }
,
"_explanation" : {
"value" : 1.1151654,
"description" : "weight(name:texas in 0) [PerFieldSimilarity], result of:",
"details" : [ {
"value" : 1.1151654,
"description" : "fieldWeight in 0, product of:",
"details" : [ {
"value" : 1.7320508,
"description" : "tf(freq=3.0), with freq of:",
"details" : [ {
"value" : 3.0,
"description" : "termFreq=3.0"
} ]
}, {
"value" : 1.287682,
"description" : "idf(docFreq=2, maxDocs=4)"
}, {
"value" : 0.5,
"description" : "fieldNorm(doc=0)"
} ]
} ]
}
} ]
}
}
It looks like one cannot override the index options for a field after the field has been initially set in the mapping.
Example:
put test
put test/business/_mapping
{
"properties": {
"name": {
"type": "string",
"index_options": "freqs",
"norms": {
"enabled": false
}
}
}
}
put test/business/_mapping
{
"properties": {
"name": {
"type": "string",
"index_options": "docs",
"norms": {
"enabled": false
}
}
}
}
get test/business/_mapping
{
"test": {
"mappings": {
"business": {
"properties": {
"name": {
"type": "string",
"norms": {
"enabled": false
},
"index_options": "freqs"
}
}
}
}
}
}
You would have to recreate the index to pick up the new mapping
Your field type must be text,
and you must re-index Elasticsearch — create a new index:
"mappings": {
"properties": {
"text": {
"type": "text",
"index_options": "docs"
}
}
}
https://www.elastic.co/guide/en/elasticsearch/reference/current/index-options.html

creating a JSONObject from InputStreamReader

I want to create a JSONObject object from the URL's content,
so I am getting the URL content from the google APIs, that's the result:
"results" : [
{
"address_components" : [
{
"long_name" : "29",
"short_name" : "29",
"types" : [ "street_number" ]
},
{
"long_name" : "Jean",
"short_name" : "Jean",
"types" : [ "route" ]
},
{
"long_name" : "Toulouse",
"short_name" : "Toulouse",
"types" : [ "locality", "political" ]
},
{
"long_name" : "Haute-Garonne",
"short_name" : "31",
"types" : [ "administrative_area_level_2", "political" ]
},
{
"long_name" : "Midi",
"short_name" : "Midi",
"types" : [ "administrative_area_level_1", "political" ]
},
{
"long_name" : "France",
"short_name" : "FR",
"types" : [ "country", "political" ]
},
{
"long_name" : "31000",
"short_name" : "31000",
"types" : [ "postal_code" ]
}
],
"formatted_address" : "99 Jean , 31900 Toulouse, France",
"geometry" : {
"location" : {
"lat" : 43.6069496,
"lng" : 1.4498134
},
"location_type" : "ROOFTOP",
"viewport" : {
"northeast" : {
"lat" : 43.6082985802915,
"lng" : 1.451162380291502
},
"southwest" : {
"lat" : 43.6056006197085,
"lng" : 1.448464419708498
}
}
},
"place_id" : "ChIJTSvW45i8rhIRu8OEgnpnZMY",
"types" : [ "street_address" ]
}
],
"status" : "OK"
}
I would like to create a JSONObject from this content, something like:
JSONObject obj = JSONObject.fromObject(urlConnection.getInputStream());
but when I check, the size of this object is 0.
You need to read the content of that InputStream to a String, you can't use it directly that way.
Read the InputStream with something similar to this:
public static String slurp(InputStream is){
BufferedReader br = new BufferedReader(new InputStreamReader(is));
StringBuilder sb = new StringBuilder();
String line;
while ((line = br.readLine()) != null) {
sb.append(line+"\n");
}
br.close();
return sb.toString();
}
And then use it to get a JSONObject:
JSONObject obj = JSONObject.fromObject(slurp(urlConnection.getInputStream()));
Done !
JSONParser parser = new JSONParser();
Object obj = parser.parse(new InputStreamReader(inputStream));
JSONObject jsonObject = (JSONObject) obj;

Categories