The Google Directions API is skipping information about small turns.
For example, the information about the following turn is skipped.
i am using this code
https://maps.googleapis.com/maps/api/directions/json?origin=30.6545095,76.8163058&destination=paras%20,downtown%20zirakpur&key=mykey&optimize:true
{
"geocoded_waypoints" : [
{
"geocoder_status" : "OK",
"place_id" : "ChIJb4RRbjbrDzkRJ6rnllrRaHo",
"types" : [ "political", "sublocality", "sublocality_level_1" ]
},
{
"geocoder_status" : "OK",
"partial_match" : true,
"place_id" : "ChIJBQuF-jDrDzkR49h1v8UzbpM",
"types" : [ "premise" ]
}
],
"routes" : [
{
"bounds" : {
"northeast" : {
"lat" : 30.6545368,
"lng" : 76.8166615
},
"southwest" : {
"lat" : 30.6482275,
"lng" : 76.8120363
}
},
"copyrights" : "Map data ©2016 Google",
"legs" : [
{
"distance" : {
"text" : "1.4 km",
"value" : 1415
},
"duration" : {
"text" : "6 mins",
"value" : 371
},
"end_address" : "Paras Down Town Square mall, Green Enclave Rd, Badal Colony, Zirakpur, Punjab 140603, India",
"end_location" : {
"lat" : 30.6483638,
"lng" : 76.8166615
},
"start_address" : "Utrathiya, Zirakpur, Punjab, India",
"start_location" : {
"lat" : 30.6545368,
"lng" : 76.8162957
},
"steps" : [
{
"distance" : {
"text" : "0.2 km",
"value" : 153
},
"duration" : {
"text" : "1 min",
"value" : 25
},
"end_location" : {
"lat" : 30.6535831,
"lng" : 76.8151584
},
"html_instructions" : "Head \u003cb\u003esouthwest\u003c/b\u003e",
"polyline" : {
"points" : "{ebzD{dzsM#DDJNTfDzD"
},
"start_location" : {
"lat" : 30.6545368,
"lng" : 76.8162957
},
"travel_mode" : "DRIVING"
},
{
"distance" : {
"text" : "0.2 km",
"value" : 204
},
"duration" : {
"text" : "1 min",
"value" : 76
},
"end_location" : {
"lat" : 30.6522304,
"lng" : 76.8165913
},
"html_instructions" : "Turn \u003cb\u003eleft\u003c/b\u003e toward \u003cb\u003eNH5\u003c/b\u003e/\u003cb\u003eNH7\u003c/b\u003e\u003cdiv style=\"font-size:0.9em\"\u003ePass by Patanjali Retail Store (on the left)\u003c/div\u003e",
"maneuver" : "turn-left",
"polyline" : {
"points" : "{_bzDw}ysMn#s#hAkA`#c#NQ~AeB#A"
},
"start_location" : {
"lat" : 30.6535831,
"lng" : 76.8151584
},
"travel_mode" : "DRIVING"
},
{
"distance" : {
"text" : "0.5 km",
"value" : 547
},
"duration" : {
"text" : "1 min",
"value" : 81
},
"end_location" : {
"lat" : 30.6492641,
"lng" : 76.8120363
},
"html_instructions" : "Turn \u003cb\u003eright\u003c/b\u003e at Anoopam Dhaba onto \u003cb\u003eNH5\u003c/b\u003e/\u003cb\u003eNH7\u003c/b\u003e\u003cdiv style=\"font-size:0.9em\"\u003ePass by Gurdwara Singh Sabha (on the right)\u003c/div\u003e",
"maneuver" : "turn-right",
"polyline" : {
"points" : "mwazDufzsMz#|Az#zA\\h#bAjBHL^n#zB`Eb#z#jArB`#t#`#p#\\d#"
},
"start_location" : {
"lat" : 30.6522304,
"lng" : 76.8165913
},
"travel_mode" : "DRIVING"
},
{
"distance" : {
"text" : "0.1 km",
"value" : 137
},
"duration" : {
"text" : "1 min",
"value" : 41
},
"end_location" : {
"lat" : 30.6482275,
"lng" : 76.8128078
},
"html_instructions" : "Turn \u003cb\u003eleft\u003c/b\u003e at Bir Real Estates, Zirakpur, Punjab onto \u003cb\u003eLohgarh Rd\u003c/b\u003e\u003cdiv style=\"font-size:0.9em\"\u003ePass by Web Design Courses In Zirakpur (on the right)\u003c/div\u003e",
"maneuver" : "turn-left",
"polyline" : {
"points" : "{dazDgjysMfBiAdBoA"
},
"start_location" : {
"lat" : 30.6492641,
"lng" : 76.8120363
},
"travel_mode" : "DRIVING"
},
{
"distance" : {
"text" : "0.4 km",
"value" : 374
},
"duration" : {
"text" : "2 mins",
"value" : 148
},
"end_location" : {
"lat" : 30.6483638,
"lng" : 76.8166615
},
"html_instructions" : "Turn \u003cb\u003eleft\u003c/b\u003e at Garg Property Consultant onto \u003cb\u003eGreen Enclave Rd\u003c/b\u003e\u003cdiv style=\"font-size:0.9em\"\u003ePass by Happy Electronics (on the left)\u003c/div\u003e\u003cdiv style=\"font-size:0.9em\"\u003eDestination will be on the right\u003c/div\u003e",
"maneuver" : "turn-left",
"polyline" : {
"points" : "m~`zDaoysM?A_#s#?SAoE#{C#eA?qA#y#?wA#a#"
},
"start_location" : {
"lat" : 30.6482275,
"lng" : 76.8128078
},
"travel_mode" : "DRIVING"
}
],
"traffic_speed_entry" : [],
"via_waypoint" : []
}
],
"overview_polyline" : {
"points" : "{ebzD{dzsMFPvDpEjDuD`BgBvBxDjCrElGfL~#vAlEyC_#u#AcFDmJ#yB"
},
"summary" : "NH5/NH7 and Green Enclave Rd",
"warnings" : [],
"waypoint_order" : []
}
],
"status" : "OK"
}
So how can this problem be solved?
This seems to be a data issue.
In this case you can use the "Report a problem" link on the bottom right corner of the maps.google.com.
https://support.google.com/maps/answer/162873
You can also try to use mapmaker.google.com to make the edits yourself and it will be reviewed by the community. You can read about the Map Maker tool here
https://support.google.com/mapmaker#topic=3180752
The URL for your route in Map Maker is
https://mapmaker.google.com/mapmaker?saddr=30.6545095,76.8163058&daddr=paras,+downtown+zirakpur&dirflg=d&gw=56&ll=30.655489,76.818573&spn=0.005002,0.008465&z=17&lyt=large_map_v3
Hope it helps!
Related
I am trying to parse the following JSON, which I get from the Elasticsearch API, in Groovy using JsonSlurper. I need to create a list of _id values out of this JSON. I have tried multiple variations of code, but with no success.
Please suggest; any help is appreciated.
{
"took" : 2535,
"timed_out" : false,
"_shards" : {
"total" : 384,
"successful" : 384,
"skipped" : 0,
"failed" : 0
},
"hits" : {
"total" : {
"value" : 10000,
"relation" : "gte"
},
"max_score" : null,
"hits" : [
{
"_index" : "X",
"_type" : "_doc",
"_id" : "310165903526204",
"_score" : null,
"sort" : [
"310165903526204"
]
},
{
"_index" : "X",
"_type" : "_doc",
"_id" : "310165903698515",
"_score" : null,
"sort" : [
"310165903698515"
]
},
{
"_index" : "X",
"_type" : "_doc",
"_id" : "310165903819494",
"_score" : null,
"sort" : [
"310165903819494"
]
}
]
}
}
PS: I tried using multiple clients provided by Elasticsearch to search ES and parse the data, but I am facing another issue with that, so I had to switch to an HTTP client and parse manually. This is the link for the client issue: RestHighLevelClient with Elasticsearch client error
Update:
{
"took" : 19,
"timed_out" : false,
"_shards" : {
"total" : 370,
"successful" : 370,
"skipped" : 0,
"failed" : 0
},
"hits" : {
"total" : {
"value" : 1,
"relation" : "eq"
},
"max_score" : 1.0,
"hits" : [
{
"_index" : "index",
"_type" : "_doc",
"_id" : "3961655114649",
"_score" : 1.0,
"_source" : {
"location" : {
"lat" : 14.94046,
"lon" : -23.48016
},
"place" : {
"country" : "USA",
"pois" : [
{
"externalIdentifier" : "3961655114649",
"gdfFeatureCode" : "7376",
"officialNames" : [
{
"name" : "ENG",
"language" : "ENG"
}
],
"alternateNames" : [ ],
"brandNames" : [ ],
"streetsAndCities" : [
{
"city" : "California",
"cityLanguage" : "UND"
}
],
"postalCode" : "",
"postalCodeMain" : ""
}
],
"providers" : [
{
"UniqueId" : """{"mostSigBits": 6332787932357083, "leastSigBits": -6052983698683356}""",
"code" : "ABC",
"deliveryId" : "3959",
"rawId" : """{"mostSigBits": 8772023489060096, "leastSigBits": -6327158443391381}""",
"totalAttributes" : "1",
"visibleAttributes" : "1"
},
{
"UniqueId" : """{"mostSigBits": 6332787932357083, "leastSigBits": -6052983698683356}_1""",
"rawId" : """{"mostSigBits": 8772023489060096, "leastSigBits": -6327158443391381}""",
"totalAttributes" : "1",
"visibleAttributes" : "1"
}
],
"attributes" : [ ],
"isAddObservation" : false
},
"transactionCommitDate" : 0
}
}
]
}
}
With this updated JSON, I want to pull the mostSigBits and leastSigBits values from the UniqueId entries under providers. The catch is that I want to pull only the UniqueId inside providers[] that does not have an _1, _2, or any other suffix attached to it.
I have tried to get that data by doing the following, but I am looking for a better approach:
json.hits.hits[0]._source.Place.provider[0].typeId
// Pull the _id out of every hit with the spread operator, then print one per line.
def _id = json.hits.hits*._id
for (id in _id) {
    println id
}
Basic Groovy's object navigation is here to help:
import groovy.json.*

// Raw Elasticsearch search response: three hits whose _id values we want.
String payload = '''\
{
"took" : 2535,
"timed_out" : false,
"_shards" : {
"total" : 384,
"successful" : 384,
"skipped" : 0,
"failed" : 0
},
"hits" : {
"total" : {
"value" : 10000,
"relation" : "gte"
},
"max_score" : null,
"hits" : [
{
"_index" : "X",
"_type" : "_doc",
"_id" : "310165903526204",
"_score" : null,
"sort" : [
"310165903526204"
]
},
{
"_index" : "X",
"_type" : "_doc",
"_id" : "310165903698515",
"_score" : null,
"sort" : [
"310165903698515"
]
},
{
"_index" : "X",
"_type" : "_doc",
"_id" : "310165903819494",
"_score" : null,
"sort" : [
"310165903819494"
]
}
]
}
}'''

// Parse the JSON text, then map every element of hits.hits to its _id.
def parsed = new JsonSlurper().parseText(payload)
List ids = parsed.hits.hits.collect { it._id }
assert ids.toString() == '[310165903526204, 310165903698515, 310165903819494]'
I already ingested the file into Druid, and thankfully it shows the ingestion succeeded. However, when I checked the ingestion report, all rows were processed with errors, yet the datasource is displayed in the "Datasource" tab.
I have tried to minimise the rows from 20M to 20 rows only. Here is my configuration file:
{
"type" : "index",
"spec" : {
"ioConfig" : {
"type" : "index",
"firehose" : {
"type" : "local",
"baseDir" : "/home/data/Salutica",
"filter" : "outDashboard2RawV3.csv"
}
},
"dataSchema" : {
"dataSource": "DaTRUE2_Dashboard_V3",
"granularitySpec" : {
"type" : "uniform",
"segmentGranularity" : "WEEK",
"queryGranularity" : "none",
"intervals" : ["2017-05-08/2019-05-17"],
"rollup" : false
},
"parser" : {
"type" : "string",
"parseSpec": {
"format" : "csv",
"timestampSpec" : {
"column" : "Date_Time",
"format" : "auto"
},
"columns" : [
"Main_ID","Parameter_ID","Date_Time","Serial_Number","Status","Station_ID",
"Station_Type","Parameter_Name","Failed_Date_Time","Failed_Measurement",
"Database_Name","Date_Time_Year","Date_Time_Month",
"Date_Time_Day","Date_Time_Hour","Date_Time_Weekday","Status_New"
],
"dimensionsSpec" : {
"dimensions" : [
"Date_Time","Serial_Number","Status","Station_ID",
"Station_Type","Parameter_Name","Failed_Date_Time",
"Failed_Measurement","Database_Name","Status_New",
{
"name" : "Main_ID",
"type" : "long"
},
{
"name" : "Parameter_ID",
"type" : "long"
},
{
"name" : "Date_Time_Year",
"type" : "long"
},
{
"name" : "Date_Time_Month",
"type" : "long"
},
{
"name" : "Date_Time_Day",
"type" : "long"
},
{
"name" : "Date_Time_Hour",
"type" : "long"
},
{
"name" : "Date_Time_Weekday",
"type" : "long"
}
]
}
}
},
"metricsSpec" : [
{
"name" : "count",
"type" : "count"
}
]
},
"tuningConfig" : {
"type" : "index",
"partitionsSpec" : {
"type" : "hashed",
"targetPartitionSize" : 5000000
},
"jobProperties" : {}
}
}
}
Report:
{"ingestionStatsAndErrors":{"taskId":"index_DaTRUE2_Dashboard_V3_2019-09-10T01:16:47.113Z","payload":{"ingestionState":"COMPLETED","unparseableEvents":{},"rowStats":{"determinePartitions":{"processed":0,"processedWithError":0,"thrownAway":0,"unparseable":0},"buildSegments":{"processed":0,"processedWithError":20606701,"thrownAway":0,"unparseable":1}},"errorMsg":null},"type":"ingestionStatsAndErrors"}}
I'm expecting this:
{"processed":20606701,"processedWithError":0,"thrownAway":0,"unparseable":1}},"errorMsg":null},"type":"ingestionStatsAndErrors"}}
instead of this:
{"processed":0,"processedWithError":20606701,"thrownAway":0,"unparseable":1}},"errorMsg":null},"type":"ingestionStatsAndErrors"}}
Below is my input data from csv;
"Main_ID","Parameter_ID","Date_Time","Serial_Number","Status","Station_ID","Station_Type","Parameter_Name","Failed_Date_Time","Failed_Measurement","Database_Name","Date_Time_Year","Date_Time_Month","Date_Time_Day","Date_Time_Hour","Date_Time_Weekday","Status_New"
1,3,"2018-10-05 15:00:55","1840SDF00038","Passed","ST1","BLTBoard","1.8V","","","DaTRUE2Left",2018,10,5,15,"Friday","Passed"
1,4,"2018-10-05 15:00:55","1840SDF00038","Passed","ST1","BLTBoard","1.35V","","","DaTRUE2Left",2018,10,5,15,"Friday","Passed"
1,5,"2018-10-05 15:00:55","1840SDF00038","Passed","ST1","BLTBoard","Isc_VChrg","","","DaTRUE2Left",2018,10,5,15,"Friday","Passed"
1,6,"2018-10-05 15:00:55","1840SDF00038","Passed","ST1","BLTBoard","Isc_VBAT","","","DaTRUE2Left",2018,10,5,15,"Friday","Passed"
first i have two arrays
ArrayList of type JSONObject jsonArrayResponse, jsonArraySubResponse
note: both arrays have the same size
here is my code to get and insert place data into an array :
// Walk both response arrays in lock-step and build one PlaceObject per entry.
// FIX 1: the bound was "i < jsonArrayResponse.size() - 1", which silently
// skipped the last place in the response — iterate the full range instead.
for (int i = 0; i < jsonArrayResponse.size(); i++) {
    try {
        JSONObject objectDictionary = jsonArrayResponse.get(i);
        JSONObject objectSubDictionary = jsonArraySubResponse.get(i);
        PlaceObject placeObject = new PlaceObject();

        // Mandatory fields from the Place Search result.
        placeObject.setPlace_id(objectDictionary.getString("place_id"));
        JSONObject location = objectDictionary.getJSONObject("geometry").getJSONObject("location");
        placeObject.setLat(location.getDouble("lat"));
        placeObject.setLng(location.getDouble("lng"));
        placeObject.setName(objectDictionary.getString("name"));

        // Optional: reference of the first photo, if any.
        if (objectDictionary.has("photos")) {
            JSONObject photoReferenceObject = objectDictionary.getJSONArray("photos").getJSONObject(0);
            if (photoReferenceObject.has("photo_reference")) {
                placeObject.setPhotoReference(photoReferenceObject.getString("photo_reference"));
            }
        }

        // Optional fields from the Place Details result.
        if (objectSubDictionary.has("vicinity")) {
            placeObject.setVicinity(objectSubDictionary.getString("vicinity"));
        }
        if (objectSubDictionary.has("formatted_address")) {
            placeObject.setFormatted_address(objectSubDictionary.getString("formatted_address"));
        }
        if (objectSubDictionary.has("formatted_phone_number")) {
            placeObject.setFormatted_phone_number(objectSubDictionary.getString("formatted_phone_number"));
        }
        if (objectSubDictionary.has("international_phone_number")) {
            placeObject.setInternational_phone_number(objectSubDictionary.getString("international_phone_number"));
        }
        if (objectSubDictionary.has("url")) {
            placeObject.setUrl(objectSubDictionary.getString("url"));
        }
        if (objectSubDictionary.has("website")) {
            placeObject.setWebsite(objectSubDictionary.getString("website"));
        }

        // Reviews array from the Place Details result.
        if (objectSubDictionary.has("reviews")) {
            ArrayList<Reviews> reviews = new ArrayList<Reviews>();
            for (int j = 0; j < objectSubDictionary.getJSONArray("reviews").length(); j++) {
                Reviews reviewObject = new Reviews();
                JSONObject review = objectSubDictionary.getJSONArray("reviews").getJSONObject(j);
                reviewObject.setAuthor_name(review.getString("author_name"));
                if (review.has("rating")) {
                    reviewObject.setRating(review.getString("rating"));
                }
                reviewObject.setText(review.getString("text"));
                if (review.has("type")) {
                    reviewObject.setType(review.getString("type"));
                }
                reviews.add(reviewObject);
            }
            placeObject.setReviews(reviews);
        }

        // Opening hours: weekday strings and the open_now flag (merged the two
        // duplicate has("opening_hours") blocks into one).
        if (objectDictionary.has("opening_hours")) {
            JSONObject openingHours = objectDictionary.getJSONObject("opening_hours");
            // FIX 2: the weekday strings live under "weekday_text", not "reviews".
            // The original getJSONArray("reviews") threw a JSONException whenever
            // "opening_hours" was present; the catch below swallowed it, so
            // placeObjects.add(...) never ran and the whole place was lost —
            // the likely cause of the "missing data" described in the question.
            if (openingHours.has("weekday_text")) {
                ArrayList<String> days = new ArrayList<String>();
                for (int z = 0; z < openingHours.getJSONArray("weekday_text").length(); z++) {
                    days.add(openingHours.getJSONArray("weekday_text").getString(z));
                }
                placeObject.setWeekday_text(days);
            }
            if (openingHours.has("open_now")) {
                placeObject.setOpen_now(openingHours.getBoolean("open_now"));
            }
        }

        // Distance in kilometers from the device's current location to the place.
        Float distanceInKilometers = distance(location.getDouble("lat"), location.getDouble("lng"),
                Global.loc.getLatitude(), Global.loc.getLongitude()) / 1000;
        placeObject.setDistance(distanceInKilometers.doubleValue());

        if (objectDictionary.has("rating")) {
            placeObject.setRating(objectDictionary.getDouble("rating"));
        }
        if (objectDictionary.has("price_level")) {
            placeObject.setPrice_level(objectDictionary.getInt("price_level"));
        }
        placeObjects.add(placeObject);

        // With the corrected bound this now fires on the final iteration and
        // publishes the fully populated list (it was unreachable before).
        if (i == jsonArrayResponse.size() - 1) {
            this.placeObjectsResopones = placeObjects;
            this.placeObjects = placeObjects;
        }
    } catch (JSONException e) {
        // NOTE(review): one bad entry still only skips that entry; consider
        // logging the failing index for diagnosis instead of just the trace.
        e.printStackTrace();
    }
}
Sample Json data:
1) Places Search request:
{
"html_attributions" : [],
"results" : [
{
"geometry" : {
"location" : {
"lat" : -33.870775,
"lng" : 151.199025
}
},
"icon" : "http://maps.gstatic.com/mapfiles/place_api/icons/travel_agent-71.png",
"id" : "21a0b251c9b8392186142c798263e289fe45b4aa",
"name" : "Rhythmboat Cruises",
"opening_hours" : {
"open_now" : true
},
"photos" : [
{
"height" : 270,
"html_attributions" : [],
"photo_reference" : "CnRnAAAAF-LjFR1ZV93eawe1cU_3QNMCNmaGkowY7CnOf-kcNmPhNnPEG9W979jOuJJ1sGr75rhD5hqKzjD8vbMbSsRnq_Ni3ZIGfY6hKWmsOf3qHKJInkm4h55lzvLAXJVc-Rr4kI9O1tmIblblUpg2oqoq8RIQRMQJhFsTr5s9haxQ07EQHxoUO0ICubVFGYfJiMUPor1GnIWb5i8",
"width" : 519
}
],
"place_id" : "ChIJyWEHuEmuEmsRm9hTkapTCrk",
"scope" : "GOOGLE",
"alt_ids" : [
{
"place_id" : "D9iJyWEHuEmuEmsRm9hTkapTCrk",
"scope" : "APP"
}
],
"reference" : "CoQBdQAAAFSiijw5-cAV68xdf2O18pKIZ0seJh03u9h9wk_lEdG-cP1dWvp_QGS4SNCBMk_fB06YRsfMrNkINtPez22p5lRIlj5ty_HmcNwcl6GZXbD2RdXsVfLYlQwnZQcnu7ihkjZp_2gk1-fWXql3GQ8-1BEGwgCxG-eaSnIJIBPuIpihEhAY1WYdxPvOWsPnb2-nGb6QGhTipN0lgaLpQTnkcMeAIEvCsSa0Ww",
"types" : [ "travel_agency", "restaurant", "food", "establishment" ],
"vicinity" : "Pyrmont Bay Wharf Darling Dr, Sydney"
},...
],
"status" : "OK"
}
2) Place Details request:
{
"html_attributions" : [],
"result" : {
"address_components" : [
{
"long_name" : "48",
"short_name" : "48",
"types" : [ "street_number" ]
},
{
"long_name" : "Pirrama Road",
"short_name" : "Pirrama Road",
"types" : [ "route" ]
},
{
"long_name" : "Pyrmont",
"short_name" : "Pyrmont",
"types" : [ "locality", "political" ]
},
{
"long_name" : "NSW",
"short_name" : "NSW",
"types" : [ "administrative_area_level_1", "political" ]
},
{
"long_name" : "AU",
"short_name" : "AU",
"types" : [ "country", "political" ]
},
{
"long_name" : "2009",
"short_name" : "2009",
"types" : [ "postal_code" ]
}
],
"formatted_address" : "48 Pirrama Road, Pyrmont NSW, Australia",
"formatted_phone_number" : "(02) 9374 4000",
"geometry" : {
"location" : {
"lat" : -33.8669710,
"lng" : 151.1958750
},
"viewport" : {
"northeast" : {
"lat" : -33.8665053,
"lng" : 151.1960371
},
"southwest" : {
"lat" : -33.8669293,
"lng" : 151.1952183
}
}
},
"icon" : "http://maps.gstatic.com/mapfiles/place_api/icons/generic_business-71.png",
"id" : "4f89212bf76dde31f092cfc14d7506555d85b5c7",
"international_phone_number" : "+61 2 9374 4000",
"name" : "Google Sydney",
"place_id" : "ChIJN1t_tDeuEmsRUsoyG83frY4",
"scope" : "GOOGLE",
"alt_ids" : [
{
"place_id" : "D9iJyWEHuEmuEmsRm9hTkapTCrk",
"scope" : "APP"
}
],
"rating" : 4.70,
"reference" : "CnRsAAAA98C4wD-VFvzGq-KHVEFhlHuy1TD1W6UYZw7KjuvfVsKMRZkbCVBVDxXFOOCM108n9PuJMJxeAxix3WB6B16c1p2bY1ZQyOrcu1d9247xQhUmPgYjN37JMo5QBsWipTsnoIZA9yAzA-0pnxFM6yAcDhIQbU0z05f3xD3m9NQnhEDjvBoUw-BdcocVpXzKFcnMXUpf-nkyF1w",
"reviews" : [
{
"aspects" : [
{
"rating" : 3,
"type" : "quality"
}
],
"author_name" : "Simon Bengtsson",
"author_url" : "https://plus.google.com/104675092887960962573",
"language" : "en",
"rating" : 5,
"text" : "Just went inside to have a look at Google. Amazing.",
"time" : 1338440552869
},
{
"aspects" : [
{
"rating" : 3,
"type" : "quality"
}
],
"author_name" : "Felix Rauch Valenti",
"author_url" : "https://plus.google.com/103291556674373289857",
"language" : "en",
"rating" : 5,
"text" : "Best place to work :-)",
"time" : 1338411244325
},
{
"aspects" : [
{
"rating" : 3,
"type" : "quality"
}
],
"author_name" : "Chris",
"language" : "en",
"rating" : 5,
"text" : "Great place to work, always lots of free food!",
"time" : 1330467089039
}
],
"types" : [ "establishment" ],
"url" : "http://maps.google.com/maps/place?cid=10281119596374313554",
"vicinity" : "48 Pirrama Road, Pyrmont",
"website" : "http://www.google.com.au/"
},
"status" : "OK"
}
My problem is that I get missing data, and I think entries are skipped if the data takes time to process. How can I improve this to make sure I get all the data of a place object?
I note that
inside your loop this condition will not happen
if (i == jsonArrayResponse.size() - 1) {
...
}
because your loop will not continue till i == jsonArrayResponse.size() - 1
try to add = in the loop line like this
for (int i = 0; i <= jsonArrayResponse.size() - 1; i++) { ...}
Maybe this is the reason of missing last element from the array
Please provide sample data. I will try using GSON.
I want to change the scoring system in elasticsearch to get rid of counting multiple appearances of a term. For example, I want:
"texas texas texas"
and
"texas"
to come out as the same score. I had found this mapping that elasticsearch said would disable term frequency counting but my searches do not come out as the same score:
"mappings":{
"business": {
"properties" : {
"name" : {
"type" : "string",
"index_options" : "docs",
"norms" : { "enabled": false}}
}
}
}
}
Any help will be appreciated, I have not been able to find a lot of information on this.
I am adding my search code and what gets returned when I use explain.
My search code:
// Build a TransportClient against a local Elasticsearch node ("escluster") and
// prepare a DFS_QUERY_THEN_FETCH search: a bool query with a single should
// clause matching "Texas" against the "name" field.
Settings settings = ImmutableSettings.settingsBuilder().put("cluster.name", "escluster").build();
Client client = new TransportClient(settings)
.addTransportAddress(new InetSocketTransportAddress("127.0.0.1", 9300));
SearchRequest request = Requests.searchRequest("businesses")
.source(SearchSourceBuilder.searchSource().query(QueryBuilders.boolQuery()
.should(QueryBuilders.matchQuery("name", "Texas")
.minimumShouldMatch("1")))).searchType(SearchType.DFS_QUERY_THEN_FETCH);
// NOTE(review): prepareIndex(...) returns an index-request builder, not an
// ExplainRequest — this line looks truncated or mis-pasted; confirm whether
// client.prepareExplain(index, type, id) was intended.
ExplainRequest request2 = client.prepareIndex("businesses", "business")
and when I search with explain I get:
"took" : 14,
"timed_out" : false,
"_shards" : {
"total" : 3,
"successful" : 3,
"failed" : 0
},
"hits" : {
"total" : 2,
"max_score" : 1.0,
"hits" : [ {
"_shard" : 1,
"_node" : "BTqBPVDET5Kr83r-CYPqfA",
"_index" : "businesses",
"_type" : "business",
"_id" : "AU9U5KBks4zEorv9YI4n",
"_score" : 1.0,
"_source":{
"name" : "texas"
}
,
"_explanation" : {
"value" : 1.0,
"description" : "weight(_all:texas in 0) [PerFieldSimilarity], result of:",
"details" : [ {
"value" : 1.0,
"description" : "fieldWeight in 0, product of:",
"details" : [ {
"value" : 1.0,
"description" : "tf(freq=1.0), with freq of:",
"details" : [ {
"value" : 1.0,
"description" : "termFreq=1.0"
} ]
}, {
"value" : 1.0,
"description" : "idf(docFreq=2, maxDocs=3)"
}, {
"value" : 1.0,
"description" : "fieldNorm(doc=0)"
} ]
} ]
}
}, {
"_shard" : 1,
"_node" : "BTqBPVDET5Kr83r-CYPqfA",
"_index" : "businesses",
"_type" : "business",
"_id" : "AU9U5K6Ks4zEorv9YI4o",
"_score" : 0.8660254,
"_source":{
"name" : "texas texas texas"
}
,
"_explanation" : {
"value" : 0.8660254,
"description" : "weight(_all:texas in 0) [PerFieldSimilarity], result of:",
"details" : [ {
"value" : 0.8660254,
"description" : "fieldWeight in 0, product of:",
"details" : [ {
"value" : 1.7320508,
"description" : "tf(freq=3.0), with freq of:",
"details" : [ {
"value" : 3.0,
"description" : "termFreq=3.0"
} ]
}, {
"value" : 1.0,
"description" : "idf(docFreq=2, maxDocs=3)"
}, {
"value" : 0.5,
"description" : "fieldNorm(doc=0)"
} ]
} ]
}
} ]
}
It looks like it is still considering frequency and doc frequency. Any ideas? Sorry for the bad formatting I don't know why it is appearing so grotesque.
My code from the browser search http://localhost:9200/businesses/business/_search?pretty=true&qname=texas
is:
{
"took" : 2,
"timed_out" : false,
"_shards" : {
"total" : 3,
"successful" : 3,
"failed" : 0
},
"hits" : {
"total" : 4,
"max_score" : 1.0,
"hits" : [ {
"_index" : "businesses",
"_type" : "business",
"_id" : "AU9YcCKjKvtg8NgyozGK",
"_score" : 1.0,
"_source":{"business" : {
"name" : "texas texas texas texas" }
}
}, {
"_index" : "businesses",
"_type" : "business",
"_id" : "AU9YateBKvtg8Ngyoy-p",
"_score" : 1.0,
"_source":{
"name" : "texas" }
}, {
"_index" : "businesses",
"_type" : "business",
"_id" : "AU9YavVnKvtg8Ngyoy-4",
"_score" : 1.0,
"_source":{
"name" : "texas texas texas" }
}, {
"_index" : "businesses",
"_type" : "business",
"_id" : "AU9Yb7NgKvtg8NgyozFf",
"_score" : 1.0,
"_source":{"business" : {
"name" : "texas texas texas" }
}
} ]
}
}
It finds all 4 objects I have in there and has them all the same score.
When I run my java API search with explain I get:
{
"took" : 2,
"timed_out" : false,
"_shards" : {
"total" : 3,
"successful" : 3,
"failed" : 0
},
"hits" : {
"total" : 2,
"max_score" : 1.287682,
"hits" : [ {
"_shard" : 1,
"_node" : "BTqBPVDET5Kr83r-CYPqfA",
"_index" : "businesses",
"_type" : "business",
"_id" : "AU9YateBKvtg8Ngyoy-p",
"_score" : 1.287682,
"_source":{
"name" : "texas" }
,
"_explanation" : {
"value" : 1.287682,
"description" : "weight(name:texas in 0) [PerFieldSimilarity], result of:",
"details" : [ {
"value" : 1.287682,
"description" : "fieldWeight in 0, product of:",
"details" : [ {
"value" : 1.0,
"description" : "tf(freq=1.0), with freq of:",
"details" : [ {
"value" : 1.0,
"description" : "termFreq=1.0"
} ]
}, {
"value" : 1.287682,
"description" : "idf(docFreq=2, maxDocs=4)"
}, {
"value" : 1.0,
"description" : "fieldNorm(doc=0)"
} ]
} ]
}
}, {
"_shard" : 1,
"_node" : "BTqBPVDET5Kr83r-CYPqfA",
"_index" : "businesses",
"_type" : "business",
"_id" : "AU9YavVnKvtg8Ngyoy-4",
"_score" : 1.1151654,
"_source":{
"name" : "texas texas texas" }
,
"_explanation" : {
"value" : 1.1151654,
"description" : "weight(name:texas in 0) [PerFieldSimilarity], result of:",
"details" : [ {
"value" : 1.1151654,
"description" : "fieldWeight in 0, product of:",
"details" : [ {
"value" : 1.7320508,
"description" : "tf(freq=3.0), with freq of:",
"details" : [ {
"value" : 3.0,
"description" : "termFreq=3.0"
} ]
}, {
"value" : 1.287682,
"description" : "idf(docFreq=2, maxDocs=4)"
}, {
"value" : 0.5,
"description" : "fieldNorm(doc=0)"
} ]
} ]
}
} ]
}
}
It looks like one cannot override the index options for a field after the field has been initially set in the mapping.
Example:
put test
put test/business/_mapping
{
"properties": {
"name": {
"type": "string",
"index_options": "freqs",
"norms": {
"enabled": false
}
}
}
}
put test/business/_mapping
{
"properties": {
"name": {
"type": "string",
"index_options": "docs",
"norms": {
"enabled": false
}
}
}
}
get test/business/_mapping
{
"test": {
"mappings": {
"business": {
"properties": {
"name": {
"type": "string",
"norms": {
"enabled": false
},
"index_options": "freqs"
}
}
}
}
}
}
You would have to recreate the index to pick up the new mapping
Your field type must be text.
You must re-index Elasticsearch — create a new index:
"mappings": {
"properties": {
"text": {
"type": "text",
"index_options": "docs"
}
}
}
https://www.elastic.co/guide/en/elasticsearch/reference/current/index-options.html
My schema design is as follows; I want to retrieve some information from MongoDB:
{
"_id" : "23423q53q45345",
"value" : "5942178562002.65",
"dataset" : "GDP (current US$)",
"data" : [
{
"data_name" : "country",
"value" : "india"
},
{
"data_name" : "date",
"value" : "2011"
}
]
},
{
"_id" : "23423q53qdsfsd5",
"value" : "1234238562002.65",
"dataset" : "GDP (current US$)",
"data" : [
{
"data_name" : "country",
"value" : "india"
},
{
"data_name" : "date",
"value" : "2012"
}
]
},
{
"_id" : "213423q45345",
"value" : "6576867562002.65",
"dataset" : "GDP (current US$)",
"data" : [
{
"data_name" : "country",
"value" : "us"
},
{
"data_name" : "date",
"value" : "2011"
}
]
},
{
"_id" : "4564564545dsfsd5",
"value" : "2354353462002.65",
"dataset" : "GDP (current US$)",
"data" : [
{
"data_name" : "country",
"value" : "us"
},
{
"data_name" : "date",
"value" : "2012"
}
]
}
i want to get data of india for 2011
i used this query
// NOTE: "$in" matches each listed value independently against any element of
// data.value, so a document matches if it contains "india" OR "2011" — it does
// not require both values, nor that country/date pair up within one element.
// That is why the 2012 document is also returned.
db.collection.find({
"data.value": {
"$in": [
"india","2011"
]
}
});
it returns two results
{
"_id" : "23423q53q45345",
"value" : "5942178562002.65",
"dataset" : "GDP (current US$)",
"data" : [
{
"data_name" : "country",
"value" : "india"
},
{
"data_name" : "date",
"value" : "2011"
}
]
},
{
"_id" : "23423q53qdsfsd5",
"value" : "1234238562002.65",
"dataset" : "GDP (current US$)",
"data" : [
{
"data_name" : "country",
"value" : "india"
},
{
"data_name" : "date",
"value" : "2012"
}
]
}
It is supposed to return one result:
{
"_id" : "23423q53q45345",
"value" : "5942178562002.65",
"dataset" : "GDP (current US$)",
"data" : [
{
"data_name" : "country",
"value" : "india"
},
{
"data_name" : "date",
"value" : "2011"
}
]
}
I know that query is wrong, but how can I achieve that? Please help me out.
// Match only documents whose "data" array pairs country=india AND date=2011.
// FIX: plain {"data.value": "india"} / {"data.value": "2011"} conditions each
// match ANY array element, so a document where "india" or "2011" appeared under
// the wrong data_name would still match. $elemMatch requires a single element
// to satisfy both data_name and value together, guaranteeing correct pairing.
db.collection.find({
  $and: [
    { "data": { $elemMatch: { "data_name": "country", "value": "india" } } },
    { "data": { $elemMatch: { "data_name": "date", "value": "2011" } } }
  ]
});