Elasticsearch Search Single Letter Java Api - 1.7 - java

Hello Elasticsearch friends.
I have a problem with my settings and mappings in the Elasticsearch Java API. I configured my index and set the mapping and settings. My index name is "orange11", the type "profile". I want Elasticsearch to return results after I type just 2 or 3 letters into my search input field. I've read up on analyzers, mappings and all this stuff, and this is what I tried.
This is my IndexService code:
@Service
public class IndexService {

    private Node node;
    private Client client;

    @Autowired
    public IndexService(Node node) throws Exception {
        this.node = node;
        client = this.node.client();

        ImmutableSettings.Builder indexSettings = ImmutableSettings.settingsBuilder();
        indexSettings.put("orange11.analysis.filter.autocomplete_filter.type", "edge_ngram");
        indexSettings.put("orange11.analysis.filter.autocomplete_filter.min.gram", 1);
        indexSettings.put("orange11.analysis.filter.autocomplete_filter.max_gram", 20);
        indexSettings.put("orange11.analysis.analyzer.autocomplete.type", "custom");
        indexSettings.put("orange11.analysis.analyzer.tokenizer", "standard");
        indexSettings.put("orange11.analysis.analyzer.filter", new String[]{"lowercase", "autocomplete_filter"});

        IndicesExistsResponse res = client.admin().indices().prepareExists("orange11").execute().actionGet();
        if (res.isExists()) {
            DeleteIndexRequestBuilder delIdx = client.admin().indices().prepareDelete("orange11");
            delIdx.execute().actionGet();
        }

        CreateIndexRequestBuilder createIndexRequestBuilder = client.admin().indices().prepareCreate("orange11").setSettings(indexSettings);

        // MAPPING GOES HERE
        XContentBuilder mappingBuilder = jsonBuilder().startObject().startObject("profile").startObject("properties")
                .startObject("name").field("type", "string").field("analyzer", "autocomplete").endObject()
                .endObject()
                .endObject();
        System.out.println(mappingBuilder.string());
        createIndexRequestBuilder.addMapping("profile ", mappingBuilder);
        createIndexRequestBuilder.execute().actionGet();

        List<Accounts> accountsList = transformJsonFileToJavaObject();
        //Get data from the jsonMap() function into a list of maps.
        //List<Map<String, Object>> dataFromJson = jsonToMap();
        createIndex(accountsList);
    }
    public List<Accounts> transformJsonFileToJavaObject() throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        List<Accounts> list = mapper.readValue(
                new File("/Users/lucaarchidiacono/IdeaProjects/moap2/MP3_MoapSampleBuild/data/index/testAccount.json"),
                TypeFactory.defaultInstance().constructCollectionType(List.class, Accounts.class));
        return list;
    }
    public void createIndex(List<Accounts> accountsList) {
        for (int i = 0; i < accountsList.size(); ++i) {
            Map<String, Object> accountMap = new HashMap<String, Object>();
            accountMap.put("id", accountsList.get(i).getId());
            accountMap.put("isActive", accountsList.get(i).isActive());
            accountMap.put("balance", accountsList.get(i).getBalance());
            accountMap.put("age", accountsList.get(i).getAge());
            accountMap.put("eyeColor", accountsList.get(i).getEyeColor());
            accountMap.put("name", accountsList.get(i).getName());
            accountMap.put("gender", accountsList.get(i).getGender());
            accountMap.put("company", accountsList.get(i).getCompany());
            accountMap.put("email", accountsList.get(i).getEmail());
            accountMap.put("phone", accountsList.get(i).getPhone());
            accountMap.put("address", accountsList.get(i).getAddress());
            accountMap.put("about", accountsList.get(i).getAbout());
            accountMap.put("greeting", accountsList.get(i).getGreeting());
            accountMap.put("favoriteFruit", accountsList.get(i).getFavoriteFruit());
            accountMap.put("url", accountsList.get(i).getUrl());

            //Request an index operation. Set the index specification such as indexName, indexType and ID.
            IndexRequestBuilder indexRequest = client.prepareIndex("orange11", "profile", Integer.toString(i)).setSource(accountMap);
            //Execute the indexRequest and get the result in indexResponse.
            IndexResponse indexResponse = indexRequest.execute().actionGet();
            if (indexResponse != null && indexResponse.isCreated()) {
                //Print out the result of indexResponse.
                System.out.println("Index has been created!");
                System.out.println("------------------------------");
                System.out.println("Index name: " + indexResponse.getIndex());
                System.out.println("Type name: " + indexResponse.getType());
                System.out.println("ID: " + indexResponse.getId());
                System.out.println("Version: " + indexResponse.getVersion());
                System.out.println("------------------------------");
            } else {
                System.err.println("Index creation failed.");
            }
        }
    }
}
Every time I want to run this code, I get this exception:
Caused by: org.elasticsearch.index.mapper.MapperParsingException: Root type mapping not empty after parsing! Remaining fields: [profile : {properties={name={analyzer=autocomplete, type=string}}}]
at org.elasticsearch.index.mapper.DocumentMapperParser.parse(DocumentMapperParser.java:278)
at org.elasticsearch.index.mapper.DocumentMapperParser.parseCompressed(DocumentMapperParser.java:192)
at org.elasticsearch.index.mapper.MapperService.parse(MapperService.java:449)
at org.elasticsearch.index.mapper.MapperService.merge(MapperService.java:307)
at org.elasticsearch.cluster.metadata.MetaDataCreateIndexService$2.execute(MetaDataCreateIndexService.java:391)
I don't know how to continue, because I don't see a '.' missing anywhere in my indexSettings. Sorry for my bad English.

Change min.gram to min_gram.
(See indexSettings.put("orange11.analysis.filter.autocomplete_filter.min.gram", 1);)
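For reference, the corrected settings lines would read as follows; only the key names change, since the edge_ngram token filter expects min_gram and max_gram with underscores:

// The edge_ngram token filter expects min_gram / max_gram as setting keys.
indexSettings.put("orange11.analysis.filter.autocomplete_filter.type", "edge_ngram");
indexSettings.put("orange11.analysis.filter.autocomplete_filter.min_gram", 1);
indexSettings.put("orange11.analysis.filter.autocomplete_filter.max_gram", 20);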

Related

Elasticsearch inner hits response

This is my query function:
public List<feed> search(String id) throws IOException {
    Query nestedQuery = NestedQuery.of(nq -> nq.path("comment")
            .innerHits(InnerHits.of(ih -> ih))
            .query(MatchQuery.of(mq -> mq.field("comment.c_text").query(id))._toQuery()))
            ._toQuery();
    Query termQueryTitle = TermQuery.of(tq -> tq.field("title").value(id))._toQuery();
    Query termQueryBody = TermQuery.of(tq -> tq.field("body").value(id))._toQuery();
    Query boolQuery = BoolQuery.of(bq -> bq.should(nestedQuery, termQueryBody, termQueryTitle))._toQuery();
    SearchRequest searchRequest = SearchRequest.of(s -> s.index(indexName).query(boolQuery));

    var response = elasticsearchClient.search(searchRequest, feed.class);
    for (var hit : response.hits().hits()) {
        System.out.println("this is inner hit response: " + hit.innerHits().get("comment").hits().hits());
    }

    List<Hit<feed>> hits = response.hits().hits();
    List<feed> feeds = new ArrayList<>();
    feed f = null;
    for (Hit object : hits) {
        f = (feed) object.source();
        feeds.add(f);
    }
    return feeds;
}
I have added this code:
for (var hit : response.hits().hits()) {
    System.out.println("this is inner hit response: " + hit.innerHits().get("comment").hits().hits());
}
If it finds 2 records, it gives me the references of those 2 records but doesn't show me the actual records. For example, when it finds 2 records in the inner hits, the output is as follows:
this is inner hit response [co.elastic.clients.elasticsearch.core.search.Hit@75679b1a]
this is inner hit response [co.elastic.clients.elasticsearch.core.search.Hit@1916d9c6]
Can anyone help me print out the actual records?
This works for me in the console:
for (var hit : response.hits().hits()) {
    var innerHits = hit.innerHits().get("comment").hits().hits();
    for (var innerHit : innerHits) {
        JsonData source = innerHit.source();
        String jsonDataString = source.toString();
        System.out.println("Matched comments: " + jsonDataString);
    }
}
I created a class Comment with a property "c_text" and mapped each inner hit source to it before adding it to a list of comments:
var comments = new ArrayList<Comment>();
for (var hit : response.hits().hits()) {
    comments.addAll(hit.innerHits().get("comment").hits().hits().stream()
            .map(h -> h.source().to(Comment.class))
            .collect(Collectors.toList()));
}
System.out.println(comments);
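For reference, a minimal sketch of such a Comment class; this POJO is an assumption, the only requirement being a field that matches the c_text property of the nested documents:

// Hypothetical POJO for the nested "comment" objects; the field name mirrors
// the "comment.c_text" path used in the nested query above.
public class Comment {
    private String c_text;

    public String getC_text() { return c_text; }
    public void setC_text(String c_text) { this.c_text = c_text; }

    @Override
    public String toString() { return "Comment{c_text='" + c_text + "'}"; }
}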

How to implement pagination logic using ElasticsearchRepository in Java

Is there any way to use ElasticsearchRepository, which extends PagingAndSortingRepository? That would give built-in support for pagination and sorting.
But I am not able to change my implementation to use ElasticsearchRepository. I just want to know how to apply:
How to use POST search
How to use esQuery, which basically provides the search query:
String esQuery = String.format(searchTextQuery, startDate, endDate, formattedQueries);
How to use the POST URI that I am building as below:
Request request = new Request("GET", "/" + user.getUserId() + "/_search");
So, with all of the above, how do I use pagination with ElasticsearchRepository?
Below is my service code:
public List getResponses(ZonedDateTime startDate, ZonedDateTime endDate,
        String cat, FieldFilterVM filter, String query) throws IOException {
    User user = (User) SecurityContextHolder.getContext().getAuthentication().getPrincipal();
    Request request = new Request("GET", "/" + user.getUserId() + "/_search");

    List<String> matchQueries = new ArrayList<>();
    matchQueries.addAll(formatCategoryQuery(cat));
    matchQueries.addAll(formatFilterQuery(filter, false));
    if (query != null && query.length() > 0) {
        matchQueries.add(String.format(textFilterQuery, query));
    }

    StringBuilder formattedQueries = new StringBuilder();
    for (int i = 0; i < matchQueries.size(); i++) {
        formattedQueries.append(',');
        formattedQueries.append(matchQueries.get(i));
    }

    String esQuery = String.format(searchTextQuery, startDate, endDate, formattedQueries);
    request.setJsonEntity(esQuery);
    Response response = elasticSearchService.getClient().getLowLevelClient().performRequest(request);
    String responseBody = IOUtils.toString(response.getEntity().getContent(), "UTF-8");

    ObjectMapper mapper = new ObjectMapper();
    Map map = mapper.readValue(responseBody, new TypeReference<Map>() {
    });
    List matchedTextResponses = new ArrayList();
    if (map != null) {
        List<Map> textResponses = (List<Map>) ((Map) map.get("hits")).get("hits");
        for (Map textResponse : textResponses) {
        }
    }
    return matchedTextResponses;
}
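For orientation, here is a minimal sketch of what built-in pagination looks like with ElasticsearchRepository; the TextResponse entity, its fields, and the repository name are hypothetical, not part of the code above:

// Hypothetical sketch: a Spring Data Elasticsearch repository with built-in paging.
public interface TextResponseRepository extends ElasticsearchRepository<TextResponse, String> {
    // Derived query; Spring builds the Elasticsearch query and applies the Pageable.
    Page<TextResponse> findByCategory(String category, Pageable pageable);
}

// Usage: page 0 with 20 hits, sorted by an assumed "createdDate" field, newest first.
Page<TextResponse> page = repository.findByCategory("news",
        PageRequest.of(0, 20, Sort.by(Sort.Direction.DESC, "createdDate")));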

elasticsearch: returned TotalPages not correct

I have 107 documents in my index. I created a method to return all of these documents with pagination; in my case each page contains 20 documents, so I should logically get 6 pages: the first 5 pages with 20 documents each and the 6th with only 7. The problem is that the method always returns 1 page, not 6.
@Override
@Transactional(readOnly = true)
public Page<Convention> findAll(Pageable pageable) throws UnknownHostException {
    String[] parts = pageable.getSort().toString().split(":");
    SortOrder sortOrder;
    if ("DESC".equalsIgnoreCase(parts[1].trim())) {
        sortOrder = SortOrder.DESC;
    } else {
        sortOrder = SortOrder.ASC;
    }
    SearchResponse searchResponse = elasticsearchConfiguration.getTransportClient()
            .prepareSearch("convention")
            .setTypes("convention")
            .setQuery(QueryBuilders.matchAllQuery())
            .addSort(SortBuilders.fieldSort(parts[0]).order(sortOrder))
            .setSize(pageable.getPageSize())
            .setFrom(pageable.getPageNumber() * pageable.getPageSize())
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .get();
    return searchResults(searchResponse);
}
private Page<Convention> searchResults(SearchResponse searchResponse) {
    List<Convention> conventions = new ArrayList<>();
    for (SearchHit hit : searchResponse.getHits()) {
        if (searchResponse.getHits().getHits().length <= 0) {
            return null;
        }
        String sourceAsString = hit.getSourceAsString();
        if (sourceAsString != null) {
            ObjectMapper mapper = new ObjectMapper();
            Convention convention = null;
            try {
                convention = mapper.readValue(sourceAsString, Convention.class);
            } catch (IOException e) {
                LOGGER.error("Error", e);
            }
            conventions.add(convention);
        }
    }
    return new PageImpl<>(conventions);
}
http://localhost:8081/api/conventions?page=0&size=20&sort=shortname,DESC
When I execute this API, I get TotalElements=20, Number=0, TotalPages=1, and Size=0:
@GetMapping("/conventions")
public ResponseEntity<List<Convention>> getAllConventions(final Pageable pageable) throws UnknownHostException {
    final Page<Convention> page = conventionService.findAll(pageable);
    System.out.println("-------------- 1:" + page.getTotalElements()); // 20
    System.out.println("-------------- 2:" + page.getNumber());        // 0
    System.out.println("-------------- 3:" + page.getTotalPages());    // 1
    System.out.println("-------------- 4:" + page.getSize());          // 0
    HttpHeaders headers = new HttpHeaders();
    headers.add("X-Total-Count", Long.toString(page.getTotalElements()));
    return new ResponseEntity<>(page.getContent(), headers, HttpStatus.OK);
}
This issue is addressed and fixed in the current stable version of spring-data-elasticsearch, 3.0.7.
See https://jira.spring.io/browse/DATAES-402
I think it comes from this line: return new PageImpl<>(conventions);
Maybe you should pass along the total size of the response hits, because you override the query and the page otherwise only knows about the hits it contains.
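A minimal sketch of that fix, assuming the Pageable from findAll is passed through and that getTotalHits() on this client version returns the overall hit count:

// Build the page from the request's Pageable plus the total hit count,
// so that getTotalPages() can be derived correctly.
private Page<Convention> searchResults(SearchResponse searchResponse, Pageable pageable) {
    List<Convention> conventions = new ArrayList<>();
    // ... map each SearchHit to a Convention exactly as before ...
    long totalHits = searchResponse.getHits().getTotalHits();
    return new PageImpl<>(conventions, pageable, totalHits);
}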

Lucene: Payloads and Similarity Function --- Always same Payload value

Overview
I want to implement a Lucene indexer/searcher that uses the payload feature, which allows attaching metadata to text. In my specific case, I add weights (which can be understood as percentage probabilities, between 0 and 100) to conceptual tags, in order to use them to override the standard Lucene TF-IDF weighting. I am puzzled by the behaviour, and I believe there is something wrong with the Similarity class that I overrode, but I cannot figure out what.
Example
When I run a search query (e.g. "concept:red"), I find that each payload is always the first number that was passed through MyPayloadSimilarity (in the code example, this is 1.0), not 1.0, 50.0 and 100.0. As a result, all documents get the same payload and the same score. Instead, the data should rank picture #1 first, with a payload of 100.0, followed by picture #2, then picture #3, with very diverse scores. I can't get my head around it.
Here are the results of the run:
Query: concept:red
===> docid: 0 payload: 1.0
===> docid: 1 payload: 1.0
===> docid: 2 payload: 1.0
Number of results:3
-> docid: 3.jpg score: 0.2518424
-> docid: 2.jpg score: 0.2518424
-> docid: 1.jpg score: 0.2518424
What is wrong? Did I misunderstand something about payloads?
Code
Enclosed is my code as a self-contained example, to make it as easy as possible for you to run it, should you consider this option.
public class PayloadShowcase {

    public static void main(String s[]) {
        PayloadShowcase p = new PayloadShowcase();
        p.run();
    }

    public void run() {
        // Step 1: indexing
        MyPayloadIndexer indexer = new MyPayloadIndexer();
        indexer.index();
        // Step 2: searching
        MyPayloadSearcher searcher = new MyPayloadSearcher();
        searcher.search("red");
    }

    public class MyPayloadAnalyzer extends Analyzer {

        private PayloadEncoder encoder;

        MyPayloadAnalyzer(PayloadEncoder encoder) {
            this.encoder = encoder;
        }

        @Override
        protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
            Tokenizer source = new WhitespaceTokenizer(reader);
            TokenStream filter = new LowerCaseFilter(source);
            filter = new DelimitedPayloadTokenFilter(filter, '|', encoder);
            return new TokenStreamComponents(source, filter);
        }
    }
    public class MyPayloadIndexer {

        public MyPayloadIndexer() {}

        public void index() {
            try {
                Directory dir = FSDirectory.open(new File("D:/data/indices/sandbox"));
                Analyzer analyzer = new MyPayloadAnalyzer(new FloatEncoder());
                IndexWriterConfig iwconfig = new IndexWriterConfig(Version.LUCENE_4_10_1, analyzer);
                iwconfig.setSimilarity(new MyPayloadSimilarity());
                iwconfig.setOpenMode(IndexWriterConfig.OpenMode.CREATE);

                // load mappings and classifiers
                HashMap<String, String> mappings = this.loadDataMappings();
                HashMap<String, HashMap> cMaps = this.loadData();

                IndexWriter writer = new IndexWriter(dir, iwconfig);
                indexDocuments(writer, mappings, cMaps);
                writer.close();
            } catch (IOException e) {
                System.out.println("Exception while indexing: " + e.getMessage());
            }
        }
        private void indexDocuments(IndexWriter writer, HashMap<String, String> fileMappings, HashMap<String, HashMap> concepts) throws IOException {
            Set fileSet = fileMappings.keySet();
            Iterator<String> iterator = fileSet.iterator();
            while (iterator.hasNext()) {
                // unique file information
                String fileID = iterator.next();
                String filePath = fileMappings.get(fileID);

                // create a new, empty document
                Document doc = new Document();

                // path of the indexed file
                Field pathField = new StringField("path", filePath, Field.Store.YES);
                doc.add(pathField);

                // look up all concept probabilities for this fileID
                Iterator<String> conceptIterator = concepts.keySet().iterator();
                while (conceptIterator.hasNext()) {
                    String conceptName = conceptIterator.next();
                    HashMap conceptMap = concepts.get(conceptName);
                    doc.add(new TextField("concept", ("" + conceptName + "|").trim() + (conceptMap.get(fileID) + "").trim(), Field.Store.YES));
                }
                writer.addDocument(doc);
            }
        }
        public HashMap<String, String> loadDataMappings() {
            HashMap<String, String> h = new HashMap<>();
            h.put("1", "1.jpg");
            h.put("2", "2.jpg");
            h.put("3", "3.jpg");
            return h;
        }

        public HashMap<String, HashMap> loadData() {
            HashMap<String, HashMap> h = new HashMap<>();
            HashMap<String, String> green = new HashMap<>();
            green.put("1", "50.0");
            green.put("2", "1.0");
            green.put("3", "100.0");
            HashMap<String, String> red = new HashMap<>();
            red.put("1", "100.0");
            red.put("2", "50.0");
            red.put("3", "1.0");
            HashMap<String, String> blue = new HashMap<>();
            blue.put("1", "1.0");
            blue.put("2", "50.0");
            blue.put("3", "100.0");
            h.put("green", green);
            h.put("red", red);
            h.put("blue", blue);
            return h;
        }
    }
    class MyPayloadSimilarity extends DefaultSimilarity {

        @Override
        public float scorePayload(int docID, int start, int end, BytesRef payload) {
            float pload = 1.0f;
            if (payload != null) {
                pload = PayloadHelper.decodeFloat(payload.bytes);
            }
            System.out.println("===> docid: " + docID + " payload: " + pload);
            return pload;
        }
    }
    public class MyPayloadSearcher {

        public MyPayloadSearcher() {}

        public void search(String queryString) {
            try {
                IndexReader reader = DirectoryReader.open(FSDirectory.open(new File("D:/data/indices/sandbox")));
                IndexSearcher searcher = new IndexSearcher(reader);
                searcher.setSimilarity(new MyPayloadSimilarity());
                PayloadTermQuery query = new PayloadTermQuery(new Term("concept", queryString),
                        new AveragePayloadFunction());
                System.out.println("Query: " + query.toString());
                TopDocs topDocs = searcher.search(query, 999);
                ScoreDoc[] hits = topDocs.scoreDocs;
                System.out.println("Number of results: " + hits.length);

                // output
                for (int i = 0; i < hits.length; i++) {
                    Document doc = searcher.doc(hits[i].doc);
                    System.out.println("-> docid: " + doc.get("path") + " score: " + hits[i].score);
                }
                reader.close();
            } catch (Exception e) {
                System.out.println("Exception while searching: " + e.getMessage());
            }
        }
    }
}
The PayloadHelper.decodeFloat call in MyPayloadSimilarity is incorrect. It is also necessary to pass the payload.offset parameter, like this:
pload = PayloadHelper.decodeFloat(payload.bytes, payload.offset);
I hope it helps.
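Applied to MyPayloadSimilarity above, the corrected override would look like this (identical apart from the decodeFloat call):

@Override
public float scorePayload(int docID, int start, int end, BytesRef payload) {
    float pload = 1.0f;
    if (payload != null) {
        // Decode starting at the payload's offset into the underlying byte array,
        // which may be shared; index 0 is not guaranteed to be this payload's start.
        pload = PayloadHelper.decodeFloat(payload.bytes, payload.offset);
    }
    System.out.println("===> docid: " + docID + " payload: " + pload);
    return pload;
}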

Get all tasks under specific iterations (matching the conditions)?

I want to use the Rally REST Toolkit for Java to get all iterations between 2014-06-01 and 2014-06-08, and then get all tasks under those iterations. How can I do this?
Any help would be great! Many thanks!
To get the tasks of iterations that fall within certain dates, use this syntax:
taskRequest.setQueryFilter(new QueryFilter("Iteration.StartDate", ">=", "2014-06-01").and(new QueryFilter("Iteration.EndDate", "<=", "2014-06-08")));
and set the workspace of the request:
taskRequest.setWorkspace("/workspace/123456"); // use your ObjectID
so that child iterations (from all projects in the workspace) with the same start and end dates are included in the results.
public static void main(String[] args) throws URISyntaxException, IOException {
    String host = "https://rally1.rallydev.com";
    String username = "user@co.com";
    String password = "psw";
    String workspaceRef = "/workspace/12352608129";
    String applicationName = "ExampleFindTasks";
    RallyRestApi restApi = null;
    try {
        restApi = new RallyRestApi(new URI(host), username, password);
        restApi.setApplicationName(applicationName);

        QueryRequest taskRequest = new QueryRequest("Task");
        taskRequest.setFetch(new Fetch("Name", "FormattedID", "Iteration", "Project"));
        taskRequest.setLimit(1000);
        taskRequest.setScopedDown(true);
        taskRequest.setScopedUp(false);
        taskRequest.setWorkspace(workspaceRef);
        taskRequest.setQueryFilter(
                new QueryFilter("Iteration.StartDate", ">=", "2014-06-01").and(
                new QueryFilter("Iteration.EndDate", "<=", "2014-06-08")));

        QueryResponse taskQueryResponse = restApi.query(taskRequest);
        int numberOfResults = taskQueryResponse.getTotalResultCount();
        System.out.println(numberOfResults);
        if (numberOfResults > 0) {
            for (int i = 0; i < numberOfResults; i++) {
                JsonObject taskJsonObject = taskQueryResponse.getResults().get(i).getAsJsonObject();
                System.out.println("Name: " + taskJsonObject.get("Name") + " " +
                        "FormattedID: " + taskJsonObject.get("FormattedID"));
                JsonElement iteration = taskJsonObject.get("Iteration");
                JsonElement project = taskJsonObject.get("Project");
                try {
                    JsonObject iterationObject = iteration.getAsJsonObject();
                    JsonObject projectObject = project.getAsJsonObject();
                    System.out.println(iterationObject.get("Name"));
                    System.out.println(projectObject.get("Name"));
                } catch (java.lang.IllegalStateException ise) {
                    System.out.println("ise");
                }
            }
        }
    } finally {
        if (restApi != null) {
            restApi.close();
        }
    }
}
