Return a String array in place of a String - Java

I have a class that returns a String value, and I want it to return a String array instead. Please tell me how I can do that.
I have an XML file, resource.xml:
<prompts>
<prompt id="p1">welcome to</prompt>
<prompt id ="p2">stack overflow</prompt>
<prompt id="p3">You entered</prompt>
<prompt id="p4">the correct number</prompt>
</prompts>
I am parsing it with a DOM parser (DocumentBuilder):
public class XmlReaderPrompt {

    public List<PromptBean> load(String langMode) {
        String fileName = "resource.xml";
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        InputStream prompt_configfile = Thread.currentThread().getContextClassLoader().getResourceAsStream(fileName);
        DocumentBuilder db = null;
        List<PromptBean> promptMap = new ArrayList<PromptBean>();
        try {
            try {
                db = dbf.newDocumentBuilder();
            } catch (ParserConfigurationException e) {
                e.printStackTrace();
            }
            Document doc = null;
            try {
                doc = db.parse(prompt_configfile);
            } catch (SAXException e) {
                e.printStackTrace();
            }
            NodeList nodeList = doc.getElementsByTagName("prompt");
            for (int i = 0; i < nodeList.getLength(); i++) {
                Node node = nodeList.item(i);
                if (node.getNodeType() == Node.ELEMENT_NODE) {
                    Element element = (Element) node;
                    String id = element.getAttribute("id");
                    String name = element.getAttribute("name");
                    String prompt = getTextValue(element);
                    promptMap.add(new PromptBean(id, name, prompt));
                }
            }
        } catch (Exception io) {
            io.printStackTrace();
        } finally {
            db = null;
            dbf = null;
        }
        return promptMap;
    }

    private String getTextValue(Element element) {
        String textValue = element.getFirstChild().getTextContent();
        return textValue;
    }
}
and a UserFunction class that returns the text from the XML file:
public class UserFunction {

    XmlReaderPrompt xrpObject = new XmlReaderPrompt();
    List<PromptBean> promptObject = new ArrayList<PromptBean>();

    public String getPromptFunction(String promptTag, String langMode) {
        List<PromptBean> promptObject = xrpObject.load(langMode);
        for (Iterator<PromptBean> iterator = promptObject.iterator(); iterator.hasNext();) {
            PromptBean promptBean = (PromptBean) iterator.next();
            if (promptBean.getId().equalsIgnoreCase(promptTag)) {
                return StringEscapeUtils.escapeXml(promptBean.getPrompt());
            }
        }
        return null;
    }
}
The problem is that I have to call the getPromptFunction method of the UserFunction class every time I need the text of a sub-element, like
String pr1 = userFunction.getPromptFunction("p1", "resource");
String pr2 = userFunction.getPromptFunction("p2", "resource");
String pr3 = userFunction.getPromptFunction("p3", "resource");
and then use it in a JSP page as <%=pr1%>.
So I want to use an array instead, something like
String[] pr = userFunction.getPromptFunction(new String[]{"p1", "p2", "p3"}, "resource");
So how can I do that, and how do I use the result in a JSP page?

You can do it like this:
public String[] getPromptFunction(String[] promptTags, String langMode) {
    List<PromptBean> promptObject = xrpObject.load(langMode);
    String[] temp = new String[promptTags.length];
    for (int i = 0; i < promptTags.length; i++) {
        for (PromptBean promptBean : promptObject) {
            if (promptBean.getId().equalsIgnoreCase(promptTags[i])) {
                temp[i] = StringEscapeUtils.escapeXml(promptBean.getPrompt());
                break;
            }
        }
    }
    return temp;
}
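To use it in a JSP page, here is a minimal sketch (assuming the UserFunction class above is imported into the page via a page directive; the variable names are only illustrative):
<%
    UserFunction userFunction = new UserFunction();
    // p1..p3 map to "welcome to", "stack overflow", "You entered" in resource.xml
    String[] pr = userFunction.getPromptFunction(new String[]{"p1", "p2", "p3"}, "resource");
%>
<p><%= pr[0] %> <%= pr[1] %></p>
<p><%= pr[2] %></p>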

Related

NullPointerException while invoking method getInputParameters()

I am doing a parameterized Java mapping in SAP PI 7.5 with the following parameters bound by the tag names specified in the Operation Mapping
(BS_NAME, CHANNEL_NAME, EMAIL).
When testing the Java mapping below in the test tab of the OM using the payload, it gives the following error:
NullPointerException while trying to invoke the method com.sap.aii.mapping.api.TransformationInput.getInputParameters() of a null object loaded from field of an object loaded from local variable "this**"
I debugged the code but didn't find the issue; any suggestions?
Please find below the Java code for the XmlNFe_To_Mail class. A BodyText class is also used to fetch some content. The error is encountered in the XmlNFe_To_Mail class.
public class XmlNFe_To_Mail extends AbstractTransformation {
private String prefixoSubject = new String();
private String emailFrom = new String();
private String prefixoDocumento = new String();
private String frase = new String();
private String gap = "\n\r";
private AbstractTrace trace = null;
private Map map = null;
private String BSSystem = "";
private String ComChannel = "";
private String Emails = "";
private final String NFE_EMPRESA = "NFE Company: ";
private final String NFe = "NFE";
private final String NFe_Mail = "nfe@company.com";
TransformationInput input = null;
TransformationOutput output = null;
public void execute(InputStream in , OutputStream out) throws StreamTransformationException {
// TODO Auto-generated method stub
{
BSSystem = input.getInputParameters().getString("BS_NAME");
ComChannel = input.getInputParameters().getString("CHANNEL_NAME");
Emails = input.getInputParameters().getString("EMAIL");
try {
configParamEmail();
BufferedReader inpxml = new BufferedReader(new InputStreamReader( in ));
StringBuffer buffer = new StringBuffer();
String line = "";
String quebra = System.getProperty("line.separator");
while ((line = inpxml.readLine()) != null) {
line.replaceAll("\r\n", "");
line.replaceAll(quebra, "");
line.replaceAll(" />", "/>");
line.replaceAll(" />", "/>");
line.replaceAll(" />", "/>");
buffer.append(line);
}
String inptxml = buffer.toString();
inptxml = inptxml.replace("\r\n", "");
inptxml = inptxml.replaceAll(quebra, "");
inptxml = inptxml.replaceAll(" />", "/>");
inptxml = inptxml.replaceAll(" />", "/>");
inptxml = inptxml.replaceAll(" />", "/>");
String idNFe = "";
String numeroNF = "";
String idEvent = "";
idNFe = inptxml.substring(inptxml.indexOf("<chNFe>") + 7, inptxml.indexOf("</chNFe>"));
numeroNF = idNFe.substring(25, 34);
if (inptxml.indexOf("infEvento") > 0) {
idEvent = inptxml.substring(inptxml.indexOf("<tpEvento>") + 10, inptxml.indexOf("</tpEvento>"));
if (idEvent.length() > 0) {
if (idEvent.equals("111111")) {
this.setPrefixoDocumento(this.getPrefixoDocumento().replaceAll("NFE", "CancNFe"));
this.setPrefixoSubject(this.getPrefixoSubject().replaceAll("NFE", "NFE CANCELADA"));
} else if (idEvent.equals("100000")) {
this.setPrefixoDocumento(this.getPrefixoDocumento().replaceAll("NFE", "CCE"));
this.setPrefixoSubject(this.getPrefixoSubject().replaceAll("NFE", "CCE"));
} else {
this.setPrefixoDocumento(this.getPrefixoDocumento().replaceAll("NFE", "ManDest"));
this.setPrefixoSubject(this.getPrefixoSubject().replaceAll("NFE", "MANIFESTO"));
}
}
}
Channel chn = null;
RfcAccessor rfc = null;
String email = "";
String pdf = "";
chn = LookupService.getChannel(getBSystem(), getCChannel());
rfc = LookupService.getRfcAccessor(chn);
String req = "<ns0:TEST_NFE_MAIL_OPT xmlns:ns0='urn:sap-com:document:sap:rfc:functions'><I_ACCESS_KEY>" +
idNFe + "<I_ACCESS_KEY></ns0:ZOTC_NFE_EMAIL_OUTPUT>";
InputStream inputRFC = new ByteArrayInputStream(req.getBytes("UTF-8"));
XmlPayload rfcPayload = LookupService.getXmlPayload(inputRFC);
XmlPayload result = rfc.call(rfcPayload);
InputStream resp = result.getContent();
DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
Document doc = builder.parse(resp);
Node node = (Node) doc.getElementsByTagName("E_EMAIL").item(0);
if (node.hasChildNodes() && !node.getFirstChild().getNodeValue().equals("")) {
email = node.getFirstChild().getNodeValue();
}
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
TransformerFactory tf = TransformerFactory.newInstance();
Transformer transform = tf.newTransformer();
Document docout = db.newDocument();
Element root = docout.createElement("ns0:Mail");
root.setAttribute("xmlns:ns0", "http://sap.com/xi/XI/Mail/30");
docout.appendChild(root);
Element subject = docout.createElement("Subject");
root.appendChild(subject);
Text subjectText = docout.createTextNode(getPrefixoSubject() + numeroNF);
subject.appendChild(subjectText);
Element from = docout.createElement("From");
root.appendChild(from);
Text fromText = docout.createTextNode(getEmailFrom());
from.appendChild(fromText);
if (email.length() > 0) {
email += ";";
} else {
email = this.getEmaillist();
}
Element to = docout.createElement("To");
root.appendChild(to);
Text toText = docout.createTextNode(email);
to.appendChild(toText);
Element contentType = docout.createElement("Content_Type");
root.appendChild(contentType);
Text contentTypeText = docout.createTextNode("multipart/mixed;boundary=--AaZz");
contentType.appendChild(contentTypeText);
BodyText texto = new BodyText(idNFe, getFrase(), inptxml, pdf);
Element content = docout.createElement("Content");
root.appendChild(content);
Text contentText = null;
if ("NFE Company: ".equalsIgnoreCase(getPrefixoSubject())) {
contentText = docout.createTextNode(texto.getnfeText());
} else if ("NFE CANCELADA Company: ".equalsIgnoreCase(getPrefixoSubject())) {
contentText = docout.createTextNode(texto.getCnfeText());
} else if ("CCE Company: ".equalsIgnoreCase(getPrefixoSubject())) {
contentText = docout.createTextNode(texto.getcceText());
}
content.appendChild(contentText);
DOMSource domS = new DOMSource(docout);
transform.transform((domS), new StreamResult(out));
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ParserConfigurationException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (TransformerConfigurationException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (TransformerException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (SAXException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (Exception e) {
// Exception Handling }
}
}
}
public String getGap() {
return gap;
}
public void setGap(String gap) {
this.gap = gap;
}
public String getFrase() {
return frase;
}
public void setFrase(String frase) {
this.frase = frase;
}
public String getBSystem() {
return BSSystem;
}
public String getEmaillist() {
return Emails;
}
public String getCChannel() {
return ComChannel;
}
public String getPrefixoSubject() {
return prefixoSubject;
}
public void setPrefixoSubject(String prefixoSubject) {
this.prefixoSubject = prefixoSubject;
}
public String getEmailFrom() {
return emailFrom;
}
public void setEmailFrom(String emailFrom) {
this.emailFrom = emailFrom;
}
public String getPrefixoDocumento() {
return prefixoDocumento;
}
public void setPrefixoDocumento(String prefixoDocumento) {
this.prefixoDocumento = prefixoDocumento;
}
private void configParamEmail() {
setEmailFrom(NFe_Mail);
setPrefixoDocumento(NFe);
setPrefixoSubject(NFE_EMPRESA);
}
@Override
public void transform(TransformationInput in , TransformationOutput out) throws StreamTransformationException {
this.execute( in .getInputPayload().getInputStream(), out.getOutputPayload().getOutputStream());
}
/*public void setParameter(Map arg0) {
// TODO Auto-generated method stub
}*/
}
Kindly let me know what changes should be done.
Thanks.
The TransformationInput/TransformationOutput instances are missing; the fields
TransformationInput input = null;
TransformationOutput output = null;
are never assigned, so input.getInputParameters() throws a NullPointerException. You need to assign them, for example from the objects passed into transform(), or through a setter, before execute() reads the parameters.
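A minimal sketch of one way to wire this up, assuming the rest of the class stays as posted: store the references in transform() before delegating to execute().
@Override
public void transform(TransformationInput in, TransformationOutput out) throws StreamTransformationException {
    // Keep the references so execute() can read the operation-mapping parameters
    this.input = in;
    this.output = out;
    this.execute(in.getInputPayload().getInputStream(), out.getOutputPayload().getOutputStream());
}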

Lucene IndexWriter.commit() doesn't finish in Ubuntu

Here is the initialization code:
public class Main {
public void index(String input_path, String index_dir, String separator, String extension, String field, DataHandler handler) {
Index index = new Index(handler);
index.initWriter(index_dir, new StandardAnalyzer());
index.run(input_path, field, extension, separator);
}
public List<?> search(String input_path, String index_dir, String separator, String extension, String field, DataHandler handler) {
Search search = new Search(handler);
search.initSearcher(index_dir, new StandardAnalyzer());
return search.runUsingFiles(input_path, field, extension, separator);
}
@SuppressWarnings("unchecked")
public static void main(String[] args) {
String lang = "en-US";
String dType = "data";
String train = "res/input/" +lang+ "/" +dType +"/train/";
String test = "res/input/"+ lang+ "/" +dType+ "/test/";
String separator = "\\|";
String extension = "csv";
String index_dir = "res/index/" +lang+ "." +dType+ ".index";
String output_file = "res/result/" +lang+ "." +dType+ ".output.json";
String searched_field = "utterance";
Main main = new Main();
DataHandler handler = new DataHandler();
main.index(train, index_dir, separator, extension, searched_field, handler);
//List<JSONObject> result = (List<JSONObject>) main.search(test, index_dir, separator, extension, searched_field, handler);
//handler.writeOutputJson(result, output_file);
}
}
And this is my Index class:
public class Index {
private IndexWriter writer;
private DataHandler handler;
public Index(DataHandler handler) {
this.handler = handler;
}
public Index() {
this(new DataHandler());
}
public void initWriter(String index_path, Directory store, Analyzer analyzer) {
IndexWriterConfig config = new IndexWriterConfig(analyzer);
try {
this.writer = new IndexWriter(store, config);
} catch (IOException e) {
e.printStackTrace();
}
}
public void initWriter(String index_path, Analyzer analyzer) {
try {
initWriter(index_path, FSDirectory.open(Paths.get(index_path)), analyzer);
} catch (IOException e) {
e.printStackTrace();
}
}
public void initWriter(String index_path) {
List<String> stopWords = Arrays.asList();
CharArraySet stopSet = new CharArraySet(stopWords, false);
initWriter(index_path, new StandardAnalyzer(stopSet));
}
@SuppressWarnings("unchecked")
public void indexDocs(List<?> datas, String field) throws IOException {
FieldType fieldType = new FieldType();
FieldType fieldType2 = new FieldType();
fieldType.setStored(true);
fieldType.setTokenized(true);
fieldType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
fieldType2.setStored(true);
fieldType2.setTokenized(false);
fieldType2.setIndexOptions(IndexOptions.DOCS);
for(int i = 0 ; i < datas.size() ; i++) {
Map<String,String> temp = (Map<String,String>) datas.get(i);
Document doc = new Document();
for(String key : temp.keySet()) {
if(key.equals(field))
continue;
doc.add(new Field(key, temp.get(key), fieldType2));
}
doc.add(new Field(field, temp.get(field), fieldType));
this.writer.addDocument(doc);
}
}
public void run(String path, String field, String extension, String separator) {
List<File> files = this.handler.getInputFiles(path, extension);
List<?> data = this.handler.readDocs(files, separator);
try {
System.out.println("start index");
indexDocs(data, field);
this.writer.commit();
this.writer.close();
System.out.println("done");
} catch (IOException e) {
e.printStackTrace();
}
}
public void run(String path) {
run(path, "search_field", "csv", "\t");
}
}
I made a simple search module using Java and Lucene.
The module consists of two phases, index and search.
In the index phase, it reads CSV files, converts each row to a Document, and adds it to the IndexWriter using IndexWriter.addDocument().
Finally, it calls IndexWriter.commit().
This works fine on my local PC (Windows),
but on an Ubuntu PC IndexWriter.commit() never finishes.
IndexWriter.flush() doesn't return either.
What is the problem?
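To narrow this down, it can help to take the CSV reading (DataHandler) out of the picture and commit a couple of hard-coded documents directly on the Ubuntu machine. A minimal, self-contained sketch using the plain Lucene API (the index path and field name are only illustrative); if this completes, the hang is more likely in the data-reading step than in commit() itself:
import java.nio.file.Paths;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.FSDirectory;

public class CommitCheck {
    public static void main(String[] args) throws Exception {
        // Hypothetical test directory, separate from the real index
        FSDirectory dir = FSDirectory.open(Paths.get("res/index/commit.check"));
        IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
        Document doc = new Document();
        doc.add(new TextField("utterance", "hello world", Store.YES));
        writer.addDocument(doc);
        System.out.println("committing...");
        writer.commit();   // if this returns promptly, commit itself works on this machine
        writer.close();
        System.out.println("done");
    }
}
If this sketch finishes, printing the size of the list returned by DataHandler.readDocs() before indexDocs() runs would be a reasonable next check.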

How to parse a JSON object from a REST endpoint?

I want to parse a JSON object from an endpoint (this one: https://api.coinmarketcap.com/v1/ticker/bitcoin/) and store the value of a specific attribute, in this case the name, in a variable.
This is the error I get:
java.lang.IllegalStateException: Expected a name but was STRING...
AsyncTask.execute(new Runnable() {
@Override
public void run() {
// All your networking logic
// should be here
try {
String u = "https://api.coinmarketcap.com/v1/ticker/bitcoin";
URL coinMarketCapApi = new URL(u);
HttpsURLConnection myConnection = (HttpsURLConnection) coinMarketCapApi.openConnection();
myConnection.setRequestProperty("User-Agent", "my-rest-app-v0.1");
if (myConnection.getResponseCode() == 200) {
// Success
InputStream responseBody = myConnection.getInputStream();
InputStreamReader responseBodyReader =
new InputStreamReader(responseBody, "UTF-8");
JsonReader jsonReader = new JsonReader(responseBodyReader);
jsonReader.beginArray();
while (jsonReader.hasNext()) {
String key = jsonReader.nextName();
if (key.equals("name")) {
String value = jsonReader.nextName();
break; // Break out of the loop
} else {
jsonReader.skipValue();
}
}
jsonReader.close();
myConnection.disconnect();
} else {
// Error handling code goes here
}
} catch (MalformedURLException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
});
You can convert the InputStream to a String and then create a JSONArray from that string, like this:
StringWriter writer = new StringWriter();
IOUtils.copy(inputStream, writer, encoding);
String theString = writer.toString();
JSONArray jsonarray = new JSONArray(theString);
This way you don't have to manually construct the array.
Use this dependency for JSONArray:
https://mvnrepository.com/artifact/org.json/json
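A small follow-up sketch of how the name could then be read from the first element of that array, assuming the org.json types above (the endpoint returns an array of ticker objects):
JSONObject first = jsonarray.getJSONObject(0); // first coin in the response
String name = first.getString("name");         // e.g. "Bitcoin"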
You can fix the problem using Gson:
https://github.com/google/gson
com.google.gson.stream.JsonReader jsonReader =
        new com.google.gson.stream.JsonReader(new InputStreamReader(responseBody));
java.lang.reflect.Type listType = new com.google.gson.reflect.TypeToken<ArrayList<Coin>>() {}.getType();
ArrayList<Coin> coins = new Gson().fromJson(jsonReader, listType);
coins.forEach(coin -> System.out.println(coin.getName()));
public class Coin{
private String id;
private String name;
private String symbol;
private int rank;
@SerializedName("price_usd")
private double priceUsd;
...........
public String getId() {
return id;
}
public String getName() {
return name;
}
public String getSymbol() {
return symbol;
}
public int getRank() {
return rank;
}
public double getPriceUsd() {
return priceUsd;
}
..........
}

Parse CSV using BeanUtilsBean

I am trying to parse a CSV file and map its fields to a POJO class. However, I can see that the mapping is not done correctly.
I am trying to map the CSV header to the fields of this POJO:
public class carCSVFileInputBean {
private long Id;
private String shortName;
private String Name;
private String Type;
private String Environment;
//getter and setters
}
Can someone please take a look at my code:
public class carCSVUtil {
private static Log log = LogFactory.getLog(carCSVUtil.class);
private static final List<String> fileHeaderFields = new ArrayList<String>();
private static final String UTF8CHARSET = "UTF-8";
static {
for (Field f : carCSVFileInputBean.class.getDeclaredFields()) {
fileHeaderFields.add(f.getName());
}
}
public static List<carCSVFileInputBean> getCSVInputList(InputStream inputStream) {
CSVReader reader = null;
List<carCSVFileInputBean> csvList = null;
carCSVFileInputBean inputRecord = null;
String[] header = null;
String[] row = null;
try {
reader = new CSVReader(new InputStreamReader(inputStream, UTF8CHARSET));
csvList = new ArrayList<carCSVFileInputBean>();
header = reader.readNext();
boolean isEmptyLine = true;
while ((row = reader.readNext()) != null) {
isEmptyLine = true;
if (!(row.length == 1 && StringUtils.isBlank(row[0]))) { // not an empty line, not even containing ','
inputRecord = new carCSVFileInputBean();
isEmptyLine = populateFields(inputRecord, header, row);
if (!isEmptyLine)
csvList.add(inputRecord);
}
}
} catch (IOException e) {
log.debug("IOException while accessing carCSVFileInputBean: " + e);
return null;
} catch (IllegalAccessException e) {
log.debug("IllegalAccessException while accessing carCSVFileInputBean: " + e);
return null;
} catch (InvocationTargetException e) {
log.debug("InvocationTargetException while copying carCSVFileInputBean properties: " + e);
return null;
} catch (Exception e) {
log.debug("Exception while parsing CSV file: " + e);
return null;
} finally {
try {
if (reader != null)
reader.close();
} catch (IOException ioe) {}
}
return csvList;
}
protected static boolean populateFields(carCSVFileInputBean inputRecord, String[] header, String[] row) throws IllegalAccessException, InvocationTargetException {
boolean isEmptyLine = true;
for (int i = 0; i < row.length; i++) {
String val = row[i];
if (!StringUtils.isBlank(val)) {
BeanUtilsBean.getInstance().copyProperty(inputRecord, header[i], val);
isEmptyLine = false;
}
}
return isEmptyLine;
}
}
I found the solution - the headers in the CSV file are expected to begin with a lowercase letter, so that they match the bean property names.
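If editing the CSV itself is not an option, another approach is to normalize the header names before handing them to copyProperty. A small sketch along the lines of the populateFields method above (it assumes each header differs from the field name only in the case of its first letter; the helper name is made up):
// Hypothetical helper: "Name" -> "name", "ShortName" -> "shortName"
private static String toPropertyName(String header) {
    if (header == null || header.isEmpty()) {
        return header;
    }
    return Character.toLowerCase(header.charAt(0)) + header.substring(1);
}

// ...and inside populateFields:
// BeanUtilsBean.getInstance().copyProperty(inputRecord, toPropertyName(header[i]), val);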

Elasticsearch: Adding manual mapping using Java

I can't change the mapping. Can anybody help me find the bug in my code?
I found this standard way of changing the mapping in several tutorials, but when I read the mapping back right after creating it manually, it is blank.
After inserting some data a mapping does appear, because ES then applies the default one. To be more specific, see the code below.
public class ElasticTest {
private String dbname = "ElasticSearch";
private String index = "indextest";
private String type = "table";
private Client client = null;
private Node node = null;
public ElasticTest(){
this.node = nodeBuilder().local(true).node();
this.client = node.client();
if(isIndexExist(index)){
deleteIndex(this.client, index);
createIndex(index);
}
else{
createIndex(index);
}
System.out.println("mapping structure before data insertion");
getMappings();
System.out.println("----------------------------------------");
createData();
System.out.println("mapping structure after data insertion");
getMappings();
}
public void getMappings() {
ClusterState clusterState = client.admin().cluster().prepareState()
.setFilterIndices(index).execute().actionGet().getState();
IndexMetaData inMetaData = clusterState.getMetaData().index(index);
MappingMetaData metad = inMetaData.mapping(type);
if (metad != null) {
try {
String structure = metad.getSourceAsMap().toString();
System.out.println(structure);
} catch (IOException e) {
e.printStackTrace();
}
}
}
private void createIndex(String index) {
XContentBuilder typemapping = buildJsonMappings();
String mappingstring = null;
try {
mappingstring = buildJsonMappings().string();
} catch (IOException e1) {
e1.printStackTrace();
}
client.admin().indices().create(new CreateIndexRequest(index)
.mapping(type, typemapping)).actionGet();
//try put mapping after index creation
/*
* PutMappingResponse response = null; try { response =
* client.admin().indices() .preparePutMapping(index) .setType(type)
* .setSource(typemapping.string()) .execute().actionGet(); } catch
* (ElasticSearchException e) { e.printStackTrace(); } catch
* (IOException e) { e.printStackTrace(); }
*/
}
private void deleteIndex(Client client, String index) {
try {
DeleteIndexResponse delete = client.admin().indices()
.delete(new DeleteIndexRequest(index)).actionGet();
if (!delete.isAcknowledged()) {
} else {
}
} catch (Exception e) {
}
}
private XContentBuilder buildJsonMappings(){
XContentBuilder builder = null;
try {
builder = XContentFactory.jsonBuilder();
builder.startObject()
.startObject("properties")
.startObject("ATTR1")
.field("type", "string")
.field("store", "yes")
.field("index", "analyzed")
.endObject()
.endObject()
.endObject();
} catch (IOException e) {
e.printStackTrace();
}
return builder;
}
private boolean isIndexExist(String index) {
ActionFuture<IndicesExistsResponse> exists = client.admin().indices()
.exists(new IndicesExistsRequest(index));
IndicesExistsResponse actionGet = exists.actionGet();
return actionGet.isExists();
}
private void createData(){
System.out.println("Data creation");
IndexResponse response=null;
for (int i=0;i<10;i++){
Map<String, Object> json = new HashMap<String, Object>();
json.put("ATTR1", "new value" + i);
response = this.client.prepareIndex(index, type)
.setSource(json)
.setOperationThreaded(false)
.execute()
.actionGet();
}
String _index = response.getIndex();
String _type = response.getType();
long _version = response.getVersion();
System.out.println("Index : "+_index+" Type : "+_type+" Version : "+_version);
System.out.println("----------------------------------");
}
public static void main(String[] args)
{
new ElasticTest();
}
}
I just want to change the property of the ATTR1 field to analyzed to ensure fast queries.
What am I doing wrong? I also tried to create the mapping after index creation, but it leads to the same effect.
OK, I found the answer on my own. On the type level I had to wrap "properties" with the type name, e.g.:
"type1" : {
"properties" : {
.....
}
}
See the following code:
private XContentBuilder getMappingsByJson(){
XContentBuilder builder = null;
try {
builder = XContentFactory.jsonBuilder().startObject().startObject(type).startObject("properties");
for(int i = 1; i<5; i++){
builder.startObject("ATTR" + i)
.field("type", "integer")
.field("store", "yes")
.field("index", "analyzed")
.endObject();
}
builder.endObject().endObject().endObject();
}
catch (IOException e) {
e.printStackTrace();
}
return builder;
}
It creates mappings for the attributes ATTR1 - ATTR4. Now it is possible to define mappings dynamically, for example for a list of different attributes. Hope it helps someone else.
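For completeness, a sketch of how the corrected builder would plug into the createIndex method from the question (same CreateIndexRequest call as before, only the mapping source changes):
private void createIndex(String index) {
    // Mapping is now wrapped in the type name, as described above
    XContentBuilder typemapping = getMappingsByJson();
    client.admin().indices()
          .create(new CreateIndexRequest(index).mapping(type, typemapping))
          .actionGet();
}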
