Picocli required options selection based on a primary option - java

I would like to parse options with picocli in the following format:
application -mode CLIENT -c aaaa -d bbbb
application -mode SERVER -e xxxx -f yyyy
mode is an enum with values { CLIENT, SERVER }
If mode == CLIENT, -c and -d options are mandatory, and -e, -f must not be used.
If mode == SERVER, -e and -f options are mandatory, and -c, -d must not be used.
In other words, I would like to choose the required options based on a key option. Is this possible in picocli?

Yes, this is possible. One way is simple programmatic validation:
import picocli.CommandLine;
import picocli.CommandLine.Command;
import picocli.CommandLine.Model.CommandSpec;
import picocli.CommandLine.Option;
import picocli.CommandLine.ParameterException;
import picocli.CommandLine.Spec;

import java.util.Objects;
import java.util.function.Predicate;

@Command(name = "application", mixinStandardHelpOptions = true)
public class MyApp implements Runnable {
    enum Mode {CLIENT, SERVER}

    @Option(names = "-mode", required = true)
    Mode mode;

    @Option(names = "-c") String c;
    @Option(names = "-d") String d;
    @Option(names = "-e") String e;
    @Option(names = "-f") String f;

    @Spec CommandSpec spec;

    public static void main(String[] args) {
        System.exit(new CommandLine(new MyApp()).execute(args));
    }

    @Override
    public void run() {
        validateInput();
        // business logic here...
    }

    private void validateInput() {
        String INVALID = "Error: option(s) %s cannot be used in %s mode";
        String REQUIRED = "Error: option(s) %s are required in %s mode";

        if (mode == Mode.CLIENT) {
            check(INVALID, "CLIENT", Objects::isNull, e, "-e", f, "-f");
            check(REQUIRED, "CLIENT", Objects::nonNull, c, "-c", d, "-d");
        } else if (mode == Mode.SERVER) {
            check(INVALID, "SERVER", Objects::isNull, c, "-c", d, "-d");
            check(REQUIRED, "SERVER", Objects::nonNull, e, "-e", f, "-f");
        }
    }

    private void check(String msg, String param, Predicate<String> validator, String... valuesAndLabels) {
        String desc = "";
        String sep = "";
        for (int i = 0; i < valuesAndLabels.length; i += 2) {
            // collect the labels of all option values that fail the validation predicate
            if (!validator.test(valuesAndLabels[i])) {
                desc += sep + valuesAndLabels[i + 1];
                sep = ", ";
            }
        }
        if (desc.length() > 0) {
            throw new ParameterException(spec.commandLine(), String.format(msg, desc, param));
        }
    }
}
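For example, invoking this program with an option that is not allowed in the selected mode makes validateInput() throw a ParameterException, which picocli turns into an error message built from the format strings above, followed by the usage help. Roughly:

application -mode CLIENT -c aaaa -d bbbb -e xxxx
Error: option(s) -e cannot be used in CLIENT mode
Usage: application ...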
Alternatively, if you are willing to change your requirements a little bit, we can use picocli's argument groups for a more declarative approach:
import picocli.CommandLine;
import picocli.CommandLine.ArgGroup;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;

@Command(name = "application", mixinStandardHelpOptions = true)
public class MyApp2 implements Runnable {

    static class ClientArgs {
        @Option(names = "-clientMode", required = true) boolean clientMode;
        @Option(names = "-c", required = true) String c;
        @Option(names = "-d", required = true) String d;
    }

    static class ServerArgs {
        @Option(names = "-serverMode", required = true) boolean serverMode;
        @Option(names = "-e", required = true) String e;
        @Option(names = "-f", required = true) String f;
    }

    static class Args {
        @ArgGroup(exclusive = false, multiplicity = "1", heading = "CLIENT mode args%n")
        ClientArgs clientArgs;

        @ArgGroup(exclusive = false, multiplicity = "1", heading = "SERVER mode args%n")
        ServerArgs serverArgs;
    }

    @ArgGroup(exclusive = true, multiplicity = "1")
    Args args;

    public static void main(String[] args) {
        System.exit(new CommandLine(new MyApp2()).execute(args));
    }

    @Override
    public void run() {
        // business logic here...
    }
}
When invoked with just -serverMode, this second example will show this error message, followed by the usage help message:
Error: Missing required argument(s): -e=<e>, -f=<f>
Usage: application ((-clientMode -c=<c> -d=<d>) | (-serverMode -e=<e> -f=<f>))
...
Note that this declarative approach cannot be achieved with a single -mode option: each argument group needs its own option (I chose -clientMode and -serverMode in this example). This allows the picocli parser to figure out which options must occur together and which are mutually exclusive.
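With this variant, the invocations from the question become:

application -clientMode -c aaaa -d bbbb
application -serverMode -e xxxx -f yyyy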

Related

PicoCLI: How to use @ArgGroup for a CommandLine.Command method

I have two options (-n and -t) under a command where if -n is used, then -t is required, but both are not required. However, I keep getting an error about a missing required parameter (see the output below).
I am trying to pass the options to another method (with the business logic) as a parameter.
Valid Usage:
agent.bat install -n -t <blahblah>
agent.bat install -t <blahblah> -n
agent.bat install -t <blah blah>
agent.bat install -t    <-- this is interactive, so it would prompt for the token later
Invalid Usage:
agent.bat install -n
agent.bat install -n -t
Current output with valid usage:
agent.bat install -t
Missing required parameter: '<arg0>'
Usage: agent install [-hV] <arg0>
Setup or update the agent service program by install token.
<arg0>
public class Agent implements Callable<Integer> {

    static class InstallArgs {
        @Option(names = {"-t", "--token"},
                order = 0,
                arity = "0..1",
                interactive = true,
                description = "The agent install token.",
                required = true) String installToken;

        @Option(names = {"-n", "--noninteractive"},
                order = 1,
                description = "Sets installation to non-interactive",
                required = false) boolean nonInteractive;

        public String toString() {
            return String.format("%s,%s", installToken, nonInteractive);
        }
    }

    private static String[] programArgs;

    @ArgGroup(exclusive = false, multiplicity = "1")
    @CommandLine.Command(name = AgentCommand.INSTALL_COMMAND, mixinStandardHelpOptions = true,
            description = "Setup or update the agent service program by install token.")
    void install(InstallArgs installArgs) {
        String[] installArgsValues = installArgs.toString().split(",");
        String installToken = installArgsValues[0];
        boolean nonInteractive = Boolean.parseBoolean(installArgsValues[1]);
        IcbProgram.initProgramMode(ProgramMode.INSTALL);
        MainService mainService = MainService.createInstallInstance(configFile, agentUserFile,
                backupAgentUserFile, installToken, nonInteractive);
    }

    public static void main(String... args) {
        if (ArgumentValidator.validateArgument(args)) {
            programArgs = args;
            int exitCode = new CommandLine(new Agent()).execute(args);
            System.exit(exitCode);
        } else {
            // Exit with usage error
            System.exit(ExitCode.USAGE);
        }
    }
}
Can you try using arity = "1" for installToken?
static class InstallArgs {
    @Option(names = {"-t", "--token"},
            order = 0,
            arity = "1",
            interactive = true,
            description = "The agent install token.",
            required = true) String installToken;

OpenCSV Header is missing required fields found []

opencsv 5.1
Caused by: com.opencsv.exceptions.CsvRequiredFieldEmptyException: Header is missing required fields [ALGVERIFICATION, DISTAL MV, LOCATION, PREDICTED STATE, PROXIMAL MV, RUN, SAMPLE TIME]. The list of headers encountered is [].
at com.opencsv.bean.HeaderNameBaseMappingStrategy.captureHeader(HeaderNameBaseMappingStrategy.java:69)
@ParameterizedTest
@ArgumentsSource(MyArgumentsProvider.class)
void test(AlgorithmVerification verifications) {
    Log.d("test", verifications.location);
    assertThat(verifications).isNotNull();
}

public enum State {
    NA,
    ADVANCE,
    RETRACT,
}

public static class StateConverter extends AbstractBeanField {
    @Override
    protected Object convert(String value) {
        return State.valueOf(value);
    }
}

public static class AlgorithmVerification {
    @CsvBindByName(column = "Sample Time", required = true)
    protected float sampleTime;

    @CsvBindByName(column = "Distal mV", required = true)
    protected int distalMV;

    @CsvBindByName(column = "Proximal mV", required = true)
    protected int proximalMV;

    @CsvCustomBindByName(column = "Predicted State", converter = StateConverter.class, required = true)
    protected State predictedState;

    @CsvBindByName(column = "run", required = true)
    protected String run;

    @CsvCustomBindByName(column = "Location", converter = StateConverter.class, required = true)
    protected String location;

    @CsvCustomBindByName(column = "AlgVerification", converter = StateConverter.class, required = true)
    protected State algVerification;
}

static class MyArgumentsProvider implements ArgumentsProvider {
    @Override
    public Stream<? extends Arguments> provideArguments(ExtensionContext context) throws IOException, URISyntaxException {
        return Files.list(Paths.get(ClassLoader.getSystemResource("avd").toURI()))
                .map(Path::toFile)
                .map(f -> Try.withResources(() -> new FileReader(f))
                        .of(CsvToBeanBuilder::new)
                        .map(b -> b.withType(AlgorithmVerification.class))
                        .map(CsvToBeanBuilder::build)
                        .map(CsvToBean::parse)
                        .getOrElseThrow(throwable -> new RuntimeException(f.getName(), throwable)))
                .flatMap(List::stream)
                .map(Arguments::of);
    }
}
and this is the beginning of the file
Sample Time,Distal mV,Proximal mV,Predicted State,run,Location,AlgVerification
0.016,2509,2502,NA,DV-MyString,-1,-1
Did I miss a step? Are the headers wrong somehow? I notice that it is looking for uppercase headers, but even then it finds none.
It looks like my usage of vavr's Try.withResources was wrong: the FileReader is closed as soon as the .of(...) call completes, so the later .map(CsvToBean::parse) ran against an already-closed reader and OpenCSV found no headers. The building and parsing need to happen inside the .of(...) lambda:
return Files.list(Paths.get(ClassLoader.getSystemResource("avd").toURI()))
        .map(Path::toFile)
        .map(f -> Try.withResources(() -> new FileReader(f))
                .of(fr -> new CsvToBeanBuilder<AlgorithmVerification>(fr)
                        .withType(AlgorithmVerification.class)
                        .build()
                        .parse())
                .getOrElseThrow(throwable -> new RuntimeException(f.getName(), throwable)))
        .flatMap(List::stream)
        .map(Arguments::of);
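For comparison, here is a minimal sketch (not from the original post) of the same loading step without vavr; AlgorithmVerification is the bean class from the question, and a plain try-with-resources block keeps the reader open for the whole build-and-parse step:

import com.opencsv.bean.CsvToBeanBuilder;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.List;

class CsvLoader {
    // Reads one CSV file into beans; the FileReader stays open until parse() has finished.
    static List<AlgorithmVerification> load(File f) {
        try (FileReader fr = new FileReader(f)) {
            return new CsvToBeanBuilder<AlgorithmVerification>(fr)
                    .withType(AlgorithmVerification.class)
                    .build()
                    .parse();
        } catch (IOException e) {
            throw new RuntimeException(f.getName(), e);
        }
    }
}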

Calling groovy functions from TestNG test case written in Java

The issue that I am facing is with the import of the Groovy package. The build error is:
error: package dataProviders.singleOrder does not exist
import dataProviders.singleOrder.NOSDataProvider;
I have a data provider written in groovy:
The groovy file is under:
src/test/groovy
package dataProviders.singleOrder;
public class NOSDataProvider implements OrderTypeAndTIF {
#DataProvider(name="nosDataProvider")
#Override
public Object[][] getNOSParameters() {
def values = []
char[] sideArray = {};
char[] tifArray = {};
char[] orderTypeArray = {};
for (Side side : [Side.BUY, Side.SELL, Side.SELL_SHORT]) {
for (TimeInForce tif : [TimeInForce.DAY, TimeInForce.IMMEDIATE_OR_CANCEL, TimeInForce.FILL_OR_KILL]) {
for (OrdType orderType : [OrdType.LIMIT, OrdType.MARKET]) {
values.add(side, tif, orderType);
}
}
}
return values;
}
}
The Java files are under:
src/test/java/
The Data Provider is injected in the TestNG test case as:
package testcases.testNOS;

import dataProviders.singleOrder.NOSDataProvider;

@Test(dataProvider = "nosDataProvider", dataProviderClass = NOSDataProvider.class)
public void testNOS(char side, char tif, char orderType) throws InterruptedException, SessionNotFound {
    NewOrderSingle nos = new NewOrderSingle(new ClOrdID("1"), new HandlInst('1'),
            new Symbol("TCS.NS"), new Side(side), new OrderQty(100.0),
            new OrdType(orderType));
    nos.set(new TimeInForce(tif));
}

Picocli Parse Search String

I would like to provide a search string for my program like:
cmd.execute("getDevices", "-h 1.2.3.4", "-p myPSW", "-u myUser", "-n red|blue&black,-nonprod");
I want to create predicates to search for host names that contain red OR blue AND black, but NOT nonprod. It is unclear to me how to parse the logical operators along with the strings in picocli to create a Predicate. Is there a simple and straightforward way to parse a String into a Predicate?
My CLI is set up as follows:
@Command(name = "HostMagicCLI", mixinStandardHelpOptions = true,
        version = "1.0",
        description = "Do Stuff With Hosts",
        subcommands = {TufinDevices.class})
public class HostMagicCLI implements Runnable {

    public static void main(String[] args) {
        CommandLine cmd = new CommandLine(new InterfaceMagicCLI());
        cmd.setExecutionStrategy(new RunAll());
        cmd.getHelpSectionMap().put(SECTION_KEY_COMMAND_LIST, new MyCommandListRenderer());
        cmd.usage(System.out);
        cmd.execute("getDevices", "-h1.2.3.4", "-p myPSW", "-u myUser", "-n red|blue&black");
    }

    @Override
    public void run() {
        System.out.println("Running..");
    }
}
@Command(name = "getDevices", aliases = {"l"}, description = "Specify Tufin Credentials", subcommands = {InterfaceCommand.class})
class TufinDevices implements Runnable {
    .
    . // Options to collect user, psw, host etc.
    .
    @CommandLine.Option(names = {"-n", "--n"}, split = ",", arity = "0..*", description = "Hostname Contains")
    String[] hostNameContains;

    private void filter(TufinDeviceCollection<TufinDevice> devices) {
        if (hostNameContains != null) {
            Predicate<? super TufinDevice> deviceFilter = device -> Arrays.stream(hostNameContains)
                    .allMatch(input -> device.getHostName().toLowerCase().contains(input.toLowerCase()));
            devices = devices.stream()
                    .sequential()
                    .filter(deviceFilter)
                    .collect(Collectors.toCollection(TufinDeviceCollection<TufinDevice>::new));
        }
    }

    @Override
    public void run() {
        try {
            TufinDeviceCollection<TufinDevice> FETCH_DEVICES = Tufin.FETCH_DEVICES(user.trim(), password.trim(), hostName.trim());
            this.filter(FETCH_DEVICES);
        } catch (IOException | NoSuchAlgorithmException | KeyManagementException | IPConverter.InvalidIPException ex) {
            Logger.getLogger(TufinDevices.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}
I suspect you may want to use a library for parsing the string that the end user specifies as the filter expression (the -n parameter). It may be an idea to look at libraries like Spring Expression Language, OGNL, or JXPath; there may be others. Alternatively, if it is easy to write such a filter in Groovy or BeanShell, these languages can be called from Java, so you could call that filter from the Java command. A rough hand-rolled alternative is sketched below.
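For illustration only (this sketch is not from the original answer), here is a minimal hand-rolled parser that turns a simplified filter expression into a Predicate<String>. It assumes a reduced grammar: ',' separates AND-ed clauses, '|' separates OR-ed substrings inside a clause, and a leading '-' negates a clause; the '&' operator from the question is not handled.

import java.util.Arrays;
import java.util.function.Predicate;

public class HostNameFilter {

    // Builds a case-insensitive "contains" predicate from expressions such as "red|blue,-nonprod".
    static Predicate<String> fromExpression(String expression) {
        Predicate<String> result = s -> true;
        for (String clause : expression.split(",")) {
            boolean negated = clause.startsWith("-");
            String body = negated ? clause.substring(1) : clause;
            Predicate<String> clausePredicate = host ->
                    Arrays.stream(body.split("\\|"))
                            .anyMatch(term -> host.toLowerCase().contains(term.toLowerCase()));
            result = result.and(negated ? clausePredicate.negate() : clausePredicate);
        }
        return result;
    }

    public static void main(String[] args) {
        Predicate<String> filter = fromExpression("red|blue,-nonprod");
        System.out.println(filter.test("red-host-01"));        // true
        System.out.println(filter.test("blue-host-nonprod"));  // false
        System.out.println(filter.test("green-host"));         // false
    }
}

A predicate built this way could then replace the allMatch-based deviceFilter in the filter(...) method above.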
CAUTION:
I notice the example passes parameter to the picocli parser like this:
cmd.execute("getDevices", "-h 1.2.3.4", "-p myPSW", "-u myUser", "-n red|blue&black,-nonprod");
This will probably give an error explaining that "there is no -p myPSW option defined".
In your testing, if you call the execute method directly, make sure to pass parameters separately like this:
cmd.execute("getDevices", "-h", "1.2.3.4", "-p", "myPSW", "-u", "myUser", "-n", "red|blue&black,-nonprod");

Adding mapping to a type from Java - how do I do it?

I am trying to recreate this example using the Java API more or less.
I think all I need is to add a mapping to the index, but the Java API docs are not exactly clear on how to do this.
Please tell me how to create a mapping in Java that is equivalent to this example from the documentation:
curl -X PUT localhost:9200/test/tweet/_mapping -d '{
    "tweet" : {
        "_ttl" : { "enabled" : true, "default" : "1d" }
    }
}'
Here's my code:
package foo;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

import java.io.IOException;

import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
import org.elasticsearch.action.get.GetRequestBuilder;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.xcontent.XContentBuilder;

public class MyTestClass {

    private static Client getClient() {
        ImmutableSettings.Builder settings = ImmutableSettings.settingsBuilder();
        TransportClient transportClient = new TransportClient(settings);
        transportClient = transportClient.addTransportAddress(new InetSocketTransportAddress("localhost", 9300));
        return (Client) transportClient;
    }

    public static void main(String[] args) throws IOException, InterruptedException {
        final Client client = getClient();

        // Create Index and set settings and mappings
        final String indexName = "test";
        final String documentType = "tweet";
        final String documentId = "1";
        final String fieldName = "foo";
        final String value = "bar";

        IndicesExistsResponse res = client.admin().indices().prepareExists(indexName).execute().actionGet();
        if (res.isExists()) {
            DeleteIndexRequestBuilder delIdx = client.admin().indices().prepareDelete(indexName);
            delIdx.execute().actionGet();
        }

        CreateIndexRequestBuilder createIndexRequestBuilder = client.admin().indices().prepareCreate(indexName);
        // MAPPING GOES HERE
        // createIndexRequestBuilder.addMapping(documentType, WHATEVER THE MAPPING IS);
        // MAPPING DONE
        createIndexRequestBuilder.execute().actionGet();

        // Add documents
        IndexRequestBuilder indexRequestBuilder = client.prepareIndex(indexName, documentType, documentId);
        // build json object
        XContentBuilder contentBuilder = jsonBuilder().startObject().prettyPrint();
        contentBuilder.field(fieldName, value);
        indexRequestBuilder.setSource(contentBuilder);
        indexRequestBuilder.execute().actionGet();

        // Get document
        System.out.println(getValue(client, indexName, documentType, documentId, fieldName));
        Thread.sleep(10000L);
        // Try again
        System.out.println(getValue(client, indexName, documentType, documentId, fieldName));
    }

    protected static String getValue(final Client client, final String indexName, final String documentType,
            final String documentId, final String fieldName) {
        GetRequestBuilder getRequestBuilder = client.prepareGet(indexName, documentType, documentId);
        getRequestBuilder.setFields(new String[] { fieldName });
        GetResponse response2 = getRequestBuilder.execute().actionGet();
        String name = response2.getField(fieldName).getValue().toString();
        return name;
    }
}
Finally, a day of googling paid off. Frankly, the Java API docs for Elasticsearch could use some end-to-end examples, not to mention Javadoc...
Here's a running example. You must have a node running on localhost for this to work!
package foo;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

import java.io.IOException;

import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
import org.elasticsearch.action.get.GetRequestBuilder;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.xcontent.XContentBuilder;

public class MyTestClass {

    private static final String ID_NOT_FOUND = "<ID NOT FOUND>";

    private static Client getClient() {
        final ImmutableSettings.Builder settings = ImmutableSettings.settingsBuilder();
        TransportClient transportClient = new TransportClient(settings);
        transportClient = transportClient.addTransportAddress(new InetSocketTransportAddress("localhost", 9300));
        return transportClient;
    }

    public static void main(final String[] args) throws IOException, InterruptedException {
        final Client client = getClient();

        // Create Index and set settings and mappings
        final String indexName = "test";
        final String documentType = "tweet";
        final String documentId = "1";
        final String fieldName = "foo";
        final String value = "bar";

        final IndicesExistsResponse res = client.admin().indices().prepareExists(indexName).execute().actionGet();
        if (res.isExists()) {
            final DeleteIndexRequestBuilder delIdx = client.admin().indices().prepareDelete(indexName);
            delIdx.execute().actionGet();
        }

        final CreateIndexRequestBuilder createIndexRequestBuilder = client.admin().indices().prepareCreate(indexName);

        // MAPPING GOES HERE
        final XContentBuilder mappingBuilder = jsonBuilder().startObject().startObject(documentType)
                .startObject("_ttl").field("enabled", "true").field("default", "1s").endObject().endObject()
                .endObject();
        System.out.println(mappingBuilder.string());
        createIndexRequestBuilder.addMapping(documentType, mappingBuilder);
        // MAPPING DONE
        createIndexRequestBuilder.execute().actionGet();

        // Add documents
        final IndexRequestBuilder indexRequestBuilder = client.prepareIndex(indexName, documentType, documentId);
        // build json object
        final XContentBuilder contentBuilder = jsonBuilder().startObject().prettyPrint();
        contentBuilder.field(fieldName, value);
        indexRequestBuilder.setSource(contentBuilder);
        indexRequestBuilder.execute().actionGet();

        // Get document
        System.out.println(getValue(client, indexName, documentType, documentId, fieldName));

        int idx = 0;
        while (true) {
            Thread.sleep(10000L);
            idx++;
            System.out.println(idx * 10 + " seconds passed");
            final String name = getValue(client, indexName, documentType, documentId, fieldName);
            if (ID_NOT_FOUND.equals(name)) {
                break;
            } else {
                // Try again
                System.out.println(name);
            }
        }
        System.out.println("Document was garbage collected");
    }

    protected static String getValue(final Client client, final String indexName, final String documentType,
            final String documentId, final String fieldName) {
        final GetRequestBuilder getRequestBuilder = client.prepareGet(indexName, documentType, documentId);
        getRequestBuilder.setFields(new String[] { fieldName });
        final GetResponse response2 = getRequestBuilder.execute().actionGet();
        if (response2.isExists()) {
            final String name = response2.getField(fieldName).getValue().toString();
            return name;
        } else {
            return ID_NOT_FOUND;
        }
    }
}
I am going to add another answer here because, frankly, the answers above gave me a start but did not answer the actual question 100% (updating not just the root-level properties, but the actual fields/properties). It took me almost two days to figure this out (the documentation is a bit light for the ES Java APIs). My "Mapping" class is not 100% complete yet; more fields ("format", etc.) could be added later.
I hope this helps everyone else who is trying to use update mappings!
GET/RETRIEVE MAPPINGS
ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> indexMappings = response.getMappings();
ImmutableOpenMap<String, MappingMetaData> typeMappings = indexMappings.get(indexName);
MappingMetaData mapping = typeMappings.get(type);
Map<String, Mapping> mappingAsMap = new HashMap<>();
try {
    Object properties = mapping.sourceAsMap().get("properties");
    mappingAsMap = (Map<String, Mapping>) gson.fromJson(gson.toJson(properties), _elasticsearch_type_mapping_map_type);
    return mappingAsMap;
}
UPDATE MAPPINGS
PutMappingRequest mappingRequest = new PutMappingRequest(indexName);
Map<String, Object> properties = new HashMap<>();
Map<String, Object> mappingsMap = (Map<String, Object>) gson.fromJson(gson.toJson(mapping), Json._obj_map_type);
properties.put("properties", mappingsMap);
mappingRequest = mappingRequest.ignoreConflicts(true).type(type).source(properties);
// submit the request through the indices admin client (assumes an Elasticsearch Client instance named "client", as in the earlier examples)
client.admin().indices().putMapping(mappingRequest).actionGet();
My GSON mapping types
public static final Type _obj_map_type = new TypeToken<LinkedHashMap<String, Object>>(){}.getType();
public static final Type _elasticsearch_type_mapping_map_type = new TypeToken<LinkedHashMap<String, Mapping>>(){}.getType();
My Mapping Class
public class Mapping {

    private String type;
    private String index;
    private String analyzer;

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getIndex() {
        return index;
    }

    public void setIndex(String index) {
        this.index = index;
    }

    public String getAnalyzer() {
        return analyzer;
    }

    public void setAnalyzer(String analyzer) {
        this.analyzer = analyzer;
    }
}
An alternative solution would be to use a feature called dynamic templates. The idea is described very well in this article: http://joelabrahamsson.com/dynamic-mappings-and-dates-in-elasticsearch/
This example uses a regular expression to declare that any field whose name starts with tikaprop_ is mapped as a string.
curl -XPUT "http://localhost:9200/myindex" -d'
{
    "mappings": {
        "_default_": {
            "date_detection": true,
            "dynamic_templates": [
                {
                    "tikaprops": {
                        "match": "tikaprop_.*",
                        "match_pattern": "regex",
                        "mapping": {
                            "type": "string"
                        }
                    }
                }
            ]
        }
    }
}'
or, if you'd prefer to do it via the Elasticsearch Java API:
CreateIndexRequestBuilder cirb = this.client.admin().indices().prepareCreate(INDEX_NAME)
        .addMapping("_default_", getIndexFieldMapping());
CreateIndexResponse createIndexResponse = cirb.execute().actionGet();

private String getIndexFieldMapping() {
    return IOUtils.toString(getClass().getClassLoader().getResourceAsStream("elasticsearch_dynamic_templates_config.json"));
}
with elasticsearch_dynamic_templates_config.json being:
{
    "date_detection": true,
    "dynamic_templates": [
        {
            "tikaprops": {
                "match": "tikaprop_.*",
                "match_pattern": "regex",
                "mapping": {
                    "type": "string"
                }
            }
        }
    ]
}
I started with the excellent answer given by Anders Johansen and converted it to Groovy (so that the JSON is easier to read). I share with you my answer.
package com.example

import groovy.json.JsonSlurper

import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse
import org.elasticsearch.action.get.GetRequestBuilder
import org.elasticsearch.action.get.GetResponse
import org.elasticsearch.action.index.IndexRequestBuilder
import org.elasticsearch.client.Client
import org.elasticsearch.client.transport.TransportClient
import org.elasticsearch.common.transport.InetSocketTransportAddress

class StackOverflow {

    Client client;
    final byte[] ipAddr = [192, 168, 33, 10]; // Your ElasticSearch node ip goes here
    final String indexName = "classifieds";
    final String documentType = "job";

    public StackOverflow() {
        client = TransportClient.builder().build()
                .addTransportAddress(new InetSocketTransportAddress(InetAddress.getByAddress(ipAddr), 9300));
    }

    public void index() {
        final IndicesExistsResponse res = client.admin().indices().prepareExists(indexName).execute().actionGet();
        if (res.isExists()) {
            final DeleteIndexRequestBuilder delIdx = client.admin().indices().prepareDelete(indexName);
            delIdx.execute().actionGet();
        }

        final CreateIndexRequestBuilder createIndexRequestBuilder = client.admin().indices().prepareCreate(indexName);

        // Create Mapping
        def jsonSlurper = new JsonSlurper()
        def mapping = jsonSlurper.parseText '''
        {
            "job": {
                "properties": {
                    "company": {
                        "type": "string",
                        "analyzer": "english"
                    },
                    "desc": {
                        "type": "string",
                        "analyzer": "english"
                    },
                    "loc": {
                        "type": "string",
                        "analyzer": "english"
                    },
                    "req": {
                        "type": "string",
                        "analyzer": "english"
                    },
                    "title": {
                        "type": "string",
                        "analyzer": "english"
                    },
                    "url": {
                        "type": "string",
                        "analyzer": "english"
                    }
                }
            }
        }'''
        System.out.println(mapping.toString());
        createIndexRequestBuilder.addMapping(documentType, mapping);
        // MAPPING DONE
        createIndexRequestBuilder.execute().actionGet();

        // Add documents
        final IndexRequestBuilder indexRequestBuilder = client.prepareIndex(indexName, documentType, "1");
        // build json object
        def jobcontent = jsonSlurper.parseText '''
        {
            "company": "ACME",
            "title": "Groovy Developer",
            "loc": "Puerto Rico",
            "desc": "Codes in Groovy",
            "req": "ElasticSearch, Groovy ",
            "url": "http://stackoverflow.com/questions/22071198/adding-mapping-to-a-type-from-java-how-do-i-do-it#"
        }
        '''
        indexRequestBuilder.setSource(jobcontent);
        indexRequestBuilder.execute().actionGet();
    }

    private String getValue2(final String indexName, final String documentType,
            final String documentId, final String fieldName) {
        GetRequestBuilder getRequestBuilder = client.prepareGet(indexName, documentType, documentId);
        getRequestBuilder.setFields([fieldName] as String[]);
        GetResponse response2 = getRequestBuilder.execute().actionGet();
        String name = response2.getField(fieldName).getValue().toString();
        return name;
    }

    public String getValue(final String documentId, final String fieldName) {
        getValue2(indexName, documentType, documentId, fieldName)
    }

    public void close() {
        client.close()
    }

    public static void main(String[] Args) {
        StackOverflow so = new StackOverflow();
        so.index();
        Thread.sleep(5000L);
        System.out.println(so.getValue("1", "title"));
        so.close();
    }
}
