Duplicated options are wrongly accepted in Apache Commons Cli - java

When I repeat some options on the command line, they are wrongly accepted.
/**
 * Registers the "add" sub-command options on the shared {@code optionsMyAdd} set:
 * a mandatory single-valued --my-address and a mandatory multi-valued --my-port.
 */
private static void buildMyCliOptions() {
    // --my-address: exactly one mandatory value of the form "property=value".
    Option addressOption = Option.builder()
            .longOpt("my-address")
            .argName("property=value")
            .hasArg()
            .desc("Specify Address")
            .required()
            .build();
    // --my-port: mandatory; accepts several values split on the default '=' separator.
    Option portOption = Option.builder()
            .longOpt("my-port")
            .argName("property=value")
            .hasArgs()
            .valueSeparator()
            .desc("Specify port")
            .required()
            .build();
    optionsMyAdd.addOption(addressOption);
    optionsMyAdd.addOption(portOption);
}
tempOptions = ParseCommand.getOptionsMyAdd();
commandLine = cmdLineParser.parse(tempOptions, ParseCommand.args,true);
for instance: add --my-address=addr1 --my-address=addr2 --my-port=port1
But I would like an error for --my-address=addr2, since it is duplicated.

Related

Eclipse debugger JUnit runs pulls wrong class

When I step through my JUnit, I hit a line that runs, and the intended source code is shown in Eclipse. However, the JVM (launched by Eclipse JUnit launcher), does not use the right version. Instead, it's using some other (older) version. Because I'm not sure what's causing this, I'm providing more details than necessary.
I had built this at one point using the Gradle on the command line, and had 2 versions of the same class file. One at /bin/com.woodsman... the other /bin/main/com.woodsman...
I'm using
Java 11
Eclipse: Version: 2021-09 (4.21.0)
Eclipse: Build id: 20210910-1417
Gradle
JUnit 5
Junit Launcher:
Run Single test:
Test Class: com.woodsman.MyClassTest
Test Method: mySingleTest
Test Runner: JUnit 5
Program Argument: None
VM Arguments: -ea
Checked: Use the -XX:+ShowCodeDetails...
Project execution environment 'JavaSE-11'
Dependencies:
Modulepath entries: (empty)
Classpath Entries
JUnit 5 (Advanced Library)
my-project (My Eclipse project)
Project and External Dependencies
JRE System Library [JavaSE-11]
Source:
Default
org.junits (too many to list)
my-project
java ~/my-project/src/test
java ~/my-project/src/main
resource ~/my-project/src/test
resource ~/my-project/src/main
my-project
.git-crypt ~/my-project
.gradle ~/my-project
... many others ...
bin ~/my-project
build ~/my-project
#WebMvcTest(ApiController.class)
class ApiControllerTest extends MVCBaseTest {
#Test
public void generateFeedbackShouldUseBody() throws Exception {
FeedbackResponse feedbackResponse = new FeedbackResponse();
when(toolInvokerService.generateFeedback(any(), any(), any(Boolean.class))).thenReturn(feedbackResponse);
setDinahClient(toolInvokerService);
ObjectNode requestBody = objectMapper.createObjectNode();
requestBody.put("content", "Cellular phones and laptops are used in communication everyone is using it every day.");
final MockHttpServletRequestBuilder request = post("/feedback")
.contentType(MediaType.APPLICATION_JSON)
.content(requestBody.toString());
ResultActions mockResult = mockMvc.perform(request);
String responseBody = mockResult.andReturn().getResponse().getContentAsString();
System.out.println("###="+responseBody);
}
}
#Service
public class ToolInvokerService {
private static final Logger log = LoggerFactory.getLogger(ToolInvokerService.class);
private final ContentBuilderService contentBuilderService;
private final ToolNormalizerService toolNormalizerService;
private final FeedbackMapperService feedbackMapperService;
private final LangDetectClient langDetectClient;
private final GrammartoolClient grammarToolClient;
private final AtdClient atdClient;
private final DinahClient dinahClient;
private final CybertronClient cybertronClient;
private final StatsLogger statsLogger;
public ToolInvokerService(
ContentBuilderService contentBuilderService,
ToolNormalizerService toolNormalizerService,
FeedbackMapperService feedbackMapperService,
LangDetectClient langDetectClient,
GrammartoolClient grammarToolClient,
AtdClient atdClient,
DinahClient dinahClient,
CybertronClient cybertronClient,
StatsLogger statsLogger) {
this.contentBuilderService = contentBuilderService;
this.toolNormalizerService = toolNormalizerService;
this.feedbackMapperService = feedbackMapperService;
this.langDetectClient = langDetectClient;
this.grammarToolClient = grammarToolClient;
this.atdClient = atdClient;
this.dinahClient = dinahClient;
this.cybertronClient = cybertronClient;
this.statsLogger = statsLogger;
}
public FeedbackResponse generateFeedback(UserContext userContext, String content, boolean trustLineBreaks) throws
InterruptedException,
ExecutionException,
URISyntaxException,
IOException,
UnsupportedLanguageException {
long startTime = System.currentTimeMillis();
FileOutputStream fop = new FileOutputStream("ben-test.log",true);
fop.write("gfb-bp1\n".getBytes());
if (StringUtils.isEmpty(content)) {
return getEmptyFeedbackResponse(0L);
}
// to reduce call time on long content verify english for up to 5000 characters
langDetectClient.verifyEnglish(CleanTextUtil.truncateText(content, 5000));
fop.write("gfb-bp2\n".getBytes());
fop.write("gfb-bp2.5\n".getBytes());
fop.write(("## dinahClient.hashCode()="+dinahClient.hashCode()).getBytes());
fop.write(("## dinahClient.toString()="+dinahClient.toString()).getBytes());
ContentMetadataResponse contentMetadata = dinahClient.getContentMetadata(content, trustLineBreaks);
log.info("###"+contentMetadata);
if (CollectionUtils.isEmpty(contentMetadata.getContentMetadata().getSentences())) {
log.warn("no sentence content found: returning empty feedback response");
return getEmptyFeedbackResponse(contentMetadata.getTimedResponseDurationMs());
}
List<ToolResponse> toolResponses = getNormalizedToolResponses(userContext.getLocale(), content, contentMetadata.getContentMetadata());
List<Observation> normalizedObservations = new ArrayList<>();
Map<Tool, Long> toolDurations = new HashMap<>();
Map<Tool, Integer> observationCounts = new HashMap<>();
Map<Tool, List<?>> toolErrors = new HashMap<>();
for (ToolResponse toolResponse : toolResponses) {
normalizedObservations.addAll(toolResponse.getNormalizedToolObservations());
toolDurations.put(toolResponse.getTool(), toolResponse.getTimedResponseDurationMs());
observationCounts.put(toolResponse.getTool(), toolResponse.getToolObservations().size());
toolErrors.put(toolResponse.getTool(), toolResponse.getToolErrors());
}
List<Observation> finalObservations = feedbackMapperService.constructFinalObservations(userContext.getLocale(),
normalizedObservations,
content,
contentMetadata.getContentMetadata());
Stats stats = new Stats(
System.currentTimeMillis() - startTime,
contentMetadata.getTimedResponseDurationMs(),
toolDurations,
observationCounts,
contentMetadata.getContentMetadata().getIgnoreSpans());
FeedbackResponse feedbackResponse = new FeedbackResponse(toolErrors, stats, finalObservations);
statsLogger.logResponse(userContext, feedbackResponse, content, normalizedObservations);
return feedbackResponse;
}
}

Mixed up Test configuration when using #ResourceArg

TL;DR: When running tests with different @ResourceArgs, the configuration of different tests gets thrown around and overrides others, breaking tests meant to run with specific configurations.
So, I have a service that has tests that run in different configuration setups. The main difference at the moment is the service can either manage its own authentication or get it from an external source (Keycloak).
I firstly control this using test profiles, which seem to work fine. Unfortunately, in order to support both cases, the ResourceLifecycleManager I have setup supports setting up a Keycloak instance and returns config values that break the config for self authentication (This is due primarily to the fact that I have not found out how to get the lifecycle manager to determine on its own what profile or config is currently running. If I could do this, I think I would be much better off than using #ResourceArg, so would love to know if I missed something here).
To remedy this shortcoming, I have attempted to use #ResourceArgs to convey to the lifecycle manager when to setup for external auth. However, I have noticed some really odd execution timings and the config that ends up at my test/service isn't what I intend based on the test class's annotations, where it is obvious the lifecycle manager has setup for external auth.
Additionally, it should be noted that I have my tests ordered such that the profiles and configs shouldn't be running out of order; all the tests that don't care are run first, then the 'normal' tests with self auth, then the tests with the external auth profile. I can see this working appropriately when I run in intellij, and the fact I can tell the time is being taken to start up the new service instance between the test profiles.
Looking at the logs when I throw a breakpoint in places, some odd things are obvious:
When breakpoint on an erring test (before the external-configured tests run)
The start() method of my TestResourceLifecycleManager has been called twice
The first run ran with Keycloak starting, would override/break config
though the startup time I would expect Keycloak to need was not actually observed — I am a little confused here
The second run is correct, not starting keycloak
The profile config is what is expected, except for what the keycloak setup would override
When breakpoint on an external-configured test (after all self-configured tests run):
The start() method has now been called 4 times; appears that things were started in the same order as before again for the new run of the app
There could be some weirdness in how Intellij/Gradle shows logs, but I am interpreting this as:
Quarkus initting the two instances of LifecycleManager when starting the app for some reason, and one's config overrides the other, causing my woes.
The lifecycle manager is working as expected; it appropriately starts/ doesn't start keycloak when configured either way
At this point I can't tell if I'm doing something wrong, or if there's a bug.
Test class example for self-auth test (same annotations for all tests in this (test) profile):
#Slf4j
#QuarkusTest
#QuarkusTestResource(TestResourceLifecycleManager.class)
#TestHTTPEndpoint(Auth.class)
class AuthTest extends RunningServerTest {
Test class example for external auth test (same annotations for all tests in this (externalAuth) profile):
#Slf4j
#QuarkusTest
#TestProfile(ExternalAuthTestProfile.class)
#QuarkusTestResource(value = TestResourceLifecycleManager.class, initArgs = #ResourceArg(name=TestResourceLifecycleManager.EXTERNAL_AUTH_ARG, value="true"))
#TestHTTPEndpoint(Auth.class)
class AuthExternalTest extends RunningServerTest {
ExternalAuthTestProfile extends this, providing the appropriate profile name:
public class NonDefaultTestProfile implements QuarkusTestProfile {
private final String testProfile;
private final Map<String, String> overrides = new HashMap<>();
protected NonDefaultTestProfile(String testProfile) {
this.testProfile = testProfile;
}
protected NonDefaultTestProfile(String testProfile, Map<String, String> configOverrides) {
this(testProfile);
this.overrides.putAll(configOverrides);
}
#Override
public Map<String, String> getConfigOverrides() {
return new HashMap<>(this.overrides);
}
#Override
public String getConfigProfile() {
return testProfile;
}
#Override
public List<TestResourceEntry> testResources() {
return QuarkusTestProfile.super.testResources();
}
}
Lifecycle manager:
#Slf4j
public class TestResourceLifecycleManager implements QuarkusTestResourceLifecycleManager {
public static final String EXTERNAL_AUTH_ARG = "externalAuth";
private static volatile MongodExecutable MONGO_EXE = null;
private static volatile KeycloakContainer KEYCLOAK_CONTAINER = null;
private boolean externalAuth = false;
public synchronized Map<String, String> startKeycloakTestServer() {
if(!this.externalAuth){
log.info("No need for keycloak.");
return Map.of();
}
if (KEYCLOAK_CONTAINER != null) {
log.info("Keycloak already started.");
} else {
KEYCLOAK_CONTAINER = new KeycloakContainer()
// .withEnv("hello","world")
.withRealmImportFile("keycloak-realm.json");
KEYCLOAK_CONTAINER.start();
log.info(
"Test keycloak started at endpoint: {}\tAdmin creds: {}:{}",
KEYCLOAK_CONTAINER.getAuthServerUrl(),
KEYCLOAK_CONTAINER.getAdminUsername(),
KEYCLOAK_CONTAINER.getAdminPassword()
);
}
String clientId;
String clientSecret;
String publicKey = "";
try (
Keycloak keycloak = KeycloakBuilder.builder()
.serverUrl(KEYCLOAK_CONTAINER.getAuthServerUrl())
.realm("master")
.grantType(OAuth2Constants.PASSWORD)
.clientId("admin-cli")
.username(KEYCLOAK_CONTAINER.getAdminUsername())
.password(KEYCLOAK_CONTAINER.getAdminPassword())
.build();
) {
RealmResource appsRealmResource = keycloak.realms().realm("apps");
ClientRepresentation qmClientResource = appsRealmResource.clients().findByClientId("quartermaster").get(0);
clientSecret = qmClientResource.getSecret();
log.info("Got client id \"{}\" with secret: {}", "quartermaster", clientSecret);
//get private key
for (KeysMetadataRepresentation.KeyMetadataRepresentation curKey : appsRealmResource.keys().getKeyMetadata().getKeys()) {
if (!SIG.equals(curKey.getUse())) {
continue;
}
if (!"RSA".equals(curKey.getType())) {
continue;
}
String publicKeyTemp = curKey.getPublicKey();
if (publicKeyTemp == null || publicKeyTemp.isBlank()) {
continue;
}
publicKey = publicKeyTemp;
log.info("Found a relevant key for public key use: {} / {}", curKey.getKid(), publicKey);
}
}
// write public key
// = new File(TestResourceLifecycleManager.class.getResource("/").toURI().toString() + "/security/testKeycloakPublicKey.pem");
File publicKeyFile;
try {
publicKeyFile = File.createTempFile("oqmTestKeycloakPublicKey",".pem");
// publicKeyFile = new File(TestResourceLifecycleManager.class.getResource("/").toURI().toString().replace("/classes/java/", "/resources/") + "/security/testKeycloakPublicKey.pem");
log.info("path of public key: {}", publicKeyFile);
// if(publicKeyFile.createNewFile()){
// log.info("created new public key file");
//
// } else {
// log.info("Public file already exists");
// }
try (
FileOutputStream os = new FileOutputStream(
publicKeyFile
);
) {
IOUtils.write(publicKey, os, UTF_8);
} catch (IOException e) {
log.error("Failed to write out public key of keycloak: ", e);
throw new IllegalStateException("Failed to write out public key of keycloak.", e);
}
} catch (IOException e) {
log.error("Failed to create public key file: ", e);
throw new IllegalStateException("Failed to create public key file", e);
}
String keycloakUrl = KEYCLOAK_CONTAINER.getAuthServerUrl().replace("/auth", "");
return Map.of(
"test.keycloak.url", keycloakUrl,
"test.keycloak.authUrl", KEYCLOAK_CONTAINER.getAuthServerUrl(),
"test.keycloak.adminName", KEYCLOAK_CONTAINER.getAdminUsername(),
"test.keycloak.adminPass", KEYCLOAK_CONTAINER.getAdminPassword(),
//TODO:: add config for server to talk to
"service.externalAuth.url", keycloakUrl,
"mp.jwt.verify.publickey.location", publicKeyFile.getAbsolutePath()
);
}
public static synchronized void startMongoTestServer() throws IOException {
if (MONGO_EXE != null) {
log.info("Flapdoodle Mongo already started.");
return;
}
Version.Main version = Version.Main.V4_0;
int port = 27018;
log.info("Starting Flapdoodle Test Mongo {} on port {}", version, port);
IMongodConfig config = new MongodConfigBuilder()
.version(version)
.net(new Net(port, Network.localhostIsIPv6()))
.build();
try {
MONGO_EXE = MongodStarter.getDefaultInstance().prepare(config);
MongodProcess process = MONGO_EXE.start();
if (!process.isProcessRunning()) {
throw new IOException();
}
} catch (Throwable e) {
log.error("FAILED to start test mongo server: ", e);
MONGO_EXE = null;
throw e;
}
}
public static synchronized void stopMongoTestServer() {
if (MONGO_EXE == null) {
log.warn("Mongo was not started.");
return;
}
MONGO_EXE.stop();
MONGO_EXE = null;
}
public synchronized static void cleanMongo() throws IOException {
if (MONGO_EXE == null) {
log.warn("Mongo was not started.");
return;
}
log.info("Cleaning Mongo of all entries.");
}
#Override
public void init(Map<String, String> initArgs) {
this.externalAuth = Boolean.parseBoolean(initArgs.getOrDefault(EXTERNAL_AUTH_ARG, Boolean.toString(this.externalAuth)));
}
#Override
public Map<String, String> start() {
log.info("STARTING test lifecycle resources.");
Map<String, String> configOverride = new HashMap<>();
try {
startMongoTestServer();
} catch (IOException e) {
log.error("Unable to start Flapdoodle Mongo server");
}
configOverride.putAll(startKeycloakTestServer());
return configOverride;
}
#Override
public void stop() {
log.info("STOPPING test lifecycle resources.");
stopMongoTestServer();
}
}
The app can be found here: https://github.com/Epic-Breakfast-Productions/OpenQuarterMaster/tree/main/software/open-qm-base-station
The tests are currently failing in the ways I am describing, so feel free to look around.
Note that to run this, you will need to run ./gradlew build publishToMavenLocal in https://github.com/Epic-Breakfast-Productions/OpenQuarterMaster/tree/main/software/libs/open-qm-core to install a dependency locally.
Github issue also tracking this: https://github.com/quarkusio/quarkus/issues/22025
Any use of #QuarkusTestResource() without restrictToAnnotatedClass set to true, means that the QuarkusTestResourceLifecycleManager will be applied to all tests no matter where the annotation is placed.
Hope restrictToAnnotatedClass will solve the problem.

Picocli Parse Search String

I would like to provide a search string for my program like:
cmd.execute("getDevices", "-h 1.2.3.4", "-p myPSW", "-u myUser", "-n red|blue&black,-nonprod");
I want to create predicates to search for hostNames that contain red OR blue AND black, but NOT nonprod. It is unclear to me how to go about parsing the logical operators along with the Strings in picocli to create a Predicate. Is there a simple and straightforward way to parse a String into a Predicate?
My CLI is set up as follows:
#Command(name = "HostMagicCLI", mixinStandardHelpOptions = true,
version = "1.0",
description = "Do Stuff With Hosts"
,
subcommands = {TufinDevices.class}
)
public class HostMagicCLI implements Runnable {
public static void main(String[] args) {
CommandLine cmd = new CommandLine(new InterfaceMagicCLI());
cmd.setExecutionStrategy(new RunAll());
cmd.getHelpSectionMap().put(SECTION_KEY_COMMAND_LIST, new MyCommandListRenderer());
cmd.usage(System.out);
cmd.execute("getDevices", "-h1.2.3.4", "-p myPSW", "-u myUser", "-n red|blue&black");
}
#Override
public void run() {
System.out.println("Running..");
}
}
// NOTE(review): '#' before annotations is a markdown artifact for '@' throughout this snippet.
// NOTE(review): as posted, filter(...) is missing its closing brace before run() — the
// snippet appears truncated (see also the bare '.' placeholder lines below).
#Command(name = "getDevices", aliases = {"l"}, description = "SpecifyTufin Credentials", subcommands = {InterfaceCommand.class})
class TufinDevices implements Runnable {
.
.//Options to collect user,psw, host etc.
.
// Values passed to -n/--n, comma-split; e.g. "red|blue&black,-nonprod" becomes two tokens.
#CommandLine.Option(names = {"-n", "--n"}, split = ",", arity = "0..*", description = "Hostname Contains")
String[] hostNameContains;
// Keeps only devices whose host name contains EVERY -n token (case-insensitive, AND semantics).
// NOTE(review): reassigning the 'devices' parameter has no effect on the caller —
// presumably the filtered collection should be returned or stored in a field; verify intent.
private void filter(TufinDeviceCollection<TufinDevice> devices) {
if (hostNameContains != null) {
Predicate< ? super TufinDevice> deviceFilter = device -> Arrays.stream(hostNameContains)
.allMatch(input -> device.getHostName().toLowerCase().contains(input.toLowerCase()));
devices = devices.stream()
.sequential()
.filter(deviceFilter)
.collect(Collectors.toCollection(TufinDeviceCollection<TufinDevice>::new));
}
// Fetches devices with the collected credentials, then applies the -n filter above.
#Override
public void run() {
try {
TufinDeviceCollection<TufinDevice> FETCH_DEVICES = Tufin.FETCH_DEVICES(user.trim(), password.trim(), hostName.trim());
this.filter(FETCH_DEVICES);
} catch (IOException | NoSuchAlgorithmException | KeyManagementException | IPConverter.InvalidIPException ex) {
Logger.getLogger(TufinDevices.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
I suspect you may want to use a library for parsing the string that the end user specifies as the filter expression (the -n parameter). It may be an idea to look at libraries like Spring Expression Language, OGNL, JXPath, there may be others. Alternatively, if it is easy to write such a filter in Groovy or BeanShell, these languages can be called from Java, so you can call that filter from the Java command.
CAUTION:
I notice the example passes parameter to the picocli parser like this:
cmd.execute("getDevices", "-h 1.2.3.4", "-p myPSW", "-u myUser", "-n red|blue&black,-nonprod");
This will probably give an error explaining that "there is no -p myPSW option defined".
In your testing, if you call the execute method directly, make sure to pass parameters separately like this:
cmd.execute("getDevices", "-h", "1.2.3.4", "-p", "myPSW", "-u", "myUser", "-n", "red|blue&black,-nonprod");

Overriding required() parameters while using help parameter

I'm adding options for the Parser in following way:
options = new Options()
.addOption(Option.builder(CONFIG_PARAM)
.required()
.hasArg(true)
.argName(CONFIG_PARAM_NAME + "_path")
.desc(CONFIG_PARAM_DESC)
.longOpt(CONFIG_PARAM_NAME)
.build())
(...)
.addOption(Option.builder(HELP_PARAM)
.hasArg(false)
.longOpt(HELP_PARAM_NAME)
.desc(HELP_PARAM_DESC)
.build());
Now, I would like to allow the user to use only the help command, for ex.
mypreciousapp --help
With the above solution, it is impossible - I'm receiving the information about missing required parameters
Missing required options: c
Is there any way to flag the help parameter so it can override the required parameters, and allow its usage alone? I can do this manually, but first I would like to know if there's such option in CLI lib.
It seems commons-cli does not support that currently, so I would create a 2nd Options object where the param is not required and parse/check that first, before doing the full parse, something like this:
public static void main(String[] args) {
    // The real option set: "-p/--param" stays mandatory here.
    Option param = Option.builder("p")
            .required()
            .hasArg(true)
            .argName("arg")
            .desc("description ")
            .longOpt("param")
            .build();
    Option help = Option.builder("h")
            .hasArg(false)
            .longOpt("help")
            .desc("help description")
            .build();
    Options opts = new Options().addOption(param).addOption(help);

    // Bail out early if the user only asked for usage information.
    if (handleHelp(args, opts)) {
        return;
    }

    // Full parse with every "required" constraint enforced.
    final CommandLine cmdLine;
    try {
        cmdLine = new DefaultParser().parse(opts, args);
    } catch (ParseException ex) {
        System.out.println("Syntax error: " + ex.getMessage());
        printHelp(opts);
        return;
    }
    // now handle options and do your work
}
/**
 * Pre-parses args against a relaxed copy of the options (identical set, but "p"
 * deliberately NOT required) purely to detect -h/--help without tripping the
 * missing-required-option check. Returns true when the caller should exit
 * (help was printed, or the pre-parse already failed).
 */
private boolean handleHelp(String[] args, Options opts) {
    Option relaxedParam = Option.builder("p")
            // .required() intentionally omitted for the help pre-parse
            .hasArg(true)
            .argName("arg")
            .desc("description ")
            .longOpt("param")
            .build();
    Option helpFlag = Option.builder("h")
            .hasArg(false)
            .longOpt("help")
            .desc("help description")
            .build();
    Options helpOpts = new Options().addOption(relaxedParam).addOption(helpFlag);

    final CommandLine parsed;
    try {
        parsed = new DefaultParser().parse(helpOpts, args);
    } catch (ParseException ex) {
        System.out.println("Syntax error: " + ex.getMessage());
        printHelp(opts);
        return true;
    }
    if (parsed.hasOption("h")) {
        printHelp(opts);
        return true;
    }
    return false;
}
/**
 * Prints formatted usage to stdout.
 *
 * FIX(review): the original wrapped System.out in try-with-resources; closing that
 * PrintWriter closes System.out itself, silencing all later output from the process.
 * Flush instead of close. Also made static so it is callable from static main(...).
 */
private static void printHelp(Options opts) {
    PrintWriter pw = new PrintWriter(System.out);
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp(pw, 80, "myapp", "test-header", opts,
            formatter.getLeftPadding(), formatter.getDescPadding(), "test-footer", true);
    pw.flush(); // deliberately no close(): System.out must stay open
}
There is a fluent wrapper for the commons-cli library: https://github.com/bogdanovmn/java-cmdline-app
The -h option is already built-in. You don't need to manage it in your own code.
A better solution can be the following:
public static void main(String[] args) {
    // define the options with required arguments as needed
    Options opts = new Options()
            .addOption(Option.builder("p")
                    .required()
                    .hasArg(true)
                    .argName("arg")
                    .desc("description ")
                    .longOpt("param")
                    .build())
            .addOption(Option.builder("h")
                    .hasArg(false)
                    .longOpt("help")
                    .desc("help description")
                    .build());
    // First check whether usage output was requested, BEFORE the strict parse,
    // so "--help" alone is never rejected for missing required options.
    if (Arrays.asList(args).contains("--help")) {
        HelpFormatter help = new HelpFormatter();
        help.printHelp("betgenius-stream", opts);
        System.exit(0);
    }
    CommandLineParser parser = new DefaultParser();
    final CommandLine cli;
    try {
        // FIX(review): the original parsed an undefined variable `options`;
        // the option set built above is named `opts`. Also, parse(...) throws the
        // checked ParseException, which the original left unhandled.
        cli = parser.parse(opts, args);
    } catch (ParseException ex) {
        System.err.println(ex.getMessage());
        new HelpFormatter().printHelp("betgenius-stream", opts);
        return;
    }
    // now handle options and do your work
}
In this way it is not necessary to rewrite the options.

How to incorporate Environments with JClouds-Chef API?

I am using JClouds-Chef to:
Bootstrap a newly-provisioned Linux VM; and then
Run the chef-client on that node to configure it
It's important to note that all I'm currently configuring Chef with is a role (that's all it needs; everything else is set up on the Chef server for me):
public class ChefClient {
public configure() {
String vmIp = "myapp01.example.com";
String vmSshUsername = "myuser";
String vmSshPassword = "12345";
String endpoint = "https://mychefserver.example.com";
String client = "myuser";
String validator = "chef-validator";
String clientCredential = Files.toString(new File("C:\\Users\\myuser\\sandbox\\chef\\myuser.pem"), Charsets.UTF_8);
String validatorCredential = Files.toString(new File("C:\\Users\\myuser\\sandbox\\chef\\chef-validator.pem"), Charsets.UTF_8);
Properties props = new Properties();
props.put(ChefProperties.CHEF_VALIDATOR_NAME, validator);
props.put(ChefProperties.CHEF_VALIDATOR_CREDENTIAL, validatorCredential);
props.put(Constants.PROPERTY_RELAX_HOSTNAME, "true");
props.put(Constants.PROPERTY_TRUST_ALL_CERTS, "true");
ChefContext ctx = ContextBuilder.newBuilder("chef")
.endpoint(endpoint)
.credentials(client, clientCredential)
.overrides(props)
.modules(ImmutableSet.of(new SshjSshClientModule())) //
.buildView(ChefContext.class);
ChefService chef = ctx.getChefService();
List<String> runlist = new RunListBuilder().addRole("platformcontrol_dev").build();
ArrayList<String> runList2 = new ArrayList<String>();
for(String item : runlist) {
runList2.add(item);
}
BootstrapConfig bootstrapConfig = BootstrapConfig.builder().runList(runList2).build();
chef.updateBootstrapConfigForGroup("jclouds-chef", bootstrapConfig);
Statement bootstrap = chef.createBootstrapScriptForGroup("jclouds-chef");
SshClient.Factory sshFactory = ctx.unwrap().utils()
.injector().getInstance(Key.get(new TypeLiteral<SshClient.Factory>() {}));
SshClient ssh = sshFactory.create(HostAndPort.fromParts(vmIp, 22),
LoginCredentials.builder().user(vmSshUsername).password(vmSshPassword).build());
ssh.connect();
try {
StringBuilder rawScript = new StringBuilder();
Map<String, String> resolvedFunctions = ScriptBuilder.resolveFunctionDependenciesForStatements(
new HashMap<String, String>(), ImmutableSet.of(bootstrap), OsFamily.UNIX);
ScriptBuilder.writeFunctions(resolvedFunctions, OsFamily.UNIX, rawScript);
rawScript.append(bootstrap.render(OsFamily.UNIX));
ssh.put("/tmp/chef-bootstrap.sh", rawScript.toString());
ExecResponse result = ssh.exec("bash /tmp/chef-bootstrap.sh");
} catch(Throwable t) {
println "Exception: " + t.message
} finally {
ssh.disconnect();
}
}
}
Our in-house "Chef" (our devops guy) now wants to add the concept of Chef "environments" to all our recipes in addition to the existing roles. This is so that we can specify environment-specific roles for each node. My question: does the JClouds-Chef API handle environments? If so, how might I modify the code to incorporate environment-specific roles?
Is it just as simple as:
BootstrapConfig bootstrapConfig = BootstrapConfig.builder()
.environment("name-of-env-here?").runList(runList2).build();
Yes, it is that simple. That will tell the bootstrap script to register the node in the specified environment.
Take into account, though, that the environment must already exist in the Chef Server. If you want to create nodes in new environments, you can also create them programmatically as follows:
ChefApi api = ctx.unwrapApi(ChefApi.class);
if (api.getEnvironment("environment-name") == null) {
Environment env = Environment.builder()
.name("environment-name")
.description("Some description")
.build();
api.createEnvironment(env);
}

Categories