Java LDAP query with paging not working, PagedResultsControl not honored - java

Here's my code, basically according to the sample in https://docs.oracle.com/javase/7/docs/api/javax/naming/ldap/PagedResultsControl.html:
package com.igsl.ldapuserattributes;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import javax.naming.Context;
import javax.naming.NamingEnumeration;
import javax.naming.directory.Attribute;
import javax.naming.directory.Attributes;
import javax.naming.directory.SearchControls;
import javax.naming.directory.SearchResult;
import javax.naming.ldap.Control;
import javax.naming.ldap.InitialLdapContext;
import javax.naming.ldap.LdapContext;
import javax.naming.ldap.PagedResultsControl;
import javax.naming.ldap.PagedResultsResponseControl;
import org.apache.log4j.Logger;
import org.codehaus.jackson.map.ObjectMapper;
public class LDAPUserAttributes {
private static final Logger LOGGER = Logger.getLogger(LDAPUserAttributes.class);
private static final ObjectMapper OM = new ObjectMapper();
private static final String AUTH_METHOD = "simple";
public static void main(String[] args) throws Exception {
String[] readAttrs = new String[] {
"distinguishedName",
"sAMAccountName",
"displayName",
"mail",
"telephone",
"memberOf",
"createTimestamp",
"modifyTimestamp",
"objectClass",
"dn"
};
Map<String, Map<String, List<String>>> ad = getLDAPUsers(
"ldap://192.168.56.120:389",
"CN=Administrator,CN=Users,DC=win2022,DC=kcwong,DC=igsl",
"P#ssw0rd",
"CN=Users,DC=win2022,DC=kcwong,DC=igsl",
"(&(objectClass=user)(objectClass=user)(|(sAMAccountName=t*)(sAMAccountName=a*)))",
SearchControls.SUBTREE_SCOPE,
readAttrs);
System.out.println(OM.writeValueAsString(ad));
Map<String, Map<String, List<String>>> apacheDS = getLDAPUsers(
"ldap://127.0.0.1:10389",
"uid=admin,ou=system",
"admin",
"ou=users,ou=system",
"(&(objectClass=person))",
SearchControls.SUBTREE_SCOPE,
readAttrs);
System.out.println(OM.writeValueAsString(apacheDS));
}
public static Map<String, Map<String, List<String>>> getLDAPUsers(String url, String principal, String credential, String baseDN, String filter, int scope, String[] readAttrs) throws Exception {
Map<String, Map<String, List<String>>> output = new HashMap<String, Map<String, List<String>>>();
// Note: Jira uses OSGi and does not export com.sun.* classes.
// So LdapCtxFactory is not available when using a JobRunner's classloader.
// We need to switch class loader for this thread.
final Thread currentThread = Thread.currentThread();
final ClassLoader originalClassLoader = currentThread.getContextClassLoader();
try {
ClassLoader rootClassLoader = ClassLoader.getSystemClassLoader();
currentThread.setContextClassLoader(rootClassLoader);
LdapContext ctx = null;
try {
final int PAGE_SIZE = 500; // TODO Move to config
Hashtable env = new Hashtable();
env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
env.put(Context.PROVIDER_URL, url);
env.put(Context.SECURITY_AUTHENTICATION, AUTH_METHOD);
env.put(Context.SECURITY_PRINCIPAL, principal);
env.put(Context.SECURITY_CREDENTIALS, credential);
List<Control> controls = new ArrayList<Control>();
controls.add(new PagedResultsControl(PAGE_SIZE, Control.CRITICAL));
ctx = new InitialLdapContext(env, null);
ctx.setRequestControls(new Control[] {
new PagedResultsControl(PAGE_SIZE, Control.CRITICAL)
});
byte[] pageCookie = null;
int total = 0;
System.out.println("Start of LDAP query");
do {
NamingEnumeration<SearchResult> results = ctx.search(
baseDN,
filter,
new SearchControls());
//new SearchControls(SearchControls.SUBTREE_SCOPE, 0, 0, readAttrs, false, false));
System.out.println("results: " + results);
int count = 0;
Control[] ctrls;
ctrls = ctx.getResponseControls();
if (ctrls != null) {
System.out.println("Before loop Response controls: " + ctrls.length);
for (int i = 0; i < ctrls.length; i++) {
System.out.println("Response control: " + ctrls[i].getID() + " - " + ctrls[i].getClass().getCanonicalName());
if (ctrls[i] instanceof PagedResultsResponseControl) {
PagedResultsResponseControl prrc = (PagedResultsResponseControl) ctrls[i];
total = prrc.getResultSize();
pageCookie = prrc.getCookie();
System.out.println("New page cookie: " + OM.writeValueAsString(pageCookie));
}
}
} else {
System.out.println("Before loop Response controls is null");
}
while (results != null && results.hasMore()) {
count++;
SearchResult result = results.next();
Map<String, List<String>> userData = new HashMap<String, List<String>>();
Attributes attrs = result.getAttributes();
for (int i = 0; i < readAttrs.length; i++) {
Attribute attr = attrs.get(readAttrs[i]);
if (attr != null) {
NamingEnumeration<?> values = attr.getAll();
List<String> valueList = new ArrayList<String>();
while (values.hasMore()) {
Object value = values.next();
valueList.add(String.valueOf(value));
}
userData.put(attr.getID(), valueList);
}
}
output.put(result.getName(), userData);
System.out.println("Processed user #" + count + ": " + result.getName());
}
ctrls = ctx.getResponseControls();
if (ctrls != null) {
System.out.println("After loop Response controls: " + ctrls.length);
for (int i = 0; i < ctrls.length; i++) {
System.out.println("Response control: " + ctrls[i].getID() + " - " + ctrls[i].getClass().getCanonicalName());
if (ctrls[i] instanceof PagedResultsResponseControl) {
PagedResultsResponseControl prrc = (PagedResultsResponseControl) ctrls[i];
total = prrc.getResultSize();
pageCookie = prrc.getCookie();
System.out.println("New page cookie: " + OM.writeValueAsString(pageCookie));
}
}
} else {
System.out.println("After loop Response controls is null");
}
ctx.setRequestControls(new Control[] {
new PagedResultsControl(PAGE_SIZE, pageCookie, Control.CRITICAL)
});
} while (pageCookie != null);
System.out.println("All pages completed");
} finally {
if (ctx != null) {
ctx.close();
}
}
} catch (Exception ex) {
System.out.println("LDAP query error: " + ex);
throw ex;
} finally {
currentThread.setContextClassLoader(originalClassLoader);
}
return output;
}
}
I have an Windows 2012 with Active Directory server inside a virtual machine. I filled it with 5000 users.
Calling the code (the first segment in main()) with page size set to 500, AD returns no response control (the array is null), and the code will throw exception on the 1001st attempt to call result.hasMore():
LDAP query error: javax.naming.SizeLimitExceededException: [LDAP: error code 4 - Sizelimit Exceeded]; remaining name 'CN=Users,DC=win2022,DC=kcwong,DC=igsl'
Exception in thread "main" javax.naming.SizeLimitExceededException: [LDAP: error code 4 - Sizelimit Exceeded]; remaining name 'CN=Users,DC=win2022,DC=kcwong,DC=igsl'
at com.sun.jndi.ldap.LdapCtx.mapErrorCode(LdapCtx.java:3311)
at com.sun.jndi.ldap.LdapCtx.processReturnCode(LdapCtx.java:3205)
at com.sun.jndi.ldap.LdapCtx.processReturnCode(LdapCtx.java:2996)
at com.sun.jndi.ldap.AbstractLdapNamingEnumeration.getNextBatch(AbstractLdapNamingEnumeration.java:148)
at com.sun.jndi.ldap.AbstractLdapNamingEnumeration.hasMoreImpl(AbstractLdapNamingEnumeration.java:217)
at com.sun.jndi.ldap.AbstractLdapNamingEnumeration.hasMore(AbstractLdapNamingEnumeration.java:189)
at com.igsl.ldapuserattributes.LDAPUserAttributes.getLDAPUsers(LDAPUserAttributes.java:112)
at com.igsl.ldapuserattributes.LDAPUserAttributes.main(LDAPUserAttributes.java:43)
I tried to add a condition to the while loop to stop calling .hasMore() after PAGE_SIZE, then the after loop response control is also null, so without a cookie, the query ended there.
I have a Apache DS (2.0.0.v20210717-M17) as well, again with 5000 users. Calling it (the second segment in main()) with page size set to 500, it also returns no response control (array is null) but it happily allows me to list all 5000 users without issues. If I use the commented SearchControl() with parameters instead of default, then I get the size limit exceeded exception after 500.
It seems both ApacheDS and ActiveDirectory do not honor PagedResultsControl... I recall I used to be able to page many years ago.
The closest question I can find is this:
Why doesn't Active Directory return me a PagedResultsResponseControl?
And it does not have an answer. The comment about disabling referral is not applicable as I have already tried not using it (the commented line about SearchControls).
Is the code sample now out of date and needs changes? How can I page my query with AD?

Turns out it is the SearchControls parameter in ctx.search().
I need to avoid using the version with all parameters (which includes max results), that will override PagedResultsControl.
Instead I need to create one using the default constructor, then override the things I need:
SearchControls sc = new SearchControls();
sc.setScope(SearchControls.SUBTREE);
Then it will work properly.

Related

I'm getting "Topic not present in metadata after 60000 ms" message on some computers

Here's my program
package kafkaConsumer;
import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.LongSerializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import java.lang.management.ManagementFactory;
import java.lang.management.RuntimeMXBean;
import java.time.Duration;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import java.util.Scanner;
import java.util.concurrent.Future;
public class KafkaConsumerExample {
private final static String INTOPIC = "my-intopic";
private final static String OUTTOPIC = "my-outtopic";
private final static String BOOTSTRAP_SERVERS = "192.168.10.10:9092";
private static Producer<Long, String> createProducer(String bootstrapServers) {
Properties props = new Properties();
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
props.put(ProducerConfig.CLIENT_ID_CONFIG, "KafkaProducerExample");
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName());
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
return new KafkaProducer<>(props);
}
private static Consumer<Long, String> createConsumer(String intopic, String bootstrapServers) {
final Properties props = new Properties();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
props.put(ConsumerConfig.GROUP_ID_CONFIG, "KafkaConsumerExample");
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName());
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "1");
// Create the consumer using props.
final Consumer<Long, String> consumer = new KafkaConsumer<>(props);
// Subscribe to the topic.
consumer.subscribe(Collections.singletonList(intopic));
return consumer;
}
static boolean run(
Consumer<Long, String> consumer, Producer<Long, String> producer,
String inTopic, String outTopic) throws InterruptedException {
String valueToSend;
long keyToUse;
if (consumer == null) {
Scanner sc = new Scanner(System.in);
System.out.print("Enter key> ");
keyToUse = sc.nextLong();
valueToSend = sc.nextLine();
System.out.print("Enter value> ");
valueToSend = sc.nextLine();
} else {
Duration delta = Duration.ofSeconds(1);
ConsumerRecords<Long, String> consumerRecords = consumer.poll(delta);
while (consumerRecords.count() == 0) {
consumerRecords = consumer.poll(delta);
}
ConsumerRecord<Long, String> record = consumerRecords.iterator().next();
keyToUse = record.key();
valueToSend = record.value();
if (producer != null)
System.out.println("Got key = " + keyToUse + " and value = " + valueToSend);
}
if (producer == null) {
System.out.println("key = " + keyToUse + " and value = " + valueToSend);
} else {
try {
System.out.println("Creating ProducerRecord");
final ProducerRecord<Long, String> record =
new ProducerRecord<>(outTopic, keyToUse, valueToSend);
System.out.println("Calling producer.send");
Future<RecordMetadata> sent = producer.send(record);
System.out.println("Calling sent.get");
RecordMetadata metadata = sent.get();
System.out.println("Calling flush");
producer.flush();
System.out.println("After flush");
} catch (Exception e) {
System.out.println("Exception sending message: " + e.getMessage());
}
}
return !valueToSend.equals("STOP");
}
public static void usage() {
System.out.println(System.getProperty("sun.java.command"));
System.out.println();
System.out.println("Usage parameters: [--intopic name] [--outtopic name] [--bootstrap-servers servers]");
System.exit(1);
}
public static void main(String... args) throws Exception {
String inTopic = INTOPIC;
String outTopic = OUTTOPIC;
String bootstrapServers = BOOTSTRAP_SERVERS;
for (int i = 0; i < args.length; ++i) {
if (args[i].equals("--intopic")) {
if (i == args.length - 1) {
usage();
}
inTopic = args[++i];
} else if (args[i].equals("--outtopic")) {
if (i == args.length - 1) {
usage();
}
outTopic = args[++i];
} else if (args[i].equals("--bootstrap-servers")) {
if (i == args.length - 1) {
usage();
}
bootstrapServers = args[++i];
} else {
usage();
}
}
final Consumer<Long, String> consumer;
if (inTopic.equals("stdin")) {
consumer = null;
} else {
consumer = createConsumer(inTopic, bootstrapServers);
}
final Producer<Long, String> producer;
if (outTopic.equals("stdout")) {
producer = null;
} else {
producer = createProducer(bootstrapServers);
}
while (true) {
if (!run(consumer, producer, inTopic, outTopic)) {
break;
}
}
if (consumer != null)
consumer.close();
if (producer != null)
producer.close();
}
}
I run it on Windows and Linux. On some computers it runs fine, but on other computers, specifically a Linux machine which is not the kafka machine, it consistently gives me this error:
Exception sending message: org.apache.kafka.common.errors.TimeoutException: Topic outtopic not present in metadata after 60000 ms.
This happens, of course, when trying to send a message in the run() function, specifically in the sentence RecordMetadata metadata = sent.get().
This kafka installation allows creation of new topics automatically. In fact, if I enter a new name in the --outtopic parameter, even when sending the message fails, the topic is created.
Any clues why? What am I missing in the configuration?
Thank you
Shimon
192.168.10.10:9092
This seems to be an internal IP. Check if the clients where you cannot access are within its network range i.e. whether they can access this IP.
Try doing a telnet from your client machine..
telnet 192.168.10.10 9092
If you are not able to telnet then give the IP which can be accessed by your clients and ensure the same in the advertised.listeners also.
Also check your advertised.listeners config. When we connect to a url given in the bootstrap.servers that typically should par with those in the advertised.listeners configuration.
Topic metadata not present means that your client is unable to fetch any information about the given topic i.e. it cannot get metadata through the given bootstrap.servers property.

Elasticsearch create index and post

I am trying to perform actions in ES, so far I believe that I was able established connection correctly using Jest(http request) and now I am trying to create a new topic and post some information so it will be visible throw the elasticsearch head plugin, I run my code I dont receive any Exception but nothing happens as well,
public class ElasticSearch {
private String ES_HOST = "localhost";
private String ES_PORT = "9200";
private static JestClient jestClient = null;
public JestClient getElasticSearchClient() {
return jestClient;
}
public void connectToElasticSearch() {
try {
JestClientFactory factory = new JestClientFactory();
factory.setHttpClientConfig(
new HttpClientConfig.Builder("http://" + ES_HOST + ":" + ES_PORT)
.multiThreaded(true)
// //Per default this implementation will create no more than 2 concurrent
// connections per given route
// .defaultMaxTotalConnectionPerRoute(<YOUR_DESIRED_LEVEL_OF_CONCURRENCY_PER_ROUTE>)
// // and no more 20 connections in total
// .maxTotalConnection(<YOUR_DESIRED_LEVEL_OF_CONCURRENCY_TOTAL>)
.build());
jestClient = factory.getObject();
} catch (Exception e) {
e.printStackTrace();
}
}
public void createIndex(String indexName, String indexType) throws IOException {
// jestClient.execute(new CreateIndex.Builder(indexName).build());
PutMapping putMapping = new PutMapping.Builder(
indexName,
indexType,
"{ \"my_type\" : { \"properties\" : { \"message\" : {\"type\" : \"string\", \"store\" : \"yes\"} } } }"
).build();
jestClient.execute(putMapping);
}
public void postInES() throws IOException {
String source = jsonBuilder()
.startObject()
.field("user", "kimchy")
.field("postDate", "date")
.field("message", "trying out Elastic Search")
.endObject().string();
}
public static void main(String[] args) throws IOException {
ElasticSearch es = new ElasticSearch();
es.connectToElasticSearch();
es.getElasticSearchClient();
es.createIndex("ES TEST", "TEST");
es.postInES();
}
I am using:
<dependency>
<groupId>io.searchbox</groupId>
<artifactId>jest</artifactId>
<version>5.3.3</version>
</dependency>
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>transport</artifactId>
<version>6.2.4</version>
</dependency>`enter code here`
I will appreciate your help
thanks
Thanks.
I found few problems in my code above and I was able to fix it, first when using java the port has to be 9300 and not 9200, I actually changed my entire code and decided to use TransportClient instead of JestClient which helped me. in case anyone else needs or had a similar problem I will share my code here hope it will help others
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
import org.elasticsearch.index.reindex.DeleteByQueryAction;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import java.io.IOException;
import java.net.InetAddress;
import java.util.Map;
/**
* #author YoavT #Date 6/26/2018 #Time 9:20 AM
*/
public class ElasticSearch{
private String ES_HOST = "localhost";
private int ES_PORT = 9300;
private TransportClient client = null;
protected boolean connectToElasticSearch(String clusterName) {
boolean flag = false;
try {
Settings settings =
Settings.builder()
.put("cluster.name", clusterName)
.put("client.transport.ignore_cluster_name", true)
.put("client.transport.sniff", true)
.build();
// create connection
client = new PreBuiltTransportClient(settings);
client.addTransportAddress(new TransportAddress(InetAddress.getByName(ES_HOST), ES_PORT));
System.out.println(
"Connection " + clusterName + "#" + ES_HOST + ":" + ES_PORT + " established!");
flag = true;
} catch (Exception e) {
e.printStackTrace();
flag = false;
}
return flag;
}
/**
* Check the health status of the cluster
*/
public boolean isClusterHealthy(String clusterName) {
connectToElasticSearch(clusterName);
final ClusterHealthResponse response =
client
.admin()
.cluster()
.prepareHealth()
.setWaitForGreenStatus()
.setTimeout(TimeValue.timeValueSeconds(2))
.execute()
.actionGet();
if (response.isTimedOut()) {
System.out.println("The cluster is unhealthy: " + response.getStatus());
return false;
}
System.out.println("The cluster is healthy: " + response.getStatus());
return true;
}
/**
* Previous step is (check if cluster is healthy) The cluster is ready now and we can start with
* creating an index. Before that, we check that the same index was not created previously.
*/
public boolean isIndexRegistered(String indexName, String clusterName) {
connectToElasticSearch(clusterName);
// check if index already exists
final IndicesExistsResponse ieResponse =
client.admin().indices().prepareExists(indexName).get(TimeValue.timeValueSeconds(1));
// index not there
if (!ieResponse.isExists()) {
return false;
}
System.out.println("Index already created!");
return true;
}
/**
* If the index does not exist already, we create the index. *
*/
public boolean createIndex(String indexName, String numberOfShards, String numberOfReplicas, String clusterName) {
connectToElasticSearch(clusterName);
try {
CreateIndexResponse createIndexResponse =
client
.admin()
.indices()
.prepareCreate(indexName.toLowerCase())
.setSettings(
Settings.builder()
.put("index.number_of_shards", numberOfShards)
.put("index.number_of_replicas", numberOfReplicas))
.get();
if (createIndexResponse.isAcknowledged()) {
System.out.println(
"Created Index with "
+ numberOfShards
+ " Shard(s) and "
+ numberOfReplicas
+ " Replica(s)!");
return true;
}
} catch (Exception e) {
e.printStackTrace();
}
return false;
}
public static void main(String[] args) throws IOException {
ElasticSearch elasticSearch = new ElasticSearch();
elasticSearch.connectToElasticSearch("elasticsearch");
boolean isHealthy = elasticSearch.isClusterHealthy("elasticsearch");
System.out.println("is cluster healthy= " + isHealthy);
boolean isIndexExsist = elasticSearch.isIndexRegistered("Test", "elasticsearch");
System.out.println("is index exsist = " + isIndexExsist);
boolean createIndex = elasticSearch.createIndex("TestIndex", "3", "1", "elasticsearch");
System.out.println("Is index created = " + createIndex);
boolean bulkInsert = elasticSearch.bulkInsert("TestIndex", "Json", "elasticsearch");
System.out.println("Bulk insert = " + bulkInsert);
long deleteBulk = elasticSearch.deleteBulk("TestIndex", "name", "Mark Twain", "elasticsearch");
System.out.println("Delete bulk = " + deleteBulk);
}
/**
* We basically want to index a JSON array consisting of objects with the properties name and age. We use a bulk insert to insert all the data at once.
* In our tests it happened that the cluster health status was not ready when we tried to run a search/delete query directly after the insert. Consequently,
* we added the setRefreshPolicy( RefreshPolicy.IMMEDIATE ) method to signalize the server to refresh the index after the specified request.
* The data can now be queried directly after.
*
* #param indexName
* #param indexType
* #return
* #throws IOException
*/
public boolean bulkInsert(String indexName, String indexType, String clusterName) throws IOException {
connectToElasticSearch(clusterName);
boolean flag = true;
BulkRequestBuilder bulkRequest = client.prepareBulk();
// for (int i = 0; i < listOfParametersForInsertion.length; i++) {
bulkRequest
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.add(
client
.prepareIndex(indexName, indexType, null)
.setSource(
XContentFactory.jsonBuilder()
.startObject()
.field("name", "Mark Twain")
.field("age", 75)
.endObject()));
// }
BulkResponse bulkResponse = bulkRequest.get();
if (bulkResponse.hasFailures()) {
// process failures by iterating through each bulk response item
System.out.println("Bulk insert failed!");
flag = false;
}
return flag;
}
/**
* After successfully querying data, we try to delete documents using a key-value pair to get
* deeper into the Elasticsearch behavior.
*/
public long deleteBulk(String indexName, String key, String value, String clusterName) {
connectToElasticSearch(clusterName);
BulkByScrollResponse response =
DeleteByQueryAction.INSTANCE
.newRequestBuilder(client)
.filter(QueryBuilders.matchQuery(key, value))
.source(indexName)
.refresh(true)
.get();
System.out.println("Deleted " + response.getDeleted() + " element(s)!");
return response.getDeleted();
}
/**
* To query the data, we use a SearchResponse in combination with a scroll. A scroll is basically
* the Elasticsearch counterpart to a cursor in a traditional SQL database. Using that sort of
* query is quite an overkill for our example and just for demonstration purposes. It is rather
* used to query large amounts of data (not like five documents in our case) and not intended for
* real-time user requests.
*
* #param indexName
* #param from
* #param to
*/
public void queryResultsWithFilter(String indexName, int from, int to, String clusterName, String filterField) {
connectToElasticSearch(clusterName);
SearchResponse scrollResp =
client
.prepareSearch(indexName)
// sort order
.addSort(FieldSortBuilder.DOC_FIELD_NAME, SortOrder.ASC)
// keep results for 60 seconds
.setScroll(new TimeValue(60000))
// filter for age
.setPostFilter(QueryBuilders.rangeQuery(filterField).from(from).to(to))
// maximum of 100 hits will be returned for each scroll
.setSize(100)
.get();
// scroll until no hits are returned
do {
int count = 1;
for (SearchHit hit : scrollResp.getHits().getHits()) {
Map<String, Object> res = hit.getSourceAsMap();
// print results
for (Map.Entry<String, Object> entry : res.entrySet()) {
System.out.println("[" + count + "] " + entry.getKey() + " --> " + entry.getValue());
}
count++;
}
scrollResp =
client
.prepareSearchScroll(scrollResp.getScrollId())
.setScroll(new TimeValue(60000))
.execute()
.actionGet();
// zero hits mark the end of the scroll and the while loop.
} while (scrollResp.getHits().getHits().length != 0);
}
}

Is their any way to fetch duplicates from AD using java

Is their any way to fetch duplicates from AD using java ? I see we can do it in power shell by grouping all usernames and then checking count >1.
https://gallery.technet.microsoft.com/scriptcenter/Find-Active-Directory-c8789b42
Please help :).
you should get all objects of a special type(such as user, group , ...) and their attributes. then check duplicate attributes of all objects. for do this, you can insert each attributes in a hasp map as a key, and insert all value of attribute per each object and check is duplicated or not ?
use JAVA JNDI to access AD server as follow:
/**
* retrieve all attributes of a named object.
*
*/
class GetAllAttrs {
static void printAttrs(Attributes attrs) {
if (attrs == null) {
System.out.println("No attributes");
} else {
/* Print each attribute */
try {
for (NamingEnumeration ae = attrs.getAll(); ae.hasMore();) {
Attribute attr = (Attribute) ae.next();
System.out.println("attribute: " + attr.getID());
/* print each value */
for (NamingEnumeration e = attr.getAll(); e.hasMore(); System.out
.println("value: " + e.next()))
;
}
} catch (NamingException e) {
e.printStackTrace();
}
}
}
public static void main(String[] args) {
// Set up the environment for creating the initial context
Hashtable<String, Object> env = new Hashtable<String, Object>(11);
env
.put(Context.INITIAL_CONTEXT_FACTORY,
"com.sun.jndi.ldap.LdapCtxFactory");
env.put(Context.PROVIDER_URL, "ldap://localhost:389/o=JNDITutorial");
try {
// Create the initial context
DirContext ctx = new InitialDirContext(env);
// Get all the attributes of named object
Attributes answer = ctx.getAttributes("cn=Ted Geisel, ou=People");
// Print the answer
printAttrs(answer);
// Close the context when we're done
ctx.close();
} catch (Exception e) {
e.printStackTrace();
}
}
}
also you can use search filter to limit your outputs:
public class LdapSearch {
public static void main(String[] args) throws Exception {
Hashtable env = new Hashtable();
String sp = "com.sun.jndi.ldap.LdapCtxFactory";
env.put(Context.INITIAL_CONTEXT_FACTORY, sp);
String ldapUrl = "ldap://localhost:389/dc=yourName, dc=com";
env.put(Context.PROVIDER_URL, ldapUrl);
DirContext dctx = new InitialDirContext(env);
String base = "ou=People";
SearchControls sc = new SearchControls();
String[] attributeFilter = { "cn", "mail" };
sc.setReturningAttributes(attributeFilter);
sc.setSearchScope(SearchControls.SUBTREE_SCOPE);
String filter = "(&(sn=W*)(l=Criteria*))";
NamingEnumeration results = dctx.search(base, filter, sc);
while (results.hasMore()) {
SearchResult sr = (SearchResult) results.next();
Attributes attrs = sr.getAttributes();
Attribute attr = attrs.get("cn");
System.out.print(attr.get() + ": ");
attr = attrs.get("mail");
System.out.println(attr.get());
}
dctx.close();
}
}

Java HashMap Create, edit and delete

I am trying to update a HashMap use it directly in the next method, but it isn't working. From what I read I couldn't find a solution. Some say'd it is impossible and some say use an iterator, but even with the iterator it's not working. the error is the printing method it is not printing or even getting inside the while loop because it is empty but i cant find why
This is the two methods I'm trying to update and print some information.
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Scanner;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
public class OrderList {
// Storage for an arbitrary number of details.
private HashMap<String, Order> orderList = new HashMap<String, Order>();
/**
* Perform any initialization .
*/
public OrderList() {
orderList = new HashMap<String, Order>();
}
public HashMap<String, Order> getOrders() {
return orderList;
}
public void readOrderFile(String OrderListPath) {
try {
File file = new File(OrderListPath);
Scanner scan = new Scanner(file);
while (scan.hasNextLine()) {
String readLine = scan.nextLine();
if (readLine != null) {
getSplitLinesOrders(readLine);
}
}
} catch (Exception e) {
}
}
public void getSplitLinesOrders(String readLine) {
String id = "";
String customerId = "";
String itemId = "";
int quantity = 0;
try {
String[] splitedLine = readLine.split(",");
if (splitedLine.length == 4) {
id = splitedLine[0];
customerId = splitedLine[1];
itemId = splitedLine[2];
quantity = Integer.parseInt(splitedLine[3]);
Order newOrder = new Order(id, customerId, itemId, quantity);
orderList.put(id, newOrder);
}
} catch (Exception e) {
}
}
/**
* Add a new set of details to the list
* #param details The details of the staff
*/
// public void addDetails(Order details) {
// orderList.add(details);
// }
public boolean hasOrder() {
return orderList.size() != 0;
}
public Order getNextOrder() {
Order order = orderList.remove(0);
return order;
}
/**
* #return All the details
*/
public String listDetails() {
StringBuffer allEntries = new StringBuffer();
for (Map.Entry<String, Order> details : orderList.entrySet()) {
String Key = details.getKey();
Object value = details.getValue();
allEntries.append(Key + " " + value);
}
return allEntries.toString();
}
public void PrintListOfOrders() {
Iterator it = getOrders().entrySet().iterator();
try {
while (it.hasNext()) {
Order value = (Order) it.next();
System.out.println(value.getOrderId() + " " + value.getCustomerId() + " " + value.getItemId() + " " + value.getQuantity());
}
} catch (Exception e) {
System.out.println(e);
}
}
}
You're probably getting a NullPointerException? Next time tell us what is going wrong and provide stacktraces if applicable.
The code you posted doesn't create an instance of orderList, so if it's not done elsewhere that code will throw a NullPointerException
Try adding:
private HashMap<String, Order> orderList = new HashMap<String, Order>;
Swallowing an Exception like this:
} catch (Exception e) {
}
is not a good practice since it will hide all information about what's going wrong, at least do:
catch (Exception e) {
e.printStacktrace();
}
You could do something like this:
Set<String> s = List.keySet();
Iterator<String> i = s.iterator();
Which is the initialization of the iterator, and then you could iterate through the keys using i.next(), getting a String each time and asking orderList.get(thatString) to get the value.

Iterating through HashSet empties HashMap entry

The following method gets a "Route" (class name and class method):
public Route getRoute(final String method, final String request) {
if (hasRoutes) {
for (Map.Entry<Pattern, HashMap<String, String>> entry : routes) {
Matcher match = entry.getKey().matcher(request);
if (match.find()) {
HashMap<String, String> methods = entry.getValue();
// ISSUE: Returns FALSE after 1st call of Router.getRoute()
if (methods.containsKey(method)) {
return new Route(match.group("interface"), "TRUE (" + method + " - " + match.group("interface") + "): " + methods.get(method));
} else {
return new Route(match.group("interface"), "FALSE (" + method + " - " + match.group("interface") + "): " + methods.values().toString() + ", SIZE: " + entry.getValue().size());
}
//return entry.getValue().containsKey(method) ? new Route(match.group("interface"), entry.getValue().get(method)) : null;
}
}
}
return null;
}
"routes" is defined as:
private Set<Entry<Pattern, HashMap<String, String>>> routes;
It is a cached representation of a JSON configuration file that defines supported routes, e.g.:
{
"^/?(?<interface>threads)/?$": {
"GET": "list",
"POST": "create"
},
"^/?(?<interface>threads)/(?<id>\\d+)/?$": {
"GET": "get",
"POST": "reply",
"PUT": "edit",
"PATCH": "edit",
"DELETE": "delete"
}
}
EDIT, here's how "routes" is filled from the contents of the JSON file:
try {
JsonParser parser = JSONFactory.createJsonParser(in);
JsonNode root = JSONMapper.readTree(parser);
Iterator base = root.getFieldNames();
Iterator node;
String match, method;
HashMap<Pattern, HashMap<String, String>> routesMap = new HashMap();
while (base.hasNext()) {
match = base.next().toString();
if (match != null) {
node = root.get(match).getFieldNames();
HashMap<String, String> methods = new HashMap();
while (node.hasNext()) {
method = node.next().toString();
if (method != null) {
methods.put(method, root.get(match).get(method).getTextValue());
}
}
if (!methods.isEmpty()) {
routesMap.put(Pattern.compile(match), methods);
}
}
}
if (!routesMap.isEmpty()) {
hasRoutes = true;
routes = routesMap.entrySet();
}
// Help garbage collection
parser = null;
root = null;
base = null;
node = null;
match = null;
method = null;
routesMap = null;
} catch (Exception ex) {
}
EDIT 2, properties in question & init() method:
public final static JsonFactory JSONFactory = new JsonFactory();
public final static ObjectMapper JSONMapper = new ObjectMapper();
public static Router router;
private final Class self = getClass();
private final ClassLoader loader = self.getClassLoader();
public void init(ServletConfig config) throws ServletException {
super.init(config);
router = new Router(self.getResourceAsStream("/v1_0/Routes.json"), JSONFactory, JSONMapper);
}
For some reason when accessing the servlet after the first time the HashMap is empty of values. A x.size() returns zero.
This is a rewrite of a PHP application from the ground up so I apologise in advance if the issue is something mundane.
Full source:
- Router source
- Route source
Your getOptions() method removes every entry from this map as it iterates. So, after calling getOptions() once, the map is empty.
By the way, assigning null to variables does not "help the garbage collector." The garbage collector knows that when the scope of a variable is exited, that variable no longer references the object. You are actually slowing things down by assigning values (null) that can never be read (as well as cluttering your code with counterproductive noise). A good static analysis tool like FindBugs will warn you that this is bad code.
Iterating through HashSet empties HashMap entry
That doesn't happen. Simply iterating a HashSet has no side-effects on the set's contents.
There is something else going on here that is causing your problem.

Categories