I am creating a Node.js Java backend. The Node.js middleware receives HTTP requests from an Android application and then relays them to the Java code. The reason for choosing these technologies is to create a highly scalable backend from scratch.
I want the Node.js api to receive the HTTP requests, pass it to the Java-side of the backend, the Java code does its calculations, sends back the result to the Node.js API and then finishes the process by sending the result back to the Android application.
I can receive and parse HTTP requests:
var BodyParser = require('body-parser');
var Express = require('express');
var JavaClient = require('./NodeJavaBridge.js');

var JavaClientInstance = new JavaClient();
var app = Express();

///// Receive message logic \\\\\
app.use(BodyParser.json());

app.post('/', function (request, response) {
    // Parsed JSON payload from the Android client (currently unused -
    // this is where the task should be forwarded to the Java bridge).
    var task = request.body;
    // BUG FIX: the response body is JSON, so advertise application/json
    // rather than text/plain.
    response.writeHead(200, { 'content-type': 'application/json' });
    var otherObject = { SomeData: 1234 };
    var json = JSON.stringify({
        data: otherObject
    });
    response.end(json);
});

console.log("START --> Java Client Instance");
JavaClientInstance.run();

app.listen(8080); // port on which the express server listens
console.log("Server listening on: " + 8080);
I can also send and receive data between Node.js and Java:
var Util = require('util');
var EventEmitter = require('events').EventEmitter;
var ChildProc = require('child_process');

/**
 * JavaClient bridges Node.js and a Java child process. Messages travel
 * over the child's stdin/stdout as a 4-byte big-endian length prefix
 * followed by UTF-8 encoded JSON.
 *
 * Emits: 'spawn', 'message', 'exception', 'unknown', 'sent',
 * 'java_error', plus one event named after each received messageName
 * (e.g. 'Hello', 'Info', 'Error').
 */
var JavaClient = function () {
    var _self = this;

    // The child process object we get when we spawn the java process
    var _javaSpawn = null;

    // Buffer for receiving messages in part and piecing them together later
    var _receiveBuffer = null;

    // The location of java and the jar - public so users of this module
    // can override them before calling run().
    _self.javaPath = 'java';
    _self.jarPath = 'C:/Dev/Backend_Java.jar';
    _self.verbose = true;

    // List of events emitted - for informational purposes
    _self.events = [
        'spawn', 'message', 'exception', 'unknown', 'sent', 'java_error',
        // Response messages that then become events themselves
        'Error', 'Hello', 'Info'
    ];

    /**
     * Attach our own event handler to reply to the hello message.
     * This is just a convenience part of the protocol so that clients
     * don't have to do it.
     */
    _self.on('Hello', function () {
        _self.sendHello();
    });

    /**
     * Executes the java process to begin sending and receiving
     * communication. Emits 'spawn' with the ChildProcess handle.
     */
    _self.run = function () {
        // Invoke the process
        _javaSpawn = ChildProc.spawn(_self.javaPath, ['-jar', _self.jarPath]);
        // Wire up events
        _javaSpawn.stdout.on('data', onData);
        _javaSpawn.stderr.on('data', onJavaError);
        _javaSpawn.on('exit', function (code) {
            console.log("The java program exited with code " + code + ".");
        });
        // Emit our own event to indicate to others that we have spawned
        _self.emit('spawn', _javaSpawn);
    }

    // Sends the hello request message
    _self.sendHello = function () {
        sendMessage(
            {
                messageName : 'Hello',
                version : '1.1'
            });
    }

    // Sends a message that will be echoed back as an Info message
    _self.sendEcho = function (message) {
        sendMessage(
            {
                messageName : "Echo",
                message : message
            });
    }

    // Sends a message telling the java app to exit
    _self.sendGoodbye = function () {
        sendMessage(
            {
                "messageName" : "Goodbye"
            });
    }

    /**
     * Sends a message object as a JSON encoded string to the java
     * application: 4-byte big-endian byte length, then the UTF-8 JSON.
     * Emits 'sent' with the original message object.
     */
    function sendMessage(aMsg)
    {
        // convert to json and prepare buffer
        var aJsonString = JSON.stringify(aMsg);
        var lByteLength = Buffer.byteLength(aJsonString);
        // BUG FIX: Buffer.alloc replaces the deprecated (and unsafe)
        // `new Buffer(size)` constructor.
        var lMsgBuffer = Buffer.alloc(4 + lByteLength);
        // Write 4-byte length, followed by json, to buffer
        lMsgBuffer.writeUInt32BE(lByteLength, 0);
        // BUG FIX: write lByteLength BYTES, not aJsonString.length
        // characters - for multi-byte UTF-8 payloads the character count
        // is smaller than the byte count and the frame would be truncated.
        lMsgBuffer.write(aJsonString, 4, lByteLength, 'utf8');
        // send buffer to standard input on the java application
        _javaSpawn.stdin.write(lMsgBuffer);
        _self.emit('sent', aMsg);
    }

    /**
     * Receives data from the child's stdout, reassembles length-prefixed
     * frames and emits one event per complete JSON message.
     */
    function onData(data)
    {
        // Attach or extend receive buffer
        _receiveBuffer = (null == _receiveBuffer) ? data : Buffer.concat([_receiveBuffer, data]);
        // Pop all messages until the buffer is exhausted
        while (null != _receiveBuffer && _receiveBuffer.length > 3)
        {
            // BUG FIX: read the length back as UNSIGNED to mirror
            // writeUInt32BE (readInt32BE would go negative for frames
            // with the high bit set).
            var size = _receiveBuffer.readUInt32BE(0);
            // Early exit processing if we don't have enough data yet
            if ((size + 4) > _receiveBuffer.length)
            {
                break;
            }
            // Pull out the message
            var json = _receiveBuffer.toString('utf8', 4, (size + 4));
            // Resize the receive buffer
            _receiveBuffer = ((size + 4) == _receiveBuffer.length) ? null : _receiveBuffer.slice((size + 4));
            // Parse the message as a JSON object
            try
            {
                var msgObj = JSON.parse(json);
                // emit the generic message received event
                _self.emit('message', msgObj);
                // emit an object-type specific event
                if ((typeof msgObj.messageName) == 'undefined')
                {
                    _self.emit('unknown', msgObj);
                }
                else
                {
                    _self.emit(msgObj.messageName, msgObj);
                }
            }
            catch (ex)
            {
                _self.emit('exception', ex);
            }
        }
    }

    /**
     * Receives error output from the java process and re-emits it as a
     * 'java_error' event (string payload).
     */
    function onJavaError(data)
    {
        _self.emit('java_error', data.toString());
    }
}

// Make our JavaClient class an EventEmitter
Util.inherits(JavaClient, EventEmitter);

// export our class
module.exports = JavaClient;
My problem: How do I let the POST request send a request to my JavaClient instance, wait for a response and then send it back to origin (Android app).
Here is an example of how I am trying to get the logic working:
var client = require('./JavaClient');
var instance = new client();

// Debug logging: one handler per event of interest, registered from a
// table so the wiring is visible in one place.
var handlers = {
    message: function (msg) {
        console.log('Received a message...');
        console.log(msg);
    },
    sent: function (msg) {
        console.log('Sent a message...');
        console.log(msg);
    },
    Info: function (msg) {
        console.log("Received info");
        console.log(msg.message);
    }
};
Object.keys(handlers).forEach(function (eventName) {
    instance.on(eventName, handlers[eventName]);
});

// Spawn the Java process (the Hello handshake happens automatically),
// then use the first Info acknowledgement to try a round-trip echo.
instance.run();
instance.once('Info', function () {
    instance.sendEcho("ECHO!");
});
If I should make something more clear, please let me know (it's really late, and I assume my writing abilities are taking a dive). I would appreciate any answer/suggestion/this-is-a-bad-idea type of comment.
Thanks!
var Util = require('util');
var EventEmitter = require('events').EventEmitter;
var ChildProc = require('child_process');

/**
 * JavaClient bridges Node.js and a Java child process. Messages travel
 * over the child's stdin/stdout as a 4-byte big-endian length prefix
 * followed by UTF-8 encoded JSON.
 *
 * Emits: 'spawn', 'message', 'exception', 'unknown', 'sent',
 * 'java_error', plus one event named after each received messageName
 * (e.g. 'Hello', 'Info', 'Error').
 */
var JavaClient = function () {
    var _self = this;

    // The child process object we get when we spawn the java process
    var _javaSpawn = null;

    // Buffer for receiving messages in part and piecing them together later
    var _receiveBuffer = null;

    // The location of java and the jar - public so users of this module
    // can override them before calling run().
    _self.javaPath = 'java';
    _self.jarPath = 'C:/Dev/Backend_Java.jar';
    _self.verbose = true;

    // List of events emitted - for informational purposes
    _self.events = [
        'spawn', 'message', 'exception', 'unknown', 'sent', 'java_error',
        // Response messages that then become events themselves
        'Error', 'Hello', 'Info'
    ];

    /**
     * Attach our own event handler to reply to the hello message.
     * This is just a convenience part of the protocol so that clients
     * don't have to do it.
     */
    _self.on('Hello', function () {
        _self.sendHello();
    });

    /**
     * Executes the java process to begin sending and receiving
     * communication. Emits 'spawn' with the ChildProcess handle.
     */
    _self.run = function () {
        // Invoke the process
        _javaSpawn = ChildProc.spawn(_self.javaPath, ['-jar', _self.jarPath]);
        // Wire up events
        _javaSpawn.stdout.on('data', onData);
        _javaSpawn.stderr.on('data', onJavaError);
        _javaSpawn.on('exit', function (code) {
            console.log("The java program exited with code " + code + ".");
        });
        // Emit our own event to indicate to others that we have spawned
        _self.emit('spawn', _javaSpawn);
    }

    // Sends the hello request message
    _self.sendHello = function () {
        sendMessage(
            {
                messageName : 'Hello',
                version : '1.1'
            });
    }

    // Sends a message that will be echoed back as an Info message
    _self.sendEcho = function (message) {
        sendMessage(
            {
                messageName : "Echo",
                message : message
            });
    }

    // Sends a message telling the java app to exit
    _self.sendGoodbye = function () {
        sendMessage(
            {
                "messageName" : "Goodbye"
            });
    }

    /**
     * Sends a message object as a JSON encoded string to the java
     * application: 4-byte big-endian byte length, then the UTF-8 JSON.
     * Emits 'sent' with the original message object.
     */
    function sendMessage(aMsg)
    {
        // convert to json and prepare buffer
        var aJsonString = JSON.stringify(aMsg);
        var lByteLength = Buffer.byteLength(aJsonString);
        // BUG FIX: Buffer.alloc replaces the deprecated (and unsafe)
        // `new Buffer(size)` constructor.
        var lMsgBuffer = Buffer.alloc(4 + lByteLength);
        // Write 4-byte length, followed by json, to buffer
        lMsgBuffer.writeUInt32BE(lByteLength, 0);
        // BUG FIX: write lByteLength BYTES, not aJsonString.length
        // characters - for multi-byte UTF-8 payloads the character count
        // is smaller than the byte count and the frame would be truncated.
        lMsgBuffer.write(aJsonString, 4, lByteLength, 'utf8');
        // send buffer to standard input on the java application
        _javaSpawn.stdin.write(lMsgBuffer);
        _self.emit('sent', aMsg);
    }

    /**
     * Receives data from the child's stdout, reassembles length-prefixed
     * frames and emits one event per complete JSON message.
     */
    function onData(data)
    {
        // Attach or extend receive buffer
        _receiveBuffer = (null == _receiveBuffer) ? data : Buffer.concat([_receiveBuffer, data]);
        // Pop all messages until the buffer is exhausted
        while (null != _receiveBuffer && _receiveBuffer.length > 3)
        {
            // BUG FIX: read the length back as UNSIGNED to mirror
            // writeUInt32BE (readInt32BE would go negative for frames
            // with the high bit set).
            var size = _receiveBuffer.readUInt32BE(0);
            // Early exit processing if we don't have enough data yet
            if ((size + 4) > _receiveBuffer.length)
            {
                break;
            }
            // Pull out the message
            var json = _receiveBuffer.toString('utf8', 4, (size + 4));
            // Resize the receive buffer
            _receiveBuffer = ((size + 4) == _receiveBuffer.length) ? null : _receiveBuffer.slice((size + 4));
            // Parse the message as a JSON object
            try
            {
                var msgObj = JSON.parse(json);
                // emit the generic message received event
                _self.emit('message', msgObj);
                // emit an object-type specific event
                if ((typeof msgObj.messageName) == 'undefined')
                {
                    _self.emit('unknown', msgObj);
                }
                else
                {
                    _self.emit(msgObj.messageName, msgObj);
                }
            }
            catch (ex)
            {
                _self.emit('exception', ex);
            }
        }
    }

    /**
     * Receives error output from the java process and re-emits it as a
     * 'java_error' event (string payload).
     */
    function onJavaError(data)
    {
        _self.emit('java_error', data.toString());
    }
}

// Make our JavaClient class an EventEmitter
Util.inherits(JavaClient, EventEmitter);

// export our class
module.exports = JavaClient;
var client = require('./JavaClient');
var instance = new client();

// Debug logging: one handler per event of interest, registered from a
// table so the wiring is visible in one place.
var handlers = {
    message: function (msg) {
        console.log('Received a message...');
        console.log(msg);
    },
    sent: function (msg) {
        console.log('Sent a message...');
        console.log(msg);
    },
    Info: function (msg) {
        console.log("Received info");
        console.log(msg.message);
    }
};
Object.keys(handlers).forEach(function (eventName) {
    instance.on(eventName, handlers[eventName]);
});

// Spawn the Java process (the Hello handshake happens automatically),
// then use the first Info acknowledgement to try a round-trip echo.
instance.run();
instance.once('Info', function () {
    instance.sendEcho("ECHO!");
});
Related
I have been trying to get a connection to a mqtt server working for a while,
and while the subscription is received, it executes once and after that it only executes after a server restart
// Ensure the MQTT connection is up (blocking) before subscribing.
// NOTE(review): not guarded against concurrent callers; presumably run
// from a single setup path - confirm.
if (!mqttClient.isConnected){
mqttClient.connect(mqttConnectOptions).waitForCompletion()
}
// Subscribe to all device topics with QoS 0; the trailing lambda is the
// per-message callback.
val iMqttToken = mqttClient.subscribe("devices/+/+", 0) { topic, message ->
// gets printed once
println(topic + String(message.payload))
// Everything after the leading "devices/" segment identifies the device
// the reply should be routed back to.
val pointTo = topic.substring(topic.indexOf("/") + 1, topic.length)
// Randomly grant or deny (50/50) - demo logic.
val result = Math.random() > 0.5
val publishTopic = "server/$pointTo"
// gets printed once
println("enviando respuesta a $publishTopic")
// publish goes out and gets received by MQTT device
mqttClient.publish(publishTopic,
"{\"granted\":$result,\"transactionId\":21334}".encodeToByteArray(),
0,
false)
// NOTE(review): a fresh Timer (and thread) is created per message; this
// may leak threads under load - confirm intent.
Timer().schedule(2000) {
// Prints true
println(mqttClient.isConnected)
}
}
Not sure what I am doing wrong; any help is appreciated.
I referred this to receive messages from my Azure Service bus via subscription
I am able to receive the messages, but I am continuously receiving the messages until I manually terminate the program
I have a timeout option and want to receive messages only till the timeout.
It would be helpful if you can explain how the below code works and how I can modify the below code to receive messages for a particular time frame and stop receiving once my timeout has been reached.
/**
 * Registers a callback-driven message pump on the given subscription
 * client. The pump prints "Scientist" JSON messages and completes each
 * message manually (auto-complete is disabled in the options below).
 *
 * @param receiveClient   subscription client to attach the pump to
 * @param executorService executor the SDK uses to dispatch callbacks
 * @throws Exception if the handler registration fails
 */
static void registerMessageHandlerOnClient(SubscriptionClient receiveClient, ExecutorService executorService) throws Exception {
    // register the RegisterMessageHandler callback
    receiveClient.registerMessageHandler(
        new IMessageHandler() {
            // callback invoked when the message handler loop has obtained a message
            public CompletableFuture<Void> onMessageAsync(IMessage message) {
                // Only render messages carrying the expected label and content type.
                if (message.getLabel() != null &&
                        message.getContentType() != null &&
                        message.getLabel().contentEquals("Scientist") &&
                        message.getContentType().contentEquals("application/json")) {
                    byte[] body = message.getBody();
                    Map scientist = GSON.fromJson(new String(body, UTF_8), Map.class);
                    System.out.printf(
                        "\n\t\t\t\t%s Message received: \n\t\t\t\t\t\tMessageId = %s, \n\t\t\t\t\t\tSequenceNumber = %s, \n\t\t\t\t\t\tEnqueuedTimeUtc = %s," +
                        "\n\t\t\t\t\t\tExpiresAtUtc = %s, \n\t\t\t\t\t\tContentType = \"%s\", \n\t\t\t\t\t\tContent: [ firstName = %s, name = %s ]\n",
                        receiveClient.getEntityPath(),
                        message.getMessageId(),
                        message.getSequenceNumber(),
                        message.getEnqueuedTimeUtc(),
                        message.getExpiresAtUtc(),
                        message.getContentType(),
                        scientist != null ? scientist.get("firstName") : "",
                        scientist != null ? scientist.get("name") : "");
                }
                // Manual completion; the returned future drives the pump.
                return receiveClient.completeAsync(message.getLockToken());
            }
            // callback invoked when the message handler has an exception to report
            public void notifyException(Throwable throwable, ExceptionPhase exceptionPhase) {
                // BUG FIX: never pass untrusted text as the printf FORMAT string -
                // a '%' in the exception message would throw. Use explicit "%s".
                System.out.printf("%s-%s", exceptionPhase, throwable.getMessage());
            }
        },
        // 1 concurrent call; auto-complete DISABLED (we complete manually
        // above - the original comment claiming auto-completion was wrong);
        // 1 minute lock auto-renew duration.
        new MessageHandlerOptions(1, false, Duration.ofMinutes(1)),
        executorService);
}
This cannot be done in your subscription code.
There are two options/workarounds which you can do:
Don't send a message to the topic continuously, have time control there.
Create a Timer Trigger that makes a REST API call Subscriptions - Create Or Update to make EntityStatus = ReceiveDisabled and use the similar function to make EntityStatus = Active.
We created a rest API on python and it is locally running. And the 'http://127.0.0.1:5002/business' API is showing contents {"business name": "something"} if I open it on google chrome. However, when we call this API in nodejs, it always gives me the error. But if I use another API(exactly same code but different api in nodejs), it is working.
// Fetches the initial recommendation from the locally running Python
// REST API and logs the parsed result; logs a fallback message on error.
async function get_recommend_initial() {
    //https://ViolaS.api.stdlib.com/InitialRecommendation#dev/
    // // agent.add('providing recommendations...');
    const options = {
        method: 'GET',
        uri: 'http://127.0.0.1:5002/business'
        // ,uri:'https://ViolaS.api.stdlib.com/InitialRecommendation#dev/'
        // ,json: true
    };
    try {
        const data = await requestAPI(options);
        const initial_recommendation = JSON.parse(data);
        console.log(initial_recommendation);
        //return initial_recommendation.information[0].name;
    } catch (err) {
        console.log('No recommend data');
        console.log(err);
    }
}
1
The API is created by a Python file which is running locally. You can see the API code figure by moving your mouse over 1. Thanks!!!
The python code is as follows:
app = Flask(__name__)
# Add resources to be much cleaner
api = Api(app)

# In-memory feature store; 'count' tracks how many POST/PUT requests
# have been received since startup.
features = {}


def _bump_count():
    """Increment and return the shared request counter in `features`."""
    features['count'] = features.get('count', 0) + 1
    return features['count']


class Business(Resource):
    """REST resource exposing GET/POST/PUT at /business."""

    def get(self):
        # Static demo payload.
        return {'business name': 'something'}

    def post(self):
        # Echo the posted JSON back along with the running request count.
        some_json = request.get_json()
        print(some_json)
        count_number = _bump_count()
        return {'You sent': some_json,
                'Count:': count_number}, 201

    def put(self):
        # Same as post(), but also records an 'ok' flag in the store.
        some_json = request.get_json()
        print(some_json)
        count_number = _bump_count()
        features['ok'] = 'yes'
        return {'You sent': some_json,
                'Count:': count_number,
                'Ok:': features['ok']}, 201


api.add_resource(Business, '/business')  # Route_1

if __name__ == '__main__':
    # BUG FIX: pass the port as an int, not the string '5002'.
    app.run(port=5002)
The error is as following:
dialogflowFirebaseFulfillment
Error: Unknown response type: "undefined" at WebhookClient.addResponse_ (/srv/node_modules/dialogflow-fulfillment/src/dialogflow-fulfillment.js:277:13) at WebhookClient.add (/srv/node_modules/dialogflow-fulfillment/src/dialogflow-fulfillment.js:245:12) at Sys_Recommend (/srv/index.js:31:11) at <anonymous>
And the log is:
No recommend data
I am working with Firestore right now and have a little bit of a problem with pagination.
Basically, I have a collection (assume 10 items) where each item has some data and a timestamp.
Now, I am fetching the first 3 items like this:
// Page 1: listen to the 3 most recent documents, newest first.
Firestore.firestore()
.collection("collectionPath")
.order(by: "timestamp", descending: true)
.limit(to: 3)
.addSnapshotListener(snapshotListener())
Inside my snapshot listener, I save the last document from the snapshot, in order to use that as a starting point for my next page.
So, at some time I will request the next page of items like this:
// Page 2: a SEPARATE listener for the next 3 documents, resuming after
// the cursor document captured from the first snapshot.
Firestore.firestore()
.collection("collectionPath")
.order(by: "timestamp", descending: true)
.start(afterDocument: lastDocument)
.limit(to: 3)
.addSnapshotListener(snapshotListener2()) // Note that this is a new snapshot listener, I don't know how I could reuse the first one
Now I have the items from index 0 to index 5 (in total 6) in my frontend. Neat!
If the document at index 4 now updates its timestamp to the newest timestamp of the whole collection, things start to go down.
Remember that the timestamp determines its position on account of the order clause!
What I expected to happen was, that after the changes are applied, I still show 6 items (and still ordered by their timestamps)
What happened was, that after the changes are applied, I have only 5 items remaining, since the item that got pushed out of the first snapshot is not added to the second snapshot automatically.
Am I missing something about Pagination with Firestore?
EDIT: As requested, I post some more code here:
This is my function to return a snapshot listener. Well, and the two methods I use to request the first page and then the second page I posted already above
/// Builds the snapshot handler for one page of chats. Captures
/// `self.index` at creation time so this page's document changes can be
/// offset into the shared data source.
private func snapshotListener() -> FIRQuerySnapshotBlock {
let index = self.index
return { querySnapshot, error in
guard let snap = querySnapshot, error == nil else {
log.error(error)
return
}
// Save the last doc, so we can later use pagination to retrieve further chats
// (a page shorter than `limit` means the end was reached - clear the cursor).
if snap.count == self.limit {
self.lastDoc = snap.documents.last
} else {
self.lastDoc = nil
}
// Each page maps its snapshot-local indices into the table through
// this fixed offset.
let offset = index * self.limit
snap.documentChanges.forEach() { diff in
switch diff.type {
case .added:
log.debug("added chat at index: \(diff.newIndex), offset: \(offset)")
self.tVHandler.dataManager.insert(item: Chat(dictionary: diff.document.data() as NSDictionary), at: IndexPath(row: Int(diff.newIndex) + offset, section: 0), in: nil)
case .removed:
log.debug("deleted chat at index: \(diff.oldIndex), offset: \(offset)")
self.tVHandler.dataManager.remove(itemAt: IndexPath(row: Int(diff.oldIndex) + offset, section: 0), in: nil)
case .modified:
// Same position: in-place update; otherwise the item moved
// within this page's window.
if diff.oldIndex == diff.newIndex {
log.debug("updated chat at index: \(diff.oldIndex), offset: \(offset)")
self.tVHandler.dataManager.update(item: Chat(dictionary: diff.document.data() as NSDictionary), at: IndexPath(row: Int(diff.oldIndex) + offset, section: 0), in: nil)
} else {
log.debug("moved chat at index: \(diff.oldIndex), offset: \(offset) to index: \(diff.newIndex), offset: \(offset)")
self.tVHandler.dataManager.move(item: Chat(dictionary: diff.document.data() as NSDictionary), from: IndexPath(row: Int(diff.oldIndex) + offset, section: 0), to: IndexPath(row: Int(diff.newIndex) + offset, section: 0), in: nil)
}
}
}
self.tableView?.reloadData()
}
}
So again, I am asking if I can have one snapshot listener that listens for changes in more than one page I requested from Firestore
Well, I contacted the guys over at Firebase Google Group for help, and they were able to tell me that my use case is not yet supported.
Thanks to Kato Richardson for attending to my problem!
For anyone interested in the details, see this thread
I came across the same use case today and I have successfully implemented a working solution in Objective C client. Below is the algorithm if anyone wants to apply in their program and I will really appreciate if google-cloud-firestore team can put my solution on their page.
Use Case: A feature to allow paginating a long list of recent chats along with the option to attach real time listeners to update the list to have chat with most recent message on top.
Solution: This can be made possible by using pagination logic like we do for other long lists and attaching real time listener with limit set to 1:
Step 1: On page load fetch the chats using pagination query as below:
// UIViewController lifecycle hook - kicks off the first page load as
// soon as the view is ready.
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view.
[self fetchChats];
}
// Fetches the next page of chats (MAGConstPageLimit at a time), newest
// first, using the last chat already loaded as the pagination cursor.
-(void)fetchChats {
    __weak typeof(self) weakSelf = self;
    FIRQuery *paginateChatsQuery = [[[self.db collectionWithPath:MAGConstCollectionNameChats]queryOrderedByField:MAGConstFieldNameTimestamp descending:YES]queryLimitedTo:MAGConstPageLimit];
    if(self.arrChats.count > 0){
        // Continue strictly after the last document we already have.
        FIRDocumentSnapshot *lastChatDocument = self.arrChats.lastObject;
        paginateChatsQuery = [paginateChatsQuery queryStartingAfterDocument:lastChatDocument];
    }
    [paginateChatsQuery getDocumentsWithCompletion:^(FIRQuerySnapshot * _Nullable snapshot, NSError * _Nullable error) {
        if (snapshot == nil) {
            // BUG FIX: Objective-C string/format literals use '@', not '#'
            // ('#' was a rendering artifact in the original).
            NSLog(@"Error fetching documents: %@", error);
            return;
        }
        ///2. Observe chat updates if not attached
        if(weakSelf.chatObserverState == ChatObserverStateNotAttached) {
            weakSelf.chatObserverState = ChatObserverStateAttaching;
            [weakSelf observeChats];
        }
        // A short page means there is nothing more to fetch.
        if(snapshot.documents.count < MAGConstPageLimit) {
            weakSelf.noMoreData = YES;
        }
        else {
            weakSelf.noMoreData = NO;
        }
        [weakSelf.arrChats addObjectsFromArray:snapshot.documents];
        [weakSelf.tblVuChatsList reloadData];
    }];
}
Step 2: In the success callback of the "fetchChats" method, attach the observer for real-time updates only once, with the limit set to 1.
// Attaches a single realtime listener (limit 1) that keeps the chat list
// ordered with the most recently updated chat on top.
-(void)observeChats {
    __weak typeof(self) weakSelf = self;
    self.chatsListener = [[[[self.db collectionWithPath:MAGConstCollectionNameChats]queryOrderedByField:MAGConstFieldNameTimestamp descending:YES]queryLimitedTo:1]addSnapshotListener:^(FIRQuerySnapshot * _Nullable snapshot, NSError * _Nullable error) {
        if (snapshot == nil) {
            // BUG FIX: Objective-C string/format literals use '@', not '#'
            // ('#' was a rendering artifact in the original).
            NSLog(@"Error fetching documents: %@", error);
            return;
        }
        if(weakSelf.chatObserverState == ChatObserverStateAttaching) {
            weakSelf.chatObserverState = ChatObserverStateAttached;
        }
        for (FIRDocumentChange *diff in snapshot.documentChanges) {
            if (diff.type == FIRDocumentChangeTypeAdded) {
                ///New chat added
                NSLog(@"Added chat: %@", diff.document.data);
                FIRDocumentSnapshot *chatDoc = diff.document;
                [weakSelf handleChatUpdates:chatDoc];
            }
            else if (diff.type == FIRDocumentChangeTypeModified) {
                NSLog(@"Modified chat: %@", diff.document.data);
                FIRDocumentSnapshot *chatDoc = diff.document;
                [weakSelf handleChatUpdates:chatDoc];
            }
            else if (diff.type == FIRDocumentChangeTypeRemoved) {
                // Removals are deliberately ignored (see step 3 in the text).
                NSLog(@"Removed chat: %@", diff.document.data);
            }
        }
    }];
}
Step 3. On listener callback check for document changes and handle only FIRDocumentChangeTypeAdded and FIRDocumentChangeTypeModified events and ignore the FIRDocumentChangeTypeRemoved event. We are doing this by calling "handleChatUpdates" method for both FIRDocumentChangeTypeAdded and FIRDocumentChangeTypeModified event in which we are first trying to find the matching chat document from local list and if it exist we are removing it from the list and then we are adding the new document received from listener callback and adding it to the beginning of the list.
// Moves (or inserts) the given chat snapshot to the top of the list and
// refreshes the table view.
-(void)handleChatUpdates:(FIRDocumentSnapshot *)chatDoc {
    NSInteger existingIndex = [self getIndexOfMatchingChatDoc:chatDoc];
    if (existingIndex != NSNotFound) {
        // Drop the stale copy before re-inserting the fresh snapshot.
        [self.arrChats removeObjectAtIndex:existingIndex];
    }
    // The most recent chat always lives at the head of the array.
    [self.arrChats insertObject:chatDoc atIndex:0];
    // Refresh the tableview
    [self.tblVuChatsList reloadData];
}
// Returns the index of the chat whose documentID matches chatDoc, or
// NSNotFound when the chat is not present in the list.
-(NSInteger)getIndexOfMatchingChatDoc:(FIRDocumentSnapshot *)chatDoc {
    for (NSUInteger idx = 0; idx < self.arrChats.count; idx++) {
        FIRDocumentSnapshot *candidate = self.arrChats[idx];
        if ([candidate.documentID isEqualToString:chatDoc.documentID]) {
            return (NSInteger)idx;
        }
    }
    return NSNotFound;
}
Step 4. Reload the tableview to see the changes.
my solution is to create 1 maintainer query - listener to observe on those removed item from first query, and we will update it every time there's new message coming.
To implement pagination with a snapshot listener, we first create a reference-point document from the collection. After that, we listen to the collection relative to that reference-point document.
Let's say you have a collection called messages, with a timestamp field called createdAt on each document in that collection.
//get messages
getMessages(){
//first we will fetch the very last/latest document.
//to hold listeners
listnerArray=[];
const very_last_document= await this.afs.collectons('messages')
.ref
.limit(1)
.orderBy('createdAt','desc')
.get({ source: 'server' });
//if very_last.document.empty property become true,which means there is no messages
//present till now ,we can go with a query without having a limit
//else we have to apply the limit
if (!very_last_document.empty) {
const start = very_last_document.docs[very_last_document.docs.length - 1].data().createdAt;
//listner for new messages
//all new message will be registered on this listener
const listner_1 = this.afs.collectons('messages')
.ref
.orderBy('createdAt','desc')
.endAt(start) <== this will make sure the query will fetch up to 'start' point(including 'start' point document)
.onSnapshot(messages => {
for (const message of messages .docChanges()) {
if (message .type === "added")
//do the job...
if (message.type === "modified")
//do the job...
if (message.type === "removed")
//do the job ....
}
},
err => {
//on error
})
//old message will be registered on this listener
const listner_2 = this.afs.collectons('messages')
.ref
.orderBy('createdAt','desc')
.limit(20)
.startAfter(start) <== this will make sure the query will fetch after the 'start' point
.onSnapshot(messages => {
for (const message of messages .docChanges()) {
if (message .type === "added")
//do the job...
if (message.type === "modified")
//do the job...
if (message.type === "removed")
//do the job ....
}
this.listenerArray.push(listner_1, listner_2);
},
err => {
//on error
})
} else {
//no document found!
//very_last_document.empty = true
const listner_1 = this.afs.collectons('messages')
.ref
.orderBy('createdAt','desc')
.onSnapshot(messages => {
for (const message of messages .docChanges()) {
if (message .type === "added")
//do the job...
if (message.type === "modified")
//do the job...
if (message.type === "removed")
//do the job ....
}
},
err => {
//on error
})
this.listenerArray.push(listner_1);
}
}
//to load more messages
LoadMoreMessage(){
//Assuming messages array holding the the message we have fetched
//getting the last element from the array messages.
//that will be the starting point of our next batch
const endAt = this.messages[this.messages.length-1].createdAt
const listner_2 = this.getService
.collections('messages')
.ref
.limit(20)
.orderBy('createdAt', "asc") <== should be in 'asc' order
.endBefore(endAt) <== Getting the 20 documnents (the limit we have applied) from the point 'endAt';
.onSnapshot(messages => {
if (messages.empty && this.messages.length)
this.messages[this.messages.length - 1].hasMore = false;
for (const message of messages.docChanges()) {
if (message.type === "added")
//do the job...
if (message.type === "modified")
//do the job
if (message.type === "removed")
//do the job
}
},
err => {
//on error
})
this.listenerArray.push(listner_2)
}
I have used below mentioned API of dcm4che2 from this repository http://www.dcm4che.org/maven2/dcm4che/ in my java project.
dcm4che-core-2.0.29.jar
org.dcm4che2.data.DicomObject
org.dcm4che2.io.StopTagInputHandler
org.dcm4che2.data.BasicDicomObject
org.dcm4che2.data.UIDDictionary
org.dcm4che2.data.DicomElement
org.dcm4che2.data.SimpleDcmElement
org.dcm4che2.net.service.StorageCommitmentService
org.dcm4che2.util.CloseUtils
dcm4che-net-2.0.29.jar
org.dcm4che2.net.CommandUtils
org.dcm4che2.net.ConfigurationException
org.dcm4che2.net.NetworkApplicationEntity
org.dcm4che2.net.NetworkConnection
org.dcm4che2.net.NewThreadExecutor
org.dcm4che3.net.service.StorageService
org.dcm4che3.net.service.VerificationService
Currently i want to migrate to dcm4che3 but, above listed API is not found in dcm4che3 which i have downloaded from this repository http://sourceforge.net/projects/dcm4che/files/dcm4che3/
Could you please guide me for alternate approach?
As you have already observed, the BasicDicomObject is history -- alongside quite a few others.
The new "Dicom object" is Attributes -- an object is a collection of attributes.
Therefore, you create Attributes, populate them with the tags you need for RQ-behaviour (C-FIND, etc) and what you get in return is another Attributes object from which you pull the tags you want.
In my opinion, dcm4che 2.x was vague on the subject of dealing with individual value representations. dcm4che 3.x is quite a bit clearer.
The migration demands a rewrite of your code regarding how you query and how you treat individual tags. On the other hand, dcm4che 3.x makes the new code less convoluted.
On request, I have added the initial setup of a connection to some service class provider (SCP):
// Based on org.dcm4che:dcm4che-core:5.25.0 and org.dcm4che:dcm4che-net:5.25.0
import org.dcm4che3.data.*;
import org.dcm4che3.net.*;
import org.dcm4che3.net.pdu.AAssociateRQ;
import org.dcm4che3.net.pdu.PresentationContext;
import org.dcm4che3.net.pdu.RoleSelection;
import org.dcm4che3.net.pdu.UserIdentityRQ;
// Client side representation of the connection. As a client, I will
// not be listening for incoming traffic (but I could choose to do so
// if I need to transfer data via MOVE)
Connection local = new Connection();
local.setHostname("client.on.network.com");
local.setPort(Connection.NOT_LISTENING);
// Remote side representation of the connection.
// TLS settings are mirrored from the local side so both ends of the
// association negotiate the same protocols and cipher suites.
Connection remote = new Connection();
remote.setHostname("pacs.on.network.com");
remote.setPort(4100);
remote.setTlsProtocols(local.getTlsProtocols());
remote.setTlsCipherSuites(local.getTlsCipherSuites());
// Calling application entity (we initiate associations, never accept them)
ApplicationEntity ae = new ApplicationEntity("MeAsAServiceClassUser".toUpperCase());
ae.setAETitle("MeAsAServiceClassUser");
ae.addConnection(local); // on which we may not be listening
ae.setAssociationInitiator(true);
ae.setAssociationAcceptor(false);
// Device that owns the connection and the application entity
Device device = new Device("MeAsAServiceClassUser".toLowerCase());
device.addConnection(local);
device.addApplicationEntity(ae);
// Configure association request: who is calling, who is called
AAssociateRQ rq = new AAssociateRQ();
rq.setCallingAET("MeAsAServiceClassUser");
rq.setCalledAET("NameThatIdentifiesTheProvider"); // e.g. "GEPACS"
rq.setImplVersionName("MY-SCU-1.0"); // Max 16 chars
// Credentials (if appropriate) - only attached when both are non-empty
String username = "username";
String passcode = "so secret";
if (null != username && username.length() > 0 && null != passcode && passcode.length() > 0) {
rq.setUserIdentityRQ(UserIdentityRQ.usernamePasscode(username, passcode.toCharArray(), true));
}
Example, pinging the PACS (using the setup above):
// Transfer syntaxes we can accept, in order of preference.
String[] TRANSFER_SYNTAX_CHAIN = {
    UID.ExplicitVRLittleEndian,
    UID.ImplicitVRLittleEndian
};
// Define transfer capabilities for verification SOP class
ae.addTransferCapability(
    new TransferCapability(null,
        /* SOP Class */ UID.Verification,
        /* Role */ TransferCapability.Role.SCU,
        /* Transfer syntax */ TRANSFER_SYNTAX_CHAIN)
);
// Setup presentation context (odd-numbered IDs, per the DICOM standard)
rq.addPresentationContext(
    new PresentationContext(
        rq.getNumberOfPresentationContexts() * 2 + 1,
        /* abstract syntax */ UID.Verification,
        /* transfer syntax */ TRANSFER_SYNTAX_CHAIN
    )
);
rq.addRoleSelection(new RoleSelection(UID.Verification, /* is SCU? */ true, /* is SCP? */ false));
try {
    // 1) Open a connection to the SCP
    Association association = ae.connect(local, remote, rq);
    // 2) PING!
    DimseRSP rsp = association.cecho();
    rsp.next(); // Consume reply, which may fail
    // Still here? Success!
    // 3) Close the connection to the SCP
    // BUG FIX: the original referenced an undeclared variable 'as' here;
    // the association is named 'association'.
    if (association.isReadyForDataTransfer()) {
        association.waitForOutstandingRSP();
        association.release();
    }
} catch (Throwable ignore) {
    // Failure (deliberately best-effort: a ping either works or it doesn't)
}
Another example, retrieving studies from a PACS given accession numbers; setting up the query and handling the result:
String modality = null; // e.g. "OT"
String accessionNumber = "1234567890";
//--------------------------------------------------------
// HERE follows setup of a query, using an Attributes object
//--------------------------------------------------------
Attributes query = new Attributes();
// Indicate character set (Latin-1) for string values in the query
{
int tag = Tag.SpecificCharacterSet;
VR vr = ElementDictionary.vrOf(tag, query.getPrivateCreator(tag));
query.setString(tag, vr, "ISO_IR 100");
}
// Study level query
{
int tag = Tag.QueryRetrieveLevel;
VR vr = ElementDictionary.vrOf(tag, query.getPrivateCreator(tag));
query.setString(tag, vr, "STUDY");
}
// Accession number (the match key for this query)
{
int tag = Tag.AccessionNumber;
VR vr = ElementDictionary.vrOf(tag, query.getPrivateCreator(tag));
query.setString(tag, vr, accessionNumber);
}
// Optionally filter on modality in study if 'modality' is provided,
// otherwise retrieve modality (setNull requests the tag in the result)
{
int tag = Tag.ModalitiesInStudy;
VR vr = ElementDictionary.vrOf(tag, query.getPrivateCreator(tag));
if (null != modality && modality.length() > 0) {
query.setString(tag, vr, modality);
} else {
query.setNull(tag, vr);
}
}
// We are interested in study instance UID (requested via setNull)
{
int tag = Tag.StudyInstanceUID;
VR vr = ElementDictionary.vrOf(tag, query.getPrivateCreator(tag));
query.setNull(tag, vr);
}
// Do the actual query, needing an ApplicationEntity (ae),
// a local (local) and remote (remote) Connection, and
// an AAssociateRQ (rq) set up earlier.
try {
    // 1) Open a connection to the SCP
    Association as = ae.connect(local, remote, rq);
    // 2) Query
    int priority = 0x0002; // low for the sake of demo :)
    as.cfind(UID.StudyRootQueryRetrieveInformationModelFind, priority, query, null,
        new DimseRSPHandler(as.nextMessageID()) {
            // BUG FIX: Java annotations use '@', not '#' ('#Override'
            // was a rendering artifact in the original).
            @Override
            public void onDimseRSP(Association assoc, Attributes cmd,
                                   Attributes response) {
                super.onDimseRSP(assoc, cmd, response);
                int status = cmd.getInt(Tag.Status, -1);
                if (Status.isPending(status)) {
                    //--------------------------------------------------------
                    // HERE follows handling of the response, which
                    // is just another Attributes object
                    //--------------------------------------------------------
                    String studyInstanceUID = response.getString(Tag.StudyInstanceUID);
                    // etc...
                }
            }
        });
    // 3) Close the connection to the SCP
    if (as.isReadyForDataTransfer()) {
        as.waitForOutstandingRSP();
        as.release();
    }
}
catch (Exception e) {
    // Failure
}
More on this at https://github.com/FrodeRanders/dicom-tools