Why service is not reading AWS Secrets Manager in AWS ECS Fargate - java

I am deploying a Spring Boot 2.5.0 Gradle project to AWS ECS, where it has to read AWS Secrets Manager on startup. I have written these two files to read Secrets Manager, but the service is still failing to resolve the values. Any pointers would help.
The error in the ECS container:
Caused by: org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'tokenManager': Injection of autowired dependencies failed; nested exception is java.lang.IllegalArgumentException: Could not resolve placeholder 'moa.aws.secrets.oidc.introspectUrl' in value "${moa.aws.secrets.oidc.introspectUrl}"
The two files:
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class SecretsManagerBootstrapConfiguration {
@Bean
public SecretsManagerPropertySourceLocator awsSecretsManager(
@Value("${aws.secrets:}") final String[] allowedSecrets) {
return new SecretsManagerPropertySourceLocator(allowedSecrets);
}
}
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cloud.bootstrap.config.PropertySourceLocator;
import org.springframework.core.env.AbstractEnvironment;
import org.springframework.core.env.CompositePropertySource;
import org.springframework.core.env.EnumerablePropertySource;
import org.springframework.core.env.Environment;
import org.springframework.core.env.MapPropertySource;
import org.springframework.core.env.MutablePropertySources;
import org.springframework.core.env.PropertySource;
import com.fasterxml.jackson.databind.ObjectMapper;
public class SecretsManagerPropertySourceLocator implements PropertySourceLocator {
private static final Logger LOGGER = LoggerFactory.getLogger(SecretsManagerPropertySourceLocator.class);
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private String[] allowedSecrets;
public SecretsManagerPropertySourceLocator(
final String[] secrets) {
this.allowedSecrets = secrets;
for (String secret : allowedSecrets) {
LOGGER.debug("allowed secrets ::===> " + secret);
}
}
/**
* @param environment the current Environment
* @return a PropertySource or null if there is none
* @throws IllegalStateException if there is a fail fast condition
*/
@Override
public PropertySource<?> locate(Environment environment) {
final CompositePropertySource propertySource = new CompositePropertySource("aws-secrets-store");
final MutablePropertySources sources = ((AbstractEnvironment) environment).getPropertySources();
for (String secret : allowedSecrets
) {
StreamSupport.stream(sources.spliterator(), false)
.filter(ps -> ps instanceof EnumerablePropertySource)
.map(ps -> ((EnumerablePropertySource) ps).getPropertyNames())
.flatMap(Arrays::stream)
.distinct()
.filter(prop -> (prop.contains(secret)))
.forEach(prop -> {
propertySource.addPropertySource(new MapPropertySource(secret, jsonToMap(prop, environment.getProperty(prop))));
});
}
return propertySource;
}
private Map<String, Object> jsonToMap(final String name, String jsonString) {
try {
final Map<String, Object> map = OBJECT_MAPPER.readValue(jsonString, Map.class);
return map.entrySet()
.stream()
.collect(
Collectors.toMap(
e -> name.replaceAll("/", ".") + '.' + e.getKey(),
Map.Entry::getValue
));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
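One thing worth checking, since the bootstrap wiring is not shown: a PropertySourceLocator only runs if its configuration class is registered for the Spring Cloud bootstrap phase. A sketch of that registration (the package name here is an assumption) is this entry in src/main/resources/META-INF/spring.factories:
org.springframework.cloud.bootstrap.BootstrapConfiguration=\
com.example.config.SecretsManagerBootstrapConfiguration
On Spring Boot 2.4+ the bootstrap phase is also disabled unless the spring-cloud-starter-bootstrap dependency is present (or spring.cloud.bootstrap.enabled=true is set); without both pieces the locator is never invoked, and every ${moa.aws.secrets.*} placeholder fails to resolve exactly as in the error above.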

I am not seeing your Secrets Manager code in the above code examples. How are you invoking this AWS service? The best practice from a Java app is to use the Secrets Manager V2 Java API, which you can use from within a Spring app. To obtain a secret from this service, use Java V2 code like this:
package com.example.secrets;
//snippet-start:[secretsmanager.java2.get_secret.import]
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.secretsmanager.SecretsManagerClient;
import software.amazon.awssdk.services.secretsmanager.model.GetSecretValueRequest;
import software.amazon.awssdk.services.secretsmanager.model.GetSecretValueResponse;
import software.amazon.awssdk.services.secretsmanager.model.SecretsManagerException;
//snippet-end:[secretsmanager.java2.get_secret.import]
/**
* To run this AWS code example, ensure that you have set up your development environment, including your AWS credentials.
*
* For more information, see this documentation topic:
*
* https://docs.aws.amazon.com/sdk-for-java/latest/developer-guide/get-started.html
*/
public class GetSecretValue {
public static void main(String[] args) {
final String USAGE = "\n" +
"Usage:\n" +
" GetSecretValue <secretName> \n\n" +
"Where:\n" +
" secretName - the name of the secret (for example, tutorials/MyFirstSecret). \n";
if (args.length != 1) {
System.out.println(USAGE);
System.exit(1);
}
String secretName = args[0];
Region region = Region.US_EAST_1;
SecretsManagerClient secretsClient = SecretsManagerClient.builder()
.region(region)
.build();
getValue(secretsClient, secretName);
secretsClient.close();
}
//snippet-start:[secretsmanager.java2.get_secret.main]
public static void getValue(SecretsManagerClient secretsClient,String secretName) {
try {
GetSecretValueRequest valueRequest = GetSecretValueRequest.builder()
.secretId(secretName)
.build();
GetSecretValueResponse valueResponse = secretsClient.getSecretValue(valueRequest);
String secret = valueResponse.secretString();
System.out.println(secret);
} catch (SecretsManagerException e) {
System.err.println(e.awsErrorDetails().errorMessage());
System.exit(1);
}
}
//snippet-end:[secretsmanager.java2.get_secret.main]
}
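To bridge the SDK call into Spring, one option is to parse the secret once at startup and expose it as a bean. A minimal sketch, assuming the secret value is a flat JSON object; the secret name moa/oidc and the class name SecretsConfig are hypothetical:
import java.io.IOException;
import java.util.Map;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.secretsmanager.SecretsManagerClient;
import software.amazon.awssdk.services.secretsmanager.model.GetSecretValueRequest;
@Configuration
public class SecretsConfig {
// Hypothetical secret name; replace with the real one.
private static final String SECRET_NAME = "moa/oidc";
@Bean
public Map<String, String> oidcSecrets() throws IOException {
try (SecretsManagerClient client = SecretsManagerClient.builder().region(Region.US_EAST_1).build()) {
String json = client.getSecretValue(GetSecretValueRequest.builder().secretId(SECRET_NAME).build()).secretString();
// Assumes a flat JSON secret, e.g. {"introspectUrl": "https://..."}.
return new ObjectMapper().readValue(json, new TypeReference<Map<String, String>>() { });
}
}
}
Beans such as tokenManager can then inject this map instead of relying on ${...} placeholders, or you can keep the PropertySourceLocator approach and use the SDK call inside it.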

Related

Getting partial json response for s3select with aws java sdk v2

I am trying to implement S3 Select in a Spring Boot app to query a Parquet file in an S3 bucket, but I am only getting a partial result from the S3 Select output. Please help me identify the issue; I have used AWS Java SDK v2.
Upon checking the JSON output (printed in the console), the output totals about 65k characters.
I am using Eclipse and tried unchecking "Limit console output" in the console preferences, which did not help.
Code is here:
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
import software.amazon.awssdk.core.async.SdkPublisher;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3AsyncClient;
import software.amazon.awssdk.services.s3.model.CompressionType;
import software.amazon.awssdk.services.s3.model.EndEvent;
import software.amazon.awssdk.services.s3.model.ExpressionType;
import software.amazon.awssdk.services.s3.model.InputSerialization;
import software.amazon.awssdk.services.s3.model.JSONOutput;
import software.amazon.awssdk.services.s3.model.OutputSerialization;
import software.amazon.awssdk.services.s3.model.ParquetInput;
import software.amazon.awssdk.services.s3.model.RecordsEvent;
import software.amazon.awssdk.services.s3.model.SelectObjectContentEventStream;
import software.amazon.awssdk.services.s3.model.SelectObjectContentEventStream.EventType;
import software.amazon.awssdk.services.s3.model.SelectObjectContentRequest;
import software.amazon.awssdk.services.s3.model.SelectObjectContentResponse;
import software.amazon.awssdk.services.s3.model.SelectObjectContentResponseHandler;
public class ParquetSelect {
private static final String BUCKET_NAME = "<bucket-name>";
private static final String KEY = "<object-key>";
private static final String QUERY = "select * from S3Object s";
public static S3AsyncClient s3;
public static void selectObjectContent() {
Handler handler = new Handler();
SelectQueryWithHandler(handler).join();
RecordsEvent recordsEvent = (RecordsEvent) handler.receivedEvents.stream()
.filter(e -> e.sdkEventType() == EventType.RECORDS)
.findFirst()
.orElse(null);
System.out.println(recordsEvent.payload().asUtf8String());
}
private static CompletableFuture<Void> SelectQueryWithHandler(SelectObjectContentResponseHandler handler) {
InputSerialization inputSerialization = InputSerialization.builder()
.parquet(ParquetInput.builder().build())
.compressionType(CompressionType.NONE)
.build();
OutputSerialization outputSerialization = OutputSerialization.builder()
.json(JSONOutput.builder().build())
.build();
SelectObjectContentRequest select = SelectObjectContentRequest.builder()
.bucket(BUCKET_NAME)
.key(KEY)
.expression(QUERY)
.expressionType(ExpressionType.SQL)
.inputSerialization(inputSerialization)
.outputSerialization(outputSerialization)
.build();
return s3.selectObjectContent(select, handler);
}
private static class Handler implements SelectObjectContentResponseHandler {
private SelectObjectContentResponse response;
private List<SelectObjectContentEventStream> receivedEvents = new ArrayList<>();
private Throwable exception;
@Override
public void responseReceived(SelectObjectContentResponse response) {
this.response = response;
}
@Override
public void onEventStream(SdkPublisher<SelectObjectContentEventStream> publisher) {
publisher.subscribe(receivedEvents::add);
}
@Override
public void exceptionOccurred(Throwable throwable) {
exception = throwable;
}
@Override
public void complete() {
}
}
}
I see you are using selectObjectContent(). Have you tried calling the s3AsyncClient.getObject() method? Does that work for you?
For example, here is a code example that gets a PDF file from an Amazon S3 bucket and writes it to a local file.
package com.example.s3.async;
// snippet-start:[s3.java2.async_stream_ops.complete]
// snippet-start:[s3.java2.async_stream_ops.import]
import software.amazon.awssdk.auth.credentials.ProfileCredentialsProvider;
import software.amazon.awssdk.core.async.AsyncResponseTransformer;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3AsyncClient;
import software.amazon.awssdk.services.s3.model.GetObjectRequest;
import software.amazon.awssdk.services.s3.model.GetObjectResponse;
import java.nio.file.Paths;
import java.util.concurrent.CompletableFuture;
// snippet-end:[s3.java2.async_stream_ops.import]
// snippet-start:[s3.java2.async_stream_ops.main]
/**
* Before running this Java V2 code example, set up your development environment, including your credentials.
*
* For more information, see the following documentation topic:
*
* https://docs.aws.amazon.com/sdk-for-java/latest/developer-guide/get-started.html
*/
public class S3AsyncStreamOps {
public static void main(String[] args) {
final String usage = "\n" +
"Usage:\n" +
" <bucketName> <objectKey> <path>\n\n" +
"Where:\n" +
" bucketName - The name of the Amazon S3 bucket (for example, bucket1). \n\n" +
" objectKey - The name of the object (for example, book.pdf). \n" +
" path - The local path to the file (for example, C:/AWS/book.pdf). \n" ;
if (args.length != 3) {
System.out.println(usage);
System.exit(1);
}
String bucketName = args[0];
String objectKey = args[1];
String path = args[2];
ProfileCredentialsProvider credentialsProvider = ProfileCredentialsProvider.create();
Region region = Region.US_EAST_1;
S3AsyncClient s3AsyncClient = S3AsyncClient.builder()
.region(region)
.credentialsProvider(credentialsProvider)
.build();
GetObjectRequest objectRequest = GetObjectRequest.builder()
.bucket(bucketName)
.key(objectKey)
.build();
CompletableFuture<GetObjectResponse> futureGet = s3AsyncClient.getObject(objectRequest,
AsyncResponseTransformer.toFile(Paths.get(path)));
futureGet.whenComplete((resp, err) -> {
try {
if (resp != null) {
System.out.println("Object downloaded. Details: "+resp);
} else {
err.printStackTrace();
}
} finally {
// Only close the client when you are completely done with it.
s3AsyncClient.close();
}
});
futureGet.join();
}
}
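If you stay with selectObjectContent(), note that S3 Select streams a result as a sequence of RECORDS events, so reading only the first event (the findFirst() above) yields partial output. A sketch of concatenating all of them, reusing the Handler from the question:
String fullOutput = handler.receivedEvents.stream()
.filter(e -> e.sdkEventType() == EventType.RECORDS)
.map(e -> ((RecordsEvent) e).payload().asUtf8String())
.collect(java.util.stream.Collectors.joining());
System.out.println(fullOutput);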

kafka streams abandoned cart development - session window

I am attempting to build out a Kafka Streams app that takes in records from an input topic with a simple JSON payload (id and timestamp included; the key is a simple 3-digit string, and no schema is required). For the output topic, I wish to produce only the records that have been abandoned for 30 minutes or more (session window). Based on this link, I have begun to develop a Kafka Streams app:
package io.confluent.developer;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.kstream.SessionWindows;
import java.io.FileInputStream;
import java.io.IOException;
import java.time.Duration;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.format.FormatStyle;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
public class SessionWindow {
private final DateTimeFormatter timeFormatter = DateTimeFormatter.ofLocalizedTime(FormatStyle.LONG)
.withLocale(Locale.US)
.withZone(ZoneId.systemDefault());
public Topology buildTopology(Properties allProps) {
final StreamsBuilder builder = new StreamsBuilder();
final String inputTopic = allProps.getProperty("input.topic.name");
final String outputTopic = allProps.getProperty("output.topic.name");
builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
.groupByKey()
.windowedBy(SessionWindows.ofInactivityGapAndGrace(Duration.ofMinutes(5), Duration.ofSeconds(10)))
.count()
.toStream()
.map((windowedKey, count) -> {
String start = timeFormatter.format(windowedKey.window().startTime());
String end = timeFormatter.format(windowedKey.window().endTime());
String sessionInfo = String.format("Session info started: %s ended: %s with count %s", start, end, count);
return KeyValue.pair(windowedKey.key(), sessionInfo);
})
.to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));
return builder.build();
}
public Properties loadEnvProperties(String fileName) throws IOException {
Properties allProps = new Properties();
FileInputStream input = new FileInputStream(fileName);
allProps.load(input);
input.close();
return allProps;
}
public static void main(String[] args) throws Exception {
if (args.length < 1) {
throw new IllegalArgumentException("This program takes one argument: the path to an environment configuration file.");
}
SessionWindow tw = new SessionWindow();
Properties allProps = tw.loadEnvProperties(args[0]);
allProps.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
allProps.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, ClickEventTimestampExtractor.class);
Topology topology = tw.buildTopology(allProps);
ClicksDataGenerator dataGenerator = new ClicksDataGenerator(allProps);
dataGenerator.generate();
final KafkaStreams streams = new KafkaStreams(topology, allProps);
final CountDownLatch latch = new CountDownLatch(1);
// Attach shutdown handler to catch Control-C.
Runtime.getRuntime().addShutdownHook(new Thread("streams-shutdown-hook") {
@Override
public void run() {
streams.close(Duration.ofSeconds(5));
latch.countDown();
}
});
try {
streams.cleanUp();
streams.start();
latch.await();
} catch (Throwable e) {
System.exit(1);
}
System.exit(0);
}
static class ClicksDataGenerator {
final Properties properties;
public ClicksDataGenerator(final Properties properties) {
this.properties = properties;
}
public void generate() {
properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
}
}
}
package io.confluent.developer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.streams.processor.TimestampExtractor;
public class ClickEventTimestampExtractor implements TimestampExtractor {
@Override
public long extract(ConsumerRecord<Object, Object> record, long previousTimestamp) {
System.out.println(record.value());
return record.getTimestamp();
}
}
I am having issues with the following:
Getting the code to compile. I keep getting this error (I am new to Java, so please bear with me). What is the correct way to call getTimestamp?
error: cannot find symbol
return record.getTimestamp();
^
symbol: method getTimestamp()
location: variable record of type ConsumerRecord<Object,Object>
1 error
I am not sure if the timestamp extractor will work for this particular scenario. I read here that 'the timestamp extractor can only give you one timestamp'. Does that mean that if there are multiple messages with different keys this won't work? Some clarification or examples would help.
Thanks!
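For the compile error: ConsumerRecord in the Kafka clients API exposes the record's timestamp via timestamp(), not getTimestamp(). A corrected extractor sketch, assuming the broker-provided record timestamp is what you want (if the timestamp lives inside the JSON payload, you would parse record.value() here instead):
package io.confluent.developer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.streams.processor.TimestampExtractor;
public class ClickEventTimestampExtractor implements TimestampExtractor {
@Override
public long extract(ConsumerRecord<Object, Object> record, long previousTimestamp) {
// timestamp() returns this record's timestamp in epoch milliseconds.
// One timestamp per record is all a session window needs; records with
// different keys each carry their own timestamp, so multiple keys are fine.
return record.timestamp();
}
}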

java.lang.IllegalStateException: No reader found for type: class io.helidon.examples.quickstart.se.pokemon.Pokemon

I am new to Helidon and I am trying to create a basic CRUD REST service using Helidon SE.
I have been referring to the DbClient examples on GitHub (https://github.com/oracle/helidon/tree/master/examples/dbclient) to create a basic CRUD REST service.
I am able to read all/one and delete all/one in the DB, but I am unable to perform the create or update operation. Below is the error stack I get when trying to invoke a POST service:
java.util.concurrent.ExecutionException: Unhandled 'cause' of this exception encountered.
at io.helidon.webserver.RequestRouting$RoutedRequest.defaultHandler(RequestRouting.java:394)
at io.helidon.webserver.RequestRouting$RoutedRequest.nextNoCheck(RequestRouting.java:374)
at io.helidon.webserver.RequestRouting$RoutedRequest.next(RequestRouting.java:417)
at io.helidon.webserver.Handler.lambda$create$4(Handler.java:99)
at java.base/java.util.concurrent.CompletableFuture.uniExceptionally(CompletableFuture.java:986)
at java.base/java.util.concurrent.CompletableFuture.uniExceptionallyStage(CompletableFuture.java:1004)
at java.base/java.util.concurrent.CompletableFuture.exceptionally(CompletableFuture.java:2307)
at java.base/java.util.concurrent.CompletableFuture.exceptionally(CompletableFuture.java:143)
at io.helidon.common.reactive.CompletionAwaitable.exceptionally(CompletionAwaitable.java:293)
at io.helidon.webserver.Handler.lambda$create$5(Handler.java:97)
at io.helidon.webserver.RequestRouting$RoutedRequest.next(RequestRouting.java:320)
at io.helidon.metrics.MetricsSupport$MetricsContextHandler.accept(MetricsSupport.java:619)
at io.helidon.webserver.RequestRouting$RoutedRequest.next(RequestRouting.java:320)
at io.helidon.metrics.MetricsSupport.lambda$configureVendorMetrics$7(MetricsSupport.java:364)
at io.helidon.webserver.RequestRouting$RoutedRequest.next(RequestRouting.java:320)
at io.helidon.webserver.WebTracingConfig$RequestSpanHandler.accept(WebTracingConfig.java:247)
at io.helidon.webserver.RequestRouting$RoutedRequest.next(RequestRouting.java:320)
at io.helidon.common.context.Contexts.runInContext(Contexts.java:98)
at io.helidon.webserver.RequestRouting.route(RequestRouting.java:87)
at io.helidon.webserver.ForwardingHandler.channelRead0(ForwardingHandler.java:167)
at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)
at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:324)
at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:311)
at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:425)
at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:276)
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)
at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)
at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)
at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)
at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)
at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)
at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)
at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)
at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
at java.base/java.lang.Thread.run(Thread.java:834)
Caused by: java.lang.IllegalStateException: No reader found for type: class io.helidon.examples.quickstart.se.pokemon.Pokemon
at io.helidon.media.common.MessageBodyReaderContext.readerNotFound(MessageBodyReaderContext.java:338)
at io.helidon.media.common.MessageBodyReaderContext.unmarshall(MessageBodyReaderContext.java:167)
at io.helidon.media.common.MessageBodyReadableContent.as(MessageBodyReadableContent.java:117)
at io.helidon.webserver.Handler.lambda$create$5(Handler.java:83)
... 34 more
Below is the code I have written.
Main.java
package io.helidon.examples.quickstart.se;
import io.helidon.config.Config;
import io.helidon.config.ConfigValue;
import io.helidon.dbclient.DbClient;
import io.helidon.examples.quickstart.se.pokemon.PokemonService;
import io.helidon.health.HealthSupport;
import io.helidon.health.checks.HealthChecks;
import io.helidon.media.jsonp.JsonpSupport;
import io.helidon.metrics.MetricsSupport;
import io.helidon.webserver.Routing;
import io.helidon.webserver.WebServer;
import java.io.IOException;
import java.io.InputStream;
import java.util.Map;
import java.util.logging.LogManager;
/**
* The application main class.
*/
public final class Main {
/**
* Cannot be instantiated.
*/
private Main() {
}
/**
* Application main entry point.
* @param args command line arguments.
* @throws IOException if there are problems reading logging properties
*/
public static void main(final String[] args) throws IOException {
startServer();
}
/**
* Start the server.
* @return the created {@link WebServer} instance
* @throws IOException if there are problems reading logging properties
*/
static WebServer startServer() throws IOException {
// load logging configuration
setupLogging();
System.out.println("Logging Set up");
// By default this will pick up application.yaml from the classpath
Config config = Config.create();
System.out.println("Config created");
// Build server with JSONP support
WebServer server = WebServer.builder(createRouting(config))
.config(config.get("server"))
.addMediaSupport(JsonpSupport.create())
.build();
System.out.println("Webserver Created : "+server);
// Try to start the server. If successful, print some info and arrange to
// print a message at shutdown. If unsuccessful, print the exception.
System.out.println("Server startup initiating");
server.start()
.thenAccept(ws -> {
System.out.println(
"WEB server is up! http://localhost:" + ws.port() + "/greet");
ws.whenShutdown().thenRun(()
-> System.out.println("WEB server is DOWN. Good bye!"));
})
.exceptionally(t -> {
System.err.println("Startup failed: " + t.getMessage());
t.printStackTrace(System.err);
return null;
});
// Server threads are not daemon. No need to block. Just react.
return server;
}
/**
* Creates new {@link Routing}.
*
* @return routing configured with JSON support, a health check, and a service
* @param config configuration of this server
*/
private static Routing createRouting(Config config) {
System.out.println("Inside create Routing.");
Config dbConfig = config.get("db");
System.out.println("dbConfig : ");
System.out.println(dbConfig.get("db.source").asString().orElse("No Data"));
ConfigValue<Map<String, String>> test = dbConfig.asMap();
// for (Map.Entry<String,String> entry : test)
// System.out.println("Key = " + entry.getKey() +
// ", Value = " + entry.getValue());
//Client services are added through a service loader
DbClient dbClient = DbClient.builder(dbConfig).build();
System.out.println("dbClient : "+dbClient);
MetricsSupport metrics = MetricsSupport.create();
GreetService greetService = new GreetService(config);
PokemonService pokemonService = new PokemonService(dbClient);
HealthSupport health = HealthSupport.builder()
.addLiveness(HealthChecks.healthChecks()) // Adds a convenient set of checks
.build();
System.out.println("Returning Value");
return Routing.builder()
.register(health) // Health at "/health"
.register(metrics) // Metrics at "/metrics"
.register("/greet", greetService)
.register("/pokemon", pokemonService)
.build();
}
/**
* Configure logging from logging.properties file.
*/
private static void setupLogging() throws IOException {
try (InputStream is = Main.class.getResourceAsStream("/logging.properties")) {
LogManager.getLogManager().readConfiguration(is);
}
}
}
PokemonService.java
package io.helidon.examples.quickstart.se.pokemon;
import io.helidon.common.http.Http;
import io.helidon.common.reactive.Multi;
import io.helidon.dbclient.DbClient;
import io.helidon.dbclient.DbRow;
import io.helidon.webserver.*;
import javax.json.JsonObject;
import java.util.concurrent.CompletionException;
import java.util.concurrent.atomic.AtomicReference;
import java.util.logging.Level;
import java.util.logging.Logger;
public class PokemonService implements Service {
private static final Logger LOGGER = Logger.getLogger(PokemonService.class.getName());
private final AtomicReference<String> greeting = new AtomicReference<>();
private final DbClient dbClient;
public PokemonService(DbClient dbClient){
this.dbClient = dbClient;
greeting.set("Hello Pokemon");
}
@Override
public void update(Routing.Rules rules) {
rules
.get("/", this::listPokemons)
// get one
.get("/{name}", this::getPokemon)
// create new
.post("/", Handler.create(Pokemon.class, this::insertPokemon))
// delete all
.delete("/", this::deleteAllPokemons)
// delete one
.delete("/{name}", this::deletePokemon)
;
}
private void listPokemons(ServerRequest request, ServerResponse response) {
Multi<JsonObject> rows = dbClient.execute(exec -> exec.namedQuery("select-all"))
.map(it -> it.as(JsonObject.class));
response.send(rows, JsonObject.class);
}
private void insertPokemon(ServerRequest request, ServerResponse response, Pokemon pokemon) {
System.out.println("Start of insertPokemon");
System.out.println("pokemon : ");
System.out.println(pokemon);
System.out.println("Name : "+pokemon.getName());
System.out.println("Type : "+pokemon.getType());
dbClient.execute(exec -> exec
.createNamedInsert("insert2")
.namedParam(pokemon)
.execute())
.thenAccept(count -> response.send("Inserted: " + count + " values"))
.exceptionally(throwable -> sendError(throwable, response));
}
private <T> T sendError(Throwable throwable, ServerResponse response) {
Throwable realCause = throwable;
if (throwable instanceof CompletionException) {
realCause = throwable.getCause();
}
response.status(Http.Status.INTERNAL_SERVER_ERROR_500);
response.send("Failed to process request: " + realCause.getClass().getName() + "(" + realCause.getMessage() + ")");
LOGGER.log(Level.WARNING, "Failed to process request", throwable);
return null;
}
private void getPokemon(ServerRequest request, ServerResponse response) {
String pokemonName = request.path().param("name");
dbClient.execute(exec -> exec.namedGet("select-one", pokemonName))
.thenAccept(opt -> opt.ifPresentOrElse(it -> sendRow(it, response),
() -> sendNotFound(response, "Pokemon "
+ pokemonName
+ " not found")))
.exceptionally(throwable -> sendError(throwable, response));
}
private void sendRow(DbRow row, ServerResponse response) {
response.send(row.as(JsonObject.class));
}
private void sendNotFound(ServerResponse response, String message) {
response.status(Http.Status.NOT_FOUND_404);
response.send(message);
}
private void deleteAllPokemons(ServerRequest request, ServerResponse response) {
dbClient.execute(exec -> exec
// this is to show how ad-hoc statements can be executed (and their naming in Tracing and Metrics)
.createDelete("DELETE FROM pokemons")
.execute())
.thenAccept(count -> response.send("Deleted: " + count + " values"))
.exceptionally(throwable -> sendError(throwable, response));
}
private void deletePokemon(ServerRequest request, ServerResponse response) {
final String name = request.path().param("name");
dbClient.execute(exec -> exec.namedDelete("delete", name))
.thenAccept(count -> response.send("Deleted: " + count + " values"))
.exceptionally(throwable -> sendError(throwable, response));
}
}
Pokemon.java
package io.helidon.examples.quickstart.se.pokemon;
import io.helidon.common.Reflected;
@Reflected
public class Pokemon {
private String name;
private String type;
/**
* Default constructor.
*/
public Pokemon() {
// JSON-B
}
/**
* Create pokemon with name and type.
*
* @param name name of the beast
* @param type type of the beast
*/
public Pokemon(String name, String type) {
this.name = name;
this.type = type;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
}
PokemonMapper.java
package io.helidon.examples.quickstart.se.pokemon;
import io.helidon.dbclient.DbColumn;
import io.helidon.dbclient.DbMapper;
import io.helidon.dbclient.DbRow;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Maps database statements to {@link io.helidon.examples.quickstart.se.pokemon.Pokemon} class.
*/
public class PokemonMapper implements DbMapper<Pokemon> {
@Override
public Pokemon read(DbRow row) {
// DbColumn id = row.column("id");
DbColumn name = row.column("name");
DbColumn type = row.column("type");
return new Pokemon(name.as(String.class), type.as(String.class));
}
@Override
public Map<String, Object> toNamedParameters(Pokemon value) {
Map<String, Object> map = new HashMap<>(2);
// map.put("id", value.getId());
map.put("name", value.getName());
map.put("type", value.getType());
return map;
}
@Override
public List<Object> toIndexedParameters(Pokemon value) {
List<Object> list = new ArrayList<>(2);
// list.add(value.getId());
list.add(value.getName());
list.add(value.getType());
return list;
}
}
PokemonMapperProvider.java
package io.helidon.examples.quickstart.se.pokemon;
import io.helidon.dbclient.DbMapper;
import io.helidon.dbclient.spi.DbMapperProvider;
import javax.annotation.Priority;
import java.util.Optional;
/**
* Provides pokemon mappers.
*/
@Priority(1000)
public class PokemonMapperProvider implements DbMapperProvider {
private static final PokemonMapper MAPPER = new PokemonMapper();
@SuppressWarnings("unchecked")
@Override
public <T> Optional<DbMapper<T>> mapper(Class<T> type) {
if (type.equals(Pokemon.class)) {
return Optional.of((DbMapper<T>) MAPPER);
}
return Optional.empty();
}
}
Any help in this regard is truly appreciated.
Regards,
Gaurav
The Pokemon class is a Java Bean and can be converted from/to JSON with JSON-B; that is what the original example does.
You need to register the Jsonb media support:
.addMediaSupport(JsonbSupport.create())
This also requires the following Maven dependency:
<dependency>
<groupId>io.helidon.media</groupId>
<artifactId>helidon-media-jsonb</artifactId>
</dependency>
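For context, a sketch of where the registration goes in the Main.java shown above (JSON-P support can stay, since the query/list handlers send JsonObject responses); the import, assuming Helidon 2.x, is io.helidon.media.jsonb.JsonbSupport:
WebServer server = WebServer.builder(createRouting(config))
.config(config.get("server"))
.addMediaSupport(JsonpSupport.create()) // still used for JsonObject responses
.addMediaSupport(JsonbSupport.create()) // adds a reader/writer for the Pokemon bean
.build();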

Spring Integration Java DSL SFTP how to get remote SFTP server information in handler

I am trying to download files from multiple SFTP servers and then handle those files, but I cannot get information about the remote SFTP server (such as its IP address and remote directory) for the file the MessageHandler is handling. Instead, the payload only contains information about the downloaded files on the local machine. Here is the source code I use, based on this guide:
How to dynamically define file filter pattern for Spring Integration SFTP Inbound Adapter?
SFTIntegration.java
import com.jcraft.jsch.ChannelSftp.LsEntry;
import java.io.File;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.channel.NullChannel;
import org.springframework.integration.dsl.IntegrationFlow;
import org.springframework.integration.dsl.IntegrationFlows;
import org.springframework.integration.dsl.Pollers;
import org.springframework.integration.dsl.SourcePollingChannelAdapterSpec;
import org.springframework.integration.expression.FunctionExpression;
import org.springframework.integration.file.remote.aop.RotatingServerAdvice;
import org.springframework.integration.file.remote.session.DelegatingSessionFactory;
import org.springframework.integration.file.remote.session.SessionFactory;
import org.springframework.integration.scheduling.PollerMetadata;
import org.springframework.integration.sftp.dsl.Sftp;
import org.springframework.integration.sftp.dsl.SftpInboundChannelAdapterSpec;
import org.springframework.integration.sftp.session.DefaultSftpSessionFactory;
import org.springframework.messaging.MessageChannel;
import org.springframework.stereotype.Component;
/**
* flow.
*/
@Configuration
@Component
public class SFTIntegration {
public static final String TIMEZONE_UTC = "UTC";
public static final String TIMESTAMP_FORMAT_OF_FILES = "yyyyMMddHHmmssSSS";
public static final String TEMPORARY_FILE_SUFFIX = ".part";
public static final int POLLER_FIXED_PERIOD_DELAY = 60000;
public static final int MAX_MESSAGES_PER_POLL = 100;
private static final Logger LOG = LoggerFactory.getLogger(SFTIntegration.class);
private static final String CHANNEL_INTERMEDIATE_STAGE = "intermediateChannel";
@Autowired
private ImportHandler importHandler;
/** database access repository */
private final SFTPServerConfigRepo SFTPServerConfigRepo;
@Value("${sftp.local.directory.download:${java.io.tmpdir}/localDownload}")
private String localTemporaryPath;
public SFTIntegration(final SFTPServerConfigRepo SFTPServerConfigRepo) {
this.SFTPServerConfigRepo = SFTPServerConfigRepo;
}
/**
* The default poller with 5s, 100 messages, RotatingServerAdvice and transaction.
*
* @return default poller.
*/
@Bean(name = PollerMetadata.DEFAULT_POLLER)
public PollerMetadata poller() {
return Pollers
.fixedDelay(POLLER_FIXED_PERIOD_DELAY)
.advice(advice())
.maxMessagesPerPoll(MAX_MESSAGES_PER_POLL)
.transactional()
.get();
}
/**
* The direct channel for the flow.
*
* @return MessageChannel
*/
@Bean
public MessageChannel stockIntermediateChannel() {
return new DirectChannel();
}
/**
* Get the files from a remote directory. Add a timestamp to the filename
* and write them to a local temporary folder.
*
* @return IntegrationFlow
*/
@Bean
public IntegrationFlow collectionInboundFlowFromSFTPServer() {
// Source definition
final SftpInboundChannelAdapterSpec sourceSpec = Sftp.inboundAdapter(delegatingSFtpSessionFactory())
.preserveTimestamp(true)
.patternFilter("*.*")
.deleteRemoteFiles(true)
.maxFetchSize(MAX_MESSAGES_PER_POLL)
.remoteDirectory("/")
.localDirectory(new File(localTemporaryPath))
.temporaryFileSuffix(TEMPORARY_FILE_SUFFIX)
.localFilenameExpression(new FunctionExpression<String>(s -> {
final int fileTypeSepPos = s.lastIndexOf('.');
return
DateTimeFormatter
.ofPattern(TIMESTAMP_FORMAT_OF_FILES)
.withZone(ZoneId.of(TIMEZONE_UTC))
.format(Instant.now())
+ "_"
+ s.substring(0, fileTypeSepPos)
+ s.substring(fileTypeSepPos);
}));
// Poller definition
final Consumer<SourcePollingChannelAdapterSpec> collectionInboundPoller = endpointConfigurer -> endpointConfigurer
.id("collectionInboundPoller")
.autoStartup(true)
.poller(poller());
return IntegrationFlows
.from(sourceSpec, collectionInboundPoller)
.transform(File.class, p -> {
// log step
LOG.info("flow=collectionInboundFlowFromSFTPServer, message=incoming file: " + p);
return p;
})
.channel(CHANNEL_INTERMEDIATE_STAGE)
.get();
}
@Bean
public IntegrationFlow collectionIntermediateStageChannel() {
return IntegrationFlows
.from(CHANNEL_INTERMEDIATE_STAGE)
.handle(importHandler)
.channel(new NullChannel())
.get();
}
public DefaultSftpSessionFactory createNewSftpSessionFactory(final SFTPServerConfig pc) {
final DefaultSftpSessionFactory factory = new DefaultSftpSessionFactory(
false);
factory.setHost(pc.getServerIp());
factory.setPort(pc.getPort());
factory.setUser(pc.getUsername());
factory.setPassword(pc.getPassword());
factory.setAllowUnknownKeys(true);
return factory;
}
@Bean
public DelegatingSessionFactory<LsEntry> delegatingSFtpSessionFactory() {
final List<SFTPServerConfig> partnerConnections = SFTPServerConfigRepo.findAll();
if (partnerConnections.isEmpty()) {
return null;
}
final Map<Object, SessionFactory<LsEntry>> factories = new LinkedHashMap<>(10);
for (SFTPServerConfig pc : partnerConnections) {
// create a factory for every key containing server type, url and port
if (factories.get(pc.getKey()) == null) {
factories.put(pc.getKey(), createNewSftpSessionFactory(pc));
}
}
// use the first SF as the default
return new DelegatingSessionFactory<>(factories, factories.values().iterator().next());
}
@Bean
public RotatingServerAdvice advice() {
final List<SFTPServerConfig> sftpConnections = SFTPServerConfigRepo.findAll();
final List<RotatingServerAdvice.KeyDirectory> keyDirectories = new ArrayList<>();
for (SFTPServerConfig pc : sftpConnections) {
keyDirectories
.add(new RotatingServerAdvice.KeyDirectory(pc.getKey(), pc.getServerPath()));
}
return new RotatingServerAdvice(delegatingSFtpSessionFactory(), keyDirectories, true);
}
}
ImportHandler.java
import org.springframework.messaging.Message;
import org.springframework.stereotype.Service;
@Service
public class ImportHandler {
public void handle(Message<?> message) {
System.out.println("Hello " + message);
System.out.println(message.getPayload());
System.out.println(message.getHeaders());
//How can I get the information of remote server Ip address, remoteDirectory here where the file comes from
}
}
If you have any ideas, please let me know. Thank you so much!
It's not currently supported; please open a new feature request.
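One workaround, until such a feature exists, is to register a separate inbound flow per server at startup instead of rotating one flow across servers; each flow then knows its origin statically and can stamp it into headers. A rough sketch using IntegrationFlowContext (the header names here are hypothetical):
@Autowired
private IntegrationFlowContext flowContext;
public void registerFlows() {
for (SFTPServerConfig pc : SFTPServerConfigRepo.findAll()) {
IntegrationFlow flow = IntegrationFlows
.from(Sftp.inboundAdapter(createNewSftpSessionFactory(pc))
.remoteDirectory(pc.getServerPath())
.localDirectory(new File(localTemporaryPath)),
e -> e.poller(poller()))
// stamp the origin on every message from this server
.enrichHeaders(h -> h.header("sftpHost", pc.getServerIp())
.header("sftpRemoteDirectory", pc.getServerPath()))
.handle(importHandler, "handle")
.get();
flowContext.registration(flow).id("sftpFlow-" + pc.getKey()).register();
}
}
The handler can then read message.getHeaders().get("sftpHost") to know where a file came from.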

Google Pub/Sub Java examples

I'm not able to find a way to read messages from Pub/Sub using Java.
I'm using this maven dependency in my pom
<dependency>
<groupId>com.google.cloud</groupId>
<artifactId>google-cloud-pubsub</artifactId>
<version>0.17.2-alpha</version>
</dependency>
I implemented this main method to create a new topic:
public static void main(String... args) throws Exception {
// Your Google Cloud Platform project ID
String projectId = ServiceOptions.getDefaultProjectId();
// Your topic ID
String topicId = "my-new-topic-1";
// Create a new topic
TopicName topic = TopicName.create(projectId, topicId);
try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
topicAdminClient.createTopic(topic);
}
}
The above code works well and, indeed, I can see the new topic I created using the Google Cloud console.
I implemented the following main method to write a message to my topic:
public static void main(String a[]) throws InterruptedException, ExecutionException{
String projectId = ServiceOptions.getDefaultProjectId();
String topicId = "my-new-topic-1";
String payload = "Hellooooo!!!";
PubsubMessage pubsubMessage =
PubsubMessage.newBuilder().setData(ByteString.copyFromUtf8(payload)).build();
TopicName topic = TopicName.create(projectId, topicId);
Publisher publisher;
try {
publisher = Publisher.defaultBuilder(
topic)
.build();
publisher.publish(pubsubMessage);
System.out.println("Sent!");
} catch (IOException e) {
System.out.println("Not Sended!");
e.printStackTrace();
}
}
Now I'm not able to verify whether this message was really sent.
I would like to implement a message reader using a subscription to my topic.
Could someone show me a correct, working Java example of reading messages from a topic?
Can anyone help me?
Thanks in advance!
Here is the version using the google cloud client libraries.
package com.techm.data.client;
import com.google.cloud.pubsub.v1.AckReplyConsumer;
import com.google.cloud.pubsub.v1.MessageReceiver;
import com.google.cloud.pubsub.v1.Subscriber;
import com.google.cloud.pubsub.v1.SubscriptionAdminClient;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.pubsub.v1.ProjectSubscriptionName;
import com.google.pubsub.v1.ProjectTopicName;
import com.google.pubsub.v1.PubsubMessage;
import com.google.pubsub.v1.PushConfig;
/**
* A snippet for Google Cloud Pub/Sub showing how to create a Pub/Sub pull
* subscription and asynchronously pull messages from it.
*/
public class CreateSubscriptionAndConsumeMessages {
private static String projectId = "projectId";
private static String topicId = "topicName";
private static String subscriptionId = "subscriptionName";
public static void createSubscription() throws Exception {
ProjectTopicName topic = ProjectTopicName.of(projectId, topicId);
ProjectSubscriptionName subscription = ProjectSubscriptionName.of(projectId, subscriptionId);
try (SubscriptionAdminClient subscriptionAdminClient = SubscriptionAdminClient.create()) {
subscriptionAdminClient.createSubscription(subscription, topic, PushConfig.getDefaultInstance(), 0);
}
}
public static void main(String... args) throws Exception {
ProjectSubscriptionName subscription = ProjectSubscriptionName.of(projectId, subscriptionId);
createSubscription();
MessageReceiver receiver = new MessageReceiver() {
@Override
public void receiveMessage(PubsubMessage message, AckReplyConsumer consumer) {
System.out.println("Received message: " + message.getData().toStringUtf8());
consumer.ack();
}
};
Subscriber subscriber = null;
try {
subscriber = Subscriber.newBuilder(subscription, receiver).build();
subscriber.addListener(new Subscriber.Listener() {
@Override
public void failed(Subscriber.State from, Throwable failure) {
// Handle failure. This is called when the Subscriber encountered a fatal error
// and is
// shutting down.
System.err.println(failure);
}
}, MoreExecutors.directExecutor());
subscriber.startAsync().awaitRunning();
// In this example, we will pull messages for one minute (60,000ms) then stop.
// In a real application, this sleep-then-stop is not necessary.
// Simply call stopAsync().awaitTerminated() when the server is shutting down,
// etc.
Thread.sleep(60000);
} finally {
if (subscriber != null) {
subscriber.stopAsync().awaitTerminated();
}
}
}
}
This is working fine for me.
The Cloud Pub/Sub Pull Subscriber Guide has sample code for reading messages from a topic.
I haven't used the Google Cloud client libraries, but I have used the API client libraries. Here is how I created a subscription.
package com.techm.datapipeline.client;
import java.io.IOException;
import java.security.GeneralSecurityException;
import com.google.api.client.googleapis.json.GoogleJsonResponseException;
import com.google.api.client.http.HttpStatusCodes;
import com.google.api.services.pubsub.Pubsub;
import com.google.api.services.pubsub.Pubsub.Projects.Subscriptions.Create;
import com.google.api.services.pubsub.Pubsub.Projects.Subscriptions.Get;
import com.google.api.services.pubsub.Pubsub.Projects.Topics;
import com.google.api.services.pubsub.model.ExpirationPolicy;
import com.google.api.services.pubsub.model.Subscription;
import com.google.api.services.pubsub.model.Topic;
import com.techm.datapipeline.factory.PubsubFactory;
public class CreatePullSubscriberClient {
private final static String PROJECT_NAME = "yourProjectId";
private final static String TOPIC_NAME = "yourTopicName";
private final static String SUBSCRIPTION_NAME = "yourSubscriptionName";
public static void main(String[] args) throws IOException, GeneralSecurityException {
Pubsub pubSub = PubsubFactory.getService();
String topicName = String.format("projects/%s/topics/%s", PROJECT_NAME, TOPIC_NAME);
String subscriptionName = String.format("projects/%s/subscriptions/%s", PROJECT_NAME, SUBSCRIPTION_NAME);
Topics.Get listReq = pubSub.projects().topics().get(topicName);
Topic topic = listReq.execute();
if (topic == null) {
System.err.println("Topic doesn't exist...run CreateTopicClient...to create the topic");
System.exit(0);
}
Subscription subscription = null;
try {
Get getReq = pubSub.projects().subscriptions().get(subscriptionName);
subscription = getReq.execute();
} catch (GoogleJsonResponseException e) {
if (e.getStatusCode() == HttpStatusCodes.STATUS_CODE_NOT_FOUND) {
System.out.println("Subscription " + subscriptionName + " does not exist...will create it");
}
}
if (subscription != null) {
System.out.println("Subscription already exists ==> " + subscription.toPrettyString());
System.exit(0);
}
subscription = new Subscription();
subscription.setTopic(topicName);
subscription.setPushConfig(null); // indicating a pull
ExpirationPolicy expirationPolicy = new ExpirationPolicy();
expirationPolicy.setTtl(null); // never expires;
subscription.setExpirationPolicy(expirationPolicy);
subscription.setAckDeadlineSeconds(null); // so defaults to 10 sec
subscription.setRetainAckedMessages(true);
Long _week = 7L * 24 * 60 * 60;
subscription.setMessageRetentionDuration(String.valueOf(_week)+"s");
subscription.setName(subscriptionName);
Create createReq = pubSub.projects().subscriptions().create(subscriptionName, subscription);
Subscription createdSubscription = createReq.execute();
System.out.println("Subscription created ==> " + createdSubscription.toPrettyString());
}
}
And once you create the subscription (pull type)...this is how you pull the messages from the topic.
package com.techm.datapipeline.client;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.util.ArrayList;
import java.util.List;
import com.google.api.client.googleapis.json.GoogleJsonResponseException;
import com.google.api.client.http.HttpStatusCodes;
import com.google.api.client.util.Base64;
import com.google.api.services.pubsub.Pubsub;
import com.google.api.services.pubsub.Pubsub.Projects.Subscriptions.Acknowledge;
import com.google.api.services.pubsub.Pubsub.Projects.Subscriptions.Get;
import com.google.api.services.pubsub.Pubsub.Projects.Subscriptions.Pull;
import com.google.api.services.pubsub.model.AcknowledgeRequest;
import com.google.api.services.pubsub.model.Empty;
import com.google.api.services.pubsub.model.PullRequest;
import com.google.api.services.pubsub.model.PullResponse;
import com.google.api.services.pubsub.model.ReceivedMessage;
import com.techm.datapipeline.factory.PubsubFactory;
public class PullSubscriptionsClient {
private final static String PROJECT_NAME = "yourProjectId";
private final static String SUBSCRIPTION_NAME = "yourSubscriptionName";
private final static String SUBSCRIPTION_NYC_NAME = "test";
public static void main(String[] args) throws IOException, GeneralSecurityException {
Pubsub pubSub = PubsubFactory.getService();
String subscriptionName = String.format("projects/%s/subscriptions/%s", PROJECT_NAME, SUBSCRIPTION_NAME);
//String subscriptionName = String.format("projects/%s/subscriptions/%s", PROJECT_NAME, SUBSCRIPTION_NYC_NAME);
try {
Get getReq = pubSub.projects().subscriptions().get(subscriptionName);
getReq.execute();
} catch (GoogleJsonResponseException e) {
if (e.getStatusCode() == HttpStatusCodes.STATUS_CODE_NOT_FOUND) {
System.out.println("Subscription " + subscriptionName
+ " does not exist...run CreatePullSubscriberClient to create");
}
}
PullRequest pullRequest = new PullRequest();
pullRequest.setReturnImmediately(false); // wait until you get a message
pullRequest.setMaxMessages(1000);
Pull pullReq = pubSub.projects().subscriptions().pull(subscriptionName, pullRequest);
PullResponse pullResponse = pullReq.execute();
List<ReceivedMessage> msgs = pullResponse.getReceivedMessages();
List<String> ackIds = new ArrayList<String>();
int i = 0;
if (msgs != null) {
for (ReceivedMessage msg : msgs) {
ackIds.add(msg.getAckId());
//System.out.println(i++ + ":===:" + msg.getAckId());
String object = new String(Base64.decodeBase64(msg.getMessage().getData()));
System.out.println("Decoded object String ==> " + object );
}
//acknowledge all the received messages
AcknowledgeRequest content = new AcknowledgeRequest();
content.setAckIds(ackIds);
Acknowledge ackReq = pubSub.projects().subscriptions().acknowledge(subscriptionName, content);
Empty empty = ackReq.execute();
}
}
}
Note: this client waits until it receives at least one message and then terminates, acknowledging whatever it received in that single pull (up to the maximum set via setMaxMessages).
Let me know if this helps. I'm going to try the cloud client libraries soon and will post an update once I get my hands on them.
And here's the missing factory class ...if you plan to run it...
package com.techm.datapipeline.factory;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.services.pubsub.Pubsub;
import com.google.api.services.pubsub.PubsubScopes;
public class PubsubFactory {
private static Pubsub instance = null;
private static final Logger logger = Logger.getLogger(PubsubFactory.class.getName());
public static synchronized Pubsub getService() throws IOException, GeneralSecurityException {
if (instance == null) {
instance = buildService();
}
return instance;
}
private static Pubsub buildService() throws IOException, GeneralSecurityException {
logger.log(Level.FINER, "Start of buildService");
HttpTransport transport = GoogleNetHttpTransport.newTrustedTransport();
JsonFactory jsonFactory = new JacksonFactory();
GoogleCredential credential = GoogleCredential.getApplicationDefault(transport, jsonFactory);
// Depending on the environment that provides the default credentials (for
// example: Compute Engine, App Engine), the credentials may require us to
// specify the scopes we need explicitly.
if (credential.createScopedRequired()) {
Collection<String> scopes = new ArrayList<>();
scopes.add(PubsubScopes.PUBSUB);
credential = credential.createScoped(scopes);
}
logger.log(Level.FINER, "End of buildService");
// TODO - Get the application name from outside.
return new Pubsub.Builder(transport, jsonFactory, credential).setApplicationName("Your Application Name/Version")
.build();
}
}
The message receiver is injected into the subscriber. This part of the code handles the messages:
MessageReceiver receiver =
new MessageReceiver() {
@Override
public void receiveMessage(PubsubMessage message, AckReplyConsumer consumer) {
// handle incoming message, then ack/nack the received message
System.out.println("Id : " + message.getMessageId());
System.out.println("Data : " + message.getData().toStringUtf8());
consumer.ack();
}
};
