I am trying to read the connection URL environment variable of the PostgreSQL service inside my application.conf, as follows:
db.default.driver="org.postgresql.Driver"
db.default.url=${?cloud.services.postgresql.connection.url}
My VCAP_SERVICES is as follows:
{
"postgresql": [
{
"binding_name": null,
"credentials": {
"dbname": "sample-db",
"end_points": [
{
"host": "x.x.x.x",
"network_id": "SF",
"port": "44980"
}
],
"hostname": "x.x.x.x",
"password": "sample-password",
"port": "44980",
"ports": {
"5432/tcp": "44980"
},
"uri": "postgres://sample-user:sample-password#x.x.x.x:44980/sample-db",
"username": "sample-user"
},
"instance_name": "postgresql",
"label": "postgresql",
"name": "postgresql",
"plan": "v9.6-dev",
"provider": null,
"syslog_drain_url": null,
"tags": [
"postgresql",
"relational"
],
"volume_mounts": []
}
]
}
I am following this article.
However, the database won't configure, and the root cause was Configuration error[jdbcUrl is required with driverClassName.]. Full exception dump below.
play.api.Configuration$$anon$1: Configuration error[Cannot initialize to database [default]]
at play.api.Configuration$.configError(Configuration.scala:155)
at play.api.Configuration.reportError(Configuration.scala:394)
at play.api.db.DefaultDBApi.$anonfun$initialize$1(DefaultDBApi.scala:76)
at scala.collection.immutable.List.foreach(List.scala:333)
at play.api.db.DefaultDBApi.initialize(DefaultDBApi.scala:68)
at play.api.db.DBApiProvider.get$lzycompute(DBModule.scala:92)
at play.api.db.DBApiProvider.get(DBModule.scala:77)
at play.api.db.DBApiProvider.get(DBModule.scala:59)
at com.google.inject.internal.ProviderInternalFactory.provision(ProviderInternalFactory.java:85)
at com.google.inject.internal.BoundProviderFactory.provision(BoundProviderFactory.java:77)
at com.google.inject.internal.ProviderInternalFactory.circularGet(ProviderInternalFactory.java:59)
at com.google.inject.internal.BoundProviderFactory.get(BoundProviderFactory.java:61)
at com.google.inject.internal.SingleFieldInjector.inject(SingleFieldInjector.java:52)
at com.google.inject.internal.MembersInjectorImpl.injectMembers(MembersInjectorImpl.java:147)
at com.google.inject.internal.MembersInjectorImpl.injectAndNotify(MembersInjectorImpl.java:101)
at com.google.inject.internal.MembersInjectorImpl.injectMembers(MembersInjectorImpl.java:71)
at com.google.inject.internal.InjectorImpl.injectMembers(InjectorImpl.java:1055)
at com.google.inject.util.Providers$GuicifiedProviderWithDependencies.initialize(Providers.java:154)
at com.google.inject.util.Providers$GuicifiedProviderWithDependencies$$FastClassByGuice$$2a7177aa.invoke(<generated>)
at com.google.inject.internal.SingleMethodInjector$1.invoke(SingleMethodInjector.java:51)
at com.google.inject.internal.SingleMethodInjector.inject(SingleMethodInjector.java:85)
at com.google.inject.internal.MembersInjectorImpl.injectMembers(MembersInjectorImpl.java:147)
at com.google.inject.internal.MembersInjectorImpl.injectAndNotify(MembersInjectorImpl.java:101)
at com.google.inject.internal.Initializer$InjectableReference.get(Initializer.java:245)
at com.google.inject.internal.Initializer.injectAll(Initializer.java:140)
at com.google.inject.internal.InternalInjectorCreator.injectDynamically(InternalInjectorCreator.java:178)
at com.google.inject.internal.InternalInjectorCreator.build(InternalInjectorCreator.java:111)
at com.google.inject.Guice.createInjector(Guice.java:87)
at com.google.inject.Guice.createInjector(Guice.java:78)
at play.api.inject.guice.GuiceBuilder.injector(GuiceInjectorBuilder.scala:200)
at play.inject.guice.GuiceBuilder.injector(GuiceBuilder.java:211)
at play.inject.guice.GuiceApplicationBuilder.build(GuiceApplicationBuilder.java:121)
at play.inject.guice.GuiceApplicationLoader.load(GuiceApplicationLoader.java:32)
at play.api.ApplicationLoader$JavaApplicationLoaderAdapter$1.load(ApplicationLoader.scala:181)
at play.core.server.DevServerStart$$anon$1.$anonfun$reload$3(DevServerStart.scala:190)
at play.utils.Threads$.withContextClassLoader(Threads.scala:22)
at play.core.server.DevServerStart$$anon$1.reload(DevServerStart.scala:182)
at play.core.server.DevServerStart$$anon$1.get(DevServerStart.scala:142)
at play.core.server.AkkaHttpServer.handleRequest(AkkaHttpServer.scala:301)
at play.core.server.AkkaHttpServer.$anonfun$createServerBinding$1(AkkaHttpServer.scala:191)
at akka.stream.impl.fusing.MapAsync$$anon$30.onPush(Ops.scala:1285)
at akka.stream.impl.fusing.GraphInterpreter.processPush(GraphInterpreter.scala:541)
at akka.stream.impl.fusing.GraphInterpreter.execute(GraphInterpreter.scala:423)
at akka.stream.impl.fusing.GraphInterpreterShell.runBatch(ActorGraphInterpreter.scala:625)
at akka.stream.impl.fusing.GraphInterpreterShell$AsyncInput.execute(ActorGraphInterpreter.scala:502)
at akka.stream.impl.fusing.GraphInterpreterShell.processEvent(ActorGraphInterpreter.scala:600)
at akka.stream.impl.fusing.ActorGraphInterpreter.akka$stream$impl$fusing$ActorGraphInterpreter$$processEvent(ActorGraphInterpreter.scala:769)
at akka.stream.impl.fusing.ActorGraphInterpreter$$anonfun$receive$1.applyOrElse(ActorGraphInterpreter.scala:784)
at akka.actor.Actor.aroundReceive(Actor.scala:535)
at akka.actor.Actor.aroundReceive$(Actor.scala:533)
at akka.stream.impl.fusing.ActorGraphInterpreter.aroundReceive(ActorGraphInterpreter.scala:691)
at akka.actor.ActorCell.receiveMessage(ActorCell.scala:575)
at akka.actor.ActorCell.invoke(ActorCell.scala:545)
at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:270)
at akka.dispatch.Mailbox.run(Mailbox.scala:231)
at akka.dispatch.Mailbox.exec(Mailbox.scala:243)
at java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:289)
at java.util.concurrent.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1056)
at java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1692)
at java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:157)
Caused by: play.api.Configuration$$anon$1: Configuration error[jdbcUrl is required with driverClassName.]
at play.api.Configuration$.configError(Configuration.scala:155)
at play.api.Configuration.reportError(Configuration.scala:394)
at play.api.db.HikariCPConnectionPool.create(HikariCPModule.scala:70)
at play.api.db.PooledDatabase.createDataSource(Databases.scala:249)
at play.api.db.DefaultDatabase.dataSource$lzycompute(Databases.scala:141)
at play.api.db.DefaultDatabase.dataSource(Databases.scala:139)
at play.api.db.DefaultDBApi.$anonfun$initialize$1(DefaultDBApi.scala:72)
... 57 common frames omitted
Caused by: java.lang.IllegalArgumentException: jdbcUrl is required with driverClassName.
at com.zaxxer.hikari.HikariConfig.validate(HikariConfig.java:1000)
at play.api.db.HikariCPConfig.toHikariConfig(HikariCPModule.scala:140)
at play.api.db.HikariCPConnectionPool.$anonfun$create$1(HikariCPModule.scala:57)
at scala.util.Try$.apply(Try.scala:210)
at play.api.db.HikariCPConnectionPool.create(HikariCPModule.scala:54)
... 61 common frames omitted
Using Play Framework 2.8.
It looks like Cloud Foundry support for Play Framework has been broken since Play 2.5. Cloud Foundry's Java Buildpack removed support for it, which in turn invalidates the corresponding documentation in Play Framework that refers to configuration keys prefixed by ${?cloud.services....}.
I ended up writing code to parse VCAP_SERVICES myself and insert it in the application loader, following the documentation on creating your own application loader:
package com.example.config;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import play.ApplicationLoader;
import play.inject.guice.GuiceApplicationBuilder;
import play.inject.guice.GuiceApplicationLoader;

import java.io.IOException;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.logging.Level;
import java.util.logging.Logger;

public class MyApplicationLoader extends GuiceApplicationLoader {

    private static final Logger LOGGER = Logger.getLogger(MyApplicationLoader.class.getCanonicalName());

    @Override
    public GuiceApplicationBuilder builder(ApplicationLoader.Context context) {
        // https://www.programcreek.com/scala/play.api.Configuration
        // https://www.playframework.com/documentation/2.8.x/JavaDependencyInjection#Advanced:-Extending-the-GuiceApplicationLoader
        Config cloudConfig = parseCloudFoundryEnvironmentConfig(context);
        return initialBuilder
            .in(context.environment())
            .loadConfig(cloudConfig.withFallback(context.initialConfig()))
            .overrides(overrides(context));
    }

    static Config parseCloudFoundryEnvironmentConfig(ApplicationLoader.Context context) {
        final ObjectMapper objectMapper = new ObjectMapper();
        final HashMap<String, Object> configOutput = new HashMap<>();
        final String vcap_services_str = System.getenv("VCAP_SERVICES");
        if (vcap_services_str != null) {
            try {
                JsonNode rootNode = objectMapper.readTree(vcap_services_str);
                /// ... parse VCAP_SERVICES and initialize the "db...." Play configuration into `configOutput`
            } catch (IOException ex) {
                LOGGER.log(Level.SEVERE, ex, () -> MessageFormat.format("Unable to parse VCAP_SERVICES content: {0}", vcap_services_str));
            }
        } else {
            LOGGER.info("VCAP_SERVICES not defined");
        }
        Config configResult = ConfigFactory.parseMap(configOutput, "Environment Variables");
        return configResult;
    }
}
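For reference, a minimal hedged sketch of what the elided parsing step inside the try block could look like, assuming the VCAP_SERVICES shape shown at the top of the question (the key names and the jdbc URL format below are my assumptions based on that example, not my actual production code):
// hypothetical sketch: map the first postgresql service's credentials onto Play's db.default.* keys
JsonNode credentials = rootNode.path("postgresql").path(0).path("credentials");
if (!credentials.isMissingNode()) {
    String host = credentials.path("hostname").asText();
    String port = credentials.path("port").asText();
    String dbName = credentials.path("dbname").asText();
    configOutput.put("db.default.url", "jdbc:postgresql://" + host + ":" + port + "/" + dbName);
    configOutput.put("db.default.username", credentials.path("username").asText());
    configOutput.put("db.default.password", credentials.path("password").asText());
}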
Then I activated this new application loader by creating an entry in the reference.conf configuration file under the conf folder.
play.application.loader=com.example.config.MyApplicationLoader
I've been trying for a while to get this parser to work correctly, but I keep getting the same issue every time. The code is very insistent that the value is null.
The issue is this: I have a call to the Google Books API which returns a fairly large JSON response. The values I need from it are nested and are proving hard to get at.
This is an example of the JSON in question. What I want to get at is some of the book's identifying information, which can be found under volumeInfo.
{
"kind": "books#volumes",
"totalItems": 1,
"items": [
{
"kind": "books#volume",
"id": "zkqMEAAAQBAJ",
"etag": "4V9ue/R0aw4",
"selfLink": "https://www.googleapis.com/books/v1/volumes/zkqMEAAAQBAJ",
"volumeInfo": {
"title": "The Song of the Cell",
"subtitle": "An Exploration of Medicine and the New Human",
"authors": [
"Siddhartha Mukherjee"
],
"publisher": "Simon and Schuster",
"publishedDate": "2022-10-25",
"description": "Presenting revelatory and exhilarating stories of scientists, doctors, and the patients whose lives may be saved by their work, the author draws on his own experience as a researcher, doctor, and prolific reader to explore how the discovery of cells created a new kind of medicine based on the therapeutic manipulation of cells.",
"industryIdentifiers": [
{
"type": "ISBN_13",
"identifier": "9781982117351"
},
{
"type": "ISBN_10",
"identifier": "1982117354"
}
],
"readingModes": {
"text": false,
"image": false
},
"pageCount": 496,
"printType": "BOOK",
"categories": [
"History"
],
"averageRating": 5,
"ratingsCount": 2,
"maturityRating": "NOT_MATURE",
"allowAnonLogging": false,
"contentVersion": "0.6.2.0.preview.0",
"panelizationSummary": {
"containsEpubBubbles": false,
"containsImageBubbles": false
},
"imageLinks": {
"smallThumbnail": "http://books.google.com/books/content?id=zkqMEAAAQBAJ&printsec=frontcover&img=1&zoom=5&edge=curl&source=gbs_api",
"thumbnail": "http://books.google.com/books/content?id=zkqMEAAAQBAJ&printsec=frontcover&img=1&zoom=1&edge=curl&source=gbs_api"
},
"language": "en",
"previewLink": "http://books.google.com/books?id=zkqMEAAAQBAJ&printsec=frontcover&dq=isbn:9781982117351&hl=&cd=1&source=gbs_api",
"infoLink": "http://books.google.com/books?id=zkqMEAAAQBAJ&dq=isbn:9781982117351&hl=&source=gbs_api",
"canonicalVolumeLink": "https://books.google.com/books/about/The_Song_of_the_Cell.html?hl=&id=zkqMEAAAQBAJ"
},
"saleInfo": {
"country": "US",
"saleability": "NOT_FOR_SALE",
"isEbook": false
},
"accessInfo": {
"country": "US",
"viewability": "PARTIAL",
"embeddable": true,
"publicDomain": false,
"textToSpeechPermission": "ALLOWED_FOR_ACCESSIBILITY",
"epub": {
"isAvailable": false
},
"pdf": {
"isAvailable": false
},
"webReaderLink": "http://play.google.com/books/reader?id=zkqMEAAAQBAJ&hl=&source=gbs_api",
"accessViewStatus": "SAMPLE",
"quoteSharingAllowed": false
},
"searchInfo": {
"textSnippet": "Presenting revelatory and exhilarating stories of scientists, doctors, and the patients whose lives may be saved by their work, the author draws on his own experience as a researcher, doctor, and prolific reader to explore how the discovery ..."
}
}
]
}
My code is fairly simple. I've tried a few different combinations of JsonNode, JsonFactory and JsonParser but nothing has seemed to work like it should. Some of the resources I looked at while trying to fix this were this article on Baeldung, this tutorial and more Stack Overflow posts than I can list. The API call itself is not the problem. It works just fine.
This is standard Java. No Spring or other frameworks.
ObjectMapper mapper = new ObjectMapper();
URL url = new URL(setURL(bookInfo, infoType));
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
InputStream responseStream = connection.getInputStream();
JsonNode volumeNode = mapper.readTree(responseStream);
GoogleVolume testVolume = new GoogleVolume();
testVolume.setTitle(volumeNode.get("title").textValue());
Here is the stack trace:
Exception in thread "JavaFX Application Thread" java.lang.RuntimeException: java.lang.reflect.InvocationTargetException
at javafx.fxml#19/javafx.fxml.FXMLLoader$MethodHandler.invoke(FXMLLoader.java:1857)
at javafx.fxml#19/javafx.fxml.FXMLLoader$ControllerMethodEventHandler.handle(FXMLLoader.java:1724)
at javafx.base#19/com.sun.javafx.event.CompositeEventHandler.dispatchBubblingEvent(CompositeEventHandler.java:86)
at javafx.base#19/com.sun.javafx.event.EventHandlerManager.dispatchBubblingEvent(EventHandlerManager.java:234)
at javafx.base#19/com.sun.javafx.event.EventHandlerManager.dispatchBubblingEvent(EventHandlerManager.java:191)
at javafx.base#19/com.sun.javafx.event.CompositeEventDispatcher.dispatchBubblingEvent(CompositeEventDispatcher.java:59)
at javafx.base#19/com.sun.javafx.event.BasicEventDispatcher.dispatchEvent(BasicEventDispatcher.java:58)
at javafx.base#19/com.sun.javafx.event.EventDispatchChainImpl.dispatchEvent(EventDispatchChainImpl.java:114)
at javafx.base#19/com.sun.javafx.event.BasicEventDispatcher.dispatchEvent(BasicEventDispatcher.java:56)
at javafx.base#19/com.sun.javafx.event.EventDispatchChainImpl.dispatchEvent(EventDispatchChainImpl.java:114)
at javafx.base#19/com.sun.javafx.event.BasicEventDispatcher.dispatchEvent(BasicEventDispatcher.java:56)
at javafx.base#19/com.sun.javafx.event.EventDispatchChainImpl.dispatchEvent(EventDispatchChainImpl.java:114)
at javafx.base#19/com.sun.javafx.event.EventUtil.fireEventImpl(EventUtil.java:74)
at javafx.base#19/com.sun.javafx.event.EventUtil.fireEvent(EventUtil.java:49)
at javafx.base#19/javafx.event.Event.fireEvent(Event.java:198)
at javafx.graphics#19/javafx.scene.Node.fireEvent(Node.java:8923)
at javafx.controls#19/javafx.scene.control.Button.fire(Button.java:203)
at javafx.controls#19/com.sun.javafx.scene.control.behavior.ButtonBehavior.mouseReleased(ButtonBehavior.java:207)
at javafx.controls#19/com.sun.javafx.scene.control.inputmap.InputMap.handle(InputMap.java:274)
at javafx.base#19/com.sun.javafx.event.CompositeEventHandler$NormalEventHandlerRecord.handleBubblingEvent(CompositeEventHandler.java:247)
at javafx.base#19/com.sun.javafx.event.CompositeEventHandler.dispatchBubblingEvent(CompositeEventHandler.java:80)
at javafx.base#19/com.sun.javafx.event.EventHandlerManager.dispatchBubblingEvent(EventHandlerManager.java:234)
at javafx.base#19/com.sun.javafx.event.EventHandlerManager.dispatchBubblingEvent(EventHandlerManager.java:191)
at javafx.base#19/com.sun.javafx.event.CompositeEventDispatcher.dispatchBubblingEvent(CompositeEventDispatcher.java:59)
at javafx.base#19/com.sun.javafx.event.BasicEventDispatcher.dispatchEvent(BasicEventDispatcher.java:58)
at javafx.base#19/com.sun.javafx.event.EventDispatchChainImpl.dispatchEvent(EventDispatchChainImpl.java:114)
at javafx.base#19/com.sun.javafx.event.BasicEventDispatcher.dispatchEvent(BasicEventDispatcher.java:56)
at javafx.base#19/com.sun.javafx.event.EventDispatchChainImpl.dispatchEvent(EventDispatchChainImpl.java:114)
at javafx.base#19/com.sun.javafx.event.BasicEventDispatcher.dispatchEvent(BasicEventDispatcher.java:56)
at javafx.base#19/com.sun.javafx.event.EventDispatchChainImpl.dispatchEvent(EventDispatchChainImpl.java:114)
at javafx.base#19/com.sun.javafx.event.EventUtil.fireEventImpl(EventUtil.java:74)
at javafx.base#19/com.sun.javafx.event.EventUtil.fireEvent(EventUtil.java:54)
at javafx.base#19/javafx.event.Event.fireEvent(Event.java:198)
at javafx.graphics#19/javafx.scene.Scene$MouseHandler.process(Scene.java:3894)
at javafx.graphics#19/javafx.scene.Scene.processMouseEvent(Scene.java:1887)
at javafx.graphics#19/javafx.scene.Scene$ScenePeerListener.mouseEvent(Scene.java:2620)
at javafx.graphics#19/com.sun.javafx.tk.quantum.GlassViewEventHandler$MouseEventNotification.run(GlassViewEventHandler.java:411)
at javafx.graphics#19/com.sun.javafx.tk.quantum.GlassViewEventHandler$MouseEventNotification.run(GlassViewEventHandler.java:301)
at java.base/java.security.AccessController.doPrivileged(AccessController.java:399)
at javafx.graphics#19/com.sun.javafx.tk.quantum.GlassViewEventHandler.lambda$handleMouseEvent$2(GlassViewEventHandler.java:450)
at javafx.graphics#19/com.sun.javafx.tk.quantum.QuantumToolkit.runWithoutRenderLock(QuantumToolkit.java:424)
at javafx.graphics#19/com.sun.javafx.tk.quantum.GlassViewEventHandler.handleMouseEvent(GlassViewEventHandler.java:449)
at javafx.graphics#19/com.sun.glass.ui.View.handleMouseEvent(View.java:551)
at javafx.graphics#19/com.sun.glass.ui.View.notifyMouse(View.java:937)
at javafx.graphics#19/com.sun.glass.ui.gtk.GtkApplication._runLoop(Native Method)
at javafx.graphics#19/com.sun.glass.ui.gtk.GtkApplication.lambda$runLoop$11(GtkApplication.java:316)
at java.base/java.lang.Thread.run(Thread.java:833)
Caused by: java.lang.reflect.InvocationTargetException
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:568)
at com.sun.javafx.reflect.Trampoline.invoke(MethodUtil.java:77)
at jdk.internal.reflect.GeneratedMethodAccessor2.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:568)
at javafx.base#19/com.sun.javafx.reflect.MethodUtil.invoke(MethodUtil.java:275)
at javafx.fxml#19/com.sun.javafx.fxml.MethodHelper.invoke(MethodHelper.java:84)
at javafx.fxml#19/javafx.fxml.FXMLLoader$MethodHandler.invoke(FXMLLoader.java:1854)
... 46 more
Caused by: java.lang.NullPointerException: Cannot invoke "com.fasterxml.jackson.databind.JsonNode.textValue()" because the return value of "com.fasterxml.jackson.databind.JsonNode.get(String)" is null
at com.libary#0.0.1-SNAPSHOT/com.library.models.GoogleBooksService.getBookData(GoogleBooksService.java:49)
at com.libary#0.0.1-SNAPSHOT/com.library.controllers.LoginController.attemptLogin(LoginController.java:33)
I apologize if the details are a bit light, but if any information is needed then I'll be happy to provide it. Any input on why this is happening would be greatly appreciated!
According to the JSON structure, the items node must be fetched first, and since it is a list, the first item must be accessed by index:
String title = volumeNode.get("items").get(0).get("volumeInfo").get("title").textValue();
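As a side note (my suggestion, not part of the original answer), path() can be used instead of get() to avoid the NullPointerException when a node is missing:
// path() returns a MissingNode instead of null, so the chain never throws an NPE;
// textValue() then yields null if the title is absent
String title = volumeNode.path("items").path(0).path("volumeInfo").path("title").textValue();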
Well, "title" is not a property at the top level of your JSON so when it searches for such property it finds nothing and returns null. Also it seems to me that your code may be a bit too complex. You didn't provide your GoogleVolume class. However by its structure it should be the same as the structure of your JSON. What I would suggest that you use ObjectMapper method: public T readValue(String content,
Class valueType)
throws JsonProcessingException,
JsonMappingException
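For illustration, here is a rough sketch of that approach; the GoogleBooksResponse wrapper class is hypothetical, invented here to mirror the top level of the JSON in the question (it is not from the original post):
// hypothetical wrapper mirroring the top level of the Google Books response
public class GoogleBooksResponse {
    public List<GoogleVolume> items;
}

// usage, where jsonString is the raw response body:
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); // skip undeclared fields
GoogleBooksResponse response = mapper.readValue(jsonString, GoogleBooksResponse.class);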
Or you can try using Map.class to parse your JSON String into an instance of Map<String, Object>. Also, if I may suggest simplifying your code even further: I wrote my own Open Source library called MgntUtils that provides (among other utils) a very simple Http client and a JsonUtils class for simple cases of serializing/parsing JSON. Your code, using my library, may look something like this:
HttpClient httpClient = new HttpClient();
String jsonResponse = httpClient.sendHttpRequest(urlStr, HttpClient.HttpMethod.GET);
GoogleVolume volume = JsonUtils.readObjectFromJsonString(jsonResponse, GoogleVolume.class);
For simplicity I omitted exception handling. Here is the Javadoc for the MgntUtils library. The library can be obtained as a Maven artifact from Maven Central or from Github (including Javadoc and source code).
Hello, I have created an RDS instance on AWS and created a policy with this permission, based on this link:
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"rds-db:connect"
],
"Resource": [
"arn:aws:rds-db:us-east-2:1234567890:dbuser:db-ABCDEFGHIJKL01234/db_user"
]
}
]
}
I have a general user defined with a specific password. I tried to log in with that user, but instead of the password I tried to use an auth token, as in this guide:
private static Properties setMySqlConnectionProperties() {
    Properties mysqlConnectionProperties = new Properties();
    mysqlConnectionProperties.setProperty("verifyServerCertificate", "true");
    mysqlConnectionProperties.setProperty("useSSL", "true");
    mysqlConnectionProperties.setProperty("user", DB_USER);
    // NOTE: assumes DB_REGION, DB_HOST and DB_PORT constants analogous to DB_USER,
    // since generateAuthToken below takes region, hostname, port and username
    mysqlConnectionProperties.setProperty("password", generateAuthToken(DB_REGION, DB_HOST, DB_PORT, DB_USER));
    return mysqlConnectionProperties;
}

public static String generateAuthToken(String region, String hostName, int port, String username) {
    RdsIamAuthTokenGenerator generator = RdsIamAuthTokenGenerator.builder()
        .credentials(new DefaultAWSCredentialsProviderChain())
        .region(region)
        .build();
    String authToken = generator.getAuthToken(
        GetIamAuthTokenRequest.builder()
            .hostname(hostName)
            .port(port)
            .userName(username)
            .build());
    return authToken;
}
In my case I am using this with PostgreSQL, and it results in this error:
"FATAL: password authentication failed for user \"root\"","error.stack_trace":"org.postgresql.util.PSQLException: FATAL: password authentication failed for user \"root\"
My root user should be supported with IAM; what can I validate in order to fix this? Below you can see from AWS that my policy is defined.
First of all, I had a bug: I used the DB name instead of the DBI resource ID.
This is the expected format:
arn:aws:rds-db:region:account-id:dbuser:DbiResourceId/db-user-name
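If you need to look the DbiResourceId up programmatically (my addition, not from the original answer, using the same AWS SDK v1 family as the question's token code):
// fetch the DbiResourceId (e.g. "db-ABCDEFGHIJKL01234") needed for the ARN
AmazonRDS rds = AmazonRDSClientBuilder.defaultClient();
String resourceId = rds.describeDBInstances(
        new DescribeDBInstancesRequest().withDBInstanceIdentifier("company-oltp1"))
    .getDBInstances().get(0).getDbiResourceId();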
And here is the Terraform code:
data "aws_iam_policy_document" "policy_fooweb_job" {
statement {
actions = [
"rds-db:connect"
]
effect = "Allow"
resources = [
"arn:aws:rds-db:${var.region}:${data.aws_caller_identity.current.account_id}:dbuser:${data.aws_db_instance.database.resource_id}/someUser"
]
}
}
## get the db instance
data "aws_db_instance" "database" {
db_instance_identifier = "company-oltp1"
}
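As an aside (my addition, not from the original answer): since the question targets PostgreSQL, the IAM token is simply used in place of the password with the standard PostgreSQL JDBC driver. A hedged sketch reusing the question's generateAuthToken helper, where region, hostName, port and dbName are placeholders:
// the token goes where the password normally would; IAM DB auth requires SSL
Properties props = new Properties();
props.setProperty("user", "someUser");
props.setProperty("password", generateAuthToken(region, hostName, port, "someUser"));
props.setProperty("ssl", "true");
Connection connection = DriverManager.getConnection(
        "jdbc:postgresql://" + hostName + ":" + port + "/" + dbName, props);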
I am starting to build a Microservice API Gateway, and I am considering Spring Cloud to help me with the routing. But some calls to the Gateway API will need multiple requests to different services.
Let's say I have 2 services: Order Details Service and Delivery Service. I want to have a Gateway endpoint GET /orders/{orderId} that makes a call to the Order Details Service and then the Delivery Service, and combines the two to return the full order details with delivery. Is this possible with the routing of Spring Cloud, or should I make these calls by hand using something like RestTemplate?
There is an enhancement proposal posted on GitHub to have routes support multiple URIs. So far, there aren't any plans to implement this yet, at least, not according to one of the contributors.
As posted in the Spring Cloud Gateway Github issue mentioned by g00glen00b, until the library develops a Filter for this, I resolved it using the ModifyResponseBodyGatewayFilterFactory in my own custom Filter.
Just in case it's useful for anyone else, I provide the base implementation here (it may need some rework, but it should be enough to make the point).
Simply put, I have a "base" service retrieving something like this:
[
{
"targetEntryId": "624a448cbc728123b47d08c4",
"sections": [
{
"title": "sadasa",
"description": "asda"
}
],
"id": "624a448c45459c4d757869f1"
},
{
"targetEntryId": "624a44e5bc728123b47d08c5",
"sections": [
{
"title": "asda",
"description": null
}
],
"id": "624a44e645459c4d757869f2"
}
]
And I want to enrich these entries with the actual targetEntry data (of course, identified by targetEntryId).
So, I created my Filter based on the ModifyResponseBody one:
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cloud.gateway.filter.GatewayFilter;
import org.springframework.cloud.gateway.filter.factory.AbstractGatewayFilterFactory;
import org.springframework.cloud.gateway.filter.factory.rewrite.ModifyResponseBodyGatewayFilterFactory;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.stereotype.Component;
import org.springframework.web.reactive.function.client.WebClient;
import org.springframework.web.util.UriComponentsBuilder;

/**
 * <p>
 * Filter to compose a response body with associated data from a second API.
 * </p>
 *
 * @author rozagerardo
 */
@Component
public class ComposeFieldApiGatewayFilterFactory extends
    AbstractGatewayFilterFactory<ComposeFieldApiGatewayFilterFactory.Config> {

  public ComposeFieldApiGatewayFilterFactory() {
    super(Config.class);
  }

  @Autowired
  ModifyResponseBodyGatewayFilterFactory modifyResponseBodyFilter;

  ParameterizedTypeReference<List<Map<String, Object>>> jsonType =
      new ParameterizedTypeReference<List<Map<String, Object>>>() {
      };

  @Value("${server.port:9080}")
  int aPort;

  @Override
  public GatewayFilter apply(final Config config) {
    return modifyResponseBodyFilter.apply((c) -> {
      c.setRewriteFunction(List.class, List.class, (filterExchange, input) -> {
        List<Map<String, Object>> castedInput = (List<Map<String, Object>>) input;
        // extract base field values (usually ids) and join them in a "," separated string
        String baseFieldValues = castedInput.stream()
            .map(bodyMap -> (String) bodyMap.get(config.getOriginBaseField()))
            .collect(Collectors.joining(","));
        // Request to a path managed by the Gateway
        WebClient client = WebClient.create();
        return client.get()
            .uri(UriComponentsBuilder.fromUriString("http://localhost").port(aPort)
                .path(config.getTargetGatewayPath())
                .queryParam(config.getTargetQueryParam(), baseFieldValues).build().toUri())
            .exchangeToMono(response -> response.bodyToMono(jsonType)
                .map(targetEntries -> {
                  // create a Map using the base field values as keys for easy access
                  Map<String, Map> targetEntriesMap = targetEntries.stream().collect(
                      Collectors.toMap(pr -> (String) pr.get("id"), pr -> pr));
                  // compose the origin body using the requested target entries
                  return castedInput.stream().map(originEntries -> {
                    originEntries.put(config.getComposeField(),
                        targetEntriesMap.get(originEntries.get(config.getOriginBaseField())));
                    return originEntries;
                  }).collect(Collectors.toList());
                })
            );
      });
    });
  }

  @Override
  public List<String> shortcutFieldOrder() {
    return Arrays.asList("originBaseField", "targetGatewayPath", "targetQueryParam",
        "composeField");
  }

  /**
   * <p>
   * Config class to use for AbstractGatewayFilterFactory.
   * </p>
   */
  public static class Config {

    private String originBaseField;
    private String targetGatewayPath;
    private String targetQueryParam;
    private String composeField;

    public Config() {
    }

    // Getters and Setters...
  }
}
For completeness, this is the corresponding route setup using my Filter:
spring:
cloud:
gateway:
routes:
# TARGET ENTRIES ROUTES
- id: targetentries_route
uri: ${configs.api.targetentries.baseURL}
predicates:
- Path=/api/target/entries
- Method=GET
filters:
- RewritePath=/api/target/entries(?<segment>.*), /target-entries-service$\{segment}
# ORIGIN ENTRIES
- id: originentries_route
uri: ${configs.api.originentries.baseURL}
predicates:
- Path=/api/origin/entries**
filters:
- RewritePath=/api/origin/entries(?<segment>.*), /origin-entries-service$\{segment}
- ComposeFieldApi=targetEntryId,/api/target/entries,ids,targetEntry
And with this, my resulting response looks as follows:
[
{
"targetEntryId": "624a448cbc728123b47d08c4",
"sections": [
{
"title": "sadasa",
"description": "asda"
}
],
"id": "624a448c45459c4d757869f1",
"targetEntry": {
"id": "624a448cbc728123b47d08c4",
"targetEntityField": "whatever"
}
},
{
"targetEntryId": "624a44e5bc728123b47d08c5",
"sections": [
{
"title": "asda",
"description": null
}
],
"id": "624a44e645459c4d757869f2",
"targetEntry": {
"id": "624a44e5bc728123b47d08c5",
"targetEntityField": "somethingelse"
}
}
]
I haven't seen a solution to my particular problem so far; at least, nothing is working, and it's driving me pretty crazy. This particular combo doesn't seem to have much coverage in the Google space. From what I can tell, my error occurs as the job goes into the mapper. The input to this job is Avro-schema'd output that is compressed with deflate, though I tried uncompressed as well.
Avro: 1.7.7
Hadoop: 2.4.1
I am getting this error and I'm not sure why. Here are my job, mapper, and reducer. The error is happening when the mapper comes in.
Sample uncompressed Avro input file (StockReport.SCHEMA is defined this way):
{"day": 3, "month": 2, "year": 1986, "stocks": [{"symbol": "AAME", "timestamp": 507833213000, "dividend": 10.59}]}
Job
@Override
public int run(String[] strings) throws Exception {
Job job = Job.getInstance();
job.setJobName("GenerateGraphsJob");
job.setJarByClass(GenerateGraphsJob.class);
configureJob(job);
int resultCode = job.waitForCompletion(true) ? 0 : 1;
return resultCode;
}
private void configureJob(Job job) throws IOException {
try {
Configuration config = getConf();
Path inputPath = ConfigHelper.getChartInputPath(config);
Path outputPath = ConfigHelper.getChartOutputPath(config);
job.setInputFormatClass(AvroKeyInputFormat.class);
AvroKeyInputFormat.addInputPath(job, inputPath);
AvroJob.setInputKeySchema(job, StockReport.SCHEMA$);
job.setMapperClass(StockAverageMapper.class);
job.setCombinerClass(StockAverageCombiner.class);
job.setReducerClass(StockAverageReducer.class);
FileOutputFormat.setOutputPath(job, outputPath);
} catch (IOException | ClassCastException e) {
LOG.error("An job error has occurred.", e);
}
}
Mapper:
public class StockAverageMapper extends
Mapper<AvroKey<StockReport>, NullWritable, StockYearSymbolKey, StockReport> {
private static Logger LOG = LoggerFactory.getLogger(StockAverageMapper.class);
private final StockReport stockReport = new StockReport();
private final StockYearSymbolKey stockKey = new StockYearSymbolKey();
@Override
protected void map(AvroKey<StockReport> inKey, NullWritable ignore, Context context)
throws IOException, InterruptedException {
try {
StockReport inKeyDatum = inKey.datum();
for (Stock stock : inKeyDatum.getStocks()) {
updateKey(inKeyDatum, stock);
updateValue(inKeyDatum, stock);
context.write(stockKey, stockReport);
}
} catch (Exception ex) {
LOG.debug(ex.toString());
}
}
Schema for map output key:
{
"namespace": "avro.model",
"type": "record",
"name": "StockYearSymbolKey",
"fields": [
{
"name": "year",
"type": "int"
},
{
"name": "symbol",
"type": "string"
}
]
}
Stack trace:
java.lang.Exception: java.lang.IncompatibleClassChangeError: Found interface org.apache.hadoop.mapreduce.TaskAttemptContext, but class was expected
at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
Caused by: java.lang.IncompatibleClassChangeError: Found interface org.apache.hadoop.mapreduce.TaskAttemptContext, but class was expected
at org.apache.avro.mapreduce.AvroKeyInputFormat.createRecordReader(AvroKeyInputFormat.java:47)
at org.apache.hadoop.mapred.MapTask$NewTrackingRecordReader.<init>(MapTask.java:492)
at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:735)
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:340)
at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
Edit: Not that it matters, but I'm working to reduce this to data I can create JFreeChart outputs from. It's not getting through the mapper, so that shouldn't be related.
The problem is that org.apache.hadoop.mapreduce.TaskAttemptContext was a class in Hadoop 1 but became an interface in Hadoop 2.
This is one of the reasons why libraries which depend on the Hadoop libs need to have separately compiled jarfiles for Hadoop 1 and Hadoop 2. Based on your stack trace, it appears that somehow you got a Hadoop1-compiled Avro jarfile, despite running with Hadoop 2.4.1.
The download mirrors for Avro provide nice separate downloadables for avro-mapred-1.7.7-hadoop1.jar vs avro-mapred-1.7.7-hadoop2.jar.
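One quick way to confirm which avro-mapred jar actually ends up on your runtime classpath (a debugging suggestion of mine, not from the original answer):
// prints the location of the jar that AvroKeyInputFormat was loaded from
System.out.println(org.apache.avro.mapreduce.AvroKeyInputFormat.class
        .getProtectionDomain().getCodeSource().getLocation());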
The problem is that Avro 1.7.7 supports two versions of Hadoop and hence depends on both Hadoop versions. By default, the Avro 1.7.7 jars depend on the old Hadoop version.
To build Avro 1.7.7 with Hadoop 2, just add an extra classifier line to your Maven dependencies:
<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro-mapred</artifactId>
<version>1.7.7</version>
<classifier>hadoop2</classifier>
</dependency>
This will tell Maven to search for avro-mapred-1.7.7-hadoop2.jar, not avro-mapred-1.7.7.jar.
The same applies to Avro 1.7.4 and above.
Does anyone have an example of using Apache Qpid within a standalone JUnit test?
Ideally I want to be able to create a queue on the fly to which I can put, and from which I can get, messages within my test.
So I'm not testing Qpid within my test; I'll use integration tests for that. However, it would be very useful to test methods handling messages without having to mock out a load of services.
Here is the setup method I use for QPID 0.30 (I use this in a Spock test, but it should be portable to Java or JUnit with no problems). This supports SSL connections and the HTTP management plugin, and uses only in-memory startup; startup time is sub-second. Configuration for QPID is awkward compared to using ActiveMQ for the same purpose, but QPID is AMQP-compliant and allows for smooth, neutral testing of AMQP clients (obviously the use of exchanges cannot mimic RabbitMQ's implementation, but for basic purposes it is sufficient).
First I created a minimal test-config.json which I put in the resources folder:
{
"name": "${broker.name}",
"modelVersion": "2.0",
"defaultVirtualHost" : "default",
"authenticationproviders" : [ {
"name" : "passwordFile",
"type" : "PlainPasswordFile",
"path" : "${qpid.home_dir}${file.separator}etc${file.separator}passwd",
"preferencesproviders" : [{
"name": "fileSystemPreferences",
"type": "FileSystemPreferences",
"path" : "${qpid.work_dir}${file.separator}user.preferences.json"
}]
} ],
"ports" : [ {
"name" : "AMQP",
"port" : "${qpid.amqp_port}",
"authenticationProvider" : "passwordFile",
"keyStore" : "default",
"protocols": ["AMQP_0_10", "AMQP_0_8", "AMQP_0_9", "AMQP_0_9_1" ],
"transports" : [ "SSL" ]
}, {
"name" : "HTTP",
"port" : "${qpid.http_port}",
"authenticationProvider" : "passwordFile",
"protocols" : [ "HTTP" ]
}],
"virtualhostnodes" : [ {
"name" : "default",
"type" : "JSON",
"virtualHostInitialConfiguration" : "{ \"type\" : \"Memory\" }"
} ],
"plugins" : [ {
"type" : "MANAGEMENT-HTTP",
"name" : "httpManagement"
}],
"keystores" : [ {
"name" : "default",
"password" : "password",
"path": "${qpid.home_dir}${file.separator}keystore.jks"
}]
}
I also needed to create a keystore.jks file for localhost because the QPID broker and the RabbitMQ client do not like to communicate over an unencrypted channel. I also added a file called "passwd" in "integTest/resources/etc" that has this content:
guest:password
Here is the code from the unit test setup:
class level variables:
def tmpFolder = Files.createTempDir()
Broker broker
def amqpPort = PortFinder.findFreePort()
def httpPort = PortFinder.findFreePort()
def qpidHomeDir = 'src/integTest/resources/'
def configFileName = "/test-config.json"
code for the setup() method:
def setup() {
broker = new Broker();
def brokerOptions = new BrokerOptions()
File file = new File(qpidHomeDir)
String homePath = file.getAbsolutePath();
log.info(' qpid home dir=' + homePath)
log.info(' qpid work dir=' + tmpFolder.absolutePath)
brokerOptions.setConfigProperty('qpid.work_dir', tmpFolder.absolutePath);
brokerOptions.setConfigProperty('qpid.amqp_port',"${amqpPort}")
brokerOptions.setConfigProperty('qpid.http_port', "${httpPort}")
brokerOptions.setConfigProperty('qpid.home_dir', homePath);
brokerOptions.setInitialConfigurationLocation(homePath + configFileName)
broker.startup(brokerOptions)
log.info('broker started')
}
code for cleanup()
broker.shutdown()
To make an AMQP connection from a Rabbit MQ client:
ConnectionFactory factory = new ConnectionFactory();
factory.setUri("amqp://guest:password#localhost:${amqpPort}");
factory.useSslProtocol()
log.info('about to make connection')
def connection = factory.newConnection();
//get a channel for sending the "kickoff" message
def channel = connection.createChannel();
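From there (my hedged continuation; the queue name is arbitrary), a throwaway queue can be declared and a test message published:
// declare a non-durable, exclusive, auto-delete queue and publish a test message
channel.queueDeclare("test-queue", false, true, true, null);
channel.basicPublish("", "test-queue", null, "kickoff".getBytes(StandardCharsets.UTF_8));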
The Qpid project has a number of tests that use an embedded broker. Whilst we use a base class to handle startup and shutdown, you could do the following to simply integrate a broker within your tests:
// port is a field so that tearDown() can reach it as well
private final int port = 1;

public void setUp()
{
    // config is actually a Configuration File App Registry object, or Configuration Application Registry
    ApplicationRegistry.initialise(config, port);
    TransportConnection.createVMBroker(port);
}

public void test()
{...}

public void tearDown()
{
    TransportConnection.killVMBroker(port);
    ApplicationRegistry.remove(port);
}
Then for the connection you need to specify the connectionURL for the broker, i.e. brokerlist='vm://1'.
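For example, a hedged sketch using the old Qpid Java client's AMQConnectionFactory (the clientid segment and credentials are arbitrary):
// JMS connection against the in-VM broker started above
AMQConnectionFactory factory = new AMQConnectionFactory(
        "amqp://guest:guest@clientid/test?brokerlist='vm://:1'");
Connection connection = factory.createConnection();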
My solution, on qpid-broker 6.1.1: add the following to pom.xml:
<dependency>
<groupId>org.apache.qpid</groupId>
<artifactId>qpid-broker</artifactId>
<version>6.1.1</version>
<scope>test</scope>
</dependency>
QPID config file:
{
"name" : "${broker.name}",
"modelVersion" : "6.1",
"defaultVirtualHost" : "default",
"authenticationproviders" : [ {
"name" : "anonymous",
"type" : "Anonymous"
} ],
"ports" : [ {
"name" : "AMQP",
"port" : "${qpid.amqp_port}",
"authenticationProvider" : "anonymous",
"virtualhostaliases" : [ {
"name" : "defaultAlias",
"type" : "defaultAlias"
} ]
} ],
"virtualhostnodes" : [ {
"name" : "default",
"type" : "JSON",
"defaultVirtualHostNode" : "true",
"virtualHostInitialConfiguration" : "{ \"type\" : \"Memory\" }"
} ]
}
code to start the qpid server
Broker broker = new Broker();
BrokerOptions brokerOptions = new BrokerOptions();
// I use fix port number
brokerOptions.setConfigProperty("qpid.amqp_port", "20179");
brokerOptions.setConfigurationStoreType("Memory");
// work_dir for qpid's log, configs, persist data
System.setProperty("qpid.work_dir", "/tmp/qpidworktmp");
// init config of qpid. Relative path for classloader resource or absolute path for non-resource
System.setProperty("qpid.initialConfigurationLocation", "qpid/qpid-config.json");
brokerOptions.setStartupLoggedToSystemOut(false);
broker.startup(brokerOptions);
code to stop qpid server
broker.shutdown();
Since I use anonymous mode, the client should do something like this:
SaslConfig saslConfig = new SaslConfig() {
public SaslMechanism getSaslMechanism(String[] mechanisms) {
return new SaslMechanism() {
public String getName() {
return "ANONYMOUS";
}
public LongString handleChallenge(LongString challenge, String username, String password) {
return LongStringHelper.asLongString("");
}
};
}
};
ConnectionFactory factory = new ConnectionFactory();
factory.setHost("localhost");
factory.setPort(20179);
factory.setSaslConfig(saslConfig);
Connection connection = factory.newConnection();
Channel channel = connection.createChannel();
That's all.
A little more on how to do this with other versions.
You can download the qpid-broker binary package from the official site. After downloading and unzipping, you can run it as a server to test against your case. Once your case connects to the server successfully, use the command line to generate, or just copy, the initial config file in QPID_WORK, remove the useless id field, and use it for the embedded server as above.
The most complicated thing is the authentication. You can choose PLAIN mode, but then you have to add the username and password to the initial config. I chose anonymous mode, which needs a little code when connecting. For other authentication modes you have to specify the password file or key/cert store, which I didn't try.
If it is still not working, you can read the qpid-broker docs and the Main class code in the qpid-broker artifact, which show how the command line works for each setting.
The best I could figure out was:
PropertiesConfiguration properties = new PropertiesConfiguration();
properties.addProperty("virtualhosts.virtualhost.name", "test");
properties.addProperty("security.principal-databases.principal-database.name", "testPasswordFile");
properties.addProperty("security.principal-databases.principal-database.class", "org.apache.qpid.server.security.auth.database.PropertiesPrincipalDatabase");
ServerConfiguration config = new ServerConfiguration(properties);
ApplicationRegistry.initialise(new ApplicationRegistry(config) {
@Override
protected void createDatabaseManager(ServerConfiguration configuration) throws Exception {
Properties users = new Properties();
users.put("guest","guest");
users.put("admin","admin");
_databaseManager = new PropertiesPrincipalDatabaseManager("testPasswordFile", users);
}
});
TransportConnection.createVMBroker(ApplicationRegistry.DEFAULT_INSTANCE);
With a URL of:
amqp://admin:admin@/test?brokerlist='vm://:1?sasl_mechs='PLAIN''
The big pain is with configuration and authorization. Mileage may vary.