I'm trying to set up MongoDB in a Spring Boot project. I've set a URI in application.yml:
spring:
  data:
    mongodb:
      uri: mongodb://user:pass@localhost:27017/mydbname
But the application fails to read data from the repository with this error:
Attempt to switch database target during SASL authentication.
The line where the error occurs (Kotlin):
val emails = emailRepository.findAllByStatus(READY_TO_SEND)
where
interface EmailRepository : MongoRepository<Email, String> {
    fun findAllByStatus(status: EmailStatus): Collection<Email>
}
and
data class Email(
    @Id
    @get:JsonIgnore
    var id: String? = null,
    @get:NotNull
    val from: MailActor,
    @get:NotEmpty
    val to: Collection<MailActor>,
    @get:NotEmpty
    val subject: String,
    @get:NotEmpty
    val htmlText: String,
    val attachments: Collection<Attachment> = listOf(),
    val cc: Collection<MailActor> = listOf(),
    val bcc: Collection<MailActor> = listOf(),
    @get:JsonIgnore
    val status: EmailStatus = EmailStatus.READY_TO_SEND,
    @get:JsonIgnore
    val created: LocalDateTime = LocalDateTime.now(),
    @get:JsonIgnore
    val lastSendAttempt: LocalDateTime? = null,
)
The same error occurs with findAll and save operations on the repository (and likely others too).
Stack trace:
Caused by: com.mongodb.MongoSecurityException: Exception authenticating MongoCredential{mechanism=SCRAM-SHA-1, userName='user', source='user', password=<hidden>, mechanismProperties=<hidden>}
at com.mongodb.internal.connection.SaslAuthenticator.wrapException(SaslAuthenticator.java:235) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.connection.SaslAuthenticator$1.run(SaslAuthenticator.java:80) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.connection.SaslAuthenticator$1.run(SaslAuthenticator.java:51) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.connection.SaslAuthenticator.doAsSubject(SaslAuthenticator.java:241) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.connection.SaslAuthenticator.authenticate(SaslAuthenticator.java:51) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.connection.InternalStreamConnectionInitializer.authenticate(InternalStreamConnectionInitializer.java:168) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.connection.InternalStreamConnectionInitializer.initialize(InternalStreamConnectionInitializer.java:63) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.connection.InternalStreamConnection.open(InternalStreamConnection.java:144) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.connection.UsageTrackingInternalConnection.open(UsageTrackingInternalConnection.java:51) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.connection.DefaultConnectionPool$PooledConnection.open(DefaultConnectionPool.java:431) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.connection.DefaultConnectionPool.get(DefaultConnectionPool.java:115) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.connection.DefaultConnectionPool.get(DefaultConnectionPool.java:100) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.connection.DefaultServer.getConnection(DefaultServer.java:92) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.binding.ClusterBinding$ClusterBindingConnectionSource.getConnection(ClusterBinding.java:119) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.client.internal.ClientSessionBinding$SessionBindingConnectionSource.getConnection(ClientSessionBinding.java:135) ~[mongodb-driver-sync-4.1.1.jar:na]
at com.mongodb.internal.operation.FindOperation$1.call(FindOperation.java:653) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.operation.FindOperation$1.call(FindOperation.java:650) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.operation.OperationHelper.withReadConnectionSource(OperationHelper.java:582) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.operation.FindOperation.execute(FindOperation.java:650) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.operation.FindOperation.execute(FindOperation.java:78) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.client.internal.MongoClientDelegate$DelegateOperationExecutor.execute(MongoClientDelegate.java:178) ~[mongodb-driver-sync-4.1.1.jar:na]
at com.mongodb.client.internal.MongoIterableImpl.execute(MongoIterableImpl.java:135) ~[mongodb-driver-sync-4.1.1.jar:na]
at com.mongodb.client.internal.MongoIterableImpl.iterator(MongoIterableImpl.java:92) ~[mongodb-driver-sync-4.1.1.jar:na]
at org.springframework.data.mongodb.core.MongoTemplate.executeFindMultiInternal(MongoTemplate.java:2790) ~[spring-data-mongodb-3.1.2.jar:3.1.2]
... 33 common frames omitted
Caused by: com.mongodb.MongoCommandException: Command failed with error 17 (ProtocolError): 'Attempt to switch database target during SASL authentication.' on server localhost:27017. The full response is {"ok": 0.0, "errmsg": "Attempt to switch database target during SASL authentication.", "code": 17, "codeName": "ProtocolError"}
at com.mongodb.internal.connection.ProtocolHelper.getCommandFailureException(ProtocolHelper.java:175) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.connection.InternalStreamConnection.receiveCommandMessageResponse(InternalStreamConnection.java:359) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.connection.InternalStreamConnection.sendAndReceive(InternalStreamConnection.java:280) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.connection.CommandHelper.sendAndReceive(CommandHelper.java:83) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.connection.CommandHelper.executeCommand(CommandHelper.java:33) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.connection.SaslAuthenticator.sendSaslContinue(SaslAuthenticator.java:195) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.connection.SaslAuthenticator.access$200(SaslAuthenticator.java:43) ~[mongodb-driver-core-4.1.1.jar:na]
at com.mongodb.internal.connection.SaslAuthenticator$1.run(SaslAuthenticator.java:69) ~[mongodb-driver-core-4.1.1.jar:na]
... 55 common frames omitted
I am able to connect to Mongo via the IntelliJ IDEA client with the same credentials.
I run MongoDB with docker-compose:
version: '3.1'
services:
  mongodb:
    image: mongo
    container_name: my-service-mongo
    restart: always
    environment:
      MONGO_INITDB_ROOT_USERNAME: user
      MONGO_INITDB_ROOT_PASSWORD: pass
      MONGO_INITDB_DATABASE: mydbname
    ports:
      - 27017:27017
    volumes:
      - ./mongo-init.js:/docker-entrypoint-initdb.d/mongo-init.js:ro
where mongo-init.js is
db.createUser(
    {
        user: "user",
        pwd: "pass",
        roles: [
            {
                role: "readWrite",
                db: "mydbname"
            }
        ]
    }
);
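For reference, you can see where each user actually lives by querying both databases from inside the container (a sketch, assuming the container name and credentials from the compose file above):

docker exec my-service-mongo mongo -u user -p pass --authenticationDatabase admin --quiet --eval 'printjson(db.getSiblingDB("admin").getUsers()); printjson(db.getSiblingDB("mydbname").getUsers())'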
What is going on? The only place on the internet where I found this error message is... the mongo source code.
Any help appreciated.
spring:
  data:
    mongodb:
      uri: mongodb://admin:admin@127.0.0.1:27017/dbname?authSource=admin
Add the authSource param.
If you:
- Have multiple similar accounts (with the same credentials), one in "admin" and the other in another database.
- Intend to log in with the account that is not in the "admin" database.
- Have SASL enabled.
Then you may be affected by JAVA-4290, which is caused by speculative authentication only supporting the "admin" database, in mongodb-driver-core versions before 4.3.2. This resulted in the client successfully authenticating with the wrong account (the one in the admin database) via speculative authentication, only for MongoDB to trap the error when the SASL Continue message is sent. It does so with a Protocol Error, as you have observed.
As of Spring Boot 2.5.4, the version of mongodb-driver-core that ships with Spring Boot is only 4.2.3.
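If you would rather upgrade the driver than work around it, Spring Boot lets you override its managed driver version. A sketch for a Maven build (for Gradle, set the same mongodb.version property in your build script):

<properties>
    <!-- override Spring Boot's managed MongoDB driver version to pick up the JAVA-4290 fix -->
    <mongodb.version>4.3.2</mongodb.version>
</properties>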
It seems to be a bug with MongoDB version
4.4.3-bionic
I've changed image in docker-compose to
mongo:3
and the error went away. Uh, so many hours...
Adding spring.data.mongodb.authentication-database=admin in your application.properties will set the authentication database to admin. This is equivalent to adding ?authSource=admin in your connection string.
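For example, a sketch with placeholder values (note that this property style and the spring.data.mongodb.uri style are alternatives and shouldn't be mixed):

spring.data.mongodb.host=localhost
spring.data.mongodb.port=27017
spring.data.mongodb.database=mydbname
spring.data.mongodb.username=user
spring.data.mongodb.password=pass
spring.data.mongodb.authentication-database=admin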
I faced the same issue. I was using spring-boot-starter-parent 2.4.1 and it worked fine with the latest MongoDB Docker instance. But I had to connect to a Bitnami Mongo Docker container, which uses an older MongoDB, and the same error occurred. I downgraded to 2.3.7.RELEASE and got the expected behavior without errors.
The reason for the error, in my case, was a compatibility issue. Refer to the Spring Data MongoDB Compatibility Matrix for the different versions and their compatibility.
EDIT
I faced a similar error in a different application. With the help of this Stack Overflow post, Mongodb could not find user "user@database", I was able to understand the issue and solve it. Basically, you need to specify the authentication database properly. If you don't, the user will by default be looked up in the database you provided, not in the authentication database. In my case I specified database A while my user was in the admin db, so the user was looked up in database A.
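For illustration, a connection string for that situation might look like this (a sketch with placeholder names; the data lives in databaseA, the user lives in admin):

mongodb://user:pass@host:27017/databaseA?authSource=admin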
This answer may not be directly related to this question, but I hope it will help others. If you use a uri, then @ChanningQiu's answer is fine. But if you use a property for each param, then you can try the following:
spring:
  data:
    mongodb:
      host: localhost
      port: 27017
      database: db_name
      username: db_user_name
      password: db_password
      authentication-database: admin
So in this case you need to set the authentication-database: admin value.
Along with @Lutoslaw's answer above: rolling back to 4.2.2 works for me too.
I've gotten the same error - Attempt to switch database target during SASL authentication - on a createIndex operation.
It seemed to happen on my first operation as an ordinary user, right after the successful creation of the admin one.
It would be cool to know what changed so dramatically in the auth mechanism between those versions.
Related
I'm launching 3 Elastic nodes using the Elastic operator, and I tried to set up automated snapshots for these instances.
I followed this doc.
I minified the JSON of the service account key, created a file called gcs.client.default.credentials_file with no file extension, and added this file to Kubernetes secrets.
I also added the secureSettings.secretName field to the spec of the Elastic cluster, set to the name of the secret, which was gcs-credentials.
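The secret itself was created roughly like this (a sketch; the local key file name is an assumption):

kubectl create secret generic gcs-credentials --from-file=gcs.client.default.credentials_file=./service-account-key.json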
But I get this error in the logs:
{"#timestamp":"2022-12-26T18:45:40.037Z", "log.level":"ERROR", "message":"fatal exception while booting Elasticsearch", "ecs.version": "1.2.0","service.name":"ES_ECS","event.dataset":"elasticsearch.server","process.thread.name":"main","log.logger":"org.elasticsearch.bootstrap.Elasticsearch","elasticsearch.node.name":"elasticsearch-cluster-es-node-1","elasticsearch.cluster.name":"elasticsearch-cluster","error.type":"java.lang.IllegalStateException","error.message":"failed to load plugin class [org.elasticsearch.repositories.gcs.GoogleCloudStoragePlugin]","error.stack_trace":"java.lang.IllegalStateException: failed to load plugin class [org.elasticsearch.repositories.gcs.GoogleCloudStoragePlugin]\n\tat org.elasticsearch.server#8.5.0/org.elasticsearch.plugins.PluginsService.loadPlugin(PluginsService.java:607)\n\tat org.elasticsearch.server#8.5.0/org.elasticsearch.plugins.PluginsService.loadBundle(PluginsService.java:482)\n\tat org.elasticsearch.server#8.5.0/org.elasticsearch.plugins.PluginsService.loadBundles(PluginsService.java:290)\n\tat org.elasticsearch.server#8.5.0/org.elasticsearch.plugins.PluginsService.<init>(PluginsService.java:159)\n\tat org.elasticsearch.server#8.5.0/org.elasticsearch.plugins.PluginsService.lambda$getPluginsServiceCtor$14(PluginsService.java:634)\n\tat org.elasticsearch.server#8.5.0/org.elasticsearch.node.Node.<init>(Node.java:406)\n\tat org.elasticsearch.server#8.5.0/org.elasticsearch.node.Node.<init>(Node.java:316)\n\tat org.elasticsearch.server#8.5.0/org.elasticsearch.bootstrap.Elasticsearch$2.<init>(Elasticsearch.java:214)\n\tat org.elasticsearch.server#8.5.0/org.elasticsearch.bootstrap.Elasticsearch.initPhase3(Elasticsearch.java:214)\n\tat org.elasticsearch.server#8.5.0/org.elasticsearch.bootstrap.Elasticsearch.main(Elasticsearch.java:67)\nCaused by: java.lang.reflect.InvocationTargetException\n\tat java.base/jdk.internal.reflect.DirectConstructorHandleAccessor.newInstance(DirectConstructorHandleAccessor.java:79)\n\tat java.base/java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:500)\n\tat java.base/java.lang.reflect.Constructor.newInstance(Constructor.java:484)\n\tat org.elasticsearch.server#8.5.0/org.elasticsearch.plugins.PluginsService.loadPlugin(PluginsService.java:600)\n\t... 9 more\nCaused by: java.lang.IllegalArgumentException: failed to load GCS client credentials from [gcs.client.default.credentials_file]\n\tat org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.loadCredential(GoogleCloudStorageClientSettings.java:265)\n\tat org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.getClientSettings(GoogleCloudStorageClientSettings.java:221)\n\tat org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.load(GoogleCloudStorageClientSettings.java:209)\n\tat org.elasticsearch.repositories.gcs.GoogleCloudStoragePlugin.reload(GoogleCloudStoragePlugin.java:88)\n\tat org.elasticsearch.repositories.gcs.GoogleCloudStoragePlugin.<init>(GoogleCloudStoragePlugin.java:36)\n\tat java.base/jdk.internal.reflect.DirectConstructorHandleAccessor.newInstance(DirectConstructorHandleAccessor.java:67)\n\t... 
12 more\nCaused by: java.io.IOException: Invalid PKCS#8 data.\n\tat com.google.auth.oauth2.ServiceAccountCredentials.privateKeyFromPkcs8(ServiceAccountCredentials.java:496)\n\tat com.google.auth.oauth2.ServiceAccountCredentials.fromPkcs8(ServiceAccountCredentials.java:474)\n\tat com.google.auth.oauth2.ServiceAccountCredentials.fromJson(ServiceAccountCredentials.java:212)\n\tat com.google.auth.oauth2.ServiceAccountCredentials.fromStream(ServiceAccountCredentials.java:548)\n\tat com.google.auth.oauth2.ServiceAccountCredentials.fromStream(ServiceAccountCredentials.java:520)\n\tat org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.lambda$loadCredential$13(GoogleCloudStorageClientSettings.java:257)\n\tat java.base/java.security.AccessController.doPrivileged(AccessController.java:569)\n\tat org.elasticsearch.repositories.gcs.SocketAccess.doPrivilegedIOException(SocketAccess.java:33)\n\tat org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.loadCredential(GoogleCloudStorageClientSettings.java:256)\n\t... 17 more\n"}
ERROR: Elasticsearch did not exit normally - check the logs at /usr/share/elasticsearch/logs/elasticsearch-cluster.log
Try adding the following lines to your configuration (on each Elasticsearch node):
elasticsearch01:
  image: docker.elastic.co/elasticsearch/elasticsearch:7.6.2
  ...
  ulimits:
    memlock:
      soft: -1
      hard: -1
Also check this link on Elasticsearch for more detailed information.
Here is the application.yml I am using for my Spring WebFlux project:
redis:
  redisson:
    config: |
      clusterServersConfig:
        idleConnectionTimeout: 10000
        connectTimeout: ${REDISSON_CONNECT_TIMEOUT:20000}
        timeout: ${REDISSON_TIMEOUT:3000}
        retryAttempts: ${REDISSON_RETRY_ATTEMPTS:3}
        retryInterval: ${REDISSON_RETRY_INTERVAL:1500}
        subscriptionConnectionPoolSize: ${REDISSON_SUBSCRIPTION_POOL_SIZE:50}
        slaveConnectionMinimumIdleSize: ${REDISSON_SLAVE_MIN_IDLE_SIZE:24}
        slaveConnectionPoolSize: ${REDISSON_SLAVE_POOL_SIZE:48}
        masterConnectionMinimumIdleSize: ${REDISSON_MASTER_MIN_IDLE_SIZE:24}
        masterConnectionPoolSize: ${REDISSON_MASTER_POOL_SIZE:48}
        nodeAddresses:
          - "rediss://${APPS_REDIS:-}:${APPS_REDIS_PORT:6379}"
        password: ${APPS_REDIS_SECRET:-}
      threads: ${REDISSON_THREADS:16}
      nettyThreads: ${REDISSON_NETTY_THREADS:96}
But whenever I start the project on my laptop, this error comes up:
Caused by: com.fasterxml.jackson.core.JsonParseException: Unrecognized token 'clusterServersConfig': was expecting (JSON String, Number, Array, Object or token 'null', 'true' or 'false')
I am not sure why it is saying clusterServersConfig is an unrecognized token. It is mentioned in the official doc as well, and here is an example of it.
At first I thought it might be because I am running Redis locally on my M1 Mac, so redis-clusters aren't generated by default. I even tried enabling clusters in redis.conf and running a Redis cluster with 3 nodes using redis-cli, but this still happens. I have tried almost everything I could think of or find on the net. Any help appreciated :)
I'm trying to simplify my consumer as much as possible. The problem is, when looking at the records coming into my Kafka listener:
List<GenericRecord> incomingRecords - the values are just string values. I've tried setting specific reader to true and false. I've set the value deserializer as well. Am I missing something? This worked fine when I used a Java configuration class, but I want to keep everything consolidated in this application.properties file.
application.properties
spring.kafka.properties.security.protocol=SASL_SSL
spring.kafka.properties.sasl.mechanism=SCRAM-SHA-256
spring.kafka.properties.sasl.jaas.config=org.apache.kafka.common.security.scram.ScramLoginModule required username="${SASL_ACCESS_KEY}" password="${SASL_SECRET}";
spring.kafka.consumer.auto-offset-reset=earliest
#### Consumer Properties Configuration
spring.kafka.properties.key.deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.properties.value.deserializer=io.confluent.kafka.serializers.KafkaAvroDeserializer
spring.kafka.properties.value.subject.name.strategy=io.confluent.kafka.serializers.subject.TopicRecordNameStrategy
spring.kafka.bootstrap-servers=
spring.kafka.properties.schema.registry.url=
spring.kafka.properties.specific.avro.reader=true
spring.kafka.consumer.properties.spring.json.trusted.packages=*
logging.level.org.apache.kafka=TRACE
logging.level.io.confluent.kafka.schemaregistry=TRACE
consumer
@KafkaListener(topics = "${topic}", groupId = "${group}")
public void processMessageBatch(List<GenericRecord> incomingRecords,
                                @Header(KafkaHeaders.RECEIVED_PARTITION_ID) List<Integer> partitions,
                                @Header(KafkaHeaders.RECEIVED_TOPIC) List<String> topics,
                                @Header(KafkaHeaders.OFFSET) List<Long> offsets) {
    // accumulate the incoming records into the current micro-batch
    currentMicroBatch = Stream.of(currentMicroBatch, incomingRecords).flatMap(List::stream).collect(Collectors.toList());
    if (currentMicroBatch.size() >= maxRecords || validatedElapsedDuration(durationMonitor)) {
        System.out.println("ETL processing logic will be done here");
    }
    clearBatch();
}
I notice when I use:
spring.kafka.consumer.value-deserializer=io.confluent.kafka.serializers.KafkaAvroDeserializer
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
I get the following error:
2020-12-02 17:04:42.745 DEBUG 51910 --- [ntainer#0-0-C-1] i.c.k.s.client.rest.RestService : Sending GET with input null to https://myschemaregistry.com
2020-12-02 17:04:42.852 ERROR 51910 --- [ntainer#0-0-C-1] o.s.kafka.listener.LoggingErrorHandler : Error while processing: null
org.apache.kafka.common.errors.SerializationException: Error deserializing key/value for partition my-topic-avro-32 at offset 7836. If needed, please seek past the record to continue consumption.
java.lang.IllegalArgumentException: argument "src" is null
at com.fasterxml.jackson.databind.ObjectMapper._assertNotNull(ObjectMapper.java:4735)
at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3502)
at io.confluent.kafka.schemaregistry.client.rest.RestService.sendHttpRequest(RestService.java:270)
at io.confluent.kafka.schemaregistry.client.rest.RestService.httpRequest(RestService.java:334)
at io.confluent.kafka.schemaregistry.client.rest.RestService.getId(RestService.java:573)
at io.confluent.kafka.schemaregistry.client.rest.RestService.getId(RestService.java:557)
at io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient.getSchemaByIdFromRegistry(CachedSchemaRegistryClient.java:149)
at io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient.getBySubjectAndId(CachedSchemaRegistryClient.java:230)
at io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient.getById(CachedSchemaRegistryClient.java:209)
at io.confluent.kafka.serializers.AbstractKafkaAvroDeserializer$DeserializationContext.schemaFromRegistry(AbstractKafkaAvroDeserializer.java:241)
at io.confluent.kafka.serializers.AbstractKafkaAvroDeserializer.deserialize(AbstractKafkaAvroDeserializer.java:102)
at io.confluent.kafka.serializers.AbstractKafkaAvroDeserializer.deserialize(AbstractKafkaAvroDeserializer.java:81)
at io.confluent.kafka.serializers.KafkaAvroDeserializer.deserialize(KafkaAvroDeserializer.java:55)
at org.apache.kafka.common.serialization.Deserializer.deserialize(Deserializer.java:60)
at org.apache.kafka.clients.consumer.internals.Fetcher.parseRecord(Fetcher.java:1268)
at org.apache.kafka.clients.consumer.internals.Fetcher.access$3600(Fetcher.java:124)
at org.apache.kafka.clients.consumer.internals.Fetcher$PartitionRecords.fetchRecords(Fetcher.java:1492)
at org.apache.kafka.clients.consumer.internals.Fetcher$PartitionRecords.access$1600(Fetcher.java:1332)
at org.apache.kafka.clients.consumer.internals.Fetcher.fetchRecords(Fetcher.java:645)
at org.apache.kafka.clients.consumer.internals.Fetcher.fetchedRecords(Fetcher.java:606)
at org.apache.kafka.clients.consumer.KafkaConsumer.pollForFetches(KafkaConsumer.java:1263)
at org.apache.kafka.clients.consumer.KafkaConsumer.poll(KafkaConsumer.java:1225)
at org.apache.kafka.clients.consumer.KafkaConsumer.poll(KafkaConsumer.java:1201)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doPoll(KafkaMessageListenerContainer.java:1062)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:1018)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:949)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:834)
I found the issue. Debugging deep into the REST client for Confluent, I was hit with a 401 (terrible logs, btw).
I needed to add this:
spring.kafka.properties.basic.auth.credentials.source=SASL_INHERIT
since I'm using SASL auth and needed the registry to inherit the SASL config I added above. Fun stuff...
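For reference, if the registry credentials ever differ from the broker's SASL credentials, the Confluent serializers also accept explicit ones (a sketch with placeholder values):

spring.kafka.properties.basic.auth.credentials.source=USER_INFO
spring.kafka.properties.basic.auth.user.info=registry-user:registry-password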
Had the same issue.
For me, I just needed to change the protocol of the schema-registry URL from http:// to https:// and it worked.
@Ryan's answer gave me a clue.
I am trying to implement a reactive application with Spring WebFlux and MongoDB.
I have the following configuration in the application.properties file:
spring.data.mongodb.database=my-db
spring.data.mongodb.uri=mongodb://user:pass@host:port/my-db
However, when I try save some document to the MongoDB I am getting the following error:
backend_1 | Caused by: com.mongodb.MongoCommandException: Command failed with error 13 (Unauthorized): 'not authorized on test to execute command { insert: "user", ordered: true, $db: "test" }' on server database:27017. The full response is { "ok" : 0.0, "errmsg" : "not authorized on test to execute command { insert: \"user\", ordered: true, $db: \"test\" }", "code" : 13, "codeName" : "Unauthorized" }
backend_1 | at com.mongodb.internal.connection.ProtocolHelper.getCommandFailureException(ProtocolHelper.java:179) ~[mongodb-driver-core-3.8.2.jar:na]
backend_1 | at com.mongodb.internal.connection.InternalStreamConnection$2$1.onResult(InternalStreamConnection.java:370) ~[mongodb-driver-core-3.8.2.jar:na]
I simply cannot understand why the driver does not respect the configured database name and tries to insert into the test database (and thus fails).
Am I missing something?
One more thing: I am running the Java backend and MongoDB in separate containers with Docker Compose.
I have found a solution for the authentication issue.
It was the authSource that I was missing, so I added it to the database URI and it worked:
spring.data.mongodb.uri=mongodb://user:pass@host:port/my-db?authSource=admin
However, I still do not understand why the documents that my Java backend creates are being inserted into the test database and not the database I configured it to work with.
Put this in the application.properties file:
# MongoDB
spring.data.mongodb.host=localhost
spring.data.mongodb.port=27017
spring.data.mongodb.database=my-db
spring.data.mongodb.username=username
spring.data.mongodb.password=password
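If the user was created in the admin database rather than in my-db, you will likely also need spring.data.mongodb.authentication-database=admin, as noted in the answers above.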
I have created a JHipster gateway and microservice project along with a UAA server. It works fine in my local environment. However, when I deploy it to a server and run it, I get an error similar to the one below when I try to log in from the server-deployed gateway project:
Not a valid Domain name
Error:
2018-01-25 12:02:42.242 DEBUG 10852 --- [ XNIO-2 task-6] w.i.s.o.OAuth2TokenEndpointClientAdapter : contacting OAuth2 token endpoint to login user: admin
2018-01-25 12:02:42.419 ERROR 10852 --- [ XNIO-2 task-6] c.w.i.s.o.OAuth2AuthenticationService : failed to get OAuth2 tokens from UAA
java.lang.IllegalArgumentException: Not a valid domain name: '192.168.0.202'
at com.google.common.base.Preconditions.checkArgument(Preconditions.java:210)
at com.google.common.net.InternetDomainName.<init>(InternetDomainName.java:155)
at com.google.common.net.InternetDomainName.from(InternetDomainName.java:216)
at com.wdsi.iloads.security.oauth2.OAuth2CookieHelper.getCookieDomain(OAuth2CookieHelper.java:296)
at com.wdsi.iloads.security.oauth2.OAuth2CookieHelper.createCookies(OAuth2CookieHelper.java:109)
at com.wdsi.iloads.security.oauth2.OAuth2AuthenticationService.authenticate(OAuth2AuthenticationService.java:70)
at com.wdsi.iloads.web.rest.AuthResource.authenticate(AuthResource.java:51)
at com.wdsi.iloads.web.rest.AuthResource$$FastClassBySpringCGLIB$$fdfdf7ca.invoke(<generated>)
at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:204)
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:738)
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:157)
at org.springframework.aop.aspectj.MethodInvocationProceedingJoinPoint.proceed(MethodInvocationProceedingJoinPoint.java:85)
at com.wdsi.iloads.aop.logging.LoggingAspect.logAround(LoggingAspect.java:85)
Any help is appreciated.
This was fixed recently in JHipster's generator by this pull request.
To fix it locally in your project, edit OAuth2CookieHelper.java (in the security/oauth2/ package of your Java folder). Add an extra check for IP addresses before parsing the address as a domain name.
// add this import
import com.google.common.net.InetAddresses;

// add this if-statement surrounding the domain name parsing,
// so it only runs when the host isn't an IP address
if (!InetAddresses.isInetAddress(domain)) {
    // strip off subdomains, leaving the top level domain only
    InternetDomainName domainName = InternetDomainName.from(domain);
    if (domainName.isUnderPublicSuffix() && !domainName.isTopPrivateDomain()) {
        // preserve leading dot
        return "." + domainName.topPrivateDomain().toString();
    }
}