Add Prometheus Metrics Endpoint to Java App Using JAX-RS

I’m trying to add a Prometheus metrics exporter to my Java app. The app is currently using javax.ws.rs to define REST endpoints.
For example:
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

@GET
@Path("/example")
@Timed
public Response example(@QueryParam("id") Integer id) {
    return Response.ok("testing").build();
}
All the examples I found for setting up Prometheus in Java are using Spring. They suggest the following:
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import io.prometheus.client.exporter.HTTPServer;
import java.io.IOException;

@SpringBootApplication
public class App {
    public static void main(String[] args) {
        SpringApplication.run(App.class, args);
        try {
            HTTPServer server = new HTTPServer(8081);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
Is there a way I can simply define a new endpoint in my current setup, for example:
@GET
@Path("/metrics")
@Timed
public Response example() {
    return Response.ok("return prom metrics here").build();
}
Without having to introduce Spring into the stack?

This can be done as follows:
import io.prometheus.client.Counter;
import io.prometheus.client.CollectorRegistry;
import io.prometheus.client.exporter.common.TextFormat;

import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;

// Fields on your JAX-RS resource class
CollectorRegistry registry = new CollectorRegistry();
Counter exCounter = Counter.build()
        .name("example")
        .help("Example counter") // help() is required by the client library before register()
        .register(registry);

@GET
@Path("/metrics")
public String getMetrics() {
    Writer writer = new StringWriter();
    try {
        TextFormat.write004(writer, registry.metricFamilySamples());
        return writer.toString();
    } catch (IOException e) {
        return "error";
    }
}
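If you also want metrics registered elsewhere in the app (via register() without an explicit registry) to show up on the same endpoint, you can expose CollectorRegistry.defaultRegistry instead. A minimal, self-contained sketch; the class name and metric name are made up for illustration:

import io.prometheus.client.CollectorRegistry;
import io.prometheus.client.Counter;
import io.prometheus.client.exporter.common.TextFormat;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Response;

import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;

@Path("/")
public class MetricsResource {

    // register() with no argument registers against CollectorRegistry.defaultRegistry
    private static final Counter requests = Counter.build()
            .name("example_requests_total")
            .help("Total calls to the example endpoint")
            .register();

    @GET
    @Path("/example")
    public Response example() {
        requests.inc(); // instrument the business endpoint
        return Response.ok("testing").build();
    }

    @GET
    @Path("/metrics")
    @Produces(TextFormat.CONTENT_TYPE_004) // "text/plain; version=0.0.4; charset=utf-8"
    public Response metrics() throws IOException {
        Writer writer = new StringWriter();
        TextFormat.write004(writer, CollectorRegistry.defaultRegistry.metricFamilySamples());
        return Response.ok(writer.toString()).build();
    }
}

TextFormat here comes from the simpleclient_common artifact, so no Spring dependency is needed.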

Related

Springboot - Save entity in 'normal' class

I'm pretty new to Spring Boot and Java in general, and because we got this in school I'm fiddling around.
I'm now trying to save an entity outside of the Spring Boot entities, repositories, or RestController classes with the following code:
InfMApplication.java:
package com.domain.springboot;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import com.domain.springboot.repositories.MovieRepository;
import com.domain.springboot.services.MovieImport;
@SpringBootApplication
public class InfMApplication {
public static void main(String[] args) {
SpringApplication.run(InfMApplication.class, args);
MovieImport movieImport = new MovieImport();
movieImport.saveToDb();
}
}
MovieImport.java:
package com.domain.springboot.services;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpRequest.BodyPublishers;
import java.net.http.HttpResponse;
import java.net.http.HttpResponse.BodyHandlers;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.bind.annotation.CrossOrigin;
import java.io.*;
import java.net.URL;
import com.google.gson.Gson;
import com.domain.omdbapi.entities.Movie;
import com.domain.omdbapi.entities.SearchResponse;
import com.domain.omdbapi.entities.SearchResult;
import com.domain.springboot.repositories.ComplexRepository;
import com.domain.springboot.repositories.DocumentRepository;
import com.domain.springboot.repositories.MovieRepository;
import com.domain.springboot.repositories.SimpleRepository;
@Service
public class MovieImport {
private final MovieRepository movieRepository;
public MovieImport(MovieRepository movieRepository){
this.movieRepository = movieRepository;
}
public void main() {
String randomImdbId = fetchRandomMovie();
Movie movie = fetchMovieDetails(randomImdbId);
saveToDb(movie);
}
public void saveToDb(Movie movie) {
com.domain.springboot.entities.Movie springbootMovie = new com.domain.springboot.entities.Movie(movie.Title, movie.imdbID);
this.movieRepository.save(springbootMovie);
}
public String fetchRandomMovie() {
String randomWord = getRandomWord();
String url = "https://www.omdbapi.com/?apikey=<API_KEY>&type=movie&s=" + randomWord;
HttpClient client = HttpClient.newHttpClient();
HttpRequest request = HttpRequest.newBuilder(
URI.create(url))
.header("accept", "application/json")
.build();
HttpResponse<String> response = null;
try {
response = client.send(request, BodyHandlers.ofString());
} catch (Exception e) {
System.out.println(e);
}
Gson gson = new Gson();
SearchResponse searchResponse = gson.fromJson(response.body(), SearchResponse.class);
int randomIndex = new Random().nextInt(0, searchResponse.getSearch().length);
SearchResult randomResult = searchResponse.getSearch()[randomIndex];
return randomResult.getImdbID();
}
public Movie fetchMovieDetails(String imdbId) {
String url = "https://www.omdbapi.com/?apikey=<API_KEY>&type=movie&plot=full&i=" + imdbId;
HttpClient client = HttpClient.newHttpClient();
HttpRequest request = HttpRequest.newBuilder(
URI.create(url))
.header("accept", "application/json")
.build();
HttpResponse<String> response = null;
try {
response = client.send(request, BodyHandlers.ofString());
} catch (Exception e) {
System.out.println(e);
}
Gson gson = new Gson();
Movie movie = gson.fromJson(response.body(), Movie.class);
return movie;
}
public String getRandomWord() {
URL resource = getClass().getClassLoader().getResource("Wordlist.txt");
List<String> words = new ArrayList<>();
try {
File file = new File(resource.toURI());
words = Files.readAllLines(file.toPath(), StandardCharsets.UTF_8);
} catch (Exception e) {
e.printStackTrace();
}
int randomIndex = new Random().nextInt(0, words.size());
return words.get(randomIndex);
}
}
If I use "this.movieRepository.save(movieObject);" to save a movie in the MovieRestController the same way, it works. I also tried adding the "@Autowired" annotation, but this didn't work.
I always get the error
java.lang.NullPointerException: Cannot invoke "com.domain.springboot.repositories.MovieRepository.save(Object)" because "this.movieRepository" is null
How can I use the movieRepository in other Java classes, like in the RestControllers?
java.lang.NullPointerException: Cannot invoke
"com.domain.springboot.repositories.MovieRepository.save(Object)"
because "this.movieRepository" is null
The above error is expected if we look at the code you shared:
public class MovieImport {
private MovieRepository movieRepository;
public void saveToDb() {
// Create movie
com.domain.springboot.entities.Movie springbootMovie = new com.domain.springboot.entities.Movie("Iron Man", "284cb8fgf");
this.movieRepository.save(springbootMovie);
}
}
You have to correct a few things in your code base.
First, you're not initializing movieRepository, which is why you're getting the NullPointerException. Since you're using Spring Boot, you can use constructor injection and let the Spring container initialize the field. This class also has to be scanned by Spring, so annotate it with @Component or @Service.
The following will work, provided your MovieImport and MovieRepository classes are scanned by Spring Boot.
package com.domain;
import com.domain.omdbapi.entities.Movie;
import com.domain.springboot.repositories.MovieRepository;
@Service
public class MovieImport {
private final MovieRepository movieRepository;
public MovieImport(MovieRepository movieRepository){
this.movieRepository = movieRepository;
}
public void saveToDb() {
// Create movie
com.domain.springboot.entities.Movie springbootMovie = new com.domain.springboot.entities.Movie("Iron Man", "284cb8fgf");
this.movieRepository.save(springbootMovie);
}
}
Updated
@SpringBootApplication
public class InfMApplication implements CommandLineRunner {
@Autowired
private MovieImport movieImport;
public static void main(String[] args) {
SpringApplication.run(InfMApplication.class, args);
}
@Override
public void run(String... args) throws Exception {
movieImport.saveToDb();
}
}
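For context on the CommandLineRunner change: in the original main method, new MovieImport() creates the object outside the Spring container, so nothing is injected into it. If you prefer not to implement CommandLineRunner, a rough alternative sketch (same class names as above; import MovieImport from wherever it lives in your project) is to pull the managed bean out of the application context:

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.ConfigurableApplicationContext;

@SpringBootApplication
public class InfMApplication {
    public static void main(String[] args) {
        // Keep the context returned by run() instead of discarding it
        ConfigurableApplicationContext ctx = SpringApplication.run(InfMApplication.class, args);
        // Ask Spring for the managed MovieImport bean; its MovieRepository is already injected
        MovieImport movieImport = ctx.getBean(MovieImport.class);
        movieImport.saveToDb();
    }
}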

Spring Cloud Stream Kafka Binder Test Custom Headers

I'm trying to figure out how to include custom headers in the Spring Message<?> used in Spring Cloud Stream with the Kafka binder. My goal is to include some custom header data that would be added in one producer (function) class, passed to Kafka, and then consumed by another class in a different service (with the custom header data).
I feel like I am missing something, as I can get it to work using the TestChannelBinder, e.g.:
import lombok.extern.slf4j.Slf4j;
import org.springframework.messaging.Message;
import org.springframework.stereotype.Component;
import java.util.function.Function;

@Component
@Slf4j
public class BaseStream implements Function<Message<String>, String> {
    @Override
    public String apply(Message<String> transactionMessage) {
        log.debug("Converted Message: {} ", transactionMessage);
        return transactionMessage.getPayload();
    }
}
Test class with Test Binder:
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.stream.binder.test.InputDestination;
import org.springframework.cloud.stream.binder.test.OutputDestination;
import org.springframework.cloud.stream.binder.test.TestChannelBinderConfiguration;
import org.springframework.context.annotation.Import;
import org.springframework.integration.support.MessageBuilder;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.context.TestPropertySource;
@SpringBootTest
@TestPropertySource("classpath:testStream.properties")
@Import(TestChannelBinderConfiguration.class)
public class TestForStream {

    @Autowired
    InputDestination inputDestination;
    @Autowired
    OutputDestination outputDestination;

    @Test
    void contextLoads() {
        inputDestination.send(MessageBuilder
                .withPayload("Test Payload")
                .setHeader("customHeader", "headerSpecificData")
                .build());
    }
}
testStream.properties
spring.cloud.function.definition=baseStream
spring.cloud.stream.bindings.baseStream-in-0.destination=test-in
spring.cloud.stream.bindings.baseStream-out-0.destination=test-out
spring.cloud.stream.bindings.baseStream-in-0.group=test-group-base
Log when running:
Converted Message: GenericMessage [payload=Test Payload, headers={id=5c6d1082-c084-0b25-4afc-b5d97bf537f9, customHeader=headerSpecificData, contentType=application/json, timestamp=1639398696800, target-protocol=kafka}]
Which is what I am looking to do. But when I try to test it with the Kafka binder, it seems to include the Message<String> object in the payload as a JSON string, which I thought would be parsed into the requested input of the function BaseStream.
Just wondering if someone could see where I'm going wrong with my testing, as I have tried various things to get this to work, and seeing as it works with the test binder I would assume it works for the Kafka binder.
Test Class for Kafka Binder Test:
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.integration.support.MessageBuilder;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.support.serializer.JsonSerializer;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.test.context.TestPropertySource;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
@EmbeddedKafka(partitions = 1, brokerProperties = { "listeners=PLAINTEXT://localhost:9092", "port=9092"})
@SpringBootTest
@TestPropertySource("classpath:testStream.properties")
public class TestForStream {
public static CountDownLatch latch = new CountDownLatch(1);
@Autowired
public EmbeddedKafkaBroker broker;
@Test
void contextLoads() {
sleep(5); // included this as it takes some time to init
sendMessage("test-in", MessageBuilder
.withPayload("Test Payload")
.setHeader("customHeader", "headerSpecificData")
.build());
}
public <T> ProducerFactory<String, T> createProducerFactory() {
Map<String, Object> configs = new HashMap<>(KafkaTestUtils.producerProps(broker));
configs.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
configs.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
//Is JsonSerializer correct for a message?
return new DefaultKafkaProducerFactory<>(configs);
}
public <T> void sendMessage(String topic, T listObj) {
try {
KafkaTemplate<String, T> kafkaTemplate = new KafkaTemplate<>(createProducerFactory());
kafkaTemplate.send(new ProducerRecord<>(topic, listObj));
}catch (Exception e){
e.printStackTrace();
}
}
public void sleep(long time){
try {
latch.await(time, TimeUnit.SECONDS);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
Log of kafka binder test for message:
Converted Message: GenericMessage [payload={"payload":"Test Payload","headers":{"customHeader":"headerSpecificData","id":"d540a3ca-28db-b137-fc86-c25cc4b7eb8b","timestamp":1639399810476}}, headers={deliveryAttempt=1, kafka_timestampType=CREATE_TIME, kafka_receivedTopic=test-in, target-protocol=kafka, kafka_offset=0, scst_nativeHeadersPresent=true, kafka_consumer=org.apache.kafka.clients.consumer.KafkaConsumer#79580279, id=1cf2d382-df29-2672-4180-07da99e58244, kafka_receivedPartitionId=0, kafka_receivedTimestamp=1639399810526, contentType=application/json, __TypeId__=[B#24c79350, kafka_groupId=test-group-base, timestamp=1639399810651}]
So here the message has been included in the payload and the kafka headers included in the headers as expected.
I have tried spring.cloud.stream.kafka.binder.headers and headerMode to see if they would change anything but to no avail.
Edit:
Using springCloudVersion = 2020.0.3
I was using:
public <T> void sendMessage(String topic, T listObj) {
try {
KafkaTemplate<String, T> kafkaTemplate = new KafkaTemplate<>(createProducerFactory());
kafkaTemplate.send(new ProducerRecord<>(topic, listObj));
}catch (Exception e){
e.printStackTrace();
}
}
to send the message, which was putting the whole Message object in as the record value.
What I should've been using:
public void sendMessage(String topic, Message<?> listObj) {
try {
KafkaTemplate<String, Message<?>> kafkaTemplate = new KafkaTemplate<>(createProducerFactory());
kafkaTemplate.setDefaultTopic(topic);
kafkaTemplate.send(listObj);
}catch (Exception e){
e.printStackTrace();
}
}
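For completeness, a small sketch of reading the custom header on the consuming side, assuming the same BaseStream function as above:

import lombok.extern.slf4j.Slf4j;
import org.springframework.messaging.Message;
import org.springframework.stereotype.Component;
import java.util.function.Function;

@Component
@Slf4j
public class BaseStream implements Function<Message<String>, String> {
    @Override
    public String apply(Message<String> transactionMessage) {
        // The producer-set header arrives alongside the Kafka headers
        Object customHeader = transactionMessage.getHeaders().get("customHeader");
        log.debug("customHeader: {}", customHeader);
        return transactionMessage.getPayload();
    }
}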

Cannot read email body with Spring: javax.mail.FolderClosedException

I'm trying to listen to my Gmail inbox for incoming mail. Every time a new mail arrives, I want to see its subject and content.
So far, I have this:
import java.io.IOException;
import javax.mail.BodyPart;
import javax.mail.Folder;
import javax.mail.internet.ContentType;
import javax.mail.internet.MimeBodyPart;
import javax.mail.internet.MimeMessage;
import javax.mail.internet.MimeMultipart;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.mail.util.MimeMessageParser;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.mail.transformer.MailToStringTransformer;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageHandler;
import org.springframework.messaging.MessagingException;
public class GmailInboundImapIdleAdapterTestApp {
private static Log logger = LogFactory.getLog(GmailInboundImapIdleAdapterTestApp.class);
public static void main (String[] args) throws Exception {
@SuppressWarnings("resource")
ClassPathXmlApplicationContext ac = new ClassPathXmlApplicationContext("/META-INF/spring/integration/gmail-imap-idle-config.xml");
DirectChannel inputChannel = ac.getBean("receiveChannel", DirectChannel.class);
inputChannel.subscribe(new MessageHandler() {
public void handleMessage(Message<?> message){
MimeMessage mm = (MimeMessage) message.getPayload();
try {
System.out.println("Subject: "+mm.getSubject());
System.out.println("Body: "+readPlainContent(mm));
}
catch (javax.mail.MessagingException e) {
System.out.println("MessagingException: "+e.getMessage());
e.printStackTrace();
}
catch (Exception e) {
System.out.println("Exception: "+e.getMessage());
e.printStackTrace();
}
}
});
}
private static String readHtmlContent(MimeMessage message) throws Exception {
return new MimeMessageParser(message).parse().getHtmlContent();
}
private static String readPlainContent(MimeMessage message) throws Exception {
return new MimeMessageParser(message).parse().getPlainContent();
}
}
It can read the mail subject correctly, but no luck with the mail body: javax.mail.FolderClosedException hit me. How do I fix this?
As Gary said: simple-content="true", or, more recently, autoCloseFolder = false: https://docs.spring.io/spring-integration/docs/5.2.0.RELEASE/reference/html/mail.html#mail-inbound
Starting with version 5.2, the autoCloseFolder option is provided on the mail receiver. Setting it to false doesn't close the folder automatically after a fetch; instead, an IntegrationMessageHeaderAccessor.CLOSEABLE_RESOURCE header (see the MessageHeaderAccessor API for more information) is populated into every message produced from the channel adapter. It is the target application's responsibility to call close() on this header whenever it is necessary in the downstream flow:
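A rough sketch of what that looks like in a downstream handler, assuming autoCloseFolder is set to false on the inbound adapter:

import org.springframework.integration.IntegrationMessageHeaderAccessor;
import org.springframework.messaging.Message;

import java.io.Closeable;
import java.io.IOException;

public void handleMessage(Message<?> message) throws IOException {
    try {
        // read the subject/body here while the IMAP folder is still open
    } finally {
        // close the folder once we are done with the MimeMessage content
        Closeable closeable = message.getHeaders()
                .get(IntegrationMessageHeaderAccessor.CLOSEABLE_RESOURCE, Closeable.class);
        if (closeable != null) {
            closeable.close();
        }
    }
}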

How to create Couchbase bucket via Java API?

I am using Spring Data Couchbase.
package com.CouchbaseMine.config;
import java.io.IOException;
import java.net.URI;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.couchbase.config.AbstractCouchbaseConfiguration;
import com.couchbase.client.CouchbaseClient;
@Configuration
@EnableAutoConfiguration
public class CouchbaseMineCouchBaseConfig extends AbstractCouchbaseConfiguration {
@Value("${couchbase.cluster.bucket}")
private String bucketName;
@Value("${couchbase.cluster.password}")
private String password;
@Value("${couchbase.cluster.ip}")
private String ip;
@Override
protected String getBucketName() {
List<URI> uris=new LinkedList<URI>();
uris.add(URI.create("5x.xx.xxx.xx9"));
CouchbaseClient client=null;
try {
System.err.println("-- > - > i am in ");
client=new CouchbaseClient(uris,"default","");
} catch (IOException e) {
System.err.println("IOException connetion to couchbase:"+e.getMessage() );
System.exit(1);
}
return this.bucketName;
}
@Override
protected String getBucketPassword() {
return this.password;
}
@Override
protected List<String> bootstrapHosts() {
// TODO Auto-generated method stub
//return Collections.singletonList("54.89.127.249");
return Arrays.asList(this.ip);
}
}
This is the configuration class used to establish the connection.
The application properties file follows:
server.port=3000
couchbase.cluster.ip=5x.xx.xxx.xx9
couchbase.cluster.bucket=DHxxxar
couchbase.cluster.password=1221
Bottom line: I have created the bucket (Dhxxxar) manually in Couchbase, but I need the bucket (database) to be created automatically when I run my Spring Boot application.
Any suggestions regarding this would be appreciated. Thanks in advance.
Try this:
Cluster cluster = CouchbaseCluster.create("127.0.0.1");
ClusterManager clusterManager = cluster.clusterManager("Administrator", "12345");
BucketSettings bucketSettings = new DefaultBucketSettings.Builder()
.type(BucketType.COUCHBASE)
.name("hello")
.quota(120)
.build();
clusterManager.insertBucket(bucketSettings);
More details:
https://developer.couchbase.com/documentation/server/current/sdk/java/managing-clusters.html
IgorekPotworek's answer is great for Couchbase Java SDK version 2.x.
For version 3.x, the code looks a little different:
Cluster cluster = Cluster.connect("localhost", "Administrator", "password");
BucketManager bucketManager = cluster.buckets();
bucketManager.createBucket(
BucketSettings.create("bucketName")
.ramQuotaMB(100));
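If the application may start against a cluster where the bucket already exists, it can help to guard the creation. A rough sketch against the 3.x API (bucket name, quota, and credentials are placeholders):

Cluster cluster = Cluster.connect("localhost", "Administrator", "password");
BucketManager bucketManager = cluster.buckets();

// Only create the bucket when it is not already present
if (!bucketManager.getAllBuckets().containsKey("bucketName")) {
    bucketManager.createBucket(
            BucketSettings.create("bucketName").ramQuotaMB(100));
}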

Server sent event client is not working in Java project

I am trying to implement an SSE client in Java from this tutorial.
It works fine when implemented as a servlet client using the post method, but it is not working when I implement the same thing in a plain Java project using a main method, with the same jar files as in the servlet. Here is the code I am using, along with the target URI:
import javax.ws.rs.Consumes;
import javax.ws.rs.ProcessingException;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import org.glassfish.jersey.media.sse.EventListener;
import org.glassfish.jersey.media.sse.EventSource;
import org.glassfish.jersey.media.sse.InboundEvent;
import org.glassfish.jersey.media.sse.SseFeature;
public class SSEreceive {
public static void main(String[] args) {
// TODO Auto-generated method stub
try {
Client client = ClientBuilder.newBuilder().register(SseFeature.class).build();
WebTarget target = ((Client)client).target("http://www.w3schools.com/html/demo_sse.php");
EventSource eventSource = (EventSource)EventSource.target(target).build();
EventListener listener = new EventListener() {
@Override
//@Consumes(MediaType.APPLICATION_JSON)
public void onEvent(InboundEvent inboundEvent) {
// System.out.println(inboundEvent.getName() + "; " + inboundEvent.readData(String.class));
System.out.println(inboundEvent.readData(String.class));
}
};
//eventSource.register(listener, "message-to-client");
eventSource.register(listener);
eventSource.open();
System.out.println("Connection tried");
eventSource.close();
} catch (ProcessingException pe) {
pe.printStackTrace();
System.out.println(pe.getMessage());
} catch (Exception e) {
e.printStackTrace();
System.out.println(e.getMessage());
}
}
}
Can someone please help me understand why this is not working in a plain Java project?
