How to mock S3AsyncClient in Junit - java

I have one method that uploads files to Amazon S3. I am trying to write JUnit for this method but get NullPointerException on the S3AsyncClient:
my class:
public class S3Client<T> {
private static final Logger log = LoggerFactory.getLogger(S3Client.class);
S3AsyncClient client;
/**
* Creates a wrapper around the asynchronous S3 client supplied by the
* application configuration. No AWS call is made here; the client is assumed
* to be fully built already.
*
* @param s3Configuration configuration that provides the pre-built async client
*/
public S3Client(AWSS3Configuration s3Configuration) {
this.client = s3Configuration.getAsyncClient();
}
/**
 * Uploads the given content to an S3 bucket and returns a future that
 * completes with the object's ETag.
 *
 * @param uploadData bucket, key, metadata and content (String or byte[])
 * @return future completing with the ETag reported by S3, or completing
 *         exceptionally with {@link S3Exception} on failure
 * @throws IllegalArgumentException if the content is neither String nor byte[]
 * @throws S3Exception if the upload fails (delivered through the future)
 */
public CompletableFuture<String> uploadFile(S3UploadData<T> uploadData) throws S3Exception {
    int contentLength;
    AsyncRequestBody asyncRequestBody;
    if (uploadData.getContent() instanceof String) {
        String content = (String) uploadData.getContent();
        // FIX: AsyncRequestBody.fromString() encodes as UTF-8, so Content-Length
        // must be the encoded byte count, not the character count (they differ
        // for any non-ASCII content).
        contentLength = content.getBytes(java.nio.charset.StandardCharsets.UTF_8).length;
        asyncRequestBody = AsyncRequestBody.fromString(content);
    } else if (uploadData.getContent() instanceof byte[]) {
        byte[] bytes = (byte[]) uploadData.getContent();
        contentLength = bytes.length;
        asyncRequestBody = AsyncRequestBody.fromBytes(bytes);
    } else {
        throw new IllegalArgumentException("Unsupported upload content type");
    }
    PutObjectRequest putObjRequest = PutObjectRequest.builder()
            .bucket(uploadData.getBucketName())
            .key(uploadData.getFileName())
            .metadata(uploadData.getMetaData())
            .contentLength((long) contentLength)
            .build();
    // FIX: the original called response.exceptionally(...) and discarded the
    // resulting future, so the returned future never carried the translated
    // S3Exception. Chain the error-mapping stage into the returned future.
    return client.putObject(putObjRequest, asyncRequestBody)
            .thenApply(putObjectResponse -> {
                log.info("Got response from S3 upload={}", putObjectResponse.eTag());
                return putObjectResponse.eTag();
            })
            .exceptionally(throwable -> {
                log.error("Exception occurred while uploading a file intuit_tid={} file={}",
                        uploadData.getTransactionId(), uploadData.getFileName());
                throw new S3Exception(throwable.getMessage());
            });
}
input for this class object of S3UploadData:
`
#Getter
#AllArgsConstructor
public class InputData<T> {
T content;
String fileName;
String bucketName;
String transactionId;
Map<String, String> metaData;
}`
Can you help me write a JUnit test for the uploadFile method?

You have not shown any JUnit test code. Your test should use the org.junit.jupiter.api.* API.
Instead of using a mock, call the actual S3 async code in a @TestInstance integration test to make sure it works. For example, here is my test in IntelliJ.
As you can see, my test passed and I know my code works -- which is the point of this AWS integration test.
If my code failed or threw an exception for some reason, my test would fail. For example, if I passed a bucket name that does not exist, I would get:
Here is my Java Amazon S3 Async code:
package com.example.s3.async;
import software.amazon.awssdk.core.async.AsyncRequestBody;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3AsyncClient;
import software.amazon.awssdk.services.s3.model.PutObjectRequest;
import software.amazon.awssdk.services.s3.model.PutObjectResponse;
import java.nio.file.Paths;
import java.util.concurrent.CompletableFuture;
// snippet-end:[s3.java2.async_ops.import]
// snippet-start:[s3.java2.async_ops.main]
/**
* To run this AWS code example, ensure that you have setup your development environment, including your AWS credentials.
*
* For information, see this documentation topic:
*
* https://docs.aws.amazon.com/sdk-for-java/latest/developer-guide/get-started.html
*/
public class S3AsyncOps {

    public static void main(String[] args) {
        final String USAGE = "\n" +
                "Usage:\n" +
                " S3AsyncOps <bucketName> <key> <path>\n\n" +
                "Where:\n" +
                " bucketName - the name of the Amazon S3 bucket (for example, bucket1). \n\n" +
                " key - the name of the object (for example, book.pdf). \n" +
                " path - the local path to the file (for example, C:/AWS/book.pdf). \n";

        if (args.length != 3) {
            System.out.println(USAGE);
            System.exit(1);
        }

        String bucketName = args[0];
        String key = args[1];
        String path = args[2];
        Region region = Region.US_WEST_2;
        S3AsyncClient client = S3AsyncClient.builder()
                .region(region)
                .build();
        try {
            putObjectAsync(client, bucketName, key, path);
        } finally {
            // FIX: the client is created here, so it is closed here. The
            // original closed it inside putObjectAsync, which broke any caller
            // (e.g. the JUnit test below) that reuses one client across calls.
            client.close();
        }
    }

    /**
     * Uploads the file at {@code path} to {@code bucketName}/{@code key} and
     * blocks until the upload completes. The client is NOT closed here; the
     * caller owns it and may keep using it.
     */
    public static void putObjectAsync(S3AsyncClient client, String bucketName, String key, String path) {
        PutObjectRequest objectRequest = PutObjectRequest.builder()
                .bucket(bucketName)
                .key(key)
                .build();
        // Put the object into the bucket.
        CompletableFuture<PutObjectResponse> future = client.putObject(objectRequest,
                AsyncRequestBody.fromFile(Paths.get(path)));
        future.whenComplete((resp, err) -> {
            if (resp != null) {
                System.out.println("Object uploaded. Details: " + resp);
            } else {
                // Handle error
                err.printStackTrace();
            }
        });
        // Block so the upload finishes before this method returns.
        future.join();
    }
}
Now, for my test, I want to call this code, not mock it. I have set up my test in IntelliJ like this:
import org.junit.jupiter.api.*;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import software.amazon.awssdk.regions.Region;
import java.io.*;
import java.util.*;
import com.example.s3.async.*;
import software.amazon.awssdk.services.s3.S3AsyncClient;
#TestInstance(TestInstance.Lifecycle.PER_METHOD)
#TestMethodOrder(MethodOrderer.OrderAnnotation.class)
public class AmazonS3AsyncTest {
private static S3AsyncClient s3AsyncClient;
// Define the data members required for the tests
private static String bucketName = "";
private static String objectKey = "";
private static String objectPath = "";
private static String toBucket = "";
#BeforeAll
public static void setUp() throws IOException {
// Run tests on Real AWS Resources
s3AsyncClient = S3AsyncClient.builder()
.region(Region.US_EAST_1)
.build();
try (InputStream input = AmazonS3Test.class.getClassLoader().getResourceAsStream("config.properties")) {
Properties prop = new Properties();
if (input == null) {
System.out.println("Sorry, unable to find config.properties");
return;
}
//load a properties file from class path, inside static method
prop.load(input);
// Populate the data members required for all tests
bucketName = prop.getProperty("bucketName");
objectKey = prop.getProperty("objectKey");
objectPath= prop.getProperty("objectPath");
toBucket = prop.getProperty("toBucket");
} catch (IOException ex) {
ex.printStackTrace();
}
}
#Test
#Order(1)
public void whenInitializingAWSS3Service_thenNotNull() {
assertNotNull(s3AsyncClient);
System.out.println("Test 1 passed");
}
#Test
#Order(2)
public void putObject() {
S3AsyncOps.putObjectAsync(s3AsyncClient, bucketName, objectKey, objectPath);
System.out.println("Test 2 passed");
}
}

You could use Mockito to mock the S3AsyncClient operations.
@Mock
private S3AsyncClient s3AsyncClient;
Below is the unit test for my upload-file implementation. It should give you insight into how this can be done.
#Nested
class UploadFile {
#Captor
ArgumentCaptor<PutObjectRequest> putObjectRequestCaptor;
#Captor
ArgumentCaptor<AsyncRequestBody> requestBodyCaptor;
#Test
void testSuccessfulUpload() {
Flux<ByteBuffer> body = Flux.just();
var expectedResponse = PutObjectResponse.builder().build();
when(s3AsyncClient.putObject(putObjectRequestCaptor.capture(), requestBodyCaptor.capture())) .thenReturn(CompletableFuture.completedFuture(expectedResponse));
fileUploadService.upload("TEST_PREFIX", "test.zip", body);
assertThat(putObjectRequestCaptor.getValue().bucket()).isEqualTo(TEST_BUCKET);
assertThat(putObjectRequestCaptor.getValue().key()).isEqualTo("TEST_PREFIX/test.zip");
assertThat(requestBodyCaptor.getValue()).isNotNull();
}
}

Related

Why is AmazonS3ClientBuilder.standard().withRegion(Regions.DEFAULT_REGION).build() not responding?

AmazonS3ClientBuilder.standard().withRegion(Regions.DEFAULT_REGION).build() is waiting something?
I am using Amazon S3 and put many files to S3 every day by using this code.
AmazonS3 s3 = null;
s3 = AmazonS3ClientBuilder.standard().withRegion(Regions.DEFAULT_REGION).build();
try {
s3.putObject(bucket_name, key_name, new File(file_path));
} catch (AmazonServiceException e) {
System.err.println(e.getErrorMessage());
System.exit(1);
}
At one time, it works fine.
But AmazonS3ClientBuilder.standard().withRegion(Regions.DEFAULT_REGION).build()
is not responding sometimes suddenly.
If I wait for a day or so, it works again.
This is a stack trace when
AmazonS3ClientBuilder.standard().withRegion(Regions.DEFAULT_REGION).build()
is not responding.
Do you have any idea?
java.lang.Thread.State: RUNNABLE
at java.lang.ClassLoader$NativeLibrary.load0(java.base#11.0.12/Native Method)
at java.lang.ClassLoader$NativeLibrary.load(java.base#11.0.12/ClassLoader.java:2442)
at java.lang.ClassLoader$NativeLibrary.loadLibrary(java.base#11.0.12/ClassLoader.java:2498)
- locked <0x000000070c9cf5c8> (a java.util.HashSet)
at java.lang.ClassLoader.loadLibrary0(java.base#11.0.12/ClassLoader.java:2694)
at java.lang.ClassLoader.loadLibrary(java.base#11.0.12/ClassLoader.java:2648)
at java.lang.Runtime.loadLibrary0(java.base#11.0.12/Runtime.java:830)
at java.lang.System.loadLibrary(java.base#11.0.12/System.java:1873)
at sun.security.ec.SunEC$1.run(jdk.crypto.ec#11.0.12/SunEC.java:63)
at sun.security.ec.SunEC$1.run(jdk.crypto.ec#11.0.12/SunEC.java:61)
at java.security.AccessController.doPrivileged(java.base#11.0.12/Native Method)
at sun.security.ec.SunEC.<clinit>(jdk.crypto.ec#11.0.12/SunEC.java:61)
at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(java.base#11.0.12/Native Method)
at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(java.base#11.0.12/NativeConstructorAccessorImpl.java:62)
at jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(java.base#11.0.12/DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(java.base#11.0.12/Constructor.java:490)
at java.util.ServiceLoader$ProviderImpl.newInstance(java.base#11.0.12/ServiceLoader.java:780)
at java.util.ServiceLoader$ProviderImpl.get(java.base#11.0.12/ServiceLoader.java:722)
at java.util.ServiceLoader$3.next(java.base#11.0.12/ServiceLoader.java:1395)
at sun.security.jca.ProviderConfig$ProviderLoader.load(java.base#11.0.12/ProviderConfig.java:340)
at sun.security.jca.ProviderConfig$3.run(java.base#11.0.12/ProviderConfig.java:248)
at sun.security.jca.ProviderConfig$3.run(java.base#11.0.12/ProviderConfig.java:242)
at java.security.AccessController.doPrivileged(java.base#11.0.12/Native Method)
at sun.security.jca.ProviderConfig.doLoadProvider(java.base#11.0.12/ProviderConfig.java:242)
at sun.security.jca.ProviderConfig.getProvider(java.base#11.0.12/ProviderConfig.java:222)
- locked <0x000000070cde52d0> (a sun.security.jca.ProviderConfig)
at sun.security.jca.ProviderList.getProvider(java.base#11.0.12/ProviderList.java:266)
at sun.security.jca.ProviderList.getService(java.base#11.0.12/ProviderList.java:379)
at sun.security.jca.GetInstance.getInstance(java.base#11.0.12/GetInstance.java:157)
at javax.net.ssl.SSLContext.getInstance(java.base#11.0.12/SSLContext.java:168)
at com.amazonaws.internal.SdkSSLContext.getPreferredSSLContext(SdkSSLContext.java:32)
at com.amazonaws.http.apache.client.impl.ApacheConnectionManagerFactory.getPreferredSocketFactory(ApacheConnectionManagerFactory.java:91)
at com.amazonaws.http.apache.client.impl.ApacheConnectionManagerFactory.create(ApacheConnectionManagerFactory.java:65)
at com.amazonaws.http.apache.client.impl.ApacheConnectionManagerFactory.create(ApacheConnectionManagerFactory.java:58)
at com.amazonaws.http.apache.client.impl.ApacheHttpClientFactory.create(ApacheHttpClientFactory.java:50)
at com.amazonaws.http.apache.client.impl.ApacheHttpClientFactory.create(ApacheHttpClientFactory.java:38)
at com.amazonaws.http.AmazonHttpClient.<init>(AmazonHttpClient.java:315)
at com.amazonaws.http.AmazonHttpClient.<init>(AmazonHttpClient.java:299)
at com.amazonaws.AmazonWebServiceClient.<init>(AmazonWebServiceClient.java:172)
at com.amazonaws.services.s3.AmazonS3Client.<init>(AmazonS3Client.java:638)
at com.amazonaws.services.s3.AmazonS3Builder$1.apply(AmazonS3Builder.java:35)
at com.amazonaws.services.s3.AmazonS3Builder$1.apply(AmazonS3Builder.java:32)
at com.amazonaws.services.s3.AmazonS3ClientBuilder.build(AmazonS3ClientBuilder.java:64)
at com.amazonaws.services.s3.AmazonS3ClientBuilder.build(AmazonS3ClientBuilder.java:28)
at com.amazonaws.client.builder.AwsSyncClientBuilder.build(AwsSyncClientBuilder.java:46)
You are using a very old AWS SDK. Best practice is to update to AWS SDK for Java V2.
Here is the Java V2 code to use to place an object into an Amazon S3 bucket. You can find many other examples in Github.
package com.example.s3;
// snippet-start:[s3.java2.s3_object_upload.import]
import software.amazon.awssdk.core.sync.RequestBody;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.PutObjectRequest;
import software.amazon.awssdk.services.s3.model.PutObjectResponse;
import software.amazon.awssdk.services.s3.model.S3Exception;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
// snippet-end:[s3.java2.s3_object_upload.import]
/**
* To run this AWS code example, ensure that you have setup your development environment, including your AWS credentials.
*
* For information, see this documentation topic:
*
* https://docs.aws.amazon.com/sdk-for-java/latest/developer-guide/get-started.html
*/
public class PutObject {

    public static void main(String[] args) {
        final String USAGE = "\n" +
                "Usage:\n" +
                " <bucketName> <objectKey> <objectPath> \n\n" +
                "Where:\n" +
                " bucketName - the Amazon S3 bucket to upload an object into.\n" +
                " objectKey - the object to upload (for example, book.pdf).\n" +
                " objectPath - the path where the file is located (for example, C:/AWS/book2.pdf). \n\n" ;

        if (args.length != 3) {
            System.out.println(USAGE);
            System.exit(1);
        }

        String bucketName =args[0];
        String objectKey = args[1];
        String objectPath = args[2];
        System.out.println("Putting object " + objectKey +" into bucket "+bucketName);
        System.out.println(" in bucket: " + bucketName);
        Region region = Region.US_EAST_1;
        S3Client s3 = S3Client.builder()
                .region(region)
                .build();
        String result = putS3Object(s3, bucketName, objectKey, objectPath);
        System.out.println("Tag information: "+result);
        s3.close();
    }

    // snippet-start:[s3.java2.s3_object_upload.main]
    /**
     * Uploads the file at objectPath to bucketName/objectKey with a sample
     * metadata entry, and returns the resulting ETag ("" on failure, after
     * System.exit(1) — kept for parity with the original example).
     */
    public static String putS3Object(S3Client s3,
            String bucketName,
            String objectKey,
            String objectPath) {
        try {
            Map<String, String> metadata = new HashMap<>();
            metadata.put("x-amz-meta-myVal", "test");
            PutObjectRequest putOb = PutObjectRequest.builder()
                    .bucket(bucketName)
                    .key(objectKey)
                    .metadata(metadata)
                    .build();
            PutObjectResponse response = s3.putObject(putOb,
                    RequestBody.fromBytes(getObjectFile(objectPath)));
            return response.eTag();
        } catch (S3Exception e) {
            System.err.println(e.getMessage());
            System.exit(1);
        }
        return "";
    }

    // Return the file's contents as a byte array.
    private static byte[] getObjectFile(String filePath) {
        byte[] bytesArray = null;
        // FIX: the original issued a single InputStream.read(byte[]) call,
        // which is allowed to return before the buffer is full (short read).
        // readAllBytes() (Java 9+) reads the entire file, and
        // try-with-resources replaces the manual finally/close block.
        try (FileInputStream fileInputStream = new FileInputStream(new File(filePath))) {
            bytesArray = fileInputStream.readAllBytes();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return bytesArray;
    }
    // snippet-end:[s3.java2.s3_object_upload.main]
}

calling a java class method in another class

Hi I have below Java Class for Sending Fax from Java
package oracle.apps.print;
import com.softlinx.replixfax.*;
import javax.xml.ws.*;
import org.apache.commons.codec.binary.Base64;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.Path;
import java.io.File;
/**
 * Sends a fax through the Softlinx ReplixFax SOAP API.
 * NOTE(review): exceptions are caught below and only their message is printed
 * to stdout; the caller cannot distinguish success from failure. That is why
 * the calling code's logging always runs with no visible error — consider
 * rethrowing or returning a status.
 */
public class Fax {
/**
 * Faxes the file at Filepath to faxno.
 * Credentials, realm and the endpoint URL are hard-coded (the endpoint points
 * at the rpxtest.com test host); the password is merely base64-encoded, not
 * encrypted.
 */
public void SendFax(String Filepath, String faxno) {
try {
ReplixFaxService service = new ReplixFaxService();
ReplixFaxPort port = service.getReplixFaxPort();
((BindingProvider)port).getRequestContext().put(BindingProvider.USERNAME_PROPERTY, "admin");
// ((BindingProvider) port).getRequestContext().put(BindingProvider.ENDPOINT_ADDRESS_PROPERTY,"https://api.rpxfax.com/softlinx/replixfax/wsapi");
((BindingProvider)port).getRequestContext().put(BindingProvider.ENDPOINT_ADDRESS_PROPERTY,
"https://api.rpxtest.com:9999/softlinx/replixfax/wsapi");
Authentication auth = new Authentication();
auth.setLogin("user");
String password = "pwd";
auth.setPassword(org.apache.commons.codec.binary.Base64.encodeBase64String(password.getBytes()));
auth.setRealm("MTBC");
auth.setPasswordSecurity("base64");
SendFaxInput sendFaxInput = new SendFaxInput();
sendFaxInput.setAuthentication(auth);
FaxRecipient recipient = new FaxRecipient();
recipient.setFaxNumber(faxno.toString());
// Attach the file contents (read fully into memory) under its base name.
Attachment attachment = new Attachment();
File f = new File(Filepath.toString());
attachment.setFileName(f.getName());
Path path = Paths.get(Filepath.toString());
byte[] data = Files.readAllBytes(path);
attachment.setAttachmentContent(data);
sendFaxInput.getFaxRecipient().add(recipient);
sendFaxInput.getAttachment().add(attachment);
SendFaxOutput result = port.sendFax(sendFaxInput);
System.out.println("Status Code= " + result.getRequestStatus().getStatusCode());
if (result.getFaxInfo() != null) {
System.out.println("Fax ID = " + result.getFaxInfo().get(0).getFaxId());
}
} catch (Exception ex) {
// Swallows the exception: only the message is printed, the stack trace and
// the failure itself are lost to the caller.
System.out.println("Exception: " + ex.getMessage());
}
}
}
I am compiling this class like this
javac -cp .;./commons-codec-1.10.jar Fax.java
However Compiling of both classes is fine no error at compile time
when i call the method Fax in another class (XXEmail) like this
package oracle.apps.print;
public class XXEmail implements JavaConcurrentProgram {
// NOTE(review): `out` and `log` are not declared anywhere in this class —
// presumably members supplied by the JavaConcurrentProgram framework; as a
// plain static main() this does not compile. TODO confirm against the
// framework's contract.
public static void main(String[] args) {
try {
Fax mtbcfax = new Fax();
mtbcfax.SendFax("E:\\csv_svb\\3010218.pdf", "173224xxxx");
out.writeln("Fax Sent Successfully");
} catch (Exception i) {
log.writeln("Error while Sending Fax " + i.getMessage(), LogFile.STATEMENT);
} finally {
// NOTE(review): finally runs on EVERY invocation, success or failure —
// this is why "Error while Sending Fax" is always logged even when no
// error occurred (SendFax itself swallows its exceptions).
log.writeln("Error while Sending Fax ");
}
}
}
It always goes to the finally block without showing any error.
How can I call this method so that it returns a success code or an exception?
Try to:
Comment all lines in the SendFax function and add only a log:
public void SendFax(String Filepath, String faxno) {
out.writeln("No problem here");
}
Now start the program and see whether the function is called correctly.
If it is called correctly, then the arguments you are passing are probably wrong.

GoogleIdTokenVerifier does not return name, picture, etc

We're using the google-api-client library to verify Google accounts from our backend. This implementation runs on Google Cloud Platform (GCP) using App Engine and Datastore.
So far, GoogleIdTokenVerifier works but returns only the email and uid along with the token signature.
The same token and uid do return all profile info when run against our servlet, but not through our App Engine endpoint.
Here is the code used:
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.auth.oauth2.GoogleIdToken;
import com.google.api.client.googleapis.auth.oauth2.GoogleIdTokenVerifier;
import com.google.api.client.extensions.appengine.http.UrlFetchTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Collections;
import java.util.logging.Logger;
public class GoogleVerifier implements TokenVerifier {
final Logger logger = Logger.getLogger(GoogleVerifier.class.getName());
private static GoogleVerifier instance = null;
private String privAppId;
private UrlFetchTransport httpTransport; //library required to run on GCP
private JsonFactory jsonFactory;
private GoogleVerifier() {
}
private static GoogleVerifier getInstance() {
if (instance == null) {
instance = new GoogleVerifier();
}
return instance;
}
public static void setAppId(String appId) {
getInstance().setPrivAppId(appId);
getInstance().setHttpTransport(new UrlFetchTransport());
getInstance().setJsonFactory(new JacksonFactory());
}
public static String[] verify(String token, String uid) {
return getInstance().verifyPrivate(token, uid);
}
public String[] verifyPrivate(String token, String uid) {
#SuppressWarnings("unused")
GoogleCredential credential = new GoogleCredential().setAccessToken(token);
GoogleIdTokenVerifier verifier = new GoogleIdTokenVerifier.Builder(httpTransport, jsonFactory)
.setAudience(Collections.singletonList(privAppId))
.build();
String[] payloadInfo = new String[5];
try {
GoogleIdToken idToken = verifier.verify(token);
if (idToken != null) {
GoogleIdToken.Payload payload = idToken.getPayload();
if (payload.getSubject().equals(uid)) {
logger.info("Matching google id: " + uid);
payloadInfo[0] = payload.getSubject();
payloadInfo[1] = payload.get("given_name").toString();
payloadInfo[2] = payload.get("family_name").toString();
payloadInfo[3] = payload.get("picture").toString();
payloadInfo[4] = payload.getEmail();
return payloadInfo;
} else {
logger.info("Mismatching google id: " + uid);
return payloadInfo;
}
}
}
catch (Exception e) {
e.printStackTrace();
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
e.printStackTrace(pw);
logger.warning(sw.toString());
return payloadInfo;
}
return payloadInfo;
}
private void setPrivAppId(String appId) {
this.privAppId = appId;
}
private void setHttpTransport(UrlFetchTransport httpTransport) {
this.httpTransport = httpTransport;
}
private void setJsonFactory(JsonFactory jsonFactory) {
this.jsonFactory = jsonFactory;
}
}
Here is our App Engine Endpoint:
#ApiMethod(name = "loginSocial", path = "loginSocial", httpMethod = HttpMethod.PUT)
public Response loginSocial(#Named("token") String token,
#Named("uid") String uid,
#Named("socialWebSite") SOCIALWEBSITE socialWebSite,
HttpServletRequest request) throws DatabaseException, IOException {
Response response = new Response();
//make sure parameters and not null or empty
if (token != null && uid != null && socialWebSite != null &&
!token.trim().isEmpty() && !uid.trim().isEmpty()){
String [] userInfo = new String[5];
//validate token and retrieve info first
if (socialWebSite.equals(SOCIALWEBSITE.GOOGLE)){
GoogleVerifier.setAppId(APP_ID);
userInfo = GoogleVerifier.verify(token, uid);
}else if(socialWebSite.equals(APP_ID);
userInfo = FacebookVerifier.verify(token, uid);
}
}
}
Thanks!
I ended up using a different library which is much simpler and provided the same information.
https://stackoverflow.com/questions/22516693

Record test coverage per test case using eclEmma tool

I want to record test coverage per test case using eclEmma tool. The coverage should contain the % covered by that test case of the target class and also want to access the statements executed by that test case. Follwowing is the code which runs a test class and generates the coverage on test class itself.
package expJaCoCo;
/** Minimal calculator used as the JaCoCo coverage target. */
public class Calculadora {

    /** Explicit no-arg constructor, kept for parity with the original. */
    public Calculadora() {
    }

    /** Returns the sum of {@code x} and {@code y}. */
    public int add(int x, final int y) {
        final int sum = x + y;
        return sum;
    }
}
CalculadoraTest.java
package expJaCoCo;
import junit.framework.TestCase;
import org.junit.BeforeClass;
import org.junit.AfterClass;
import org.junit.Test;
public class CalculadoraTest extends TestCase
{
private Calculadora c1;
#BeforeClass
public void setUp() { c1 = new Calculadora(); }
#AfterClass
public void tearDown() { c1 = null; }
#Test
public void testAdd() { assertTrue(c1.add(1, 0) == 1); }
}
CoreTutorial.java
package expJaCoCo;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import org.jacoco.core.analysis.Analyzer;
import org.jacoco.core.analysis.CoverageBuilder;
import org.jacoco.core.analysis.IClassCoverage;
import org.jacoco.core.analysis.ICounter;
import org.jacoco.core.data.ExecutionDataStore;
import org.jacoco.core.instr.Instrumenter;
import org.jacoco.core.runtime.IRuntime;
import org.jacoco.core.runtime.LoggerRuntime;
import org.junit.runner.JUnitCore;
import org.junit.runner.Result;
public class CoreTutorial
{
/**
* A class loader that loads classes from in-memory data.
*/
public static class MemoryClassLoader extends ClassLoader
{
private final Map<String, byte[]> definitions = new HashMap<String, byte[]>();
/**
* Add a in-memory representation of a class.
*
* #param name name of the class
* #param bytes class definition
*/
public void addDefinition(final String name, final byte[] bytes) {
definitions.put(name, bytes);
}
#Override
protected Class<?> loadClass(final String name, final boolean resolve) throws ClassNotFoundException
{
final byte[] bytes = definitions.get(name);
if (bytes != null)
return defineClass(name, bytes, 0, bytes.length);
return super.loadClass(name, resolve);
}
}
private InputStream getTargetClass(final String name)
{
final String resource = '/' + name.replace('.', '/') + ".class";
return getClass().getResourceAsStream(resource);
}
private void printCounter(final String unit, final ICounter counter)
{
final Integer missed = Integer.valueOf(counter.getMissedCount());
final Integer total = Integer.valueOf(counter.getTotalCount());
System.out.printf("%s of %s %s missed%n", missed, total, unit);
}
private String getColor(final int status)
{
switch (status) {
case ICounter.NOT_COVERED:
return "red";
case ICounter.PARTLY_COVERED:
return "yellow";
case ICounter.FULLY_COVERED:
return "green";
}
return "";
}
private void runTutorial() throws Exception
{
final String targetName = CalculadoraTest.class.getName();
// For instrumentation and runtime we need a IRuntime instance to collect execution data:
final IRuntime runtime = new LoggerRuntime();
// The Instrumenter creates a modified version of our test target class that contains additional probes for execution data recording:
final Instrumenter instr = new Instrumenter(runtime);
final byte[] instrumented = instr.instrument(getTargetClass(targetName));
// Now we're ready to run our instrumented class and need to startup the runtime first:
runtime.startup();
// In this tutorial we use a special class loader to directly load the instrumented class definition from a byte[] instances.
final MemoryClassLoader memoryClassLoader = new MemoryClassLoader();
memoryClassLoader.addDefinition(targetName, instrumented);
final Class<?> targetClass = memoryClassLoader.loadClass(targetName);
// Here we execute our test target class through its Runnable interface:
/*final Runnable targetInstance = (Runnable) targetClass.newInstance();
targetInstance.run();*/
JUnitCore junit = new JUnitCore();
Result result = junit.run(targetClass);
System.out.println(result.getRunTime());
// At the end of test execution we collect execution data and shutdown the runtime:
final ExecutionDataStore executionData = new ExecutionDataStore();
runtime.collect(executionData, null, false);
runtime.shutdown();
// Together with the original class definition we can calculate coverage information:
final CoverageBuilder coverageBuilder = new CoverageBuilder();
final Analyzer analyzer = new Analyzer(executionData, coverageBuilder);
analyzer.analyzeClass(getTargetClass(targetName));
// Let's dump some metrics and line coverage information:
for (final IClassCoverage cc : coverageBuilder.getClasses())
{
System.out.printf("Coverage of class %s%n", cc.getName());
printCounter("instructions", cc.getInstructionCounter());
printCounter("branches", cc.getBranchCounter());
printCounter("lines", cc.getLineCounter());
printCounter("methods", cc.getMethodCounter());
printCounter("complexity", cc.getComplexityCounter());
for (int i = cc.getFirstLine(); i <= cc.getLastLine(); i++) {
System.out.printf("Line %s: %s%n", Integer.valueOf(i), getColor(cc.getLine(i).getStatus()));
}
}
}
public static void main(final String[] args) throws Exception {
new CoreTutorial().runTutorial();
}
}
This example executes and instruments CalculadoraTest and provides coverage of CalculadoraTest.java, but I want the coverage of Calculadora.java.
How can I change the code to get the desired result?
This example executes and instrument CalculadoraTest and provide the coverage of CalculadoraTest.java, but I want the coverage of Calculadora.java How can I change the code to get the desired result.
targetName that is CalculadoraTest.class.getName() is used for both coverage instrumentation/analysis by JaCoCo and execution by JUnit, however in the first case should be Calculadora.class.getName().
Using JaCoCo 0.7.7 APIs:
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import org.jacoco.core.analysis.Analyzer;
import org.jacoco.core.analysis.CoverageBuilder;
import org.jacoco.core.analysis.IClassCoverage;
import org.jacoco.core.analysis.ICounter;
import org.jacoco.core.data.ExecutionDataStore;
import org.jacoco.core.instr.Instrumenter;
import org.jacoco.core.runtime.IRuntime;
import org.jacoco.core.runtime.LoggerRuntime;
import org.junit.runner.JUnitCore;
import org.junit.runner.Result;
import org.jacoco.core.runtime.RuntimeData;
import org.jacoco.core.data.SessionInfoStore;
public class CoreTutorialTest
{
/**
* A class loader that loads classes from in-memory data.
*/
public static class MemoryClassLoader extends ClassLoader
{
private final Map<String, byte[]> definitions = new HashMap<String, byte[]>();
/**
* Add a in-memory representation of a class.
*
* #param name name of the class
* #param bytes class definition
*/
public void addDefinition(final String name, final byte[] bytes) {
definitions.put(name, bytes);
}
#Override
protected Class<?> loadClass(final String name, final boolean resolve) throws ClassNotFoundException
{
final byte[] bytes = definitions.get(name);
if (bytes != null)
return defineClass(name, bytes, 0, bytes.length);
return super.loadClass(name, resolve);
}
}
private InputStream getTargetClass(final String name)
{
final String resource = '/' + name.replace('.', '/') + ".class";
return getClass().getResourceAsStream(resource);
}
private void printCounter(final String unit, final ICounter counter)
{
final Integer missed = Integer.valueOf(counter.getMissedCount());
final Integer total = Integer.valueOf(counter.getTotalCount());
System.out.printf("%s of %s %s missed%n", missed, total, unit);
}
private String getColor(final int status)
{
switch (status) {
case ICounter.NOT_COVERED:
return "red";
case ICounter.PARTLY_COVERED:
return "yellow";
case ICounter.FULLY_COVERED:
return "green";
}
return "";
}
/**
 * End-to-end JaCoCo "offline" coverage demo: instruments the target class and
 * its JUnit test in memory, runs the test suite against the instrumented
 * bytecode, then collects execution data and prints per-class / per-line
 * coverage to stdout.
 *
 * @throws Exception on any instrumentation, class-loading or analysis failure
 */
private void runTutorial() throws Exception
{
    final String targetName = Calculadora.class.getName();
    // For instrumentation and runtime we need a IRuntime instance to collect execution data:
    final IRuntime runtime = new LoggerRuntime();
    // The Instrumenter creates a modified version of our test target class that contains additional probes for execution data recording:
    final Instrumenter instr = new Instrumenter(runtime);
    final byte[] instrumented = instr.instrument(getTargetClass(targetName), "");
    // Now we're ready to run our instrumented class and need to startup the runtime first:
    final RuntimeData data = new RuntimeData();
    runtime.startup(data);
    // In this tutorial we use a special class loader to directly load the instrumented class definition from a byte[] instances.
    final MemoryClassLoader memoryClassLoader = new MemoryClassLoader();
    memoryClassLoader.addDefinition(targetName, instrumented);
    final Class<?> targetClass = memoryClassLoader.loadClass(targetName);
    // Here we execute our test target class through its Runnable interface:
    // NOTE(review): direct instantiation is disabled below, so `targetClass`
    // is loaded but never used directly — execution happens via the JUnit run.
    /*final Runnable targetInstance = (Runnable) targetClass.newInstance();
    targetInstance.run();*/
    // Instrument the JUnit test class too and register it with the same class
    // loader so it links against the instrumented target class.
    String junitName = CalculadoraTest.class.getName();
    memoryClassLoader.addDefinition(junitName, instr.instrument(getTargetClass(junitName), ""));
    final Class<?> junitClass = memoryClassLoader.loadClass(junitName);
    // Run the tests programmatically; this is what actually drives the probes.
    JUnitCore junit = new JUnitCore();
    Result result = junit.run(junitClass);
    System.out.println("Failure count: " + result.getFailureCount());
    // At the end of test execution we collect execution data and shutdown the runtime:
    final ExecutionDataStore executionData = new ExecutionDataStore();
    data.collect(executionData, new SessionInfoStore(), false);
    runtime.shutdown();
    // Together with the original class definition we can calculate coverage information:
    final CoverageBuilder coverageBuilder = new CoverageBuilder();
    final Analyzer analyzer = new Analyzer(executionData, coverageBuilder);
    // Analyze the ORIGINAL (uninstrumented) bytecode against the collected data.
    analyzer.analyzeClass(getTargetClass(targetName), targetName);
    // Let's dump some metrics and line coverage information:
    for (final IClassCoverage cc : coverageBuilder.getClasses())
    {
        System.out.printf("Coverage of class %s%n", cc.getName());
        printCounter("instructions", cc.getInstructionCounter());
        printCounter("branches", cc.getBranchCounter());
        printCounter("lines", cc.getLineCounter());
        printCounter("methods", cc.getMethodCounter());
        printCounter("complexity", cc.getComplexityCounter());
        // One colour-coded status per source line of the analyzed class.
        for (int i = cc.getFirstLine(); i <= cc.getLastLine(); i++) {
            System.out.printf("Line %s: %s%n", Integer.valueOf(i), getColor(cc.getLine(i).getStatus()));
        }
    }
}
/**
 * Entry point: runs the JaCoCo instrumentation/coverage walkthrough.
 *
 * @param args ignored
 * @throws Exception propagated from the tutorial run
 */
public static void main(final String[] args) throws Exception {
    final CoreTutorial tutorial = new CoreTutorial();
    tutorial.runTutorial();
}
}

How can I make read and write execution in MapReduce work faster?

I have a program that consists of only a Driver class and a Mapper class; I am not using a Reducer class.
In the Driver class, I read a file from an S3 bucket; in the Mapper class, I write files to the S3 bucket using plain Java code (the AWS Java SDK), not via context.write.
I have 1000 JSON files. When I run the program, the Driver class fetches the files and the Mapper class writes each file to the S3 bucket. Currently it takes up to 2 seconds to write a single file, but I would like to write at least 100 files every 2 seconds.
How can I achieve this? Please suggest some solutions.
Service call class:
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpMethodBase;
import org.apache.commons.httpclient.methods.GetMethod;
import org.elasticsearch.client.Client;
/**
 * Shared constants and the web-service helper for the CLC MapReduce flow.
 *
 * Holds the field names used to map columns of the input file, the date/time
 * patterns used when parsing input values, and a helper that performs an HTTP
 * GET against the configuration web service.
 */
public class ClcConstants {
    // Shared Elasticsearch client handle; initialised elsewhere, may be null here.
    public static Client client = null;

    // Variable Declarations for mapping the field in input file
    public static String ContactId = "contactid";
    public static String PostalCode = "PostalCode";
    public static String Email = "Email";

    // Date/time patterns used when parsing input values.
    public static String DateFormat = "dd/MM/yyyy HH:mm";
    public static String Date = "dd/MM/yyyy";

    /**
     * Performs an HTTP GET against the given URL and returns the response body.
     * (Despite the name, this issues a GET, not a POST.)
     *
     * @param url fully-qualified service URL to call
     * @return the response body as a string, or "" if the request failed
     */
    public static String executePost(String url) {
        System.out.println("Started to connect webservices...");
        HttpClient httpClient = new HttpClient();
        /*
         * Optional NTLM authentication, currently disabled:
         * Credentials credentials = new NTCredentials("myusername",
         * "mypassword", "myhost", "mydomain");
         * httpClient.getState().setCredentials(AuthScope.ANY, credentials);
         */
        System.out.println("Accessing webservices...");
        HttpMethodBase method = new GetMethod(url);
        try {
            int returnCode = httpClient.executeMethod(method);
            System.out.println("returnCode: " + returnCode);
            String response = method.getResponseBodyAsString();
            System.out.println("Response: " + response);
            return response;
        } catch (Exception e) {
            // Best-effort call: log and fall through to the empty-string return.
            e.printStackTrace();
        } finally {
            // FIX: release the connection back to the HttpClient connection
            // manager. commons-httpclient 3.x requires this after every method
            // execution; without it, repeated calls leak pooled connections.
            method.releaseConnection();
        }
        return "";
    }
}
Driver Class:
import java.util.Date;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.json.JSONArray;
import org.json.JSONObject;
/**
 * Driver for the CLC MapReduce flow: fetches per-client CLC configuration from
 * a web service and, for each client entry in the response, launches a
 * map-only Hadoop job over JSON files stored in S3.
 *
 * Expected arguments:
 *   args[0] dbType      - backend selector; only "amazonS3" is handled here
 *   args[1] serviceUrl  - base URL of the configuration web service
 *   args[2] input_path  - S3 input path prefix
 *   args[3] client_key  - client key, appended to the input path
 *   args[4] folder_name - folder name, appended to the input path
 */
public class ClcDriver {
    // Globally initialized in order to access outside the method.
    // NOTE(review): these statics live in the driver JVM only; mapper tasks
    // run in separate JVMs and will see them as null there. Values intended
    // for mappers should travel via the job Configuration (as "ClientKey"
    // below) — confirm ClcMapper does not rely on these statics.
    public static String client_key;
    public static String folder_name;

    /**
     * Parses arguments, fetches the CLC configuration JSON from the service,
     * and dispatches to {@link #amazonS3(JSONArray, String)} when requested.
     */
    public static void main(String[] args) throws Exception {
        // Arguments to be passed dynamically
        String dbType = args[0];
        String serviceUrl = args[1];
        String input_path = args[2];
        client_key = args[3];
        folder_name = args[4];
        // Record the wall-clock start time for the log.
        Date date = new Date();
        String jobstarttime = String.format("Current Date/Time : %tc", date);
        System.out.println(jobstarttime);
        String url = serviceUrl + "/GetCLCConfiguration/?clientKey="
                + client_key;
        System.out.println("GetCLCConfiguration from Service");
        String responseText = ClcConstants.executePost(url);
        System.out.println("End GetCLCConfiguration from Service");
        // Convert the accessed string to JsonObject.
        JSONObject json_data = new JSONObject(responseText);
        // Converting into JsonArray in order to process in the loop
        JSONArray array = (JSONArray) json_data
                .get("GetCLCConfigurationResult");
        // If the argument passed as "amazonS3", the below method gets executed
        if (dbType.equals("amazonS3")) {
            amazonS3(array, input_path);
        }
    }

    /**
     * Launches one map-only job per client configuration entry, sequentially.
     * Each job reads from {input_path}{client_key}/{folder_name}/ and writes
     * its output under an "output" subfolder of the same path.
     *
     * NOTE(review): jobs run one at a time (waitForCompletion inside the
     * loop), and a single failed job aborts all remaining clients via the
     * early return — confirm both are intended.
     * SECURITY NOTE(review): the S3 access/secret keys are hard-coded here;
     * move them to a credentials provider or cluster configuration.
     *
     * @param array      GetCLCConfigurationResult entries, one per client
     * @param input_path S3 input path prefix
     */
    public static void amazonS3(JSONArray array, String input_path)
            throws Exception {
        for (int i = 0; i < array.length(); i++) {
            System.out.println("***********");
            JSONObject innerObj = (JSONObject) array.get(i);
            String clientName = innerObj.get("ClientKey").toString();
            String data = innerObj.get("Configurations").toString();
            System.out.println("Setting Configuration...");
            Configuration conf = new Configuration();
            System.out.println("Configuration done");
            System.out.println("Accessing s3bucket...");
            // Hand the per-client configuration to the mapper via the job conf.
            conf.set("Configurations", data);
            conf.set("ClientKey", clientName);
            conf.set("fs.s3n.awsAccessKeyId", "myaccesskey");
            conf.set("fs.s3n.awsSecretAccessKey",
                    "mysecret access key");
            System.out.println("Accessed.");
            System.out.println("Setting Job...");
            Job job = Job.getInstance(conf, "JobName");
            System.out.println("Job assigned");
            job.setJarByClass(ClcDriver.class);
            job.setMapperClass(ClcMapper.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(Text.class);
            FileInputFormat.addInputPath(job, new Path(input_path + client_key
                    + "/" + folder_name + "/"));
            FileOutputFormat.setOutputPath(job, new Path(input_path + client_key
                    + "/" + folder_name + "/" + "output" + "/"));
            // Abort the whole run if this client's job fails.
            if (!job.waitForCompletion(true))
                return;
            // Calculating Job Completed Time
            Date date = new Date();
            String jobcompletedtime = String
                    .format("Current Date/Time : %tc", date);
            System.out.println(jobcompletedtime);
        }
    }
}
Mapper Class:
/**
 * Map-only task that parses one JSON contact per input line, stamps a "CLC"
 * field into it, and emits the updated record. Also holds an S3 client that
 * is created in setup().
 */
public class ClcMapper extends Mapper<Object, Text, Text, Text> {
    // Target S3 bucket (placeholder value).
    private static String bucketName = "BucketName";
    // Key prefix built from driver-side statics.
    // NOTE(review): ClcDriver.client_key / folder_name are assigned in the
    // driver JVM; mapper tasks run in separate JVMs, so both are likely null
    // here, making this "null/null/". Pass them through the job Configuration
    // instead — TODO confirm on the cluster.
    private static String keyName = ClcDriver.client_key + "/"
            + ClcDriver.folder_name + "/";
    // S3 client initialised in setup(); static, so shared by mapper instances
    // within the same task JVM.
    public static AmazonS3 s3client;
    public String jsonobjectconvertstring;
    public InputStream writecontentins3;
    // CLC value written into each record; never reassigned in this class, so
    // it keeps the default 0 — NOTE(review): verify this is intended.
    int val;
// Called once at the beginning of the task
    // Called once at the beginning of the task
    /**
     * Initialises the shared S3 client for this mapper task and pins it to
     * the us-west-2 region.
     *
     * SECURITY NOTE(review): the AWS credentials are hard-coded; move them to
     * a credentials provider or the job configuration.
     */
    protected void setup(Context context) throws IOException,
            InterruptedException {
        System.out.println("Mapper Started");
        System.out.println("Accessing s3bucket once again...");
        s3client = new AmazonS3Client(new BasicAWSCredentials(
                "Myaccesskey",
                "mysecretaccesskey"));
        System.out.println("Accessed.");
        System.out.println("Setting Region...");
        Region region = Region.getRegion(Regions.US_WEST_2);
        s3client.setRegion(region);
        // Explicit endpoint for us-west-2; presumably redundant with
        // setRegion above — TODO confirm against the SDK version in use.
        s3client.setEndpoint("s3-us-west-2.amazonaws.com");
        System.out.println("Region was successfully set.");
        // GetCLCConfiguration results from Driver class
        //Configuration conf = context.getConfiguration();
        //String data = conf.get("Configurations");
    }
// Processing Mapper for each contact...
public void map(Object key, Text value, Context context)
throws IOException, InterruptedException {
boolean MemberOfAnyContactGroup = true;
String line = value.toString();
try {
JSONObject contacts = new JSONObject(line);
// Updating the CLC field
System.out.println(contacts.put("CLC", val).toString());
context.write(new Text(contacts.toString()),new Text());
} catch (Exception e) {
System.out.println(e);
}
}

Categories