Cannot write parquet to amazon s3 bucket using AvroParquetWriter in Java - java

Hi am trying to write parquets to an amazon s3 bucket/key using JAVA but getting an [org.apache.hadoop.fs.UnsupportedFileSystemException: No FileSystem for scheme "file"].
Cannot write
[s3a://nprd-pr-snd-rtgrev-edp/sndrtgrev/out/jcl-0.snappy.parquet]:
org.apache.hadoop.fs.UnsupportedFileSystemException: No FileSystem for
scheme "file"
at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:3443)
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3466)
at org.apache.hadoop.fs.FileSystem.access$300(FileSystem.java:174)
at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3574)
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3521)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:540)
at org.apache.hadoop.fs.FileSystem.getLocal(FileSystem.java:496)
at org.apache.hadoop.fs.LocalDirAllocator$AllocatorPerContext.confChanged(LocalDirAllocator.java:316)
at org.apache.hadoop.fs.LocalDirAllocator$AllocatorPerContext.getLocalPathForWrite(LocalDirAllocator.java:393)
at org.apache.hadoop.fs.LocalDirAllocator.getLocalPathForWrite(LocalDirAllocator.java:165)
at org.apache.hadoop.fs.LocalDirAllocator.getLocalPathForWrite(LocalDirAllocator.java:146)
at org.apache.hadoop.fs.s3a.S3AFileSystem.createTmpFileForWrite(S3AFileSystem.java:1019)
at org.apache.hadoop.fs.s3a.S3ADataBlocks$DiskBlockFactory.create(S3ADataBlocks.java:816)
at org.apache.hadoop.fs.s3a.S3ABlockOutputStream.createBlockIfNeeded(S3ABlockOutputStream.java:204)
at org.apache.hadoop.fs.s3a.S3ABlockOutputStream.(S3ABlockOutputStream.java:182)
at org.apache.hadoop.fs.s3a.S3AFileSystem.create(S3AFileSystem.java:1369)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:1195)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:1175)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:1064)
at org.apache.parquet.hadoop.ParquetFileWriter.(ParquetFileWriter.java:244)
at org.apache.parquet.hadoop.ParquetWriter.(ParquetWriter.java:273)
at org.apache.parquet.hadoop.ParquetWriter$Builder.build(ParquetWriter.java:494)
at com.jclconsultants.parquet.tool.AwsParquetProcessor.writeParquetWithAvroToAwsBucket(AwsParquetProcessor.java:210)
at com.jclconsultants.parquet.tool.AwsParquetProcessor.run(AwsParquetProcessor.java:81)
at com.jclconsultants.parquet.tool.ToolProcessParquetsFromAwsBucket.launchAwsParquetProcessor(ToolProcessParquetsFromAwsBucket.java:269)
at com.jclconsultants.parquet.tool.ToolProcessParquetsFromAwsBucket.launchThreads(ToolProcessParquetsFromAwsBucket.java:251)
at com.jclconsultants.parquet.tool.ToolProcessParquetsFromAwsBucket.processParquetsFromBucket(ToolProcessParquetsFromAwsBucket.java:160)
at com.jclconsultants.parquet.tool.ToolProcessParquetsFromAwsBucket.main(ToolProcessParquetsFromAwsBucket.java:127)
Here is how I am writing it (the code was gathered from different posts):
/**
 * Writes the given Avro records directly to S3 as a Snappy-compressed Parquet file
 * at s3a://&lt;bucketName&gt;/&lt;outputParquetName&gt;, using SSE-KMS server-side encryption.
 *
 * @param outputParquetName key (path within the bucket) of the Parquet file to create
 * @param records           Avro records to write; must conform to the schema from getSchema()
 */
private void writeParquetWithAvroToAwsBucket(String outputParquetName, List<GenericData.Record> records) {
    URI awsURI;
    try {
        awsURI = new URI("s3a://" + bucketName + "/" + outputParquetName);
    } catch (URISyntaxException e1) {
        e1.printStackTrace();
        return;
    }
    Path dataFile = new Path(awsURI);
    Configuration config = new Configuration();
    config.set("fs.s3a.access.key", accesskey);
    config.set("fs.s3a.secret.key", secretAccessKey);
    config.set("fs.s3a.endpoint", "s3." + Regions.CA_CENTRAL_1.getName() + ".amazonaws.com");
    config.set("fs.s3a.impl", "org.apache.hadoop.fs.s3a.S3AFileSystem");
    // FIX: S3A buffers each upload block on the local disk before sending it to S3
    // (see S3ADataBlocks$DiskBlockFactory / LocalDirAllocator in the stack trace), so a
    // FileSystem implementation must also be registered for the "file" scheme.
    // Without this mapping, FileSystem.getLocal() fails with
    // UnsupportedFileSystemException: No FileSystem for scheme "file".
    config.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
    config.set("fs.s3a.aws.credentials.provider", "org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider");
    config.set("fs.s3a.server-side-encryption-algorithm", S3AEncryptionMethods.SSE_KMS.getMethod());
    config.set("fs.s3a.connection.ssl.enabled", "true");
    config.set("fs.s3a.impl.disable.cache", "true");
    config.set("fs.s3a.path.style.access", "true");
    try (ParquetWriter<GenericData.Record> writer = AvroParquetWriter.<GenericData.Record>builder(dataFile)
            .withSchema(getSchema())
            .withConf(config)
            .withCompressionCodec(SNAPPY)
            .withWriteMode(OVERWRITE)
            .build()) {
        for (GenericData.Record record : records) {
            writer.write(record);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
and the schema is as follows:
/**
 * Builds the Avro schema used when writing the Parquet files.
 * All fields are nullable unions (["null", ...]) with a null default; the two
 * date fields use the Avro "date" logical type backed by int (days since epoch).
 *
 * @return the parsed Avro {@link Schema} for record type "spark_schema"
 */
private Schema getSchema() {
// Schema is kept as an inline JSON literal; the \r\n sequences only affect the
// JSON text's formatting, not the parsed schema.
String json = "{\"type\":\"record\",\r\n" + " \"name\":\"spark_schema\",\r\n" + " \"fields\":[\r\n"
+ " {\"name\":\"PolicyVersion_uniqueId\",\"type\":[\"null\",\"string\"],\"default\":null},\r\n"
+ " {\"name\":\"agreementNumber\",\"type\":[\"null\",\"string\"],\"default\":null},\r\n"
+ " {\"name\":\"epoch_time\",\"type\":[\"null\",\"string\"],\"default\":null},\r\n"
+ " {\"name\":\"xml\",\"type\":[\"null\",\"string\"],\"default\":null},\r\n"
+ " {\"name\":\"filename\",\"type\":[\"null\",\"string\"],\"default\":null},\r\n"
+ " {\"name\":\"message_header\",\"type\":[\"null\",{\"type\":\"map\",\"values\":[\"null\",\"string\"]}],\"default\":null},\r\n"
+ " {\"name\":\"tracking_number\",\"type\":[\"null\",\"string\"],\"default\":null},{\"name\":\"fullTermPremium\",\"type\":[\"null\",\"string\"],\"default\":null},\r\n"
+ " {\"name\":\"epoch_date\",\"type\":[\"null\",{\"type\":\"int\",\"logicalType\":\"date\"}],\"default\":null},\r\n"
+ " {\"name\":\"collection_date\",\"type\":[\"null\",{\"type\":\"int\",\"logicalType\":\"date\"}],\"default\":null}\r\n"
+ "]}";
return new Schema.Parser().parse(json);
}
I've also tried with a different credential provider (AnonymousAWSCredentialsProvider and TemporaryAWSCredentialsProvider) but still did not get it to work. I have a feeling that my config is missing something as it fails during the AvroParquetWriter build().
What am I doing wrong?
Notice that I can read parquets from S3 with a similar configuration.
I can also write a parquet file to my local drive and then upload the parquet file to s3 as follows:
/**
 * Writes the buffered records (parquetRecords) to a local Parquet file at
 * OUTPUT_DIR/parquetName, incrementing nbRecordsInParquet per written record.
 *
 * @return the local path the file was (or would have been) written to
 */
private Path writeGenericRecordsToLocalDrive() {
    Path target = new Path(OUTPUT_DIR + "/" + parquetName);
    Configuration hadoopConf = new Configuration();
    // Register FileSystem implementations for the hdfs:// and file:// schemes explicitly.
    hadoopConf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
    hadoopConf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
    try (ParquetWriter<GenericData.Record> writer =
            AvroParquetWriter.<GenericData.Record>builder(target)
                    .withSchema(getSchema())
                    .withConf(hadoopConf)
                    .withCompressionCodec(SNAPPY)
                    .withWriteMode(OVERWRITE)
                    .build()) {
        for (GenericData.Record record : parquetRecords) {
            writer.write(record);
            nbRecordsInParquet++;
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    return target;
}
/**
 * Uploads a locally written Parquet file to the configured S3 bucket/folder with
 * SSE-KMS server-side encryption, then shuts down the S3 client.
 *
 * @param dataFile local Parquet path; only its file name is used, resolved under OUTPUT_DIR
 */
private void uploadLocalParquetToAwsBucket(Path dataFile) {
    AmazonS3 s3Client = null;
    try {
        File fileToUpload = new File(OUTPUT_DIR + "/" + dataFile.getName());
        ObjectMetadata objectMetadata = new ObjectMetadata();
        objectMetadata.setHeader(Headers.SERVER_SIDE_ENCRYPTION, "aws:kms");
        objectMetadata.setContentLength(fileToUpload.length());
        BasicAWSCredentials creds = new BasicAWSCredentials(accessKey, secretAccessKey);
        s3Client = AmazonS3ClientBuilder.standard().withRegion(Regions.CA_CENTRAL_1)
                .withCredentials(new AWSStaticCredentialsProvider(creds)).build();
        String bucketDestination = fileToUpload.getName();
        // FIX: close the upload stream when done; the original leaked a FileInputStream
        // on every call.
        try (FileInputStream content = new FileInputStream(fileToUpload)) {
            com.amazonaws.services.s3.model.PutObjectRequest putRequest =
                    new com.amazonaws.services.s3.model.PutObjectRequest(
                            bucketName + "/" + bucketFolder, bucketDestination, content,
                            objectMetadata);
            putRequest.putCustomRequestHeader(Headers.SERVER_SIDE_ENCRYPTION, "aws:kms");
            s3Client.putObject(putRequest);
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        if (s3Client != null) {
            s3Client.shutdown();
        }
    }
}

Related

When email returns an error, pass the error to the error email code and include it in the body of the email

I'm developing a spring boot application to generate an email with a pdf. I need to display the error message exception returned by the document creation service. the document service (and email service ) return a message if there is an exception captured by the Exception handler, and When it returns an error, pass the error to the error email code and include it in the body of the email. and finally I want to test it.
Controller class
Model.addAttribute("permit", permit);
model.addAttribute("success", "Success! Permit for " + permit.getReceipt() + " sent. ");
Map<String, String> result = documentServiceImpl.generatePdf(permit);
if (documentServiceImpl.STATUS_SUCCESS.equals(result.get("status"))) {
model.addAttribute("date", result.get("date"));
String emailBody = getEmailBody(permit);
Map<String, String> emailResult = emailServiceImpl.sendRequestoremail(permit.getEmail(), emailBody, result.get("file"));
documentServiceImpl.deletePdf(result.get("file"));
if (emailServiceImpl.STATUS_ERROR.equals(emailResult.get("status"))) {
String errorE = emailResult.get("message");
String errorEmail = getErrorEmail(errorE);
Map<String,String> financeEmail = emailServiceImpl.sendFinanceEmail(permit.getEmail(), errorEmail);
model.addAttribute("errorEmail", "Received response for the Temporary Parking Permit script but there was an error " + financeEmail);
}
}
// String body = getEmailBody(permit);
System.out.println(result.get("status"));
System.out.println(result.get("file"));
} catch (Exception e) {
model.addAttribute("error", e.getMessage());
}
return "permit";
}
/** Renders the "email" Thymeleaf template with the given permit exposed as "permit". */
private String getEmailBody(Permit permit) {
    ClassLoaderTemplateResolver resolver = new ClassLoaderTemplateResolver();
    resolver.setPrefix("templates/");
    resolver.setSuffix(".html");
    resolver.setTemplateMode(TemplateMode.HTML);
    TemplateEngine engine = new TemplateEngine();
    engine.setTemplateResolver(resolver);
    Context ctx = new Context();
    ctx.setVariable("permit", permit);
    return engine.process("email", ctx);
}
/**
 * Renders the "emailError" Thymeleaf template, exposing the given error text as "permit".
 *
 * @param permit the error message string placed into the template context
 */
private String getErrorEmail(String permit) {
    ClassLoaderTemplateResolver resolver = new ClassLoaderTemplateResolver();
    resolver.setPrefix("templates/");
    resolver.setSuffix(".html");
    resolver.setTemplateMode(TemplateMode.HTML);
    TemplateEngine engine = new TemplateEngine();
    engine.setTemplateResolver(resolver);
    Context ctx = new Context();
    ctx.setVariable("permit", permit);
    return engine.process("emailError", ctx);
}
}
Document service
#Override
public Map<String, String> generatePdf(Permit permit) {
Map<String, String> result = new HashMap<>();
currentDate = getCurrentDate("dd MMMM uuuu");
String fileName = null;
try {
Path outputPath = Paths.get(tmp + File.separator + "parking");
Files.createDirectories(outputPath);
fileName = outputPath + File.separator + permit.getCustomerNum() + permit.getPurchaseDate().replaceAll("[^a-zA-Z]", "") + getCurrentDate("uuuuMMddHHmmss") + ".pdf";
InputStream pdfTemplate = resourceFile.getInputStream();
PDDocument pdfDoc = PDDocument.load(pdfTemplate);
PDDocumentCatalog docCatalog = pdfDoc.getDocumentCatalog();
PDAcroForm form = docCatalog.getAcroForm();
form.getField("Receipt").setValue(permit.getReceipt());
form.getField("PermitType").setValue(permit.getPermitType());
form.getField("PurchaseDate").setValue(permit.getPurchaseDate());
form.getField("MakeModel").setValue(permit.getMake() + " " + permit.getCarModel());
// form.getField("Model").setValue(permit.getCarModel());
form.getField("Rego").setValue(permit.getCarRego());
form.flatten();
System.out.println("generatePdf: " + fileName);
File output = new File(fileName);
pdfDoc.save(output);
pdfDoc.close();
result.put("status" , STATUS_SUCCESS);
result.put("file", fileName);
result.put("date", currentDate);
} catch (Exception e) {
result.put("status", STATUS_ERROR);
result.put("file", fileName);
result.put("date", currentDate);
result.put("message", e.getMessage());
e.printStackTrace();
}
return result;
}

How to setup user policy for the minio bucket using s3Client?

We use Minio as our backend service but we communicate with it through
software.amazon.awssdk.services.s3.S3Client
I see that this class contains method putBucketPolicy
but I don't see any method which allows assigning a policy to a user. Is there any way to assign a user policy using S3Client?
Edited Answer:
Your updated question helped me determine what you were looking for.
You need to create a policy and assign it to a role. You can then assign that role to your user. The AWS SDK for Java 2.x provides support for all of these actions with IAM.
Here's what we can do:
1- Creating a policy
To create a new policy, provide the policy’s name and a JSON-formatted policy document in a CreatePolicyRequest to the IamClient’s createPolicy method.
Imports
import software.amazon.awssdk.core.waiters.WaiterResponse;
import software.amazon.awssdk.services.iam.model.CreatePolicyRequest;
import software.amazon.awssdk.services.iam.model.CreatePolicyResponse;
import software.amazon.awssdk.services.iam.model.GetPolicyRequest;
import software.amazon.awssdk.services.iam.model.GetPolicyResponse;
import software.amazon.awssdk.services.iam.model.IamException;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.iam.IamClient;
import software.amazon.awssdk.services.iam.waiters.IamWaiter;
Code
/**
 * Creates an IAM policy from the PolicyDocument JSON and blocks until it is visible.
 *
 * @return the new policy's ARN, or "" if creation failed (the process exits on IamException)
 */
public static String createIAMPolicy(IamClient iam, String policyName) {
    try {
        IamWaiter policyWaiter = iam.waiter();
        CreatePolicyResponse created = iam.createPolicy(CreatePolicyRequest.builder()
                .policyName(policyName)
                .policyDocument(PolicyDocument)
                .build());
        // Block until GetPolicy succeeds for the freshly created ARN.
        GetPolicyRequest lookup = GetPolicyRequest.builder()
                .policyArn(created.policy().arn())
                .build();
        WaiterResponse<GetPolicyResponse> waitUntilPolicyExists =
                policyWaiter.waitUntilPolicyExists(lookup);
        waitUntilPolicyExists.matched().response().ifPresent(System.out::println);
        return created.policy().arn();
    } catch (IamException e) {
        System.err.println(e.awsErrorDetails().errorMessage());
        System.exit(1);
    }
    return "" ;
}
You can check out CreatePolicy.java for complete example.
2- Attach a role policy
You can attach a policy to an IAM role by calling the IamClient’s attachRolePolicy method.
Imports
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.iam.IamClient;
import software.amazon.awssdk.services.iam.model.IamException;
import software.amazon.awssdk.services.iam.model.AttachRolePolicyRequest;
import software.amazon.awssdk.services.iam.model.AttachedPolicy;
import software.amazon.awssdk.services.iam.model.ListAttachedRolePoliciesRequest;
import software.amazon.awssdk.services.iam.model.ListAttachedRolePoliciesResponse;
import java.util.List;
Code
/**
 * Attaches the given managed policy to an IAM role, skipping the attach call when
 * the policy is already present on the role.
 */
public static void attachIAMRolePolicy(IamClient iam, String roleName, String policyArn) {
    try {
        ListAttachedRolePoliciesResponse listing = iam.listAttachedRolePolicies(
                ListAttachedRolePoliciesRequest.builder()
                        .roleName(roleName)
                        .build());
        // Bail out early if the policy is already attached.
        for (AttachedPolicy attached : listing.attachedPolicies()) {
            if (attached.policyArn().compareTo(policyArn) == 0) {
                System.out.println(roleName +
                        " policy is already attached to this role.");
                return;
            }
        }
        AttachRolePolicyRequest attachRequest = AttachRolePolicyRequest.builder()
                .roleName(roleName)
                .policyArn(policyArn)
                .build();
        iam.attachRolePolicy(attachRequest);
        System.out.println("Successfully attached policy " + policyArn +
                " to role " + roleName);
    } catch (IamException e) {
        System.err.println(e.awsErrorDetails().errorMessage());
        System.exit(1);
    }
    System.out.println("Done");
}
You can check out AttachRolePolicy.java for complete example.
Bonus Content
Scenario for create a user and assume a role
The following code example shows how to:
Create a user who has no permissions.
Create a role that grants permission to list Amazon S3 buckets for the account.
Add a policy to let the user assume the role.
Assume the role and list Amazon S3 buckets using temporary credentials.
Delete the policy, role, and user.
/*
To run this Java V2 code example, set up your development environment, including your credentials.
For information, see this documentation topic:
https://docs.aws.amazon.com/sdk-for-java/latest/developer-guide/get-started.html
This example performs these operations:
1. Creates a user that has no permissions.
2. Creates a role and policy that grants Amazon S3 permissions.
3. Creates a role.
4. Grants the user permissions.
5. Gets temporary credentials by assuming the role. Creates an Amazon S3 Service client object with the temporary credentials.
6. Deletes the resources.
*/
/**
 * End-to-end AWS IAM walkthrough: creates a user, a policy granting s3:* on all
 * resources, and a role; attaches the policy to the role; assumes the role via STS
 * to list an S3 bucket with the temporary credentials; then deletes everything.
 * Helper methods call System.exit(1) on IamException, so failures abort the process.
 */
public class IAMScenario {
// 80-character separator used between console sections.
public static final String DASHES = new String(new char[80]).replace("\0", "-");
// Inline identity policy JSON: allows every s3:* action on every resource.
public static final String PolicyDocument =
"{" +
" \"Version\": \"2012-10-17\"," +
" \"Statement\": [" +
" {" +
" \"Effect\": \"Allow\"," +
" \"Action\": [" +
" \"s3:*\"" +
" ]," +
" \"Resource\": \"*\"" +
" }" +
" ]" +
"}";
/**
 * Runs the scenario. Expects exactly six arguments (see usage text); exits with
 * status 1 when the argument count is wrong or any AWS call fails.
 */
public static void main(String[] args) throws Exception {
final String usage = "\n" +
"Usage:\n" +
" <username> <policyName> <roleName> <roleSessionName> <fileLocation> <bucketName> \n\n" +
"Where:\n" +
" username - The name of the IAM user to create. \n\n" +
" policyName - The name of the policy to create. \n\n" +
" roleName - The name of the role to create. \n\n" +
" roleSessionName - The name of the session required for the assumeRole operation. \n\n" +
" fileLocation - The file location to the JSON required to create the role (see Readme). \n\n" +
" bucketName - The name of the Amazon S3 bucket from which objects are read. \n\n" ;
if (args.length != 6) {
System.out.println(usage);
System.exit(1);
}
String userName = args[0];
String policyName = args[1];
String roleName = args[2];
String roleSessionName = args[3];
String fileLocation = args[4];
String bucketName = args[5];
// IAM is a global service, hence the AWS_GLOBAL pseudo-region.
Region region = Region.AWS_GLOBAL;
IamClient iam = IamClient.builder()
.region(region)
.credentialsProvider(ProfileCredentialsProvider.create())
.build();
System.out.println(DASHES);
System.out.println("Welcome to the AWS IAM example scenario.");
System.out.println(DASHES);
System.out.println(DASHES);
System.out.println(" 1. Create the IAM user.");
Boolean createUser = createIAMUser(iam, userName);
System.out.println(DASHES);
if (createUser) {
System.out.println(userName + " was successfully created.");
System.out.println(DASHES);
System.out.println("2. Creates a policy.");
String polArn = createIAMPolicy(iam, policyName);
System.out.println("The policy " + polArn + " was successfully created.");
System.out.println(DASHES);
System.out.println(DASHES);
System.out.println("3. Creates a role.");
String roleArn = createIAMRole(iam, roleName, fileLocation);
System.out.println(roleArn + " was successfully created.");
System.out.println(DASHES);
System.out.println(DASHES);
System.out.println("4. Grants the user permissions.");
attachIAMRolePolicy(iam, roleName, polArn);
System.out.println(DASHES);
System.out.println(DASHES);
// Fixed sleep to let IAM changes propagate before assuming the role.
System.out.println("*** Wait for 1 MIN so the resource is available");
TimeUnit.MINUTES.sleep(1);
System.out.println("5. Gets temporary credentials by assuming the role.");
System.out.println("Perform an Amazon S3 Service operation using the temporary credentials.");
assumeGivenRole(roleArn, roleSessionName, bucketName);
System.out.println(DASHES);
System.out.println(DASHES);
System.out.println("6 Getting ready to delete the AWS resources");
deleteRole(iam, roleName, polArn);
deleteIAMUser(iam, userName);
System.out.println(DASHES);
System.out.println(DASHES);
System.out.println("This IAM Scenario has successfully completed");
System.out.println(DASHES);
} else {
System.out.println(userName +" was not successfully created.");
}
}
/**
 * Creates an IAM user and waits until it exists.
 *
 * @return true on success; false is unreachable in practice because the catch
 *         block calls System.exit(1)
 */
public static Boolean createIAMUser(IamClient iam, String username ) {
try {
// Create an IamWaiter object
IamWaiter iamWaiter = iam.waiter();
CreateUserRequest request = CreateUserRequest.builder()
.userName(username)
.build();
// Wait until the user is created.
CreateUserResponse response = iam.createUser(request);
GetUserRequest userRequest = GetUserRequest.builder()
.userName(response.user().userName())
.build();
WaiterResponse<GetUserResponse> waitUntilUserExists = iamWaiter.waitUntilUserExists(userRequest);
waitUntilUserExists.matched().response().ifPresent(System.out::println);
return true;
} catch (IamException e) {
System.err.println(e.awsErrorDetails().errorMessage());
System.exit(1);
}
return false;
}
/**
 * Creates an IAM role whose trust (assume-role) policy is read from a JSON file.
 *
 * @return the new role's ARN, or "" (unreachable: the catch block exits)
 */
public static String createIAMRole(IamClient iam, String rolename, String fileLocation ) throws Exception {
try {
JSONObject jsonObject = (JSONObject) readJsonSimpleDemo(fileLocation);
CreateRoleRequest request = CreateRoleRequest.builder()
.roleName(rolename)
.assumeRolePolicyDocument(jsonObject.toJSONString())
.description("Created using the AWS SDK for Java")
.build();
CreateRoleResponse response = iam.createRole(request);
System.out.println("The ARN of the role is "+response.role().arn());
return response.role().arn();
} catch (IamException e) {
System.err.println(e.awsErrorDetails().errorMessage());
System.exit(1);
}
return "";
}
/**
 * Creates the managed policy from PolicyDocument and waits until it is visible.
 *
 * @return the new policy's ARN, or "" (unreachable: the catch block exits)
 */
public static String createIAMPolicy(IamClient iam, String policyName ) {
try {
// Create an IamWaiter object.
IamWaiter iamWaiter = iam.waiter();
CreatePolicyRequest request = CreatePolicyRequest.builder()
.policyName(policyName)
.policyDocument(PolicyDocument).build();
CreatePolicyResponse response = iam.createPolicy(request);
// Wait until the policy is created.
GetPolicyRequest polRequest = GetPolicyRequest.builder()
.policyArn(response.policy().arn())
.build();
WaiterResponse<GetPolicyResponse> waitUntilPolicyExists = iamWaiter.waitUntilPolicyExists(polRequest);
waitUntilPolicyExists.matched().response().ifPresent(System.out::println);
return response.policy().arn();
} catch (IamException e) {
System.err.println(e.awsErrorDetails().errorMessage());
System.exit(1);
}
return "" ;
}
/** Attaches the policy to the role unless it is already attached. */
public static void attachIAMRolePolicy(IamClient iam, String roleName, String policyArn ) {
try {
ListAttachedRolePoliciesRequest request = ListAttachedRolePoliciesRequest.builder()
.roleName(roleName)
.build();
ListAttachedRolePoliciesResponse response = iam.listAttachedRolePolicies(request);
List<AttachedPolicy> attachedPolicies = response.attachedPolicies();
String polArn;
for (AttachedPolicy policy: attachedPolicies) {
polArn = policy.policyArn();
if (polArn.compareTo(policyArn)==0) {
System.out.println(roleName + " policy is already attached to this role.");
return;
}
}
AttachRolePolicyRequest attachRequest = AttachRolePolicyRequest.builder()
.roleName(roleName)
.policyArn(policyArn)
.build();
iam.attachRolePolicy(attachRequest);
System.out.println("Successfully attached policy " + policyArn + " to role " + roleName);
} catch (IamException e) {
System.err.println(e.awsErrorDetails().errorMessage());
System.exit(1);
}
}
// Invoke an Amazon S3 operation using the Assumed Role.
/**
 * Assumes the given role through STS, then lists the bucket's objects with the
 * returned temporary session credentials.
 */
public static void assumeGivenRole(String roleArn, String roleSessionName, String bucketName) {
StsClient stsClient = StsClient.builder()
.region(Region.US_EAST_1)
.build();
try {
AssumeRoleRequest roleRequest = AssumeRoleRequest.builder()
.roleArn(roleArn)
.roleSessionName(roleSessionName)
.build();
AssumeRoleResponse roleResponse = stsClient.assumeRole(roleRequest);
Credentials myCreds = roleResponse.credentials();
String key = myCreds.accessKeyId();
String secKey = myCreds.secretAccessKey();
String secToken = myCreds.sessionToken();
// List all objects in an Amazon S3 bucket using the temp creds.
Region region = Region.US_EAST_1;
S3Client s3 = S3Client.builder()
.credentialsProvider(StaticCredentialsProvider.create(AwsSessionCredentials.create(key, secKey, secToken)))
.region(region)
.build();
System.out.println("Created a S3Client using temp credentials.");
System.out.println("Listing objects in "+bucketName);
ListObjectsRequest listObjects = ListObjectsRequest.builder()
.bucket(bucketName)
.build();
ListObjectsResponse res = s3.listObjects(listObjects);
List<S3Object> objects = res.contents();
for (S3Object myValue : objects) {
System.out.println("The name of the key is " + myValue.key());
System.out.println("The owner is " + myValue.owner());
}
} catch (StsException e) {
System.err.println(e.getMessage());
System.exit(1);
}
}
/** Detaches and deletes the policy, then deletes the role (IAM requires this order). */
public static void deleteRole(IamClient iam, String roleName, String polArn) {
try {
// First the policy needs to be detached.
DetachRolePolicyRequest rolePolicyRequest = DetachRolePolicyRequest.builder()
.policyArn(polArn)
.roleName(roleName)
.build();
iam.detachRolePolicy(rolePolicyRequest);
// Delete the policy.
DeletePolicyRequest request = DeletePolicyRequest.builder()
.policyArn(polArn)
.build();
iam.deletePolicy(request);
System.out.println("*** Successfully deleted "+polArn);
// Delete the role.
DeleteRoleRequest roleRequest = DeleteRoleRequest.builder()
.roleName(roleName)
.build();
iam.deleteRole(roleRequest);
System.out.println("*** Successfully deleted " +roleName);
} catch (IamException e) {
System.err.println(e.awsErrorDetails().errorMessage());
System.exit(1);
}
}
/** Deletes the IAM user created at the start of the scenario. */
public static void deleteIAMUser(IamClient iam, String userName) {
try {
DeleteUserRequest request = DeleteUserRequest.builder()
.userName(userName)
.build();
iam.deleteUser(request);
System.out.println("*** Successfully deleted " + userName);
} catch (IamException e) {
System.err.println(e.awsErrorDetails().errorMessage());
System.exit(1);
}
}
/**
 * Parses a JSON file with json-simple and returns the parse result.
 * NOTE(review): the FileReader is never closed — wrap it in try-with-resources.
 */
public static Object readJsonSimpleDemo(String filename) throws Exception {
FileReader reader = new FileReader(filename);
JSONParser jsonParser = new JSONParser();
return jsonParser.parse(reader);
}
}
Original Answer:
PutBucketPolicy
If you don't have PutBucketPolicy permissions, Amazon S3 returns a 403 Access Denied error. If you have the correct permissions, but you're not using an identity that belongs to the bucket owner's account, Amazon S3 returns a 405 Method Not Allowed error.
You can check out for more from AWS API Reference: PutBucketPolicy

Upload document on google drive in a folder

I want to upload my document on google drive but in a folder. can you please suggest how i insert this into the folder. I have uploaded but this is not in folder. Code is -
#RequestMapping(value = "/uploadDDFile", method = RequestMethod.POST)
public ModelAndView uploadDDFile(#RequestParam(value = "ddid", required = true) Integer ddid,
#RequestParam(value = "catageryId", required = true) Integer catageryId,
#RequestParam(value = "document", required = true) GMultipartFile document[], HttpServletRequest request) {
System.out.println("-------------------------");
String name = "";
DdeDriveDocuments ddeDriveDocuments = new DdeDriveDocuments();
if (ServletFileUpload.isMultipartContent(request) && document != null) {
for (GMultipartFile gdocument : document) {
try {
boolean user = true;
List<DdeDriveDocuments> dds = ddeDriveDocumentsService.fatchData(ddid, catageryId);
for (DdeDriveDocuments file : dds) {
System.out.println(file.getDocument_name());
if (file.getDocument_name().equals(gdocument.getOriginalFilename())) {
user = false;
}
}
if (user == true) {
Client client = sessionService.getClient();
System.out.println(gdocument.getOriginalFilename());
ddeDriveDocuments
.setDocument_name((gdocument.getName() != null ? gdocument.getOriginalFilename() : ""));
ddeDriveDocuments.setDocument_uploadby(client.getEmail());
ddeDriveDocuments.setDocument_created(new Date());
ddeDriveDocuments.setCatagery_id(catageryId);
ddeDriveDocuments.setDd_id(ddid);
ddeDriveDocuments.setDd_uuid(GeneralUtil.getUUID());
ddeDriveDocuments.setClientID(client.getClientID());
Lawyer googleAuthToken = lawyerService
.getAuthorisedUserToken(Configurator.getInstance().getDriveAccountEmail());
if (googleAuthToken != null) {
// upload file in drive
if (ServletFileUpload.isMultipartContent(request) && document != null) {
// It's value either we need to get from form.
String description = "Testing";
File file = DriveService.uploadDocumentToDrive(googleAuthToken, gdocument,
ddeDriveDocuments.getDocument_name(), description);
File thumFile = DriveService.getFileById(googleAuthToken, file.getId());
System.out.println("thumFile ====" + thumFile);
System.out.println("thab url" + thumFile.getIconLink());
if (file != null) {
ddeDriveDocuments.setDocument_drive_id(file.getId());
ddeDriveDocuments.setImageurl(thumFile.getIconLink());
ddeDriveDocuments = ddeDriveDocumentsService.create(ddeDriveDocuments);
}
}
} else {
System.out.println("Autorised token not available for configured drive account.");
}
} else {
System.out.println("wroung Input");
System.out.println("wroung Input");
name = name.concat(gdocument.getOriginalFilename() + " , ");
System.out.println("This is ::::::::::::: " + name);
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
if(name !=""){
sessionService.setUnupload_files_name(name);
}
return new ModelAndView("redirect:/al/client/ddeclientportal/" + ddid + "/" + catageryId);
}
/**
 * Uploads a file to Google Drive inside a newly created "cloudbox" folder.
 *
 * @param googleAuthToken token holder used to build Drive credentials
 * @param file            the multipart file to upload
 * @param fileName        desired name; the original file's extension is appended if missing
 * @param description     description stored on the Drive file
 * @return the uploaded Drive file, or null if any step failed
 */
public static File uploadDocumentToDrive(Lawyer googleAuthToken,
        GMultipartFile file, String fileName, String description) {
    File driveFile = null;
    try {
        InputStream fileStream = file.getInputStream();
        String mimeType = DocumentListEntry.MediaType.fromFileName(
                file.getOriginalFilename()).getMimeType();
        GoogleCredential googleCrednetial = getGoogleCredential(googleAuthToken);
        Drive drive = buildService(googleCrednetial);
        String file_name = fileName.contains(FilenameUtils.getExtension(file.getOriginalFilename()))
                ? fileName
                : fileName + "." + FilenameUtils.getExtension(file.getOriginalFilename());
        // Create the target folder first.
        File folderMeta = new File();
        folderMeta.setTitle("cloudbox");
        folderMeta.setMimeType("application/vnd.google-apps.folder");
        File folder = drive.files().insert(folderMeta).execute();
        File body = new File();
        body.setTitle(file_name);
        body.setDescription(description);
        body.setMimeType(mimeType);
        // FIX: parent the document to the folder just created; the original built the
        // folder but then inserted the file at the Drive root, leaving the folder empty.
        body.setParents(java.util.Arrays.asList(new ParentReference().setId(folder.getId())));
        try {
            driveFile = drive.files()
                    .insert(body, new InputStreamContent(mimeType, fileStream))
                    .execute();
        } finally {
            // FIX: the original never closed the multipart input stream.
            fileStream.close();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return driveFile;
}
Please help — I want to insert my document into a folder.
By Using
// Create the "cloudbox" folder on Drive first.
File body1 = new File();
body1.setTitle("cloudbox");
body1.setMimeType("application/vnd.google-apps.folder");
driveFile = drive.files().insert(body1).execute();
// Build the document's metadata.
File body = new File();
body.setTitle(file_name);
body.setDescription(description);
body.setMimeType(mimeType);
// Key line: point the document's parent at the folder so it lands inside it
// instead of at the Drive root.
body.setParents(Arrays.asList(new ParentReference().setId(driveFile.getId())));
driveFile = drive.files()
.insert(body, new InputStreamContent(mimeType, fileStream))
.execute();
Now can you please suggest how I can generate a subfolder.

Java FTP using Apache commons throws "IOException caught while copying"

I have made a JavaFX application which includes uploading large files (> 1GB) to a server. Every time I get the same error in the same place. Any suggestions on what I am doing wrong here?
// Connect and log in, then switch to passive mode so data connections are
// opened from the client side (friendlier to NAT/firewalls).
ftpclient.connect(server, port);
ftpclient.login(ftpuser, ftppass);
ftpclient.enterLocalPassiveMode();
ftpclient.setKeepAlive(true);
// NOTE(review): per commons-net, this timeout is in seconds — 3000 means 50
// minutes between control-channel keep-alive NOOPs; confirm that is intended.
ftpclient.setControlKeepAliveTimeout(3000);
Task<Void> copyMnt = new Task<Void>() {
#Override
protected Void call(){
try {
// NOTE(review): Thread.run() executes FTPHandler synchronously on this Task's
// thread — no new thread is ever started. Either call run() on the handler
// directly or use start() if a separate thread is actually wanted.
new Thread(new FTPHandler(ftpclient, source , dest)).run();
} catch (IOException ex) {
Logger.getLogger(MyAppController.class.getName()).log(Level.SEVERE, null, ex);
}
return null;
}
};
// The JavaFX Task runs on its own thread; the FTP transfer happens inside call().
new Thread(copyMnt).start();
Now on the FTPHandler Class
// The constructor will set the ftpclient, source and destinations.
#Override
public void run() {
try {
uploadDirectory(this.getClient(), this.getDest(), this.getSrc(), "");
} catch (IOException ex) {
Logger.getLogger(FTPHandler.class.getName()).log(Level.SEVERE, null, ex);
}
}
/**
 * Recursively uploads a local directory tree to the FTP server, creating remote
 * directories as it descends and logging the outcome of each transfer.
 *
 * @param ftpClient       connected, authenticated FTP client
 * @param remoteDirPath   absolute remote root to upload into
 * @param localParentDir  local directory whose contents are uploaded
 * @param remoteParentDir remote path relative to remoteDirPath ("" at the top level)
 */
public static void uploadDirectory(FTPClient ftpClient,
        String remoteDirPath, String localParentDir, String remoteParentDir)
        throws IOException {
    File[] entries = new File(localParentDir).listFiles();
    if (entries == null || entries.length == 0) {
        return;
    }
    for (File entry : entries) {
        // At the top level the remote path omits the (empty) parent segment.
        String remoteFilePath = remoteParentDir.equals("")
                ? remoteDirPath + "/" + entry.getName()
                : remoteDirPath + "/" + remoteParentDir + "/" + entry.getName();
        if (entry.isFile()) {
            String localFilePath = entry.getAbsolutePath();
            java.util.Date date = new java.util.Date();
            System.out.println(new Timestamp(date.getTime()) + " : Uploading :: " + localFilePath + " to " + remoteFilePath);
            if (uploadSingleFile(ftpClient, localFilePath, remoteFilePath)) {
                System.out.println("Success : "
                        + remoteFilePath);
            } else {
                System.out.println("Failed : "
                        + localFilePath);
            }
        } else {
            // Create the matching directory on the server, then recurse into it.
            if (ftpClient.makeDirectory(remoteFilePath)) {
                System.out.println("CREATED the directory: "
                        + remoteFilePath);
            } else {
                System.out.println("COULD NOT create the directory: "
                        + remoteFilePath);
            }
            String childParent = remoteParentDir.equals("")
                    ? entry.getName()
                    : remoteParentDir + "/" + entry.getName();
            uploadDirectory(ftpClient, remoteDirPath, entry.getAbsolutePath(), childParent);
        }
    }
}
Every time I upload the files (of different types such as .iso, .dat, etc.), the first few files are uploaded successfully. The upload sequence is: a few hundred smaller files (less than a few MB each), followed by 10 files larger than 1 GB. All of the smaller files and 2 of the 10 large files upload successfully, but when it starts uploading the third large file I get the following exception.
SEVERE: null
org.apache.commons.net.io.CopyStreamException: IOException caught while copying.
at org.apache.commons.net.io.Util.copyStream(Util.java:134)
at org.apache.commons.net.ftp.FTPClient._storeFile(FTPClient.java:653)
at org.apache.commons.net.ftp.FTPClient.__storeFile(FTPClient.java:624)
at org.apache.commons.net.ftp.FTPClient.storeFile(FTPClient.java:1976)
The CopyStreamException has a "cause" exception. Check that using the .getCause(), to see what went wrong.
See the Util.copyStream method:
// Excerpt of org.apache.commons.net.io.Util.copyStream (copy loop elided in
// the quoted source). Any IOException raised while pumping bytes is wrapped in
// a CopyStreamException that records the byte count and the original cause —
// hence the advice above to inspect getCause().
public static final long copyStream(InputStream source, OutputStream dest,
int bufferSize, long streamSize,
CopyStreamListener listener,
boolean flush)
throws CopyStreamException
{
int bytes;
long total = 0;
byte[] buffer = new byte[bufferSize >= 0 ? bufferSize : DEFAULT_COPY_BUFFER_SIZE];
try
{
while ((bytes = source.read(buffer)) != -1)
{
// ... copy loop elided in the quoted source ...
....
}
}
catch (IOException e)
{
// The underlying IOException is preserved as the cause.
throw new CopyStreamException("IOException caught while copying.",
total, e);
}
return total;
}
Somewhere in your uploadSingleFile function, do
// Suggested diagnostic wrapper around the storeFile call: print both the
// CopyStreamException itself and its underlying cause.
try
{
ftpClient.storeFile(...)
}
catch (Exception e)
{
e.printStackTrace();
// CopyStreamException carries the real I/O failure as its cause.
if (e.getCause() != null)
{
e.getCause().printStackTrace();
}
}
I do not know Java, so the code may not be 100% correct.
See also Getting full string stack trace including inner exception.

How to run an Objective-C program (gcc command) from Java

**
I am trying to run an Objective-C program from Java.
Every time I am getting this error:
Main error is java.io.IOException: Cannot run program "C:\store\1436014813635\panna.exe": CreateProcess error=2, The system cannot find the file specified
help me..
thanks in advance..
**
#Override
public Map<String, Object> compileAndExecute(Code code, String folderName) {
Map<String, Object> finalResult = new HashMap<String, Object>();
try {
String folderPath = "C:\\store\\" + folderName + "\\";
String sourceFilePath = "C:\\store\\" + folderName + "\\panna.m";
String outputFilePath = "C:\\store\\" + folderName + "\\panna.exe";
File fileSaveDir = new File(folderPath);
if (!fileSaveDir.exists()) {
fileSaveDir.mkdirs();
}
File file = new File(sourceFilePath);
log.info("code :"+code.getCode());
ByteBuffer buf = ByteBuffer.wrap(code.getCode().getBytes());
#SuppressWarnings("resource")
FileChannel channel = new FileOutputStream(file, true)
.getChannel();
channel.write(buf);
channel.close();
String command = "gcc -o " + outputFilePath +" "
+sourceFilePath+" -I C:/GNUstep/GNUstep/System/Library/Headers -L C:/GNUstep/GNUstep/System/Library/Libraries -std=c99 -lobjc -lgnustep-base -fconstant-string-class=NSConstantString";
System.out.print("Command :"+command);
finalResult = PannaUtils.runProcessObjectiveC(command, folderName);
log.info("finalResult "+finalResult.get("success")+folderName);
log.info("class Name:" + outputFilePath);
finalResult = PannaUtils.runProcessObjectiveC(outputFilePath, folderName);
log.info("finalResult "+finalResult.get("success")+folderName);
//PannaUtils.getTestCaseResults(folderName, finalResult, code, outputFilePath);
} catch (Exception e) {
e.printStackTrace();
}
return finalResult;
}
//runProcessObjectiveC from here.
/**
 * Runs an external command, waits for it to finish, and returns a map whose
 * "success" key holds the process's captured stderr output.
 *
 * @param command    command line handed to Runtime.exec
 * @param folderName submission folder (used only for logging context)
 * @return map with key "success" -> captured stderr (empty map on failure)
 */
public static Map<String, Object> runProcessObjectiveC(String command,
        String folderName) {
    _log.info(command);
    Map<String, Object> afterExecutionOutput = new HashMap<String, Object>();
    try {
        // NOTE(review): Runtime.exec(String) tokenizes on whitespace and is
        // injection-prone; prefer ProcessBuilder with an explicit argument list.
        Process pro = Runtime.getRuntime().exec(command);
        // Register with the watchdog so runaway processes are killed.
        ProcessTimeoutThread.addProcess(pro);
        long afterExecTime = System.currentTimeMillis();
        _log.info("" + afterExecTime);
        // Drain stderr before waitFor() so the child cannot block on a full pipe.
        String error = printLines(command + " error:", pro.getErrorStream());
        pro.waitFor();
        _log.info("error sop" + error);
        _log.info("got exit vlue :" + pro.exitValue());
        // BUG(original): both the exitValue() > 0 branch and the else branch
        // stored the same value, so the branching was dead code; collapsed
        // here. The caller likely intended stdout on success — confirm before
        // changing further.
        afterExecutionOutput.put("success", error);
    } catch (InterruptedException e) {
        // Restore the interrupt flag instead of swallowing it.
        Thread.currentThread().interrupt();
        _log.info("Main error is " + e.toString());
    } catch (Exception e) {
        _log.info("Main error is " + e.toString());
    }
    return afterExecutionOutput;
}

Categories