Retrieve Image from Encrypted S3 Bucket using URL in Java

private static final String bucketName = "imagebucket";
private static final BasicAWSCredentials credentials = new BasicAWSCredentials("secret_id", "secret_pass");
private static final String destinationFolder = "images/";
private static AmazonS3 s3 = AmazonS3Client
        .builder()
        .withRegion("us-east-2")
        .withCredentials(new AWSStaticCredentialsProvider(credentials))
        .build();

@RequestMapping("/image")
public String uploadToBucketAndReturnUrl(byte[] fileByteArray, String extension)
{
    // generate a UUID-based object key
    UUID uuid = UUID.randomUUID();
    String randomUUIDString = uuid.toString();
    String imageHash = randomUUIDString + "." + extension;
    String filePath = destinationFolder + imageHash;
    try
    {
        // write the bytes to a temp file and upload it
        File tf = File.createTempFile("image", "." + extension);
        BufferedImage image = ImageIO.read(new ByteArrayInputStream(fileByteArray));
        ImageIO.write(image, extension, tf);
        s3.putObject(new PutObjectRequest(bucketName, filePath, tf));
    }
    catch (IOException e)
    {
        e.printStackTrace();
    }
    // return the URL
    String imageUrl = "https://s3.us-east-2.amazonaws.com/" + bucketName + "/" + filePath;
    return imageUrl;
}
I have a function that uploads an image to my encrypted bucket and returns a URL. Now I need a function that takes in the URL and retrieves the image from the bucket, and I'm unsure how to proceed.
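No answer is recorded here, but one way to approach it, as a minimal sketch rather than a definitive implementation: with server-side encryption, a plain GetObject call decrypts transparently (for SSE-KMS the caller also needs kms:Decrypt on the key), so it is enough to derive the object key from the URL the upload method returns and fetch it with the same client. The downloadFromUrl name below is mine:

public byte[] downloadFromUrl(String imageUrl) throws IOException
{
    // the upload method builds URLs as https://s3.us-east-2.amazonaws.com/<bucket>/<key>
    String prefix = "https://s3.us-east-2.amazonaws.com/" + bucketName + "/";
    String key = imageUrl.substring(prefix.length()); // e.g. "images/<uuid>.png"
    S3Object object = s3.getObject(new GetObjectRequest(bucketName, key));
    try (S3ObjectInputStream in = object.getObjectContent())
    {
        return com.amazonaws.util.IOUtils.toByteArray(in);
    }
}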

Related

Getting java.net.ConnectException: Connection refused (Connection refused) while creating object using fake-gcs-server image

I am getting a java.net.ConnectException: Connection refused error while trying to write a JUnit test that creates an object in GCS using the fake-gcs-server image. Please find the code below. The bucket name is test; assume it has already been created.
@TempDir
private static File directory;

private static final GenericContainer<?> GCS_CONTAINER = new GenericContainer<>(DockerImageName.parse("fsouza/fake-gcs-server:1.33.1"))
        .withExposedPorts(4443)
        .withCreateContainerCmdModifier(cmd -> cmd.withEntrypoint(
                "/bin/fake-gcs-server",
                "-scheme", "http"
        ));

String fakeGcsExternalUrl = "http://" + GCS_CONTAINER.getContainerIpAddress() + ":" + GCS_CONTAINER.getFirstMappedPort();

private static final Storage storage = new Storage.Builder(getTransport(), GsonFactory.getDefaultInstance(), null)
        .setRootUrl(fakeGcsExternalUrl)
        .setApplicationName("test")
        .build();

void test() throws Exception {
    final File localFile1 = new File(directory.getAbsolutePath() + File.separator + "testFile.txt");
    localFile1.createNewFile();
    try (final FileWriter fileWriter = new FileWriter(localFile1.getPath())) {
        fileWriter.write("Test gs file content");
    }
    final InputStream stream = FileUtils.openInputStream(localFile1);
    Path path = localFile1.toPath();
    String contentType = Files.probeContentType(path);
    uploadFile(storage, "test", "/sampleFiles/newFile.txt", contentType, stream, null);
}

public String uploadFile(final Storage storage, final String bucketName, final String filePath,
                         final String contentType, final InputStream inputStream, final Map<String, String> metadata)
        throws IOException {
    final InputStreamContent contentStream = new InputStreamContent(contentType, inputStream);
    final StorageObject objectMetadata = new StorageObject().setName(filePath);
    objectMetadata.setMetadata(GoogleLabels.manageLabels(metadata));
    final Storage.Objects.Insert insertRequest = storage.objects().insert(bucketName, objectMetadata, contentStream);
    return insertRequest.execute().getName();
}
This problem might be caused by not setting fake-gcs-server's external URL property to the container's address. Make sure you follow the guide in the official repo https://github.com/fsouza/fake-gcs-server/blob/cf3fcb083e19553636419818e29f84825bd1e13c/examples/java/README.md and, in particular, that you execute the following code:
private static void updateExternalUrlWithContainerUrl(String fakeGcsExternalUrl) throws Exception {
    String modifyExternalUrlRequestUri = fakeGcsExternalUrl + "/_internal/config";
    String updateExternalUrlJson = "{"
            + "\"externalUrl\": \"" + fakeGcsExternalUrl + "\""
            + "}";
    HttpRequest req = HttpRequest.newBuilder()
            .uri(URI.create(modifyExternalUrlRequestUri))
            .header("Content-Type", "application/json")
            .PUT(BodyPublishers.ofString(updateExternalUrlJson))
            .build();
    HttpResponse<Void> response = HttpClient.newBuilder().build()
            .send(req, BodyHandlers.discarding());
    if (response.statusCode() != 200) {
        throw new RuntimeException(
                "error updating fake-gcs-server with external url, response status code " + response.statusCode() + " != 200");
    }
}
before using the container.
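As a rough sketch of how this fits into the test lifecycle (assuming JUnit 5; the method name is illustrative), the container has to be started and reconfigured before any Storage client is built against it:

@BeforeAll
static void setUpFakeGcs() throws Exception {
    GCS_CONTAINER.start();
    String fakeGcsExternalUrl = "http://" + GCS_CONTAINER.getHost() + ":" + GCS_CONTAINER.getFirstMappedPort();
    updateExternalUrlWithContainerUrl(fakeGcsExternalUrl);
}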

Error while uploading huge base64 file to S3 Bucket

I am trying to upload a huge video file to the S3 bucket.
I am getting the data from the client side in base64 format, which I send to the S3 client to upload to my bucket as shown below:
public class UploadFileService {
    private static final String BUCKET_NAME = "data";
    private static final Regions REGION = Regions.US_EAST_2;
    LoggerUtils loggerUtils = new LoggerUtils();

    public String uploadFile(String fileData, String fileName, String contentType, String extension) {
        try {
            loggerUtils.log("File Data", fileData);
            byte[] bI = org.apache.commons.codec.binary.Base64.decodeBase64((fileData.substring(fileData.indexOf(",") + 1)).getBytes());
            InputStream fis = new ByteArrayInputStream(bI);
            AmazonS3 s3 = new AmazonS3Client();
            Region usWest02 = Region.getRegion(REGION);
            s3.setRegion(usWest02);
            ObjectMetadata metadata = new ObjectMetadata();
            metadata.setContentLength(bI.length);
            metadata.setContentType("video/mp4");
            //metadata.setContentType(contentType + "/" + extension.substring(1));
            metadata.setCacheControl("public, max-age=0");
            s3.putObject(BUCKET_NAME, fileName, fis, metadata);
            s3.setObjectAcl(BUCKET_NAME, fileName, CannedAccessControlList.PublicRead);
            URL s3Url = s3.getUrl(BUCKET_NAME, fileName);
            return s3Url.toExternalForm();
        }
        catch (Exception exception) {
            loggerUtils.log(exception.toString());
            throw exception;
        }
    }

    public static void main(String[] args) {
        String fileName = "abc1.mp4";
        String fileData = "hkbk";
        new UploadFileService().uploadFile(fileData, fileName, null, null);
    }
}
But if the fileData is huge (the base64 of a 2 MB video), then I get the error below:
Error:(46, 27) java: constant string too long
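No answer is recorded here, but the error itself points at the cause: "constant string too long" is a javac limit, not an S3 one. A string literal is stored as a single constant-pool entry, which is capped at 65535 bytes, so pasting the base64 of a 2 MB video into main() cannot compile. A minimal sketch of a workaround is to load the payload at runtime instead (the file name below is hypothetical):

// Sketch: keep the base64 payload out of the source file entirely and
// read it at runtime, avoiding the 64 KB constant-pool limit on literals.
String fileData = new String(
        Files.readAllBytes(Paths.get("video-base64.txt")),
        StandardCharsets.UTF_8);
new UploadFileService().uploadFile(fileData, "abc1.mp4", null, null);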

S3 file upload issue on AWS - Spring Boot

I have written a piece of code to upload a file to Amazon S3. It works fine on my local system: I can upload a file and get the file URL as a response. But when I try to upload files from the AWS server, they are not uploaded, even though I get a 200 response, and Postman shows a rectangular box.
Can anyone help me to solve this issue? Any help will be appreciated. Thanks!
@Service
public class AmazonClient {
    private AmazonS3 amazonS3;

    @Value("${amazonProperties.accessKey}")
    public String accessKey;
    @Value("${amazonProperties.secretKey}")
    public String secretKey;
    @Value("${amazonProperties.bucketName}")
    public String bucketName;
    @Value("${amazonProperties.endpointUrl}")
    public String endpointUrl;
    @Value("${amazonProperties.region}")
    public String region;

    @PostConstruct
    private void initializeAmazon()
    {
        AWSCredentials credentials = new BasicAWSCredentials(accessKey, secretKey);
        this.amazonS3 = AmazonS3ClientBuilder
                .standard()
                .withCredentials(new AWSStaticCredentialsProvider(credentials))
                .withRegion(region)
                .build();
    }

    private File convertMultiPartToFile(MultipartFile file) throws IOException
    {
        File convFile = new File(file.getOriginalFilename());
        FileOutputStream fos = new FileOutputStream(convFile);
        fos.write(file.getBytes());
        fos.close();
        return convFile;
    }

    private String generateFileName(MultipartFile multiPart)
    {
        return new Date().getTime() + "-" + multiPart.getOriginalFilename().replace(" ", "_");
    }

    private void uploadFileTos3bucket(String fileName, File file)
    {
        amazonS3.putObject(new PutObjectRequest(bucketName, fileName, file)
                .withCannedAcl(CannedAccessControlList.PublicRead));
    }

    public String uploadFile(MultipartFile multipartFile) {
        String fileUrl = "";
        try
        {
            File file = convertMultiPartToFile(multipartFile);
            String fileName = generateFileName(multipartFile);
            fileUrl = endpointUrl + "/" + bucketName + "/" + fileName;
            uploadFileTos3bucket(fileName, file);
            //file.delete();
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
        return fileUrl;
    }

    public String deleteFileFromS3Bucket(String fileUrl)
    {
        String fileName = fileUrl.substring(fileUrl.lastIndexOf("/") + 1);
        // the bucket name must not carry a trailing slash
        amazonS3.deleteObject(new DeleteObjectRequest(bucketName, fileName));
        return "Successfully deleted";
    }
}
Please ensure that the provided AWS IAM credentials are working and that the bucket name and location are correct. If they are:
i. Make sure the bucket is accessible programmatically.
ii. Make sure the IAM role has enough privileges to interact with the S3 bucket (e.g. a full S3 access policy).
These are the most likely resolutions; a quick programmatic check is sketched below.
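As a minimal sketch of such a check (AWS SDK v1, assuming the amazonS3 client from the question), a HeadBucket call fails fast with a 403 when the credentials lack permission and a 404 when the bucket name or region is wrong:

// Sketch: verify credentials and bucket access, e.g. right after initializeAmazon().
try {
    amazonS3.headBucket(new HeadBucketRequest(bucketName));
} catch (AmazonServiceException e) {
    // 403 => the credentials lack permission on this bucket,
    // 404 => the bucket name (or region) is wrong
    System.err.println("S3 check failed: " + e.getStatusCode() + " " + e.getErrorCode());
}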

How do I make a WebDav call using resttemplate?

How do I make a WebDAV call using RestTemplate if I am trying to get a file from a WebDAV server?
I used to do that using HttpClient like this:
public byte[] getFileAsBytes( String location ) throws FileNotFoundException, IOException {
    GetMethod method = new GetMethod( baseUrl + "/" + location );
    client.executeMethod( method );
    if ( method.getStatusCode() == HttpStatus.SC_NOT_FOUND ) {
        throw new FileNotFoundException( "Got error " + method.getStatusCode() + " : " + method.getStatusText()
                + " retrieving file from webdav server at path " + location );
    } else if ( method.getStatusCode() != HttpStatus.SC_OK ) {
        throw new IOException( "Got error " + method.getStatusCode() + " : " + method.getStatusText()
                + " retrieving file from webdav server at path " + location );
    }
    return method.getResponseBody();
}
I was able to hit the WebDAV server using RestTemplate as follows:
/**
 * This method copies the file from webdav to the local system
 *
 * @param documentMetadata
 * @return
 */
@Override
public Document downloadFile( DocumentMetadata documentMetadata ) {
    Document document = new Document();
    String fileUrl = baseUrl + documentMetadata.getFilepath();
    ResponseEntity<byte[]> result = restTemplate.exchange( fileUrl, HttpMethod.GET,
            new HttpEntity<>( createHeaders( username, password ) ), byte[].class );
    // populate the document from the response body
    document.setName( documentMetadata.getFilename() );
    document.setFilePath( documentMetadata.getFilepath() );
    document.setFile( result.getBody() );
    return document;
}

private Void prepareDocument( ClientHttpResponse response, Document document, DocumentMetadata meta ) throws IOException {
    document.setName( meta.getFilename() );
    document.setFilePath( meta.getFilepath() );
    document.setFile( IOUtils.toByteArray( response.getBody() ) );
    return null;
}

public static HttpHeaders createHeaders( final String userName, final String password ) {
    log.debug( "SlateUtil.createHeaders" );
    return new HttpHeaders(){{
        String auth = userName + ":" + password;
        byte[] encodedAuth = Base64.encodeBase64(
                auth.getBytes( Charset.forName( "US-ASCII" ) ) );
        String authHeader = "Basic " + new String( encodedAuth );
        set( "Authorization", authHeader );
    }};
}
For other people's reference, if they ever come across this question, here is what my models look like.
public class Document {
    private String name;
    private byte[] file;
    private String filePath;
    private String contentType;
    // getters and setters
}

public class DocumentMetadata {
    private String id;
    private String filename;
    private String filepath;
    private String extension;
    private String createdBy;
    private Date createDate;
    private String size;
    private String documentType;
    private String displayName;
    // getters and setters
}

Java Google Cloud Storage upload media link null, but image uploads

I'm trying to upload an image to an existing bucket in my Google Cloud Storage.
The image file is uploaded successfully when I go and check, but the returned download URL is null.
CODE
private String uploadImage(File filePath, String blobName, File uploadCreds) throws FileNotFoundException, IOException {
    Storage storage = StorageOptions.newBuilder().setProjectId("myProjectId")
            .setCredentials(ServiceAccountCredentials.fromStream(new FileInputStream(uploadCreds)))
            .build()
            .getService();

    String bucketName = "myBucketName";
    Bucket bucket = storage.get(bucketName);
    BlobId blobId = BlobId.of(bucket.getName(), blobName);
    InputStream inputStream = new FileInputStream(filePath);
    BlobInfo blobInfo = BlobInfo.newBuilder(blobId).setContentType("image/jpeg").build();

    try (WriteChannel writer = storage.writer(blobInfo)) {
        byte[] buffer = new byte[1024];
        int limit;
        try {
            while ((limit = inputStream.read(buffer)) >= 0) {
                writer.write(ByteBuffer.wrap(buffer, 0, limit));
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        } finally {
            writer.close();
        }
        System.out.println("Image URL : " + blobInfo.getMediaLink());
        System.out.println("Blob URL : " + blobInfo.getSelfLink());
        return blobInfo.getMediaLink();
    }
}
filePath is the image file, blobName is a random image name, and uploadCreds is my credentials.json file.
Why are blobInfo.getMediaLink() and blobInfo.getSelfLink() returning null? What am I doing wrong?
Here is my code that works perfectly:
@RestController
@RequestMapping("/api")
public class CloudStorageHelper {
    Credentials credentials = GoogleCredentials.fromStream(
            new FileInputStream("C:\\Users\\sachinthah\\Downloads\\MCQ project -1f959c1fc3a4.json"));
    Storage storage = StorageOptions.newBuilder().setCredentials(credentials).build().getService();

    public CloudStorageHelper() throws IOException {
    }

    @SuppressWarnings("deprecation")
    @RequestMapping(method = RequestMethod.POST, value = "/imageUpload112")
    public String uploadFile(@RequestParam("fileseee") MultipartFile fileStream)
            throws IOException, ServletException {
        String bucketName = "mcqimages";
        checkFileExtension(fileStream.getOriginalFilename());
        String fileName = fileStream.getOriginalFilename();
        // upload via the (deprecated) create(BlobInfo, InputStream) overload
        // and make the object publicly readable
        Blob blob = storage.create(
                BlobInfo.newBuilder(bucketName, fileName)
                        .setAcl(new ArrayList<>(Arrays.asList(Acl.of(User.ofAllUsers(), Role.READER))))
                        .build(),
                fileStream.getInputStream());
        System.out.println(blob.getMediaLink());
        // sachintha added a comma after the link to identify the link that gets generated
        return blob.getMediaLink() + ",";
    }

    private void checkFileExtension(String fileName) throws ServletException {
        if (fileName != null && !fileName.isEmpty() && fileName.contains(".")) {
            String[] allowedExt = {".jpg", ".jpeg", ".png", ".gif"};
            for (String ext : allowedExt) {
                if (fileName.endsWith(ext)) {
                    return;
                }
            }
            throw new ServletException("file must be an image");
        }
    }
}
The answer was quite simple: I just got rid of the manual upload method and used the built-in create blob call.
private String uploadImage(File filePath, String blobName, File uploadCreds) throws FileNotFoundException, IOException {
    Storage storage = StorageOptions.newBuilder().setProjectId("projectId")
            .setCredentials(ServiceAccountCredentials.fromStream(new FileInputStream(uploadCreds)))
            .build()
            .getService();

    String bucketName = "bucketName";
    Bucket bucket = storage.get(bucketName);
    BlobId blobId = BlobId.of(bucket.getName(), blobName);
    InputStream inputStream = new FileInputStream(filePath);
    BlobInfo blobInfo = BlobInfo.newBuilder(blobId).setContentType("image/jpeg").build();

    // create() uploads the stream and returns a Blob whose media link is populated
    Blob blob = storage.create(blobInfo, inputStream);
    System.out.println("Image URL : " + blob.getMediaLink());
    return blob.getMediaLink();
}
In case you want to store it in a special folder, prepend the folder path and a "/" to the blob name. For example, if temp.jpg needs to be stored under date folders, get the date from a Date object, format it with a date formatter, and prepend it:
blobName = date + "/" + blobName;
This will group all images by date.
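A minimal sketch of that naming scheme (the formatter pattern and file name are just examples):

// Sketch: prefix the object name with a date "folder" so uploads group by day.
String date = new java.text.SimpleDateFormat("yyyy-MM-dd").format(new java.util.Date());
String blobName = date + "/" + "temp.jpg"; // e.g. "2024-01-31/temp.jpg"
BlobId blobId = BlobId.of(bucketName, blobName);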
