How to access a local path from a Java web application on a Linux server?

I've created a web application which creates folders based on a destination input (for example, C:\xxx\xxx).
When I run it locally (http://localhost:8080), it works perfectly: it finds the local Windows path and creates the folders.
But now I want to open this web app to a group of people, so I deployed Tomcat on an internal Unix server (http://ipnumber:portnumber).
The problem is that when a user fills the input with a local destination, the code cannot find the path or access the local computer's folder structure; it sees the Unix server's folder structure instead.
How can I achieve this? I use AngularJS for the frontend, calling a REST API with $http.post; the backend is Java.
package com.ama.ist.controller;

import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;

import com.ama.ist.model.CustomErrorType;
import com.ama.ist.model.Patch;
import com.ama.ist.service.PatchService;

@RestController
public class PatchController {

    @Autowired
    private PatchService patchService;

    @CrossOrigin(origins = "http://ipnumber:portnumber")
    @RequestMapping(value = "/mk", method = RequestMethod.POST)
    public ResponseEntity<?> createFolder(@RequestBody Patch patch) {
        System.out.println("patch destination => " + patch.getDestination());
        String isCreatedStatus = patchService.create(patch);
        System.out.println("isCreatedStatus: " + isCreatedStatus);
        if (!isCreatedStatus.equals("Success")) {
            System.out.println("create failed");
            return new ResponseEntity<Object>(new CustomErrorType("ER", isCreatedStatus), HttpStatus.NOT_FOUND);
        }
        System.out.println("create succeeded");
        return new ResponseEntity<Object>(new CustomErrorType("OK", isCreatedStatus), HttpStatus.CREATED);
    }

    @RequestMapping("/resource")
    public Map<String, Object> home() {
        Map<String, Object> model = new HashMap<String, Object>();
        model.put("id", UUID.randomUUID().toString());
        model.put("content", "Hello World");
        return model;
    }
}
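The Patch model isn't shown in the question; judging from the getters the controller and service call, it presumably looks roughly like this (a hypothetical reconstruction, not code from the question):

package com.ama.ist.model;

// Hypothetical reconstruction: only the getters the posted code actually
// calls are shown; setters are assumed to exist for the JSON binding.
public class Patch {
    private String destination; // base path typed into the destination input
    private String detail;
    private String svnPath;
    private String winName;
    private int winNum;
    private boolean checked;    // whether to import the new folder into SVN
    private User user;          // carries userName and password for SVN

    public String getDestination() { return destination; }
    public String getDetail() { return detail; }
    public String getSvnPath() { return svnPath; }
    public String getWinName() { return winName; }
    public int getWinNum() { return winNum; }
    public boolean isChecked() { return checked; }
    public User getUser() { return user; }
}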
This is the service:
package com.ama.ist.service;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.SimpleDateFormat;
import java.util.Date;

import org.springframework.stereotype.Service;
import org.tmatesoft.svn.core.SVNDepth;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNProperties;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.auth.BasicAuthenticationManager;
import org.tmatesoft.svn.core.auth.ISVNAuthenticationManager;
import org.tmatesoft.svn.core.wc.SVNCommitClient;
import org.tmatesoft.svn.core.wc.SVNWCUtil;

import com.ama.ist.model.Patch;

@Service
public class PatchService {

    public String create(Patch patch) {
        String constantPath = patch.getDestination();
        String currentPath = new File("").getAbsolutePath();
        System.out.println("current path is: " + currentPath);
        System.out.println("constantPath => " + constantPath);
        // if (!isValidPath(constantPath)) {
        //     return "invalid Path";
        // }
        String folderName = patch.getWinNum() + " - " + patch.getWinName();
        System.out.println(constantPath + folderName);
        File files = new File(constantPath + folderName);
        if (files.exists()) {
            return "The folder is already created in that path";
        }
        File files1 = new File(constantPath + folderName + "\\Patch");
        File files2 = new File(constantPath + folderName + "\\Backup");
        File files3 = new File(constantPath + folderName + "\\Backup\\UAT");
        File files4 = new File(constantPath + folderName + "\\Backup\\PROD");
        if (files.mkdirs()) {
            files1.mkdir();
            files2.mkdir();
            files3.mkdir();
            files4.mkdir();
            createReadme(constantPath + folderName, patch);
            if (patch.isChecked()) {
                System.out.println("patch.getDestination => " + patch.getDestination());
                System.out.println("patch.getDetail => " + patch.getDetail());
                System.out.println("patch.getSvnPath => " + patch.getSvnPath());
                System.out.println("patch.getWinName => " + patch.getWinName());
                System.out.println("patch.getWinNum => " + patch.getWinNum());
                System.out.println("patch.getUserName => " + patch.getUser().getUserName());
                System.out.println("patch.getPassword => " + patch.getUser().getPassword());
                importSvn(patch);
            }
            System.out.println("Multiple directories were created!");
            return "Success";
        } else {
            System.out.println("Failed to create multiple directories!");
            return "Unknown error";
        }
    }

    public static boolean isValidPath(String path) {
        System.out.println("path => " + path);
        File f = new File(path);
        if (f.isDirectory()) {
            System.out.println("valid path");
            return true;
        } else {
            System.out.println("invalid path");
            return false;
        }
    }

    public void createReadme(String path, Patch patch) {
        try {
            ClassLoader classLoader = getClass().getClassLoader();
            File file = new File(classLoader.getResource("Readme.txt").getFile());
            System.out.println(file.getAbsolutePath());
            FileReader reader = new FileReader(file);
            BufferedReader bufferedReader = new BufferedReader(reader);
            PrintWriter writer = new PrintWriter(path + "\\Readme.txt", "UTF-8");
            System.out.println(path + "\\Readme.txt");
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                line = line.replace("#Winnumber", Integer.toString(patch.getWinNum()));
                line = line.replace("#NameSurname", " ");
                line = line.replace("#Type", "Package");
                line = line.replace("#detail", patch.getDetail());
                SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy");
                String date = sdf.format(new Date());
                line = line.replace("#Date", date);
                line = line.replace("#Desc", patch.getWinName());
                writer.println(line);
                System.out.println(line);
            }
            reader.close();
            writer.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void importSvn(Patch patch) {
        String name = patch.getUser().getUserName();
        String password = patch.getUser().getPassword();
        String filename = patch.getWinNum() + " - " + patch.getWinName();
        String url = patch.getSvnPath() + "/" + filename;
        ISVNAuthenticationManager authManager = new BasicAuthenticationManager(name, password);
        SVNCommitClient commitClient = new SVNCommitClient(authManager, SVNWCUtil.createDefaultOptions(true));
        File f = new File(patch.getDestination() + filename);
        try {
            String logMessage = filename;
            commitClient.doImport(f,                 // file/directory to be imported
                    SVNURL.parseURIEncoded(url),     // location within SVN
                    logMessage,                      // commit message
                    new SVNProperties(),             // SVN properties
                    true,                            // use global ignores
                    false,                           // ignore unknown node types
                    SVNDepth.INFINITY);
        } catch (SVNException e) {
            e.printStackTrace();
        }
    }
}
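One thing to note before the frontend code: the service hardcodes Windows separators ("\\"), which won't build the intended nested folders once the code runs on a Unix server. A minimal sketch of the platform-neutral alternative, using the File(parent, child) constructor (the paths below are hypothetical):

import java.io.File;

public class SeparatorExample {
    public static void main(String[] args) {
        String base = "/tmp/patches/";            // hypothetical destination input
        String folderName = "123 - SamplePatch";  // hypothetical "<winNum> - <winName>"

        // new File(parent, child) inserts the separator that is correct for the
        // host OS, so the same code works on both Windows and Unix.
        File root = new File(base, folderName);
        File patch = new File(root, "Patch");
        File backupUat = new File(new File(root, "Backup"), "UAT");

        System.out.println(patch);     // /tmp/patches/123 - SamplePatch/Patch
        System.out.println(backupUat); // /tmp/patches/123 - SamplePatch/Backup/UAT
    }
}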
This is the AngularJS side:
$scope.Create = function() {
    $scope.obj = [];
    console.log("$scope.svnPath" + $scope.patch.svnPath);
    console.log("$scope.userName" + $scope.patch.user.userName);
    $http({
        method : "POST",
        url : "http://ipnumber:port/patchinit/mk",
        data : $scope.patch
    }).then(function mySuccess(response) {
        console.log("Success!! ");
        $scope.obj = response.data;
        $scope.errorMessage = response.data.errorMessage;
        $scope.errorCode = response.data.errorCode;
    }, function myError(response) {
        $scope.errorMessage = response.data.errorMessage;
        $scope.errorCode = response.data.errorCode;
    });
};

You can share that folder on Windows and mount the shared folder on the Unix server. Once mounted, it can be accessed easily via Samba (smb://192.168.1.117/Your_Folder).
Samba is standard on nearly all distributions of Linux and is commonly included as a basic system service on other Unix-based operating systems as well.
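For example, once the share has been mounted on the Unix server (with something like mount -t cifs //192.168.1.117/Your_Folder /mnt/shared), the existing mkdirs() logic works unchanged, because the mounted share looks like a local directory to the JVM. A minimal sketch with hypothetical paths:

import java.io.File;

public class MountedShareExample {
    public static void main(String[] args) {
        // Hypothetical mount point of the Windows share on the Unix server.
        File base = new File("/mnt/shared");

        // Same pattern as PatchService.create(): build nested folders under it.
        File patchDir = new File(base, "123 - SamplePatch" + File.separator + "Patch");
        if (patchDir.mkdirs()) {
            System.out.println("Created " + patchDir.getAbsolutePath());
        } else {
            System.out.println("Could not create " + patchDir.getAbsolutePath());
        }
    }
}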

Related

Batch insert in Cassandra using Apache Spark hanging, and context not getting closed, when triggered from a web service

I am new to Spark. I am trying to insert CSV files into a Cassandra table using the Spark-Cassandra connector, as below:
The files are in HDFS. I get the paths of all the files, and for each path I call a method that converts the CSV data to the corresponding Cassandra data types, creates a prepared statement, binds the data to the prepared statement, and adds it to a batch. I execute the batch once it reaches 1000 statements.
Key points:
1. I am using Apache Cassandra 2.1.8 and Spark 1.5.
2. I read the CSV files using the Spark context.
3. I use com.datastax.spark.connector.cql.CassandraConnector to create a session with Cassandra.
I have 9 files, and each file's data goes to its own table in Cassandra. Everything works fine: all inserts happen as expected and the job completes when I submit the jar with spark-submit.
The problem I am facing is that when the same jar is invoked through a web service (the web service calls a script that invokes the jar), one file's data doesn't get inserted, and the Spark context doesn't stop, so the job runs forever.
When I insert 4 or 5 files, everything works fine even through the web service. But all together it hangs: I get 10 records fewer in one of the tables, and the context doesn't stop.
It's strange, because when I submit the jar with spark-submit directly everything works fine, and only through the web service do I face this issue, even though the web service submits the job to the same spark-submit.
Here is my code:
package com.pz.loadtocassandra;
// Note: the static javaFunctions import, java.util.logging.Level/Logger, and
// org.apache.commons.lang.StringUtils were missing from the original post but
// are required for this class to compile.
import static com.datastax.spark.connector.japi.CassandraJavaUtil.javaFunctions;
import java.io.File;
import java.io.IOException;
import java.math.BigDecimal;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.ConsoleHandler;
import java.util.logging.FileHandler;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import com.datastax.driver.core.BatchStatement;
import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.exceptions.InvalidTypeException;
import com.datastax.spark.connector.cql.CassandraConnector;
import com.datastax.spark.connector.japi.CassandraRow;
import com.pz.shared.UnicodeBOMInputStream;
import com.pz.shared.fileformat.Header;
import com.pz.shared.mr.fileformat.MRFileFormats.CSVInputFormat;
import com.pz.shared.mr.fileformat.MRFileFormats.TextArrayWritable;
public class LoadToCassandra {
public static final String STUDYID = "STUDYID";
public static final String PROJECTNAME = "PROJECTNAME";
public static final String FILEID = "FILEID";
public static int count = 0;
public static final String FILE_SERPERATOR = "/";
public static Logger log = Logger.getLogger(LoadToCassandra.class.getName());
public static void main(String[] args) {
try {
// The try block was cut off in the original post; the catch below implies a
// logging FileHandler was configured here, so that part is reconstructed.
FileHandler fileHandler = new FileHandler("loadtocassandra.log");
log.addHandler(fileHandler);
String propFileLoc = args[0];
String hdfsHome = args[1];
String hdfs_DtdXmlPath = args[2];
String hdfs_NormalizedDataPath = args[3];
run(propFileLoc, hdfsHome, hdfs_DtdXmlPath, hdfs_NormalizedDataPath);
} catch (IOException exception) {
log.log(Level.SEVERE, "Error occurred in FileHandler.", exception);
}
}
public static void run(String propFileLoc, String hdfsHome,
String hdfs_DtdXmlPath, String hdfs_NormalizedDataPath) {
JavaSparkContext ctx = null;
FileSystem hadoopFs = null;
try {
PropInitialize.initailizeConfig(propFileLoc);
//setting spark context
ctx = setSparkContext(propFileLoc);
ParseDtdXml.parseDTDXML(hdfsHome, hdfs_DtdXmlPath);
Configuration configuration = setHadoopConf();
hadoopFs = getHadoopFs(hdfsHome, configuration);
FileStatus[] fstat = hadoopFs.listStatus(new Path(hdfs_NormalizedDataPath));
//Getting the csv paths
Path[] paths = FileUtil.stat2Paths(fstat);
log.info("PATH.size - " + paths.length);
for (Path path : paths) {
log.info("path is : "+path.toString());
loadToCassandra(propFileLoc, path, configuration,hdfsHome, ctx);
}
} catch (IOException | URISyntaxException e) {
log.log(Level.SEVERE, "run method", e);
e.printStackTrace();
} finally {
log.info("finally ");
if (ctx!= null) {
ctx.stop();
System.out.println("SC Stopped");
}
if (hadoopFs != null) {
try {
hadoopFs.close();
} catch (IOException e) {
log.log(Level.SEVERE, "run method", e);
}
}
}
}
// input : 1. String hdfs home ,
// 2. Configuration hadoop conf object
// returns : hadoop File System object
private static FileSystem getHadoopFs(String hdfsHome,
Configuration configuration) throws IOException, URISyntaxException {
return FileSystem.get(new URI(hdfsHome), configuration);
}
// input : no inputs
// process : sets hadoop config parameters
// output : returns hadoop conf object
private static Configuration setHadoopConf() throws IOException,
URISyntaxException {
Configuration configuration = new Configuration();
configuration.setBoolean("csvFileFormat.encoded.flag", true);
configuration.set("csvinputformat.token.delimiter", ",");
return configuration;
}
// input : string properties file location
// process : creates and sets the configuration of the spark context
// returns : JavaSparkContext object with configurations set on it
private static JavaSparkContext setSparkContext(String propFileLoc) {
PropInitialize.initailizeConfig(propFileLoc);
SparkConf conf = new SparkConf();
conf.set("spark.serializer",
"org.apache.spark.serializer.KryoSerializer");
conf.setAppName("Loading Data");
conf.setMaster(PropInitialize.spark_master);
conf.set("spark.cassandra.connection.host",
PropInitialize.cassandra_hostname);
conf.setJars(PropInitialize.external_jars);
return new JavaSparkContext(conf);
}
private static void loadToCassandra(String propFileLoc, Path sourceFileHdfsPath,
Configuration hadoopConf, String hdfsHome,JavaSparkContext ctx) {
System.out.println("File :: " + sourceFileHdfsPath.toString());
FileSystem hadoopFs = null;
PropInitialize.initailizeConfig(propFileLoc);
String cassKeyspaceName = PropInitialize.cass_keyspace_name;
log.info("entered here for file "+sourceFileHdfsPath.toString());
final String strInputFileName = StringUtils.split(
sourceFileHdfsPath.getName(), "#")[0].toLowerCase();
final String strTableNameInCass = StringUtils.split(
sourceFileHdfsPath.getName(), "-")[0].split("#")[1]
.toLowerCase();
final String strSourceFilePath = sourceFileHdfsPath.toString();
try {
hadoopFs = getHadoopFs(hdfsHome, hadoopConf);
//getting the cassandra connection using spark conf
final CassandraConnector connector = getCassandraConnection(ctx);
final JavaRDD<CassandraRow> cassTableObj=getCassTableObj(ctx,cassKeyspaceName,strTableNameInCass);
final Map<String, List<String>> tabColMapWithColTypes1 = ParseDtdXml.tabColMapWithColTypes;
final String headersUpdated;
final String headers;
UnicodeBOMInputStream ubis = new UnicodeBOMInputStream(
hadoopFs.open(sourceFileHdfsPath));
Header CsvHeader = Header.getCSVHeader(ubis, ",");
if (!strTableNameInCass.equalsIgnoreCase("PCMASTER")) {
String fString = "";
for (int i = 0; i < CsvHeader.size() - 1; i++) {
fString = fString + CsvHeader.get(i).ColumnName + ",";
}
fString = fString
+ CsvHeader.get(CsvHeader.size() - 1).ColumnName;
headers = fString; // StringUtils.join(stringArr.toString(),",");
headersUpdated = strTableNameInCass.toUpperCase() + "ID,"
+ headers;
} else {
String[] stringArr = new String[CsvHeader.size()];
String fString = "";
for (int i = 0; i < CsvHeader.size() - 1; i++) {
// stringArr[i] = CsvHeader.get(i).ColumnName;
fString = fString + CsvHeader.get(i).ColumnName + ",";
}
fString = fString
+ CsvHeader.get(CsvHeader.size() - 1).ColumnName;
headers = StringUtils.join(stringArr.toString(), ",");
headersUpdated = fString;
}
ubis.close();
//Reading the file using spark context
JavaPairRDD<LongWritable, TextArrayWritable> fileRdd = ctx
.newAPIHadoopFile(strSourceFilePath, CSVInputFormat.class,
LongWritable.class, TextArrayWritable.class,
hadoopConf);
final long recCount = fileRdd.count();
final String[] actCols = headersUpdated.split(",");
final LinkedHashMap<Object, String> mapOfColNameAndType = new LinkedHashMap<Object, String>();
final List<String> colNameAndType = tabColMapWithColTypes1
.get(strTableNameInCass.toUpperCase());
for (int i = 0; i < actCols.length; i++) {
if (colNameAndType.contains(actCols[i] + " " + "text")) {
int indexOfColName = colNameAndType.indexOf(actCols[i]
+ " " + "text");
mapOfColNameAndType.put(i,
colNameAndType.get(indexOfColName).split(" ")[1]);
} else if (colNameAndType
.contains(actCols[i] + " " + "decimal")) {
int indexOfColName = colNameAndType.indexOf(actCols[i]
+ " " + "decimal");
mapOfColNameAndType.put(i,
colNameAndType.get(indexOfColName).split(" ")[1]);
} else {
continue;
}
}
//creates the query for prepared statement
final String makeStatement = makeSt(cassKeyspaceName,
strTableNameInCass, actCols);
final long seqId1 = cassTableObj.count();
//calling map on the fileRdd
JavaRDD<String> data = fileRdd.values().map(
new Function<TextArrayWritable, String>() {
/**
*
*/
private static final long serialVersionUID = 1L;
Session session;
boolean isssession = false;
PreparedStatement statement;
BatchStatement batch;
int lineCount = 0;
long seqId = seqId1;
/* For each line, returned as a TextArrayWritable: convert each cell to the
 * corresponding type, bind the data to the prepared statement,
 * and add it to the batch.
 */
@Override
public String call(TextArrayWritable tup)
throws Exception {
seqId++;
lineCount++;
log.info("entered here 3 for file "+strSourceFilePath.toString());
String[] part = tup.toStrings();
Object[] parts = getDataWithUniqueId(
strTableNameInCass, part);
//For each file
//Creates the session
//creates the PreparedStatement
if (!isssession) {
session = connector.openSession();
statement = session.prepare(makeStatement);
log.info("entered here 4 for file "+strSourceFilePath.toString());
// System.out.println("statement :" +
// statement);
isssession = true;
batch = new BatchStatement();
}
List<Object> typeConvData = new ArrayList<Object>();
for (int i = 0; i < parts.length; i++) {
String type = mapOfColNameAndType.get(i);
try {
if (type.equalsIgnoreCase("text")) {
typeConvData.add(parts[i]);
} else {
// parts[i] =
// parts[i].toString().replace("\"",
// "");
// check if the String the has to
// converted to a BigDecimal is any
// positive or negetive integer or not.
// if its not a positive integer or
// negative forcefully convert it to
// zero (avoiding NumberFormatException)
if (!((String) parts[i])
.matches("-?\\d+")) {
parts[i] = "0";
}
long s = Long
.valueOf((String) parts[i]);
typeConvData.add(BigDecimal.valueOf(s));
}
} catch (NullPointerException e) {
log.log(Level.SEVERE, "loadToCass method", e);
} catch (NumberFormatException e) {
log.log(Level.SEVERE, "loadToCass method", e);
} catch (InvalidTypeException e) {
log.log(Level.SEVERE, "loadToCass method", e);
}
}
List<Object> data = typeConvData;
//bind data to query
final BoundStatement query = statement.bind(data
.toArray(new Object[data.size()]));
//add query to batch
batch.add(query);
int count = LoadToCassandra.count;
//when count is 1k execute batch
if (count == 1000) {
log.info("entered here 5 for file "+strSourceFilePath.toString());
log.info("batch done");
session.execute(batch);
LoadToCassandra.count = 0;
batch = new BatchStatement();
return StringUtils.join(tup.toStrings());
}
//if its the last batch and its not of size 1k
if (lineCount == (recCount))
{
log.info("Last Batch");
session.executeAsync(batch);
log.info("entered here 6 for file "+strSourceFilePath.toString());
//session.execute(batch);
session.close();
log.info("Session closed");
}
LoadToCassandra.count++;
return StringUtils.join(tup.toStrings());
}
private Object[] getDataWithUniqueId(
String strTableNameInCass, String[] part) {
Object[] parts = null;
ArrayList<String> tempArraylist = new ArrayList<String>();
if (!strTableNameInCass
.equalsIgnoreCase("PCMASTER")) {
for (int i = 0; i < part.length; i++) {
if (i == 0) {
tempArraylist.add(0,
String.valueOf(seqId));
}
tempArraylist.add(part[i]);
}
parts = tempArraylist.toArray();
} else {
parts = part;
}
return parts;
}
});
data.count();
hadoopFs.close();
} catch (Exception e) {
e.printStackTrace();
}
}
private static JavaRDD<CassandraRow> getCassTableObj(
JavaSparkContext ctx, String cassKeyspaceName,
String strTableNameInCass) {
return javaFunctions(ctx)
.cassandraTable(cassKeyspaceName,
strTableNameInCass.toLowerCase());
}
private static CassandraConnector getCassandraConnection(
JavaSparkContext ctx) {
return CassandraConnector.apply(ctx.getConf());
}
private static String makeSt(String keyspace, String tabName,
String[] colNames) {
StringBuilder sb = new StringBuilder();
sb.append("INSERT INTO " + keyspace + "." + tabName + " ( ");
List<String> vars = new ArrayList<>();
for (int i = 0; i < (colNames.length - 1); i++) {
sb.append(colNames[i] + ",");
vars.add("?");
}
vars.add("?");
sb.append(colNames[colNames.length - 1] + " ) values ( "
+ StringUtils.join(vars, ",") + " ) ");
return sb.toString();
}}
Can anyone tell me what could be causing this problem and how it can be resolved? Thanks.
Once you have inserted your data into Cassandra, call the ctx.stop() method; it will stop the Spark context.
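A minimal sketch of that pattern, assuming a JavaSparkContext named ctx (the question's run() method already calls stop() in a finally block, which is the right place for it):

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

public class StopContextExample {
    public static void main(String[] args) {
        JavaSparkContext ctx = null;
        try {
            ctx = new JavaSparkContext(new SparkConf()
                    .setAppName("Loading Data")
                    .setMaster("local[*]")); // hypothetical master for the sketch
            // ... read the CSV files and write to Cassandra ...
        } finally {
            if (ctx != null) {
                ctx.stop(); // releases the executors so the driver JVM can exit
            }
        }
    }
}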

Image compression or conversion

I simply want to apply image compression to a PNG/JPEG/bitmap file.
On Android we have Bitmap.CompressFormat to compress a bitmap file and use it for further operations.
The Bitmap.CompressFormat class allows compression in 3 formats, as below:
JPEG
PNG
WEBP
My question is that I want to compress the file in any one of the formats below:
JBIG2
TIFF G4
TIFF LZW
I have found some image compression libraries like ImageIO and ImageMagick but didn't get any success. I want to upload this file to an Amazon server. Please guide me on how to achieve this, or is there any other option to upload an image to an Amazon server?
Thanks for your time.
I don't know about compression in those formats, but I created this class to upload files programmatically into an Amazon S3 bucket using the Amazon SDK API:
package com.amazon.util;
import com.amazonaws.AmazonClientException;
import com.amazonaws.auth.PropertiesCredentials;
import com.amazonaws.regions.Region;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.PutObjectResult;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
public class AmazonS3Files {
private static final String existingBucketName = "bucketName";
private final String strProperties = "accessKey = MYACESSKEY \n"
+ "secretKey = my+secret+key";
private static final String urlRegion = "https://s3.amazonaws.com/";
public static final String urlPathS3 = urlRegion + existingBucketName;
public String UploadFile(InputStream inFile, String pathDocument, String fileName) {
return UploadFile(inFile, pathDocument, fileName, false);
}
public void deleteObjectsInFolder(String folderPath) {
InputStream inputStreamCredentials = new ByteArrayInputStream(strProperties.getBytes());
folderPath = folderPath.replace('\\', '/');
if (folderPath.charAt(folderPath.length() - 1) == '/') {
folderPath = folderPath.substring(0, folderPath.length() - 1);
}
if (folderPath.charAt(0) == '/') {
folderPath = folderPath.substring(1, folderPath.length());
}
try {
AmazonS3 s3Client = new AmazonS3Client(new PropertiesCredentials(inputStreamCredentials));
for (S3ObjectSummary file : s3Client.listObjects(existingBucketName, folderPath).getObjectSummaries()) {
s3Client.deleteObject(existingBucketName, file.getKey());
}
} catch (IOException | AmazonClientException e) {
System.out.println(e);
}
}
public void deleteFile(String filePath) {
InputStream inputStreamCredentials = new ByteArrayInputStream(strProperties.getBytes());
filePath = filePath.replace('\\', '/');
if (filePath.charAt(0) == '/') {
filePath = filePath.substring(1, filePath.length());
}
try {
AmazonS3 s3Client = new AmazonS3Client(new PropertiesCredentials(inputStreamCredentials));
s3Client.deleteObject(existingBucketName, filePath);
} catch (IOException | AmazonClientException e) {
System.out.println(e);
}
}
public String UploadFile(InputStream inFile, String pathDocument, String fileName, boolean bOverwiteFile) {
InputStream inputStreamCredentials = new ByteArrayInputStream(strProperties.getBytes());
String amazonFileUploadLocationOriginal;
String strFileExtension = fileName.substring(fileName.lastIndexOf("."), fileName.length());
fileName = fileName.substring(0, fileName.lastIndexOf("."));
fileName = fileName.replaceAll("[^A-Za-z0-9]", "");
fileName = fileName + strFileExtension;
pathDocument = pathDocument.replace('\\', '/');
try {
if (pathDocument.charAt(pathDocument.length() - 1) == '/') {
pathDocument = pathDocument.substring(0, pathDocument.length() - 1);
}
if (pathDocument.charAt(0) == '/') {
pathDocument = pathDocument.substring(1, pathDocument.length());
}
amazonFileUploadLocationOriginal = existingBucketName + "/" + pathDocument;
AmazonS3 s3Client = new AmazonS3Client(new PropertiesCredentials(inputStreamCredentials));
s3Client.setRegion(Region.getRegion(Regions.SA_EAST_1));
ObjectMetadata objectMetadata = new ObjectMetadata();
objectMetadata.setContentLength(inFile.available());
if (!bOverwiteFile) {
boolean bFileServerexists = true;
int tmpIntEnum = 0;
while (bFileServerexists) {
String tmpStrFile = fileName;
if (tmpIntEnum > 0) {
tmpStrFile = fileName.substring(0, fileName.lastIndexOf(".")) + "(" + tmpIntEnum + ")" + fileName.substring(fileName.lastIndexOf("."), fileName.length());
}
if (!serverFileExists(urlRegion + amazonFileUploadLocationOriginal + "/" + tmpStrFile)) {
bFileServerexists = false;
fileName = tmpStrFile;
}
tmpIntEnum++;
}
}
String strFileType = fileName.substring(fileName.lastIndexOf("."), fileName.length());
if (strFileType.toUpperCase().equals(".jpg".toUpperCase())) {
objectMetadata.setContentType("image/jpeg");
} else if (strFileType.toUpperCase().equals(".png".toUpperCase())) {
objectMetadata.setContentType("image/png");
} else if (strFileType.toUpperCase().equals(".gif".toUpperCase())) {
objectMetadata.setContentType("image/gif");
} else if (strFileType.toUpperCase().equals(".gmap".toUpperCase())) {
objectMetadata.setContentType("text/plain");
}
PutObjectRequest putObjectRequest = new PutObjectRequest(amazonFileUploadLocationOriginal, fileName, inFile, objectMetadata).withCannedAcl(CannedAccessControlList.PublicRead);
PutObjectResult result = s3Client.putObject(putObjectRequest);
return "/" + pathDocument + "/" + fileName;
} catch (Exception e) {
// TODO: handle exception
return null;
}
}
public boolean serverFileExists(String URLName) {
try {
HttpURLConnection.setFollowRedirects(false);
HttpURLConnection con =
(HttpURLConnection) new URL(URLName).openConnection();
con.setRequestMethod("HEAD");
return (con.getResponseCode() == HttpURLConnection.HTTP_OK);
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
}
And for usage with your file:
BufferedImage img = null;
try {
    img = ImageIO.read(new File("file.jpg"));
    // UploadFile is an instance method, so instantiate AmazonS3Files first.
    // Returns null if the upload fails, otherwise the S3 path of the uploaded file.
    String strReturn = new AmazonS3Files().UploadFile(
            new ByteArrayInputStream(((DataBufferByte) img.getRaster().getDataBuffer()).getData()),
            "path/to/file", "newfilename.jpg");
} catch (IOException e) {
    // Handle exception
}
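On the compression part of the question: since Java 9 the built-in ImageIO TIFF plugin can write LZW- and CCITT T.6 (TIFF G4) compressed files, so no extra library is needed for those two formats (JBIG2 still requires a third-party plugin). A hedged sketch; the compression-type names follow the JDK TIFF writer's documentation, and G4 only accepts bilevel (black-and-white) images:

import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import javax.imageio.IIOImage;
import javax.imageio.ImageIO;
import javax.imageio.ImageWriteParam;
import javax.imageio.ImageWriter;
import javax.imageio.stream.ImageOutputStream;

public class TiffCompressExample {

    public static void writeTiff(BufferedImage img, File out, String compressionType)
            throws IOException {
        Iterator<ImageWriter> writers = ImageIO.getImageWritersByFormatName("TIFF");
        if (!writers.hasNext()) {
            throw new IOException("No TIFF writer available (needs Java 9+ or a plugin)");
        }
        ImageWriter writer = writers.next();
        try (ImageOutputStream ios = ImageIO.createImageOutputStream(out)) {
            ImageWriteParam param = writer.getDefaultWriteParam();
            param.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
            // "LZW" gives TIFF LZW; "CCITT T.6" gives TIFF G4 (bilevel images only).
            param.setCompressionType(compressionType);
            writer.setOutput(ios);
            writer.write(null, new IIOImage(img, null, null), param);
        } finally {
            writer.dispose();
        }
    }

    public static void main(String[] args) throws IOException {
        BufferedImage img = ImageIO.read(new File("file.jpg")); // hypothetical input
        writeTiff(img, new File("file-lzw.tif"), "LZW");
    }
}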

My module is stopping my Wowza application from loading because of required dependencies

Local - Wowza Streaming Engine 4.1.0, Windows 8, Java version 1.7.0_67
Server - The Wowza Streaming Engine AMI here. Java version 1.7.0_65
I have Wowza running locally and on an EC2 instance.
Locally it works fine and I can connect and publish streams to my application without a problem. I cannot connect or publish streams to the application on my server, however.
I removed the .jar (module) that went with the application, and I was able to connect and publish to my app, though it gave me a warning that it couldn't find the associated module, which was to be expected.
I put the module back in, restarted the server, and I was unable to connect.
It appears that my .jar file is stopping the application from loading for some reason.
Here's the source for my module:
package com.xxxxxxxxxxxxxxx.recorder;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.S3ClientOptions;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.wowza.wms.application.*;
import com.wowza.wms.amf.*;
import com.wowza.wms.client.*;
import com.wowza.wms.module.*;
import com.wowza.wms.request.*;
import com.wowza.wms.stream.*;
import com.wowza.wms.rtp.model.*;
import com.wowza.wms.httpstreamer.model.*;
import com.wowza.wms.httpstreamer.cupertinostreaming.httpstreamer.*;
import com.wowza.wms.httpstreamer.smoothstreaming.httpstreamer.*;
public class RecorderModules extends ModuleBase implements AWSCredentialsProvider {
IApplicationInstance appInstance;
private String videoBucket;
private String thumbBucket;
private String videoDistro;
private String thumbnailDistro;
private String region;
private AmazonS3Client s3;
private String dir;
public void onAppStart(IApplicationInstance appInstance) {
String fullname = appInstance.getApplication().getName() + "/"
+ appInstance.getName();
getLogger().info("onAppStart: " + fullname);
this.appInstance = appInstance;
try{
videoBucket = appInstance.getProperties().getPropertyStr("videoBucket");
getLogger().info("Video bucket is " + videoBucket);
thumbBucket = appInstance.getProperties().getPropertyStr("thumbBucket");
getLogger().info("Thumb bucket is " + thumbBucket);
videoDistro = appInstance.getProperties().getPropertyStr("videoDistro");
getLogger().info("Video distro is " + videoDistro);
thumbnailDistro =appInstance.getProperties().getPropertyStr("thumbnailDistro");
getLogger().info("thumbnail distro is " + thumbnailDistro);
region = appInstance.getProperties().getPropertyStr("region");
getLogger().info("region is " + region);
s3 = new AmazonS3Client();
s3.setEndpoint(region);
getLogger().info("AmazonS3Client is created");
}catch(Exception e){
getLogger().info("Could not read config " + e);
}
}
public void doSave(IClient client, RequestFunction function, AMFDataList params) {
getLogger().info("doSave hit ");
new File(dir + params.getString(3) + ".flv").renameTo(new File(dir+params.getString(4)+".flv"));
getLogger().info("Starting upload");
String thumbName = params.getString(4).replace("vid_", "thumb_")+".jpg";
String flvName = params.getString(4)+".flv";
String mp4Name = params.getString(4)+".mp4";
try{
PutObjectRequest p = new PutObjectRequest(videoBucket,flvName, new File(dir+flvName));
p.setRequestCredentials(getCredentials());
p.setCannedAcl(CannedAccessControlList.BucketOwnerFullControl);
getLogger().info("attempting to upload " + flvName + " to " + videoBucket);
s3.putObject(p);
getLogger().info("flv upload complete " + videoBucket + " " + flvName);
PutObjectRequest p2 = new PutObjectRequest(thumbBucket,thumbName, new File(dir+thumbName));
p2.setRequestCredentials(getCredentials());
p2.setCannedAcl(CannedAccessControlList.PublicRead);
getLogger().info("attempting to upload " + thumbName + " to " + thumbBucket);
s3.putObject(p2);
getLogger().info("thumb upload complete " + thumbBucket + " " + thumbName);
String[] info = new String[5];
info[0] = videoDistro+params.getString(4);
info[1] = thumbnailDistro+thumbName;
info[2] = params.getString(4);
info[3] = videoBucket;
info[4] = thumbBucket;
getLogger().info("sending info to client " + info[0]);
//client.call("uploadDone", null,(Object[])info);
}catch(Exception e){
getLogger().info("Upload failed");
getLogger().info(e);
//client.call("uploadFailed")
}
//transcode
//-crf 23 -refs 3 -profile:v baseline -level 3.0 -pix_fmt yuv420p -preset veryslow
String[] command = {"ffmpeg",
"-i", dir+params.getString(4)+".flv",
"-crf", "23",
"-refs","3",
"-profile:v","baseline",
"-level","3.0",
"-pix_fmt","yuv420p",
"-preset","veryslow",
dir+params.getString(4)+".mp4"};
try {
ProcessBuilder builder = new ProcessBuilder(command);
builder.redirectErrorStream(true);
getLogger().info("Starting process");
Process process = builder.start();
BufferedReader in = new BufferedReader(new InputStreamReader(process.getInputStream()));
String line = null;
while((line = in.readLine()) != null) {
System.out.println(line);
}
process.waitFor();
PutObjectRequest p = new PutObjectRequest(videoBucket,mp4Name, new File(dir+mp4Name));
p.setRequestCredentials(getCredentials());
p.setCannedAcl(CannedAccessControlList.BucketOwnerFullControl);
getLogger().info("transcoding completed");
s3.putObject(p);
getLogger().info("mp4 file uploaded");
} catch (Exception e) {
getLogger().info("Error running ffmpeg");
e.printStackTrace();
}
deleteFiles(params.getString(4).replace("vid_",""));
}
public void saveThumbnail(IClient client, RequestFunction function, AMFDataList params){
String dir = client.getAppInstance().getStreamStoragePath()+"/"+"thumb_"+params.getString(4).split(",")[2]+".jpg";
getLogger().info(params);
Path path = Paths.get(dir);
byte[] byteArr = (byte[])((AMFDataByteArray)params.get(3)).getValue();
try {
Files.write(path, byteArr, StandardOpenOption.CREATE_NEW);
} catch (IOException e) {
e.printStackTrace();
}
}
public void onAppStop(IApplicationInstance appInstance) {
String fullname = appInstance.getApplication().getName() + "/"
+ appInstance.getName();
getLogger().info("onAppStop: " + fullname);
}
public void onConnect(IClient client, RequestFunction function, AMFDataList params) {
getLogger().info("onConnect: " + client.getClientId());
}
public void onConnectAccept(IClient client) {
getLogger().info("onConnectAccept: " + client.getClientId());
}
public void onConnectReject(IClient client) {
getLogger().info("onConnectReject: " + client.getClientId());
}
public void onDisconnect(IClient client) {
getLogger().info("onDisconnect: " + client.getClientId());
}
public void onStreamCreate(IMediaStream stream) {
getLogger().info("onStreamCreate: " + stream.getSrc());
}
public void onStreamDestroy(IMediaStream stream) {
getLogger().info("onStreamDestroy: " + stream.getSrc());
}
public void onHTTPSessionCreate(IHTTPStreamerSession httpSession) {
getLogger().info("onHTTPSessionCreate: " + httpSession.getSessionId());
}
public void onHTTPSessionDestroy(IHTTPStreamerSession httpSession) {
getLogger().info("onHTTPSessionDestroy: " + httpSession.getSessionId());
}
public void onHTTPCupertinoStreamingSessionCreate(HTTPStreamerSessionCupertino httpSession) {
getLogger().info(
"onHTTPCupertinoStreamingSessionCreate: "
+ httpSession.getSessionId());
}
public void onHTTPCupertinoStreamingSessionDestroy(HTTPStreamerSessionCupertino httpSession) {
getLogger().info(
"onHTTPCupertinoStreamingSessionDestroy: "
+ httpSession.getSessionId());
}
public void onHTTPSmoothStreamingSessionCreate( HTTPStreamerSessionSmoothStreamer httpSession) {
getLogger().info(
"onHTTPSmoothStreamingSessionCreate: "
+ httpSession.getSessionId());
}
public void onHTTPSmoothStreamingSessionDestroy( HTTPStreamerSessionSmoothStreamer httpSession) {
getLogger().info(
"onHTTPSmoothStreamingSessionDestroy: "
+ httpSession.getSessionId());
}
public void onRTPSessionCreate(RTPSession rtpSession) {
getLogger().info("onRTPSessionCreate: " + rtpSession.getSessionId());
}
public void onRTPSessionDestroy(RTPSession rtpSession) {
getLogger().info("onRTPSessionDestroy: " + rtpSession.getSessionId());
}
public void onCall(String handlerName, IClient client, RequestFunction function, AMFDataList params) {
getLogger().info("onCall: " + handlerName);
}
/* Overridden method: delete content of the same name before starting */
public void publish(IClient client, RequestFunction function, AMFDataList params) {
getLogger().info("publish hit");
String name = params.getString(3).replace("flv:","").replace("vid_","").replace("_temp", "");
getLogger().info("name:" + name);
dir = appInstance.decodeStorageDir("${com.wowza.wms.AppHome}"+"/content/recorder/");
deleteFiles(name);
invokePrevious(client,function,params);
}
private void deleteFiles(String name){
getLogger().info("deleting " + name);
try {
if(Files.exists(Paths.get(dir+"thumb_"+name+".jpg"))){
getLogger().info("deleting thumbnail");
Files.delete(Paths.get(dir+"thumb_"+name+".jpg"));
}
if(Files.exists((Paths.get(dir+"vid_"+name+".flv")))){
getLogger().info("deleting video");
Files.delete(Paths.get(dir+"vid_"+name+".flv"));
}
if(Files.exists((Paths.get(dir+"vid_"+name+".mp4")))){
getLogger().info("deleting mp4 video");
Files.delete(Paths.get(dir+"vid_"+name+".mp4"));
}
if(Files.exists((Paths.get(dir+"vid_"+name+"_temp.flv")))){
getLogger().info("deleting temp video");
Files.delete(Paths.get(dir+"vid_"+name+"_temp.flv"));
}
} catch (IOException e) {
getLogger().info("Could not delete old files");
}
}
@Override
public AWSCredentials getCredentials() {
getLogger().info("getting credentials");
return new BasicAWSCredentials(appInstance.getProperties().getPropertyStr("accessKey"),appInstance.getProperties().getPropertyStr("secretKey"));
}
@Override
public void refresh() {
// TODO Auto-generated method stub
}
}
It could be related to this:
http://www.wowza.com/forums/showthread.php?36693-Aws-plugin-breaks-application-with-no-errors
That might mean that my .jar file isn't being built with the required dependencies (the AWS stuff).
EDIT:
So I included all the dependencies, making sure that the AWS stuff was in the .jar (I looked inside it with WinRAR), and now it gives me "Module class not found or could not be loaded" when the application starts. I can see that the application is there.
This might be related to an error I get in Eclipse when I try to create a runnable jar with the dependencies extracted: "Could not find main method from given launch configuration." Even though I got this error, it appeared to work, as the .jar file grew several times in size.
Make sure the applications folder actually exists; in your wowza-install-folder there must be a subfolder called "applications". If it's not there, create it manually.
In Wowza 4.x, you must use the Engine Manager to create applications and manage them. Open the Engine Manager (http://your.ec2.server:8088) and choose the specific application; then select "Incoming security" and check what it says under "RTMP Publishing". If you don't want any protection, change it to "Open (no authentication required)"; otherwise you must send along credentials from your AS3 code when connecting with the NetConnection.
You may also want to check the Wowza logs in [wowza-install-folder]/logs - if the connection fails, there should be a message in the log about this that may give you useful information.
PS: I usually use the Pre-Built AMIs from Wowza's website to initiate a new instance.
I resolved it by installing everything differently on a new server.
I took my auto-generated .jar file, and the two jar files that it depended on, commons-codec-1.9 and aws-java-sdk-1.4.7, and installed them using the "startup package" method, as opposed to transferring the dependencies later over FTP.
And everything worked fine.

Query page (CRM On Demand web services): Java code reading from Excel, skipping records whose id is not found

I have written Java code for the web services of CRM On Demand. The code reads the first column (id) from Excel, queries it in the CRM, and inserts the values accordingly. If the id is found, it inserts the other values; but when the id is not found in the CRM, the code terminates with an error message. I want the code to query for a record and, if found, insert the other values; but if the record is not found, it should skip that record and query for the next id without terminating in between.
Please help...
And also, how do I skip cells with no value (null)?
import crmondemand.ws.account.AccountWS_AccountInsertChild_Input;
import crmondemand.ws.account.AccountWS_AccountInsertChild_Output;
import crmondemand.ws.account.AccountWS_AccountUpdate_Input;
import crmondemand.ws.account.AccountWS_AccountUpdate_Output;
import crmondemand.ws.account.Default_BindingStub;
import crmondemand.xml.account.Account;
import crmondemand.xml.account.RelatedAccount;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
import java.net.MalformedURLException;
import java.net.Proxy;
import java.net.URL;
import java.rmi.RemoteException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import java.util.StringTokenizer;
import java.util.TimeZone;
import javax.xml.rpc.ServiceException;
import jxl.Cell;
import jxl.CellType;
import jxl.Sheet;
import jxl.Workbook;
import jxl.read.biff.BiffException;
import org.apache.axis.AxisProperties;
import org.apache.log4j.Logger;
import org.apache.poi.hssf.usermodel.HSSFCell;
import org.apache.poi.hssf.usermodel.HSSFDateUtil;
import org.apache.poi.hssf.usermodel.HSSFRow;
import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
public class accrel1 {
private String inputFile;
public static int length;
public void setInputFile(String inputFile) {
this.inputFile = inputFile;
}
static Logger logger = Logger.getLogger(accrel1.class);
public static URL reauthentication(Properties properties) throws MalformedURLException {
// System.setProperty("https.proxyHost", "172.17.24.24");
// System.setProperty("https.proxyPort", "8003");
//System.setProperty("https.proxyHost", "145.247.13.164");
// System.setProperty("https.proxyPort", "3128");
// System.setProperty("javax.net.ssl.trustStore","C:\\ProgramFiles\\Java\\jdk1.6.0_31\\jre\\lib\\cacerts");
System.out.println("Loggin In");
String jsessionid_full =
logon("https://secure-ausomxapa.crmondemand.com/Services/Integration",
"################", "##########", properties);
System.out.println("Connecting to CRM..." + jsessionid_full);
String jsessionid = getSessionId(jsessionid_full);
System.out.println("JSessionid: " + jsessionid);
String endpoint =
"https://secure-ausomxapa.crmondemand.com/Services/Integration" +
";jsessionid=" + jsessionid;
URL urlAddr = new java.net.URL(endpoint);
System.out.println("Establishing Connection...");
return urlAddr;
}
public static void urlqueryWS1(URL urlAddr, List cellDataList,
Properties properties) throws RemoteException,
ServiceException,
MalformedURLException {
AxisProperties.setProperty("https.proxyHost",
properties.getProperty("proxyhost"));
AxisProperties.setProperty("https.proxyPort",
properties.getProperty("proxyport"));
// AxisProperties.setProperty("http.nonProxyHosts", "secure-ausomxapa.crmondemand.com");
crmondemand.ws.account.AccountWS_AccountQueryPage_Input accountlist =
new crmondemand.ws.account.AccountWS_AccountQueryPage_Input();
crmondemand.ws.account.AccountWS_AccountQueryPage_Output outlist =
new crmondemand.ws.account.AccountWS_AccountQueryPage_Output();
crmondemand.xml.account.Account[] accounts =
new crmondemand.xml.account.Account[1];
crmondemand.xml.account.Account account =
new crmondemand.xml.account.Account();
String stringCellValue[] = new String[6000];
int k = 0;
int flag = 0, notflag = 0, noparentflag = 0;
System.out.println("CellDatasize:" + cellDataList.size());
for (int i = 1; i < cellDataList.size(); i++) {
crmondemand.ws.account.Account service =
new crmondemand.ws.account.AccountLocator();
Default_BindingStub stub =
(Default_BindingStub)service.getDefault(urlAddr);
List cellTempList = (List)cellDataList.get(i);
for (int j = 0; j < cellTempList.size(); j++) {
HSSFCell hssfCell = (HSSFCell)cellTempList.get(j);
//System.out.println("Cell Type:" + i + "," + j + " " + hssfCell.getCellType());
if (hssfCell.getCellType() == HSSFCell.CELL_TYPE_STRING) {
stringCellValue[j] = hssfCell.getStringCellValue();
// logger.info(i + "," + j + "\t" + stringCellValue[j] + "\t");
} else if (hssfCell.getCellType() ==
HSSFCell.CELL_TYPE_NUMERIC) {
if (HSSFDateUtil.isCellDateFormatted(hssfCell)) {
System.out.println(hssfCell.getDateCellValue());
Date date = new Date();
date = hssfCell.getDateCellValue();
SimpleDateFormat df =
new SimpleDateFormat("MM/dd/yyyy");
stringCellValue[j] = df.format(date);
} else {
stringCellValue[j] = new BigDecimal(hssfCell.getNumericCellValue()).toPlainString();
//logger.info("Number"+i+","+j+ "\t"+stringCellValue[j] + "\t");
}
}
}
AccountWS_AccountInsertChild_Input child =
new AccountWS_AccountInsertChild_Input();
AccountWS_AccountInsertChild_Output childs =
new AccountWS_AccountInsertChild_Output();
crmondemand.xml.account.Account[] accrels =
new crmondemand.xml.account.Account[1];
crmondemand.xml.account.Account accrel =
new crmondemand.xml.account.Account();
RelatedAccount[] relaccounts = new RelatedAccount[1];
RelatedAccount relaccount = new RelatedAccount();
logger.info("Inserting " + i + "Record: "+stringCellValue[0]);
relaccount.setRelationshipRole(stringCellValue[1]);
relaccount.setRelatedAccountId(stringCellValue[2]);
relaccount.setStartDate(stringCellValue[6]);
relaccount.setEndDate(stringCellValue[7]);
relaccount.setReverseRelationshipRole(stringCellValue[3]);
relaccount.setComments(stringCellValue[4]);
relaccount.setRelationshipStatus(stringCellValue[5]);
relaccounts[0] = relaccount;
accrel.setAccountId(stringCellValue[0]); //JDE Account ID
accrel.setListOfRelatedAccount(relaccounts);
accrels[0] = accrel;
child.setListOfAccount(accrels);
try {
childs = stub.accountInsertChild(child);
logger.info(i + "th Record Inserted");
++flag;
} catch (Exception e) {
logger.info("Network Error: Re-Authenticating" + e);
urlAddr = reauthentication(properties);
stub = (Default_BindingStub)service.getDefault(urlAddr);
childs = stub.accountInsertChild(child);
logger.info(i + "th Record Inserted in 2nd Attempt");
++flag;
}
//logger.info("Total No. Of Records Processed"+flag);
}
logger.info("Total No. Of Records Processed"+flag);
}
private void readExcelFile(URL urlAddr, String fileName,
Properties properties) throws ServiceException,
RemoteException,
MalformedURLException {
System.out.println("Reading Excel File");
/**
* Create a new instance for cellDataList
*/
List cellDataList = new ArrayList();
try {
/**
* Create a new instance for FileInputStream class
*/
FileInputStream fileInputStream = new FileInputStream(fileName);
/**
* Create a new instance for POIFSFileSystem class
*/
POIFSFileSystem fsFileSystem =
new POIFSFileSystem(fileInputStream);
/*
* Create a new instance for HSSFWorkBook Class
*/
HSSFWorkbook workBook = new HSSFWorkbook(fsFileSystem);
HSSFSheet hssfSheet = workBook.getSheetAt(0);
/**
* Iterate the rows and cells of the spreadsheet
* to get all the datas.
*/
Iterator rowIterator = hssfSheet.rowIterator();
while (rowIterator.hasNext()) {
HSSFRow hssfRow = (HSSFRow)rowIterator.next();
Iterator iterator = hssfRow.cellIterator();
List cellTempList = new ArrayList();
while (iterator.hasNext()) {
HSSFCell hssfCell = (HSSFCell)iterator.next();
cellTempList.add(hssfCell);
}
cellDataList.add(cellTempList);
}
} catch (Exception e) {
e.printStackTrace();
}
/**
* Call the printToConsole method to print the cell data in the
* console.
*/
urlqueryWS1(urlAddr, cellDataList, properties);
}
private static String logon(String wsLocation, String userName,
String password, Properties properties) {
String sessionString = "FAIL";
int port =
Integer.parseInt(properties.getProperty("proxyport")); //Converting String Port Number to Integer.
try {
Proxy proxy =
new Proxy(Proxy.Type.HTTP, new InetSocketAddress(properties.getProperty("proxyhost"),
port));
// create an HTTPS connection to the On Demand webservices
URL wsURL = new URL(wsLocation + "?command=login");
HttpURLConnection wsConnection =
(HttpURLConnection)wsURL.openConnection(proxy);
// we don't want any caching to occur
wsConnection.setUseCaches(false);
// we want to send data to the server
// wsConnection.setDoOutput(true);
// set some http headers to indicate the username and passwod we are using to logon
wsConnection.setRequestProperty("UserName", userName);
wsConnection.setRequestProperty("Password", password);
wsConnection.setRequestMethod("GET");
// see if we got a successful response
if (wsConnection.getResponseCode() == HttpURLConnection.HTTP_OK) {
// get the session id from the cookie setting
sessionString = getCookieFromHeaders(wsConnection);
}
} catch (Exception e) {
System.out.println("Logon Exception generated :: " + e);
}
return sessionString;
}
private static String getCookieFromHeaders(HttpURLConnection wsConnection) {
// debug code - display all the returned headers
String headerName;
String headerValue = "FAIL";
for (int i = 0; ; i++) {
headerName = wsConnection.getHeaderFieldKey(i);
if (headerName != null && headerName.equals("Set-Cookie")) {
// found the Set-Cookie header (code assumes only one cookie is being set)
headerValue = wsConnection.getHeaderField(i);
break;
}
}
// return the header value (FAIL string for not found)
return headerValue;
}
private static String getSessionId(String cookie) {
StringTokenizer st = new StringTokenizer(cookie, ";");
String jsessionid = st.nextToken();
st = new StringTokenizer(jsessionid, "=");
st.nextToken();
return st.nextToken();
}
public static void main(String[] args) throws IOException,
ServiceException {
String jsessionid, jsessionid_full;
String endpoint;
AxisProperties.setProperty("https.proxyHost", "172.25.9.240");
AxisProperties.setProperty("https.proxyPort", "2006");
System.setProperty("https.proxyHost", "172.25.9.240");
System.setProperty("https.proxyPort", "2006");
// System.setProperty("https.proxyHost", "145.247.13.164");
// System.setProperty("https.proxyPort", "3128");
Properties properties = new Properties();
properties.load(new FileInputStream("C:\\Users\\10608011\\Documents\\Account_Config.properties")); //Windows Path
System.setProperty("javax.net.ssl.trustStore","C:\\Oracle\\Middleware3\\jdk160_24\\jre\\lib\\security\\cacerts");
System.out.println("Logging In");
jsessionid_full =
logon("https://secure-ausomxapa.crmondemand.com/Services/Integration",
"############", "###########", properties);
System.out.println("Establishing " + jsessionid_full);
jsessionid = getSessionId(jsessionid_full);
System.out.println("Jsessionid: " + jsessionid);
endpoint =
"https://secure-ausomxapa.crmondemand.com/Services/Integration" +
";jsessionid=" + jsessionid;
URL urlAddr = new java.net.URL(endpoint);
String fileName =
"D:" + File.separator + "Test2.xls"; // Windows Path
// String fileName =File.separator + "u01"+File.separator +"CRM_DEV"+File.separator +
// "sofwaredepot"+File.separator +"PALS_200_11Feb2013.xls"; //Linux Path /u01/CRM_DEV/softwaredepot
// String fileName="PALS_200_11Feb2013.xls";
final long start = System.currentTimeMillis();
logger.info("Start Time:" + start);
new accrel1().readExcelFile(urlAddr, fileName, properties);
final long durationInMilliseconds = System.currentTimeMillis() - start;
System.out.println("Time(Min) for Data Upload: " +
durationInMilliseconds / 60000 + "mins.");
logger.info("Duration for Data Upload: " +
durationInMilliseconds / 60000 + "mins.");
}
}
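The skip behaviour being asked about is essentially a try/catch around the per-record web-service call, plus a null check on the id cell, continuing the loop instead of letting the exception propagate. A minimal self-contained sketch of the pattern, where insert() is a hypothetical stand-in for stub.accountInsertChild:

import java.util.Arrays;
import java.util.List;

public class SkipOnErrorExample {

    // Hypothetical stand-in for stub.accountInsertChild(child): throws when
    // the id does not exist in the CRM.
    static void insert(String id) throws Exception {
        if ("unknown".equals(id)) {
            throw new Exception("No record found for id " + id);
        }
        System.out.println("Inserted related data for " + id);
    }

    public static void main(String[] args) {
        List<String> ids = Arrays.asList("AC-1", null, "unknown", "AC-2");
        int inserted = 0, skipped = 0;
        for (String id : ids) {
            if (id == null || id.isEmpty()) { // skip empty (null) cells
                skipped++;
                continue;
            }
            try {
                insert(id);                   // the per-record web-service call
                inserted++;
            } catch (Exception e) {           // id not found: log and move on
                System.out.println("Skipping " + id + ": " + e.getMessage());
                skipped++;
            }
        }
        System.out.println(inserted + " inserted, " + skipped + " skipped");
    }
}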

modify zip with long pathnames

I have a jar file in which I need to replace a few classes. The problem is that they're located in a directory with a name that is too long for both Arch and Windows to handle.
(So the layout inside the jar file contains directories with impossibly long names.)
I therefore can't physically create the same directory structure and use "jar uf modded.jar com/".
Is there any way/program/command/trick to get those files to end up at that location in that jar file?
You can use the java.util.zip package to read and write a zip file. Since the zip entry names are just strings, the file system's limit on name length is not an obstacle:
import java.io.InputStream;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.Collection;
import java.util.ArrayList;
import java.util.Map;
import java.util.LinkedHashMap;
import java.util.Enumeration;
import java.util.Objects;
import java.util.logging.Logger;
import java.util.logging.Level;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipOutputStream;
import java.util.zip.ZipException;
public class ZipUpdater {
private static final Logger logger =
Logger.getLogger(ZipUpdater.class.getName());
private final Map<String, Path> changes;
public ZipUpdater(Map<String, Path> changes) {
this.changes = new LinkedHashMap<>(
Objects.requireNonNull(changes, "Change list cannot be null"));
}
public void update(Path zipFile)
throws IOException,
ZipException {
Objects.requireNonNull(zipFile, "Zip file cannot be null");
Map<String, Path> changesNeeded = new LinkedHashMap<>(changes);
Path newZipFilePath = Files.createTempFile(null, ".zip");
try (ZipFile oldZipFile = new ZipFile(zipFile.toFile());
ZipOutputStream newZipFile = new ZipOutputStream(
new BufferedOutputStream(
Files.newOutputStream(newZipFilePath)))) {
String comment = oldZipFile.getComment();
if (comment != null) {
newZipFile.setComment(comment);
}
Enumeration<? extends ZipEntry> oldEntries = oldZipFile.entries();
while (oldEntries.hasMoreElements()) {
ZipEntry entry = oldEntries.nextElement();
String entryName = entry.getName();
Path source = changesNeeded.remove(entryName);
if (source != null) {
ZipEntry newEntry = new ZipEntry(entryName);
newEntry.setMethod(entry.getMethod());
newEntry.setTime(entry.getTime());
newEntry.setComment(entry.getComment());
newEntry.setExtra(entry.getExtra());
newZipFile.putNextEntry(newEntry);
logger.log(Level.INFO,
"Replacing entry \"{0}\" with contents of file \"{1}\"",
new Object[] { entryName, source });
Files.copy(source, newZipFile);
} else {
ZipEntry newEntry = new ZipEntry(entry);
newZipFile.putNextEntry(newEntry);
logger.log(Level.FINE, "Copying entry {0}", entryName);
try (InputStream entryData = new BufferedInputStream(
oldZipFile.getInputStream(entry))) {
int b;
while ((b = entryData.read()) >= 0) {
newZipFile.write(b);
}
}
}
newZipFile.closeEntry();
}
}
if (!changesNeeded.isEmpty()) {
throw new IOException("The following entries were not found"
+ " in '" + zipFile + "': " + changesNeeded.keySet());
}
Files.move(zipFile, Paths.get(zipFile + ".old"),
StandardCopyOption.REPLACE_EXISTING);
Files.move(newZipFilePath, zipFile);
}
public static void main(String[] args)
throws IOException,
ZipException {
if (args.length < 3 || (args.length % 2) != 1 ||
args[0].equals("-?") ||
args[0].equalsIgnoreCase("-h") ||
args[0].equalsIgnoreCase("--help")) {
System.err.println("Usage:");
System.err.println(
"java " + ZipUpdater.class.getName() + " <zipfile>"
+ " <zip-entry> <replacement-file>"
+ " [ <zip-entry> <replacement-file> ] ...");
System.exit(2);
}
Path zipFile = Paths.get(args[0]);
int argCount = args.length;
Map<String, Path> changes = new LinkedHashMap<>(argCount / 2);
for (int i = 1; i < argCount; i += 2) {
String entry = args[i];
Path replacement = Paths.get(args[i + 1]);
changes.put(entry, replacement);
}
ZipUpdater updater = new ZipUpdater(changes);
updater.update(zipFile);
}
}
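Usage then looks like this (entry and file names are hypothetical); the long entry name is passed as a plain string, so it never has to exist on disk:

java ZipUpdater modded.jar com/example/some/very/long/package/path/Foo.class build/classes/Foo.class

The original archive is kept as modded.jar.old and the updated one replaces modded.jar in place.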
