I'm trying to compare my Input XML to an expected TXT
But somehow it fails and I have no clue why. I think this is because I'm writing a negative test (expected TXT contains error message, which is produced by xquery due to invalid inputs)
Comparing two XML files works (without the negative test).
/**
 * Convenience entry point: parses the given header XML from the test data
 * directory, binds it as the XQuery variable "udgHeader" and delegates to
 * {@link #test(Map, String)} for transformation and comparison.
 *
 * @param inputHeaderFileName    input XML file name (resolved via the test data base path)
 * @param expectedResultFileName file containing the expected transformation result
 * @throws Exception if parsing, transformation or comparison fails
 */
public void testHeader(String inputHeaderFileName, String expectedResultFileName) throws Exception {
    Map<String, Object> parameters = new HashMap<>();
    parameters.put("udgHeader", db.parse(getFileFromTestDataBasePath(inputHeaderFileName)));
    test(parameters, expectedResultFileName);
}
public void test(Map<String, Object> xqueryParametersMap, String expectedResultFileName) throws Exception {
String expectedOutput = readFile(testDataBasePath + File.separator + expectedResultFileName, encoding);
String result = transform(xqueryParametersMap).xmlText(new XmlOptions().setSavePrettyPrint().setSavePrettyPrintIndent(2));
if (printTransformedXmlToConsoleBoolean) System.out.println(result);
Diff diff = new Diff(expectedOutput, result);
diff.overrideDifferenceListener(new IgnoreTextAndAttributeValuesDifferenceListener());
assertTrue("Grundstruktur des Resultats ist anders als in '" + expectedResultFileName + "' erwartet!\n\n" + diff + "\n\n", diff.similar());
DetailedDiff detailDiff = new DetailedDiff(compareXML(expectedOutput, result));
#SuppressWarnings("unchecked")
List<Difference> allDifferences = detailDiff.getAllDifferences();
assertEquals("Tatsaechliches Ergebnis weicht von '" + expectedResultFileName + "' ab!\n\n" + detailDiff + "\n\n", 0, allDifferences.size());
}
/**
 * Executes the XQuery under test with the given external variables and returns
 * the single result object.
 *
 * @param xqueryParametersMap variables to bind; {@code Document} values are
 *                            re-parsed into XmlObjects, {@code String} values
 *                            are wrapped as XmlString — all other types are
 *                            silently ignored
 * @return the single XQuery result
 * @throws IllegalStateException if the query yields zero or more than one result
 * @throws Exception             if reading or parsing the query/input fails
 */
private XmlObject transform(Map<String, Object> xqueryParametersMap) throws Exception {
    XmlObject xmlObject = XmlObject.Factory.newInstance();
    XmlOptions options = new XmlOptions();
    Map<String, Object> paramMap = new HashMap<String, Object>();
    for (Map.Entry<String, Object> entry : xqueryParametersMap.entrySet()) {
        String key = entry.getKey();
        Object value = entry.getValue();
        if (value instanceof Document) {
            // Re-parse the DOM document into an XmlObject so it can be bound as an XQuery variable.
            XmlObject inputXml = XmlObject.Factory.parse(getStringFromDocument((Document) value));
            paramMap.put(key, getXmlObject(inputXml));
        } else if (value instanceof String) {
            XmlString string = XmlString.Factory.newInstance();
            string.setStringValue(value.toString());
            paramMap.put(key, string);
        }
        // NOTE(review): values of any other type are dropped without warning — confirm intended.
    }
    String xqueryFileContent = readFile(xQueryUnderTestPath, encoding);
    options.setXqueryVariables(paramMap);
    // Strip XQuery comments "(: ... :)" from the query text before execution.
    xqueryFileContent = xqueryFileContent.replaceAll("(?s)\\s*?\\(:.*?:\\)", "");
    XmlObject[] resultsObjects = xmlObject.execQuery(xqueryFileContent, options);
    if (resultsObjects.length < 1) {
        // Was "throw new Exception(...)": never throw the raw Exception type.
        throw new IllegalStateException("Xquery transformation did not return a result");
    } else if (resultsObjects.length > 1) {
        throw new IllegalStateException("Xquery transformation did return more than one result: " + resultsObjects.length);
    }
    return resultsObjects[0];
}
'
The test:
// Negative test: the input is expected to make the XQuery signal a user error.
// NOTE(review): per the stacktrace, execQuery propagates the XQuery error as an
// XmlRuntimeException BEFORE any result exists, so comparing against an expected
// TXT file can never work here. Assert the expected exception/message instead
// (e.g. wrap the call in try/catch, or use an expected-exception test facility).
public void testRecipientListNegative() throws Exception {
recipientTester.testHeader("input.xml", "expected.txt");
}
Stacktrace:
org.apache.xmlbeans.XmlRuntimeException: weblogic.xml.query.exceptions.XQueryUserException: line 29, column 5: fase: Unknown msg-name/recipient combination ['ARS_XYZ'/'ESM']! Please check fase recipient list.
at weblogic.xml.query.runtime.core.Error.fetchNext(Error.java:61)
at weblogic.xml.query.iterators.GenericIterator.next(GenericIterator.java:104)
at weblogic.xml.query.runtime.core.IfThenElse.fetchNext(IfThenElse.java:91)
at weblogic.xml.query.iterators.GenericIterator.next(GenericIterator.java:104)
at weblogic.xml.query.runtime.core.IfThenElse.fetchNext(IfThenElse.java:91)
at weblogic.xml.query.iterators.GenericIterator.next(GenericIterator.java:104)
at weblogic.xml.query.runtime.constructor.AtomicElementConstructor.fetchNext(AtomicElementConstructor.java:129)
at weblogic.xml.query.iterators.GenericIterator.peekNext(GenericIterator.java:163)
at weblogic.xml.query.runtime.constructor.SuperElementConstructor.getPhase2(SuperElementConstructor.java:388)
at weblogic.xml.query.runtime.constructor.PartMatElemConstructor.matEverything(PartMatElemConstructor.java:123)
at weblogic.xml.query.runtime.constructor.PartMatElemConstructor.fetchNext(PartMatElemConstructor.java:197)
at weblogic.xml.query.iterators.GenericIterator.peekNext(GenericIterator.java:163)
at weblogic.xml.query.runtime.constructor.SuperElementConstructor.getPhase2(SuperElementConstructor.java:388)
at weblogic.xml.query.runtime.constructor.PartMatElemConstructor.fetchNext(PartMatElemConstructor.java:229)
at weblogic.xml.query.iterators.GenericIterator.next(GenericIterator.java:104)
at weblogic.xml.query.runtime.core.LetIterator.fetchNext(LetIterator.java:133)
at weblogic.xml.query.iterators.GenericIterator.next(GenericIterator.java:104)
at weblogic.xml.query.runtime.core.LetIterator.fetchNext(LetIterator.java:133)
at weblogic.xml.query.iterators.GenericIterator.next(GenericIterator.java:104)
at weblogic.xml.query.xdbc.iterators.ItemIterator.fetchNext(ItemIterator.java:86)
at weblogic.xml.query.iterators.LegacyGenericIterator.next(LegacyGenericIterator.java:109)
at weblogic.xml.query.runtime.qname.InsertNamespaces.fetchNext(InsertNamespaces.java:216)
at weblogic.xml.query.iterators.GenericIterator.next(GenericIterator.java:104)
at weblogic.xml.query.runtime.core.ExecutionWrapper.fetchNext(ExecutionWrapper.java:88)
at weblogic.xml.query.iterators.GenericIterator.next(GenericIterator.java:104)
at org.apache.xmlbeans.impl.store.XqrlImpl$SegmentedIterator.next(XqrlImpl.java:1656)
at org.apache.xmlbeans.impl.store.XqrlImpl.loadTokenStream(XqrlImpl.java:1410)
at org.apache.xmlbeans.impl.store.XqrlImpl.loadTokenStream(XqrlImpl.java:1383)
at org.apache.xmlbeans.impl.store.XqrlImpl.executeQueryToXmlObjects(XqrlImpl.java:1575)
at org.apache.xmlbeans.impl.store.XqrlImpl.access$000(XqrlImpl.java:53)
at org.apache.xmlbeans.impl.store.XqrlImpl$CompiledQuery.objectExecute(XqrlImpl.java:302)
at org.apache.xmlbeans.impl.store.Query.objectExecQuery(Query.java:80)
at org.apache.xmlbeans.impl.store.Xobj.exec_query(Xobj.java:2525)
at org.apache.xmlbeans.impl.values.XmlObjectBase.execQuery(XmlObjectBase.java:525)
at de.db.udg.componenttest.XQueryTester.transform(XQueryTester.java:271)
at de.db.udg.componenttest.XQueryTester.test(XQueryTester.java:210)
at de.db.udg.componenttest.XQueryTester.testHeader(XQueryTester.java:172)
at XQueryTest.testRecipientListNegative(XQueryTest.java:38)
the lines which are failing according to stacktrace:
this.test(xqueryParametersMap, expectedResultFileName);
String result = transform(xqueryParametersMap).xmlText(new XmlOptions().setSavePrettyPrint().setSavePrettyPrintIndent(2));
XmlObject[] resultsObjects = xmlObject.execQuery(xqueryFileContent, options);
Related
Say the input is:
[{keyname: firstname, path:person.name.firstname,type:string},
{keyname:age, path:person.age,type:number}]
Then jsonschema should be generated from the json
{ person:{
name:{ firstname: guest },
age: 6
}}
I need the json schema
#Component
public class SchemaUtil {
#Autowired
ObjectMapper mapper;
//Converts path of type -> $.eventMessage.phoneNumbers[:1].type to -> /eventMessage/phoneNumbers/0/type
public String pathConverter(String path) {
String newString=path.replace("$", "").replace(".", "/").replaceAll("\\[(.*?)\\]","/0");
return newString;
}
public String getSchema(List<SchemaPOJO> schemaList)
{
ObjectNode rootNode = mapper.createObjectNode();
StringBuilder output = new StringBuilder();
try
{
for(SchemaPOJO schema: schemaList)
{
String path = pathConverter(schema.getPath());
create(rootNode,path,schema.getDataType());
if(path.contains("/0/"))
{
int index1=path.indexOf("/0/");
while(index1 != -1)
{
String temp = path.substring(0,index1+1)+'1'+path.substring(index1+2);
create(rootNode,temp,schema.getDataType());
index1=path.indexOf("/0/",index1+1);
}
}
}
output.append("{\"$schema\":\"http://json-schema.org/draft-04/schema#\","
+ "\"type\": \"object\", \"properties\": {");
output.append(makeSchema(mapper.writeValueAsString(rootNode),null));
}
catch(Exception e)
{
return null;
}
List<String> keys = new ArrayList<>();
Iterator<String> iterator = rootNode.fieldNames();
iterator.forEachRemaining(e -> keys.add(e));
String inParams = String.join(",", keys.stream().map(key1 -> "\""+key1+"\"").collect(Collectors.toList()));
output.append(",\"required\":["+inParams+"]");
output.append("}");
return output.toString();
}
//Creates dummy values in json
private void create(ObjectNode rootNode, String path,String dataType)
{
switch(dataType)
{
case "string":createJson(rootNode, JsonPointer.compile(path), new TextNode("stringdummytext"));break;
case "integer":createJson(rootNode, JsonPointer.compile(path), new IntNode(65734));break;
case "float":createJson(rootNode, JsonPointer.compile(path), new FloatNode(124));break;
case "boolean":createJson(rootNode, JsonPointer.compile(path),BooleanNode.getFalse());break;
default:createJson(rootNode, JsonPointer.compile(path), new TextNode("stringdummytext"));break;
}
}
//Creates dummy json
private void createJson(ObjectNode node, JsonPointer pointer, JsonNode value) {
JsonPointer parentPointer = pointer.head();
JsonNode parentNode = node.at(parentPointer);
String fieldName = pointer.last().toString().substring(1);
if (parentNode.isMissingNode() || parentNode.isNull()) {
parentNode = StringUtils.isNumeric(fieldName) ? mapper.createArrayNode() : mapper.createObjectNode();
createJson(node,parentPointer, parentNode);
}
if (parentNode.isArray()) {
ArrayNode arrayNode = (ArrayNode) parentNode;
int index = Integer.parseInt(fieldName);
for (int i = arrayNode.size(); i <= index; i++) {
arrayNode.addNull();
}
arrayNode.set(index, value);
} else if (parentNode.isObject()) {
((ObjectNode) parentNode).set(fieldName, value);
} else {
throw new IllegalArgumentException("`" + fieldName + "` can't be set for parent node `"
+ parentPointer + "` because parent is not a container but " + parentNode.getNodeType().name());
}
}
//Makes schema from dummy json
private String makeSchema(String json,JsonNodeType jsonNodeType) throws IOException
{
JsonNode jsonNode = mapper.readTree(json);
StringBuilder output = new StringBuilder();
for (Iterator<String> iterator = jsonNode.fieldNames(); iterator.hasNext();)
{
String fieldName = iterator.next();
JsonNodeType nodeType;
nodeType = jsonNode.get(fieldName).getNodeType();
output.append(convertNodeToStringSchemaNode(jsonNode, nodeType, fieldName));
output.append(",");
}
if( (jsonNodeType==null) || (jsonNodeType.name().equals("ARRAY")) || (jsonNodeType.name().equals("OBJECT")))
output.deleteCharAt(output.length()-1);
else if(jsonNode.getNodeType().name().equals("STRING"))
output.append("string\"");
else if(jsonNode.getNodeType().name().equals("NUMBER"))
output.append("integer\"");
output.append("}");
return output.toString();
}
//Util Method to create json schema
private String convertNodeToStringSchemaNode(JsonNode jsonNode, JsonNodeType nodeType, String key) throws IOException
{
StringBuilder result = new StringBuilder("\"" + key + "\": { \"type\": \"");
JsonNode node = null;
switch (nodeType) {
case ARRAY :
node = jsonNode.get(key).get(0);
result.append("array\", \"items\": [{ \"type\":\"");
if(node.getNodeType().name().equals("OBJECT"))
result.append("object\", \"properties\": { ");
result.append(makeSchema(node.toString(), node.getNodeType()));
if(node.getNodeType().name().equals("OBJECT"))
{
List<String> keys = new ArrayList<>();
Iterator<String> iterator = node.fieldNames();
iterator.forEachRemaining(e -> keys.add(e));
String inParams = String.join(",", keys.stream().map(key1 -> "\""+key1+"\"").collect(Collectors.toList()));
result.append(",\"required\":["+inParams+"]");
result.append("}");
}
result.append("]}");
break;
case BOOLEAN:
result.append("boolean\" }");
break;
case NUMBER:
result.append("integer\" }");
break;
case OBJECT:
node = jsonNode.get(key);
result.append("object\", \"properties\": {");
result.append(makeSchema(node.toString(), JsonNodeType.OBJECT));
List<String> keys = new ArrayList<>();
Iterator<String> iterator = node.fieldNames();
iterator.forEachRemaining(e -> keys.add(e));
String inParams = String.join(",", keys.stream().map(key1 -> "\""+key1+"\"").collect(Collectors.toList()));
result.append(",\"required\":["+inParams+"]");
result.append("}");
break;
case STRING:
result.append("string\" }");
break;
default:
result.append("string\" }");
}
return result.toString();
}
}
/**
 * Creates a new MongoDB user by issuing a "createUser" command against the
 * "admin" database.
 *
 * @param mdbClient       connected Mongo client
 * @param newUserName     name of the user to create
 * @param newUserPassword password for the new user
 * @param roles           role/db pair to grant
 */
public void addNewUser(MongoClient mdbClient, String newUserName, String newUserPassword, DBManagement.DBRole roles) {
    System.out.println("inside addNEw User method");
    // LinkedHashMap keeps the command keys in insertion order ("createUser" must come first).
    // Values are Object so the roles LIST is serialized as a JSON array, not a string.
    Map<String, Object> user = new LinkedHashMap<String, Object>();
    user.put("createUser", newUserName);
    user.put("pwd", newUserPassword);
    List<Map<String, String>> listOfRoles = new ArrayList<Map<String, String>>();
    Map<String, String> role1 = new LinkedHashMap<String, String>();
    role1.put("role", roles.getRole());
    role1.put("db", roles.getDb());
    listOfRoles.add(role1);
    // BUG FIX: previously listOfRoles.toString() was stored, producing
    // "roles":"[{role=dbOwner, db=udata}]". Putting the list itself lets Jackson
    // emit the expected "roles":[{"role":"dbOwner","db":"udata"}].
    user.put("roles", listOfRoles);
    System.out.println("MAP: " + user);
    try {
        String json = new ObjectMapper().writeValueAsString(user);
        System.out.println(json);
        String jsonCommand = json;
        System.out.println("createUserString-->" + jsonCommand);
        Document command = new Document(Document.parse(jsonCommand));
        Document collStatsResults = mdbClient.getDatabase("admin").runCommand(command);
        System.out.println(collStatsResults.toJson());
    } catch (Exception e) {
        System.out.println("Error " + e);
    }
}
I am getting output string as -{"createUser":"demoUser2","pwd":"password","roles":"[{role=dbOwner, db=udata}]"}
Expected output- {"createUser":"demoUser2","pwd":"password","roles":[{"role":"dbOwner", "db":"udata"}]}
At first I used JSONObject(), but it does not preserve the JSON key order, so I switched to a LinkedHashMap; now I am facing the array-conversion issue shown above. Can anyone help, or is there another way to generate the JSON with the keys in order?
I am working with HashMap and don't have much experience yet.
I am trying to write a csvFile for proof of a comparison between two lists.
If the compared value is the same ok otherwise not ok.
Simple, but the values "ok" or "not ok" need to be changed automatically, so it was suggested to me to use HashMap where I put the name of the compared field which is the key and the value will be its state (ok or not ok).
So far the values are returned and the file is written, but the status does not fill in automatically.
This is my code so far, if anyone knows how I can do it or has other suggestions please let me know.
HashMap
/**
 * Builds the field-name → status map used for the comparison report; every
 * tracked report field starts out with the same status value.
 *
 * @param status status text assigned to every field (e.g. "Ok" / "not Ok")
 * @return mutable map keyed by report field name
 */
public static Map<String, String> initMap(String status) {
    String[] reportFields = {
            "Book Ref", "Trade Date", "Start Date", "End Date",
            "Period Multiplier", "Notional", "Currency", "Rate",
            "Frequency Multiplier", "Day count"
    };
    Map<String, String> mapFields = new HashMap<String, String>();
    for (String field : reportFields) {
        mapFields.put(field, status);
    }
    return mapFields;
}
Here in the same class, I created this method to compare two lists and define if it is ok or not.
/**
 * Compares the input and output lists element-by-element (same index only),
 * records PASS/FAIL on the Xray fields, and writes the CSV evidence report.
 *
 * NOTE(review): `map` and `cv` are re-created on every inner iteration, so the
 * map handed to reportMKTWireToOutputIRS only reflects the LAST compared pair —
 * that is why every CSV row shows the same status. Build ONE map across the
 * loop and set the status per field key instead of re-initialising it each time.
 * NOTE(review): `catch (Error e)` only catches JVM errors (OutOfMemoryError etc.);
 * `Exception` was almost certainly intended.
 */
public static void compareValues(List<String> inputs, List<String> outputs, XrayFields fields, TradeData data, MKTWireIRS mkt) throws ParseException, InterruptedException {
int y = 0;
int x = 0;
Map<String, String> map = new HashMap<String, String>();
WriterCSV cv = new WriterCSV();
try {
for (String input : inputs) {
for (String out : outputs) {
// NOTE(review): these two assignments discard all statuses collected so far.
cv = new WriterCSV();
map = new HashMap<String, String>();
// Only compare elements at the same position in both lists.
if (y == x) {
if (input.equals(out)) {
System.out.println("ok: " + input + " = " + out);
String comment = "All fields checked are ok";
fields.setComment(comment);
fields.setStatus("PASS");
cv.setOk("Ok");
// initMap sets EVERY field to this status, not just the field being compared.
map = initMap(cv.getOk());
} else {
System.out.println("not ok: " + input + " = " + out);
fields.setStatus("FAIL");
String comment = "The value " + input + " is not the same as " + out;
fields.setComment(comment);
cv.setOk("not Ok");
map = initMap(cv.getOk());
}
}
x = x + 1; // count of the list of output
}
y = y + 1; // count of the list of inputs
x = 0; // reset to 0 the count of outputs
}
//create evidence of comparison
cv.reportMKTWireToOutputIRS(data, mkt, map);
} catch (Error e) {
System.out.println(e);
}
}
This is the method for writing the csv.
/**
 * Writes the comparison-evidence CSV: one row per report field with the output
 * value, the MKTWire value and the status taken from the given map.
 *
 * @param data2 trade data providing the "output" side of each row
 * @param mkt   MKTWire values providing the comparison side of each row
 * @param map   field name → status ("Ok"/"not Ok"), keyed as produced by initMap
 */
public void reportMKTWireToOutputIRS(TradeData data2, MKTWireIRS mkt, Map<String, String> map) throws ParseException, InterruptedException {
    // try-with-resources closes the writers even if a row fails to format;
    // previously an exception before close() leaked the FileWriter.
    try (FileWriter fw = new FileWriter(new File(CSV_MKTWire + Setup.IRScsv));
         CSVWriter cw = new CSVWriter(fw)) {
        // Format values for comparison with the MKTWire representation.
        String month = PropertyNames.deCapitalize(data2.getPeriod());
        String monthReset = PropertyNames.deCapitalize(data2.getResetFrequencyPeriod());
        String formatTradeDateMKT = Utils.formatDateToCompareMKTWire(data2.getTradeDateIRS());
        String formatStartDateMKT = Utils.formatDateToCompareMKTWire(data2.getStart_Date());
        String formatMAturityDateMKT = Utils.formatDateToCompareMKTWire(data2.getMaturity_Date());
        String rateActual = Utils.roundDecimal(data2.getRateIRS());
        String rateFormat = Utils.roundRateMKTwire(mkt.getRateIRS());
        String notionalFormat = data2.getNotional().charAt(0) + "M";
        String[] headers = { "Output Field", "Output Value", " MKTWire Field", " MKTWire Value", "Status" };
        List<String[]> data = new ArrayList<String[]>();
        String[] book = { "Book Ref", data2.getBookRef() + data2.getBookType(), "Book MKTWire", mkt.getBookIRS(), map.get("Book Ref") };
        String[] tradeDate = { "Trade Date", formatTradeDateMKT, "Trade Date MKTWire", mkt.getTradeDateIRS(), map.get("Trade Date") };
        String[] startDate = { "Start Date", formatStartDateMKT, "Start Date MKTWire", mkt.getStartDate(), map.get("Start Date") };
        String[] maturity = { "End Date", formatMAturityDateMKT, "End Date MKTWire", mkt.getEndDate(), map.get("End Date") };
        String[] tenor = { "Period Multiplier", data2.getPeriodMultiplier() + month, "Tenor MKTWire", mkt.getTenorIRS(), map.get("Period Multiplier") };
        String[] notional = { "Notional", notionalFormat, "Notional MKTWire", mkt.getNotionalValueIRS(), map.get("Notional") };
        String[] currency = { "Currency", data2.getCurrencyIRS(), "Currency MKTWire", mkt.getCurrencyIRS(), map.get("Currency") };
        String[] rate = { "Rate", rateActual, "Rate MKTWire", rateFormat, map.get("Rate") };
        String[] resetFrequency = { "Frequency Multiplier", data2.getResetFrequencyMultiplier() + monthReset, "Frequency Multiplier MKTWire", mkt.getResetFrequencyIRS(), map.get("Frequency Multiplier") };
        String[] dayCount = { "Day Count", data2.getDayCount(), "Day Count MKTWire", mkt.getDayCountIRS(), map.get("Day count") };
        data.add(headers);
        data.add(book);
        data.add(tradeDate);
        data.add(startDate);
        data.add(maturity);
        data.add(tenor);
        data.add(notional);
        data.add(currency);
        data.add(rate);
        data.add(resetFrequency);
        data.add(dayCount);
        cw.writeAll(data);
        cw.flush();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
You are having one map and you are calling the initMap method which sets the value for all keys in the map within a loop, in the end it will have either "ok" or "not ok" based on your final loop validation.
I'll explain the logic: I am reading a XML file which contain many request and responses in soap format then I'm storing the request and response in two Hash map. In first Hash map I'm storing transaction Id(unique) as key and values as request time,til-name. In second hash map I'm storing transaction Id(unique) as key and values as response time. In both hash map the keys are same but values are different, by using for loop iterating two loops and I need to get the time difference between response time and request time
eg:request time:2020-01-30T11:07:08.351Z and response time:2020-01-30T11:07:10.152Z
/**
 * Reads a SOAP log file line by line, collects request timestamps and response
 * timestamps keyed by transaction id, and is meant to compute the per-transaction
 * time difference.
 *
 * NOTE(review): `req` and `res` are single shared lists, so EVERY transaction id
 * ends up mapped to the same ever-growing list — create a fresh list per id
 * (e.g. request.computeIfAbsent(transactionId, k -> new ArrayList<>())).
 * NOTE(review): `timeDiff` is assigned but never printed or stored, and it always
 * uses the most recently parsed dateOne/dateTwo rather than the entry being iterated.
 * NOTE(review): the Scanner is never closed — use try-with-resources. SimpleDateFormat
 * is also a legacy API; java.time (Instant/Duration) would be the modern choice.
 */
public class MapTimeDiff {
public static void main(String[] args) throws ParseException {
File file =new File("C:\\Users\\gsanaulla\\Documents\\My Received Files\\ecarewsframework.xml");
Scanner in = null;
String tilname = null;
String transactionId = null;
String requesttime = null;
String responsetime = null;
Date dateOne = null;
Date dateTwo = null;
double timeDiff;
// NOTE(review): pattern has no zone/offset handling for the trailing 'Z' in the samples.
DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS");
Map<String,ArrayList<String>> request=new HashMap<String,ArrayList<String>>();
ArrayList<String> req=new ArrayList<String>();
Map<String,ArrayList<String>> response=new HashMap<String,ArrayList<String>>();
ArrayList<String> res=new ArrayList<String>();
try {
in = new Scanner(file);
while(in.hasNext())
{
String line=in.nextLine();
// Request envelopes: extract til-name, transaction id and request timestamp.
if(line.contains("</S:Envelope>")) {
System.out.println(line);
tilname=line.split("StartRecord><")[1].split("><")[0].split(":")[1];
System.out.println("tilname :: "+tilname);
transactionId = line.split("transactionId>")[1].split("<")[0];
System.out.println("transactio id :: "+transactionId);
requesttime=line.split("sourceTimestamp>")[1].split("<")[0];
System.out.println("request time is :: "+requesttime);
dateOne = df.parse(requesttime);
}
// NOTE(review): executed for EVERY line, even when the fields above were not
// re-parsed — stale values get re-added; NPE on dateOne before the first match.
req.add(tilname);
req.add(dateOne.toString());
System.out.println("req is==== " +req);
request.put(transactionId,req);
System.out.println("request is==== " +request.get(transactionId));
// Response envelopes: extract transaction id and response timestamp.
if(line.contains("</SOAP-ENV:Envelope>")) {
//System.out.println(line);
if(line.contains("transactionId"))
{
responsetime=line.split("sourceTimestamp>")[1].split("<")[0];
transactionId = line.split("transactionId>")[1].split("<")[0];
System.out.println("responsetime :: "+responsetime);
System.out.println("transaction id "+transactionId);
dateTwo = df.parse(responsetime);
}
res.add(dateTwo.toString());
System.out.println("res is===== "+res);
response.put(transactionId,res);
System.out.println("response is===== "+response.get(transactionId));
// NOTE(review): this nested iteration runs inside the read loop and compares
// whole key SETS, not the current entry pair; the computed difference is discarded.
for (Entry<String, ArrayList<String>> entry : request.entrySet()) {
for (Entry<String, ArrayList<String>> entry1 : response.entrySet()) {
System.out.println("Key = " + entry.getKey() +
", Value = " + entry.getValue());
System.out.println("Key = " + entry1.getKey() +
", Value = " + entry1.getValue());
if(request.keySet().equals(response.keySet())) {
timeDiff = (dateTwo.getTime() - dateOne.getTime());
}
}
}
}
}
}
catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
I'm not sure if I understood your question correctly but maybe you can do something similiar like the following:
// Example: pair request/response time lists by transaction id and print the
// absolute per-element differences. Ids with no response entry are skipped,
// and surplus elements in the longer list are ignored.
Map<String, List<String>> requests = Map.of("1", List.of("10,13,12"), "2", List.of("8,7,9"), "3", List.of("11"));
Map<String, List<String>> responses = Map.of("1", List.of("9,10,14"), "2", List.of("8,9,6,12"));
for(Map.Entry<String, List<String>> requestEntry : requests.entrySet()) {
String transactionId = requestEntry.getKey();
// Only transactions present on BOTH sides are compared.
if(responses.containsKey(transactionId)) {
System.out.println("Transaction Id: " + transactionId);
for(int i = 0; i < min(requestEntry.getValue().size(), responses.get(transactionId).size()); i++) {
// Each list element is itself a comma-separated list of times.
List<String> requestTimes = asList(requestEntry.getValue().get(i).split(","));
List<String> responseTimes = asList(responses.get(transactionId).get(i).split(","));
for(int j = 0; j < min(requestTimes.size(), responseTimes.size()); j++) {
int requestTime = parseInt(requestTimes.get(j));
int responseTime = parseInt(responseTimes.get(j));
System.out.println("Difference: " + abs(requestTime - responseTime));
}
}
}
}
As you can see there are no responses for transactionId 3 so this will be ignored.
If elements in the list for a key differ in size (transactionId 2) the surplus elements will also be ignored.
Transaction Id: 1
Difference: 1
Difference: 3
Difference: 2
Transaction Id: 2
Difference: 0
Difference: 2
Difference: 3
I have a MapReduce program which can process delimited, fixed-width and Excel files. There is no problem reading the delimited and fixed-width files. The problem with Excel files is that setup() and cleanup() are called, but map() is not. I tried adding annotations to map(), but it still didn't work.
public class RulesDriver extends Configured implements Tool {
private static Logger LOGGER = LoggerFactory.getLogger(RulesDriver.class);
RuleValidationService aWSS3Service = new RuleValidationService();
HashMap<String, Object> dataMap = new HashMap<String, Object>();
HashMap<String, String> controlMap = new HashMap<String, String>();
public String inputPath = "";
public String outputPath = "";
private static DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm");
ControlFileReader ctrlReader = new ControlFileReader();
CSVToExcel csv2Excel = new CSVToExcel();
HashMap<Integer,String> countMap = new HashMap<Integer,String>();
HashMap<String,Integer> numberValueMap = new HashMap<String,Integer>();
HashMap<String,Object> rulesMap = new HashMap<String,Object>();
CharsetConvertor charsetConvertor = new CharsetConvertor();
ControlFileComparison controlFileComparison = new ControlFileComparison();
boolean isControlFileValid = false;
public static void main(String[] args) throws Exception {
int res = ToolRunner.run(new Configuration(), new RulesDriver(), args);
System.exit(res);
}
#Override
public int run(String[] args) throws Exception {
LOGGER.info("HADOOP MapReduce Driver started");
if (args.length < 3) {
LOGGER.info("Args ");
return 1;
}
int j = -1;
//Prop - Starts
String cacheBucket = args[0];
String s3accesskey = args[1];
String s3accesspass = args[2];
//Prop - ends
// file2InputPath Value is from DB
String file2InputLocation = "";
String fileComparisonInd = "";
String inputPath = "";
String outputPath = "";
String url = "";
String userId = "";
String password = "";
String fileType = "";
String ctrlCompResult = "N";
try {
Configuration conf = new Configuration();
Properties prop = new Properties();
//Prop - starts
prop.setProperty("qatool.cacheBucket", cacheBucket);
prop.setProperty("qatool.s3accesskey", s3accesskey);
prop.setProperty("qatool.s3accesspass", s3accesspass);
String propertiesFile = aWSS3Service.getObjectKey(cacheBucket, "application",prop);
if(null==propertiesFile && "".equals(propertiesFile)){
return 0;
}
S3Object s3Object = aWSS3Service.getObject(cacheBucket, propertiesFile, prop);
LOGGER.info("Loading App properties");
InputStreamReader in = new InputStreamReader(s3Object.getObjectContent());
Properties appProperties = new Properties();
try {
appProperties.load(in);
prop.putAll(appProperties);
LOGGER.info(" ",prop);
}
catch (IOException e1) {
LOGGER.error("Exception while reading properties file :" , e1);
return 0;
}
initialize(prop);
if (!(dataMap == null)) {
if (("N").equals(dataMap.get("SuccessIndicator"))) {
return 0;
}
List value = (ArrayList) dataMap.get("LookUpValList");
LOGGER.info("lookUpVallist",value);
}
if (dataMap != null) {
controlMap = (HashMap<String, String>) dataMap.get("ControlMap");
}
if (controlMap != null) {
inputPath = (prop.getProperty("qatool.rulesInputLocation") + "/").concat((String) dataMap.get("InputFileName")); //TEMP
LOGGER.info(inputPath);
fileType = (String) dataMap.get("FileType");
} else {
inputPath = (prop.getProperty("qatool.rulesInputLocation") + "/").concat((String) dataMap.get("InputFileName"));
LOGGER.info(inputPath);
fileType = (String) dataMap.get("FileType");
}
rulesMap = (HashMap<String,Object>)dataMap.get("RulesMap");
isControlFileValid = controlFileComparison.compareControlFile(controlMap, aWSS3Service, prop, rulesMap); //TEMP
LOGGER.info("isControlFileValid in driver : "+isControlFileValid);
if(isControlFileValid){
ctrlCompResult = "Y";
}
conf.set("isControlFileValid", ctrlCompResult);
// ** DATABASE Connection **/
String ctrlFileId = controlMap.get("ControlFileIdentifier");
url = prop.getProperty(QaToolRulesConstants.DB_URL);
userId = prop.getProperty(QaToolRulesConstants.DB_USER_ID);
password = prop.getProperty(QaToolRulesConstants.DB_USER_DET);
InpflPrcsSumm inpflPrcsSumm = new InpflPrcsSumm();
DBConnectivity dbConnectivity = new DBConnectivity(url, userId, password);
inpflPrcsSumm = dbConnectivity.getPreviousFileDetail(ctrlFileId);
dbConnectivity.closeConnection();
LOGGER.info( " inpflPrcsSumm.getPrevFileId() " + inpflPrcsSumm.getPrevFileId());
prop.setProperty(QaToolRulesConstants.PREV_FILE_ID, inpflPrcsSumm.getPrevFileId().toString());
file2InputLocation = inpflPrcsSumm.getPrevFileLocation();
boolean file2Available = file2InputLocation.isEmpty();
String folderPath = "";
String bucket = "";
if (!file2Available) {
String arr[] = file2InputLocation.split("/");
if(file2InputLocation.startsWith("http")){
bucket = arr[3];
}else{
bucket = arr[2];
}
folderPath = file2InputLocation.substring(file2InputLocation.lastIndexOf(bucket) + bucket.length() + 1, file2InputLocation.length());
}
// File 2 input path
prop.setProperty("qatool.file2InputPath", file2InputLocation);
if(!file2Available){
file2InputLocation = file2InputLocation + "/Success";
String file2Name = aWSS3Service.getObjectKey(bucket, folderPath,prop);
LOGGER.info("bucket->"+bucket);
LOGGER.info("folderPath->"+folderPath);
file2Name = file2Name.substring(file2Name.lastIndexOf("/")+1, file2Name.length());
prop.setProperty("file2Name", (null!=file2Name && "".equals(file2Name))?"":file2Name);
LOGGER.info(prop.getProperty("file2Name"));
}
prop.setProperty("qatool.auditPrevFolderPath", folderPath);
prop.setProperty("qatool.auditBucketPrevFolderPath", bucket);
LOGGER.info("ctrlFileId : " + ctrlFileId);
LOGGER.info("BUCKET : " + bucket);
LOGGER.info("folder : " + folderPath);
Date dateobj = new Date();
outputPath = (String) prop.getProperty("qatool.rulesOutputLocation") + "/" + dateFormat.format(dateobj); //TEMP
fileComparisonInd = controlMap.get("FileComparisonIndicator");
Gson gson = new Gson();
String propSerilzation = gson.toJson(prop);
conf.set("application.properties", propSerilzation);
Job job = Job.getInstance(conf);
job.setJarByClass(RulesDriver.class);
job.setJobName("Rule Validation and Comparison");
job.getConfiguration().set("fs.s3n.awsAccessKeyId", (String) prop.getProperty("qatool.s3accesskey"));
job.getConfiguration().set("fs.s3n.awsSecretAccessKey", (String) prop.getProperty("qatool.s3accesspass"));
job.getConfiguration().set("fs.s3.awsAccessKeyId", (String) prop.getProperty("qatool.s3accesskey"));
job.getConfiguration().set("fs.s3.awsSecretAccessKey", (String) prop.getProperty("qatool.s3accesspass"));
job.getConfiguration().set("fs.s3a.awsAccessKeyId", (String) prop.getProperty("qatool.s3accesskey"));
job.getConfiguration().set("fs.s3a.awsSecretAccessKey", (String) prop.getProperty("qatool.s3accesspass"));
job.getConfiguration().set("fs.s3n.endpoint", "s3.amazonaws.com");
job.getConfiguration().set("fs.s3.endpoint", "s3.amazonaws.com");
job.getConfiguration().set("fs.s3a.endpoint", "s3.amazonaws.com");
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(Text.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(Text.class);
job.setReducerClass(RulesCountReducer.class);
job.setNumReduceTasks(1);
job.setMaxMapAttempts(1);
job.setMaxReduceAttempts(1);
if("UTF-16".equalsIgnoreCase(controlMap.get("FileCodePage"))){
convertEncoding((String)dataMap.get("InputFileName"),rulesInputLocation,prop);
if (!file2Available && "Y".equals(ctrlCompResult)) {
convertEncoding(inpflPrcsSumm.getPrevFileName(),file2InputLocation,prop);
}
}
LOGGER.info("fileComparisonInd + "+ fileComparisonInd + " file2Available + " + file2Available + " ctrlCompResult + " + ctrlCompResult);
if (fileType != null && fileType.equals(QaToolRulesConstants.INPUT_FILE_TYPE_DELIMI)) {
job.setInputFormatClass(TextInputFormat.class);
job.setOutputFormatClass(TextOutputFormat.class);
MultipleInputs.addInputPath(job, new Path(rulesInputLocation), TextInputFormat.class, TextRulesMapper.class);
if (fileComparisonInd.equalsIgnoreCase(QaToolRulesConstants.FILE_COMP_INDICATOR) && !file2Available && "Y".equals(ctrlCompResult)) {
Path file2InputPath = new Path(file2InputLocation);
if (isInputPathAvail(file2InputPath, conf)) {
MultipleInputs.addInputPath(job, file2InputPath, TextInputFormat.class, TextRulesMapperFile2.class);
}
}
} else if (fileType != null && fileType.equals(QaToolRulesConstants.INPUT_FILE_TYPE_EXCEL)) {
job.setInputFormatClass(ExcelInputFormat.class);
job.setOutputFormatClass(TextOutputFormat.class);
MultipleInputs.addInputPath(job, new Path(rulesInputLocation), ExcelInputFormat.class, ExcelMapper.class);
String inputFileName = controlMap.get("InputFileName");
String fileExtn = inputFileName.substring(inputFileName.lastIndexOf(".") + 1);
prop.setProperty("File", "Excel");
prop.setProperty("fileExtension", fileExtn);
if (fileComparisonInd.equalsIgnoreCase(QaToolRulesConstants.FILE_COMP_INDICATOR) && !file2Available && "Y".equals(ctrlCompResult)) {
Path file2InputPath = new Path(file2InputLocation);
if (isInputPathAvail(file2InputPath, conf)) {
MultipleInputs.addInputPath(job, file2InputPath, ExcelInputFormat.class, ExcelMapper2.class);
}
}
} else if (fileType != null && fileType.equals(QaToolRulesConstants.INPUT_FILE_TYPE_FIXED)) {
prop.setProperty("File", "DAT");
MultipleInputs.addInputPath(job, new Path(rulesInputLocation), TextInputFormat.class, FixedWidthMapper.class);
if (fileComparisonInd.equalsIgnoreCase(QaToolRulesConstants.FILE_COMP_INDICATOR) && !file2Available && "Y".equals(ctrlCompResult)) {
Path file2InputPath = new Path(file2InputLocation);
if (isInputPathAvail(file2InputPath, conf)) {
MultipleInputs.addInputPath(job, file2InputPath, TextInputFormat.class, FixedWidthMapper2.class);
}
}
}
MultipleOutputs.addNamedOutput(job, "error", TextOutputFormat.class, Text.class, Text.class);
MultipleOutputs.addNamedOutput(job, "success", TextOutputFormat.class, Text.class, Text.class);
MultipleOutputs.addNamedOutput(job, QaToolRulesConstants.ADDED_DELETED, TextOutputFormat.class, Text.class, Text.class );
/*MultipleOutputs.addNamedOutput(job, QaToolRulesConstants.ADDED_UPDATED, TextOutputFormat.class, Text.class, Text.class );*/ //TEMP ADDED FOR ADDED AND UPDATED
MultipleOutputs.addNamedOutput(job, QaToolRulesConstants.DETAIL, TextOutputFormat.class, Text.class, Text.class);
FileOutputFormat.setOutputPath(job, new Path(outputPath));
j = job.waitForCompletion(true) ? 0 : 1;
LOGGER.info("Program Complted with return " + j);
// Control-file movement -- start
String outputBucket = rulesOutputLocation;
outputBucket = outputBucket.substring(outputBucket.indexOf("//")+2, outputBucket.length());
outputBucket = outputBucket.substring(0,(outputBucket.indexOf("/")));
String controlFileNamekey = aWSS3Service.getObjectKey(outputBucket, "delivery/"+ dataMap.get("ControlFileName"),prop);
if (controlFileNamekey != null) {
controlFileNamekey = (String) controlFileNamekey.substring(controlFileNamekey.lastIndexOf("/") + 1,controlFileNamekey.length());
String outputCtrlFilePath = "delivery/"+ dateFormat.format(dateobj) +"/" + controlFileNamekey;
LOGGER.info("controlFileNamekey "+controlFileNamekey+" outputCtrlFilePath "+outputCtrlFilePath);
aWSS3Service.moveObjects(outputBucket, "delivery/"+controlFileNamekey, outputBucket, outputCtrlFilePath,prop);
}
// Control-file movement -- end
if (j == 0) {
// Get counters
LOGGER.info("Transfer");
final Counters counters = job.getCounters();
long duplicates = counters.findCounter(MATCH_COUNTER.DUPLICATES).getValue();
LOGGER.info("duplicates->"+duplicates);
long groupingThreshold = counters.findCounter(MATCH_COUNTER.GROUPING_ERR_THRESHOLD).getValue();
LOGGER.info("groupingThreshold->"+groupingThreshold);
if(groupingThreshold==1 || duplicates==1){
if(duplicates==1){
writeOutputFile(folderName,dateobj,"DuplicateRecords",prop,cacheBucket);
}else{
writeOutputFile(folderName,dateobj,"GroupingThreshold",prop,cacheBucket);
}
}else{
long successCount = counters.findCounter(MATCH_COUNTER.SUCCESS_COUNT).getValue();
if (controlMap.get("ColumnHeaderPresentIndicator") != null
&& controlMap.get("ColumnHeaderPresentIndicator").equals("Y")) {
successCount = successCount-1;
}
LOGGER.info("successCount "+successCount);
LOGGER.info("TOLERANCEVALUE " + counters.findCounter(MATCH_COUNTER.TOLERANCEVALUE).getValue());
LOGGER.info("RULES_ERRORTHRESHOLD " + counters.findCounter(MATCH_COUNTER.RULES_ERRORTHRESHOLD).getValue());
long errorThreshold = counters.findCounter(MATCH_COUNTER.RULES_ERRORTHRESHOLD).getValue();
LOGGER.info("COMPARISION_ERR_THRESHOLD " + counters.findCounter(MATCH_COUNTER.COMPARISION_ERR_THRESHOLD).getValue());
writeOutputFile(folderName,dateobj, outputPath + "," + successCount + "," + counters.findCounter(MATCH_COUNTER.TOLERANCEVALUE).getValue() + "," + errorThreshold + ","
+counters.findCounter(MATCH_COUNTER.COMPARISION_ERR_THRESHOLD).getValue()+","+ctrlCompResult,prop,cacheBucket);
String auditBucketName = "";
auditBucketName = rulesOutputLocation;
auditBucketName = auditBucketName.substring(auditBucketName.indexOf("//") + 2, auditBucketName.length() - 1);
auditBucketName = auditBucketName.substring(0, (auditBucketName.indexOf("/")));
String auditFileMovementPath = "delivery/" + dateFormat.format(dateobj);
auditFile = auditFile.replace(".xlsx","");
String inputFileName = (String) dataMap.get("InputFileName");
inputFileName = inputFileName.substring(0,inputFileName.lastIndexOf(".")).concat(".xlsx");
try {
LOGGER.info("Audit Bucket Name : " + auditBucketName);
LOGGER.info("Move parameter >>> outputbucketname : auditFileLocation : auditBucketName : auditFileMovementPath auditFile ");
LOGGER.info("Move parameter " + outputbucketname + ", " + auditFileLocation + " , " + auditBucketName + " , " + auditFileMovementPath + "/" + auditFile + "_" + inputFileName);
aWSS3Service.moveObjects(outputbucketname, auditFileLocation, auditBucketName, auditFileMovementPath +"/"+ auditFile +"_"+ inputFileName, prop);
} catch (Exception e) {
LOGGER.error("Exception while moving audit file ",e);
}
}
}else{
writeOutputFile(folderName,dateobj,"DuplicateRecords",prop,cacheBucket);
}
} catch (Exception e) {
LOGGER.error("Error in RulesDriver ", e);
}
return j;
}
}
Excel Mapper:
public class ExcelMapper extends Mapper<LongWritable, Text, Text, Text> {
#Override
protected void setup(Mapper<LongWritable, Text, Text, Text>.Context context)throws InterruptedException, IOException {
LOGGER.info("Inside Mapper Setup");
}
#Override
public void map(LongWritable key, Text value, Context context) throws InterruptedException, IOException {
}
#Override
protected void cleanup(Context context) throws IOException,
InterruptedException {
}
}