Hey, I am trying to get the size of a static map from another class.
I am defining the static map in one class, as:
tasklet.class
package com.hcsc.ccsp.nonadj.subrogation.integration;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.core.io.Resource;
import org.springframework.util.Assert;
import com.hcsc.ccsp.nonadj.subrogation.batch.Subrogation;
import com.hcsc.ccsp.nonadj.subrogation.common.SubrogationConstants;
/**
* @author Manan Shah
*
*/
public class SubrogationFileTransferTasklet implements Tasklet,
InitializingBean {
private Logger logger = LogManager
.getLogger(SubrogationFileTransferTasklet.class);
private Resource inputfile;
private Resource outputfile;
public static String fileLastName;
public static String header = null;
public static String trailer = null;
public static List<Subrogation> fileDataListSubro = new ArrayList<Subrogation>();
public List<String> fileDataListS = new ArrayList<String>();
public static TreeMap<String, Subrogation> map = new TreeMap<String, Subrogation>();
public int counter = 0;
public String value;
@Override
public void afterPropertiesSet() throws Exception {
Assert.notNull(inputfile, "inputfile must be set");
}
public void setTrailer(String trailer) {
this.trailer = trailer;
}
public void setHeader(String header) {
this.header = header;
}
public String getTrailer() {
return trailer;
}
public String getHeader() {
return header;
}
public Resource getInputfile() {
return inputfile;
}
public void setInputfile(Resource inputfile) {
this.inputfile = inputfile;
}
public Resource getOutputfile() {
return outputfile;
}
public void setOutputfile(Resource outputfile) {
this.outputfile = outputfile;
}
public static void setFileDataListSubro(List<Subrogation> fileDataListSubro) {
SubrogationFileTransferTasklet.fileDataListSubro = fileDataListSubro;
}
public static List<Subrogation> getFileDataListSubro() {
return fileDataListSubro;
}
public static void setMap(TreeMap<String, Subrogation> map) {
SubrogationFileTransferTasklet.map = map;
}
public static TreeMap<String, Subrogation> getMap() {
return map;
}
@Override
public RepeatStatus execute(StepContribution contribution,
ChunkContext chunkContext) throws Exception {
value = (String) chunkContext.getStepContext().getStepExecution()
.getJobExecution().getExecutionContext().get("outputFile");
readFromFile();
return RepeatStatus.FINISHED;
}
public void readFromFile() {
BufferedReader br = null;
try {
String sCurrentLine;
br = new BufferedReader(new FileReader(inputfile.getFile()));
fileLastName = inputfile.getFile().getName();
while ((sCurrentLine = br.readLine()) != null) {
if (sCurrentLine.indexOf("TRAILER") != -1) {
setTrailer(sCurrentLine);
} else if (sCurrentLine.indexOf("HEADER") != -1) {
setHeader(sCurrentLine);
} else if (sCurrentLine.equalsIgnoreCase("")) {
} else {
fileDataListS.add(sCurrentLine);
}
}
convertListOfStringToListOfSubrogaion(fileDataListS);
writeDataToFile();
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (br != null)
br.close();
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
public void convertListOfStringToListOfSubrogaion(List<String> list) {
Iterator<String> iterator = list.iterator();
while (iterator.hasNext()) {
Subrogation subrogration = new Subrogation();
String s = iterator.next();
subrogration.setGRP_NBR(StringUtils.substring(s, 0, 6));
subrogration.setSECT_NBR(StringUtils.substring(s, 6, 10));
subrogration.setAFP_VAL(StringUtils.substring(s, 10, 13));
subrogration.setDOL_MIN_VAL(StringUtils.substring(s, 13, 20));
subrogration
.setCORP_ENT_CD(StringUtils.substring(s, 20, s.length()));
map.put(subrogration.getGRP_NBR() + subrogration.getSECT_NBR(),
subrogration);
fileDataListSubro.add(subrogration);
}
}
public void writeDataToFile() {
try {
File file = new File(value);
if (!file.exists()) {
logger.info("output file is:-" + file.getAbsolutePath());
file.createNewFile();
}
FileWriter fw = new FileWriter(file.getAbsoluteFile());
BufferedWriter bw = new BufferedWriter(fw);
Iterator it = map.entrySet().iterator();
while (it.hasNext()) {
Map.Entry subrogation = (Map.Entry) it.next();
// System.out.println(subrogation.getKey() + " = " +
// subrogation.getValue());
// it.remove(); // avoids a ConcurrentModificationException
bw.append(subrogation.getValue().toString()
+ SubrogationConstants.filler58);
}
bw.close();
} catch (IOException e) {
e.printStackTrace();
}
logger.info("subrogationFileTransferTasklet Step completes");
}
}
In the processor I want to put the map size into an int.
processor.class
package com.hcsc.ccsp.nonadj.subrogation.processor;
import org.apache.commons.lang3.StringUtils;
import org.springframework.batch.item.ItemProcessor;
import com.hcsc.ccsp.nonadj.subrogation.Utils.SubrogationUtils;
import com.hcsc.ccsp.nonadj.subrogation.batch.Subrogation;
import com.hcsc.ccsp.nonadj.subrogation.common.SubrogationConstants;
import com.hcsc.ccsp.nonadj.subrogation.integration.SubrogationFileTransferTasklet;
public class SubrogationProcessor implements
ItemProcessor<Subrogation, Subrogation> {
public SubrogationFileTransferTasklet fileTransferTasklet = new SubrogationFileTransferTasklet();
SubrogationUtils subrogationUtils = new SubrogationUtils();
public int countFromFile=SubrogationFileTransferTasklet.map.size();
public static int totalRecords = 0;
public static int duplicate = 0;
@Override
public Subrogation process(Subrogation subrogration) throws Exception {
// TODO Auto-generated method stub
if (subrogationUtils.validateData(subrogration)) {
Subrogation newSubro = new Subrogation();
newSubro.setGRP_NBR(StringUtils.leftPad(subrogration.getGRP_NBR()
.trim(), SubrogationConstants.length6, "0"));
if (subrogration.getSECT_NBR().trim().length() < 5) {
newSubro.setSECT_NBR(StringUtils.leftPad(subrogration
.getSECT_NBR().trim(), SubrogationConstants.length4,
"0"));
} else if (subrogration.getSECT_NBR().trim().length() == 5) {
newSubro.setSECT_NBR(StringUtils.substring(subrogration.getSECT_NBR().trim(), 1));
} else {
return null;
}
newSubro.setAFP_VAL(StringUtils.leftPad(subrogration.getAFP_VAL()
.trim(), SubrogationConstants.length3, "0"));
if (subrogration.getDOL_MIN_VAL().trim().contains(".")) {
newSubro.setDOL_MIN_VAL(StringUtils.leftPad(StringUtils.substring(subrogration.getDOL_MIN_VAL(),0,subrogration.getDOL_MIN_VAL().indexOf(".")), SubrogationConstants.length7,
"0"));
} else {
newSubro.setDOL_MIN_VAL(StringUtils.leftPad(subrogration
.getDOL_MIN_VAL().trim(), SubrogationConstants.length7,
"0"));
}
newSubro.setCORP_ENT_CD(StringUtils.substring(
subrogration.getCORP_ENT_CD(), 0, 2));
if (SubrogationFileTransferTasklet.map.containsKey(newSubro
.getGRP_NBR() + newSubro.getSECT_NBR())) {
duplicate++;
return null;
} else {
if(SubrogationFileTransferTasklet.fileLastName.contains("TX")){
if(newSubro.getCORP_ENT_CD().equalsIgnoreCase("TX")){
SubrogationFileTransferTasklet.map.put(newSubro
.getGRP_NBR() + newSubro.getSECT_NBR(), newSubro);
totalRecords++;
return newSubro;
}
}
else{
if(SubrogationFileTransferTasklet.fileLastName.contains("IL")){
if(!newSubro.getCORP_ENT_CD().equalsIgnoreCase("TX"))
{
newSubro.setCORP_ENT_CD("IL");
SubrogationFileTransferTasklet.map.put(newSubro
.getGRP_NBR() + newSubro.getSECT_NBR(), newSubro);
totalRecords++;
return newSubro;
}
}
else{
return null;
}
}
return null;
}
}
else {
return null;
}
}
}
class SubrogrationException extends RuntimeException {
private static final long serialVersionUID = -8971030257905108630L;
public SubrogrationException(String message) {
super(message);
}
}
And at last I want to use that countFromFile in another class.
writer.class
package com.hcsc.ccsp.nonadj.subrogation.writer;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.LineNumberReader;
import java.io.Writer;
import java.util.Date;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.springframework.batch.item.ItemStreamException;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.file.FlatFileFooterCallback;
import org.springframework.batch.item.file.FlatFileHeaderCallback;
import com.hcsc.ccsp.nonadj.subrogation.Utils.SubrogationUtils;
import com.hcsc.ccsp.nonadj.subrogation.batch.Subrogation;
import com.hcsc.ccsp.nonadj.subrogation.common.SubrogationConstants;
import com.hcsc.ccsp.nonadj.subrogation.integration.SubrogationFileTransferTasklet;
import com.hcsc.ccsp.nonadj.subrogation.processor.SubrogationProcessor;
public class SubrogationHeaderFooterWriter implements FlatFileFooterCallback,FlatFileHeaderCallback{
private Logger logger = LogManager
.getLogger(SubrogationHeaderFooterWriter.class);
SubrogationFileTransferTasklet fileTransferTasklet = new SubrogationFileTransferTasklet();
SubrogationUtils subrogationUtils=new SubrogationUtils();
SubrogationProcessor processor=new SubrogationProcessor();
private ItemWriter<Subrogation> delegate;
public void setDelegate(ItemWriter<Subrogation> delegate) {
this.delegate = delegate;
}
public ItemWriter<Subrogation> getDelegate() {
return delegate;
}
@Override
public void writeHeader(Writer writer) throws IOException {
//writer.write(SubrogationFileTransferTasklet.header);
}
@Override
public void writeFooter(Writer writer) throws IOException {
String trailer = SubrogationFileTransferTasklet.trailer;
String s1 = StringUtils.substring(trailer, 0, 23);
logger.info(" Data from input file size is---- "+new SubrogationProcessor().countFromFile);
int trailerCounter=new SubrogationProcessor().countFromFile+SubrogationProcessor.totalRecords;
logger.info(" Data comming from database is"+SubrogationProcessor.totalRecords);
logger.info(" Duplicate data From DataBase is " +SubrogationProcessor.duplicate);
logger.info(" Traileer is " + s1+ trailerCounter);
writer.write(s1 + trailerCounter);
SubrogationFileTransferTasklet.map.clear();
SubrogationFileTransferTasklet.fileDataListSubro.clear();
SubrogationProcessor.totalRecords=0;
SubrogationProcessor.duplicate=0;
}
public void writeErrorDataToFile(List<String> errorDataList,String errorfile){
File file;
try {
file = new File(errorfile);
logger.info("error file is "+errorfile);
FileWriter fileWriter = new FileWriter(file,true);
BufferedWriter bufferWritter = new BufferedWriter(fileWriter);
for(String data:errorDataList){
bufferWritter.write(new Date()+" "+data);
bufferWritter.write(SubrogationConstants.LINE_SEPARATOR);
}
bufferWritter.close();
}
catch (IOException e) {
throw new ItemStreamException("Could not convert resource to file: [" + errorfile + "]", e);
}
}
/*
public void write(List<? extends Subrogation> subrogation) throws Exception {
System.out.println("inside writer");
delegate.write(subrogation);
}*/
}
So here, in the logger message, the map size prints 0.
I am not able to understand why.
Do it this way to make sure that it is initialized with the current size of the map when the object is constructed:
class SubrogationProcessor{
public int countFromFile;
public SubrogationProcessor(){
countFromFile=SubrogationFileTransferTasklet.map.size();
}
}
This depends on when the "map.put" line of code is run. Is it in a static block in the tasklet class?
If the processor instance is initialized before any record has been added to the map, then map.size() will indeed be 0.
My suggestion would be to populate the map in a static block if at all possible, or to debug the code and see when the put() method is being called in comparison to when the size() method is called:
public static TreeMap<String, Subrogation> map = new TreeMap<String, Subrogation>();
static {
    // note: a static block cannot see the tasklet's local variables, so the
    // Subrogation instances would have to be created inside this block too
    map.put(subrogration.getGRP_NBR() + subrogration.getSECT_NBR(), subrogration);
}
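Alternatively, since the processor bean may be constructed before the tasklet step has populated the map, you could avoid caching the size at construction time and read it only when it is actually needed. A minimal sketch, assuming the tasklet step always runs before the footer is written:

class SubrogationProcessor {
    // no cached field: the size is read at call time, after the tasklet
    // step has filled the static map
    public int getCountFromFile() {
        return SubrogationFileTransferTasklet.map.size();
    }
}

The writer would then call processor.getCountFromFile() inside writeFooter() instead of reading a field that was captured when the processor object was constructed.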
I'm currently developing a JNLP application.
The application starts its "Splash Screen Loading Screen" phase of launch, but then crashes into my SystemShutDownUtility with the error log posted below.
I am unsure of how to fix this error, as I am not seeing which bit of the code is the problem. It's not showing the usual "Caused by..." log either.
I can and will add any class files that will help you assist me in resolving this issue.
It's probably really simple and I'm missing it because it's so late, but any help on this is greatly appreciated!
Error Log:
java.lang.NullPointerException: Cannot invoke "javax.swing.ActionMap.remove(Object)" because the return value of "javax.swing.AbstractButton.getActionMap()" is null
at java.desktop/javax.swing.plaf.basic.BasicButtonUI.uninstallListeners(BasicButtonUI.java:244)
at java.desktop/javax.swing.plaf.basic.BasicButtonUI.uninstallUI(BasicButtonUI.java:211)
at java.desktop/javax.swing.JComponent.uninstallUIAndProperties(JComponent.java:745)
at java.desktop/javax.swing.JComponent.setUI(JComponent.java:720)
at java.desktop/javax.swing.AbstractButton.setUI(AbstractButton.java:1758)
at com.cthsprojects.apphost.ui.decorator.AppMenuToggleButtonDecorator.decorate(AppMenuToggleButtonDecorator.java:27)
at com.cthsprojects.apphost.ui.decorator.V7MenuToggleButtonDecorator.decorate(V7MenuToggleButtonDecorator.java:22)
at com.cthsprojects.apphost.ui.decorator.AppMenuToggleButtonDecorator.decorate(AppMenuToggleButtonDecorator.java:1)
at com.cthsprojects.common.view.adapters.AbstractComponentAdapterImpl.addDecorator(AbstractComponentAdapterImpl.java:63)
at com.cthsprojects.apphost.ui.ToolsSubMenuAdapter.addApplication(ToolsSubMenuAdapter.java:176)
at com.cthsprojects.apphost.ui.ToolsSubMenuAdapter.addDomainComponents(ToolsSubMenuAdapter.java:160)
at com.cthsprojects.apphost.ui.ToolsSubMenuAdapter.<init>(ToolsSubMenuAdapter.java:89)
at com.cthsprojects.apphost.ui.ToolsSubMenuAdapter.addDomainComponents(ToolsSubMenuAdapter.java:163)
at com.cthsprojects.apphost.ui.ToolsSubMenuAdapter.<init>(ToolsSubMenuAdapter.java:79)
at com.cthsprojects.apphost.ui.ToolsMenuAdapter.initialize(ToolsMenuAdapter.java:37)
at com.cthsprojects.apphost.ui.AppTopPanelAdapterImpl.initialize(AppTopPanelAdapterImpl.java:59)
at com.cthsprojects.apphost.loader.AppHostRootFrameImpl.initialize(AppHostRootFrameImpl.java:84)
at com.cthsprojects.apphost.launch.AppHost.run(AppHost.java:107)
at com.cthsprojects.apphost.launch.AppHost.main(AppHost.java:56)
AppHost Class File:
package com.cthsprojects.apphost.launch;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.Locale;
import java.util.Properties;
import javax.jnlp.ServiceManager;
import javax.jnlp.SingleInstanceListener;
import javax.jnlp.SingleInstanceService;
import javax.jnlp.UnavailableServiceException;
import javax.swing.SwingUtilities;
import javax.swing.ToolTipManager;
import javax.swing.border.EtchedBorder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.springframework.beans.BeansException;
import org.springframework.context.support.GenericApplicationContext;
import com.cthsprojects.apphost.context.ApplicationLoader;
import com.cthsprojects.apphost.exceptiontracer.ExceptionLogger;
import com.cthsprojects.apphost.loader.AppHostRootFrame;
import com.cthsprojects.apphost.splash.SplashScreen;
import com.cthsprojects.common.components.decorators.StyleDecorator;
import com.cthsprojects.common.ui.theme.SSLookAndFeel;
import com.cthsprojects.common.ui.util.SwingUtils;
import com.cthsprojects.common.view.adapters.decorators.AdapterDecorator;
import com.cthsprojects.common.view.adapters.decorators.QAToolTipDecorator;
import com.cthsprojects.common.view.adapters.factory.AdapterFactory;
import com.cthsprojects.configuration.AppHostOptions;
import com.cthsprojects.configuration.ConfigurationBuilder;
import com.cthsprojects.configuration.ConfigurationException;
import com.cthsprojects.configuration.LoggingOutputStream;
import com.cthsprojects.configuration.ProfileNotFoundException;
import com.cthsprojects.configuration.UniversalOptions;
public class AppHost {
public static final String SOCRATES_VERSION = "4.26.7";
public static SingleInstanceListener sisL = null;
SingleInstanceService sis;
private AppHostRootFrame root;
private SplashScreen splashScreen;
private ExceptionLogger exceptionLogger;
public static void main(String[] args) {
AppHost host = new AppHost();
host.run();
}
void run() {
preventMultiLaunchIfJNLP();
StringBuilder startSocratesText = new StringBuilder();
startSocratesText.append("Starting Socrates (");
startSocratesText.append("4.26.7");
startSocratesText.append(")");
System.out.println(startSocratesText.toString());
String profile = System.getProperty("profile");
ConfigurationBuilder.getInstance().getLoggingConfigurer().configure(profile);
final Log LOGGER = LogFactory.getLog(AppHost.class);
LOGGER.info(startSocratesText.toString());
setC3P0Properties();
String os = System.getProperty("os.name").toLowerCase();
String javaVersion = System.getProperty("java.version").toLowerCase();
if (os.startsWith("win") && javaVersion.startsWith("1.8")) {
System.setOut(new PrintStream((OutputStream)new LoggingOutputStream(Logger.getLogger("sysout"), Level.INFO), true));
System.setErr(new PrintStream((OutputStream)new LoggingOutputStream(Logger.getLogger("sysout"), Level.ERROR), true));
}
restoreMultiLaunchIfConfigurationOff();
SwingUtils.switchToSSLookAndFeel();
if (this.splashScreen == null)
this.splashScreen = new SplashScreen();
showSplashScreen();
Thread.UncaughtExceptionHandler killSplash = new Thread.UncaughtExceptionHandler() {
public void uncaughtException(Thread arg0, Throwable e) {
String errorString;
if (e instanceof ExceptionInInitializerError) {
errorString = e.getCause().toString();
} else {
errorString = e.toString();
}
LOGGER.error("An error occurred with splash", e);
AppHost.this.splashScreen.error(errorString);
}
};
Thread.setDefaultUncaughtExceptionHandler(killSplash);
GenericApplicationContext factory = ApplicationLoader.getInstance().getFactory();
if (this.exceptionLogger == null) {
this.exceptionLogger = (ExceptionLogger)factory.getBean(ExceptionLogger.class.getSimpleName());
this.exceptionLogger.setSplashScreen(this.splashScreen);
}
if (this.root == null)
this.root = (AppHostRootFrame)factory.getBean(AppHostRootFrame.class.getSimpleName());
Thread.setDefaultUncaughtExceptionHandler((Thread.UncaughtExceptionHandler)this.exceptionLogger);
checkProfileSet();
overrideDefaultLocale();
setupToolTips();
setupDebug();
this.root.initialize(ApplicationLoader.getInstance().getApplicationDomains());
this.splashScreen.dispose();
this.root.setVisible(true);
this.root.setAlwaysOnTop(false);
}
private void setC3P0Properties() {
Log LOGGER = LogFactory.getLog(AppHost.class);
Properties prop = new Properties(System.getProperties());
InputStream input = getResourceFileAsInputStream("c3p0.properties");
if (input == null) {
LOGGER.info("Using c3p0-default.properties");
prop = loadProperties(prop, "c3p0-default.properties");
} else {
LOGGER.info("Using c3p0.properties");
}
prop.put("com.mchange.v2.log.MLog", "com.mchange.v2.log.log4j.Log4jMLog");
System.setProperties(prop);
}
private Properties loadProperties(Properties prop, String fileName) {
Log LOGGER = LogFactory.getLog(AppHost.class);
try {
InputStream input = getResourceFileAsInputStream(fileName);
prop.load(input);
} catch (IOException ex) {
LOGGER.error("Failed to read " + fileName, ex);
}
return prop;
}
private InputStream getResourceFileAsInputStream(String fileName) {
ClassLoader classLoader = AppHost.class.getClassLoader();
return classLoader.getResourceAsStream(fileName);
}
private void restoreMultiLaunchIfConfigurationOff() throws BeansException, ConfigurationException {
if (null != sisL &&
!AppHostOptions.getInstance().isSingleLaunch())
this.sis.removeSingleInstanceListener(sisL);
}
private void preventMultiLaunchIfJNLP() {
try {
this.sis = (SingleInstanceService)ServiceManager.lookup("javax.jnlp.SingleInstanceService");
sisL = new SingleInstanceListener() {
public void newActivation(String[] as) {}
};
this.sis.addSingleInstanceListener(sisL);
} catch (UnavailableServiceException e) {
this.sis = null;
}
}
private void showSplashScreen() {
if (AppHostOptions.getInstance().isSplashScreenEnabled())
this.splashScreen.setVisible(true);
}
private void checkProfileSet() {
if (System.getProperty("profile") == null)
throw new ProfileNotFoundException("Please specify a profile in sysargs\n\n\t-Dprofile=deploy");
}
private void setupToolTips() {
boolean qaToolTipsEnabled = UniversalOptions.getInstance().isQaToolTipsEnabled();
if (qaToolTipsEnabled) {
ToolTipManager.sharedInstance().setLightWeightPopupEnabled(false);
ToolTipManager.sharedInstance().setInitialDelay(10);
ToolTipManager.sharedInstance().setDismissDelay(7000);
AdapterFactory.addDefaultDecorator((AdapterDecorator)new QAToolTipDecorator());
}
SSLookAndFeel.setSScrolBarQAMode(qaToolTipsEnabled);
}
private void setupDebug() {
boolean uiDebugEnabled = UniversalOptions.getInstance().isUIDebugEnabled();
if (uiDebugEnabled) {
StyleDecorator debugStyle = new StyleDecorator();
debugStyle.setBorder(new EtchedBorder());
debugStyle.setNoFont(true);
AdapterFactory.addDefaultDecorator((AdapterDecorator)debugStyle);
}
}
private void overrideDefaultLocale() {
String localeOverride = System.getProperty("LocaleOverride");
if (localeOverride != null) {
Locale localeOverrideObject;
String[] localeArray = localeOverride.split("_");
switch (localeArray.length) {
case 1:
localeOverrideObject = new Locale(localeArray[0]);
break;
case 2:
localeOverrideObject = new Locale(localeArray[0], localeArray[1]);
break;
case 3:
localeOverrideObject = new Locale(localeArray[0], localeArray[1], localeArray[2]);
break;
default:
return;
}
Locale.setDefault(localeOverrideObject);
}
}
void setRootFrame(AppHostRootFrame root) {
this.root = root;
}
void setSplashScreen(SplashScreen splashScreen) {
this.splashScreen = splashScreen;
}
void setExceptionLogger(ExceptionLogger exceptionLogger) {
this.exceptionLogger = exceptionLogger;
}
}
ApplicationLoader class file:
package com.cthsprojects.apphost.context;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.springframework.beans.factory.BeanFactoryUtils;
import org.springframework.beans.factory.ListableBeanFactory;
import org.springframework.context.support.GenericApplicationContext;
import com.cthsprojects.apphost.application.SocratesApplication;
import com.cthsprojects.apphost.applications.domains.ApplicationDomain;
import com.cthsprojects.apphost.applications.domains.DomainComponent;
import com.cthsprojects.apphost.applications.domains.DomainType;
import com.cthsprojects.apphost.resources.AppHostErrorMessagesTextToken;
import com.cthsprojects.application.login.LoginSocratesApplication;
import com.cthsprojects.common.view.adapters.PanelAdapter;
import com.cthsprojects.common.view.adapters.factory.AdapterFactory;
import com.cthsprojects.configuration.ConfigurationBuilder;
public final class ApplicationLoader {
private static ApplicationLoader instance;
private static List<SocratesApplication> applications;
private static final int MAP_INITIAL_CAPACITY = 4;
private final Map<DomainType, ApplicationDomain> domains;
private ApplicationLoader() {
ApplicationDomain defaultDomain = (ApplicationDomain)getFactory().getBean(DomainType.getDefaultDomainTypeName());
applications = new ArrayList<SocratesApplication>(BeanFactoryUtils.beansOfTypeIncludingAncestors((ListableBeanFactory)getFactory(), SocratesApplication.class).values());
for (Iterator<SocratesApplication> itr = applications.iterator(); itr.hasNext(); ) {
SocratesApplication app = itr.next();
ApplicationDomain dom = app.getApplicationDomain();
if (null == dom)
app.setApplicationDomain(defaultDomain);
}
this.domains = new HashMap<DomainType, ApplicationDomain>(4, 1.0F);
Collection<ApplicationDomain> doms = BeanFactoryUtils.beansOfTypeIncludingAncestors((ListableBeanFactory)getFactory(), ApplicationDomain.class).values();
for (Iterator<ApplicationDomain> iterator1 = doms.iterator(); iterator1.hasNext(); ) {
ApplicationDomain d = iterator1.next();
this.domains.put(d.getDomainType(), d);
}
}
public static ApplicationLoader getInstance() {
if (instance == null)
instance = new ApplicationLoader();
return instance;
}
public void close() {
for (Iterator<SocratesApplication> itr = applications.iterator(); itr.hasNext(); ) {
SocratesApplication app = itr.next();
app.doExit();
}
getFactory().close();
getFactory().destroy();
SharedContext.reset();
}
public Collection<ApplicationDomain> getApplicationDomains() {
return Collections.unmodifiableCollection(this.domains.values());
}
public Collection<SocratesApplication> getApplications() {
Collection<SocratesApplication> ans = applications;
if (ans == null)
ans = new ArrayList<SocratesApplication>();
return Collections.unmodifiableCollection(ans);
}
public SocratesApplication getApplicationByName(String applicationName) {
SocratesApplication targetApp = null;
for (SocratesApplication application : applications) {
if (application.getClass().getSimpleName().equals(applicationName)) {
targetApp = application;
break;
}
}
return targetApp;
}
public SocratesApplication getLoginApplication() {
return (SocratesApplication)getFactory().getBean(LoginSocratesApplication.class.getSimpleName());
}
public GenericApplicationContext getFactory() {
return ConfigurationBuilder.getInstance().getApplicationFactory();
}
public ApplicationDomain getDomain(DomainType aType) {
if (null == aType)
throw new NullPointerException();
ApplicationDomain d = this.domains.get(aType);
if (null == d)
throw new IllegalArgumentException(AppHostErrorMessagesTextToken.ERROR_INVALID_DOMAIN_TYPE.getText(new Object[] { aType.getName() }));
return d;
}
public List<SocratesApplication> findApplicationsForDomain(ApplicationDomain aApplicationDomain) {
List<SocratesApplication> l = new ArrayList<SocratesApplication>();
Iterator<DomainComponent> itr = aApplicationDomain.getChildren().iterator();
while (itr.hasNext()) {
DomainComponent dc = itr.next();
findApps(l, dc);
}
return l;
}
private List<SocratesApplication> findApps(List<SocratesApplication> aList, DomainComponent aDomainComponent) {
if (aDomainComponent.isLeaf()) {
aList.add(aDomainComponent.getAdaptee());
return aList;
}
for (Iterator<DomainComponent> itr = aDomainComponent.getChildren().iterator(); itr.hasNext(); ) {
DomainComponent child = itr.next();
findApps(aList, child);
}
return aList;
}
public void reset() {
for (Iterator<SocratesApplication> itr = applications.iterator(); itr.hasNext(); ) {
SocratesApplication app = itr.next();
if (app.isInitialized()) {
resetLocations(app);
app.reset();
}
}
}
private void resetLocations(SocratesApplication application) {
PanelAdapter containerPanel = AdapterFactory.getTracker().getPanel(null, "Host.CentrePanel.Container");
if (null != containerPanel)
containerPanel.removeAll();
}
}
Can someone tell me how to add an attachment using ProducerTemplate?
I have been searching but I cannot find an answer for my case.
I am using Camel 2.1 and I have these three classes:
MailSender2.java
import java.util.HashMap;
import java.util.Map;
import java.util.ResourceBundle;
import org.apache.camel.Exchange;
import org.apache.camel.ExchangePattern;
import org.apache.camel.ProducerTemplate;
public class MailSender2 extends TypeMail{
private static final ResourceBundle RES = ResourceBundle.getBundle("mail");
protected static final String MAIL_NOTIFICATION_ENDPOINT=RES.getString("mail.host.location").trim()+":"+RES.getString("mail.port").trim();
private Map<String, Object> header;
public MailSender2() {
this.header=new HashMap<>();
}
public void send(ProducerTemplate template) {
this.header.put("From", this.getT_from());
this.header.put("To", this.getT_to());
this.header.put("Subject", this.getT_subject());
this.header.put(Exchange.CONTENT_TYPE, "text/html; charset=UTF-8");
//this.getF_ficher() <-- I have here the file to attach
//this.getT_ficnon() <-- I have here the name of the file
//this.getT_ficext() <-- I have here the extension of the file
template.sendBodyAndHeaders(MAIL_NOTIFICATION_ENDPOINT, this.getT_mensaje(), header);
}
}
TypeMail.java:
public class TypeMail {
private String t_id;
private String t_from;
private String t_to;
private String t_subject;
private String t_mensaje;
private byte[] f_ficher;
private String t_ficnon;
private String t_ficext;
public String getT_id() {
return t_id;
}
public void setT_id(String t_id) {
this.t_id = t_id;
}
public String getT_from() {
return t_from;
}
public void setT_from(String t_from) {
this.t_from = t_from;
}
public String getT_to() {
return t_to;
}
public void setT_to(String t_to) {
this.t_to = t_to;
}
public String getT_subject() {
return t_subject;
}
public void setT_subject(String t_subject) {
this.t_subject = t_subject;
}
public String getT_mensaje() {
return t_mensaje;
}
public void setT_mensaje(String t_mensaje) {
this.t_mensaje = t_mensaje;
}
public byte[] getF_ficher() {
return f_ficher;
}
public void setF_ficher(byte[] f_ficher) {
this.f_ficher = f_ficher;
}
public String getT_ficnon() {
return t_ficnon;
}
public void setT_ficnon(String t_ficnon) {
this.t_ficnon = t_ficnon;
}
public String getT_ficext() {
return t_ficext;
}
public void setT_ficext(String t_ficext) {
this.t_ficext = t_ficext;
}
}
MailCommunicationTransformer.java:
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.ProducerTemplate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.ws.soap.client.SoapFaultClientException;
public class MailCommunicationTransformer {
MailSender2 mailSender = null;
static Logger logger = LoggerFactory.getLogger(MailCommunicationTransformer.class);
public MailCommunicationTransformer()
{
}
public MailLog transform(Object actualMessage, Exchange exchange, CamelContext context)
{
mailSender = exchange.getIn().getBody(MailSender2.class);
try {
MailSenderDAO mailSenderDAO = (MailSenderDAO)context.getRegistry().lookup("MailSenderDAO");
mailSenderDAO.validarInput(mailSender);
if (mailSender!=null) {
ProducerTemplate template=exchange.getContext().createProducerTemplate();
try {
mailSender.send(template);
}
catch (Throwable ex) {
ex.printStackTrace();
exchange.setProperty(Exchange.EXCEPTION_CAUGHT,ex);
}
}
}catch (MailException me) {
me.printStackTrace();
exchange.setProperty(Exchange.EXCEPTION_CAUGHT,me);
}
Throwable e = exchange.getProperty(Exchange.EXCEPTION_CAUGHT,
Throwable.class);
String response = "OK";
if (e != null) {
StringBuffer mensaje = new StringBuffer();
if (e instanceof SoapFaultClientException) {
mensaje.append("MAIL fault exception: CLIENT. ");
} else {
mensaje.append("MAIL fault exception: MAIL. ");
}
logger.info("MailCommunicationTransformer",e);
while (e != null) {
e.printStackTrace();
mensaje.append(e.getMessage());
e = e.getCause();
}
response = mensaje.toString();
}
MailLog log = new MailLog(mailSender, response); //, protocolo
return log;
}
}
In TypeMail I have the file in f_ficher, and the file name (t_ficnon) and extension (t_ficext), but I cannot find how to attach this file in MailSender2 before template.sendBodyAndHeaders(...).
Any help would be very appreciated.
Regards.
Perhaps I don't fully understand your question, but the ProducerTemplate doesn't know about the message type.
You just send a body and perhaps also headers to an endpoint.
Therefore the body just needs to be a fully constructed MimeMessage object as documented in the Camel Mail docs.
You can simply construct the mail message with Java and then use the object with the ProducerTemplate (which is what you already do).
template.sendBodyAndHeaders("your-smtp-endpoint", yourMimeMessageInstance, yourHeaderMap);
Thanks for the answer!
But in the end I got it working this way:
New class, EmailProcessor.java:
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Files;
import java.util.Objects;
import java.util.ResourceBundle;
import javax.activation.DataHandler;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.Processor;
import org.apache.commons.codec.binary.Base64;
public class EmailProcessor implements Processor {
// Class attributes
private TypeMail typeMail;
public EmailProcessor(TypeMail typeMail) {
this.typeMail = typeMail;
}
@Override
public void process(Exchange exchange) throws Exception {
Message ms = exchange.getIn();
ms.setHeader("From", this.typeMail.getT_from());
ms.setHeader("To", this.typeMail.getT_to());
ms.setHeader("Subject", this.typeMail.getT_subject());
ms.setHeader(Exchange.CONTENT_TYPE, "text/html; charset=UTF-8");
ms.setBody("<p style='font-family: Calibri;'>" + this.typeMail.getT_mensaje() + "</p>");
if (this.typeMail.getF_ficher() != null) {
String mimeType = "application/pdf";
if ("zip".equals(typeMail.getT_ficext())) {
mimeType = "application/zip";
}
ms.addAttachment(typeMail.getT_ficnon() + "." + typeMail.getT_ficext(), new DataHandler(typeMail.getF_ficher(), mimeType));
}
}
}
MailSender.java:
import java.util.ResourceBundle;
import org.apache.camel.ExchangePattern;
import org.apache.camel.ProducerTemplate;
public class MailSender extends TypeMail{
private static final ResourceBundle RES = ResourceBundle.getBundle("mail");
protected static final String MAIL_NOTIFICATION_ENDPOINT=RES.getString("mail.host.location").trim()+":"+RES.getString("mail.port").trim();
public MailSender() {
}
public void send(ProducerTemplate template) {
template.send(MAIL_NOTIFICATION_ENDPOINT, ExchangePattern.InOnly, new EmailProcessor(this));
}
}
What am I doing wrong? My Kafka Streams program below fails while streaming the data with "Cannot deserialize instance of com.kafka.productiontest.models.TimeOff out of START_ARRAY token".
I have a topic timeOffs2 which contains time-off information with key timeOffID and a value object that contains employeeId. I just want to group all time offs by employee key and write them to the store.
For the store, the key will be employeeId and the value will be the list of time offs.
Program properties and streaming logic:
public Properties getKafkaProperties() throws UnknownHostException {
InetAddress myHost = InetAddress.getLocalHost();
Properties kafkaStreamProperties = new Properties();
kafkaStreamProperties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
kafkaStreamProperties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
kafkaStreamProperties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, TimeOffSerde.class);
kafkaStreamProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
kafkaStreamProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "com.kafka.productiontest.models.TimeOffSerializer");
kafkaStreamProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, application_id );
kafkaStreamProperties.put(StreamsConfig.APPLICATION_SERVER_CONFIG, myHost.getHostName() + ":" + port);
return kafkaStreamProperties;
}
String topic = "timeOffs2";
StreamsBuilder builder = new StreamsBuilder();
KStream<String, TimeOff> source = builder.stream(topic);
KTable<String, ArrayList<TimeOff>> newStore = source.groupBy((k, v) -> v.getEmployeeId())
.aggregate(ArrayList::new,
(key, value, aggregate) -> {
aggregate.add(value);
return aggregate;
}, Materialized.as("NewStore").withValueSerde(TimeOffListSerde(TimeOffSerde)));
final Topology topology = builder.build();
final KafkaStreams streams = new KafkaStreams(topology, getKafkaProperties());
TimeOffSerializer.java
package com.kafka.productiontest.models;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.common.serialization.Serializer;
import java.util.Map;
public class TimeOffSerializer implements Serializer {
@Override
public void configure(Map configs, boolean isKey) {
}
@Override
public byte[] serialize(String topic, Object data) {
byte[] retVal = null;
ObjectMapper objectMapper = new ObjectMapper();
try {
retVal = objectMapper.writeValueAsString(data).getBytes();
} catch (Exception e) {
e.printStackTrace();
}
return retVal;
}
@Override
public void close() {
}
}
TimeOffDeserializer.java
package com.kafka.productiontest.models;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.common.serialization.Deserializer ;
import java.util.Map;
public class TimeOffDeserializer implements Deserializer {
@Override
public void configure(Map configs, boolean isKey) {
}
@Override
public TimeOff deserialize(String arg0, byte[] arg1) {
ObjectMapper mapper = new ObjectMapper();
TimeOff timeOff = null;
try {
timeOff = mapper.readValue(arg1, TimeOff.class);
} catch (Exception e) {
e.printStackTrace();
}
return timeOff;
}
@Override
public void close() {
}
}
TimeOffSerde.java
package com.kafka.productiontest.models;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;
import java.util.Map;
public class TimeOffSerde implements Serde<Object> {
private final Serde inner;
public TimeOffSerde(){
inner = Serdes.serdeFrom(new TimeOffSerializer(), new TimeOffDeserializer());
}
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
inner.serializer().configure(configs, isKey);
inner.deserializer().configure(configs, isKey);
}
@Override
public void close() {
inner.serializer().close();
inner.deserializer().close();
}
@Override
public Serializer<Object> serializer() {
return inner.serializer();
}
@Override
public Deserializer<Object> deserializer() {
return inner.deserializer();
}
}
TimeOffListSerializer.java
package com.kafka.productiontest.models;
import org.apache.kafka.common.serialization.Serializer;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.sql.Time;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Map;
public class TimeOffListSerializer implements Serializer<ArrayList<TimeOff>> {
private Serializer<TimeOff> inner;
public TimeOffListSerializer(Serializer<TimeOff> inner) {
this.inner = inner;
}
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
}
@Override
public byte[] serialize(String topic, ArrayList<TimeOff> data) {
final int size = data.size();
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
final DataOutputStream dos = new DataOutputStream(baos);
final Iterator<TimeOff> iterator = data.iterator();
try {
dos.writeInt(size);
while (iterator.hasNext()) {
final byte[] bytes = inner.serialize(topic, iterator.next());
dos.writeInt(bytes.length);
dos.write(bytes);
}
}catch (Exception ex) {
}
return baos.toByteArray();
}
@Override
public void close() {
inner.close();
}
}
TimeOffListDeserializer.java
package com.kafka.productiontest.models;
import org.apache.kafka.common.serialization.Deserializer;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Map;
public class TimeOffListDeserializer implements Deserializer<ArrayList<TimeOff>> {
private final Deserializer<TimeOff> valueDeserializer;
public TimeOffListDeserializer(final Deserializer<TimeOff> valueDeserializer) {
this.valueDeserializer = valueDeserializer;
}
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
}
@Override
public ArrayList<TimeOff> deserialize(String topic, byte[] data) {
if (data == null || data.length == 0) {
return null;
}
final ArrayList<TimeOff> arrayList = new ArrayList<>();
final DataInputStream dataInputStream = new DataInputStream(new ByteArrayInputStream(data));
try {
final int records = dataInputStream.readInt();
for (int i = 0; i < records; i++) {
final byte[] valueBytes = new byte[dataInputStream.readInt()];
dataInputStream.read(valueBytes);
arrayList.add(valueDeserializer.deserialize(topic, valueBytes));
}
} catch (IOException e) {
throw new RuntimeException("Unable to deserialize ArrayList", e);
}
return arrayList;
}
@Override
public void close() {
}
}
TimeOffListSerde.java
package com.kafka.productiontest.models;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;
import java.util.ArrayList;
import java.util.Map;
public class TimeOffListSerde implements Serde<ArrayList<TimeOff>> {
private Serde<ArrayList<TimeOff>> inner;
public TimeOffListSerde() {
}
public TimeOffListSerde(Serde<TimeOff> serde){
inner = Serdes.serdeFrom(new TimeOffListSerializer(serde.serializer()), new TimeOffListDeserializer(serde.deserializer()));
}
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
inner.serializer().configure(configs, isKey);
inner.deserializer().configure(configs, isKey);
}
@Override
public void close() {
inner.serializer().close();
inner.deserializer().close();
}
@Override
public Serializer<ArrayList<TimeOff>> serializer() {
return inner.serializer();
}
@Override
public Deserializer<ArrayList<TimeOff>> deserializer() {
return inner.deserializer();
}
}
I think the issue is in this part with withValueSerde. I cannot compile with this code, and if I remove withValueSerde, it gives me the error "Cannot deserialize TimeOff object". Can you please help me understand what I am doing wrong?
KTable<String, ArrayList<TimeOff>> newStore = source.groupBy((k, v) -> v.getEmployeeId())
.aggregate(ArrayList::new,
(key, value, aggregate) -> {
aggregate.add(value);
return aggregate;
}, Materialized.as("NewStore").withValueSerde(TimeOffListSerde(TimeOffSerde)));
Looking at your code I can see several issues:
TimeOffSerde - it should implement Serde<TimeOff>, not Serde<Object> (see the sketch below).
You don't pass types for the key and value in Materialized, so it assumes they are Object.
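For the first point, the serde declaration would change along these lines (a sketch; the serializer and deserializer need to be typed to TimeOff as well):

public class TimeOffSerde implements Serde<TimeOff> {
    private final Serde<TimeOff> inner;
    public TimeOffSerde() {
        // assumes TimeOffSerializer implements Serializer<TimeOff> and
        // TimeOffDeserializer implements Deserializer<TimeOff>
        inner = Serdes.serdeFrom(new TimeOffSerializer(), new TimeOffDeserializer());
    }
    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        inner.serializer().configure(configs, isKey);
        inner.deserializer().configure(configs, isKey);
    }
    @Override
    public void close() {
        inner.serializer().close();
        inner.deserializer().close();
    }
    @Override
    public Serializer<TimeOff> serializer() {
        return inner.serializer();
    }
    @Override
    public Deserializer<TimeOff> deserializer() {
        return inner.deserializer();
    }
}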
So your streaming part should be something like:
KTable<String, ArrayList<TimeOff>> newStore = source.groupBy((k, v) -> v.getEmployeeId())
.aggregate(ArrayList::new,
(key, value, aggregate) -> {
aggregate.add(value);
return aggregate;
}, Materialized.<String, ArrayList<TimeOff>, KeyValueStore<Bytes, byte[]>>as("NewStore").withValueSerde(new TimeOffListSerde(new TimeOffSerde())));
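Note that the typed Materialized call above also needs two imports (standard Kafka packages):

import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.state.KeyValueStore;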
NOTICE: Remember to clear the state store directory after the modification.
This is the main class in which the query is being fired:
package extractKeyword;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import org.apache.commons.httpclient.Header;
import org.apache.commons.httpclient.methods.GetMethod;
import org.dbpedia.spotlight.exceptions.AnnotationException;
import org.dbpedia.spotlight.model.DBpediaResource;
import org.dbpedia.spotlight.model.Text;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.File;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.LinkedList;
import java.util.List;
public class db extends AnnotationClient {
//private final static String API_URL = "http://jodaiber.dyndns.org:2222/";
private static String API_URL = "http://spotlight.dbpedia.org/";
private static double CONFIDENCE = 0.0;
private static int SUPPORT = 0;
// private static String powered_by ="non";
// private static String spotter ="CoOccurrenceBasedSelector";//"LingPipeSpotter"=Annotate all spots
//AtLeastOneNounSelector"=No verbs and adjs.
//"CoOccurrenceBasedSelector" =No 'common words'
//"NESpotter"=Only Per.,Org.,Loc.
//private static String disambiguator ="Default";//Default ;Occurrences=Occurrence-centric;Document=Document-centric
//private static String showScores ="yes";
@SuppressWarnings("static-access")
public void configiration(double CONFIDENCE,int SUPPORT)
//, String powered_by,String spotter,String disambiguator,String showScores)
{
this.CONFIDENCE=CONFIDENCE;
this.SUPPORT=SUPPORT;
// this.powered_by=powered_by;
//this.spotter=spotter;
//this.disambiguator=disambiguator;
//showScores=showScores;
}
public List<DBpediaResource> extract(Text text) throws AnnotationException {
// LOG.info("Querying API.");
String spotlightResponse;
try {
String Query=API_URL + "rest/annotate/?" +
"confidence=" + CONFIDENCE
+ "&support=" + SUPPORT
// + "&spotter=" + spotter
// + "&disambiguator=" + disambiguator
// + "&showScores=" + showScores
// + "&powered_by=" + powered_by
+ "&text=" + URLEncoder.encode(text.text(), "utf-8");
//LOG.info(Query);
GetMethod getMethod = new GetMethod(Query);
getMethod.addRequestHeader(new Header("Accept", "application/json"));
spotlightResponse = request(getMethod);
} catch (UnsupportedEncodingException e) {
throw new AnnotationException("Could not encode text.", e);
}
assert spotlightResponse != null;
JSONObject resultJSON = null;
JSONArray entities = null;
try {
resultJSON = new JSONObject(spotlightResponse);
entities = resultJSON.getJSONArray("Resources");
} catch (JSONException e) {
//throw new AnnotationException("Received invalid response from DBpedia Spotlight API.");
}
LinkedList<DBpediaResource> resources = new LinkedList<DBpediaResource>();
if(entities!=null)
for(int i = 0; i < entities.length(); i++) {
try {
JSONObject entity = entities.getJSONObject(i);
resources.add(
new DBpediaResource(entity.getString("#URI"),
Integer.parseInt(entity.getString("#support"))));
} catch (JSONException e) {
//((Object) LOG).error("JSON exception "+e);
}
}
return resources;
}
}
The extended class
package extractKeyword;
import org.apache.commons.httpclient.DefaultHttpMethodRetryHandler;
import org.apache.commons.httpclient.Header;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpException;
import org.apache.commons.httpclient.HttpMethodBase;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.params.HttpMethodParams;
import org.dbpedia.spotlight.exceptions.AnnotationException;
import org.dbpedia.spotlight.model.DBpediaResource;
import org.dbpedia.spotlight.model.Text;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.logging.Logger;
import javax.ws.rs.HttpMethod;
/**
* @author pablomendes
*/
public abstract class AnnotationClient {
//public Logger LOG = Logger.getLogger(this.getClass());
private List<String> RES = new ArrayList<String>();
// Create an instance of HttpClient.
private static HttpClient client = new HttpClient();
public List<String> getResu(){
return RES;
}
public String request(GetMethod getMethod) throws AnnotationException {
String response = null;
// Provide a custom retry handler if necessary
getMethod.getParams().setParameter(HttpMethodParams.RETRY_HANDLER,
new DefaultHttpMethodRetryHandler(3, false));
try {
// Execute the method.
int statusCode = client.executeMethod((org.apache.commons.httpclient.HttpMethod) getMethod);
if (statusCode != HttpStatus.SC_OK) {
// LOG.error("Method failed: " + ((HttpMethodBase) method).getStatusLine());
}
// Read the response body.
byte[] responseBody = ((HttpMethodBase) getMethod).getResponseBody(); //TODO Going to buffer response body of large or unknown size. Using getResponseBodyAsStream instead is recommended.
// Deal with the response.
// Use caution: ensure correct character encoding and is not binary data
response = new String(responseBody);
} catch (HttpException e) {
// LOG.error("Fatal protocol violation: " + e.getMessage());
throw new AnnotationException("Protocol error executing HTTP request.",e);
} catch (IOException e) {
//((Object) LOG).error("Fatal transport error: " + e.getMessage());
//((Object) LOG).error(((HttpMethodBase) method).getQueryString());
throw new AnnotationException("Transport error executing HTTP request.",e);
} finally {
// Release the connection.
((HttpMethodBase) getMethod).releaseConnection();
}
return response;
}
protected static String readFileAsString(String filePath) throws java.io.IOException{
return readFileAsString(new File(filePath));
}
protected static String readFileAsString(File file) throws IOException {
byte[] buffer = new byte[(int) file.length()];
@SuppressWarnings("resource")
BufferedInputStream f = new BufferedInputStream(new FileInputStream(file));
f.read(buffer);
return new String(buffer);
}
static abstract class LineParser {
public abstract String parse(String s) throws ParseException;
static class ManualDatasetLineParser extends LineParser {
public String parse(String s) throws ParseException {
return s.trim();
}
}
static class OccTSVLineParser extends LineParser {
public String parse(String s) throws ParseException {
String result = s;
try {
result = s.trim().split("\t")[3];
} catch (ArrayIndexOutOfBoundsException e) {
throw new ParseException(e.getMessage(), 3);
}
return result;
}
}
}
public void saveExtractedEntitiesSet(String Question, LineParser parser, int restartFrom) throws Exception {
String text = Question;
int i=0;
//int correct =0 ; int error = 0;int sum = 0;
for (String snippet: text.split("\n")) {
String s = parser.parse(snippet);
if (s!= null && !s.equals("")) {
i++;
if (i<restartFrom) continue;
List<DBpediaResource> entities = new ArrayList<DBpediaResource>();
try {
entities = extract(new Text(snippet.replaceAll("\\s+"," ")));
System.out.println(entities.get(0).getFullUri());
} catch (AnnotationException e) {
// error++;
//LOG.error(e);
e.printStackTrace();
}
for (DBpediaResource e: entities) {
RES.add(e.uri());
}
}
}
}
public abstract List<DBpediaResource> extract(Text text) throws AnnotationException;
public void evaluate(String Question) throws Exception {
evaluateManual(Question,0);
}
public void evaluateManual(String Question, int restartFrom) throws Exception {
saveExtractedEntitiesSet(Question,new LineParser.ManualDatasetLineParser(), restartFrom);
}
}
The Main Class
package extractKeyword;
public class startAnnonation {
public static void main(String[] args) throws Exception {
String question = "What is the winning chances of BJP in New Delhi elections?";
db c = new db ();
c.configiration(0.25,0);
//, 0, "non", "AtLeastOneNounSelector", "Default", "yes");
c.evaluate(question);
System.out.println("resource : "+c.getResu());
}
}
The main problem is that when I use DBpedia Spotlight via the Spotlight jar (above code), I get a different result compared to the DBpedia Spotlight endpoint (dbpedia-spotlight.github.io/demo/).
Result using the above code:-
Text :-What is the winning chances of BJP in New Delhi elections?
Confidence level:-0.35
resource : [Election]
Result on DBPedia Spotlight endpoint(//dbpedia-spotlight.github.io/demo/)
Text:-What is the winning chances of BJP in New Delhi elections?
Confidence level:-0.35
resource : [Bharatiya_Janata_Party, New_Delhi, Election]
Also, why does Spotlight no longer have support as a parameter?
I am generating a CSV file in my map function, so each map task generates one CSV file. Note this is a side effect, not the output of the mapper. The way I am naming those files is something like filename_inputkey. However, when I run the application on a single-node cluster, only one file is generated. I have 10 lines in my input, and as my understanding goes, there should be 10 mapper tasks and 10 files generated. Let me know if I am thinking about this the wrong way.
Here is my GWASInputFormat class
import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
public class GWASInputFormat extends FileInputFormat<LongWritable, GWASGenotypeBean>{
@Override
public RecordReader<LongWritable, GWASGenotypeBean> getRecordReader(org.apache.hadoop.mapred.InputSplit input, JobConf job, Reporter arg2) throws IOException {
return (RecordReader<LongWritable, GWASGenotypeBean>) new GWASRecordReader(job, (FileSplit)input);
}
}
Here is GWASRecordReader
import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.LineRecordReader;
import org.apache.hadoop.mapred.RecordReader;
public class GWASRecordReader implements RecordReader<LongWritable, GWASGenotypeBean>{
private LineRecordReader lineReader;
private LongWritable lineKey;
private Text lineValue;
@Override
public void close() throws IOException {
if(lineReader != null) {
lineReader.close();
}
}
public GWASRecordReader(JobConf job, FileSplit split) throws IOException {
lineReader = new LineRecordReader(job, split);
lineKey = lineReader.createKey();
lineValue = lineReader.createValue();
}
@Override
public LongWritable createKey() {
return new LongWritable();
}
@Override
public GWASGenotypeBean createValue() {
return new GWASGenotypeBean();
}
@Override
public long getPos() throws IOException {
return lineReader.getPos();
}
@Override
public boolean next(LongWritable key, GWASGenotypeBean value) throws IOException {
if(!lineReader.next(lineKey, lineValue)){
return false;
}
String[] values = lineValue.toString().split(",");
if(values.length !=32) {
throw new IOException("Invalid Record ");
}
value.setPROJECT_NAME(values[0]);
value.setRESEARCH_CODE(values[1]);
value.setFACILITY_CODE(values[2]);
value.setPROJECT_CODE(values[3]);
value.setINVESTIGATOR(values[4]);
value.setPATIENT_NUMBER(values[5]);
value.setSAMPLE_COLLECTION_DATE(values[6]);
value.setGENE_NAME(values[7]);
value.setDbSNP_RefSNP_ID(values[8]);
value.setSNP_ID(values[9]);
value.setALT_SNP_ID(values[10]);
value.setSTRAND(values[11]);
value.setASSAY_PLATFORM(values[12]);
value.setSOFTWARE_NAME(values[13]);
value.setSOFTWARE_VERSION_NUMBER(values[14]);
value.setTEST_DATE(values[15]);
value.setPLATE_POSITION(values[16]);
value.setPLATE_ID(values[17]);
value.setOPERATOR(values[18]);
value.setGENOTYPE(values[19]);
value.setGENOTYPE_QS1_NAME(values[20]);
value.setGENOTYPE_QS2_NAME(values[21]);
value.setGENOTYPE_QS3_NAME(values[22]);
value.setGENOTYPE_QS4_NAME(values[23]);
value.setGENOTYPE_QS5_NAME(values[24]);
value.setGENOTYPE_QS1_RESULT(values[25]);
value.setGENOTYPE_QS2_RESULT(values[26]);
value.setGENOTYPE_QS3_RESULT(values[27]);
value.setGENOTYPE_QS4_RESULT(values[28]);
value.setGENOTYPE_QS5_RESULT(values[29]);
value.setSTAGE(values[30]);
value.setLAB(values[31]);
return true;
}
@Override
public float getProgress() throws IOException {
return lineReader.getProgress();
}
}
Mapper class
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import com.google.common.base.Strings;
public class GWASMapper extends MapReduceBase implements Mapper<LongWritable, GWASGenotypeBean, Text, Text> {
private static Configuration conf;
@SuppressWarnings("rawtypes")
public void setup(org.apache.hadoop.mapreduce.Mapper.Context context) throws IOException {
conf = context.getConfiguration();
// Path[] otherFiles = DistributedCache.getLocalCacheFiles(context.getConfiguration());
}
@Override
public void map(LongWritable inputKey, GWASGenotypeBean inputValue, OutputCollector<Text, Text> output, Reporter reporter) throws IOException {
checkForNulls(inputValue, inputKey.toString());
output.collect(new Text(inputValue.getPROJECT_CODE()), new Text(inputValue.getFACILITY_CODE()));
}
private void checkForNulls(GWASGenotypeBean user, String inputKey) {
String f1 = " does not have a value_fail";
String p1 = "Must not contain NULLS for required fields";
// have to initialize these two to some paths in hdfs
String edtChkRptDtl = "/user/hduser/output6/detail" + inputKey + ".csv";
String edtChkRptSmry = "/user/hduser/output6/summary" + inputKey + ".csv";
// ...
List<String> errSmry = new ArrayList<String>();
Map<String, String> loc = new TreeMap<String, String>();
if(Strings.isNullOrEmpty(user.getPROJECT_NAME())) {
loc.put("test", "PROJECT_NAME ");
errSmry.add("_fail");
} else if(Strings.isNullOrEmpty(user.getRESEARCH_CODE())) {
loc.put("test", "RESEARCH_CODE ");
errSmry.add("_fail");
} else if(Strings.isNullOrEmpty(user.getFACILITY_CODE())) {
loc.put("test", "FACILITY_CODE ");
errSmry.add("_fail");
} else if(Strings.isNullOrEmpty(user.getPROJECT_CODE())) {
loc.put("test", "PROJECT_CODE ");
errSmry.add("_fail");
} else if(Strings.isNullOrEmpty(user.getINVESTIGATOR())) {
loc.put("test", "INVESTIGATOR ");
errSmry.add("_fail");
} else if(Strings.isNullOrEmpty(user.getPATIENT_NUMBER())) {
loc.put("test", "PATIENT_NUMBER ");
errSmry.add("_fail");
} else if(Strings.isNullOrEmpty(user.getSAMPLE_COLLECTION_DATE())) {
loc.put("test", "SAMPLE_COLLECTION_DATE ");
errSmry.add("_fail");
} else if(Strings.isNullOrEmpty(user.getGENE_NAME())) {
loc.put("test", "GENE_NAME ");
errSmry.add("_fail");
} else if(Strings.isNullOrEmpty(user.getSTRAND())) {
loc.put("test", "STRAND ");
errSmry.add("_fail");
} else if(Strings.isNullOrEmpty(user.getASSAY_PLATFORM())) {
loc.put("test", "ASSAY_PLATFORM ");
errSmry.add("_fail");
} else if(Strings.isNullOrEmpty(user.getSOFTWARE_NAME())) {
loc.put("test", "SOFTWARE_NAME ");
errSmry.add("_fail");
} else if(Strings.isNullOrEmpty(user.getTEST_DATE())) {
loc.put("test", "TEST_DATE ");
errSmry.add("_fail");
} else if(Strings.isNullOrEmpty(user.getPLATE_POSITION())) {
loc.put("test", "PLATE_POSITION ");
errSmry.add("_fail");
} else if(Strings.isNullOrEmpty(user.getPLATE_ID())) {
loc.put("test", "PLATE_ID ");
errSmry.add("_fail");
} else if(Strings.isNullOrEmpty(user.getOPERATOR())) {
loc.put("test", "OPERATOR ");
errSmry.add("_fail");
} else if(Strings.isNullOrEmpty(user.getGENOTYPE())) {
loc.put("test", "GENOTYPE ");
errSmry.add("_fail");
} else if(Strings.isNullOrEmpty(user.getSTAGE())) {
loc.put("test", "STAGE ");
errSmry.add("_fail");
} else if(Strings.isNullOrEmpty(user.getLAB())) {
loc.put("test", "LAB ");
errSmry.add("_fail");
}
String customNullMsg = "Required Genotype column(s)";
List<String> error = new ArrayList<String>();
String message = null;
if(!loc.isEmpty()) {
for (Map.Entry<String, String> entry : loc.entrySet()) {
message = "line:" + entry.getKey() + " column:" + entry.getValue() + " " + f1;
error.add(message);
}
} else {
message = "_pass";
error.add(message);
}
int cnt = 0;
if(!errSmry.isEmpty()) {
// not able to understand this. Are we trying to get the occurrences
// of the last key that contains _fail?
for (String key : errSmry) {
if(key.contains("_fail")) {
cnt = Collections.frequency(errSmry, key);
// ******************** Nikhil added this
break;
}
}
if(cnt > 0) {
writeCsvFileSmry(edtChkRptSmry, customNullMsg, p1, "failed", Integer.toString(cnt));
} else {
writeCsvFileSmry(edtChkRptSmry, customNullMsg, p1, "passed", "0");
}
} else {
writeCsvFileSmry(edtChkRptSmry, customNullMsg, p1, "passed", "0");
}
// loop the list and write out items to the error report file
if(!error.isEmpty()) {
for (String s : error) {
//System.out.println(s);
if(s.contains("_fail")) {
String updatedFailmsg = s.replace("_fail", "");
writeCsvFileDtl(edtChkRptDtl, "genotype", updatedFailmsg, "failed");
}
if(s.contains("_pass")) {
writeCsvFileDtl(edtChkRptDtl, "genotype", p1, "passed");
}
}
} else {
writeCsvFileDtl(edtChkRptDtl, "genotype", p1, "passed");
}
// end loop
}
private void writeCsvFileDtl(String edtChkRptDtl, String col1, String col2, String col3) {
try {
if(conf == null) {
conf = new Configuration();
}
FileSystem fs = FileSystem.get(conf);
Path path = new Path(edtChkRptDtl);
if (!fs.exists(path)) {
FSDataOutputStream out = fs.create(path);
out.writeChars(col1);
out.writeChar(',');
out.writeChars(col2);
out.writeChar(',');
out.writeChars(col3);
out.writeChar('\n');
out.flush();
out.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
private void writeCsvFileSmry(String edtChkRptSmry, String col1, String col2, String col3, String col4) {
try {
if(conf == null) {
conf = new Configuration();
}
FileSystem fs = FileSystem.get(conf);
Path path = new Path(edtChkRptSmry);
if (!fs.exists(path)) {
FSDataOutputStream out = fs.create(path);
out.writeChars(col1);
out.writeChar(',');
out.writeChars(col2);
out.writeChar(',');
out.writeChars(col3);
out.writeChar(',');
out.writeChars(col4);
out.writeChar('\n');
out.flush();
out.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
Here is my driver class
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
public class GWASMapReduce extends Configured implements Tool{
/**
* @param args
*/
public static void main(String[] args) throws Exception {
Configuration configuration = new Configuration();
ToolRunner.run(configuration, new GWASMapReduce(), args);
}
@Override
public int run(String[] arg0) throws Exception {
JobConf conf = new JobConf(new Configuration());
conf.setInputFormat(GWASInputFormat.class);
conf.setOutputKeyClass(Text.class);
conf.setOutputValueClass(Text.class);
conf.setJarByClass(GWASMapReduce.class);
conf.setMapperClass(GWASMapper.class);
conf.setNumReduceTasks(0);
FileInputFormat.addInputPath(conf, new Path(arg0[0]));
FileOutputFormat.setOutputPath(conf, new Path(arg0[1]));
JobClient.runJob(conf);
return 0;
}
}
There will probably be only one Mapper task, and ten invocations of its map method. If you wish to write out one file per Mapper, you should do so in its configure method. If you wish to write out one file per input record, you should do so in its map method.
Edit: the above turned out to be unrelated to the problem. The issue is that in GWASRecordReader, you do not set the key in the next method, so your map input key is always the same. Simply add key.set(lineKey.get()); to the next method, and it should work.
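For illustration, the corrected next method in GWASRecordReader would begin like this (a sketch; the rest of the method stays as it was):

@Override
public boolean next(LongWritable key, GWASGenotypeBean value) throws IOException {
    if (!lineReader.next(lineKey, lineValue)) {
        return false;
    }
    // propagate the line offset so every record gets a distinct map input key
    key.set(lineKey.get());
    // ... populate value from lineValue as before ...
    return true;
}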