Test passed in IDE but failed in TeamCity - java

I have a class that I had to test:
package com.mycompany.openapi.cms.core.support.liquibase;
import liquibase.change.custom.CustomTaskChange;
import liquibase.database.Database;
import liquibase.database.jvm.JdbcConnection;
import liquibase.exception.CustomChangeException;
import liquibase.exception.SetupException;
import liquibase.exception.ValidationErrors;
import liquibase.resource.ResourceAccessor;
import org.slf4j.LoggerFactory;
import org.springframework.util.StringUtils;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.sql.Statement;
import java.util.Set;
public class ApplySqlFileIfExistsChange implements CustomTaskChange {

    private final org.slf4j.Logger logger = LoggerFactory.getLogger(getClass());

    private String file;
    private ResourceAccessor resourceAccessor;

    @Override
    public void execute(Database database) throws CustomChangeException {
        JdbcConnection databaseConnection = (JdbcConnection) database.getConnection();
        try {
            Set<InputStream> files = resourceAccessor.getResourcesAsStream(file);
            if (files != null) {
                for (InputStream inputStream : files) {
                    BufferedReader in = new BufferedReader(
                            new InputStreamReader(inputStream));
                    String str;
                    String sql;
                    StringBuilder sqlBuilder = new StringBuilder();
                    while ((str = in.readLine()) != null) {
                        sqlBuilder.append(str).append(" ");
                    }
                    in.close();
                    sql = sqlBuilder.toString().trim();
                    if (StringUtils.isEmpty(sql)) {
                        return;
                    }
                    Statement statement = databaseConnection.createStatement();
                    statement.execute(sql);
                    statement.close();
                }
            }
        } catch (FileNotFoundException e) {
            logger.error(e.getMessage(), e);
        } catch (Exception e) {
            throw new CustomChangeException(e);
        }
    }

    public String getFile() {
        return file;
    }

    public void setFile(String file) {
        this.file = file;
    }

    @Override
    public void setFileOpener(ResourceAccessor resourceAccessor) {
        this.resourceAccessor = resourceAccessor;
    }
}
I wrote the following test:
package com.mycompany.openapi.cms.core.support.liquibase;
import com.google.common.collect.Sets;
import liquibase.database.Database;
import liquibase.database.jvm.JdbcConnection;
import liquibase.resource.ResourceAccessor;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.sql.Statement;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.verify;
import static org.powermock.api.mockito.PowerMockito.*;
@RunWith(PowerMockRunner.class)
@PrepareForTest({LoggerFactory.class})
public class TestApplySqlFileIfExistsChange {

    @InjectMocks
    ApplySqlFileIfExistsChange applySqlFileIfExistsChange;
    @Mock
    private ResourceAccessor resourceAccessor;
    @Mock
    private JdbcConnection jdbcConnection;
    @Mock
    private Database database;
    @Mock
    Statement statement;

    @BeforeClass
    public static void setUpClass() {
        mockStatic(LoggerFactory.class);
        when(LoggerFactory.getLogger(ApplySqlFileIfExistsChange.class)).thenReturn(mock(Logger.class));
    }

    @Before
    public void setUp() throws Exception {
        when(database.getConnection()).thenReturn(jdbcConnection);
        InputStream inp1, inp2;
        inp1 = new ByteArrayInputStream("FirstTestQuery".getBytes(StandardCharsets.UTF_8));
        inp2 = new ByteArrayInputStream("SecondTestQuery".getBytes(StandardCharsets.UTF_8));
        when(resourceAccessor.getResourcesAsStream(anyString())).thenReturn(Sets.newHashSet(inp1, inp2));
        when(jdbcConnection.createStatement()).thenReturn(statement);
    }

    @Test
    public void execute() throws Exception {
        applySqlFileIfExistsChange.execute(database);
        verify(statement).execute("FirstTestQuery");
        verify(statement).execute("SecondTestQuery");
    }
}
The problem is that the test above passes in my IDE, but when I push to the repository, the TeamCity build fails on my test. I can't understand why, because the code is the same in both places. Here is the stack trace from TeamCity:
java.lang.IllegalStateException: Failed to transform class with name org.slf4j.LoggerFactory. Reason: java.io.IOException: invalid constant type: 15
at org.powermock.core.classloader.MockClassLoader.loadMockClass(MockClassLoader.java:267)
at org.powermock.core.classloader.MockClassLoader.loadModifiedClass(MockClassLoader.java:180)
at org.powermock.core.classloader.DeferSupportingClassLoader.loadClass(DeferSupportingClassLoader.java:70)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at sun.reflect.generics.factory.CoreReflectionFactory.makeNamedType(CoreReflectionFactory.java:114)
at sun.reflect.generics.visitor.Reifier.visitClassTypeSignature(Reifier.java:125)
at sun.reflect.generics.tree.ClassTypeSignature.accept(ClassTypeSignature.java:49)
at sun.reflect.annotation.AnnotationParser.parseSig(AnnotationParser.java:439)
at sun.reflect.annotation.AnnotationParser.parseClassValue(AnnotationParser.java:420)
at sun.reflect.annotation.AnnotationParser.parseClassArray(AnnotationParser.java:724)
at sun.reflect.annotation.AnnotationParser.parseArray(AnnotationParser.java:531)
at sun.reflect.annotation.AnnotationParser.parseMemberValue(AnnotationParser.java:355)
at sun.reflect.annotation.AnnotationParser.parseAnnotation2(AnnotationParser.java:286)
at sun.reflect.annotation.AnnotationParser.parseAnnotations2(AnnotationParser.java:120)
at sun.reflect.annotation.AnnotationParser.parseAnnotations(AnnotationParser.java:72)
at java.lang.Class.createAnnotationData(Class.java:3521)
at java.lang.Class.annotationData(Class.java:3510)
at java.lang.Class.getAnnotation(Class.java:3415)
at org.junit.internal.MethodSorter.getDeclaredMethods(MethodSorter.java:52)
at org.junit.internal.runners.TestClass.getAnnotatedMethods(TestClass.java:45)
at org.junit.internal.runners.MethodValidator.validateTestMethods(MethodValidator.java:71)
at org.junit.internal.runners.MethodValidator.validateStaticMethods(MethodValidator.j
I apologize for the amount of code in my question.

You probably have conflicting PowerMock/Mockito versions.
Check your local Maven repo for the versions, get rid of the older ones
in your pom, and/or update the other dependency that pulled them in.
Check in the root of the project with:
mvn dependency:tree -Dverbose -Dincludes=NAME_OF_DEPENDENCY
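For example, to list every copy of PowerMock that Maven resolves (here org.powermock stands in for NAME_OF_DEPENDENCY):
mvn dependency:tree -Dverbose -Dincludes=org.powermock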

You have a conflict of dependent libraries in the TeamCity repo vs. your local repo.
Confirm the TC build is using exactly the same version of PowerMock as your local build.
There may be an older version being transitively included.
mvn dependency:tree -Dverbose
This will list all the resolvable dependencies. See if there are different versions of PowerMock in there. You can then stop them from being pulled in by using the exclusions tag in pom.xml.
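As a sketch, an exclusion in pom.xml looks like the following; the outer artifact here is a placeholder for whichever dependency your tree shows dragging in the old PowerMock version:
<dependency>
    <groupId>com.example</groupId>
    <artifactId>library-pulling-old-powermock</artifactId>
    <version>1.0</version>
    <exclusions>
        <exclusion>
            <groupId>org.powermock</groupId>
            <artifactId>powermock-core</artifactId>
        </exclusion>
    </exclusions>
</dependency>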

Related

java.lang.NoSuchMethodError: 'com.mongodb.connection.ConnectionPoolSettings$Builder

I'm trying to use flapdoodle embedded MongoDB for writing unit tests, but I receive the following error:
"java.lang.NoSuchMethodError: 'com.mongodb.connection.ConnectionPoolSettings$Builder
com.mongodb.connection.ConnectionPoolSettings$Builder.maxWaitQueueSize(int)'"
Is there any dependency I need to add here? Is the code I have written in the @Before block correct? In the end I need a MongoClient and a MongoDatabase to pass to the other method under test.
**MongoDBConfig file:**
import java.io.IOException;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
@EnableMongoRepositories
public class MongoDBConfig {

    @Value("${mongo.db.url}")
    private String MONGO_DB_URL;

    @Value("${mongo.db.port:27017}")
    private int MONGO_DB_PORT;

    @Value("${mongo.db.name}")
    private String MONGO_DB_NAME;

    @Bean
    public MongoTemplate mongoTemplate() throws IOException {
        MongoClient mongoClient = MongoClients.create("mongodb://localhost:27017");
        MongoTemplate mongoTemplate = new MongoTemplate(mongoClient, "test");
        return mongoTemplate;
    }
}
**UnitTestFile:**
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.stream.Collectors;
import org.json.simple.parser.ParseException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit4.SpringRunner;
import com.mongodb.MongoClient;
import com.mongodb.client.MongoDatabase;
import com.sap.crm.ana.gatewayservice.AnaGatewayServiceApplication;
import de.flapdoodle.embed.mongo.MongodExecutable;
import de.flapdoodle.embed.mongo.MongodStarter;
import de.flapdoodle.embed.mongo.config.IMongodConfig;
import de.flapdoodle.embed.mongo.config.MongodConfigBuilder;
import de.flapdoodle.embed.mongo.config.Net;
import de.flapdoodle.embed.mongo.distribution.Version;
import de.flapdoodle.embed.process.runtime.Network;
@SpringBootTest
@RunWith(SpringRunner.class)
@ContextConfiguration(classes = { MongoDBConfig.class })
@TestPropertySource(locations = "classpath:application-test.properties")
public class CSNHandlerTestEmbeddedMongoDB {

    private CSNHandler handler;
    private MongoClient mongoClient;
    private MongoDatabase database;
    private String model;
    private MongodExecutable mongodExecutable;
    private MongoTemplate mongoTemplate;

    @Before
    public void setup() throws Exception {
        MongodStarter starter = MongodStarter.getDefaultInstance();
        String bindIp = "localhost";
        int port = 27017;
        IMongodConfig mongodConfig = new MongodConfigBuilder().version(Version.Main.PRODUCTION)
                .net(new Net(bindIp, port, Network.localhostIsIPv6())).build();
        this.mongoClient = new MongoClient(bindIp, port);
        this.database = mongoClient.getDatabase("test");
        mongodExecutable = null;
        try {
            mongodExecutable = starter.prepare(mongodConfig);
            mongodExecutable.start();
        } catch (Exception e) {
            // log exception here
            if (mongodExecutable != null)
                mongodExecutable.stop();
        }
    }

    @Test
    public void testCSNHandler() throws IOException, ParseException {
        handler = new CSNHandler(database);
        handler.insertUpdateAnaModel("100", model);
    }

    @After
    public void teardown() throws Exception {
        if (mongodExecutable != null)
            mongodExecutable.stop();
    }
}
**Dependencies:**
<dependency>
    <groupId>de.flapdoodle.embed</groupId>
    <artifactId>de.flapdoodle.embed.mongo</artifactId>
    <scope>test</scope>
</dependency>
<dependency>
    <groupId>org.mongodb</groupId>
    <artifactId>mongo-java-driver</artifactId>
    <version>3.12.9</version>
</dependency>
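A NoSuchMethodError at runtime usually means the code was compiled against one version of a library but is running with another, so it is worth checking which MongoDB driver versions actually land on the test classpath, for example with:
mvn dependency:tree -Dverbose -Dincludes=org.mongodb
If another dependency (the flapdoodle artifact or Spring Data MongoDB, say) resolves to a different driver major version than the mongo-java-driver 3.12.9 declared above, aligning those versions is the usual first step.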

Spark Throwing a "NoClassDefFoundError" Despite jar Indicating Class is Present

I am receiving a NoClassDefFoundError despite 7Zip indicating that the jar containing the class is present in the uber-jar being submitted to run the program. I am submitting with the line below:
spark-submit --class org.dia.red.ctakes.spark.CtakesSparkMain target/spark-ctakes-0.1-job.jar
The error being thrown is:
Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/uima/cas/FSIndex
at org.dia.red.ctakes.spark.CtakesSparkMain.main(CtakesSparkMain.java:50)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:743)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.ClassNotFoundException: org.apache.uima.cas.FSIndex
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 10 more
The CtakesSparkMain class below calls the CtakesFunction class:
package org.dia.red.ctakes.spark;
import java.util.List;
import java.io.PrintWriter;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.uima.jcas.cas.FSArray;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.storage.StorageLevel;
import org.json.JSONObject;
public class CtakesSparkMain {

    /**
     * @param args
     */
    public static void main(String[] args) throws Exception {
        SparkConf conf = new SparkConf().setAppName("ctakes");
        JavaSparkContext sc = new JavaSparkContext(conf);
        JavaRDD<String> lines = sc.textFile("/mnt/d/metistream/ctakes-streaming/SparkStreamingCTK/testdata100.txt").map(new CtakesFunction());
        String first = lines.take(2).get(0);
        PrintWriter out = new PrintWriter("/mnt/d/metistream/ctakes-streaming/SparkStreamingCTK/test_outputs/output.txt");
        out.println(first);
        out.close();
        sc.close();
    }
}
CtakesFunction:
package org.dia.red.ctakes.spark;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import org.apache.ctakes.typesystem.type.refsem.OntologyConcept;
import org.apache.ctakes.typesystem.type.textsem.*;
import org.apache.uima.UIMAException;
import org.apache.uima.cas.FSIndex;
import org.apache.uima.cas.Type;
import org.apache.uima.jcas.JCas;
import org.apache.uima.analysis_engine.AnalysisEngineDescription;
import org.apache.uima.cas.impl.XmiCasSerializer;
import org.apache.uima.fit.factory.JCasFactory;
import org.apache.uima.fit.pipeline.SimplePipeline;
import org.apache.uima.jcas.cas.FSArray;
import org.apache.uima.util.XMLSerializer;
import org.apache.spark.api.java.function.Function;
import it.cnr.iac.CTAKESClinicalPipelineFactory;
import org.json.*;
/**
 * @author Selina Chu, Michael Starch, and Giuseppe Totaro
 */
public class CtakesFunction implements Function<String, String> {

    transient JCas jcas = null;
    transient AnalysisEngineDescription aed = null;

    private void setup() throws UIMAException {
        System.setProperty("ctakes.umlsuser", "");
        System.setProperty("ctakes.umlspw", "");
        this.jcas = JCasFactory.createJCas();
        this.aed = CTAKESClinicalPipelineFactory.getDefaultPipeline();
    }

    private void readObject(ObjectInputStream in) {
        try {
            in.defaultReadObject();
            this.setup();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } catch (UIMAException e) {
            e.printStackTrace();
        }
    }

    @Override
    public String call(String paragraph) throws Exception {
        this.jcas.setDocumentText(paragraph);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        SimplePipeline.runPipeline(this.jcas, this.aed);
        FSIndex index = this.jcas.getAnnotationIndex(IdentifiedAnnotation.type);
        Iterator iter = index.iterator();
        JSONArray annotationsArray = new JSONArray();
        JSONObject allAnnotations = new JSONObject();
        ArrayList<String> types = new ArrayList<String>();
        types.add("org.apache.ctakes.typesystem.type.textsem.SignSymptomMention");
        types.add("org.apache.ctakes.typesystem.type.textsem.DiseaseDisorderMention");
        types.add("org.apache.ctakes.typesystem.type.textsem.AnatomicalSiteMention");
        types.add("org.apache.ctakes.typesystem.type.textsem.ProcedureMention");
        types.add("org.apache.ctakes.typesystem.type.textsem.MedicationMention");
        String type;
        String[] splitType;
        FSArray snomedArray;
        ArrayList<String> snomedStringArray = new ArrayList<String>();
        while (iter.hasNext()) {
            IdentifiedAnnotation annotation = (IdentifiedAnnotation) iter.next();
            type = annotation.getType().toString();
            if (types.contains(type)) {
                JSONObject annotations = new JSONObject();
                splitType = type.split("[.]");
                annotations.put("id", annotation.getId());
                annotations.put("subject", annotation.getSubject());
                annotations.put("type", splitType[splitType.length - 1]);
                annotations.put("text", annotation.getCoveredText());
                annotations.put("polarity", annotation.getPolarity());
                annotations.put("confidence", annotation.getConfidence());
                snomedArray = annotation.getOntologyConceptArr();
                for (int i = 0; i < snomedArray.size(); i++) {
                    snomedStringArray.add(((OntologyConcept) snomedArray.get(i)).getCode());
                }
                annotations.put("snomed_codes", snomedStringArray);
                snomedStringArray.clear();
                annotationsArray.put(annotations);
            }
        }
        allAnnotations.put("Annotations", annotationsArray);
        this.jcas.reset();
        return allAnnotations.toString();
    }
}
I was attempting to modify the repository at https://github.com/selinachu/SparkStreamingCTK to leverage regular Spark as opposed to Spark Streaming (and Spark 2.0), but haven't been able to resolve this.
This is because what Maven generates for this project is not truly an uber-jar: the dependency jars are nested inside it, and spark-submit cannot load a class from a jar within a jar; that would require a special class loader. The right approach is to explode all the dependency jars so that every contained class sits directly in the uber-jar, similar to how the Maven Shade plugin does it: https://maven.apache.org/plugins/maven-shade-plugin/
So you have to change the pom.xml to generate a correct uber-jar for this project.
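A minimal sketch of that pom.xml change, assuming the Shade plugin's default behavior of unpacking every dependency into one flat jar (the version number is illustrative):
<plugin>
    <groupId>org.apache.maven.plugins</groupId>
    <artifactId>maven-shade-plugin</artifactId>
    <version>3.2.4</version>
    <executions>
        <execution>
            <phase>package</phase>
            <goals>
                <goal>shade</goal>
            </goals>
        </execution>
    </executions>
</plugin>
With this in the build section, mvn package produces a single jar whose classes (including org.apache.uima.cas.FSIndex) sit at the top level, where spark-submit's class loader can find them.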
Inspired by YuGagarin's feedback, I used sbt-assembly to build an uber-jar of cTAKES itself. Compiling everything into one "true" fat jar resolved the above issue.
However, I should point out there are still some residual issues with cTAKES and Spark that I am currently working through.

Mocking file operations in a unit test using junit and PowerMockito doesn't work

I'm trying to write a unit test for a static method that contains file operations, using JUnit and PowerMockito. The method under test converts a CSV file to a bean list. Since this is a unit test, I mock a method call that happens inside the method under test. But the following error occurs:
org.mockito.exceptions.misusing.MissingMethodInvocationException:
when() requires an argument which has to be 'a method call on a mock'.
For example:
when(mock.getArticles()).thenReturn(articles);
Also, this error might show up because:
1. you stub either of: final/private/equals()/hashCode() methods.
Those methods cannot be stubbed/verified.
Mocking methods declared on non-public parent classes is not supported.
2. inside when() you don't call method on mock but on some other object.
at com.mypackage..unittest.UTCSVBeanUtil.convert_convertingCsvToBean(UTCSVBeanUtil.java:54)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
at java.lang.reflect.Method.invoke(Unknown Source)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
My class is:
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Path;
import java.util.List;
import au.com.bytecode.opencsv.CSVReader;
import au.com.bytecode.opencsv.CSVWriter;
import au.com.bytecode.opencsv.bean.ColumnPositionMappingStrategy;
import au.com.bytecode.opencsv.bean.CsvToBean;
public class CSVBeanUtil {

    public static <T> List<T> fileToBean(final String filename, final char delimiter, final Class<T> beanClass,
            final String[] columns) throws FileNotFoundException {
        BufferedReader reader = new BufferedReader(new FileReader(filename));
        try {
            return bufferReaderToBean(reader, delimiter, beanClass, columns);
        } finally {
            if (reader != null) {
                try {
                    reader.close();
                } catch (final IOException e) {
                    // ignore
                }
            }
        }
    }

    public static <T> List<T> stringToBean() {
        return null;
    }

    public static <T> List<T> bufferReaderToBean(BufferedReader reader, final char delimiter, final Class<T> beanClass,
            final String[] columns) {
        final CsvToBean<T> csv = new CsvToBean<T>();
        CSVReader csvreader = new CSVReader(reader, delimiter);
        ColumnPositionMappingStrategy<T> strategy = new ColumnPositionMappingStrategy<T>();
        strategy.setType(beanClass);
        strategy.setColumnMapping(columns);
        return csv.parse(strategy, csvreader);
    }

    public static boolean writeToCsv(List<String[]> beanList, Path absPath) throws IOException {
        CSVWriter writer = new CSVWriter(new FileWriter(absPath.toAbsolutePath().toString()),
                CSVWriter.DEFAULT_SEPARATOR, CSVWriter.NO_QUOTE_CHARACTER);
        writer.writeAll(beanList);
        writer.close();
        return false;
    }
}
And my test class is:
import static org.junit.Assert.assertEquals;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import com.mypackage..config.AppConfig;
import com.mypackage..entity.MyFile;
import com.mypackage..service.MyFileValidation;
import com.mypackage..utility.CSVBeanUtil;
import org.apache.commons.io.FileUtils;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
@WebAppConfiguration
@ContextConfiguration(classes = { AppConfig.class })
@PrepareForTest(CSVBeanUtil.class)
@RunWith(SpringJUnit4ClassRunner.class)
public class UTCSVBeanUtil {

    List<MyFile> dataList = new ArrayList<MyFile>();
    List<MyFile> expectedList = new ArrayList<MyFile>();

    @Before
    public void beforeClass() {
        expectedList = getdataList();
    }

    @Test
    public void convert_convertingCsvToBean() throws IOException {
        PowerMockito.mockStatic(CSVBeanUtil.class);
        // BufferedReader bufferedReader = Mockito.mock(BufferedReader.class); // while using this the test execution doesn't terminate
        BufferedReader bufferedReader = new BufferedReader(new StringReader("201030"));
        // File file = Mockito.mock(File.class);
        Mockito.when(CSVBeanUtil.bufferReaderToBean(bufferedReader, ',', MyFile.class, MyFile.columns))
                .thenReturn(expectedList);
        dataList.addAll(CSVBeanUtil.fileToBean(null, ',', MyFile.class, MyFile.columns));
        assertEquals(expectedList, dataList);
    }

    private List<MyFile> getdataList() {
        List<MyFile> expectedList = new ArrayList<MyFile>();
        MyFile gv = new MyFile();
        gv.setTRADENUM("201030");
        expectedList.add(gv);
        return expectedList;
    }
}
I'm trying to solve this problem. Please help me... Thank You
PowerMock provides its own runner, specified with the @RunWith annotation:
@RunWith(PowerMockRunner.class)
If you want to use SpringJUnit4ClassRunner.class as well, let PowerMock delegate to it:
@RunWith(PowerMockRunner.class)
@PowerMockRunnerDelegate(SpringJUnit4ClassRunner.class)
See: Using another JUnit Runner with PowerMock
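Applied to the test class above, the header would look something like this (a sketch; the remaining annotations and test body stay as they are):
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PowerMockRunnerDelegate;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

@RunWith(PowerMockRunner.class)
@PowerMockRunnerDelegate(SpringJUnit4ClassRunner.class)
@WebAppConfiguration
@ContextConfiguration(classes = { AppConfig.class })
@PrepareForTest(CSVBeanUtil.class)
public class UTCSVBeanUtil {
    // test methods unchanged
}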

UIMA Pipeline with Stanford NER

I have integrated Stanford NER in UIMA and developed a pipeline.
The pipeline contains a FileSystemCollectionReader, an NERAnnotator and a CasConsumer, but the output isn't what I expect. In my input directory I have two files, and after running the pipeline I get two files as output, but the second output has the first file's content merged into it. I don't know what's happening here.
The code for CasConsumer:
package org.gds.uima;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import org.apache.uima.UimaContext;
import org.apache.uima.analysis_component.AnalysisComponent_ImplBase;
import org.apache.uima.analysis_component.CasAnnotator_ImplBase;
import org.apache.uima.analysis_component.JCasAnnotator_ImplBase;
import org.apache.uima.analysis_engine.AnalysisEngineProcessException;
import org.apache.uima.cas.CAS;
import org.apache.uima.cas.CASException;
import org.apache.uima.cas.Type;
import org.apache.uima.cas.text.AnnotationFS;
import org.apache.uima.fit.component.CasConsumer_ImplBase;
import org.apache.uima.fit.component.JCasConsumer_ImplBase;
import org.apache.uima.fit.descriptor.ConfigurationParameter;
import org.apache.uima.fit.util.CasUtil;
import org.apache.uima.jcas.JCas;
import org.apache.uima.resource.ResourceInitializationException;
public class CasConsumer extends JCasConsumer_ImplBase
{
    public final static String PARAM_OUTPUT = "outputDir";

    @ConfigurationParameter(name = PARAM_OUTPUT)
    private String outputDirectory;

    public final static String PARAM_ANNOTATION_TYPES = "annotationTypes";

    @ConfigurationParameter(name = PARAM_ANNOTATION_TYPES, defaultValue = "String")
    public List<String> annotationTypes;

    public void initialize(final UimaContext context) throws ResourceInitializationException
    {
        super.initialize(context);
    }

    @Override
    public void process(JCas jcas)
    {
        String original = jcas.getDocumentText();
        try
        {
            String onlyText = "";
            JCas sofaText = jcas.getView(NERAnnotator.SOFA_NAME);
            onlyText = sofaText.getDocumentText();
            String name = UUID.randomUUID().toString().substring(20);
            File outputDir = new File(this.outputDirectory + "/" + name);
            System.out.print("Saving file to " + outputDir.getAbsolutePath());
            FileOutputStream fos = new FileOutputStream(outputDir.getAbsoluteFile());
            PrintWriter pw = new PrintWriter(fos);
            pw.println(onlyText);
            pw.close();
        }
        catch (CASException cae)
        {
            System.out.println(cae);
        }
        catch (FileNotFoundException fne)
        {
            System.out.print(fne);
        }
    }
}

java unit test mock HttpClient and webdav

Hello, I have a class for doing WebDAV-related operations such as creating a directory; the implementation can be seen below (the createDir method). The question is how to test it nicely, perhaps using EasyMock or a similar lib. Any ideas? Thanks!
package foobar;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import org.apache.commons.httpclient.*;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.methods.DeleteMethod;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.methods.InputStreamRequestEntity;
import org.apache.commons.httpclient.methods.PutMethod;
import org.apache.commons.httpclient.methods.RequestEntity;
import org.apache.jackrabbit.webdav.client.methods.DavMethod;
import org.apache.jackrabbit.webdav.client.methods.MkColMethod;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import mypackage.httpdclient.util.URLHandler;
public class WebDavImpl {

    private static final String SEPARATOR = " ----- ";
    private HttpClient httpClient;

    public WebDavImpl() {
        httpClient = new HttpClient();
    }

    public String createDir(String dirName) {
        String response = null;
        String url = URLHandler.getInstance().getDirectoryUrl(dirName);
        DavMethod mkcol = new MkColMethod(url);
        try {
            httpClient.executeMethod(mkcol);
            response = mkcol.getStatusCode() + SEPARATOR + mkcol.getStatusText();
        } catch (IOException ex) {
        } finally {
            mkcol.releaseConnection();
        }
        return response;
    }
}
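One way to make this testable, sketched here with Mockito (EasyMock would look similar): inject the HttpClient through the constructor and add a small overridable factory for the DavMethod. Both are assumed refactorings of the class above, and the URLHandler singleton still runs for real in this sketch; if it can't, it needs a similar seam or a static-mocking tool.

// Assumed refactorings inside WebDavImpl:
public WebDavImpl(HttpClient httpClient) {   // constructor injection instead of new HttpClient()
    this.httpClient = httpClient;
}

protected DavMethod newMkColMethod(String url) {   // factory hook a test can override
    return new MkColMethod(url);
}

The test can then stub the method object and verify the interactions:

import static org.junit.Assert.assertEquals;
import org.apache.commons.httpclient.HttpClient;
import org.apache.jackrabbit.webdav.client.methods.DavMethod;
import org.junit.Test;
import org.mockito.Mockito;

public class WebDavImplTest {

    @Test
    public void createDirReturnsServerStatus() throws Exception {
        HttpClient httpClient = Mockito.mock(HttpClient.class);
        final DavMethod mkcol = Mockito.mock(DavMethod.class);
        Mockito.when(mkcol.getStatusCode()).thenReturn(201);
        Mockito.when(mkcol.getStatusText()).thenReturn("Created");

        // Override the factory hook so no real MkColMethod is built
        WebDavImpl client = new WebDavImpl(httpClient) {
            @Override
            protected DavMethod newMkColMethod(String url) {
                return mkcol;
            }
        };

        assertEquals("201 ----- Created", client.createDir("mydir"));
        Mockito.verify(httpClient).executeMethod(mkcol);
        Mockito.verify(mkcol).releaseConnection();
    }
}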
