ImageMagick: Im4Java Running from eclipse from MacBook is showing the exception - java

I'm trying to run im4java to compare the images but I'm getting the below exception message.
I have followed the steps to add ImageMagick as in this link: https://www.imagemagick.org/script/download.php#macosx
I was getting the same error so, I added the environmental variable in the run configurations of my eclipse as:
DYLD_LIBRARY_PATH=/Users/siva/Downloads/ImageMagick-7.0.5/lib and started running, but I'm still getting the same error message:
I also tried:
System.setProperty("java.library.path", "/Users/siva/Downloads/ImageMagick-7.0.5/bin/magick");
If anyone has tried using imageMagick on Mac please suggest me what are the steps I have to do.
I was able to run the below command from the terminal successfully:
compare imageone.png imagetwo.png diffimage.png
import java.io.IOException;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.ArrayList;

import org.apache.commons.lang.StringUtils;

import org.im4java.core.IM4JavaException;
import org.im4java.core.IMOperation;
import org.im4java.core.ImageMagickCmd;
import org.im4java.core.Info;
import org.im4java.process.ArrayListOutputConsumer;
import org.im4java.process.ProcessStarter;
/**
 * Compares two images with ImageMagick's "compare" command (via im4java) and
 * prints the fraction of differing pixels.
 *
 * <p>im4java spawns the ImageMagick binaries ("compare", "identify", ...) as
 * external processes. When launched from Eclipse the IDE's process does not
 * inherit the shell PATH, which is why "Cannot run program \"identify\"" was
 * thrown; the search path must be set explicitly via
 * {@link ProcessStarter#setGlobalSearchPath(String)}.
 */
public class UseImageM {
    private static final String COMPARE_COMMAND = "compare";
    private static final String ABSOLUTE_ERROR = "AE";
    /** Directory that contains the ImageMagick executables (compare, identify, ...). */
    private static final String IMAGEMAGICK_BIN_PATH = "/Users/siva/Downloads/ImageMagick-7.0.5/bin";
    private static String imageOnePath = "/Users/siva/Documents/imageEclipse/UsingImagemagick/screenshot.png";
    private static String imageTwoPath = "/Users/siva/Documents/imageEclipse/UsingImagemagick/screenshotfinal.png";
    private static String outputImageName = "outputoftheimageCompare";
    /** Scale (number of decimal places) used when computing the diff ratio. */
    private static final int COUNT_OF_DECIMAL = 10;

    public static void main(String[] args) {
        // Root-cause fix: tell im4java where the ImageMagick binaries live so it
        // can spawn them even when PATH is not inherited (e.g. from Eclipse).
        ProcessStarter.setGlobalSearchPath(IMAGEMAGICK_BIN_PATH);

        // Build: compare -metric AE imageOne imageTwo diffOutput
        IMOperation imOperation = new IMOperation();
        imOperation.metric(ABSOLUTE_ERROR);
        imOperation.addImage(imageOnePath);
        imOperation.addImage(imageTwoPath);
        imOperation.addImage(outputImageName);

        ImageMagickCmd compare = new ImageMagickCmd(COMPARE_COMMAND);
        ArrayListOutputConsumer outputConsumer = new ArrayListOutputConsumer();
        compare.setOutputConsumer(outputConsumer);

        BigDecimal diffPercentage = BigDecimal.ZERO;
        long imageSize = 0;
        try {
            Info imageInfo = new Info(imageTwoPath, true);
            // Cast before multiplying so very large images cannot overflow int
            // arithmetic; the product was previously computed as int.
            imageSize = (long) imageInfo.getImageWidth() * imageInfo.getImageHeight();
            compare.run(imOperation);
            ArrayList<String> errorText = compare.getErrorText();
            if (errorText != null && errorText.size() == 1) {
                BigDecimal diffCount = new BigDecimal(errorText.get(0));
                // compareTo, not equals: BigDecimal.equals also compares scale,
                // so e.g. new BigDecimal("0.0") would not equal BigDecimal.ZERO.
                if (BigDecimal.ZERO.compareTo(diffCount) != 0 && imageSize > 0) {
                    diffPercentage = diffCount.divide(
                            new BigDecimal(imageSize), COUNT_OF_DECIMAL,
                            RoundingMode.DOWN);
                }
            }
        } catch (final IOException | InterruptedException e) {
            System.out.println((String.format(
                    "Exception happened in comparison between %s and %s.",
                    imageOnePath, imageTwoPath) + e));
        } catch (final IM4JavaException e) {
            // "compare" exits non-zero when the images differ; im4java surfaces
            // that as IM4JavaException and the pixel diff count (if any) ends up
            // in the command's error text.
            e.printStackTrace();
            final ArrayList<String> errorText = compare.getErrorText();
            // Guard the error list: the original code called errorText.get(0)
            // unconditionally, which caused the NullPointerException seen in the
            // console log when the command failed before producing any output.
            if (errorText == null || errorText.isEmpty()
                    || !StringUtils.isNumeric(errorText.get(0))) {
                final String errorMsg = String.format(
                        "Exception happened in comparison between %s and %s.",
                        imageOnePath, imageTwoPath);
                System.out.println(errorMsg + e);
            } else if (imageSize > 0) {
                final BigDecimal diffCount = new BigDecimal(errorText.get(0));
                diffPercentage = diffCount.divide(new BigDecimal(imageSize),
                        COUNT_OF_DECIMAL, RoundingMode.DOWN);
            }
        }
        System.out.println(diffPercentage);
    }
}
Console Logs:
org.im4java.core.InfoException: org.im4java.core.CommandException: java.io.IOException: Cannot run program "identify": error=2, No such file or directory
at org.im4java.core.Info.getBaseInfo(Info.java:360)
at org.im4java.core.Info.<init>(Info.java:151)
at UseImageM.main(UseImageM.java:36)
Caused by: org.im4java.core.CommandException: java.io.IOException: Cannot run program "identify": error=2, No such file or directory
at org.im4java.core.ImageCommand.run(ImageCommand.java:219)
at org.im4java.core.Info.getBaseInfo(Info.java:342)
... 2 more
Caused by: java.io.IOException: Cannot run program "identify": error=2, No such file or directory
at java.lang.ProcessBuilder.start(ProcessBuilder.java:1048)
at org.im4java.process.ProcessStarter.startProcess(ProcessStarter.java:407)
at org.im4java.process.ProcessStarter.run(ProcessStarter.java:312)
at org.im4java.core.ImageCommand.run(ImageCommand.java:215)
... 3 more
Caused by: java.io.IOException: error=2, No such file or directory
at java.lang.UNIXProcess.forkAndExec(Native Method)
at java.lang.UNIXProcess.<init>(UNIXProcess.java:247)
at java.lang.ProcessImpl.start(ProcessImpl.java:134)
at java.lang.ProcessBuilder.start(ProcessBuilder.java:1029)
... 6 more
Exception in thread "main" java.lang.NullPointerException
at UseImageM.main(UseImageM.java:60)

Related

How to connect to Argo workflows using java client

I am trying to implement a simple action from the argo workflows example (https://github.com/argoproj/argo-workflows/blob/master/sdks/java/client/docs/WorkflowServiceApi.md), I can do it with the API, but I need to do it from a Java application. The Java SDK doesn't even seem to connect to the Argo API. But the UI is working.
public static void main(String[] args) {
    // Point the shared client at the Argo server endpoint.
    ApiClient client = Configuration.getDefaultApiClient();
    client.setBasePath("http://xxx.xxx.xxx.xxx:2746");
    WorkflowServiceApi api = new WorkflowServiceApi(client);

    // Lookup parameters for the workflow to fetch.
    String namespace = "argo";
    String name = "hello-argo";
    // Constraint on which resource versions may serve the request; see the
    // Kubernetes api-concepts documentation on resource versions.
    String getOptionsResourceVersion = "argoproj.io/v1alpha1";
    // Fields to include or exclude in the response, e.g. "spec,status.phase".
    String fields = "";

    try {
        IoArgoprojWorkflowV1alpha1Workflow workflow =
                api.workflowServiceGetWorkflow(namespace, name, getOptionsResourceVersion, fields);
        System.out.println(workflow);
    } catch (ApiException e) {
        System.err.println("Exception when calling WorkflowServiceApi#workflowServiceGetWorkflow");
        System.err.println("Status code: " + e.getCode());
        System.err.println("Reason: " + e.getResponseBody());
        System.err.println("Response headers: " + e.getResponseHeaders());
        e.printStackTrace();
    }
}
Here is the error:
Exception when calling WorkflowServiceApi#workflowServiceGetWorkflow
Status code: 0
Reason: null
Response headers: null
io.argoproj.workflow.ApiException: java.io.IOException: unexpected end of stream on http://xxx.xxx.xxx.xxx:2746/...
at io.argoproj.workflow.ApiClient.execute(ApiClient.java:930)
at io.argoproj.workflow.apis.WorkflowServiceApi.workflowServiceGetWorkflowWithHttpInfo(WorkflowServiceApi.java:486)
at io.argoproj.workflow.apis.WorkflowServiceApi.workflowServiceGetWorkflow(WorkflowServiceApi.java:463)
at Argo.main(Argo.java:20)
Caused by: java.io.IOException: unexpected end of stream on http://xxx.xxx.xxx.xxx:2746/...
at okhttp3.internal.http1.Http1ExchangeCodec.readResponseHeaders(Http1ExchangeCodec.kt:202)
at okhttp3.internal.connection.Exchange.readResponseHeaders(Exchange.kt:106)
at okhttp3.internal.http.CallServerInterceptor.intercept(CallServerInterceptor.kt:79)
at okhttp3.internal.http.RealInterceptorChain.proceed(RealInterceptorChain.kt:109)
at io.argoproj.workflow.ApiClient$2.intercept(ApiClient.java:1267)
at okhttp3.internal.http.RealInterceptorChain.proceed(RealInterceptorChain.kt:109)
at okhttp3.internal.connection.ConnectInterceptor.intercept(ConnectInterceptor.kt:34)
at okhttp3.internal.http.RealInterceptorChain.proceed(RealInterceptorChain.kt:109)
at okhttp3.internal.cache.CacheInterceptor.intercept(CacheInterceptor.kt:95)
at okhttp3.internal.http.RealInterceptorChain.proceed(RealInterceptorChain.kt:109)
at okhttp3.internal.http.BridgeInterceptor.intercept(BridgeInterceptor.kt:83)
at okhttp3.internal.http.RealInterceptorChain.proceed(RealInterceptorChain.kt:109)
at okhttp3.internal.http.RetryAndFollowUpInterceptor.intercept(RetryAndFollowUpInterceptor.kt:76)
at okhttp3.internal.http.RealInterceptorChain.proceed(RealInterceptorChain.kt:109)
at okhttp3.internal.connection.RealCall.getResponseWithInterceptorChain$okhttp(RealCall.kt:201)
at okhttp3.internal.connection.RealCall.execute(RealCall.kt:154)
at io.argoproj.workflow.ApiClient.execute(ApiClient.java:926)
... 3 more
Caused by: java.io.EOFException: \n not found: limit=0 content=…
at okio.RealBufferedSource.readUtf8LineStrict(RealBufferedSource.kt:332)
at okhttp3.internal.http1.HeadersReader.readLine(HeadersReader.kt:29)
at okhttp3.internal.http1.Http1ExchangeCodec.readResponseHeaders(Http1ExchangeCodec.kt:178)
... 19 more
import io.argoproj.workflow.ApiClient;
import io.argoproj.workflow.ApiException;
import io.argoproj.workflow.Configuration;
import io.argoproj.workflow.models.*;
import io.argoproj.workflow.apis.WorkflowServiceApi;
import io.kubernetes.client.openapi.models.V1Container;
import io.kubernetes.client.openapi.models.V1ObjectMeta;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
public class Argo {
public static void main(String[] args) {
ApiClient defaultClient = Configuration.getDefaultApiClient();
defaultClient.setVerifyingSsl(false);
String url = "https://xxx.xxx.xxx.xxx:2746";
defaultClient.setBasePath(url);
WorkflowServiceApi apiInstance = new WorkflowServiceApi(defaultClient);
String namespace = "argo"; // String |
WorkflowCreateRequest body = new WorkflowCreateRequest();// WorkflowCreateRequest |
CreateOptions options = new CreateOptions();
Workflow workflow = new Workflow();
workflow.setApiVersion("argoproj.io/v1alpha1");
V1ObjectMeta meta = new V1ObjectMeta();
meta.setNamespace("argo");
meta.setName("hello-java-client");
WorkflowSpec spec = new WorkflowSpec();
spec.setEntrypoint("argosay");
Template template = new Template();
template.setName("argosay");
V1Container v1Container = new V1Container();
v1Container.setName("pod");
v1Container.setImage("docker/whalesay:latest");
v1Container.setCommand(List.of("cowsay"));
v1Container.setArgs(List.of("test java client"));
template.setContainer(v1Container);
spec.setTemplates(List.of(template));
workflow.setMetadata(meta);
workflow.setSpec(spec);
body.setNamespace("argo");
body.setServerDryRun(false);
body.setWorkflow(workflow);
try {
Workflow result = apiInstance.workflowServiceCreateWorkflow(namespace, body);
System.out.println(result);
} catch (ApiException e) {
System.err.println("Exception when calling WorkflowServiceApi#workflowServiceCreateWorkflow");
System.err.println("Status code: " + e.getCode());
System.err.println("Reason: " + e.getResponseBody());
System.err.println("Response headers: " + e.getResponseHeaders());
e.printStackTrace();
}
Try using some meaningful value for fields, for example:
String fields = "items.spec,items.status.phase";

java Gherkin parser stream does not release file locks

I am using Gherkin parser to parse feature files and returning the list of Gherkin documents see the function below:
import io.cucumber.gherkin.Gherkin;
import io.cucumber.messages.IdGenerator;
import io.cucumber.messages.Messages;
import io.cucumber.messages.Messages.Envelope;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * Utility for loading all framework feature files through the Cucumber Gherkin
 * parser.
 */
public class GherkinUtils {
    private static final Logger LOG = LogManager.getLogger(GherkinUtils.class);

    /**
     * Parses every feature file found in the framework's features folder.
     *
     * @return one {@link Messages.GherkinDocument} per parsed feature file;
     *         empty when parsing failed
     */
    public static ArrayList<Messages.GherkinDocument> getGherkinDocumentsFromFiles() {
        IdGenerator idGenerator = new IdGenerator.Incrementing();
        ArrayList<Messages.GherkinDocument> listOfGherkinDocuments = new ArrayList<>();
        String pathFolderFrameworkFeatures = SettingsUtils.getPathFolderFrameworkFeatures();
        List<String> listOfPathsForFeatureFiles = FileUtils.getAllFilePathsFromFolder(pathFolderFrameworkFeatures);
        // try-with-resources closes the Stream; note that Gherkin.fromPaths
        // reads the underlying files while the stream is being consumed.
        try (Stream<Envelope> dataStream = Gherkin.fromPaths(listOfPathsForFeatureFiles, false, true, false, idGenerator)) {
            List<Envelope> envelopes = dataStream.collect(Collectors.toList());
            for (Envelope env : envelopes) {
                listOfGherkinDocuments.add(env.getGherkinDocument());
            }
        } catch (Exception e) {
            // Fix: log the caught exception directly. The original wrapped it in
            // "new Exception(e)", which only buried the real stack trace one
            // level deeper without adding information.
            LOG.error("Error occurred while trying to read the feature files", e);
        }
        // Rename probe: a failed rename indicates the file is still locked.
        FileUtils.renameAllFeatureFiles("b");
        return listOfGherkinDocuments;
    }
}
Just before the return statement, you can see the function that will update the name for all feature files just to check if they are not locked.
The problem is that only the first file is always renamed and the rest of them are always locked.
If I place the rename function at the top, then all the files are successfully renamed...
My understanding is that the try statement will automatically close the stream. Also, I tried to close it manually inside the try block but the results are the same.
What am I missing? How can I make it to release the file locks?
Update 1:
This exact line is making the files (except the first one to be locked):
List<Envelope> envelopes = dataStream.collect(Collectors.toList());
Here is the file name update function definition in case you want to test it:
/**
 * Renames every feature file in the features folder to
 * {@code <fileName><n>.feature} and prints whether each rename succeeded.
 * A failed rename is treated as evidence that the file is still locked.
 *
 * @param fileName base name to use for the renamed files
 */
public static void renameAllFeatureFiles(String fileName) {
    String pathFeaturesFolder = SettingsUtils.getPathFolderFrameworkFeatures();
    List<String> pathList = FileUtils.getAllFilePathsFromFolder(pathFeaturesFolder);
    int counter = 0;
    for (String path : pathList) {
        counter++;
        File file = new File(path);
        // Fix: use File.separator instead of a hard-coded "\\" so the target
        // path is valid on macOS/Linux as well as Windows.
        File newFile = new File(pathFeaturesFolder + File.separator + fileName + counter + ".feature");
        // File.renameTo returns false on failure (e.g. the file is locked).
        System.out.println("File: " + path + " locked: " + !file.renameTo(newFile));
    }
}
And here is a sample feature file content:
Feature: Test
Scenario: test 1
Given User will do something
And User will do something
Update 2:
Tried with separate thread using javafx Task, still the same issue :(
Except for one file (this is really strange) all files are locked...
public static void runInNewThread() {
// define the execution task that will run in a new thread
Task<Void> newTask = new Task<>() {
#Override
protected Void call() {
ArrayList<Messages.GherkinDocument> listOfGherkinDocuments = GherkinUtils.getGherkinDocumentsFromFiles();
return null;
}
};
// run the task in a new thread
Thread th = new Thread(newTask);
th.setDaemon(true);
th.start();
}
For now, I have used workaround with creating copies of the specific files and using parser on the copies to prevent locking of the original versions...

Error "Exception in thread "main" java.lang.ExceptionInInitializerError" while working with log4j

Error is :
Exception in thread "main" java.lang.ExceptionInInitializerError
at com.agile.pc.cmserver.base.CMLogger.setLogClass(CMLogger.java:39)
at com.agile.util.log.CMLogFactory.getLogger(CMLogFactory.java:77)
at com.agile.util.exception.AppException.<clinit>(AppException.java:28)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Unknown Source)
at com.sun.proxy.$Proxy23.<clinit>(Unknown Source)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(Unknown Source)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(Unknown Source)
at java.lang.reflect.Constructor.newInstance(Unknown Source)
at java.lang.reflect.Proxy.newProxyInstance(Unknown Source)
at com.agile.api.pc.EJBRemoteProxy.createRemoteProxy(EJBRemoteProxy.java:60)
at com.agile.api.pc.EJBLookup.getRemoteInterface(EJBLookup.java:1012)
at com.agile.api.pc.EJBLookup.getRemoteInterface(EJBLookup.java:959)
at com.agile.api.pc.EJBLookup.getChangeSession(EJBLookup.java:309)
at com.agile.api.pc.change.Change.getBean(Change.java:106)
at com.agile.api.pc.RouteObject$GetStatusAction.doSdkAction(RouteObject.java:2926)
at com.agile.api.common.SDKAction.run(SDKAction.java:23)
at weblogic.security.acl.internal.AuthenticatedSubject.doAs(AuthenticatedSubject.java:368)
at weblogic.security.service.SecurityManager.runAs(SecurityManager.java:163)
at weblogic.security.Security.runAs(Security.java:61)
at com.agile.api.common.WebLogicAuthenticator.doAs(WebLogicAuthenticator.java:111)
at com.agile.api.common.Security.doAs(Security.java:54)
at com.agile.api.common.Security.doAs(Security.java:109)
at com.agile.api.pc.RouteObject.getStatus(RouteObject.java:1206)
at com.gehc.extensions.px.CreateChildSCN.doAction(CreateChildSCN.java:39)
at com.gehc.extensions.px.CreateChildSCN.main(CreateChildSCN.java:124)
Caused by: java.lang.NullPointerException
at org.apache.logging.log4j.util.ProviderUtil.validVersion(ProviderUtil.java:142)
at org.apache.logging.log4j.util.ProviderUtil.loadProvider(ProviderUtil.java:80)
at org.apache.logging.log4j.util.ProviderUtil.<init>(ProviderUtil.java:66)
at org.apache.logging.log4j.util.ProviderUtil.lazyInit(ProviderUtil.java:124)
at org.apache.logging.log4j.util.ProviderUtil.hasProviders(ProviderUtil.java:108)
at org.apache.logging.log4j.LogManager.<clinit>(LogManager.java:89)
... 27 more
Javafile:
import java.util.ResourceBundle;
import org.apache.log4j.Logger;
import com.agile.api.APIException;
import com.agile.api.IAgileSession;
import com.agile.api.IChange;
import com.agile.api.IDataObject;
import com.agile.api.INode;
import com.agile.px.ActionResult;
import com.agile.px.ICustomAction;
import com.gehc.common.core.CreateChildSCN_BO;
import com.gehc.common.pxconstants.GEHC_SCNConstants;
import com.gehc.common.pxutil.SDKUtil;
import com.gehc.common.pxutil.Util;
/**
 * Agile PX custom action that validates the current change object and, when
 * the workflow status and supplier fields allow it, creates child SCN objects.
 */
public class CreateChildSCN implements ICustomAction {
    // Bundle holding credentials, workflow status names and user-facing messages.
    private static ResourceBundle objResourceBundle = ResourceBundle.getBundle("GEHCCreateChildSCN");
    private static Logger objLogger = Logger.getLogger(CreateChildSCN.class);

    /**
     * Entry point invoked by the Agile PX framework.
     *
     * @param aSession      session of the user who triggered the action
     * @param actionNode    node the action is attached to (unused)
     * @param currentObject the change object the action runs against
     * @return an {@link ActionResult} carrying the accumulated status message
     */
    public ActionResult doAction(IAgileSession aSession, INode actionNode,
            IDataObject currentObject) {
        Util.initAppLogger(CreateChildSCN.class, Util.getLogFileName());
        StringBuffer pxMessage = new StringBuffer();
        try {
            IChange objChange = (IChange) currentObject;
            String strChangeStatus = objChange.getStatus().toString();
            IAgileSession objAgileSession = null;
            String strUser = aSession.getCurrentUser().toString();
            objLogger.info("Session with Logged on User..::" + aSession.getCurrentUser().toString());
            // Open a dedicated session with the configured service account.
            objAgileSession = SDKUtil.getAgileSession(objResourceBundle
                    .getString("AGILE_USER"), objResourceBundle
                    .getString("AGILE_PASSWORD"), objResourceBundle
                    .getString("AGILE_URL"));
            if (!(strUser.equals(objResourceBundle.getString("CURRENT_USER")))) {
                if (strChangeStatus.equals(objResourceBundle.getString("ECO_WORKFLOW_STATUS"))
                        || strChangeStatus.equals(objResourceBundle.getString(("ECR_WORKFLOW_STATUS")))) {
                    String suppliers = objChange.getValue(GEHC_SCNConstants.SCN_SUPPLIER_NAME).toString();
                    String supplierGroup = objChange.getValue(GEHC_SCNConstants.SCN_SUPPLIER_USER_GROUPS).toString();
                    // Checking for the suppliers presence
                    if ("".equals(suppliers) && "".equals(supplierGroup)) {
                        CreateChildSCN_BO objChildSCNBO = new CreateChildSCN_BO();
                        pxMessage.append(objChildSCNBO.createSCNs(objAgileSession, objChange));
                    } else {
                        pxMessage.append(objResourceBundle.getString("MESSAGE_ERROR_SUPPLIER_NAME_VALUE"));
                        objLogger.info(objResourceBundle.getString("MESSAGE_ERROR_SUPPLIER_NAME_VALUE"));
                        System.out.println(objResourceBundle.getString("MESSAGE_ERROR_SUPPLIER_NAME_VALUE"));
                    }
                } else {
                    pxMessage.append(objResourceBundle.getString("MESSAGE_ERROR_WORKFLOW_CRITERIA"));
                    // Fix: this log statement originally sat AFTER the closing
                    // brace of this else-branch, so the workflow-criteria error
                    // was logged even when the status check passed.
                    objLogger.info(objResourceBundle.getString("MESSAGE_ERROR_WORKFLOW_CRITERIA"));
                }
            } else {
                pxMessage.append(objResourceBundle.getString("MESSAGE_ERROR_INVALID_USER_LOGIN"));
                objLogger.info(objResourceBundle.getString("MESSAGE_ERROR_INVALID_USER_LOGIN"));
            }
        } catch (Exception apiEx) {
            apiEx.printStackTrace();
            // Fix: the original looked up "MESSAGE_ERROR_UNABLE_TO_DO " (note
            // the trailing space), a key that does not exist in the bundle and
            // would throw MissingResourceException inside this catch block,
            // masking the original failure.
            System.out.println(objResourceBundle.getString("MESSAGE_ERROR_UNABLE_TO_DO") + apiEx);
            pxMessage.append(objResourceBundle.getString("MESSAGE_ERROR_UNABLE_TO_DO"));
            objLogger.error(objResourceBundle.getString("MESSAGE_ERROR_UNABLE_TO_DO"));
        }
        return new ActionResult(ActionResult.STRING, pxMessage.toString());
    }

    /**
     * For Stand alone Only
     * Invokes the doAction method
     */
    public static void main(String[] args) {
        CreateChildSCN objSCRValidation = null;
        String strNumber = null;
        IAgileSession objAgileSession = null;
        IChange objChange = null;
        ResourceBundle objResources = ResourceBundle.getBundle("GEHCCreateChildSCN");
        Logger objLogger = Logger.getLogger(CreateChildSCN.class);
        try {
            objSCRValidation = new CreateChildSCN();
            Util.initAppLogger(CreateChildSCN.class, Util.getLogFileName());
            strNumber = "SCN-0043018";
            // Establish session with hard-coded standalone credentials.
            objAgileSession = SDKUtil.getAgileSession("xxx", "xxxx", "xxxxxxxxx");
            System.out.println("created session");
            // Load the change object by number, then run the action against it.
            objChange = (IChange) objAgileSession
                    .getObject(IChange.OBJECT_TYPE, strNumber);
            objSCRValidation
                    .doAction(objAgileSession, null, objChange);
        } catch (APIException e) {
            System.out.println("Error log from main thread ::: " + e);
            objLogger.error(objResources.getString("SESSION_FAILED") + Util.exception2String(e));
        }
    }
}
And also it contains properties files where we defined logger details like path and file name of log file and logger initialization like log4j.category.com.xxx.common.util.SDKUtil = debug, XLogger
Here we are using the log4j JAR API, initialized on the classpath.
Any help here is highly appreciated.
Thanks,
Himachandra.
if you look at the code for at org.apache.logging.log4j.util.ProviderUtil.validVersion(ProviderUtil.java:142), it looks like a bug in Log4J:
private static boolean validVersion(final String version) {
    // Accept the version when it begins with any compatible API prefix.
    // NOTE(review): version.startsWith(...) throws NullPointerException when
    // version is null — this is the crash reported in the question.
    for (final String compatiblePrefix : COMPATIBLE_API_VERSIONS) {
        final boolean matches = version.startsWith(compatiblePrefix);
        if (matches) {
            return true;
        }
    }
    return false;
}
In that library, if (version.startsWith(v)) should be if (v.startsWith(version)) since version is nullable but v is never null.
Try using a newer version of log4j that does not have this bug.
I did not see this issue in version:
2.11.2
I saw this issue in versions:
2.6.2 (java.lang.NullPointerException at org.apache.logging.log4j.util.ProviderUtil.validVersion(ProviderUtil.java:142))
2.8.2 (java.lang.NullPointerException at org.apache.logging.log4j.util.ProviderUtil.validVersion(ProviderUtil.java:142))
2.10.0 (java.lang.NoClassDefFoundError: Could not initialize class org.apache.logging.log4j.util.PropertiesUtil)
Changing the version was a quick fix for me. However, it does not fix the underlying root cause. For me, I think it was some strange interaction between junit-log4j-jmockit that allowed the version to be null. If I ran tests in a different order, I had no issue.

NACHOS on eclipse exception in thread "main"

I am getting the following error when running nachos in eclipse:
Exception in thread "main" java.lang.Error: Unresolved compilation problem:
at nachos.machine.Lib.assertTrue(Lib.java:75)
at nachos.machine.Machine.main(Machine.java:24)
The above two methods are as follows:
nachos.machine.Lib.assertTrue:
/**
 * Asserts that the given expression holds; throws the Nachos assertion error
 * otherwise.
 */
public static void assertTrue(boolean expression) {
    // Guard clause: a true expression is a no-op.
    if (expression) {
        return;
    }
    throw new AssertionFailureError();
}
nachos.machine.Machine.main:
// Boots the Nachos machine simulator: loads configuration, installs the
// security manager/privilege objects, creates the simulated devices, and
// finally starts the auto-grader on a new TCB.
// NOTE(review): relies on many static fields (baseDirectory, privilege, stats,
// ...) declared elsewhere in Machine — initialization order matters here.
public static void main(final String[] args) {
System.out.println("nachos 5.0j initializing...");
// Guards against main() being entered twice: Machine.args must still be unset.
Lib.assertTrue(Machine.args == null);//This is the call after which error is thrown
Machine.args = args;
processArgs();
Config.load(configFileName);
// get the current directory (.)
baseDirectory = new File(new File("").getAbsolutePath());
// get the nachos directory (./nachos)
nachosDirectory = new File(baseDirectory, "nachos");
String testDirectoryName =
Config.getString("FileSystem.testDirectory");
// get the test directory
if (testDirectoryName != null) {
testDirectory = new File(testDirectoryName);
}
else {
// use ../test
testDirectory = new File(baseDirectory.getParentFile(), "test");
}
// Install the Nachos security manager and hand its privilege object to the
// TCB machinery before any simulated devices are created.
securityManager = new NachosSecurityManager(testDirectory);
privilege = securityManager.getPrivilege();
privilege.machine = new MachinePrivilege();
TCB.givePrivilege(privilege);
privilege.stats = stats;
securityManager.enable();
createDevices();
checkUserClasses();
// The auto-grader class is configurable; construct it reflectively and run
// it on a fresh thread-control-block.
autoGrader = (AutoGrader) Lib.constructObject(autoGraderClassName);
new TCB().start(new Runnable() {
public void run() { autoGrader.start(privilege); }
});
}
I had this same error come up for me when I was trying to run nachos through eclipse. What I did was to look at Lib.java and scroll down to the function with an eclipse error on it. For me it was checkDerivation. There should be an eclipse error on the parameters <?>, if you look at the auto fix eclipse gives you and pick the project wide fix nachos should run after that.

Job failed Exception hadoop

I am using MultipleTextOutputFormat to create multiple files from a single file, i.e. each line in a new file.
This is my code:
public class MOFExample extends Configured implements Tool {
private static double count = 0;
static class KeyBasedMultipleTextOutputFormat extends
MultipleTextOutputFormat<Text, Text> {
#Override
protected String generateFileNameForKeyValue(Text key, Text value,
String name) {
return count++ + "_";// + name;
}
}
/**
* The main job driver.
*/
public int run(final String[] args) throws Exception {
Path csvInputs = new Path(args[0]);
Path outputDir = new Path(args[1]);
JobConf jobConf = new JobConf(super.getConf());
jobConf.setJarByClass(MOFExample.class);
jobConf.setMapperClass(IdentityMapper.class);
jobConf.setInputFormat(KeyValueTextInputFormat.class);
jobConf.setOutputFormat(KeyBasedMultipleTextOutputFormat.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setOutputKeyClass(Text.class);
FileInputFormat.setInputPaths(jobConf, csvInputs);
FileOutputFormat.setOutputPath(jobConf, outputDir);
//jobConf.setNumMapTasks(4);
jobConf.setNumReduceTasks(4);
return JobClient.runJob(jobConf).isSuccessful() ? 0 : 1;
}
public static void main(final String[] args) throws Exception {
int res = ToolRunner.run(new Configuration(), new MOFExample(), args);
System.exit(res);
}
}
This code runs fine on small text file but when the number of lines of input file are greater than 1900 which is yet not a large file it throws an exception:
Exception in thread "main" java.io.IOException: Job failed!
at org.apache.hadoop.mapred.JobClient.runJob(JobClient.java:836)
at MOFExample.run(MOFExample.java:57)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
at MOFExample.main(MOFExample.java:61)
I also tried this tutorial but this one returns empty output directory without any exception when the input file is large however this one also worked fine with small input file.
Note: I am using Single-Node Cluster

Categories