I am trying to upload a file to S3 and I am getting an error like:
Target exception: java.lang.NoSuchMethodError:
com.amazonaws.SDKGlobalConfiguration.isInRegionOptimizedModeEnabled()Z
The code is:
String accessKey="accesskey";
String secretKey="mysecretkey";
AWSCredentials credentials = new BasicAWSCredentials(accessKey, secretKey);
AmazonS3 conn = new AmazonS3Client(credentials);
I get the target exception on the line AmazonS3 conn = new AmazonS3Client(credentials).
In total I have imported all of the Java packages shown below, but I am still getting the same error.
import java.io.File;
import java.io.IOException;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.util.List;
import com.amazonaws.auth.*;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.util.StringUtils;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.SDKGlobalConfiguration;
import com.amazonaws.services.s3.model.Bucket;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.GeneratePresignedUrlRequest;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import com.amazonaws.services.s3.*;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.AmazonS3Builder;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.ClientConfigurationFactory;
import com.amazonaws.annotation.NotThreadSafe;
import com.amazonaws.annotation.SdkTestInternalApi;
import com.amazonaws.client.AwsSyncClientParams;
import com.amazonaws.internal.SdkFunction;
import com.amazonaws.regions.AwsRegionProvider;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.Protocol;
import com.amazonaws.AmazonS3Client;
import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.auth.profile.ProfileCredentialsProvider;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.PutObjectRequest;
I am writing a MapReduce project. I want to send an array from the mapper to the reducer, but I get an error that I can't fix.
I import these classes:
import java.io.DataInput;
import java.io.DataOutput;
import java.io.EOFException;
import java.io.IOException;
import java.net.Socket;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.conf.Configured;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.util.*;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.util.Iterator;
import hadoop.DENCLUE;
//import javafx.scene.text.Text;
import sun.security.krb5.Config;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.viewfs.Constants;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
//import org.apache.hadoop.mapred.jobcontrol.Job;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.omg.CORBA.PUBLIC_MEMBER;
import com.sun.org.apache.bcel.internal.generic.NEW;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.mapreduce.lib.output.*;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.mapreduce.task.JobContextImpl;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.mapreduce.lib.output.*;
import org.apache.hadoop.util.*;
import java.io.DataOutput;
import java.io.DataInput;
import java.io.IOException;
This is my Map class:
public static class Mapn extends MapReduceBase implements Mapper<LongWritable, Text, Text, Text> {
@SuppressWarnings("rawtypes")
Context con ;
@SuppressWarnings("unchecked")
public void map(LongWritable key, Text value, OutputCollector<Text, Text> output,
Reporter reporter) throws IOException {
String line = value.toString();
String[] words=line.split(",");
for(String word: words )
{
Text outputKey = new Text(word.toUpperCase().trim());
try {
con.write(outputKey, words);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
This is the job:
public static void main(String[] args) throws Exception {
Configuration c=new Configuration();
String[] files=new GenericOptionsParser(c,args).getRemainingArgs();
Path input=new Path(files[0]);
Path output=new Path(files[1]);
Job j=new Job(c,"wnt");
j.setJarByClass(projectmr.class);
j.setMapperClass(Mapn.class);
j.setReducerClass(Reduce.class);
j.setOutputKeyClass(Text.class);
j.setOutputValueClass(Text.class);
FileInputFormat.addInputPaths(j, input);
FileOutputFormat.setOutputPath(j, output);
System.exit(j.waitForCompletion(true)?0:1);
and this is the error I get:
Exception in thread "main" java.lang.RuntimeException: class hadoop.projectmr$Mapn not org.apache.hadoop.mapreduce.Mapper
at org.apache.hadoop.conf.Configuration.setClass(Configuration.java:1969)
at org.apache.hadoop.mapreduce.Job.setMapperClass(Job.java:891)
at hadoop.projectmr.main(projectmr.java:191)
This is the old Hadoop 1 API:
import org.apache.hadoop.mapred.*;
You should instead be importing classes from
org.apache.hadoop.mapreduce.*;
As the error says
not org.apache.hadoop.mapreduce.Mapper
So, basically, you don't need MapReduceBase, and Mapper is now a class, not an interface.
So you would now have
public static class MyMapper extends Mapper<Kin, Vin, Kout, Vout>
Take a look at the WordCount example code.
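For reference, here is a minimal sketch of the Mapn mapper ported to the new API. Writing the whole input line as the value is only illustrative: a plain String[] is not a Writable, so it cannot be emitted directly and would have to be wrapped (for example in a Text or an ArrayWritable).

import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// New-API mapper: extends the Mapper class, no MapReduceBase, no OutputCollector
public class Mapn extends Mapper<LongWritable, Text, Text, Text> {

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        for (String word : value.toString().split(",")) {
            Text outputKey = new Text(word.toUpperCase().trim());
            // The emitted value must be a Writable; here the whole line is re-emitted
            context.write(outputKey, value);
        }
    }
}

The driver should likewise use org.apache.hadoop.mapreduce.lib.input.FileInputFormat and org.apache.hadoop.mapreduce.lib.output.FileOutputFormat rather than the org.apache.hadoop.mapred versions.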
I am working on a Java utility that copies files from HDFS to a remote machine. I have an issue with the SFTP library import. I have included the org.mule transport and the related libraries in the POM, and after running mvn install I can see that the classes are available in the dependency path. However, when I execute the class I get the following error:
Exception in thread "main" java.lang.NoClassDefFoundError: org/mule/transport/sftp/SftpClient
at xxxx.yyyyyy.sftpFileTransfer.main(sftpFileTransfer.java:17)
Caused by: java.lang.ClassNotFoundException: org.mule.transport.sftp.SftpClient
at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
... 1 more
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.BufferedInputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.mule.transport.sftp.SftpClient;
import com.jcraft.jsch.Channel;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.ChannelSftp.LsEntry;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.JSchException;
import com.jcraft.jsch.Session;
import com.jcraft.jsch.SftpATTRS;
import com.jcraft.jsch.SftpException;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.Vector;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.mule.transport.sftp.SftpClient;
public class sftpFileTransfer {
public static void main(String[] args) throws IOException {
System.out.println("This is for testing");
SftpClient sftpCli = new SftpClient("abcdef");
sftpCli.login("karthick_kb","/home/karthick/.ssh/id_rsa", null);
sftpCli.changeWorkingDirectory("/tmp/");
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(conf);
FSDataInputStream fsdisPath = null;
String filePath = null;
filePath = "/a/b/c/d/e/f/1.dat.gz";
Path inputPath = new Path(filePath);
fsdisPath = fs.open(inputPath);
BufferedInputStream bis = new BufferedInputStream(fsdisPath);
sftpCli.storeFile(inputPath.getName(), bis);
fsdisPath.close();
}
}
The Maven dependencies are as follows:
<dependencies>
<dependency>
<groupId>com.jcraft</groupId>
<artifactId>jsch</artifactId>
<version>0.1.54</version>
</dependency>
<dependency>
<groupId>org.mule</groupId>
<artifactId>mule-core</artifactId>
<version>3.4.0</version>
</dependency>
<dependency>
<groupId>org.mule.transports</groupId>
<artifactId>mule-transport-sftp</artifactId>
<version>3.4.0</version>
</dependency>
</dependencies>
I am able to see the required SFTP classes in the Maven dependencies. What could I be missing? Any information would be great.
I'm trying to import vk.core.api in Java, but when I try to compile it I only get errors like:
error: package vk.core.api does not exist
import vk.core.api.*;
package main.java.tddt;
import javafx.scene.control.Label;
import main.java.tddt.data.Log;
import main.java.tddt.data.LogList;
import main.java.tddt.data.Timer;
import main.java.tddt.gui.Controller;
import vk.core.api.*;
import vk.core.api.CompilerResult;
import vk.core.api.TestResult;
import vk.core.api.CompileError;
import vk.core.api.CompilerFactory;
import vk.core.api.JavaStringCompiler;
import vk.core.internal.*;
import vk.core.internal.InternalResult;
import java.util.TreeSet;
import javax.xml.bind.JAXBException;
import java.io.File;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Collection;
import java.util.List;
You need to add something like the following to the dependencies of your build.gradle:
compile group: 'de.hhu.stups', name: 'virtual-kata-lib', version: '1.0.0'
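In context that would look something like the sketch below; the java plugin and the mavenCentral() repository are assumptions, so point the repositories block at wherever the library is actually published.

apply plugin: 'java'

repositories {
    // assumption: adjust to the repository that actually hosts virtual-kata-lib
    mavenCentral()
}

dependencies {
    // on newer Gradle versions use 'implementation' instead of 'compile'
    compile group: 'de.hhu.stups', name: 'virtual-kata-lib', version: '1.0.0'
}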
Someone needs a public RSA key from me and gave me a Java program to make a key pair. I have zero experience with Java, so very simple things are very difficult for me. The program looks like this:
import java.io.IOException;
import java.math.BigInteger;
import java.security.GeneralSecurityException;
import java.security.Key;
import java.security.KeyFactory;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.Provider;
import java.security.SecureRandom;
import java.security.Security;
import java.security.interfaces.RSAPrivateKey;
import java.security.interfaces.RSAPublicKey;
import java.security.spec.RSAPrivateKeySpec;
import java.security.spec.RSAPublicKeySpec;
import java.util.HashMap;
import java.util.Map;
import javax.crypto.Cipher;
import javax.crypto.KeyGenerator;
public class CustomRSAKeyPairGenerator{
public void generate() {
try {
KeyPairGenerator objlRSAKeyPairGen =
KeyPairGenerator.getInstance("RSA");
objlRSAKeyPairGen.initialize(2048);
KeyPair objlRSAKeyPair = objlRSAKeyPairGen.generateKeyPair();
RSAPublicKey objlPublicKey = (RSAPublicKey) objlRSAKeyPair.getPublic();
RSAPrivateKey objlPrivateKey = (RSAPrivateKey) objlRSAKeyPair.getPrivate();
StringBuffer strblPublicKey = new StringBuffer();
strblPublicKey.append(objlPublicKey.getModulus().toString(16).toUpperCase());
strblPublicKey.append('~');
strblPublicKey.append(objlPublicKey.getPublicExponent().toString(16).toUpperCase()
);
System.out.println(strblPublicKey.toString());
StringBuffer strblPrivateKey = new StringBuffer();
strblPrivateKey.append(objlPrivateKey.getModulus().toString(16).toUpperCase());
strblPrivateKey.append('~');
strblPrivateKey.append(objlPrivateKey.getPrivateExponent().toString(16).toUpperCase());
System.out.println(strblPrivateKey.toString());
}catch (NoSuchAlgorithmException noSuchAlgrtm){
System.out.println(noSuchAlgrtm.getMessage());
}
}
}
When I compile this I get a file called "CustomRSAKeyPairGenerator.class", so I then run it like this:
mylogin$java CustomRSAKeyPairGenerator
and I get this error:
Exception in thread "main" java.lang.NoClassDefFoundError: CustomRSAKeyPairGenerator/class
Caused by: java.lang.ClassNotFoundException: CustomRSAKeyPairGenerator.class
at java.net.URLClassLoader$1.run(URLClassLoader.java:202)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
at java.lang.ClassLoader.loadClass(ClassLoader.java:247)
Am I running this wrong, or is there a problem in the Java source that I need to fix? Is there a way to use this to make the RSA key pair that I need?