Google SpeechClient io.grpc.StatusRuntimeException: UNAVAILABLE: Credentials failed to obtain metadata - java

I'm building an application with Google's SpeechClient, which requires the GOOGLE_APPLICATION_CREDENTIALS environment variable to be set; once it is, you can use the speech-to-text API.
My application has to run on both Linux and Windows. On Linux it runs perfectly; on Windows, however, running the project throws com.google.api.gax.rpc.UnavailableException: "io.grpc.StatusRuntimeException: UNAVAILABLE: Credentials failed to obtain metadata" when trying to run this thread:
package Controller.Runnables;
import Controller.GUI.VoxSpeechGUIController;
import Model.SpokenTextHistory;
import com.google.api.gax.rpc.ClientStream;
import com.google.api.gax.rpc.ResponseObserver;
import com.google.api.gax.rpc.StreamController;
import com.google.cloud.speech.v1.*;
import com.google.protobuf.ByteString;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.TargetDataLine;
import java.io.IOException;
import java.util.ArrayList;
public class SpeechRecognizerRunnable implements Runnable{
private VoxSpeechGUIController controller;
public SpeechRecognizerRunnable(VoxSpeechGUIController voxSpeechGUIController) {
this.controller = voxSpeechGUIController;
}
@Override
public void run() {
MicrofoneRunnable micrunnable = MicrofoneRunnable.getInstance();
Thread micThread = new Thread(micrunnable);
ResponseObserver<StreamingRecognizeResponse> responseObserver = null;
try (SpeechClient client = SpeechClient.create()) {
ClientStream<StreamingRecognizeRequest> clientStream;
responseObserver =
new ResponseObserver<StreamingRecognizeResponse>() {
ArrayList<StreamingRecognizeResponse> responses = new ArrayList<>();
public void onStart(StreamController controller) {}
public void onResponse(StreamingRecognizeResponse response) {
try {
responses.add(response);
StreamingRecognitionResult result = response.getResultsList().get(0);
// There can be several alternative transcripts for a given chunk of speech. Just
// use the first (most likely) one here.
SpeechRecognitionAlternative alternative = result.getAlternativesList().get(0);
String transcript = alternative.getTranscript();
System.out.printf("Transcript : %s\n", transcript);
String newText = SpokenTextHistory.getInstance().getActualSpeechString() + " " + transcript;
SpokenTextHistory.getInstance().setActualSpeechString(newText);
controller.setLabelText(newText);
}
catch (Exception ex){
System.out.println(ex.getMessage());
ex.printStackTrace();
}
}
public void onComplete() {
}
public void onError(Throwable t) {
System.out.println(t);
}
};
clientStream = client.streamingRecognizeCallable().splitCall(responseObserver);
RecognitionConfig recognitionConfig =
RecognitionConfig.newBuilder()
.setEncoding(RecognitionConfig.AudioEncoding.LINEAR16)
.setLanguageCode("pt-BR")
.setSampleRateHertz(16000)
.build();
StreamingRecognitionConfig streamingRecognitionConfig =
StreamingRecognitionConfig.newBuilder().setConfig(recognitionConfig).build();
StreamingRecognizeRequest request =
StreamingRecognizeRequest.newBuilder()
.setStreamingConfig(streamingRecognitionConfig)
.build(); // The first request in a streaming call has to be a config
clientStream.send(request);
try {
// SampleRate:16000Hz, SampleSizeInBits: 16, Number of channels: 1, Signed: true,
// bigEndian: false
AudioFormat audioFormat = new AudioFormat(16000, 16, 1, true, false);
DataLine.Info targetInfo =
new DataLine.Info(
TargetDataLine.class,
audioFormat); // Set the system information to read from the microphone audio
// stream
if (!AudioSystem.isLineSupported(targetInfo)) {
System.out.println("Microphone not supported");
System.exit(0);
}
// Target data line captures the audio stream the microphone produces.
micrunnable.targetDataLine = (TargetDataLine) AudioSystem.getLine(targetInfo);
micrunnable.targetDataLine.open(audioFormat);
micThread.start();
long startTime = System.currentTimeMillis();
while (!micrunnable.stopFlag) {
long estimatedTime = System.currentTimeMillis() - startTime;
if (estimatedTime >= 55000) {
clientStream.closeSend();
clientStream = client.streamingRecognizeCallable().splitCall(responseObserver);
request =
StreamingRecognizeRequest.newBuilder()
.setStreamingConfig(streamingRecognitionConfig)
.build();
startTime = System.currentTimeMillis();
} else {
request =
StreamingRecognizeRequest.newBuilder()
.setAudioContent(ByteString.copyFrom(micrunnable.sharedQueue.take()))
.build();
}
clientStream.send(request);
}
} catch (Exception e) {
System.out.println(e);
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
I've been working on this for hours and have not found a solution to my problem.
It is worth mentioning that the environment variable is being set correctly.
Has anyone ever had this problem with Google? What should I do to fix this?
PS: I've already tried all of Google's alternatives for validating the credentials, but they all return errors.
This is my environment variable creator:
package Controller.Autentication;
import java.io.*;
import java.lang.reflect.Field;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
public class GoogleAuthentication {
private static final String GOOGLE_APPLICATION_CREDENTIALS = "GOOGLE_APPLICATION_CREDENTIALS";
private static final String VoxSpeechFolder = ".vox";
private static final String GoogleAuthenticationJsonFile = "VoxAuthentication.json";
public static void setupGoogleCredentials() {
String directory = defaultDirectory();
directory += File.separator+VoxSpeechFolder;
File voxPath = new File(directory);
if (!voxPath.exists()) {
voxPath.mkdirs();
}
ClassLoader classLoader = GoogleAuthentication.class.getClassLoader();
File srcFile = new File(classLoader.getResource(GoogleAuthenticationJsonFile).getFile());
if(srcFile.exists()){
try {
String voxDestPath = defaultDirectory() + File.separator + VoxSpeechFolder +File.separator+ GoogleAuthenticationJsonFile;
File destFile = new File(voxDestPath);
copyFile(srcFile,destFile);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
Map<String,String> googleEnv = new HashMap<>();
String path = defaultDirectory() +File.separator+ VoxSpeechFolder +File.separator+ GoogleAuthenticationJsonFile;
googleEnv.put(GOOGLE_APPLICATION_CREDENTIALS, path);
setGoogleEnv(googleEnv);
} catch (Exception e) {
e.printStackTrace();
}
}
static void copyFile(File sourceFile, File destFile)
throws IOException {
InputStream inStream ;
OutputStream outStream ;
System.out.println(destFile.getPath());
if(destFile.createNewFile()){
inStream = new FileInputStream(sourceFile);
outStream = new FileOutputStream(destFile);
byte[] buffer = new byte[1024];
int length;
while ((length = inStream.read(buffer)) > 0){
outStream.write(buffer, 0, length);
}
inStream.close();
outStream.close();
}
}
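// Aside: on Java 7+, copyFile() can be a single NIO call. A sketch of an
// equivalent helper (note: unlike createNewFile() above, which copies only
// when the destination does not yet exist, REPLACE_EXISTING always refreshes
// the destination file):
static void copyFileNio(File sourceFile, File destFile) throws IOException {
java.nio.file.Files.copy(sourceFile.toPath(), destFile.toPath(),
java.nio.file.StandardCopyOption.REPLACE_EXISTING);
}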
static String defaultDirectory()
{
String OS = getOperationSystem();
if (OS.contains("WIN"))
return System.getenv("APPDATA");
else if (OS.contains("MAC"))
return System.getProperty("user.home") + "/Library/Application "
+ "Support";
else if (OS.contains("LINUX")) {
return System.getProperty("user.home");
}
return System.getProperty("user.dir");
}
static String getOperationSystem() {
return System.getProperty("os.name").toUpperCase();
}
protected static void setGoogleEnv(Map<String, String> newenv) throws Exception {
try {
Class<?> processEnvironmentClass = Class.forName("java.lang.ProcessEnvironment");
Field theEnvironmentField = processEnvironmentClass.getDeclaredField("theEnvironment");
theEnvironmentField.setAccessible(true);
Map<String, String> env = (Map<String, String>) theEnvironmentField.get(null);
env.putAll(newenv);
Field theCaseInsensitiveEnvironmentField = processEnvironmentClass.getDeclaredField("theCaseInsensitiveEnvironment");
theCaseInsensitiveEnvironmentField.setAccessible(true);
Map<String, String> cienv = (Map<String, String>) theCaseInsensitiveEnvironmentField.get(null);
cienv.putAll(newenv);
} catch (NoSuchFieldException e) {
Class[] classes = Collections.class.getDeclaredClasses();
Map<String, String> env = System.getenv();
for(Class cl : classes) {
if("java.util.Collections$UnmodifiableMap".equals(cl.getName())) {
Field field = cl.getDeclaredField("m");
field.setAccessible(true);
Object obj = field.get(env);
Map<String, String> map = (Map<String, String>) obj;
map.clear();
map.putAll(newenv);
}
}
}
String genv = System.getenv(GOOGLE_APPLICATION_CREDENTIALS);
System.out.println(genv);
}
}
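For reference, a sketch of the env-var-free alternative mentioned in the PS: hand the key file to the client explicitly through SpeechSettings, using the same VoxAuthentication.json path that setupGoogleCredentials() writes on Windows. The class name and path here are only illustrative:
import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.GoogleCredentials;
import com.google.cloud.speech.v1.SpeechClient;
import com.google.cloud.speech.v1.SpeechSettings;
import java.io.File;
import java.io.FileInputStream;
public class ExplicitCredentialsExample {
public static SpeechClient createClient() throws Exception {
// Same location GoogleAuthentication copies the key to on Windows (%APPDATA%\.vox).
String jsonPath = System.getenv("APPDATA") + File.separator + ".vox"
+ File.separator + "VoxAuthentication.json";
GoogleCredentials credentials;
try (FileInputStream in = new FileInputStream(jsonPath)) {
credentials = GoogleCredentials.fromStream(in);
}
SpeechSettings settings = SpeechSettings.newBuilder()
.setCredentialsProvider(FixedCredentialsProvider.create(credentials))
.build();
// Use this in place of SpeechClient.create() in SpeechRecognizerRunnable.
return SpeechClient.create(settings);
}
}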

Related

How to unzip all the password protected zip files in a directory using Java

I am new to Java and trying to write a program that will unzip all the password-protected zip files in a directory. I am able to unzip all the normal zip files (without a password), but I am not sure how to unzip password-protected ones.
Note: all zip files have the same password
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.PathMatcher;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Stream;
import java.util.zip.ZipEntry;
import net.lingala.zip4j.core.ZipFile;
import net.lingala.zip4j.model.FileHeader;
import java.util.zip.*;
public class Extraction {
// public Extraction() {
//
// try {
//
// ZipFile zipFile = new
// ZipFile("C:\\Users\\Desktop\\ZipFile\\myzip.zip");
//
// if (zipFile.isEncrypted()) {
//
// zipFile.setPassword("CLAIMS!");
// }
//
// List fileHeaderList = zipFile.getFileHeaders();
//
// for (int i = 0; i < fileHeaderList.size(); i++) {
// FileHeader fileHeader = (FileHeader) fileHeaderList.get(i);
//
// zipFile.extractFile(fileHeader, "C:\\Users\\Desktop\\ZipFile");
// System.out.println("Extracted");
// }
//
// } catch (Exception e) {
// System.out.println("Please Try Again");
// }
//
// }
//
// public static void main(String[] args) {
// new Extraction();
//
// }
// }
public static void main(String[] args) {
Extraction unzipper = new Extraction();
unzipper.unzipZipsInDirTo(Paths.get("C:\\Users\\Desktop\\ZipFile"),
Paths.get("C:\\Users\\Desktop\\ZipFile\\Unziped"));
}
public void unzipZipsInDirTo(Path searchDir, Path unzipTo) {
final PathMatcher matcher = searchDir.getFileSystem().getPathMatcher("glob:**/*.zip");
try (final Stream<Path> stream = Files.list(searchDir)) {
stream.filter(matcher::matches).forEach(zipFile -> unzip(zipFile, unzipTo));
} catch (Exception e) {
System.out.println("Something went wrong, Please try again!!");
}
}
public void unzip(Path zipFile, Path outputPath) {
try (ZipInputStream zis = new ZipInputStream(Files.newInputStream(zipFile))) {
ZipEntry entry = zis.getNextEntry();
while (entry != null) {
Path newFilePath = outputPath.resolve(entry.getName());
if (entry.isDirectory()) {
Files.createDirectories(newFilePath);
} else {
if (!Files.exists(newFilePath.getParent())) {
Files.createDirectories(newFilePath.getParent());
}
try (OutputStream bos = Files.newOutputStream(newFilePath)) { // newFilePath is already resolved against outputPath
byte[] buffer = new byte[4096]; // entry.getSize() can be -1, so use a fixed-size buffer
int location;
while ((location = zis.read(buffer)) != -1) {
bos.write(buffer, 0, location);
}
}
}
entry = zis.getNextEntry();
}
} catch (Exception e1) {
System.out.println("Please try again");
}
}
}
I found the answer. I am posting it here since someone else might be looking for something similar.
import java.io.File;
import java.util.List;
import javax.swing.filechooser.FileNameExtensionFilter;
import net.lingala.zip4j.core.ZipFile;
import net.lingala.zip4j.model.FileHeader;
public class SamExtraction {
public static void main(String[] args) {
final FileNameExtensionFilter extensionFilter = new FileNameExtensionFilter("N/A", "zip");
//Folder where zip file is present
final File file = new File("C:/Users/Desktop/ZipFile");
for (final File child : file.listFiles()) {
try {
ZipFile zipFile = new ZipFile(child);
if (extensionFilter.accept(child)) {
if (zipFile.isEncrypted()) {
//Your ZIP password
zipFile.setPassword("MYPASS!");
}
List fileHeaderList = zipFile.getFileHeaders();
for (int i = 0; i < fileHeaderList.size(); i++) {
FileHeader fileHeader = (FileHeader) fileHeaderList.get(i);
//Path where you want to Extract
zipFile.extractFile(fileHeader, "C:/Users/Desktop/ZipFile");
System.out.println("Extracted");
}
}
} catch (Exception e) {
System.out.println("Please Try Again");
}
}
}
}
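If you are on zip4j 2.x rather than the 1.x API used above, note that the API moved: ZipFile now lives in net.lingala.zip4j, takes the password in its constructor, and extractAll() replaces the per-header loop. A minimal sketch under those assumptions, with the same paths and password as above:
import java.io.File;
import net.lingala.zip4j.ZipFile;
public class SamExtraction2 {
public static void main(String[] args) throws Exception {
File dir = new File("C:/Users/Desktop/ZipFile");
for (File child : dir.listFiles()) {
if (!child.getName().toLowerCase().endsWith(".zip")) continue;
// The password is only used for entries that are actually encrypted.
ZipFile zipFile = new ZipFile(child, "MYPASS!".toCharArray());
zipFile.extractAll("C:/Users/Desktop/ZipFile/Unziped");
System.out.println("Extracted " + child.getName());
}
}
}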

Specify where to add file after zip

I am new to Java and I am having trouble specifying where to save the folder after zipping it. It always goes into the project workspace. I want it to be saved under the path
"C:\\Users\\win8.1\\Desktop\\AES"
Here is the code:
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
public class ZipUtils
{
private List<String> fileList;
private static final String OUTPUT_ZIP_FILE = "Folder.zip";
private static final String SOURCE_FOLDER = "C:\\Users\\win8.1\\Desktop\\AES\\SAMPLEPATH"; // SourceFolder path
public ZipUtils()
{
fileList = new ArrayList<String>();
}
public static void main(String[] args)
{
ZipUtils appZip = new ZipUtils();
appZip.generateFileList(new File(SOURCE_FOLDER));
appZip.zipIt(OUTPUT_ZIP_FILE);
}
public void zipIt(String zipFile)
{
byte[] buffer = new byte[1024];
String source = "";
FileOutputStream fos = null;
ZipOutputStream zos = null;
try
{
try
{
source = SOURCE_FOLDER.substring(SOURCE_FOLDER.lastIndexOf("\\") + 1, SOURCE_FOLDER.length());
}
catch (Exception e)
{
source = SOURCE_FOLDER;
}
fos = new FileOutputStream(zipFile);
zos = new ZipOutputStream(fos);
System.out.println("Output to Zip : " + zipFile);
FileInputStream in = null;
for (String file : this.fileList)
{
System.out.println("File Added : " + file);
ZipEntry ze = new ZipEntry(source + File.separator + file);
zos.putNextEntry(ze);
try
{
in = new FileInputStream(SOURCE_FOLDER + File.separator + file);
int len;
while ((len = in.read(buffer)) > 0)
{
zos.write(buffer, 0, len);
}
}
finally
{
in.close();
}
}
zos.closeEntry();
System.out.println("Folder successfully compressed");
}
catch (IOException ex)
{
ex.printStackTrace();
}
finally
{
try
{
zos.close();
}
catch (IOException e)
{
e.printStackTrace();
}
}
}
public void generateFileList(File node)
{
// add file only
if (node.isFile())
{
fileList.add(generateZipEntry(node.toString()));
}
if (node.isDirectory())
{
String[] subNote = node.list();
for (String filename : subNote)
{
generateFileList(new File(node, filename));
}
}
}
private String generateZipEntry(String file)
{
return file.substring(SOURCE_FOLDER.length() + 1, file.length());
}
}
I got the code here. Thank you in advance!
There is no problem with the code itself. Just use absolute paths:
private static final String OUTPUT_ZIP_FILE = "C:\\Users\\win8.1\\Desktop\\AES\\Folder.zip";
private static final String SOURCE_FOLDER = "C:\\Users\\win8.1\\Desktop\\AES\\SAMPLEPATH";
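If you would rather not hard-code the user name, a small variation (a sketch, assuming the same Desktop\AES layout) derives both constants from user.home at runtime:
private static final String BASE = System.getProperty("user.home")
+ File.separator + "Desktop" + File.separator + "AES";
private static final String OUTPUT_ZIP_FILE = BASE + File.separator + "Folder.zip";
private static final String SOURCE_FOLDER = BASE + File.separator + "SAMPLEPATH";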

Add property to properties file if null

I am trying to add a new property to config.properties if it isn't there. Is there any way to do this?
My current config class looks like this:
package com.template;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Properties;
public class Config {
static File folder = new File(System.getProperty("user.home") + "/Documents/Electrocode Productions/template");
public static void add() {
if(!folder.exists()) {
try {
folder.mkdirs();
} catch(SecurityException e) {
Log.error(e);
}
}
Properties config = new Properties();
OutputStream output = null;
Path filePath = Paths.get(folder + "/config.properties");
if(!(filePath == null)) {
try {
output = new FileOutputStream(folder + "/config.properties");
config.setProperty("log", "true");
config.store(output, null);
} catch(IOException e) {
Log.error(e);
} finally {
if(output !=null) {
try {
output.close();
} catch(IOException e) {
Log.error(e);
}
}
}
}
}
public static String get(String value) {
Properties config = new Properties();
InputStream input = null;
try {
input = new FileInputStream(folder + "/config.properties");
config.load(input);
} catch(IOException e) {
Log.error(e);
} finally {
if (input != null) {
try {
input.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
return config.getProperty(value).trim();
}
}
This works, in that it won't overwrite the file if you edit it, but if you delete a single entry you have to delete the whole file to get that entry re-added.
My ultimate goal is to be able to close the program, edit the config file, and reopen the program with the new values, without it crashing when an argument it relies on has been deleted from the file. (I hope that makes sense; it's basically how most video games handle their config.)
You need to validate the value you get from the Properties before using it, e.g. you can't call .trim() on a value that isn't there.
public class Config {
static final File folder = new File(System.getProperty("user.home") + "/Documents/Electrocode Productions/template");
static final File file = new File(folder, "config.properties");
public static void add() throws IOException {
if (file.exists())
return;
// create directories as needed.
folder.mkdirs();
Properties config = new Properties();
config.setProperty("log", "true");
try (OutputStream out = new FileOutputStream(file)) {
config.store(out, null);
}
}
public static String get(String key, String defaultValue) {
if (!file.exists())
return defaultValue;
Properties config = new Properties();
try (InputStream in = new FileInputStream(file)) {
config.load(in); // load from "in"; config is declared outside the try so it is visible below
} catch(IOException e) {
Log.error(e);
return defaultValue;
}
String value = config.getProperty(key);
if (value == null)
return defaultValue;
value = value.trim();
if (value.isEmpty())
return defaultValue;
return value;
}
}
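A hypothetical usage of this class (the "log" key comes from add() above; the default value is whatever the caller can tolerate):
public static void main(String[] args) throws IOException {
Config.add(); // creates config.properties with log=true only if it does not exist yet
boolean log = Boolean.parseBoolean(Config.get("log", "true")); // survives a deleted entry
System.out.println("logging enabled: " + log);
}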

OSB ALSBConfigurationMBean importing configuration using java

I am trying to import the configuration sbconfig.jar into OSB using Java, for automated tests. Before this I implemented the FindAndReplace operation in Java using the Oracle documentation.
But in the case of importing a configuration from that same article, when I run the presented code I get an error.
What I am trying to do:
Connect to OSB (Oracle Service Bus 11.1)
Read file
Create session
Import prepared file
Activate session
On the fourth step I get an error:
Cannot import to deployed configuration
Maybe someone has already implemented such a thing?
I would be grateful for any help.
package update.configuration.osb;
import com.bea.wli.config.Ref;
import com.bea.wli.config.customization.FindAndReplaceCustomization;
import com.bea.wli.config.env.EnvValueQuery;
import com.bea.wli.config.importexport.ImportResult;
import com.bea.wli.config.resource.Diagnostics;
import com.bea.wli.sb.management.configuration.*;
import com.bea.wli.sb.util.EnvValueTypes;
import com.bea.wli.config.customization.Customization;
import java.io.*;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Hashtable;
import java.util.List;
import javax.management.*;
import javax.management.remote.*;
import javax.naming.Context;
import weblogic.management.jmx.MBeanServerInvocationHandler;
import weblogic.management.mbeanservers.domainruntime.DomainRuntimeServiceMBean;
import com.bea.wli.sb.management.importexport.ALSBJarInfo;
import com.bea.wli.sb.management.importexport.ALSBImportPlan;
import java.util.Map;
public class OSBConfigUpdateNew {
private static JMXConnector initConnection(String hostname, int port,
String username, String password) throws IOException,
MalformedURLException {
JMXServiceURL serviceURL = new JMXServiceURL("t3", hostname, port,
"/jndi/" + DomainRuntimeServiceMBean.MBEANSERVER_JNDI_NAME);
Hashtable<String, String> h = new Hashtable<String, String>();
h.put(Context.SECURITY_PRINCIPAL, username);
h.put(Context.SECURITY_CREDENTIALS, password);
h.put(JMXConnectorFactory.PROTOCOL_PROVIDER_PACKAGES,
"weblogic.management.remote");
return JMXConnectorFactory.connect(serviceURL, h);
}
static private void simpleImportExport(String importFileName, String host, int port,
String username, String password) {
JMXConnector conn = null;
SessionManagementMBean sm = null;
File importFile = new File(importFileName);
byte[] bytes = readBytes(importFile);
String sessionName = "newsession";
String statusmsg = "";
try {
conn = initConnection(host, port, username, password);
MBeanServerConnection mbconn = conn.getMBeanServerConnection();
DomainRuntimeServiceMBean domainService = (DomainRuntimeServiceMBean) MBeanServerInvocationHandler
.newProxyInstance(mbconn, new ObjectName(
DomainRuntimeServiceMBean.OBJECT_NAME));
sm = (SessionManagementMBean) domainService.findService(
SessionManagementMBean.NAME, SessionManagementMBean.TYPE,
null);
sm.createSession(sessionName);
ALSBConfigurationMBean alsbSession = getConfigMBean(sessionName, conn);
// ALSBConfigurationMBean alsbSession = (ALSBConfigurationMBean) domainService.findService(ALSBConfigurationMBean.NAME + "." + "newsession", ALSBConfigurationMBean.TYPE, null);
try {
alsbSession.uploadJarFile(bytes);
ALSBJarInfo jarInfo = alsbSession.getImportJarInfo();
ALSBImportPlan importPlan = jarInfo.getDefaultImportPlan();
ImportResult result = alsbSession.importUploaded(importPlan);
if (result.getImported().size() > 0) {
System.out.println("The following resources have been successfully imported.");
for (Ref ref : result.getImported()) {
System.out.println("\t" + ref);
}
}
if (result.getFailed().size() > 0) {
System.out.println("The following resources have failed to be imported.");
for (Map.Entry<Ref, Diagnostics> e : result.getFailed().entrySet()) {
Ref ref = e.getKey();
Diagnostics d = e.getValue();
System.out.println("\t" + ref + ". reason: " + d);
}
System.out.println("Discarding the session.");
sm.discardSession(sessionName);
System.exit(1);
}
} catch (Exception e) {
e.printStackTrace();
}
try {
sm.activateSession(sessionName, "description");
ALSBConfigurationMBean alsbcore = getConfigMBean(null, conn);
byte[] contentsProj = alsbcore.exportProjects(Collections.singleton(Ref.makeProjectRef("Project")), null);
} catch (Exception e) {
e.printStackTrace();
}
} catch (Exception ex) {
if (null != sm) {
try {
sm.discardSession(sessionName);
} catch (Exception e) {
System.out.println("Able to discard the session");
}
}
statusmsg = "Not able to perform the operation";
ex.printStackTrace();
} finally {
if (null != conn)
try {
conn.close();
} catch (Exception e) {
e.printStackTrace();
}
}
}
private static ALSBConfigurationMBean getConfigMBean(String sessionName, JMXConnector conn) throws Exception {
MBeanServerConnection mbconn = conn.getMBeanServerConnection();
DomainRuntimeServiceMBean domainService =
(DomainRuntimeServiceMBean) MBeanServerInvocationHandler.newProxyInstance(
mbconn, new ObjectName(DomainRuntimeServiceMBean.OBJECT_NAME));
return (ALSBConfigurationMBean) domainService.findService(
ALSBConfigurationMBean.NAME,
ALSBConfigurationMBean.TYPE, null);
}
private static byte[] readBytes(File importFile) {
ByteArrayOutputStream ous = null;
InputStream ios = null;
try {
byte[] buffer = new byte[4096];
ous = new ByteArrayOutputStream();
ios = new FileInputStream(importFile);
int read = 0;
while ((read = ios.read(buffer)) != -1)
ous.write(buffer, 0, read);
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (ous != null)
ous.close();
} catch (IOException e) {
}
try {
if (ios != null)
ios.close();
} catch (IOException e) {
}
}
return ous.toByteArray();
}
// private static Ref constructRef(String refType, String serviceuri) {
// Ref ref = null;
// String[] uriData = serviceuri.split("/");
// ref = new Ref(refType, uriData);
// return ref;
// }
public static void main(String[] args) {
simpleImportExport("C:\\sbconfig.jar", "127.0.0.1",
7001, "user", "password");
}
}
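One thing worth checking, and a plausible source of "Cannot import to deployed configuration": getConfigMBean() accepts sessionName but never uses it, so findService() returns the core (deployed) configuration MBean and the import runs outside the session. The commented-out line in simpleImportExport() already shows the session-scoped service name. A sketch of a session-aware lookup along those lines:
private static ALSBConfigurationMBean getConfigMBean(String sessionName, JMXConnector conn) throws Exception {
MBeanServerConnection mbconn = conn.getMBeanServerConnection();
DomainRuntimeServiceMBean domainService =
(DomainRuntimeServiceMBean) MBeanServerInvocationHandler.newProxyInstance(
mbconn, new ObjectName(DomainRuntimeServiceMBean.OBJECT_NAME));
// A session-scoped MBean is registered under NAME + "." + sessionName;
// passing null falls back to the core (activated) configuration.
String name = (sessionName == null)
? ALSBConfigurationMBean.NAME
: ALSBConfigurationMBean.NAME + "." + sessionName;
return (ALSBConfigurationMBean) domainService.findService(name,
ALSBConfigurationMBean.TYPE, null);
}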

Dynamically compiling and Running a Hadoop job from another Java File

I am trying to write a Java file that receives the source code of a MapReduce job, compiles it dynamically, and runs the job on a Hadoop cluster. To reach this, I have written three methods called compile(), makeJAR() and run_Hadoop_Job(). Everything works fine with the compilation and creation of the JAR file. However, when the job is submitted to Hadoop, as soon as it starts it fails to find the required Mapper/Reducer classes and throws a ClassNotFoundException for both Mapper_Class and Reducer_Class (java.lang.ClassNotFoundException: reza.rCloud.Mapper_Reducer_Classes$Mapper_Class.class). I know there must be something wrong with how I have referenced the required Mapper/Reducer classes, but I was not able to figure it out after several attempts. Any help/suggestion on how to solve the issue is highly appreciated.
Regarding the details of the project: I have a file called "rCloud_test/src/reza/Mapper_Reducer_Classes.java" that contains the source code for Mapper_Class and Reducer_Class. This file is ultimately received at runtime, but for now I copied the Hadoop WordCount example into it and stored it locally in the same folder as my main class file: rCloud_test/src/reza/Platform2.java.
Here below you can see the main() method of the Platform2.java which is my main class for this project:
public static void main(String[] args){
System.out.println("Code Execution Started");
String className = "Mapper_Reducer_Classes";
Platform2 myPlatform = new Platform2();
//step 1: compile the received class file dynamically:
boolean compResult = myPlatform.compile(className);
System.out.println(className + ".java compilation result: "+compResult);
//step 2: make a JAR file out of the compiled file:
if (compResult) {
compResult = myPlatform.makeJAR("jar_file", myPlatform.compilation_Output_Folder);
System.out.println("JAR creation result: "+compResult);
}
//step 3: Now let's run the Hadoop job:
if (compResult) {
compResult = myPlatform.run_Hadoop_Job(className);
System.out.println("Running on Hadoop result: "+compResult);
}
The method that is causing all the problems is run_Hadoop_Job(), shown below:
private boolean run_Hadoop_Job(String className){
try{
System.out.println("*Starting to run the code on Hadoop...");
String[] argsTemp = { "project_test/input", "project_test/output" };
Configuration conf = new Configuration();
conf.set("fs.default.name", "hdfs://localhost:54310");
conf.set("mapred.job.tracker", "localhost:54311");
conf.set("mapred.jar", jar_Output_Folder + "/jar_file"+".jar");
conf.set("libjars", required_Execution_Classes);
//THIS IS WHERE IT CAN'T FIND THE MENTIONED CLASSES, ALTHOUGH THEY EXIST BOTH ON DISK
// AND IN THE CREATED JAR FILE:??????
System.out.println("Getting Mapper/Reducer package name: " +
Mapper_Reducer_Classes.class.getName());
conf.set("mapreduce.map.class", "reza.rCloud.Mapper_Reducer_Classes$Mapper_Class");
conf.set("mapreduce.reduce.class", "reza.rCloud.Mapper_Reducer_Classes$Reducer_Class");
Job job = new Job(conf, "Hadoop Example for dynamically and programmatically compiling-running a job");
job.setJarByClass(Platform2.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(IntWritable.class);
FileInputFormat.addInputPath(job, new Path(argsTemp[0]));
FileSystem fs = FileSystem.get(conf);
Path out = new Path(argsTemp[1]);
fs.delete(out, true);
FileOutputFormat.setOutputPath(job, new Path(argsTemp[1]));
//job.submit();
System.out.println("*and now submitting the job to Hadoop...");
System.exit(job.waitForCompletion(true) ? 0 : 1);
System.out.println("Job Finished!");
} catch (Exception e) {
System.out.println("****************Exception!" );
e.printStackTrace();
return false;
}
return true;
}
if needed, here's the source code for the compile() method:
private boolean compile(String className) {
String fileToCompile = JOB_FOLDER + "/" +className+".java";
JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
FileOutputStream errorStream = null;
try{
errorStream = new FileOutputStream(JOB_FOLDER + "/logs/Errors.txt");
} catch(FileNotFoundException e){
//if problem creating the file, default wil be console
}
int compilationResult =
compiler.run( null, null, errorStream,
"-classpath", required_Compilation_Classes,
"-d", compilation_Output_Folder,
fileToCompile);
if (compilationResult == 0) {
//Compilation is successful:
return true;
} else {
//Compilation Failed:
return false;
}
}
and the source code for makeJAR() method:
private boolean makeJAR(String outputFileName, String inputDirectory) {
Manifest manifest = new Manifest();
manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION,
"1.0");
JarOutputStream target = null;
try {
target = new JarOutputStream(new FileOutputStream(
jar_Output_Folder+ "/"
+ outputFileName+".jar" ), manifest);
add(new File(inputDirectory), target);
} catch (Exception e) { return false; }
finally {
if (target != null)
try{
target.close();
} catch (Exception e) { return false; }
}
return true;
}
private void add(File source, JarOutputStream target) throws IOException
{
BufferedInputStream in = null;
try
{
if (source.isDirectory())
{
String name = source.getPath().replace("\\", "/");
if (!name.isEmpty())
{
if (!name.endsWith("/"))
name += "/";
JarEntry entry = new JarEntry(name);
entry.setTime(source.lastModified());
target.putNextEntry(entry);
target.closeEntry();
}
for (File nestedFile: source.listFiles())
add(nestedFile, target);
return;
}
JarEntry entry = new JarEntry(source.getPath().replace("\\", "/"));
entry.setTime(source.lastModified());
target.putNextEntry(entry);
in = new BufferedInputStream(new FileInputStream(source));
byte[] buffer = new byte[1024];
while (true)
{
int count = in.read(buffer);
if (count == -1)
break;
target.write(buffer, 0, count);
}
target.closeEntry();
}
finally
{
if (in != null)
in.close();
}
}
and finally the fixed parameters used for accessing the files:
private String JOB_FOLDER = "/Users/reza/My_Software/rCloud_test/src/reza/rCloud";
private String HADOOP_SOURCE_FOLDER = "/Users/reza/My_Software/hadoop-0.20.2";
private String required_Compilation_Classes = HADOOP_SOURCE_FOLDER + "/hadoop-0.20.2-core.jar";
private String required_Execution_Classes = required_Compilation_Classes + "," +
"/Users/reza/My_Software/ActorFoundry_dist_ver/lib/commons-cli-1.1.jar," +
"/Users/reza/My_Software/ActorFoundry_dist_ver/lib/commons-logging-1.1.1.jar";
public String compilation_Output_Folder = "/Users/reza/My_Software/rCloud_test/dyn_classes";
private String jar_Output_Folder = "/Users/reza/My_Software/rCloud_test/dyn_jar";
As a result of running the Platform2, the structure of the project on disk looks as below:
rCloud_test/classes/reza/rCloud/Platform2.class: contain the Platform2 class
rCloud_test/dyn_classes/reza/rCloud/ contains the classes for Mapper_Reducer_Classes.class, Mapper_Reducer_Classes$Mapper_Class.class, and Mapper_Reducer_Classes$Reducer_Class.class
rCloud_test/dyn_jar/jar_file.jar contains the created jar file
REVISED: here's the source code for rCloud_test/src/reza/rCloud/Mapper_Reducer_Classes.java:
package reza.rCloud;
import java.io.IOException;
import java.lang.InterruptedException;
import java.util.StringTokenizer;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
public class Mapper_Reducer_Classes {
/**
* The map class of WordCount.
*/
public static class Mapper_Class
extends Mapper<Object, Text, Text, IntWritable> {
private final static IntWritable one = new IntWritable(1);
private Text word = new Text();
public void map(Object key, Text value, Context context)
throws IOException, InterruptedException {
StringTokenizer itr = new StringTokenizer(value.toString());
while (itr.hasMoreTokens()) {
word.set(itr.nextToken());
context.write(word, one);
}
}
}
/**
* The reducer class of WordCount
*/
public static class Reducer_Class
extends Reducer<Text, IntWritable, Text, IntWritable> {
public void reduce(Text key, Iterable<IntWritable> values, Context context)
throws IOException, InterruptedException {
int sum = 0;
for (IntWritable value : values) {
sum += value.get();
}
context.write(key, new IntWritable(sum));
}
}
}
Try to set them by using the setClass() method:
conf.setClass("mapreduce.map.class",
Class.forName("reza.rCloud.Mapper_Reducer_Classes$Mapper_Class"),
Mapper.class);
conf.setClass("mapreduce.reduce.class",
Class.forName("reza.rCloud.Mapper_Reducer_Classes$Reducer_Class"),
Reducer.class);
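Alternatively, since the surrounding code already constructs a Job, a sketch of the same fix through the Job API (same reflectively loaded classes; setMapperClass()/setReducerClass() are part of org.apache.hadoop.mapreduce.Job):
job.setMapperClass(
Class.forName("reza.rCloud.Mapper_Reducer_Classes$Mapper_Class").asSubclass(Mapper.class));
job.setReducerClass(
Class.forName("reza.rCloud.Mapper_Reducer_Classes$Reducer_Class").asSubclass(Reducer.class));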
