two actions for one button android (android) - java

There is a problem with the code for android applications
I tried to assign multiple actions to a single button. Here's the code:
import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
public class MainActivity extends Activity implements OnClickListener {
AudioRecord audioRecord;
private Thread recordingThread = null;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
start_button = (Button) findViewById(R.id.button1);
stop_button = (Button) findViewById(R.id.button2);
start_button.setOnClickListener(this);
stop_button.setOnClickListener(this);
createAudioRecorder();
}
private void createAudioRecorder() {
int sampleRate = 16000;
int channelConfig = AudioFormat.CHANNEL_IN_MONO;
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
int minInternalBufferSize = AudioRecord.getMinBufferSize(sampleRate,
channelConfig, audioFormat);
int internalBufferSize = minInternalBufferSize * 4;
Log.d(TAG, "minInternalBufferSize = " + minInternalBufferSize
+ ", internalBufferSize = " + internalBufferSize
+ ", myBufferSize = " + myBufferSize);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate, channelConfig, audioFormat, internalBufferSize);
}
#Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.button1:
reading = true;
startRecorder();
break;
case R.id.button2:
reading = false;
stopRecorder();
break;
}
}
private void startRecorder() {
recordingThread = new Thread(new Runnable() {
#Override
public void run() {
AudioData();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
protected void AudioData() {
audioRecord.startRecording();
byte data[] = new byte[myBufferSize];
byte[] myBuffer = new byte[myBufferSize];
int readCount = 0;
int totalCount = 0;
while (reading) {
readCount = audioRecord.read(myBuffer, 0, myBufferSize);
data = myBuffer;
totalCount += readCount;
}
}
private void stopRecorder() {
if (null != audioRecord) {
reading = false;
audioRecord.stop();
audioRecord.release();
audioRecord = null;
recordingThread = null;
}
}
#Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
}
When I run this code it works the first time, but not on subsequent runs.
Logs show that the error here somewhere (NullPointerException):
audioRecord.startRecording();
Help solve the problem!

When button2 is clicked, stopRecorder() is called, which assigns null to audioRecord. Therefore, the next time button1 is clicked, the new thread that is spawned will throw a NullPointerException when it calls AudioData().
Perhaps you can eliminate createAudioRecorder() and move its logic to the beginning of AudioData().
Also, I would rename AudioData(), which looks like a class name, to recordAudioData(). That would conform better to standard Java naming conventions.

Related

How do I convert recorded PCM audio to MP3 for storage on the device in Android Studio with Java?

I have a problem. I take sound from the microphone of the device with Android Studio and record it as pcm. Since the audio recording is in pcm format, I cannot play it on the device. I'm trying to convert this to mp3 for playback. I tried different libraries but couldn't. Can you help me?
My recording page code:
import android.Manifest;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.Toast;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
// Records raw PCM (44.1 kHz, mono, 16-bit) from the microphone into a file in
// the external cache directory. The but_* methods are button handlers,
// presumably wired via android:onClick in the layout - TODO confirm.
// NOTE(review): "#Override" in this paste is a Markdown-mangled "@Override".
public class MainActivity extends AppCompatActivity {
// Recording parameters: mic source, 44.1 kHz, mono, 16-bit PCM.
private static final int AUDIO_SOURCE = MediaRecorder.AudioSource.MIC;
private static final int SAMPLE_RATE = 44100;
private static final int CHANNEL_CONFIG = AudioFormat.CHANNEL_IN_MONO;
private static final int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
// Minimum recorder buffer size reported by the platform for the settings above.
private static final int BUFFER_SIZE = AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNEL_CONFIG, AUDIO_FORMAT);
private static final int REQUEST_RECORD_AUDIO_PERMISSION = 200;
private AudioRecord audioRecord;
// Shared stop flag: written on the UI thread, read by the recording thread.
// NOTE(review): not volatile - the worker may not observe the update promptly.
private boolean isRecording = false;
private Thread recordingThread;
// Absolute path of the most recent recording; set by writeAudioDataToFile().
private String filePath;
// Band accepted by the "frequency" filter loop below (values compared are raw
// sample amplitudes, not frequencies - see note in startRecording()).
private static final int FILTER_LOW_FREQ = 100;
private static final int FILTER_HIGH_FREQ = 20000;
// NOTE(review): bufferSize is never assigned anywhere in this class, so it
// stays 0; the short[] buffer in startRecording() is therefore empty and its
// read loop processes nothing.
private int bufferSize;
private int sampleRate;
private int channelConfig;
private int audioFormat;
// Maximum value of a signed 16-bit sample.
private static final int MAX_VOLUME = 32767;
// Nyquist frequency for the chosen sample rate.
final int MAX_FREQ = SAMPLE_RATE / 2;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
}
// Start-button handler: begins recording and confirms with a toast.
public void but_recPressed(View V) {
startRecording();
Toast.makeText(getApplicationContext(), "Kayıt Başlatıldı", Toast.LENGTH_SHORT).show();
}
// Stop-button handler: stops recording, then reports whether the output file
// exists at the recorded path.
public void but_stopPressed(View V) {
stopRecording();
Toast.makeText(this, "Kayıt Durduruldu", Toast.LENGTH_SHORT).show();
File file = new File(filePath);
if (file.exists()) {
Toast.makeText(this, "Kaydedilen Dosya Yolu: " + filePath, Toast.LENGTH_LONG).show();
} else {
Toast.makeText(this, "Kaydedilen Dosya Bulunamadı!", Toast.LENGTH_LONG).show();
}
}
// Requests RECORD_AUDIO permission if missing; otherwise creates the
// AudioRecord, starts capture, and spawns a worker thread.
private void startRecording() {
if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.RECORD_AUDIO}, REQUEST_RECORD_AUDIO_PERMISSION);
} else {
audioRecord = new AudioRecord(AUDIO_SOURCE, SAMPLE_RATE, CHANNEL_CONFIG, AUDIO_FORMAT, BUFFER_SIZE);
audioRecord.startRecording();
isRecording = true;
new Thread(new Runnable() {
#Override
public void run() {
// writeAudioDataToFile() loops while isRecording is true, so it only
// returns after recording has been stopped; the filter loop below it
// therefore never runs during an active recording.
writeAudioDataToFile();
// NOTE(review): bufferSize is 0 here (never initialized), so this
// buffer is empty and read() is asked for 0 shorts.
short[] buffer = new short[bufferSize];
while (isRecording) {
// NOTE(review): Toast.makeText(...).show() from a plain worker
// thread (no Looper) is expected to crash - UI work belongs on the
// main thread; confirm against the reported behavior.
Toast.makeText(getApplicationContext(), "döngüdeyim", Toast.LENGTH_LONG).show();
int read = audioRecord.read(buffer, 0, bufferSize);
for (int i = 0; i < read; i++) {
Toast.makeText(getApplicationContext(), "Fordayım", Toast.LENGTH_LONG).show();
// NOTE(review): buffer[i] is a raw sample amplitude, not a
// frequency; comparing it against FILTER_LOW/HIGH_FREQ filters by
// loudness, not pitch.
int frequency = buffer[i];
if (frequency >= FILTER_LOW_FREQ && frequency <= FILTER_HIGH_FREQ) {
// NOTE(review): integer division - frequency / MAX_FREQ is 0 for
// |frequency| < 22050, so volume is almost always 0 and the
// sample is zeroed.
int volume = (int) (frequency / MAX_FREQ * MAX_VOLUME);
buffer[i] = (short) (buffer[i] * volume);
// Do something with the filtered frequency
Toast.makeText(getApplicationContext(), "Frekans Yakalandı", Toast.LENGTH_LONG).show();
}
else{
int volume = (int) (frequency / MAX_FREQ * MAX_VOLUME);
buffer[i] = (short) (buffer[i] * volume);
Toast.makeText(getApplicationContext(), "Frekans Yakalanmadı", Toast.LENGTH_LONG).show();
}
}
}
}
}).start();
}
}
// Stops capture and releases the AudioRecord; any failure is only logged.
private void stopRecording() {
try {
isRecording = false;
audioRecord.stop();
audioRecord.release();
audioRecord = null;
recordingThread = null;
} catch (Exception e) {
e.printStackTrace();
Log.e("MainActivity", "Error while recording audio: " + e.getMessage());
}
}
// Folder-button handler: opens the recordings list screen.
public void but_folderPressed(View v) {
Intent intent = new Intent(this, list.class);
startActivity(intent);
}
// Streams raw PCM bytes from the AudioRecord into a timestamped .pcm file in
// the external cache dir until isRecording becomes false.
private void writeAudioDataToFile() {
byte data[] = new byte[BUFFER_SIZE];
filePath = getExternalCacheDir().getAbsolutePath();
filePath += "/" + System.currentTimeMillis() + ".sesimvar" +".pcm";
try (FileOutputStream os = new FileOutputStream(filePath)) {
while (isRecording) {
int read = audioRecord.read(data, 0, BUFFER_SIZE);
if (AudioRecord.ERROR_INVALID_OPERATION != read) {
// NOTE(review): writes the whole buffer regardless of how many bytes
// were actually read; os.write(data, 0, read) would avoid trailing
// garbage when read < BUFFER_SIZE.
os.write(data);
}
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
I tried FFmpeg and the LAME library. I couldn't add the LAME library properly. FFmpeg, on the other hand, gave error lines.
Did the answers to "How to convert .pcm file to .wav or .mp3?" not help you out?
For the future, it would help if you post the stack-trace with the thrown exception or error.

How to change screen orientation from landscape to portrait?

I have applied the following post answers but unable to solve the problem.
OpenCV camera orientation issue
I am getting the following exception.
Exception locking surface
java.lang.IllegalArgumentException
at android.view.Surface.nativeLockCanvas(Native Method)
at android.view.Surface.lockCanvas(Surface.java:264)
at android.view.SurfaceView$4.internalLockCanvas(SurfaceView.java:825)
at android.view.SurfaceView$4.lockCanvas(SurfaceView.java:793)
at org.opencv.android.CameraBridgeViewBase.deliverAndDrawFrame(CameraBridgeViewBase.java:403)
at org.opencv.android.JavaCameraView$CameraWorker.run(JavaCameraView.java:365)
at java.lang.Thread.run(Thread.java:818)
Here is opencv library function which throw the exception:
// OpenCV CameraBridgeViewBase method (quoted library code): converts the
// incoming camera frame to a Bitmap and draws it onto the SurfaceView's
// canvas, optionally scaled, then draws the FPS meter. This is the method
// whose getHolder().lockCanvas() call throws the IllegalArgumentException
// ("Exception locking surface") reported in the question.
protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
Mat modified;
// Let the registered listener (onCameraFrame) process the frame; otherwise
// display the raw RGBA frame.
if (mListener != null) {
modified = mListener.onCameraFrame(frame);
} else {
modified = frame.rgba();
}
boolean bmpValid = true;
Canvas canvas = null ;
if (modified != null) {
try {
// Mat -> Bitmap conversion throws if the Mat and cached Bitmap
// dimensions/types disagree, e.g. after a rotation changes frame size.
Utils.matToBitmap(modified, mCacheBitmap);
canvas = getHolder().lockCanvas();
} catch(Exception e) {
Log.e(TAG, "Mat type: " + modified);
Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmpValid = false;
}
}
if (bmpValid && mCacheBitmap != null) {
if (canvas != null) {
// Clear the previous contents, then blit the bitmap centered on the
// canvas; when mScale is nonzero the destination rect is scaled by it.
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
Log.d(TAG, "mStretch value: " + mScale);
if (mScale != 0) {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2),
(int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2 + mScale*mCacheBitmap.getWidth()),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2 + mScale*mCacheBitmap.getHeight())), null);
} else {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
(canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()), null);
}
// Optional FPS overlay.
if (mFpsMeter != null) {
mFpsMeter.measure();
mFpsMeter.draw(canvas, 20, 30);
}
getHolder().unlockCanvasAndPost(canvas);
}
}
}
Here is my main activity where i use opencv library to call this method:
package org.opencv.samples.facedetect;
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.graphics.Camera;
import android.media.AudioManager;
import android.net.ConnectivityManager;
import android.os.Build;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.provider.Settings;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import org.opencv.samples.facedetect.DetectionBasedTracker;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
// OpenCV face-detection activity: streams frames from the front camera, runs
// a Haar-cascade (Java) or native detector on each frame, outlines detected
// faces, and maintains a running "face count" shown in faceCounterTv.
// NOTE(review): "#Override" throughout this paste is a Markdown-mangled
// "@Override".
public class FdActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2 , SharedPreferences.OnSharedPreferenceChangeListener {
private static final String TAG = "OCVSample::Activity";
// Green rectangle color used to outline detected faces.
private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);
public static final int JAVA_DETECTOR = 0;
public static final int NATIVE_DETECTOR = 1;
//private MenuItem mItemExit;
private MenuItem mItemSettings ;
private MenuItem showCounterValues ;
private TextView faceCounterTv ;
private Button resetButton ;
private Button savebtn ;
private Button quitButton ;
// Current frame in color (RGBA) and grayscale.
private Mat mRgba;
private Mat mGray;
private File mCascadeFile;
private CascadeClassifier mJavaDetector;
private DetectionBasedTracker mNativeDetector;
private int mDetectorType = JAVA_DETECTOR;
// private String[] mDetectorName;
// Minimum face size as a fraction of frame height, and its absolute pixel value.
private float mRelativeFaceSize = 0.2f;
private int mAbsoluteFaceSize = 0;
// Cascade tuning parameters; initialized in the constructor.
private float scaleFactor ;
private int minNeighbour ;
private int delayTime ;
// Edge-detection state for counting: a face is counted only on a transition
// from "no face in previous frame" to "face in current frame".
private boolean isFaces_detect ;
private boolean isFaces_detect_pre ;
private boolean count_Face_Logic ;
private float countFace = 0.0f ;
private long startTime ;
private AudioManager mAudioManager ;
private static final int MY_PERMISSIONS_REQUEST_ACCOUNTS = 1;
private CameraBridgeViewBase mOpenCvCameraView;
// Invoked by the OpenCV loader. On success: loads the native tracker library,
// copies the Haar cascade out of res/raw into app storage so it can be opened
// by file path, builds both detectors, and enables the camera view.
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
#Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS: {
Log.i(TAG, "OpenCV loaded successfully");
// Load native library after(!) OpenCV initialization
System.loadLibrary("detectionBasedTracker");
try {
// load cascade file from application resources
InputStream is = getResources().openRawResource(R.raw.haarcascade_frontalface_default);
File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
mCascadeFile = new File(cascadeDir, "haarcascade_frontalface_default.xml");
FileOutputStream os = new FileOutputStream(mCascadeFile);
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
os.close();
mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
if (mJavaDetector.empty()) {
Log.e(TAG, "Failed to load cascade classifier");
mJavaDetector = null;
} else
Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0);
cascadeDir.delete();
} catch (IOException e) {
e.printStackTrace();
Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
}
mOpenCvCameraView.enableView();
}
break;
default: {
super.onManagerConnected(status);
}
break;
}
}
};
// Initializes counting state and detector tuning defaults.
public FdActivity() {
isFaces_detect = false ;
isFaces_detect_pre = false ;
count_Face_Logic = true ;
startTime = System.currentTimeMillis();
mAbsoluteFaceSize = 200 ;
scaleFactor = 1.2f ;
minNeighbour = 1 ;
delayTime = 1 ;
Log.i(TAG, "Instantiated new " + this.getClass());
}
/**
* Called when the activity is first created.
* Wires up the toolbar, camera view, counter TextView and the three buttons,
* and selects camera index 1 (the front camera).
*/
#Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
//getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.face_detect_surface_view);
setSupportActionBar((Toolbar) findViewById(R.id.toolbar));
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.fd_activity_surface_view);
faceCounterTv = (TextView) findViewById(R.id.faceCountertv);
resetButton = (Button) findViewById(R.id.resetbtn);
savebtn = (Button) findViewById(R.id.savebtn);
quitButton = (Button) findViewById(R.id.quitbtn);
mAudioManager = (AudioManager) getSystemService(AUDIO_SERVICE);
// Reset button: zero the running face counter.
resetButton.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View view) {
countFace = 0 ;
Toast.makeText(getApplicationContext() , "Reset the Face Counter" , Toast.LENGTH_LONG).show();
}
});
// Save button: persist the current counter value (saveFaceCounter() is
// defined elsewhere in the project).
savebtn.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View view) {
saveFaceCounter();
Toast.makeText(getApplicationContext() , "Counter Value Saved" , Toast.LENGTH_SHORT).show();
}
});
// Quit button.
// NOTE(review): System.exit(0) is generally discouraged on Android in favor
// of finish() - TODO confirm this is intentional.
quitButton.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View view) {
System.exit(0);
}
});
mOpenCvCameraView.setVisibility(CameraBridgeViewBase.VISIBLE);
// mOpenCvCameraView.setAlpha(0);
mOpenCvCameraView.setCameraIndex(1);
mOpenCvCameraView.setCvCameraViewListener(this);
//if (checkAndRequestPermissions()){
// Toast.makeText(getApplicationContext() , "OnCreate" , Toast.LENGTH_LONG).show();
//setSharedPreferences();
//}
// check current state first
// boolean state = isAirplaneMode();
// // toggle the state
// if (state)
// toggleAirplaneMode(0, state);
// else
// toggleAirplaneMode(1, state);
}
// Release the camera while the activity is not in the foreground.
#Override
public void onPause() {
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
// (Re)initialize OpenCV: prefer the library bundled in the APK, otherwise
// fall back to the async OpenCV Manager; either path ends in mLoaderCallback.
#Override
public void onResume() {
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
mOpenCvCameraView.disableView();
PreferenceManager.getDefaultSharedPreferences(this).unregisterOnSharedPreferenceChangeListener(this);
}
// CvCameraViewListener2: allocate per-frame Mats when the stream starts.
public void onCameraViewStarted(int width, int height) {
mGray = new Mat();
mRgba = new Mat();
}
// CvCameraViewListener2: free per-frame Mats when the stream stops.
public void onCameraViewStopped() {
mGray.release();
mRgba.release();
}
// Per-frame callback: detects faces on the grayscale frame, draws rectangles
// on the RGBA frame, updates the counter, and returns the frame to display.
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
// Mat mRgbaT = mRgba.t();
// Core.flip(mRgba.t(), mRgbaT, 1);
// Imgproc.resize(mRgbaT, mRgbaT, mRgba.size());
mGray = inputFrame.gray();
//Core.transpose(mGray, mGray);
//Core.flip(mGray, mGray, 0);
// Lazily derive the absolute minimum face size from the frame height.
// (Only runs if mAbsoluteFaceSize is 0; the constructor sets it to 200.)
if (mAbsoluteFaceSize == 0) {
int height = mGray.rows();
if (Math.round(height * mRelativeFaceSize) > 0) {
mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
}
mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
}
MatOfRect faces = new MatOfRect();
if (mDetectorType == JAVA_DETECTOR) {
if (mJavaDetector != null)
mJavaDetector.detectMultiScale(mGray, faces, scaleFactor, minNeighbour, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
} else if (mDetectorType == NATIVE_DETECTOR) {
if (mNativeDetector != null)
mNativeDetector.detect(mGray, faces);
} else {
Log.e(TAG, "Detection method is not selected!");
}
// Outline each detection on the color frame.
Rect[] facesArray = faces.toArray();
for (int i = 0; i < facesArray.length; i++) {
Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
}
countDetectedFace(faces);
// TextView updates must happen on the UI thread; this callback runs on the
// camera worker thread.
runOnUiThread(new Runnable() {
#Override
public void run() {
faceCounterTv.setText(String.valueOf(countFace));
}
});
return mRgba;
}
// Updates countFace using an edge-triggered scheme: count only when the
// previous frame had no face and the current one does, rate-limited by
// delayTime so successive frames of the same face are not recounted.
public void countDetectedFace(MatOfRect faces){
// do{
// This block is to make sure the it only count face when it appears. e.g. : no detected face --> face --> no detected face (count as 1)
if (faces.empty()){
isFaces_detect = isFaces_detect_pre = false ;
}
else{
isFaces_detect = true ;
}
// Only count when previous frame = 0 and current frame = 1. Eliminate counting when successive frame have face detected
if ((isFaces_detect_pre == false) && (isFaces_detect == true) && (count_Face_Logic == true)){
countFace += 0.25 ; // four times it detect face equal to 1
startTime = System.currentTimeMillis(); // store new time value so that it do not count every miliseconds
isFaces_detect_pre = true ;
Log.d(TAG , String.valueOf(countFace));
}
// NOTE(review): delayTime is 1 (millisecond), so this window is almost
// always already elapsed - confirm whether seconds were intended.
if ((System.currentTimeMillis() - startTime) < delayTime){ // to make sure it doesnt count every frame, buffer of 1 seconds
count_Face_Logic = false ;
}
else{
count_Face_Logic = true ;
}
// }while(!isAppExit);
}
}
How can I get rid of this exception?

Getting int to UI thread

I am developing an Android application that brute-forces an MD5 sum created from an int.
The brute forcing part works fine. (I can sysout the final value and it's correct.)
I'm having problems getting the output value onto an alert dialog. Logcat says: Attempting to initialize hardware acceleration outside of the main thread, aborting
It's aborting on the last statement in my code, the one that actually shows the alert dialog;
builder.show();
Here's my MainActivity.java:
import android.app.Activity;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.Context;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.inputmethod.InputMethodManager;
import android.widget.EditText;
import android.widget.RadioButton;
import android.widget.Toast;
import java.io.UnsupportedEncodingException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
// Demo activity: either computes the MD5 digest of a short PIN, or
// brute-forces a numeric PIN from its MD5 hex digest on a background thread.
// NOTE(review): "#Override" in this paste is a Markdown-mangled "@Override".
public class MainActivity extends Activity {
String passwordToHash;
String result;
boolean goodPIN = false;
boolean startbruteforce = false;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
}
#Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
#Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
//My stuff
// Button handler (presumably wired via android:onClick - TODO confirm).
// "calculate" mode hashes the entered PIN; "crack" mode brute-forces the
// entered MD5 digest on a worker thread.
public void doIt(View v) throws NoSuchAlgorithmException, UnsupportedEncodingException
{
RadioButton r2 = (RadioButton) findViewById(R.id.calculate);
RadioButton r1 = (RadioButton) findViewById(R.id.crack);
final EditText input = (EditText) findViewById(R.id.inputTextArea);
final EditText output = (EditText) findViewById(R.id.outputTextArea);
//Toast.makeText(this, "Working on it!", Toast.LENGTH_LONG).show();
if(r2.isChecked())
{
// Refuse PINs longer than 4 digits (too slow to brute-force later).
if(input.getText().toString().length() > 4)
{
goodPIN = false;
output.setText("");
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle ("Uuuuuuhh....");
builder.setMessage("Hash not calculated because that PIN would take too long to brute force :(");
builder.setPositiveButton("Yeah, whatever...", null);
builder.show();
}
else
{
goodPIN = true;
}
if(goodPIN)
{
// Hide the soft keyboard before showing the result.
View view = this.getCurrentFocus();
if (view != null) {
InputMethodManager imm = (InputMethodManager)getSystemService(Context.INPUT_METHOD_SERVICE);
imm.hideSoftInputFromWindow(view.getWindowToken(), 0);
}
Toast.makeText(this, "Calculated MD5!", Toast.LENGTH_LONG).show();
passwordToHash = input.getText().toString();
MessageDigest digest = MessageDigest.getInstance("MD5");
byte[] inputBytes = passwordToHash.getBytes("UTF-8");
byte[] hashBytes = digest.digest(inputBytes);
StringBuffer stringBuffer = new StringBuffer();
// Hex-encode each byte; the +0x100/substring(1) trick keeps a leading
// zero for values below 0x10.
for (int i = 0; i < hashBytes.length; i++)
{
stringBuffer.append(Integer.toString((hashBytes[i] & 0xff) + 0x100, 16)
.substring(1));
}
result = stringBuffer.toString();
output.setText(result);
}
}
else if(r1.isChecked())
{
// Hide the soft keyboard, then brute-force on a background thread while a
// progress dialog spins.
View view = this.getCurrentFocus();
if (view != null) {
InputMethodManager imm = (InputMethodManager)getSystemService(Context.INPUT_METHOD_SERVICE);
imm.hideSoftInputFromWindow(view.getWindowToken(), 0);
}
final ProgressDialog dialog = ProgressDialog.show(MainActivity.this, "Working on it!", "Brute-forcing. Please wait...", true);
double starttime = System.currentTimeMillis();
final Thread thread = new Thread()
{
#Override
public void run()
{
String crackedPassword = "Hello";
String crackedPasswordHash = "a262";
int pinsTested = 1000;
int crackedPasswordInt = 1000;
String passwordToCrack;
//Get the password to crack
passwordToCrack = input.getText().toString();
long startTime = System.currentTimeMillis();
// Try candidate PINs 1001, 1002, ... until the MD5 hex digest matches.
while (!crackedPasswordHash.equals(passwordToCrack))
{
pinsTested++;
crackedPasswordInt++;
crackedPassword = Integer.toString(crackedPasswordInt);
MessageDigest digest = null;
try
{
digest = MessageDigest.getInstance("MD5");
}
catch (NoSuchAlgorithmException e)
{
e.printStackTrace();
}
byte[] inputBytes = new byte[0];
try
{
inputBytes = crackedPassword.getBytes("UTF-8");
}
catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
byte[] hashBytes = digest.digest(inputBytes);
StringBuffer stringBuffer = new StringBuffer();
for (int i = 0; i < hashBytes.length; i++)
{
stringBuffer.append(Integer.toString((hashBytes[i] & 0xff) + 0x100, 16)
.substring(1));
}
crackedPasswordHash = stringBuffer.toString();
//System.out.println(pinsTested + " PINs tested");
//System.out.println("Hash of: " + pinsTested + " is: " + crackedPasswordHash);
}
long endTime = System.currentTimeMillis();
long totalTime = endTime - startTime;
System.out.println("Done! " + pinsTested);
// NOTE(review): called from this worker thread - see note in updateUI().
updateUI(pinsTested);
//runOnUiThread(pinsTested);
}
};
// Shows the spinner for 4 s, then dismisses it and starts the brute force.
Thread animation = new Thread()
{
#Override
public void run()
{
try
{
Thread.sleep(4000);
}
catch (InterruptedException e) {
e.printStackTrace();
}
dialog.dismiss();
thread.start();
}
};
animation.start();
}
}
// Attempts to show the result dialog.
// NOTE(review): Looper.prepare() runs on the brute-force worker thread, so
// the Handler created here is bound to that background thread rather than
// the UI thread; test() then builds an AlertDialog off the main thread - the
// likely cause of the "hardware acceleration outside of the main thread"
// abort this question reports.
public void updateUI(final int pass) {
Looper.prepare();
final Handler myHandler = new Handler();
(new Thread(new Runnable() {
#Override
public void run() {
myHandler.post(new Runnable() {
#Override
public void run() {
test(pass);
}
});
}
})).start();
}
// Shows the cracked PIN in an alert dialog; must run on the UI thread.
public void test(int pass)
{
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle ("Done!");
builder.setMessage("PIN is: " + pass);
builder.setPositiveButton("Yeah, whatever...", null);
builder.show();
}
}
As the UI Thread says:to move data from a background thread to the UI thread, use a Handler that's running on the UI thread.
You create the Handler in the updateUI method, but updateUI is called from a thread other than the UI thread, so you get the error.
You need to try like this:
public class MainActivity extends Activity {
private Handler mHandler = new Handler() {
#Override
public void handleMessage(Message msg) {
super.handleMessage(msg);
switch (msg.what) {
case 1:
test((int)msg.obj);
}
}
}
public void updateUI(final int pass) {
Message msg = Message.obtain();
msg.what=1;
msg.obj = pass;
mHandler.sendMessage(msg);
}
}
According to the documentation,
A Handler allows you to send and process Message and Runnable objects associated with a thread's MessageQueue. Each Handler instance is associated with a single thread and that thread's message queue. When you create a new Handler, it is bound to the thread / message queue of the thread that is creating it -- from that point on, it will deliver messages and runnables to that message queue and execute them as they come out of the message queue.
So you are creating the handler in the "updateUI" method and that method is called from a thread other than the UI thread, in this case you need to declare your Handler as a member variable and initialize the Handler in the onCreate method.
Handler
Alternatively, you can do everything you want inside the run() method below, but it is not very safe:
runOnUiThread(new Runnable() {
#Override
public void run() {
}
});

Problems with sound recording from microphone - (android)

I have a small piece of code that reads some sounds from the microphone and writes a byte array. Unfortunately it is not working correctly. The project runs on the emulator, but something is wrong with writing to a byte array.
import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.TextView;
public class MainActivity extends Activity implements OnClickListener {
Button start_button;
Button stop_button;
TextView text;
boolean reading = false;
final String TAG = "TAG";
int myBufferSize = 8192;
AudioRecord audioRecord;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
start_button = (Button) findViewById(R.id.button1);
stop_button = (Button) findViewById(R.id.button2);
text = (TextView) findViewById(R.id.textView1);
start_button.setOnClickListener(this);
stop_button.setOnClickListener(this);
createAudioRecorder();
}
private void createAudioRecorder() {
int sampleRate = 16000;
int channelConfig = AudioFormat.CHANNEL_IN_MONO;
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
int minInternalBufferSize = AudioRecord.getMinBufferSize(sampleRate,
channelConfig, audioFormat);
int internalBufferSize = minInternalBufferSize * 4;
Log.d(TAG, "minInternalBufferSize = " + minInternalBufferSize
+ ", internalBufferSize = " + internalBufferSize
+ ", myBufferSize = " + myBufferSize);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate, channelConfig, audioFormat, internalBufferSize);
}
#Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.button1:
Log.d(TAG, "record start");
reading = true;
startRecorder();
case R.id.button2:
Log.d(TAG, "stop recorder");
reading = false;
stopRecorder();
}
}
private void startRecorder() {
reading = true;
Log.d(TAG, "recording...");
audioRecord.startRecording();
new Thread(new Runnable() {
#Override
public void run() {
if (audioRecord == null)
return;
byte[] myBuffer = new byte[myBufferSize];
byte[] data = new byte[myBufferSize];
int readCount = 0;
int totalCount = 0;
while (reading) {
readCount = audioRecord.read(myBuffer, 0, myBufferSize);
data = myBuffer;
totalCount += readCount;
Log.d(TAG, "readCount = " + readCount + ", totalCount = "
+ totalCount);
Log.d(TAG, "lenght: " + data.length);
Log.d(TAG, "data1: " + data[0]);
Log.d(TAG, "data2: " + data[1]);
}
}
}).start();
}
private void stopRecorder() {
reading = false;
Log.d(TAG, "record stop!");
audioRecord.stop();
}
#Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
#Override
protected void onDestroy() {
super.onDestroy();
reading = false;
if (audioRecord != null) {
audioRecord.release();
}
}
}
Logs:
03-23 15:48:51.028: D/TAG(782): minInternalBufferSize = 640, internalBufferSize = 2560, myBufferSize = 8192
03-23 15:48:52.928: D/TAG(782): record start
03-23 15:48:52.928: D/TAG(782): recording...
03-23 15:48:52.948: D/TAG(782): stop recorder
03-23 15:48:52.958: D/TAG(782): record stop!
03-23 15:48:53.018: D/TAG(782): readCount = 160, totalCount = 160
03-23 15:48:53.018: D/TAG(782): lenght: 8192
03-23 15:48:53.018: D/TAG(782): data1: 0
03-23 15:48:53.028: D/TAG(782): data2: 0
03-23 15:48:54.758: D/TAG(782): stop recorder
03-23 15:48:54.758: D/TAG(782): record stop!
Notice that bytes are still being written to the array after recording was stopped.
Also, why does 'stop recorder' appear in the log twice, even though I clicked the stop button only once?
Why not using FileOutputStream ?
To start the record :
// Answer snippet: record compressed audio straight into a FileOutputStream
// using MediaRecorder. __FILENAME__ is a placeholder for the output file name.
// NOTE(review): MODE_WORLD_READABLE / MODE_WORLD_WRITEABLE are deprecated and
// rejected on modern Android - TODO confirm the target API level.
FileOutputStream fos = openFileOutput(__FILENAME__,Context.MODE_WORLD_READABLE|Context.MODE_WORLD_WRITEABLE);
recorder = new MediaRecorder();
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
// MPEG-4 container with the platform's default audio encoder.
recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.DEFAULT);
// Write to the already-open stream's file descriptor.
recorder.setOutputFile(fos.getFD());
recorder.prepare();
recorder.start();
And don't forget to stop and release the MediaRecorder.

Android AudioRecord.stop() leads to crash

i am trying to Record Audio on Android with AudioRecord (http://developer.android.com/reference/android/media/AudioRecord.html#stop%28%29). I geared to Android AudioRecord example
My code is the following:
import java.io.IOException;
import android.os.Bundle;
import android.app.Activity;
import android.view.Menu;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.os.Environment;
import android.view.ViewGroup;
import android.widget.Button;
import android.view.View;
import android.view.View.OnClickListener;
import android.content.Context;
import android.util.Log;
import android.media.MediaRecorder;
import android.media.MediaPlayer;
import de.benediktbock.fft.fft;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder.AudioSource;
import java.io.IOException;
public class MainActivity extends Activity {
private static final String LOG_TAG = "FFTTEST";
private PlayButton mPlayButton = null;
private TextView realTeil = null;
private TextView imgTeil = null;
private fft mFFT = null;
private int channel_config = AudioFormat.CHANNEL_IN_MONO;
private int format = AudioFormat.ENCODING_PCM_16BIT;
private int sampleRate = 8000;
private int bufferSize = AudioRecord.getMinBufferSize(sampleRate, channel_config, format);
private AudioRecord audioInput = null; //new AudioRecord(AudioSource.MIC, sampleSize, channel_config, format, bufferSize);
private short[] audioBuffer = new short[bufferSize];
private Thread readingThread = null;
private boolean isRecording = false;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
LinearLayout ll = new LinearLayout(this);
mPlayButton = new PlayButton(this);
ll.addView(mPlayButton,new LinearLayout.LayoutParams(
ViewGroup.LayoutParams.WRAP_CONTENT,
ViewGroup.LayoutParams.WRAP_CONTENT,
0));
realTeil = new TextView(this);
ll.addView(realTeil,new LinearLayout.LayoutParams(
ViewGroup.LayoutParams.WRAP_CONTENT,
ViewGroup.LayoutParams.WRAP_CONTENT,
0));
imgTeil = new TextView(this);
ll.addView(imgTeil,new LinearLayout.LayoutParams(
ViewGroup.LayoutParams.WRAP_CONTENT,
ViewGroup.LayoutParams.WRAP_CONTENT,
0));
setContentView(ll);
realTeil.setText("Realteil");
imgTeil.setText("Imaginärteil");
}
#Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
class PlayButton extends Button {
boolean mStartPlaying = true;
OnClickListener clicker = new OnClickListener() {
public void onClick(View v) {
onPlay(mStartPlaying);
if (mStartPlaying) {
setText("Stop");
} else {
setText("Start");
}
mStartPlaying = !mStartPlaying;
}
};
public PlayButton(Context ctx) {
super(ctx);
setText("Start");
setOnClickListener(clicker);
}
}
private void onPlay(boolean start) {
if (start) {
startRecording();
} else {
stopRecording();
}
}
private void startRecording()
{
//create and start recorder
audioInput = new AudioRecord(AudioSource.MIC, sampleRate, channel_config, format, bufferSize);
audioInput.startRecording();
isRecording = true;
//start reading thread
readingThread = new Thread(new Runnable()
{
#Override
public void run()
{
readAudioToBuffer();
}
},"readAudio Thread");
readingThread.start();
}
private void readAudioToBuffer()
{
while(isRecording)
{
audioInput.read(audioBuffer, 0,bufferSize);
}
audioInput.stop(); // on this point the app crashes (after clicking "stop")
audioInput.release();
audioInput = null;
}
private void stopRecording()
{
isRecording = false;
readingThread = null;
}
}
I can start the recording without any problems, but when I stop it the app crashes. I have found that it crashes at the line audioInput.stop().
Does somebody know what the problem is? I have absolutely no idea.
// Answer's corrected version: only clear the flag here. Do NOT null out
// readingThread on the UI thread -- the reader thread is still draining
// its loop and tearing down the AudioRecord itself.
private void stopRecording()
{
isRecording = false;
// readingThread = null;
}
After setting isRecording = false, you immediately assign readingThread = null, but the thread cannot finish its work that quickly. That is what leads your app to crash.

Categories