Problems with sound recording from the microphone (Android / Java)

I have a small piece of code that reads some sound from the microphone and writes it into a byte array. Unfortunately, it is not working correctly. The project runs on the emulator, but something is wrong with the writing to the byte array.
import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.TextView;
public class MainActivity extends Activity implements OnClickListener {
Button start_button;
Button stop_button;
TextView text;
boolean reading = false;
final String TAG = "TAG";
int myBufferSize = 8192;
AudioRecord audioRecord;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
start_button = (Button) findViewById(R.id.button1);
stop_button = (Button) findViewById(R.id.button2);
text = (TextView) findViewById(R.id.textView1);
start_button.setOnClickListener(this);
stop_button.setOnClickListener(this);
createAudioRecorder();
}
private void createAudioRecorder() {
int sampleRate = 16000;
int channelConfig = AudioFormat.CHANNEL_IN_MONO;
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
int minInternalBufferSize = AudioRecord.getMinBufferSize(sampleRate,
channelConfig, audioFormat);
int internalBufferSize = minInternalBufferSize * 4;
Log.d(TAG, "minInternalBufferSize = " + minInternalBufferSize
+ ", internalBufferSize = " + internalBufferSize
+ ", myBufferSize = " + myBufferSize);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate, channelConfig, audioFormat, internalBufferSize);
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.button1:
Log.d(TAG, "record start");
reading = true;
startRecorder();
case R.id.button2:
Log.d(TAG, "stop recorder");
reading = false;
stopRecorder();
}
}
private void startRecorder() {
reading = true;
Log.d(TAG, "recording...");
audioRecord.startRecording();
new Thread(new Runnable() {
@Override
public void run() {
if (audioRecord == null)
return;
byte[] myBuffer = new byte[myBufferSize];
byte[] data = new byte[myBufferSize];
int readCount = 0;
int totalCount = 0;
while (reading) {
readCount = audioRecord.read(myBuffer, 0, myBufferSize);
data = myBuffer;
totalCount += readCount;
Log.d(TAG, "readCount = " + readCount + ", totalCount = "
+ totalCount);
Log.d(TAG, "lenght: " + data.length);
Log.d(TAG, "data1: " + data[0]);
Log.d(TAG, "data2: " + data[1]);
}
}
}).start();
}
private void stopRecorder() {
reading = false;
Log.d(TAG, "record stop!");
audioRecord.stop();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
@Override
protected void onDestroy() {
super.onDestroy();
reading = false;
if (audioRecord != null) {
audioRecord.release();
}
}
}
Logs:
03-23 15:48:51.028: D/TAG(782): minInternalBufferSize = 640, internalBufferSize = 2560, myBufferSize = 8192
03-23 15:48:52.928: D/TAG(782): record start
03-23 15:48:52.928: D/TAG(782): recording...
03-23 15:48:52.948: D/TAG(782): stop recorder
03-23 15:48:52.958: D/TAG(782): record stop!
03-23 15:48:53.018: D/TAG(782): readCount = 160, totalCount = 160
03-23 15:48:53.018: D/TAG(782): length: 8192
03-23 15:48:53.018: D/TAG(782): data1: 0
03-23 15:48:53.028: D/TAG(782): data2: 0
03-23 15:48:54.758: D/TAG(782): stop recorder
03-23 15:48:54.758: D/TAG(782): record stop!
Notice that the byte array is only written after the recording is stopped. And why does 'stop recorder' appear in the log twice, even though I clicked the stop button only once?

Why not use a FileOutputStream?
To start the recording:
FileOutputStream fos = openFileOutput(__FILENAME__,Context.MODE_WORLD_READABLE|Context.MODE_WORLD_WRITEABLE);
recorder = new MediaRecorder();
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.DEFAULT);
recorder.setOutputFile(fos.getFD());
recorder.prepare();
recorder.start();
And don't forget to stop and release the MediaRecorder.
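To stop, a minimal sketch (assuming the recorder and fos from the snippet above):
recorder.stop();     // finalizes the MPEG-4 container
recorder.release();  // frees the native recorder resources
recorder = null;
fos.close();         // close the file the audio was written to
As for 'stop recorder' appearing twice in the log: the switch in onClick has no break statements, so case R.id.button1 falls through into case R.id.button2 and stopRecorder() runs immediately after startRecorder(). The first 'stop recorder' comes from that fallthrough during the start click; the second one is the actual stop click. It also explains why only one buffer is read: reading is set back to false right after the recording thread starts. Adding a break to each case fixes it:
switch (v.getId()) {
    case R.id.button1:
        Log.d(TAG, "record start");
        reading = true;
        startRecorder();
        break; // without this, execution falls through into the stop case
    case R.id.button2:
        Log.d(TAG, "stop recorder");
        reading = false;
        stopRecorder();
        break;
}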

Related

How can I convert audio from PCM to MP3 for recording to device storage in Android Studio with Java?

I have a problem. I capture sound from the device's microphone with Android Studio and record it as PCM. Since the recording is in raw PCM format, I cannot play it on the device. I'm trying to convert it to MP3 for playback. I tried different libraries but couldn't get any of them to work. Can you help me?
My recording page code:
import android.Manifest;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.Toast;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
public class MainActivity extends AppCompatActivity {
private static final int AUDIO_SOURCE = MediaRecorder.AudioSource.MIC;
private static final int SAMPLE_RATE = 44100;
private static final int CHANNEL_CONFIG = AudioFormat.CHANNEL_IN_MONO;
private static final int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
private static final int BUFFER_SIZE = AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNEL_CONFIG, AUDIO_FORMAT);
private static final int REQUEST_RECORD_AUDIO_PERMISSION = 200;
private AudioRecord audioRecord;
private boolean isRecording = false;
private Thread recordingThread;
private String filePath;
private static final int FILTER_LOW_FREQ = 100;
private static final int FILTER_HIGH_FREQ = 20000;
private int bufferSize;
private int sampleRate;
private int channelConfig;
private int audioFormat;
private static final int MAX_VOLUME = 32767;
final int MAX_FREQ = SAMPLE_RATE / 2;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
}
public void but_recPressed(View V) {
startRecording();
Toast.makeText(getApplicationContext(), "Kayıt Başlatıldı", Toast.LENGTH_SHORT).show();
}
public void but_stopPressed(View V) {
stopRecording();
Toast.makeText(this, "Kayıt Durduruldu", Toast.LENGTH_SHORT).show();
File file = new File(filePath);
if (file.exists()) {
Toast.makeText(this, "Kaydedilen Dosya Yolu: " + filePath, Toast.LENGTH_LONG).show();
} else {
Toast.makeText(this, "Kaydedilen Dosya Bulunamadı!", Toast.LENGTH_LONG).show();
}
}
private void startRecording() {
if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.RECORD_AUDIO}, REQUEST_RECORD_AUDIO_PERMISSION);
} else {
audioRecord = new AudioRecord(AUDIO_SOURCE, SAMPLE_RATE, CHANNEL_CONFIG, AUDIO_FORMAT, BUFFER_SIZE);
audioRecord.startRecording();
isRecording = true;
new Thread(new Runnable() {
@Override
public void run() {
writeAudioDataToFile();
short[] buffer = new short[bufferSize];
while (isRecording) {
Toast.makeText(getApplicationContext(), "döngüdeyim", Toast.LENGTH_LONG).show();
int read = audioRecord.read(buffer, 0, bufferSize);
for (int i = 0; i < read; i++) {
Toast.makeText(getApplicationContext(), "Fordayım", Toast.LENGTH_LONG).show();
int frequency = buffer[i];
if (frequency >= FILTER_LOW_FREQ && frequency <= FILTER_HIGH_FREQ) {
int volume = (int) (frequency / MAX_FREQ * MAX_VOLUME);
buffer[i] = (short) (buffer[i] * volume);
// Do something with the filtered frequency
Toast.makeText(getApplicationContext(), "Frekans Yakalandı", Toast.LENGTH_LONG).show();
}
else{
int volume = (int) (frequency / MAX_FREQ * MAX_VOLUME);
buffer[i] = (short) (buffer[i] * volume);
Toast.makeText(getApplicationContext(), "Frekans Yakalanmadı", Toast.LENGTH_LONG).show();
}
}
}
}
}).start();
}
}
private void stopRecording() {
try {
isRecording = false;
audioRecord.stop();
audioRecord.release();
audioRecord = null;
recordingThread = null;
} catch (Exception e) {
e.printStackTrace();
Log.e("MainActivity", "Error while recording audio: " + e.getMessage());
}
}
public void but_folderPressed(View v) {
Intent intent = new Intent(this, list.class);
startActivity(intent);
}
private void writeAudioDataToFile() {
byte data[] = new byte[BUFFER_SIZE];
filePath = getExternalCacheDir().getAbsolutePath();
filePath += "/" + System.currentTimeMillis() + ".sesimvar" +".pcm";
try (FileOutputStream os = new FileOutputStream(filePath)) {
while (isRecording) {
int read = audioRecord.read(data, 0, BUFFER_SIZE);
if (AudioRecord.ERROR_INVALID_OPERATION != read) {
os.write(data);
}
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
I tried FFmpeg and the LAME library. I couldn't add the LAME library properly. FFmpeg, on the other hand, gave error lines.
Did How to convert .pcm file to .wav or .mp3? not help you out?
For the future, it would help if you posted the stack trace with the thrown exception or error.
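If WAV output would be enough for playback, the raw PCM can be made playable by prepending a standard 44-byte RIFF/WAVE header. A minimal sketch, assuming 16-bit mono PCM at the question's 44100 Hz sample rate (pcmToWav is a hypothetical helper, not part of any library):
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

// Wraps a raw 16-bit mono 44100 Hz PCM file in a WAV container.
static void pcmToWav(File pcmFile, File wavFile) throws IOException {
    final int sampleRate = 44100, channels = 1, bitsPerSample = 16;
    byte[] pcm = new byte[(int) pcmFile.length()];
    try (DataInputStream in = new DataInputStream(new FileInputStream(pcmFile))) {
        in.readFully(pcm);
    }
    ByteBuffer header = ByteBuffer.allocate(44).order(ByteOrder.LITTLE_ENDIAN);
    header.put("RIFF".getBytes("US-ASCII"));
    header.putInt(36 + pcm.length);                           // RIFF chunk size
    header.put("WAVE".getBytes("US-ASCII"));
    header.put("fmt ".getBytes("US-ASCII"));
    header.putInt(16);                                        // fmt sub-chunk size
    header.putShort((short) 1);                               // audio format: 1 = PCM
    header.putShort((short) channels);
    header.putInt(sampleRate);
    header.putInt(sampleRate * channels * bitsPerSample / 8); // byte rate
    header.putShort((short) (channels * bitsPerSample / 8));  // block align
    header.putShort((short) bitsPerSample);
    header.put("data".getBytes("US-ASCII"));
    header.putInt(pcm.length);                                // data sub-chunk size
    try (FileOutputStream out = new FileOutputStream(wavFile)) {
        out.write(header.array());
        out.write(pcm);
    }
}
The resulting .wav file can be played with MediaPlayer; producing an actual .mp3 still requires an encoder such as LAME.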

How to change screen orientation from landscape to portrait?

I have applied the answers from the following post, but I was unable to solve the problem.
OpenCV camera orientation issue
I am getting the following exception.
Exception locking surface
java.lang.IllegalArgumentException
at android.view.Surface.nativeLockCanvas(Native Method)
at android.view.Surface.lockCanvas(Surface.java:264)
at android.view.SurfaceView$4.internalLockCanvas(SurfaceView.java:825)
at android.view.SurfaceView$4.lockCanvas(SurfaceView.java:793)
at org.opencv.android.CameraBridgeViewBase.deliverAndDrawFrame(CameraBridgeViewBase.java:403)
at org.opencv.android.JavaCameraView$CameraWorker.run(JavaCameraView.java:365)
at java.lang.Thread.run(Thread.java:818)
Here is the OpenCV library function which throws the exception:
protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
Mat modified;
if (mListener != null) {
modified = mListener.onCameraFrame(frame);
} else {
modified = frame.rgba();
}
boolean bmpValid = true;
Canvas canvas = null ;
if (modified != null) {
try {
Utils.matToBitmap(modified, mCacheBitmap);
canvas = getHolder().lockCanvas();
} catch(Exception e) {
Log.e(TAG, "Mat type: " + modified);
Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmpValid = false;
}
}
if (bmpValid && mCacheBitmap != null) {
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
Log.d(TAG, "mStretch value: " + mScale);
if (mScale != 0) {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2),
(int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2 + mScale*mCacheBitmap.getWidth()),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2 + mScale*mCacheBitmap.getHeight())), null);
} else {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
(canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()), null);
}
if (mFpsMeter != null) {
mFpsMeter.measure();
mFpsMeter.draw(canvas, 20, 30);
}
getHolder().unlockCanvasAndPost(canvas);
}
}
}
Here is my main activity, where I use the OpenCV library that calls this method:
package org.opencv.samples.facedetect;
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.graphics.Camera;
import android.media.AudioManager;
import android.net.ConnectivityManager;
import android.os.Build;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.provider.Settings;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import org.opencv.samples.facedetect.DetectionBasedTracker;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
public class FdActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2 , SharedPreferences.OnSharedPreferenceChangeListener {
private static final String TAG = "OCVSample::Activity";
private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);
public static final int JAVA_DETECTOR = 0;
public static final int NATIVE_DETECTOR = 1;
//private MenuItem mItemExit;
private MenuItem mItemSettings ;
private MenuItem showCounterValues ;
private TextView faceCounterTv ;
private Button resetButton ;
private Button savebtn ;
private Button quitButton ;
private Mat mRgba;
private Mat mGray;
private File mCascadeFile;
private CascadeClassifier mJavaDetector;
private DetectionBasedTracker mNativeDetector;
private int mDetectorType = JAVA_DETECTOR;
// private String[] mDetectorName;
private float mRelativeFaceSize = 0.2f;
private int mAbsoluteFaceSize = 0;
private float scaleFactor ;
private int minNeighbour ;
private int delayTime ;
private boolean isFaces_detect ;
private boolean isFaces_detect_pre ;
private boolean count_Face_Logic ;
private float countFace = 0.0f ;
private long startTime ;
private AudioManager mAudioManager ;
private static final int MY_PERMISSIONS_REQUEST_ACCOUNTS = 1;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS: {
Log.i(TAG, "OpenCV loaded successfully");
// Load native library after(!) OpenCV initialization
System.loadLibrary("detectionBasedTracker");
try {
// load cascade file from application resources
InputStream is = getResources().openRawResource(R.raw.haarcascade_frontalface_default);
File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
mCascadeFile = new File(cascadeDir, "haarcascade_frontalface_default.xml");
FileOutputStream os = new FileOutputStream(mCascadeFile);
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
os.close();
mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
if (mJavaDetector.empty()) {
Log.e(TAG, "Failed to load cascade classifier");
mJavaDetector = null;
} else
Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0);
cascadeDir.delete();
} catch (IOException e) {
e.printStackTrace();
Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
}
mOpenCvCameraView.enableView();
}
break;
default: {
super.onManagerConnected(status);
}
break;
}
}
};
public FdActivity() {
isFaces_detect = false ;
isFaces_detect_pre = false ;
count_Face_Logic = true ;
startTime = System.currentTimeMillis();
mAbsoluteFaceSize = 200 ;
scaleFactor = 1.2f ;
minNeighbour = 1 ;
delayTime = 1 ;
Log.i(TAG, "Instantiated new " + this.getClass());
}
/**
* Called when the activity is first created.
*/
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
//getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.face_detect_surface_view);
setSupportActionBar((Toolbar) findViewById(R.id.toolbar));
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.fd_activity_surface_view);
faceCounterTv = (TextView) findViewById(R.id.faceCountertv);
resetButton = (Button) findViewById(R.id.resetbtn);
savebtn = (Button) findViewById(R.id.savebtn);
quitButton = (Button) findViewById(R.id.quitbtn);
mAudioManager = (AudioManager) getSystemService(AUDIO_SERVICE);
resetButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
countFace = 0 ;
Toast.makeText(getApplicationContext() , "Reset the Face Counter" , Toast.LENGTH_LONG).show();
}
});
savebtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
saveFaceCounter();
Toast.makeText(getApplicationContext() , "Counter Value Saved" , Toast.LENGTH_SHORT).show();
}
});
quitButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
System.exit(0);
}
});
mOpenCvCameraView.setVisibility(CameraBridgeViewBase.VISIBLE);
// mOpenCvCameraView.setAlpha(0);
mOpenCvCameraView.setCameraIndex(1);
mOpenCvCameraView.setCvCameraViewListener(this);
//if (checkAndRequestPermissions()){
// Toast.makeText(getApplicationContext() , "OnCreate" , Toast.LENGTH_LONG).show();
//setSharedPreferences();
//}
// check current state first
// boolean state = isAirplaneMode();
// // toggle the state
// if (state)
// toggleAirplaneMode(0, state);
// else
// toggleAirplaneMode(1, state);
}
@Override
public void onPause() {
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume() {
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
mOpenCvCameraView.disableView();
PreferenceManager.getDefaultSharedPreferences(this).unregisterOnSharedPreferenceChangeListener(this);
}
public void onCameraViewStarted(int width, int height) {
mGray = new Mat();
mRgba = new Mat();
}
public void onCameraViewStopped() {
mGray.release();
mRgba.release();
}
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
// Mat mRgbaT = mRgba.t();
// Core.flip(mRgba.t(), mRgbaT, 1);
// Imgproc.resize(mRgbaT, mRgbaT, mRgba.size());
mGray = inputFrame.gray();
//Core.transpose(mGray, mGray);
//Core.flip(mGray, mGray, 0);
if (mAbsoluteFaceSize == 0) {
int height = mGray.rows();
if (Math.round(height * mRelativeFaceSize) > 0) {
mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
}
mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
}
MatOfRect faces = new MatOfRect();
if (mDetectorType == JAVA_DETECTOR) {
if (mJavaDetector != null)
mJavaDetector.detectMultiScale(mGray, faces, scaleFactor, minNeighbour, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
} else if (mDetectorType == NATIVE_DETECTOR) {
if (mNativeDetector != null)
mNativeDetector.detect(mGray, faces);
} else {
Log.e(TAG, "Detection method is not selected!");
}
Rect[] facesArray = faces.toArray();
for (int i = 0; i < facesArray.length; i++) {
Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
}
countDetectedFace(faces);
runOnUiThread(new Runnable() {
@Override
public void run() {
faceCounterTv.setText(String.valueOf(countFace));
}
});
return mRgba;
}
public void countDetectedFace(MatOfRect faces){
// do{
// This block makes sure it only counts a face when it appears, e.g.: no detected face --> face --> no detected face (counts as 1)
if (faces.empty()){
isFaces_detect = isFaces_detect_pre = false ;
}
else{
isFaces_detect = true ;
}
// Only count when the previous frame = 0 and the current frame = 1. This eliminates counting when successive frames have a face detected
if ((isFaces_detect_pre == false) && (isFaces_detect == true) && (count_Face_Logic == true)){
countFace += 0.25 ; // four face detections count as 1
startTime = System.currentTimeMillis(); // store a new time value so that it does not count every millisecond
isFaces_detect_pre = true ;
Log.d(TAG , String.valueOf(countFace));
}
if ((System.currentTimeMillis() - startTime) < delayTime){ // make sure it doesn't count every frame; buffer of 1 second
count_Face_Logic = false ;
}
else{
count_Face_Logic = true ;
}
// }while(!isAppExit);
}
}
How can I get rid of this exception?

Android: convert a recorded audio file into a float array

I am making a simple Android application where I record something with the smartphone mic, save it to a file, and then play that file back.
Now I want to apply a high-pass filter to that audio file, but to do so I first need to convert the audio file into a float array. Can someone please help me out with it?
Thanks
package abc.com.please;
import android.content.pm.PackageManager;
import android.media.MediaPlayer;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
public class MainActivity extends AppCompatActivity {
private static MediaRecorder mediaRecorder = new MediaRecorder();
private static MediaPlayer mediaPlayer;
private static String audioFilePath;
private static Button stopButton;
private static Button playButton;
private static Button recordButton;
private boolean isRecording = false;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
recordButton = (Button) findViewById(R.id.recordButton);
playButton = (Button) findViewById(R.id.playButton);
stopButton = (Button) findViewById(R.id.stopButton);
if (!hasMicrophone())
{
stopButton.setEnabled(false);
playButton.setEnabled(false);
recordButton.setEnabled(false);
} else {
playButton.setEnabled(false);
stopButton.setEnabled(false);
}
audioFilePath =
Environment.getExternalStorageDirectory().getAbsolutePath()
+ "/myaudio.3gp";
recordButton.setOnClickListener(new View.OnClickListener(){
public void onClick (View v)
{
isRecording = true;
stopButton.setEnabled(true);
playButton.setEnabled(false);
recordButton.setEnabled(false);
try {
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mediaRecorder.setOutputFile(audioFilePath);
mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
mediaRecorder.prepare();
mediaRecorder.start();
}catch (Exception e) {
e.printStackTrace();
}
}
});
stopButton.setOnClickListener(new View.OnClickListener() {
public void onClick (View view)
{
stopButton.setEnabled(false);
playButton.setEnabled(true);
if (isRecording)
{
recordButton.setEnabled(false);
isRecording = false;
mediaRecorder.stop();
mediaRecorder.release();
recordButton.setEnabled(true);
}
else
{
Toast.makeText(getApplicationContext(),"No recording going on",Toast.LENGTH_SHORT).show();
}
}});
playButton.setOnClickListener(new View.OnClickListener() {
public void onClick(View view)
{
playButton.setEnabled(false);
recordButton.setEnabled(false);
stopButton.setEnabled(true);
try {
mediaPlayer = new MediaPlayer();
mediaPlayer.setDataSource(audioFilePath);
mediaPlayer.prepare();
mediaPlayer.start();
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
protected boolean hasMicrophone() {
PackageManager pmanager = this.getPackageManager();
return pmanager.hasSystemFeature(
PackageManager.FEATURE_MICROPHONE);
}
}
The details of the bytes-to-float conversion are going to depend on the file format that you use for your audio file. I cannot tell what that format is from your code. If your file is WAV, 44100 fps, 16-bit, little-endian, stereo (this is a standard Java "CD quality" format), you can try making use of the following code I wrote. The key conversion point is where two bytes are concatenated and converted to a single numeral (here "buffer" contains data being read in from the audio file):
float audioVal = ( buffer[bufferIdx++] & 0xff )
| ( buffer[bufferIdx++] << 8 );
If it is big endian, reverse the order of the shifts. If 24 or 32 bit, then you would OR in shifts of << 16 and << 24, respectively. With 16-bit, the result will make use of the range of a short, so division by 32767 is needed to normalize the result to [-1..1].
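For example, the big-endian 16-bit variant of that line (an illustrative sketch) swaps which byte is masked and which is shifted:
float audioVal = ( buffer[bufferIdx++] << 8 )
        | ( buffer[bufferIdx++] & 0xff );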
I've been using the following in a Java context for a while without problems, but I don't know if Android supports javax.sound.sampled.AudioInputStream, etc. Maybe it is still useful to see the conversion in the context of a file read? The code assumes that we have the "CD quality" audio format and that the audio file is no longer than Integer.MAX_VALUE frames.
import java.io.IOException;
import java.net.URL;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.UnsupportedAudioFileException;
public float[] loadSoundFileURL(URL url) throws UnsupportedAudioFileException,
IOException
{
AudioInputStream ais = AudioSystem.getAudioInputStream(url);
int framesCount = (int)ais.getFrameLength();
// assuming stereo format, so two entries per frame
float[] temp = new float[framesCount * 2];
long tempCountdown = temp.length;
int bytesRead = 0;
int bufferIdx;
int clipIdx = 0;
byte[] buffer = new byte[1024];
while((bytesRead = ais.read(buffer, 0, 1024)) != -1)
{
bufferIdx = 0;
for (int i = 0, n = (bytesRead >> 1); i < n; i ++)
{
if ( tempCountdown-- >= 0)
{
temp[clipIdx++] =
( buffer[bufferIdx++] & 0xff )
| ( buffer[bufferIdx++] << 8 ) ;
}
}
}
// QUESTION: better to do following in above loop?
for (int i = 0; i < temp.length; i++)
{
temp[i] = temp[i] / 32767f;
}
return temp;
}

Two actions for one button (Android)

There is a problem with the code of my Android application. I tried to assign multiple actions to a single button. Here's the code:
import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
public class MainActivity extends Activity implements OnClickListener {
AudioRecord audioRecord;
private Thread recordingThread = null;
Button start_button;
Button stop_button;
boolean reading = false;
final String TAG = "TAG";
int myBufferSize = 8192;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
start_button = (Button) findViewById(R.id.button1);
stop_button = (Button) findViewById(R.id.button2);
start_button.setOnClickListener(this);
stop_button.setOnClickListener(this);
createAudioRecorder();
}
private void createAudioRecorder() {
int sampleRate = 16000;
int channelConfig = AudioFormat.CHANNEL_IN_MONO;
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
int minInternalBufferSize = AudioRecord.getMinBufferSize(sampleRate,
channelConfig, audioFormat);
int internalBufferSize = minInternalBufferSize * 4;
Log.d(TAG, "minInternalBufferSize = " + minInternalBufferSize
+ ", internalBufferSize = " + internalBufferSize
+ ", myBufferSize = " + myBufferSize);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate, channelConfig, audioFormat, internalBufferSize);
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.button1:
reading = true;
startRecorder();
break;
case R.id.button2:
reading = false;
stopRecorder();
break;
}
}
private void startRecorder() {
recordingThread = new Thread(new Runnable() {
@Override
public void run() {
AudioData();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
protected void AudioData() {
audioRecord.startRecording();
byte data[] = new byte[myBufferSize];
byte[] myBuffer = new byte[myBufferSize];
int readCount = 0;
int totalCount = 0;
while (reading) {
readCount = audioRecord.read(myBuffer, 0, myBufferSize);
data = myBuffer;
totalCount += readCount;
}
}
private void stopRecorder() {
if (null != audioRecord) {
reading = false;
audioRecord.stop();
audioRecord.release();
audioRecord = null;
recordingThread = null;
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
}
When I run this code it works the first time, but not on subsequent runs.
The logs show that the error is somewhere here (NullPointerException):
audioRecord.startRecording();
Please help me solve the problem!
When button2 is clicked, stopRecorder() is called, which assigns null to audioRecord. Therefore, the next time button1 is clicked, the new thread that is spawned will throw a NullPointerException when it calls AudioData().
Perhaps you can eliminate createAudioRecorder() and move its logic to the beginning of AudioData().
Also, I would rename AudioData(), which looks like a class name, to recordAudioData(). That would conform better to standard Java naming conventions.
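A minimal sketch combining both suggestions, reusing the parameters from the question (the surrounding fields are assumed to exist as posted):
// Create a fresh AudioRecord for every recording session instead of reusing
// the instance that stopRecorder() released and set to null.
protected void recordAudioData() {
    int minBufferSize = AudioRecord.getMinBufferSize(16000,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, 16000,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
            minBufferSize * 4);
    audioRecord.startRecording();
    byte[] myBuffer = new byte[myBufferSize];
    while (reading) {
        int readCount = audioRecord.read(myBuffer, 0, myBufferSize);
        // process readCount bytes of myBuffer here
    }
}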

Tesseract unable to copy data to SD?

I think I'm doing something wrong with my paths, but I can't figure out what. I am testing on a Nexus 7, if that's relevant. Following some examples, I tried to copy the tessdata folder over to the SD card; however, it keeps telling me it was unable to copy: file not found. I don't quite understand why. (I have the Tesseract project as a library, and the tessdata folder copied into assets.)
All help is appreciated, thanks!
Code:
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import android.app.Activity;
import android.content.Intent;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.Toast;
import com.googlecode.tesseract.android.TessBaseAPI;
public class MainActivity extends Activity {
private static ImageView imageView;
// protected static Bitmap bit;
protected static Bitmap mImageBitmap;
// protected static String DATA_PATH;
public static final String STORAGE_PATH = Environment.getExternalStorageDirectory().toString() + "/rjb";
protected static String tesspath = STORAGE_PATH + "/tessdata";
protected static String savepath = null;
protected static String TAG = "OCR";
protected static String lang = "eng";
// main method
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
// This is my image view for to show the image with
imageView = (ImageView) this.findViewById(R.id.imageView1);
// this is the take a photo button
Button photoButton = (Button) this.findViewById(R.id.button1);
//check if the rjb directory is there
//make it if its not
createmydir();
//if (!(new File(STORAGE_PATH + File.separator + "tessdata" + File.separator + lang + ".traineddata")).exists()) {
try {
AssetManager assetManager = this.getAssets();
//open the asset manager and open the traineddata path
InputStream in = assetManager.open("tessdata/eng.traineddata");
OutputStream out = new FileOutputStream(tesspath + "/eng.traineddata");
byte[] buf = new byte[8024];
int len;
while ((len = in.read(buf)) > 0) {
out.write(buf, 0, len);
}
in.close();
out.close();
} catch (IOException e) {
android.util.Log.e(TAG, "Was unable to copy " + lang
+ " traineddata " + e.toString());
android.util.Log.e(TAG, "IM PRINTING THE STACK TRACE");
e.printStackTrace();
}
//} else {
processImage(STORAGE_PATH + File.separator + "savedAndroid.jpg");
//}
photoButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// CALL THE PICTURE (this works)
dispatchTakePictureIntent(0);
}
});
}
private void createmydir() {
File t = new File(STORAGE_PATH);
if(t.exists()) {
Toast.makeText(getApplicationContext(), "IM TOASTIN CAUSE IT EXISTS", Toast.LENGTH_LONG).show();
}
else {
t.mkdirs();
Toast.makeText(getApplicationContext(), "IM TOASTIN CUZ I MADE IT EXIST", Toast.LENGTH_LONG).show();
}
}
private void handleSmallCameraPhoto(Intent intent) {
Bundle extras = intent.getExtras();
mImageBitmap = (Bitmap) extras.get("data");
imageView.setImageBitmap(mImageBitmap);
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 4;
saveImageAndroid(mImageBitmap);
Bitmap bitmap = BitmapFactory.decodeFile(savepath, options);
ExifInterface exif;
try {
exif = new ExifInterface(savepath);
int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,ExifInterface.ORIENTATION_NORMAL);
int rotate = 0;
switch (exifOrientation) {
case ExifInterface.ORIENTATION_ROTATE_90:
rotate = 90;
break;
case ExifInterface.ORIENTATION_ROTATE_180:
rotate = 180;
break;
case ExifInterface.ORIENTATION_ROTATE_270:
rotate = 270;
break;
}
if (rotate != 0) {
int w = bitmap.getWidth();
int h = bitmap.getHeight();
// Setting pre rotate
Matrix mtx = new Matrix();
mtx.preRotate(rotate);
// Rotating Bitmap & convert to ARGB_8888, required by tess
bitmap = Bitmap.createBitmap(bitmap, 0, 0, w, h, mtx, false);
bitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true);
}
// DATA_PATH = getDataPath();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
handleSmallCameraPhoto(data);
}
// write bitmap to storage
// saves it as savedandroid.jpg
// saves location in savepath
private void saveImageAndroid(final Bitmap passedBitmap) {
try {
savepath = STORAGE_PATH + File.separator + "savedAndroid.jpg";
FileOutputStream mFileOutStream = new FileOutputStream(savepath);
passedBitmap.compress(Bitmap.CompressFormat.JPEG, 100,mFileOutStream);
mFileOutStream.flush();
mFileOutStream.close();
} catch (Exception e) {
e.printStackTrace();
}
}
private void dispatchTakePictureIntent(int actionCode) {
Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
startActivityForResult(takePictureIntent, actionCode);
}
private void processImage(final String filePath) {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 2;
options.inPurgeable = true;
Bitmap bitmap = BitmapFactory.decodeFile(filePath, options);
if (bitmap != null) {
/*
* was for rotating but no longer needed int width =
* bitmap.getWidth(); int height = bitmap.getHeight(); Matrix matrix
* = new Matrix(); matrix.postRotate(rotation); bitmap =
* Bitmap.createBitmap(bitmap, 0, 0, width, height, matrix, false);
* bitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true);
*/
TessBaseAPI baseApi = new TessBaseAPI();
baseApi.setDebug(true);
baseApi.init(STORAGE_PATH, "eng");
baseApi.setPageSegMode(100);
baseApi.setPageSegMode(7);
baseApi.setImage(bitmap);
String recognizedText = baseApi.getUTF8Text();
android.util.Log.i(TAG, "recognizedText: 1 " + recognizedText);
baseApi.end();
if (lang.equalsIgnoreCase("eng")) {
recognizedText = recognizedText
.replaceAll("[^a-zA-Z0-9]+", " ");
}
android.util.Log.i(TAG,
"recognizedText: 2 " + recognizedText.trim());
}
}
}
First, try placing the language files on the SD card manually and running simple OCR on a simple image, e.g. one of a short word like "ear". If that works, then move on to copying the language files programmatically; that way you will know where the error is.
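One thing worth checking in the posted code: createmydir() only creates STORAGE_PATH itself, while the output stream is opened under STORAGE_PATH/tessdata. If that subdirectory does not exist, new FileOutputStream(tesspath + "/eng.traineddata") throws a FileNotFoundException, which would produce exactly the "unable to copy" log message. A minimal guard, assuming the fields from the question:
// Make sure the tessdata subdirectory exists before opening the output stream.
File tessDir = new File(tesspath);
if (!tessDir.exists() && !tessDir.mkdirs()) {
    android.util.Log.e(TAG, "Could not create " + tesspath);
}
OutputStream out = new FileOutputStream(tesspath + "/eng.traineddata");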
