I'm trying to detect the silence in order to stop recording the audio from a mic. My current code is:
/**
 * Records audio from the microphone until the user presses Enter on stdin.
 *
 * A watcher thread blocks on {@code System.in}; when a line arrives it sets
 * the shared {@code stopped} flag (read by {@code record()}) and calls
 * {@code stop()}.
 *
 * @return the captured raw audio bytes
 * @throws AudioException if the recording fails for any reason
 */
public byte[] getRecord() throws AudioException {
    try {
        // Reset the flag so a previous stop does not end this recording early.
        stopped = false;
        // Watcher thread: blocks on stdin and flips the stop flag on Enter.
        Thread stopper = new Thread(() -> {
            try {
                BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
                br.readLine();
                stopped = true;
            } catch (IOException ex) {
                ex.printStackTrace();
            }
            stop();
        });
        // Daemon so a recording that ends without user input does not keep
        // the JVM alive waiting on stdin.
        stopper.setDaemon(true);
        stopper.start();
        return record();
    } catch (Exception e) {
        // BUG FIX: LineUnavailableException has no (String, Throwable)
        // constructor, so the original did not compile; attach the cause
        // explicitly so it is not lost.
        LineUnavailableException wrapper =
                new LineUnavailableException("Unable to record your voice");
        wrapper.initCause(e);
        throw wrapper;
    }
}
/**
 * Captures audio from the microphone until the shared {@code stopped} flag
 * is set by another thread.
 *
 * @return the captured raw PCM bytes, or {@code null} if the requested data
 *         line is not supported on this system
 * @throws LineUnavailableException if the microphone line cannot be opened
 */
private byte[] record() throws LineUnavailableException {
    AudioFormat format = AudioUtil.getAudioFormat(audioConf);
    DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
    // Check whether the system supports the requested data line.
    if (!AudioSystem.isLineSupported(info)) {
        return null;
    }
    microphone = (TargetDataLine) AudioSystem.getLine(info);
    microphone.open(format);
    // BUG FIX: start() was called twice in the original; once is enough.
    microphone.start();
    System.out.println("Listening, tap enter to stop ...");
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    byte[] data = new byte[microphone.getBufferSize() / 5];
    try {
        // 'stopped' is a shared flag set by another thread.
        while (!stopped) {
            // Read the next chunk of data from the TargetDataLine.
            int numBytesRead = microphone.read(data, 0, data.length);
            // Guard against zero-length reads (e.g. the line was stopped).
            if (numBytesRead > 0) {
                byteArrayOutputStream.write(data, 0, numBytesRead);
            }
        }
    } finally {
        // BUG FIX: release the line so the device is available again even
        // if the loop exits abnormally.
        microphone.stop();
        microphone.close();
    }
    return byteArrayOutputStream.toByteArray();
}
At the moment I stop the recording using a shell but I'd like to know how I can stop it in the while loop.
After a lot of tries now it seems to work. This is the updated code:
/**
 * Captures audio from the microphone until either another thread sets the
 * shared {@code stopped} flag or roughly five seconds of continuous silence
 * (RMS below 0.1) are detected.
 *
 * Assumes the capture format is 16-bit little-endian PCM (the samples are
 * decoded with a little-endian ShortBuffer) — confirm against
 * {@code AudioUtil.getAudioFormat}.
 *
 * @return the captured raw PCM bytes, or {@code null} if the requested data
 *         line is not supported
 * @throws LineUnavailableException if the microphone cannot be opened
 */
private byte[] record() throws LineUnavailableException {
    AudioFormat format = AudioUtil.getAudioFormat(audioConf);
    DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
    // Check whether the system supports the requested data line.
    if (!AudioSystem.isLineSupported(info)) {
        return null;
    }
    microphone = (TargetDataLine) AudioSystem.getLine(info);
    microphone.open(format);
    // BUG FIX: start() was called twice in the original; once is enough.
    microphone.start();
    System.out.println("Listening, tap enter to stop ...");
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    byte[] data = new byte[microphone.getBufferSize() / 5];
    short[] shorts = new short[data.length / 2];
    long startSilence = 0;
    boolean pause = false;
    // 'stopped' is a shared flag set by another thread.
    while (!stopped) {
        // Read the next chunk of data from the TargetDataLine.
        int numBytesRead = microphone.read(data, 0, data.length);
        if (numBytesRead <= 0) {
            continue;
        }
        // Save this chunk of data.
        byteArrayOutputStream.write(data, 0, numBytesRead);
        // BUG FIX: decode and analyse only the bytes actually read; the
        // original always converted the whole buffer, so a short read left
        // stale samples in the RMS computation.
        int sampleCount = numBytesRead / 2;
        ByteBuffer.wrap(data, 0, numBytesRead).order(ByteOrder.LITTLE_ENDIAN)
                .asShortBuffer().get(shorts, 0, sampleCount);
        // Root-mean-square of the normalised samples: a cheap loudness proxy.
        double rms = 0;
        for (int i = 0; i < sampleCount; i++) {
            double normal = shorts[i] / 32768f;
            rms += normal * normal;
        }
        rms = Math.sqrt(rms / sampleCount);
        System.out.println("Listening, rms is " + rms);
        if (rms < 0.1) {
            long now = System.currentTimeMillis();
            // Stop once a single silent stretch has lasted more than 5 s.
            if (now - startSilence > 5000 && pause) {
                break;
            }
            if (!pause) {
                // First quiet chunk: remember when this silence started.
                startSilence = now;
                System.out.println("Listening, new silence at " + startSilence);
            }
            pause = true;
        } else {
            pause = false;
        }
    }
    return byteArrayOutputStream.toByteArray();
}
How do I implement an IIR bandpass filter in my current android code? I have an android app which could record audio (frequency actually) and save it in a .wav file.
I have managed to find a IIR Filter Library online but I am not sure how to implement it into my code.
https://github.com/ddf/Minim/blob/master/src/ddf/minim/effects/BandPass.java
https://github.com/DASAR/Minim-Android/blob/master/src/ddf/minim/effects/IIRFilter.java
I am supposed to add the 18k-20k bandpass filter to the code before outputting the received sound signal into a .wav file.
My current code
package com.example.audio;
import ddf.minim.effects.*;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import com.varma.samples.audiorecorder.R;
import android.app.Activity;
import android.content.Context;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.media.MediaRecorder;
import android.media.MediaScannerConnection;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.Message;
import android.text.SpannableStringBuilder;
import android.text.style.RelativeSizeSpan;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
public class RecorderActivity extends Activity {
private static final int RECORDER_BPP = 16;
private static final String AUDIO_RECORDER_FILE_EXT_WAV = ".wav";
private static final String AUDIO_RECORDER_FOLDER = "AudioRecorder";
private static final String AUDIO_RECORDER_TEMP_FILE = "record_temp.raw";
private static final int RECORDER_SAMPLERATE = 44100;// 44100; //18000
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_STEREO; //AudioFormat.CHANNEL_IN_STEREO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private static final int PLAY_CHANNELS = AudioFormat.CHANNEL_OUT_STEREO; //AudioFormat.CHANNEL_OUT_STEREO;
private static final int FREQUENCY_LEFT = 2000; //Original:18000 (16 Dec)
private static final int FREQUENCY_RIGHT = 2000; //Original:18000 (16 Dec)
private static final int AMPLITUDE_LEFT = 1;
private static final int AMPLITUDE_RIGHT = 1;
private static final int DURATION_SECOND = 10;
private static final int SAMPLE_RATE = 44100;
private static final float SWEEP_RANGE = 1000.0f;
String store;
private AudioRecord recorder = null;
private int bufferSize = 0;
private Thread recordingThread = null;
private boolean isRecording = false;
double time;
float[] buffer1;
float[] buffer2;
byte[] byteBuffer1;
byte[] byteBuffer2;
byte[] byteBufferFinal;
int bufferIndex;
short x;
short y;
AudioTrack audioTrack;
Button btnPlay, btnStart, btnStop;
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
setButtonHandlers();
enableButtons(false);
btnPlay = (Button) findViewById(R.id.btnPlay);
btnStop = (Button) findViewById(R.id.btnStop);
btnStart = (Button) findViewById(R.id.btnStart);
bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
buffer1 = new float[(int) (DURATION_SECOND * SAMPLE_RATE)];
buffer2 = new float[(int) (DURATION_SECOND * SAMPLE_RATE)];
float f1 = 0.0f, f2 = 0.0f;
for (int sample = 0, step = 0; sample < buffer1.length; sample++) {
time = sample / (SAMPLE_RATE * 1.0);
//f1 = (float)(FREQUENCY_LEFT + ((sample / (buffer1.length * 1.0)) * SWEEP_RANGE)); // frequency sweep
//f2 = (float)(FREQUENCY_RIGHT + ((sample / (buffer1.length * 1.0)) * SWEEP_RANGE)); // frequency sweep
f1 = FREQUENCY_LEFT; // static frequency
f2 = FREQUENCY_RIGHT; // static frequency
buffer1[sample] = (float) (AMPLITUDE_LEFT * Math.sin(2 * Math.PI * f1 * time));
buffer2[sample] = (float) (AMPLITUDE_RIGHT * Math.sin(2 * Math.PI * f2 * time));
}
byteBuffer1 = new byte[buffer1.length * 2]; // two bytes per audio
// frame, 16 bits
for (int i = 0, bufferIndex = 0; i < byteBuffer1.length; i++) {
x = (short) (buffer1[bufferIndex++] * 32767.0); // [2^16 - 1]/2 =
// 32767.0
byteBuffer1[i] = (byte) x; // low byte
byteBuffer1[++i] = (byte) (x >>> 8); // high byte
}
byteBuffer2 = new byte[buffer2.length * 2];
for (int j = 0, bufferIndex = 0; j < byteBuffer2.length; j++) {
y = (short) (buffer2[bufferIndex++] * 32767.0);
byteBuffer2[j] = (byte) y; // low byte
byteBuffer2[++j] = (byte) (y >>> 8); // high byte
}
byteBufferFinal = new byte[byteBuffer1.length * 2];
// LL RR LL RR LL RR
for (int k = 0, index = 0; index < byteBufferFinal.length - 4; k = k + 2) {
byteBufferFinal[index] = byteBuffer1[k]; // LEFT
// {0,1/4,5/8,9/12,13;...}
byteBufferFinal[index + 1] = byteBuffer1[k + 1];
index = index + 2;
byteBufferFinal[index] = byteBuffer2[k]; // RIGHT
// {2,3/6,7/10,11;...}
byteBufferFinal[index + 1] = byteBuffer2[k + 1];
index = index + 2;
}
try {
FileOutputStream ss = new FileOutputStream(Environment.getExternalStorageDirectory().getPath() + "/" + AUDIO_RECORDER_FOLDER + "/source.txt");
ss.write(byteBufferFinal);
ss.flush();
ss.close();
}
catch (IOException ioe){
Log.e("IO Error","Write source error.");
}
}
/** Wires the click listeners onto the start/stop/play buttons. */
private void setButtonHandlers() {
    Button start = (Button) findViewById(R.id.btnStart);
    Button stop = (Button) findViewById(R.id.btnStop);
    Button play = (Button) findViewById(R.id.btnPlay);
    start.setOnClickListener(startClick);
    stop.setOnClickListener(stopClick);
    play.setOnClickListener(playClick);
}
/** Enables or disables the button identified by {@code id}. */
private void enableButton(int id, boolean isEnable) {
    Button button = (Button) findViewById(id);
    button.setEnabled(isEnable);
}
/**
 * Toggles the three control buttons for the given recording state:
 * Start is available only while idle; Stop and Play only while recording.
 */
private void enableButtons(boolean isRecording) {
    int[] ids = { R.id.btnStart, R.id.btnStop, R.id.btnPlay };
    boolean[] states = { !isRecording, isRecording, isRecording };
    for (int i = 0; i < ids.length; i++) {
        enableButton(ids[i], states[i]);
    }
}
/**
 * Builds the absolute path of the output wave file, creating the recorder
 * folder if needed, triggers a media scan of the storage root, and caches
 * the path in {@code store}.
 */
private String getFilename() {
    String root = Environment.getExternalStorageDirectory().getPath();
    File folder = new File(root, AUDIO_RECORDER_FOLDER);
    if (!folder.exists()) {
        folder.mkdirs();
    }
    // Make the location visible to media scanners immediately.
    MediaScannerConnection.scanFile(this, new String[]{root}, null, null);
    store = folder.getAbsolutePath() + "/" + "Audio"
            + AUDIO_RECORDER_FILE_EXT_WAV;
    return store;
}
/**
 * Returns the absolute path of the temporary raw-capture file inside the
 * recorder folder, creating the folder if needed and deleting any stale
 * temp file first.
 *
 * NOTE(review): the stale file that gets deleted lives in the storage
 * root, while the returned path is inside the recorder folder — behaviour
 * preserved as-is from the original; verify which location is intended.
 */
private String getTempFilename() {
    String root = Environment.getExternalStorageDirectory().getPath();
    File folder = new File(root, AUDIO_RECORDER_FOLDER);
    if (!folder.exists()) {
        folder.mkdirs();
    }
    File stale = new File(root, AUDIO_RECORDER_TEMP_FILE);
    if (stale.exists()) {
        stale.delete();
    }
    return folder.getAbsolutePath() + "/" + AUDIO_RECORDER_TEMP_FILE;
}
private void startRecording() {
//BandPass bandpass = new BandPass(19000,2000,44100);
/* BandPass bandpass = new BandPass(50,2,SAMPLE_RATE);
int [] freqR = {FREQUENCY_RIGHT};
int [] freqL = {FREQUENCY_LEFT};
float[] testL = shortToFloat(freqR);
float [] testR = shortToFloat(freqL);
bandpass.process(testL,testR);
bandpass.printCoeff();
*/
recorder = new AudioRecord(MediaRecorder.AudioSource.CAMCORDER,
RECORDER_SAMPLERATE, RECORDER_CHANNELS,
RECORDER_AUDIO_ENCODING, bufferSize);
AudioManager am = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
am.setStreamVolume(AudioManager.STREAM_MUSIC, am.getStreamMaxVolume(AudioManager.STREAM_MUSIC), 0);
/*
* AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
* (int) RECORDER_SAMPLERATE,AudioFormat.CHANNEL_OUT_STEREO,
* AudioFormat.ENCODING_PCM_16BIT, bufferSize, AudioTrack.MODE_STREAM);
*/
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
(int) SAMPLE_RATE, PLAY_CHANNELS,
AudioFormat.ENCODING_PCM_16BIT, byteBufferFinal.length,
AudioTrack.MODE_STATIC);
audioTrack.write(byteBufferFinal, 0, byteBufferFinal.length);
audioTrack.play();
BandPass bandpass = new BandPass(50,2,SAMPLE_RATE);
int [] freqR = {FREQUENCY_RIGHT};
int [] freqL = {FREQUENCY_LEFT};
float[] testL = shortToFloat(freqR);
float [] testR = shortToFloat(freqL);
bandpass.process(testL,testR);
bandpass.printCoeff();
audioTrack.setPlaybackRate(RECORDER_SAMPLERATE);
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
#Override
public void run() {
try {
writeAudioDataToFile();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}, "AudioRecorder Thread");
recordingThread.start();
}
/**
 * Splits a frame-interleaved sample buffer (ch0, ch1, ch0, ch1, ... for
 * stereo) into one contiguous array per channel. Any trailing samples that
 * do not form a complete frame are ignored.
 */
double[][] deinterleaveData(double[] samples, int numChannels) {
    int frames = samples.length / numChannels;
    double[][] channels = new double[numChannels][frames];
    for (int frame = 0; frame < frames; frame++) {
        for (int ch = 0; ch < numChannels; ch++) {
            channels[ch][frame] = samples[frame * numChannels + ch];
        }
    }
    return channels;
}
/**
 * Drains the AudioRecord into both the temporary raw file and a parallel
 * .txt dump until {@code isRecording} is cleared by another thread.
 *
 * BUG FIXES: only the bytes actually returned by read() are written (the
 * original wrote the whole buffer, appending stale bytes on short reads);
 * a null 'rs' no longer causes an NPE; both streams are closed in a
 * finally block even when opening or writing fails.
 *
 * @throws IOException never thrown directly; kept for signature
 *         compatibility with existing callers
 */
private void writeAudioDataToFile() throws IOException {
    byte[] data = new byte[bufferSize];
    FileOutputStream os = null;
    FileOutputStream rs = null;
    try {
        os = new FileOutputStream(getTempFilename());
        // Same base name as the wav output, with a .txt extension.
        rs = new FileOutputStream(getFilename().split(".wav")[0] + ".txt");
        while (isRecording) {
            int read = recorder.read(data, 0, bufferSize);
            if (read > 0 && AudioRecord.ERROR_INVALID_OPERATION != read) {
                os.write(data, 0, read);
                rs.write(data, 0, read);
            }
        }
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } finally {
        if (os != null) {
            try {
                os.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        if (rs != null) {
            try {
                rs.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
/**
 * Stops capture and playback, converts the temporary raw capture into a
 * .wav file, publishes it to the media scanner, and mutes media volume.
 */
private void stopRecording() {
    if (null != recorder) {
        // Clearing the flag lets the writer thread's loop terminate.
        isRecording = false;
        // Tear playback down before the recorder; release() frees the
        // native resources behind both objects.
        audioTrack.flush();
        audioTrack.stop();
        audioTrack.release();
        recorder.stop();
        recorder.release();
        recorder = null;
        recordingThread = null;
    }
    // Wrap the raw PCM dump in a WAV header, then drop the temp file.
    copyWaveFile(getTempFilename(), getFilename());
    deleteTempFile();
    // Make the finished recording visible to media apps.
    MediaScannerConnection.scanFile(this, new String[]{getFilename()}, null, null);
    // Restore (mute) the media volume that startRecording() maximised.
    AudioManager am = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
    am.setStreamVolume(AudioManager.STREAM_MUSIC, 0, 0);
}
/** Removes the temporary raw-capture file, if present. */
private void deleteTempFile() {
    new File(getTempFilename()).delete();
}
/**
 * Copies the raw PCM capture from {@code inFilename} into
 * {@code outFilename}, prefixed with a canonical 44-byte WAV header.
 *
 * BUG FIX: the copy loop now writes only the bytes actually read — the
 * original wrote the full buffer every iteration, so a short final read
 * appended garbage to the output. Streams are managed with
 * try-with-resources so they are closed on every path.
 */
private void copyWaveFile(String inFilename, String outFilename) {
    long longSampleRate = RECORDER_SAMPLERATE;
    int channels = 2; // the capture is stereo (CHANNEL_IN_STEREO)
    long byteRate = RECORDER_BPP * RECORDER_SAMPLERATE * channels / 8;
    byte[] data = new byte[bufferSize];
    try (FileInputStream in = new FileInputStream(inFilename);
         FileOutputStream out = new FileOutputStream(outFilename)) {
        long totalAudioLen = in.getChannel().size();
        // RIFF chunk size = audio bytes + 44-byte header - 8.
        long totalDataLen = totalAudioLen + 36;
        WriteWaveFileHeader(out, totalAudioLen, totalDataLen,
                longSampleRate, channels, byteRate);
        int read;
        while ((read = in.read(data)) != -1) {
            out.write(data, 0, read);
        }
    } catch (IOException e) {
        // FileNotFoundException is an IOException; one handler covers both.
        e.printStackTrace();
    }
}
/**
 * Streams the recorded wave file to an AudioTrack.
 *
 * Improvements over the posted version: the 44-byte RIFF/WAVE header is
 * skipped so it is not rendered as a click at the start (AudioTrack
 * expects raw PCM), the input streams are closed via try-with-resources,
 * the per-chunk debug log was removed, and the large block of
 * commented-out filter experiments was deleted.
 */
private void playWaveFile() {
    String filepath = store;
    Log.d("PLAYWAVEFILE", "I AM INSIDE");
    // Minimum buffer the platform requires for this output format.
    int minBufferSize = AudioTrack.getMinBufferSize(8000,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
    int bufferSize = 512;
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            RECORDER_SAMPLERATE, AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT, minBufferSize,
            AudioTrack.MODE_STREAM);
    byte[] data = new byte[bufferSize];
    try (FileInputStream fileInputStream = new FileInputStream(filepath);
         DataInputStream dataInputStream = new DataInputStream(fileInputStream)) {
        // Skip the 44-byte WAV header: only the PCM payload is playable.
        dataInputStream.skipBytes(44);
        audioTrack.play();
        Toast.makeText(RecorderActivity.this, "this is my Toast message!!! =)",
                Toast.LENGTH_LONG).show();
        int count;
        while ((count = dataInputStream.read(data, 0, bufferSize)) > -1) {
            audioTrack.write(data, 0, count);
        }
        audioTrack.stop();
        audioTrack.release();
    } catch (IOException e) {
        // FileNotFoundException is an IOException; one handler covers both.
        e.printStackTrace();
    }
}
/**
 * Writes a canonical 44-byte RIFF/WAVE header for 16-bit PCM data.
 *
 * BUG FIX: the block-align field was hard-coded as 2*16/8, silently
 * ignoring the {@code channels} parameter; it is now derived from
 * {@code channels} and RECORDER_BPP so the header stays correct if the
 * channel count ever changes. The repetitive little-endian byte packing
 * was factored into a helper.
 *
 * @param out           destination stream; the 44 header bytes are written
 * @param totalAudioLen size of the PCM payload in bytes
 * @param totalDataLen  RIFF chunk size (payload + 36)
 * @param longSampleRate sample rate in Hz
 * @param channels      number of interleaved channels
 * @param byteRate      bytes per second of audio
 * @throws IOException if writing to {@code out} fails
 */
private void WriteWaveFileHeader(FileOutputStream out, long totalAudioLen,
        long totalDataLen, long longSampleRate, int channels, long byteRate)
        throws IOException {
    byte[] header = new byte[44];
    // "RIFF" chunk descriptor.
    header[0] = 'R'; header[1] = 'I'; header[2] = 'F'; header[3] = 'F';
    putLittleEndianInt(header, 4, totalDataLen);
    header[8] = 'W'; header[9] = 'A'; header[10] = 'V'; header[11] = 'E';
    // "fmt " sub-chunk: 16 bytes describing PCM audio.
    header[12] = 'f'; header[13] = 'm'; header[14] = 't'; header[15] = ' ';
    putLittleEndianInt(header, 16, 16);              // fmt chunk size
    header[20] = 1;                                  // audio format: 1 = PCM
    header[21] = 0;
    header[22] = (byte) channels;
    header[23] = 0;
    putLittleEndianInt(header, 24, longSampleRate);
    putLittleEndianInt(header, 28, byteRate);
    // Block align: bytes per frame, derived from the parameters.
    header[32] = (byte) (channels * RECORDER_BPP / 8);
    header[33] = 0;
    header[34] = RECORDER_BPP;                       // bits per sample
    header[35] = 0;
    // "data" sub-chunk.
    header[36] = 'd'; header[37] = 'a'; header[38] = 't'; header[39] = 'a';
    putLittleEndianInt(header, 40, totalAudioLen);
    out.write(header, 0, 44);
}

/** Stores the low 32 bits of {@code value} little-endian at {@code offset}. */
private static void putLittleEndianInt(byte[] dest, int offset, long value) {
    dest[offset] = (byte) (value & 0xff);
    dest[offset + 1] = (byte) ((value >> 8) & 0xff);
    dest[offset + 2] = (byte) ((value >> 16) & 0xff);
    dest[offset + 3] = (byte) ((value >> 24) & 0xff);
}
private View.OnClickListener startClick = new View.OnClickListener() {
#Override
public void onClick(View v) {
// TODO Auto-generated method stub
Thread recordThread = new Thread(new Runnable() {
#Override
public void run() {
isRecording = true;
startRecording();
}
});
recordThread.start();
btnStart.setEnabled(false);
btnStop.setEnabled(true);
btnPlay.setEnabled(false);
}
};
private View.OnClickListener stopClick = new View.OnClickListener() {
#Override
public void onClick(View v) {
new Handler().postDelayed(new Runnable() {
#Override
public void run() {
// TODO Auto-generated method stub
stopRecording();
enableButtons(false);
btnPlay.setEnabled(true);
// stop();
}
}, 100);
}
};
private View.OnClickListener playClick = new View.OnClickListener() {
#Override
public void onClick(View v) {
// TODO Auto-generated method stub
playWaveFile();
btnPlay.setEnabled(true);
String filepath = store;
final String promptPlayRecord = "PlayWaveFile()\n" + filepath;
SpannableStringBuilder biggerText = new SpannableStringBuilder(promptPlayRecord);
biggerText.setSpan(new RelativeSizeSpan(2.05f), 0, promptPlayRecord.length(), 0);
Toast.makeText(RecorderActivity.this, biggerText, Toast.LENGTH_LONG).show();
}
};
}
The method below is to convert my 16-bit integer to float since the library uses float
/**
 * Converts 16-bit integer audio samples to 32-bit float format, mapping
 * [-32768, 32767] onto [-1, 1).
 *
 * @param audio samples as ints, each holding a 16-bit signed sample value
 * @return one normalised float per input sample
 */
private float[] shortToFloat(int[] audio) {
    Log.d("SHORTTOFLOAT","INSIDE SHORTTOFLOAT");
    float[] converted = new float[audio.length];
    for (int i = 0; i < converted.length; i++) {
        // [-32768, 32767] -> [-1, 1): divide by the 16-bit magnitude.
        converted[i] = audio[i] / 32768f; /* default range for Android PCM audio buffers */
    }
    return converted;
}
Trying to implement bandpass filter under "SaveRecording" Method
//BandPass bandpass = new BandPass(19000,2000,44100);
Since I am trying to implement a range of 18k to 20k, I input the above values to the bandpass filter.
BandPass bandpass = new BandPass(50,2,44100); (This is just to test if the frequency has any changes since 18k-20k is not within human range)
int [] freqR = {FREQUENCY_RIGHT};
int [] freqL = {FREQUENCY_LEFT};
float[] testL = shortToFloat(freqR);
float [] testR = shortToFloat(freqL);
bandpass.process(testL,testR);
bandpass.printCoeff();
Since I am recording in STEREO, I am using the
public final synchronized void process(float[] sigLeft, float[] sigRight) {} found in the IIRFilter.java class.
However, I am not hearing any differences even though I implemented the above method. What am I doing wrong? Could anyone advise/help me?
Thank You so much! Terribly new in this signal processing. Any tips/hints on how to progress is much appreciated!!!
Updated
Since I have to output the .wav file with the filtered sound signal, I thought the way to do it was to put the BandPass filter inside the "startRecording" method; however, it is not working. What am I doing wrong?
private void startRecording() {
int count = 0;
recorder = new AudioRecord(MediaRecorder.AudioSource.CAMCORDER,
RECORDER_SAMPLERATE, RECORDER_CHANNELS,
RECORDER_AUDIO_ENCODING, bufferSize);
AudioManager am = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
am.setStreamVolume(AudioManager.STREAM_MUSIC, am.getStreamMaxVolume(AudioManager.STREAM_MUSIC), 0);
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
(int) SAMPLE_RATE, PLAY_CHANNELS,
AudioFormat.ENCODING_PCM_16BIT, byteBufferFinal.length,
AudioTrack.MODE_STATIC);
BandPass bandpass = new BandPass(19000,2000,44100);
float[][] signals = deinterleaveData(byteToFloat(byteBufferFinal), 2);
bandpass.process(signals[0], signals[1]);
audioTrack.write(interleaveData(signals), 0, count, WRITE_NON_BLOCKING);
audioTrack.play();
//audioTrack.write(byteBufferFinal, 0, byteBufferFinal.length); //Original
audioTrack.setPlaybackRate(RECORDER_SAMPLERATE);
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
#Override
public void run() {
try {
writeAudioDataToFile();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}, "AudioRecorder Thread");
recordingThread.start();
}
Updated (14 March 2016)
This is the image of the output .wav file shown by Audacity:
May I ask if it is considered filtered?
What are the characteristics that I should look out for to ensure that it is filtered correctly.
The above image produce by press the black triangle
The above image graph produced by the Analyse->Plot Specturm
What about this graph? Does it show that the bandpass filter was implemented successfully? Thanks!
There is an issue with how you interface with the Bandpass.java source, probably resulting from what appears to be a bit of a misconception: IIR filters do not process frequencies, but they rather process time-domain data samples (which may exhibit oscillatory behavior).
As such you have to provide those time domain samples as inputs to Bandpass.process(). Since you are reading raw bytes from file, you will need to convert those bytes to float. You could do this with:
/**
 * Converts raw little-endian PCM bytes into signed 16-bit samples.
 *
 * BUG FIX: the original assembled the two bytes as an unsigned value in
 * [0, 65535], so negative samples were never sign-extended — a later
 * divide by 32768 then mapped them into [1, 2) instead of [-1, 0).
 * Casting the assembled 16-bit value to {@code short} restores the sign.
 *
 * @param rawdata raw PCM bytes, two per sample, little-endian (wave order)
 * @return one signed 16-bit sample per int, in [-32768, 32767]
 */
private int[] byteToShort(byte[] rawdata) {
    int[] converted = new int[rawdata.length / 2];
    for (int i = 0; i < converted.length; i++) {
        // Wave file data are stored in little-endian order.
        int lo = rawdata[2 * i];
        int hi = rawdata[2 * i + 1];
        converted[i] = (short) (((hi & 0xFF) << 8) | (lo & 0xFF));
    }
    return converted;
}
/** Decodes raw little-endian PCM bytes straight to normalised floats. */
private float[] byteToFloat(byte[] audio) {
    int[] samples = byteToShort(audio);
    return shortToFloat(samples);
}
Also for stereo wave files, you will get samples from the wave files which are interleaved. So you will also need to deinterleave the samples. This can be achieved in a similar fashion as you've done with deinterleaveData, except you will need a variant converting to float[][] instead of to double[][] since Bandpass.process expects float arrays.
You will of course also need to recombine the two channels back together after the filtering but before feeding the resulting filtered signals back to the audioTrack:
/**
 * Merges per-channel sample arrays back into one frame-interleaved buffer
 * (ch0, ch1, ch0, ch1, ... for stereo) — the inverse of de-interleaving.
 * All channel arrays are assumed to have the same length as the first.
 */
float[] interleaveData(float[][] data) {
    int channelCount = data.length;
    int frameCount = data[0].length;
    float[] merged = new float[frameCount * channelCount];
    int pos = 0;
    for (int frame = 0; frame < frameCount; frame++) {
        for (int ch = 0; ch < channelCount; ch++) {
            merged[pos++] = data[ch][frame];
        }
    }
    return merged;
}
You should now have the required building blocks to filter your audio:
// Example playback loop: read raw bytes, filter both stereo channels in the
// time domain, then hand the re-interleaved floats to the AudioTrack.
// Filter: centre 19 kHz, bandwidth 2 kHz, at a 44.1 kHz sample rate.
BandPass bandpass = new BandPass(19000,2000,44100);
while ((count = dataInputStream.read(data, 0, bufferSize)) > -1) {
    // decode and deinterleave stereo 16-bit per sample data
    float[][] signals = deinterleaveData(byteToFloat(data), 2);
    // filter data samples, updating the buffers with the filtered samples.
    bandpass.process(signals[0], signals[1]);
    // recombine signals for playback
    // NOTE(review): 'count' is a byte count, but the float overload of
    // write() expects a number of floats — confirm the intended length
    // against the AudioTrack API before relying on this.
    audioTrack.write(interleaveData(signals), 0, count, WRITE_NON_BLOCKING);
}
P.S.: as a final note, you are currently reading all the wave file as data samples, header included. This will result in a short noisy burst at the beginning. To avoid this, you should skip the header.
There could be a few problems...
The audio file you are trying to filter may not have much content that is noticeable to the ear in the bandpass range you are trying to filter. Try filtering a much larger range to verify the filter works. Try filtering most the song.
If you cannot get the above to work, then the filter is not being implemented properly. For a really simple filter, you could try Y[n] = X[n] - .5*X[n-1], which is a high-pass FIR filter with a zero at 0 Hz.
Good Luck!
I have a byte array which have a lines that contains bytes of microphone audio.
I want to encrypt it line by line, but I'm not sure the byte array is actually being encrypted, because when I play the audio it plays the clear, unencrypted voice.
This is the encryption method
if (AudioSystem.isLineSupported(Port.Info.SPEAKER)) {
try {
final AudioFormat format = getFormat();
DataLine.Info info = new DataLine.Info(
TargetDataLine.class, format);
final TargetDataLine line = (TargetDataLine) AudioSystem.getLine(info);
line.open(format);
line.start();
Runnable runner = new Runnable() {
int bufferSize = (int) format.getSampleRate()
* format.getFrameSize();
byte buffer[] = new byte[bufferSize];
public void run() {
out = new ByteArrayOutputStream();
running = true;
System.out.println("running..........");
int lines = 0;
try {
while (running) {
int count
= line.read(buffer, 0, buffer.length);
if (count > 0) {
out.write(buffer, 0, count);
lines++;
System.out.println("lines written =" + lines);
encryptBuff();
}
}
out.close();
} catch (IOException e) {
System.err.println("I/O problems: " + e);
System.exit(-1);
} catch (Exception ex) {
Logger.getLogger(SC.class.getName()).log(Level.SEVERE, null, ex);
}
}
};
Thread captureThread = new Thread(runner);
captureThread.start();
} catch (LineUnavailableException e) {
System.err.println("Line unavailable: " + e);
System.exit(-2);
}
and this is they playing audio code
/**
 * Plays back the bytes captured in the shared {@code out} buffer on a
 * background thread through a SourceDataLine.
 */
private void playAudio() {
    try {
        // Snapshot the captured bytes and expose them as an audio stream.
        byte audio[] = out.toByteArray();
        InputStream input
                = new ByteArrayInputStream(audio);
        final AudioFormat format = getFormat();
        // Frame count = byte count / bytes per frame.
        final AudioInputStream ais
                = new AudioInputStream(input, format,
                        audio.length / format.getFrameSize());
        DataLine.Info info = new DataLine.Info(
                SourceDataLine.class, format);
        final SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format);
        line.start();
        Runnable runner = new Runnable() {
            // One second of audio: frames per second * bytes per frame.
            int bufferSize = (int) format.getSampleRate()
                    * format.getFrameSize();
            byte buffer[] = new byte[bufferSize];

            public void run() {
                try {
                    int count;
                    while ((count = ais.read(
                            buffer, 0, buffer.length)) != -1) {
                        if (count > 0) {
                            line.write(buffer, 0, count);
                        }
                    }
                    // Let any queued audio finish before closing the line.
                    line.drain();
                    line.close();
                } catch (IOException e) {
                    System.err.println("I/O problems: " + e);
                    System.exit(-3);
                }
            }
        };
        Thread playThread = new Thread(runner);
        playThread.start();
    } catch (LineUnavailableException e) {
        System.err.println("Line unavailable: " + e);
        System.exit(-4);
    }
}
I have a question about Android: decoding mp3, mixing several audio streams, and encoding the result to m4a (AAC). For that I use JLayer for Android to decode the mp3, AudioTrack to play the song, and MediaCodec with MediaFormat to encode the PCM. The problem is that my output after encoding is too fast; for example, instead of a 5-second audio mix I get about 1.5 seconds. I think I am losing audio frames somewhere, but I am not sure about that. Thanks for any answers.
(The output file is roughly 25% faster than it should be.)
Decode mp3 code:
/**
 * Decodes an MP3 raw resource to PCM with JLayer, pushing each decoded
 * frame both into the shared mix buffer and to the AudioTrack.
 *
 * @param res application resources used to open the raw asset
 * @param resource raw resource id of the mp3 to decode
 * @throws BitstreamException if the MP3 bitstream is malformed
 * @throws DecoderException if a frame cannot be decoded
 * @throws IOException if the resource stream cannot be closed
 */
public void decodeMP3toPCM(Resources res, int resource) throws BitstreamException, DecoderException, IOException {
    // 1152 is used as the read-ahead size — presumably the samples-per-frame
    // of MPEG-1 Layer III; confirm against the JLayer documentation.
    InputStream inputStream = new BufferedInputStream(res.openRawResource(resource), 1152);
    Bitstream bitstream = new Bitstream(inputStream);
    Decoder decoder = new Decoder();
    boolean done = false;
    while (!done) {
        Header frameHeader = bitstream.readFrame();
        if (frameHeader == null) {
            // End of stream: no more MP3 frames to decode.
            done = true;
        } else {
            SampleBuffer output = (SampleBuffer) decoder.decodeFrame(frameHeader, bitstream);
            // Accumulate the total decoded duration in milliseconds.
            mTimeCount += frameHeader.ms_per_frame();
            short[] pcm = output.getBuffer();
            // Feed the mix buffer and the live playback track in parallel.
            mDataBuffer.addFrame(mViewId, pcm);
            mReadedFrames++;
            mAudioTrack.write(pcm, 0, pcm.length);
        }
        // closeFrame() must run after every readFrame(), including the last.
        bitstream.closeFrame();
    }
    inputStream.close();
}
encode:
/**
 * Encodes raw 16-bit PCM into an ADTS-framed AAC-LC stream written to a
 * file. Feed interleaved PCM chunks to {@link #offerEncoder(byte[])} and
 * call {@link #close()} when done.
 *
 * BUG FIX: the constructor previously created one codec via
 * createByCodecName("OMX.google.aac.encoder") and then unconditionally
 * overwrote it with createEncoderByType(), leaking the first codec
 * instance and making its try/catch pointless. A single codec is now
 * created.
 */
public class AudioEncoder {
    private MediaCodec mediaCodec;
    private BufferedOutputStream outputStream;
    private String mediaType = "audio/mp4a-latm";

    /**
     * Opens the output file and configures a 44.1 kHz stereo AAC-LC
     * encoder at 128 kbit/s.
     *
     * @param filePath destination path for the raw ADTS/AAC stream
     * @throws IOException if the encoder cannot be created
     */
    public AudioEncoder(String filePath) throws IOException {
        File f = new File(filePath);
        touch(f);
        try {
            outputStream = new BufferedOutputStream(new FileOutputStream(f));
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Create exactly one encoder instance for the AAC mime type.
        mediaCodec = MediaCodec.createEncoderByType(mediaType);
        final int kSampleRates[] = { 8000, 11025, 22050, 44100, 48000 };
        final int kBitRates[] = { 64000, 128000 };
        MediaFormat mediaFormat = MediaFormat.createAudioFormat(mediaType, kSampleRates[3], 2);
        mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 4608);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, kBitRates[1]);
        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
    }

    /** Stops and releases the codec, then flushes and closes the file. */
    public void close() {
        try {
            mediaCodec.stop();
            mediaCodec.release();
            outputStream.flush();
            outputStream.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Queues one PCM chunk for encoding and drains every finished AAC
     * packet, prefixing each with a 7-byte ADTS header before writing it
     * to the output file.
     *
     * @param input interleaved 16-bit PCM bytes
     */
    public synchronized void offerEncoder(byte[] input) {
        Log.e("synchro ", input.length + " is coming");
        try {
            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
            // -1 blocks until an input buffer is free.
            int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                inputBuffer.put(input);
                mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, 0, 0);
            }
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            while (outputBufferIndex >= 0) {
                int outBitsSize = bufferInfo.size;
                int outPacketSize = outBitsSize + 7; // 7 = ADTS header size
                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                outputBuffer.position(bufferInfo.offset);
                outputBuffer.limit(bufferInfo.offset + outBitsSize);
                // Header first, then the AAC payload after the 7 bytes.
                byte[] outData = new byte[outPacketSize];
                addADTStoPacket(outData, outPacketSize);
                outputBuffer.get(outData, 7, outBitsSize);
                outputBuffer.position(bufferInfo.offset);
                outputStream.write(outData, 0, outData.length);
                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            }
        } catch (Throwable t) {
            t.printStackTrace();
        }
    }

    /**
     * Fills in a 7-byte ADTS header for an AAC-LC, 44.1 kHz, stereo packet.
     *
     * @param packet destination buffer; its first 7 bytes are overwritten
     * @param packetLen total packet length including this header
     */
    private void addADTStoPacket(byte[] packet, int packetLen) {
        int profile = 2; // AAC LC
        int freqIdx = 4; // 44.1 kHz
        int chanCfg = 2; // stereo (CPE)
        packet[0] = (byte) 0xFF;
        packet[1] = (byte) 0xF9;
        packet[2] = (byte) (((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2));
        packet[3] = (byte) (((chanCfg & 3) << 6) + (packetLen >> 11));
        packet[4] = (byte) ((packetLen & 0x7FF) >> 3);
        packet[5] = (byte) (((packetLen & 7) << 5) + 0x1F);
        packet[6] = (byte) 0xFC;
    }

    /** Creates the file if it does not already exist. */
    public void touch(File f) {
        try {
            if (!f.exists())
                f.createNewFile();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
I have a client and server application.
The server requests an audio stream and the client handles well.
The client proceeds to setup an AudioFormat(Note: settings are the same on both ends) and TargetDataLine. It Then writes the data to a Socket output stream using a ByteArrayOutput Stream.
The server receives the Data and reads it in a threaded method. During each buffer read it it saves to a AudioInputStream which is passed to a playSound method which is threaded and synchronized to proceed to play the sound.
When I make the playSound method non-threaded it works, but the playback is slightly glitchy.
Also i know having the play sound non threaded can cause sound frames to jam up
Any help is well appreciated, and any ways I can make this audio stream more efficient and fast is also welcomed.
Client:
private void captureAudio() throws CommandException {
Socket session = new Socket(host_, port_);
try {
final AudioFormat format = getFormat();
DataLine.Info info = new DataLine.Info(
TargetDataLine.class, format);
final TargetDataLine line = (TargetDataLine)
AudioSystem.getLine(info);
line.open(format);
line.start();
int bufferSize = (int)format.getSampleRate() * format.getFrameSize();
byte buffer[] = new byte[bufferSize];
running = true;
try {
while (running) {
int count = line.read(buffer, 0, buffer.length);
if (count > 0) {
BufferedOutputStream out_ = null;
out_ = new BufferedOutputStream(socket_.getOutputStream());
out_.write(buffer, 0, count);
out_.flush();
}
}
out_.close();
line.close();
} catch (IOException e) {
throw new CommandException("I/O problems: " + e,Command.TRANSFER_ERROR);
}
} catch (LineUnavailableException e) {
throw new CommandException("Line unavailable: " + e,Command.ERROR);
}
else {
throw new CommandException("Unable to Connect to Server",Command.CONNECTION_ERROR);
}
}
/** Audio format used on both ends: 16 kHz, 16-bit, stereo, signed, big-endian. */
private AudioFormat getFormat() {
    final float sampleRate = 16000;
    final int sampleSizeInBits = 16;
    final int channels = 2;
    return new AudioFormat(sampleRate, sampleSizeInBits, channels,
            /* signed= */ true, /* bigEndian= */ true);
}
Server:
public void readSocket(final Socket socket) {
new Thread() {
#Override
public void run() {
InputStream input;
try {
input = socket.getInputStream();
final AudioFormat format = getFormat();
int bufferSize = (int)format.getSampleRate() * format.getFrameSize();
byte buffer[] = new byte[bufferSize];
int bytesRead;
while (((bytesRead = input.read(buffer, 0, bufferSize)) != -1 ) ) {
if (bytesRead > 0) {
play(new AudioInputStream(input, format, buffer.length / format.getFrameSize()));
}
}
socket.close();
} catch (Exception ex) {
}
}
}.start();
}
/** Audio format used on both ends: 16 kHz, 16-bit, stereo, signed, big-endian. */
private AudioFormat getFormat() {
    final float sampleRate = 16000;
    final int sampleSizeInBits = 16;
    final int channels = 2;
    return new AudioFormat(sampleRate, sampleSizeInBits, channels,
            /* signed= */ true, /* bigEndian= */ true);
}
private synchronized void play(final AudioInputStream ais) {
new Thread() {
#Override
public void run() {
try {
final AudioFormat format = getFormat();
DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
SourceDataLine line = (SourceDataLine)AudioSystem.getLine(info);
line.open(format);
line.start();
int bufferSize = (int) format.getSampleRate()
* format.getFrameSize();
byte buffer[] = new byte[bufferSize];
int count;
while ((count = ais.read(buffer, 0, buffer.length)) != -1) {
if (count > 0) {
line.write(buffer, 0, count);
}
}
line.drain();
line.close();
ais.close();
} catch (LineUnavailableException ex) {
} catch (IOException ex) {
}
}
}.start();
}