I am new to Android. I am developing an application that uses AudioRecord and AudioTrack for recording and playback. In addition to this, I am trying to read and display the amplitude values of the recorded sound.
Here is my class:
public class MainActivity extends AppCompatActivity {
private static final String TAG = "RecordSound";
private int BufferSize;
byte[] buffer = new byte[BufferSize];
/* AudioRecord and AudioTrack Object */
private AudioRecord record = null;
private AudioTrack track = null;
/* Audio Configuration */
private int sampleRate = 8000;
private int channelConfig = AudioFormat.CHANNEL_IN_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
private boolean isRecording = true;
private Thread recordingThread = null;
private double lastLevel = 0;
private Thread thread;
private static final int SAMPLE_DELAY = 75;
MediaPlayer mediaPlayer;
RelativeLayout layout;
private ImageView tankHolder;
TextView text;
Button play;
String filename;
final Handler mHandler = new Handler();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
tankHolder = (ImageView)findViewById(R.id.tankHolder);
text=(TextView)findViewById(R.id.result_text);
layout=(RelativeLayout)findViewById(R.id.linear);
play=(Button)findViewById(R.id.btnStartPlay);
try {
BufferSize = AudioRecord.getMinBufferSize(sampleRate,
channelConfig, audioFormat);
}catch (Exception e){
e.printStackTrace();
}
}
@Override
protected void onResume() {
super.onResume();
//calling MediaPlayer for alarmtone when the tank is almost full
mediaPlayer = MediaPlayer.create(this, R.raw.tone);
startRecording();
play.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
stopRecording();
} catch (IOException e) {
e.printStackTrace();
}
}
});
}
private void stopRecording() throws IOException{
if (null != record) {
isRecording = false;
record.stop();
record.release();
record = null;
recordingThread = null;
mHandler.post(runRecord);
mediaPlayer.stop();
mediaPlayer.release();
}
}
final Runnable runRecord=new Runnable() {
@Override
public void run() {
text.setText("working");
try {
PlayShortAudioFileViaAudioTrack("/sdcard/recorded.pcm");
} catch (IOException e) {
e.printStackTrace();
}
}
};
private void PlayShortAudioFileViaAudioTrack(String filePath) throws IOException{
// We keep temporarily filePath globally as we have only two sample sounds now..
if (filePath==null)
return;
//Reading the file..
File file = new File(filePath); // for ex. path= "/sdcard/samplesound.pcm" or "/sdcard/samplesound.wav"
byte[] byteData = new byte[(int) file.length()];
Log.d(TAG, (int) file.length()+"");
FileInputStream in = null;
try {
in = new FileInputStream( file );
in.read( byteData );
in.close();
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// Set and push to audio track..
int intSize = android.media.AudioTrack.getMinBufferSize(8000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
Log.d(TAG, intSize+"");
AudioTrack at = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, intSize, AudioTrack.MODE_STREAM);
at.play();
// Write the byte array to the track
at.write(byteData, 0, byteData.length);
at.stop();
at.release();
}
private void startRecording()
{
record = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate,
channelConfig, audioFormat, BufferSize);
if (AudioRecord.STATE_INITIALIZED == record.getState())
record.startRecording();
isRecording = true;
/* Run a thread for Recording */
recordingThread = new Thread(new Runnable() {
@Override
public void run() {
writeAudioDataToFile();
}
},"AudioRecorder Thread");
recordingThread.start();
thread = new Thread(new Runnable() {
public void run() {
while(thread != null && !thread.isInterrupted()){
//Let's make the thread sleep for the approximate sampling time
try{
Thread.sleep(SAMPLE_DELAY);}catch(InterruptedException ie){ie.printStackTrace();}
readAudioBuffer();//After this call we can get the last value assigned to the lastLevel variable
runOnUiThread(new Runnable() {
@Override
public void run() {
if(lastLevel >= 7 && lastLevel <= 15){
text.setTextColor(getResources().getColor(R.color.Blue));
layout.setBackgroundColor(Color.RED);
tankHolder.setImageResource(R.drawable.ftank);
if (!mediaPlayer.isPlaying()){
mediaPlayer.start();
}
text.setText(String.valueOf(lastLevel));
}else
if(lastLevel > 50 && lastLevel <= 100){
text.setText(String.valueOf(lastLevel));
text.setTextColor(getResources().getColor(R.color.Orange));
layout.setBackgroundColor(Color.WHITE);
tankHolder.setImageResource(R.drawable.htank);
if (mediaPlayer.isPlaying()){
mediaPlayer.pause();
}
}else
if(lastLevel > 100 && lastLevel <= 170){
text.setText(String.valueOf(lastLevel));
text.setTextColor(getResources().getColor(R.color.Yellow));
layout.setBackgroundColor(Color.WHITE);
tankHolder.setImageResource(R.drawable.qtank);
if (mediaPlayer.isPlaying()){
mediaPlayer.pause();
}
}
if(lastLevel > 170){
text.setText(String.valueOf(lastLevel));
text.setTextColor(getResources().getColor(R.color.Blue));
layout.setBackgroundColor(Color.WHITE);
tankHolder.setImageResource(R.drawable.qtank);
if (mediaPlayer.isPlaying()){
mediaPlayer.pause();
}
}
}
});
}
}
},"AudioRecorder Thread");
thread.start();
}
private void writeAudioDataToFile()
{
byte data[] = new byte[BufferSize];
/* Record audio to following file */
String filePath = "/sdcard/recorded.pcm";
filename = Environment.getExternalStorageDirectory().getAbsolutePath();
filename +="/audiofile.pcm";
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
int read_bytes = 0;
if(null != os){
while(isRecording)
{
read_bytes = record.read(data, 0, BufferSize);
if(AudioRecord.ERROR_INVALID_OPERATION != read_bytes){
try {
os.write(data);
} catch (IOException e) {
e.printStackTrace();
}
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private void readAudioBuffer() {
try {
short[] buffer = new short[BufferSize];
int bufferReadResult = 1;
if (record != null) {
// Sense the voice...
bufferReadResult = record.read(buffer, 0, BufferSize);
double sumLevel = 0;
for (int i = 0; i < bufferReadResult; i++) {
sumLevel += buffer[i];
}
lastLevel = Math.abs((sumLevel / bufferReadResult));
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
If I run it as shown, the runOnUiThread(new Runnable() { ... } block works, but on pressing the play button the recorded audio does not play.
If I remove the runOnUiThread(new Runnable() { ... } block, then on pressing the play button the recorded audio plays without any problem.
So I think the problem is with the threads. I tried to implement a Handler, but I still can't get it to work.
For the audio recording I referred to http://www.java2s.com/Code/Android/Media/UsingAudioRecord.htm, and to https://www.tutorialspoint.com/android/android_audio_capture.htm for a more detailed explanation.
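One thing worth checking in the code above: mHandler.post(runRecord) makes PlayShortAudioFileViaAudioTrack() run on the main thread, where the blocking at.write(...) call competes with the runOnUiThread(...) updates that the level-monitoring thread keeps posting. A minimal sketch (not a complete fix) that keeps the TextView update on the UI thread but moves the blocking playback onto its own thread, reusing runRecord and the same file path as above:

final Runnable runRecord = new Runnable() {
    @Override
    public void run() {
        text.setText("working");                 // UI update stays on the main thread
        new Thread(new Runnable() {              // blocking AudioTrack playback moves off the main thread
            @Override
            public void run() {
                try {
                    PlayShortAudioFileViaAudioTrack("/sdcard/recorded.pcm");
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }, "AudioTrack playback").start();
    }
};

Stopping the level-monitoring thread (for example by interrupting thread inside stopRecording()) before starting playback would also be worth trying, but that part is left out of the sketch.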
Related
I want to read my recorded WAV file and use it for signal processing with the following processing method:
//SIGNAL PROCESSING
private void proccessSignal(double[] signalIN){
double samplefreq = 44100;
double[] signal = signalIN;
Bessel bandpassfilter = new Bessel(signal,samplefreq);
double[] filteredSignal = bandpassfilter.lowPassFilter(1,150);
Bessel newsignalfiltered = new Bessel(filteredSignal,samplefreq);
double[] needsAmplifianceSignal = newsignalfiltered.bandPassFilter(1,200,500);
double[] amplifiedSignal = needsAmplifianceSignal;
for (int i = 0; i < needsAmplifianceSignal.length; i++){
amplifiedSignal[i]=amplifiedSignal[i]*1000;
amplifiedSignal[i]=amplifiedSignal[i]*amplifiedSignal[i];
}
Hilbert h = new Hilbert(amplifiedSignal);
h.hilbertTransform();
double[][] analytical_signal = h.getOutput();
Log.d("Endsignal", "proccessSignal: "+analytical_signal);
double threshold = 0.0052;
int apneaCount = 0;
for (int i = 0; i < analytical_signal.length; i++)
{
for (int j = 0; j < analytical_signal[i].length; j++){
if (threshold<=analytical_signal[i][j]){
}else {
apneaCount++;
if (apneaCount>=10){
apneaview.setText("YOU HAVE APNEA, CONTACT YOUR DOCTOR");
break;
}
}
}
}
if (apneaCount < 10){
apneaview.setText("YOU DO NOT HAVE APNEA");
}
}
The recording and everything else works, but it looks like the signal is always null, so it never gets to my processing method. Here is my Logcat output with more detailed information about this issue:
W/System.err: com.github.psambit9791.wavfile.WavFileException: Invalid Wav Header data, incorrect riff chunk ID
W/System.err: at com.github.psambit9791.wavfile.WavFile.openWavFile(WavFile.java:257)
W/System.err: at com.github.psambit9791.jdsp.io.Wav.readWav(Wav.java:70)
W/System.err: at com.example.appnea.DetailedStat.onCreate(DetailedStat.java:82)
And here is the full activity for better understanding:
// TEST
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_detailed_stat);
Intent receivedFromList = getIntent();
name = receivedFromList.getStringExtra("recordName");
path = receivedFromList.getStringExtra("recordPath");
nameview = findViewById(R.id.NAME);
pathview = findViewById(R.id.PATH);
apneaview = findViewById(R.id.APNEA);
nameview.setText("Record date: " + name);
pathview.setText("Record path: " + path);
apneaview.setText("INITIALISING DATA");
File audiofile = new File(path);
double[] signal = new double[0];
try {
String mMime = "audio/3gpp";
MediaCodec codec = MediaCodec.createDecoderByType(mMime);
/**
*
*
*/
Wav objRead1 = new Wav();
objRead1.readWav(path);
Hashtable<String, Long> propsOut = objRead1.getProperties(); // Gets the WAV file properties
double[][] signal1 = objRead1.getData("int"); // Can be 'int', 'long' 'double'
try {
loadFFMpegLibrary();
} catch (FFmpegNotSupportedException e) {
e.printStackTrace();
}
MediaExtractor mx = new MediaExtractor();
try {
mx.setDataSource(path);
} catch (IOException e) {
e.printStackTrace();
}
/**
* Finals
*/
MediaFormat mMediaFormat = mx.getTrackFormat(0);
byte[] bufbytes = new byte[(int) audiofile.length()];
BufferedInputStream buf = new BufferedInputStream(new FileInputStream(audiofile));
buf.read(bufbytes, 0, bufbytes.length);
buf.close();
mMediaFormat.setByteBuffer("3gp", ByteBuffer.wrap(bufbytes));
codec.setCallback(new MediaCodec.Callback() {
@Override
public void onInputBufferAvailable(@NonNull MediaCodec codec, int index) {
}
@Override
public void onOutputBufferAvailable(@NonNull MediaCodec codec, int index, @NonNull MediaCodec.BufferInfo info) {
}
@Override
public void onError(@NonNull MediaCodec codec, @NonNull MediaCodec.CodecException e) {
}
@Override
public void onOutputFormatChanged(@NonNull MediaCodec codec, @NonNull MediaFormat format) {
}
});
codec.configure(mMediaFormat, null, null, 0);
codec.start();
new Thread(() -> {
MediaCodec.BufferInfo buf_info = new MediaCodec.BufferInfo();
int outputBufferIndex = codec.dequeueOutputBuffer(buf_info, 0);
byte[] pcm = new byte[buf_info.size];
ByteBuffer[] mOutputBuffers = new ByteBuffer[buf_info.size];
mOutputBuffers[outputBufferIndex].get(pcm, 0, buf_info.size);
}).start();
sampleDir = new File(Environment.getExternalStorageDirectory().getAbsolutePath(), "/OfficeRecordings/");
if (!sampleDir.exists()) {
sampleDir.mkdirs();
}
outputFile = sampleDir+"/"+"sample_record.3gp";
finalFile = sampleDir+"/"+"final_record.wav";
final String[] cmd = new String[]{"-y", "-i", outputFile, finalFile};
execFFmpegBinary(cmd);
} catch (IOException | WavFileException e) {
e.printStackTrace();
}
Log.d("AUDIO", "onCreate: AUDIO FILE EXIST?" + audiofile.exists());
//proccessSignal(signal);
}
private void execFFmpegBinary(final String[] command) {
FFmpeg ffmpeg = FFmpeg.getInstance(this);
try {
ffmpeg.loadBinary(new LoadBinaryResponseHandler() {
@Override
public void onStart() {
Log.d("audio", "starting to load binary");
}
@Override
public void onFailure() {
Log.d("audio", "failed to load binary");
}
@Override
public void onSuccess() {
Log.d("audio", "loaded binary");
try {
ffmpeg.execute(command, new ExecuteBinaryResponseHandler() {
@Override
public void onStart() {
Log.d("audio", " starting to get audio " + "");
}
@Override
public void onProgress(String message) {
Log.d("audio", " progress getting audio from ");
}
@Override
public void onFailure(String message) {
Log.d("audio", " failed to get audio ");
}
@Override
public void onSuccess(String message) {
Log.d("audio", " success getting audio from video");
}
@Override
public void onFinish() {
}
});
} catch (FFmpegCommandAlreadyRunningException e) {
e.printStackTrace();
}
}
@Override
public void onFinish() {
}
});
} catch (FFmpegNotSupportedException e) {
// Handle if FFmpeg is not supported by device
}
}
// TRYING THE FFMPEG METHOD
public void loadFFMpegLibrary() throws FFmpegNotSupportedException {
if (fFmpeg == null)
fFmpeg = FFmpeg.getInstance(this);
fFmpeg.loadBinary(new FFmpegLoadBinaryResponseHandler() {
@Override
public void onFailure() {
Toast.makeText(getApplicationContext(), "Library failed to load", Toast.LENGTH_LONG).show();
}
@Override
public void onSuccess() {
Toast.makeText(getApplicationContext(), "Library loaded successfully", Toast.LENGTH_LONG).show();
}
@Override
public void onStart() {
}
@Override
public void onFinish() {
}
});
}
public void executeCommand(final String[] command) throws FFmpegCommandAlreadyRunningException {
fFmpeg.execute(command, new ExecuteBinaryResponseHandler() {
@Override
public void onSuccess(String message) {
}
@Override
public void onProgress(String message) {
}
@Override
public void onFailure(String message) {
}
@Override
public void onStart() {
}
@Override
public void onFinish() {
}
});
}
//CONVERTING TO DOUBLE ARRAY FROM BYTEARRAY
public static double[] toDoubleArray(byte[] byteArray){
int times = Double.SIZE / Byte.SIZE;
double[] doubles = new double[byteArray.length / times];
for(int i=0;i<doubles.length;i++){
doubles[i] = ByteBuffer.wrap(byteArray, i*times, times).getDouble();
}
return doubles;
}
private void copyFile(InputStream in, OutputStream out) throws IOException {
byte[] buffer = new byte[1024];
int read;
while((read = in.read(buffer)) != -1){
out.write(buffer, 0, read);
}
}
//SIGNAL PROCESSING
private void proccessSignal(double[] signalIN){
double samplefreq = 44100;
double[] signal = signalIN;
Bessel bandpassfilter = new Bessel(signal,samplefreq);
double[] filteredSignal = bandpassfilter.lowPassFilter(1,150);
Bessel newsignalfiltered = new Bessel(filteredSignal,samplefreq);
double[] needsAmplifianceSignal = newsignalfiltered.bandPassFilter(1,200,500);
double[] amplifiedSignal = needsAmplifianceSignal;
for (int i = 0; i < needsAmplifianceSignal.length; i++){
amplifiedSignal[i]=amplifiedSignal[i]*1000;
amplifiedSignal[i]=amplifiedSignal[i]*amplifiedSignal[i];
}
Hilbert h = new Hilbert(amplifiedSignal);
h.hilbertTransform();
double[][] analytical_signal = h.getOutput();
Log.d("Endsignal", "proccessSignal: "+analytical_signal);
double threshold = 0.0052;
int apneaCount = 0;
for (int i = 0; i < analytical_signal.length; i++)
{
for (int j = 0; j < analytical_signal[i].length; j++){
if (threshold<=analytical_signal[i][j]){
}else {
apneaCount++;
if (apneaCount>=10){
apneaview.setText("YOU HAVE APNEA, CONTACT YOUR DOCTOR");
break;
}
}
}
}
if (apneaCount < 10){
apneaview.setText("YOU DO NOT HAVE APNEA");
}
}
}
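A note on the exception above: the WavFileException says the file handed to Wav.readWav(path) does not start with a RIFF header, i.e. it is not a finished WAV file at the moment onCreate() reads it, while the FFmpeg conversion from sample_record.3gp to final_record.wav is only started asynchronously further down in the same method. A minimal sketch, assuming the same fFmpeg field and jDSP Wav class used above, of reading the WAV only after the conversion reports success:

private void convertThenRead(final String outputFile, final String finalFile) {
    final String[] cmd = new String[]{"-y", "-i", outputFile, finalFile};
    try {
        fFmpeg.execute(cmd, new ExecuteBinaryResponseHandler() {
            @Override
            public void onSuccess(String message) {
                try {
                    // only at this point does final_record.wav exist with a valid RIFF header
                    Wav objRead = new Wav();
                    objRead.readWav(finalFile);
                    double[][] data = objRead.getData("int");
                    // flatten/select one channel here before handing it to proccessSignal(...)
                } catch (IOException | WavFileException e) {
                    e.printStackTrace();
                }
            }
            @Override
            public void onFailure(String message) {
                Log.d("audio", "conversion failed: " + message);
            }
        });
    } catch (FFmpegCommandAlreadyRunningException e) {
        e.printStackTrace();
    }
}

The orientation of the array returned by getData("int") depends on the jDSP version, so the channel-selection step is left as a comment rather than guessed here.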
Thank you so much for your time, I really appreciate it. Please let me know if you need any other information!
I want to play multiple speakers at the same time.
In my application I receive audio over the network (sent from the C# side), decode it with Opus, and then want to play the bytes. But right now I can only play one speaker.
My AudioPlayer class:
public class Player {
private static final String TAG = Player.class.getName();
private AudioTrack audioTrack;
private boolean isWorking;
public Player() {
try {
audioTrack = new AudioTrack(
AudioManager.STREAM_MUSIC,
AudioConsts.SAMPLERATE,
AudioConsts.NUM_CHANNELS == 1 ? AudioConsts.CHANNEL_OUT_MONO : AudioConsts.CHANNEL_OUT_STEREO,
AudioConsts.ENCODING_PCM_16BIT,
AudioConsts.GetPlayerBufferSize(),
AudioTrack.MODE_STREAM);
} catch (Exception e){
Log.e(TAG, e.toString());
}
}
public void play() {
new Thread(new Runnable() {
@Override
public void run() {
isWorking = true;
try {
audioTrack.play();
} catch (Exception e) {
Log.d(e.toString(), "AUDIO EXCEPTION");
return;
}
int bufferSize = AudioConsts.GetPlayerBufferSize();
while (isWorking){
int cursor = audioTrack.getPlaybackHeadPosition();
if (cursor > bufferSize){
cursor %= bufferSize;
audioTrack.flush();
audioTrack.setPlaybackHeadPosition(cursor);
}
}
}
}).start();
}
public void stopReading(){
if (!isWorking)
return;
audioTrack.release();
isWorking = false;
}
public void appendForPlayback(byte[] audioMessage, int size) {
if (size != 0){
int writen = audioTrack.write(audioMessage, 0, size);
if (writen != size) {
//audioTrack.release();
Log.d(TAG, "WTF");
}
}
}
}
I also attach my AudioPlayer's initialization:
@Override
public void onCreate() {
super.onCreate();
...
player = new Player();
player.play();
IntentFilter filter = new IntentFilter();
filter.addAction(ON_UNITY_AUDIO_MESSAGE_RECEIVED);
filter.addAction(AudioConsts.START_RECORDER);
filter.addAction(AudioConsts.STOP_RECORDER);
broadcastReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
if (intent.getAction().equals(ON_UNITY_AUDIO_MESSAGE_RECEIVED)) {
byte[] decryptedBytes = intent.getByteArrayExtra(UNITY_AUDIO_MESSAGE);
onUnityAudioReceivedFromNetwork(decryptedBytes);
} else if (intent.getAction().equals(AudioConsts.START_RECORDER)) {
incrementSessionCount();
recorder.startRecording();
} else if (intent.getAction().equals(AudioConsts.STOP_RECORDER)) {
recorder.stopRecording();
}
}
};
registerReceiver(broadcastReceiver, filter);
decodeMsg = new byte[AudioConsts.FRAME_SIZE * AudioConsts.ENCODING_PCM_16BIT];
opusDecoder = new OpusDecoder();
opusDecoder.init(AudioConsts.SAMPLERATE, AudioConsts.NUM_CHANNELS);
}
...
private void onUnityAudioReceivedFromNetwork(byte[] decryptedBytes) {
UnityAudioMessage audioMessage = UnityAudioMessage.fromBytesSharp(decryptedBytes);
if (audioMessage != null) {
try {
opusDecoder.decode(audioMessage.unityAudioMessage, decodeMsg, AudioConsts.FRAME_SIZE);
} catch (OpusError e) {
e.printStackTrace();
return;
}
player.appendForPlayback(decodeMsg, decodeMsg.length);
}
}
...
Is it possible to achieve simultaneous playback from multiple speakers?
I also tried to implement it with a HashMap of my players, but it still works like a single audio track.
I tried a lot of things; the solution that worked for me uses AsyncTask.
Attaching the updated Player class:
public class Player {
private static final String TAG = Player.class.getName();
private AudioTrack audioTrack;
private boolean isWorking;
public Player() {
try {
audioTrack = new AudioTrack(
new AudioAttributes.Builder()
.setUsage(AudioAttributes.USAGE_MEDIA)
.setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
.setLegacyStreamType(AudioManager.STREAM_MUSIC)
.build(),
new AudioFormat.Builder()
.setChannelMask(AudioFormat.CHANNEL_OUT_MONO)
.setEncoding(AudioFormat.ENCODING_PCM_16BIT)
.setSampleRate(AudioConsts.SAMPLERATE)
.build(),
AudioConsts.GetPlayerBufferSize(),
AudioTrack.MODE_STREAM,
AudioManager.AUDIO_SESSION_ID_GENERATE);
} catch (Exception e) {
Log.e(TAG, e.toString());
}
}
public void play() {
audioTrack.play();
}
public void stopReading() {
if (!isWorking)
return;
audioTrack.release();
isWorking = false;
}
public void appendForPlayback(byte[] audioMessage, int size) {
new Executor().doInBackground(audioMessage);
}
private class Executor extends AsyncTask<byte[], Void, Void> {
@Override
protected Void doInBackground(byte[]... bytes) {
for (byte[] audioMessage : bytes) {
if (audioMessage.length != 0) {
int writen = audioTrack.write(audioMessage, 0, audioMessage.length);
if (writen != audioMessage.length) {
Log.d(TAG, "WTF");
}
}
}
return null;
}
}}
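For completeness: with the builder-based constructor above, every Player owns its own AudioTrack created with AudioManager.AUDIO_SESSION_ID_GENERATE, so several instances can play at the same time. A rough usage sketch, assuming the network layer can tag each message with some per-speaker key (the speakerId parameter below is hypothetical, it is not part of the code above):

private final Map<String, Player> players = new HashMap<>();

private Player playerFor(String speakerId) {
    Player p = players.get(speakerId);
    if (p == null) {
        p = new Player();          // each remote speaker gets its own AudioTrack
        p.play();
        players.put(speakerId, p);
    }
    return p;
}

private void onAudioReceived(String speakerId, byte[] decodedPcm) {
    // decodedPcm is the Opus-decoded buffer for this particular speaker
    playerFor(speakerId).appendForPlayback(decodedPcm, decodedPcm.length);
}

One detail worth knowing about the updated Player: new Executor().doInBackground(audioMessage) runs synchronously on the calling thread; AsyncTask only moves work to a background thread when it is started through execute(...).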
I've searched all the related topics on Stack Overflow and I'm still stuck with it.
What I'm trying to do first is display the incoming Bluetooth data from the microcontroller in the TextViews strDlugosc and strLenght.
The next step after that will be to separate the data into the temperature and voltage TextViews.
I don't know why it isn't displaying anything...
Please help me.
//buttons
Button b_Onled1, b_Onled2, b_Offled1, b_Offled2;
TextView temp_1, temp_2, nap_1, nap_2, strLenght, strDlugosc;
String address = null;
private ProgressDialog progress;
BluetoothAdapter Bta = null;
BluetoothSocket btSocket = null;
private boolean isBtConnected = false;
final int handlerState = 0;
protected static final int SUCCESS_CONNECT = 0;
protected static final int MESSAGE_READ = 1;
//SPP UUID
static final UUID mojeUUID = UUID.fromString("00001101-0000-1000-8000-00805F9B34FB");
@Override
protected void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_kontrola__led);
Intent newint = getIntent();
address = newint.getStringExtra(MainActivity.EXTRA_ADDRESS);
//find the views
b_Onled1 = (Button) findViewById(R.id.b_Onled1);
b_Onled2 = (Button) findViewById(R.id.b_Onled2);
b_Offled1 = (Button) findViewById(R.id.b_Offled1);
b_Offled2 = (Button) findViewById(R.id.b_Offled2);
temp_1 = (TextView) findViewById(R.id.temp_1);
temp_2 = (TextView) findViewById(R.id.temp_2);
nap_1 = (TextView) findViewById(R.id.nap_1);
nap_2 = (TextView) findViewById(R.id.nap_2);
strLenght = (TextView) findViewById(R.id.strLenght);
strDlugosc = (TextView) findViewById(R.id.strDlugosc);
//call the class to connect
new PolaczBT().execute();
//ONCLICK COMMANDS
b_Onled1.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
wlaczLED1();
}
});
b_Onled2.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
wlaczLED2();
}
});
b_Offled1.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
wylaczLED1();
}
});
b_Offled2.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
wylaczLED2();
}
});
}
Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case MESSAGE_READ:
ConnectThread connectthread = new ConnectThread(btSocket);
connectthread.start();
final byte[] readBuf = (byte[]) msg.obj;
final String readMessage = new String(readBuf, 0, msg.arg1);
strLenght.setText(readMessage);
break;
}
super.handleMessage(msg);
}
};
private void Disconnect() {
if (btSocket != null) //if the socket is in use
{
try {
btSocket.close(); //close the connection
} catch (IOException e) {
msg("ERROR");
}
}
finish(); // go back to the first layout
}
private void wylaczLED1() {
if (btSocket != null) {
try {
btSocket.getOutputStream().write("01D0$".toString().getBytes());
} catch (IOException e) {
msg("ERROR");
}
}
}
private void wylaczLED2() {
if (btSocket != null) {
try {
btSocket.getOutputStream().write("02D0$".toString().getBytes());
} catch (IOException e) {
msg("ERROR");
}
}
}
private void wlaczLED1() {
if (btSocket != null) {
try {
btSocket.getOutputStream().write("01D1$".toString().getBytes());
} catch (IOException e) {
msg("ERROR");
}
}
}
private void wlaczLED2() {
if (btSocket != null) {
try {
btSocket.getOutputStream().write("02D1$".toString().getBytes());
} catch (IOException e) {
msg("ERROR");
}
}
}
//************************************************************************
//helper method for showing a TOAST**************************************
//************************************************************************
private void msg(String s) {
Toast.makeText(getApplicationContext(), s, Toast.LENGTH_LONG).show();
}
//*************************************************************************
//BLUETOOTH CONNECTION****************************************************
//*************************************************************************
private class PolaczBT extends AsyncTask<Void, Void, Void> {
private boolean ConnectSuccess = true;
@Override
protected void onPreExecute() {
progress = ProgressDialog.show(Kontrola_Led.this, "Connecting...", "Please wait!");
}
@Override
protected Void doInBackground(Void... devices) {
try {
if (btSocket == null || !isBtConnected) {
Bta = BluetoothAdapter.getDefaultAdapter();
BluetoothDevice dyspozycyjnosc = Bta.getRemoteDevice(address);
btSocket = dyspozycyjnosc.createInsecureRfcommSocketToServiceRecord(mojeUUID);
BluetoothAdapter.getDefaultAdapter().cancelDiscovery();
btSocket.connect();
}
} catch (IOException e) {
ConnectSuccess = false;
}
return null;
}
@Override
protected void onPostExecute(Void result) {
super.onPostExecute(result);
if (!ConnectSuccess) {
msg("Blad polaczenia. Czy SPP BLUETOOTH dziala? Ponow probe. ");
finish();
} else {
msg("Connected.");
isBtConnected = true;
}
progress.dismiss();
}
}
StringBuilder sb = new StringBuilder();
class ConnectThread extends Thread {
final BluetoothSocket mmSocket;
final InputStream mmInStream;
final OutputStream mmOutStream;
public ConnectThread(BluetoothSocket socket) {
mmSocket = socket;
InputStream tmpIn = null;
OutputStream tmpOut = null;
try {
tmpIn = socket.getInputStream();
tmpOut = socket.getOutputStream();
} catch (IOException e) {
}
mmInStream = tmpIn;
mmOutStream = tmpOut;
}
public void run() {
byte[] buffer = new byte[128];
int bytes;
while (true) {
try {
bytes = mmInStream.read(buffer);
String strIncom = new String(buffer, 0, bytes);
sb.append(strIncom);
int endOfLineIndex = sb.indexOf("\r\n");
if (endOfLineIndex > 0){
String sbprint = sb.substring(0, endOfLineIndex);
strDlugosc.setText(sbprint);
}
mHandler.obtainMessage(Kontrola_Led.MESSAGE_READ, bytes, -1, buffer).sendToTarget();
} catch (IOException e) {
break;
}
}
}
}
}
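One thing that stands out in the activity above: strDlugosc.setText(sbprint) is called directly inside ConnectThread.run(), i.e. from a background thread, which Android does not allow, and the MESSAGE_READ branch of mHandler starts a brand-new ConnectThread for every message it receives. A minimal sketch, reusing the mHandler, MESSAGE_READ and StringBuilder already defined above, that keeps every TextView update on the main thread:

public void run() {
    byte[] buffer = new byte[128];
    int bytes;
    while (true) {
        try {
            bytes = mmInStream.read(buffer);
            sb.append(new String(buffer, 0, bytes));
            int endOfLineIndex = sb.indexOf("\r\n");
            if (endOfLineIndex > 0) {
                String sbprint = sb.substring(0, endOfLineIndex);
                sb.delete(0, endOfLineIndex + 2);   // drop the consumed line from the buffer
                // hand the complete line to the UI thread instead of touching the TextView here
                mHandler.obtainMessage(Kontrola_Led.MESSAGE_READ, sbprint).sendToTarget();
            }
        } catch (IOException e) {
            break;
        }
    }
}

and in handleMessage() only update the views, without starting another ConnectThread:

case MESSAGE_READ:
    String readMessage = (String) msg.obj;
    strDlugosc.setText(readMessage);   // safe here: handleMessage runs on the main thread
    strLenght.setText(String.valueOf(readMessage.length()));   // assuming strLenght should show the length
    break;

Splitting the line into the temperature and voltage TextViews afterwards depends on the exact format the microcontroller sends, so that part is left out here.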
I am new to Android programming. I am trying to connect two Android phones through Bluetooth. One phone runs Android 4.4.4 and the other Android 6, and I am using Android Studio on Windows 7.
Code:
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
Button openButton = (Button)findViewById(R.id.open);
Button sendButton = (Button)findViewById(R.id.send);
Button closeButton = (Button)findViewById(R.id.close);
myLabel = (TextView)findViewById(R.id.label);
myTextbox = (EditText)findViewById(R.id.entry);
//Open Button
openButton.setOnClickListener(new View.OnClickListener()
{
public void onClick(View v)
{
try
{
findBT();
openBT();
}
catch (IOException ex) { }
}
});
//Send Button
sendButton.setOnClickListener(new View.OnClickListener()
{
public void onClick(View v)
{
try
{
sendData();
}
catch (IOException ex) { }
}
});
//Close button
closeButton.setOnClickListener(new View.OnClickListener()
{
public void onClick(View v)
{
try
{
closeBT();
}
catch (IOException ex) { }
}
});
}
void findBT()
{
mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
if(mBluetoothAdapter == null)
{
myLabel.setText("No bluetooth adapter available");
}
if(!mBluetoothAdapter.isEnabled())
{
Intent enableBluetooth = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
startActivityForResult(enableBluetooth, 0);
}
Set<BluetoothDevice> pairedDevices = mBluetoothAdapter.getBondedDevices();
if(pairedDevices.size() > 0)
{
for(BluetoothDevice device : pairedDevices)
{
if(device.getName().equals("motoG"))
{
mmDevice = device;
break;
}
}
}
myLabel.setText("Bluetooth Device Found");
}
void openBT() throws IOException
{
UUID uuid = UUID.fromString("00001101-0000-1000-8000-00805F9B34FB"); //Standard SerialPortService ID
mmSocket = mmDevice.createRfcommSocketToServiceRecord(uuid);
mBluetoothAdapter.cancelDiscovery();
mmSocket.connect();
mmOutputStream = mmSocket.getOutputStream();
mmInputStream = mmSocket.getInputStream();
beginListenForData();
myLabel.setText("Bluetooth Opened");
}
void beginListenForData()
{
final Handler handler = new Handler();
final byte delimiter = 10; //This is the ASCII code for a newline character
stopWorker = false;
readBufferPosition = 0;
readBuffer = new byte[1024];
workerThread = new Thread(new Runnable()
{
public void run()
{
while(!Thread.currentThread().isInterrupted() && !stopWorker)
{
try
{
int bytesAvailable = mmInputStream.available();
if(bytesAvailable > 0)
{
byte[] packetBytes = new byte[bytesAvailable];
mmInputStream.read(packetBytes);
for(int i=0;i<bytesAvailable;i++)
{
byte b = packetBytes[i];
if(b == delimiter)
{
byte[] encodedBytes = new byte[readBufferPosition];
System.arraycopy(readBuffer, 0, encodedBytes, 0, encodedBytes.length);
final String data = new String(encodedBytes, "US-ASCII");
readBufferPosition = 0;
handler.post(new Runnable()
{
public void run()
{
myLabel.setText(data);
}
});
}
else
{
readBuffer[readBufferPosition++] = b;
}
}
}
}
catch (IOException ex)
{
stopWorker = true;
}
}
}
});
workerThread.start();
}
void sendData() throws IOException
{
String msg = myTextbox.getText().toString();
msg += "\n";
mmOutputStream.write(msg.getBytes());
myLabel.setText("Data Sent");
}
void closeBT() throws IOException
{
stopWorker = true;
mmOutputStream.close();
mmInputStream.close();
mmSocket.close();
myLabel.setText("Bluetooth Closed");
}
}
Error: in .connect() I am getting this exception:
read failed, socket might closed or timeout, read ret: -1.
getBluetoothService() called with no BluetoothManagerCallback
connect(), SocketState: INIT, mPfd: {ParcelFileDescriptor: FileDescriptor[55]}
I tried using an insecure RFCOMM socket; it didn't work. I re-paired the devices, but that didn't work. I removed all other paired devices from the phone; it still didn't work. I have looked around this website and tried many approaches, but nothing works. I hope someone can help me figure out the problem.
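For what it's worth, a workaround that often comes up for this exact "read failed, socket might closed or timeout, read ret: -1" message is to retry the connection through the hidden createRfcommSocket(int channel) method via reflection when the regular connect fails. Whether it helps depends on the devices involved, so treat the following as a sketch of openBT() with that fallback, not a guaranteed fix (channel 1 is assumed to be the SPP channel):

void openBT() throws IOException
{
    UUID uuid = UUID.fromString("00001101-0000-1000-8000-00805F9B34FB"); //Standard SerialPortService ID
    mmSocket = mmDevice.createRfcommSocketToServiceRecord(uuid);
    mBluetoothAdapter.cancelDiscovery();
    try
    {
        mmSocket.connect();
    }
    catch (IOException first)
    {
        try
        {
            // Fallback: hidden API, connect directly on RFCOMM channel 1 (assumption)
            mmSocket = (BluetoothSocket) mmDevice.getClass()
                    .getMethod("createRfcommSocket", int.class)
                    .invoke(mmDevice, 1);
            mmSocket.connect();
        }
        catch (Exception second)
        {
            throw new IOException("Fallback connect also failed", second);
        }
    }
    mmOutputStream = mmSocket.getOutputStream();
    mmInputStream = mmSocket.getInputStream();
    beginListenForData();
    myLabel.setText("Bluetooth Opened");
}

Also worth noting: this code is the client side only, so it can only connect if the other phone is listening with a BluetoothServerSocket for the same SPP UUID and accepts the connection.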
I developed a Shoutcast internet radio streaming service and I'm able to stream and play successfully.
The problem is: when I run my application, I can stream and play continuously for about half an hour; after that the stream stops (nothing plays). If I press play again, the stream continues, and again after some time I get a FileNotFoundException.
I logged the error after the stream stopped.
The error is:
java.io.FileNotFoundException: /data/data/com.torilt/cache/downloadingMediaFile430 (No such file or directory)
Can't find file. Android must have deleted it on a clean up
Getting Exception in setupplayer()
Source Code:
public class StreamingMediaPlayer extends Service {
final static public String AUDIO_MPEG = "audio/mpeg";
final static public String BITERATE_HEADER = "icy-br";
public int INTIAL_KB_BUFFER ;
private Handler handler;
//= 96*10/8
final public int BIT = 8;
final public int SECONDS = 60;
int bitrate = 56;
public File downloadingMediaFile;
final public String DOWNFILE = "downloadingMediaFile";
public Context context;
public int counter;
public int playedcounter;
public int preparecounter;
public MediaPlayer mp1;
public MediaPlayer mp2;
public boolean mp1prepared;
public boolean mp2prepared;
public boolean mp1preparing;
public boolean mp2preparing;
public boolean downloadingformp1;
public boolean downloadingformp2;
public boolean prepareState;
public String SONGURL = "";
// playing is "true" for mp1 and "false" for mp2
public boolean mp1playing;
public boolean started;
public boolean processHasStarted;
public boolean processHasPaused;
public boolean regularStream;
public BufferedInputStream stream;
public URL url;
public URLConnection urlConn;
public String station;
public String audiourl;
public Intent startingIntent = null;
public boolean stopping;
Thread preparringthread;
boolean waitingForPlayer;
// Setup all the variables
private void setupVars() {
counter = 0;
playedcounter = 0;
preparecounter = 0;
mp1 = new MediaPlayer();
mp2 = new MediaPlayer();
mp1prepared = false;
mp2prepared = false;
mp1preparing = false;
mp2preparing = false;
downloadingformp1 = false;
downloadingformp2 = false;
prepareState = true;
mp1playing = false;
started = false;
processHasStarted = false;
processHasPaused = true;
regularStream = false;
stream = null;
url = null;
urlConn = null;
station = null;
audiourl = null;
stopping = false;
preparringthread = null;
waitingForPlayer = false;
}
// This object will allow other processes to interact with our service
private final IStreamingMediaPlayer.Stub ourBinder = new IStreamingMediaPlayer.Stub() {
// String TAG = "IStreamingMediaPlayer.Stub";
public String getStation() {
// Log.d(TAG, "getStation");
return station;
}
public String getUrl() {
// Log.d(TAG, "getUrl");
return audiourl;
}
public boolean playing() {
// Log.d(TAG, "playing?");
return isPlaying();
}
public boolean pause() {
// Log.d(TAG, "playing?");
return isPause();
}
public void startAudio() {
// Log.d(TAG, "startAudio");
Runnable r = new Runnable() {
public void run() {
onStart(startingIntent, 0);
}
};
new Thread(r).start();
}
public void stopAudio() {
// Log.d(TAG, "stopAudio");
stop();
}
};
@Override
public void onCreate() {
super.onCreate();
context = this;
}
@Override
public void onStart(Intent intent, int startId) throws NullPointerException {
super.onStart(intent, startId);
// final String TAG = "StreamingMediaPlayer - onStart";
context = this;
setupVars();
if (intent.hasExtra("audiourl")) {
raiseThreadPriority();
processHasStarted = true;
processHasPaused = false;
audiourl = intent.getStringExtra("audiourl");
station = intent.getStringExtra("station");
downloadingMediaFile = new File(context.getCacheDir(), DOWNFILE+ counter);
downloadingMediaFile.deleteOnExit();
Runnable r = new Runnable() {
public void run() {
try {
startStreaming(audiourl);
} catch (IOException e) {
// Log.d(TAG, e.toString());
}
}
};
Thread t = new Thread(r);
t.start();
}
}
@Override
public void onDestroy() {
super.onDestroy();
mp1.stop();
mp2.stop();
}
@Override
public IBinder onBind(Intent intent) {
startingIntent = intent;
context = this;
return ourBinder;
}
@Override
public boolean onUnbind(Intent intent) {
super.onUnbind(intent);
stopSelf();
return true;
}
/**
* Progressively download the media to a temporary location and update the
* MediaPlayer as new content becomes available.
*/
public void startStreaming(final String mediaUrl) throws IOException {
try {
url = new URL(mediaUrl);
urlConn = (HttpURLConnection) url.openConnection();
urlConn.setReadTimeout(1000 * 20);
urlConn.setConnectTimeout(1000 * 5);
//The getContentType method is used by the getContent method to determine the type of the remote object; subclasses may find it convenient to override the getContentType method.
String ctype = urlConn.getContentType();
if (ctype == null) {
ctype = "";
} else {
ctype = ctype.toLowerCase();
}
if (ctype.contains(AUDIO_MPEG) || ctype.equals("")) {
String temp = urlConn.getHeaderField(BITERATE_HEADER);
if (temp != null) {
bitrate = new Integer(temp).intValue();
}
} else {
stopSelf();
return;
}
}
catch(NullPointerException ne)
{
}
catch (IOException ioe) {
// Log.e(TAG, "Could not connect to " + mediaUrl);
stopSelf();
return;
}
if (!regularStream) {
INTIAL_KB_BUFFER = bitrate * SECONDS / BIT;
Runnable r = new Runnable() {
public void run() {
try {
downloadAudioIncrement(mediaUrl);
Log.i("TAG12344444", "Unable to play");
stopSelf();
return;
} catch (IOException e) {
Log.i("TAG123", "Unable to initialize the MediaPlayer for Audio Url = "+mediaUrl, e);
stopSelf();
return;
} catch (NullPointerException e) {
stopSelf();
return;
}
}
};
Thread t = new Thread(r);
t.start();
}
}
/**
* Download the url stream to a temporary location and then call the
* setDataSource for that local file
*/
public void downloadAudioIncrement(String mediaUrl) throws IOException{
int bufsizeForDownload = 8 * 1024;
int bufsizeForfile = 64 * 1024;
stream = new BufferedInputStream(urlConn.getInputStream(),bufsizeForDownload);
Log.i("bufsize",Integer.toString(urlConn.getInputStream().available()));
try{
if(stream == null || stream.available() == 0){
stopSelf();
Log.i("unable to create ","stream null");
return;
}
}catch (NullPointerException e) {
stopSelf();
Log.i("return1","return1");
return;
}
BufferedOutputStream bout = new BufferedOutputStream(new FileOutputStream(downloadingMediaFile), bufsizeForfile);
byte buf[] = new byte[bufsizeForDownload];
int totalBytesRead = 0, totalKbRead = 0, numread = 0;
do {
if (bout == null) {
counter++;
downloadingMediaFile = new File(context.getCacheDir(), DOWNFILE+ counter);
downloadingMediaFile.deleteOnExit();
bout = new BufferedOutputStream(new FileOutputStream(downloadingMediaFile), bufsizeForfile);
}
try {
numread = stream.read(buf);
} catch (IOException e) {
Log.d("Downloadingfile", "Bad read. Let's quit.");
// stop();
Log.i("return2","return2");
stopSelf();
// return;
}
catch (NullPointerException e) {
// Let's get out of here
e.printStackTrace();
break;
}
if (numread < 0) {
bout.flush();
stopSelf();
Log.i("Bad read from stream", "Bad read from stream3");
if(stream == null){
urlConn = new URL(mediaUrl).openConnection();
urlConn.setConnectTimeout(1000 * 30);
urlConn.connect();
stream = new BufferedInputStream(urlConn.getInputStream(),bufsizeForDownload);
}else{
handler.post(new Runnable() {
public void run() {
Log.i("Bad read from stream", "Bad read from xyz");
context.stopService(startingIntent);
Log.i("return3","return3");
return;
}
});
}
} else if (numread >= 1) {
bout.write(buf, 0, numread);
totalBytesRead += numread;
totalKbRead += totalBytesRead / 1000;
}
if (totalKbRead >= INTIAL_KB_BUFFER && stopping != true) {
bout.flush();
bout.close();
bout = null;
if (started == false) {
Runnable r = new Runnable() {
public void run() {
setupplayer();
}
};
Thread t = new Thread(r);
t.start();
}
totalBytesRead = 0;
totalKbRead = 0;
}
if (stopping == true) {
stream = null;
}
} while (stream != null);
}
/** oncompletelister for media player **/
class listener implements MediaPlayer.OnCompletionListener {
public void onCompletion(MediaPlayer mp) {
waitingForPlayer = false;
long timeInMilli = Calendar.getInstance().getTime().getTime();
long timeToQuit = (1000 * 30) + timeInMilli; // add 30 seconds
if (mp1playing)
{
mp1.reset();
removefile();
mp1prepared = false;
// Log.d(TAG, "mp1 is Free.");
if (downloadingformp2) {
if (mp2preparing && stopping == false) {
waitingForPlayer = true;
}
while (mp2preparing && stopping == false) {
if (timeInMilli > timeToQuit) {
stopSelf();
}
timeInMilli = Calendar.getInstance().getTime().getTime();
}
}
} else {
mp2.reset();
removefile();
mp2prepared = false;
if (downloadingformp1) {
if (mp1preparing && stopping == false) {
waitingForPlayer = true;
}
while (mp1preparing && stopping == false) {
if (timeInMilli > timeToQuit) {
stopSelf();
}
timeInMilli = Calendar.getInstance().getTime().getTime();
}
}
}
if (waitingForPlayer == true) {
// we must have been waiting
waitingForPlayer = false;
}
if (stopping == false) {
if (mp1playing) {
mp2.start();
mp1playing = false;
Runnable r = new Runnable() {
public void run() {
setupplayer();
}
};
Thread t = new Thread(r);
t.start();
} else {
mp1.start();
mp1playing = true;
Runnable r = new Runnable() {
public void run() {
setupplayer();
}
};
Thread t = new Thread(r);
t.start();
}
}
}
}
/** OnPreparedListener for media player **/
class preparelistener implements MediaPlayer.OnPreparedListener {
public void onPrepared(MediaPlayer mp) {
if (prepareState) {
prepareState = false;
mp1preparing = false;
mp1prepared = true;
if (started == false) {
started = true;
mp1.start();
mp1playing = true;
Runnable r = new Runnable() {
public void run() {
setupplayer();
}
};
Thread t = new Thread(r);
t.start();
}
} else {
prepareState = true;
mp2preparing = false;
mp2prepared = true;
}
}
};
/**
* Set Up player(s)
*/
public void setupplayer() {
final String TAG = "setupplayer";
Runnable r = new Runnable() {
public void run() {
try {
if (!mp1preparing && !mp1prepared) {
while (true) {
downloadingformp1 = true;
if (started == false)
break;
if (counter > preparecounter)
break;
}
File f = new File(context.getCacheDir(), DOWNFILE+ preparecounter);
FileInputStream ins = new FileInputStream(f);
mp1.setDataSource(ins.getFD());
mp1.setAudioStreamType(AudioManager.STREAM_MUSIC);//playing for live streaming
mp1.setOnCompletionListener(new listener());
mp1.setOnPreparedListener(new preparelistener());
if (started == false || waitingForPlayer == true){
}
mp1.prepareAsync();// .prepare();
mp1preparing = true;
downloadingformp1 = false;
preparecounter++;
} else if (!mp2preparing && !mp2prepared) {
while (true) {
downloadingformp2 = true;
if (started == false)
break;
if (counter > preparecounter)
break;
}
File f = new File(context.getCacheDir(), DOWNFILE+ preparecounter);
FileInputStream ins = new FileInputStream(f);
mp2.setDataSource(ins.getFD());
mp2.setAudioStreamType(AudioManager.STREAM_MUSIC);
mp2.setOnCompletionListener(new listener());
mp2.setOnPreparedListener(new preparelistener());
mp2.prepareAsync();
mp2preparing = true;
downloadingformp2 = false;
preparecounter++;
// }
} else
Log.d(TAG, "No Media player is available to setup.");
return;
} catch (FileNotFoundException e) {
Log.e(TAG, e.toString());
Log.e(TAG,"Can't find file. Android must have deleted it on a clean up ");
stop();
return;
} catch (IllegalStateException e) {
Log.e(TAG, e.toString());
stop();
} catch (IOException e) {
Log.e(TAG, e.toString());
stop();
}
}
};
preparringthread = new Thread(r);
preparringthread.start();
try {
preparringthread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void removefile() {
File temp = new File(context.getCacheDir(), DOWNFILE + playedcounter);
temp.delete();
playedcounter++;
}
public boolean stop() {
final String TAG = "STOP";
stopping = true;
try {
if (mp1.isPlaying()){
if (!(stream == null)) {
Log.i("IN STOP", "MP1 is nill");
stopSelf();
}
mp1.stop();
}
if (mp2.isPlaying()){
Log.i("IN STOP", "MP2 is nill");
if (!(stream == null)){
stopSelf();
}
mp2.stop();
}
} catch (Exception e) {
Log.e(TAG, "error stopping players");
}
if (stream != null) {
try {
stream.close();
} catch (IOException e) {
Log.e(TAG, "error closing open connection");
}
}
stream = null;
processHasStarted = false;
processHasPaused = true;
if (preparringthread != null) {
preparringthread.interrupt();
}
stopSelf();
return true;
}
public boolean isPlaying() {
return processHasStarted;
}
public boolean isPause() {
return processHasPaused;
}
private void raiseThreadPriority() {
Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO);
}
}
You should call release() to free the resources. If they are not released, too many MediaPlayer instances may result in an exception.
Write this code in your Service.
Updated:
private void releaseMediaPlayer() {
if (mediaPlayer != null) {
if(mediaPlayer.isPlaying()) {
mediaPlayer.stop();
}
mediaPlayer.release();
mediaPlayer = null;
}
}
@Override
protected void onDestroy() {
super.onDestroy();
releaseMediaPlayer();
}
You can see this
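Applied to the streaming Service in the question, whose onDestroy() currently only calls mp1.stop() and mp2.stop(), that advice would look roughly like this (a sketch using the same mp1/mp2 fields):

@Override
public void onDestroy() {
    super.onDestroy();
    releasePlayer(mp1);
    releasePlayer(mp2);
    mp1 = null;
    mp2 = null;
}

private void releasePlayer(MediaPlayer mp) {
    if (mp != null) {
        if (mp.isPlaying()) {
            mp.stop();
        }
        mp.release();   // frees the native resources held by this MediaPlayer instance
    }
}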