Android Lame - convert from RAW to MP3 - java

I'm trying to convert 3gp to MP3 using LAME. I get a byte buffer after decoding the file with the built-in Android decoder. I then write it to a RAW file with an OutputStream and create the MP3 from that file using LAME. But it doesn't work: the resulting file contains only noise. Here is my code. Thanks in advance for any help.
public class MainActivityLame extends Activity {
static {
System.loadLibrary("mp3lame");
}
private native void initEncoder(int numChannels, int sampleRate, int bitRate, int mode, int quality);
private native void destroyEncoder();
private native int encodeFile(String sourcePath, String targetPath);
public static final int NUM_CHANNELS = 1;
public static final int SAMPLE_RATE = 44100;
public static final int BITRATE = 128;
public static final int MODE = 1;
public static final int QUALITY = 2;
private AudioRecord mRecorder;
private short[] mBuffer;
private File mRawFile;
private File mEncodedFile;
private TextView mTextViewFile;
private String strFile;
private AudioTrack audioTrack;
private int mChannels;
private ShortBuffer mDecodedSamples;
private ByteBuffer mDecodedBytes;
private int mFileSize;
private int bufferSize;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
mTextViewFile = (TextView) findViewById(R.id.textViewFile);
initRecorder();
initEncoder(NUM_CHANNELS, SAMPLE_RATE, BITRATE, MODE, QUALITY);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
public void onClickFolder(View view) throws IOException {
Intent questionIntent = new Intent(MainActivityLame.this, MyListActivity.class);
startActivityForResult(questionIntent, 1);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == RESULT_OK) {
String itemName = data.getStringExtra(MyListActivity.ITEM);
mTextViewFile.setText(itemName);
} else {
mTextViewFile.setText("");
}
}
@Override
public void onDestroy() {
mRecorder.release();
destroyEncoder();
super.onDestroy();
}
private void initRecorder() {
bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
mBuffer = new short[bufferSize];
mRecorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize);
}
private File getFile(final String suffix) {
Time time = new Time();
time.setToNow();
return new File(Environment.getExternalStorageDirectory(), time.format("%Y%m%d%H%M%S") + "." + suffix);
}
public void onClickExample(View view) {
strFile = mTextViewFile.getText().toString().trim();
MediaExtractor extractor = new MediaExtractor();
MediaFormat format = null;
extractor.setDataSource(strFile);
int numTracks = extractor.getTrackCount();
for (int i = 0; i < numTracks; ++i) {
format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("audio/")) {
extractor.selectTrack(i);
break;
}
}
MediaCodec codec = MediaCodec.createDecoderByType(format.getString(MediaFormat.KEY_MIME));
codec.configure(format, null, null, 0);
codec.start();
int decodedSamplesSize = 0;
byte[] decodedSamples = null;
ByteBuffer[] inputBuffers = codec.getInputBuffers();
ByteBuffer[] outputBuffers = codec.getOutputBuffers();
int sample_size;
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
long presentation_time;
int tot_size_read = 0;
boolean done_reading = false;
mDecodedBytes = ByteBuffer.allocate(1 << 20);
while (true) {
int inputBufferId = codec.dequeueInputBuffer(100);
if (!done_reading && inputBufferId >= 0) {
sample_size = extractor.readSampleData(inputBuffers[inputBufferId], 0);
if (sample_size < 0) {
codec.queueInputBuffer(inputBufferId, 0, 0, -1, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
done_reading = true;
} else {
presentation_time = extractor.getSampleTime();
codec.queueInputBuffer(inputBufferId, 0, sample_size, presentation_time, 0);
extractor.advance();
tot_size_read += sample_size;
}
}
int outputBufferId = codec.dequeueOutputBuffer(info, 100);
if (outputBufferId >= 0 && info.size > 0) {
if (decodedSamplesSize < info.size) {
decodedSamplesSize = info.size;
decodedSamples = new byte[decodedSamplesSize];
}
outputBuffers[outputBufferId].get(decodedSamples, 0, info.size);
outputBuffers[outputBufferId].clear();
if (mDecodedBytes.remaining() < info.size) {
int position = mDecodedBytes.position();
int newSize = (int) ((position * (1.0 * mFileSize / tot_size_read)) * 1.2);
if (newSize - position < info.size + 5 * (1 << 20)) {
newSize = position + info.size + 5 * (1 << 20);
}
ByteBuffer newDecodedBytes = null;
int retry = 10;
while (retry > 0) {
try {
newDecodedBytes = ByteBuffer.allocate(newSize);
break;
} catch (OutOfMemoryError oome) {
retry--;
}
}
if (retry == 0) {
break;
}
mDecodedBytes.rewind();
newDecodedBytes.put(mDecodedBytes);
mDecodedBytes = newDecodedBytes;
mDecodedBytes.position(position);
}
mDecodedBytes.put(decodedSamples, 0, info.size);
codec.releaseOutputBuffer(outputBufferId, false);
} else if (outputBufferId == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = codec.getOutputBuffers();
} else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// Subsequent data will conform to new format.
format = codec.getOutputFormat();
}
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
}
extractor.release();
extractor = null;
codec.stop();
codec.release();
codec = null;
mDecodedBytes.rewind();
mDecodedBytes.order(ByteOrder.LITTLE_ENDIAN);
mDecodedSamples = mDecodedBytes.asShortBuffer();
mRawFile = getFile("raw");
OutputStream output = null;
try {
output = new BufferedOutputStream(new FileOutputStream(mRawFile));
try {
output.write(mDecodedBytes.array());
} catch (IOException e) {
e.printStackTrace();
}
} catch (IOException e) {
Toast.makeText(MainActivityLame.this, e.getMessage(), Toast.LENGTH_SHORT).show();
} finally {
if (output != null) {
try {
output.flush();
} catch (IOException e) {
Toast.makeText(MainActivityLame.this, e.getMessage(), Toast.LENGTH_SHORT).show();
} finally {
try {
output.close();
} catch (IOException e) {
Toast.makeText(MainActivityLame.this, e.getMessage(), Toast.LENGTH_SHORT).show();
}
}
}
}
mEncodedFile = getFile("mp3");
int result = encodeFile(mRawFile.getAbsolutePath(), mEncodedFile.getAbsolutePath());
if (result == 0) {
Toast.makeText(MainActivityLame.this, "Encoded to " + mEncodedFile.getName(), Toast.LENGTH_SHORT).show();
}
}
}
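One detail in the write-out step above is worth double-checking (a hedged sketch, not a confirmed fix): ByteBuffer.array() returns the entire backing array, including capacity that was never filled, so writing it dumps undecoded garbage into the RAW file; the decoder's actual output format also has to match the channel count and sample rate the LAME encoder was initialized with, or the result is noise. A minimal sketch that writes only the valid region, called after the decode loop:
private void writeDecodedBytes(ByteBuffer decoded, File rawFile) throws IOException {
decoded.flip(); // position -> 0, limit -> number of bytes actually put()
byte[] valid = new byte[decoded.remaining()];
decoded.get(valid);
OutputStream out = new BufferedOutputStream(new FileOutputStream(rawFile));
try {
out.write(valid);
out.flush();
} finally {
out.close();
}
}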

Related

Audio track is not working in Android

I am new to Android. I am developing an application which uses AudioRecord and AudioTrack for recording and playing. In addition to this, I am trying to read and display the amplitude values of the recorded sound.
Here is my class:
public class MainActivity extends AppCompatActivity {
private static final String TAG = "RecordSound";
private int BufferSize;
byte[] buffer = new byte[BufferSize];
/* AudioRecord and AudioTrack Object */
private AudioRecord record = null;
private AudioTrack track = null;
/* Audio Configuration */
private int sampleRate = 8000;
private int channelConfig = AudioFormat.CHANNEL_IN_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
private boolean isRecording = true;
private Thread recordingThread = null;
private double lastLevel = 0;
private Thread thread;
private static final int SAMPLE_DELAY = 75;
MediaPlayer mediaPlayer;
RelativeLayout layout;
private ImageView tankHolder;
TextView text;
Button play;
String filename;
final Handler mHandler = new Handler();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
tankHolder = (ImageView)findViewById(R.id.tankHolder);
text=(TextView)findViewById(R.id.result_text);
layout=(RelativeLayout)findViewById(R.id.linear);
play=(Button)findViewById(R.id.btnStartPlay);
try {
BufferSize = AudioRecord.getMinBufferSize(sampleRate,
channelConfig, audioFormat);
}catch (Exception e){
e.printStackTrace();
}
}
@Override
protected void onResume() {
super.onResume();
//calling MediaPlayer for alarmtone when the tank is almost full
mediaPlayer = MediaPlayer.create(this, R.raw.tone);
startRecording();
play.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
stopRecording();
} catch (IOException e) {
e.printStackTrace();
}
}
});
}
private void stopRecording() throws IOException{
if (null != record) {
isRecording = false;
record.stop();
record.release();
record = null;
recordingThread = null;
mHandler.post(runRecord);
mediaPlayer.stop();
mediaPlayer.release();
}
}
final Runnable runRecord=new Runnable() {
@Override
public void run() {
text.setText("working");
try {
PlayShortAudioFileViaAudioTrack("/sdcard/recorded.pcm");
} catch (IOException e) {
e.printStackTrace();
}
}
};
private void PlayShortAudioFileViaAudioTrack(String filePath) throws IOException{
// We keep temporarily filePath globally as we have only two sample sounds now..
if (filePath==null)
return;
//Reading the file..
File file = new File(filePath); // for ex. path= "/sdcard/samplesound.pcm" or "/sdcard/samplesound.wav"
byte[] byteData = new byte[(int) file.length()];
Log.d(TAG, (int) file.length()+"");
FileInputStream in = null;
try {
in = new FileInputStream( file );
in.read( byteData );
in.close();
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// Set and push to audio track..
int intSize = android.media.AudioTrack.getMinBufferSize(8000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
Log.d(TAG, intSize+"");
AudioTrack at = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, intSize, AudioTrack.MODE_STREAM);
at.play();
// Write the byte array to the track
at.write(byteData, 0, byteData.length);
at.stop();
at.release();
}
private void startRecording()
{
record = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate,
channelConfig, audioFormat, BufferSize);
if (AudioRecord.STATE_INITIALIZED == record.getState())
record.startRecording();
isRecording = true;
/* Run a thread for Recording */
recordingThread = new Thread(new Runnable() {
@Override
public void run() {
writeAudioDataToFile();
}
},"AudioRecorder Thread");
recordingThread.start();
thread = new Thread(new Runnable() {
public void run() {
while(thread != null && !thread.isInterrupted()){
//Let's make the thread sleep for a the approximate sampling time
try{
Thread.sleep(SAMPLE_DELAY);}catch(InterruptedException ie){ie.printStackTrace();}
readAudioBuffer();//After this call we can get the last value assigned to the lastLevel variable
runOnUiThread(new Runnable() {
@Override
public void run() {
if(lastLevel >= 7 && lastLevel <= 15){
text.setTextColor(getResources().getColor(R.color.Blue));
layout.setBackgroundColor(Color.RED);
tankHolder.setImageResource(R.drawable.ftank);
if (!mediaPlayer.isPlaying()){
mediaPlayer.start();
}
text.setText(String.valueOf(lastLevel));
}else
if(lastLevel > 50 && lastLevel <= 100){
text.setText(String.valueOf(lastLevel));
text.setTextColor(getResources().getColor(R.color.Orange));
layout.setBackgroundColor(Color.WHITE);
tankHolder.setImageResource(R.drawable.htank);
if (mediaPlayer.isPlaying()){
mediaPlayer.pause();
}
}else
if(lastLevel > 100 && lastLevel <= 170){
text.setText(String.valueOf(lastLevel));
text.setTextColor(getResources().getColor(R.color.Yellow));
layout.setBackgroundColor(Color.WHITE);
tankHolder.setImageResource(R.drawable.qtank);
if (mediaPlayer.isPlaying()){
mediaPlayer.pause();
}
}
if(lastLevel > 170){
text.setText(String.valueOf(lastLevel));
text.setTextColor(getResources().getColor(R.color.Blue));
layout.setBackgroundColor(Color.WHITE);
tankHolder.setImageResource(R.drawable.qtank);
if (mediaPlayer.isPlaying()){
mediaPlayer.pause();
}
}
}
});
}
}
},"AudioRecorder Thread");
thread.start();
}
private void writeAudioDataToFile()
{
byte data[] = new byte[BufferSize];
/* Record audio to following file */
String filePath = "/sdcard/recorded.pcm";
filename = Environment.getExternalStorageDirectory().getAbsolutePath();
filename +="/audiofile.pcm";
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
int read_bytes = 0;
if(null != os){
while(isRecording)
{
read_bytes = record.read(data, 0, BufferSize);
if(AudioRecord.ERROR_INVALID_OPERATION != read_bytes){
try {
os.write(data);
} catch (IOException e) {
e.printStackTrace();
}
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private void readAudioBuffer() {
try {
short[] buffer = new short[BufferSize];
int bufferReadResult = 1;
if (record != null) {
// Sense the voice...
bufferReadResult = record.read(buffer, 0, BufferSize);
double sumLevel = 0;
for (int i = 0; i < bufferReadResult; i++) {
sumLevel += buffer[i];
}
lastLevel = Math.abs((sumLevel / bufferReadResult));
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
If I run it as is, the runOnUiThread(new Runnable() { ... } block works, but when I press the play button the recorded audio does not play.
If I remove the runOnUiThread(new Runnable() { ... } block, the recorded audio plays without any problem when I press the play button.
So I think the problem is with the threads. I tried to implement the Handler class, but I am still not getting it.
Refer to http://www.java2s.com/Code/Android/Media/UsingAudioRecord.htm for audio recording.
If that is not enough, see https://www.tutorialspoint.com/android/android_audio_capture.htm for a detailed explanation.
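Another thing worth noting (a hedged aside, using the question's own field names): writeAudioDataToFile() and readAudioBuffer() both call record.read() on the same AudioRecord from different threads, so each read consumes samples the other never sees, and os.write(data) writes the whole buffer even when fewer bytes were read. A minimal sketch of a single reader loop that saves the PCM and derives the level in one place:
private void recordLoop(FileOutputStream os) throws IOException {
byte[] data = new byte[BufferSize];
while (isRecording) {
int read = record.read(data, 0, BufferSize);
if (read > 1) {
os.write(data, 0, read); // write only the bytes actually read
long sum = 0;
int samples = read / 2; // 16-bit PCM: two bytes per sample
for (int i = 0; i + 1 < read; i += 2) {
sum += Math.abs((short) ((data[i] & 0xFF) | (data[i + 1] << 8))); // little-endian sample
}
lastLevel = sum / (double) samples; // mean absolute amplitude
}
}
os.close();
}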

Video Recording on Android from Frames using javaCV

I am using the javaCV library to record video on Android. They have provided a sample VideoRecording Activity, but there is a bug I could not figure out: I don't know what I am doing wrong, or what is missing, that causes it.
package org.bytedeco.javacv.recordactivity;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.PowerManager;
import android.util.Log;
import android.view.Display;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import java.io.IOException;
import java.nio.ShortBuffer;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import static org.bytedeco.javacpp.opencv_core.*;
public class RecordActivity extends Activity implements OnClickListener {
private final static String CLASS_LABEL = "RecordActivity";
private final static String LOG_TAG = CLASS_LABEL;
private PowerManager.WakeLock mWakeLock;
private String ffmpeg_link = "/mnt/sdcard/stream.flv";
long startTime = 0;
boolean recording = false;
private volatile FFmpegFrameRecorder recorder;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 320;
private int imageHeight = 240;
private int frameRate = 30;
/* audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
volatile boolean runAudioThread = true;
/* video data getting thread */
private Camera cameraDevice;
private CameraView cameraView;
private IplImage yuvIplimage = null;
/* layout setting */
private final int bg_screen_bx = 232;
private final int bg_screen_by = 128;
private final int bg_screen_width = 700;
private final int bg_screen_height = 500;
private final int bg_width = 1123;
private final int bg_height = 715;
private final int live_width = 640;
private final int live_height = 480;
private int screenWidth, screenHeight;
private Button btnRecorderControl;
/** The number of seconds in the continuous record loop (or 0 to disable loop). */
final int RECORD_LENGTH = 10;
IplImage[] images;
long[] timestamps;
ShortBuffer[] samples;
int imagesIndex, samplesIndex;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
setContentView(R.layout.main);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
initLayout();
}
@Override
protected void onResume() {
super.onResume();
if (mWakeLock == null) {
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
}
}
@Override
protected void onPause() {
super.onPause();
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
protected void onDestroy() {
super.onDestroy();
recording = false;
if (cameraView != null) {
cameraView.stopPreview();
}
if(cameraDevice != null) {
cameraDevice.stopPreview();
cameraDevice.release();
cameraDevice = null;
}
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
private void initLayout() {
/* get size of screen */
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
screenWidth = display.getWidth();
screenHeight = display.getHeight();
RelativeLayout.LayoutParams layoutParam = null;
LayoutInflater myInflate = null;
myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
RelativeLayout topLayout = new RelativeLayout(this);
setContentView(topLayout);
LinearLayout preViewLayout = (LinearLayout) myInflate.inflate(R.layout.main, null);
layoutParam = new RelativeLayout.LayoutParams(screenWidth, screenHeight);
topLayout.addView(preViewLayout, layoutParam);
/* add control button: start and stop */
btnRecorderControl = (Button) findViewById(R.id.recorder_control);
btnRecorderControl.setText("Start");
btnRecorderControl.setOnClickListener(this);
/* add camera view */
int display_width_d = (int) (1.0 * bg_screen_width * screenWidth / bg_width);
int display_height_d = (int) (1.0 * bg_screen_height * screenHeight / bg_height);
int prev_rw, prev_rh;
if (1.0 * display_width_d / display_height_d > 1.0 * live_width / live_height) {
prev_rh = display_height_d;
prev_rw = (int) (1.0 * display_height_d * live_width / live_height);
} else {
prev_rw = display_width_d;
prev_rh = (int) (1.0 * display_width_d * live_height / live_width);
}
layoutParam = new RelativeLayout.LayoutParams(prev_rw, prev_rh);
layoutParam.topMargin = (int) (1.0 * bg_screen_by * screenHeight / bg_height);
layoutParam.leftMargin = (int) (1.0 * bg_screen_bx * screenWidth / bg_width);
cameraDevice = Camera.open();
Log.i(LOG_TAG, "cameara open");
cameraView = new CameraView(this, cameraDevice);
topLayout.addView(cameraView, layoutParam);
Log.i(LOG_TAG, "cameara preview start: OK");
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() {
Log.w(LOG_TAG,"init recorder");
if (RECORD_LENGTH > 0) {
imagesIndex = 0;
images = new IplImage[RECORD_LENGTH * frameRate];
timestamps = new long[images.length];
for (int i = 0; i < images.length; i++) {
images[i] = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
timestamps[i] = -1;
}
} else if (yuvIplimage == null) {
yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
Log.i(LOG_TAG, "create yuvIplimage");
}
Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link);
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("flv");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);
Log.i(LOG_TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
}
public void startRecording() {
initRecorder();
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
runAudioThread = false;
try {
audioThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
audioRecordRunnable = null;
audioThread = null;
if (recorder != null && recording) {
if (RECORD_LENGTH > 0) {
Log.v(LOG_TAG,"Writing frames");
try {
int firstIndex = imagesIndex % samples.length;
int lastIndex = (imagesIndex - 1) % images.length;
if (imagesIndex <= images.length) {
firstIndex = 0;
lastIndex = imagesIndex - 1;
}
if ((startTime = timestamps[lastIndex] - RECORD_LENGTH * 1000000L) < 0) {
startTime = 0;
}
if (lastIndex < firstIndex) {
lastIndex += images.length;
}
for (int i = firstIndex; i <= lastIndex; i++) {
long t = timestamps[i % timestamps.length] - startTime;
if (t >= 0) {
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(images[i % images.length]);
}
}
firstIndex = samplesIndex % samples.length;
lastIndex = (samplesIndex - 1) % samples.length;
if (samplesIndex <= samples.length) {
firstIndex = 0;
lastIndex = samplesIndex - 1;
}
if (lastIndex < firstIndex) {
lastIndex += samples.length;
}
for (int i = firstIndex; i <= lastIndex; i++) {
recorder.record(samples[i % samples.length]);
}
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
recording = false;
Log.v(LOG_TAG,"Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
ShortBuffer audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
if (RECORD_LENGTH > 0) {
samplesIndex = 0;
samples = new ShortBuffer[RECORD_LENGTH * sampleAudioRateInHz * 2 / bufferSize + 1];
for (int i = 0; i < samples.length; i++) {
samples[i] = ShortBuffer.allocate(bufferSize);
}
} else {
audioData = ShortBuffer.allocate(bufferSize);
}
Log.d(LOG_TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while (runAudioThread) {
if (RECORD_LENGTH > 0) {
audioData = samples[samplesIndex++ % samples.length];
audioData.position(0).limit(0);
}
//Log.v(LOG_TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
audioData.limit(bufferReadResult);
if (bufferReadResult > 0) {
Log.v(LOG_TAG,"bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
// Why? Good question...
if (recording) {
if (RECORD_LENGTH <= 0) try {
recorder.record(audioData);
//Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
Log.v(LOG_TAG,"AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if (audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(LOG_TAG,"audioRecord released");
}
}
}
//---------------------------------------------
// camera thread, gets and encodes video data
//---------------------------------------------
class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraView(Context context, Camera camera) {
super(context);
Log.w("camera","camera view");
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.v(LOG_TAG,"Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate);
Camera.Parameters camParams = mCamera.getParameters();
camParams.setPreviewSize(imageWidth, imageHeight);
Log.v(LOG_TAG,"Preview Framerate: " + camParams.getPreviewFrameRate());
camParams.setPreviewFrameRate(frameRate);
mCamera.setParameters(camParams);
startPreview();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch (RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
public void startPreview() {
if (!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}
public void stopPreview() {
if (isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
startTime = System.currentTimeMillis();
return;
}
if (RECORD_LENGTH > 0) {
int i = imagesIndex++ % images.length;
yuvIplimage = images[i];
timestamps[i] = 1000 * (System.currentTimeMillis() - startTime);
}
/* get video data */
if (yuvIplimage != null && recording) {
yuvIplimage.getByteBuffer().put(data);
if (RECORD_LENGTH <= 0) try {
Log.v(LOG_TAG,"Writing Frame");
long t = 1000 * (System.currentTimeMillis() - startTime);
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(yuvIplimage);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
@Override
public void onClick(View v) {
if (!recording) {
startRecording();
Log.w(LOG_TAG, "Start Button Pushed");
btnRecorderControl.setText("Stop");
} else {
// This will trigger the audio recording loop to stop and then set isRecorderStart = false;
stopRecording();
Log.w(LOG_TAG, "Stop Button Pushed");
btnRecorderControl.setText("Start");
}
}
}
I am getting this error
The type org.bytedeco.javacpp.avutil$AVFrame cannot be resolved. It is indirectly referenced from required .class files
at the following line in the above code
if (RECORD_LENGTH <= 0) try {
recorder.record(audioData);
//Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
}
at recorder.record(audioData). I don't know what I am doing wrong here. I'm new to JavaCV. Any help will be appreciated.
The error message means that a jar containing the class org.bytedeco.javacpp.avutil$AVFrame cannot be found on the java class path. I would suggest checking the class path and the version of javacv used; maybe there were bugs that are now solved, you never know. There are also some similar discussions on javacv's GitHub account, which may or may not be connected to your problem: javacv github, javacv github.
What makes me wonder is why you are putting your class in the "org.bytedeco.javacv.recordactivity" package. "org.bytedeco.javacv" is javacv's own package; don't put your own classes into a package structure that does not belong to you. It can only cause you trouble in the future, for instance if javacv at some point decides to create a package and class with the same name.
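If you want to confirm that diagnosis at runtime, a quick check (a hedged sketch; the class names follow the old org.bytedeco.javacpp preset layout that the error message refers to):
try {
Class.forName("org.bytedeco.javacpp.avutil");
Class.forName("org.bytedeco.javacpp.avcodec");
Log.i("ClasspathCheck", "FFmpeg preset classes resolved");
} catch (ClassNotFoundException e) {
Log.e("ClasspathCheck", "FFmpeg preset jar is not on the classpath", e);
}
If the catch branch fires, the missing piece is the jar that ships those classes, not your RecordActivity code.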

UI not showing up after switching to AsyncTask

I have recently switched over to AsyncTask after reading how efficient it is. It took some time porting my code, but I did it in the end. I have one AsyncTask that runs first, setting up the background image/color that it gets from a JSON object. After that, it sets up TableRows for all the other fields in the JSON file. The background changes, but the UI does not show up. I placed System.out.println calls inside my AsyncTask and I can see that the code has been executed (the onPostExecute method runs), but none of the UI shows up. I have no idea what the problem is, hence me posting this question.
Here is my AsyncTask:
class GetImageAsync extends AsyncTask<String, Void, Drawable> {
private final WeakReference<View> ViewReference;
private String data;
private Context context;
private boolean isImageView;
private boolean scale;
private int id = -1;
private int color = -1;
private int height = -1;
public GetImageAsync(TableLayout tr, String data, Context context) {
isImageView = false;
this.context = context;
this.data = data;
ViewReference = new WeakReference<View>(tr);
System.out.println("inside async");
}
public GetImageAsync(ImageView iv, int id, Context context) {
isImageView = true;
this.context = context;
this.id = id;
ViewReference = new WeakReference<View>(iv);
}
public GetImageAsync(ImageView imageView,String data, Context context, int height) {
System.out.println("profile async");
this.height = height;
isImageView = true;
this.context = context;
this.data = data;
ViewReference = new WeakReference<View>(imageView);
}
// Decode image in background.
@Override
protected Drawable doInBackground(String... params) {
System.out.println(id + " : " + isImageView);
if(isImageView && id != -1) {
return getImageFromResource();
} else {
if(data.startsWith("#")) {
color = Color.parseColor(data);
return null;
}
try {
return getImageFromWeb(data, scale);
} catch (MalformedURLException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
private Drawable getImageFromResource() {
return context.getResources().getDrawable(id);
}
private Drawable getImageFromWeb(String data, boolean b) throws MalformedURLException, IOException {
Bitmap img = BitmapFactory.decodeStream(new URL(data).openConnection().getInputStream());
if(height != -1) {
return new BitmapDrawable(context.getResources(), getCroppedBitmap(Bitmap.createScaledBitmap(
Bitmap.createScaledBitmap(img,
img.getWidth(),
img.getWidth(), true),
height * 2,
height * 2, true)));
}
return new BitmapDrawable(context.getResources(), img);
}
// Once complete, see if ImageView is still around and set bitmap.
@Override
protected void onPostExecute(Drawable bitmap) {
if(isImageView) { //this block does not {
System.out.println("post "+isImageView);
System.out.println("iv");
if (ViewReference != null && bitmap != null) {
final ImageView imageView = (ImageView) ViewReference.get();
if (imageView != null) {
imageView.setImageDrawable(bitmap);
}
} // }
} else { //this block runs {
System.out.println("post "+isImageView);
if (ViewReference != null) {
final TableLayout tr = (TableLayout) ViewReference.get();
if (tr != null) {
if(color != -1) {
tr.setBackgroundColor(color);
} else {
tr.setBackground(bitmap);
}
}
}
} // }
}
public static Bitmap getCroppedBitmap(Bitmap bitmap) {
int w = bitmap.getWidth();
int h = bitmap.getHeight();
int radius = Math.min(h / 2, w / 2);
Bitmap output = Bitmap.createBitmap(w + 8, h + 8, Config.ARGB_8888);
Paint p = new Paint();
p.setAntiAlias(true);
Canvas c = new Canvas(output);
c.drawARGB(0, 0, 0, 0);
p.setStyle(Style.FILL);
c.drawCircle((w / 2) + 4, (h / 2) + 4, radius, p);
p.setXfermode(new PorterDuffXfermode(Mode.SRC_IN));
c.drawBitmap(bitmap, 4, 4, p);
p.setXfermode(null);
p.setStyle(Style.STROKE);
p.setColor(Color.BLACK);
p.setStrokeWidth(1);
c.drawCircle((w / 2) + 4, (h / 2) + 4, radius, p);
return output;
}
}
and here is how I create the UI:
private void createUI(JSONObject jObject) throws JSONException {
int absIndex = 0;
if (android.os.Build.VERSION.SDK_INT >= 16) {
new GetImageAsync(tr, jObject.getString(("bg")), getApplicationContext()).execute("");
System.out.println("after async");
/*if (bgcolor != -1) {
System.out.println("color: " + bgcolor);
tr.setBackgroundColor(bgcolor);
} else if (bgpic != null) {
tr.setBackground(bgpic);
}*/
}
for (int i = 0; i < keys.length; i++) {
values.add(i, new ArrayList<String>());
values.get(i).add(0, keys[i]);
values.get(i).add(1, jObject.getString(keys[i]));
}
String lastString = WhoIsLast(jObject);
trcard.setBackground(getResources()
.getDrawable(R.drawable.bg_card/* abc_menu_dropdown_panel_holo_light */));
trcard.addView(tlcard);
tr.setPadding(0, 0, 0, 0);
TableLayout.LayoutParams tableRowParams = new TableLayout.LayoutParams(
TableLayout.LayoutParams.MATCH_PARENT,
TableLayout.LayoutParams.WRAP_CONTENT);
int[] attrs = { android.R.attr.dividerVertical };
TypedArray typedArray = getApplicationContext().obtainStyledAttributes(
attrs);
Drawable divider = typedArray.getDrawable(0);
typedArray.recycle();
tableRowParams.setMargins(20, 20, 20, 0);
trcard.setLayoutParams(tableRowParams);
tlcard.setDividerDrawable(divider);
tlcard.setDividerPadding(4);
tableScrollView.addView(trcard);
for (int i = 0; i < keys.length; i++) {
String value = values.get(i).get(1);
if (value != "") {
String key = values.get(i).get(0);
boolean last = false;
if (i == keys.length || value.equals(lastString) || i == 0) {
last = true;
System.out.println(value +" = "+lastString);
}
insertElement(value, key, absIndex++, last, drawables[i]);
if (!last) {
absIndex++;
}
}
}
}
private String WhoIsLast(JSONObject j) throws JSONException {
if (j.getString("facebook").equals("")) {
return "";
}
if (j.getString("twitter").equals("")) {
return "facebook";
}
if (j.getString("email").equals("")) {
return "twitter";
}
if (j.getString("phone").equals("")) {
return "email";
}
return "";
}
int absIndex = 0;
private void insertElement(String data, String key, int i,
boolean b, Integer id) throws JSONException {
LayoutInflater inflater = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
View newRow = inflater.inflate(R.layout.row, null, false);
newRow.setLayoutParams(new TableRow.LayoutParams(
TableRow.LayoutParams.MATCH_PARENT,
TableRow.LayoutParams.WRAP_CONTENT));
TextView dataTextView = (TextView) newRow
.findViewById(R.id.rowTextView);
dataTextView.setText("\t " + data);
ImageView iv = (ImageView) newRow.findViewById(R.id.rowImageView);
newRow.setId(absIndex++);
newRow.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View view) {
// omitted
}
});
View v = new View(this);
v.setLayoutParams(new TableRow.LayoutParams(
TableRow.LayoutParams.MATCH_PARENT, 1));
v.setBackgroundColor(Color.argb(25, 111, 111, 111));
dataTextView.measure(MeasureSpec.UNSPECIFIED, MeasureSpec.UNSPECIFIED);
if (i == 0) {
System.out.println("before async iv");
new GetImageAsync(iv, jObject.getString("profilepic"), getApplicationContext(), dataTextView.getMeasuredHeight()).execute("");
System.out.println("after async iv");
//async.execute("");
dataTextView.setShadowLayer(3, 0, 0, Color.BLACK);
dataTextView.setTextColor(getResources().getColor(R.color.white));
} else {
new GetImageAsync(iv, id, getApplicationContext()).execute("");
}
if (i == 0) {
dataTextView.setTextSize(30);
tableScrollView.addView(newRow, i);
System.out.println("adding i=0null");
} else {
System.out.println("adding i = "+i +tlcard);
tlcard.addView(newRow, i - 1);
if (!b) {
tlcard.addView(v, i);
}
}
}
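One detail in createUI that can silently distort the row loop (a hedged observation, not a confirmed cause): value != "" compares object references, not string contents, so the empty-value filter never behaves as intended for strings coming out of JSONObject. A minimal sketch of the intended check:
String value = values.get(i).get(1);
if (!value.isEmpty()) { // the reference comparison value != "" does not do this
// build the row as in the loop above
}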
Edit: here is where I call the createUI method:
final Thread thread = new Thread(new Runnable() {
@Override
public void run() {
jObject = getJson("http://www.tabcards.com/req/androidapi/L2o30H8JlFMtFYHW3KLxkts20ztc5Be6Z6m6v315/json/"
+ value);
System.out.println(value);
if (jObject != null) {
runOnUiThread(new Runnable() {
@Override
public void run() {
try {
createUI(jObject);
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
}
});
thread.start();

Android + OpenCV: process camera frames in a service

Most OpenCV examples for Android are based on a CameraViewActivity that receives, processes and displays the frames. But, similar to this approach, I want to build a service that is started when the Android device boots. The service should access the camera and do some continuous image processing.
Can anyone tell me how to initialize the camera via OpenCV for this scenario, or link to any samples?
I found several posts that explained how to do it. Basically, you have to attach an empty, invisible surface (a dummy SurfaceTexture in the code below) to the Android Camera and register buffers for the preview frames. Here's part of my code.
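The core of the trick, distilled (a sketch; camera is an open android.hardware.Camera, and previewWidth, previewHeight and callback are your own preview size and Camera.PreviewCallback implementation):
SurfaceTexture dummy = new SurfaceTexture(0); // never displayed anywhere
camera.setPreviewTexture(dummy);
byte[] buffer = new byte[previewWidth * previewHeight * 3 / 2]; // NV21 is 12 bits per pixel
camera.addCallbackBuffer(buffer);
camera.setPreviewCallbackWithBuffer(callback);
camera.startPreview();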
Here's an interface that I wrote, because we do switch between local hardware and remote network cameras.
public interface ICamera {
boolean supportsRegionOfInterest();
void connect();
void release();
boolean isConnected();
}
Here's the code for the local camera that works without a visible Activity. It receives the frames in a separate thread.
public class HardwareCamera implements CameraAccess.ICamera,
Camera.PreviewCallback {
// see http://developer.android.com/guide/topics/media/camera.html for more
// details
private static final boolean USE_THREAD = true;
private final static String TAG = "HardwareCamera";
private final Context context;
private final int cameraIndex; // example: CameraInfo.CAMERA_FACING_FRONT or
// -1 for
// IP_CAM
private final CameraAccess user;
private Camera mCamera;
private int mFrameWidth;
private int mFrameHeight;
private CameraAccessFrame mCameraFrame;
private CameraHandlerThread mThread = null;
private SurfaceTexture texture = new SurfaceTexture(0);
// needed to avoid OpenCV error:
// "queueBuffer: BufferQueue has been abandoned!"
private byte[] mBuffer;
public HardwareCamera(Context context, CameraAccess user, int cameraIndex) {
this.context = context;
this.cameraIndex = cameraIndex;
this.user = user;
}
// private boolean checkCameraHardware() {
// if (context.getPackageManager().hasSystemFeature(
// PackageManager.FEATURE_CAMERA)) {
// // this device has a camera
// return true;
// } else {
// // no camera on this device
// return false;
// }
// }
public static Camera getCameraInstance(int facing) {
Camera c = null;
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
int cameraCount = Camera.getNumberOfCameras();
int index = -1;
for (int camIdx = 0; camIdx < cameraCount; camIdx++) {
Camera.getCameraInfo(camIdx, cameraInfo);
if (cameraInfo.facing == facing) {
try {
c = Camera.open(camIdx);
index = camIdx;
break;
} catch (RuntimeException e) {
Log.e(TAG,
String.format(
"Camera is not available (in use or does not exist). Facing: %s Index: %s Error: %s",
facing, camIdx, e.getMessage()));
continue;
}
}
}
if (c != null)
Log.d(TAG, String.format("Camera opened. Facing: %s Index: %s",
facing, index));
else
Log.e(TAG, "Could not find any camera matching facing: " + facing);
// returns null if camera is unavailable
return c;
}
private synchronized void connectLocalCamera() {
if (!user.isOpenCVLoaded())
return;
if (USE_THREAD) {
if (mThread == null) {
mThread = new CameraHandlerThread(this);
}
synchronized (mThread) {
mThread.openCamera();
}
} else {
oldConnectCamera();
}
user.onCameraInitialized(mFrameWidth, mFrameHeight);
}
private/* synchronized */void oldConnectCamera() {
// synchronized (this) {
if (true) {// checkCameraHardware()) {
mCamera = getCameraInstance(cameraIndex);
if (mCamera == null)
return;
Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
// Camera.Size previewSize = sizes.get(0);
Collections.sort(sizes, new PreviewSizeComparer());
Camera.Size previewSize = null;
for (Camera.Size s : sizes) {
if (s == null)
break;
previewSize = s;
}
// List<Integer> formats = params.getSupportedPictureFormats();
// params.setPreviewFormat(ImageFormat.NV21);
params.setPreviewSize(previewSize.width, previewSize.height);
mCamera.setParameters(params);
params = mCamera.getParameters();
mFrameWidth = params.getPreviewSize().width;
mFrameHeight = params.getPreviewSize().height;
int size = mFrameWidth * mFrameHeight;
size = size
* ImageFormat.getBitsPerPixel(params.getPreviewFormat())
/ 8;
this.mBuffer = new byte[size];
Log.d(TAG, "Created callback buffer of size (bytes): " + size);
Mat mFrame = new Mat(mFrameHeight + (mFrameHeight / 2),
mFrameWidth, CvType.CV_8UC1);
mCameraFrame = new CameraAccessFrame(mFrame, mFrameWidth,
mFrameHeight);
if (this.texture != null)
this.texture.release();
this.texture = new SurfaceTexture(0);
try {
mCamera.setPreviewTexture(texture);
mCamera.addCallbackBuffer(mBuffer);
mCamera.setPreviewCallbackWithBuffer(this);
mCamera.startPreview();
Log.d(TAG,
String.format(
"Camera preview started with %sx%s. Rendering to SurfaceTexture dummy while receiving preview frames.",
mFrameWidth, mFrameHeight));
} catch (Exception e) {
Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
// }
}
@Override
public synchronized void onPreviewFrame(byte[] frame, Camera arg1) {
mCameraFrame.put(frame);
if (user.isAutomaticReceive() || user.waitForReceive(500))
user.onPreviewFrame(mCameraFrame);
if (mCamera != null)
mCamera.addCallbackBuffer(mBuffer);
}
private class CameraAccessFrame implements CameraFrame {
private Mat mYuvFrameData;
private Mat mRgba;
private int mWidth;
private int mHeight;
private Bitmap mCachedBitmap;
private boolean mRgbaConverted;
private boolean mBitmapConverted;
@Override
public Mat gray() {
return mYuvFrameData.submat(0, mHeight, 0, mWidth);
}
@Override
public Mat rgba() {
if (!mRgbaConverted) {
Imgproc.cvtColor(mYuvFrameData, mRgba,
Imgproc.COLOR_YUV2BGR_NV12, 4);
mRgbaConverted = true;
}
return mRgba;
}
// @Override
// public Mat yuv() {
// return mYuvFrameData;
// }
@Override
public synchronized Bitmap toBitmap() {
if (mBitmapConverted)
return mCachedBitmap;
Mat rgba = this.rgba();
Utils.matToBitmap(rgba, mCachedBitmap);
mBitmapConverted = true;
return mCachedBitmap;
}
public CameraAccessFrame(Mat Yuv420sp, int width, int height) {
super();
mWidth = width;
mHeight = height;
mYuvFrameData = Yuv420sp;
mRgba = new Mat();
this.mCachedBitmap = Bitmap.createBitmap(width, height,
Bitmap.Config.ARGB_8888);
}
public synchronized void put(byte[] frame) {
mYuvFrameData.put(0, 0, frame);
invalidate();
}
public void release() {
mRgba.release();
mCachedBitmap.recycle();
}
public void invalidate() {
mRgbaConverted = false;
mBitmapConverted = false;
}
};
private class PreviewSizeComparer implements Comparator<Camera.Size> {
@Override
public int compare(Size arg0, Size arg1) {
if (arg0 != null && arg1 == null)
return -1;
if (arg0 == null && arg1 != null)
return 1;
if (arg0.width < arg1.width)
return -1;
else if (arg0.width > arg1.width)
return 1;
else
return 0;
}
}
private static class CameraHandlerThread extends HandlerThread {
Handler mHandler;
HardwareCamera owner;
CameraHandlerThread(HardwareCamera owner) {
super("CameraHandlerThread");
this.owner = owner;
start();
mHandler = new Handler(getLooper());
}
synchronized void notifyCameraOpened() {
notify();
}
void openCamera() {
mHandler.post(new Runnable() {
@Override
public void run() {
owner.oldConnectCamera();
notifyCameraOpened();
}
});
try {
wait();
} catch (InterruptedException e) {
Log.w(TAG, "wait was interrupted");
}
}
}
@Override
public boolean supportsRegionOfInterest() {
return false;
}
@Override
public void connect() {
connectLocalCamera();
}
@Override
public void release() {
synchronized (this) {
if (USE_THREAD) {
if (mThread != null) {
mThread.interrupt();
mThread = null;
}
}
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
try {
mCamera.setPreviewTexture(null);
} catch (IOException e) {
Log.e(TAG, "Could not release preview-texture from camera.");
}
mCamera.release();
Log.d(TAG, "Preview stopped and camera released");
}
mCamera = null;
if (mCameraFrame != null) {
mCameraFrame.release();
}
if (texture != null)
texture.release();
}
}
@Override
public boolean isConnected() {
return mCamera != null;
}
}
The camera frame handling is taken from the OpenCV samples; it converts the raw byte array into OpenCV Mat structures. The implementation of the following interface is the CameraAccessFrame class inside the code above.
public interface CameraFrame extends CvCameraViewFrame {
Bitmap toBitmap();
@Override
Mat rgba();
@Override
Mat gray();
}
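For completeness, a minimal sketch of how a boot-started Service could drive the ICamera above (hedged: cameraAccess stands for your own CameraAccess callback hub from the code above, and error handling plus the OpenCV loader callback are omitted):
public class CameraService extends Service {
private CameraAccess cameraAccess; // your callback hub (assumed to exist in your project)
private CameraAccess.ICamera camera;
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
if (camera == null) {
camera = new HardwareCamera(this, cameraAccess, Camera.CameraInfo.CAMERA_FACING_BACK);
camera.connect(); // starts the preview into the dummy SurfaceTexture
}
return START_STICKY;
}
@Override
public void onDestroy() {
if (camera != null) {
camera.release();
camera = null;
}
super.onDestroy();
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
}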

Android Shoutcast internet Radio Streaming

I developed Shoutcast internet radio streaming and I'm able to stream and play successfully.
But the problem is: when I run my application, I can stream and play continuously for half an hour; after that the stream stops playing. If I press play again, the stream continues, and again after some time I get a FileNotFoundException.
I logged the error after the stream stopped.
The error is:
java.io.FileNotFoundException: /data/data/com.torilt/cache/downloadingMediaFile430 (No such file or directory)
Can't find file. Android must have deleted it on a clean up
Getting Exception in setupplayer()
Source Code:
public class StreamingMediaPlayer extends Service {
final static public String AUDIO_MPEG = "audio/mpeg";
final static public String BITERATE_HEADER = "icy-br";
public int INTIAL_KB_BUFFER ;
private Handler handler;
//= 96*10/8
final public int BIT = 8;
final public int SECONDS = 60;
int bitrate = 56;
public File downloadingMediaFile;
final public String DOWNFILE = "downloadingMediaFile";
public Context context;
public int counter;
public int playedcounter;
public int preparecounter;
public MediaPlayer mp1;
public MediaPlayer mp2;
public boolean mp1prepared;
public boolean mp2prepared;
public boolean mp1preparing;
public boolean mp2preparing;
public boolean downloadingformp1;
public boolean downloadingformp2;
public boolean prepareState;
public String SONGURL = "";
// playing is "true" for mp1 and "false" for mp2
public boolean mp1playing;
public boolean started;
public boolean processHasStarted;
public boolean processHasPaused;
public boolean regularStream;
public BufferedInputStream stream;
public URL url;
public URLConnection urlConn;
public String station;
public String audiourl;
public Intent startingIntent = null;
public boolean stopping;
Thread preparringthread;
boolean waitingForPlayer;
// Setup all the variables
private void setupVars() {
counter = 0;
playedcounter = 0;
preparecounter = 0;
mp1 = new MediaPlayer();
mp2 = new MediaPlayer();
mp1prepared = false;
mp2prepared = false;
mp1preparing = false;
mp2preparing = false;
downloadingformp1 = false;
downloadingformp2 = false;
prepareState = true;
mp1playing = false;
started = false;
processHasStarted = false;
processHasPaused = true;
regularStream = false;
stream = null;
url = null;
urlConn = null;
station = null;
audiourl = null;
stopping = false;
preparringthread = null;
waitingForPlayer = false;
}
// This object will allow other processes to interact with our service
private final IStreamingMediaPlayer.Stub ourBinder = new IStreamingMediaPlayer.Stub() {
// String TAG = "IStreamingMediaPlayer.Stub";
public String getStation() {
// Log.d(TAG, "getStation");
return station;
}
public String getUrl() {
// Log.d(TAG, "getUrl");
return audiourl;
}
public boolean playing() {
// Log.d(TAG, "playing?");
return isPlaying();
}
public boolean pause() {
// Log.d(TAG, "playing?");
return isPause();
}
public void startAudio() {
// Log.d(TAG, "startAudio");
Runnable r = new Runnable() {
public void run() {
onStart(startingIntent, 0);
}
};
new Thread(r).start();
}
public void stopAudio() {
// Log.d(TAG, "stopAudio");
stop();
}
};
@Override
public void onCreate() {
super.onCreate();
context = this;
}
@Override
public void onStart(Intent intent, int startId) throws NullPointerException {
super.onStart(intent, startId);
// final String TAG = "StreamingMediaPlayer - onStart";
context = this;
setupVars();
if (intent.hasExtra("audiourl")) {
raiseThreadPriority();
processHasStarted = true;
processHasPaused = false;
audiourl = intent.getStringExtra("audiourl");
station = intent.getStringExtra("station");
downloadingMediaFile = new File(context.getCacheDir(), DOWNFILE+ counter);
downloadingMediaFile.deleteOnExit();
Runnable r = new Runnable() {
public void run() {
try {
startStreaming(audiourl);
} catch (IOException e) {
// Log.d(TAG, e.toString());
}
}
};
Thread t = new Thread(r);
t.start();
}
}
@Override
public void onDestroy() {
super.onDestroy();
mp1.stop();
mp2.stop();
}
@Override
public IBinder onBind(Intent intent) {
startingIntent = intent;
context = this;
return ourBinder;
}
@Override
public boolean onUnbind(Intent intent) {
super.onUnbind(intent);
stopSelf();
return true;
}
/**
* Progressivly download the media to a temporary location and update the
* MediaPlayer as new content becomes available.
*/
public void startStreaming(final String mediaUrl) throws IOException {
try {
url = new URL(mediaUrl);
urlConn = (HttpURLConnection) url.openConnection();
urlConn.setReadTimeout(1000 * 20);
urlConn.setConnectTimeout(1000 * 5);
//The getContentType method is used by the getContent method to determine the type of the remote object; subclasses may find it convenient to override the getContentType method.
String ctype = urlConn.getContentType();
if (ctype == null) {
ctype = "";
} else {
ctype = ctype.toLowerCase();
}
if (ctype.contains(AUDIO_MPEG) || ctype.equals("")) {
String temp = urlConn.getHeaderField(BITERATE_HEADER);
if (temp != null) {
bitrate = new Integer(temp).intValue();
}
} else {
stopSelf();
return;
}
}
catch(NullPointerException ne)
{
}
catch (IOException ioe) {
// Log.e(TAG, "Could not connect to " + mediaUrl);
stopSelf();
return;
}
if (!regularStream) {
INTIAL_KB_BUFFER = bitrate * SECONDS / BIT;
Runnable r = new Runnable() {
public void run() {
try {
downloadAudioIncrement(mediaUrl);
Log.i("TAG12344444", "Unable to play");
stopSelf();
return;
} catch (IOException e) {
Log.i("TAG123", "Unable to initialize the MediaPlayer for Audio Url = "+mediaUrl, e);
stopSelf();
return;
} catch (NullPointerException e) {
stopSelf();
return;
}
}
};
Thread t = new Thread(r);
t.start();
}
}
/**
* Download the url stream to a temporary location and then call the
* setDataSource for that local file
*/
public void downloadAudioIncrement(String mediaUrl) throws IOException{
int bufsizeForDownload = 8 * 1024;
int bufsizeForfile = 64 * 1024;
stream = new BufferedInputStream(urlConn.getInputStream(),bufsizeForDownload);
Log.i("bufsize",Integer.toString(urlConn.getInputStream().available()));
try{
if(stream == null || stream.available() == 0){
stopSelf();
Log.i("unable to create ","stream null");
return;
}
}catch (NullPointerException e) {
stopSelf();
Log.i("return1","return1");
return;
}
BufferedOutputStream bout = new BufferedOutputStream(new FileOutputStream(downloadingMediaFile), bufsizeForfile);
byte buf[] = new byte[bufsizeForDownload];
int totalBytesRead = 0, totalKbRead = 0, numread = 0;
do {
if (bout == null) {
counter++;
downloadingMediaFile = new File(context.getCacheDir(), DOWNFILE+ counter);
downloadingMediaFile.deleteOnExit();
bout = new BufferedOutputStream(new FileOutputStream(downloadingMediaFile), bufsizeForfile);
}
try {
numread = stream.read(buf);
} catch (IOException e) {
Log.d("Downloadingfile", "Bad read. Let's quit.");
// stop();
Log.i("return2","return2");
stopSelf();
// return;
}
catch (NullPointerException e) {
// Let's get out of here
e.printStackTrace();
break;
}
if (numread < 0) {
bout.flush();
stopSelf();
Log.i("Bad read from stream", "Bad read from stream3");
if(stream == null){
urlConn = new URL(mediaUrl).openConnection();
urlConn.setConnectTimeout(1000 * 30);
urlConn.connect();
stream = new BufferedInputStream(urlConn.getInputStream(),bufsizeForDownload);
}else{
handler.post(new Runnable() {
public void run() {
Log.i("Bad read from stream", "Bad read from xyz");
context.stopService(startingIntent);
Log.i("return3","return3");
return;
}
});
}
} else if (numread >= 1) {
bout.write(buf, 0, numread);
totalBytesRead += numread;
totalKbRead += totalBytesRead / 1000;
}
if (totalKbRead >= INTIAL_KB_BUFFER && stopping != true) {
bout.flush();
bout.close();
bout = null;
if (started == false) {
Runnable r = new Runnable() {
public void run() {
setupplayer();
}
};
Thread t = new Thread(r);
t.start();
}
totalBytesRead = 0;
totalKbRead = 0;
}
if (stopping == true) {
stream = null;
}
} while (stream != null);
}
/** oncompletelister for media player **/
class listener implements MediaPlayer.OnCompletionListener {
public void onCompletion(MediaPlayer mp) {
waitingForPlayer = false;
long timeInMilli = Calendar.getInstance().getTime().getTime();
long timeToQuit = (1000 * 30) + timeInMilli; // add 30 seconds
if (mp1playing)
{
mp1.reset();
removefile();
mp1prepared = false;
// Log.d(TAG, "mp1 is Free.");
if (downloadingformp2) {
if (mp2preparing && stopping == false) {
waitingForPlayer = true;
}
while (mp2preparing && stopping == false) {
if (timeInMilli > timeToQuit) {
stopSelf();
}
timeInMilli = Calendar.getInstance().getTime().getTime();
}
}
} else {
mp2.reset();
removefile();
mp2prepared = false;
if (downloadingformp1) {
if (mp1preparing && stopping == false) {
waitingForPlayer = true;
}
while (mp1preparing && stopping == false) {
if (timeInMilli > timeToQuit) {
stopSelf();
}
timeInMilli = Calendar.getInstance().getTime().getTime();
}
}
}
if (waitingForPlayer == true) {
// we must have been waiting
waitingForPlayer = false;
}
if (stopping == false) {
if (mp1playing) {
mp2.start();
mp1playing = false;
Runnable r = new Runnable() {
public void run() {
setupplayer();
}
};
Thread t = new Thread(r);
t.start();
} else {
mp1.start();
mp1playing = true;
Runnable r = new Runnable() {
public void run() {
setupplayer();
}
};
Thread t = new Thread(r);
t.start();
}
}
}
}
/** OnPreparedListener for media player **/
class preparelistener implements MediaPlayer.OnPreparedListener {
public void onPrepared(MediaPlayer mp) {
if (prepareState) {
prepareState = false;
mp1preparing = false;
mp1prepared = true;
if (started == false) {
started = true;
mp1.start();
mp1playing = true;
Runnable r = new Runnable() {
public void run() {
setupplayer();
}
};
Thread t = new Thread(r);
t.start();
}
} else {
prepareState = true;
mp2preparing = false;
mp2prepared = true;
}
}
};
/**
* Set Up player(s)
*/
public void setupplayer() {
final String TAG = "setupplayer";
Runnable r = new Runnable() {
public void run() {
try {
if (!mp1preparing && !mp1prepared) {
while (true) {
downloadingformp1 = true;
if (started == false)
break;
if (counter > preparecounter)
break;
}
File f = new File(context.getCacheDir(), DOWNFILE+ preparecounter);
FileInputStream ins = new FileInputStream(f);
mp1.setDataSource(ins.getFD());
mp1.setAudioStreamType(AudioManager.STREAM_MUSIC);//playing for live streaming
mp1.setOnCompletionListener(new listener());
mp1.setOnPreparedListener(new preparelistener());
if (started == false || waitingForPlayer == true){
}
mp1.prepareAsync();// .prepare();
mp1preparing = true;
downloadingformp1 = false;
preparecounter++;
} else if (!mp2preparing && !mp2prepared) {
while (true) {
downloadingformp2 = true;
if (started == false)
break;
if (counter > preparecounter)
break;
}
File f = new File(context.getCacheDir(), DOWNFILE+ preparecounter);
FileInputStream ins = new FileInputStream(f);
mp2.setDataSource(ins.getFD());
mp2.setAudioStreamType(AudioManager.STREAM_MUSIC);
mp2.setOnCompletionListener(new listener());
mp2.setOnPreparedListener(new preparelistener());
mp2.prepareAsync();
mp2preparing = true;
downloadingformp2 = false;
preparecounter++;
// }
} else
Log.d(TAG, "No Media player is available to setup.");
return;
} catch (FileNotFoundException e) {
Log.e(TAG, e.toString());
Log.e(TAG,"Can't find file. Android must have deleted it on a clean up ");
stop();
return;
} catch (IllegalStateException e) {
Log.e(TAG, e.toString());
stop();
} catch (IOException e) {
Log.e(TAG, e.toString());
stop();
}
}
};
preparringthread = new Thread(r);
preparringthread.start();
try {
preparringthread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void removefile() {
File temp = new File(context.getCacheDir(), DOWNFILE + playedcounter);
temp.delete();
playedcounter++;
}
public boolean stop() {
final String TAG = "STOP";
stopping = true;
try {
if (mp1.isPlaying()){
if (!(stream == null)) {
Log.i("IN STOP", "MP1 is nill");
stopSelf();
}
mp1.stop();
}
if (mp2.isPlaying()){
Log.i("IN STOP", "MP2 is nill");
if (!(stream == null)){
stopSelf();
}
mp2.stop();
}
} catch (Exception e) {
Log.e(TAG, "error stopping players");
}
if (stream != null) {
try {
stream.close();
} catch (IOException e) {
Log.e(TAG, "error closing open connection");
}
}
stream = null;
processHasStarted = false;
processHasPaused = true;
if (preparringthread != null) {
preparringthread.interrupt();
}
stopSelf();
return true;
}
public boolean isPlaying() {
return processHasStarted;
}
public boolean isPause() {
return processHasPaused;
}
private void raiseThreadPriority() {
Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO);
}
}
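A hedged note on the FileNotFoundException itself: setupplayer() opens DOWNFILE + preparecounter from the cache directory, and Android is free to purge that directory (removefile() also deletes segments as they finish playing), so the file can vanish between download and prepare. Checking for the segment before handing it to the MediaPlayer turns the crash into a recoverable wait; a minimal sketch (the helper name is hypothetical):
static FileInputStream openSegmentIfPresent(File cacheDir, String baseName, int index) throws IOException {
File f = new File(cacheDir, baseName + index);
if (!f.exists() || f.length() == 0) {
return null; // caller retries after the downloader flushes the next segment
}
return new FileInputStream(f);
}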
You should call release() to free the resources. If they are not released, too many MediaPlayer instances may result in an exception.
Write this code in your Service.
Updated:
private void releaseMediaPlayer() {
if (mediaPlayer != null) {
if(mediaPlayer.isPlaying()) {
mediaPlayer.stop();
}
mediaPlayer.release();
mediaPlayer = null;
}
}
@Override
protected void onDestroy() {
super.onDestroy();
releaseMediaPlayer();
}
You can see this
