Simultaneous playback of multiple speakers - Java

I want to play multiple speakers at the same time.
In my application I'm receiving audio from the network (encoded in C#), decoding it with Opus, and then I want to play the bytes. But right now I can play only one speaker.
My AudioPlayer class:
public class Player {
private static final String TAG = Player.class.getName();
private AudioTrack audioTrack;
private boolean isWorking;
public Player() {
try {
audioTrack = new AudioTrack(
AudioManager.STREAM_MUSIC,
AudioConsts.SAMPLERATE,
AudioConsts.NUM_CHANNELS == 1 ? AudioConsts.CHANNEL_OUT_MONO : AudioConsts.CHANNEL_OUT_STEREO,
AudioConsts.ENCODING_PCM_16BIT,
AudioConsts.GetPlayerBufferSize(),
AudioTrack.MODE_STREAM);
} catch (Exception e){
Log.e(TAG, e.toString());
}
}
public void play() {
new Thread(new Runnable() {
#Override
public void run() {
isWorking = true;
try {
audioTrack.play();
} catch (Exception e) {
Log.d(e.toString(), "AUDIO EXCEPTION");
return;
}
int bufferSize = AudioConsts.GetPlayerBufferSize();
while (isWorking){
int cursor = audioTrack.getPlaybackHeadPosition();
if (cursor > bufferSize){
cursor %= bufferSize;
audioTrack.flush();
audioTrack.setPlaybackHeadPosition(cursor);
}
}
}
}).start();
}
public void stopReading(){
if (!isWorking)
return;
audioTrack.release();
isWorking = false;
}
public void appendForPlayback(byte[] audioMessage, int size) {
if (size != 0){
int writen = audioTrack.write(audioMessage, 0, size);
if (writen != size) {
//audioTrack.release();
Log.d(TAG, "WTF");
}
}
}
}
Also attach my AudioPlayer's initialization:
#Override
public void onCreate() {
super.onCreate();
...
player = new Player();
player.play();
IntentFilter filter = new IntentFilter();
filter.addAction(ON_UNITY_AUDIO_MESSAGE_RECEIVED);
filter.addAction(AudioConsts.START_RECORDER);
filter.addAction(AudioConsts.STOP_RECORDER);
broadcastReceiver = new BroadcastReceiver() {
#Override
public void onReceive(Context context, Intent intent) {
if (intent.getAction().equals(ON_UNITY_AUDIO_MESSAGE_RECEIVED)) {
byte[] decryptedBytes = intent.getByteArrayExtra(UNITY_AUDIO_MESSAGE);
onUnityAudioReceivedFromNetwork(decryptedBytes);
} else if (intent.getAction().equals(AudioConsts.START_RECORDER)) {
incrementSessionCount();
recorder.startRecording();
} else if (intent.getAction().equals(AudioConsts.STOP_RECORDER)) {
recorder.stopRecording();
}
}
};
registerReceiver(broadcastReceiver, filter);
decodeMsg = new byte[AudioConsts.FRAME_SIZE * AudioConsts.ENCODING_PCM_16BIT];
opusDecoder = new OpusDecoder();
opusDecoder.init(AudioConsts.SAMPLERATE, AudioConsts.NUM_CHANNELS);
}
...
/**
 * Handles one audio payload from the network: parse, Opus-decode into the
 * shared PCM buffer, then hand the bytes to the player.
 */
private void onUnityAudioReceivedFromNetwork(byte[] decryptedBytes) {
    UnityAudioMessage audioMessage = UnityAudioMessage.fromBytesSharp(decryptedBytes);
    // Not a valid audio message — nothing to play.
    if (audioMessage == null) {
        return;
    }
    try {
        // Decode the Opus frame into decodeMsg (reused across calls).
        opusDecoder.decode(audioMessage.unityAudioMessage, decodeMsg, AudioConsts.FRAME_SIZE);
    } catch (OpusError e) {
        e.printStackTrace();
        return;
    }
    player.appendForPlayback(decodeMsg, decodeMsg.length);
}
...
How can I implement simultaneous playback of multiple speakers?
I also tried implementing it with a HashMap of my players, but it still behaves like a single audio track.

I tried a lot of things, but my solution uses AsyncTask.
Attach Player.class
public class Player {
private static final String TAG = Player.class.getName();
private AudioTrack audioTrack;
private boolean isWorking;
public Player() {
try {
audioTrack = new AudioTrack(
new AudioAttributes.Builder()
.setUsage(AudioAttributes.USAGE_MEDIA)
.setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
.setLegacyStreamType(AudioManager.STREAM_MUSIC)
.build(),
new AudioFormat.Builder()
.setChannelMask(AudioFormat.CHANNEL_OUT_MONO)
.setEncoding(AudioFormat.ENCODING_PCM_16BIT)
.setSampleRate(AudioConsts.SAMPLERATE)
.build(),
AudioConsts.GetPlayerBufferSize(),
AudioTrack.MODE_STREAM,
AudioManager.AUDIO_SESSION_ID_GENERATE);
} catch (Exception e) {
Log.e(TAG, e.toString());
}
}
public void play() {
audioTrack.play();
}
public void stopReading() {
if (!isWorking)
return;
audioTrack.release();
isWorking = false;
}
public void appendForPlayback(byte[] audioMessage, int size) {
new Executor().doInBackground(audioMessage);
}
private class Executor extends AsyncTask<byte[], Void, Void> {
#Override
protected Void doInBackground(byte[]... bytes) {
for (byte[] audioMessage : bytes) {
if (audioMessage.length != 0) {
int writen = audioTrack.write(audioMessage, 0, audioMessage.length);
if (writen != audioMessage.length) {
Log.d(TAG, "WTF");
}
}
}
return null;
}
}}

Related

Android Development: Camera2 takes videos fine, but can't take still images as the camera device has been disconnected

I have been trying to use this tutorial to create an app that can take pictures and videos with camera2 specifically as I need access to more technical aspects such as focus and exposure. As the title of this post states, I can get the code to save a video properly, but when I press the button to take a still image, it gives the following error message repeatedly:
Handler (android.os.Handler) {302c85a} sending message to a Handler on a dead thread
java.lang.IllegalStateException: Handler (android.os.Handler) {302c85a} sending message to a Handler on a dead thread
at android.os.MessageQueue.enqueueMessage(MessageQueue.java:560)
at android.os.Handler.enqueueMessage(Handler.java:778)
at android.os.Handler.sendMessageAtTime(Handler.java:727)
at android.os.Handler.sendMessageDelayed(Handler.java:697)
at android.os.Handler.post(Handler.java:427)
at android.hardware.camera2.impl.CameraDeviceImpl$CameraHandlerExecutor.execute(CameraDeviceImpl.java:2353)
at android.hardware.camera2.impl.CallbackProxies$SessionStateCallbackProxy.onCaptureQueueEmpty(CallbackProxies.java:94)
at android.hardware.camera2.impl.CameraCaptureSessionImpl$2.onRequestQueueEmpty(CameraCaptureSessionImpl.java:864)
at android.hardware.camera2.impl.CameraDeviceImpl$CameraDeviceCallbacks.onRequestQueueEmpty(CameraDeviceImpl.java:2331)
at android.hardware.camera2.ICameraDeviceCallbacks$Stub.onTransact(ICameraDeviceCallbacks.java:187)
at android.os.Binder.execTransactInternal(Binder.java:1285)
at android.os.Binder.execTransact(Binder.java:1244)
This may be related to this error that occurs when the startPreview method is called (see code at the bottom of post):
android.hardware.camera2.CameraAccessException: CAMERA_DISCONNECTED (2): checkPidStatus:2085: The camera device has been disconnected
at android.hardware.camera2.CameraManager.throwAsPublicException(CameraManager.java:1390)
...
at com.[example].androidcamera2api.MainActivity$10.onConfigured(MainActivity.java:986)
The code, which I have modified slightly from the original github example to use non-deprecated API, is as follows. (I have also commented out lines related to getting audio. They kept causing app crashes, but they are unnecessary for my use case so it shouldn't matter. Also omitted imports for brevity.)
public class MainActivity extends AppCompatActivity {
private static final String TAG = "Camera2VideoImageActivi";
private static final int REQUEST_CAMERA_PERMISSION_RESULT = 0;
private static final int REQUEST_WRITE_EXTERNAL_STORAGE_PERMISSION_RESULT = 1;
private static final int STATE_PREVIEW = 0;
private static final int STATE_WAIT_LOCK = 1;
private int mCaptureState = STATE_PREVIEW;
private TextureView mTextureView;
private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
#Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
setupCamera(width, height);
connectCamera();
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {}
};
private CameraDevice mCameraDevice;
private CameraDevice.StateCallback mCameraDeviceStateCallback = new CameraDevice.StateCallback() {
#Override
public void onOpened(CameraDevice camera) {
mCameraDevice = camera;
mMediaRecorder = new MediaRecorder();
if(mIsRecording){
try{
createVideoFileName();
} catch(IOException e){
e.printStackTrace();
}
startRecord();
mMediaRecorder.start();
runOnUiThread(new Runnable() {
#Override
public void run() {
mChronometer.setBase(SystemClock.elapsedRealtime());
mChronometer.setVisibility(View.VISIBLE);
mChronometer.start();
}
});
} else {
startPreview();
}
}
#Override
public void onDisconnected(CameraDevice camera) {
camera.close();
mCameraDevice = null;
}
#Override
public void onError(CameraDevice camera, int error) {
camera.close();
mCameraDevice = null;
}
};
private HandlerThread mBackgroundHandlerThread;
private Handler mBackgroundHandler;
private String mCameraId;
private Size mPreviewSize;
private Size mVideoSize;
private Size mImageSize;
private ImageReader mImageReader;
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() {
#Override
public void onImageAvailable(ImageReader reader) {
mBackgroundHandler.post(new ImageSaver(reader.acquireLatestImage()));
}
};
private class ImageSaver implements Runnable {
private final Image mImage;
public ImageSaver(Image image) {
mImage = image;
}
#Override
public void run() {
ByteBuffer byteBuffer = mImage.getPlanes()[0].getBuffer();
byte[] bytes = new byte[byteBuffer.remaining()];
byteBuffer.get(bytes);
FileOutputStream fileOutputStream = null;
try {
fileOutputStream = new FileOutputStream(mImageFileName);
fileOutputStream.write(bytes);
} catch (IOException e) {
e.printStackTrace();
} finally {
mImage.close();
Intent mediaStoreUpdateIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
mediaStoreUpdateIntent.setData(Uri.fromFile(new File(mImageFileName)));
sendBroadcast(mediaStoreUpdateIntent);
if(fileOutputStream != null) {
try {
fileOutputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
private MediaRecorder mMediaRecorder;
private Chronometer mChronometer;
private int mTotalRotation;
private CameraCaptureSession mPreviewCaptureSession;
private CameraCaptureSession.CaptureCallback mPreviewCaptureCallback = new CameraCaptureSession.CaptureCallback() {
private void process(CaptureResult captureResult) {
switch (mCaptureState) {
case STATE_PREVIEW:
break;
case STATE_WAIT_LOCK:
mCaptureState = STATE_PREVIEW;
Integer afState = captureResult.get(CaptureResult.CONTROL_AF_STATE);
if(afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
Toast.makeText(getApplicationContext(), "AF Locked!", Toast.LENGTH_SHORT).show();
startStillCaptureRequest();
}
break;
}
}
#Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
process(result);
}
};
private CameraCaptureSession mRecordCaptureSession;
private CameraCaptureSession.CaptureCallback mRecordCaptureCallback = new CameraCaptureSession.CaptureCallback() {
private void process(CaptureResult captureResult) {
switch (mCaptureState) {
case STATE_PREVIEW:
break;
case STATE_WAIT_LOCK:
mCaptureState = STATE_PREVIEW;
Integer afState = captureResult.get(CaptureResult.CONTROL_AF_STATE);
if(afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
Toast.makeText(getApplicationContext(), "AF Locked!", Toast.LENGTH_SHORT).show();
startStillCaptureRequest();
}
break;
}
}
#Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
process(result);
}
};
private CaptureRequest.Builder mCaptureRequestBuilder;
private ImageButton mRecordImageButton;
private ImageButton mStillImageButton;
private boolean mIsRecording = false;
private boolean mIsTimelapse = false;
private File mVideoFolder;
private String mVideoFileName;
private File mImageFolder;
private String mImageFileName;
private static SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 0);
ORIENTATIONS.append(Surface.ROTATION_90, 90);
ORIENTATIONS.append(Surface.ROTATION_180, 180);
ORIENTATIONS.append(Surface.ROTATION_270, 270);
}
private static class CompareSizeByArea implements Comparator<Size> {
#Override
public int compare(Size lhs, Size rhs) {
return Long.signum( (long)(lhs.getWidth() * lhs.getHeight()) -
(long)(rhs.getWidth() * rhs.getHeight()));
}
}
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
requestPermissions(new String[]{"android.permission.CAMERA","android.permission.WRITE_EXTERNAL_STORAGE","android.permission.READ_EXTERNAL_STORAGE"}, 1);
createVideoFolder();
createImageFolder();
mChronometer = (Chronometer) findViewById(R.id.chronometer);
mTextureView = (TextureView) findViewById(R.id.textureView);
mStillImageButton = (ImageButton) findViewById(R.id.cameraImageButton2);
mStillImageButton.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
if(!(mIsTimelapse || mIsRecording)) {
checkWriteStoragePermission();
}
lockFocus();
}
});
mRecordImageButton = (ImageButton) findViewById(R.id.videoOnlineImageButton);
mRecordImageButton.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
if (mIsRecording || mIsTimelapse) {
mChronometer.stop();
mChronometer.setVisibility(View.INVISIBLE);
mIsRecording = false;
mIsTimelapse = false;
mRecordImageButton.setImageResource(R.mipmap.btn_video_online);
// Starting the preview prior to stopping recording which should hopefully
// resolve issues being seen in Samsung devices.
startPreview();
mMediaRecorder.stop();
mMediaRecorder.reset();
Intent mediaStoreUpdateIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
mediaStoreUpdateIntent.setData(Uri.fromFile(new File(mVideoFileName)));
sendBroadcast(mediaStoreUpdateIntent);
} else {
mIsRecording = true;
mRecordImageButton.setImageResource(R.mipmap.btn_video_busy);
checkWriteStoragePermission();
}
}
});
mRecordImageButton.setOnLongClickListener(new View.OnLongClickListener() {
#Override
public boolean onLongClick(View v) {
mIsTimelapse =true;
mRecordImageButton.setImageResource(R.mipmap.btn_timelapse);
checkWriteStoragePermission();
return true;
}
});
}
#Override
protected void onResume() {
super.onResume();
startBackgroundThread();
if(mTextureView.isAvailable()) {
setupCamera(mTextureView.getWidth(), mTextureView.getHeight());
connectCamera();
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
#Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if(requestCode == REQUEST_CAMERA_PERMISSION_RESULT) {
if(grantResults[0] != PackageManager.PERMISSION_GRANTED) {
Toast.makeText(getApplicationContext(),
"Application will not run without camera services", Toast.LENGTH_SHORT).show();
}
if(grantResults[1] != PackageManager.PERMISSION_GRANTED) {
Toast.makeText(getApplicationContext(),
"Application will not have audio on record", Toast.LENGTH_SHORT).show();
}
}
if(requestCode == REQUEST_WRITE_EXTERNAL_STORAGE_PERMISSION_RESULT) {
if(grantResults[0] == PackageManager.PERMISSION_GRANTED) {
if(mIsRecording || mIsTimelapse) {
mIsRecording = true;
mRecordImageButton.setImageResource(R.mipmap.btn_video_busy);
}
Toast.makeText(this,
"Permission successfully granted!", Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(this,
"App needs to save video to run", Toast.LENGTH_SHORT).show();
}
}
}
#Override
protected void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
#Override
public void onWindowFocusChanged(boolean hasFocas) {
super.onWindowFocusChanged(hasFocas);
View decorView = getWindow().getDecorView();
if(hasFocas) {
decorView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_STABLE
| View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
| View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_FULLSCREEN
| View.SYSTEM_UI_FLAG_HIDE_NAVIGATION);
}
}
private void setupCamera(int width, int height) {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
for(String cameraId : cameraManager.getCameraIdList()){
CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
if(cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) ==
CameraCharacteristics.LENS_FACING_FRONT){
continue;
}
StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
int deviceOrientation = getWindowManager().getDefaultDisplay().getRotation();
mTotalRotation = sensorToDeviceRotation(cameraCharacteristics, deviceOrientation);
boolean swapRotation = mTotalRotation == 90 || mTotalRotation == 270;
int rotatedWidth = width;
int rotatedHeight = height;
if(swapRotation) {
rotatedWidth = height;
rotatedHeight = width;
}
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedWidth, rotatedHeight);
mVideoSize = chooseOptimalSize(map.getOutputSizes(MediaRecorder.class), rotatedWidth, rotatedHeight);
mImageSize = chooseOptimalSize(map.getOutputSizes(ImageFormat.JPEG), rotatedWidth, rotatedHeight);
mImageReader = ImageReader.newInstance(mImageSize.getWidth(), mImageSize.getHeight(), ImageFormat.JPEG, 1);
mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);
mCameraId = cameraId;
return;
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void connectCamera() {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if(ContextCompat.checkSelfPermission(this, android.Manifest.permission.CAMERA) ==
PackageManager.PERMISSION_GRANTED) {
cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mBackgroundHandler);
} else {
if(shouldShowRequestPermissionRationale(android.Manifest.permission.CAMERA)) {
Toast.makeText(this,
"Video app required access to camera", Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[] {android.Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO
}, REQUEST_CAMERA_PERMISSION_RESULT);
}
} else {
cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mBackgroundHandler);
System.out.println("Opened camera " + mCameraId);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void startRecord() {
try {
if(mIsRecording) {
setupMediaRecorder();
} else if(mIsTimelapse) {
setupTimelapse();
}
SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface previewSurface = new Surface(surfaceTexture);
Surface recordSurface = mMediaRecorder.getSurface();
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
mCaptureRequestBuilder.addTarget(previewSurface);
mCaptureRequestBuilder.addTarget(recordSurface);
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface, recordSurface, mImageReader.getSurface()),
new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(CameraCaptureSession session) {
mRecordCaptureSession = session;
try {
mRecordCaptureSession.setRepeatingRequest(
mCaptureRequestBuilder.build(), null, null
);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onConfigureFailed(CameraCaptureSession session) {
Log.d(TAG, "onConfigureFailed: startRecord");
}
}, null);
} catch (Exception e) {
e.printStackTrace();
}
}
private void startPreview() {
SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface previewSurface = new Surface(surfaceTexture);
try {
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mCaptureRequestBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface, mImageReader.getSurface()),
new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(CameraCaptureSession session) {
Log.d(TAG, "onConfigured: startPreview");
mPreviewCaptureSession = session;
try {
mPreviewCaptureSession.setRepeatingRequest(mCaptureRequestBuilder.build(),
null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onConfigureFailed(CameraCaptureSession session) {
Log.d(TAG, "onConfigureFailed: startPreview");
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void startStillCaptureRequest() {
try {
if(mIsRecording) {
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_VIDEO_SNAPSHOT);
} else {
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
//mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_MANUAL);
Debug.logStack("Debug","Building still capture request",1);
}
mCaptureRequestBuilder.addTarget(mImageReader.getSurface());
mCaptureRequestBuilder.set(CaptureRequest.JPEG_ORIENTATION, mTotalRotation);
CameraCaptureSession.CaptureCallback stillCaptureCallback = new
CameraCaptureSession.CaptureCallback() {
#Override
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
super.onCaptureStarted(session, request, timestamp, frameNumber);
try {
createImageFileName();
} catch (IOException e) {
e.printStackTrace();
}
}
};
if(mIsRecording) {
mRecordCaptureSession.capture(mCaptureRequestBuilder.build(), stillCaptureCallback, null);
} else {
mPreviewCaptureSession.capture(mCaptureRequestBuilder.build(), stillCaptureCallback, null);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void closeCamera() {
if(mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
if(mMediaRecorder != null) {
mMediaRecorder.release();
mMediaRecorder = null;
}
}
private void startBackgroundThread() {
mBackgroundHandlerThread = new HandlerThread("Camera2VideoImage");
mBackgroundHandlerThread.start();
mBackgroundHandler = new Handler(mBackgroundHandlerThread.getLooper());
}
private void stopBackgroundThread() {
mBackgroundHandlerThread.quitSafely();
try {
mBackgroundHandlerThread.join();
mBackgroundHandlerThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private static int sensorToDeviceRotation(CameraCharacteristics cameraCharacteristics, int deviceOrientation) {
int sensorOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
deviceOrientation = ORIENTATIONS.get(deviceOrientation);
return (sensorOrientation + deviceOrientation + 360) % 360;
}
private static Size chooseOptimalSize(Size[] choices, int width, int height) {
List<Size> bigEnough = new ArrayList<Size>();
for(Size option : choices) {
if(option.getHeight() == option.getWidth() * height / width &&
option.getWidth() >= width && option.getHeight() >= height) {
bigEnough.add(option);
}
}
if(bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizeByArea());
} else {
return choices[0];
}
}
private void createVideoFolder() {
File movieFile = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES);
mVideoFolder = new File(movieFile, "camera2VideoImage");
if(!mVideoFolder.exists()) {
mVideoFolder.mkdirs();
}
}
private File createVideoFileName() throws IOException {
String timestamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
String prepend = "VIDEO_" + timestamp + "_";
File videoFile = File.createTempFile(prepend, ".mp4", mVideoFolder);
mVideoFileName = videoFile.getAbsolutePath();
return videoFile;
}
private void createImageFolder() {
File imageFile = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES);
mImageFolder = new File(imageFile, "camera2VideoImage");
if(!mImageFolder.exists()) {
mImageFolder.mkdirs();
}
}
private File createImageFileName() throws IOException {
String timestamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
String prepend = "IMAGE_" + timestamp + "_";
File imageFile = File.createTempFile(prepend, ".jpg", mImageFolder);
mImageFileName = imageFile.getAbsolutePath();
return imageFile;
}
private void checkWriteStoragePermission() {
if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if(ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE)
== PackageManager.PERMISSION_GRANTED) {
try {
createVideoFileName();
} catch (IOException e) {
e.printStackTrace();
}
if(mIsTimelapse || mIsRecording) {
startRecord();
mMediaRecorder.start();
mChronometer.setBase(SystemClock.elapsedRealtime());
mChronometer.setVisibility(View.VISIBLE);
mChronometer.start();
}
} else {
if(shouldShowRequestPermissionRationale(Manifest.permission.WRITE_EXTERNAL_STORAGE)) {
Toast.makeText(this, "app needs to be able to save videos", Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_WRITE_EXTERNAL_STORAGE_PERMISSION_RESULT);
}
} else {
try {
createVideoFileName();
} catch (IOException e) {
e.printStackTrace();
}
if(mIsRecording || mIsTimelapse) {
startRecord();
mMediaRecorder.start();
mChronometer.setBase(SystemClock.elapsedRealtime());
mChronometer.setVisibility(View.VISIBLE);
mChronometer.start();
}
}
}
private void setupMediaRecorder() throws IOException {
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
//mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder.setOutputFile(mVideoFileName);
mMediaRecorder.setVideoEncodingBitRate(1000000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight());
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
//mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
mMediaRecorder.setOrientationHint(mTotalRotation);
mMediaRecorder.prepare();
}
private void setupTimelapse() throws IOException {
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_TIME_LAPSE_HIGH));
mMediaRecorder.setOutputFile(mVideoFileName);
mMediaRecorder.setCaptureRate(2);
mMediaRecorder.setOrientationHint(mTotalRotation);
mMediaRecorder.prepare();
}
private void lockFocus() {
mCaptureState = STATE_WAIT_LOCK;
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
try {
if(mIsRecording) {
mRecordCaptureSession.capture(mCaptureRequestBuilder.build(), mRecordCaptureCallback, mBackgroundHandler);
} else {
mPreviewCaptureSession.capture(mCaptureRequestBuilder.build(), mPreviewCaptureCallback, mBackgroundHandler);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
}
I would be grateful if anyone could identify why I am having this error and help me to get the app to properly take pictures.

How to stop app from accessing the internet when a blacklisted app is installed

I have an app which automatically fetch data online whenever it is opened. I would like to make it a way that the app will only check for update online when a blacklisted app is not detected.
This is the update core.
public class UpdateCore extends AsyncTask<String, String, String> {
private static final String TAG = "NetGuard.Download";
private Context context;
private Listener listener;
private PowerManager.WakeLock wakeLock;
private HttpURLConnection uRLConnection;
private InputStream is;
private TorrentDetection torrent;
private BufferedReader buffer;
private String url;
public interface Listener {
void onLoading();
void onCompleted(String config) throws Exception;
void onCancelled();
void onException(String ex);
}
public UpdateCore(Context context, String url, Listener listener) {
this.context = context;
this.url = url;
this.listener = listener;
}
#Override
protected void onPreExecute() {
listener.onLoading();
}
#Override
protected String doInBackground(String... args) {
try {
String api = url;
if(!api.startsWith("http")){
api = new StringBuilder().append("http://").append(url).toString();
}
URL oracle = new URL(api);
HttpClient Client = new DefaultHttpClient();
HttpGet httpget = new HttpGet(oracle.toURI());
HttpResponse response = Client.execute(httpget);
InputStream in = response.getEntity().getContent();
BufferedReader reader = new BufferedReader(new InputStreamReader(
in, "iso-8859-1"), 8);
//BufferedReader reader = new BufferedReader(new InputStreamReader(in));
StringBuilder str = new StringBuilder();
String line = null;
while((line = reader.readLine()) != null)
{
str.append(line);
}
in.close();
return str.toString();
} catch (Exception e) {
return "error";
} finally {
if (buffer != null) {
try {
buffer.close();
} catch (IOException ignored) {
}
}
if (is != null) {
try {
is.close();
} catch (IOException ignored) {
}
}
if (uRLConnection != null) {
uRLConnection.disconnect();
}
}
}
#Override
protected void onCancelled() {
super.onCancelled();
// Log.i(TAG, "Cancelled");
// pd.dismiss();
listener.onCancelled();
}
#Override
protected void onPostExecute(String result) {
// wakeLock.release();
//nm.cancel(1);
// pd.dismiss();
try
{
if (result.equals("error"))
{
listener.onException(result);
}
else {
listener.onCompleted(result);
}
}
catch (Exception e)
{
listener.onException(e.getMessage());
}
}
}
This is the detection code
public class TorrentDetection
{
private Context context;
private String[] items;
private TorrentDetection.TorrentListener listener;
private Timer timer;
private Handler handler;
public interface TorrentListener {
public void detected(ArrayList pkg);
}
public TorrentDetection(Context c, String[] i, TorrentListener listener) {
context = c;
items = i;
this.listener = listener;
}
private boolean check(String uri)
{
PackageManager pm = context.getPackageManager();
boolean app_installed = false;
try
{
pm.getPackageInfo(uri, PackageManager.GET_ACTIVITIES);
app_installed = true;
}
catch (PackageManager.NameNotFoundException e)
{
app_installed = false;
}
return app_installed;
}
void check() {
ArrayList arrayList2 = new ArrayList();
for (String pack : items)
{
if(check(pack)){
arrayList2.add(pack);
}
}
if (arrayList2.size() > 0)
{
listener.detected(arrayList2);
stop();
}
}
public void start() {
handler = new Handler();
timer = new Timer();
TimerTask doAsynchronousTask = new TimerTask() {
#Override
public void run()
{
handler.post(new Runnable() {
public void run()
{
check();
}
});
}
};
timer.schedule(doAsynchronousTask, 0, 3000);
}
public void stop() {
if(timer != null){
timer.cancel();
timer = null;
}
if(handler != null){
handler = null;
}
}
}
The torrent detection code checks if the following apps are installed and returns a message that an unsupported app is installed.
/** Central configuration: the update endpoint and the package blacklist. */
public class Constraints {
    /** URL the updater downloads its (encrypted) config document from. */
    public static String updater = "https://pastenord.org/raw/random";

    /** Packages whose presence blocks the online update check. */
    public static String[] torrentList = new String[]{
            "com.guoshi.httpcanary",
            "com.adguard.android.contentblocker"};
}
In my MainActivity, this initiates the detection before the online update is done, with torrent.start();
void update() {
torrent.start();
new UpdateCore(this, Constraints.updater, new UpdateCore.Listener() {
#Override
public void onLoading() {
}
#Override
public void onCompleted(final String config) {
try {
final JSONObject obj = new JSONObject(MilitaryGradeEncrypt.decryptBase64StringToString(config, Constraints.confpass));
if (Double.valueOf(obj.getString("Version")) <= Double.valueOf(conts.getConfigVersion())) {
} else {
new SweetAlertDialog(MainActivity.this, SweetAlertDialog.CUSTOM_IMAGE_TYPE)
.setTitleText("Update")
.setContentText("\n" + obj.getString("Message"))
.setConfirmText("Yes,Update it!")
.setCustomImage(R.drawable.ic_update)
.setConfirmClickListener(new SweetAlertDialog.OnSweetClickListener() {
#Override
public void onClick(SweetAlertDialog sDialog) {
sDialog.dismissWithAnimation();
welcomeNotif();
restart_app();
try {
db.updateData("1", config);
sp.edit().putString("CurrentConfigVersion", obj.getString("Version")).commit();
} catch (JSONException e) {}
}
})
.show();
}
} catch (Exception e) {
// Toast.makeText(MainActivity.this, e.getMessage() , 0).show();
}
}
#Override
public void onCancelled() {
}
#Override
public void onException(String ex) {
}
}).execute();
}
}
It then makes a popup when an unsupported app is detected with this.
// Wire the detector to the UI: when any blacklisted package is found, stop
// the service and show a non-cancelable dialog naming the offending apps.
torrent = new TorrentDetection(this, Constraints.torrentList, new TorrentDetection.TorrentListener() {
    @Override // FIX: was '#Override' (formatting artifact) - would not compile.
    public void detected(ArrayList pkg)
    {
        stopService();
        new AlertDialog.Builder(MainActivity.this)
            .setTitle("unsupported App!")
            .setMessage(String.format("%s", new Object[]{TextUtils.join(", ", (String[]) pkg.toArray(new String[pkg.size()]))}))
            .setPositiveButton("OK", null)
            //.setAnimation(Animation.SLIDE)
            .setCancelable(false)
            .create()
            //.setIcon(R.mipmap.ic_info, Icon.Visible)
            .show();
    }
});
I would like to make the app check for an online update only when none of the blacklisted apps is installed. Any form of help is welcomed and appreciated.
use this method to check if an application is installed or not
/**
 * Returns true when the given package is installed and visible to this app.
 * NameNotFoundException is the PackageManager's "not installed" signal,
 * so it is translated into a boolean rather than propagated.
 */
public boolean isPackageInstalled(String packageName, PackageManager packageManager) {
    try {
        packageManager.getPackageInfo(packageName, 0);
    } catch (PackageManager.NameNotFoundException notInstalled) {
        return false;
    }
    return true;
}
then to check, simply call:
// Obtain the PackageManager from any Context.
PackageManager pm = context.getPackageManager();
boolean isInstalled = isPackageInstalled("com.somepackage.name", pm);
// Branch on the result: run the update only when no blacklisted app is present.
if(!isInstalled){
//do your update here
}
else{
//display a message that a blacklisted app is installed
}
Side note: if you are targeting Android 11 (API level 30) or above, you must declare the packages you want to query in your manifest, like this:
<queries>
<!--Add queries here-->
<package android:name="com.somepackage.name" />
</queries>

How to read a recorded audio as a WAV file on Java for audio processing?

I want to read my recorded wav file and use it for signal processing with the following processing method:
//SIGNAL PROCESSING
/**
 * Filters the raw PCM signal (Bessel low-pass then band-pass), amplifies
 * and squares it, takes the Hilbert envelope, and counts samples below a
 * fixed threshold; >= 10 such samples is reported as apnea via apneaview.
 */
private void proccessSignal(double[] signalIN){
    double samplefreq = 44100;
    double[] signal = signalIN;
    Bessel bandpassfilter = new Bessel(signal,samplefreq);
    double[] filteredSignal = bandpassfilter.lowPassFilter(1,150);
    Bessel newsignalfiltered = new Bessel(filteredSignal,samplefreq);
    double[] needsAmplifianceSignal = newsignalfiltered.bandPassFilter(1,200,500);
    // NOTE: alias, not a copy - the loop below mutates needsAmplifianceSignal
    // too. Kept because the original array is not read again afterwards.
    double[] amplifiedSignal = needsAmplifianceSignal;
    // FIX: was "i <= length", which threw ArrayIndexOutOfBoundsException
    // on the final iteration.
    for (int i = 0; i < needsAmplifianceSignal.length; i++){
        amplifiedSignal[i] = amplifiedSignal[i] * 1000;
        amplifiedSignal[i] = amplifiedSignal[i] * amplifiedSignal[i];
    }
    Hilbert h = new Hilbert(amplifiedSignal);
    h.hilbertTransform();
    double[][] analytical_signal = h.getOutput();
    // FIX: previously logged the array reference (useless "[[D@..." output).
    Log.d("Endsignal", "proccessSignal: rows=" + analytical_signal.length);
    double threshold = 0.0052;
    int apneaCount = 0;
    // FIX: both loops used "<=" (out of bounds), and the inner loop used the
    // row COUNT as its bound instead of the current row's width.
    for (int i = 0; i < analytical_signal.length; i++) {
        for (int j = 0; j < analytical_signal[i].length; j++) {
            if (analytical_signal[i][j] < threshold) {
                apneaCount++;
                if (apneaCount >= 10){
                    apneaview.setText("YOU HAVE APNEA, CONTACT YOUR DOCTOR");
                    break; // exits the current row only, as in the original
                }
            }
        }
    }
    if (apneaCount < 10){
        apneaview.setText("YOU DO NOT HAVE APNEA");
    }
}
The recording and everything works but it looks like the signal is always null so it never gets to my processing method, here's my Logcat to get a piece of detailed information about this issue:
W/System.err: com.github.psambit9791.wavfile.WavFileException: Invalid Wav Header data, incorrect riff chunk ID
W/System.err: at com.github.psambit9791.wavfile.WavFile.openWavFile(WavFile.java:257)
W/System.err: at com.github.psambit9791.jdsp.io.Wav.readWav(Wav.java:70)
W/System.err: at com.example.appnea.DetailedStat.onCreate(DetailedStat.java:82)
and here's the full activity for better understanding:
// TEST
#RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_detailed_stat);
Intent receivedFromList = getIntent();
name = receivedFromList.getStringExtra("recordName");
path = receivedFromList.getStringExtra("recordPath");
nameview = findViewById(R.id.NAME);
pathview = findViewById(R.id.PATH);
apneaview = findViewById(R.id.APNEA);
nameview.setText("Record date: " + name);
pathview.setText("Record path: " + path);
apneaview.setText("INITIALISING DATA");
File audiofile = new File(path);
double[] signal = new double[0];
try {
String mMime = "audio/3gpp";
MediaCodec codec = MediaCodec.createDecoderByType(mMime);
/**
*
*
*/
Wav objRead1 = new Wav();
objRead1.readWav(path);
Hashtable<String, Long> propsOut = objRead1.getProperties(); // Gets the WAV file properties
double[][] signal1 = objRead1.getData("int"); // Can be 'int', 'long' 'double'
try {
loadFFMpegLibrary();
} catch (FFmpegNotSupportedException e) {
e.printStackTrace();
}
MediaExtractor mx = new MediaExtractor();
try {
mx.setDataSource(path);
} catch (IOException e) {
e.printStackTrace();
}
/**
* Finals
*/
MediaFormat mMediaFormat = mx.getTrackFormat(0);
byte[] bufbytes = new byte[(int) audiofile.length()];
BufferedInputStream buf = new BufferedInputStream(new FileInputStream(audiofile));
buf.read(bufbytes, 0, bufbytes.length);
buf.close();
mMediaFormat.setByteBuffer("3gp", ByteBuffer.wrap(bufbytes));
codec.setCallback(new MediaCodec.Callback() {
#Override
public void onInputBufferAvailable(#NonNull MediaCodec codec, int index) {
}
#Override
public void onOutputBufferAvailable(#NonNull MediaCodec codec, int index, #NonNull MediaCodec.BufferInfo info) {
}
#Override
public void onError(#NonNull MediaCodec codec, #NonNull MediaCodec.CodecException e) {
}
#Override
public void onOutputFormatChanged(#NonNull MediaCodec codec, #NonNull MediaFormat format) {
}
});
codec.configure(mMediaFormat, null, null, 0);
codec.start();
new Thread(() -> {
MediaCodec.BufferInfo buf_info = new MediaCodec.BufferInfo();
int outputBufferIndex = codec.dequeueOutputBuffer(buf_info, 0);
byte[] pcm = new byte[buf_info.size];
ByteBuffer[] mOutputBuffers = new ByteBuffer[buf_info.size];
mOutputBuffers[outputBufferIndex].get(pcm, 0, buf_info.size);
}).start();
sampleDir = new File(Environment.getExternalStorageDirectory().getAbsolutePath(), "/OfficeRecordings/");
if (!sampleDir.exists()) {
sampleDir.mkdirs();
}
outputFile = sampleDir+"/"+"sample_record.3gp";
finalFile = sampleDir+"/"+"final_record.wav";
final String[] cmd = new String[]{"-y", "-i", outputFile, finalFile};
execFFmpegBinary(cmd);
} catch (IOException | WavFileException e) {
e.printStackTrace();
}
Log.d("AUDIO", "onCreate: AUDIO FILE EXIST?" + audiofile.exists());
//proccessSignal(signal);
}
private void execFFmpegBinary(final String[] command) {
FFmpeg ffmpeg = FFmpeg.getInstance(this);
try {
ffmpeg.loadBinary(new LoadBinaryResponseHandler() {
#Override
public void onStart() {
Log.d("audio", "starting to load binary");
}
#Override
public void onFailure() {
Log.d("audio", "failed to load binary");
}
#Override
public void onSuccess() {
Log.d("audio", "loaded binary");
try {
ffmpeg.execute(cmd, new ExecuteBinaryResponseHandler() {
#Override
public void onStart() {
Log.d("audio", " starting to get audio " + "");
}
#Override
public void onProgress(String message) {
Log.d("audio", " progress getting audio from ");
}
#Override
public void onFailure(String message) {
Log.d("audio", " failed to get audio ");
}
#Override
public void onSuccess(String message) {
Log.d("audio", " success getting audio from video");
}
#Override
public void onFinish() {
}
});
} catch (FFmpegCommandAlreadyRunningException e) {
e.printStackTrace();
}
}
#Override
public void onFinish() {
}
});
} catch (FFmpegNotSupportedException e) {
// Handle if FFmpeg is not supported by device
}
}
// TRYING THE FFMPEG METHOD
public void loadFFMpegLibrary() throws FFmpegNotSupportedException {
if (fFmpeg == null)
fFmpeg = FFmpeg.getInstance(this);
fFmpeg.loadBinary(new FFmpegLoadBinaryResponseHandler() {
#Override
public void onFailure() {
Toast.makeText(getApplicationContext(), "Library failed to load", Toast.LENGTH_LONG).show();
}
#Override
public void onSuccess() {
Toast.makeText(getApplicationContext(), "Library loaded successfully", Toast.LENGTH_LONG).show();
}
#Override
public void onStart() {
}
#Override
public void onFinish() {
}
});
}
public void executeCommand(final String[] command) throws FFmpegCommandAlreadyRunningException {
fFmpeg.execute(command, new ExecuteBinaryResponseHandler() {
#Override
public void onSuccess(String message) {
}
#Override
public void onProgress(String message) {
}
#Override
public void onFailure(String message) {
}
#Override
public void onStart() {
}
#Override
public void onFinish() {
}
});
}
//CONVERTING TO DOUBLE ARRAY FROM BYTEARRAY
/**
 * Reinterprets the byte array as big-endian IEEE-754 doubles, 8 bytes per
 * value. Any trailing bytes that do not fill a full double are ignored.
 */
public static double[] toDoubleArray(byte[] byteArray){
    final int bytesPerDouble = Double.SIZE / Byte.SIZE;
    final double[] values = new double[byteArray.length / bytesPerDouble];
    for (int index = 0; index < values.length; index++) {
        values[index] = ByteBuffer.wrap(byteArray, index * bytesPerDouble, bytesPerDouble).getDouble();
    }
    return values;
}
/**
 * Streams everything remaining in {@code in} to {@code out} in 1 KiB
 * chunks. Neither stream is closed here; that is the caller's job.
 */
private void copyFile(InputStream in, OutputStream out) throws IOException {
    final byte[] chunk = new byte[1024];
    for (int count = in.read(chunk); count != -1; count = in.read(chunk)) {
        out.write(chunk, 0, count);
    }
}
//SIGNAL PROCESSING
/**
 * Filters the raw PCM signal (Bessel low-pass then band-pass), amplifies
 * and squares it, takes the Hilbert envelope, and counts samples below a
 * fixed threshold; >= 10 such samples is reported as apnea via apneaview.
 */
private void proccessSignal(double[] signalIN){
    double samplefreq = 44100;
    double[] signal = signalIN;
    Bessel bandpassfilter = new Bessel(signal,samplefreq);
    double[] filteredSignal = bandpassfilter.lowPassFilter(1,150);
    Bessel newsignalfiltered = new Bessel(filteredSignal,samplefreq);
    double[] needsAmplifianceSignal = newsignalfiltered.bandPassFilter(1,200,500);
    // NOTE: alias, not a copy - the loop below mutates needsAmplifianceSignal
    // too. Kept because the original array is not read again afterwards.
    double[] amplifiedSignal = needsAmplifianceSignal;
    // FIX: was "i <= length", which threw ArrayIndexOutOfBoundsException
    // on the final iteration.
    for (int i = 0; i < needsAmplifianceSignal.length; i++){
        amplifiedSignal[i] = amplifiedSignal[i] * 1000;
        amplifiedSignal[i] = amplifiedSignal[i] * amplifiedSignal[i];
    }
    Hilbert h = new Hilbert(amplifiedSignal);
    h.hilbertTransform();
    double[][] analytical_signal = h.getOutput();
    // FIX: previously logged the array reference (useless "[[D@..." output).
    Log.d("Endsignal", "proccessSignal: rows=" + analytical_signal.length);
    double threshold = 0.0052;
    int apneaCount = 0;
    // FIX: both loops used "<=" (out of bounds), and the inner loop used the
    // row COUNT as its bound instead of the current row's width.
    for (int i = 0; i < analytical_signal.length; i++) {
        for (int j = 0; j < analytical_signal[i].length; j++) {
            if (analytical_signal[i][j] < threshold) {
                apneaCount++;
                if (apneaCount >= 10){
                    apneaview.setText("YOU HAVE APNEA, CONTACT YOUR DOCTOR");
                    break; // exits the current row only, as in the original
                }
            }
        }
    }
    if (apneaCount < 10){
        apneaview.setText("YOU DO NOT HAVE APNEA");
    }
}
}
Thank you so much for your time, I really appreciate it. Please let me know if you need any other information!

Audio track is not working in Android

I am new to Android. I am developing an application which uses AudioRecord and AudioTrack for recording and playing. In addition to this, I am trying to read and display the amplitude values of the recorded sound.
Here is my class
public class MainActivity extends AppCompatActivity {
private static final String TAG = "RecordSound";
private int BufferSize;
byte[] buffer = new byte[BufferSize];
/* AudioRecord and AudioTrack Object */
private AudioRecord record = null;
private AudioTrack track = null;
/* Audio Configuration */
private int sampleRate = 8000;
private int channelConfig = AudioFormat.CHANNEL_IN_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
private boolean isRecording = true;
private Thread recordingThread = null;
private double lastLevel = 0;
private Thread thread;
private static final int SAMPLE_DELAY = 75;
MediaPlayer mediaPlayer;
RelativeLayout layout;
private ImageView tankHolder;
TextView text;
Button play;
String filename;
final Handler mHandler = new Handler();
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
tankHolder = (ImageView)findViewById(R.id.tankHolder);
text=(TextView)findViewById(R.id.result_text);
layout=(RelativeLayout)findViewById(R.id.linear);
play=(Button)findViewById(R.id.btnStartPlay);
try {
BufferSize = AudioRecord.getMinBufferSize(sampleRate,
channelConfig, audioFormat);
}catch (Exception e){
e.printStackTrace();
}
}
#Override
protected void onResume() {
super.onResume();
//calling MediaPlayer for alarmtone when the tank is almost full
mediaPlayer = MediaPlayer.create(this, R.raw.tone);
startRecording();
play.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
try {
stopRecording();
} catch (IOException e) {
e.printStackTrace();
}
}
});
}
private void stopRecording() throws IOException{
if (null != record) {
isRecording = false;
record.stop();
record.release();
record = null;
recordingThread = null;
mHandler.post(runRecord);
mediaPlayer.stop();
mediaPlayer.release();
}
}
final Runnable runRecord=new Runnable() {
#Override
public void run() {
text.setText("working");
try {
PlayShortAudioFileViaAudioTrack("/sdcard/recorded.pcm");
} catch (IOException e) {
e.printStackTrace();
}
}
};
private void PlayShortAudioFileViaAudioTrack(String filePath) throws IOException{
// We keep temporarily filePath globally as we have only two sample sounds now..
if (filePath==null)
return;
//Reading the file..
File file = new File(filePath); // for ex. path= "/sdcard/samplesound.pcm" or "/sdcard/samplesound.wav"
byte[] byteData = new byte[(int) file.length()];
Log.d(TAG, (int) file.length()+"");
FileInputStream in = null;
try {
in = new FileInputStream( file );
in.read( byteData );
in.close();
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// Set and push to audio track..
int intSize = android.media.AudioTrack.getMinBufferSize(8000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
Log.d(TAG, intSize+"");
AudioTrack at = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, intSize, AudioTrack.MODE_STREAM);
at.play();
// Write the byte array to the track
at.write(byteData, 0, byteData.length);
at.stop();
at.release();
}
private void startRecording()
{
record = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate,
channelConfig, audioFormat, BufferSize);
if (AudioRecord.STATE_INITIALIZED == record.getState())
record.startRecording();
isRecording = true;
/* Run a thread for Recording */
recordingThread = new Thread(new Runnable() {
#Override
public void run() {
writeAudioDataToFile();
}
},"AudioRecorder Thread");
recordingThread.start();
thread = new Thread(new Runnable() {
public void run() {
while(thread != null && !thread.isInterrupted()){
//Let's make the thread sleep for a the approximate sampling time
try{
Thread.sleep(SAMPLE_DELAY);}catch(InterruptedException ie){ie.printStackTrace();}
readAudioBuffer();//After this call we can get the last value assigned to the lastLevel variable
runOnUiThread(new Runnable() {
#Override
public void run() {
if(lastLevel >= 7 && lastLevel <= 15){
text.setTextColor(getResources().getColor(R.color.Blue));
layout.setBackgroundColor(Color.RED);
tankHolder.setImageResource(R.drawable.ftank);
if (!mediaPlayer.isPlaying()){
mediaPlayer.start();
}
text.setText(String.valueOf(lastLevel));
}else
if(lastLevel > 50 && lastLevel <= 100){
text.setText(String.valueOf(lastLevel));
text.setTextColor(getResources().getColor(R.color.Orange));
layout.setBackgroundColor(Color.WHITE);
tankHolder.setImageResource(R.drawable.htank);
if (mediaPlayer.isPlaying()){
mediaPlayer.pause();
}
}else
if(lastLevel > 100 && lastLevel <= 170){
text.setText(String.valueOf(lastLevel));
text.setTextColor(getResources().getColor(R.color.Yellow));
layout.setBackgroundColor(Color.WHITE);
tankHolder.setImageResource(R.drawable.qtank);
if (mediaPlayer.isPlaying()){
mediaPlayer.pause();
}
}
if(lastLevel > 170){
text.setText(String.valueOf(lastLevel));
text.setTextColor(getResources().getColor(R.color.Blue));
layout.setBackgroundColor(Color.WHITE);
tankHolder.setImageResource(R.drawable.qtank);
if (mediaPlayer.isPlaying()){
mediaPlayer.pause();
}
}
}
});
}
}
},"AudioRecorder Thread");
thread.start();
}
private void writeAudioDataToFile()
{
byte data[] = new byte[BufferSize];
/* Record audio to following file */
String filePath = "/sdcard/recorded.pcm";
filename = Environment.getExternalStorageDirectory().getAbsolutePath();
filename +="/audiofile.pcm";
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
int read_bytes = 0;
if(null != os){
while(isRecording)
{
read_bytes = record.read(data, 0, BufferSize);
if(AudioRecord.ERROR_INVALID_OPERATION != read_bytes){
try {
os.write(data);
} catch (IOException e) {
e.printStackTrace();
}
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private void readAudioBuffer() {
try {
short[] buffer = new short[BufferSize];
int bufferReadResult = 1;
if (record != null) {
// Sense the voice...
bufferReadResult = record.read(buffer, 0, BufferSize);
double sumLevel = 0;
for (int i = 0; i < bufferReadResult; i++) {
sumLevel += buffer[i];
}
lastLevel = Math.abs((sumLevel / bufferReadResult));
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
If I keep the runOnUiThread(new Runnable() {...}) block, the UI updates work, but pressing the play button does not play the recorded audio.
If I remove the runOnUiThread(new Runnable() {...}) block, the recorded audio plays on pressing the play button without any problem.
So I think that the problem is with threads and I tried to implement the handler class, still I am not getting it.
Refer this,
http://www.java2s.com/Code/Android/Media/UsingAudioRecord.htm for audio recording.
If you are not satisfied,https://www.tutorialspoint.com/android/android_audio_capture.htm for detailed explanation

Android Shoutcast internet Radio Streaming

I developed Shoutcastinternet Radio Streaming and I'm able to stream and play Successfully.
But the problem is: when I run my application, it streams and plays continuously for about half an hour; after that the stream stops (nothing plays). If I press play again, the stream continues, and after some more time I get a FileNotFoundException.
I logged the Error, after Stream get stopped.
The Error is :
java.io.FileNotFoundException: /data/data/com.torilt/cache/downloadingMediaFile430 (No such file or directory)
Can't find file. Android must have deleted it on a clean up
Getting Exception in setupplayer()
Source Code:
public class StreamingMediaPlayer extends Service {
final static public String AUDIO_MPEG = "audio/mpeg";
final static public String BITERATE_HEADER = "icy-br";
public int INTIAL_KB_BUFFER ;
private Handler handler;
//= 96*10/8
final public int BIT = 8;
final public int SECONDS = 60;
int bitrate = 56;
public File downloadingMediaFile;
final public String DOWNFILE = "downloadingMediaFile";
public Context context;
public int counter;
public int playedcounter;
public int preparecounter;
public MediaPlayer mp1;
public MediaPlayer mp2;
public boolean mp1prepared;
public boolean mp2prepared;
public boolean mp1preparing;
public boolean mp2preparing;
public boolean downloadingformp1;
public boolean downloadingformp2;
public boolean prepareState;
public String SONGURL = "";
// playing is "true" for mp1 and "false" for mp2
public boolean mp1playing;
public boolean started;
public boolean processHasStarted;
public boolean processHasPaused;
public boolean regularStream;
public BufferedInputStream stream;
public URL url;
public URLConnection urlConn;
public String station;
public String audiourl;
public Intent startingIntent = null;
public boolean stopping;
Thread preparringthread;
boolean waitingForPlayer;
// Setup all the variables
private void setupVars() {
counter = 0;
playedcounter = 0;
preparecounter = 0;
mp1 = new MediaPlayer();
mp2 = new MediaPlayer();
mp1prepared = false;
mp2prepared = false;
mp1preparing = false;
mp2preparing = false;
downloadingformp1 = false;
downloadingformp2 = false;
prepareState = true;
mp1playing = false;
started = false;
processHasStarted = false;
processHasPaused = true;
regularStream = false;
stream = null;
url = null;
urlConn = null;
station = null;
audiourl = null;
stopping = false;
preparringthread = null;
waitingForPlayer = false;
}
// This object will allow other processes to interact with our service
private final IStreamingMediaPlayer.Stub ourBinder = new IStreamingMediaPlayer.Stub() {
// String TAG = "IStreamingMediaPlayer.Stub";
public String getStation() {
// Log.d(TAG, "getStation");
return station;
}
public String getUrl() {
// Log.d(TAG, "getUrl");
return audiourl;
}
public boolean playing() {
// Log.d(TAG, "playing?");
return isPlaying();
}
public boolean pause() {
// Log.d(TAG, "playing?");
return isPause();
}
public void startAudio() {
// Log.d(TAG, "startAudio");
Runnable r = new Runnable() {
public void run() {
onStart(startingIntent, 0);
}
};
new Thread(r).start();
}
public void stopAudio() {
// Log.d(TAG, "stopAudio");
stop();
}
};
#Override
public void onCreate() {
super.onCreate();
context = this;
}
#Override
public void onStart(Intent intent, int startId) throws NullPointerException {
super.onStart(intent, startId);
// final String TAG = "StreamingMediaPlayer - onStart";
context = this;
setupVars();
if (intent.hasExtra("audiourl")) {
raiseThreadPriority();
processHasStarted = true;
processHasPaused = false;
audiourl = intent.getStringExtra("audiourl");
station = intent.getStringExtra("station");
downloadingMediaFile = new File(context.getCacheDir(), DOWNFILE+ counter);
downloadingMediaFile.deleteOnExit();
Runnable r = new Runnable() {
public void run() {
try {
startStreaming(audiourl);
} catch (IOException e) {
// Log.d(TAG, e.toString());
}
}
};
Thread t = new Thread(r);
t.start();
}
}
#Override
public void onDestroy() {
super.onDestroy();
mp1.stop();
mp2.stop();
}
#Override
public IBinder onBind(Intent intent) {
startingIntent = intent;
context = this;
return ourBinder;
}
#Override
public boolean onUnbind(Intent intent) {
super.onUnbind(intent);
stopSelf();
return true;
}
/**
* Progressivly download the media to a temporary location and update the
* MediaPlayer as new content becomes available.
*/
public void startStreaming(final String mediaUrl) throws IOException {
try {
url = new URL(mediaUrl);
urlConn = (HttpURLConnection) url.openConnection();
urlConn.setReadTimeout(1000 * 20);
urlConn.setConnectTimeout(1000 * 5);
//The getContentType method is used by the getContent method to determine the type of the remote object; subclasses may find it convenient to override the getContentType method.
String ctype = urlConn.getContentType();
if (ctype == null) {
ctype = "";
} else {
ctype = ctype.toLowerCase();
}
if (ctype.contains(AUDIO_MPEG) || ctype.equals("")) {
String temp = urlConn.getHeaderField(BITERATE_HEADER);
if (temp != null) {
bitrate = new Integer(temp).intValue();
}
} else {
stopSelf();
return;
}
}
catch(NullPointerException ne)
{
}
catch (IOException ioe) {
// Log.e(TAG, "Could not connect to " + mediaUrl);
stopSelf();
return;
}
if (!regularStream) {
INTIAL_KB_BUFFER = bitrate * SECONDS / BIT;
Runnable r = new Runnable() {
public void run() {
try {
downloadAudioIncrement(mediaUrl);
Log.i("TAG12344444", "Unable to play");
stopSelf();
return;
} catch (IOException e) {
Log.i("TAG123", "Unable to initialize the MediaPlayer for Audio Url = "+mediaUrl, e);
stopSelf();
return;
} catch (NullPointerException e) {
stopSelf();
return;
}
}
};
Thread t = new Thread(r);
t.start();
}
}
/**
* Download the url stream to a temporary location and then call the
* setDataSource for that local file
*/
public void downloadAudioIncrement(String mediaUrl) throws IOException{
int bufsizeForDownload = 8 * 1024;
int bufsizeForfile = 64 * 1024;
stream = new BufferedInputStream(urlConn.getInputStream(),bufsizeForDownload);
Log.i("bufsize",Integer.toString(urlConn.getInputStream().available()));
try{
if(stream == null || stream.available() == 0){
stopSelf();
Log.i("unable to create ","stream null");
return;
}
}catch (NullPointerException e) {
stopSelf();
Log.i("return1","return1");
return;
}
BufferedOutputStream bout = new BufferedOutputStream(new FileOutputStream(downloadingMediaFile), bufsizeForfile);
byte buf[] = new byte[bufsizeForDownload];
int totalBytesRead = 0, totalKbRead = 0, numread = 0;
do {
if (bout == null) {
counter++;
downloadingMediaFile = new File(context.getCacheDir(), DOWNFILE+ counter);
downloadingMediaFile.deleteOnExit();
bout = new BufferedOutputStream(new FileOutputStream(downloadingMediaFile), bufsizeForfile);
}
try {
numread = stream.read(buf);
} catch (IOException e) {
Log.d("Downloadingfile", "Bad read. Let's quit.");
// stop();
Log.i("return2","return2");
stopSelf();
// return;
}
catch (NullPointerException e) {
// Let's get out of here
e.printStackTrace();
break;
}
if (numread < 0) {
bout.flush();
stopSelf();
Log.i("Bad read from stream", "Bad read from stream3");
if(stream == null){
urlConn = new URL(mediaUrl).openConnection();
urlConn.setConnectTimeout(1000 * 30);
urlConn.connect();
stream = new BufferedInputStream(urlConn.getInputStream(),bufsizeForDownload);
}else{
handler.post(new Runnable() {
public void run() {
Log.i("Bad read from stream", "Bad read from xyz");
context.stopService(startingIntent);
Log.i("return3","return3");
return;
}
});
}
} else if (numread >= 1) {
bout.write(buf, 0, numread);
totalBytesRead += numread;
totalKbRead += totalBytesRead / 1000;
}
if (totalKbRead >= INTIAL_KB_BUFFER && stopping != true) {
bout.flush();
bout.close();
bout = null;
if (started == false) {
Runnable r = new Runnable() {
public void run() {
setupplayer();
}
};
Thread t = new Thread(r);
t.start();
}
totalBytesRead = 0;
totalKbRead = 0;
}
if (stopping == true) {
stream = null;
}
} while (stream != null);
}
/** oncompletelister for media player **/
class listener implements MediaPlayer.OnCompletionListener {
public void onCompletion(MediaPlayer mp) {
waitingForPlayer = false;
long timeInMilli = Calendar.getInstance().getTime().getTime();
long timeToQuit = (1000 * 30) + timeInMilli; // add 30 seconds
if (mp1playing)
{
mp1.reset();
removefile();
mp1prepared = false;
// Log.d(TAG, "mp1 is Free.");
if (downloadingformp2) {
if (mp2preparing && stopping == false) {
waitingForPlayer = true;
}
while (mp2preparing && stopping == false) {
if (timeInMilli > timeToQuit) {
stopSelf();
}
timeInMilli = Calendar.getInstance().getTime().getTime();
}
}
} else {
mp2.reset();
removefile();
mp2prepared = false;
if (downloadingformp1) {
if (mp1preparing && stopping == false) {
waitingForPlayer = true;
}
while (mp1preparing && stopping == false) {
if (timeInMilli > timeToQuit) {
stopSelf();
}
timeInMilli = Calendar.getInstance().getTime().getTime();
}
}
}
if (waitingForPlayer == true) {
// we must have been waiting
waitingForPlayer = false;
}
if (stopping == false) {
if (mp1playing) {
mp2.start();
mp1playing = false;
Runnable r = new Runnable() {
public void run() {
setupplayer();
}
};
Thread t = new Thread(r);
t.start();
} else {
mp1.start();
mp1playing = true;
Runnable r = new Runnable() {
public void run() {
setupplayer();
}
};
Thread t = new Thread(r);
t.start();
}
}
}
}
/** OnPreparedListener for media player **/
class preparelistener implements MediaPlayer.OnPreparedListener {
public void onPrepared(MediaPlayer mp) {
if (prepareState) {
prepareState = false;
mp1preparing = false;
mp1prepared = true;
if (started == false) {
started = true;
mp1.start();
mp1playing = true;
Runnable r = new Runnable() {
public void run() {
setupplayer();
}
};
Thread t = new Thread(r);
t.start();
}
} else {
prepareState = true;
mp2preparing = false;
mp2prepared = true;
}
}
};
/**
* Set Up player(s)
*/
public void setupplayer() {
final String TAG = "setupplayer";
Runnable r = new Runnable() {
public void run() {
try {
if (!mp1preparing && !mp1prepared) {
while (true) {
downloadingformp1 = true;
if (started == false)
break;
if (counter > preparecounter)
break;
}
File f = new File(context.getCacheDir(), DOWNFILE+ preparecounter);
FileInputStream ins = new FileInputStream(f);
mp1.setDataSource(ins.getFD());
mp1.setAudioStreamType(AudioManager.STREAM_MUSIC);//playing for live streaming
mp1.setOnCompletionListener(new listener());
mp1.setOnPreparedListener(new preparelistener());
if (started == false || waitingForPlayer == true){
}
mp1.prepareAsync();// .prepare();
mp1preparing = true;
downloadingformp1 = false;
preparecounter++;
} else if (!mp2preparing && !mp2prepared) {
while (true) {
downloadingformp2 = true;
if (started == false)
break;
if (counter > preparecounter)
break;
}
File f = new File(context.getCacheDir(), DOWNFILE+ preparecounter);
FileInputStream ins = new FileInputStream(f);
mp2.setDataSource(ins.getFD());
mp2.setAudioStreamType(AudioManager.STREAM_MUSIC);
mp2.setOnCompletionListener(new listener());
mp2.setOnPreparedListener(new preparelistener());
mp2.prepareAsync();
mp2preparing = true;
downloadingformp2 = false;
preparecounter++;
// }
} else
Log.d(TAG, "No Media player is available to setup.");
return;
} catch (FileNotFoundException e) {
Log.e(TAG, e.toString());
Log.e(TAG,"Can't find file. Android must have deleted it on a clean up ");
stop();
return;
} catch (IllegalStateException e) {
Log.e(TAG, e.toString());
stop();
} catch (IOException e) {
Log.e(TAG, e.toString());
stop();
}
}
};
preparringthread = new Thread(r);
preparringthread.start();
try {
preparringthread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void removefile() {
File temp = new File(context.getCacheDir(), DOWNFILE + playedcounter);
temp.delete();
playedcounter++;
}
public boolean stop() {
final String TAG = "STOP";
stopping = true;
try {
if (mp1.isPlaying()){
if (!(stream == null)) {
Log.i("IN STOP", "MP1 is nill");
stopSelf();
}
mp1.stop();
}
if (mp2.isPlaying()){
Log.i("IN STOP", "MP2 is nill");
if (!(stream == null)){
stopSelf();
}
mp2.stop();
}
} catch (Exception e) {
Log.e(TAG, "error stopping players");
}
if (stream != null) {
try {
stream.close();
} catch (IOException e) {
Log.e(TAG, "error closing open connection");
}
}
stream = null;
processHasStarted = false;
processHasPaused = true;
if (preparringthread != null) {
preparringthread.interrupt();
}
stopSelf();
return true;
}
public boolean isPlaying() {
return processHasStarted;
}
public boolean isPause() {
return processHasPaused;
}
private void raiseThreadPriority() {
Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO);
}
}
You should call release() to free the resources; if they are not released, keeping too many MediaPlayer instances alive may result in an exception.
Write this code in your Service:
Updated
/**
 * Stops (if playing) and releases the MediaPlayer, then clears the field
 * so a stale instance can never be reused. No-op when already released.
 */
private void releaseMediaPlayer() {
    if (mediaPlayer == null) {
        return;
    }
    if (mediaPlayer.isPlaying()) {
        mediaPlayer.stop();
    }
    mediaPlayer.release();
    mediaPlayer = null;
}
#Override
protected void onDestroy() {
super.onDestroy();
releaseMediaPlayer();
}
You can see this

Categories