CameraX stop failed: the video has no key frame - Java

I want to record video using the CameraX API. Here is my code:
public class MainActivityCameraX extends Activity implements LifecycleOwner{
private ListenableFuture<ProcessCameraProvider> cameraProviderFuture;
PreviewView previewView;
ProcessCameraProvider cameraProvider = null;
private VideoCapture videoCapture;
private LifecycleRegistry mLifecycleRegistry;
// Fields referenced below but not shown in the original post; values are assumed:
private static final String TAG = "MainActivityCameraX";
private static final String FOLDER = "CameraXVideos";
private static final int WIDTH = 1920, HEIGHT = 1080;
private static final int fps = 30;
private boolean isRecording = false;
@NonNull
@Override
public Lifecycle getLifecycle() {
if (mLifecycleRegistry == null) {
mLifecycleRegistry = new LifecycleRegistry(this);
mLifecycleRegistry.markState(Lifecycle.State.CREATED);
}
return mLifecycleRegistry;
}
private Executor getExecutor() {
return ContextCompat.getMainExecutor(this);
}
@SuppressLint("RestrictedApi")
private void startCameraX() {
cameraProviderFuture = ProcessCameraProvider.getInstance(this);
if(cameraProviderFuture == null){
} else{
cameraProviderFuture.addListener(() -> {
try {
cameraProvider = cameraProviderFuture.get();
cameraProvider.unbindAll();
CameraSelector cameraSelector = new CameraSelector.Builder()
.requireLensFacing(CameraSelector.LENS_FACING_FRONT)
.build();
Preview preview = new Preview.Builder()
//.setTargetAspectRatio(AspectRatio.RATIO_16_9)
.setTargetResolution(new Size(WIDTH,HEIGHT))
.build();
preview.setSurfaceProvider(previewView.getSurfaceProvider());
videoCapture = new VideoCapture.Builder()
.setCameraSelector(cameraSelector)
.setTargetResolution(new Size(3840, 2160))
.setTargetName("video/avc")
.setBitRate(10000000)
.setAudioBitRate(64000)
.setAudioSampleRate(44100)
.setAudioChannelCount(2)
.setVideoFrameRate(fps)
.build();
//videoCapture = new VideoCaptureConfig().Builde
try {
//use UseCaseGroup to assign viewport to use cases
UseCaseGroup useCaseGroup = new UseCaseGroup.Builder()
.addUseCase(preview)
.addUseCase(videoCapture)
.build();
// Attach use cases to the camera with the same lifecycle owner
Camera camera = cameraProvider.bindToLifecycle((LifecycleOwner) this, cameraSelector, useCaseGroup);
camera.getCameraInfo().getCameraState().observe((LifecycleOwner) this, (state) -> {
if (state.getError() != null) {
} else{
}
});
// Connect the preview use case to the previewView
preview.setSurfaceProvider(previewView.getSurfaceProvider());
startRecording();
} catch (Exception e){
e.printStackTrace();
}
Log.d(""+TAG,"startCameraX Finalized");
} catch (ExecutionException e) {
e.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
}
}, getExecutor());
}
}
@Override
public void onStart() {
super.onStart();
mLifecycleRegistry.markState(Lifecycle.State.STARTED);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_camerax);
mLifecycleRegistry = new LifecycleRegistry(this);
mLifecycleRegistry.markState(Lifecycle.State.CREATED);
previewView = findViewById(R.id.previewView);
}
@SuppressLint("RestrictedApi")
private boolean stopRecording(){
try {
if(videoCapture == null){
} else{
if(isRecording){
videoCapture.stopRecording();
isRecording = false;
videoCapture = null;
} else{
}
}
} catch (Exception e){
e.printStackTrace();
return false;
}
return true;
}
@SuppressLint({"MissingPermission", "RestrictedApi"})
private void startRecording() {
if (videoCapture != null) {
File outputFile = null;
Date currentDate = new Date();
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMdd_HHmmss");
String dateString = dateFormat.format(currentDate);
outputFile = new File(Environment.getExternalStorageDirectory(),FOLDER+File.separator+dateString+".mp4");
if(outputFile == null){
} else{
videoCapture.startRecording(
new VideoCapture.OutputFileOptions.Builder(outputFile).build(),
getExecutor(),
new VideoCapture.OnVideoSavedCallback() {
@Override
public void onVideoSaved(@NonNull VideoCapture.OutputFileResults outputFileResults) {
}
@Override
public void onError(int videoCaptureError, @NonNull String message, @Nullable Throwable cause) {
}
}
);
isRecording = true;
}
} else{
Log.d(""+TAG, "startRecording videoCapture null");
}
}
When I stop the recording I get these errors:
E/MPEG4Writer: Stop() called but track is not started or stopped
I/VideoCapture: check Recording Result First Video Key Frame Write: false
I/VideoCapture: The recording result has no key frame.
I/VideoCapture: Delete file.
I/VideoCapture: Video encode thread end.
D/CameraInfo: videoCapture onError The file has no video key frame.
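For context, VideoCapture logs this and deletes the file when stopRecording() runs before the encoder has written its first key frame, which typically happens when a recording is stopped almost immediately after it starts. For comparison, here is a minimal sketch of the same record/stop flow on the newer androidx.camera.video API (the camera-video artifact), which manages encoder start-up internally; names such as outputFile are assumptions rather than code from the question, and imports are omitted as elsewhere in this post:
Recorder recorder = new Recorder.Builder()
        .setQualitySelector(QualitySelector.from(Quality.UHD))
        .build();
VideoCapture<Recorder> videoCapture = VideoCapture.withOutput(recorder);
cameraProvider.bindToLifecycle(this, cameraSelector, preview, videoCapture);
// Start recording into a File and listen for the finalize event.
FileOutputOptions options = new FileOutputOptions.Builder(outputFile).build();
Recording recording = videoCapture.getOutput()
        .prepareRecording(this, options)
        .start(ContextCompat.getMainExecutor(this), event -> {
            if (event instanceof VideoRecordEvent.Finalize) {
                VideoRecordEvent.Finalize f = (VideoRecordEvent.Finalize) event;
                Log.d(TAG, "Recording finalized, hasError=" + f.hasError());
            }
        });
// Later, once some video has actually been encoded:
recording.stop();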

Related

Android Development: Camera2 takes videos fine, but can't take still images as the camera device has been disconnected

I have been trying to use this tutorial to create an app that can take pictures and videos with camera2 specifically, as I need access to more technical aspects such as focus and exposure. As the title of this post states, I can get the code to save a video properly, but when I press the button to take a still image, it gives the following error message repeatedly:
Handler (android.os.Handler) {302c85a} sending message to a Handler on a dead thread
java.lang.IllegalStateException: Handler (android.os.Handler) {302c85a} sending message to a Handler on a dead thread
at android.os.MessageQueue.enqueueMessage(MessageQueue.java:560)
at android.os.Handler.enqueueMessage(Handler.java:778)
at android.os.Handler.sendMessageAtTime(Handler.java:727)
at android.os.Handler.sendMessageDelayed(Handler.java:697)
at android.os.Handler.post(Handler.java:427)
at android.hardware.camera2.impl.CameraDeviceImpl$CameraHandlerExecutor.execute(CameraDeviceImpl.java:2353)
at android.hardware.camera2.impl.CallbackProxies$SessionStateCallbackProxy.onCaptureQueueEmpty(CallbackProxies.java:94)
at android.hardware.camera2.impl.CameraCaptureSessionImpl$2.onRequestQueueEmpty(CameraCaptureSessionImpl.java:864)
at android.hardware.camera2.impl.CameraDeviceImpl$CameraDeviceCallbacks.onRequestQueueEmpty(CameraDeviceImpl.java:2331)
at android.hardware.camera2.ICameraDeviceCallbacks$Stub.onTransact(ICameraDeviceCallbacks.java:187)
at android.os.Binder.execTransactInternal(Binder.java:1285)
at android.os.Binder.execTransact(Binder.java:1244)
This may be related to this error that occurs when the startPreview method is called (see code at the bottom of post):
android.hardware.camera2.CameraAccessException: CAMERA_DISCONNECTED (2): checkPidStatus:2085: The camera device has been disconnected
at android.hardware.camera2.CameraManager.throwAsPublicException(CameraManager.java:1390)
...
at com.[example].androidcamera2api.MainActivity$10.onConfigured(MainActivity.java:986)
The code, which I have modified slightly from the original GitHub example to use non-deprecated APIs, is as follows. (I have also commented out lines related to getting audio; they kept causing app crashes, but they are unnecessary for my use case, so it shouldn't matter. Imports are also omitted for brevity.)
public class MainActivity extends AppCompatActivity {
private static final String TAG = "Camera2VideoImageActivi";
private static final int REQUEST_CAMERA_PERMISSION_RESULT = 0;
private static final int REQUEST_WRITE_EXTERNAL_STORAGE_PERMISSION_RESULT = 1;
private static final int STATE_PREVIEW = 0;
private static final int STATE_WAIT_LOCK = 1;
private int mCaptureState = STATE_PREVIEW;
private TextureView mTextureView;
private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
setupCamera(width, height);
connectCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {}
};
private CameraDevice mCameraDevice;
private CameraDevice.StateCallback mCameraDeviceStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
mCameraDevice = camera;
mMediaRecorder = new MediaRecorder();
if(mIsRecording){
try{
createVideoFileName();
} catch(IOException e){
e.printStackTrace();
}
startRecord();
mMediaRecorder.start();
runOnUiThread(new Runnable() {
@Override
public void run() {
mChronometer.setBase(SystemClock.elapsedRealtime());
mChronometer.setVisibility(View.VISIBLE);
mChronometer.start();
}
});
} else {
startPreview();
}
}
@Override
public void onDisconnected(CameraDevice camera) {
camera.close();
mCameraDevice = null;
}
@Override
public void onError(CameraDevice camera, int error) {
camera.close();
mCameraDevice = null;
}
};
private HandlerThread mBackgroundHandlerThread;
private Handler mBackgroundHandler;
private String mCameraId;
private Size mPreviewSize;
private Size mVideoSize;
private Size mImageSize;
private ImageReader mImageReader;
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
mBackgroundHandler.post(new ImageSaver(reader.acquireLatestImage()));
}
};
private class ImageSaver implements Runnable {
private final Image mImage;
public ImageSaver(Image image) {
mImage = image;
}
@Override
public void run() {
ByteBuffer byteBuffer = mImage.getPlanes()[0].getBuffer();
byte[] bytes = new byte[byteBuffer.remaining()];
byteBuffer.get(bytes);
FileOutputStream fileOutputStream = null;
try {
fileOutputStream = new FileOutputStream(mImageFileName);
fileOutputStream.write(bytes);
} catch (IOException e) {
e.printStackTrace();
} finally {
mImage.close();
Intent mediaStoreUpdateIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
mediaStoreUpdateIntent.setData(Uri.fromFile(new File(mImageFileName)));
sendBroadcast(mediaStoreUpdateIntent);
if(fileOutputStream != null) {
try {
fileOutputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
private MediaRecorder mMediaRecorder;
private Chronometer mChronometer;
private int mTotalRotation;
private CameraCaptureSession mPreviewCaptureSession;
private CameraCaptureSession.CaptureCallback mPreviewCaptureCallback = new CameraCaptureSession.CaptureCallback() {
private void process(CaptureResult captureResult) {
switch (mCaptureState) {
case STATE_PREVIEW:
break;
case STATE_WAIT_LOCK:
mCaptureState = STATE_PREVIEW;
Integer afState = captureResult.get(CaptureResult.CONTROL_AF_STATE);
if(afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
Toast.makeText(getApplicationContext(), "AF Locked!", Toast.LENGTH_SHORT).show();
startStillCaptureRequest();
}
break;
}
}
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
process(result);
}
};
private CameraCaptureSession mRecordCaptureSession;
private CameraCaptureSession.CaptureCallback mRecordCaptureCallback = new CameraCaptureSession.CaptureCallback() {
private void process(CaptureResult captureResult) {
switch (mCaptureState) {
case STATE_PREVIEW:
break;
case STATE_WAIT_LOCK:
mCaptureState = STATE_PREVIEW;
Integer afState = captureResult.get(CaptureResult.CONTROL_AF_STATE);
if(afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
Toast.makeText(getApplicationContext(), "AF Locked!", Toast.LENGTH_SHORT).show();
startStillCaptureRequest();
}
break;
}
}
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
process(result);
}
};
private CaptureRequest.Builder mCaptureRequestBuilder;
private ImageButton mRecordImageButton;
private ImageButton mStillImageButton;
private boolean mIsRecording = false;
private boolean mIsTimelapse = false;
private File mVideoFolder;
private String mVideoFileName;
private File mImageFolder;
private String mImageFileName;
private static SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 0);
ORIENTATIONS.append(Surface.ROTATION_90, 90);
ORIENTATIONS.append(Surface.ROTATION_180, 180);
ORIENTATIONS.append(Surface.ROTATION_270, 270);
}
private static class CompareSizeByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
return Long.signum( (long)(lhs.getWidth() * lhs.getHeight()) -
(long)(rhs.getWidth() * rhs.getHeight()));
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
requestPermissions(new String[]{"android.permission.CAMERA","android.permission.WRITE_EXTERNAL_STORAGE","android.permission.READ_EXTERNAL_STORAGE"}, 1);
createVideoFolder();
createImageFolder();
mChronometer = (Chronometer) findViewById(R.id.chronometer);
mTextureView = (TextureView) findViewById(R.id.textureView);
mStillImageButton = (ImageButton) findViewById(R.id.cameraImageButton2);
mStillImageButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if(!(mIsTimelapse || mIsRecording)) {
checkWriteStoragePermission();
}
lockFocus();
}
});
mRecordImageButton = (ImageButton) findViewById(R.id.videoOnlineImageButton);
mRecordImageButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (mIsRecording || mIsTimelapse) {
mChronometer.stop();
mChronometer.setVisibility(View.INVISIBLE);
mIsRecording = false;
mIsTimelapse = false;
mRecordImageButton.setImageResource(R.mipmap.btn_video_online);
// Starting the preview prior to stopping recording which should hopefully
// resolve issues being seen in Samsung devices.
startPreview();
mMediaRecorder.stop();
mMediaRecorder.reset();
Intent mediaStoreUpdateIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
mediaStoreUpdateIntent.setData(Uri.fromFile(new File(mVideoFileName)));
sendBroadcast(mediaStoreUpdateIntent);
} else {
mIsRecording = true;
mRecordImageButton.setImageResource(R.mipmap.btn_video_busy);
checkWriteStoragePermission();
}
}
});
mRecordImageButton.setOnLongClickListener(new View.OnLongClickListener() {
@Override
public boolean onLongClick(View v) {
mIsTimelapse =true;
mRecordImageButton.setImageResource(R.mipmap.btn_timelapse);
checkWriteStoragePermission();
return true;
}
});
}
@Override
protected void onResume() {
super.onResume();
startBackgroundThread();
if(mTextureView.isAvailable()) {
setupCamera(mTextureView.getWidth(), mTextureView.getHeight());
connectCamera();
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if(requestCode == REQUEST_CAMERA_PERMISSION_RESULT) {
if(grantResults[0] != PackageManager.PERMISSION_GRANTED) {
Toast.makeText(getApplicationContext(),
"Application will not run without camera services", Toast.LENGTH_SHORT).show();
}
if(grantResults[1] != PackageManager.PERMISSION_GRANTED) {
Toast.makeText(getApplicationContext(),
"Application will not have audio on record", Toast.LENGTH_SHORT).show();
}
}
if(requestCode == REQUEST_WRITE_EXTERNAL_STORAGE_PERMISSION_RESULT) {
if(grantResults[0] == PackageManager.PERMISSION_GRANTED) {
if(mIsRecording || mIsTimelapse) {
mIsRecording = true;
mRecordImageButton.setImageResource(R.mipmap.btn_video_busy);
}
Toast.makeText(this,
"Permission successfully granted!", Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(this,
"App needs to save video to run", Toast.LENGTH_SHORT).show();
}
}
}
@Override
protected void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
@Override
public void onWindowFocusChanged(boolean hasFocus) {
super.onWindowFocusChanged(hasFocus);
View decorView = getWindow().getDecorView();
if(hasFocus) {
decorView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_STABLE
| View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
| View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_FULLSCREEN
| View.SYSTEM_UI_FLAG_HIDE_NAVIGATION);
}
}
private void setupCamera(int width, int height) {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
for(String cameraId : cameraManager.getCameraIdList()){
CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
if(cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) ==
CameraCharacteristics.LENS_FACING_FRONT){
continue;
}
StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
int deviceOrientation = getWindowManager().getDefaultDisplay().getRotation();
mTotalRotation = sensorToDeviceRotation(cameraCharacteristics, deviceOrientation);
boolean swapRotation = mTotalRotation == 90 || mTotalRotation == 270;
int rotatedWidth = width;
int rotatedHeight = height;
if(swapRotation) {
rotatedWidth = height;
rotatedHeight = width;
}
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedWidth, rotatedHeight);
mVideoSize = chooseOptimalSize(map.getOutputSizes(MediaRecorder.class), rotatedWidth, rotatedHeight);
mImageSize = chooseOptimalSize(map.getOutputSizes(ImageFormat.JPEG), rotatedWidth, rotatedHeight);
mImageReader = ImageReader.newInstance(mImageSize.getWidth(), mImageSize.getHeight(), ImageFormat.JPEG, 1);
mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);
mCameraId = cameraId;
return;
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void connectCamera() {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if(ContextCompat.checkSelfPermission(this, android.Manifest.permission.CAMERA) ==
PackageManager.PERMISSION_GRANTED) {
cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mBackgroundHandler);
} else {
if(shouldShowRequestPermissionRationale(android.Manifest.permission.CAMERA)) {
Toast.makeText(this,
"Video app required access to camera", Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[] {android.Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO
}, REQUEST_CAMERA_PERMISSION_RESULT);
}
} else {
cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mBackgroundHandler);
System.out.println("Opened camera " + mCameraId);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void startRecord() {
try {
if(mIsRecording) {
setupMediaRecorder();
} else if(mIsTimelapse) {
setupTimelapse();
}
SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface previewSurface = new Surface(surfaceTexture);
Surface recordSurface = mMediaRecorder.getSurface();
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
mCaptureRequestBuilder.addTarget(previewSurface);
mCaptureRequestBuilder.addTarget(recordSurface);
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface, recordSurface, mImageReader.getSurface()),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
mRecordCaptureSession = session;
try {
mRecordCaptureSession.setRepeatingRequest(
mCaptureRequestBuilder.build(), null, null
);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
Log.d(TAG, "onConfigureFailed: startRecord");
}
}, null);
} catch (Exception e) {
e.printStackTrace();
}
}
private void startPreview() {
SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface previewSurface = new Surface(surfaceTexture);
try {
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mCaptureRequestBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface, mImageReader.getSurface()),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
Log.d(TAG, "onConfigured: startPreview");
mPreviewCaptureSession = session;
try {
mPreviewCaptureSession.setRepeatingRequest(mCaptureRequestBuilder.build(),
null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
Log.d(TAG, "onConfigureFailed: startPreview");
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void startStillCaptureRequest() {
try {
if(mIsRecording) {
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_VIDEO_SNAPSHOT);
} else {
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
//mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_MANUAL);
Debug.logStack("Debug","Building still capture request",1);
}
mCaptureRequestBuilder.addTarget(mImageReader.getSurface());
mCaptureRequestBuilder.set(CaptureRequest.JPEG_ORIENTATION, mTotalRotation);
CameraCaptureSession.CaptureCallback stillCaptureCallback = new
CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
super.onCaptureStarted(session, request, timestamp, frameNumber);
try {
createImageFileName();
} catch (IOException e) {
e.printStackTrace();
}
}
};
if(mIsRecording) {
mRecordCaptureSession.capture(mCaptureRequestBuilder.build(), stillCaptureCallback, null);
} else {
mPreviewCaptureSession.capture(mCaptureRequestBuilder.build(), stillCaptureCallback, null);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void closeCamera() {
if(mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
if(mMediaRecorder != null) {
mMediaRecorder.release();
mMediaRecorder = null;
}
}
private void startBackgroundThread() {
mBackgroundHandlerThread = new HandlerThread("Camera2VideoImage");
mBackgroundHandlerThread.start();
mBackgroundHandler = new Handler(mBackgroundHandlerThread.getLooper());
}
private void stopBackgroundThread() {
mBackgroundHandlerThread.quitSafely();
try {
mBackgroundHandlerThread.join();
mBackgroundHandlerThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private static int sensorToDeviceRotation(CameraCharacteristics cameraCharacteristics, int deviceOrientation) {
int sensorOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
deviceOrientation = ORIENTATIONS.get(deviceOrientation);
return (sensorOrientation + deviceOrientation + 360) % 360;
}
private static Size chooseOptimalSize(Size[] choices, int width, int height) {
List<Size> bigEnough = new ArrayList<Size>();
for(Size option : choices) {
if(option.getHeight() == option.getWidth() * height / width &&
option.getWidth() >= width && option.getHeight() >= height) {
bigEnough.add(option);
}
}
if(bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizeByArea());
} else {
return choices[0];
}
}
private void createVideoFolder() {
File movieFile = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES);
mVideoFolder = new File(movieFile, "camera2VideoImage");
if(!mVideoFolder.exists()) {
mVideoFolder.mkdirs();
}
}
private File createVideoFileName() throws IOException {
String timestamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
String prepend = "VIDEO_" + timestamp + "_";
File videoFile = File.createTempFile(prepend, ".mp4", mVideoFolder);
mVideoFileName = videoFile.getAbsolutePath();
return videoFile;
}
private void createImageFolder() {
File imageFile = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES);
mImageFolder = new File(imageFile, "camera2VideoImage");
if(!mImageFolder.exists()) {
mImageFolder.mkdirs();
}
}
private File createImageFileName() throws IOException {
String timestamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
String prepend = "IMAGE_" + timestamp + "_";
File imageFile = File.createTempFile(prepend, ".jpg", mImageFolder);
mImageFileName = imageFile.getAbsolutePath();
return imageFile;
}
private void checkWriteStoragePermission() {
if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if(ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE)
== PackageManager.PERMISSION_GRANTED) {
try {
createVideoFileName();
} catch (IOException e) {
e.printStackTrace();
}
if(mIsTimelapse || mIsRecording) {
startRecord();
mMediaRecorder.start();
mChronometer.setBase(SystemClock.elapsedRealtime());
mChronometer.setVisibility(View.VISIBLE);
mChronometer.start();
}
} else {
if(shouldShowRequestPermissionRationale(Manifest.permission.WRITE_EXTERNAL_STORAGE)) {
Toast.makeText(this, "app needs to be able to save videos", Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_WRITE_EXTERNAL_STORAGE_PERMISSION_RESULT);
}
} else {
try {
createVideoFileName();
} catch (IOException e) {
e.printStackTrace();
}
if(mIsRecording || mIsTimelapse) {
startRecord();
mMediaRecorder.start();
mChronometer.setBase(SystemClock.elapsedRealtime());
mChronometer.setVisibility(View.VISIBLE);
mChronometer.start();
}
}
}
private void setupMediaRecorder() throws IOException {
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
//mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder.setOutputFile(mVideoFileName);
mMediaRecorder.setVideoEncodingBitRate(1000000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight());
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
//mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
mMediaRecorder.setOrientationHint(mTotalRotation);
mMediaRecorder.prepare();
}
private void setupTimelapse() throws IOException {
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_TIME_LAPSE_HIGH));
mMediaRecorder.setOutputFile(mVideoFileName);
mMediaRecorder.setCaptureRate(2);
mMediaRecorder.setOrientationHint(mTotalRotation);
mMediaRecorder.prepare();
}
private void lockFocus() {
mCaptureState = STATE_WAIT_LOCK;
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
try {
if(mIsRecording) {
mRecordCaptureSession.capture(mCaptureRequestBuilder.build(), mRecordCaptureCallback, mBackgroundHandler);
} else {
mPreviewCaptureSession.capture(mCaptureRequestBuilder.build(), mPreviewCaptureCallback, mBackgroundHandler);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
}
I would be grateful if anyone could identify why I am having this error and help me to get the app to properly take pictures.
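Not a definitive diagnosis, but one thing stands out: startRecord() passes null for both the session callback handler and the repeating-request handler, and startPreview() passes null as the createCaptureSession() handler, so camera2 dispatches those callbacks on whatever looper the calling thread had when the session was created. If that looper dies later, for example when the background thread is stopped in onPause() and a fresh one is created in onResume(), it produces exactly the "sending message to a Handler on a dead thread" error shown above. A minimal sketch of startPreview() with the live background handler wired through, under that assumption:
// Route session callbacks to the current background handler instead of null.
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface, mImageReader.getSurface()),
        new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(CameraCaptureSession session) {
                mPreviewCaptureSession = session;
                try {
                    mPreviewCaptureSession.setRepeatingRequest(mCaptureRequestBuilder.build(),
                            null, mBackgroundHandler);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }
            }
            @Override
            public void onConfigureFailed(CameraCaptureSession session) {
                Log.d(TAG, "onConfigureFailed: startPreview");
            }
        }, mBackgroundHandler); // was null in the original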

How to make a voice call using socket.io and Node.js in Android

Hello, I want to design a two-way telephone connection using socket.io on Android and a socket server in Node.js: it captures the sound on the source phone, sends the bytes to the destination phone in real time, and the destination converts the transmission back to audio and plays it. The voice reaches the server, but the server does not forward the data to the destination!
public class VoiceCallActivity extends AppCompatActivity {
private static final String LOG_TAG = "socket_main_act";
private static final int REQUEST_RECORD_AUDIO_PERMISSION = 200;
private static final int SAMPLING_RATE = 22050;
private MediaPlayer mediaPlayer = new MediaPlayer();
private MediaRecorder recorder = new MediaRecorder();
private boolean permissionToRecord = false;
private String[] permissions = {Manifest.permission.RECORD_AUDIO};
private Socket mSocket;
private void startRecording(Socket socket) {
Log.i(LOG_TAG, "start recording.");
try {
ParcelFileDescriptor[] descriptors = new ParcelFileDescriptor[0];
try {
descriptors = ParcelFileDescriptor.createPipe();
} catch (IOException e) {
e.printStackTrace();
}
ParcelFileDescriptor recorderRead = new ParcelFileDescriptor(descriptors[0]);
ParcelFileDescriptor recorderWrite = new ParcelFileDescriptor(descriptors[1]);
InputStream inputStream = new ParcelFileDescriptor.AutoCloseInputStream(recorderRead);
Log.i(LOG_TAG, "Setup IO.");
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setOutputFormat(MediaRecorder.OutputFormat.AAC_ADTS);
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC_ELD);
recorder.setOutputFile(recorderWrite.getFileDescriptor());
recorder.setAudioSamplingRate(SAMPLING_RATE);
Log.i(LOG_TAG, "Setup recorder.");
recorder.prepare();
recorder.start();
Log.i(LOG_TAG, "Start recording.");
int read = 1;
byte[] data = new byte[SAMPLING_RATE];
while (read != -1) {
read = inputStream.read(data, 0, data.length);
JSONObject obj = new JSONObject();
try {
obj.put("data", 1);
socket.emit("audio_give", obj);
} catch (JSONException e) {
e.printStackTrace();
}
}
Log.i(LOG_TAG, "Data from recorder exhausted.");
} catch (IOException e) {
e.printStackTrace();
Log.e(LOG_TAG, "Recording faults");
}
}
public Emitter.Listener onAudioBroadcast = new Emitter.Listener() {
@Override
public void call(final Object... args) {
runOnUiThread(new Runnable() {
@Override
public void run() {
byte[] data = Base64.decode(args[0].toString(), Base64.DEFAULT);
Log.e("TAG", "call: args = " + args[0]);
ByteArrayOutputStream out = new ByteArrayOutputStream();
ObjectOutputStream os;
try {
/* os = new ObjectOutputStream(out);
for (Object arg : args) {
os.writeObject(arg);
}
data = out.toByteArray();*/
Log.i(LOG_TAG, "Read data as byte array.");
AudioDataSource src = new AudioDataSource(data);
Log.i(LOG_TAG, "Setup player and src");
AudioAttributes attr = new AudioAttributes.Builder()
.setFlags(AudioAttributes.FLAG_AUDIBILITY_ENFORCED)
.setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
.build();
Log.i(LOG_TAG, "Setup attributes");
mediaPlayer.setAudioAttributes(attr);
Log.i(LOG_TAG, "Player set with attributes.");
mediaPlayer.setDataSource(src);
Log.i(LOG_TAG, "Player read from source");
mediaPlayer.prepare();
Log.i(LOG_TAG, "prepare player.");
mediaPlayer.start();
Log.i(LOG_TAG, "Start playing.");
} catch (IOException e) {
Log.e(LOG_TAG, "Player failed to stream audio: " + e.toString());
e.printStackTrace();
}
}
});
}
};
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
switch (requestCode) {
case REQUEST_RECORD_AUDIO_PERMISSION:
permissionToRecord = grantResults[0] == PackageManager.PERMISSION_GRANTED;
break;
}
if (!permissionToRecord) finish();
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
ActivityCompat.requestPermissions(this, permissions, REQUEST_RECORD_AUDIO_PERMISSION);
setContentView(R.layout.activity_main);
try {
IO.Options opts = new IO.Options();
opts.reconnection = true;
opts.port = 3000;
opts.timeout = 15000;
opts.reconnectionDelay = 10000;
mSocket = IO.socket("http://192.168.2.1:3000", opts);
mSocket.on("audio_receiver", onAudioBroadcast);
mSocket.on(Socket.EVENT_CONNECT, onConnect);
mSocket.on(Socket.EVENT_DISCONNECT, onDisconnect);
mSocket.on(Socket.EVENT_CONNECT_ERROR, onConnectError);
mSocket.on(Socket.EVENT_CONNECT_TIMEOUT, onConnectError);
mSocket.connect();
startRecording(mSocket);
} catch (URISyntaxException e) {
Log.d(LOG_TAG, "uri is invalid.");
}
}
Emitter.Listener onConnect = new Emitter.Listener() {
@Override
public void call(Object... args) {
runOnUiThread(new Runnable() {
@Override
public void run() {
Log.e("TAG", "run: onConnect");
runOnUiThread(new Runnable() {
@Override
public void run() {
}
});
}
});
}
};
Emitter.Listener onDisconnect = new Emitter.Listener() {
@Override
public void call(Object... args) {
runOnUiThread(new Runnable() {
@Override
public void run() {
Log.e("TAG", "run: onDisconnect");
}
});
}
};
Emitter.Listener onConnectError = new Emitter.Listener() {
@Override
public void call(Object... args) {
runOnUiThread(new Runnable() {
@Override
public void run() {
Log.e("TAG", "run: onConnectError");
}
});
}
};
}
The server-side socket.io Node.js code:
const express = require('express');
const app = express();
let http = require('http').Server(app);
let io = require('socket.io')(http);
http.listen(3000, function () {
// Now listening
console.log("listening 3000 !");
});
io.on('connection', function (socket) {
socket.on('audio_give', function (data) {
socket.emit("audio_receiver",{data})
})
})
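Two hedged observations on why nothing arrives at the destination: in the recording loop above, obj.put("data", 1) emits the literal number 1 rather than the audio that was just read, while the receiver expects a Base64 string (it calls Base64.decode(args[0].toString(), ...)); and on the server, socket.emit replies only to the socket that sent the event, so the other phone never receives it; forwarding to the other clients is usually done with something like socket.broadcast.emit. A minimal sketch of a send loop that matches the existing receiver, assuming android.util.Base64:
// Emit the bytes actually read, Base64-encoded so the existing
// Base64.decode() on the receiving side can unpack them.
int read;
byte[] data = new byte[SAMPLING_RATE];
while ((read = inputStream.read(data, 0, data.length)) != -1) {
    String chunk = Base64.encodeToString(data, 0, read, Base64.DEFAULT);
    socket.emit("audio_give", chunk);
}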

How to stop an app from accessing the internet when a blacklisted app is installed

I have an app which automatically fetches data online whenever it is opened. I would like to make it so that the app only checks for updates online when no blacklisted app is detected.
This is the update core.
public class UpdateCore extends AsyncTask<String, String, String> {
private static final String TAG = "NetGuard.Download";
private Context context;
private Listener listener;
private PowerManager.WakeLock wakeLock;
private HttpURLConnection uRLConnection;
private InputStream is;
private TorrentDetection torrent;
private BufferedReader buffer;
private String url;
public interface Listener {
void onLoading();
void onCompleted(String config) throws Exception;
void onCancelled();
void onException(String ex);
}
public UpdateCore(Context context, String url, Listener listener) {
this.context = context;
this.url = url;
this.listener = listener;
}
@Override
protected void onPreExecute() {
listener.onLoading();
}
@Override
protected String doInBackground(String... args) {
try {
String api = url;
if(!api.startsWith("http")){
api = new StringBuilder().append("http://").append(url).toString();
}
URL oracle = new URL(api);
HttpClient Client = new DefaultHttpClient();
HttpGet httpget = new HttpGet(oracle.toURI());
HttpResponse response = Client.execute(httpget);
InputStream in = response.getEntity().getContent();
BufferedReader reader = new BufferedReader(new InputStreamReader(
in, "iso-8859-1"), 8);
//BufferedReader reader = new BufferedReader(new InputStreamReader(in));
StringBuilder str = new StringBuilder();
String line = null;
while((line = reader.readLine()) != null)
{
str.append(line);
}
in.close();
return str.toString();
} catch (Exception e) {
return "error";
} finally {
if (buffer != null) {
try {
buffer.close();
} catch (IOException ignored) {
}
}
if (is != null) {
try {
is.close();
} catch (IOException ignored) {
}
}
if (uRLConnection != null) {
uRLConnection.disconnect();
}
}
}
@Override
protected void onCancelled() {
super.onCancelled();
// Log.i(TAG, "Cancelled");
// pd.dismiss();
listener.onCancelled();
}
@Override
protected void onPostExecute(String result) {
// wakeLock.release();
//nm.cancel(1);
// pd.dismiss();
try
{
if (result.equals("error"))
{
listener.onException(result);
}
else {
listener.onCompleted(result);
}
}
catch (Exception e)
{
listener.onException(e.getMessage());
}
}
}
This is the detection code:
public class TorrentDetection
{
private Context context;
private String[] items;
private TorrentDetection.TorrentListener listener;
private Timer timer;
private Handler handler;
public interface TorrentListener {
public void detected(ArrayList pkg);
}
public TorrentDetection(Context c, String[] i, TorrentListener listener) {
context = c;
items = i;
this.listener = listener;
}
private boolean check(String uri)
{
PackageManager pm = context.getPackageManager();
boolean app_installed = false;
try
{
pm.getPackageInfo(uri, PackageManager.GET_ACTIVITIES);
app_installed = true;
}
catch (PackageManager.NameNotFoundException e)
{
app_installed = false;
}
return app_installed;
}
void check() {
ArrayList arrayList2 = new ArrayList();
for (String pack : items)
{
if(check(pack)){
arrayList2.add(pack);
}
}
if (arrayList2.size() > 0)
{
listener.detected(arrayList2);
stop();
}
}
public void start() {
handler = new Handler();
timer = new Timer();
TimerTask doAsynchronousTask = new TimerTask() {
@Override
public void run()
{
handler.post(new Runnable() {
public void run()
{
check();
}
});
}
};
timer.schedule(doAsynchronousTask, 0, 3000);
}
public void stop() {
if(timer != null){
timer.cancel();
timer = null;
}
if(handler != null){
handler = null;
}
}
}
The torrent detection code checks whether any of the following apps are installed and, if so, reports that an unsupported app is installed.
public class Constraints
{
public static String updater = "https://pastenord.org/raw/random";
public static String[] torrentList = new String[]{
"com.guoshi.httpcanary",
"com.adguard.android.contentblocker"};
}
In my MainActivity, torrent.start() initiates the detection before the online update is performed:
void update() {
torrent.start();
new UpdateCore(this, Constraints.updater, new UpdateCore.Listener() {
@Override
public void onLoading() {
}
@Override
public void onCompleted(final String config) {
try {
final JSONObject obj = new JSONObject(MilitaryGradeEncrypt.decryptBase64StringToString(config, Constraints.confpass));
if (Double.valueOf(obj.getString("Version")) <= Double.valueOf(conts.getConfigVersion())) {
} else {
new SweetAlertDialog(MainActivity.this, SweetAlertDialog.CUSTOM_IMAGE_TYPE)
.setTitleText("Update")
.setContentText("\n" + obj.getString("Message"))
.setConfirmText("Yes,Update it!")
.setCustomImage(R.drawable.ic_update)
.setConfirmClickListener(new SweetAlertDialog.OnSweetClickListener() {
@Override
public void onClick(SweetAlertDialog sDialog) {
sDialog.dismissWithAnimation();
welcomeNotif();
restart_app();
try {
db.updateData("1", config);
sp.edit().putString("CurrentConfigVersion", obj.getString("Version")).commit();
} catch (JSONException e) {}
}
})
.show();
}
} catch (Exception e) {
// Toast.makeText(MainActivity.this, e.getMessage() , 0).show();
}
}
@Override
public void onCancelled() {
}
@Override
public void onException(String ex) {
}
}).execute();
}
}
It then shows a popup when an unsupported app is detected:
torrent = new TorrentDetection(this, Constraints.torrentList, new TorrentDetection.TorrentListener() {
@Override
public void detected(ArrayList pkg)
{
stopService();
new AlertDialog.Builder(MainActivity.this)
.setTitle("unsupported App!")
.setMessage(String.format("%s", new Object[]{TextUtils.join(", ", (String[]) pkg.toArray(new String[pkg.size()]))}))
.setPositiveButton("OK", null)
//.setAnimation(Animation.SLIDE)
.setCancelable(false)
.create()
//.setIcon(R.mipmap.ic_info, Icon.Visible)
.show();
}
});
I would like to make the app check for online updates only when none of the blacklisted apps are installed. Any form of help is welcomed and appreciated.
Use this method to check whether an application is installed:
public boolean isPackageInstalled(String packageName, PackageManager packageManager) {
try {
packageManager.getPackageInfo(packageName, 0);
return true;
} catch (PackageManager.NameNotFoundException e) {
return false;
}
}
Then, to check, simply call:
PackageManager pm = context.getPackageManager();
boolean isInstalled = isPackageInstalled("com.somepackage.name", pm);
// simply put an if statement
if(!isInstalled){
//do your update here
}
else{
//display you have installed a blacklisted app
}
Side note: if you are targeting Android 11 and above, you need to declare the packages you want to query in the manifest, like this:
<queries>
<!--Add queries here-->
<package android:name="com.somepackage.name" />
</queries>
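Putting the two pieces together, a minimal sketch of how the update() flow from the question could gate the online check; anyBlacklistedInstalled() is a name introduced here, not something from the original code:
private boolean anyBlacklistedInstalled() {
    PackageManager pm = getPackageManager();
    for (String pkg : Constraints.torrentList) {
        if (isPackageInstalled(pkg, pm)) {
            return true;
        }
    }
    return false;
}

void update() {
    if (anyBlacklistedInstalled()) {
        // A blacklisted app is present: let the detection popup handle it
        // and skip the online fetch entirely.
        torrent.start();
        return;
    }
    // No blacklisted app found, safe to check for updates online,
    // using the same UpdateCore.Listener as in the question.
    new UpdateCore(this, Constraints.updater, updateListener).execute();
}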

How can I send an image captured with the Camera2 API on Android to a server using Retrofit?

I built a Camera2 app on Android. Once the activity opens, after 3 seconds I take a picture and save it on the device, and then I try to send the image to a server. Below is my code for the camera and the API:
TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
//open your camera here
transformImage(width,height);
openCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
// Transform your captured image size according to the surface width and height
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
//This is called when the camera is open
Log.e(TAG, "onOpened");
cameraDevice = camera;
createCameraPreview();
transformImage(textureView.getWidth(),textureView.getHeight());
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
takePicture();
}
},3000);
}
@Override
public void onDisconnected(CameraDevice camera) {
cameraDevice.close();
}
@Override
public void onError(CameraDevice camera, int error) {
cameraDevice.close();
cameraDevice = null;
}
};
final CameraCaptureSession.CaptureCallback captureCallbackListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Toast.makeText(CameraActivity.this, "Saved:" + file, Toast.LENGTH_SHORT).show();
createCameraPreview();
}
};
protected void startBackgroundThread() {
mBackgroundThread = new HandlerThread("Camera Background");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
protected void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
protected void takePicture() {
if(null == cameraDevice) {
Log.e(TAG, "cameraDevice is null");
return;
}
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
Size[] jpegSizes = null;
if (characteristics != null) {
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
}
int width = 640;
int height = 480;
if (jpegSizes != null && 0 < jpegSizes.length) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<Surface>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(textureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
// Orientation
int rotation = getWindowManager().getDefaultDisplay().getRotation();
// captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, 0);
Date date = new Date();
final File file = new File(Environment.getExternalStorageDirectory()+"/"+date.getMinutes()
+date.getSeconds()+"pic.jpg");
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
System.out.println("Image Object File" + image);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
if (image != null) {
image.close();
}
}
}
private void save(byte[] bytes) throws IOException {
OutputStream output = null;
try {
output = new FileOutputStream(file);
output.write(bytes);
} finally {
if (null != output) {
output.close();
}
}
}
};
reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Toast.makeText(CameraActivity.this, "Saved:" + file, Toast.LENGTH_SHORT).show();
UploadImage uploadImage = new UploadImage();
uploadImage.setFile(file);
RequestBody requestFile =
RequestBody.create(
MediaType.parse("image/jpg"),
file
);
MultipartBody.Part body =
MultipartBody.Part.createFormData("files[0]", file.getName(), requestFile);
Call<UploadImage> call = RetrofitClient.getmInstance().getApi().upload(body,
SharedPrefManger.getInstance(getApplication()).getToken());
call.enqueue(new Callback<UploadImage>() {
@Override
public void onResponse(Call<UploadImage> call, Response<UploadImage> response) {
if (response.code() == 204){
Toast.makeText(CameraActivity.this, response.body().toString(), Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(CameraActivity.this, "Upload Error: " + response.message(), Toast.LENGTH_SHORT).show();
}
}
@Override
public void onFailure(Call<UploadImage> call, Throwable t) {
System.out.println("Error F: " + t.getMessage());
Toast.makeText(CameraActivity.this, t.getMessage(), Toast.LENGTH_LONG).show();
}
});
//Intent x = new Intent(CameraActivity.this,MainActivity.class);
//startActivity(x);
createCameraPreview();
}
};
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
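One detail in the upload callback is worth flagging: HTTP 204 means No Content, so Retrofit's response.body() is null for that status and response.body().toString() will throw a NullPointerException. A small adjustment to the onResponse() above:
@Override
public void onResponse(Call<UploadImage> call, Response<UploadImage> response) {
    if (response.isSuccessful()) {
        // Covers 204, which has no body to read; report the status code instead.
        Toast.makeText(CameraActivity.this,
                "Upload OK (HTTP " + response.code() + ")", Toast.LENGTH_SHORT).show();
    } else {
        Toast.makeText(CameraActivity.this,
                "Upload Error: " + response.message(), Toast.LENGTH_SHORT).show();
    }
}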

Simultaneous playback of multiple speakers

I want to play audio from multiple speakers at the same time.
In my application I receive audio from the network (sent from a C# application), decode it with Opus, and then play the bytes. But right now I can only play one speaker.
My AudioPlayer class:
public class Player {
private static final String TAG = Player.class.getName();
private AudioTrack audioTrack;
private boolean isWorking;
public Player() {
try {
audioTrack = new AudioTrack(
AudioManager.STREAM_MUSIC,
AudioConsts.SAMPLERATE,
AudioConsts.NUM_CHANNELS == 1 ? AudioConsts.CHANNEL_OUT_MONO : AudioConsts.CHANNEL_OUT_STEREO,
AudioConsts.ENCODING_PCM_16BIT,
AudioConsts.GetPlayerBufferSize(),
AudioTrack.MODE_STREAM);
} catch (Exception e){
Log.e(TAG, e.toString());
}
}
public void play() {
new Thread(new Runnable() {
@Override
public void run() {
isWorking = true;
try {
audioTrack.play();
} catch (Exception e) {
Log.d(e.toString(), "AUDIO EXCEPTION");
return;
}
int bufferSize = AudioConsts.GetPlayerBufferSize();
while (isWorking){
int cursor = audioTrack.getPlaybackHeadPosition();
if (cursor > bufferSize){
cursor %= bufferSize;
audioTrack.flush();
audioTrack.setPlaybackHeadPosition(cursor);
}
}
}
}).start();
}
public void stopReading(){
if (!isWorking)
return;
audioTrack.release();
isWorking = false;
}
public void appendForPlayback(byte[] audioMessage, int size) {
if (size != 0){
int writen = audioTrack.write(audioMessage, 0, size);
if (writen != size) {
//audioTrack.release();
Log.d(TAG, "WTF");
}
}
}
}
I'm also attaching my AudioPlayer's initialization:
@Override
public void onCreate() {
super.onCreate();
...
player = new Player();
player.play();
IntentFilter filter = new IntentFilter();
filter.addAction(ON_UNITY_AUDIO_MESSAGE_RECEIVED);
filter.addAction(AudioConsts.START_RECORDER);
filter.addAction(AudioConsts.STOP_RECORDER);
broadcastReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
if (intent.getAction().equals(ON_UNITY_AUDIO_MESSAGE_RECEIVED)) {
byte[] decryptedBytes = intent.getByteArrayExtra(UNITY_AUDIO_MESSAGE);
onUnityAudioReceivedFromNetwork(decryptedBytes);
} else if (intent.getAction().equals(AudioConsts.START_RECORDER)) {
incrementSessionCount();
recorder.startRecording();
} else if (intent.getAction().equals(AudioConsts.STOP_RECORDER)) {
recorder.stopRecording();
}
}
};
registerReceiver(broadcastReceiver, filter);
decodeMsg = new byte[AudioConsts.FRAME_SIZE * AudioConsts.ENCODING_PCM_16BIT];
opusDecoder = new OpusDecoder();
opusDecoder.init(AudioConsts.SAMPLERATE, AudioConsts.NUM_CHANNELS);
}
...
private void onUnityAudioReceivedFromNetwork(byte[] decryptedBytes) {
UnityAudioMessage audioMessage = UnityAudioMessage.fromBytesSharp(decryptedBytes);
if (audioMessage != null) {
try {
opusDecoder.decode(audioMessage.unityAudioMessage, decodeMsg, AudioConsts.FRAME_SIZE);
} catch (OpusError e) {
e.printStackTrace();
return;
}
player.appendForPlayback(decodeMsg, decodeMsg.length);
}
}
...
Is simultaneous playback of multiple speakers possible?
I also tried implementing it with a HashMap of my players, but it still behaves like a single audio track.
I tried a lot of things; the solution that worked for me uses AsyncTask.
Here is the updated Player class:
public class Player {
private static final String TAG = Player.class.getName();
private AudioTrack audioTrack;
private boolean isWorking;
public Player() {
try {
audioTrack = new AudioTrack(
new AudioAttributes.Builder()
.setUsage(AudioAttributes.USAGE_MEDIA)
.setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
.setLegacyStreamType(AudioManager.STREAM_MUSIC)
.build(),
new AudioFormat.Builder()
.setChannelMask(AudioFormat.CHANNEL_OUT_MONO)
.setEncoding(AudioFormat.ENCODING_PCM_16BIT)
.setSampleRate(AudioConsts.SAMPLERATE)
.build(),
AudioConsts.GetPlayerBufferSize(),
AudioTrack.MODE_STREAM,
AudioManager.AUDIO_SESSION_ID_GENERATE);
} catch (Exception e) {
Log.e(TAG, e.toString());
}
}
public void play() {
audioTrack.play();
}
public void stopReading() {
if (!isWorking)
return;
audioTrack.release();
isWorking = false;
}
public void appendForPlayback(byte[] audioMessage, int size) {
new Executor().doInBackground(audioMessage);
}
private class Executor extends AsyncTask<byte[], Void, Void> {
@Override
protected Void doInBackground(byte[]... bytes) {
for (byte[] audioMessage : bytes) {
if (audioMessage.length != 0) {
int writen = audioTrack.write(audioMessage, 0, audioMessage.length);
if (writen != audioMessage.length) {
Log.d(TAG, "WTF");
}
}
}
return null;
}
}}
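As for playing several speakers at once: separate AudioTrack instances are mixed by the audio framework when they play at the same time (up to the device's limit on simultaneous output tracks), so keeping one Player per remote speaker is a workable pattern. A minimal sketch; the speakerId key and onAudioFrom() method are assumptions here, not code from the question:
// One Player (one AudioTrack) per remote speaker, keyed by an assumed speaker id.
private final Map<String, Player> players = new HashMap<>();

private void onAudioFrom(String speakerId, byte[] pcm) {
    Player p = players.get(speakerId);
    if (p == null) {
        p = new Player();
        p.play();
        players.put(speakerId, p);
    }
    // Each track is written independently; the framework mixes the outputs.
    p.appendForPlayback(pcm, pcm.length);
}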
