Displaying and processing frames from the Android camera - Java

I have an application that processes frames from the camera and displays them on a layout. The class that captures and manages the camera frames looks like this:
package org.opencv.face;
import java.io.IOException;
import java.util.List;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample::SurfaceView";
public Camera mCamera;
private SurfaceHolder mHolder;
private int mFrameWidth;
private int mFrameHeight;
private byte[] mFrame;
private boolean mThreadRun;
private byte[] mBuffer;
public SampleViewBase(Context context) {
super(context);
mHolder = getHolder();
mHolder.addCallback(this);
Log.i(TAG, "Instantiated new " + this.getClass());
}
public int getFrameWidth() {
return mFrameWidth;
}
public int getFrameHeight() {
return mFrameHeight;
}
public void setPreview() throws IOException {
mCamera.setPreviewDisplay(null);
}
public boolean openCamera() {
Log.i(TAG, "openCamera");
releaseCamera();
mCamera = Camera.open();
if(mCamera == null) {
Log.e(TAG, "Can't open camera!");
return false;
}
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
synchronized (SampleViewBase.this) {
System.arraycopy(data, 0, mFrame, 0, data.length);
SampleViewBase.this.notify();
}
camera.addCallbackBuffer(mBuffer);
}
});
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
mThreadRun = false;
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
mCamera = null;
}
}
onPreviewStopped();
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera");
synchronized (this) {
if (mCamera != null) {
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8 * 2;
mBuffer = new byte[size];
/* The buffer where the current frame will be copied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
/* Now we can start a preview */
mCamera.startPreview();
}
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
releaseCamera();
}
/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
protected abstract Bitmap processFrame(byte[] data);
/**
* This method is called when the preview process is being started. It is called before the first frame is delivered and before processFrame is called.
* It is called with the width and height parameters of the preview process. It can be used to prepare the data needed during the frame processing.
* @param previewWidth - the width of the preview frames that will be delivered via processFrame
* @param previewHeight - the height of the preview frames that will be delivered via processFrame
*/
protected abstract void onPreviewStarted(int previewWidth, int previewHeight);
/**
* This method is called when the preview is stopped. By the time it is called, the preview has stopped and all processing of frames has already completed.
* If the Bitmap object returned via processFrame is cached - it is a good time to recycle it.
* Any other resources used during the preview can be released.
*/
protected abstract void onPreviewStopped();
public void run() {
mThreadRun = true;
Log.i(TAG, "Starting processing thread");
while (mThreadRun) {
Bitmap bmp = null;
synchronized (this) {
try {
this.wait();
bmp = processFrame(mFrame);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
if (bmp != null) {
Canvas canvas = mHolder.lockCanvas();
if (canvas != null) {
canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null);
mHolder.unlockCanvasAndPost(canvas);
}
}
}
}
}
The screen does not display anything, but after replacing the method setPreview() with the following:
@TargetApi(11)
public void setPreview() throws IOException {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
mSf = new SurfaceTexture(10);
mCamera.setPreviewTexture(mSf);
}
else
mCamera.setPreviewDisplay(null);
}
it now works properly, but only on Android versions above 3.0, and what I want is for it to work on any version.
How can I fix this?

If you want to receive preview callbacks, you must show the preview, using setPreviewDisplay() with a non-null argument. The API was designed to force you to show this preview on screen. Any workaround that lets you create a dummy SurfaceView that is never rendered may not work on some devices or after the next upgrade.
Prior to Honeycomb, it was possible to create a preview SurfaceView off screen (so that its position was far to the right), and thus the preview was not displayed. This bug was fixed later.
Luckily, on 3.0+ you can use setPreviewTexture(), and there is no way the platform can force you to actually show a texture.
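For reference, a version-aware setPreview() along these lines (a sketch based on this answer; mSurfaceTexture and mDummySurfaceView are illustrative names, not from the original code, and the dummy view still has to be attached to the layout, e.g. as a 1x1 view in a corner) could look like:
@TargetApi(11)
public void setPreview() throws IOException {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
// API 11+: a SurfaceTexture preview target never has to be shown on screen
mSurfaceTexture = new SurfaceTexture(10);
mCamera.setPreviewTexture(mSurfaceTexture);
} else {
// Pre-Honeycomb: the platform insists on a real preview surface; the
// usual workaround is a tiny (1x1) SurfaceView, which, as warned above,
// may break on some devices or after an update
mCamera.setPreviewDisplay(mDummySurfaceView.getHolder());
}
}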

Related

Adjust preview image size in Camera API 2 Android

I am creating a custom camera app. The height of my TextureView is set to 300dp.
I want to set the video resolution to 640 x 480 with a preview height of 300dp (the TextureView height).
I want the camera preview to fill this area without stretching or cropping. How do I achieve this?
public class VideoActivity extends AppCompatActivity{
private static final int SENSOR_ORIENTATION_DEFAULT_DEGREES = 90;
private static final int SENSOR_ORIENTATION_INVERSE_DEGREES = 270;
private static final SparseIntArray DEFAULT_ORIENTATIONS = new SparseIntArray();
private static final SparseIntArray INVERSE_ORIENTATIONS = new SparseIntArray();
private static final String TAG = "Camera2VideoFragment";
private static final int REQUEST_VIDEO_PERMISSIONS = 1;
private static final String FRAGMENT_DIALOG = "dialog";
private static final String[] VIDEO_PERMISSIONS = {
Manifest.permission.CAMERA,
Manifest.permission.RECORD_AUDIO,
};
static {
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_0, 90);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_90, 0);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_180, 270);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
static {
INVERSE_ORIENTATIONS.append(Surface.ROTATION_0, 270);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_90, 180);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_180, 90);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_270, 0);
}
private AutoFitTextureView mTextureView;
private CameraDevice mCameraDevice;
/**
* A reference to the current {@link android.hardware.camera2.CameraCaptureSession} for
* preview.
*/
private CameraCaptureSession mPreviewSession;
/**
* {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a
* {@link TextureView}.
*/
private TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture,
int width, int height) {
openCamera(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture,
int width, int height) {
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
/**
* The {@link android.util.Size} of camera preview.
*/
private Size mPreviewSize;
/**
* The {@link android.util.Size} of video recording.
*/
private Size mVideoSize;
/**
* MediaRecorder
*/
private MediaRecorder mMediaRecorder;
/**
* Whether the app is recording video now
*/
private boolean mIsRecordingVideo;
/**
* An additional thread for running tasks that shouldn't block the UI.
*/
private HandlerThread mBackgroundThread;
/**
* A {@link Handler} for running tasks in the background.
*/
private Handler mBackgroundHandler;
/**
* A {@link Semaphore} to prevent the app from exiting before closing the camera.
*/
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
/**
* {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its status.
*/
private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
startPreview();
mCameraOpenCloseLock.release();
if (null != mTextureView) {
configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
}
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int error) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
Activity activity = VideoActivity.this;
if (null != activity) {
activity.finish();
}
}
};
private Integer mSensorOrientation;
private String mNextVideoAbsolutePath;
private CaptureRequest.Builder mPreviewBuilder;
/**
* In this sample, we choose a video size with 3x4 aspect ratio. Also, we don't use sizes
* larger than 1080p, since MediaRecorder cannot handle such a high-resolution video.
*
* @param choices The list of available sizes
* @return The video size
*/
private static Size chooseVideoSize(Size[] choices) {
for (Size size : choices) {
if (size.getWidth() == size.getHeight() * 4 / 3 && size.getWidth() <= 1080) {
return size;
}
}
Log.e(TAG, "Couldn't find any suitable video size");
return choices[choices.length - 1];
}
/**
* Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
* width and height are at least as large as the respective requested values, and whose aspect
* ratio matches with the specified value.
*
* @param choices The list of sizes that the camera supports for the intended output class
* @param width The minimum desired width
* @param height The minimum desired height
* @param aspectRatio The aspect ratio
* @return The optimal {@code Size}, or an arbitrary one if none were big enough
*/
private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
// Collect the supported resolutions that are at least as big as the preview Surface
List<Size> bigEnough = new ArrayList<>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
for (Size option : choices) {
if (option.getHeight() == option.getWidth() * h / w &&
option.getWidth() >= width && option.getHeight() >= height) {
bigEnough.add(option);
}
}
// Pick the smallest of those, assuming we found any
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else {
Log.e(TAG, "Couldn't find any suitable preview size");
return choices[0];
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_video);
mTextureView = findViewById(R.id.texture);
}
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
if (mTextureView.isAvailable()) {
openCamera(mTextureView.getWidth(), mTextureView.getHeight());
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
@Override
public void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
/**
* Starts a background thread and its {@link Handler}.
*/
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
/**
* Stops the background thread and its {@link Handler}.
*/
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
/**
* Gets whether you should show UI with rationale for requesting permissions.
*
* @param permissions The permissions your app wants to request.
* @return Whether you can show permission rationale UI.
*/
// private boolean shouldShowRequestPermissionRationale(String[] permissions) {
// for (String permission : permissions) {
// if (FragmentCompat.shouldShowRequestPermissionRationale(this, permission)) {
// return true;
// }
// }
// return false;
// }
/**
* Tries to open a {@link CameraDevice}. The result is listened by `mStateCallback`.
*/
@SuppressWarnings("MissingPermission")
private void openCamera(int width, int height) {
// if (!hasPermissionsGranted(VIDEO_PERMISSIONS)) {
// requestVideoPermissions();
// return;
// }
final Activity activity = this;
if (null == activity || activity.isFinishing()) {
return;
}
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
Log.d(TAG, "tryAcquire");
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
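// Note: index 1 is usually the front-facing camera; this line will throw
// ArrayIndexOutOfBoundsException on devices with a single camera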
String cameraId = manager.getCameraIdList()[1];
// Choose the sizes for camera preview and video recording
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
if (map == null) {
throw new RuntimeException("Cannot get available preview/video sizes");
}
mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
width, height, mVideoSize);
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
configureTransform(width, height);
mMediaRecorder = new MediaRecorder();
manager.openCamera(cameraId, mStateCallback, null);
} catch (CameraAccessException e) {
Toast.makeText(activity, "Cannot access the camera.", Toast.LENGTH_SHORT).show();
activity.finish();
} catch (NullPointerException e) {
// Currently an NPE is thrown when the Camera2API is used but not supported on the
// device this code runs.
// Camera2VideoFragment.ErrorDialog.newInstance("cammera error")
// .show(getChildFragmentManager(), FRAGMENT_DIALOG);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.");
}
}
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
closePreviewSession();
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
if (null != mMediaRecorder) {
mMediaRecorder.release();
mMediaRecorder = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.");
} finally {
mCameraOpenCloseLock.release();
}
}
/**
* Start the camera preview.
*/
private void startPreview() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
try {
closePreviewSession();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
Surface previewSurface = new Surface(texture);
mPreviewBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Collections.singletonList(previewSurface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Activity activity = VideoActivity.this;
if (null != activity) {
Toast.makeText(VideoActivity.this, "Failed", Toast.LENGTH_SHORT).show();
}
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
* Update the camera preview. {@link #startPreview()} needs to be called in advance.
*/
private void updatePreview() {
if (null == mCameraDevice) {
return;
}
try {
setUpCaptureRequestBuilder(mPreviewBuilder);
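// Note: the HandlerThread below is started but never used; the repeating
// request already runs on mBackgroundHandler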
HandlerThread thread = new HandlerThread("CameraPreview");
thread.start();
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void setUpCaptureRequestBuilder(CaptureRequest.Builder builder) {
builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
}
/**
* Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`.
* This method should not be called until the camera preview size is determined in
* openCamera, or until the size of `mTextureView` is fixed.
*
* @param viewWidth The width of `mTextureView`
* @param viewHeight The height of `mTextureView`
*/
private void configureTransform(int viewWidth, int viewHeight) {
Activity activity = VideoActivity.this;
if (null == mTextureView || null == mPreviewSize || null == activity) {
return;
}
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / mPreviewSize.getHeight(),
(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
}
mTextureView.setTransform(matrix);
}
private void setUpMediaRecorder() throws IOException {
final Activity activity = VideoActivity.this;
if (null == activity) {
return;
}
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
if (mNextVideoAbsolutePath == null || mNextVideoAbsolutePath.isEmpty()) {
mNextVideoAbsolutePath = getVideoFilePath(VideoActivity.this);
}
mMediaRecorder.setOutputFile(mNextVideoAbsolutePath);
mMediaRecorder.setVideoEncodingBitRate(10000000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight());
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
switch (mSensorOrientation) {
case SENSOR_ORIENTATION_DEFAULT_DEGREES:
mMediaRecorder.setOrientationHint(DEFAULT_ORIENTATIONS.get(rotation));
break;
case SENSOR_ORIENTATION_INVERSE_DEGREES:
mMediaRecorder.setOrientationHint(INVERSE_ORIENTATIONS.get(rotation));
break;
}
mMediaRecorder.prepare();
}
private String getVideoFilePath(Context context) {
final File dir = context.getExternalFilesDir(null);
return (dir == null ? "" : (dir.getAbsolutePath() + "/"))
+ System.currentTimeMillis() + ".mp4";
}
private void startRecordingVideo() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
try {
closePreviewSession();
setUpMediaRecorder();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
List<Surface> surfaces = new ArrayList<>();
// Set up Surface for the camera preview
Surface previewSurface = new Surface(texture);
surfaces.add(previewSurface);
mPreviewBuilder.addTarget(previewSurface);
// Set up Surface for the MediaRecorder
Surface recorderSurface = mMediaRecorder.getSurface();
surfaces.add(recorderSurface);
mPreviewBuilder.addTarget(recorderSurface);
// Start a capture session
// Once the session starts, we can update the UI and start recording
mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
mPreviewSession = cameraCaptureSession;
updatePreview();
VideoActivity.this.runOnUiThread(new Runnable() {
@Override
public void run() {
// UI
mIsRecordingVideo = true;
// Start recording
mMediaRecorder.start();
}
});
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Activity activity = VideoActivity.this;
if (null != activity) {
Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
}
}
}, mBackgroundHandler);
} catch (CameraAccessException | IOException e) {
e.printStackTrace();
}
}
private void closePreviewSession() {
if (mPreviewSession != null) {
mPreviewSession.close();
mPreviewSession = null;
}
}
private void stopRecordingVideo() {
// UI
mIsRecordingVideo = false;
// mButtonVideo.setText("record");
// Stop recording
mMediaRecorder.stop();
mMediaRecorder.reset();
Activity activity = VideoActivity.this;
if (null != activity) {
Toast.makeText(activity, "Video saved: " + mNextVideoAbsolutePath,
Toast.LENGTH_SHORT).show();
Log.d(TAG, "Video saved: " + mNextVideoAbsolutePath);
}
mNextVideoAbsolutePath = null;
startPreview();
}
/**
* Compares two {@code Size}s based on their areas.
*/
static class CompareSizesByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
// We cast here to ensure the multiplications won't overflow
return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
(long) rhs.getWidth() * rhs.getHeight());
}
}
// public static class ErrorDialog extends DialogFragment {
//
// private static final String ARG_MESSAGE = "message";
//
// public static Camera2VideoFragment.ErrorDialog newInstance(String message) {
// Camera2VideoFragment.ErrorDialog dialog = new Camera2VideoFragment.ErrorDialog();
// Bundle args = new Bundle();
// args.putString(ARG_MESSAGE, message);
// dialog.setArguments(args);
// return dialog;
// }
//
// @Override
// public Dialog onCreateDialog(Bundle savedInstanceState) {
// final Activity activity = getActivity();
// return new AlertDialog.Builder(activity)
// .setMessage(getArguments().getString(ARG_MESSAGE))
// .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
// @Override
// public void onClick(DialogInterface dialogInterface, int i) {
// activity.finish();
// }
// })
// .create();
// }
//
// }
public void Record(View view)
{
if (mIsRecordingVideo) {
stopRecordingVideo();
} else {
startRecordingVideo();
}
}
}
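One possible direction (a sketch, not from the original post): a TextureView stretches its buffer to fill the view by default, so to fill the 300dp area without visible distortion you can apply a uniform center-crop scale via setTransform(). Note that truly filling a view whose aspect ratio differs from 4:3 without stretching necessarily crops a little of the image. Assuming the fields from the code above (mPreviewSize, mTextureView) and portrait orientation at Surface.ROTATION_0:
private void applyCenterCrop(int viewWidth, int viewHeight) {
if (null == mPreviewSize) {
return;
}
Matrix matrix = new Matrix();
// In portrait the sensor buffer arrives rotated, so swap its dimensions
float bufferWidth = mPreviewSize.getHeight();
float bufferHeight = mPreviewSize.getWidth();
// Uniform scale that makes the buffer cover the whole view
float scale = Math.max(viewWidth / bufferWidth, viewHeight / bufferHeight);
// Undo the default fit-XY stretch, then apply the covering scale,
// pivoting at the view center so the crop is centered
matrix.setScale(scale * bufferWidth / viewWidth, scale * bufferHeight / viewHeight,
viewWidth / 2f, viewHeight / 2f);
mTextureView.setTransform(matrix);
}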

How to crop only the face instead of the whole body in oval form using the Google Vision face detection API in Android

I want to make an app which takes photos with faces only, but I get the whole image instead of just the face when I save it to my storage file. How do I crop the face and save it to storage with the help of the Google Vision face detection API?
Also, how do I use a Frame in my code to get the face list, and how can I convert it into a bitmap and save it to my storage?
main.xml
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:id="#+id/topLayout"
android:orientation="vertical"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:keepScreenOn="true">
<com.google.android.gms.samples.vision.face.facetracker.ui.camera.CameraSourcePreview
android:id="#+id/preview"
android:layout_width="match_parent"
android:layout_height="match_parent">
<com.google.android.gms.samples.vision.face.facetracker.ui.camera.GraphicOverlay
android:id="#+id/faceOverlay"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</com.google.android.gms.samples.vision.face.facetracker.ui.camera.CameraSourcePreview>
</LinearLayout>
<android.support.v7.widget.AppCompatButton
android:id="#+id/take_pic_btn"
android:layout_gravity="bottom"
android:gravity="center"
android:background="#color/green"
android:layout_margin="10dp"
android:text="Take Image"
android:textStyle="bold"
android:textSize="20sp"
android:textAllCaps="false"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
</FrameLayout>
FaceTrackerActivity.java
public final class FaceTrackerActivity extends AppCompatActivity {
private static final String TAG = "FaceTracker";
private CameraSource mCameraSource = null;
private CameraSourcePreview mPreview;
private GraphicOverlay mGraphicOverlay;
private static final int RC_HANDLE_GMS = 9001;
// permission request codes need to be < 256
private static final int RC_HANDLE_CAMERA_PERM = 2;
private Button takePicButton;
FaceDetector detector;
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setContentView(R.layout.main);
mPreview = (CameraSourcePreview) findViewById(R.id.preview);
mGraphicOverlay = (GraphicOverlay) findViewById(R.id.faceOverlay);
takePicButton=(Button)findViewById(R.id.take_pic_btn);
int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
int gc = ActivityCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE);
if (rc == PackageManager.PERMISSION_GRANTED && gc == PackageManager.PERMISSION_GRANTED) {
createCameraSource();
} else {
requestCameraPermission();
}
takePicButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Toast.makeText(getApplicationContext(),"dilip",Toast.LENGTH_LONG).show();
captureImage();
}
});
}
private void requestCameraPermission() {
Log.w(TAG, "Camera permission is not granted. Requesting permission");
final String[] permissions = new String[]{Manifest.permission.CAMERA,Manifest.permission.WRITE_EXTERNAL_STORAGE};
if (!ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.CAMERA) && !ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.WRITE_EXTERNAL_STORAGE) ) {
ActivityCompat.requestPermissions(this, permissions, RC_HANDLE_CAMERA_PERM);
return;
}
final Activity thisActivity = this;
View.OnClickListener listener = new View.OnClickListener() {
@Override
public void onClick(View view) {
ActivityCompat.requestPermissions(thisActivity, permissions,
RC_HANDLE_CAMERA_PERM);
}
};
Snackbar.make(mGraphicOverlay, R.string.permission_camera_rationale,
Snackbar.LENGTH_INDEFINITE)
.setAction(R.string.ok, listener)
.show();
}
private void createCameraSource() {
Context context = getApplicationContext();
/* FaceDetector detector = new FaceDetector.Builder(context)
.setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
.build();
detector.setProcessor(
new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
.build());*/
detector= new FaceDetector.Builder(context)
.setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
.build();
MyFaceDetector myFaceDetector = new MyFaceDetector(detector);
detector.setProcessor(
new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
.build());
mCameraSource = new CameraSource.Builder(context, myFaceDetector)
.build();
if (!detector.isOperational()) {
}
mCameraSource = new CameraSource.Builder(context, detector)
.setRequestedPreviewSize(640, 480)
.setFacing(CameraSource.CAMERA_FACING_FRONT)
.setRequestedFps(10.0f)
.build();
}
/**
* Restarts the camera.
*/
@Override
protected void onResume() {
super.onResume();
startCameraSource();
}
/**
* Stops the camera.
*/
@Override
protected void onPause() {
super.onPause();
mPreview.stop();
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mCameraSource != null) {
mCameraSource.release();
}
}
/**
* Callback for the result from requesting permissions. This method
* is invoked for every call on {@link #requestPermissions(String[], int)}.
* <p>
* <strong>Note:</strong> It is possible that the permissions request interaction
* with the user is interrupted. In this case you will receive empty permissions
* and results arrays which should be treated as a cancellation.
* </p>
*
* @param requestCode The request code passed in {@link #requestPermissions(String[], int)}.
* @param permissions The requested permissions. Never null.
* @param grantResults The grant results for the corresponding permissions
* which is either {@link PackageManager#PERMISSION_GRANTED}
* or {@link PackageManager#PERMISSION_DENIED}. Never null.
* @see #requestPermissions(String[], int)
*/
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
if (requestCode != RC_HANDLE_CAMERA_PERM) {
Log.d(TAG, "Got unexpected permission result: " + requestCode);
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
return;
}
if (grantResults.length != 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED &&grantResults[1] == PackageManager.PERMISSION_GRANTED) {
Log.d(TAG, "Camera permission granted - initialize the camera source");
// we have permission, so create the camerasource
createCameraSource();
return;
}
Log.e(TAG, "Permission not granted: results len = " + grantResults.length +
" Result code = " + (grantResults.length > 0 ? grantResults[0] : "(empty)"));
DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
finish();
}
};
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Face Tracker sample")
.setMessage(R.string.no_camera_permission)
.setPositiveButton(R.string.ok, listener)
.show();
}
//==============================================================================================
// Camera Source Preview
//==============================================================================================
/**
* Starts or restarts the camera source, if it exists. If the camera source doesn't exist yet
* (e.g., because onResume was called before the camera source was created), this will be called
* again when the camera source is created.
*/
private void startCameraSource() {
// check that the device has play services available.
int code = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(
getApplicationContext());
if (code != ConnectionResult.SUCCESS) {
Dialog dlg =
GoogleApiAvailability.getInstance().getErrorDialog(this, code, RC_HANDLE_GMS);
dlg.show();
}
if (mCameraSource != null) {
try {
mPreview.start(mCameraSource, mGraphicOverlay);
} catch (IOException e) {
Log.e(TAG, "Unable to start camera source.", e);
mCameraSource.release();
mCameraSource = null;
}
}
}
//==============================================================================================
// Graphic Face Tracker
//==============================================================================================
/**
* Factory for creating a face tracker to be associated with a new face. The multiprocessor
* uses this factory to create face trackers as needed -- one for each individual.
*/
private class GraphicFaceTrackerFactory implements MultiProcessor.Factory<Face> {
@Override
public Tracker<Face> create(Face face) {
return new GraphicFaceTracker(mGraphicOverlay);
}
}
/**
* Face tracker for each detected individual. This maintains a face graphic within the app's
* associated face overlay.
*/
private class GraphicFaceTracker extends Tracker<Face> {
private GraphicOverlay mOverlay;
private FaceGraphic mFaceGraphic;
GraphicFaceTracker(GraphicOverlay overlay) {
mOverlay = overlay;
mFaceGraphic = new FaceGraphic(overlay);
}
/**
* Start tracking the detected face instance within the face overlay.
*/
@Override
public void onNewItem(int faceId, Face item) {
mFaceGraphic.setId(faceId);
}
@Override
public void onUpdate(FaceDetector.Detections<Face> detectionResults, Face face) {
mOverlay.add(mFaceGraphic);
mFaceGraphic.updateFace(face);
}
/**
* Hide the graphic when the corresponding face was not detected. This can happen for
* intermediate frames temporarily (e.g., if the face was momentarily blocked from
* view).
*/
@Override
public void onMissing(FaceDetector.Detections<Face> detectionResults) {
mOverlay.remove(mFaceGraphic);
}
/**
* Called when the face is assumed to be gone for good. Remove the graphic annotation from
* the overlay.
*/
@Override
public void onDone() {
mOverlay.remove(mFaceGraphic);
}
}
private void captureImage() {
mPreview.setDrawingCacheEnabled(true);
final Bitmap drawingCache = mPreview.getDrawingCache();
mCameraSource.takePicture(null, new CameraSource.PictureCallback() {
@Override
public void onPictureTaken(byte[] bytes) {
int orientation = Exif.getOrientation(bytes);
Bitmap temp = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
Bitmap picture = rotateImage(temp,orientation);
Bitmap overlay = Bitmap.createBitmap(mGraphicOverlay.getWidth(),mGraphicOverlay.getHeight(),picture.getConfig());
Canvas canvas = new Canvas(overlay);
Matrix matrix = new Matrix();
matrix.setScale((float)overlay.getWidth()/(float)picture.getWidth(),(float)overlay.getHeight()/(float)picture.getHeight());
// mirror by inverting scale and translating
matrix.preScale(-1, 1);
matrix.postTranslate(canvas.getWidth(), 0);
Paint paint = new Paint();
canvas.drawBitmap(picture,matrix,paint);
canvas.drawBitmap(drawingCache,0,0,paint);
try {
String mainpath = getExternalStorageDirectory() + separator + "MaskIt" + separator + "images" + separator;
File basePath = new File(mainpath);
if (!basePath.exists())
Log.d("CAPTURE_BASE_PATH", basePath.mkdirs() ? "Success": "Failed");
String path = mainpath + "photo_" + getPhotoTime() + ".jpg";
File captureFile = new File(path);
captureFile.createNewFile();
if (!captureFile.exists())
Log.d("CAPTURE_FILE_PATH", captureFile.createNewFile() ? "Success": "Failed");
FileOutputStream stream = new FileOutputStream(captureFile);
overlay.compress(Bitmap.CompressFormat.PNG, 100, stream);
stream.flush();
stream.close();
picture.recycle();
drawingCache.recycle();
mPreview.setDrawingCacheEnabled(false);
} catch (IOException e) {
e.printStackTrace();
}
}
private String getPhotoTime() {
DateFormat dateFormatter = new SimpleDateFormat("yyyyMMdd hhmmss");
dateFormatter.setLenient(false);
Date today = new Date();
String s = dateFormatter.format(today);
return s;
}
});
}
private Bitmap rotateImage(Bitmap bm, int i) {
Matrix matrix = new Matrix();
switch (i) {
case ExifInterface.ORIENTATION_NORMAL:
return bm;
case ExifInterface.ORIENTATION_FLIP_HORIZONTAL:
matrix.setScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_180:
matrix.setRotate(180);
break;
case ExifInterface.ORIENTATION_FLIP_VERTICAL:
matrix.setRotate(180);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_TRANSPOSE:
matrix.setRotate(90);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_90:
matrix.setRotate(90);
break;
case ExifInterface.ORIENTATION_TRANSVERSE:
matrix.setRotate(-90);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_270:
matrix.setRotate(-90);
break;
default:
return bm;
}
try {
Bitmap bmRotated = Bitmap.createBitmap(bm, 0, 0, bm.getWidth(), bm.getHeight(), matrix, true);
bm.recycle();
return bmRotated;
} catch (OutOfMemoryError e) {
e.printStackTrace();
return null;
}
}
}
CameraSourcePreview.java
package com.google.android.gms.samples.vision.face.facetracker.ui.camera;
import android.content.Context;
import android.content.res.Configuration;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.ViewGroup;
import com.google.android.gms.common.images.Size;
import com.google.android.gms.vision.CameraSource;
import java.io.IOException;
public class CameraSourcePreview extends ViewGroup {
private static final String TAG = "CameraSourcePreview";
private Context mContext;
private SurfaceView mSurfaceView;
private boolean mStartRequested;
private boolean mSurfaceAvailable;
private CameraSource mCameraSource;
private GraphicOverlay mOverlay;
public CameraSourcePreview(Context context, AttributeSet attrs) {
super(context, attrs);
mContext = context;
mStartRequested = false;
mSurfaceAvailable = false;
mSurfaceView = new SurfaceView(context);
mSurfaceView.getHolder().addCallback(new SurfaceCallback());
addView(mSurfaceView);
}
public void start(CameraSource cameraSource) throws IOException {
if (cameraSource == null) {
stop();
}
mCameraSource = cameraSource;
if (mCameraSource != null) {
mStartRequested = true;
startIfReady();
}
}
public void start(CameraSource cameraSource, GraphicOverlay overlay) throws IOException {
mOverlay = overlay;
start(cameraSource);
}
public void stop() {
if (mCameraSource != null) {
mCameraSource.stop();
}
}
public void release() {
if (mCameraSource != null) {
mCameraSource.release();
mCameraSource = null;
}
}
private void startIfReady() throws IOException {
if (mStartRequested && mSurfaceAvailable) {
mCameraSource.start(mSurfaceView.getHolder());
if (mOverlay != null) {
Size size = mCameraSource.getPreviewSize();
int min = Math.min(size.getWidth(), size.getHeight());
int max = Math.max(size.getWidth(), size.getHeight());
if (isPortraitMode()) {
// Swap width and height sizes when in portrait, since it will be rotated by
// 90 degrees
mOverlay.setCameraInfo(min, max, mCameraSource.getCameraFacing());
} else {
mOverlay.setCameraInfo(max, min, mCameraSource.getCameraFacing());
}
mOverlay.clear();
}
mStartRequested = false;
}
}
private class SurfaceCallback implements SurfaceHolder.Callback {
@Override
public void surfaceCreated(SurfaceHolder surface) {
mSurfaceAvailable = true;
try {
startIfReady();
} catch (IOException e) {
Log.e(TAG, "Could not start camera source.", e);
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surface) {
mSurfaceAvailable = false;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
int previewWidth = 320;
int previewHeight = 240;
if (mCameraSource != null) {
Size size = mCameraSource.getPreviewSize();
if (size != null) {
previewWidth = size.getWidth();
previewHeight = size.getHeight();
}
}
// Swap width and height sizes when in portrait, since it will be rotated 90 degrees
if (isPortraitMode()) {
int tmp = previewWidth;
previewWidth = previewHeight;
previewHeight = tmp;
}
final int viewWidth = right - left;
final int viewHeight = bottom - top;
int childWidth;
int childHeight;
int childXOffset = 0;
int childYOffset = 0;
float widthRatio = (float) viewWidth / (float) previewWidth;
float heightRatio = (float) viewHeight / (float) previewHeight;
// To fill the view with the camera preview, while also preserving the correct aspect ratio,
// it is usually necessary to slightly oversize the child and to crop off portions along one
// of the dimensions. We scale up based on the dimension requiring the most correction, and
// compute a crop offset for the other dimension.
if (widthRatio > heightRatio) {
childWidth = viewWidth;
childHeight = (int) ((float) previewHeight * widthRatio);
childYOffset = (childHeight - viewHeight) / 2;
} else {
childWidth = (int) ((float) previewWidth * heightRatio);
childHeight = viewHeight;
childXOffset = (childWidth - viewWidth) / 2;
}
for (int i = 0; i < getChildCount(); ++i) {
// One dimension will be cropped. We shift child over or up by this offset and adjust
// the size to maintain the proper aspect ratio.
getChildAt(i).layout(
-1 * childXOffset, -1 * childYOffset,
childWidth - childXOffset, childHeight - childYOffset);
}
try {
startIfReady();
} catch (IOException e) {
Log.e(TAG, "Could not start camera source.", e);
}
}
private boolean isPortraitMode() {
int orientation = mContext.getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
return false;
}
if (orientation == Configuration.ORIENTATION_PORTRAIT) {
return true;
}
Log.d(TAG, "isPortraitMode returning false by default");
return false;
}
}
GraphicOverlay.java
package com.google.android.gms.samples.vision.face.facetracker.ui.camera;
import android.content.Context;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.view.View;
import com.google.android.gms.vision.CameraSource;
import java.util.HashSet;
import java.util.Set;
public class GraphicOverlay extends View {
private final Object mLock = new Object();
private int mPreviewWidth;
private float mWidthScaleFactor = 1.0f;
private int mPreviewHeight;
private float mHeightScaleFactor = 1.0f;
private int mFacing = CameraSource.CAMERA_FACING_BACK;
private Set<Graphic> mGraphics = new HashSet<>();
public static abstract class Graphic {
private GraphicOverlay mOverlay;
public Graphic(GraphicOverlay overlay) {
mOverlay = overlay;
}
public abstract void draw(Canvas canvas);
public float scaleX(float horizontal) {
return horizontal * mOverlay.mWidthScaleFactor;
}
public float scaleY(float vertical) {
return vertical * mOverlay.mHeightScaleFactor;
}
public float translateX(float x) {
if (mOverlay.mFacing == CameraSource.CAMERA_FACING_FRONT) {
return mOverlay.getWidth() - scaleX(x);
} else {
return scaleX(x);
}
}
public float translateY(float y) {
return scaleY(y);
}
public void postInvalidate() {
mOverlay.postInvalidate();
}
}
public GraphicOverlay(Context context, AttributeSet attrs) {
super(context, attrs);
}
public void clear() {
synchronized (mLock) {
mGraphics.clear();
}
postInvalidate();
}
public void add(Graphic graphic) {
synchronized (mLock) {
mGraphics.add(graphic);
}
postInvalidate();
}
public void remove(Graphic graphic) {
synchronized (mLock) {
mGraphics.remove(graphic);
}
postInvalidate();
}
public void setCameraInfo(int previewWidth, int previewHeight, int facing) {
synchronized (mLock) {
mPreviewWidth = previewWidth;
mPreviewHeight = previewHeight;
mFacing = facing;
}
postInvalidate();
}
/**
* Draws the overlay with its associated graphic objects.
*/
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
synchronized (mLock) {
if ((mPreviewWidth != 0) && (mPreviewHeight != 0)) {
mWidthScaleFactor = (float) canvas.getWidth() / (float)mPreviewWidth;
mHeightScaleFactor = (float) canvas.getHeight() / (float) mPreviewHeight;
}
for (Graphic graphic : mGraphics) {
graphic.draw(canvas);
}
}
}
}
You should pass the captured image to the FaceDetector API and crop it thereafter.
fun getFace(context: Context, data: ByteArray): Bitmap? {
try {
val imageStream = ByteArrayInputStream(data)
var bitmap = BitmapFactory.decodeStream(imageStream)
if (bitmap.width > bitmap.height) {
val matrix = Matrix()
matrix.postRotate(270f)
if (bitmap.width > 1500) matrix.postScale(0.5f, 0.5f)
bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, true)
}
val faceDetector = FaceDetector.Builder(context).setProminentFaceOnly(true).setTrackingEnabled(false).build()
val frame = Frame.Builder().setBitmap(bitmap).build()
val faces = faceDetector.detect(frame)
var results: Bitmap? = null
for (i in 0 until faces.size()) {
val thisFace = faces.valueAt(i)
// Clamp the face box to the bitmap bounds before cropping;
// createBitmap() takes a width and height, not right/bottom coordinates
val x = thisFace.position.x.coerceAtLeast(0f)
val y = thisFace.position.y.coerceAtLeast(0f)
val width = thisFace.width.coerceAtMost(bitmap.width - x)
val height = thisFace.height.coerceAtMost(bitmap.height - y)
results = Bitmap.createBitmap(bitmap, x.toInt(), y.toInt(), width.toInt(), height.toInt())
}
return results
} catch (e: Exception) {
Log.e("GET_FACE", e.message)
}
return null
}
Source
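A Java equivalent of the same idea, wired into the question's onPictureTaken() callback (a sketch; it reuses the detector field from FaceTrackerActivity and clamps the detected box so createBitmap() stays inside the image):
Bitmap picture = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
Frame frame = new Frame.Builder().setBitmap(picture).build();
SparseArray<Face> faces = detector.detect(frame);
if (faces.size() > 0) {
Face face = faces.valueAt(0);
// Clamp the face box to the bitmap bounds before cropping
int x = Math.max(0, (int) face.getPosition().x);
int y = Math.max(0, (int) face.getPosition().y);
int w = Math.min((int) face.getWidth(), picture.getWidth() - x);
int h = Math.min((int) face.getHeight(), picture.getHeight() - y);
Bitmap faceBitmap = Bitmap.createBitmap(picture, x, y, w, h);
// save faceBitmap to storage instead of the full-size picture
}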

Integrating ZBarScanner with an Android app

I am developing a mobile app to integrate with ZBar scanner. However, I am met with the errors "ZBarScannerActivity cannot be resolved to a type" and "ZBarConstants cannot be resolved to a variable". Please assist with the following code:
MainActivity.java:
package com.example.vscanner;
import net.sourceforge.zbar.Symbol;
import android.os.Bundle;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.view.Menu;
import android.view.View;
import android.widget.Toast;
public class MainActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
}
private static final int ZBAR_SCANNER_REQUEST = 0;
private static final int ZBAR_QR_SCANNER_REQUEST = 1;
public void launchScanner(View v) {
if (isCameraAvailable()) {
Intent intent = new Intent(this, ZBarScannerActivity.class);
startActivityForResult(intent, ZBAR_SCANNER_REQUEST);
} else {
Toast.makeText(this, "Rear Facing Camera Unavailable", Toast.LENGTH_SHORT).show();
}
}
public void launchQRScanner(View v) {
if (isCameraAvailable()) {
Intent intent = new Intent(this, ZBarScannerActivity.class);
intent.putExtra(ZBarConstants.SCAN_MODES, new int[]{Symbol.QRCODE});
startActivityForResult(intent, ZBAR_SCANNER_REQUEST);
} else {
Toast.makeText(this, "Rear Facing Camera Unavailable", Toast.LENGTH_SHORT).show();
}
}
public boolean isCameraAvailable() {
PackageManager pm = getPackageManager();
return pm.hasSystemFeature(PackageManager.FEATURE_CAMERA);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data)
{
if (resultCode == RESULT_OK)
{
// Scan result is available by making a call to data.getStringExtra(ZBarConstants.SCAN_RESULT)
// Type of the scan result is available by making a call to data.getStringExtra(ZBarConstants.SCAN_RESULT_TYPE)
Toast.makeText(this, "Scan Result = " + data.getStringExtra(ZBarConstants.SCAN_RESULT), Toast.LENGTH_SHORT).show();
Toast.makeText(this, "Scan Result Type = " + data.getStringExtra(ZBarConstants.SCAN_RESULT_TYPE), Toast.LENGTH_SHORT).show();
// The value of type indicates one of the symbols listed in Advanced Options below.
} else if(resultCode == RESULT_CANCELED) {
Toast.makeText(this, "Camera unavailable", Toast.LENGTH_SHORT).show();
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
}
Paste these 3 classes into your source folder.
ZBarConstants.java
package com.example.vscanner;
public interface ZBarConstants {
public static final String SCAN_MODES = "SCAN_MODES";
public static final String SCAN_RESULT = "SCAN_RESULT";
public static final String SCAN_RESULT_TYPE = "SCAN_RESULT_TYPE";
public static final String ERROR_INFO = "ERROR_INFO";
}
ZBarScannerActivity.java
package com.example.vscanner;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.hardware.Camera;
import android.os.Bundle;
import android.os.Handler;
import android.text.TextUtils;
import android.view.Window;
import android.view.WindowManager;
import net.sourceforge.zbar.Config;
import net.sourceforge.zbar.Image;
import net.sourceforge.zbar.ImageScanner;
import net.sourceforge.zbar.Symbol;
import net.sourceforge.zbar.SymbolSet;
public class ZBarScannerActivity extends Activity implements Camera.PreviewCallback, ZBarConstants {
private static final String TAG = "ZBarScannerActivity";
private CameraPreview mPreview;
private Camera mCamera;
private ImageScanner mScanner;
private Handler mAutoFocusHandler;
private boolean mPreviewing = true;
static {
System.loadLibrary("iconv");
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if(!isCameraAvailable()) {
// Cancel request if there is no rear-facing camera.
cancelRequest();
return;
}
// Hide the window title.
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
mAutoFocusHandler = new Handler();
// Create and configure the ImageScanner;
setupScanner();
// Create a RelativeLayout container that will hold a SurfaceView,
// and set it as the content of our activity.
mPreview = new CameraPreview(this, this, autoFocusCB);
setContentView(mPreview);
}
public void setupScanner() {
mScanner = new ImageScanner();
mScanner.setConfig(0, Config.X_DENSITY, 3);
mScanner.setConfig(0, Config.Y_DENSITY, 3);
int[] symbols = getIntent().getIntArrayExtra(SCAN_MODES);
if (symbols != null) {
mScanner.setConfig(Symbol.NONE, Config.ENABLE, 0);
for (int symbol : symbols) {
mScanner.setConfig(symbol, Config.ENABLE, 1);
}
}
}
@Override
protected void onResume() {
super.onResume();
// Open the default i.e. the first rear facing camera.
mCamera = Camera.open();
if(mCamera == null) {
// Cancel request if mCamera is null.
cancelRequest();
return;
}
mPreview.setCamera(mCamera);
mPreview.showSurfaceView();
mPreviewing = true;
}
@Override
protected void onPause() {
super.onPause();
// Because the Camera object is a shared resource, it's very
// important to release it when the activity is paused.
if (mCamera != null) {
mPreview.setCamera(null);
mCamera.cancelAutoFocus();
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
mCamera.release();
// According to Jason Kuang on http://stackoverflow.com/questions/6519120/how-to-recover-camera-preview-from-sleep,
// there might be surface recreation problems when the device goes to sleep. So lets just hide it and
// recreate on resume
mPreview.hideSurfaceView();
mPreviewing = false;
mCamera = null;
}
}
public boolean isCameraAvailable() {
PackageManager pm = getPackageManager();
return pm.hasSystemFeature(PackageManager.FEATURE_CAMERA);
}
public void cancelRequest() {
Intent dataIntent = new Intent();
dataIntent.putExtra(ERROR_INFO, "Camera unavailable");
setResult(Activity.RESULT_CANCELED, dataIntent);
finish();
}
public void onPreviewFrame(byte[] data, Camera camera) {
Camera.Parameters parameters = camera.getParameters();
Camera.Size size = parameters.getPreviewSize();
Image barcode = new Image(size.width, size.height, "Y800");
barcode.setData(data);
int result = mScanner.scanImage(barcode);
if (result != 0) {
mCamera.cancelAutoFocus();
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
mPreviewing = false;
SymbolSet syms = mScanner.getResults();
for (Symbol sym : syms) {
String symData = sym.getData();
if (!TextUtils.isEmpty(symData)) {
Intent dataIntent = new Intent();
dataIntent.putExtra(SCAN_RESULT, symData);
dataIntent.putExtra(SCAN_RESULT_TYPE, sym.getType());
setResult(Activity.RESULT_OK, dataIntent);
finish();
break;
}
}
}
}
private Runnable doAutoFocus = new Runnable() {
public void run() {
if(mCamera != null && mPreviewing) {
mCamera.autoFocus(autoFocusCB);
}
}
};
// Mimic continuous auto-focusing
Camera.AutoFocusCallback autoFocusCB = new Camera.AutoFocusCallback() {
public void onAutoFocus(boolean success, Camera camera) {
mAutoFocusHandler.postDelayed(doAutoFocus, 1000);
}
};
}
CameraPreview.java
package com.example.vscanner;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera.Size;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import java.io.IOException;
import java.util.List;
class CameraPreview extends ViewGroup implements SurfaceHolder.Callback {
private final String TAG = "CameraPreview";
SurfaceView mSurfaceView;
SurfaceHolder mHolder;
Size mPreviewSize;
List<Size> mSupportedPreviewSizes;
Camera mCamera;
PreviewCallback mPreviewCallback;
AutoFocusCallback mAutoFocusCallback;
CameraPreview(Context context, PreviewCallback previewCallback, AutoFocusCallback autoFocusCb) {
super(context);
mPreviewCallback = previewCallback;
mAutoFocusCallback = autoFocusCb;
mSurfaceView = new SurfaceView(context);
addView(mSurfaceView);
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = mSurfaceView.getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void setCamera(Camera camera) {
mCamera = camera;
if (mCamera != null) {
mSupportedPreviewSizes = mCamera.getParameters().getSupportedPreviewSizes();
requestLayout();
}
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
// We purposely disregard child measurements because we act as a
// wrapper to a SurfaceView that centers the camera preview instead
// of stretching it.
final int width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec);
final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
setMeasuredDimension(width, height);
if (mSupportedPreviewSizes != null) {
mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width, height);
}
}
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
if (changed && getChildCount() > 0) {
final View child = getChildAt(0);
final int width = r - l;
final int height = b - t;
int previewWidth = width;
int previewHeight = height;
if (mPreviewSize != null) {
previewWidth = mPreviewSize.width;
previewHeight = mPreviewSize.height;
}
// Center the child SurfaceView within the parent.
if (width * previewHeight > height * previewWidth) {
final int scaledChildWidth = previewWidth * height / previewHeight;
child.layout((width - scaledChildWidth) / 2, 0,
(width + scaledChildWidth) / 2, height);
} else {
final int scaledChildHeight = previewHeight * width / previewWidth;
child.layout(0, (height - scaledChildHeight) / 2,
width, (height + scaledChildHeight) / 2);
}
}
}
public void hideSurfaceView() {
mSurfaceView.setVisibility(View.INVISIBLE);
}
public void showSurfaceView() {
mSurfaceView.setVisibility(View.VISIBLE);
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
try {
if (mCamera != null) {
mCamera.setPreviewDisplay(holder);
}
} catch (IOException exception) {
Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// Surface will be destroyed when we return, so stop the preview.
if (mCamera != null) {
mCamera.cancelAutoFocus();
mCamera.stopPreview();
}
}
private Size getOptimalPreviewSize(List<Size> sizes, int w, int h) {
final double ASPECT_TOLERANCE = 0.1;
double targetRatio = (double) w / h;
if (sizes == null) return null;
Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
// Try to find a size that matches the aspect ratio and size
for (Size size : sizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
// Cannot find the one match the aspect ratio, ignore the requirement
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if (holder.getSurface() == null){
// preview surface does not exist
return;
}
if (mCamera != null) {
// Now that the size is known, set up the camera parameters and begin
// the preview.
Camera.Parameters parameters = mCamera.getParameters();
parameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height);
requestLayout();
mCamera.setParameters(parameters);
mCamera.setPreviewCallback(mPreviewCallback);
mCamera.startPreview();
mCamera.autoFocus(mAutoFocusCallback);
}
}
}
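For completeness, here is a minimal sketch of how a container like this is typically driven from an Activity. The class name Preview and the Activity wiring are assumptions for illustration; the snippet above closely follows the pattern of Android's classic CameraPreview sample, so the same lifecycle handling should apply:
import android.app.Activity;
import android.hardware.Camera;
import android.os.Bundle;
public class CameraActivity extends Activity {
    private Preview mPreview; // the container class shown above (assumed name)
    private Camera mCamera;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mPreview = new Preview(this);
        setContentView(mPreview);
    }
    @Override
    protected void onResume() {
        super.onResume();
        mCamera = Camera.open(); // may return null if the camera is unavailable
        mPreview.setCamera(mCamera); // triggers requestLayout() and preview-size selection
    }
    @Override
    protected void onPause() {
        super.onPause();
        if (mCamera != null) {
            mPreview.setCamera(null); // detach before releasing the shared camera
            mCamera.release();
            mCamera = null;
        }
    }
}
Releasing the camera in onPause() matters because the camera is a shared hardware resource; holding it while the Activity is in the background blocks every other app.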

Customize CameraView in Android

I'm going to use the BeyondAR framework in my app and I have a problem with the CameraView. BeyondAR uses the CameraView in landscape mode, but I want to use this view only in portrait mode at half of the screen, as the image shows.
BeyondAR CameraView.java
public class CameraView extends SurfaceView implements SurfaceHolder.Callback,
Camera.PictureCallback {
/**
*
* @author Joan Puig Sanz (joanpuigsanz@gmail.com)
*
*/
public static interface IPictureCallback {
/**
* This method is called when the snapshot of the camera is ready. If
* there is an error, the image will be null
*
* @param picture
*/
void onPictureTaken(Bitmap picture);
}
private SurfaceHolder mHolder;
private Camera mCamera;
private IPictureCallback mCameraCallback;
private BitmapFactory.Options mOptions;
private Size mPreviewSize;
private List<Size> mSupportedPreviewSizes;
private List<String> mSupportedFlashModes;
public CameraView(Context context) {
super(context);
init(context);
}
public CameraView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
init(context);
}
public CameraView(Context context, AttributeSet attrs) {
super(context, attrs);
init(context);
}
private void init(Context context) {
mHolder = getHolder();
mHolder.addCallback(this);
try {
mCamera = Camera.open();
//mCamera.setDisplayOrientation(90);
Method rotateMethod;
rotateMethod = android.hardware.Camera.class.getMethod("setDisplayOrientation", int.class);
rotateMethod.invoke(mCamera, 90);
//Camera.Parameters params = mCamera.getParameters();
//params.setPreviewSize(427, 1240);
//mCamera.setParameters(params);
setCamera(mCamera);
} catch (Exception e) {
Log.e(Constants.TAG, "ERROR: Unable to open the camera", e);
}
if (android.os.Build.VERSION.SDK_INT <= 10) {// Android 2.3.x or lower
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
}
public void setCamera(Camera camera) {
mCamera = camera;
if (mCamera != null) {
mSupportedPreviewSizes = mCamera.getParameters().getSupportedPreviewSizes();
mSupportedFlashModes = mCamera.getParameters().getSupportedFlashModes();
// Set the camera to Auto Flash mode.
if (mSupportedFlashModes != null
&& mSupportedFlashModes.contains(Camera.Parameters.FLASH_MODE_AUTO)) {
Camera.Parameters parameters = mCamera.getParameters();
parameters.setFlashMode(Camera.Parameters.FLASH_MODE_AUTO);
//parameters.setPreviewSize(300, 200);
mCamera.setParameters(parameters);
}
}
}
public void setSupportedPreviewSizes(List<Size> supportedPreviewSizes) {
mSupportedPreviewSizes = supportedPreviewSizes;
}
public Size getPreviewSize() {
return mPreviewSize;
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
try {
if (mCamera == null) {
init(getContext());
if (mCamera == null) {
return;
}
}
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
if (mCamera != null) {
mCamera.release();
}
mCamera = null;
Log.e(Constants.TAG, "CameraView -- ERROR en SurfaceCreated", exception);
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// Surface will be destroyed when we return, so stop the preview.
// Because the CameraDevice object is not a shared resource, it's very
// important to release it when the activity is paused.
if (mCamera == null) {
return;
}
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
final int width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec);
final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
setMeasuredDimension(width, height);
if (mSupportedPreviewSizes != null) {
mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width, height);
}
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
}
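// Largest-area strategy: pick the biggest supported preview size that fits
// entirely inside the view (unlike the aspect-ratio match used further above).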
private Size getOptimalPreviewSize(List<Size> sizes, int width, int height) {
Size result = null;
for (Camera.Size size : sizes) {
if (size.width <= width && size.height <= height) {
if (result == null) {
result = size;
} else {
int resultArea = result.width * result.height;
int newArea = size.width * size.height;
if (newArea > resultArea) {
result = size;
}
}
}
}
return result;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if (mCamera == null || getPreviewSize() == null) {
return;
}
Camera.Parameters parameters = mCamera.getParameters();
Size previewSize = getPreviewSize();
parameters.setPreviewSize(previewSize.width, previewSize.height);
mCamera.setParameters(parameters);
previewCamera();
}
@Override
public void onPictureTaken(byte[] imageData, Camera camera) {
if (imageData != null && mCameraCallback != null) {
mCameraCallback.onPictureTaken(StoreByteImage(imageData));
}
previewCamera();
}
public void previewCamera() {
if (mCamera == null){
return;
}
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
} catch (Exception e) {
Log.d(Constants.TAG, "Cannot start preview.", e);
}
}
private Bitmap StoreByteImage(byte[] imageData) {
Bitmap myImage = DebugBitmap.decodeByteArray(imageData, 0, imageData.length, mOptions);
imageData = null;
System.gc();
return myImage;
}
public void tackePicture(IPictureCallback cameraCallback) {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 4;
tackePicture(cameraCallback, options);
}
public void tackePicture(IPictureCallback cameraCallback, BitmapFactory.Options options) {
if (mCamera == null) {
return;
}
mCameraCallback = cameraCallback;
mCamera.takePicture(null, this, this);
mOptions = options;
}
}
Check out the latest version of the library; this issue is fixed and the framework is much easier to use:
https://github.com/BeyondAR/beyondar
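If upgrading is not an option, one workaround (illustrative only, not part of the BeyondAR API) is to keep the setDisplayOrientation(90) call that init() already makes via reflection and constrain the view's height from the hosting Activity. A minimal sketch, where R.id.camera_container is a hypothetical FrameLayout in the Activity's layout:
// Pin the CameraView to the top half of a portrait screen.
FrameLayout container = (FrameLayout) findViewById(R.id.camera_container);
CameraView cameraView = new CameraView(this);
int halfScreen = getResources().getDisplayMetrics().heightPixels / 2;
container.addView(cameraView, new FrameLayout.LayoutParams(
        FrameLayout.LayoutParams.MATCH_PARENT, halfScreen));
Note that the preview may still look stretched unless the preview size is re-selected to match the new aspect ratio of the view.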

Android FrameLayout.addView(camera_view) crashes app

I'm creating an app that has a camera I built myself using a camera preview. It works fine on my Android 4.1 device (the camera opens and takes photos), but when I try it on my Android 2.3 device the app crashes on frame_layout.addView(camera_view); when I comment out that line, the app doesn't crash.
Here's my code (the dimensions are just there to fit the device screen):
private void setUpCamera() {
camera_view = new CameraSurfaceView(getApplicationContext());
frame_layout = (FrameLayout) findViewById(R.id.image_frame);
// v is a square that I draw
v.getLayoutParams().width = (int) ((720/4) + (5*screenDensity));
v.getLayoutParams().height = (int) ((720/4) + (5*screenDensity));
// img is the ImageView where I display the photo
img.getLayoutParams().width = (int) (720/4);
img.getLayoutParams().height = (int) (720/4);
frame_layout.getLayoutParams().width = (int) (720/4);
frame_layout.getLayoutParams().height = (int) (960/4);
frame_layout.addView(camera_view);
}
Here is the camera_view class:
public class CameraSurfaceView extends SurfaceView implements SurfaceHolder.Callback{
private SurfaceHolder m_holder;
public Camera camera = null;
public CameraSurfaceView(Context context) {
super(context);
m_holder = getHolder();
m_holder.addCallback(this);
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Camera.Parameters params = camera.getParameters();
if (this.getResources().getConfiguration().orientation != Configuration.ORIENTATION_LANDSCAPE) {
camera.setDisplayOrientation(90);
} else {
camera.setDisplayOrientation(0);
}
params.setFlashMode(Camera.Parameters.FLASH_MODE_AUTO);
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
params.setSceneMode(Camera.Parameters.SCENE_MODE_AUTO);
params.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO);
params.setExposureCompensation(0);
params.setJpegQuality(100);
List<Size> sizes = params.getSupportedPictureSizes();
Camera.Size size = sizes.get(0);
for(int i=0;i<sizes.size();i++)
{
if(sizes.get(i).width > size.width)
size = sizes.get(i);
}
params.setPreviewSize(size.width, size.height);
params.setPictureSize(size.width, size.height);
camera.setParameters(params);
camera.startPreview();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
camera = Camera.open();
try {
camera.setPreviewDisplay(m_holder);
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
camera.stopPreview();
camera.release();
camera = null;
}
public void capture(Camera.PictureCallback jpeghandler){
camera.takePicture(null, null, jpeghandler);
}
}
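No stack trace is shown, but two things in this code are known to break on Android 2.3. First, camera preview through a SurfaceView requires SURFACE_TYPE_PUSH_BUFFERS before API 11 (the BeyondAR init() above guards for exactly this). Second, FOCUS_MODE_CONTINUOUS_PICTURE only exists from API 14, and setting an unsupported focus mode makes setParameters() throw a RuntimeException. A sketch of both guards, applied to the same CameraSurfaceView:
public CameraSurfaceView(Context context) {
    super(context);
    m_holder = getHolder();
    m_holder.addCallback(this);
    // Required for camera preview before API 11; without it,
    // setPreviewDisplay() fails on Android 2.3 devices.
    if (android.os.Build.VERSION.SDK_INT < 11) {
        m_holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }
}
// ...and inside surfaceChanged():
// FOCUS_MODE_CONTINUOUS_PICTURE was added in API 14, so check support first.
List<String> focusModes = params.getSupportedFocusModes();
if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
    params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
}
It is also worth noting that the code copies the largest supported picture size into setPreviewSize(); preview sizes should come from getSupportedPreviewSizes() instead, since an unsupported preview size is another common cause of a setParameters() RuntimeException.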
