I am creating a custom camera app. The height of my TextureView is set to 300 dp.
I want to set the video resolution to 640 x 480, with the preview constrained to the 300 dp TextureView height.
I want the camera preview to fill this area without stretching or cropping. How do I achieve this?
public class VideoActivity extends AppCompatActivity{
private static final int SENSOR_ORIENTATION_DEFAULT_DEGREES = 90;
private static final int SENSOR_ORIENTATION_INVERSE_DEGREES = 270;
private static final SparseIntArray DEFAULT_ORIENTATIONS = new SparseIntArray();
private static final SparseIntArray INVERSE_ORIENTATIONS = new SparseIntArray();
private static final String TAG = "Camera2VideoFragment";
private static final int REQUEST_VIDEO_PERMISSIONS = 1;
private static final String FRAGMENT_DIALOG = "dialog";
private static final String[] VIDEO_PERMISSIONS = {
Manifest.permission.CAMERA,
Manifest.permission.RECORD_AUDIO,
};
static {
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_0, 90);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_90, 0);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_180, 270);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
static {
INVERSE_ORIENTATIONS.append(Surface.ROTATION_0, 270);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_90, 180);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_180, 90);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_270, 0);
}
private AutoFitTextureView mTextureView;
private CameraDevice mCameraDevice;
/**
 * A reference to the current {@link android.hardware.camera2.CameraCaptureSession} for
 * preview.
 */
private CameraCaptureSession mPreviewSession;
/**
 * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a
 * {@link TextureView}.
 */
private TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture,
int width, int height) {
openCamera(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture,
int width, int height) {
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
/**
 * The {@link android.util.Size} of camera preview.
 */
private Size mPreviewSize;
/**
 * The {@link android.util.Size} of video recording.
 */
private Size mVideoSize;
/**
* MediaRecorder
*/
private MediaRecorder mMediaRecorder;
/**
* Whether the app is recording video now
*/
private boolean mIsRecordingVideo;
/**
* An additional thread for running tasks that shouldn't block the UI.
*/
private HandlerThread mBackgroundThread;
/**
 * A {@link Handler} for running tasks in the background.
 */
private Handler mBackgroundHandler;
/**
 * A {@link Semaphore} to prevent the app from exiting before closing the camera.
 */
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
/**
 * {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its status.
 */
private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
startPreview();
mCameraOpenCloseLock.release();
if (null != mTextureView) {
configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
}
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int error) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
finish();
}
};
private Integer mSensorOrientation;
private String mNextVideoAbsolutePath;
private CaptureRequest.Builder mPreviewBuilder;
/**
 * In this sample, we choose a video size with a 4:3 aspect ratio. Also, we don't use sizes
 * larger than 1080p, since MediaRecorder cannot handle such a high-resolution video.
 *
 * @param choices The list of available sizes
 * @return The video size
 */
private static Size chooseVideoSize(Size[] choices) {
for (Size size : choices) {
// Compare against the height so 4:3 sizes up to 1440x1080 (1080p-class) qualify.
if (size.getWidth() == size.getHeight() * 4 / 3 && size.getHeight() <= 1080) {
return size;
}
}
Log.e(TAG, "Couldn't find any suitable video size");
return choices[choices.length - 1];
}
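Since the question asks for exactly 640 x 480, here is a minimal sketch of a stricter selector, assuming the device actually lists that size for MediaRecorder (chooseExactVideoSize is a hypothetical helper, not part of the original sample):
private static Size chooseExactVideoSize(Size[] choices) {
// prefer an exact 640x480 match; fall back to chooseVideoSize otherwise
for (Size size : choices) {
if (size.getWidth() == 640 && size.getHeight() == 480) {
return size;
}
}
return chooseVideoSize(choices);
}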
/**
 * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
 * width and height are at least as large as the respective requested values, and whose aspect
 * ratio matches the specified value.
 *
 * @param choices The list of sizes that the camera supports for the intended output class
 * @param width The minimum desired width
 * @param height The minimum desired height
 * @param aspectRatio The aspect ratio
 * @return The optimal {@code Size}, or an arbitrary one if none were big enough
 */
private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
// Collect the supported resolutions that are at least as big as the preview Surface
List<Size> bigEnough = new ArrayList<>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
for (Size option : choices) {
if (option.getHeight() == option.getWidth() * h / w &&
option.getWidth() >= width && option.getHeight() >= height) {
bigEnough.add(option);
}
}
// Pick the smallest of those, assuming we found any
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else {
Log.e(TAG, "Couldn't find any suitable preview size");
return choices[0];
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_video);
mTextureView = findViewById(R.id.texture);
}
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
if (mTextureView.isAvailable()) {
openCamera(mTextureView.getWidth(), mTextureView.getHeight());
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
@Override
public void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
/**
 * Starts a background thread and its {@link Handler}.
 */
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
/**
 * Stops the background thread and its {@link Handler}.
 */
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
/**
 * Gets whether you should show UI with rationale for requesting permissions.
 *
 * @param permissions The permissions your app wants to request.
 * @return Whether you can show permission rationale UI.
 */
// private boolean shouldShowRequestPermissionRationale(String[] permissions) {
// for (String permission : permissions) {
// if (FragmentCompat.shouldShowRequestPermissionRationale(this, permission)) {
// return true;
// }
// }
// return false;
// }
/**
 * Tries to open a {@link CameraDevice}. The result is delivered to `mStateCallback`.
 */
@SuppressWarnings("MissingPermission")
private void openCamera(int width, int height) {
// if (!hasPermissionsGranted(VIDEO_PERMISSIONS)) {
// requestVideoPermissions();
// return;
// }
final Activity activity = this;
if (activity.isFinishing()) {
return;
}
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
Log.d(TAG, "tryAcquire");
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
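// Note: index 1 is usually the front camera; on a single-camera device this
// throws ArrayIndexOutOfBoundsException, so prefer selecting the camera by
// CameraCharacteristics.LENS_FACING in production.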
String cameraId = manager.getCameraIdList()[1];
// Choose the sizes for camera preview and video recording
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
if (map == null) {
throw new RuntimeException("Cannot get available preview/video sizes");
}
mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
width, height, mVideoSize);
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
configureTransform(width, height);
mMediaRecorder = new MediaRecorder();
manager.openCamera(cameraId, mStateCallback, null);
} catch (CameraAccessException e) {
Toast.makeText(activity, "Cannot access the camera.", Toast.LENGTH_SHORT).show();
activity.finish();
} catch (NullPointerException e) {
// Currently an NPE is thrown when the Camera2 API is used but not supported on the
// device this code runs on.
// Camera2VideoFragment.ErrorDialog.newInstance("camera error")
// .show(getChildFragmentManager(), FRAGMENT_DIALOG);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.");
}
}
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
closePreviewSession();
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
if (null != mMediaRecorder) {
mMediaRecorder.release();
mMediaRecorder = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.");
} finally {
mCameraOpenCloseLock.release();
}
}
/**
* Start the camera preview.
*/
private void startPreview() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
try {
closePreviewSession();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
Surface previewSurface = new Surface(texture);
mPreviewBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Collections.singletonList(previewSurface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Toast.makeText(VideoActivity.this, "Failed", Toast.LENGTH_SHORT).show();
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
 * Update the camera preview. {@link #startPreview()} needs to be called in advance.
 */
private void updatePreview() {
if (null == mCameraDevice) {
return;
}
try {
setUpCaptureRequestBuilder(mPreviewBuilder);
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void setUpCaptureRequestBuilder(CaptureRequest.Builder builder) {
builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
}
/**
 * Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`.
 * This method should not be called until the camera preview size is determined in
 * openCamera, or until the size of `mTextureView` is fixed.
 *
 * @param viewWidth The width of `mTextureView`
 * @param viewHeight The height of `mTextureView`
 */
private void configureTransform(int viewWidth, int viewHeight) {
Activity activity = VideoActivity.this;
if (null == mTextureView || null == mPreviewSize) {
return;
}
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
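// Math.max here makes the preview fill the view and crops the overflow (center-crop).
// To show the whole 4:3 frame inside the 300dp-high view without cropping or
// stretching (letterboxing instead), use Math.min for the scale.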
float scale = Math.max(
(float) viewHeight / mPreviewSize.getHeight(),
(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
}
mTextureView.setTransform(matrix);
}
private void setUpMediaRecorder() throws IOException {
final Activity activity = VideoActivity.this;
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
if (mNextVideoAbsolutePath == null || mNextVideoAbsolutePath.isEmpty()) {
mNextVideoAbsolutePath = getVideoFilePath(VideoActivity.this);
}
mMediaRecorder.setOutputFile(mNextVideoAbsolutePath);
mMediaRecorder.setVideoEncodingBitRate(10000000);
mMediaRecorder.setVideoFrameRate(30);
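// To force the requested 640x480 recording, setVideoSize(640, 480) could be
// hard-coded here, provided the device lists 640x480 for MediaRecorder output.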
mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight());
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
switch (mSensorOrientation) {
case SENSOR_ORIENTATION_DEFAULT_DEGREES:
mMediaRecorder.setOrientationHint(DEFAULT_ORIENTATIONS.get(rotation));
break;
case SENSOR_ORIENTATION_INVERSE_DEGREES:
mMediaRecorder.setOrientationHint(INVERSE_ORIENTATIONS.get(rotation));
break;
}
mMediaRecorder.prepare();
}
private String getVideoFilePath(Context context) {
final File dir = context.getExternalFilesDir(null);
return (dir == null ? "" : (dir.getAbsolutePath() + "/"))
+ System.currentTimeMillis() + ".mp4";
}
private void startRecordingVideo() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
try {
closePreviewSession();
setUpMediaRecorder();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
List<Surface> surfaces = new ArrayList<>();
// Set up Surface for the camera preview
Surface previewSurface = new Surface(texture);
surfaces.add(previewSurface);
mPreviewBuilder.addTarget(previewSurface);
// Set up Surface for the MediaRecorder
Surface recorderSurface = mMediaRecorder.getSurface();
surfaces.add(recorderSurface);
mPreviewBuilder.addTarget(recorderSurface);
// Start a capture session
// Once the session starts, we can update the UI and start recording
mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
mPreviewSession = cameraCaptureSession;
updatePreview();
VideoActivity.this.runOnUiThread(new Runnable() {
@Override
public void run() {
// UI
mIsRecordingVideo = true;
// Start recording
mMediaRecorder.start();
}
});
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Toast.makeText(VideoActivity.this, "Failed", Toast.LENGTH_SHORT).show();
}
}
}, mBackgroundHandler);
} catch (CameraAccessException | IOException e) {
e.printStackTrace();
}
}
private void closePreviewSession() {
if (mPreviewSession != null) {
mPreviewSession.close();
mPreviewSession = null;
}
}
private void stopRecordingVideo() {
// UI
mIsRecordingVideo = false;
// mButtonVideo.setText("record");
// Stop recording
mMediaRecorder.stop();
mMediaRecorder.reset();
Toast.makeText(this, "Video saved: " + mNextVideoAbsolutePath,
Toast.LENGTH_SHORT).show();
Log.d(TAG, "Video saved: " + mNextVideoAbsolutePath);
mNextVideoAbsolutePath = null;
startPreview();
}
/**
 * Compares two {@code Size}s based on their areas.
 */
static class CompareSizesByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
// We cast here to ensure the multiplications won't overflow
return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
(long) rhs.getWidth() * rhs.getHeight());
}
}
// public static class ErrorDialog extends DialogFragment {
//
// private static final String ARG_MESSAGE = "message";
//
// public static Camera2VideoFragment.ErrorDialog newInstance(String message) {
// Camera2VideoFragment.ErrorDialog dialog = new Camera2VideoFragment.ErrorDialog();
// Bundle args = new Bundle();
// args.putString(ARG_MESSAGE, message);
// dialog.setArguments(args);
// return dialog;
// }
//
// @Override
// public Dialog onCreateDialog(Bundle savedInstanceState) {
// final Activity activity = getActivity();
// return new AlertDialog.Builder(activity)
// .setMessage(getArguments().getString(ARG_MESSAGE))
// .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
// @Override
// public void onClick(DialogInterface dialogInterface, int i) {
// activity.finish();
// }
// })
// .create();
// }
//
// }
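// onClick handler, presumably wired from the layout via android:onClick="Record"
// (which would explain the capitalized method name).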
public void Record(View view) {
if (mIsRecordingVideo) {
stopRecordingVideo();
} else {
startRecordingVideo();
}
}
}
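The code above relies on AutoFitTextureView (from the stock camera2 samples) to avoid stretching: setAspectRatio() makes the view re-measure itself to the preview's aspect ratio instead of distorting the content. In case that class is missing from the project, here is a minimal sketch following the standard sample; treat it as a reference, not the only possible implementation:
public class AutoFitTextureView extends TextureView {
private int mRatioWidth = 0;
private int mRatioHeight = 0;
public AutoFitTextureView(Context context) {
this(context, null);
}
public AutoFitTextureView(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public AutoFitTextureView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
}
// Remember the aspect ratio (e.g., 640x480) and trigger a re-layout.
public void setAspectRatio(int width, int height) {
if (width < 0 || height < 0) {
throw new IllegalArgumentException("Size cannot be negative.");
}
mRatioWidth = width;
mRatioHeight = height;
requestLayout();
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
int width = MeasureSpec.getSize(widthMeasureSpec);
int height = MeasureSpec.getSize(heightMeasureSpec);
if (0 == mRatioWidth || 0 == mRatioHeight) {
setMeasuredDimension(width, height);
} else {
// shrink one dimension so the view itself matches the preview's aspect ratio
if (width < height * mRatioWidth / mRatioHeight) {
setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
} else {
setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
}
}
}
}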
2020-04-22 16:14:49.759 1809-1809/? E/servicemanager: Could not find android.hardware.power.IPower/default in the VINTF manifest.
This keeps popping up. I am coding a camera app in which, when a button is clicked, an image is cropped.
Here is a custom view I am adding to a fragment.
public class DrawView extends View {
Point[] points = new Point[4];
/**
 * points 1 and 3 form one drag group; points 2 and 4 form the other
 */
int groupId = -1;
// array that holds the corner balls
private ArrayList<ColorBall> colorballs = new ArrayList<>();
private int mStrokeColor = Color.parseColor("#AADB1255");
private int mFillColor = Color.parseColor("#55DB1255");
private Rect mCropRect = new Rect();
// id of the ball currently being dragged
private int balID = 0;
Paint paint;
public DrawView(Context context) {
this(context, null);
}
public DrawView(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public DrawView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
init();
}
private void init() {
paint = new Paint();
setFocusable(true); // necessary for getting the touch events
}
private void initRectangle(int X, int Y) {
//initialize rectangle.
points[0] = new Point();
points[0].x = X - 200;
points[0].y = Y - 100;
points[1] = new Point();
points[1].x = X;
points[1].y = Y + 30;
points[2] = new Point();
points[2].x = X + 30;
points[2].y = Y + 30;
points[3] = new Point();
points[3].x = X + 30;
points[3].y = Y;
balID = 2;
groupId = 1;
// declare each ball with the ColorBall class
for (int i = 0; i < points.length; i++) {
colorballs.add(new ColorBall(getContext(), R.drawable.gray_circle, points[i], i));
}
}
// the method that draws the balls
@Override
protected void onDraw(Canvas canvas) {
if (points[3] == null) {
// the points are null until the view is first drawn
initRectangle(getWidth() / 2, getHeight() / 2);
}
int left, top, right, bottom;
left = points[0].x;
top = points[0].y;
right = points[0].x;
bottom = points[0].y;
for (int i = 1; i < points.length; i++) {
left = Math.min(left, points[i].x);
top = Math.min(top, points[i].y);
right = Math.max(right, points[i].x);
bottom = Math.max(bottom, points[i].y);
}
paint.setAntiAlias(true);
paint.setDither(true);
paint.setStrokeJoin(Paint.Join.ROUND);
paint.setStrokeWidth(5);
//draw stroke
paint.setStyle(Paint.Style.STROKE);
paint.setColor(mStrokeColor);
paint.setStrokeWidth(2);
mCropRect.left = left + colorballs.get(0).getWidthOfBall() / 2;
mCropRect.top = top + colorballs.get(0).getWidthOfBall() / 2;
mCropRect.right = right + colorballs.get(2).getWidthOfBall() / 2;
mCropRect.bottom = bottom + colorballs.get(3).getWidthOfBall() / 2;
canvas.drawRect(mCropRect, paint);
//fill the rectangle
paint.setStyle(Paint.Style.FILL);
paint.setColor(mFillColor);
paint.setStrokeWidth(0);
canvas.drawRect(mCropRect, paint);
// draw the balls on the canvas
paint.setColor(Color.RED);
paint.setTextSize(18);
paint.setStrokeWidth(0);
for (int i = 0; i < colorballs.size(); i++) {
ColorBall ball = colorballs.get(i);
canvas.drawBitmap(ball.getBitmap(), ball.getX(), ball.getY(),
paint);
canvas.drawText("" + (i+1), ball.getX(), ball.getY(), paint);
}
}
// events when touching the screen
@Override
public boolean onTouchEvent(MotionEvent event) {
int eventAction = event.getAction();
int X = (int) event.getX();
int Y = (int) event.getY();
switch (eventAction) {
case MotionEvent.ACTION_DOWN: // touch down so check if the finger is on
// a ball
if (points[0] == null) {
initRectangle(X, Y);
} else {
//resize rectangle
balID = -1;
groupId = -1;
for (int i = colorballs.size()-1; i>=0; i--) {
ColorBall ball = colorballs.get(i);
// check if inside the bounds of the ball (circle)
// get the center of the ball
int centerX = ball.getX() + ball.getWidthOfBall() / 2;
int centerY = ball.getY() + ball.getHeightOfBall() / 2;
paint.setColor(Color.CYAN);
// distance from the touch point to the center of the ball
double radCircle = Math.sqrt((double) ((centerX - X) * (centerX - X)
+ (centerY - Y) * (centerY - Y)));
if (radCircle < ball.getWidthOfBall()) {
balID = ball.getID();
if (balID == 1 || balID == 3) {
groupId = 2;
} else {
groupId = 1;
}
invalidate();
break;
}
invalidate();
}
}
break;
case MotionEvent.ACTION_MOVE: // touch drag with the ball
if (balID > -1) {
// move the balls the same as the finger
colorballs.get(balID).setX(X);
colorballs.get(balID).setY(Y);
paint.setColor(Color.CYAN);
if (groupId == 1) {
colorballs.get(1).setX(colorballs.get(0).getX());
colorballs.get(1).setY(colorballs.get(2).getY());
colorballs.get(3).setX(colorballs.get(2).getX());
colorballs.get(3).setY(colorballs.get(0).getY());
} else {
colorballs.get(0).setX(colorballs.get(1).getX());
colorballs.get(0).setY(colorballs.get(3).getY());
colorballs.get(2).setX(colorballs.get(3).getX());
colorballs.get(2).setY(colorballs.get(1).getY());
}
invalidate();
}
break;
case MotionEvent.ACTION_UP:
// touch drop - just do things here after dropping
break;
}
// redraw the canvas
invalidate();
return true;
}
public Drawable doTheCrop(Bitmap sourceBitmap) throws IOException {
//Bitmap sourceBitmap = null;
//Drawable backgroundDrawable = getBackground();
/*
if (backgroundDrawable instanceof BitmapDrawable) {
BitmapDrawable bitmapDrawable = (BitmapDrawable) backgroundDrawable;
if(bitmapDrawable.getBitmap() != null) {
sourceBitmap = bitmapDrawable.getBitmap();
}
}*/
// the source bitmap was scaled relative to this view, so map the crop rect
// from view coordinates into bitmap coordinates
float widthRate = ((float) sourceBitmap.getWidth()) / getWidth();
float heightRate = ((float) sourceBitmap.getHeight()) / getHeight();
// crop with the scaled rect, clamped to the bitmap bounds
// (Bitmap.createBitmap throws if the rect falls outside the source)
int left = Math.max(0, (int) (mCropRect.left * widthRate));
int top = Math.max(0, (int) (mCropRect.top * heightRate));
int right = Math.min(sourceBitmap.getWidth(), (int) (mCropRect.right * widthRate));
int bottom = Math.min(sourceBitmap.getHeight(), (int) (mCropRect.bottom * heightRate));
Bitmap croppedBitmap = Bitmap.createBitmap(sourceBitmap, left, top, right - left, bottom - top);
return new BitmapDrawable(getResources(), croppedBitmap);
/*
setContentView(R.layout.fragment_dashboard);
Button btn = (Button)findViewById(R.id.capture);
if (btn == null){
System.out.println("NULL");
}
try{
btn.setText("HI");
}
catch (Exception e){
}
//setBackground(drawable);*/
//savebitmap(croppedBitmap);
}
private File savebitmap(Bitmap bmp) throws IOException {
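// Note: Environment.getExternalStorageDirectory() is deprecated on API 29+;
// getContext().getExternalFilesDir(null) avoids needing the storage permission.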
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
bmp.compress(Bitmap.CompressFormat.JPEG, 60, bytes);
File f = new File(Environment.getExternalStorageDirectory()
+ "/" + "testimage.jpg");
Toast.makeText(getContext(), "YUP", Toast.LENGTH_LONG).show();
f.createNewFile();
FileOutputStream fo = new FileOutputStream(f);
fo.write(bytes.toByteArray());
fo.close();
return f;
}
public static class ColorBall {
Bitmap bitmap;
Context mContext;
Point point;
int id;
public ColorBall(Context context, int resourceId, Point point, int id) {
this.id = id;
bitmap = BitmapFactory.decodeResource(context.getResources(),
resourceId);
mContext = context;
this.point = point;
}
public int getWidthOfBall() {
return bitmap.getWidth();
}
public int getHeightOfBall() {
return bitmap.getHeight();
}
public Bitmap getBitmap() {
return bitmap;
}
public int getX() {
return point.x;
}
public int getY() {
return point.y;
}
public int getID() {
return id;
}
public void setX(int x) {
point.x = x;
}
public void setY(int y) {
point.y = y;
}
}
}
Here is the fragment that I have added a camera to; it is basically the main part of the application that I am working on.
public class DashboardFragment extends Fragment {
private DashboardViewModel dashboardViewModel;
//All my constants
private DrawView mDrawView;
private Drawable imgDraw;
private TextureView txtView;
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static{
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private String cameraID;
private String pathway;
CameraDevice cameraDevice;
CameraCaptureSession cameraCaptureSession;
CaptureRequest captureRequest;
CaptureRequest.Builder captureRequestBuilder;
private Size imageDimensions;
private ImageReader imageReader;
private File file;
Handler mBackgroundHandler;
HandlerThread mBackgroundThread;
@Override
public View onCreateView(@NonNull LayoutInflater inflater,
ViewGroup container, Bundle savedInstanceState) {
dashboardViewModel =
ViewModelProviders.of(this).get(DashboardViewModel.class);
View root = inflater.inflate(R.layout.fragment_dashboard, container, false);
try{
txtView = (TextureView)root.findViewById(R.id.textureView);
txtView.setSurfaceTextureListener(textureListener);
mDrawView = root.findViewById(R.id.draw_view);
Button cap = (Button)root.findViewById(R.id.capture);
cap.setClickable(true);
cap.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
Log.i("HOLA","HOLA");
takePicture();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
});
}
catch (Exception e){
Log.i("HI",e.toString());
}
return root;
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
if (requestCode == 101) {
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_DENIED) {
Toast.makeText(getActivity().getApplicationContext(), "Permission is required", Toast.LENGTH_LONG).show();
}
}
}
TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1) {
try {
openCamera();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int i, int i1) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
cameraDevice = camera;
try {
createCameraPreview();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onDisconnected(@NonNull CameraDevice camera) {
// use a differently named parameter so the field (not the parameter) is cleared
camera.close();
cameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice camera, int i) {
camera.close();
cameraDevice = null;
}
};
private void createCameraPreview() throws CameraAccessException {
SurfaceTexture texture = txtView.getSurfaceTexture();
texture.setDefaultBufferSize(imageDimensions.getWidth(), imageDimensions.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(surface);
cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
if (cameraDevice == null){
return;
}
cameraCaptureSession = session;
try {
updatePreview();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Toast.makeText(getActivity().getApplicationContext(), "Configuration failed", Toast.LENGTH_LONG).show();
}
}, null);
}
private void updatePreview() throws CameraAccessException {
if (cameraDevice == null){
return;
}
captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);
}
private void openCamera() throws CameraAccessException {
CameraManager manager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
cameraID = manager.getCameraIdList()[0];
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraID);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
imageDimensions = map.getOutputSizes(SurfaceTexture.class)[0];
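// getOutputSizes() usually lists sizes largest-first, but the order is not
// guaranteed; in production, pick a size that matches the view's aspect ratio.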
if (ActivityCompat.checkSelfPermission(getActivity().getApplicationContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED
&& ActivityCompat.checkSelfPermission(getActivity().getApplicationContext(), Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED){
ActivityCompat.requestPermissions(getActivity(), new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE}, 101);
return;
}
manager.openCamera(cameraID, stateCallback, null);
}
private void takePicture() throws CameraAccessException {
if (cameraDevice == null) {
Log.i("NOt working", "hi");
return;
}
CameraManager manager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
Size[] jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
int width = 640;
int height = 480;
if (jpegSizes != null && jpegSizes.length > 0) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
final ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(txtView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
int rotation = getActivity().getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
Long tsLong = System.currentTimeMillis() / 1000;
String ts = tsLong.toString();
file = new File(Environment.getExternalStorageDirectory() + "/" + ts + ".jpg");
pathway = Environment.getExternalStorageDirectory() + "/" + ts + ".jpg";
//cameraDevice.close();
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader imageReader) {
Image image = null;
try {
image = reader.acquireNextImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
final Drawable back = mDrawView.doTheCrop(bitmap);
// save(bytes); // optionally persist the raw JPEG as well
// view mutations must run on the main thread; this callback runs on mBackgroundHandler
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
Button btn = (Button) getView().findViewById(R.id.capture);
btn.setBackground(back);
}
});
} catch (IOException e) {
e.printStackTrace();
} finally {
// always close the Image, or the ImageReader stalls once maxImages are outstanding
if (image != null) {
image.close();
}
}
}
};
reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback(){
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
try {
createCameraPreview();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
};
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
}
}, mBackgroundHandler);
}
private void save(byte[] bytes) throws IOException {
OutputStream outputStream = new FileOutputStream(file);
outputStream.write(bytes);
Toast.makeText(getActivity().getApplicationContext(),pathway,Toast.LENGTH_LONG).show();
outputStream.close();
imgDraw = Drawable.createFromPath(pathway);
//mDrawView.doTheCrop(imgDraw);
}
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
if (txtView.isAvailable()){
try {
openCamera();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
else{
txtView.setSurfaceTextureListener(textureListener);
}
}
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("Camera Background");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
protected void stopBackgroundThread() throws InterruptedException{
mBackgroundThread.quitSafely();
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
}
@Override
public void onPause() {
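// Note: the camera device itself is never closed here; in production, also
// close cameraDevice in onPause so other apps can use the camera.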
try {
stopBackgroundThread();
} catch (InterruptedException e) {
e.printStackTrace();
}
super.onPause();
}
}
Here is the xml file for that fragment.
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".ui.dashboard.DashboardFragment">
<TextureView
android:id="@+id/textureView"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
<com.PeavlerDevelopment.OpinionMinion.ui.dashboard.DrawView
android:id="@+id/draw_view"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
<Button
android:id="@+id/capture"
android:layout_width="100dp"
android:layout_height="200dp"
android:clickable="true"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"/>
</androidx.constraintlayout.widget.ConstraintLayout>
The problem seems to lie somewhere in the doTheCrop method of the DrawView class.
If there is anything else that would help make the problem more clear, let me know! I will gladly share the github repo with you.
Thank you.
As you can see in the Android design documentation, VINTF stands for Vendor Interface; it is a manifest structure that aggregates data from the device. That specific log means that your manifest is missing an entry like this:
<hal>
<name>android.hardware.power</name>
<transport>hwbinder</transport>
<version>1.1</version>
<interface>
<name>IPower</name>
<instance>default</instance>
</interface>
</hal>
which is basically the hardware power HAL declaration.
I don't think it is related to what you are trying to do, but I would need more info than that log line.
I want to make an app which takes photos only of faces, but I get the whole image instead of just the face when I save it to storage. How do I crop the face and save it to storage with the help of the Google Vision face detection API?
How do I use a Frame in my code to get the face list, and how can I convert it into a bitmap and save it to my storage?
main.xml
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:id="#+id/topLayout"
android:orientation="vertical"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:keepScreenOn="true">
<com.google.android.gms.samples.vision.face.facetracker.ui.camera.CameraSourcePreview
android:id="#+id/preview"
android:layout_width="match_parent"
android:layout_height="match_parent">
<com.google.android.gms.samples.vision.face.facetracker.ui.camera.GraphicOverlay
android:id="#+id/faceOverlay"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</com.google.android.gms.samples.vision.face.facetracker.ui.camera.CameraSourcePreview>
</LinearLayout>
<android.support.v7.widget.AppCompatButton
android:id="#+id/take_pic_btn"
android:layout_gravity="bottom"
android:gravity="center"
android:background="#color/green"
android:layout_margin="10dp"
android:text="Take Image"
android:textStyle="bold"
android:textSize="20sp"
android:textAllCaps="false"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
</FrameLayout>
FaceTrackerActivity.java
public final class FaceTrackerActivity extends AppCompatActivity {
private static final String TAG = "FaceTracker";
private CameraSource mCameraSource = null;
private CameraSourcePreview mPreview;
private GraphicOverlay mGraphicOverlay;
private static final int RC_HANDLE_GMS = 9001;
// permission request codes need to be < 256
private static final int RC_HANDLE_CAMERA_PERM = 2;
private Button takePicButton;
FaceDetector detector;
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setContentView(R.layout.main);
mPreview = (CameraSourcePreview) findViewById(R.id.preview);
mGraphicOverlay = (GraphicOverlay) findViewById(R.id.faceOverlay);
takePicButton = (Button) findViewById(R.id.take_pic_btn);
int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
int gc = ActivityCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE);
if (rc == PackageManager.PERMISSION_GRANTED && gc == PackageManager.PERMISSION_GRANTED) {
createCameraSource();
} else {
requestCameraPermission();
}
takePicButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Toast.makeText(getApplicationContext(),"dilip",Toast.LENGTH_LONG).show();
captureImage();
}
});
}
private void requestCameraPermission() {
Log.w(TAG, "Camera permission is not granted. Requesting permission");
final String[] permissions = new String[]{Manifest.permission.CAMERA,Manifest.permission.WRITE_EXTERNAL_STORAGE};
if (!ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.CAMERA) && !ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.WRITE_EXTERNAL_STORAGE) ) {
ActivityCompat.requestPermissions(this, permissions, RC_HANDLE_CAMERA_PERM);
return;
}
final Activity thisActivity = this;
View.OnClickListener listener = new View.OnClickListener() {
@Override
public void onClick(View view) {
ActivityCompat.requestPermissions(thisActivity, permissions,
RC_HANDLE_CAMERA_PERM);
}
};
Snackbar.make(mGraphicOverlay, R.string.permission_camera_rationale,
Snackbar.LENGTH_INDEFINITE)
.setAction(R.string.ok, listener)
.show();
}
private void createCameraSource() {
Context context = getApplicationContext();
/* FaceDetector detector = new FaceDetector.Builder(context)
.setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
.build();
detector.setProcessor(
new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
.build());*/
detector= new FaceDetector.Builder(context)
.setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
.build();
MyFaceDetector myFaceDetector = new MyFaceDetector(detector);
detector.setProcessor(
new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
.build());
if (!detector.isOperational()) {
Log.w(TAG, "Face detector dependencies are not yet available.");
}
// note: building the CameraSource with `detector` here discards the
// MyFaceDetector wrapper created above; pass `myFaceDetector` instead if
// the wrapper is meant to intercept frames
mCameraSource = new CameraSource.Builder(context, detector)
.setRequestedPreviewSize(640, 480)
.setFacing(CameraSource.CAMERA_FACING_FRONT)
.setRequestedFps(10.0f)
.build();
}
/**
* Restarts the camera.
*/
@Override
protected void onResume() {
super.onResume();
startCameraSource();
}
/**
* Stops the camera.
*/
@Override
protected void onPause() {
super.onPause();
mPreview.stop();
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mCameraSource != null) {
mCameraSource.release();
}
}
/**
 * Callback for the result from requesting permissions. This method
 * is invoked for every call on {@link #requestPermissions(String[], int)}.
 * <p>
 * <strong>Note:</strong> It is possible that the permissions request interaction
 * with the user is interrupted. In this case you will receive empty permissions
 * and results arrays which should be treated as a cancellation.
 * </p>
 *
 * @param requestCode The request code passed in {@link #requestPermissions(String[], int)}.
 * @param permissions The requested permissions. Never null.
 * @param grantResults The grant results for the corresponding permissions
 * which is either {@link PackageManager#PERMISSION_GRANTED}
 * or {@link PackageManager#PERMISSION_DENIED}. Never null.
 * @see #requestPermissions(String[], int)
 */
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
if (requestCode != RC_HANDLE_CAMERA_PERM) {
Log.d(TAG, "Got unexpected permission result: " + requestCode);
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
return;
}
if (grantResults.length > 1 && grantResults[0] == PackageManager.PERMISSION_GRANTED && grantResults[1] == PackageManager.PERMISSION_GRANTED) {
Log.d(TAG, "Camera permission granted - initialize the camera source");
// we have permission, so create the camerasource
createCameraSource();
return;
}
Log.e(TAG, "Permission not granted: results len = " + grantResults.length +
" Result code = " + (grantResults.length > 0 ? grantResults[0] : "(empty)"));
DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
finish();
}
};
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Face Tracker sample")
.setMessage(R.string.no_camera_permission)
.setPositiveButton(R.string.ok, listener)
.show();
}
//==============================================================================================
// Camera Source Preview
//==============================================================================================
/**
* Starts or restarts the camera source, if it exists. If the camera source doesn't exist yet
* (e.g., because onResume was called before the camera source was created), this will be called
* again when the camera source is created.
*/
private void startCameraSource() {
// check that the device has play services available.
int code = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(
getApplicationContext());
if (code != ConnectionResult.SUCCESS) {
Dialog dlg =
GoogleApiAvailability.getInstance().getErrorDialog(this, code, RC_HANDLE_GMS);
dlg.show();
}
if (mCameraSource != null) {
try {
mPreview.start(mCameraSource, mGraphicOverlay);
} catch (IOException e) {
Log.e(TAG, "Unable to start camera source.", e);
mCameraSource.release();
mCameraSource = null;
}
}
}
//==============================================================================================
// Graphic Face Tracker
//==============================================================================================
/**
* Factory for creating a face tracker to be associated with a new face. The multiprocessor
* uses this factory to create face trackers as needed -- one for each individual.
*/
private class GraphicFaceTrackerFactory implements MultiProcessor.Factory<Face> {
@Override
public Tracker<Face> create(Face face) {
return new GraphicFaceTracker(mGraphicOverlay);
}
}
/**
* Face tracker for each detected individual. This maintains a face graphic within the app's
* associated face overlay.
*/
private class GraphicFaceTracker extends Tracker<Face> {
private GraphicOverlay mOverlay;
private FaceGraphic mFaceGraphic;
GraphicFaceTracker(GraphicOverlay overlay) {
mOverlay = overlay;
mFaceGraphic = new FaceGraphic(overlay);
}
/**
* Start tracking the detected face instance within the face overlay.
*/
@Override
public void onNewItem(int faceId, Face item) {
mFaceGraphic.setId(faceId);
}
@Override
public void onUpdate(FaceDetector.Detections<Face> detectionResults, Face face) {
mOverlay.add(mFaceGraphic);
mFaceGraphic.updateFace(face);
}
/**
* Hide the graphic when the corresponding face was not detected. This can happen for
* intermediate frames temporarily (e.g., if the face was momentarily blocked from
* view).
*/
@Override
public void onMissing(FaceDetector.Detections<Face> detectionResults) {
mOverlay.remove(mFaceGraphic);
}
/**
* Called when the face is assumed to be gone for good. Remove the graphic annotation from
* the overlay.
*/
@Override
public void onDone() {
mOverlay.remove(mFaceGraphic);
}
}
private void captureImage() {
mPreview.setDrawingCacheEnabled(true);
final Bitmap drawingCache = mPreview.getDrawingCache();
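// Note: the View drawing-cache APIs are deprecated as of API 28; PixelCopy is
// the usual replacement on newer devices.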
mCameraSource.takePicture(null, new CameraSource.PictureCallback() {
@Override
public void onPictureTaken(byte[] bytes) {
int orientation = Exif.getOrientation(bytes);
Bitmap temp = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
Bitmap picture = rotateImage(temp,orientation);
Bitmap overlay = Bitmap.createBitmap(mGraphicOverlay.getWidth(),mGraphicOverlay.getHeight(),picture.getConfig());
Canvas canvas = new Canvas(overlay);
Matrix matrix = new Matrix();
matrix.setScale((float)overlay.getWidth()/(float)picture.getWidth(),(float)overlay.getHeight()/(float)picture.getHeight());
// mirror by inverting scale and translating
matrix.preScale(-1, 1);
matrix.postTranslate(canvas.getWidth(), 0);
Paint paint = new Paint();
canvas.drawBitmap(picture,matrix,paint);
canvas.drawBitmap(drawingCache,0,0,paint);
try {
String mainpath = getExternalStorageDirectory() + separator + "MaskIt" + separator + "images" + separator;
File basePath = new File(mainpath);
if (!basePath.exists())
Log.d("CAPTURE_BASE_PATH", basePath.mkdirs() ? "Success": "Failed");
String path = mainpath + "photo_" + getPhotoTime() + ".jpg";
File captureFile = new File(path);
captureFile.createNewFile();
if (!captureFile.exists())
Log.d("CAPTURE_FILE_PATH", captureFile.createNewFile() ? "Success": "Failed");
FileOutputStream stream = new FileOutputStream(captureFile);
overlay.compress(Bitmap.CompressFormat.JPEG, 100, stream);
stream.flush();
stream.close();
picture.recycle();
drawingCache.recycle();
mPreview.setDrawingCacheEnabled(false);
} catch (IOException e) {
e.printStackTrace();
}
}
private String getPhotoTime() {
DateFormat dateFormatter = new SimpleDateFormat("yyyyMMdd hhmmss");
dateFormatter.setLenient(false);
Date today = new Date();
String s = dateFormatter.format(today);
return s;
}
});
}
private Bitmap rotateImage(Bitmap bm, int i) {
Matrix matrix = new Matrix();
switch (i) {
case ExifInterface.ORIENTATION_NORMAL:
return bm;
case ExifInterface.ORIENTATION_FLIP_HORIZONTAL:
matrix.setScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_180:
matrix.setRotate(180);
break;
case ExifInterface.ORIENTATION_FLIP_VERTICAL:
matrix.setRotate(180);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_TRANSPOSE:
matrix.setRotate(90);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_90:
matrix.setRotate(90);
break;
case ExifInterface.ORIENTATION_TRANSVERSE:
matrix.setRotate(-90);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_270:
matrix.setRotate(-90);
break;
default:
return bm;
}
try {
Bitmap bmRotated = Bitmap.createBitmap(bm, 0, 0, bm.getWidth(), bm.getHeight(), matrix, true);
bm.recycle();
return bmRotated;
} catch (OutOfMemoryError e) {
e.printStackTrace();
return null;
}
}
}
CameraSourcePreview.java
package com.google.android.gms.samples.vision.face.facetracker.ui.camera;
import android.content.Context;
import android.content.res.Configuration;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.ViewGroup;
import com.google.android.gms.common.images.Size;
import com.google.android.gms.vision.CameraSource;
import java.io.IOException;
public class CameraSourcePreview extends ViewGroup {
private static final String TAG = "CameraSourcePreview";
private Context mContext;
private SurfaceView mSurfaceView;
private boolean mStartRequested;
private boolean mSurfaceAvailable;
private CameraSource mCameraSource;
private GraphicOverlay mOverlay;
public CameraSourcePreview(Context context, AttributeSet attrs) {
super(context, attrs);
mContext = context;
mStartRequested = false;
mSurfaceAvailable = false;
mSurfaceView = new SurfaceView(context);
mSurfaceView.getHolder().addCallback(new SurfaceCallback());
addView(mSurfaceView);
}
public void start(CameraSource cameraSource) throws IOException {
if (cameraSource == null) {
stop();
}
mCameraSource = cameraSource;
if (mCameraSource != null) {
mStartRequested = true;
startIfReady();
}
}
public void start(CameraSource cameraSource, GraphicOverlay overlay) throws IOException {
mOverlay = overlay;
start(cameraSource);
}
public void stop() {
if (mCameraSource != null) {
mCameraSource.stop();
}
}
public void release() {
if (mCameraSource != null) {
mCameraSource.release();
mCameraSource = null;
}
}
private void startIfReady() throws IOException {
if (mStartRequested && mSurfaceAvailable) {
mCameraSource.start(mSurfaceView.getHolder());
if (mOverlay != null) {
Size size = mCameraSource.getPreviewSize();
int min = Math.min(size.getWidth(), size.getHeight());
int max = Math.max(size.getWidth(), size.getHeight());
if (isPortraitMode()) {
// Swap width and height sizes when in portrait, since it will be rotated by
// 90 degrees
mOverlay.setCameraInfo(min, max, mCameraSource.getCameraFacing());
} else {
mOverlay.setCameraInfo(max, min, mCameraSource.getCameraFacing());
}
mOverlay.clear();
}
mStartRequested = false;
}
}
private class SurfaceCallback implements SurfaceHolder.Callback {
@Override
public void surfaceCreated(SurfaceHolder surface) {
mSurfaceAvailable = true;
try {
startIfReady();
} catch (IOException e) {
Log.e(TAG, "Could not start camera source.", e);
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surface) {
mSurfaceAvailable = false;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
int previewWidth = 320;
int previewHeight = 240;
if (mCameraSource != null) {
Size size = mCameraSource.getPreviewSize();
if (size != null) {
previewWidth = size.getWidth();
previewHeight = size.getHeight();
}
}
// Swap width and height sizes when in portrait, since it will be rotated 90 degrees
if (isPortraitMode()) {
int tmp = previewWidth;
previewWidth = previewHeight;
previewHeight = tmp;
}
final int viewWidth = right - left;
final int viewHeight = bottom - top;
int childWidth;
int childHeight;
int childXOffset = 0;
int childYOffset = 0;
float widthRatio = (float) viewWidth / (float) previewWidth;
float heightRatio = (float) viewHeight / (float) previewHeight;
// To fill the view with the camera preview, while also preserving the correct aspect ratio,
// it is usually necessary to slightly oversize the child and to crop off portions along one
// of the dimensions. We scale up based on the dimension requiring the most correction, and
// compute a crop offset for the other dimension.
if (widthRatio > heightRatio) {
childWidth = viewWidth;
childHeight = (int) ((float) previewHeight * widthRatio);
childYOffset = (childHeight - viewHeight) / 2;
} else {
childWidth = (int) ((float) previewWidth * heightRatio);
childHeight = viewHeight;
childXOffset = (childWidth - viewWidth) / 2;
}
for (int i = 0; i < getChildCount(); ++i) {
// One dimension will be cropped. We shift child over or up by this offset and adjust
// the size to maintain the proper aspect ratio.
getChildAt(i).layout(
-1 * childXOffset, -1 * childYOffset,
childWidth - childXOffset, childHeight - childYOffset);
}
try {
startIfReady();
} catch (IOException e) {
Log.e(TAG, "Could not start camera source.", e);
}
}
private boolean isPortraitMode() {
int orientation = mContext.getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
return false;
}
if (orientation == Configuration.ORIENTATION_PORTRAIT) {
return true;
}
Log.d(TAG, "isPortraitMode returning false by default");
return false;
}
}
GraphicOverlay.java
package com.google.android.gms.samples.vision.face.facetracker.ui.camera;
import android.content.Context;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.view.View;
import com.google.android.gms.vision.CameraSource;
import java.util.HashSet;
import java.util.Set;
public class GraphicOverlay extends View {
private final Object mLock = new Object();
private int mPreviewWidth;
private float mWidthScaleFactor = 1.0f;
private int mPreviewHeight;
private float mHeightScaleFactor = 1.0f;
private int mFacing = CameraSource.CAMERA_FACING_BACK;
private Set<Graphic> mGraphics = new HashSet<>();
public static abstract class Graphic {
private GraphicOverlay mOverlay;
public Graphic(GraphicOverlay overlay) {
mOverlay = overlay;
}
public abstract void draw(Canvas canvas);
public float scaleX(float horizontal) {
return horizontal * mOverlay.mWidthScaleFactor;
}
public float scaleY(float vertical) {
return vertical * mOverlay.mHeightScaleFactor;
}
public float translateX(float x) {
if (mOverlay.mFacing == CameraSource.CAMERA_FACING_FRONT) {
return mOverlay.getWidth() - scaleX(x);
} else {
return scaleX(x);
}
}
public float translateY(float y) {
return scaleY(y);
}
public void postInvalidate() {
mOverlay.postInvalidate();
}
}
public GraphicOverlay(Context context, AttributeSet attrs) {
super(context, attrs);
}
public void clear() {
synchronized (mLock) {
mGraphics.clear();
}
postInvalidate();
}
public void add(Graphic graphic) {
synchronized (mLock) {
mGraphics.add(graphic);
}
postInvalidate();
}
public void remove(Graphic graphic) {
synchronized (mLock) {
mGraphics.remove(graphic);
}
postInvalidate();
}
public void setCameraInfo(int previewWidth, int previewHeight, int facing) {
synchronized (mLock) {
mPreviewWidth = previewWidth;
mPreviewHeight = previewHeight;
mFacing = facing;
}
postInvalidate();
}
/**
* Draws the overlay with its associated graphic objects.
*/
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
synchronized (mLock) {
if ((mPreviewWidth != 0) && (mPreviewHeight != 0)) {
mWidthScaleFactor = (float) canvas.getWidth() / (float)mPreviewWidth;
mHeightScaleFactor = (float) canvas.getHeight() / (float) mPreviewHeight;
}
for (Graphic graphic : mGraphics) {
graphic.draw(canvas);
}
}
}
}
You should pass the captured image to the FaceDetector API and crop it afterwards.
fun getFace(context: Context, data: ByteArray): Bitmap? {
try {
val imageStream = ByteArrayInputStream(data)
var bitmap = BitmapFactory.decodeStream(imageStream)
if (bitmap.width > bitmap.height) {
val matrix = Matrix()
matrix.postRotate(270f)
if (bitmap.width > 1500) matrix.postScale(0.5f, 0.5f)
bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, true)
}
val faceDetector = FaceDetector.Builder(context).setProminentFaceOnly(true).setTrackingEnabled(false).build()
val frame = Frame.Builder().setBitmap(bitmap).build()
val faces = faceDetector.detect(frame)
var results: Bitmap? = null
for (i in 0 until faces.size()) {
val thisFace = faces.valueAt(i)
val x = thisFace.position.x
val y = thisFace.position.y
val x2 = x / 4 + thisFace.width
val y2 = y / 4 + thisFace.height
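// Note: Bitmap.createBitmap(source, x, y, width, height) — x2/y2 above are used as the crop width/height.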
results = Bitmap.createBitmap(bitmap, x.toInt(), y.toInt(), x2.toInt(), y2.toInt())
}
return results
} catch (e: Exception) {
Log.e("GET_FACE", e.message)
}
return null
}
Source
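If the rest of your project is Java, note that a top-level Kotlin function is exposed through a class named after its file. A rough usage sketch, assuming the function above sits in a file called FaceUtils.kt and you receive the JPEG bytes from a Camera.PictureCallback:
// Sketch only: FaceUtilsKt is the class name Kotlin generates for FaceUtils.kt;
// adjust it to your actual file name.
Camera.PictureCallback jpegCallback = new Camera.PictureCallback() {
    @Override
    public void onPictureTaken(byte[] data, Camera camera) {
        Bitmap face = FaceUtilsKt.getFace(getApplicationContext(), data);
        if (face != null) {
            // use the cropped face, e.g. display it in an ImageView or save it
        }
    }
};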
I am trying to build a simple app that launches the camera app and allows me to take a picture and save it to the device, but I keep getting this error.
java.lang.RuntimeException: Fail to connect to camera service
MainActivity
public class MainActivity extends AppCompatActivity
{
private static final String TAG = MainActivity.class.getName();
Preview preview;
private Camera camera;
private int cameraId;
@Override
protected void onCreate(Bundle savedInstanceState)
{
final Camera.PictureCallback jpegCallback;
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_main);
preview = new Preview(this, (SurfaceView)findViewById(R.id.surfaceView));
preview.setLayoutParams(new ActionBar.LayoutParams(ActionBar.LayoutParams.MATCH_PARENT, ActionBar.LayoutParams.MATCH_PARENT));
((FrameLayout) findViewById(R.id.activity_main)).addView(preview);
preview.setKeepScreenOn(true);
jpegCallback = new Camera.PictureCallback()
{
public void onPictureTaken(final byte[] data,
final Camera camera)
{
new SaveImageTask().execute(data);
resetCam();
Log.d(TAG, "onPictureTaken - jpeg");
}
};
preview.setOnClickListener(new View.OnClickListener()
{
@Override
public void onClick(View view)
{
if(camera != null)
{
camera.takePicture(null,
// ShutterCallback shutter
null,
// PictureCallback raw,
null,
// PictureCallback postview,
jpegCallback); // PictureCallback jpeg);
}
}
});
}
@Override
protected void onResume()
{
super.onResume();
getCamera(CameraInfo.CAMERA_FACING_BACK);
if(camera != null)
{
camera.startPreview();
preview.setCamera(camera);
}
}
@Override
protected void onPause()
{
if(camera != null)
{
camera.stopPreview();
preview.setCamera(null);
camera.release();
camera = null;
}
super.onPause();
}
private void getCamera(final int desiredCamera)
{
if(!getPackageManager()
.hasSystemFeature(PackageManager.FEATURE_CAMERA))
{
Toast.makeText(this, "No camera on this device", Toast.LENGTH_LONG)
.show();
}
else
{
cameraId = findCamera(desiredCamera);
if(cameraId < 0)
{
Toast.makeText(this, "Camera no found.",
Toast.LENGTH_LONG).show();
}
else
{
Log.d(TAG, Integer.toString(cameraId));
camera = Camera.open(cameraId);
camera.setDisplayOrientation(90);
}
}
}
private int findCamera(final int desiredCamera)
{
final int numberOfCameras;
int cameraId;
numberOfCameras = Camera.getNumberOfCameras();
cameraId = -1;
for(int i = 0; i < numberOfCameras; i++)
{
final CameraInfo info;
info = new CameraInfo();
Camera.getCameraInfo(i, info);
if(info.facing == desiredCamera)
{
Log.d(TAG, "Camera found");
cameraId = i;
break;
}
}
return (cameraId);
}
private void resetCam()
{
camera.startPreview();
preview.setCamera(camera);
}
private void refreshGallery(final File file)
{
final Intent mediaScanIntent;
mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
mediaScanIntent.setData(Uri.fromFile(file));
sendBroadcast(mediaScanIntent);
}
private class SaveImageTask
extends AsyncTask<byte[], Void, Void>
{
@Override
protected Void doInBackground(final byte[]... data)
{
FileOutputStream outStream;
final File sdCard;
final File dir;
final String fileName;
final File outFile;
sdCard = Environment.getExternalStorageDirectory();
dir = new File (sdCard.getAbsolutePath() + "/camtest");
dir.mkdirs();
fileName = String.format("%d.jpg", System.currentTimeMillis());
outFile = new File(dir, fileName);
outStream = null;
try
{
outStream = new FileOutputStream(outFile);
outStream.write(data[0]);
outStream.flush();
Log.d(TAG, "onPictureTaken - wrote bytes: " + data.length + " to " + outFile.getAbsolutePath());
refreshGallery(outFile);
}
catch(final FileNotFoundException ex)
{
Log.e(TAG, "File not found", ex);
}
catch(final IOException ex)
{
Log.e(TAG, "IOException", ex);
}
finally
{
try
{
if(outStream != null)
{
outStream.close();
}
}
catch(final IOException ex)
{
Log.e(TAG, "IOException", ex);
}
}
return null;
}
}
}
Preview
class Preview
extends ViewGroup
implements Callback
{
private final String TAG = Preview.class.getName();
private SurfaceView surfaceView;
private SurfaceHolder holder;
private Camera.Size previewSize;
private List<Camera.Size> supportedPreviewSizes;
private Camera camera;
Preview(final Context context,
final SurfaceView sv)
{
super(context);
surfaceView = sv;
holder = surfaceView.getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void setCamera(Camera camera)
{
this.camera = camera;
if(this.camera != null)
{
final Camera.Parameters params;
final List<String> focusModes;
supportedPreviewSizes = this.camera.getParameters().getSupportedPreviewSizes();
requestLayout();
params = this.camera.getParameters();
focusModes = params.getSupportedFocusModes();
if(focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
this.camera.setParameters(params);
}
}
}
@Override
protected void onMeasure(final int widthMeasureSpec,
final int heightMeasureSpec)
{
// We purposely disregard child measurements because we act as a
// wrapper to a SurfaceView that centers the camera preview instead
// of stretching it.
final int width;
final int height;
width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec);
height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
setMeasuredDimension(width, height);
if(supportedPreviewSizes != null)
{
previewSize = getOptimalPreviewSize(supportedPreviewSizes, width, height);
}
}
@Override
protected void onLayout(boolean changed,
int l,
int t,
int r,
int b)
{
if (changed && getChildCount() > 0)
{
final View child = getChildAt(0);
final int width = r - l;
final int height = b - t;
int previewWidth = width;
int previewHeight = height;
if (previewSize != null)
{
previewWidth = previewSize.width;
previewHeight = previewSize.height;
}
// Center the child SurfaceView within the parent.
if (width * previewHeight > height * previewWidth)
{
final int scaledChildWidth = previewWidth * height / previewHeight;
child.layout((width - scaledChildWidth) / 2, 0,
(width + scaledChildWidth) / 2, height);
}
else
{
final int scaledChildHeight = previewHeight * width / previewWidth;
child.layout(0, (height - scaledChildHeight) / 2,
width, (height + scaledChildHeight) / 2);
}
}
}
public void surfaceCreated(SurfaceHolder holder)
{
// The Surface has been created, acquire the camera and tell it where
// to draw.
try
{
if(camera != null)
{
camera.setPreviewDisplay(holder);
}
}
catch (IOException exception)
{
Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
}
}
public void surfaceDestroyed(SurfaceHolder holder)
{
// Surface will be destroyed when we return, so stop the preview.
if(camera != null)
{
camera.stopPreview();
}
}
private Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes,
int w,
int h)
{
final double ASPECT_TOLERANCE = 0.1;
double targetRatio = (double) w / h;
if (sizes == null) return null;
Camera.Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
// Try to find a size that matches both the aspect ratio and the target size
for (Camera.Size size : sizes)
{
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
// Cannot find a size matching the aspect ratio; ignore the requirement
if (optimalSize == null)
{
minDiff = Double.MAX_VALUE;
for (Camera.Size size : sizes)
{
if (Math.abs(size.height - targetHeight) < minDiff)
{
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
public void surfaceChanged(SurfaceHolder holder,
int format,
int width,
int height)
{
if(camera != null)
{
Camera.Parameters parameters = camera.getParameters();
parameters.setPreviewSize(previewSize.width, previewSize.height);
requestLayout();
camera.setParameters(parameters);
camera.startPreview();
}
}
}
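"Fail to connect to camera service" almost always means the camera could not be acquired: either another app (or a leaked instance of your own, if release() was skipped) is still holding it, or the CAMERA permission is missing. Check that your manifest declares <uses-permission android:name="android.permission.CAMERA" />, and on Android 6.0 and later also request the permission at runtime before calling Camera.open(). A minimal sketch of that runtime check, using the support library's ContextCompat/ActivityCompat (the request code is an arbitrary constant):
// Minimal sketch: guard Camera.open() behind a runtime permission check (API 23+).
private static final int REQUEST_CAMERA_PERMISSION = 100; // arbitrary request code
private void openCameraIfPermitted() {
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(this,
                new String[] { Manifest.permission.CAMERA },
                REQUEST_CAMERA_PERMISSION);
    } else {
        getCamera(CameraInfo.CAMERA_FACING_BACK); // safe to open the camera now
    }
}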
I have looked in several places, including on here, and the answers I found did not help. I also looked through the code of the template I am using and did not see where I went wrong, so I am sorry if this seems like a bad question. I am making a watch face and am trying to extend my Engine from WeatherWatchFaceEngine, but when I do this, I get the error in the title of this question (no default constructor available). What have I done wrong?
This is the code of the watch face:
public class NimbusSplashAnalog extends WeatherWatchFaceService {
/**
* Update rate in milliseconds for interactive mode. We update once a second to advance the
* second hand.
*/
private static final long INTERACTIVE_UPDATE_RATE_MS = TimeUnit.SECONDS.toMillis(1);
/**
* Handler message id for updating the time periodically in interactive mode.
*/
private static final int MSG_UPDATE_TIME = 0;
@Override
public Engine onCreateEngine() {
return new Engine();
}
private class Engine extends WeatherWatchFaceEngine {
Paint mBackgroundPaint;
Paint mHandPaint;
boolean mAmbient;
Time mTime;
final Handler mUpdateTimeHandler = new EngineHandler(this);
final BroadcastReceiver mTimeZoneReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
mTime.clear(intent.getStringExtra("time-zone"));
mTime.setToNow();
}
};
boolean mRegisteredTimeZoneReceiver = false;
/**
* Whether the display supports fewer bits for each color in ambient mode. When true, we
* disable anti-aliasing in ambient mode.
*/
boolean mLowBitAmbient;
@Override
public void onCreate(SurfaceHolder holder) {
super.onCreate(holder);
setWatchFaceStyle(new WatchFaceStyle.Builder(NimbusSplashAnalog.this)
.setCardPeekMode(WatchFaceStyle.PEEK_MODE_SHORT)
.setBackgroundVisibility(WatchFaceStyle.BACKGROUND_VISIBILITY_INTERRUPTIVE)
.setShowSystemUiTime(false)
.build());
Resources resources = NimbusSplashAnalog.this.getResources();
mBackgroundPaint = new Paint();
mBackgroundPaint.setColor(resources.getColor(R.color.analog_background));
mHandPaint = new Paint();
mHandPaint.setColor(resources.getColor(R.color.analog_hands));
mHandPaint.setStrokeWidth(resources.getDimension(R.dimen.analog_hand_stroke));
mHandPaint.setAntiAlias(true);
mHandPaint.setStrokeCap(Paint.Cap.ROUND);
mTime = new Time();
}
@Override
public void onDestroy() {
mUpdateTimeHandler.removeMessages(MSG_UPDATE_TIME);
super.onDestroy();
}
@Override
public void onPropertiesChanged(Bundle properties) {
super.onPropertiesChanged(properties);
mLowBitAmbient = properties.getBoolean(PROPERTY_LOW_BIT_AMBIENT, false);
}
@Override
public void onTimeTick() {
super.onTimeTick();
invalidate();
}
@Override
public void onAmbientModeChanged(boolean inAmbientMode) {
super.onAmbientModeChanged(inAmbientMode);
if (mAmbient != inAmbientMode) {
mAmbient = inAmbientMode;
if (mLowBitAmbient) {
mHandPaint.setAntiAlias(!inAmbientMode);
}
invalidate();
}
// Whether the timer should be running depends on whether we're visible (as well as
// whether we're in ambient mode), so we may need to start or stop the timer.
updateTimer();
}
@Override
public void onDraw(Canvas canvas, Rect bounds) {
mTime.setToNow();
int width = bounds.width();
int height = bounds.height();
// Draw the background.
canvas.drawRect(0, 0, canvas.getWidth(), canvas.getHeight(), mBackgroundPaint);
// Find the center. Ignore the window insets so that, on round watches with a
// "chin", the watch face is centered on the entire screen, not just the usable
// portion.
float centerX = width / 2f;
float centerY = height / 2f;
float secRot = mTime.second / 30f * (float) Math.PI;
int minutes = mTime.minute;
float minRot = minutes / 30f * (float) Math.PI;
float hrRot = ((mTime.hour + (minutes / 60f)) / 6f) * (float) Math.PI;
float secLength = centerX - 20;
float minLength = centerX - 40;
float hrLength = centerX - 80;
if (!mAmbient) {
float secX = (float) Math.sin(secRot) * secLength;
float secY = (float) -Math.cos(secRot) * secLength;
canvas.drawLine(centerX, centerY, centerX + secX, centerY + secY, mHandPaint);
}
float minX = (float) Math.sin(minRot) * minLength;
float minY = (float) -Math.cos(minRot) * minLength;
canvas.drawLine(centerX, centerY, centerX + minX, centerY + minY, mHandPaint);
float hrX = (float) Math.sin(hrRot) * hrLength;
float hrY = (float) -Math.cos(hrRot) * hrLength;
canvas.drawLine(centerX, centerY, centerX + hrX, centerY + hrY, mHandPaint);
}
@Override
public void onVisibilityChanged(boolean visible) {
super.onVisibilityChanged(visible);
if (visible) {
registerReceiver();
// Update time zone in case it changed while we weren't visible.
mTime.clear(TimeZone.getDefault().getID());
mTime.setToNow();
} else {
unregisterReceiver();
}
// Whether the timer should be running depends on whether we're visible (as well as
// whether we're in ambient mode), so we may need to start or stop the timer.
updateTimer();
}
private void registerReceiver() {
if (mRegisteredTimeZoneReceiver) {
return;
}
mRegisteredTimeZoneReceiver = true;
IntentFilter filter = new IntentFilter(Intent.ACTION_TIMEZONE_CHANGED);
NimbusSplashAnalog.this.registerReceiver(mTimeZoneReceiver, filter);
}
private void unregisterReceiver() {
if (!mRegisteredTimeZoneReceiver) {
return;
}
mRegisteredTimeZoneReceiver = false;
NimbusSplashAnalog.this.unregisterReceiver(mTimeZoneReceiver);
}
/**
* Starts the {@link #mUpdateTimeHandler} timer if it should be running and isn't currently
* or stops it if it shouldn't be running but currently is.
*/
private void updateTimer() {
mUpdateTimeHandler.removeMessages(MSG_UPDATE_TIME);
if (shouldTimerBeRunning()) {
mUpdateTimeHandler.sendEmptyMessage(MSG_UPDATE_TIME);
}
}
/**
* Returns whether the {@link #mUpdateTimeHandler} timer should be running. The timer should
* only run when we're visible and in interactive mode.
*/
private boolean shouldTimerBeRunning() {
return isVisible() && !isInAmbientMode();
}
/**
* Handle updating the time periodically in interactive mode.
*/
private void handleUpdateTimeMessage() {
invalidate();
if (shouldTimerBeRunning()) {
long timeMs = System.currentTimeMillis();
long delayMs = INTERACTIVE_UPDATE_RATE_MS
- (timeMs % INTERACTIVE_UPDATE_RATE_MS);
mUpdateTimeHandler.sendEmptyMessageDelayed(MSG_UPDATE_TIME, delayMs);
}
}
}
private static class EngineHandler extends Handler {
private final WeakReference<NimbusSplashAnalog.Engine> mWeakReference;
public EngineHandler(NimbusSplashAnalog.Engine reference) {
mWeakReference = new WeakReference<>(reference);
}
@Override
public void handleMessage(Message msg) {
NimbusSplashAnalog.Engine engine = mWeakReference.get();
if (engine != null) {
switch (msg.what) {
case MSG_UPDATE_TIME:
engine.handleUpdateTimeMessage();
break;
}
}
}
}
}
This is the code of the Engine/Service that I am extending from:
public abstract class WeatherWatchFaceService extends CanvasWatchFaceService {
public class WeatherWatchFaceEngine extends CanvasWatchFaceService.Engine
implements
GoogleApiClient.ConnectionCallbacks,
GoogleApiClient.OnConnectionFailedListener,
DataApi.DataListener, NodeApi.NodeListener {
protected static final int MSG_UPDATE_TIME = 0;
protected long UPDATE_RATE_MS;
protected static final long WEATHER_INFO_TIME_OUT = DateUtils.HOUR_IN_MILLIS * 6;
protected final BroadcastReceiver mTimeZoneReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
//Time zone changed
mWeatherInfoReceivedTime = 0;
mTime.clear(intent.getStringExtra("time-zone"));
mTime.setToNow();
}
};
/**
* Handler to update the time periodically in interactive mode.
*/
protected final Handler mUpdateTimeHandler = new Handler() {
@Override
public void handleMessage(Message message) {
switch (message.what) {
case MSG_UPDATE_TIME:
invalidate();
if (shouldUpdateTimerBeRunning()) {
long timeMs = System.currentTimeMillis();
long delayMs = UPDATE_RATE_MS - (timeMs % UPDATE_RATE_MS);
mUpdateTimeHandler.sendEmptyMessageDelayed(MSG_UPDATE_TIME, delayMs);
requireWeatherInfo();
}
break;
}
}
};
protected int mTheme = 3;
protected int mTimeUnit = ConverterUtil.TIME_UNIT_12;
protected AssetManager mAsserts;
protected Bitmap mWeatherConditionDrawable;
protected GoogleApiClient mGoogleApiClient;
protected Paint mBackgroundPaint;
protected Paint mDatePaint;
protected Paint mDateSuffixPaint;
protected Paint mDebugInfoPaint;
protected Paint mTemperatureBorderPaint;
protected Paint mTemperaturePaint;
protected Paint mTemperatureSuffixPaint;
protected Paint mTimePaint;
protected Resources mResources;
protected String mWeatherCondition;
protected String mWeatherConditionResourceName;
protected Time mSunriseTime;
protected Time mSunsetTime;
protected Time mTime;
protected boolean isRound;
protected boolean mLowBitAmbient;
protected boolean mRegisteredService = false;
protected int mBackgroundColor;
protected int mBackgroundDefaultColor;
protected int mRequireInterval;
protected int mTemperature = Integer.MAX_VALUE;
protected int mTemperatureScale;
protected long mWeatherInfoReceivedTime;
protected long mWeatherInfoRequiredTime;
private String mName;
public WeatherWatchFaceEngine(String name) {
mName = name;
mGoogleApiClient = new GoogleApiClient.Builder(WeatherWatchFaceService.this)
.addConnectionCallbacks(this)
.addOnConnectionFailedListener(this)
.addApi(Wearable.API)
.build();
}
@Override
public void onConnected(Bundle bundle) {
log("Connected: " + bundle);
getConfig();
Wearable.NodeApi.addListener(mGoogleApiClient, this);
Wearable.DataApi.addListener(mGoogleApiClient, this);
requireWeatherInfo();
}
@Override
public void onConnectionSuspended(int i) {
log("ConnectionSuspended: " + i);
}
@Override
public void onDataChanged(DataEventBuffer dataEvents) {
for (int i = 0; i < dataEvents.getCount(); i++) {
DataEvent event = dataEvents.get(i);
DataMap dataMap = DataMap.fromByteArray(event.getDataItem().getData());
log("onDataChanged: " + dataMap);
fetchConfig(dataMap);
}
}
@Override
public void onPeerConnected(Node node) {
log("PeerConnected: " + node);
requireWeatherInfo();
}
@Override
public void onPeerDisconnected(Node node) {
log("PeerDisconnected: " + node);
}
@Override
public void onConnectionFailed(ConnectionResult connectionResult) {
log("ConnectionFailed: " + connectionResult);
}
@Override
public void onCreate(SurfaceHolder holder) {
super.onCreate(holder);
setWatchFaceStyle(new WatchFaceStyle.Builder(WeatherWatchFaceService.this)
.setCardPeekMode(WatchFaceStyle.PEEK_MODE_SHORT)
.setAmbientPeekMode(WatchFaceStyle.AMBIENT_PEEK_MODE_HIDDEN)
.setBackgroundVisibility(WatchFaceStyle.BACKGROUND_VISIBILITY_INTERRUPTIVE)
.setShowSystemUiTime(false)
.build());
mResources = WeatherWatchFaceService.this.getResources();
mAsserts = WeatherWatchFaceService.this.getAssets();
mDebugInfoPaint = new Paint();
mDebugInfoPaint.setColor(Color.parseColor("White"));
mDebugInfoPaint.setTextSize(20);
mDebugInfoPaint.setAntiAlias(true);
mTime = new Time();
mSunriseTime = new Time();
mSunsetTime = new Time();
mRequireInterval = mResources.getInteger(R.integer.weather_default_require_interval);
mWeatherInfoRequiredTime = System.currentTimeMillis() - (DateUtils.SECOND_IN_MILLIS * 58);
mGoogleApiClient.connect();
}
@Override
public void onDestroy() {
log("Destroy");
mUpdateTimeHandler.removeMessages(MSG_UPDATE_TIME);
super.onDestroy();
}
@Override
public void onInterruptionFilterChanged(int interruptionFilter) {
super.onInterruptionFilterChanged(interruptionFilter);
log("onInterruptionFilterChanged: " + interruptionFilter);
}
@Override
public void onPropertiesChanged(Bundle properties) {
super.onPropertiesChanged(properties);
mLowBitAmbient = properties.getBoolean(WatchFaceService.PROPERTY_LOW_BIT_AMBIENT, false);
log("onPropertiesChanged: LowBitAmbient=" + mLowBitAmbient);
}
@Override
public void onTimeTick() {
super.onTimeTick();
log("TimeTick");
invalidate();
requireWeatherInfo();
}
@Override
public void onVisibilityChanged(boolean visible) {
super.onVisibilityChanged(visible);
log("onVisibilityChanged: " + visible);
if (visible) {
mGoogleApiClient.connect();
registerTimeZoneService();
// Update time zone in case it changed while we weren't visible.
mTime.clear(TimeZone.getDefault().getID());
mTime.setToNow();
} else {
if (mGoogleApiClient != null && mGoogleApiClient.isConnected()) {
Wearable.DataApi.removeListener(mGoogleApiClient, this);
Wearable.NodeApi.removeListener(mGoogleApiClient, this);
mGoogleApiClient.disconnect();
}
unregisterTimeZoneService();
}
// Whether the timer should be running depends on whether we're visible (as well as
// whether we're in ambient mode), so we may need to start or stop the timer.
updateTimer();
}
protected Paint createTextPaint(int color, Typeface typeface) {
Paint paint = new Paint();
paint.setColor(color);
if (typeface != null)
paint.setTypeface(typeface);
paint.setAntiAlias(true);
return paint;
}
protected boolean shouldUpdateTimerBeRunning() {
return isVisible() && !isInAmbientMode();
}
protected void fetchConfig(DataMap config) {
if (config.containsKey(Consts.KEY_WEATHER_UPDATE_TIME)) {
mWeatherInfoReceivedTime = config.getLong(Consts.KEY_WEATHER_UPDATE_TIME);
}
if (config.containsKey(Consts.KEY_WEATHER_CONDITION)) {
String cond = config.getString(Consts.KEY_WEATHER_CONDITION);
if (TextUtils.isEmpty(cond)) {
mWeatherCondition = null;
} else {
mWeatherCondition = cond;
}
}
if (config.containsKey(Consts.KEY_WEATHER_TEMPERATURE)) {
mTemperature = config.getInt(Consts.KEY_WEATHER_TEMPERATURE);
if (mTemperatureScale != ConverterUtil.FAHRENHEIT) {
mTemperature = ConverterUtil.convertFahrenheitToCelsius(mTemperature);
}
}
if (config.containsKey(Consts.KEY_WEATHER_SUNRISE)) {
mSunriseTime.set(config.getLong(Consts.KEY_WEATHER_SUNRISE) * 1000);
log("SunriseTime: " + mSunriseTime);
}
if (config.containsKey(Consts.KEY_WEATHER_SUNSET)) {
mSunsetTime.set(config.getLong(Consts.KEY_WEATHER_SUNSET) * 1000);
log("SunsetTime: " + mSunsetTime);
}
if (config.containsKey(Consts.KEY_CONFIG_TEMPERATURE_SCALE)) {
int scale = config.getInt(Consts.KEY_CONFIG_TEMPERATURE_SCALE);
if (scale != mTemperatureScale) {
if (scale == ConverterUtil.FAHRENHEIT) {
mTemperature = ConverterUtil.convertCelsiusToFahrenheit(mTemperature);
} else {
mTemperature = ConverterUtil.convertFahrenheitToCelsius(mTemperature);
}
}
mTemperatureScale = scale;
}
if (config.containsKey(Consts.KEY_CONFIG_THEME)) {
mTheme = config.getInt(Consts.KEY_CONFIG_THEME);
}
if (config.containsKey(Consts.KEY_CONFIG_TIME_UNIT)) {
mTimeUnit = config.getInt(Consts.KEY_CONFIG_TIME_UNIT);
}
if (config.containsKey(Consts.KEY_CONFIG_REQUIRE_INTERVAL)) {
mRequireInterval = config.getInt(Consts.KEY_CONFIG_REQUIRE_INTERVAL);
}
invalidate();
}
protected void getConfig() {
log("Start getting Config");
Wearable.NodeApi.getLocalNode(mGoogleApiClient).setResultCallback(new ResultCallback<NodeApi.GetLocalNodeResult>() {
@Override
public void onResult(NodeApi.GetLocalNodeResult getLocalNodeResult) {
Uri uri = new Uri.Builder()
.scheme("wear")
.path(Consts.PATH_CONFIG + mName)
.authority(getLocalNodeResult.getNode().getId())
.build();
getConfig(uri);
uri = new Uri.Builder()
.scheme("wear")
.path(Consts.PATH_WEATHER_INFO)
.authority(getLocalNodeResult.getNode().getId())
.build();
getConfig(uri);
}
});
}
protected void getConfig(Uri uri) {
Wearable.DataApi.getDataItem(mGoogleApiClient, uri)
.setResultCallback(
new ResultCallback<DataApi.DataItemResult>() {
@Override
public void onResult(DataApi.DataItemResult dataItemResult) {
log("Finish Config: " + dataItemResult.getStatus());
if (dataItemResult.getStatus().isSuccess() && dataItemResult.getDataItem() != null) {
fetchConfig(DataMapItem.fromDataItem(dataItemResult.getDataItem()).getDataMap());
}
}
}
);
}
protected void log(String message) {
Log.d(WeatherWatchFaceService.this.getClass().getSimpleName(), message);
}
protected void registerTimeZoneService() {
//TimeZone
if (mRegisteredService) {
return;
}
mRegisteredService = true;
// TimeZone
IntentFilter filter = new IntentFilter(Intent.ACTION_TIMEZONE_CHANGED);
WeatherWatchFaceService.this.registerReceiver(mTimeZoneReceiver, filter);
}
protected void requireWeatherInfo() {
if (!mGoogleApiClient.isConnected())
return;
long timeMs = System.currentTimeMillis();
// The weather info is still up to date.
if ((timeMs - mWeatherInfoReceivedTime) <= mRequireInterval)
return;
// Try once in a min.
if ((timeMs - mWeatherInfoRequiredTime) <= DateUtils.MINUTE_IN_MILLIS)
return;
mWeatherInfoRequiredTime = timeMs;
Wearable.MessageApi.sendMessage(mGoogleApiClient, "", Consts.PATH_WEATHER_REQUIRE, null)
.setResultCallback(new ResultCallback<MessageApi.SendMessageResult>() {
@Override
public void onResult(MessageApi.SendMessageResult sendMessageResult) {
log("SendRequireMessage:" + sendMessageResult.getStatus());
}
});
}
protected void unregisterTimeZoneService() {
if (!mRegisteredService) {
return;
}
mRegisteredService = false;
//TimeZone
WeatherWatchFaceService.this.unregisterReceiver(mTimeZoneReceiver);
}
protected void updateTimer() {
mUpdateTimeHandler.removeMessages(MSG_UPDATE_TIME);
if (shouldUpdateTimerBeRunning()) {
mUpdateTimeHandler.sendEmptyMessage(MSG_UPDATE_TIME);
}
}
}
}
Any help would be GREATLY appreciated :) (This is also my first real attempt at making a watch face, so please forgive me.)
A default constructor accepts zero arguments. You have only a one-argument constructor in WeatherWatchFaceEngine, so the implicit super() call in your Engine subclass cannot be resolved.
Add a zero-argument constructor to WeatherWatchFaceEngine, whose signature will be public WeatherWatchFaceEngine() {...}
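A minimal sketch of that fix (the default name passed through is a placeholder assumption, not taken from your code):
// Delegate to the existing one-argument constructor with a placeholder default name.
public WeatherWatchFaceEngine() {
    this("Analog");
}
Alternatively, keep the one-argument constructor and have your Engine call super("Analog") explicitly from a constructor of its own.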
How can I pass the QR result ("SCAN_RESULT") to a different activity? This is my problem.
From the splash screen I need to go to the QR scanner (CaptureActivity), and then to ActivityQR, which will send the info to SQLite.
The problem is: when I get the QR code as a string ("SCAN_RESULT") in CaptureActivity.java, the app closes, because it needs to go "back" to the previous activity with this:
getIntent();
I tried to modify this code line, but it did not solve my problem.
How can I get the "SCAN_RESULT" into ActivityQR? I don't know how to open another activity (in this case ActivityQR) once the code has been read, and how to put this String value into ActivityQR.
I use the "CaptureActivity" from ZXing (Zebra Crossing).
Thanks in advance.
SplashScreen.java :
{
Intent mainIntent = new Intent().setClass(SplashScreenActivity.this, com.google.zxing.client.android.CaptureActivity.class);
startActivity(mainIntent);
}
ActivityQR.java
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data){
if(REQUEST_CODE == requestCode && RESULT_OK == resultCode){
txResult.setText(data.getStringExtra("SCAN_RESULT"));
}
}
CaptureActivity.java
public class CaptureActivity extends Activity implements SurfaceHolder.Callback {
private static final String TAG = CaptureActivity.class.getSimpleName();
private static final long DEFAULT_INTENT_RESULT_DURATION_MS = 1500L;
private static final long BULK_MODE_SCAN_DELAY_MS = 1000L;
private static final String[] ZXING_URLS = { "http://zxing.appspot.com/scan", "zxing://scan/" };
public static final int HISTORY_REQUEST_CODE = 0x0000bacc;
private static final Collection<ResultMetadataType> DISPLAYABLE_METADATA_TYPES =
EnumSet.of(ResultMetadataType.ISSUE_NUMBER,
ResultMetadataType.SUGGESTED_PRICE,
ResultMetadataType.ERROR_CORRECTION_LEVEL,
ResultMetadataType.POSSIBLE_COUNTRY);
private CameraManager cameraManager;
private CaptureActivityHandler handler;
private Result savedResultToShow;
private ViewfinderView viewfinderView;
private TextView statusView;
private View resultView;
private Result lastResult;
private boolean hasSurface;
private boolean copyToClipboard;
private IntentSource source;
private String sourceUrl;
private ScanFromWebPageManager scanFromWebPageManager;
private Collection<BarcodeFormat> decodeFormats;
private Map<DecodeHintType,?> decodeHints;
private String characterSet;
private HistoryManager historyManager;
private InactivityTimer inactivityTimer;
private BeepManager beepManager;
private AmbientLightManager ambientLightManager;
ViewfinderView getViewfinderView() {
return viewfinderView;
}
public Handler getHandler() {
return handler;
}
CameraManager getCameraManager() {
return cameraManager;
}
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
Window window = getWindow();
window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.capture);
hasSurface = false;
historyManager = new HistoryManager(this);
historyManager.trimHistory();
inactivityTimer = new InactivityTimer(this);
beepManager = new BeepManager(this);
ambientLightManager = new AmbientLightManager(this);
PreferenceManager.setDefaultValues(this, R.xml.preferences, false);
}
@Override
protected void onResume() {
super.onResume();
// CameraManager must be initialized here, not in onCreate(). This is necessary because we don't
// want to open the camera driver and measure the screen size if we're going to show the help on
// first launch. That led to bugs where the scanning rectangle was the wrong size and partially
// off screen.
cameraManager = new CameraManager(getApplication());
viewfinderView = (ViewfinderView) findViewById(R.id.viewfinder_view);
viewfinderView.setCameraManager(cameraManager);
resultView = findViewById(R.id.result_view);
statusView = (TextView) findViewById(R.id.status_view);
handler = null;
lastResult = null;
resetStatusView();
SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
SurfaceHolder surfaceHolder = surfaceView.getHolder();
if (hasSurface) {
// The activity was paused but not stopped, so the surface still exists. Therefore
// surfaceCreated() won't be called, so init the camera here.
initCamera(surfaceHolder);
} else {
// Install the callback and wait for surfaceCreated() to init the camera.
surfaceHolder.addCallback(this);
}
beepManager.updatePrefs();
ambientLightManager.start(cameraManager);
inactivityTimer.onResume();
Intent intent = getIntent();
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
copyToClipboard = prefs.getBoolean(PreferencesActivity.KEY_COPY_TO_CLIPBOARD, true)
&& (intent == null || intent.getBooleanExtra(Intents.Scan.SAVE_HISTORY, true));
source = IntentSource.NONE;
decodeFormats = null;
characterSet = null;
if (intent != null) {
String action = intent.getAction();
String dataString = intent.getDataString();
if (Intents.Scan.ACTION.equals(action)) {
// Scan the formats the intent requested, and return the result to the calling activity.
source = IntentSource.NATIVE_APP_INTENT;
decodeFormats = DecodeFormatManager.parseDecodeFormats(intent);
decodeHints = DecodeHintManager.parseDecodeHints(intent);
if (intent.hasExtra(Intents.Scan.WIDTH) && intent.hasExtra(Intents.Scan.HEIGHT)) {
int width = intent.getIntExtra(Intents.Scan.WIDTH, 0);
int height = intent.getIntExtra(Intents.Scan.HEIGHT, 0);
if (width > 0 && height > 0) {
cameraManager.setManualFramingRect(width, height);
}
}
String customPromptMessage = intent.getStringExtra(Intents.Scan.PROMPT_MESSAGE);
if (customPromptMessage != null) {
statusView.setText(customPromptMessage);
}
} else if (dataString != null &&
dataString.contains("http://www.google") &&
dataString.contains("/m/products/scan")) {
// Scan only products and send the result to mobile Product Search.
source = IntentSource.PRODUCT_SEARCH_LINK;
sourceUrl = dataString;
decodeFormats = DecodeFormatManager.PRODUCT_FORMATS;
} else if (isZXingURL(dataString)) {
// Scan formats requested in query string (all formats if none specified).
// If a return URL is specified, send the results there. Otherwise, handle it ourselves.
source = IntentSource.ZXING_LINK;
sourceUrl = dataString;
Uri inputUri = Uri.parse(dataString);
scanFromWebPageManager = new ScanFromWebPageManager(inputUri);
decodeFormats = DecodeFormatManager.parseDecodeFormats(inputUri);
// Allow a sub-set of the hints to be specified by the caller.
decodeHints = DecodeHintManager.parseDecodeHints(inputUri);
}
characterSet = intent.getStringExtra(Intents.Scan.CHARACTER_SET);
}
}
private static boolean isZXingURL(String dataString) {
if (dataString == null) {
return false;
}
for (String url : ZXING_URLS) {
if (dataString.startsWith(url)) {
return true;
}
}
return false;
}
@Override
protected void onPause() {
if (handler != null) {
handler.quitSynchronously();
handler = null;
}
inactivityTimer.onPause();
ambientLightManager.stop();
cameraManager.closeDriver();
if (!hasSurface) {
SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
SurfaceHolder surfaceHolder = surfaceView.getHolder();
surfaceHolder.removeCallback(this);
}
super.onPause();
}
@Override
protected void onDestroy() {
inactivityTimer.shutdown();
super.onDestroy();
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
switch (keyCode) {
case KeyEvent.KEYCODE_BACK:
if (source == IntentSource.NATIVE_APP_INTENT) {
setResult(RESULT_CANCELED);
finish();
return true;
}
if ((source == IntentSource.NONE || source == IntentSource.ZXING_LINK) && lastResult != null) {
restartPreviewAfterDelay(0L);
return true;
}
break;
case KeyEvent.KEYCODE_FOCUS:
case KeyEvent.KEYCODE_CAMERA:
// Handle these events so they don't launch the Camera app
return true;
// Use volume up/down to turn on light
case KeyEvent.KEYCODE_VOLUME_DOWN:
cameraManager.setTorch(false);
return true;
case KeyEvent.KEYCODE_VOLUME_UP:
cameraManager.setTorch(true);
return true;
}
return super.onKeyDown(keyCode, event);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater menuInflater = getMenuInflater();
menuInflater.inflate(R.menu.capture, menu);
return super.onCreateOptionsMenu(menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Intent intent = new Intent(Intent.ACTION_VIEW);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
switch (item.getItemId()) {
case R.id.menu_share:
intent.setClassName(this, ShareActivity.class.getName());
startActivity(intent);
break;
case R.id.menu_history:
intent.setClassName(this, HistoryActivity.class.getName());
startActivityForResult(intent, HISTORY_REQUEST_CODE);
break;
case R.id.menu_settings:
intent.setClassName(this, PreferencesActivity.class.getName());
startActivity(intent);
break;
case R.id.menu_help:
intent.setClassName(this, HelpActivity.class.getName());
startActivity(intent);
break;
default:
return super.onOptionsItemSelected(item);
}
return true;
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent intent) {
if (resultCode == RESULT_OK) {
if (requestCode == HISTORY_REQUEST_CODE) {
int itemNumber = intent.getIntExtra(Intents.History.ITEM_NUMBER, -1);
if (itemNumber >= 0) {
HistoryItem historyItem = historyManager.buildHistoryItem(itemNumber);
decodeOrStoreSavedBitmap(null, historyItem.getResult());
}
}
}
}
private void decodeOrStoreSavedBitmap(Bitmap bitmap, Result result) {
// Bitmap isn't used yet -- will be used soon
if (handler == null) {
savedResultToShow = result;
} else {
if (result != null) {
savedResultToShow = result;
}
if (savedResultToShow != null) {
Message message = Message.obtain(handler, R.id.decode_succeeded, savedResultToShow);
handler.sendMessage(message);
}
savedResultToShow = null;
}
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
if (holder == null) {
Log.e(TAG, "*** WARNING *** surfaceCreated() gave us a null surface!");
}
if (!hasSurface) {
hasSurface = true;
initCamera(holder);
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
hasSurface = false;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
/**
* A valid barcode has been found, so give an indication of success and show the results.
*
* @param rawResult The contents of the barcode.
* @param scaleFactor amount by which thumbnail was scaled
* @param barcode A greyscale bitmap of the camera data which was decoded.
*/
public void handleDecode(Result rawResult, Bitmap barcode, float scaleFactor) {
Intent it = getIntent();
it.putExtra("SCAN_RESULT", rawResult.getText());
it.putExtra("SCAN_FORMAT", rawResult.getBarcodeFormat().toString());
setResult(Activity.RESULT_OK, it);
finish();
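// Note: finish() does not stop execution here; the code below still runs
// after the result has been handed back.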
inactivityTimer.onActivity();
lastResult = rawResult;
ResultHandler resultHandler = ResultHandlerFactory.makeResultHandler(this, rawResult);
boolean fromLiveScan = barcode != null;
if (fromLiveScan) {
historyManager.addHistoryItem(rawResult, resultHandler);
// Then not from history, so beep/vibrate and we have an image to draw on
beepManager.playBeepSoundAndVibrate();
drawResultPoints(barcode, scaleFactor, rawResult);
}
switch (source) {
case NATIVE_APP_INTENT:
case PRODUCT_SEARCH_LINK:
handleDecodeExternally(rawResult, resultHandler, barcode);
break;
case ZXING_LINK:
if (scanFromWebPageManager == null || !scanFromWebPageManager.isScanFromWebPage()) {
handleDecodeInternally(rawResult, resultHandler, barcode);
} else {
handleDecodeExternally(rawResult, resultHandler, barcode);
}
break;
case NONE:
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
if (fromLiveScan && prefs.getBoolean(PreferencesActivity.KEY_BULK_MODE, false)) {
Toast.makeText(getApplicationContext(),
getResources().getString(R.string.msg_bulk_mode_scanned) + " (" + rawResult.getText() + ')',
Toast.LENGTH_SHORT).show();
// Wait a moment or else it will scan the same barcode continuously about 3 times
restartPreviewAfterDelay(BULK_MODE_SCAN_DELAY_MS);
} else {
handleDecodeInternally(rawResult, resultHandler, barcode);
}
break;
}
}
/**
* Superimpose a line for 1D or dots for 2D to highlight the key features of the barcode.
*
* @param barcode A bitmap of the captured image.
* @param scaleFactor amount by which thumbnail was scaled
* @param rawResult The decoded results which contains the points to draw.
*/
private void drawResultPoints(Bitmap barcode, float scaleFactor, Result rawResult) {
ResultPoint[] points = rawResult.getResultPoints();
if (points != null && points.length > 0) {
Canvas canvas = new Canvas(barcode);
Paint paint = new Paint();
paint.setColor(getResources().getColor(R.color.result_points));
if (points.length == 2) {
paint.setStrokeWidth(4.0f);
drawLine(canvas, paint, points[0], points[1], scaleFactor);
} else if (points.length == 4 &&
(rawResult.getBarcodeFormat() == BarcodeFormat.UPC_A ||
rawResult.getBarcodeFormat() == BarcodeFormat.EAN_13)) {
// Hacky special case -- draw two lines, for the barcode and metadata
drawLine(canvas, paint, points[0], points[1], scaleFactor);
drawLine(canvas, paint, points[2], points[3], scaleFactor);
} else {
paint.setStrokeWidth(10.0f);
for (ResultPoint point : points) {
if (point != null) {
canvas.drawPoint(scaleFactor * point.getX(), scaleFactor * point.getY(), paint);
}
}
}
}
}
private static void drawLine(Canvas canvas, Paint paint, ResultPoint a, ResultPoint b, float scaleFactor) {
if (a != null && b != null) {
canvas.drawLine(scaleFactor * a.getX(),
scaleFactor * a.getY(),
scaleFactor * b.getX(),
scaleFactor * b.getY(),
paint);
}
}
// Put up our own UI for how to handle the decoded contents.
private void handleDecodeInternally(Result rawResult, ResultHandler resultHandler, Bitmap barcode) {
statusView.setVisibility(View.GONE);
viewfinderView.setVisibility(View.GONE);
resultView.setVisibility(View.VISIBLE);
ImageView barcodeImageView = (ImageView) findViewById(R.id.barcode_image_view);
if (barcode == null) {
barcodeImageView.setImageBitmap(BitmapFactory.decodeResource(getResources(),
R.drawable.launcher_icon));
} else {
barcodeImageView.setImageBitmap(barcode);
}
TextView formatTextView = (TextView) findViewById(R.id.format_text_view);
formatTextView.setText(rawResult.getBarcodeFormat().toString());
TextView typeTextView = (TextView) findViewById(R.id.type_text_view);
typeTextView.setText(resultHandler.getType().toString());
DateFormat formatter = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT);
TextView timeTextView = (TextView) findViewById(R.id.time_text_view);
timeTextView.setText(formatter.format(new Date(rawResult.getTimestamp())));
TextView metaTextView = (TextView) findViewById(R.id.meta_text_view);
View metaTextViewLabel = findViewById(R.id.meta_text_view_label);
metaTextView.setVisibility(View.GONE);
metaTextViewLabel.setVisibility(View.GONE);
Map<ResultMetadataType,Object> metadata = rawResult.getResultMetadata();
if (metadata != null) {
StringBuilder metadataText = new StringBuilder(20);
for (Map.Entry<ResultMetadataType,Object> entry : metadata.entrySet()) {
if (DISPLAYABLE_METADATA_TYPES.contains(entry.getKey())) {
metadataText.append(entry.getValue()).append('\n');
}
}
if (metadataText.length() > 0) {
metadataText.setLength(metadataText.length() - 1);
metaTextView.setText(metadataText);
metaTextView.setVisibility(View.VISIBLE);
metaTextViewLabel.setVisibility(View.VISIBLE);
}
}
TextView contentsTextView = (TextView) findViewById(R.id.contents_text_view);
CharSequence displayContents = resultHandler.getDisplayContents();
contentsTextView.setText(displayContents);
// Crudely scale between 22 and 32 -- bigger font for shorter text
int scaledSize = Math.max(22, 32 - displayContents.length() / 4);
contentsTextView.setTextSize(TypedValue.COMPLEX_UNIT_SP, scaledSize);
TextView supplementTextView = (TextView) findViewById(R.id.contents_supplement_text_view);
supplementTextView.setText("");
supplementTextView.setOnClickListener(null);
if (PreferenceManager.getDefaultSharedPreferences(this).getBoolean(
PreferencesActivity.KEY_SUPPLEMENTAL, true)) {
SupplementalInfoRetriever.maybeInvokeRetrieval(supplementTextView,
resultHandler.getResult(),
historyManager,
this);
}
int buttonCount = resultHandler.getButtonCount();
ViewGroup buttonView = (ViewGroup) findViewById(R.id.result_button_view);
buttonView.requestFocus();
for (int x = 0; x < ResultHandler.MAX_BUTTON_COUNT; x++) {
TextView button = (TextView) buttonView.getChildAt(x);
if (x < buttonCount) {
button.setVisibility(View.VISIBLE);
button.setText(resultHandler.getButtonText(x));
button.setOnClickListener(new ResultButtonListener(resultHandler, x));
} else {
button.setVisibility(View.GONE);
}
}
if (copyToClipboard && !resultHandler.areContentsSecure()) {
ClipboardInterface.setText(displayContents, this);
}
}
// Briefly show the contents of the barcode, then handle the result outside Barcode Scanner.
private void handleDecodeExternally(Result rawResult, ResultHandler resultHandler, Bitmap barcode) {
if (barcode != null) {
viewfinderView.drawResultBitmap(barcode);
}
long resultDurationMS;
if (getIntent() == null) {
resultDurationMS = DEFAULT_INTENT_RESULT_DURATION_MS;
} else {
resultDurationMS = getIntent().getLongExtra(Intents.Scan.RESULT_DISPLAY_DURATION_MS,
DEFAULT_INTENT_RESULT_DURATION_MS);
}
if (resultDurationMS > 0) {
String rawResultString = String.valueOf(rawResult);
if (rawResultString.length() > 32) {
rawResultString = rawResultString.substring(0, 32) + " ...";
}
statusView.setText(getString(resultHandler.getDisplayTitle()) + " : " + rawResultString);
}
if (copyToClipboard && !resultHandler.areContentsSecure()) {
CharSequence text = resultHandler.getDisplayContents();
ClipboardInterface.setText(text, this);
}
if (source == IntentSource.NATIVE_APP_INTENT) {
// Hand back whatever action they requested - this can be changed to Intents.Scan.ACTION when
// the deprecated intent is retired.
Intent intent = new Intent(getIntent().getAction());
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
intent.putExtra(Intents.Scan.RESULT, rawResult.toString());
intent.putExtra(Intents.Scan.RESULT_FORMAT, rawResult.getBarcodeFormat().toString());
byte[] rawBytes = rawResult.getRawBytes();
if (rawBytes != null && rawBytes.length > 0) {
intent.putExtra(Intents.Scan.RESULT_BYTES, rawBytes);
}
Map<ResultMetadataType,?> metadata = rawResult.getResultMetadata();
if (metadata != null) {
if (metadata.containsKey(ResultMetadataType.UPC_EAN_EXTENSION)) {
intent.putExtra(Intents.Scan.RESULT_UPC_EAN_EXTENSION,
metadata.get(ResultMetadataType.UPC_EAN_EXTENSION).toString());
}
Number orientation = (Number) metadata.get(ResultMetadataType.ORIENTATION);
if (orientation != null) {
intent.putExtra(Intents.Scan.RESULT_ORIENTATION, orientation.intValue());
}
String ecLevel = (String) metadata.get(ResultMetadataType.ERROR_CORRECTION_LEVEL);
if (ecLevel != null) {
intent.putExtra(Intents.Scan.RESULT_ERROR_CORRECTION_LEVEL, ecLevel);
}
@SuppressWarnings("unchecked")
Iterable<byte[]> byteSegments = (Iterable<byte[]>) metadata.get(ResultMetadataType.BYTE_SEGMENTS);
if (byteSegments != null) {
int i = 0;
for (byte[] byteSegment : byteSegments) {
intent.putExtra(Intents.Scan.RESULT_BYTE_SEGMENTS_PREFIX + i, byteSegment);
i++;
}
}
}
sendReplyMessage(R.id.return_scan_result, intent, resultDurationMS);
} else if (source == IntentSource.PRODUCT_SEARCH_LINK) {
// Reformulate the URL which triggered us into a query, so that the request goes to the same
// TLD as the scan URL.
int end = sourceUrl.lastIndexOf("/scan");
String replyURL = sourceUrl.substring(0, end) + "?q=" + resultHandler.getDisplayContents() + "&source=zxing";
sendReplyMessage(R.id.launch_product_query, replyURL, resultDurationMS);
} else if (source == IntentSource.ZXING_LINK) {
if (scanFromWebPageManager != null && scanFromWebPageManager.isScanFromWebPage()) {
String replyURL = scanFromWebPageManager.buildReplyURL(rawResult, resultHandler);
sendReplyMessage(R.id.launch_product_query, replyURL, resultDurationMS);
}
}
}
private void sendReplyMessage(int id, Object arg, long delayMS) {
if (handler != null) {
Message message = Message.obtain(handler, id, arg);
if (delayMS > 0L) {
handler.sendMessageDelayed(message, delayMS);
} else {
handler.sendMessage(message);
}
}
}
private void initCamera(SurfaceHolder surfaceHolder) {
if (surfaceHolder == null) {
throw new IllegalStateException("No SurfaceHolder provided");
}
if (cameraManager.isOpen()) {
Log.w(TAG, "initCamera() while already open -- late SurfaceView callback?");
return;
}
try {
cameraManager.openDriver(surfaceHolder);
// Creating the handler starts the preview, which can also throw a RuntimeException.
if (handler == null) {
handler = new CaptureActivityHandler(this, decodeFormats, decodeHints, characterSet, cameraManager);
}
decodeOrStoreSavedBitmap(null, null);
} catch (IOException ioe) {
Log.w(TAG, ioe);
displayFrameworkBugMessageAndExit();
} catch (RuntimeException e) {
// Barcode Scanner has seen crashes in the wild of this variety:
// java.?lang.?RuntimeException: Fail to connect to camera service
Log.w(TAG, "Unexpected error initializing camera", e);
displayFrameworkBugMessageAndExit();
}
}
private void displayFrameworkBugMessageAndExit() {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle(getString(R.string.app_name));
builder.setMessage(getString(R.string.msg_camera_framework_bug));
builder.setPositiveButton(R.string.button_ok, new FinishListener(this));
builder.setOnCancelListener(new FinishListener(this));
builder.show();
}
public void restartPreviewAfterDelay(long delayMS) {
if (handler != null) {
handler.sendEmptyMessageDelayed(R.id.restart_preview, delayMS);
}
resetStatusView();
}
private void resetStatusView() {
resultView.setVisibility(View.GONE);
statusView.setText(R.string.msg_default_status);
statusView.setVisibility(View.VISIBLE);
viewfinderView.setVisibility(View.VISIBLE);
lastResult = null;
}
public void drawViewfinder() {
viewfinderView.drawViewfinder();
}
}
Your SplashScreen.java should look like this:
{
Intent mainIntent = new Intent().setClass(SplashScreenActivity.this, com.google.zxing.client.android.CaptureActivity.class);
// use startActivityForResult instead of startActivity
startActivityForResult(mainIntent, REQUEST_CODE);
}
But I guess you already solved your problem!
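For completeness, a sketch of the whole round trip, assuming ActivityQR itself launches the scanner (REQUEST_CODE is the same constant your onActivityResult already checks, and the SCAN action is what makes the stock CaptureActivity hand the result back to the caller):
// In ActivityQR: launch the ZXing scanner and wait for its result.
private static final int REQUEST_CODE = 1; // must match the check in onActivityResult
private void startScan() {
    Intent intent = new Intent(this, com.google.zxing.client.android.CaptureActivity.class);
    intent.setAction("com.google.zxing.client.android.SCAN"); // Intents.Scan.ACTION
    startActivityForResult(intent, REQUEST_CODE);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (requestCode == REQUEST_CODE && resultCode == RESULT_OK) {
        txResult.setText(data.getStringExtra("SCAN_RESULT"));
        // insert the value into your SQLite database here
    }
}
Without the request code, the result never reaches onActivityResult; and without the SCAN action, the unmodified CaptureActivity treats the launch as IntentSource.NONE and shows its own result UI instead of returning one.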