Removing the camera preview from the interface in Android Studio - Java

I really need help here. I have a project called MyHeartRate, based on heart-rate monitor source code I downloaded from GitHub. The source code runs without errors, but the layout displays the live camera preview (like when you open the camera app and see the framing screen before you take a picture). I want to remove that camera preview from my layout, and I need to know which code to delete so it no longer appears.
Here is my code.
The XML file:
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="#+id/layout">
<LinearLayout android:id="#+id/top"
android:paddingLeft="10dp"
android:paddingRight="10dp"
android:orientation="horizontal"
android:layout_width="fill_parent"
android:layout_height="50dp">
<TextView android:id="@+id/text"
android:text="@string/default_text"
android:textSize="32dp"
android:layout_width="wrap_content"
android:layout_height="wrap_content">
</TextView>
<RelativeLayout
android:orientation="horizontal"
android:layout_width="fill_parent"
android:layout_height="fill_parent">
<com.jwetherell.heart_rate_monitor.HeartbeatView android:id="@+id/image"
android:layout_centerInParent="true"
android:layout_width="wrap_content"
android:layout_height="wrap_content">
</com.jwetherell.heart_rate_monitor.HeartbeatView>
</RelativeLayout>
</LinearLayout>
<SurfaceView android:id="@+id/preview"
android:layout_weight="1"
android:layout_width="fill_parent"
android:layout_height="0dp">
</SurfaceView>
The Java file (HeartRateMonitor):
public class HeartRateMonitor extends Activity {
private static final String TAG = "HeartRateMonitor";
private static final AtomicBoolean processing = new AtomicBoolean(false);
private static SurfaceView preview = null;
private static SurfaceHolder previewHolder = null;
private static Camera camera = null;
private static View image = null;
private static TextView text = null;
private static WakeLock wakeLock = null;
private static int averageIndex = 0;
private static final int averageArraySize = 4;
private static final int[] averageArray = new int[averageArraySize];
public static enum TYPE {
GREEN, RED
};
private static TYPE currentType = TYPE.GREEN;
public static TYPE getCurrent() {
return currentType;
}
private static int beatsIndex = 0;
private static final int beatsArraySize = 3;
private static final int[] beatsArray = new int[beatsArraySize];
private static double beats = 0;
private static long startTime = 0;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
preview = (SurfaceView) findViewById(R.id.preview);
previewHolder = preview.getHolder();
previewHolder.addCallback(surfaceCallback);
previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
image = findViewById(R.id.image);
text = (TextView) findViewById(R.id.text);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
wakeLock = pm.newWakeLock(PowerManager.FULL_WAKE_LOCK, "DoNotDimScreen");
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
}
@Override
public void onResume() {
super.onResume();
wakeLock.acquire();
camera = Camera.open();
startTime = System.currentTimeMillis();
}
@Override
public void onPause() {
super.onPause();
wakeLock.release();
camera.setPreviewCallback(null);
camera.stopPreview();
camera.release();
camera = null;
}
private static PreviewCallback previewCallback = new PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera cam) {
if (data == null) throw new NullPointerException();
Camera.Size size = cam.getParameters().getPreviewSize();
if (size == null) throw new NullPointerException();
if (!processing.compareAndSet(false, true)) return;
int width = size.width;
int height = size.height;
int imgAvg = ImageProcessing.decodeYUV420SPtoRedAvg(data.clone(), height, width);
// Log.i(TAG, "imgAvg="+imgAvg);
if (imgAvg == 0 || imgAvg == 255) {
processing.set(false);
return;
}
int averageArrayAvg = 0;
int averageArrayCnt = 0;
for (int i = 0; i < averageArray.length; i++) {
if (averageArray[i] > 0) {
averageArrayAvg += averageArray[i];
averageArrayCnt++;
}
}
int rollingAverage = (averageArrayCnt > 0) ? (averageArrayAvg / averageArrayCnt) : 0;
TYPE newType = currentType;
if (imgAvg < rollingAverage) {
newType = TYPE.RED;
if (newType != currentType) {
beats++;
// Log.d(TAG, "BEAT!! beats="+beats);
}
} else if (imgAvg > rollingAverage) {
newType = TYPE.GREEN;
}
if (averageIndex == averageArraySize) averageIndex = 0;
averageArray[averageIndex] = imgAvg;
averageIndex++;
// Transitioned from one state to another to the same
if (newType != currentType) {
currentType = newType;
image.postInvalidate();
}
long endTime = System.currentTimeMillis();
double totalTimeInSecs = (endTime - startTime) / 1000d;
if (totalTimeInSecs >= 10) {
double bps = (beats / totalTimeInSecs);
int dpm = (int) (bps * 60d);
if (dpm < 30 || dpm > 180) {
startTime = System.currentTimeMillis();
beats = 0;
processing.set(false);
return;
}
// Log.d(TAG,
// "totalTimeInSecs="+totalTimeInSecs+" beats="+beats);
if (beatsIndex == beatsArraySize) beatsIndex = 0;
beatsArray[beatsIndex] = dpm;
beatsIndex++;
int beatsArrayAvg = 0;
int beatsArrayCnt = 0;
for (int i = 0; i < beatsArray.length; i++) {
if (beatsArray[i] > 0) {
beatsArrayAvg += beatsArray[i];
beatsArrayCnt++;
}
}
int beatsAvg = (beatsArrayAvg / beatsArrayCnt);
text.setText(String.valueOf(beatsAvg));
startTime = System.currentTimeMillis();
beats = 0;
}
processing.set(false);
}
};
private static SurfaceHolder.Callback surfaceCallback = new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
camera.setPreviewDisplay(previewHolder);
camera.setPreviewCallback(previewCallback);
} catch (Throwable t) {
Log.e("PreviewDemo-surfaceCallback", "Exception in setPreviewDisplay()", t);
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Camera.Parameters parameters = camera.getParameters();
parameters.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
Camera.Size size = getSmallestPreviewSize(width, height, parameters);
if (size != null) {
parameters.setPreviewSize(size.width, size.height);
Log.d(TAG, "Using width=" + size.width + " height=" + size.height);
}
camera.setParameters(parameters);
camera.startPreview();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
// Ignore
}
};
private static Camera.Size getSmallestPreviewSize(int width, int height, Camera.Parameters parameters) {
Camera.Size result = null;
for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
if (size.width <= width && size.height <= height) {
if (result == null) {
result = size;
} else {
int resultArea = result.width * result.height;
int newArea = size.width * size.height;
if (newArea < resultArea) result = size;
}
}
}
return result;
}
}
Thanks,
Faizal

I found the answer: just comment out these lines in onCreate():
//preview = (SurfaceView) findViewById(R.id.preview);
//previewHolder = preview.getHolder();
//previewHolder.addCallback(surfaceCallback);
//previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
done.
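Note that on many devices the old Camera API only delivers preview frames while a preview surface is attached, so commenting those lines out can also stop the beat detection. A minimal alternative sketch (not from the original project, assuming the root layout is the LinearLayout shown above) keeps the callbacks and simply shrinks the preview to a single pixel so it is effectively invisible:
// Sketch: keep the camera wiring so previewCallback still receives frames,
// but make the SurfaceView 1x1 px so it no longer shows on screen.
preview = (SurfaceView) findViewById(R.id.preview);
preview.setLayoutParams(new LinearLayout.LayoutParams(1, 1));
previewHolder = preview.getHolder();
previewHolder.addCallback(surfaceCallback);
previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);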

Related

What is the VINTF manifest in Android Studio? I have an error

2020-04-22 16:14:49.759 1809-1809/? E/servicemanager: Could not find android.hardware.power.IPower/default in the VINTF manifest.
This keeps popping up. I am coding a camera feature where, when a button is clicked, an image is captured and cropped.
Here is a custom view that I am adding to a fragment.
public class DrawView extends View {
Point[] points = new Point[4];
/**
* point1 and point 3 are of same group and same as point 2 and point4
*/
int groupId = -1;
private ArrayList<ColorBall> colorballs = new ArrayList<>();
private int mStrokeColor = Color.parseColor("#AADB1255");
private int mFillColor = Color.parseColor("#55DB1255");
private Rect mCropRect = new Rect();
// array that holds the balls
private int balID = 0;
// variable to know what ball is being dragged
Paint paint;
public DrawView(Context context) {
this(context, null);
}
public DrawView(Context context, AttributeSet attrs) {
this(context, attrs, -1);
}
public DrawView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
init();
}
private void init() {
paint = new Paint();
setFocusable(true); // necessary for getting the touch events
}
private void initRectangle(int X, int Y) {
//initialize rectangle.
points[0] = new Point();
points[0].x = X - 200;
points[0].y = Y - 100;
points[1] = new Point();
points[1].x = X;
points[1].y = Y + 30;
points[2] = new Point();
points[2].x = X + 30;
points[2].y = Y + 30;
points[3] = new Point();
points[3].x = X + 30;
points[3].y = Y;
balID = 2;
groupId = 1;
// declare each ball with the ColorBall class
for (int i = 0; i < points.length; i++) {
colorballs.add(new ColorBall(getContext(), R.drawable.gray_circle, points[i], i));
}
}
// the method that draws the balls
@Override
protected void onDraw(Canvas canvas) {
if(points[3]==null) {
//point4 null when view first create
initRectangle(getWidth() / 2, getHeight() / 2);
}
int left, top, right, bottom;
left = points[0].x;
top = points[0].y;
right = points[0].x;
bottom = points[0].y;
for (int i = 1; i < points.length; i++) {
left = left > points[i].x ? points[i].x : left;
top = top > points[i].y ? points[i].y : top;
right = right < points[i].x ? points[i].x : right;
bottom = bottom < points[i].y ? points[i].y : bottom;
}
paint.setAntiAlias(true);
paint.setDither(true);
paint.setStrokeJoin(Paint.Join.ROUND);
paint.setStrokeWidth(5);
//draw stroke
paint.setStyle(Paint.Style.STROKE);
paint.setColor(mStrokeColor);
paint.setStrokeWidth(2);
mCropRect.left = left + colorballs.get(0).getWidthOfBall() / 2;
mCropRect.top = top + colorballs.get(0).getWidthOfBall() / 2;
mCropRect.right = right + colorballs.get(2).getWidthOfBall() / 2;
mCropRect.bottom = bottom + colorballs.get(3).getWidthOfBall() / 2;
canvas.drawRect(mCropRect, paint);
//fill the rectangle
paint.setStyle(Paint.Style.FILL);
paint.setColor(mFillColor);
paint.setStrokeWidth(0);
canvas.drawRect(mCropRect, paint);
// draw the balls on the canvas
paint.setColor(Color.RED);
paint.setTextSize(18);
paint.setStrokeWidth(0);
for (int i =0; i < colorballs.size(); i ++) {
ColorBall ball = colorballs.get(i);
canvas.drawBitmap(ball.getBitmap(), ball.getX(), ball.getY(),
paint);
canvas.drawText("" + (i+1), ball.getX(), ball.getY(), paint);
}
}
// events when touching the screen
public boolean onTouchEvent(MotionEvent event) {
int eventAction = event.getAction();
int X = (int) event.getX();
int Y = (int) event.getY();
switch (eventAction) {
case MotionEvent.ACTION_DOWN: // touch down so check if the finger is on
// a ball
if (points[0] == null) {
initRectangle(X, Y);
} else {
//resize rectangle
balID = -1;
groupId = -1;
for (int i = colorballs.size()-1; i>=0; i--) {
ColorBall ball = colorballs.get(i);
// check if inside the bounds of the ball (circle)
// get the center for the ball
int centerX = ball.getX() + ball.getWidthOfBall();
int centerY = ball.getY() + ball.getHeightOfBall();
paint.setColor(Color.CYAN);
// calculate the radius from the touch to the center of the
// ball
double radCircle = Math
.sqrt((double) (((centerX - X) * (centerX - X)) + (centerY - Y)
* (centerY - Y)));
if (radCircle < ball.getWidthOfBall()) {
balID = ball.getID();
if (balID == 1 || balID == 3) {
groupId = 2;
} else {
groupId = 1;
}
invalidate();
break;
}
invalidate();
}
}
break;
case MotionEvent.ACTION_MOVE: // touch drag with the ball
if (balID > -1) {
// move the balls the same as the finger
colorballs.get(balID).setX(X);
colorballs.get(balID).setY(Y);
paint.setColor(Color.CYAN);
if (groupId == 1) {
colorballs.get(1).setX(colorballs.get(0).getX());
colorballs.get(1).setY(colorballs.get(2).getY());
colorballs.get(3).setX(colorballs.get(2).getX());
colorballs.get(3).setY(colorballs.get(0).getY());
} else {
colorballs.get(0).setX(colorballs.get(1).getX());
colorballs.get(0).setY(colorballs.get(3).getY());
colorballs.get(2).setX(colorballs.get(3).getX());
colorballs.get(2).setY(colorballs.get(1).getY());
}
invalidate();
}
break;
case MotionEvent.ACTION_UP:
// touch drop - just do things here after dropping
break;
}
// redraw the canvas
invalidate();
return true;
}
public Drawable doTheCrop(Bitmap sourceBitmap) throws IOException {
//Bitmap sourceBitmap = null;
//Drawable backgroundDrawable = getBackground();
/*
if (backgroundDrawable instanceof BitmapDrawable) {
BitmapDrawable bitmapDrawable = (BitmapDrawable) backgroundDrawable;
if(bitmapDrawable.getBitmap() != null) {
sourceBitmap = bitmapDrawable.getBitmap();
}
}*/
//source bitmap was scaled, you should calculate the rate
float widthRate = ((float) sourceBitmap.getWidth()) / getWidth();
float heightRate = ((float) sourceBitmap.getHeight()) / getHeight();
//crop the source bitmap with rate value
int left = (int) (mCropRect.left * widthRate);
int top = (int) (mCropRect.top * heightRate);
int right = (int) (mCropRect.right * widthRate);
int bottom = (int) (mCropRect.bottom * heightRate);
Bitmap croppedBitmap = Bitmap.createBitmap(sourceBitmap, left, top, right - left, bottom - top);
Drawable drawable = new BitmapDrawable(getResources(), croppedBitmap);
return drawable;
/*
setContentView(R.layout.fragment_dashboard);
Button btn = (Button)findViewById(R.id.capture);
if (btn == null){
System.out.println("NULL");
}
try{
btn.setText("HI");
}
catch (Exception e){
}
//setBackground(drawable);*/
//savebitmap(croppedBitmap);
}
private File savebitmap(Bitmap bmp) throws IOException {
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
bmp.compress(Bitmap.CompressFormat.JPEG, 60, bytes);
File f = new File(Environment.getExternalStorageDirectory()
+ "/" + "testimage.jpg");
Toast.makeText(getContext(), "YUP", Toast.LENGTH_LONG).show();
f.createNewFile();
FileOutputStream fo = new FileOutputStream(f);
fo.write(bytes.toByteArray());
fo.close();
return f;
}
public static class ColorBall {
Bitmap bitmap;
Context mContext;
Point point;
int id;
public ColorBall(Context context, int resourceId, Point point, int id) {
this.id = id;
bitmap = BitmapFactory.decodeResource(context.getResources(),
resourceId);
mContext = context;
this.point = point;
}
public int getWidthOfBall() {
return bitmap.getWidth();
}
public int getHeightOfBall() {
return bitmap.getHeight();
}
public Bitmap getBitmap() {
return bitmap;
}
public int getX() {
return point.x;
}
public int getY() {
return point.y;
}
public int getID() {
return id;
}
public void setX(int x) {
point.x = x;
}
public void setY(int y) {
point.y = y;
}
}
}
Here is the fragment to which I have added the camera; it is basically the main part of the application I am working on.
public class DashboardFragment extends Fragment {
private DashboardViewModel dashboardViewModel;
//All my constants
private DrawView mDrawView;
private Drawable imgDraw;
private TextureView txtView;
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static{
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 180);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private String cameraID;
private String pathway;
CameraDevice cameraDevice;
CameraCaptureSession cameraCaptureSession;
CaptureRequest captureRequest;
CaptureRequest.Builder captureRequestBuilder;
private Size imageDimensions;
private ImageReader imageReader;
private File file;
Handler mBackgroundHandler;
HandlerThread mBackgroundThread;
public View onCreateView(@NonNull LayoutInflater inflater,
ViewGroup container, Bundle savedInstanceState) {
dashboardViewModel =
ViewModelProviders.of(this).get(DashboardViewModel.class);
View root = inflater.inflate(R.layout.fragment_dashboard, container, false);
try{
txtView = (TextureView)root.findViewById(R.id.textureView);
txtView.setSurfaceTextureListener(textureListener);
mDrawView = root.findViewById(R.id.draw_view);
Button cap = (Button)root.findViewById(R.id.capture);
cap.setClickable(true);
cap.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
Log.i("HOLA","HOLA");
takePicture();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
});
}
catch (Exception e){
Log.i("HI",e.toString());
}
/*
txtView = (TextureView)root.findViewById(R.id.textureView);
txtView.setSurfaceTextureListener(textureListener);
mDrawView = root.findViewById(R.id.draw_view);
Button cap = (Button)root.findViewById(R.id.capture);
cap.setClickable(true);
cap.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
Log.i("HOLA","HOLA");
takePicture();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
});*/
return root;
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults){
if (requestCode == 101){
if (grantResults[0] == PackageManager.PERMISSION_DENIED){
Toast.makeText(getActivity().getApplicationContext(), "Permission is required",Toast.LENGTH_LONG);
}
}
}
TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1) {
try {
openCamera();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int i, int i1) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
cameraDevice = camera;
try {
createCameraPreview();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
cameraDevice.close();
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int i) {
cameraDevice.close();
cameraDevice = null;
}
};
private void createCameraPreview() throws CameraAccessException {
SurfaceTexture texture = txtView.getSurfaceTexture(); //?
texture.setDefaultBufferSize(imageDimensions.getWidth(), imageDimensions.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(surface);
cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
if (cameraDevice == null){
return;
}
cameraCaptureSession = session;
try {
updatePreview();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Toast.makeText(getActivity().getApplicationContext(), "CONFIGURATION", Toast.LENGTH_LONG);
}
}, null);
}
private void updatePreview() throws CameraAccessException {
if (cameraDevice == null){
return;
}
captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);
}
private void openCamera() throws CameraAccessException {
CameraManager manager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
cameraID = manager.getCameraIdList()[0];
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraID);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
imageDimensions = map.getOutputSizes(SurfaceTexture.class)[0];
if (ActivityCompat.checkSelfPermission(getActivity().getApplicationContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED
&& ActivityCompat.checkSelfPermission(getActivity().getApplicationContext(), Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED){
ActivityCompat.requestPermissions(getActivity(), new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE}, 101);
return;
}
manager.openCamera(cameraID, stateCallback, null);
}
private void takePicture() throws CameraAccessException {
if (cameraDevice == null) {
Log.i("NOt working", "hi");
return;
}
CameraManager manager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
Size[] jpegSizes = null;
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
int width = 640;
int height = 480;
if (jpegSizes != null && jpegSizes.length > 0) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
final ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(txtView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
int rotation = getActivity().getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
Long tsLong = System.currentTimeMillis() / 1000;
String ts = tsLong.toString();
file = new File(Environment.getExternalStorageDirectory() + "/" + ts + ".jpg");
pathway = Environment.getExternalStorageDirectory() + "/" + ts + ".jpg";
//cameraDevice.close();
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader imageReader) {
Image image = null;
//image = reader.acquireLatestImage();
image = reader.acquireNextImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes , 0, bytes.length);
try {
Drawable back = mDrawView.doTheCrop(bitmap);
Button btn = (Button)getView().findViewById(R.id.capture);
btn.setBackground(back);
} catch (IOException e) {
e.printStackTrace();
}
/*
try {
save(bytes);
} catch (IOException e) {
e.printStackTrace();
} finally {
if (image != null){
image.close();
}
}*/
}
};
reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback(){
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result){
super.onCaptureCompleted(session, request, result);
try {
createCameraPreview();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
};
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
}
}, mBackgroundHandler);
}
private void save (byte[] bytes) throws IOException {
OutputStream outputStream = null;
outputStream = new FileOutputStream(file);
outputStream.write(bytes);
Toast.makeText(getActivity().getApplicationContext(),pathway,Toast.LENGTH_LONG).show();
outputStream.close();
imgDraw = Drawable.createFromPath(pathway);
//mDrawView.doTheCrop(imgDraw);
}
@Override
public void onResume(){
super.onResume();
startBackgroundThread();
if (txtView.isAvailable()){
try {
openCamera();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
else{
txtView.setSurfaceTextureListener(textureListener);
}
}
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("Camera Background");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
protected void stopBackgroundThread() throws InterruptedException{
mBackgroundThread.quitSafely();
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
}
@Override
public void onPause(){
try {
stopBackgroundThread();
} catch (InterruptedException e) {
e.printStackTrace();
}
super.onPause();
}
}
Here is the xml file for that fragment.
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".ui.dashboard.DashboardFragment">
<TextureView
android:id = "#+id/textureView"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
<com.PeavlerDevelopment.OpinionMinion.ui.dashboard.DrawView
android:id="#+id/draw_view"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
<Button
android:id="#+id/capture"
android:layout_width="100dp"
android:layout_height="200dp"
android:clickable="true"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"></Button>
</androidx.constraintlayout.widget.ConstraintLayout>
The problem seems to lie somewhere in the doTheCrop method of the DrawView class.
If there is anything else that would help make the problem clearer, let me know! I will gladly share the GitHub repo with you.
Thank you.
As you can see in the Android design documentation, VINTF stands for Vendor Interface, and it is a manifest structure that aggregates data from the device. That specific log means that your manifest is missing something like this:
<hal>
<name>android.hardware.power</name>
<transport>hwbinder</transport>
<version>1.1</version>
<interface>
<name>IPower</name>
<instance>default</instance>
</interface>
</hal>
which is basically hardware power information.
I think it is not related to what you are trying to do, but I would need more information than that log to say more.
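Regarding the crash itself: Bitmap.createBitmap in doTheCrop will throw if the scaled crop rectangle reaches outside the source bitmap, so it is worth clamping it first. A rough sketch of such a guard, using the same variable names as your doTheCrop (the clamping itself is my addition, not part of your code):
// Sketch: clamp the scaled crop rectangle to the bitmap bounds before cropping,
// so createBitmap is never asked for pixels outside the image.
int left = Math.max(0, (int) (mCropRect.left * widthRate));
int top = Math.max(0, (int) (mCropRect.top * heightRate));
int right = Math.min(sourceBitmap.getWidth(), (int) (mCropRect.right * widthRate));
int bottom = Math.min(sourceBitmap.getHeight(), (int) (mCropRect.bottom * heightRate));
if (right <= left || bottom <= top) {
return null; // nothing sensible to crop
}
Bitmap croppedBitmap = Bitmap.createBitmap(sourceBitmap, left, top, right - left, bottom - top);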

How to crop only the face instead of the whole body in oval form using the Google Vision Face Detection API in Android

I want to make an app that takes photos containing only faces, but when I save to storage I get the whole image instead of just the face. How do I crop the face and save it to storage with the help of the Google Vision Face Detection API?
Specifically, how do I use a Frame in my code to get the list of faces, and how can I convert a face into a bitmap and save it to storage?
main.xml
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:id="#+id/topLayout"
android:orientation="vertical"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:keepScreenOn="true">
<com.google.android.gms.samples.vision.face.facetracker.ui.camera.CameraSourcePreview
android:id="#+id/preview"
android:layout_width="match_parent"
android:layout_height="match_parent">
<com.google.android.gms.samples.vision.face.facetracker.ui.camera.GraphicOverlay
android:id="#+id/faceOverlay"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</com.google.android.gms.samples.vision.face.facetracker.ui.camera.CameraSourcePreview>
</LinearLayout>
<android.support.v7.widget.AppCompatButton
android:id="#+id/take_pic_btn"
android:layout_gravity="bottom"
android:gravity="center"
android:background="#color/green"
android:layout_margin="10dp"
android:text="Take Image"
android:textStyle="bold"
android:textSize="20sp"
android:textAllCaps="false"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
</FrameLayout>
FaceTrackerActivity.java
public final class FaceTrackerActivity extends AppCompatActivity {
private static final String TAG = "FaceTracker";
private CameraSource mCameraSource = null;
private CameraSourcePreview mPreview;
private GraphicOverlay mGraphicOverlay;
private static final int RC_HANDLE_GMS = 9001;
// permission request codes need to be < 256
private static final int RC_HANDLE_CAMERA_PERM = 2;
private Button takePicButton;
FaceDetector detector;
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setContentView(R.layout.main);
mPreview = (CameraSourcePreview) findViewById(R.id.preview);
mGraphicOverlay = (GraphicOverlay) findViewById(R.id.faceOverlay);
takePicButton=(Button)findViewById(R.id.take_pic_btn);
int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
int gc = ActivityCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE);
if (rc == PackageManager.PERMISSION_GRANTED && gc == PackageManager.PERMISSION_GRANTED) {
createCameraSource();
} else {
requestCameraPermission();
}
takePicButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Toast.makeText(getApplicationContext(),"dilip",Toast.LENGTH_LONG).show();
captureImage();
}
});
}
private void requestCameraPermission() {
Log.w(TAG, "Camera permission is not granted. Requesting permission");
final String[] permissions = new String[]{Manifest.permission.CAMERA,Manifest.permission.WRITE_EXTERNAL_STORAGE};
if (!ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.CAMERA) && !ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.WRITE_EXTERNAL_STORAGE) ) {
ActivityCompat.requestPermissions(this, permissions, RC_HANDLE_CAMERA_PERM);
return;
}
final Activity thisActivity = this;
View.OnClickListener listener = new View.OnClickListener() {
@Override
public void onClick(View view) {
ActivityCompat.requestPermissions(thisActivity, permissions,
RC_HANDLE_CAMERA_PERM);
}
};
Snackbar.make(mGraphicOverlay, R.string.permission_camera_rationale,
Snackbar.LENGTH_INDEFINITE)
.setAction(R.string.ok, listener)
.show();
}
private void createCameraSource() {
Context context = getApplicationContext();
/* FaceDetector detector = new FaceDetector.Builder(context)
.setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
.build();
detector.setProcessor(
new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
.build());*/
detector= new FaceDetector.Builder(context)
.setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
.build();
MyFaceDetector myFaceDetector = new MyFaceDetector(detector);
detector.setProcessor(
new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
.build());
mCameraSource = new CameraSource.Builder(context, myFaceDetector)
.build();
if (!detector.isOperational()) {
}
mCameraSource = new CameraSource.Builder(context, detector)
.setRequestedPreviewSize(640, 480)
.setFacing(CameraSource.CAMERA_FACING_FRONT)
.setRequestedFps(10.0f)
.build();
}
/**
* Restarts the camera.
*/
@Override
protected void onResume() {
super.onResume();
startCameraSource();
}
/**
* Stops the camera.
*/
@Override
protected void onPause() {
super.onPause();
mPreview.stop();
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mCameraSource != null) {
mCameraSource.release();
}
}
/**
* Callback for the result from requesting permissions. This method
* is invoked for every call on {@link #requestPermissions(String[], int)}.
* <p>
* <strong>Note:</strong> It is possible that the permissions request interaction
* with the user is interrupted. In this case you will receive empty permissions
* and results arrays which should be treated as a cancellation.
* </p>
*
* @param requestCode The request code passed in {@link #requestPermissions(String[], int)}.
* @param permissions The requested permissions. Never null.
* @param grantResults The grant results for the corresponding permissions
* which is either {@link PackageManager#PERMISSION_GRANTED}
* or {@link PackageManager#PERMISSION_DENIED}. Never null.
* @see #requestPermissions(String[], int)
*/
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
if (requestCode != RC_HANDLE_CAMERA_PERM) {
Log.d(TAG, "Got unexpected permission result: " + requestCode);
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
return;
}
if (grantResults.length != 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED &&grantResults[1] == PackageManager.PERMISSION_GRANTED) {
Log.d(TAG, "Camera permission granted - initialize the camera source");
// we have permission, so create the camerasource
createCameraSource();
return;
}
Log.e(TAG, "Permission not granted: results len = " + grantResults.length +
" Result code = " + (grantResults.length > 0 ? grantResults[0] : "(empty)"));
DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
finish();
}
};
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Face Tracker sample")
.setMessage(R.string.no_camera_permission)
.setPositiveButton(R.string.ok, listener)
.show();
}
//==============================================================================================
// Camera Source Preview
//==============================================================================================
/**
* Starts or restarts the camera source, if it exists. If the camera source doesn't exist yet
* (e.g., because onResume was called before the camera source was created), this will be called
* again when the camera source is created.
*/
private void startCameraSource() {
// check that the device has play services available.
int code = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(
getApplicationContext());
if (code != ConnectionResult.SUCCESS) {
Dialog dlg =
GoogleApiAvailability.getInstance().getErrorDialog(this, code, RC_HANDLE_GMS);
dlg.show();
}
if (mCameraSource != null) {
try {
mPreview.start(mCameraSource, mGraphicOverlay);
} catch (IOException e) {
Log.e(TAG, "Unable to start camera source.", e);
mCameraSource.release();
mCameraSource = null;
}
}
}
//==============================================================================================
// Graphic Face Tracker
//==============================================================================================
/**
* Factory for creating a face tracker to be associated with a new face. The multiprocessor
* uses this factory to create face trackers as needed -- one for each individual.
*/
private class GraphicFaceTrackerFactory implements MultiProcessor.Factory<Face> {
@Override
public Tracker<Face> create(Face face) {
return new GraphicFaceTracker(mGraphicOverlay);
}
}
/**
* Face tracker for each detected individual. This maintains a face graphic within the app's
* associated face overlay.
*/
private class GraphicFaceTracker extends Tracker<Face> {
private GraphicOverlay mOverlay;
private FaceGraphic mFaceGraphic;
GraphicFaceTracker(GraphicOverlay overlay) {
mOverlay = overlay;
mFaceGraphic = new FaceGraphic(overlay);
}
/**
* Start tracking the detected face instance within the face overlay.
*/
@Override
public void onNewItem(int faceId, Face item) {
mFaceGraphic.setId(faceId);
}
@Override
public void onUpdate(FaceDetector.Detections<Face> detectionResults, Face face) {
mOverlay.add(mFaceGraphic);
mFaceGraphic.updateFace(face);
}
/**
* Hide the graphic when the corresponding face was not detected. This can happen for
* intermediate frames temporarily (e.g., if the face was momentarily blocked from
* view).
*/
@Override
public void onMissing(FaceDetector.Detections<Face> detectionResults) {
mOverlay.remove(mFaceGraphic);
}
/**
* Called when the face is assumed to be gone for good. Remove the graphic annotation from
* the overlay.
*/
@Override
public void onDone() {
mOverlay.remove(mFaceGraphic);
}
}
private void captureImage() {
mPreview.setDrawingCacheEnabled(true);
final Bitmap drawingCache = mPreview.getDrawingCache();
mCameraSource.takePicture(null, new CameraSource.PictureCallback() {
@Override
public void onPictureTaken(byte[] bytes) {
int orientation = Exif.getOrientation(bytes);
Bitmap temp = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
Bitmap picture = rotateImage(temp,orientation);
Bitmap overlay = Bitmap.createBitmap(mGraphicOverlay.getWidth(),mGraphicOverlay.getHeight(),picture.getConfig());
Canvas canvas = new Canvas(overlay);
Matrix matrix = new Matrix();
matrix.setScale((float)overlay.getWidth()/(float)picture.getWidth(),(float)overlay.getHeight()/(float)picture.getHeight());
// mirror by inverting scale and translating
matrix.preScale(-1, 1);
matrix.postTranslate(canvas.getWidth(), 0);
Paint paint = new Paint();
canvas.drawBitmap(picture,matrix,paint);
canvas.drawBitmap(drawingCache,0,0,paint);
try {
String mainpath = getExternalStorageDirectory() + separator + "MaskIt" + separator + "images" + separator;
File basePath = new File(mainpath);
if (!basePath.exists())
Log.d("CAPTURE_BASE_PATH", basePath.mkdirs() ? "Success": "Failed");
String path = mainpath + "photo_" + getPhotoTime() + ".jpg";
File captureFile = new File(path);
captureFile.createNewFile();
if (!captureFile.exists())
Log.d("CAPTURE_FILE_PATH", captureFile.createNewFile() ? "Success": "Failed");
FileOutputStream stream = new FileOutputStream(captureFile);
overlay.compress(Bitmap.CompressFormat.PNG, 100, stream);
stream.flush();
stream.close();
picture.recycle();
drawingCache.recycle();
mPreview.setDrawingCacheEnabled(false);
} catch (IOException e) {
e.printStackTrace();
}
}
private String getPhotoTime() {
DateFormat dateFormatter = new SimpleDateFormat("yyyyMMdd hhmmss");
dateFormatter.setLenient(false);
Date today = new Date();
String s = dateFormatter.format(today);
return s;
}
});
}
private Bitmap rotateImage(Bitmap bm, int i) {
Matrix matrix = new Matrix();
switch (i) {
case ExifInterface.ORIENTATION_NORMAL:
return bm;
case ExifInterface.ORIENTATION_FLIP_HORIZONTAL:
matrix.setScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_180:
matrix.setRotate(180);
break;
case ExifInterface.ORIENTATION_FLIP_VERTICAL:
matrix.setRotate(180);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_TRANSPOSE:
matrix.setRotate(90);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_90:
matrix.setRotate(90);
break;
case ExifInterface.ORIENTATION_TRANSVERSE:
matrix.setRotate(-90);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_270:
matrix.setRotate(-90);
break;
default:
return bm;
}
try {
Bitmap bmRotated = Bitmap.createBitmap(bm, 0, 0, bm.getWidth(), bm.getHeight(), matrix, true);
bm.recycle();
return bmRotated;
} catch (OutOfMemoryError e) {
e.printStackTrace();
return null;
}
}
}
CameraSourcePreview.java
package com.google.android.gms.samples.vision.face.facetracker.ui.camera;
import android.content.Context;
import android.content.res.Configuration;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.ViewGroup;
import com.google.android.gms.common.images.Size;
import com.google.android.gms.vision.CameraSource;
import java.io.IOException;
public class CameraSourcePreview extends ViewGroup {
private static final String TAG = "CameraSourcePreview";
private Context mContext;
private SurfaceView mSurfaceView;
private boolean mStartRequested;
private boolean mSurfaceAvailable;
private CameraSource mCameraSource;
private GraphicOverlay mOverlay;
public CameraSourcePreview(Context context, AttributeSet attrs) {
super(context, attrs);
mContext = context;
mStartRequested = false;
mSurfaceAvailable = false;
mSurfaceView = new SurfaceView(context);
mSurfaceView.getHolder().addCallback(new SurfaceCallback());
addView(mSurfaceView);
}
public void start(CameraSource cameraSource) throws IOException {
if (cameraSource == null) {
stop();
}
mCameraSource = cameraSource;
if (mCameraSource != null) {
mStartRequested = true;
startIfReady();
}
}
public void start(CameraSource cameraSource, GraphicOverlay overlay) throws IOException {
mOverlay = overlay;
start(cameraSource);
}
public void stop() {
if (mCameraSource != null) {
mCameraSource.stop();
}
}
public void release() {
if (mCameraSource != null) {
mCameraSource.release();
mCameraSource = null;
}
}
private void startIfReady() throws IOException {
if (mStartRequested && mSurfaceAvailable) {
mCameraSource.start(mSurfaceView.getHolder());
if (mOverlay != null) {
Size size = mCameraSource.getPreviewSize();
int min = Math.min(size.getWidth(), size.getHeight());
int max = Math.max(size.getWidth(), size.getHeight());
if (isPortraitMode()) {
// Swap width and height sizes when in portrait, since it will be rotated by
// 90 degrees
mOverlay.setCameraInfo(min, max, mCameraSource.getCameraFacing());
} else {
mOverlay.setCameraInfo(max, min, mCameraSource.getCameraFacing());
}
mOverlay.clear();
}
mStartRequested = false;
}
}
private class SurfaceCallback implements SurfaceHolder.Callback {
@Override
public void surfaceCreated(SurfaceHolder surface) {
mSurfaceAvailable = true;
try {
startIfReady();
} catch (IOException e) {
Log.e(TAG, "Could not start camera source.", e);
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surface) {
mSurfaceAvailable = false;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
int previewWidth = 320;
int previewHeight = 240;
if (mCameraSource != null) {
Size size = mCameraSource.getPreviewSize();
if (size != null) {
previewWidth = size.getWidth();
previewHeight = size.getHeight();
}
}
// Swap width and height sizes when in portrait, since it will be rotated 90 degrees
if (isPortraitMode()) {
int tmp = previewWidth;
previewWidth = previewHeight;
previewHeight = tmp;
}
final int viewWidth = right - left;
final int viewHeight = bottom - top;
int childWidth;
int childHeight;
int childXOffset = 0;
int childYOffset = 0;
float widthRatio = (float) viewWidth / (float) previewWidth;
float heightRatio = (float) viewHeight / (float) previewHeight;
// To fill the view with the camera preview, while also preserving the correct aspect ratio,
// it is usually necessary to slightly oversize the child and to crop off portions along one
// of the dimensions. We scale up based on the dimension requiring the most correction, and
// compute a crop offset for the other dimension.
if (widthRatio > heightRatio) {
childWidth = viewWidth;
childHeight = (int) ((float) previewHeight * widthRatio);
childYOffset = (childHeight - viewHeight) / 2;
} else {
childWidth = (int) ((float) previewWidth * heightRatio);
childHeight = viewHeight;
childXOffset = (childWidth - viewWidth) / 2;
}
for (int i = 0; i < getChildCount(); ++i) {
// One dimension will be cropped. We shift child over or up by this offset and adjust
// the size to maintain the proper aspect ratio.
getChildAt(i).layout(
-1 * childXOffset, -1 * childYOffset,
childWidth - childXOffset, childHeight - childYOffset);
}
try {
startIfReady();
} catch (IOException e) {
Log.e(TAG, "Could not start camera source.", e);
}
}
private boolean isPortraitMode() {
int orientation = mContext.getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
return false;
}
if (orientation == Configuration.ORIENTATION_PORTRAIT) {
return true;
}
Log.d(TAG, "isPortraitMode returning false by default");
return false;
}
}
GraphicOverlay.java
package com.google.android.gms.samples.vision.face.facetracker.ui.camera;
import android.content.Context;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.view.View;
import com.google.android.gms.vision.CameraSource;
import java.util.HashSet;
import java.util.Set;
public class GraphicOverlay extends View {
private final Object mLock = new Object();
private int mPreviewWidth;
private float mWidthScaleFactor = 1.0f;
private int mPreviewHeight;
private float mHeightScaleFactor = 1.0f;
private int mFacing = CameraSource.CAMERA_FACING_BACK;
private Set<Graphic> mGraphics = new HashSet<>();
public static abstract class Graphic {
private GraphicOverlay mOverlay;
public Graphic(GraphicOverlay overlay) {
mOverlay = overlay;
}
public abstract void draw(Canvas canvas);
public float scaleX(float horizontal) {
return horizontal * mOverlay.mWidthScaleFactor;
}
public float scaleY(float vertical) {
return vertical * mOverlay.mHeightScaleFactor;
}
public float translateX(float x) {
if (mOverlay.mFacing == CameraSource.CAMERA_FACING_FRONT) {
return mOverlay.getWidth() - scaleX(x);
} else {
return scaleX(x);
}
}
public float translateY(float y) {
return scaleY(y);
}
public void postInvalidate() {
mOverlay.postInvalidate();
}
}
public GraphicOverlay(Context context, AttributeSet attrs) {
super(context, attrs);
}
public void clear() {
synchronized (mLock) {
mGraphics.clear();
}
postInvalidate();
}
public void add(Graphic graphic) {
synchronized (mLock) {
mGraphics.add(graphic);
}
postInvalidate();
}
public void remove(Graphic graphic) {
synchronized (mLock) {
mGraphics.remove(graphic);
}
postInvalidate();
}
public void setCameraInfo(int previewWidth, int previewHeight, int facing) {
synchronized (mLock) {
mPreviewWidth = previewWidth;
mPreviewHeight = previewHeight;
mFacing = facing;
}
postInvalidate();
}
/**
* Draws the overlay with its associated graphic objects.
*/
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
synchronized (mLock) {
if ((mPreviewWidth != 0) && (mPreviewHeight != 0)) {
mWidthScaleFactor = (float) canvas.getWidth() / (float)mPreviewWidth;
mHeightScaleFactor = (float) canvas.getHeight() / (float) mPreviewHeight;
}
for (Graphic graphic : mGraphics) {
graphic.draw(canvas);
}
}
}
}
You should pass the captured image to the FaceDetector API and crop it afterwards.
fun getFace(context: Context, data: ByteArray): Bitmap? {
try {
val imageStrem = ByteArrayInputStream(data)
var bitmap = BitmapFactory.decodeStream(imageStrem)
if (bitmap.width > bitmap.height) {
val matrix = Matrix()
matrix.postRotate(270f)
if (bitmap.width > 1500) matrix.postScale(0.5f, 0.5f)
bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, true)
}
val faceDetector = FaceDetector.Builder(context).setProminentFaceOnly(true).setTrackingEnabled(false).build()
val frame = Frame.Builder().setBitmap(bitmap).build()
val faces = faceDetector.detect(frame)
var results: Bitmap? = null
for (i in 0 until faces.size()) {
val thisFace = faces.valueAt(i)
val x = thisFace.position.x
val y = thisFace.position.y
val x2 = x / 4 + thisFace.width
val y2 = y / 4 + thisFace.height
results = Bitmap.createBitmap(bitmap, x.toInt(), y.toInt(), x2.toInt(), y2.toInt())
}
return results
} catch (e: Exception) {
Log.e("GET_FACE", e.message)
}
return null
}
Source
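If you prefer to stay in Java, the same idea can be applied to the bitmap decoded in onPictureTaken. A rough sketch only; cropFace and the bounds handling are illustrative and not part of the sample:
// Rough Java equivalent of the Kotlin helper above: detect the most prominent
// face in the captured bitmap and crop around it. Names here are illustrative.
private Bitmap cropFace(Context context, Bitmap picture) {
FaceDetector faceDetector = new FaceDetector.Builder(context)
.setProminentFaceOnly(true)
.setTrackingEnabled(false)
.build();
if (!faceDetector.isOperational()) return null;
Frame frame = new Frame.Builder().setBitmap(picture).build();
SparseArray<Face> faces = faceDetector.detect(frame);
Bitmap result = null;
if (faces.size() > 0) {
Face face = faces.valueAt(0);
int x = Math.max(0, (int) face.getPosition().x);
int y = Math.max(0, (int) face.getPosition().y);
int w = Math.min((int) face.getWidth(), picture.getWidth() - x);
int h = Math.min((int) face.getHeight(), picture.getHeight() - y);
result = Bitmap.createBitmap(picture, x, y, w, h);
}
faceDetector.release();
return result;
}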

How to resize the bitmap image in this flood-fill algorithm code and custom view

I am new to programming and I am trying to implement a flood-fill algorithm to color a selected part of an image, as in a coloring-book app.
I found a link that provides full working (but slow) code for the algorithm, and it also provides QueueLinearFloodFiller, which is much faster.
The problem is that the custom MyView class is added to a RelativeLayout to show the bitmap that will be colored, but the image overflows the view, like this.
I want it to fill the view and take the view's size, as with wrap_content.
I have tried to resize it with this code:
dashBoard = (RelativeLayout) findViewById(R.id.dashBoard);
int width = RelativeLayout.LayoutParams.WRAP_CONTENT;
int hight = RelativeLayout.LayoutParams.WRAP_CONTENT;
myView.setLayoutParams(new RelativeLayout.LayoutParams(width,hight));
dashBoard.addView(myView);
but it didn't work.
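I also wondered whether I should scale the bitmap itself to the measured size of the view, something like this sketch inside MyView (just an idea, I have not verified it):
// Idea: once the view has been measured, scale mBitmap to the view size so
// onDraw fills the view instead of overflowing it.
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
super.onSizeChanged(w, h, oldw, oldh);
if (w > 0 && h > 0 && mBitmap != null) {
mBitmap = Bitmap.createScaledBitmap(mBitmap, w, h, true);
}
}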
Here is the full code and the XML:
public class UnicornColorActivity extends AppCompatActivity {
private RelativeLayout dashBoard;
private MyView myView;
public ImageView image;
Button b_red, b_blue, b_green, b_orange, b_clear;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
myView = new MyView(this);
setContentView(R.layout.activity_unicorn_color);
findViewById(R.id.dashBoard);
b_red = (Button) findViewById(R.id.b_red);
b_blue = (Button) findViewById(R.id.b_blue);
b_green = (Button) findViewById(R.id.b_green);
b_orange = (Button) findViewById(R.id.b_orange);
b_red.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
myView.changePaintColor(0xFFFF0000);
}
});
b_blue.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
myView.changePaintColor(0xFF0000FF);
}
});
b_green.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
myView.changePaintColor(0xFF00FF00);
}
});
b_orange.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
myView.changePaintColor(0xFFFF9900);
}
});
dashBoard = (RelativeLayout) findViewById(R.id.dashBoard);
int width = RelativeLayout.LayoutParams.WRAP_CONTENT;
int hight = RelativeLayout.LayoutParams.WRAP_CONTENT;
myView.setLayoutParams(new RelativeLayout.LayoutParams(width,hight));
dashBoard.addView(myView);
}
public class MyView extends View {
private Paint paint;
private Path path;
public Bitmap mBitmap;
public ProgressDialog pd;
final Point p1 = new Point();
public Canvas canvas;
//Bitmap mutableBitmap ;
public MyView(Context context) {
super(context);
this.paint = new Paint();
this.paint.setAntiAlias(true);
pd = new ProgressDialog(context);
this.paint.setStyle(Paint.Style.STROKE);
paint.setStrokeJoin(Paint.Join.ROUND);
paint.setStrokeWidth(5f);
mBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.unicorn_2).copy(Bitmap.Config.ARGB_8888, true);
//Bitmap.createScaledBitmap(mBitmap, 60, 60 , false);
this.path = new Path();
}
@Override
protected void onDraw(Canvas canvas) {
this.canvas = canvas;
this.paint.setColor(Color.RED);
canvas.drawBitmap(mBitmap, 0, 0, paint);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
float x = event.getX();
float y = event.getY();
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
p1.x = (int) x;
p1.y = (int) y;
final int sourceColor = mBitmap.getPixel((int) x, (int) y);
final int targetColor = paint.getColor();
new TheTask(mBitmap, p1, sourceColor, targetColor).execute();
invalidate();
}
return true;
}
public void clear() {
path.reset();
invalidate();
}
public int getCurrentPaintColor() {
return paint.getColor();
}
public void changePaintColor(int color){
this.paint.setColor(color);
}
class TheTask extends AsyncTask<Void, Integer, Void> {
Bitmap bmp;
Point pt;
int replacementColor, targetColor;
public TheTask(Bitmap bm, Point p, int sc, int tc) {
this.bmp = bm;
this.pt = p;
this.replacementColor = tc;
this.targetColor = sc;
pd.setMessage("Filling....");
pd.show();
}
@Override
protected void onPreExecute() {
pd.show();
}
@Override
protected void onProgressUpdate(Integer... values) {
}
@Override
protected Void doInBackground(Void... params) {
FloodFill f = new FloodFill();
f.floodFill(bmp, pt, targetColor, replacementColor);
return null;
}
@Override
protected void onPostExecute(Void result) {
pd.dismiss();
invalidate();
}
}
}
// flood fill
public class FloodFill {
public void floodFill(Bitmap image, Point node, int targetColor, int replacementColor) {
int width = image.getWidth();
int height = image.getHeight();
int target = targetColor;
int replacement = replacementColor;
if (target != replacement) {
Queue<Point> queue = new LinkedList<Point>();
do {
int x = node.x;
int y = node.y;
while (x > 0 && image.getPixel(x - 1, y) == target) {
x--;
}
boolean spanUp = false;
boolean spanDown = false;
while (x < width && image.getPixel(x, y) == target) {
image.setPixel(x, y, replacement);
if (!spanUp && y > 0 && image.getPixel(x, y - 1) == target) {
queue.add(new Point(x, y - 1));
spanUp = true;
} else if (spanUp && y > 0 && image.getPixel(x, y - 1) != target) {
spanUp = false;
}
if (!spanDown && y < height - 1 && image.getPixel(x, y + 1) == target) {
queue.add(new Point(x, y + 1));
spanDown = true;
} else if (spanDown && y < (height - 1) && image.getPixel(x, y + 1) != target) {
spanDown = false;
}
x++;
}
} while ((node = queue.poll()) != null);
}
}
}
}
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:id="#+id/drawingLayout"
android:layout_width="match_parent"
android:layout_height="match_parent"
xmlns:app="http://schemas.android.com/apk/res-auto"
>
<android.support.v7.widget.CardView
android:id="#+id/cardView"
android:layout_width="match_parent"
android:layout_height="542dp"
android:layout_gravity="center"
android:layout_marginTop="8dp"
android:layout_marginRight="8dp"
android:layout_marginLeft="8dp"
android:layout_marginBottom="8dp"
android:background="?android:attr/selectableItemBackground"
app:cardBackgroundColor="#FFFFFF"
app:cardCornerRadius="16dp"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent">
<RelativeLayout
android:id="#+id/dashBoard"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_above="#+id/b_red"
android:layout_alignParentLeft="true"
android:layout_alignParentRight="true"
android:layout_alignParentTop="true"
android:layout_marginBottom="10dp" >
</RelativeLayout>
</android.support.v7.widget.CardView>
<Button
android:id="#+id/b_red"
android:layout_width="65dp"
android:layout_height="40dp"
android:layout_alignParentBottom="true"
android:layout_alignParentLeft="true"
android:background="#FF0000" />
<Button
android:id="#+id/b_green"
android:layout_width="65dp"
android:layout_height="40dp"
android:layout_alignParentBottom="true"
android:layout_toRightOf="#+id/b_red"
android:background="#00FF00" />
<Button
android:id="#+id/b_blue"
android:layout_width="65dp"
android:layout_height="40dp"
android:layout_alignParentBottom="true"
android:layout_toRightOf="#+id/b_green"
android:background="#0000FF" />
<Button
android:id="#+id/b_orange"
android:layout_width="65dp"
android:layout_height="40dp"
android:layout_alignParentBottom="true"
android:layout_toRightOf="#+id/b_blue"
android:background="#FF9900" />
<Button
android:id="#+id/button5"
android:layout_width="60dp"
android:layout_height="40dp"
android:layout_alignParentBottom="true"
android:layout_alignParentRight="true"
android:text="Clear" />
Can someone please help me fix this?

Trying to add a touch event on my bitmap, but I get wrong values? Android Studio

I'm new on Stack Overflow! I joined because of a problem I have right now.
I'm working with Android Studio, where I'm trying to make a kind of game that needs a grid to start. At this point I'm trying to implement a touch event on some bitmaps; those bitmaps are the tiles of my game, but I'm a bit confused. In my view, where I place all of the tiles, I can get the coordinates (including height and width) of each tile, and I have checked those values. But when I apply the touch event (on ACTION_DOWN) I get the coordinates of the last tile of my grid. That's strange, because when I draw the grid on my canvas I still get the actual values (the real coordinates), but on touch they are different. I've been trying to figure this out with no result at all.
Can someone here tell me where I went wrong?
Sorry for my English.
PS: The grid can be dragged.
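I suspect it has something to do with the canvas being scaled and translated in onDraw, so maybe the touch coordinates have to be mapped back into the grid's space before the hit test, something like this (just a guess, using the fields MyView already has):
// Guess: in onTouchEvent, undo the canvas scale and translation before hit-testing.
// onDraw applies scale(escala) and then translate(destinyX - originX, destinyY - originY),
// so a screen point maps back to grid space like this:
float worldX = event.getX() / escala - transX;
float worldY = event.getY() / escala - transY;
// ...and then pass worldX/worldY (instead of event.getX()/getY()) to isCollition().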
MyView.java
package com.example.sammuzero.aplicamesta;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.Map;
/**
* Created by lezama on 24-07-2017.
*/
public class MyView extends SurfaceView {
Bitmap piso1;
Bitmap muro;
Bitmap queso;
SurfaceHolder aguanta;
GameLoopThread loopGame;
Terreno pintaTerreno;
Sprite mouse;
boolean scroll;
float destinyX = 0;
float destinyY = 0;
float originX = 0;
float originY = 0;
float transX = 0;
float transY = 0;
float escala = 1;
public MyView(Context context, AttributeSet attrs) {
super(context, attrs);
aguanta = getHolder();
loopGame = new GameLoopThread(this);
aguanta.addCallback(new SurfaceHolder.Callback(){
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
loopGame.setRunning(true);
loopGame.start();
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) {
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
boolean retry = true;
loopGame.setRunning(false);
while (retry) {
try {
loopGame.join();
retry = false;
} catch (InterruptedException e) {}
}
}
});
piso1 = BitmapFactory.decodeResource(getResources(), R.drawable.floor1);
muro = BitmapFactory.decodeResource(getResources(), R.drawable.pared);
queso = BitmapFactory.decodeResource(getResources(), R.drawable.kso);
pintaTerreno = new Terreno(0,0,new MapBit(piso1), new MapBit(muro));
}
public void onDraw(Canvas canvas) {
canvas.drawColor(Color.WHITE);
canvas.scale(escala,escala);
canvas.translate((destinyX - originX), (destinyY - originY));
transX = (destinyX - originX);
transY = (destinyY - originY);
for(int i = 0; i < pintaTerreno.getAncho(); i++) {
for (int j = 0; j < pintaTerreno.getAlto(); j ++){
dibujaCasilla(i,j, canvas);
Log.i("Probando X,Y", "X: " + pintaTerreno.getCasilla(i, j).getGrafico().getX()
+ " Y: " + pintaTerreno.getCasilla(i, j).getGrafico().getY());
}
}
canvas.scale(1.0f,1.0f);
//mouse.onDraw(canvas);
}
public void queTerrenoDibujo(Terreno e)
{
pintaTerreno = e;
}
public void setEscala(int escalaATransformar) {
float nuevaEscala;
nuevaEscala = (float) escalaATransformar / 100;
escala = nuevaEscala;
}
private void dibujaCasilla(int i, int j,Canvas canvas) {
// First check whether or not there is a wall there.
if (pintaTerreno.getCasilla(i,j).getRepresenta() == 'P') {
pintaTerreno.getCasilla(i,j).getGrafico().setX(64 * i);
pintaTerreno.getCasilla(i,j).getGrafico().setY(64 * j);
pintaTerreno.getCasilla(i,j).getGrafico().onDraw(canvas);
}
else {
pintaTerreno.getCasilla(i,j).getGrafico().setX(64 * i);
pintaTerreno.getCasilla(i,j).getGrafico().setY(64 * j);
pintaTerreno.getCasilla(i,j).getGrafico().onDraw(canvas);
if(pintaTerreno.getCasilla(i,j).getRepresenta() == 'Q')
canvas.drawBitmap(queso,(64 * i) + 16, (64 * j) + 16, null);
}
/*
MapBit nuevoBloque = null;
if (pintaTerreno.getCasilla(i,j).getRepresenta() == 'P') {
nuevoBloque = new MapBit((64 * i),(64 * j),muro);
nuevoBloque.onDraw(canvas);
}
else {
nuevoBloque = new MapBit((64 * i),(64 * j),piso1);
nuevoBloque.onDraw(canvas);
if(pintaTerreno.getCasilla(i,j).getRepresenta() == 'Q')
canvas.drawBitmap(queso,(64 * i) + 16, (64 * j) + 16, null);
}
*/
// Then handle the elements that are on that tile.
}
@Override
public boolean onTouchEvent(MotionEvent event) {
// TODO Auto-generated method stub
//return super.onTouchEvent(event);
int action = event.getAction();
if (action==MotionEvent.ACTION_MOVE){
destinyX = event.getX();
destinyY = event.getY();
}
else if (action==MotionEvent.ACTION_DOWN) {
Log.i("Probando X,Y en 0,0", "X: " + pintaTerreno.getCasilla(0, 0).getGrafico().getX()
+ " Y: " + pintaTerreno.getCasilla(0, 0).getGrafico().getY());
for (int i = 0; i < pintaTerreno.getAncho(); i++) {
for (int j = 0; j < pintaTerreno.getAlto(); j++) {
Log.i("Probando X,Y", "X: " + pintaTerreno.getCasilla(i, j).getGrafico().getX()
+ " Y: " + pintaTerreno.getCasilla(i, j).getGrafico().getY());
String msg = "X: " + pintaTerreno.getCasilla(i, j).getGrafico().getX() + " Y: " + pintaTerreno.getCasilla(i, j).getGrafico().getY();
if (pintaTerreno.getCasilla(i, j).getGrafico().isCollition(event.getX(), event.getY())) {
Toast.makeText(getContext(), msg, Toast.LENGTH_SHORT).show();
}
}
}
originX = event.getX() - transX;
originY = event.getY() - transY;
scroll = true;
}
else if (action==MotionEvent.ACTION_UP){
transX = (destinyX - originX);
transY = (destinyY - originY);
scroll = false;
}
return true;
}
}
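One detail worth noting: onDraw() above scales the canvas by escala and translates it by (destinyX - originX, destinyY - originY), while onTouchEvent() hit-tests with the raw screen coordinates from event.getX()/getY(). A minimal sketch (not from the original post) of mapping the touch point back into the drawn coordinate space before calling isCollition(), assuming the same fields transX, transY and escala declared in MyView.java:
// Sketch only: undo the canvas transform used in onDraw() (scale by `escala`,
// then translate by (transX, transY)) so the touch point can be compared
// against the tile positions set in dibujaCasilla().
private boolean tileContains(MapBit tile, MotionEvent event) {
    float worldX = event.getX() / escala - transX;   // invert the scale, then the translation
    float worldY = event.getY() / escala - transY;
    return tile.isCollition(worldX, worldY);
}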
Terreno.java (The grid)
package com.example.sammuzero.aplicamesta;
import java.util.ArrayList;
import java.util.Random;
/**
*
* @author estudiante
*/
public class Terreno {
private ArrayList<Object> todo = new ArrayList<>();
private Casilla[][] plataforma;
private int alto;
private int ancho;
public Terreno (int alt, int anch, MapBit nada, MapBit pared)
{
int i;
int j;
this.plataforma = new Casilla[alt][anch];
this.alto = alt;
this.ancho = anch;
for(i = 0; i < this.alto; i++)
for(j = 0; j < this.ancho; j++)
this.plataforma[i][j] = new Casilla(i,j, nada);
this.inicializa(pared);
}
// Getters //
public int getAlto(){return this.alto;}
public int getAncho(){return this.ancho;}
public Casilla getCasilla(int i, int j){return this.plataforma[i][j];}
public ArrayList<Object> getTodo(){return this.todo;}
// Setters //
public void setEnCasilla(int x, int y, Object o, char r){this.plataforma[x][y].setObjeto(o,r);}
// Methods //
public void inicializa(MapBit pared)
{
int cuantos = ((this.alto*this.ancho) * 10) / 100;
Random rand = new Random();
int llevo = 0;
int sel = 0;
for(int i = 0; i < this.alto ; i++)
for(int j = 0 ; j < this.ancho; j++)
{
sel = 1+rand.nextInt(10);
if(sel == 1 && llevo < cuantos)
{
Pared nueva = new Pared(this.plataforma[i][j]);
this.plataforma[i][j].setGrafico(pared);
todo.add(nueva);
llevo ++;
}
else if (sel == 10) {
Queso nuevo = new Queso(this.plataforma[i][j]);
todo.add(nuevo);
}
}
}
}
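Note that the constructor above stores the very same MapBit instance (nada) in every Casilla, and dibujaCasilla() in MyView later calls setX()/setY() on that shared instance, so after one draw pass every floor tile reports the position of the last tile drawn (the pared MapBit passed to inicializa() is shared the same way). A minimal sketch (not from the original post) of giving each tile its own MapBit, using the existing MapBit(Bitmap) constructor and getBitmap() accessor:
// Sketch only: one MapBit per Casilla, so setX()/setY() on one tile's graphic
// no longer moves every other tile.
for (i = 0; i < this.alto; i++)
    for (j = 0; j < this.ancho; j++)
        this.plataforma[i][j] = new Casilla(i, j, new MapBit(nada.getBitmap()));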
Casilla.java (the tile)
package com.example.sammuzero.aplicamesta;
import android.support.design.widget.Snackbar;
/**
* Created by lezama on 28-07-2017.
*/
public class Casilla {
private int posX;
private int posY;
private Object contenido;
private char representa;
private MapBit grafico;
public Casilla(int i, int j, MapBit graf)
{
this.posX = i;
this.posY = j;
this.representa = '0';
contenido = null;
this.grafico = graf;
}
public int getPosX() {return this.posX;}
public int getPosY() {return this.posY;}
public char getRepresenta() {return this.representa;}
public MapBit getGrafico() {return this.grafico;}
public void setObjeto(Object item , char rep)
{
this.contenido = item;
this.representa = rep;
}
public void setGrafico (MapBit grafico1) {this.grafico = grafico1;}
/*
public void onTouch()
{
Toast.makeText(this.getContext(), "X:"+this.posX+" Y:"+this.posY, Toast.LENGTH_SHORT).show();
} */
}
MapBit.java (where my Bitmap resides)
package com.example.sammuzero.aplicamesta;
import android.graphics.Bitmap;
import android.graphics.Canvas;
/**
* TI, MapBit!
* Created by Samuzero15 on 28-07-2017.
*/
public class MapBit {
private float x;
private float y;
private int height;
private int width;
private Bitmap bmp;
public MapBit(Bitmap pmb)
{
this.bmp = pmb;
this.height = this.bmp.getHeight();
this.width = this.bmp.getWidth();
}
// Getters and setters
public float getX(){return this.x;}
public float getY(){return this.y;}
public int getHeight(){return this.height;}
public int getWidth(){return this.width;}
public Bitmap getBitmap(){return this.bmp;}
public void setX(float newX){this.x = newX;}
public void setY(float newY){this.y = newY;}
public void onDraw(Canvas canvas)
{
canvas.drawBitmap(this.bmp,x,y,null);
}
public boolean isCollition(float x2, float y2) {
return x2 > this.x && x2 < this.x + this.width && y2 > this.y && y2 < this.y + this.height;
}
}
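The commented-out block in dibujaCasilla() calls a MapBit(float, float, Bitmap) constructor that this listing does not declare. If that overload is wanted, a possible sketch (not from the original post) is:
// Sketch of the three-argument overload referenced by the commented-out code;
// it reuses the Bitmap constructor above and then sets the position.
public MapBit(float x, float y, Bitmap pmb) {
    this(pmb);
    this.x = x;
    this.y = y;
}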
activity_main.xml (The main view for landscape)
<?xml version="1.0" encoding="utf-8"?>
<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_gravity="center"
tools:context="com.example.sammuzero.aplicamesta.MainActivity">
<RelativeLayout
android:layout_width="304dp"
android:layout_height="439dp"
tools:layout_editor_absoluteX="8dp"
tools:layout_editor_absoluteY="8dp">
<LinearLayout
android:layout_width="match_parent"
android:layout_height="match_parent"
android:gravity="center"
android:orientation="vertical"
android:weightSum="1"
tools:layout_editor_absoluteX="8dp"
tools:layout_editor_absoluteY="8dp">
<view
android:id="@+id/game"
class="com.example.sammuzero.aplicamesta.MyView"
android:layout_width="match_parent"
android:layout_height="356dp"
android:gravity="center" />
<SeekBar
android:id="#+id/zoom"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_gravity="center_vertical|center"
android:max="150"
android:min="25"
android:progress="100" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
tools:layout_editor_absoluteX="8dp"
tools:layout_editor_absoluteY="8dp">
<Button
android:id="#+id/exit"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_weight="1"
android:text="Salir" />
<Button
android:id="#+id/gatos"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_weight="1"
android:text="Gatos" />
<Button
android:id="#+id/reinicio"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_weight="1"
android:text="Repetir" />
<ImageButton
android:id="#+id/nextTurn"
android:layout_width="wrap_content"
android:layout_height="match_parent"
app:srcCompat="#android:drawable/ic_media_play" />
</LinearLayout>
</LinearLayout>
</RelativeLayout>
The main activity in Java:
public class MainActivity extends AppCompatActivity implements View.OnClickListener {
MyView elJuego;
Button salida;
Button reset;
Button nextTurn;
SeekBar escal;
Bitmap piso1;
Bitmap muro;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
salida = (Button) findViewById(R.id.exit);
reset = (Button) findViewById(R.id.reinicio);
elJuego = (MyView) findViewById(R.id.game);
escal = (SeekBar) findViewById(R.id.zoom);
salida.setOnClickListener(this);
reset.setOnClickListener(this);
escal.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
int progressChanged = 0;
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser){
progressChanged = progress;
elJuego.setEscala(progressChanged);
}
public void onStartTrackingTouch(SeekBar seekBar) {}
public void onStopTrackingTouch(SeekBar seekBar) {}
});
piso1 = BitmapFactory.decodeResource(getResources(), R.drawable.floor1);
muro = BitmapFactory.decodeResource(getResources(), R.drawable.pared);
}
@Override
public void onClick(View v)
{
switch(v.getId())
{
case R.id.exit:
finish();
System.exit(0);
break;
case R.id.reinicio:
elJuego.invalidate();
Terreno nuevo = new Terreno(2,2,new MapBit(piso1) , new MapBit(muro));
elJuego.queTerrenoDibujo(nuevo);
default: break;
}
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
}
}
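One note on the layout above (not from the original post): android:min on a SeekBar is only honored from API 26, so on older devices progress can reach 0 and setEscala(0) would leave escala at 0. A small defensive sketch inside the existing listener, assuming the 25..150 range declared in activity_main.xml:
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
    // Clamp to the intended minimum so the scale never becomes 0 on pre-API-26 devices.
    int clamped = Math.max(25, progress);
    elJuego.setEscala(clamped);
}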

Android failed to connect to camera service

I am trying to build a simple app that launches the camera, allows me to take a picture, and saves it to the device, but I keep getting this error:
java.lang.RuntimeException: Fail to connect to camera service
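This exception is commonly thrown when the app does not hold the CAMERA permission (it must be declared in the manifest, and on Android 6.0+ it must also be granted at runtime) or when another process already holds the camera. A minimal sketch (not from the original question) of a runtime check before opening the camera, assuming the v4 support library classes ContextCompat and ActivityCompat are available:
// Manifest entry (outside <application>): <uses-permission android:name="android.permission.CAMERA" />
// Imports assumed: android.Manifest, android.content.pm.PackageManager,
// android.support.v4.app.ActivityCompat, android.support.v4.content.ContextCompat.
private static final int REQUEST_CAMERA = 1;  // arbitrary request code

private void openCameraIfAllowed() {
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            == PackageManager.PERMISSION_GRANTED) {
        getCamera(CameraInfo.CAMERA_FACING_BACK);   // existing helper from the code below
    } else {
        ActivityCompat.requestPermissions(this,
                new String[] { Manifest.permission.CAMERA }, REQUEST_CAMERA);
    }
}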
MainActivity
public class MainActivity extends AppCompatActivity
{
private static final String TAG = MainActivity.class.getName();
Preview preview;
private Camera camera;
private int cameraId;
@Override
protected void onCreate(Bundle savedInstanceState)
{
final Camera.PictureCallback jpegCallback;
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_main);
preview = new Preview(this, (SurfaceView)findViewById(R.id.surfaceView));
preview.setLayoutParams(new ActionBar.LayoutParams(ActionBar.LayoutParams.MATCH_PARENT, ActionBar.LayoutParams.MATCH_PARENT));
((FrameLayout) findViewById(R.id.activity_main)).addView(preview);
preview.setKeepScreenOn(true);
jpegCallback = new Camera.PictureCallback()
{
public void onPictureTaken(final byte[] data,
final Camera camera)
{
new SaveImageTask().execute(data);
resetCam();
Log.d(TAG, "onPictureTaken - jpeg");
}
};
preview.setOnClickListener(new View.OnClickListener()
{
@Override
public void onClick(View view)
{
if(camera != null)
{
camera.takePicture(null,
// ShutterCallback shutter
null,
// PictureCallback raw,
null,
// PictureCallback postview,
jpegCallback); // PictureCallback jpeg);
}
}
});
}
@Override
protected void onResume()
{
super.onResume();
getCamera(CameraInfo.CAMERA_FACING_BACK);
if(camera != null)
{
camera.startPreview();
preview.setCamera(camera);
}
}
@Override
protected void onPause()
{
if(camera != null)
{
camera.stopPreview();
preview.setCamera(null);
camera.release();
camera = null;
}
super.onPause();
}
private void getCamera(final int desiredCamera)
{
if(!getPackageManager()
.hasSystemFeature(PackageManager.FEATURE_CAMERA))
{
Toast.makeText(this, "No camera on this device", Toast.LENGTH_LONG)
.show();
}
else
{
cameraId = findCamera(desiredCamera);
if(cameraId < 0)
{
Toast.makeText(this, "Camera no found.",
Toast.LENGTH_LONG).show();
}
else
{
Log.d(TAG, Integer.toString(cameraId));
camera = Camera.open(cameraId);
camera.setDisplayOrientation(90);
}
}
}
private int findCamera(final int desiredCamera)
{
final int numberOfCameras;
int cameraId;
numberOfCameras = Camera.getNumberOfCameras();
cameraId = -1;
for(int i = 0; i < numberOfCameras; i++)
{
final CameraInfo info;
info = new CameraInfo();
Camera.getCameraInfo(i, info);
if(info.facing == desiredCamera)
{
Log.d(TAG, "Camera found");
cameraId = i;
break;
}
}
return (cameraId);
}
private void resetCam()
{
camera.startPreview();
preview.setCamera(camera);
}
private void refreshGallery(final File file)
{
final Intent mediaScanIntent;
mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
mediaScanIntent.setData(Uri.fromFile(file));
sendBroadcast(mediaScanIntent);
}
private class SaveImageTask
extends AsyncTask<byte[], Void, Void>
{
@Override
protected Void doInBackground(final byte[]... data)
{
FileOutputStream outStream;
final File sdCard;
final File dir;
final String fileName;
final File outFile;
sdCard = Environment.getExternalStorageDirectory();
dir = new File (sdCard.getAbsolutePath() + "/camtest");
dir.mkdirs();
fileName = String.format("%d.jpg", System.currentTimeMillis());
outFile = new File(dir, fileName);
outStream = null;
try
{
outStream = new FileOutputStream(outFile);
outStream.write(data[0]);
outStream.flush();
Log.d(TAG, "onPictureTaken - wrote bytes: " + data.length + " to " + outFile.getAbsolutePath());
refreshGallery(outFile);
}
catch(final FileNotFoundException ex)
{
Log.e(TAG, "File not found", ex);
}
catch(final IOException ex)
{
Log.e(TAG, "IOException", ex);
}
finally
{
try
{
if(outStream != null)
{
outStream.close();
}
}
catch(final IOException ex)
{
Log.e(TAG, "IOException", ex);
}
}
return null;
}
}
}
Preview
class Preview
extends ViewGroup
implements Callback
{
private final String TAG = Preview.class.getName();
private SurfaceView surfaceView;
private SurfaceHolder holder;
private Camera.Size previewSize;
private List<Camera.Size> supportedPreviewSizes;
private Camera camera;
Preview(final Context context,
final SurfaceView sv)
{
super(context);
surfaceView = sv;
holder = surfaceView.getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void setCamera(Camera camera)
{
this.camera = camera;
if(this.camera != null)
{
final Camera.Parameters params;
final List<String> focusModes;
supportedPreviewSizes = this.camera.getParameters().getSupportedPreviewSizes();
requestLayout();
params = this.camera.getParameters();
focusModes = params.getSupportedFocusModes();
if(focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
this.camera.setParameters(params);
}
}
}
@Override
protected void onMeasure(final int widthMeasureSpec,
final int heightMeasureSpec)
{
// We purposely disregard child measurements because we act as a
// wrapper to a SurfaceView that centers the camera preview instead
// of stretching it.
final int width;
final int height;
width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec);
height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
setMeasuredDimension(width, height);
if(supportedPreviewSizes != null)
{
previewSize = getOptimalPreviewSize(supportedPreviewSizes, width, height);
}
}
@Override
protected void onLayout(boolean changed,
int l,
int t,
int r,
int b)
{
if (changed && getChildCount() > 0)
{
final View child = getChildAt(0);
final int width = r - l;
final int height = b - t;
int previewWidth = width;
int previewHeight = height;
if (previewSize != null)
{
previewWidth = previewSize.width;
previewHeight = previewSize.height;
}
// Center the child SurfaceView within the parent.
if (width * previewHeight > height * previewWidth)
{
final int scaledChildWidth = previewWidth * height / previewHeight;
child.layout((width - scaledChildWidth) / 2, 0,
(width + scaledChildWidth) / 2, height);
}
else
{
final int scaledChildHeight = previewHeight * width / previewWidth;
child.layout(0, (height - scaledChildHeight) / 2,
width, (height + scaledChildHeight) / 2);
}
}
}
public void surfaceCreated(SurfaceHolder holder)
{
// The Surface has been created, acquire the camera and tell it where
// to draw.
try
{
if(camera != null)
{
camera.setPreviewDisplay(holder);
}
}
catch (IOException exception)
{
Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
}
}
public void surfaceDestroyed(SurfaceHolder holder)
{
// Surface will be destroyed when we return, so stop the preview.
if(camera != null)
{
camera.stopPreview();
}
}
private Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes,
int w,
int h)
{
final double ASPECT_TOLERANCE = 0.1;
double targetRatio = (double) w / h;
if (sizes == null) return null;
Camera.Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
// Try to find a size that matches the target aspect ratio and size
for (Camera.Size size : sizes)
{
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
// Cannot find a size that matches the aspect ratio, so ignore the requirement
if (optimalSize == null)
{
minDiff = Double.MAX_VALUE;
for (Camera.Size size : sizes)
{
if (Math.abs(size.height - targetHeight) < minDiff)
{
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
public void surfaceChanged(SurfaceHolder holder,
int format,
int width,
int height)
{
if(camera != null)
{
Camera.Parameters parameters = camera.getParameters();
parameters.setPreviewSize(previewSize.width, previewSize.height);
requestLayout();
camera.setParameters(parameters);
camera.startPreview();
}
}
}
