I am making a custom camera with face detection, which works successfully.
But I want to add stickers to the recorded/previewed face.
The location of the eyes is used to properly size and place a hat, glasses, a tie, etc. on the preview.
Here is the face detection code, using a FaceOverlayView:
public class FaceOverlayView extends View {
private Paint mPaint;
private Paint mTextPaint;
private int mDisplayOrientation;
private int mOrientation;
private Face[] mFaces;
public FaceOverlayView(Context context) {
super(context);
initialize();
}
private void initialize() {
// We want a green box around the face:
mPaint = new Paint();
mPaint.setAntiAlias(true);
mPaint.setDither(true);
mPaint.setColor(Color.GREEN);
mPaint.setAlpha(128);
mPaint.setStyle(Paint.Style.FILL_AND_STROKE);
mTextPaint = new Paint();
mTextPaint.setAntiAlias(true);
mTextPaint.setDither(true);
mTextPaint.setTextSize(20);
mTextPaint.setColor(Color.GREEN);
mTextPaint.setStyle(Paint.Style.FILL);
}
public void setFaces(Face[] faces) {
mFaces = faces;
invalidate();
}
public void setOrientation(int orientation) {
mOrientation = orientation;
}
public void setDisplayOrientation(int displayOrientation) {
mDisplayOrientation = displayOrientation;
invalidate();
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
if (mFaces != null && mFaces.length > 0) {
Matrix matrix = new Matrix();
Util.prepareMatrix(matrix, false, mDisplayOrientation, getWidth(), getHeight());
canvas.save();
matrix.postRotate(mOrientation);
canvas.rotate(-mOrientation);
RectF rectF = new RectF();
for (Face face : mFaces) {
rectF.set(face.rect);
matrix.mapRect(rectF);
canvas.drawRect(rectF, mPaint);
canvas.drawText("Score " + face.score, rectF.right, rectF.top, mTextPaint);
}
canvas.restore();
}
}
}
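For reference, the overlay above only draws whatever it is handed; it is assumed to be fed from the legacy android.hardware.Camera face detection callback, roughly like this sketch (mCamera and mFaceOverlayView are placeholder names, not part of the code above):
// Hypothetical wiring for the overlay above; not part of FaceOverlayView itself.
mCamera.setFaceDetectionListener(new Camera.FaceDetectionListener() {
    @Override
    public void onFaceDetection(Camera.Face[] faces, Camera camera) {
        // Face.rect arrives in the driver's (-1000,-1000)..(1000,1000) space;
        // Util.prepareMatrix() in onDraw() maps it into view pixels.
        mFaceOverlayView.setFaces(faces);
    }
});
mCamera.startFaceDetection(); // call after startPreview()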
I want to add a hat and sunglasses to the preview, similar to the Face28 app in the Play Store (see its listing for more info).
I am using the MoodMeSDK to detect the eyes and mouth.
The result is 66 points:
I want to put sunglasses, caps, lips, etc. on the face. The Face28 APK uses SVG files for the face stickers.
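Roughly, this is the kind of placement I am trying to achieve, e.g. for sunglasses. It is only a minimal sketch using plain Canvas drawing, not the MoodMeSDK or Face28 API; leftEye, rightEye and glassesBitmap are placeholders, with the eye points already mapped into view coordinates:
// Sketch: size, position and tilt a sunglasses bitmap from two eye landmarks.
void drawGlasses(Canvas canvas, PointF leftEye, PointF rightEye, Bitmap glassesBitmap) {
    float eyeDistance = (float) Math.hypot(rightEye.x - leftEye.x, rightEye.y - leftEye.y);
    float width = eyeDistance * 2.2f; // a bit wider than the eye span
    float height = width * glassesBitmap.getHeight() / glassesBitmap.getWidth();
    float centerX = (leftEye.x + rightEye.x) / 2f;
    float centerY = (leftEye.y + rightEye.y) / 2f;
    RectF dst = new RectF(centerX - width / 2f, centerY - height / 2f,
            centerX + width / 2f, centerY + height / 2f);
    float angle = (float) Math.toDegrees(Math.atan2(rightEye.y - leftEye.y, rightEye.x - leftEye.x));
    canvas.save();
    canvas.rotate(angle, centerX, centerY); // follow the head tilt
    canvas.drawBitmap(glassesBitmap, null, dst, null);
    canvas.restore();
}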
I have done something almost similar to this before, so here's how you need to do it: first, locate the points relative to the View's rectangle. For example, if you want to place a hat, first pinpoint the head and the hat's rectangle relative to the CameraView, then place the hat at those coordinates. This is the easy part. The hard part is saving the image. For that you need to store the width and height of the CameraView, the stickers on it, and their locations on the CameraView. Then you capture the image and get a Bitmap/Drawable. The produced bitmap will most likely have a different size than the CameraView, so you need to recalculate the sticker coordinates on this bitmap based on its width/height relative to the CameraView, merge the stickers at the new coordinates, and then save it. It is not easy, but it is possible, as I have done it.
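As a minimal sketch of that recalculation step (all names are placeholders; it assumes the sticker rectangle was recorded in CameraView coordinates):
// Scale a sticker rectangle from CameraView space into the captured bitmap's space
// and merge the sticker there before saving.
Bitmap mergeSticker(Bitmap photo, Bitmap sticker, RectF stickerRectInView,
                    int cameraViewWidth, int cameraViewHeight) {
    float scaleX = (float) photo.getWidth() / cameraViewWidth;
    float scaleY = (float) photo.getHeight() / cameraViewHeight;
    RectF dst = new RectF(stickerRectInView.left * scaleX, stickerRectInView.top * scaleY,
            stickerRectInView.right * scaleX, stickerRectInView.bottom * scaleY);
    Bitmap result = photo.copy(Bitmap.Config.ARGB_8888, true); // mutable copy to draw on
    Canvas canvas = new Canvas(result);
    canvas.drawBitmap(sticker, null, dst, null);
    return result;
}
My full implementation below also has to handle rotation and the user scaling the sticker, which is why the real code is longer.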
Here's my code (in my case the sticker was placed in the center of the picture):
/**
* Created by Mohammad Erfan Molaei on 9/26/16.
*/
public class CaptureActivity extends AppCompatActivity implements
ActivityCompat.OnRequestPermissionsResultCallback {
private static final String TAG = "CaptureActivity";
private FloatingActionButton takePicture;
private int mCurrentFlash;
private CameraView mCameraView;
private int cameraWidth;
private int cameraHeight;
private int drawableWidth;
private int drawableHeight;
private Handler mBackgroundHandler;
private boolean selectedBrand;
@Override
public void setTheme(int resId) {
selectedBrand = getSharedPreferences(getString(R.string.brand_pref), MODE_PRIVATE)
.getBoolean(getString(R.string.selected_brand), true);
super.setTheme(selectedBrand ? R.style.AppTheme_CaptureTheme : R.style.AppTheme_CaptureTheme2);
}
private String itemID = null;
private View.OnClickListener mOnClickListener = new View.OnClickListener() {
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.take_picture:
if (mCameraView != null) {
mCameraView.takePicture();
takePicture.setEnabled(false);
}
break;
case R.id.scale_up_btn:
scaleUpImage();
break;
case R.id.scale_down_btn:
scaleDownImage();
break;
}
}
};
private void scaleUpImage() {
if (mCameraView != null) {
SizeAwareImageView imageView = (SizeAwareImageView) mCameraView.findViewById(R.id.mImageView);
/*Log.e(TAG, "scaleDownImage: oldWidth: " + imageView.getLayoutParams().width +
", oldHeight: " + imageView.getLayoutParams().height);
Log.e(TAG, "scaleDownImage: newWidth2B: " + (imageView.getLayoutParams().width * 1.1f) +
", newHeight2B: " + ((1.1f * imageView.getLayoutParams().width) *
imageView.getLayoutParams().height /
imageView.getLayoutParams().width));
Log.e(TAG, "cameraWidth: " + mCameraView.getLayoutParams().width );
sdasd*/
if (imageView.getWidth() * 1.1f > mCameraView.getWidth() ||
((1.1f * imageView.getWidth()) *
imageView.getHeight() /
imageView.getWidth()) > mCameraView.getHeight())
return;
imageView.getLayoutParams().height = (int) ((1.1f * imageView.getWidth()) *
imageView.getHeight() /
imageView.getWidth());
imageView.getLayoutParams().width = (int) (imageView.getWidth() * 1.1f);
imageView.setScaleType(ImageView.ScaleType.FIT_CENTER);
imageView.requestLayout();
/*drawableWidth = dp2px(imageView.getWidth());
drawableHeight = dp2px(imageView.getHeight());*/
}
}
private void scaleDownImage() {
if (mCameraView != null) {
SizeAwareImageView imageView = (SizeAwareImageView) mCameraView.findViewById(R.id.mImageView);
if (imageView.getWidth() * 0.9f > mCameraView.getWidth() ||
((0.9f * imageView.getWidth()) *
imageView.getHeight() /
imageView.getWidth()) > mCameraView.getHeight())
return;
imageView.getLayoutParams().height = (int) ((0.9f * imageView.getWidth()) *
imageView.getHeight() /
imageView.getWidth());
imageView.getLayoutParams().width = (int) (imageView.getWidth() * 0.9f);
imageView.setScaleType(ImageView.ScaleType.FIT_CENTER);
imageView.requestLayout();
/*drawableWidth = dp2px(imageView.getWidth());
drawableHeight = dp2px(imageView.getHeight());*/
}
}
private void rotateImage() {
if (mCameraView != null) {
SizeAwareImageView imageView = (SizeAwareImageView) mCameraView.findViewById(R.id.mImageView);
/*Drawable mDrawable = imageView.getDrawable();
int mDrawableWidth = mDrawable.getBounds().width();
int mDrawableHeight = mDrawable.getBounds().height();*/
int newWidth = imageView.getHeight();
int newHeight = imageView.getWidth();
float scaleFactor = 1;
/*Log.e(TAG, "rotateImage: prevWidth: " + newHeight + ", prevHeight: " + newWidth);
Log.e(TAG, "rotateImage: cameraWidth: " + mCameraView.getWidth() );*/
if (newWidth > mCameraView.getWidth() ) {
scaleFactor = (float)newWidth / (float)mCameraView.getWidth();
newWidth = mCameraView.getWidth();
newHeight *= scaleFactor;
} else if (newHeight > mCameraView.getHeight() ) {
scaleFactor = (float)newHeight / (float)mCameraView.getHeight();
newHeight = mCameraView.getHeight();
newWidth *= scaleFactor;
}
Log.e(TAG, "rotateImage: scaleFactor: " + scaleFactor);
imageView.setRotation(imageView.getRotation() + 90);
imageView.getLayoutParams().height = newHeight;
imageView.getLayoutParams().width = newWidth;
imageView.setScaleType(ImageView.ScaleType.FIT_CENTER);
imageView.requestLayout();
/*drawableWidth = dp2px(imageView.getWidth());
drawableHeight = dp2px(imageView.getHeight());*/
//imageView.setImageDrawable(getRotatedDrawable(imageView));
/*Bitmap bitmapOrg = drawableToBitmap(imageView.getDrawable());
// createa matrix for the manipulation
Matrix matrix = imageView.getImageMatrix();
int width = bitmapOrg.getWidth();
int height = bitmapOrg.getHeight();
// rotate the Bitmap
matrix.postRotate(90);
// recreate the new Bitmap
Bitmap resizedBitmap = Bitmap.createBitmap(bitmapOrg, 0, 0,
width, height, matrix, true);
// make a Drawable from Bitmap to allow to set the BitMap
// to the ImageView, ImageButton or what ever
BitmapDrawable bmd = new BitmapDrawable(getResources(), resizedBitmap);
// set the Drawable on the ImageView
imageView.setImageDrawable(bmd);*/
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_capture);
mCameraView = (CameraView) findViewById(R.id.camera);
if (mCameraView != null) {
mCameraView.addCallback(mCallback);
}
takePicture = (FloatingActionButton) findViewById(R.id.take_picture);
if (takePicture != null) {
takePicture.setOnClickListener(mOnClickListener);
}
/*if (selectedBrand) {
assert takePicture != null;
takePicture.setBackgroundColor(ContextCompat.getColor(getBaseContext(),R.color.colorAccent));
findViewById(R.id.control).setBackgroundColor(ContextCompat.getColor(getBaseContext(),R.color.colorPrimary));
} else {
assert takePicture != null;
takePicture.setBackgroundColor(ContextCompat.getColor(getBaseContext(),R.color.colorAccent2));
findViewById(R.id.control).setBackgroundColor(ContextCompat.getColor(getBaseContext(),R.color.colorPrimary2));
}*/
findViewById(R.id.scale_up_btn).setOnClickListener(mOnClickListener);
findViewById(R.id.scale_down_btn).setOnClickListener(mOnClickListener);
Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
ActionBar actionBar = getSupportActionBar();
if (actionBar != null) {
actionBar.setDisplayShowTitleEnabled(false);
}
if (savedInstanceState == null) {
Bundle extras = getIntent().getExtras();
if(extras != null) {
itemID = extras.getString("id", null);
}
} else {
itemID = (String) savedInstanceState.getSerializable("id");
}
if( itemID != null ) {
new AsyncImageLoader().execute(itemID);
} else {
this.finish();
return;
}
ViewTreeObserver viewTreeObserver = mCameraView.getViewTreeObserver();
if (viewTreeObserver.isAlive()) {
viewTreeObserver.addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
@Override
public void onGlobalLayout() {
/*if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
mCameraView.getViewTreeObserver().removeGlobalOnLayoutListener(this);
} else {
mCameraView.getViewTreeObserver().removeOnGlobalLayoutListener(this);
}*/
cameraWidth = dp2px(mCameraView.getWidth());
cameraHeight = dp2px(mCameraView.getHeight());
Log.e("camB4Action", "" + cameraWidth + ", " + cameraHeight);
}
});
}
}
@Override
protected void onResume() {
super.onResume();
mCameraView.start();
}
@Override
protected void onPause() {
mCameraView.stop();
super.onPause();
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mBackgroundHandler != null) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
mBackgroundHandler.getLooper().quitSafely();
} else {
mBackgroundHandler.getLooper().quit();
}
mBackgroundHandler = null;
}
}
private Drawable getFlippedDrawable(final Drawable d) {
final Drawable[] arD = { d };
return new LayerDrawable(arD) {
@Override
public void draw(final Canvas canvas) {
canvas.save();
canvas.scale(-1, 1, d.getBounds().width() / 2, d.getBounds().height() / 2);
super.draw(canvas);
canvas.restore();
}
};
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.camera, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.switch_flash:
if (mCameraView != null) {
mCurrentFlash = (mCurrentFlash + 1) % FLASH_OPTIONS.length;
item.setTitle(FLASH_TITLES[mCurrentFlash]);
item.setIcon(FLASH_ICONS[mCurrentFlash]);
mCameraView.setFlash(FLASH_OPTIONS[mCurrentFlash]);
}
break;
case R.id.switch_camera:
if (mCameraView != null) {
int facing = mCameraView.getFacing();
mCameraView.setFacing(facing == CameraView.FACING_FRONT ?
CameraView.FACING_BACK : CameraView.FACING_FRONT);
}
break;
case R.id.mirror_image:
if (mCameraView != null) {
SizeAwareImageView imageView = (SizeAwareImageView) mCameraView.findViewById(R.id.mImageView);
imageView.setImageDrawable(getFlippedDrawable(imageView.getDrawable()));
imageView.requestLayout();
}
break;
case R.id.rotate_image:
if (mCameraView != null) {
rotateImage();
}
break;
}
return false;
}
private Handler getBackgroundHandler() {
if (mBackgroundHandler == null) {
HandlerThread thread = new HandlerThread("background");
thread.setPriority(Thread.MAX_PRIORITY);
thread.start();
mBackgroundHandler = new Handler(thread.getLooper());
}
return mBackgroundHandler;
}
public static Bitmap scaleBitmap(Bitmap bitmap, int wantedWidth, int wantedHeight, float rotation) {
Log.e(TAG, "scaleBitmap: bitmapWidth: " + bitmap.getWidth() + ", bitmapHeight: " + bitmap.getHeight() );
Log.e(TAG, "scaleBitmap: wantedWidth: " +
((rotation % 180 == 90) ? wantedHeight : wantedWidth) +
", wantedHeight: " + ((rotation % 180 == 90) ? wantedWidth : wantedHeight) );
Bitmap output = Bitmap.createBitmap(
(rotation % 180 == 90) ? wantedHeight : wantedWidth,
(rotation % 180 == 90) ? wantedWidth : wantedHeight, Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(output);
Matrix m = new Matrix();
m.setScale((float)
((rotation % 180 == 90) ? wantedHeight : wantedWidth) / bitmap.getWidth(),
(float) ((rotation % 180 == 90) ? wantedWidth : wantedHeight) / bitmap.getHeight());
canvas.drawBitmap(bitmap, m, new Paint());
return output;
/*Matrix m = new Matrix();
m.setRectToRect(new RectF(0, 0, b.getWidth(), b.getHeight()),
new RectF(0, 0, (rotation % 180 == 90) ? wantedHeight : wantedWidth,
(rotation % 180 == 90) ? wantedWidth : wantedHeight), Matrix.ScaleToFit.CENTER);
return Bitmap.createBitmap(b, 0, 0, b.getWidth(), b.getHeight(), m, true);*/
}
private CameraView.Callback mCallback
= new CameraView.Callback() {
ProgressiveToast progressiveToast;
@Override
public void onCameraOpened(CameraView cameraView) {
Log.d(TAG, "onCameraOpened");
}
@Override
public void onCameraClosed(CameraView cameraView) {
Log.d(TAG, "onCameraClosed");
}
@Override
public void onPictureTaken(CameraView cameraView, final byte[] data) {
Log.d(TAG, "onPictureTaken " + data.length);
/*TastyToast.makeText(cameraView.getContext(), getString(R.string.pic_being_saved),
TastyToast.LENGTH_LONG, TastyToast.INFO);*/
progressiveToast = ProgressiveToast.getInstance();
progressiveToast.show(CaptureActivity.this, getString(R.string.in_action_be_patient), -1);
getBackgroundHandler().post(new Runnable() {
@Override
public void run() {
mCameraView.stop();
// This demo app saves the taken picture to a constant file.
// $ adb pull /sdcard/Android/data/com.google.android.cameraview.demo/files/Pictures/picture.jpg
SizeAwareImageView imageView = ((SizeAwareImageView) mCameraView.findViewById(R.id.mImageView));
Bitmap imageBitmap =
drawableToBitmap(imageView.getDrawable());
Matrix matrix = new Matrix();
float rotation = mCameraView.findViewById(R.id.mImageView).getRotation();
matrix.postRotate(rotation);
//matrix.postScale(drawableWidth, drawableHeight);
/*
matrix.setScale((float)
((rotation% 180 == 90) ? drawableWidth : drawableHeight) / imageBitmap.getWidth(),
(float) ((rotation% 180 == 90) ? drawableWidth : drawableHeight) / imageBitmap.getHeight());*/
Log.e(TAG, "rotation: " + rotation);
imageBitmap = Bitmap.createBitmap(imageBitmap , 0, 0,
imageBitmap.getWidth(), imageBitmap.getHeight(), matrix, true);
imageBitmap = scaleBitmap(imageBitmap, drawableWidth, drawableHeight, rotation);
Bitmap cameraBmp = BitmapFactory.decodeByteArray(data, 0, data.length);
cameraBmp = fixOrientation(cameraBmp);
File dir = new File (Environment.getExternalStorageDirectory().getAbsolutePath()
+ File.separator + getString(R.string.gallery_folder_name) +
(selectedBrand ? getString(R.string.ibr_eng) :
getString(R.string.tiyaco_eng)));
dir.mkdirs();
File file = new File(dir.getAbsolutePath() ,
Long.toString(Calendar.getInstance().getTimeInMillis()) + ".jpg");
try {
file.createNewFile();
} catch (IOException e) {
e.printStackTrace();
}
OutputStream os = null;
InputStream is = overlayBitmapToCenter(cameraBmp, imageBitmap, rotation);
byte[] buffer = new byte[10 * 1024];
int n = 0;
try {
os = new FileOutputStream(file);
while (-1 != (n = is.read(buffer))) {
os.write(buffer, 0, n);
}
} catch (IOException e) {
Log.w(TAG, "Cannot write to " + file, e);
} finally {
if (os != null) {
try {
os.close();
} catch (IOException e) {
// Ignore
}
runOnUiThread(new Runnable() {
@Override
public void run() {
if (mCameraView != null)
try {
mCameraView.start();
} catch (Exception ignored){}
if (takePicture != null) {
takePicture.setEnabled(true);
}
progressiveToast.dismiss();
TastyToast.makeText(getApplicationContext(), getString(R.string.picture_taken),
TastyToast.LENGTH_LONG, TastyToast.SUCCESS);
}
});
}
}
}
});
}
};
public Bitmap fixOrientation(Bitmap mBitmap) {
if (mBitmap.getWidth() > mBitmap.getHeight()) {
Matrix matrix = new Matrix();
matrix.postRotate(90);
return Bitmap.createBitmap(mBitmap , 0, 0, mBitmap.getWidth(), mBitmap.getHeight(), matrix, true);
}
return mBitmap;
}
private int dp2px(int dp) {
return (int)((dp * getResources().getDisplayMetrics().density) + 0.5);
}
public static Bitmap drawableToBitmap (Drawable drawable) {
Bitmap bitmap = null;
if (drawable instanceof BitmapDrawable) {
BitmapDrawable bitmapDrawable = (BitmapDrawable) drawable;
if(bitmapDrawable.getBitmap() != null) {
return bitmapDrawable.getBitmap();
}
}
if(drawable.getIntrinsicWidth() <= 0 || drawable.getIntrinsicHeight() <= 0) {
bitmap = Bitmap.createBitmap(1, 1, Bitmap.Config.ARGB_8888); // Single color bitmap will be created of 1x1 pixel
} else {
bitmap = Bitmap.createBitmap(drawable.getIntrinsicWidth(), drawable.getIntrinsicHeight(), Bitmap.Config.ARGB_8888);
}
Canvas canvas = new Canvas(bitmap);
drawable.setBounds(0, 0, canvas.getWidth(), canvas.getHeight());
drawable.draw(canvas);
return bitmap;
}
public ByteArrayInputStream overlayBitmapToCenter(Bitmap bitmap1, Bitmap bitmap2, float rotation) {
float alpha = (float)cameraWidth / (float)((rotation % 180 == 90) ? drawableHeight : drawableWidth);
float beta = (float)((rotation % 180 == 90) ? drawableHeight : drawableWidth) /
(float)((rotation % 180 == 90) ? drawableWidth : drawableHeight);
int bitmap1Width = bitmap1.getWidth();
int bitmap1Height = bitmap1.getHeight();
Bitmap scaledImg = Bitmap.createScaledBitmap(bitmap2, (int)((float)bitmap1Width / alpha),
(int)(((float)bitmap1Width / alpha) / beta), false);
int bitmap2Width = scaledImg.getWidth();
int bitmap2Height = scaledImg.getHeight();
/*Log.e("cam", "" + bitmap1Width + ", " + bitmap1Height );
Log.e("img", "" + bitmap2Width + ", " + bitmap2Height );
Log.e("alpha", "" + alpha );
Log.e("beta", "" + beta );*/
float marginLeft = (float) (bitmap1Width * 0.5 - bitmap2Width * 0.5);
float marginTop = (float) (bitmap1Height * 0.5 - bitmap2Height * 0.5);
Bitmap overlayBitmap = Bitmap.createBitmap(bitmap1Width, bitmap1Height, bitmap1.getConfig());
Canvas canvas = new Canvas(overlayBitmap);
canvas.drawBitmap(bitmap1, new Matrix(), null);
canvas.drawBitmap(scaledImg, marginLeft, marginTop, null);
ByteArrayOutputStream stream = new ByteArrayOutputStream();
overlayBitmap.compress(Bitmap.CompressFormat.PNG, 100, stream);
return new ByteArrayInputStream(stream.toByteArray());
}
private class AsyncImageLoader extends AsyncTask<String, Void, BitmapDrawable>{
private Realm realm;
private ProductModel product;
@Override
protected BitmapDrawable doInBackground(String... itemIds) {
realm = Realm.getDefaultInstance();
product = realm.where(ProductModel.class)
.equalTo("isIbr", selectedBrand)
.equalTo("id", itemIds[0])
.findFirst();
byte[] image = product.getImage();
product = null;
realm.close();
BitmapDrawable mDrawable = new BitmapDrawable(getResources(), BitmapFactory
.decodeByteArray(image, 0, image.length));
int mDrawableHeight = mDrawable.getIntrinsicHeight();
int mDrawableWidth = mDrawable.getIntrinsicWidth();
int valueInPixels = (int) getResources().getDimension(R.dimen.video_view_dimen);
mDrawable.setBounds(0, 0, valueInPixels, valueInPixels * mDrawableHeight / mDrawableWidth);
return mDrawable;
}
@Override
protected void onPostExecute(BitmapDrawable drawable) {
super.onPostExecute(drawable);
LayoutInflater vi = (LayoutInflater) getApplicationContext()
.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
final View v = vi.inflate(R.layout.imageview_product, null);
((SizeAwareImageView)v).setImageDrawable(drawable);
ViewTreeObserver viewTreeObserver = v.getViewTreeObserver();
if (viewTreeObserver.isAlive()) {
viewTreeObserver.addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
@Override
public void onGlobalLayout() {
Log.e(TAG, "onGlobalLayout: updating sizes for drawable");
float[] sizez = ((SizeAwareImageView) v).getImageWidthAndHeight();
/*if (v.getRotation() == 90 || v.getRotation() == 270) {
drawableWidth = dp2px(sizez[1]);
drawableHeight = dp2px(sizez[0]);
} else {
drawableWidth = dp2px(sizez[0]);
drawableHeight = dp2px(sizez[1]);
}*/
drawableWidth = dp2px((int) sizez[0]);
drawableHeight = dp2px((int) sizez[1]);
/*Log.e("picB4Action", "" + drawableWidth + ", " + drawableHeight);*/
}
});
}
int px = (int) (getResources().getDimension(R.dimen.video_view_dimen)/* /
getResources().getDisplayMetrics().density*/);
mCameraView.addView(v, new FrameLayout.LayoutParams(px, px, Gravity.CENTER));
}
}
}
SizeAwareImageView.java:
public class SizeAwareImageView extends ImageView {
public SizeAwareImageView(Context context) {
super(context);
}
public SizeAwareImageView(Context context, AttributeSet attrs) {
super(context, attrs);
}
public SizeAwareImageView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public SizeAwareImageView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
super(context, attrs, defStyleAttr, defStyleRes);
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
// Get image matrix values and place them in an array
float[] f = new float[9];
getImageMatrix().getValues(f);
// Extract the scale values using the constants (if aspect ratio maintained, scaleX == scaleY)
final float scaleX = f[Matrix.MSCALE_X];
final float scaleY = f[Matrix.MSCALE_Y];
// Get the drawable (could also get the bitmap behind the drawable and getWidth/getHeight)
final Drawable d = getDrawable();
final int origW = d.getIntrinsicWidth();
final int origH = d.getIntrinsicHeight();
// Calculate the actual dimensions
final int actW = Math.round(origW * scaleX);
final int actH = Math.round(origH * scaleY);
Log.e("DBG", "["+origW+","+origH+"] -> ["+actW+","+actH+"] & scales: x="+scaleX+" y="+scaleY);
}
public float[] getMatrixValues() {
float[] f = new float[9];
getImageMatrix().getValues(f);
return f;
}
public float[] getImageWidthAndHeight() {
// Get image matrix values and place them in an array
float[] f = new float[9];
getImageMatrix().getValues(f);
// Extract the scale values using the constants (if aspect ratio maintained, scaleX == scaleY)
final float scaleX = f[Matrix.MSCALE_X];
final float scaleY = f[Matrix.MSCALE_Y];
// Get the drawable (could also get the bitmap behind the drawable and getWidth/getHeight)
final Drawable d = getDrawable();
final int origW = d.getIntrinsicWidth();
final int origH = d.getIntrinsicHeight();
// Calculate the actual dimensions
final int actW = Math.round(origW * scaleX);
final int actH = Math.round(origH * scaleY);
//Log.e("DBG", "["+origW+","+origH+"] -> ["+actW+","+actH+"] & scales: x="+scaleX+" y="+scaleY);
return new float[] {actW, actH, scaleX, scaleY};
}
}
Related
2020-04-22 16:14:49.759 1809-1809/? E/servicemanager: Could not find android.hardware.power.IPower/default in the VINTF manifest.
This keeps popping up. I am coding a camera that crops an image when a button is clicked.
Here is a custom view I am adding to a fragment.
public class DrawView extends View {
Point[] points = new Point[4];
/**
* point1 and point 3 are of same group and same as point 2 and point4
*/
int groupId = -1;
private ArrayList<ColorBall> colorballs = new ArrayList<>();
private int mStrokeColor = Color.parseColor("#AADB1255");
private int mFillColor = Color.parseColor("#55DB1255");
private Rect mCropRect = new Rect();
// array that holds the balls
private int balID = 0;
// variable to know what ball is being dragged
Paint paint;
public DrawView(Context context) {
this(context, null);
}
public DrawView(Context context, AttributeSet attrs) {
this(context, attrs, -1);
}
public DrawView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
init();
}
private void init() {
paint = new Paint();
setFocusable(true); // necessary for getting the touch events
}
private void initRectangle(int X, int Y) {
//initialize rectangle.
points[0] = new Point();
points[0].x = X - 200;
points[0].y = Y - 100;
points[1] = new Point();
points[1].x = X;
points[1].y = Y + 30;
points[2] = new Point();
points[2].x = X + 30;
points[2].y = Y + 30;
points[3] = new Point();
points[3].x = X + 30;
points[3].y = Y;
balID = 2;
groupId = 1;
// declare each ball with the ColorBall class
for (int i = 0; i < points.length; i++) {
colorballs.add(new ColorBall(getContext(), R.drawable.gray_circle, points[i], i));
}
}
// the method that draws the balls
@Override
protected void onDraw(Canvas canvas) {
if(points[3]==null) {
//point4 null when view first create
initRectangle(getWidth() / 2, getHeight() / 2);
}
int left, top, right, bottom;
left = points[0].x;
top = points[0].y;
right = points[0].x;
bottom = points[0].y;
for (int i = 1; i < points.length; i++) {
left = left > points[i].x ? points[i].x : left;
top = top > points[i].y ? points[i].y : top;
right = right < points[i].x ? points[i].x : right;
bottom = bottom < points[i].y ? points[i].y : bottom;
}
paint.setAntiAlias(true);
paint.setDither(true);
paint.setStrokeJoin(Paint.Join.ROUND);
paint.setStrokeWidth(5);
//draw stroke
paint.setStyle(Paint.Style.STROKE);
paint.setColor(mStrokeColor);
paint.setStrokeWidth(2);
mCropRect.left = left + colorballs.get(0).getWidthOfBall() / 2;
mCropRect.top = top + colorballs.get(0).getWidthOfBall() / 2;
mCropRect.right = right + colorballs.get(2).getWidthOfBall() / 2;
mCropRect.bottom = bottom + colorballs.get(3).getWidthOfBall() / 2;
canvas.drawRect(mCropRect, paint);
//fill the rectangle
paint.setStyle(Paint.Style.FILL);
paint.setColor(mFillColor);
paint.setStrokeWidth(0);
canvas.drawRect(mCropRect, paint);
// draw the balls on the canvas
paint.setColor(Color.RED);
paint.setTextSize(18);
paint.setStrokeWidth(0);
for (int i =0; i < colorballs.size(); i ++) {
ColorBall ball = colorballs.get(i);
canvas.drawBitmap(ball.getBitmap(), ball.getX(), ball.getY(),
paint);
canvas.drawText("" + (i+1), ball.getX(), ball.getY(), paint);
}
}
// events when touching the screen
public boolean onTouchEvent(MotionEvent event) {
int eventAction = event.getAction();
int X = (int) event.getX();
int Y = (int) event.getY();
switch (eventAction) {
case MotionEvent.ACTION_DOWN: // touch down so check if the finger is on
// a ball
if (points[0] == null) {
initRectangle(X, Y);
} else {
//resize rectangle
balID = -1;
groupId = -1;
for (int i = colorballs.size()-1; i>=0; i--) {
ColorBall ball = colorballs.get(i);
// check if inside the bounds of the ball (circle)
// get the center for the ball
int centerX = ball.getX() + ball.getWidthOfBall();
int centerY = ball.getY() + ball.getHeightOfBall();
paint.setColor(Color.CYAN);
// calculate the radius from the touch to the center of the
// ball
double radCircle = Math
.sqrt((double) (((centerX - X) * (centerX - X)) + (centerY - Y)
* (centerY - Y)));
if (radCircle < ball.getWidthOfBall()) {
balID = ball.getID();
if (balID == 1 || balID == 3) {
groupId = 2;
} else {
groupId = 1;
}
invalidate();
break;
}
invalidate();
}
}
break;
case MotionEvent.ACTION_MOVE: // touch drag with the ball
if (balID > -1) {
// move the balls the same as the finger
colorballs.get(balID).setX(X);
colorballs.get(balID).setY(Y);
paint.setColor(Color.CYAN);
if (groupId == 1) {
colorballs.get(1).setX(colorballs.get(0).getX());
colorballs.get(1).setY(colorballs.get(2).getY());
colorballs.get(3).setX(colorballs.get(2).getX());
colorballs.get(3).setY(colorballs.get(0).getY());
} else {
colorballs.get(0).setX(colorballs.get(1).getX());
colorballs.get(0).setY(colorballs.get(3).getY());
colorballs.get(2).setX(colorballs.get(3).getX());
colorballs.get(2).setY(colorballs.get(1).getY());
}
invalidate();
}
break;
case MotionEvent.ACTION_UP:
// touch drop - just do things here after dropping
break;
}
// redraw the canvas
invalidate();
return true;
}
public Drawable doTheCrop(Bitmap sourceBitmap) throws IOException {
//Bitmap sourceBitmap = null;
//Drawable backgroundDrawable = getBackground();
/*
if (backgroundDrawable instanceof BitmapDrawable) {
BitmapDrawable bitmapDrawable = (BitmapDrawable) backgroundDrawable;
if(bitmapDrawable.getBitmap() != null) {
sourceBitmap = bitmapDrawable.getBitmap();
}
}*/
//source bitmap was scaled, you should calculate the rate
float widthRate = ((float) sourceBitmap.getWidth()) / getWidth();
float heightRate = ((float) sourceBitmap.getHeight()) / getHeight();
//crop the source bitmap with rate value
int left = (int) (mCropRect.left * widthRate);
int top = (int) (mCropRect.top * heightRate);
int right = (int) (mCropRect.right * widthRate);
int bottom = (int) (mCropRect.bottom * heightRate);
Bitmap croppedBitmap = Bitmap.createBitmap(sourceBitmap, left, top, right - left, bottom - top);
Drawable drawable = new BitmapDrawable(getResources(), croppedBitmap);
return drawable;
/*
setContentView(R.layout.fragment_dashboard);
Button btn = (Button)findViewById(R.id.capture);
if (btn == null){
System.out.println("NULL");
}
try{
btn.setText("HI");
}
catch (Exception e){
}
//setBackground(drawable);*/
//savebitmap(croppedBitmap);
}
private File savebitmap(Bitmap bmp) throws IOException {
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
bmp.compress(Bitmap.CompressFormat.JPEG, 60, bytes);
File f = new File(Environment.getExternalStorageDirectory()
+ "/" + "testimage.jpg");
Toast.makeText(getContext(), "YUP", Toast.LENGTH_LONG).show();
f.createNewFile();
FileOutputStream fo = new FileOutputStream(f);
fo.write(bytes.toByteArray());
fo.close();
return f;
}
public static class ColorBall {
Bitmap bitmap;
Context mContext;
Point point;
int id;
public ColorBall(Context context, int resourceId, Point point, int id) {
this.id = id;
bitmap = BitmapFactory.decodeResource(context.getResources(),
resourceId);
mContext = context;
this.point = point;
}
public int getWidthOfBall() {
return bitmap.getWidth();
}
public int getHeightOfBall() {
return bitmap.getHeight();
}
public Bitmap getBitmap() {
return bitmap;
}
public int getX() {
return point.x;
}
public int getY() {
return point.y;
}
public int getID() {
return id;
}
public void setX(int x) {
point.x = x;
}
public void setY(int y) {
point.y = y;
}
}
}
Here is the fragment that I have added a camera to; it is basically the main part of the application that I am working on.
public class DashboardFragment extends Fragment {
private DashboardViewModel dashboardViewModel;
//All my constants
private DrawView mDrawView;
private Drawable imgDraw;
private TextureView txtView;
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static{
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private String cameraID;
private String pathway;
CameraDevice cameraDevice;
CameraCaptureSession cameraCaptureSession;
CaptureRequest captureRequest;
CaptureRequest.Builder captureRequestBuilder;
private Size imageDimensions;
private ImageReader imageReader;
private File file;
Handler mBackgroundHandler;
HandlerThread mBackgroundThread;
public View onCreateView(@NonNull LayoutInflater inflater,
ViewGroup container, Bundle savedInstanceState) {
dashboardViewModel =
ViewModelProviders.of(this).get(DashboardViewModel.class);
View root = inflater.inflate(R.layout.fragment_dashboard, container, false);
try{
txtView = (TextureView)root.findViewById(R.id.textureView);
txtView.setSurfaceTextureListener(textureListener);
mDrawView = root.findViewById(draw_view);
Button cap = (Button)root.findViewById(R.id.capture);
cap.setClickable(true);
cap.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
Log.i("HOLA","HOLA");
takePicture();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
});
}
catch (Exception e){
Log.i("HI",e.toString());
}
/*
txtView = (TextureView)root.findViewById(R.id.textureView);
txtView.setSurfaceTextureListener(textureListener);
mDrawView = root.findViewById(R.id.draw_view);
Button cap = (Button)root.findViewById(R.id.capture);
cap.setClickable(true);
cap.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
Log.i("HOLA","HOLA");
takePicture();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
});*/
return root;
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults){
if (requestCode == 101){
if (grantResults[0] == PackageManager.PERMISSION_DENIED){
Toast.makeText(getActivity().getApplicationContext(), "Permission is required", Toast.LENGTH_LONG).show();
}
}
}
TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1) {
try {
openCamera();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int i, int i1) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
cameraDevice = camera;
try {
createCameraPreview();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
cameraDevice.close();
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int i) {
cameraDevice.close();
cameraDevice = null;
}
};
private void createCameraPreview() throws CameraAccessException {
SurfaceTexture texture = txtView.getSurfaceTexture(); //?
texture.setDefaultBufferSize(imageDimensions.getWidth(), imageDimensions.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(surface);
cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
if (cameraDevice == null){
return;
}
cameraCaptureSession = session;
try {
updatePreview();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Toast.makeText(getActivity().getApplicationContext(), "CONFIGURATION", Toast.LENGTH_LONG).show();
}
}, null);
}
private void updatePreview() throws CameraAccessException {
if (cameraDevice == null){
return;
}
captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);
}
private void openCamera() throws CameraAccessException {
CameraManager manager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
cameraID = manager.getCameraIdList()[0];
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraID);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
imageDimensions = map.getOutputSizes(SurfaceTexture.class)[0];
if (ActivityCompat.checkSelfPermission(getActivity().getApplicationContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED
&& ActivityCompat.checkSelfPermission(getActivity().getApplicationContext(), Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED){
ActivityCompat.requestPermissions(getActivity(), new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE}, 101);
return;
}
manager.openCamera(cameraID, stateCallback, null);
}
private void takePicture() throws CameraAccessException {
if (cameraDevice == null) {
Log.i("NOt working", "hi");
return;
}
CameraManager manager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
Size[] jpegSizes = null;
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
int width = 640;
int height = 480;
if (jpegSizes != null && jpegSizes.length > 0) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
final ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(txtView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
int rotation = getActivity().getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
Long tsLong = System.currentTimeMillis() / 1000;
String ts = tsLong.toString();
file = new File(Environment.getExternalStorageDirectory() + "/" + ts + ".jpg");
pathway = Environment.getExternalStorageDirectory() + "/" + ts + ".jpg";
//cameraDevice.close();
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader imageReader) {
Image image = null;
//image = reader.acquireLatestImage();
image = reader.acquireNextImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes , 0, bytes.length);
try {
Drawable back = mDrawView.doTheCrop(bitmap);
Button btn = (Button)getView().findViewById(R.id.capture);
btn.setBackground(back);
} catch (IOException e) {
e.printStackTrace();
}
/*
try {
save(bytes);
} catch (IOException e) {
e.printStackTrace();
} finally {
if (image != null){
image.close();
}
}*/
}
};
reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback(){
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result){
super.onCaptureCompleted(session, request, result);
try {
createCameraPreview();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
};
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
}
}, mBackgroundHandler);
}
private void save (byte[] bytes) throws IOException {
OutputStream outputStream = null;
outputStream = new FileOutputStream(file);
outputStream.write(bytes);
Toast.makeText(getActivity().getApplicationContext(),pathway,Toast.LENGTH_LONG).show();
outputStream.close();
imgDraw = Drawable.createFromPath(pathway);
//mDrawView.doTheCrop(imgDraw);
}
@Override
public void onResume(){
super.onResume();
startBackgroundThread();
if (txtView.isAvailable()){
try {
openCamera();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
else{
txtView.setSurfaceTextureListener(textureListener);
}
}
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("Camera Background");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
protected void stopBackgroundThread() throws InterruptedException{
mBackgroundThread.quitSafely();
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
}
@Override
public void onPause(){
try {
stopBackgroundThread();
} catch (InterruptedException e) {
e.printStackTrace();
}
super.onPause();
}
}
Here is the xml file for that fragment.
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".ui.dashboard.DashboardFragment">
<TextureView
android:id="@+id/textureView"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
<com.PeavlerDevelopment.OpinionMinion.ui.dashboard.DrawView
android:id="@+id/draw_view"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
<Button
android:id="@+id/capture"
android:layout_width="100dp"
android:layout_height="200dp"
android:clickable="true"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"></Button>
</androidx.constraintlayout.widget.ConstraintLayout>
The problem seems to lie somewhere in the doTheCrop method of the DrawView class.
If there is anything else that would help make the problem more clear, let me know! I will gladly share the github repo with you.
Thank you.
As you can see in the Android design documentation, VINTF stands for Vendor Interface, and it is a manifest structure to aggregate data from the device. That specific log means that your manifest is missing something like this:
<hal>
<name>android.hardware.power</name>
<transport>hwbinder</transport>
<version>1.1</version>
<interface>
<name>IPower</name>
<instance>default</instance>
</interface>
</hal>
which is basically hardware power information.
I don't think it's related to what you are trying to do, but I would need more information than that log.
I want to make an app which takes photos only of faces, but I get the whole image instead of just the face when I save it to my storage. So how do I crop the face and save it to storage with the help of the Google Vision face detection API?
How do I use a Frame in my code to get the list of faces, and how can I convert it into a bitmap and save it to my storage?
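What I am trying to do is roughly this sketch, using the com.google.android.gms.vision Frame and FaceDetector API (detector and picture are placeholder names; error handling is omitted):
// Run the detector over the captured bitmap and crop the first face's bounding box.
Bitmap cropFirstFace(FaceDetector detector, Bitmap picture) {
    Frame frame = new Frame.Builder().setBitmap(picture).build();
    SparseArray<Face> faces = detector.detect(frame);
    if (faces.size() == 0) return null; // no face found
    Face face = faces.valueAt(0);
    int left = Math.max(0, (int) face.getPosition().x);
    int top = Math.max(0, (int) face.getPosition().y);
    int width = Math.min((int) face.getWidth(), picture.getWidth() - left);
    int height = Math.min((int) face.getHeight(), picture.getHeight() - top);
    return Bitmap.createBitmap(picture, left, top, width, height);
}
But I am not sure where to plug this into my code below.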
main.xml
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/topLayout"
android:orientation="vertical"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:keepScreenOn="true">
<com.google.android.gms.samples.vision.face.facetracker.ui.camera.CameraSourcePreview
android:id="@+id/preview"
android:layout_width="match_parent"
android:layout_height="match_parent">
<com.google.android.gms.samples.vision.face.facetracker.ui.camera.GraphicOverlay
android:id="@+id/faceOverlay"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</com.google.android.gms.samples.vision.face.facetracker.ui.camera.CameraSourcePreview>
</LinearLayout>
<android.support.v7.widget.AppCompatButton
android:id="@+id/take_pic_btn"
android:layout_gravity="bottom"
android:gravity="center"
android:background="@color/green"
android:layout_margin="10dp"
android:text="Take Image"
android:textStyle="bold"
android:textSize="20sp"
android:textAllCaps="false"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
</FrameLayout>
FaceTrackerActivity.java
public final class FaceTrackerActivity extends AppCompatActivity {
private static final String TAG = "FaceTracker";
private CameraSource mCameraSource = null;
private CameraSourcePreview mPreview;
private GraphicOverlay mGraphicOverlay;
private static final int RC_HANDLE_GMS = 9001;
// permission request codes need to be < 256
private static final int RC_HANDLE_CAMERA_PERM = 2;
private Button takePicButton;
FaceDetector detector;
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setContentView(R.layout.main);
mPreview = (CameraSourcePreview) findViewById(R.id.preview);
mGraphicOverlay = (GraphicOverlay) findViewById(R.id.faceOverlay);
takePicButton=(Button)findViewById(R.id.take_pic_btn);
int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
int gc = ActivityCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE);
if (rc == PackageManager.PERMISSION_GRANTED && gc == PackageManager.PERMISSION_GRANTED) {
createCameraSource();
} else {
requestCameraPermission();
}
takePicButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Toast.makeText(getApplicationContext(),"dilip",Toast.LENGTH_LONG).show();
captureImage();
}
});
}
private void requestCameraPermission() {
Log.w(TAG, "Camera permission is not granted. Requesting permission");
final String[] permissions = new String[]{Manifest.permission.CAMERA,Manifest.permission.WRITE_EXTERNAL_STORAGE};
if (!ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.CAMERA) && !ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.WRITE_EXTERNAL_STORAGE) ) {
ActivityCompat.requestPermissions(this, permissions, RC_HANDLE_CAMERA_PERM);
return;
}
final Activity thisActivity = this;
View.OnClickListener listener = new View.OnClickListener() {
@Override
public void onClick(View view) {
ActivityCompat.requestPermissions(thisActivity, permissions,
RC_HANDLE_CAMERA_PERM);
}
};
Snackbar.make(mGraphicOverlay, R.string.permission_camera_rationale,
Snackbar.LENGTH_INDEFINITE)
.setAction(R.string.ok, listener)
.show();
}
private void createCameraSource() {
Context context = getApplicationContext();
/* FaceDetector detector = new FaceDetector.Builder(context)
.setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
.build();
detector.setProcessor(
new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
.build());*/
detector= new FaceDetector.Builder(context)
.setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
.build();
MyFaceDetector myFaceDetector = new MyFaceDetector(detector);
detector.setProcessor(
new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
.build());
mCameraSource = new CameraSource.Builder(context, myFaceDetector)
.build();
if (!detector.isOperational()) {
}
mCameraSource = new CameraSource.Builder(context, detector)
.setRequestedPreviewSize(640, 480)
.setFacing(CameraSource.CAMERA_FACING_FRONT)
.setRequestedFps(10.0f)
.build();
}
/**
* Restarts the camera.
*/
@Override
protected void onResume() {
super.onResume();
startCameraSource();
}
/**
* Stops the camera.
*/
@Override
protected void onPause() {
super.onPause();
mPreview.stop();
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mCameraSource != null) {
mCameraSource.release();
}
}
/**
* Callback for the result from requesting permissions. This method
* is invoked for every call on {@link #requestPermissions(String[], int)}.
* <p>
* <strong>Note:</strong> It is possible that the permissions request interaction
* with the user is interrupted. In this case you will receive empty permissions
* and results arrays which should be treated as a cancellation.
* </p>
*
* @param requestCode The request code passed in {@link #requestPermissions(String[], int)}.
* @param permissions The requested permissions. Never null.
* @param grantResults The grant results for the corresponding permissions
* which is either {@link PackageManager#PERMISSION_GRANTED}
* or {@link PackageManager#PERMISSION_DENIED}. Never null.
* @see #requestPermissions(String[], int)
*/
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
if (requestCode != RC_HANDLE_CAMERA_PERM) {
Log.d(TAG, "Got unexpected permission result: " + requestCode);
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
return;
}
if (grantResults.length != 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED &&grantResults[1] == PackageManager.PERMISSION_GRANTED) {
Log.d(TAG, "Camera permission granted - initialize the camera source");
// we have permission, so create the camerasource
createCameraSource();
return;
}
Log.e(TAG, "Permission not granted: results len = " + grantResults.length +
" Result code = " + (grantResults.length > 0 ? grantResults[0] : "(empty)"));
DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
finish();
}
};
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Face Tracker sample")
.setMessage(R.string.no_camera_permission)
.setPositiveButton(R.string.ok, listener)
.show();
}
//==============================================================================================
// Camera Source Preview
//==============================================================================================
/**
* Starts or restarts the camera source, if it exists. If the camera source doesn't exist yet
* (e.g., because onResume was called before the camera source was created), this will be called
* again when the camera source is created.
*/
private void startCameraSource() {
// check that the device has play services available.
int code = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(
getApplicationContext());
if (code != ConnectionResult.SUCCESS) {
Dialog dlg =
GoogleApiAvailability.getInstance().getErrorDialog(this, code, RC_HANDLE_GMS);
dlg.show();
}
if (mCameraSource != null) {
try {
mPreview.start(mCameraSource, mGraphicOverlay);
} catch (IOException e) {
Log.e(TAG, "Unable to start camera source.", e);
mCameraSource.release();
mCameraSource = null;
}
}
}
//==============================================================================================
// Graphic Face Tracker
//==============================================================================================
/**
* Factory for creating a face tracker to be associated with a new face. The multiprocessor
* uses this factory to create face trackers as needed -- one for each individual.
*/
private class GraphicFaceTrackerFactory implements MultiProcessor.Factory<Face> {
@Override
public Tracker<Face> create(Face face) {
return new GraphicFaceTracker(mGraphicOverlay);
}
}
/**
* Face tracker for each detected individual. This maintains a face graphic within the app's
* associated face overlay.
*/
private class GraphicFaceTracker extends Tracker<Face> {
private GraphicOverlay mOverlay;
private FaceGraphic mFaceGraphic;
GraphicFaceTracker(GraphicOverlay overlay) {
mOverlay = overlay;
mFaceGraphic = new FaceGraphic(overlay);
}
/**
* Start tracking the detected face instance within the face overlay.
*/
@Override
public void onNewItem(int faceId, Face item) {
mFaceGraphic.setId(faceId);
}
@Override
public void onUpdate(FaceDetector.Detections<Face> detectionResults, Face face) {
mOverlay.add(mFaceGraphic);
mFaceGraphic.updateFace(face);
}
/**
* Hide the graphic when the corresponding face was not detected. This can happen for
* intermediate frames temporarily (e.g., if the face was momentarily blocked from
* view).
*/
@Override
public void onMissing(FaceDetector.Detections<Face> detectionResults) {
mOverlay.remove(mFaceGraphic);
}
/**
* Called when the face is assumed to be gone for good. Remove the graphic annotation from
* the overlay.
*/
@Override
public void onDone() {
mOverlay.remove(mFaceGraphic);
}
}
private void captureImage() {
mPreview.setDrawingCacheEnabled(true);
final Bitmap drawingCache = mPreview.getDrawingCache();
mCameraSource.takePicture(null, new CameraSource.PictureCallback() {
@Override
public void onPictureTaken(byte[] bytes) {
int orientation = Exif.getOrientation(bytes);
Bitmap temp = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
Bitmap picture = rotateImage(temp,orientation);
Bitmap overlay = Bitmap.createBitmap(mGraphicOverlay.getWidth(),mGraphicOverlay.getHeight(),picture.getConfig());
Canvas canvas = new Canvas(overlay);
Matrix matrix = new Matrix();
matrix.setScale((float)overlay.getWidth()/(float)picture.getWidth(),(float)overlay.getHeight()/(float)picture.getHeight());
// mirror by inverting scale and translating
matrix.preScale(-1, 1);
matrix.postTranslate(canvas.getWidth(), 0);
Paint paint = new Paint();
canvas.drawBitmap(picture,matrix,paint);
canvas.drawBitmap(drawingCache,0,0,paint);
try {
String mainpath = getExternalStorageDirectory() + separator + "MaskIt" + separator + "images" + separator;
File basePath = new File(mainpath);
if (!basePath.exists())
Log.d("CAPTURE_BASE_PATH", basePath.mkdirs() ? "Success": "Failed");
String path = mainpath + "photo_" + getPhotoTime() + ".jpg";
File captureFile = new File(path);
captureFile.createNewFile();
if (!captureFile.exists())
Log.d("CAPTURE_FILE_PATH", captureFile.createNewFile() ? "Success": "Failed");
FileOutputStream stream = new FileOutputStream(captureFile);
overlay.compress(Bitmap.CompressFormat.PNG, 100, stream);
stream.flush();
stream.close();
picture.recycle();
drawingCache.recycle();
mPreview.setDrawingCacheEnabled(false);
} catch (IOException e) {
e.printStackTrace();
}
}
private String getPhotoTime() {
DateFormat dateFormatter = new SimpleDateFormat("yyyyMMdd hhmmss");
dateFormatter.setLenient(false);
Date today = new Date();
String s = dateFormatter.format(today);
return s;
}
});
}
private Bitmap rotateImage(Bitmap bm, int i) {
Matrix matrix = new Matrix();
switch (i) {
case ExifInterface.ORIENTATION_NORMAL:
return bm;
case ExifInterface.ORIENTATION_FLIP_HORIZONTAL:
matrix.setScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_180:
matrix.setRotate(180);
break;
case ExifInterface.ORIENTATION_FLIP_VERTICAL:
matrix.setRotate(180);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_TRANSPOSE:
matrix.setRotate(90);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_90:
matrix.setRotate(90);
break;
case ExifInterface.ORIENTATION_TRANSVERSE:
matrix.setRotate(-90);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_270:
matrix.setRotate(-90);
break;
default:
return bm;
}
try {
Bitmap bmRotated = Bitmap.createBitmap(bm, 0, 0, bm.getWidth(), bm.getHeight(), matrix, true);
bm.recycle();
return bmRotated;
} catch (OutOfMemoryError e) {
e.printStackTrace();
return null;
}
}
}
CameraSourcePreview.java
package com.google.android.gms.samples.vision.face.facetracker.ui.camera;
import android.content.Context;
import android.content.res.Configuration;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.ViewGroup;
import com.google.android.gms.common.images.Size;
import com.google.android.gms.vision.CameraSource;
import java.io.IOException;
public class CameraSourcePreview extends ViewGroup {
private static final String TAG = "CameraSourcePreview";
private Context mContext;
private SurfaceView mSurfaceView;
private boolean mStartRequested;
private boolean mSurfaceAvailable;
private CameraSource mCameraSource;
private GraphicOverlay mOverlay;
public CameraSourcePreview(Context context, AttributeSet attrs) {
super(context, attrs);
mContext = context;
mStartRequested = false;
mSurfaceAvailable = false;
mSurfaceView = new SurfaceView(context);
mSurfaceView.getHolder().addCallback(new SurfaceCallback());
addView(mSurfaceView);
}
public void start(CameraSource cameraSource) throws IOException {
if (cameraSource == null) {
stop();
}
mCameraSource = cameraSource;
if (mCameraSource != null) {
mStartRequested = true;
startIfReady();
}
}
public void start(CameraSource cameraSource, GraphicOverlay overlay) throws IOException {
mOverlay = overlay;
start(cameraSource);
}
public void stop() {
if (mCameraSource != null) {
mCameraSource.stop();
}
}
public void release() {
if (mCameraSource != null) {
mCameraSource.release();
mCameraSource = null;
}
}
private void startIfReady() throws IOException {
if (mStartRequested && mSurfaceAvailable) {
mCameraSource.start(mSurfaceView.getHolder());
if (mOverlay != null) {
Size size = mCameraSource.getPreviewSize();
int min = Math.min(size.getWidth(), size.getHeight());
int max = Math.max(size.getWidth(), size.getHeight());
if (isPortraitMode()) {
// Swap width and height sizes when in portrait, since it will be rotated by
// 90 degrees
mOverlay.setCameraInfo(min, max, mCameraSource.getCameraFacing());
} else {
mOverlay.setCameraInfo(max, min, mCameraSource.getCameraFacing());
}
mOverlay.clear();
}
mStartRequested = false;
}
}
private class SurfaceCallback implements SurfaceHolder.Callback {
@Override
public void surfaceCreated(SurfaceHolder surface) {
mSurfaceAvailable = true;
try {
startIfReady();
} catch (IOException e) {
Log.e(TAG, "Could not start camera source.", e);
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surface) {
mSurfaceAvailable = false;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
int previewWidth = 320;
int previewHeight = 240;
if (mCameraSource != null) {
Size size = mCameraSource.getPreviewSize();
if (size != null) {
previewWidth = size.getWidth();
previewHeight = size.getHeight();
}
}
// Swap width and height sizes when in portrait, since it will be rotated 90 degrees
if (isPortraitMode()) {
int tmp = previewWidth;
previewWidth = previewHeight;
previewHeight = tmp;
}
final int viewWidth = right - left;
final int viewHeight = bottom - top;
int childWidth;
int childHeight;
int childXOffset = 0;
int childYOffset = 0;
float widthRatio = (float) viewWidth / (float) previewWidth;
float heightRatio = (float) viewHeight / (float) previewHeight;
// To fill the view with the camera preview, while also preserving the correct aspect ratio,
// it is usually necessary to slightly oversize the child and to crop off portions along one
// of the dimensions. We scale up based on the dimension requiring the most correction, and
// compute a crop offset for the other dimension.
if (widthRatio > heightRatio) {
childWidth = viewWidth;
childHeight = (int) ((float) previewHeight * widthRatio);
childYOffset = (childHeight - viewHeight) / 2;
} else {
childWidth = (int) ((float) previewWidth * heightRatio);
childHeight = viewHeight;
childXOffset = (childWidth - viewWidth) / 2;
}
for (int i = 0; i < getChildCount(); ++i) {
// One dimension will be cropped. We shift child over or up by this offset and adjust
// the size to maintain the proper aspect ratio.
getChildAt(i).layout(
-1 * childXOffset, -1 * childYOffset,
childWidth - childXOffset, childHeight - childYOffset);
}
try {
startIfReady();
} catch (IOException e) {
Log.e(TAG, "Could not start camera source.", e);
}
}
private boolean isPortraitMode() {
int orientation = mContext.getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
return false;
}
if (orientation == Configuration.ORIENTATION_PORTRAIT) {
return true;
}
Log.d(TAG, "isPortraitMode returning false by default");
return false;
}
}
GraphicOverlay.java
package com.google.android.gms.samples.vision.face.facetracker.ui.camera;
import android.content.Context;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.view.View;
import com.google.android.gms.vision.CameraSource;
import java.util.HashSet;
import java.util.Set;
public class GraphicOverlay extends View {
private final Object mLock = new Object();
private int mPreviewWidth;
private float mWidthScaleFactor = 1.0f;
private int mPreviewHeight;
private float mHeightScaleFactor = 1.0f;
private int mFacing = CameraSource.CAMERA_FACING_BACK;
private Set<Graphic> mGraphics = new HashSet<>();
public static abstract class Graphic {
private GraphicOverlay mOverlay;
public Graphic(GraphicOverlay overlay) {
mOverlay = overlay;
}
public abstract void draw(Canvas canvas);
public float scaleX(float horizontal) {
return horizontal * mOverlay.mWidthScaleFactor;
}
public float scaleY(float vertical) {
return vertical * mOverlay.mHeightScaleFactor;
}
public float translateX(float x) {
if (mOverlay.mFacing == CameraSource.CAMERA_FACING_FRONT) {
return mOverlay.getWidth() - scaleX(x);
} else {
return scaleX(x);
}
}
public float translateY(float y) {
return scaleY(y);
}
public void postInvalidate() {
mOverlay.postInvalidate();
}
}
public GraphicOverlay(Context context, AttributeSet attrs) {
super(context, attrs);
}
public void clear() {
synchronized (mLock) {
mGraphics.clear();
}
postInvalidate();
}
public void add(Graphic graphic) {
synchronized (mLock) {
mGraphics.add(graphic);
}
postInvalidate();
}
public void remove(Graphic graphic) {
synchronized (mLock) {
mGraphics.remove(graphic);
}
postInvalidate();
}
public void setCameraInfo(int previewWidth, int previewHeight, int facing) {
synchronized (mLock) {
mPreviewWidth = previewWidth;
mPreviewHeight = previewHeight;
mFacing = facing;
}
postInvalidate();
}
/**
* Draws the overlay with its associated graphic objects.
*/
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
synchronized (mLock) {
if ((mPreviewWidth != 0) && (mPreviewHeight != 0)) {
mWidthScaleFactor = (float) canvas.getWidth() / (float)mPreviewWidth;
mHeightScaleFactor = (float) canvas.getHeight() / (float) mPreviewHeight;
}
for (Graphic graphic : mGraphics) {
graphic.draw(canvas);
}
}
}
}
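To connect this overlay back to the sticker question: a concrete Graphic subclass is where a hat or sunglasses would actually be drawn, using scaleX()/translateX() to map the detector's preview coordinates onto the view. The following is only a rough sketch; StickerGraphic, the sticker bitmap, and the way the Face is handed in are my assumptions, not part of the sample above.
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.RectF;
import com.google.android.gms.vision.face.Face;

// Draws a sticker bitmap scaled to the detected face width.
class StickerGraphic extends GraphicOverlay.Graphic {
    private final Bitmap mSticker;   // e.g. a hat or sunglasses PNG with transparency
    private volatile Face mFace;

    StickerGraphic(GraphicOverlay overlay, Bitmap sticker) {
        super(overlay);
        mSticker = sticker;
    }

    void updateFace(Face face) {
        mFace = face;
        postInvalidate();
    }

    @Override
    public void draw(Canvas canvas) {
        Face face = mFace;
        if (face == null) {
            return;
        }
        // Face position is the top-left corner in preview coordinates.
        float centerX = translateX(face.getPosition().x + face.getWidth() / 2);
        float centerY = translateY(face.getPosition().y + face.getHeight() / 2);
        float stickerWidth = scaleX(face.getWidth());
        float stickerHeight = stickerWidth * mSticker.getHeight() / mSticker.getWidth();
        // Center horizontally on the face and sit the sticker above its vertical midpoint (hat-style).
        RectF dst = new RectF(centerX - stickerWidth / 2, centerY - stickerHeight,
                centerX + stickerWidth / 2, centerY);
        canvas.drawBitmap(mSticker, null, dst, null);
    }
}
A face tracker callback would typically call updateFace() from onUpdate() and remove the graphic from the overlay in onDone().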
You should pass the captured image to the FaceDetector API and crop the face out of it afterwards.
fun getFace(context: Context, data: ByteArray): Bitmap? {
try {
val imageStream = ByteArrayInputStream(data)
var bitmap = BitmapFactory.decodeStream(imageStream)
if (bitmap.width > bitmap.height) {
val matrix = Matrix()
matrix.postRotate(270f)
if (bitmap.width > 1500) matrix.postScale(0.5f, 0.5f)
bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, true)
}
val faceDetector = FaceDetector.Builder(context).setProminentFaceOnly(true).setTrackingEnabled(false).build()
val frame = Frame.Builder().setBitmap(bitmap).build()
val faces = faceDetector.detect(frame)
var results: Bitmap? = null
for (i in 0 until faces.size()) {
val thisFace = faces.valueAt(i)
val x = thisFace.position.x
val y = thisFace.position.y
val x2 = x / 4 + thisFace.width
val y2 = y / 4 + thisFace.height
results = Bitmap.createBitmap(bitmap, x.toInt(), y.toInt(), x2.toInt(), y2.toInt())
}
faceDetector.release() // free the detector's native resources
return results
} catch (e: Exception) {
Log.e("GET_FACE", e.message)
}
return null
}
Source
I have a GridView widget that is populated with 40 images taken from an ArrayList. The problem is that the widget tries to load more positions than 40, which produces a few hundred blank cells with the "loading" text. This wouldn't happen if the number of items were fixed at 40, but how do I do that?
This is my RemoteViewsFactory class:
public class WidgetRemoteViewsFactory implements RemoteViewsService.RemoteViewsFactory {
private Context ctx;
private Cursor cursor;
private ArrayList<Bitmap> photos = new ArrayList<>(40);
public WidgetRemoteViewsFactory(Context applicationContext, Intent intent) {
ctx = applicationContext;
}
@Override
public void onCreate() {
}
@Override
public void onDataSetChanged() {
String[] projection = new String[]{
MediaStore.Images.ImageColumns._ID,
MediaStore.Images.ImageColumns.DATA,
MediaStore.Images.ImageColumns.BUCKET_DISPLAY_NAME,
MediaStore.Images.ImageColumns.DATE_TAKEN,
MediaStore.Images.ImageColumns.MIME_TYPE
};
cursor = ctx.getContentResolver().query(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, projection, null,
null, MediaStore.Images.ImageColumns.DATE_TAKEN + " DESC");
cursor.moveToFirst();
for (int i = 1; i <= 39; i++) {
Bitmap bmp = BitmapFactory.decodeFile(cursor.getString(1));
photos.add(bmp);
cursor.moveToNext();
Log.d(TAG, "loop iteration" + i );
}
}
@Override
public void onDestroy() {
if (cursor != null) {
cursor.close();
}
}
@Override
public int getCount() {
return cursor == null ? 0 : cursor.getCount();
}
@Override
public RemoteViews getViewAt(int position) {
if (position == AdapterView.INVALID_POSITION ||
cursor == null || !cursor.moveToPosition(position) || photos == null || photos.size() == 0 || position >= photos.size()) {
return null;
}
RemoteViews views = new RemoteViews(ctx.getPackageName(), R.layout.widget_item);
Bitmap img = resizeBitmapFitXY(250, 150, photos.get(position));
views.setImageViewBitmap(R.id.imageView, img);
return views;
}
@Override
public RemoteViews getLoadingView() {
return null;
}
@Override
public int getViewTypeCount() {
return 1;
}
@Override
public long getItemId(int position) {
return cursor.moveToPosition(position) ? cursor.getLong(0) : position;
}
@Override
public boolean hasStableIds() {
return true;
}
public Bitmap resizeBitmapFitXY(int width, int height, Bitmap bitmap){
Bitmap background = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
float originalWidth = bitmap.getWidth(), originalHeight = bitmap.getHeight();
Canvas canvas = new Canvas(background);
float scale, xTranslation = 0.0f, yTranslation = 0.0f;
if (originalWidth > originalHeight) {
scale = height/originalHeight;
xTranslation = (width - originalWidth * scale)/2.0f;
}
else {
scale = width / originalWidth;
yTranslation = (height - originalHeight * scale)/2.0f;
}
Matrix transformation = new Matrix();
transformation.postTranslate(xTranslation, yTranslation);
transformation.preScale(scale, scale);
Paint paint = new Paint();
paint.setFilterBitmap(true);
canvas.drawBitmap(bitmap, transformation, paint);
return background;
}
}
You need to replace this code:
cursor.moveToFirst();
for (int i = 1; i <= 39; i++) {
Bitmap bmp = BitmapFactory.decodeFile(cursor.getString(1));
photos.add(bmp);
cursor.moveToNext();
Log.d(TAG, "loop iteration" + i );
}
and put this instead:
if (cursor != null && cursor.getCount() > 0) {
photos.clear(); // avoid duplicating images when the widget data set is refreshed
int i = 0;
while (cursor.moveToNext()) {
Bitmap bmp = BitmapFactory.decodeFile(cursor.getString(1));
photos.add(bmp);
Log.d(TAG, "loop iteration " + (++i));
}
}
The problem is that you used a fixed number of iterations in the for loop, for (int i = 1; i <= 39; i++), while getCount() still reports the full cursor size, which is why the extra cells show up blank.
The solution is to use a while() loop to read every item from the cursor and add it to your list.
Update after the comment: if you decide to load only 40 items and not show the blank "loading" cells, you must also limit getCount(), like this:
@Override
public int getCount() {
return 40;
}
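If you prefer to keep the 40-item cap but avoid problems when the device has fewer images, a variation (only a sketch, reusing the ctx and photos fields from the question; the MAX_ITEMS constant is mine) is to bound the load loop and report only what was actually loaded:
// Drop-in replacements for the two overrides in WidgetRemoteViewsFactory.
private static final int MAX_ITEMS = 40;

@Override
public void onDataSetChanged() {
    photos.clear();
    Cursor c = ctx.getContentResolver().query(
            MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
            new String[]{MediaStore.Images.ImageColumns._ID, MediaStore.Images.ImageColumns.DATA},
            null, null,
            MediaStore.Images.ImageColumns.DATE_TAKEN + " DESC");
    if (c != null) {
        // Load at most MAX_ITEMS bitmaps; consider downsampling via BitmapFactory.Options for a widget.
        while (c.moveToNext() && photos.size() < MAX_ITEMS) {
            photos.add(BitmapFactory.decodeFile(c.getString(1)));
        }
        c.close();
    }
}

@Override
public int getCount() {
    // Never report more cells than bitmaps actually loaded.
    return photos.size();
}
With this variant, getViewAt() and getItemId() should work off the photos list rather than the cursor field, since the cursor is closed right after loading.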
So I have been trying for about a week now to get this working, to no avail.
I want the user to pick an image from the gallery and then crop it into a nice circular profile photo. I get the photo to crop, but the background is still square. I found this question and tried implementing the answer given there, but the background is still square: Cropping circular area from bitmap in Android
I googled and only found more ways to do the same thing, but I still get the square background. Any help will be appreciated.
public class RegisterActivity extends Fragment {
// TODO: Rename parameter arguments, choose names that match
// the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
private static final String ARG_PARAM1 = "param1";
private static final String ARG_PARAM2 = "param2";
private String mParam1;
private String mParam2;
RoundedImageView roundImageView ;
String numberToPass = "1";//default 1 for male
String selectedImagePath;
EditText etNickname, etAge;
Button btnNext;
ImageView profilePhoto, imageview;
Bitmap bitmap;
private OnRegisterListener mListener;
public RegisterActivity() {
// Required empty public constructor
}
public static RegisterActivity newInstance(String param1, String param2) {
RegisterActivity fragment = new RegisterActivity();
Bundle args = new Bundle();
args.putString(ARG_PARAM1, param1);
args.putString(ARG_PARAM2, param2);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
mParam1 = getArguments().getString(ARG_PARAM1);
mParam2 = getArguments().getString(ARG_PARAM2);
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_register, container, false);
etNickname = (EditText) view.findViewById(R.id.etNickName);
btnNext = (Button) view.findViewById(R.id.btnNextRegister);
profilePhoto = (ImageView) view.findViewById(R.id.imageButton);
bitmap = BitmapFactory.decodeResource(getResources(),
R.drawable.blender);
// profilePhoto.setImageBitmap(bitmap);
/* Bitmap bitmap2 = BitmapFactory.decodeResource(this.getResources(),R.drawable.blender);
Bitmap circularBitmap = ImageConverter.getRoundedCornerBitmap(bitmap2, 100);
ImageView circularImageView = (ImageView)view.findViewById(R.id.imageButton);
circularImageView.setImageBitmap(circularBitmap); */
profilePhoto.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
openGallery();
}
});
return view;
}
// TODO: Rename method, update argument and hook method into UI event
public void onButtonPressed(Uri uri) {
if (mListener != null) {
mListener.onFragmentInteraction(uri);
}
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
if (context instanceof OnRegisterListener) {
mListener = (OnRegisterListener) context;
} else {
throw new RuntimeException(context.toString()
+ " must implement OnRegisterListener");
}
}
@Override
public void onDetach() {
super.onDetach();
mListener = null;
}
public interface OnRegisterListener {
// TODO: Update argument type and name
void onFragmentInteraction(Uri uri);
}
// This allows the user to select one image from the gallery
public void openGallery() {
Intent gallery = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.INTERNAL_CONTENT_URI);
startActivityForResult(gallery, 1);
}
// After the gallery activity returns with a chosen image, execution continues here
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == RESULT_OK && requestCode == 1 && null != data) {
if (requestCode == 1) {
Uri current_ImageURI = data.getData();
/* String[] filePathColumn = { MediaStore.Images.Media.DATA };
Cursor cursor = getActivity().getContentResolver().query(current_ImageURI,
filePathColumn, null, null, null);
cursor.moveToFirst();
int columnIndex = cursor.getColumnIndex(filePathColumn[0]);
String picturePath = cursor.getString(columnIndex);
cursor.close();*/
selectedImagePath = getPath(current_ImageURI);
profilePhoto.setImageBitmap(circleBitmap(decodeSampledBitmap(new File(selectedImagePath), 250, 250)));
}
}
}
public String getPath(Uri contentUri) {
// We have to check the SDK version because the way the image path is retrieved differs between versions
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
// can post image
String[] proj = {MediaStore.Images.Media.DATA};
Cursor cursor = getActivity().getApplicationContext().getContentResolver().query(contentUri, proj, null, null, null);
int column_index = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
cursor.moveToFirst();
return cursor.getString(column_index);
} else {
String filePath = "";
String wholeID = DocumentsContract.getDocumentId(contentUri);
// Split at colon, use second item in the array
String id = wholeID.split(":")[1];
String[] column = {MediaStore.Images.Media.DATA};
// where id is equal to
String sel = MediaStore.Images.Media._ID + "=?";
Cursor cursor = getActivity().getContentResolver().query(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, column, sel, new String[]{id}, null);
int columnIndex = cursor.getColumnIndex(column[0]);
if (cursor.moveToFirst()) {
filePath = cursor.getString(columnIndex);
}
cursor.close();
return filePath;
}
}
public Bitmap decodeSampledBitmap(File res, int reqWidth, int reqHeight) {
if (res != null) {
// First decode with inJustDecodeBounds=true to check dimensions
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
try {
FileInputStream stream2 = null;
try {
stream2 = new FileInputStream(res);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
BitmapFactory.decodeStream(stream2, null, options);
stream2.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
// Calculate inSampleSize
BitmapFactory.Options o2 = new BitmapFactory.Options();
o2.inSampleSize = calculateInSampleSize(options, reqWidth, reqHeight);
o2.inJustDecodeBounds = false;
FileInputStream stream = null;
try {
stream = new FileInputStream(res);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
Bitmap bitmap = BitmapFactory.decodeStream(stream, null, o2);
try {
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
return bitmap;
} else
return null;
}
public int calculateInSampleSize(BitmapFactory.Options options, int reqWidth, int reqHeight) {
// Raw height and width of image
final int height = options.outHeight;
final int width = options.outWidth;
int inSampleSize = 1;
if (height > reqHeight || width > reqWidth) {
final int halfHeight = height / 2;
final int halfWidth = width / 2;
// Calculate the largest inSampleSize value that is a power of 2 and keeps both
// height and width larger than the requested height and width.
while ((halfHeight / inSampleSize) > reqHeight && (halfWidth / inSampleSize) > reqWidth) {
inSampleSize *= 2;
}
}
return inSampleSize;
}
//-----------------------------------------------------------------
private Bitmap circleBitmap(Bitmap bitmap) {
final Bitmap output = Bitmap.createBitmap(bitmap.getWidth(),
bitmap.getHeight(), Bitmap.Config.ARGB_8888);
int squareBitmapWidth = Math.min(bitmap.getWidth(), bitmap.getHeight());
/* Canvas
The Canvas class holds the "draw" calls. To draw something, you need 4 basic
components: A Bitmap to hold the pixels, a Canvas to host the draw calls (writing
into the bitmap), a drawing primitive (e.g. Rect, Path, text, Bitmap), and a paint
(to describe the colors and styles for the drawing). */
final Canvas canvas = new Canvas(output);
final int color = Color.RED;
final Paint paint = new Paint();
final Rect rect = new Rect(0, 0, bitmap.getWidth(), bitmap.getHeight());
// final Rect rect = new Rect(0, 0, squareBitmapWidth, squareBitmapWidth);
final RectF rectF = new RectF(rect);
paint.setAntiAlias(true);
canvas.drawARGB(0, 0, 0, 0);
paint.setColor(color);
canvas.drawCircle(bitmap.getWidth() / 2, bitmap.getHeight() / 2, bitmap.getWidth() / 2, paint);
//canvas.drawOval(rectF, paint);
paint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC_IN));
//float left = (squareBitmapWidth - bitmap.getWidth()) / 2;
//float top = (squareBitmapWidth - bitmap.getHeight()) / 2;
//canvas.drawBitmap(bitmap, left, top, paint);
canvas.drawBitmap(bitmap, rect, rect, paint);
//bitmap.recycle();
return output;
}
//---------------------------------------------------------------------------------------------
//end img upload
I use this code and it works perfectly:
public static Bitmap getCircularBitmap(Bitmap bitmap) {
Bitmap output;
if (bitmap.getWidth() > bitmap.getHeight()) {
output = Bitmap.createBitmap(bitmap.getHeight(), bitmap.getHeight(), Bitmap.Config.ARGB_8888);
} else {
output = Bitmap.createBitmap(bitmap.getWidth(), bitmap.getWidth(), Bitmap.Config.ARGB_8888);
}
Canvas canvas = new Canvas(output);
final int color = 0xff424242;
final Paint paint = new Paint();
final Rect rect = new Rect(0, 0, bitmap.getWidth(), bitmap.getHeight());
float r = 0;
if (bitmap.getWidth() > bitmap.getHeight()) {
r = bitmap.getHeight() / 2;
} else {
r = bitmap.getWidth() / 2;
}
paint.setAntiAlias(true);
canvas.drawARGB(0, 0, 0, 0);
paint.setColor(color);
canvas.drawCircle(r, r, r, paint);
paint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC_IN));
canvas.drawBitmap(bitmap, rect, rect, paint);
return output;
}
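A quick usage sketch, assuming the selectedImagePath, decodeSampledBitmap() and profilePhoto pieces from the question above:
// Inside onActivityResult, after resolving the picked image's path:
Bitmap picked = decodeSampledBitmap(new File(selectedImagePath), 250, 250);
profilePhoto.setImageBitmap(getCircularBitmap(picked));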
You can customize ImageView so that everything it draws is clipped to a circle. Here is my implementation (not the best solution for performance-critical applications, but good enough as long as the ImageView is not invalidated often):
class CircleImageView extends ImageView {
private Paint mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
public CircleImageView(Context context) {
super(context);
}
#Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
int radiusMeasureSpec = widthMeasureSpec;
super.onMeasure(radiusMeasureSpec, radiusMeasureSpec);
int radiusMeasureSize = MeasureSpec.getSize(widthMeasureSpec);
setMeasuredDimension(radiusMeasureSize, radiusMeasureSize);
}
#Override
public void draw(Canvas viewCanvas) {
final int EDGE_SIZE = viewCanvas.getWidth();
// Draw this View's things.
Bitmap fgBm = Bitmap.createBitmap(EDGE_SIZE, EDGE_SIZE, Bitmap.Config.ARGB_8888);
Canvas fgCanvas = new Canvas(fgBm);
super.draw(fgCanvas);
// Transfer to a special shape.
Bitmap shapedBm = Bitmap.createBitmap(EDGE_SIZE, EDGE_SIZE, Bitmap.Config.ARGB_8888);
Canvas shapedCanvas = new Canvas(shapedBm);
shapedCanvas.drawCircle(EDGE_SIZE/2, EDGE_SIZE/2, EDGE_SIZE/2, mPaint);
mPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC_IN));
shapedCanvas.drawBitmap(fgBm, 0, 0, mPaint);
mPaint.setXfermode(null);
// Move drawn things to View's canvas.
viewCanvas.drawBitmap(shapedBm, 0, 0, mPaint);
fgBm.recycle();
shapedBm.recycle();
}
}
Someone in another SO post customizes ImageView using a BitmapShader, without Xfermode and without creating extra Bitmap instances. Here's his implementation.
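For reference, a minimal sketch of that BitmapShader idea (not the linked author's exact code): the bitmap is wrapped in a shader and painted with a single drawCircle call, so no offscreen bitmaps or Xfermode passes are needed.
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapShader;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Shader;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.widget.ImageView;

public class ShaderCircleImageView extends ImageView {
    private final Paint mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);

    public ShaderCircleImageView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        Drawable d = getDrawable();
        if (!(d instanceof BitmapDrawable)) {
            super.onDraw(canvas);
            return;
        }
        Bitmap bitmap = ((BitmapDrawable) d).getBitmap();
        if (bitmap == null) {
            return;
        }
        // Scale the bitmap to fill the view; CLAMP repeats the edge pixels if needed.
        // (Building the shader in onDraw keeps the sketch short; cache it for real use.)
        BitmapShader shader = new BitmapShader(bitmap, Shader.TileMode.CLAMP, Shader.TileMode.CLAMP);
        Matrix matrix = new Matrix();
        matrix.setScale((float) getWidth() / bitmap.getWidth(),
                (float) getHeight() / bitmap.getHeight());
        shader.setLocalMatrix(matrix);
        mPaint.setShader(shader);
        float radius = Math.min(getWidth(), getHeight()) / 2f;
        canvas.drawCircle(getWidth() / 2f, getHeight() / 2f, radius, mPaint);
    }
}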
You can also use a trusted library like this one.
It gives you a circular image, lets you add a border to your image, and has some other features.
I am trying to build a simple app that launches the camera, allows me to take a picture, and saves it to the device, but I keep getting this error:
java.lang.RuntimeException: Fail to connect to camera service
MainActivity
public class MainActivity extends AppCompatActivity
{
private static final String TAG = MainActivity.class.getName();
Preview preview;
private Camera camera;
private int cameraId;
@Override
protected void onCreate(Bundle savedInstanceState)
{
final Camera.PictureCallback jpegCallback;
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_main);
preview = new Preview(this, (SurfaceView)findViewById(R.id.surfaceView));
preview.setLayoutParams(new ActionBar.LayoutParams(ActionBar.LayoutParams.MATCH_PARENT, ActionBar.LayoutParams.MATCH_PARENT));
((FrameLayout) findViewById(R.id.activity_main)).addView(preview);
preview.setKeepScreenOn(true);
jpegCallback = new Camera.PictureCallback()
{
public void onPictureTaken(final byte[] data,
final Camera camera)
{
new SaveImageTask().execute(data);
resetCam();
Log.d(TAG, "onPictureTaken - jpeg");
}
};
preview.setOnClickListener(new View.OnClickListener()
{
@Override
public void onClick(View view)
{
if(camera != null)
{
camera.takePicture(null,
// ShutterCallback shutter
null,
// PictureCallback raw,
null,
// PictureCallback postview,
jpegCallback); // PictureCallback jpeg);
}
}
});
}
@Override
protected void onResume()
{
super.onResume();
getCamera(CameraInfo.CAMERA_FACING_BACK);
if(camera != null)
{
camera.startPreview();
preview.setCamera(camera);
}
}
@Override
protected void onPause()
{
if(camera != null)
{
camera.stopPreview();
preview.setCamera(null);
camera.release();
camera = null;
}
super.onPause();
}
private void getCamera(final int desiredCamera)
{
if(!getPackageManager()
.hasSystemFeature(PackageManager.FEATURE_CAMERA))
{
Toast.makeText(this, "No camera on this device", Toast.LENGTH_LONG)
.show();
}
else
{
cameraId = findCamera(desiredCamera);
if(cameraId < 0)
{
Toast.makeText(this, "Camera no found.",
Toast.LENGTH_LONG).show();
}
else
{
Log.d(TAG, Integer.toString(cameraId));
camera = Camera.open(cameraId);
camera.setDisplayOrientation(90);
}
}
}
private int findCamera(final int desiredCamera)
{
final int numberOfCameras;
int cameraId;
numberOfCameras = Camera.getNumberOfCameras();
cameraId = -1;
for(int i = 0; i < numberOfCameras; i++)
{
final CameraInfo info;
info = new CameraInfo();
Camera.getCameraInfo(i, info);
if(info.facing == desiredCamera)
{
Log.d(TAG, "Camera found");
cameraId = i;
break;
}
}
return (cameraId);
}
private void resetCam()
{
camera.startPreview();
preview.setCamera(camera);
}
private void refreshGallery(final File file)
{
final Intent mediaScanIntent;
mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
mediaScanIntent.setData(Uri.fromFile(file));
sendBroadcast(mediaScanIntent);
}
private class SaveImageTask
extends AsyncTask<byte[], Void, Void>
{
@Override
protected Void doInBackground(final byte[]... data)
{
FileOutputStream outStream;
final File sdCard;
final File dir;
final String fileName;
final File outFile;
sdCard = Environment.getExternalStorageDirectory();
dir = new File (sdCard.getAbsolutePath() + "/camtest");
dir.mkdirs();
fileName = String.format("%d.jpg", System.currentTimeMillis());
outFile = new File(dir, fileName);
outStream = null;
try
{
outStream = new FileOutputStream(outFile);
outStream.write(data[0]);
outStream.flush();
Log.d(TAG, "onPictureTaken - wrote bytes: " + data.length + " to " + outFile.getAbsolutePath());
refreshGallery(outFile);
}
catch(final FileNotFoundException ex)
{
Log.e(TAG, "File not found", ex);
}
catch(final IOException ex)
{
Log.e(TAG, "IOException", ex);
}
finally
{
try
{
if(outStream != null)
{
outStream.close();
}
}
catch(final IOException ex)
{
Log.e(TAG, "IOException", ex);
}
}
return null;
}
}
}
Preview
class Preview
extends ViewGroup
implements Callback
{
private final String TAG = Preview.class.getName();
private SurfaceView surfaceView;
private SurfaceHolder holder;
private Camera.Size previewSize;
private List<Camera.Size> supportedPreviewSizes;
private Camera camera;
Preview(final Context context,
final SurfaceView sv)
{
super(context);
surfaceView = sv;
holder = surfaceView.getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void setCamera(Camera camera)
{
this.camera = camera;
if(this.camera != null)
{
final Camera.Parameters params;
final List<String> focusModes;
supportedPreviewSizes = this.camera.getParameters().getSupportedPreviewSizes();
requestLayout();
params = this.camera.getParameters();
focusModes = params.getSupportedFocusModes();
if(focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
this.camera.setParameters(params);
}
}
}
@Override
protected void onMeasure(final int widthMeasureSpec,
final int heightMeasureSpec)
{
// We purposely disregard child measurements because we act as a
// wrapper to a SurfaceView that centers the camera preview instead
// of stretching it.
final int width;
final int height;
width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec);
height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
setMeasuredDimension(width, height);
if(supportedPreviewSizes != null)
{
previewSize = getOptimalPreviewSize(supportedPreviewSizes, width, height);
}
}
@Override
protected void onLayout(boolean changed,
int l,
int t,
int r,
int b)
{
if (changed && getChildCount() > 0)
{
final View child = getChildAt(0);
final int width = r - l;
final int height = b - t;
int previewWidth = width;
int previewHeight = height;
if (previewSize != null)
{
previewWidth = previewSize.width;
previewHeight = previewSize.height;
}
// Center the child SurfaceView within the parent.
if (width * previewHeight > height * previewWidth)
{
final int scaledChildWidth = previewWidth * height / previewHeight;
child.layout((width - scaledChildWidth) / 2, 0,
(width + scaledChildWidth) / 2, height);
}
else
{
final int scaledChildHeight = previewHeight * width / previewWidth;
child.layout(0, (height - scaledChildHeight) / 2,
width, (height + scaledChildHeight) / 2);
}
}
}
public void surfaceCreated(SurfaceHolder holder)
{
// The Surface has been created, acquire the camera and tell it where
// to draw.
try
{
if(camera != null)
{
camera.setPreviewDisplay(holder);
}
}
catch (IOException exception)
{
Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
}
}
public void surfaceDestroyed(SurfaceHolder holder)
{
// Surface will be destroyed when we return, so stop the preview.
if(camera != null)
{
camera.stopPreview();
}
}
private Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes,
int w,
int h)
{
final double ASPECT_TOLERANCE = 0.1;
double targetRatio = (double) w / h;
if (sizes == null) return null;
Camera.Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
// Try to find a size that matches both the target aspect ratio and height
for (Camera.Size size : sizes)
{
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
// Could not find one matching the aspect ratio; ignore that requirement
if (optimalSize == null)
{
minDiff = Double.MAX_VALUE;
for (Camera.Size size : sizes)
{
if (Math.abs(size.height - targetHeight) < minDiff)
{
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
public void surfaceChanged(SurfaceHolder holder,
int format,
int width,
int height)
{
if(camera != null)
{
Camera.Parameters parameters = camera.getParameters();
parameters.setPreviewSize(previewSize.width, previewSize.height);
requestLayout();
camera.setParameters(parameters);
camera.startPreview();
}
}