I'm trying to make a floating camera view for my screen recorder application.
FloatingViewService.java
public class FloatingViewService extends LifecycleService implements CameraXConfig.Provider, LifecycleOwner {
private View mFloatingView;
private WindowManager mWindowManager;
PreviewView previewView;
private ListenableFuture<ProcessCameraProvider> cameraProviderFuture;
public FloatingViewService() {
}
@Nullable
@Override
public IBinder onBind(Intent intent) {
super.onBind(intent);
return null;
}
@Override
public void onCreate() {
super.onCreate();
mFloatingView = LayoutInflater.from(this).inflate(R.layout.layout_floating_widget, null);
final WindowManager.LayoutParams params;
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.O) {
params = new WindowManager.LayoutParams(
WindowManager.LayoutParams.WRAP_CONTENT,
WindowManager.LayoutParams.WRAP_CONTENT,
WindowManager.LayoutParams.TYPE_APPLICATION_OVERLAY,
WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE,
PixelFormat.TRANSLUCENT);
} else {
params = new WindowManager.LayoutParams(
WindowManager.LayoutParams.WRAP_CONTENT,
WindowManager.LayoutParams.WRAP_CONTENT,
WindowManager.LayoutParams.TYPE_PHONE,
WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE,
PixelFormat.TRANSLUCENT);
}
params.width = 400;
params.height = 300;
mWindowManager = (WindowManager) getSystemService(WINDOW_SERVICE);
mWindowManager.addView(mFloatingView, params);
mFloatingView.findViewById(R.id.previewFrame).setOnTouchListener(new View.OnTouchListener() {
private int initialX;
private int initialY;
private float initialTouchX;
private float initialTouchY;
@Override
public boolean onTouch(View v, MotionEvent event) {
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
initialX = params.x;
initialY = params.y;
initialTouchX = event.getRawX();
initialTouchY = event.getRawY();
return true;
case MotionEvent.ACTION_UP:
return true;
case MotionEvent.ACTION_MOVE:
//this code is helping the widget to move around the screen with fingers
params.x = initialX + (int) (event.getRawX() - initialTouchX);
params.y = initialY + (int) (event.getRawY() - initialTouchY);
mWindowManager.updateViewLayout(mFloatingView, params);
return true;
}
return false;
}
});
cameraFun(mFloatingView);
}
public void cameraFun(View mFloatingView) {
previewView = mFloatingView.findViewById(R.id.previewFrame);
cameraProviderFuture = ProcessCameraProvider.getInstance(this);
cameraProviderFuture.addListener(() -> {
try {
ProcessCameraProvider cameraProvider = cameraProviderFuture.get();
bindPreview(cameraProvider);
} catch (ExecutionException | InterruptedException e) {
}
}, ContextCompat.getMainExecutor(this));
}
void bindPreview(@NonNull ProcessCameraProvider cameraProvider) {
Preview preview = new Preview.Builder()
.build();
CameraSelector cameraSelector = new CameraSelector.Builder()
.requireLensFacing(CameraSelector.LENS_FACING_FRONT)
.build();
preview.setSurfaceProvider(previewView.createSurfaceProvider());
OrientationEventListener orientationEventListener = new OrientationEventListener(this) {
@Override
public void onOrientationChanged(int orientation) {
int rotation;
int orientationcheck = getResources().getConfiguration().orientation;
if (orientationcheck == Configuration.ORIENTATION_PORTRAIT) {
if (orientation >= 45 && orientation < 135) {
rotation = 360;
} else if (orientation >= 135 && orientation < 225) {
rotation = 180;
} else if (orientation >= 225 && orientation < 315) {
rotation = 360;
} else {
rotation = 0;
}
Log.i("orientation", String.valueOf(orientation) + "= ORIENTATION_PORTRAIT" + orientationcheck);
} else {
if (orientation >= 45 && orientation < 135) {
rotation = 90;
} else if (orientation >= 135 && orientation < 225) {
rotation = 180;
} else if (orientation >= 225 && orientation < 315) {
rotation = 270;
} else {
rotation = 0;
}
Log.i("orientation", String.valueOf(orientation) + "= ORIENTATION_Land" + orientationcheck);
}
Log.i("orientation", String.valueOf(orientation) + "= " + orientationcheck);
previewView.setRotation(rotation);
}
};
orientationEventListener.enable();
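// Note: this orientation listener is enabled here but never disabled; calling orientationEventListener.disable() in onDestroy() would avoid leaving the sensor callback registered after the service stops.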
cameraProvider.bindToLifecycle(this, cameraSelector, preview);
}
@NonNull
@Override
public CameraXConfig getCameraXConfig() {
return Camera2Config.defaultConfig();
}
public void onDestroy() {
super.onDestroy();
mWindowManager.removeView(mFloatingView);
}
}
layout_floating_widget.xml
<?xml version="1.0" encoding="utf-8"?>
<androidx.camera.view.PreviewView xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/previewFrame"
android:layout_width="300dp"
android:layout_height="250dp"
android:layout_alignParentStart="true"
android:layout_alignParentTop="true"
android:layout_alignParentEnd="true"
android:layout_alignParentBottom="true"
android:layout_gravity="center" />
BackgroundService.java
Below is how I call FloatingViewService.java.
public class BackgroundService extends Service {
Boolean onoff = true;
@Nullable
@Override
public IBinder onBind(Intent intent) {
return null;
}
public void onCreate() {
super.onCreate();
new FloatingViewService();
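// Note: instantiating a Service with "new" creates an object that Android never starts or manages; only the startService()/stopService() calls below control the real FloatingViewService instance.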
if (onoff) {
onoff = false;
startService(new Intent(this, FloatingViewService.class));
} else {
onoff = true;
stopService(new Intent(this, FloatingViewService.class));
}
}
}
ERROR:
java.lang.IllegalArgumentException: Trying to create LifecycleCamera with destroyed lifecycle.
at androidx.camera.lifecycle.LifecycleCameraRepository.createLifecycleCamera(LifecycleCameraRepository.java:103)
at androidx.camera.lifecycle.ProcessCameraProvider.bindToLifecycle(ProcessCameraProvider.java:414)
at androidx.camera.lifecycle.ProcessCameraProvider.bindToLifecycle(ProcessCameraProvider.java:275)
at com.rdbrain.FloatingViewService.bindPreview(FloatingViewService.java:196)
at com.rdbrain.FloatingViewService.lambda$cameraFun$0$FloatingViewService(FloatingViewService.java:138)
at com.rdbrain.-$$Lambda$FloatingViewService$qkM-fNI79D-TvnilmrNFZjQYwlI.run(Unknown Source:2)
at android.os.Handler.handleCallback(Handler.java:790)
at android.os.Handler.dispatchMessage(Handler.java:99)
at android.os.Looper.loop(Looper.java:164)
at android.app.ActivityThread.main(ActivityThread.java:6626)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:438)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:811)
Note: This code works as a separate module, but when I connect it to the screen recorder it does not work. I have set up all the necessary permissions and services in the manifest.
Use viewLifecycleOwner instead of this in cameraProvider.bindToLifecycle(this, cameraSelector, preview);
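viewLifecycleOwner only exists on Fragments; in the LifecycleService above, the closest equivalent is to make sure the service's own lifecycle is still alive when the camera-provider future resolves, because the stack trace shows the lifecycle is already destroyed at bind time. A hedged sketch of that guard, reusing the fields and setup from the question (this is a sketch, not a verified fix):
void bindPreview(@NonNull ProcessCameraProvider cameraProvider) {
// Skip binding if the service has already been destroyed; that is exactly the
// state the IllegalArgumentException above complains about.
if (getLifecycle().getCurrentState() == Lifecycle.State.DESTROYED) {
return;
}
Preview preview = new Preview.Builder().build();
CameraSelector cameraSelector = new CameraSelector.Builder()
.requireLensFacing(CameraSelector.LENS_FACING_FRONT)
.build();
preview.setSurfaceProvider(previewView.createSurfaceProvider());
cameraProvider.unbindAll(); // drop any stale bindings from a previous start
cameraProvider.bindToLifecycle(this, cameraSelector, preview);
}
If the preview is later hosted in a Fragment instead, that is where getViewLifecycleOwner() would replace this in bindToLifecycle().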
I added my application to GitHub so you can find my bug.
The bug: when I open my app for the first time, open "info", then "settings", click through all the options, and then open "new measurement", it goes back to the previous screen (settings) instead of to the new measurement. Could anybody help me?
Here is the link to the app.
Edit:
Here are the classes that are probably meaningful:
Stages.java
public class Stages extends BaseActivity {
private final Stage0StageFragment stageZeroFragment = new Stage0StageFragment();
private final Stage1StageFragment stageOneFragment = new Stage1StageFragment();
private final Stage2StageFragment stageTwoFragment = new Stage2StageFragment();
private final Stage3StageFragment stageThreeFragment = new Stage3StageFragment();
private BottomNavigationView bottomNavView;
private int startingPosition, newPosition;
OnSwitchFragmentFromStageTwo onSwitchFragmentFromStageTwo;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Util.setThemeBasedOnPrefs(this);
setContentView(R.layout.stages);
spEditor.putBoolean("wantToClosePxCmInfo", false).apply();
bottomNavView = findViewById(R.id.bottom_navigation);
bottomNavView.setItemIconTintList(null);
bottomNavView.setOnNavigationItemSelectedListener(item -> {
if (bottomNavView.getSelectedItemId() == R.id.navigation_stage_zero) {
if (!sp.getBoolean("correctionDone", false)) {
AlertDialog alertDialog = Util.createAlertDialog(Stages.this, android.R.drawable.ic_dialog_info, R.string.hide_dialog_title, getString(R.string.stage_zero_confirmation), (dialog, which) -> {
spEditor.putBoolean("correctionDone", true).apply();
bottomNavView.setSelectedItemId(item.getItemId());
showFragment(item.getItemId());
});
alertDialog.setOnShowListener(arg0 -> setAlertDialogButtonsAttributes(alertDialog));
alertDialog.show();
return false;
} else {
showFragment(item.getItemId());
return true;
}
} else if (bottomNavView.getSelectedItemId() == R.id.navigation_stage_two) {
try {
if (onSwitchFragmentFromStageTwo.onSwitchFragmentFromFragmentTwo() <= 0.5 && !sp.getBoolean("wantToClosePxCmInfo", false)) {
AlertDialog ad = Util.createAlertDialog(Stages.this, android.R.drawable.ic_dialog_alert, R.string.hide_dialog_title_alert,
getResources().getString(R.string.benchmark_not_drawn), (dialog, which) -> {
spEditor.putBoolean("wantToClosePxCmInfo", true).apply();
bottomNavView.setSelectedItemId(item.getItemId());
showFragment(item.getItemId());
});
ad.setOnShowListener(arg0 -> setAlertDialogButtonsAttributes(ad));
ad.show();
return false;
} else {
showFragment(item.getItemId());
return true;
}
} catch (PackageManager.NameNotFoundException e) {
e.printStackTrace();
}
} else {
showFragment(item.getItemId());
}
return true;
});
FragmentTransaction ft = getSupportFragmentManager().beginTransaction();
if (!sp.getBoolean("correctionDone", false))
ft.replace(R.id.content_frame, stageZeroFragment);
else {
ft.replace(R.id.content_frame, stageOneFragment);
bottomNavView.setSelectedItemId(R.id.navigation_stage_one);
}
ft.commit();
}
@Override
BaseActivity getActivity() {
return this;
}
private void setAlertDialogButtonsAttributes(AlertDialog alertDialog2) {
alertDialog2.getButton(DialogInterface.BUTTON_NEGATIVE).setBackground(AppCompatResources.getDrawable(this, R.drawable.button_selector));
alertDialog2.getButton(DialogInterface.BUTTON_POSITIVE).setBackground(AppCompatResources.getDrawable(this, R.drawable.button_selector));
LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(
LinearLayout.LayoutParams.MATCH_PARENT,
LinearLayout.LayoutParams.WRAP_CONTENT,
1.0f
);
params.setMargins(10, 0, 10, 0);
alertDialog2.getButton(DialogInterface.BUTTON_NEGATIVE).setLayoutParams(params);
alertDialog2.getButton(DialogInterface.BUTTON_POSITIVE).setLayoutParams(params);
}
public void showFragment(int viewId) {
Fragment fragment = null;
switch (viewId) {
case R.id.navigation_stage_zero:
if (bottomNavView.getSelectedItemId() != R.id.navigation_stage_zero) {
fragment = stageZeroFragment;
newPosition = 0;
}
break;
case R.id.navigation_stage_one:
if (bottomNavView.getSelectedItemId() != R.id.navigation_stage_one) {
fragment = stageOneFragment;
newPosition = 1;
}
break;
case R.id.navigation_stage_two:
if (bottomNavView.getSelectedItemId() != R.id.navigation_stage_two) {
fragment = stageTwoFragment;
newPosition = 2;
}
break;
case R.id.navigation_stage_three:
if (bottomNavView.getSelectedItemId() != R.id.navigation_stage_three) {
fragment = stageThreeFragment;
newPosition = 3;
}
break;
}
if (fragment != null) {
if (startingPosition > newPosition) {
getSupportFragmentManager()
.beginTransaction()
.setCustomAnimations(R.anim.slide_in_left, R.anim.slide_out_right)
.replace(R.id.content_frame, fragment).commit();
}
if (startingPosition < newPosition) {
getSupportFragmentManager()
.beginTransaction()
.setCustomAnimations(R.anim.slide_in_right, R.anim.slide_out_left)
.replace(R.id.content_frame, fragment).commit();
}
startingPosition = newPosition;
}
}
}
Stage0StageFragment.java
public class Stage0StageFragment extends AbstractStageFragment {
private CheckBox checkboxSkip;
private int xCorrection, yCorrection;
private IndicatorSeekBar indicatorSeekBar;
private IndicatorSeekBar indicatorSeekBar2;
private AlertDialog alertDialog;
@Override
int getLayout() {
return R.layout.stage_zero;
}
@Override
public void onResume() {
super.onResume();
new CameraOpenerTask(this).execute();
}
@Override
public void onPause() {
super.onPause();
spEditor.putInt("indicator1", indicatorSeekBar.getProgress());
spEditor.putInt("indicator2", indicatorSeekBar2.getProgress());
spEditor.apply();
if (alertDialog.isShowing())
alertDialog.dismiss();
}
@Override
@SuppressLint({"ClickableViewAccessibility", "CommitPrefEdits"})
public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
view.setOnTouchListener((v, event) -> {
int aktX = (int) event.getX();
int aktY = (int) event.getY();
setParamsToDrawRectangle(aktX, aktY);
return true;
});
configureSeekBars(view);
configureInitDialog();
}
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
Imgproc.cvtColor(inputFrame.rgba(), mRgba, Imgproc.COLOR_RGBA2RGB, 1);
Core.transpose(mGray, mGray);
Core.flip(mGray, mGray, -1);
Imgproc.line(mRgba, p1, p2, Util.BLUE);
Imgproc.line(mRgba, p3, p4, Util.BLUE);
Imgproc.rectangle(mRgba, new Point(touchedYD, touchedXL), new Point(touchedYU, touchedXR), Util.WHITE, 2);
return mRgba;
}
private void setParamsToDrawRectangle(int aktX, int aktY) {
// horizontally
if (-aktX + camLayHeight + (xCorrection * 10) < (camLayHeight / 2)) {
touchedXR = -aktX + camLayHeight + (xCorrection * 10);
if (touchedXR < 0) touchedXR = 0;
} else {
touchedXL = -aktX + camLayHeight + (xCorrection * 10);
if (touchedXL > camLayHeight) touchedXL = camLayHeight;
}
// vertically
if (aktY - (yCorrection * 10) < (camLayWidth / 2)) {
touchedYU = aktY - (yCorrection * 10);
if (touchedYU < 0) touchedYU = 0;
} else {
touchedYD = aktY - (yCorrection * 10);
if (touchedYD > camLayWidth) touchedYD = camLayWidth;
}
}
public void configureSeekBars(View view) {
indicatorSeekBar = view.findViewById(R.id.percent_indicator);
indicatorSeekBar.setOnSeekChangeListener(new MyOnSeekChangeListener("x"));
indicatorSeekBar2 = view.findViewById(R.id.percent_indicator2);
indicatorSeekBar2.setOnSeekChangeListener(new MyOnSeekChangeListener("y"));
TextView tv = view.findViewById(R.id.info_about_indicators);
if (sp.getInt("indicator1", 0) != 0)
indicatorSeekBar.setProgress(sp.getInt("indicator1", 0));
if (sp.getInt("indicator2", 0) != 0)
indicatorSeekBar2.setProgress(sp.getInt("indicator2", 0));
if (sp.getInt("indicator1", 0) != 0 || sp.getInt("indicator2", 0) != 0)
tv.setVisibility(View.VISIBLE);
}
public void setScrollViewParamsDependingOnFont(View checkboxLayout) {
ScrollView layout = checkboxLayout.findViewById(R.id.scrollView);
ViewGroup.LayoutParams params = layout.getLayoutParams();
String fontPref = sp.getString("font", "Arial");
if (fontPref.equals("Ginger"))
params.height = (int) getResources().getDimension(R.dimen.height_of_checkbox);
if (fontPref.equals("Arial")) params.height = ViewGroup.LayoutParams.WRAP_CONTENT;
layout.setLayoutParams(params);
}
void configureInitDialog() {
View checkboxLayout = View.inflate(getActivity(), R.layout.checkbox, null);
setScrollViewParamsDependingOnFont(checkboxLayout);
alertDialog = new AlertDialog.Builder(getActivity(), R.style.MyStyle).create();
alertDialog.setView(checkboxLayout);
alertDialog.setIcon(android.R.drawable.ic_dialog_info);
alertDialog.setTitle("Info");
alertDialog.setMessage(getResources().getString(R.string.stage_zero_dialog));
alertDialog.setCancelable(false);
alertDialog.setButton(DialogInterface.BUTTON_NEUTRAL, "Ok", (dialogInterface, i) -> {
String checkBoxResult = "";
checkboxSkip = checkboxLayout.findViewById(R.id.checkboxSkip);
if (checkboxSkip.isChecked())
checkBoxResult = "linia2";
if (checkBoxResult.equals("linia2"))
spEditor.putBoolean("hideDialog", true).apply();
});
alertDialog.setOnShowListener(arg0 -> alertDialog.getButton(DialogInterface.BUTTON_NEUTRAL).setBackground(ResourcesCompat.getDrawable(getResources(), R.drawable.button_selector, null)));
if (!sp.getBoolean("hideDialog", false))
alertDialog.show();
}
public class MyOnSeekChangeListener implements OnSeekChangeListener {
private final String axisCorrection;
MyOnSeekChangeListener(String axisCorrection) {
this.axisCorrection = axisCorrection;
}
@Override
public void onSeeking(SeekParams seekParams) {
if (axisCorrection.equals("x"))
xCorrection = seekParams.progress;
if (axisCorrection.equals("y"))
yCorrection = seekParams.progress;
spEditor.putInt("xCorrection", xCorrection);
spEditor.putInt("yCorrection", yCorrection);
spEditor.apply();
}
@Override
public void onStartTrackingTouch(IndicatorSeekBar indicatorSeekBar) {
}
@Override
public void onStopTrackingTouch(IndicatorSeekBar indicatorSeekBar) {
}
}
}
Something probably goes wrong in those classes when opening "new measurement".
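To narrow it down, here is a small debugging sketch (it only uses the listener already shown in Stages.java): log which item was tapped versus which item the BottomNavigationView still reports as selected, since several branches above key off bottomNavView.getSelectedItemId() rather than the tapped item.
// At the top of the existing setOnNavigationItemSelectedListener lambda in Stages.java:
Log.d("Stages", "tapped item = " + item.getItemId());
Log.d("Stages", "still selected = " + bottomNavView.getSelectedItemId());
Comparing the two values in Logcat when "new measurement" opens the wrong screen should show whether showFragment() is being driven by the old selection.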
2020-04-22 16:14:49.759 1809-1809/? E/servicemanager: Could not find android.hardware.power.IPower/default in the VINTF manifest.
This keeps popping up. I am coding a camera feature where, when a button is clicked, an image is cropped.
Here is a custom view I am adding to a fragment.
public class DrawView extends View {
Point[] points = new Point[4];
/**
* point1 and point3 are in the same group; likewise point2 and point4
*/
int groupId = -1;
// array that holds the balls
private ArrayList<ColorBall> colorballs = new ArrayList<>();
private int mStrokeColor = Color.parseColor("#AADB1255");
private int mFillColor = Color.parseColor("#55DB1255");
private Rect mCropRect = new Rect();
// variable to know what ball is being dragged
private int balID = 0;
Paint paint;
public DrawView(Context context) {
this(context, null);
}
public DrawView(Context context, AttributeSet attrs) {
this(context, attrs, -1);
}
public DrawView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
init();
}
private void init() {
paint = new Paint();
setFocusable(true); // necessary for getting the touch events
}
private void initRectangle(int X, int Y) {
//initialize rectangle.
points[0] = new Point();
points[0].x = X - 200;
points[0].y = Y - 100;
points[1] = new Point();
points[1].x = X;
points[1].y = Y + 30;
points[2] = new Point();
points[2].x = X + 30;
points[2].y = Y + 30;
points[3] = new Point();
points[3].x = X + 30;
points[3].y = Y;
balID = 2;
groupId = 1;
// declare each ball with the ColorBall class
for (int i = 0; i < points.length; i++) {
colorballs.add(new ColorBall(getContext(), R.drawable.gray_circle, points[i], i));
}
}
// the method that draws the balls
@Override
protected void onDraw(Canvas canvas) {
if(points[3]==null) {
//point4 null when view first create
initRectangle(getWidth() / 2, getHeight() / 2);
}
int left, top, right, bottom;
left = points[0].x;
top = points[0].y;
right = points[0].x;
bottom = points[0].y;
for (int i = 1; i < points.length; i++) {
left = left > points[i].x ? points[i].x : left;
top = top > points[i].y ? points[i].y : top;
right = right < points[i].x ? points[i].x : right;
bottom = bottom < points[i].y ? points[i].y : bottom;
}
paint.setAntiAlias(true);
paint.setDither(true);
paint.setStrokeJoin(Paint.Join.ROUND);
paint.setStrokeWidth(5);
//draw stroke
paint.setStyle(Paint.Style.STROKE);
paint.setColor(mStrokeColor);
paint.setStrokeWidth(2);
mCropRect.left = left + colorballs.get(0).getWidthOfBall() / 2;
mCropRect.top = top + colorballs.get(0).getWidthOfBall() / 2;
mCropRect.right = right + colorballs.get(2).getWidthOfBall() / 2;
mCropRect.bottom = bottom + colorballs.get(3).getWidthOfBall() / 2;
canvas.drawRect(mCropRect, paint);
//fill the rectangle
paint.setStyle(Paint.Style.FILL);
paint.setColor(mFillColor);
paint.setStrokeWidth(0);
canvas.drawRect(mCropRect, paint);
// draw the balls on the canvas
paint.setColor(Color.RED);
paint.setTextSize(18);
paint.setStrokeWidth(0);
for (int i =0; i < colorballs.size(); i ++) {
ColorBall ball = colorballs.get(i);
canvas.drawBitmap(ball.getBitmap(), ball.getX(), ball.getY(),
paint);
canvas.drawText("" + (i+1), ball.getX(), ball.getY(), paint);
}
}
// events when touching the screen
public boolean onTouchEvent(MotionEvent event) {
int eventAction = event.getAction();
int X = (int) event.getX();
int Y = (int) event.getY();
switch (eventAction) {
case MotionEvent.ACTION_DOWN: // touch down so check if the finger is on
// a ball
if (points[0] == null) {
initRectangle(X, Y);
} else {
//resize rectangle
balID = -1;
groupId = -1;
for (int i = colorballs.size()-1; i>=0; i--) {
ColorBall ball = colorballs.get(i);
// check if inside the bounds of the ball (circle)
// get the center for the ball
int centerX = ball.getX() + ball.getWidthOfBall();
int centerY = ball.getY() + ball.getHeightOfBall();
paint.setColor(Color.CYAN);
// calculate the radius from the touch to the center of the
// ball
double radCircle = Math
.sqrt((double) (((centerX - X) * (centerX - X)) + (centerY - Y)
* (centerY - Y)));
if (radCircle < ball.getWidthOfBall()) {
balID = ball.getID();
if (balID == 1 || balID == 3) {
groupId = 2;
} else {
groupId = 1;
}
invalidate();
break;
}
invalidate();
}
}
break;
case MotionEvent.ACTION_MOVE: // touch drag with the ball
if (balID > -1) {
// move the balls the same as the finger
colorballs.get(balID).setX(X);
colorballs.get(balID).setY(Y);
paint.setColor(Color.CYAN);
if (groupId == 1) {
colorballs.get(1).setX(colorballs.get(0).getX());
colorballs.get(1).setY(colorballs.get(2).getY());
colorballs.get(3).setX(colorballs.get(2).getX());
colorballs.get(3).setY(colorballs.get(0).getY());
} else {
colorballs.get(0).setX(colorballs.get(1).getX());
colorballs.get(0).setY(colorballs.get(3).getY());
colorballs.get(2).setX(colorballs.get(3).getX());
colorballs.get(2).setY(colorballs.get(1).getY());
}
invalidate();
}
break;
case MotionEvent.ACTION_UP:
// touch drop - just do things here after dropping
break;
}
// redraw the canvas
invalidate();
return true;
}
public Drawable doTheCrop(Bitmap sourceBitmap) throws IOException {
//Bitmap sourceBitmap = null;
//Drawable backgroundDrawable = getBackground();
/*
if (backgroundDrawable instanceof BitmapDrawable) {
BitmapDrawable bitmapDrawable = (BitmapDrawable) backgroundDrawable;
if(bitmapDrawable.getBitmap() != null) {
sourceBitmap = bitmapDrawable.getBitmap();
}
}*/
//source bitmap was scaled, you should calculate the rate
float widthRate = ((float) sourceBitmap.getWidth()) / getWidth();
float heightRate = ((float) sourceBitmap.getHeight()) / getHeight();
//crop the source bitmap with rate value
int left = (int) (mCropRect.left * widthRate);
int top = (int) (mCropRect.top * heightRate);
int right = (int) (mCropRect.right * widthRate);
int bottom = (int) (mCropRect.bottom * heightRate);
Bitmap croppedBitmap = Bitmap.createBitmap(sourceBitmap, left, top, right - left, bottom - top);
Drawable drawable = new BitmapDrawable(getResources(), croppedBitmap);
return drawable;
/*
setContentView(R.layout.fragment_dashboard);
Button btn = (Button)findViewById(R.id.capture);
if (btn == null){
System.out.println("NULL");
}
try{
btn.setText("HI");
}
catch (Exception e){
}
//setBackground(drawable);*/
//savebitmap(croppedBitmap);
}
private File savebitmap(Bitmap bmp) throws IOException {
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
bmp.compress(Bitmap.CompressFormat.JPEG, 60, bytes);
File f = new File(Environment.getExternalStorageDirectory()
+ "/" + "testimage.jpg");
Toast.makeText(getContext(), "YUP", Toast.LENGTH_LONG).show();
f.createNewFile();
FileOutputStream fo = new FileOutputStream(f);
fo.write(bytes.toByteArray());
fo.close();
return f;
}
public static class ColorBall {
Bitmap bitmap;
Context mContext;
Point point;
int id;
public ColorBall(Context context, int resourceId, Point point, int id) {
this.id = id;
bitmap = BitmapFactory.decodeResource(context.getResources(),
resourceId);
mContext = context;
this.point = point;
}
public int getWidthOfBall() {
return bitmap.getWidth();
}
public int getHeightOfBall() {
return bitmap.getHeight();
}
public Bitmap getBitmap() {
return bitmap;
}
public int getX() {
return point.x;
}
public int getY() {
return point.y;
}
public int getID() {
return id;
}
public void setX(int x) {
point.x = x;
}
public void setY(int y) {
point.y = y;
}
}
}
Here is the fragment that I have added a camera to; it is basically the main part of the application that I am working on.
public class DashboardFragment extends Fragment {
private DashboardViewModel dashboardViewModel;
//All my constants
private DrawView mDrawView;
private Drawable imgDraw;
private TextureView txtView;
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static{
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 180);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private String cameraID;
private String pathway;
CameraDevice cameraDevice;
CameraCaptureSession cameraCaptureSession;
CaptureRequest captureRequest;
CaptureRequest.Builder captureRequestBuilder;
private Size imageDimensions;
private ImageReader imageReader;
private File file;
Handler mBackgroundHandler;
HandlerThread mBackgroundThread;
public View onCreateView(@NonNull LayoutInflater inflater,
ViewGroup container, Bundle savedInstanceState) {
dashboardViewModel =
ViewModelProviders.of(this).get(DashboardViewModel.class);
View root = inflater.inflate(R.layout.fragment_dashboard, container, false);
try{
txtView = (TextureView)root.findViewById(R.id.textureView);
txtView.setSurfaceTextureListener(textureListener);
mDrawView = root.findViewById(draw_view);
Button cap = (Button)root.findViewById(R.id.capture);
cap.setClickable(true);
cap.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
Log.i("HOLA","HOLA");
takePicture();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
});
}
catch (Exception e){
Log.i("HI",e.toString());
}
/*
txtView = (TextureView)root.findViewById(R.id.textureView);
txtView.setSurfaceTextureListener(textureListener);
mDrawView = root.findViewById(R.id.draw_view);
Button cap = (Button)root.findViewById(R.id.capture);
cap.setClickable(true);
cap.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
try {
Log.i("HOLA","HOLA");
takePicture();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
});*/
return root;
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults){
if (requestCode == 101){
if (grantResults[0] == PackageManager.PERMISSION_DENIED){
Toast.makeText(getActivity().getApplicationContext(), "Permission is required",Toast.LENGTH_LONG);
}
}
}
TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1) {
try {
openCamera();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int i, int i1) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
cameraDevice = camera;
try {
createCameraPreview();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
cameraDevice.close();
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int i) {
cameraDevice.close();
cameraDevice = null;
}
};
private void createCameraPreview() throws CameraAccessException {
SurfaceTexture texture = txtView.getSurfaceTexture(); //?
texture.setDefaultBufferSize(imageDimensions.getWidth(), imageDimensions.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(surface);
cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
if (cameraDevice == null){
return;
}
cameraCaptureSession = session;
try {
updatePreview();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Toast.makeText(getActivity().getApplicationContext(), "CONFIGURATION", Toast.LENGTH_LONG);
}
}, null);
}
private void updatePreview() throws CameraAccessException {
if (cameraDevice == null){
return;
}
captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);
}
private void openCamera() throws CameraAccessException {
CameraManager manager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
cameraID = manager.getCameraIdList()[0];
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraID);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
imageDimensions = map.getOutputSizes(SurfaceTexture.class)[0];
if (ActivityCompat.checkSelfPermission(getActivity().getApplicationContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED
&& ActivityCompat.checkSelfPermission(getActivity().getApplicationContext(), Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED){
ActivityCompat.requestPermissions(getActivity(), new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE}, 101);
return;
}
manager.openCamera(cameraID, stateCallback, null);
}
private void takePicture() throws CameraAccessException {
if (cameraDevice == null) {
Log.i("NOt working", "hi");
return;
}
CameraManager manager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
Size[] jpegSizes = null;
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
int width = 640;
int height = 480;
if (jpegSizes != null && jpegSizes.length > 0) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
final ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(txtView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
int rotation = getActivity().getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
Long tsLong = System.currentTimeMillis() / 1000;
String ts = tsLong.toString();
file = new File(Environment.getExternalStorageDirectory() + "/" + ts + ".jpg");
pathway = Environment.getExternalStorageDirectory() + "/" + ts + ".jpg";
//cameraDevice.close();
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader imageReader) {
Image image = null;
//image = reader.acquireLatestImage();
image = reader.acquireNextImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes , 0, bytes.length);
try {
Drawable back = mDrawView.doTheCrop(bitmap);
Button btn = (Button)getView().findViewById(R.id.capture);
btn.setBackground(back);
} catch (IOException e) {
e.printStackTrace();
}
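// Note: the Image acquired above is never closed on this path; calling image.close() once the bytes have been copied frees the reader's single buffer (the ImageReader was created with maxImages = 1).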
/*
try {
save(bytes);
} catch (IOException e) {
e.printStackTrace();
} finally {
if (image != null){
image.close();
}
}*/
}
};
reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback(){
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result){
super.onCaptureCompleted(session, request, result);
try {
createCameraPreview();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
};
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
}
}, mBackgroundHandler);
}
private void save (byte[] bytes) throws IOException {
OutputStream outputStream = null;
outputStream = new FileOutputStream(file);
outputStream.write(bytes);
Toast.makeText(getActivity().getApplicationContext(),pathway,Toast.LENGTH_LONG).show();
outputStream.close();
imgDraw = Drawable.createFromPath(pathway);
//mDrawView.doTheCrop(imgDraw);
}
@Override
public void onResume(){
super.onResume();
startBackgroundThread();
if (txtView.isAvailable()){
try {
openCamera();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
else{
txtView.setSurfaceTextureListener(textureListener);
}
}
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("Camera Background");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
protected void stopBackgroundThread() throws InterruptedException{
mBackgroundThread.quitSafely();
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
}
@Override
public void onPause(){
try {
stopBackgroundThread();
} catch (InterruptedException e) {
e.printStackTrace();
}
super.onPause();
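// Note: the camera device and capture session opened in openCamera() are not closed here; releasing them in onPause() is usually needed so the camera is freed while the fragment is not visible.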
}
}
Here is the xml file for that fragment.
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".ui.dashboard.DashboardFragment">
<TextureView
android:id="@+id/textureView"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
<com.PeavlerDevelopment.OpinionMinion.ui.dashboard.DrawView
android:id="@+id/draw_view"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
<Button
android:id="@+id/capture"
android:layout_width="100dp"
android:layout_height="200dp"
android:clickable="true"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"></Button>
</androidx.constraintlayout.widget.ConstraintLayout>
The problem seems to lie somewhere in the doTheCrop method of the DrawView class.
If there is anything else that would help make the problem more clear, let me know! I will gladly share the github repo with you.
Thank you.
As you can see in the Android design documentation, VINTF stands for Vendor Interface, and it is a manifest structure used to aggregate data from the device. That specific log means that your manifest is missing something like this:
<hal>
<name>android.hardware.power</name>
<transport>hwbinder</transport>
<version>1.1</version>
<interface>
<name>IPower</name>
<instance>default</instance>
</interface>
</hal>
which basically is hardware power information.
I think it's not related to what you are trying to do, but I need more info than that log.
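Since you say the problem seems to be in the crop itself, one thing worth checking (an assumption on my part, I have not run your project): Bitmap.createBitmap() throws if the scaled rectangle reaches outside the source bitmap, which is easy to hit once mCropRect is multiplied by widthRate/heightRate. A minimal clamping sketch for doTheCrop():
// Hypothetical guard before the existing Bitmap.createBitmap call:
int left = Math.max(0, Math.min((int) (mCropRect.left * widthRate), sourceBitmap.getWidth() - 1));
int top = Math.max(0, Math.min((int) (mCropRect.top * heightRate), sourceBitmap.getHeight() - 1));
int right = Math.max(left + 1, Math.min((int) (mCropRect.right * widthRate), sourceBitmap.getWidth()));
int bottom = Math.max(top + 1, Math.min((int) (mCropRect.bottom * heightRate), sourceBitmap.getHeight()));
Bitmap croppedBitmap = Bitmap.createBitmap(sourceBitmap, left, top, right - left, bottom - top);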
I want to make an app which takes photos of faces only, but I get the whole image instead of just the face when I save it to storage. So how can I crop the face and save it to storage with the help of the Google Vision face detection API?
Also, how do I use a Frame in my code to get the list of faces, and how can I convert a face into a bitmap and save it to storage?
main.xml
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/topLayout"
android:orientation="vertical"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:keepScreenOn="true">
<com.google.android.gms.samples.vision.face.facetracker.ui.camera.CameraSourcePreview
android:id="@+id/preview"
android:layout_width="match_parent"
android:layout_height="match_parent">
<com.google.android.gms.samples.vision.face.facetracker.ui.camera.GraphicOverlay
android:id="@+id/faceOverlay"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</com.google.android.gms.samples.vision.face.facetracker.ui.camera.CameraSourcePreview>
</LinearLayout>
<android.support.v7.widget.AppCompatButton
android:id="@+id/take_pic_btn"
android:layout_gravity="bottom"
android:gravity="center"
android:background="@color/green"
android:layout_margin="10dp"
android:text="Take Image"
android:textStyle="bold"
android:textSize="20sp"
android:textAllCaps="false"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
</FrameLayout>
FaceTrackerActivity.java
public final class FaceTrackerActivity extends AppCompatActivity {
private static final String TAG = "FaceTracker";
private CameraSource mCameraSource = null;
private CameraSourcePreview mPreview;
private GraphicOverlay mGraphicOverlay;
private static final int RC_HANDLE_GMS = 9001;
// permission request codes need to be < 256
private static final int RC_HANDLE_CAMERA_PERM = 2;
private Button takePicButton;
FaceDetector detector;
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setContentView(R.layout.main);
mPreview = (CameraSourcePreview) findViewById(R.id.preview);
mGraphicOverlay = (GraphicOverlay) findViewById(R.id.faceOverlay);
takePicButton=(Button)findViewById(R.id.take_pic_btn);
int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
int gc = ActivityCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE);
if (rc == PackageManager.PERMISSION_GRANTED && gc == PackageManager.PERMISSION_GRANTED) {
createCameraSource();
} else {
requestCameraPermission();
}
takePicButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Toast.makeText(getApplicationContext(),"dilip",Toast.LENGTH_LONG).show();
captureImage();
}
});
}
private void requestCameraPermission() {
Log.w(TAG, "Camera permission is not granted. Requesting permission");
final String[] permissions = new String[]{Manifest.permission.CAMERA,Manifest.permission.WRITE_EXTERNAL_STORAGE};
if (!ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.CAMERA) && !ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.WRITE_EXTERNAL_STORAGE) ) {
ActivityCompat.requestPermissions(this, permissions, RC_HANDLE_CAMERA_PERM);
return;
}
final Activity thisActivity = this;
View.OnClickListener listener = new View.OnClickListener() {
@Override
public void onClick(View view) {
ActivityCompat.requestPermissions(thisActivity, permissions,
RC_HANDLE_CAMERA_PERM);
}
};
Snackbar.make(mGraphicOverlay, R.string.permission_camera_rationale,
Snackbar.LENGTH_INDEFINITE)
.setAction(R.string.ok, listener)
.show();
}
private void createCameraSource() {
Context context = getApplicationContext();
/* FaceDetector detector = new FaceDetector.Builder(context)
.setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
.build();
detector.setProcessor(
new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
.build());*/
detector= new FaceDetector.Builder(context)
.setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
.build();
MyFaceDetector myFaceDetector = new MyFaceDetector(detector);
detector.setProcessor(
new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
.build());
mCameraSource = new CameraSource.Builder(context, myFaceDetector)
.build();
if (!detector.isOperational()) {
}
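// Note: the CameraSource built just above with myFaceDetector is immediately replaced below, so the MyFaceDetector wrapper never receives any frames.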
mCameraSource = new CameraSource.Builder(context, detector)
.setRequestedPreviewSize(640, 480)
.setFacing(CameraSource.CAMERA_FACING_FRONT)
.setRequestedFps(10.0f)
.build();
}
/**
* Restarts the camera.
*/
@Override
protected void onResume() {
super.onResume();
startCameraSource();
}
/**
* Stops the camera.
*/
@Override
protected void onPause() {
super.onPause();
mPreview.stop();
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mCameraSource != null) {
mCameraSource.release();
}
}
/**
* Callback for the result from requesting permissions. This method
* is invoked for every call on {@link #requestPermissions(String[], int)}.
* <p>
* <strong>Note:</strong> It is possible that the permissions request interaction
* with the user is interrupted. In this case you will receive empty permissions
* and results arrays which should be treated as a cancellation.
* </p>
*
* @param requestCode The request code passed in {@link #requestPermissions(String[], int)}.
* @param permissions The requested permissions. Never null.
* @param grantResults The grant results for the corresponding permissions
* which is either {@link PackageManager#PERMISSION_GRANTED}
* or {@link PackageManager#PERMISSION_DENIED}. Never null.
* @see #requestPermissions(String[], int)
*/
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
if (requestCode != RC_HANDLE_CAMERA_PERM) {
Log.d(TAG, "Got unexpected permission result: " + requestCode);
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
return;
}
if (grantResults.length != 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED &&grantResults[1] == PackageManager.PERMISSION_GRANTED) {
Log.d(TAG, "Camera permission granted - initialize the camera source");
// we have permission, so create the camerasource
createCameraSource();
return;
}
Log.e(TAG, "Permission not granted: results len = " + grantResults.length +
" Result code = " + (grantResults.length > 0 ? grantResults[0] : "(empty)"));
DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
finish();
}
};
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Face Tracker sample")
.setMessage(R.string.no_camera_permission)
.setPositiveButton(R.string.ok, listener)
.show();
}
//==============================================================================================
// Camera Source Preview
//==============================================================================================
/**
* Starts or restarts the camera source, if it exists. If the camera source doesn't exist yet
* (e.g., because onResume was called before the camera source was created), this will be called
* again when the camera source is created.
*/
private void startCameraSource() {
// check that the device has play services available.
int code = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(
getApplicationContext());
if (code != ConnectionResult.SUCCESS) {
Dialog dlg =
GoogleApiAvailability.getInstance().getErrorDialog(this, code, RC_HANDLE_GMS);
dlg.show();
}
if (mCameraSource != null) {
try {
mPreview.start(mCameraSource, mGraphicOverlay);
} catch (IOException e) {
Log.e(TAG, "Unable to start camera source.", e);
mCameraSource.release();
mCameraSource = null;
}
}
}
//==============================================================================================
// Graphic Face Tracker
//==============================================================================================
/**
* Factory for creating a face tracker to be associated with a new face. The multiprocessor
* uses this factory to create face trackers as needed -- one for each individual.
*/
private class GraphicFaceTrackerFactory implements MultiProcessor.Factory<Face> {
@Override
public Tracker<Face> create(Face face) {
return new GraphicFaceTracker(mGraphicOverlay);
}
}
/**
* Face tracker for each detected individual. This maintains a face graphic within the app's
* associated face overlay.
*/
private class GraphicFaceTracker extends Tracker<Face> {
private GraphicOverlay mOverlay;
private FaceGraphic mFaceGraphic;
GraphicFaceTracker(GraphicOverlay overlay) {
mOverlay = overlay;
mFaceGraphic = new FaceGraphic(overlay);
}
/**
* Start tracking the detected face instance within the face overlay.
*/
@Override
public void onNewItem(int faceId, Face item) {
mFaceGraphic.setId(faceId);
}
@Override
public void onUpdate(FaceDetector.Detections<Face> detectionResults, Face face) {
mOverlay.add(mFaceGraphic);
mFaceGraphic.updateFace(face);
}
/**
* Hide the graphic when the corresponding face was not detected. This can happen for
* intermediate frames temporarily (e.g., if the face was momentarily blocked from
* view).
*/
@Override
public void onMissing(FaceDetector.Detections<Face> detectionResults) {
mOverlay.remove(mFaceGraphic);
}
/**
* Called when the face is assumed to be gone for good. Remove the graphic annotation from
* the overlay.
*/
@Override
public void onDone() {
mOverlay.remove(mFaceGraphic);
}
}
private void captureImage() {
mPreview.setDrawingCacheEnabled(true);
final Bitmap drawingCache = mPreview.getDrawingCache();
mCameraSource.takePicture(null, new CameraSource.PictureCallback() {
@Override
public void onPictureTaken(byte[] bytes) {
int orientation = Exif.getOrientation(bytes);
Bitmap temp = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
Bitmap picture = rotateImage(temp,orientation);
Bitmap overlay = Bitmap.createBitmap(mGraphicOverlay.getWidth(),mGraphicOverlay.getHeight(),picture.getConfig());
Canvas canvas = new Canvas(overlay);
Matrix matrix = new Matrix();
matrix.setScale((float)overlay.getWidth()/(float)picture.getWidth(),(float)overlay.getHeight()/(float)picture.getHeight());
// mirror by inverting scale and translating
matrix.preScale(-1, 1);
matrix.postTranslate(canvas.getWidth(), 0);
Paint paint = new Paint();
canvas.drawBitmap(picture,matrix,paint);
canvas.drawBitmap(drawingCache,0,0,paint);
try {
String mainpath = getExternalStorageDirectory() + separator + "MaskIt" + separator + "images" + separator;
File basePath = new File(mainpath);
if (!basePath.exists())
Log.d("CAPTURE_BASE_PATH", basePath.mkdirs() ? "Success": "Failed");
String path = mainpath + "photo_" + getPhotoTime() + ".jpg";
File captureFile = new File(path);
captureFile.createNewFile();
if (!captureFile.exists())
Log.d("CAPTURE_FILE_PATH", captureFile.createNewFile() ? "Success": "Failed");
FileOutputStream stream = new FileOutputStream(captureFile);
overlay.compress(Bitmap.CompressFormat.PNG, 100, stream);
stream.flush();
stream.close();
picture.recycle();
drawingCache.recycle();
mPreview.setDrawingCacheEnabled(false);
} catch (IOException e) {
e.printStackTrace();
}
}
private String getPhotoTime() {
DateFormat dateFormatter = new SimpleDateFormat("yyyyMMdd hhmmss");
dateFormatter.setLenient(false);
Date today = new Date();
String s = dateFormatter.format(today);
return s;
}
});
}
private Bitmap rotateImage(Bitmap bm, int i) {
Matrix matrix = new Matrix();
switch (i) {
case ExifInterface.ORIENTATION_NORMAL:
return bm;
case ExifInterface.ORIENTATION_FLIP_HORIZONTAL:
matrix.setScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_180:
matrix.setRotate(180);
break;
case ExifInterface.ORIENTATION_FLIP_VERTICAL:
matrix.setRotate(180);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_TRANSPOSE:
matrix.setRotate(90);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_90:
matrix.setRotate(90);
break;
case ExifInterface.ORIENTATION_TRANSVERSE:
matrix.setRotate(-90);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_270:
matrix.setRotate(-90);
break;
default:
return bm;
}
try {
Bitmap bmRotated = Bitmap.createBitmap(bm, 0, 0, bm.getWidth(), bm.getHeight(), matrix, true);
bm.recycle();
return bmRotated;
} catch (OutOfMemoryError e) {
e.printStackTrace();
return null;
}
}
}
CameraSourcePreview.java
package com.google.android.gms.samples.vision.face.facetracker.ui.camera;
import android.content.Context;
import android.content.res.Configuration;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.ViewGroup;
import com.google.android.gms.common.images.Size;
import com.google.android.gms.vision.CameraSource;
import java.io.IOException;
public class CameraSourcePreview extends ViewGroup {
private static final String TAG = "CameraSourcePreview";
private Context mContext;
private SurfaceView mSurfaceView;
private boolean mStartRequested;
private boolean mSurfaceAvailable;
private CameraSource mCameraSource;
private GraphicOverlay mOverlay;
public CameraSourcePreview(Context context, AttributeSet attrs) {
super(context, attrs);
mContext = context;
mStartRequested = false;
mSurfaceAvailable = false;
mSurfaceView = new SurfaceView(context);
mSurfaceView.getHolder().addCallback(new SurfaceCallback());
addView(mSurfaceView);
}
public void start(CameraSource cameraSource) throws IOException {
if (cameraSource == null) {
stop();
}
mCameraSource = cameraSource;
if (mCameraSource != null) {
mStartRequested = true;
startIfReady();
}
}
public void start(CameraSource cameraSource, GraphicOverlay overlay) throws IOException {
mOverlay = overlay;
start(cameraSource);
}
public void stop() {
if (mCameraSource != null) {
mCameraSource.stop();
}
}
public void release() {
if (mCameraSource != null) {
mCameraSource.release();
mCameraSource = null;
}
}
private void startIfReady() throws IOException {
if (mStartRequested && mSurfaceAvailable) {
mCameraSource.start(mSurfaceView.getHolder());
if (mOverlay != null) {
Size size = mCameraSource.getPreviewSize();
int min = Math.min(size.getWidth(), size.getHeight());
int max = Math.max(size.getWidth(), size.getHeight());
if (isPortraitMode()) {
// Swap width and height sizes when in portrait, since it will be rotated by
// 90 degrees
mOverlay.setCameraInfo(min, max, mCameraSource.getCameraFacing());
} else {
mOverlay.setCameraInfo(max, min, mCameraSource.getCameraFacing());
}
mOverlay.clear();
}
mStartRequested = false;
}
}
private class SurfaceCallback implements SurfaceHolder.Callback {
@Override
public void surfaceCreated(SurfaceHolder surface) {
mSurfaceAvailable = true;
try {
startIfReady();
} catch (IOException e) {
Log.e(TAG, "Could not start camera source.", e);
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surface) {
mSurfaceAvailable = false;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
int previewWidth = 320;
int previewHeight = 240;
if (mCameraSource != null) {
Size size = mCameraSource.getPreviewSize();
if (size != null) {
previewWidth = size.getWidth();
previewHeight = size.getHeight();
}
}
// Swap width and height sizes when in portrait, since it will be rotated 90 degrees
if (isPortraitMode()) {
int tmp = previewWidth;
previewWidth = previewHeight;
previewHeight = tmp;
}
final int viewWidth = right - left;
final int viewHeight = bottom - top;
int childWidth;
int childHeight;
int childXOffset = 0;
int childYOffset = 0;
float widthRatio = (float) viewWidth / (float) previewWidth;
float heightRatio = (float) viewHeight / (float) previewHeight;
// To fill the view with the camera preview, while also preserving the correct aspect ratio,
// it is usually necessary to slightly oversize the child and to crop off portions along one
// of the dimensions. We scale up based on the dimension requiring the most correction, and
// compute a crop offset for the other dimension.
if (widthRatio > heightRatio) {
childWidth = viewWidth;
childHeight = (int) ((float) previewHeight * widthRatio);
childYOffset = (childHeight - viewHeight) / 2;
} else {
childWidth = (int) ((float) previewWidth * heightRatio);
childHeight = viewHeight;
childXOffset = (childWidth - viewWidth) / 2;
}
for (int i = 0; i < getChildCount(); ++i) {
// One dimension will be cropped. We shift child over or up by this offset and adjust
// the size to maintain the proper aspect ratio.
getChildAt(i).layout(
-1 * childXOffset, -1 * childYOffset,
childWidth - childXOffset, childHeight - childYOffset);
}
try {
startIfReady();
} catch (IOException e) {
Log.e(TAG, "Could not start camera source.", e);
}
}
private boolean isPortraitMode() {
int orientation = mContext.getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
return false;
}
if (orientation == Configuration.ORIENTATION_PORTRAIT) {
return true;
}
Log.d(TAG, "isPortraitMode returning false by default");
return false;
}
}
GraphicOverlay.java
package com.google.android.gms.samples.vision.face.facetracker.ui.camera;
import android.content.Context;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.view.View;
import com.google.android.gms.vision.CameraSource;
import java.util.HashSet;
import java.util.Set;
public class GraphicOverlay extends View {
private final Object mLock = new Object();
private int mPreviewWidth;
private float mWidthScaleFactor = 1.0f;
private int mPreviewHeight;
private float mHeightScaleFactor = 1.0f;
private int mFacing = CameraSource.CAMERA_FACING_BACK;
private Set<Graphic> mGraphics = new HashSet<>();
public static abstract class Graphic {
private GraphicOverlay mOverlay;
public Graphic(GraphicOverlay overlay) {
mOverlay = overlay;
}
public abstract void draw(Canvas canvas);
public float scaleX(float horizontal) {
return horizontal * mOverlay.mWidthScaleFactor;
}
public float scaleY(float vertical) {
return vertical * mOverlay.mHeightScaleFactor;
}
public float translateX(float x) {
if (mOverlay.mFacing == CameraSource.CAMERA_FACING_FRONT) {
return mOverlay.getWidth() - scaleX(x);
} else {
return scaleX(x);
}
}
public float translateY(float y) {
return scaleY(y);
}
public void postInvalidate() {
mOverlay.postInvalidate();
}
}
public GraphicOverlay(Context context, AttributeSet attrs) {
super(context, attrs);
}
public void clear() {
synchronized (mLock) {
mGraphics.clear();
}
postInvalidate();
}
public void add(Graphic graphic) {
synchronized (mLock) {
mGraphics.add(graphic);
}
postInvalidate();
}
public void remove(Graphic graphic) {
synchronized (mLock) {
mGraphics.remove(graphic);
}
postInvalidate();
}
public void setCameraInfo(int previewWidth, int previewHeight, int facing) {
synchronized (mLock) {
mPreviewWidth = previewWidth;
mPreviewHeight = previewHeight;
mFacing = facing;
}
postInvalidate();
}
/**
* Draws the overlay with its associated graphic objects.
*/
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
synchronized (mLock) {
if ((mPreviewWidth != 0) && (mPreviewHeight != 0)) {
mWidthScaleFactor = (float) canvas.getWidth() / (float)mPreviewWidth;
mHeightScaleFactor = (float) canvas.getHeight() / (float) mPreviewHeight;
}
for (Graphic graphic : mGraphics) {
graphic.draw(canvas);
}
}
}
}
You should pass the captured image to the FaceDetector API and crop it afterwards.
fun getFace(context: Context, data: ByteArray): Bitmap? {
try {
val imageStrem = ByteArrayInputStream(data)
var bitmap = BitmapFactory.decodeStream(imageStrem)
if (bitmap.width > bitmap.height) {
val matrix = Matrix()
matrix.postRotate(270f)
if (bitmap.width > 1500) matrix.postScale(0.5f, 0.5f)
bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, true)
}
val faceDetector = FaceDetector.Builder(context).setProminentFaceOnly(true).setTrackingEnabled(false).build()
val frame = Frame.Builder().setBitmap(bitmap).build()
val faces = faceDetector.detect(frame)
var results: Bitmap? = null
for (i in 0 until faces.size()) {
val thisFace = faces.valueAt(i)
val x = thisFace.position.x
val y = thisFace.position.y
val x2 = x / 4 + thisFace.width
val y2 = y / 4 + thisFace.height
results = Bitmap.createBitmap(bitmap, x.toInt(), y.toInt(), x2.toInt(), y2.toInt())
}
return results
} catch (e: Exception) {
Log.e("GET_FACE", e.message)
}
return null
}
Source
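For completeness, a rough sketch of how that helper could be called from the existing onPictureTaken() callback in FaceTrackerActivity. The FaceUtilKt name is an assumption (that is what the Kotlin function above would be called from Java if it lived in a top-level file named FaceUtil.kt); the rest only uses APIs already present in the question.
// Sketch only: the body of the existing CameraSource.PictureCallback could look roughly like this
@Override
public void onPictureTaken(byte[] bytes) {
Bitmap faceOnly = FaceUtilKt.getFace(getApplicationContext(), bytes);
if (faceOnly == null) {
Log.d(TAG, "No face detected in the captured frame");
return;
}
File out = new File(getExternalFilesDir(null), "face_" + System.currentTimeMillis() + ".jpg");
try (FileOutputStream stream = new FileOutputStream(out)) {
faceOnly.compress(Bitmap.CompressFormat.JPEG, 90, stream);
} catch (IOException e) {
Log.e(TAG, "Failed to save cropped face", e);
}
}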
I have created a DialogFragment class that I need to show from within an OnDragListener. I tried a regular AlertDialog but couldn't get hold of the activity context from the static method. I tried adding the following to my OnDragListener class and to my custom layout class, but it says that getSupportFragmentManager() cannot be referenced from a static method, or that it cannot be resolved.
public static void showScoopDialog() {
FragmentManager fm = getSupportFragmentManager();
ScoopSizeDialog editNameDialogFragment = ScoopSizeDialog.newInstance("Some Title");
editNameDialogFragment.show(fm, "fragment_edit_name");
}
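What I think I need is a non-static version that is handed something it can obtain a FragmentManager from, roughly like this (just a sketch of the idea, not working code; ScoopSizeDialog is my existing dialog class):
public void showScoopDialog(FragmentActivity activity) {
FragmentManager fm = activity.getSupportFragmentManager();
ScoopSizeDialog editNameDialogFragment = ScoopSizeDialog.newInstance("Some Title");
editNameDialogFragment.show(fm, "fragment_edit_name");
}
Since the listener below already stores a Context, I assume that Context could be cast to the hosting FragmentActivity and passed in from the ACTION_DROP case.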
This is my custom OnDragListener class. I need to show a dialog with an EditText when the user drops an image:
public class ChoiceDragListener implements View.OnDragListener {
boolean DEBUG = true;
Context context;
public String TAG = "Drag Layout:";
public ChoiceDragListener(Context context) {
this.context = context;
}
@Override
public boolean onDrag(View v, DragEvent event) {
switch (event.getAction()) {
case DragEvent.ACTION_DRAG_STARTED:
if(DEBUG) Log.v("here","drag started");
break;
case DragEvent.ACTION_DRAG_ENTERED:
break;
case DragEvent.ACTION_DRAG_LOCATION:
int mCurX = (int) event.getX();
int mCurY = (int) event.getY();
if (DEBUG) Log.v("Cur(X, Y) : " ,"here ::" + mCurX + ", " + mCurY );
break;
case DragEvent.ACTION_DRAG_EXITED:
if (DEBUG)
Log.v("here","drag exits");
break;
case DragEvent.ACTION_DROP:
//handle the dragged view being dropped over a drop view
View view = (View) event.getLocalState();
ClipData cd = event.getClipData();
ClipData.Item item = cd.getItemAt(0);
String resp = item.coerceToText(context).toString();
//view dragged item is being dropped on
ImageView dropTarget = (ImageView) v;
//view being dragged and dropped
ImageView dropped = (ImageView) view;
dropped.setEnabled(false);
//if an item has already been dropped here, there will be a tag
Object tag = dropTarget.getTag();
CreateProd.nsList.add(dropped.getTag().toString());
Log.d(TAG, dropped.getTag().toString() + "added to list");
//if there is already an item here, set it back visible in its original place
if (tag != null) {
//the tag is the view id already dropped here
int existingID = (Integer)tag;
//set the original view visible again
((Activity) context).findViewById(existingID).setVisibility(View.VISIBLE);
}
break;
case DragEvent.ACTION_DRAG_ENDED:
if (DEBUG) Log.i("drag event", "ended::" + ChoiceTouchListener.offsetX + "," + ChoiceTouchListener.offsetY);
/**
* returning false so that goes to parentView onDrag function
*/
return false;
//break;
default:
break;
}
return true;
}
}
And this is my custom layout:
public class DragLayout extends RelativeLayout {
boolean DEBUG = true;
AnimationDrawable blenderAnim;
Handler handlerAnim2;
Context context;
private int dimensionInPixel = 200;
int screenWidth,screenHeight;
static float up_x=0,up_y=0;
boolean mIsScrolling = false;
public DragLayout(Context context) {
super(context);
// TODO Auto-generated constructor stub
this.context = context;
//not to include in main program
getDimensionsofScreen();
setLayout();
setViews();
}
private void setLayout() {
// set according to parent layout (not according to current layout)
RelativeLayout.LayoutParams rLp = new RelativeLayout.LayoutParams(
LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
rLp.topMargin = 2 * (screenHeight / 25); // calculating 1/10 of 4/5 of the screen height
this.setLayoutParams(rLp);
}
void setViews() {
ImageView img2 = new ImageView(context);
int dimensionInDp = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, dimensionInPixel, getResources().getDisplayMetrics());
RelativeLayout.LayoutParams rLp = new RelativeLayout.LayoutParams(
(screenWidth / 5), (screenHeight / 5));
rLp.topMargin = (screenHeight / 10);
rLp.leftMargin = (4*screenWidth / 10);
rLp.addRule(RelativeLayout.CENTER_IN_PARENT, RelativeLayout.TRUE);
img2.setLayoutParams(rLp);
img2.getLayoutParams().height = dimensionInDp;
img2.getLayoutParams().width = dimensionInDp;
img2.setImageDrawable(getResources().getDrawable(R.drawable.blender_anim));
img2.setOnDragListener(new ChoiceDragListener(context));
this.addView(img2);
blenderAnim = (AnimationDrawable)img2.getDrawable();
blenderAnim.setOneShot(true);
blenderAnim.stop();
}
public ArrayList<Integer> getDimensionsofScreen() {
//metrics that holds the value of height and width
DisplayMetrics displayMetrics = context.getResources().getDisplayMetrics();
ArrayList<Integer> vals = new ArrayList<Integer>();
vals.add(displayMetrics.widthPixels);
vals.add(displayMetrics.heightPixels);
screenHeight = displayMetrics.heightPixels;
screenWidth = displayMetrics.widthPixels;
return vals;
}
@SuppressLint("NewApi")
@Override
public boolean onDragEvent(DragEvent event) {
int mCurX = (int) event.getX();
int mCurY = (int) event.getY();
if (event.getAction() == DragEvent.ACTION_DRAG_STARTED || event.getAction() == DragEvent.ACTION_DRAG_ENTERED) {
if (blenderAnim.isRunning()) {
blenderAnim.stop();
} else {
blenderAnim.run();
handlerAnim2 = new Handler();
handlerAnim2.postDelayed(
new Runnable() {
@Override
public void run() {
blenderAnim.stop();
}},
getAnimationDuration(blenderAnim));
}
}
if (event.getAction() == DragEvent.ACTION_DROP || event.getAction() == DragEvent.ACTION_DRAG_EXITED) {
if (blenderAnim.isRunning()) {
blenderAnim.stop();
} else {
blenderAnim.run();
handlerAnim2 = new Handler();
handlerAnim2.postDelayed(
new Runnable(){
@Override
public void run() {
blenderAnim.stop();
}},
getAnimationDuration(blenderAnim));
}
Log.v("here", "it is :: " + mCurX + ", " + mCurY);
View view1 = (View) event.getLocalState();
view1.setVisibility(View.VISIBLE);
ObjectAnimator animationx = ObjectAnimator.ofFloat(view1,"translationX", mCurX - ChoiceTouchListener.offsetX-(screenWidth / 10),0.0f);
ObjectAnimator animationy = ObjectAnimator.ofFloat(view1, "translationY", mCurY - ChoiceTouchListener.offsetY - (screenHeight / 10), 0.0f);
AnimatorSet animSet = new AnimatorSet();
animSet.setDuration(500);
animSet.playTogether(animationx,animationy);
animSet.start();
}
if (event.getAction() == DragEvent.ACTION_DROP || event.getAction() == DragEvent.ACTION_DRAG_ENDED){
if (blenderAnim.isRunning()) {
blenderAnim.stop();
}
}
return true;
}
private int getAnimationDuration(AnimationDrawable src) {
int dur = 0;
for (int i = 0; i<src.getNumberOfFrames(); i++) {
dur += src.getDuration(i);
}
return dur;
}
private void showScoopDialog() {
FragmentManager fm = getSupportFragmentManager();
ScoopSizeDialog editNameDialogFragment = ScoopSizeDialog.newInstance("Some Title");
editNameDialogFragment.show(fm, "fragment_edit_name");
}
}
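One way to avoid the static call, sketched under the assumption that the Context handed to ChoiceDragListener (and used by DragLayout) is actually the hosting FragmentActivity/AppCompatActivity, is to cast the stored context and take the FragmentManager from it, so showScoopDialog() no longer has to be static:
// Sketch: instance method on ChoiceDragListener, called from the ACTION_DROP branch.
// `context` is the constructor-injected Context field; FragmentActivity/FragmentManager
// are the androidx.fragment.app (or support-library) classes.
private void showScoopDialog() {
    if (context instanceof FragmentActivity) {
        FragmentManager fm = ((FragmentActivity) context).getSupportFragmentManager();
        ScoopSizeDialog editNameDialogFragment = ScoopSizeDialog.newInstance("Some Title");
        editNameDialogFragment.show(fm, "fragment_edit_name");
    } else {
        Log.w(TAG, "context is not a FragmentActivity; cannot show dialog");
    }
}
Calling this right after the image is dropped in DragEvent.ACTION_DROP resolves both errors: the method is no longer static, and getSupportFragmentManager() is resolved against a real FragmentActivity instance.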
This is a continuation of my first topic here, about how to use code to mimic an on-screen touch.
I wanted to incorporate this function into a floating, on-screen chat head (like what Facebook did). So basically, when I tap the chat head, this service should mimic taps a certain number of times and at a certain rate (which are predefined somewhere else). It should also mimic the tap at a certain spot x, y (which is also predefined). I want the service to tap whatever is behind it on the screen.
Here is my full code. It doesn't work, but it doesn't show any errors either.
It is a bit long; if you don't have much time, just skip to the Runnable section. I am pretty confident there aren't any errors before it.
public class TapService extends Service{
private WindowManager windowManager;
private ImageView chatHead;
boolean mHasDoubleClicked = false;
long lastPressTime;
private Boolean _enable = true;
int x = 0;
int y = 0;
int tapTimes;
long delayInterval;
@Override
public IBinder onBind(Intent intent) {
// Not Used
return null;
}
@Override
public void onCreate() {
super.onCreate();
//////////////Chathead Core Section
windowManager = (WindowManager) getSystemService(WINDOW_SERVICE);
chatHead = new ImageView(this);
chatHead.setImageResource(R.drawable.floating2);
final WindowManager.LayoutParams params = new WindowManager.LayoutParams(
WindowManager.LayoutParams.WRAP_CONTENT,
WindowManager.LayoutParams.WRAP_CONTENT,
WindowManager.LayoutParams.TYPE_PHONE,
WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE,
PixelFormat.TRANSLUCENT);
params.gravity = Gravity.TOP | Gravity.LEFT;
params.x = 0;
params.y = 100;
windowManager.addView(chatHead, params);
//////////Chathead Core Section
getTapInformation();
try {
chatHead.setOnTouchListener(new View.OnTouchListener() {
private WindowManager.LayoutParams paramsF = params;
private int initialX;
private int initialY;
private float initialTouchX;
private float initialTouchY;
@Override public boolean onTouch(View v, MotionEvent event) {
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
// Get current time in milliseconds.
long pressTime = System.currentTimeMillis();
// If double click...
if (pressTime - lastPressTime <= 300) {
// Do something else here
mHasDoubleClicked = true;
}
else { // If not double click....
mHasDoubleClicked = false;
}
lastPressTime = pressTime;
initialX = paramsF.x;
initialY = paramsF.y;
initialTouchX = event.getRawX();
initialTouchY = event.getRawY();
x = paramsF.x;
y = paramsF.y;
break;
case MotionEvent.ACTION_UP:
break;
case MotionEvent.ACTION_MOVE:
paramsF.x = initialX + (int) (event.getRawX() - initialTouchX);
paramsF.y = initialY + (int) (event.getRawY() - initialTouchY);
x = paramsF.x;
y = paramsF.y;
windowManager.updateViewLayout(chatHead, paramsF);
break;
}
return false;
}
});
} catch (Exception e) {
// TODO: handle exception
}
chatHead.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg0) {
_enable = false;
handler.post(runnable);
// Intent intent = new Intent(getApplicationContext(), MainActivity.class);
// intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK|Intent.FLAG_ACTIVITY_SINGLE_TOP|Intent.FLAG_ACTIVITY_REORDER_TO_FRONT);
// getApplicationContext().startActivity(intent);
}
});
}
private Handler handler = new Handler();
private final Runnable runnable = new Runnable() {
@Override
public void run() {
long downTime;
long eventTime;
Log.v("Screen Tapper", "Start Tapping");
for (int i = 0; i < tapTimes; i++) {
downTime = eventTime = SystemClock.uptimeMillis();
MotionEvent event = MotionEvent.obtain(downTime, eventTime, MotionEvent.ACTION_DOWN, x, y, 0);
chatHead.onTouchEvent(event);
Log.v("Screen Tapper", "touchDown ----- "+x+","+y);
handler.postDelayed(runnable, 10);
downTime = eventTime = SystemClock.uptimeMillis();
MotionEvent event2 = MotionEvent.obtain(downTime, eventTime, MotionEvent.ACTION_UP, x, y, 0);
chatHead.onTouchEvent(event2);
Log.v("Screen Tapper", "touchUp ----- "+x+","+y);
handler.postDelayed(runnable, delayInterval);
}
}
};
private void getTapInformation() {
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(getApplicationContext());
tapTimes = prefs.getInt("TAP_TIMES", 1);
delayInterval = prefs.getLong("Rate", 1);
Log.v("Screen Tapper", "Get Information");
}
@Override
public void onDestroy() {
super.onDestroy();
if (chatHead != null) windowManager.removeView(chatHead);
}
}
I don't see anything wrong with my code, but I have a feeling that the chatHead.onTouchEvent(...) call is not correct.
Any help is appreciated.
Thanks in advance
I believe you need to run touch events on the UI thread for them to work; try this out:
view.post(new Runnable() {
@Override
public void run() {
view.onTouchEvent(event); // dispatch the synthesized MotionEvent on the view's UI thread
}
});
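Applied to the service above, that advice amounts to scheduling each down/up pair on the existing handler field (a Handler created in a Service is bound to the main looper) instead of looping and re-posting the whole Runnable. A rough sketch, not the answerer's code, reusing the same fields (handler, chatHead, x, y, tapTimes, delayInterval) and using dispatchTouchEvent() as the public entry point rather than calling onTouchEvent() directly; note that this still only delivers the events to the chat head view itself, not to whatever window is behind it:
// Hypothetical replacement for the runnable/for-loop above: one scheduled Runnable per tap.
private void startTapping() {
    for (int i = 0; i < tapTimes; i++) {
        handler.postDelayed(new Runnable() {
            @Override
            public void run() {
                long downTime = SystemClock.uptimeMillis();
                MotionEvent down = MotionEvent.obtain(downTime, downTime, MotionEvent.ACTION_DOWN, x, y, 0);
                chatHead.dispatchTouchEvent(down);
                down.recycle();

                long upTime = SystemClock.uptimeMillis();
                MotionEvent up = MotionEvent.obtain(upTime, upTime, MotionEvent.ACTION_UP, x, y, 0);
                chatHead.dispatchTouchEvent(up);
                up.recycle();
                Log.v("Screen Tapper", "tap at " + x + "," + y);
            }
        }, (long) i * delayInterval); // space the taps out at the configured rate
    }
}
The chat head's OnClickListener would then call startTapping() instead of handler.post(runnable).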