Drawing a straight line on a Bitmap in Android - java

So I found this code online, but the problem is that instead of drawing a straight line (a thin black line) it draws a dotted line, and every time I modify it to draw a line I get an error. Any help would be appreciated.
package cameraadvanced.android2.oreillyschool.com.hello4;
import android.app.Activity;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.net.Uri;
import android.os.Bundle;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnTouchListener;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import java.io.FileNotFoundException;
public class mic extends Activity {
Button btnLoadImage;
TextView textSource, textInfo;
ImageView imageResult;
final int RQS_IMAGE1 = 1;
Uri source;
Bitmap bitmapMaster;
Canvas canvasMaster;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.fingeract);
btnLoadImage = (Button) findViewById(R.id.loadimage);
textSource = (TextView) findViewById(R.id.sourceuri);
imageResult = (ImageView) findViewById(R.id.result);
btnLoadImage.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View arg0) {
Intent intent = new Intent(Intent.ACTION_PICK,
android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
startActivityForResult(intent, RQS_IMAGE1);
}
});
imageResult.setOnTouchListener(new OnTouchListener() {
@Override
public boolean onTouch(View v, MotionEvent event) {
int action = event.getAction();
int x = (int) event.getX();
int y = (int) event.getY();
switch (action) {
case MotionEvent.ACTION_DOWN:
textSource.setText("ACTION_DOWN- " + x + " : " + y);
drawOnProjectedBitMap((ImageView) v, bitmapMaster, x, y);
break;
case MotionEvent.ACTION_MOVE:
textSource.setText("ACTION_MOVE- " + x + " : " + y);
drawOnProjectedBitMap((ImageView) v, bitmapMaster, x, y);
break;
case MotionEvent.ACTION_UP:
textSource.setText("ACTION_UP- " + x + " : " + y);
drawOnProjectedBitMap((ImageView) v, bitmapMaster, x, y);
break;
}
/*
* Return 'true' to indicate that the event has been consumed.
* If you return the auto-generated 'false', your code can only detect ACTION_DOWN;
* it cannot detect ACTION_MOVE and ACTION_UP.
*/
return true;
}
});
}
/*
* Project a position on the ImageView to a position on the Bitmap and
* draw on it
*/
private void drawOnProjectedBitMap(ImageView iv, Bitmap bm, int x, int y) {
if (x < 0 || y < 0 || x > iv.getWidth() || y > iv.getHeight()) {
//outside ImageView
return;
} else {
int projectedX = (int) ((double) x * ((double) bm.getWidth() / (double) iv.getWidth()));
int projectedY = (int) ((double) y * ((double) bm.getHeight() / (double) iv.getHeight()));
Paint paint = new Paint();
paint.setStyle(Paint.Style.FILL);
paint.setColor(Color.WHITE);
paint.setStrokeWidth(3);
canvasMaster.drawCircle(projectedX, projectedY, 5, paint);
imageResult.invalidate();
textSource.setText(x + ":" + y + "/" + iv.getWidth() + " : " + iv.getHeight() + "\n" +
projectedX + " : " + projectedY + "/" + bm.getWidth() + " : " + bm.getHeight()
);
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
Bitmap tempBitmap;
if (resultCode == RESULT_OK) {
switch (requestCode) {
case RQS_IMAGE1:
source = data.getData();
textSource.setText(source.toString());
try {
//tempBitmap is Immutable bitmap,
//cannot be passed to Canvas constructor
tempBitmap = BitmapFactory.decodeStream(
getContentResolver().openInputStream(source));
Config config;
if (tempBitmap.getConfig() != null) {
config = tempBitmap.getConfig();
} else {
config = Config.ARGB_8888;
}
//bitmapMaster is Mutable bitmap
bitmapMaster = Bitmap.createBitmap(
tempBitmap.getWidth(),
tempBitmap.getHeight(),
config);
canvasMaster = new Canvas(bitmapMaster);
canvasMaster.drawBitmap(tempBitmap, 0, 0, null);
imageResult.setImageBitmap(bitmapMaster);
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
break;
}
}
}
}

Read your code again. That is exactly what drawOnProjectedBitMap does: it draws circles at specific points (the current position). If you want lines instead of dots, you need to draw a line from the previous position to the current position, roughly as in the sketch below.
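For illustration only (this sketch is not from the original answer): one way to do that is to remember the last projected point in fields and connect it to the current point with drawLine(). The field and method names below are made up for the example; it reuses the existing canvasMaster and imageResult fields.
// Illustrative sketch: remember the previous projected point and join it to the
// current one with drawLine() instead of drawing isolated circles.
private int prevX = -1, prevY = -1;

private void drawLineOnProjectedBitmap(ImageView iv, Bitmap bm, int x, int y, boolean newStroke) {
    int projectedX = (int) ((double) x * ((double) bm.getWidth() / (double) iv.getWidth()));
    int projectedY = (int) ((double) y * ((double) bm.getHeight() / (double) iv.getHeight()));
    Paint paint = new Paint();
    paint.setColor(Color.BLACK);
    paint.setStrokeWidth(3);          // drawLine uses strokeWidth for the line thickness
    if (newStroke || prevX < 0) {
        // ACTION_DOWN: only remember the starting point
        prevX = projectedX;
        prevY = projectedY;
        return;
    }
    // ACTION_MOVE / ACTION_UP: connect the previous point to the current one
    canvasMaster.drawLine(prevX, prevY, projectedX, projectedY, paint);
    prevX = projectedX;
    prevY = projectedY;
    imageResult.invalidate();
}
Call it with newStroke = true from ACTION_DOWN and false from ACTION_MOVE and ACTION_UP.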

Related

Unable to access current frame being processed in the "ML Kit Vision Quickstart Sample App"

I am working on an Android project where I have to access the facial landmarks from the currently processed frame, perform some calculations based on the face positions, and finally save the frame that was processed.
For this I am using the ML Kit Vision Quickstart Sample App, which does most of my work. From it I am using the LivePreviewActivity.java, FaceDetectorProcessor.java and FaceGraphic.java classes. I have performed all the calculations inside the FaceGraphic.java class, but I am NOT able to access the frame that is currently being processed.
LivePreviewActivity.java
/*
* Copyright 2020 Google LLC. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.mlkit.vision.demo;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.os.Bundle;
import androidx.core.app.ActivityCompat;
import androidx.core.app.ActivityCompat.OnRequestPermissionsResultCallback;
import androidx.core.content.ContextCompat;
import androidx.appcompat.app.AppCompatActivity;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.ArrayAdapter;
import android.widget.CompoundButton;
import android.widget.ImageView;
import android.widget.Spinner;
import android.widget.Toast;
import android.widget.ToggleButton;
import com.google.android.gms.common.annotation.KeepName;
import com.google.mlkit.common.model.LocalModel;
import com.google.mlkit.vision.demo.automl.AutoMLImageLabelerProcessor;
import com.google.mlkit.vision.demo.barcodescanner.BarcodeScannerProcessor;
import com.google.mlkit.vision.demo.facedetector.FaceDetectorProcessor;
import com.google.mlkit.vision.demo.labeldetector.LabelDetectorProcessor;
import com.google.mlkit.vision.demo.objectdetector.ObjectDetectorProcessor;
import com.google.mlkit.vision.demo.preference.PreferenceUtils;
import com.google.mlkit.vision.demo.preference.SettingsActivity;
import com.google.mlkit.vision.demo.preference.SettingsActivity.LaunchSource;
import com.google.mlkit.vision.demo.textdetector.TextRecognitionProcessor;
import com.google.mlkit.vision.face.FaceDetectorOptions;
import com.google.mlkit.vision.label.custom.CustomImageLabelerOptions;
import com.google.mlkit.vision.label.defaults.ImageLabelerOptions;
import com.google.mlkit.vision.objects.custom.CustomObjectDetectorOptions;
import com.google.mlkit.vision.objects.defaults.ObjectDetectorOptions;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* Live preview demo for ML Kit APIs.
*/
@KeepName
public final class LivePreviewActivity extends AppCompatActivity
implements OnRequestPermissionsResultCallback,
OnItemSelectedListener,
CompoundButton.OnCheckedChangeListener {
private static final String FACE_DETECTION = "Face Detection";
private static final String TAG = "LivePreviewActivity";
private static final int PERMISSION_REQUESTS = 1;
private CameraSource cameraSource = null;
private CameraSourcePreview preview;
private GraphicOverlay graphicOverlay;
private String selectedModel = FACE_DETECTION;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.d(TAG, "onCreate");
setContentView(R.layout.activity_vision_live_preview);
preview = findViewById(R.id.preview);
if (preview == null) {
Log.d(TAG, "Preview is null");
}
graphicOverlay = findViewById(R.id.graphic_overlay);
if (graphicOverlay == null) {
Log.d(TAG, "graphicOverlay is null");
}
Spinner spinner = findViewById(R.id.spinner);
List<String> options = new ArrayList<>();
options.add(FACE_DETECTION);
// Creating adapter for spinner
ArrayAdapter<String> dataAdapter = new ArrayAdapter<>(this, R.layout.spinner_style, options);
// Drop down layout style - list view with radio button
dataAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
// attaching data adapter to spinner
spinner.setAdapter(dataAdapter);
spinner.setOnItemSelectedListener(this);
ToggleButton facingSwitch = findViewById(R.id.facing_switch);
facingSwitch.setOnCheckedChangeListener(this);
ImageView settingsButton = findViewById(R.id.settings_button);
settingsButton.setOnClickListener(
v -> {
Intent intent = new Intent(getApplicationContext(), SettingsActivity.class);
intent.putExtra(SettingsActivity.EXTRA_LAUNCH_SOURCE,
SettingsActivity.LaunchSource.LIVE_PREVIEW);
startActivity(intent);
});
if (allPermissionsGranted()) {
createCameraSource(selectedModel);
} else {
getRuntimePermissions();
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.live_preview_menu, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (item.getItemId() == R.id.settings) {
Intent intent = new Intent(this, SettingsActivity.class);
intent.putExtra(SettingsActivity.EXTRA_LAUNCH_SOURCE, LaunchSource.LIVE_PREVIEW);
startActivity(intent);
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
public synchronized void onItemSelected(AdapterView<?> parent, View view, int pos, long id) {
// An item was selected. You can retrieve the selected item using
// parent.getItemAtPosition(pos)
selectedModel = parent.getItemAtPosition(pos).toString();
Log.d(TAG, "Selected model: " + selectedModel);
preview.stop();
if (allPermissionsGranted()) {
createCameraSource(selectedModel);
startCameraSource();
} else {
getRuntimePermissions();
}
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
// Do nothing.
}
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
Log.d(TAG, "Set facing");
if (cameraSource != null) {
if (isChecked) {
cameraSource.setFacing(CameraSource.CAMERA_FACING_FRONT);
} else {
cameraSource.setFacing(CameraSource.CAMERA_FACING_BACK);
}
}
preview.stop();
startCameraSource();
}
private void createCameraSource(String model) {
// If there's no existing cameraSource, create one.
if (cameraSource == null) {
cameraSource = new CameraSource(this, graphicOverlay);
}
try {
Log.i(TAG, "Using Face Detector Processor");
FaceDetectorOptions faceDetectorOptions = new FaceDetectorOptions.Builder()
.setContourMode(FaceDetectorOptions.CONTOUR_MODE_ALL)
.build();
// PreferenceUtils.getFaceDetectorOptionsForLivePreview(this);
cameraSource.setMachineLearningFrameProcessor(
new FaceDetectorProcessor(this, faceDetectorOptions));
} catch (Exception e) {
Log.e(TAG, "Can not create image processor: " + model, e);
Toast.makeText(
getApplicationContext(),
"Can not create image processor: " + e.getMessage(),
Toast.LENGTH_LONG)
.show();
}
}
/**
* Starts or restarts the camera source, if it exists. If the camera source doesn't exist yet
* (e.g., because onResume was called before the camera source was created), this will be called
* again when the camera source is created.
*/
private void startCameraSource() {
if (cameraSource != null) {
try {
if (preview == null) {
Log.d(TAG, "resume: Preview is null");
}
if (graphicOverlay == null) {
Log.d(TAG, "resume: graphOverlay is null");
}
preview.start(cameraSource, graphicOverlay);
} catch (IOException e) {
Log.e(TAG, "Unable to start camera source.", e);
cameraSource.release();
cameraSource = null;
}
}
}
@Override
public void onResume() {
super.onResume();
Log.d(TAG, "onResume");
createCameraSource(selectedModel);
startCameraSource();
}
/**
* Stops the camera.
*/
@Override
protected void onPause() {
super.onPause();
preview.stop();
}
@Override
public void onDestroy() {
super.onDestroy();
if (cameraSource != null) {
cameraSource.release();
}
}
private String[] getRequiredPermissions() {
try {
PackageInfo info =
this.getPackageManager()
.getPackageInfo(this.getPackageName(), PackageManager.GET_PERMISSIONS);
String[] ps = info.requestedPermissions;
if (ps != null && ps.length > 0) {
return ps;
} else {
return new String[0];
}
} catch (Exception e) {
return new String[0];
}
}
private boolean allPermissionsGranted() {
for (String permission : getRequiredPermissions()) {
if (!isPermissionGranted(this, permission)) {
return false;
}
}
return true;
}
private void getRuntimePermissions() {
List<String> allNeededPermissions = new ArrayList<>();
for (String permission : getRequiredPermissions()) {
if (!isPermissionGranted(this, permission)) {
allNeededPermissions.add(permission);
}
}
if (!allNeededPermissions.isEmpty()) {
ActivityCompat.requestPermissions(
this, allNeededPermissions.toArray(new String[0]), PERMISSION_REQUESTS);
}
}
@Override
public void onRequestPermissionsResult(
int requestCode, String[] permissions, int[] grantResults) {
Log.i(TAG, "Permission granted!");
if (allPermissionsGranted()) {
createCameraSource(selectedModel);
}
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
private static boolean isPermissionGranted(Context context, String permission) {
if (ContextCompat.checkSelfPermission(context, permission)
== PackageManager.PERMISSION_GRANTED) {
Log.i(TAG, "Permission granted: " + permission);
return true;
}
Log.i(TAG, "Permission NOT granted: " + permission);
return false;
}
}
FaceDetectorProcessor.java
/*
* Copyright 2020 Google LLC. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.mlkit.vision.demo.facedetector;
import android.content.Context;
import android.graphics.PointF;
import android.util.Log;
import androidx.annotation.NonNull;
import com.google.android.gms.tasks.Task;
import com.google.mlkit.vision.common.InputImage;
import com.google.mlkit.vision.demo.GraphicOverlay;
import com.google.mlkit.vision.demo.VisionProcessorBase;
import com.google.mlkit.vision.face.Face;
import com.google.mlkit.vision.face.FaceDetection;
import com.google.mlkit.vision.face.FaceDetector;
import com.google.mlkit.vision.face.FaceDetectorOptions;
import com.google.mlkit.vision.face.FaceLandmark;
import java.util.List;
import java.util.Locale;
/**
* Face Detector Demo.
*/
public class FaceDetectorProcessor extends VisionProcessorBase<List<Face>> {
private static final String TAG = "FaceDetectorProcessor";
private final FaceDetector detector;
public FaceDetectorProcessor(Context context) {
this(
context,
new FaceDetectorOptions.Builder()
.setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_ALL)
.enableTracking()
.build());
}
public FaceDetectorProcessor(Context context, FaceDetectorOptions options) {
super(context);
Log.v(MANUAL_TESTING_LOG, "Face detector options: " + options);
detector = FaceDetection.getClient(options);
}
@Override
public void stop() {
super.stop();
detector.close();
}
@Override
protected Task<List<Face>> detectInImage(InputImage image) {
return detector.process(image);
}
@Override
protected void onSuccess(@NonNull List<Face> faces, @NonNull GraphicOverlay graphicOverlay) {
for (Face face : faces) {
graphicOverlay.add(new FaceGraphic(graphicOverlay, face));
logExtrasForTesting(face);
}
}
private static void logExtrasForTesting(Face face) {
if (face != null) {
Log.v(MANUAL_TESTING_LOG, "face bounding box: " + face.getBoundingBox().flattenToString());
Log.v(MANUAL_TESTING_LOG, "face Euler Angle X: " + face.getHeadEulerAngleX());
Log.v(MANUAL_TESTING_LOG, "face Euler Angle Y: " + face.getHeadEulerAngleY());
Log.v(MANUAL_TESTING_LOG, "face Euler Angle Z: " + face.getHeadEulerAngleZ());
// All landmarks
int[] landMarkTypes =
new int[]{
FaceLandmark.MOUTH_BOTTOM,
FaceLandmark.MOUTH_RIGHT,
FaceLandmark.MOUTH_LEFT,
FaceLandmark.RIGHT_EYE,
FaceLandmark.LEFT_EYE,
FaceLandmark.RIGHT_EAR,
FaceLandmark.LEFT_EAR,
FaceLandmark.RIGHT_CHEEK,
FaceLandmark.LEFT_CHEEK,
FaceLandmark.NOSE_BASE
};
String[] landMarkTypesStrings =
new String[]{
"MOUTH_BOTTOM",
"MOUTH_RIGHT",
"MOUTH_LEFT",
"RIGHT_EYE",
"LEFT_EYE",
"RIGHT_EAR",
"LEFT_EAR",
"RIGHT_CHEEK",
"LEFT_CHEEK",
"NOSE_BASE"
};
for (int i = 0; i < landMarkTypes.length; i++) {
FaceLandmark landmark = face.getLandmark(landMarkTypes[i]);
if (landmark == null) {
Log.v(
MANUAL_TESTING_LOG,
"No landmark of type: " + landMarkTypesStrings[i] + " has been detected");
} else {
PointF landmarkPosition = landmark.getPosition();
String landmarkPositionStr =
String.format(Locale.US, "x: %f , y: %f", landmarkPosition.x, landmarkPosition.y);
Log.v(
MANUAL_TESTING_LOG,
"Position for face landmark: "
+ landMarkTypesStrings[i]
+ " is :"
+ landmarkPositionStr);
}
}
Log.v(
MANUAL_TESTING_LOG,
"face left eye open probability: " + face.getLeftEyeOpenProbability());
Log.v(
MANUAL_TESTING_LOG,
"face right eye open probability: " + face.getRightEyeOpenProbability());
Log.v(MANUAL_TESTING_LOG, "face smiling probability: " + face.getSmilingProbability());
Log.v(MANUAL_TESTING_LOG, "face tracking id: " + face.getTrackingId());
}
}
@Override
protected void onFailure(@NonNull Exception e) {
Log.e(TAG, "Face detection failed " + e);
}
}
FaceGraphic.java
/*
* Copyright 2020 Google LLC. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.mlkit.vision.demo.facedetector;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PointF;
import android.util.Log;
import com.google.mlkit.vision.demo.GraphicOverlay;
import com.google.mlkit.vision.demo.GraphicOverlay.Graphic;
import com.google.mlkit.vision.face.Face;
import com.google.mlkit.vision.face.FaceContour;
import com.google.mlkit.vision.face.FaceLandmark;
import com.google.mlkit.vision.face.FaceLandmark.LandmarkType;
import java.util.Locale;
/**
* Graphic instance for rendering face position, contour, and landmarks within the associated
* graphic overlay view.
*/
public class FaceGraphic extends Graphic {
private static final float FACE_POSITION_RADIUS = 4.0f;
private static final float ID_TEXT_SIZE = 30.0f;
private static final float ID_Y_OFFSET = 40.0f;
private static final float ID_X_OFFSET = -40.0f;
private static final float BOX_STROKE_WIDTH = 5.0f;
private static final int NUM_COLORS = 10;
private static final int[][] COLORS = new int[][]{
// {Text color, background color}
{Color.BLACK, Color.WHITE},
{Color.WHITE, Color.MAGENTA},
{Color.BLACK, Color.LTGRAY},
{Color.WHITE, Color.RED},
{Color.WHITE, Color.BLUE},
{Color.WHITE, Color.DKGRAY},
{Color.BLACK, Color.CYAN},
{Color.BLACK, Color.YELLOW},
{Color.WHITE, Color.BLACK},
{Color.BLACK, Color.GREEN}
};
private final Paint facePositionPaint;
private final Paint[] idPaints;
private final Paint[] boxPaints;
private final Paint[] labelPaints;
private volatile Face face;
FaceGraphic(GraphicOverlay overlay, Face face) {
super(overlay);
this.face = face;
final int selectedColor = Color.WHITE;
facePositionPaint = new Paint();
facePositionPaint.setColor(selectedColor);
int numColors = COLORS.length;
idPaints = new Paint[numColors];
boxPaints = new Paint[numColors];
labelPaints = new Paint[numColors];
for (int i = 0; i < numColors; i++) {
idPaints[i] = new Paint();
idPaints[i].setColor(COLORS[i][0] /* text color */);
idPaints[i].setTextSize(ID_TEXT_SIZE);
boxPaints[i] = new Paint();
boxPaints[i].setColor(COLORS[i][1] /* background color */);
boxPaints[i].setStyle(Paint.Style.STROKE);
boxPaints[i].setStrokeWidth(BOX_STROKE_WIDTH);
labelPaints[i] = new Paint();
labelPaints[i].setColor(COLORS[i][1] /* background color */);
labelPaints[i].setStyle(Paint.Style.FILL);
}
}
/**
* Draws the face annotations for position on the supplied canvas.
*/
@Override
public void draw(Canvas canvas) {
Face face = this.face;
if (face == null) {
return;
}
// Draws a circle at the position of the detected face, with the face's track id below.
float x0 = translateX(face.getBoundingBox().centerX());
float y0 = translateY(face.getBoundingBox().centerY());
// canvas.drawCircle(x0, y0, FACE_POSITION_RADIUS, facePositionPaint);
// Calculate positions.
float left = x0 - scale(face.getBoundingBox().width() / 2.0f);
float top = y0 - scale(face.getBoundingBox().height() / 2.0f);
float right = x0 + scale(face.getBoundingBox().width() / 2.0f);
float bottom = y0 + scale(face.getBoundingBox().height() / 2.0f);
float lineHeight = ID_TEXT_SIZE + BOX_STROKE_WIDTH;
float yLabelOffset = -lineHeight;
// Decide color based on face ID
int colorID = (face.getTrackingId() == null)
? 0 : Math.abs(face.getTrackingId() % NUM_COLORS);
/**
// Calculate width and height of label box
float textWidth = idPaints[colorID].measureText("ID: " + face.getTrackingId());
if (face.getSmilingProbability() != null) {
yLabelOffset -= lineHeight;
textWidth = Math.max(textWidth, idPaints[colorID].measureText(
String.format(Locale.US, "Happiness: %.2f", face.getSmilingProbability())));
}
if (face.getLeftEyeOpenProbability() != null) {
yLabelOffset -= lineHeight;
textWidth = Math.max(textWidth, idPaints[colorID].measureText(
String.format(Locale.US, "Left eye: %.2f", face.getLeftEyeOpenProbability())));
}
if (face.getRightEyeOpenProbability() != null) {
yLabelOffset -= lineHeight;
textWidth = Math.max(textWidth, idPaints[colorID].measureText(
String.format(Locale.US, "Right eye: %.2f", face.getLeftEyeOpenProbability())));
}
// Draw labels
canvas.drawRect(left - BOX_STROKE_WIDTH,
top + yLabelOffset,
left + textWidth + (2 * BOX_STROKE_WIDTH),
top,
labelPaints[colorID]);
yLabelOffset += ID_TEXT_SIZE;
canvas.drawRect(left, top, right, bottom, boxPaints[colorID]);
canvas.drawText("ID: " + face.getTrackingId(), left, top + yLabelOffset,
idPaints[colorID]);
yLabelOffset += lineHeight;
**/
/**
// Draws all face contours.
for (FaceContour contour : face.getAllContours()) {
for (PointF point : contour.getPoints()) {
canvas.drawCircle(
translateX(point.x), translateY(point.y), FACE_POSITION_RADIUS, facePositionPaint);
}
}
**/
FaceContour contour = face.getContour(FaceContour.NOSE_BRIDGE);
float x1 = 0, y1 = 0;
for (PointF point : contour.getPoints()) {
// canvas.drawCircle(translateX(point.x), translateY(point.y), FACE_POSITION_RADIUS, facePositionPaint);
x1 = translateX(point.x);
y1 = translateY(point.y);
break;
}
/**
// Draws smiling and left/right eye open probabilities.
if (face.getSmilingProbability() != null) {
canvas.drawText(
"Smiling: " + String.format(Locale.US, "%.2f", face.getSmilingProbability()),
left,
top + yLabelOffset,
idPaints[colorID]);
yLabelOffset += lineHeight;
}
**/
/**
FaceLandmark leftEye = face.getLandmark(FaceLandmark.LEFT_EYE);
if (leftEye != null && face.getLeftEyeOpenProbability() != null) {
canvas.drawText(
"Left eye open: " + String.format(Locale.US, "%.2f", face.getLeftEyeOpenProbability()),
translateX(leftEye.getPosition().x) + ID_X_OFFSET,
translateY(leftEye.getPosition().y) + ID_Y_OFFSET,
idPaints[colorID]);
} else if (leftEye != null && face.getLeftEyeOpenProbability() == null) {
canvas.drawText(
"Left eye",
left,
top + yLabelOffset,
idPaints[colorID]);
yLabelOffset += lineHeight;
} else if (leftEye == null && face.getLeftEyeOpenProbability() != null) {
canvas.drawText(
"Left eye open: " + String.format(Locale.US, "%.2f", face.getLeftEyeOpenProbability()),
left,
top + yLabelOffset,
idPaints[colorID]);
yLabelOffset += lineHeight;
}
FaceLandmark rightEye = face.getLandmark(FaceLandmark.RIGHT_EYE);
if (rightEye != null && face.getRightEyeOpenProbability() != null) {
canvas.drawText(
"Right eye open: " + String.format(Locale.US, "%.2f", face.getRightEyeOpenProbability()),
translateX(rightEye.getPosition().x) + ID_X_OFFSET,
translateY(rightEye.getPosition().y) + ID_Y_OFFSET,
idPaints[colorID]);
} else if (rightEye != null && face.getRightEyeOpenProbability() == null) {
canvas.drawText(
"Right eye",
left,
top + yLabelOffset,
idPaints[colorID]);
yLabelOffset += lineHeight;
} else if (rightEye == null && face.getRightEyeOpenProbability() != null) {
canvas.drawText(
"Right eye open: " + String.format(Locale.US, "%.2f", face.getRightEyeOpenProbability()),
left,
top + yLabelOffset,
idPaints[colorID]);
}
**/
/**
// Draw facial landmarks
drawFaceLandmark(canvas, FaceLandmark.LEFT_EYE);
drawFaceLandmark(canvas, FaceLandmark.RIGHT_EYE);
drawFaceLandmark(canvas, FaceLandmark.LEFT_CHEEK);
drawFaceLandmark(canvas, FaceLandmark.RIGHT_CHEEK);
**/
}
private void drawFaceLandmark(Canvas canvas, @LandmarkType int landmarkType) {
FaceLandmark faceLandmark = face.getLandmark(landmarkType);
if (faceLandmark != null) {
canvas.drawCircle(
translateX(faceLandmark.getPosition().x),
translateY(faceLandmark.getPosition().y),
FACE_POSITION_RADIUS,
facePositionPaint);
}
}
}
In the above code I want to access the current frame being processed, either inside LivePreviewActivity or FaceGraphic.java.
Please help me.
By frame, I am assuming you want the bitmap that satisfies your algorithm. Also, since you mention that you are using LivePreviewActivity, you are doing real-time face detection.
There is no way to access the bitmap or current frame from FaceDetectorProcessor.
Instead, what you need to do is access the original image inside VisionProcessorBase.java.
Modify the common processing logic like this to pass the current bitmap to FaceDetectorProcessor:
// -----------------Common processing logic-------------------------------------------------------
private Task<T> requestDetectInImage(
final InputImage image,
final GraphicOverlay graphicOverlay,
@Nullable final Bitmap originalCameraImage,
boolean shouldShowFps) {
final long startMs = SystemClock.elapsedRealtime();
return detectInImage(image)
.addOnSuccessListener(
executor,
results -> {
long currentLatencyMs = SystemClock.elapsedRealtime() - startMs;
numRuns++;
frameProcessedInOneSecondInterval++;
totalRunMs += currentLatencyMs;
maxRunMs = Math.max(currentLatencyMs, maxRunMs);
minRunMs = Math.min(currentLatencyMs, minRunMs);
// Only log inference info once per second. When frameProcessedInOneSecondInterval is
// equal to 1, it means this is the first frame processed during the current second.
if (frameProcessedInOneSecondInterval == 1) {
Log.d(TAG, "Max latency is: " + maxRunMs);
Log.d(TAG, "Min latency is: " + minRunMs);
Log.d(TAG, "Num of Runs: " + numRuns + ", Avg latency is: " + totalRunMs / numRuns);
MemoryInfo mi = new MemoryInfo();
activityManager.getMemoryInfo(mi);
long availableMegs = mi.availMem / 0x100000L;
Log.d(TAG, "Memory available in system: " + availableMegs + " MB");
}
graphicOverlay.clear();
if (originalCameraImage != null) {
graphicOverlay.add(new CameraImageGraphic(graphicOverlay, originalCameraImage));
}
//passing the original bitmap to FaceDetectorProcessor
VisionProcessorBase.this.onSuccess(originalCameraImage,results, graphicOverlay);
graphicOverlay.add(
new InferenceInfoGraphic(
graphicOverlay, currentLatencyMs, shouldShowFps ? framesPerSecond : null));
graphicOverlay.postInvalidate();
})
.addOnFailureListener(
executor,
e -> {
graphicOverlay.clear();
graphicOverlay.postInvalidate();
String error = "Failed to process. Error: " + e.getLocalizedMessage();
Toast.makeText(
graphicOverlay.getContext(),
error + "\nCause: " + e.getCause(),
Toast.LENGTH_SHORT)
.show();
Log.d(TAG, error);
e.printStackTrace();
VisionProcessorBase.this.onFailure(e);
});
}
You also need to change the abstract onSuccess that FaceDetectorProcessor overrides, like this:
protected abstract void onSuccess(Bitmap currentBitmap, @NonNull T results, @NonNull GraphicOverlay graphicOverlay);
After the required changes, you can access currentBitmap in the onSuccess method inside FaceDetectorProcessor, and you can also pass it on to FaceGraphic.java from there:
@Override
protected void onSuccess(Bitmap currentBitmap, @NonNull List<Face> faces, @NonNull GraphicOverlay graphicOverlay) {
for (Face face : faces) {
graphicOverlay.add(new FaceGraphic(currentBitmap,graphicOverlay, face));
logExtrasForTesting(face);
}
}
Inside FaceGraphic.java, store the current bitmap from the constructor:
FaceGraphic(Bitmap currentBitmap, GraphicOverlay overlay, Face face) {
super(overlay);
this.face = face;
this.currentBitmap = currentBitmap;
}
I hope this will help you out and fulfill your need.
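For completeness, here is a minimal sketch of how the stored bitmap could then be used inside FaceGraphic, for example to crop the detected face out of the frame. The field and method names are illustrative and not part of the ML Kit sample.
// Illustrative only: keep the frame in a field and crop the face region from it.
private final Bitmap currentBitmap;   // assigned in the constructor shown above

private Bitmap cropFace(Face face) {
    android.graphics.Rect box = face.getBoundingBox();   // image coordinates
    int left = Math.max(0, box.left);
    int top = Math.max(0, box.top);
    int width = Math.min(box.width(), currentBitmap.getWidth() - left);
    int height = Math.min(box.height(), currentBitmap.getHeight() - top);
    return Bitmap.createBitmap(currentBitmap, left, top, width, height);
}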

How to use bitmap region as a brush to paint over same bitmap?

I am trying to select a region of a bitmap to be used as a brush, and then use that brush to paint over another region of the same image. So basically it will have two modes:
1. Selection mode: pick the region to be used as a brush.
2. Paint mode: paint with the brush selected in selection mode.
So far I have come up with the following code, which just paints green without any selection. Kindly help me with this.
package com.example.android.apptouch;
import java.io.FileNotFoundException;
import android.app.Activity;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.net.Uri;
import android.os.Bundle;
import android.util.Log;
import android.view.Display;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnTouchListener;
import android.widget.Button;
import android.widget.ImageView;
import java.io.OutputStream;
import android.content.ContentValues;
import android.graphics.Bitmap.CompressFormat;
import android.provider.MediaStore.Images.Media;
import android.widget.Toast;
public class apptouch extends Activity implements OnClickListener,
OnTouchListener {
ImageView choosenImageView;
Button choosePicture;
Button savePicture;
Button NewButton;
Bitmap bmp;
Bitmap alteredBitmap;
Canvas canvas;
Paint paint;
Matrix matrix;
float downx = 0;
float downy = 0;
float upx = 0;
float upy = 0;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_apptouch);
choosenImageView = (ImageView) this.findViewById(R.id.ChoosenImageView);
choosePicture = (Button) this.findViewById(R.id.ChoosePictureButton);
savePicture = (Button) this.findViewById(R.id.SavePictureButton);
savePicture.setOnClickListener(this);
choosePicture.setOnClickListener(this);
choosenImageView.setOnTouchListener(this);
}
public void NewButton(View view) {
ImageInpaint image= new ImageInpaint(bmp);
image.init(true);
}
public void onClick(View v) {
if (v == choosePicture) {
Intent choosePictureIntent = new Intent(
Intent.ACTION_PICK,
android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
startActivityForResult(choosePictureIntent, 0);
} else if (v == savePicture) {
if (alteredBitmap != null) {
ContentValues contentValues = new ContentValues(3);
contentValues.put(Media.DISPLAY_NAME, "Draw On Me");
Uri imageFileUri = getContentResolver().insert(Media.EXTERNAL_CONTENT_URI, contentValues);
try {
OutputStream imageFileOS = getContentResolver().openOutputStream(imageFileUri);
alteredBitmap.compress(CompressFormat.JPEG, 90, imageFileOS);
Toast t = Toast.makeText(this, "Saved!", Toast.LENGTH_SHORT);
t.show();
} catch (Exception e) {
Log.v("EXCEPTION", e.getMessage());
}
}
}
}
protected void onActivityResult(int requestCode, int resultCode,
Intent intent) {
super.onActivityResult(requestCode, resultCode, intent);
if (resultCode == RESULT_OK) {
Uri imageFileUri = intent.getData();
try {
BitmapFactory.Options bmpFactoryOptions = new BitmapFactory.Options();
bmpFactoryOptions.inJustDecodeBounds = true;
bmp = BitmapFactory.decodeStream(getContentResolver().openInputStream(
imageFileUri), null, bmpFactoryOptions);
bmpFactoryOptions.inJustDecodeBounds = false;
bmp = BitmapFactory.decodeStream(getContentResolver().openInputStream(
imageFileUri), null, bmpFactoryOptions);
alteredBitmap = Bitmap.createBitmap(bmp.getWidth(), bmp
.getHeight(), bmp.getConfig());
canvas = new Canvas(alteredBitmap);
paint = new Paint();
paint.setColor(Color.GREEN);
paint.setStrokeWidth(5);
matrix = new Matrix();
canvas.drawBitmap(bmp, matrix, paint);
choosenImageView.setImageBitmap(alteredBitmap);
choosenImageView.setOnTouchListener(this);
} catch (Exception e) {
Log.v("ERROR", e.toString());
}
}
}
public boolean onTouch(View v, MotionEvent event) {
int action = event.getAction();
switch (action) {
case MotionEvent.ACTION_DOWN:
downx = event.getX();
downy = event.getY();
break;
case MotionEvent.ACTION_MOVE:
upx = event.getX();
upy = event.getY();
canvas.drawLine(downx, downy, upx, upy, paint);
choosenImageView.invalidate();
downx = upx;
downy = upy;
break;
case MotionEvent.ACTION_UP:
upx = event.getX();
upy = event.getY();
canvas.drawLine(downx, downy, upx, upy, paint);
choosenImageView.invalidate();
break;
case MotionEvent.ACTION_CANCEL:
break;
default:
break;
}
return true;
}
}
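No answer is included above, so purely as a hedged starting point (not from the original thread): one common approach is to copy the selected region into a small brush bitmap in selection mode and then stamp that bitmap along the touch path in paint mode. The brushSize value, the selectionMode flag and the assumption that touch coordinates already map 1:1 to bitmap coordinates are all illustrative.
// Illustrative sketch: selection mode copies a square region around the touch point,
// paint mode stamps that region onto the altered bitmap.
Bitmap brushBitmap;
int brushSize = 64;            // side of the square brush, in bitmap pixels
boolean selectionMode = true;  // toggled by your UI

void handleTouch(float x, float y) {
    int bx = (int) Math.max(0, Math.min(x - brushSize / 2f, bmp.getWidth() - brushSize));
    int by = (int) Math.max(0, Math.min(y - brushSize / 2f, bmp.getHeight() - brushSize));
    if (selectionMode) {
        // copy the region out of the untouched source bitmap
        brushBitmap = Bitmap.createBitmap(bmp, bx, by, brushSize, brushSize);
    } else if (brushBitmap != null) {
        // stamp the selected region onto the altered bitmap
        canvas.drawBitmap(brushBitmap, bx, by, paint);
        choosenImageView.invalidate();
    }
}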

Syntax error on token "(", delete this token

I'm working on an app for my last year of A-Levels and I'm trying to make a colour averager. I can make the app take a picture and display it in an ImageView, but I can't seem to get the pixels from the image. I've tried "imageViewName".getPixels (and every other .getPixels I could think of), but the opening bracket after getPixels gives the error "Syntax error on token "(", delete this token", and the same happens for the closing bracket. The error occurs here: photoViewForCrop.getPixels(pixels[], x, y, imageWidth, imageHeight); which is quite close to the end of the code.
Here's my activity that takes the photo and saves it to the SD card; please ignore that it's called 'UploadPhotoActivity', not sure what I was thinking.
Imports or ints that aren't used in the code were added by my teacher while he was messing with it and not helping at all.
package com.colours.javerager;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import android.app.ActionBar;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.Toast;
public class UploadPhotoActivity extends Activity {
//Activity request codes
private static final int CAMERA_CAPTURE_IMAGE_REQUEST_CODE = 100;
public static final int MEDIA_TYPE_IMAGE = 1;
//directory for file names
private static final String IMAGE_DIRECTORY_NAME = "JAverager";
private Uri fileUri; //file url to store
private ImageView photoViewForCrop;
private Button takePhotoButton;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_upload_photo);
// Show the Up button in the action bar.
//setupActionBar();
ActionBar actionBar = getActionBar();
// hide the action bar
actionBar.hide();
photoViewForCrop = (ImageView) findViewById(R.id.photoViewForCrop);
takePhotoButton = (Button) findViewById(R.id.takePhotoButton);
/**
* Capture image button click event
*/
takePhotoButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// capture picture
captureImage();
}
});
if (!isDeviceSupportCamera()) {
Toast.makeText(getApplicationContext(),
"Sorry! Your device doesn't support camera",
Toast.LENGTH_LONG).show();
// will close the app if the device doesn't have a camera
finish();
}
}
//checks if device has a camera
private boolean isDeviceSupportCamera() {
if (getApplicationContext().getPackageManager().hasSystemFeature(
PackageManager.FEATURE_CAMERA)) {
// this device has a camera
return true;
} else {
// no camera on this device
return false;
}
}
private void captureImage() {
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
fileUri = getOutputMediaFileUri(MEDIA_TYPE_IMAGE);
intent.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);
// start the image capture Intent
startActivityForResult(intent, CAMERA_CAPTURE_IMAGE_REQUEST_CODE);
}
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
// save file url in bundle as it will be null on screen orientation
// changes
outState.putParcelable("file_uri", fileUri);
}
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
super.onRestoreInstanceState(savedInstanceState);
// get the file url
fileUri = savedInstanceState.getParcelable("file_uri");
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
// if the result is capturing Image
if (requestCode == CAMERA_CAPTURE_IMAGE_REQUEST_CODE) {
if (resultCode == RESULT_OK) {
// successfully captured the image
// display it in image view
previewCapturedImage();
} else if (resultCode == RESULT_CANCELED) {
// user cancelled Image capture
Toast.makeText(getApplicationContext(),
"User cancelled image capture", Toast.LENGTH_SHORT)
.show();
} else {
// failed to capture image
Toast.makeText(getApplicationContext(),
"Sorry! Failed to capture image", Toast.LENGTH_SHORT)
.show();
}
}
}
/**
* Display image from a path to ImageView
*/
private void previewCapturedImage() {
try {
photoViewForCrop.setVisibility(View.VISIBLE);
// bitmap factory
BitmapFactory.Options options = new BitmapFactory.Options();
// downsizing image as it throws OutOfMemory Exception for larger
// images
options.inSampleSize = 2;
final Bitmap bitmap = BitmapFactory.decodeFile(fileUri.getPath(),
options);
photoViewForCrop.setImageBitmap(bitmap);
} catch (NullPointerException e) {
e.printStackTrace();
}
}
public Uri getOutputMediaFileUri(int type) {
return Uri.fromFile(getOutputMediaFile(type));
}
/**
* returning image
*/
private static File getOutputMediaFile(int type) {
// External sdcard location
File mediaStorageDir = new File(
Environment
.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
IMAGE_DIRECTORY_NAME);
// Create the storage directory if it does not exist
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
Log.d(IMAGE_DIRECTORY_NAME, "Oops! Failed create "
+ IMAGE_DIRECTORY_NAME + " directory");
return null;
}
}
// Create a media file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss",
Locale.getDefault()).format(new Date());
File mediaFile;
if (type == MEDIA_TYPE_IMAGE) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator
+ "JAverager_" + timeStamp + ".jpg");
} else {
return null;
}
return mediaFile;
}
int imageWidth = 640;
int imageHeight = 480;
int[] pixels = new int[307200];
int x = 1;
int y = 1;
photoViewForCrop.getPixels(pixels[], x, y, imageWidth, imageHeight);
int pixelTotal= 307200;
int valuethispixel;
int currentBlue;
int currentRed;
int currentGreen;
int totalBlue = 0;
int totalRed = 0;
int totalGreen = 0;
int currentPixel = 1;
int tempNum;
int avRed;
int avGreen;
int avBlue;
{
while(1 < pixelTotal){
tempNum = (Integer) pixels[currentPixel];
currentBlue = Color.blue(tempNum);
currentRed = Color.red(tempNum);
currentGreen = Color.green(tempNum);
totalBlue = totalBlue + currentBlue;
totalRed = totalRed + currentRed;
totalGreen = totalGreen + currentGreen;
currentPixel = currentPixel + 1;
}
totalBlue = totalBlue / pixelTotal;
totalRed = totalRed / pixelTotal;
totalGreen = totalGreen / pixelTotal;
}
};
ImageView does not implement the functionality you want.
Perhaps you should look at PixelGrabber (http://docs.oracle.com/javase/7/docs/api/java/awt/image/PixelGrabber.html).
Everything after the getOutputMediaFile method is not inside a method; that is why your method call, and the while loop after it, won't compile.
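In other words, the averaging code needs to live inside a method, and the pixels have to come from a Bitmap rather than from the ImageView. A hedged sketch of what that could look like (the method and variable names are illustrative; it assumes you pass in the bitmap decoded in previewCapturedImage()):
// Illustrative sketch: average the colour of a bitmap's pixels inside a method.
private int averageColour(Bitmap bitmap) {
    int width = bitmap.getWidth();
    int height = bitmap.getHeight();
    int[] pixels = new int[width * height];
    // Bitmap.getPixels exists; ImageView has no getPixels method.
    bitmap.getPixels(pixels, 0, width, 0, 0, width, height);
    long totalRed = 0, totalGreen = 0, totalBlue = 0;
    for (int pixel : pixels) {
        totalRed += Color.red(pixel);
        totalGreen += Color.green(pixel);
        totalBlue += Color.blue(pixel);
    }
    int count = pixels.length;
    return Color.rgb((int) (totalRed / count), (int) (totalGreen / count), (int) (totalBlue / count));
}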

How to reset position of ImageView?

In my program I can move an image across the screen. Sometimes I lose sight of it and cannot find it, so I want to add a function that puts the image back in its original position.
My own approach, which does not work (and the related question is how to make it work):
ImageView iv = current;
Matrix matrix = iv.getImageMatrix();
float[] values = new float[9];
matrix.getValues(values);
float a = values[Matrix.MTRANS_X];
float b = values[Matrix.MTRANS_Y];
matrix.postTranslate(-a,-b);
iv.setImageMatrix(matrix);
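For reference, a minimal sketch of a full reset (this clears every transformation rather than subtracting only the translation; it assumes the view's scale type is MATRIX and is not taken from the original post):
// Illustrative sketch: replace the view's matrix with an identity matrix.
ImageView iv = current;
iv.setScaleType(ImageView.ScaleType.MATRIX);
iv.setImageMatrix(new Matrix());   // identity = original position and scale
iv.invalidate();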
EDIT: Latest code
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.ExecutionException;
import ua.mirkvartir.android.frontend.UILApplication;
import android.app.Activity;
import android.app.Fragment;
import android.app.ProgressDialog;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.graphics.PointF;
import android.graphics.drawable.BitmapDrawable;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.view.MotionEventCompat;
import android.util.FloatMath;
import android.util.Log;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnTouchListener;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.Gallery;
import android.widget.ImageView;
import android.widget.ImageView.ScaleType;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.TextView;
public class ImageShowActivity extends Activity {
int p = 0;
Activity app;
ImageAdapterr ia;
ImageView imView;
int imgPos = 0;
Bitmap bm = null;
int geg = 90;
public int width = 0;
public int hight = 0;
Gallery gallery;
Matrix matrix = new Matrix();
Matrix shift = new Matrix();
private int INVALID_POINTER_ID = -1;
private int mActivePointerId = INVALID_POINTER_ID;
private ScaleGestureDetector mScaleDetector;
private float mLastTouchX = 0;
private float mLastTouchY = 0;
private float mPosX = 0;
private float mPosY = 0;
ImageView current;
public List<ImageView> images;
public int reset = 0;
HashMap<Integer, Bitmap> map = new HashMap<Integer, Bitmap>();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.image_show);
app = this;
images = new ArrayList<ImageView>(
UILApplication.photo_buffer_big.size());
Log.d("images", UILApplication.photo_buffer_big.size() + "");
Log.d("images", images.size() + "");
gallery = (Gallery) findViewById(R.id.gallery);
// EDGES ARE INVISIBLE
gallery.setHorizontalFadingEdgeEnabled(false);
ia = new ImageAdapterr(this);
gallery.setAdapter(ia);
final int length = UILApplication.photo_buffer_big.size();
Button back_btn = (Button) findViewById(R.id.analitics_back_btn);
back_btn.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
finish();
}
});
final TextView img_counter_tv = (TextView) findViewById(R.id.img_counter);
img_counter_tv.setText(p + 1 + "/" + length);
Button nextButton = (Button) findViewById(R.id.next_btn);
nextButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
if (p < length - 1) {
p++;
} else {
p = 0;
}
gallery.setSelection(p, true);
img_counter_tv.setText(p + 1 + "/" + length);
}
});
Button backButton = (Button) findViewById(R.id.back_btn);
backButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
if (p == 0) {
p = length - 1;
} else {
p--;
}
gallery.setSelection(p, true);
img_counter_tv.setText(p + 1 + "/" + length);
}
});
}
public void rotateS(View v) {
ImageView iv = current;
Bitmap b = ((BitmapDrawable) iv.getDrawable()).getBitmap();
Matrix matrix = new Matrix();
matrix.postRotate(geg);
Bitmap bMapRotate = Bitmap.createBitmap(b, 0, 0, b.getWidth(),
b.getHeight(), matrix, true);
iv.setImageBitmap(bMapRotate);
}
public void extendS(View v) {
reset = 1;
Log.d("restart", "yes");
runOnUiThread(new Runnable() {
public void run() {
View viewToUpdate = gallery.getChildAt(p - gallery.getFirstVisiblePosition());
viewToUpdate.invalidate();
// ia.notifyDataSetChanged();
}
});
}
public static Bitmap resizeBitmap(Bitmap photo, float x, float y) {
try {
// get current bitmap width and height
int width = photo.getWidth();
int height = photo.getHeight();
// determine how much to scale
float scaleWidth = x / width;
float scaleHeight = y / height;
Log.d("aspect3", "w: " + scaleWidth + " h: " + scaleHeight);
// create the matrix for the manipulation
Matrix matrix = new Matrix();
// resize the bitmap
matrix.postScale(scaleWidth, scaleHeight);
// recreate the new bitmap
Bitmap resizebitmap = Bitmap.createBitmap(photo, 0, 0, width,
height, matrix, false);
return resizebitmap;
} catch (NullPointerException e) {
e.printStackTrace();
} catch (OutOfMemoryError e) {
e.printStackTrace();
System.gc();
}
return null;
}
class ImageAdapterr extends BaseAdapter {
/** The parent context */
private Context myContext;
/** Simple Constructor saving the 'parent' context. */
public ImageAdapterr(Context c) {
this.myContext = c;
}
Matrix savedMatrix = new Matrix();
/** Returns the amount of images we have defined. */
public int getCount() {
return UILApplication.photo_buffer_big.size();
}
/* Use the array-Positions as unique IDs */
public Object getItem(int position) {
return position;
}
public long getItemId(int position) {
return position;
}
/**
* Returns a new ImageView to be displayed, depending on the position
* passed.
*/
public View getView(final int position, View convertView,
ViewGroup parent) {
ImageView imView = new ImageView(this.myContext);
current = imView;
imgPos = position;
if (bm==null){
AsyncLoad imLoad = new AsyncLoad();
imLoad.execute();
try {
bm = imLoad.get();
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ExecutionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
if (bm != null) {
imView.setImageBitmap(bm);
} else if (bm == null) {
imView.setImageResource(R.drawable.logo);
}
/* Image should be scaled as width/height are set. */
imView.setScaleType(ImageView.ScaleType.FIT_CENTER);
/* Set the Width/Height of the ImageView. */
imView.setLayoutParams(new Gallery.LayoutParams(
LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
imView.setOnTouchListener(new OnTouchListener() {
private static final String TAG = "Touch";
// These matrices will be used to move and zoom image
PointF start = new PointF();
public PointF mid = new PointF();
// We can be in one of these 3 states
public static final int NONE = 0;
public static final int DRAG = 1;
public static final int ZOOM = 2;
public int mode = NONE;
float oldDist;
public boolean onTouch(View v, MotionEvent event) {
ImageView view = (ImageView) v;
view.setScaleType(ImageView.ScaleType.MATRIX);
switch (event.getAction() & MotionEvent.ACTION_MASK) {
case MotionEvent.ACTION_DOWN:
savedMatrix.set(matrix);
start.set(event.getX(), event.getY());
Log.d(TAG, "mode=DRAG");
mode = DRAG;
break;
case MotionEvent.ACTION_POINTER_DOWN:
oldDist = spacing(event);
Log.d(TAG, "oldDist=" + oldDist);
if (oldDist > 10f) {
savedMatrix.set(matrix);
midPoint(mid, event);
mode = ZOOM;
Log.d(TAG, "mode=ZOOM");
}
break;
case MotionEvent.ACTION_MOVE:
if (mode == DRAG) {
matrix.set(savedMatrix);
matrix.postTranslate(event.getX() - start.x,
event.getY() - start.y);
} else if (mode == ZOOM) {
float newDist = spacing(event);
Log.d(TAG, "newDist=" + newDist);
if (newDist > 10f) {
matrix.set(savedMatrix);
float scale = newDist / oldDist;
matrix.postScale(scale, scale, mid.x, mid.y);
}
}
break;
case MotionEvent.ACTION_UP:
case MotionEvent.ACTION_POINTER_UP:
mode = NONE;
Log.d(TAG, "mode=NONE");
break;
}
// Perform the transformation
Log.d("point",
(event.getX() - start.x) + " "
+ (event.getY() - start.y));
// Log.d("point",start.x +" "+start.y);
float[] values = new float[9];
matrix.getValues(values);
float a = values[Matrix.MTRANS_X];
float b = values[Matrix.MTRANS_Y];
Log.d("touch matrix", values[Matrix.MPERSP_0] + " "
+ values[Matrix.MPERSP_1] + " "
+ values[Matrix.MPERSP_2]);
Log.d("touch matrix scale", values[Matrix.MSCALE_X] + " "
+ values[Matrix.MSCALE_Y]);
Log.d("touch matrix scew", values[Matrix.MSKEW_X] + " "
+ values[Matrix.MSKEW_Y]);
Log.d("touch matrix trans", values[Matrix.MTRANS_X] + " "
+ values[Matrix.MTRANS_Y]);
if (reset == 1) {
matrix.reset();
savedMatrix.reset();
}
view.setImageMatrix(matrix);
reset = 0;
// images.set(position, view);
return true; // indicate event was handled
}
private float spacing(MotionEvent event) {
float x = event.getX(0) - event.getX(1);
float y = event.getY(0) - event.getY(1);
return FloatMath.sqrt(x * x + y * y);
}
private void midPoint(PointF point, MotionEvent event) {
float x = event.getX(0) + event.getX(1);
float y = event.getY(0) + event.getY(1);
point.set(x / 2, y / 2);
}
});
return imView;
}
/**
* Returns the size (0.0f to 1.0f) of the views depending on the
* 'offset' to the center.
*/
public float getScale(boolean focused, int offset) {
/* Formula: 1 / (2 ^ offset) */
return Math.max(0, 1.0f / (float) Math.pow(2, Math.abs(offset)));
}
}
class AsyncLoad extends AsyncTask<Void, Void, Bitmap> {
ProgressDialog pd;
@Override
protected void onPreExecute() {
pd = new ProgressDialog(app);
pd.setOwnerActivity(app);
pd.setTitle("Идет загрузка...");
pd.setCancelable(true);
pd.show();
}
@Override
protected Bitmap doInBackground(Void... arg0) {
// TODO Auto-generated method stub
try {
/*
* Open a new URL and get the InputStream to load data from it.
*/
URL aURL = new URL(UILApplication.photo_buffer_big.get(imgPos));
URLConnection conn = aURL.openConnection();
conn.connect();
InputStream is = conn.getInputStream();
/* Buffered is always good for a performance plus. */
BufferedInputStream bis = new BufferedInputStream(is);
/* Decode url-data to a bitmap. */
bm = BitmapFactory.decodeStream(bis);
bis.close();
is.close();
/* Apply the Bitmap to the ImageView that will be returned. */
// imView.setImageBitmap(bm);
} catch (IOException e) {
// imView.setImageResource(R.drawable.logo);
bm = null;
Log.e("DEBUGTAG", "Remote Image Exception", e);
}
map.put(imgPos, bm);
Log.d("map", map.size() + "");
return bm;
}
@Override
protected void onPostExecute(Bitmap arg0) {
pd.dismiss();
}
}
}
XML
<RelativeLayout
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:background="#drawable/top_tab_bg"
android:padding="10dp" >
<Button
android:id="#+id/analitics_back_btn"
style="#style/ButtonText"
android:layout_marginRight="5dip"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_toLeftOf="#+id/back_btn"
android:background="#drawable/btn_clk_selector"
android:text="Назад" />
<Button
android:id="#+id/btn_rotate"
style="#style/ButtonText"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginRight="5dip"
android:layout_toLeftOf="#+id/analitics_back_btn"
android:background="#drawable/btn_clk_selector"
android:onClick="rotateS"
android:text="Повернуть" />
<Button
android:id="#+id/btn_ex"
style="#style/ButtonText"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginRight="5dip"
android:layout_toLeftOf="#+id/btn_rotate"
android:background="#drawable/btn_clk_selector"
android:onClick="extendS"
android:text="рас" />
<Button
android:id="#+id/next_btn"
style="#style/ButtonText"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentRight="true"
android:layout_marginLeft="5dp"
android:background="#drawable/next_img_btn"
android:paddingRight="10dp" />
<TextView
android:id="#+id/img_counter"
style="#style/ButtonText"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_centerVertical="true"
android:layout_toLeftOf="#+id/next_btn"
android:text="1/10" />
<Button
android:id="#+id/back_btn"
style="#style/ButtonText"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginRight="5dp"
android:layout_toLeftOf="#+id/img_counter"
android:background="#drawable/back_img_btn" />
</RelativeLayout>
<RelativeLayout
android:id="#+id/gal"
android:layout_width="fill_parent"
android:layout_height="fill_parent" >
<Gallery
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="#+id/gallery"
android:adjustViewBounds="true"
android:spacing="10dp"
/>
</RelativeLayout>
</LinearLayout>
EDIT: problem with the new code - the image is not reset right after enlargeS (extendS in the code above) is called; it is only reset after the enlargeS call plus a tap on the screen. Also, the image is only partially reset: it returns to its original position and scale, but at its original resolution, so it usually becomes much smaller than the screen, while I need it to fill the screen.
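If the goal is for the reset image to fill the screen rather than return to its raw resolution, one hedged option (not from the answers below) is to rebuild the matrix with setRectToRect instead of calling matrix.reset(). Here view stands for the ImageView being reset, bm for the decoded bitmap, and android.graphics.RectF is assumed to be imported:
// Illustrative sketch: fit the bitmap rectangle into the view rectangle on reset.
RectF src = new RectF(0, 0, bm.getWidth(), bm.getHeight());
RectF dst = new RectF(0, 0, view.getWidth(), view.getHeight());
matrix.setRectToRect(src, dst, Matrix.ScaleToFit.CENTER);
savedMatrix.set(matrix);
view.setImageMatrix(matrix);
view.invalidate();   // redraw immediately, without waiting for the next touch event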
Use the code below:
Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.drawable.image);
RelativeLayout layout = (RelativeLayout) findViewById(R.id.layout);
LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(
LinearLayout.LayoutParams.FILL_PARENT,
LinearLayout.LayoutParams.WRAP_CONTENT);
imageView = new ImageView(this);
imageView.setLayoutParams(params);
imageView.setImageBitmap(bitmap);
layout.setGravity(Gravity.CENTER_VERTICAL | Gravity.TOP);
layout.addView(imageView);
This will position the ImageView at the center of the RelativeLayout which contains it.
EDIT: see the code above - I changed RelativeLayout.LayoutParams to LinearLayout.LayoutParams, because it is actually a LinearLayout that contains the RelativeLayout. Now it should work.
Try using this in your code to place the image view wherever you want on the screen:
iconSMS.setImageResource(R.drawable.ic_launcher_smsmms);
LayoutParams paramsIconSMS = new LayoutParams(iconwidth,iconheight);
iconwidthspacing=(int)Math.round(((float)width/480)*(float)45);
iconheightspacing=(int)Math.round(((float)height/800)*(float)520);
paramsIconSMS.setMargins(iconwidthspacing, iconheightspacing, 0, 0);
iconSMS.setId(204);
iconSMS.setLayoutParams(paramsIconSMS);
Here,
iconwidth=(int)Math.round(width*iconwidthf);
iconheight=(int)Math.round(height*iconheightf);
and width and height variables are,
requestWindowFeature(Window.FEATURE_NO_TITLE);
DisplayMetrics metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
width = metrics.widthPixels;
height = metrics.heightPixels;
With width and height you get the screen size of your phone in pixels.
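Putting those pieces together, a short sketch of the whole flow (the size fractions and the iconSMS view are illustrative, carried over from the snippet above):
// Illustrative sketch: size and position a view from the screen dimensions.
DisplayMetrics metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
int width = metrics.widthPixels;
int height = metrics.heightPixels;
RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(
        Math.round(width * 0.15f), Math.round(height * 0.10f));          // icon size
params.setMargins(Math.round(width / 480f * 45f), Math.round(height / 800f * 520f), 0, 0);
iconSMS.setLayoutParams(params);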

Sharing an image on the SD card using ACTION_SEND - the image gets deleted!

I have a strange problem in my Android app. I have an activity which shows an image that is saved on the SD card. The user can choose Share from the context menu, which is coded with the standard ACTION_SEND code; please see below.
public void shareImage(){
Intent emailShare = new Intent(android.content.Intent.ACTION_SEND);
emailShare.setType("image/jpeg");
emailShare.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION);
emailShare.putExtra(Intent.EXTRA_STREAM,uriId);
startActivity(Intent.createChooser(emailShare,"Send picture using:"));
}
where uriId is the URI of the image.
This works, and the image can be sent to Facebook or wherever. But if I then exit my app and rotate the screen, the image is missing. I get a file I/O error in the log when I try to retrieve the image. I have used Astro to search for the image, but it is nowhere to be found!
I will post the entire class below. If anyone has any ideas I would be very appreciative.
Stephen
package com.pengilleys.fishingsnapz;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.graphics.PointF;
import android.graphics.RectF;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore;
import android.util.FloatMath;
import android.util.Log;
import android.view.Display;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnTouchListener;
import android.widget.ImageView;
import android.widget.TextView;
public class ImagePreview extends Activity implements OnTouchListener{
protected Context mContext;
static final String TAG = "ImagePreview";
static final int SHARE_ID = 0;
static final int EDIT_ID = 1;
static final int PROGRESS_DIALOG = 0;
ProgressDialog progressDialog;
private Bundle extras;
protected static Bitmap bm;
protected DBAdapter db = new DBAdapter(ImagePreview.this);
Long id;
private FacebookManager fb;
private Uri uriId;
//Touch gesture related variables
Matrix matrix = new Matrix();
Matrix savedMatrix = new Matrix();
// Remember some things for zooming
PointF start = new PointF();
PointF mid = new PointF();
float oldDist = 0;
// We can be in one of these 3 states
static final int NONE = 0;
static final int DRAG = 1;
static final int ZOOM = 2;
int mode = NONE;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.image_view);
mContext = this;
try{
extras = getIntent().getExtras();
id = (Long) extras.get(DBAdapter.KEY_ROWID);
Log.d(TAG,"id="+id);
ImageView imageV = (ImageView)findViewById(R.id.image_preview);
imageV.setOnTouchListener(this);
//get image
bm = getImage(id);
imageV.setImageBitmap(bm);
imageV.setImageMatrix(matrix);
//get snap details for text overlay
ArrayList<String> details = new ArrayList<String>();
details = getSnapDetails(id);
prepareSnapDetails(details);
}catch(Exception e){
Log.e(TAG,e.toString());
}finally{
}
}
private ArrayList<String> getSnapDetails(long id){
ArrayList<String> fishDetails = new ArrayList<String>();
Cursor cursor;
//open the database and grab the snap details
db.open();
cursor = db.fetchNote(id);
db.close();
//loop through cursor results and put into our arraylist<string>
try{
startManagingCursor(cursor);
cursor.moveToFirst();
//only one row but multiple columns, manually assign columns
String value = cursor.getString(cursor.getColumnIndexOrThrow("date"));
value = checkValue(value);
fishDetails.add(value);
value = cursor.getString(cursor.getColumnIndexOrThrow("type"));
value = checkValue(value);
fishDetails.add(value);
value = cursor.getString(cursor.getColumnIndexOrThrow("weight"));
value = checkValue(value);
fishDetails.add(value);
value = cursor.getString(cursor.getColumnIndexOrThrow("place"));
value = checkValue(value);
fishDetails.add(value);
}catch(Exception e){
Log.e(TAG,e.toString());
}
db.close();
return fishDetails;
}
private String checkValue(String value){
if(value==""){value="-";};
return value;
}
private boolean prepareSnapDetails(ArrayList<String> details){
TextView tx = (TextView)findViewById(R.id.details_overlay);
tx.setText("");
String detail="";
String[] labels = new String[]{"Date: ","Type: ","Weight: ","Place: "};
try{
for(int i=0;i<4;i++){
//add text details to textview
detail = labels[i] + details.get(i) + '\n';
Log.d(TAG,detail);
CharSequence ch = detail.subSequence(0,detail.length());
tx.append(ch);
}
}catch(Exception e){
Log.e(TAG,e.toString());
}
return true;
}
private Bitmap getImage(Long id){
try{
db.open();
Cursor c = db.fetchUri(id);
uriId = Uri.parse(c.getString(c.getColumnIndexOrThrow(DBAdapter.KEY_URI)));
ContentResolver cr = getContentResolver();
Bitmap bitmap = MediaStore.Images.Media.getBitmap(cr,uriId);
Display display = getWindowManager().getDefaultDisplay();
int width = display.getWidth();
int height = display.getHeight();
if(bitmap.getWidth() > width || bitmap.getHeight() > height){
float heightRatio = (float) height / bitmap.getHeight();
float widthRatio = (float) width / bitmap.getWidth();
Log.d(TAG,"height="+heightRatio + " width=" + widthRatio);
matrix.setScale(widthRatio, heightRatio);
}else{
matrix.setTranslate(1f,1f);
Log.d(TAG,"bitmap.getWidth="+bitmap.getWidth() + " vs width="+width);
Log.d(TAG,"bitmap.getHeight="+bitmap.getHeight() + " vs height="+height);
}
db.close();
return bitmap;
} catch (FileNotFoundException e) {
Log.e("GETIMAGE",e.toString());
return null;
} catch (IOException e) {
Log.e("GETIMAGE",e.toString());
return null;
}catch(Exception e){
Log.e("GETIMAGE",e.toString());
return null;
}
}
public void shareImage(){
Intent emailShare = new Intent(android.content.Intent.ACTION_SEND);
emailShare.setType("image/jpeg");
emailShare.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION);
emailShare.putExtra(Intent.EXTRA_STREAM,uriId);
startActivity(Intent.createChooser(emailShare,"Send picture using:"));
}
@Override
public boolean onTouch(View v, MotionEvent event) {
ImageView view = (ImageView) v;
final float MIN_ZOOM = 0.25f;
final float MAX_ZOOM = 4;
float height = view.getDrawable().getIntrinsicHeight();
float width = view.getDrawable().getIntrinsicWidth();
float[] matrixValues = new float[9];
RectF viewRect = new RectF(0, 0, view.getWidth(), view.getHeight());
dumpEvent(event);
switch(event.getAction() & MotionEvent.ACTION_MASK){
case MotionEvent.ACTION_DOWN:
savedMatrix.set(matrix);
start.set(event.getX(), event.getY());
Log.d(TAG,"mode=DRAG");
mode=DRAG;
break;
case MotionEvent.ACTION_UP:
case MotionEvent.ACTION_POINTER_DOWN:
oldDist=spacing(event);
Log.d(TAG, "oldDist=" + oldDist);
if(oldDist>10f){
savedMatrix.set(matrix);
midPoint(mid,event);
mode=ZOOM;
Log.d(TAG,"mode=ZOOM");
}
break;
case MotionEvent.ACTION_MOVE:
if(mode==DRAG){
Log.d(TAG,"mode=MOVE");
matrix.set(savedMatrix);
// limit pan
matrix.getValues(matrixValues);
float currentY = matrixValues[Matrix.MTRANS_Y];
float currentX = matrixValues[Matrix.MTRANS_X];
float currentScale = matrixValues[Matrix.MSCALE_X];
float currentHeight = height * currentScale;
float currentWidth = width * currentScale;
float dx = event.getX() - start.x;
float dy = event.getY() - start.y;
float newX = currentX+dx;
float newY = currentY+dy;
RectF drawingRect = new RectF(newX, newY, newX+currentWidth, newY+currentHeight);
float diffUp = Math.min(viewRect.bottom-drawingRect.bottom, viewRect.top-drawingRect.top);
float diffDown = Math.max(viewRect.bottom-drawingRect.bottom, viewRect.top-drawingRect.top);
float diffLeft = Math.min(viewRect.left-drawingRect.left, viewRect.right-drawingRect.right);
float diffRight = Math.max(viewRect.left-drawingRect.left, viewRect.right-drawingRect.right);
if(diffUp > 0 ){
dy +=diffUp;
}
if(diffDown < 0){
dy +=diffDown;
}
if( diffLeft> 0){
dx += diffLeft;
}
if(diffRight < 0){
dx += diffRight;
}
matrix.postTranslate(dx, dy);
}else if(mode==ZOOM){
float newDist= spacing(event);
if(newDist>10f){
matrix.set(savedMatrix);
float scale = newDist / oldDist;
Log.d(TAG,"Scale="+scale);
matrix.getValues(matrixValues);
float currentScale = matrixValues[Matrix.MSCALE_X];
Log.d(TAG,"currentScale="+currentScale);
// limit zoom
if (scale * currentScale > MAX_ZOOM) {
scale = MAX_ZOOM / currentScale;
} else if (scale * currentScale < MIN_ZOOM) {
scale = MIN_ZOOM / currentScale;
}
matrix.postScale(scale, scale, mid.x, mid.y);
}
}
break;
}
//perform the transformation
view.setImageMatrix(matrix);
return true;
}
private static float spacing(MotionEvent event){
float x = event.getX(0) - event.getX(1);
float y = event.getY(0) - event.getY(1);
return FloatMath.sqrt(x * x + y * y);
}
private static void midPoint(PointF point, MotionEvent event){
float x = event.getX(0) + event.getX(1);
float y = event.getY(0) + event.getY(1);
point.set(x / 2, y / 2);
}
/** Show an event in the logcat view for debugging **/
private void dumpEvent(MotionEvent event){
String names[] = {"DOWN","UP","MOVE","CANCEL","OUTSIDE","POINTER_DOWN","POINTER_UP","7?","8?","9?"};
StringBuilder sb = new StringBuilder();
int action = event.getAction();
int actionCode = action & MotionEvent.ACTION_MASK;
sb.append("event ACTION_").append(names[actionCode]);
if(actionCode==MotionEvent.ACTION_POINTER_DOWN
|| actionCode==MotionEvent.ACTION_POINTER_UP){
sb.append("pid ").append(
action >> MotionEvent.ACTION_POINTER_ID_SHIFT);
sb.append(")");
};
sb.append(" [");
for(int i=0;i < event.getPointerCount();i++){
sb.append("#").append(i);
sb.append("(pid ").append(event.getPointerId(i));
sb.append(")=").append((int) event.getX(i));
sb.append(",").append((int) event.getY(i));
if(i+1 < event.getPointerCount())
sb.append(";");
}
sb.append("]");
Log.d("DUMP", sb.toString());
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
super.onCreateOptionsMenu(menu);
menu.add(0, SHARE_ID, 0, R.string.share_id);
menu.add(0,EDIT_ID,0,R.string.edit_details);
return true;
}
@Override
public boolean onMenuItemSelected(int featureId, MenuItem item) {
switch(item.getItemId()) {
case SHARE_ID:
try{
shareImage();
}catch(Exception e){
Log.e(TAG,e.toString());
}
return true;
case EDIT_ID:
try{
Intent intent = new Intent(mContext,SnapDetails.class);
intent.putExtra(DBAdapter.KEY_ROWID,id);
startActivityForResult(intent,FishingSnapz.SNAP_EDIT);
}catch(Exception e){
Log.e(TAG,e.toString());
}
}
return super.onMenuItemSelected(featureId, item);
}
@Override
protected void onResume(){
super.onResume();
}
@Override
protected void onActivityResult (int requestCode, int resultCode, Intent data){
super.onActivityResult(requestCode, resultCode, data);
//Required by facebook single sign on - check that we are returning to activity from FB
if(data!=null){
fb.authorizeCallback(requestCode, resultCode, data);
}else{
extras = getIntent().getExtras();
id = (Long) extras.get(DBAdapter.KEY_ROWID);
//get snap details for text overlay
ArrayList<String> details = new ArrayList<String>();
details = getSnapDetails(id);
prepareSnapDetails(details);
}
}
}
Sounds like a bug in whatever app you are using to share it with. You might consider making a spare copy of the file before issuing the ACTION_SEND, then deleting the spare copy later if it is still hanging around.
This happens when sharing an image via Facebook. Do this (a rough sketch follows below):
1. make a copy,
2. share the copy,
3. delete the copy.
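A minimal sketch of that copy-before-share idea, reusing uriId and TAG from the question. The copy location (the app's external files directory), the file name and the method name shareImageCopy are assumptions, and the class would additionally need java.io.File, java.io.InputStream and java.io.FileOutputStream imports; on the old Android versions this question targets, a plain file Uri is enough for the chooser targets.
private void shareImageCopy() {
    File copy = new File(getExternalFilesDir(null), "share_copy.jpg");
    try {
        // 1. make a copy of the image the content Uri points at
        InputStream in = getContentResolver().openInputStream(uriId);
        FileOutputStream out = new FileOutputStream(copy);
        byte[] buffer = new byte[8192];
        int read;
        while ((read = in.read(buffer)) != -1) {
            out.write(buffer, 0, read);
        }
        in.close();
        out.close();
    } catch (IOException e) {
        Log.e(TAG, e.toString());
        return;
    }
    // 2. share the copy, not the original
    Intent emailShare = new Intent(Intent.ACTION_SEND);
    emailShare.setType("image/jpeg");
    emailShare.putExtra(Intent.EXTRA_STREAM, Uri.fromFile(copy));
    startActivity(Intent.createChooser(emailShare, "Send picture using:"));
    // 3. delete the copy later (for example in onDestroy), once the receiving
    // app has had a chance to read it: copy.delete();
}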
