Android code can't access phone camera...what's wrong? - java

I'm having trouble getting the camera preview to work. Any ideas what's going on? I don't think my code can connect to or find the camera on my phone... I'm very new to programming.
Here are the error messages:
07-31 22:10:29.940 26291-26291/user.cameratest W/CameraBase: An error occurred while connecting to camera: 0
07-31 22:10:30.168 26291-26320/user.cameratest E/Surface: getSlotFromBufferLocked: unknown buffer: 0xb9bfa3e8
07-31 22:10:30.191 26291-26291/user.cameratest W/CameraBase: An error occurred while connecting to camera: 0
07-31 22:10:36.957 26291-26320/user.cameratest E/Surface: getSlotFromBufferLocked: unknown buffer: 0xb9c89198
07-31 22:10:40.458 26291-26320/user.cameratest E/Surface: getSlotFromBufferLocked: unknown buffer: 0xb9b47570
Here is the code:
AndroidManifest.xml
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="user.cameratest">
<uses-feature android:name="android.hardware.camera" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.hardware.camera.autofocus" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<!--uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" /-->
<application
android:icon="#mipmap/ic_launcher"
android:label="#string/app_name" >
<activity
android:label="#string/app_name"
android:name=".cameraActivity"
android:screenOrientation="landscape" >
<intent-filter >
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>
cameraActivity.java
package user.cameratest;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.ShutterCallback;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup.LayoutParams;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.FrameLayout;
import android.widget.Toast;
public class cameraActivity extends Activity { //
private static final String TAG = "cameraActivity";
cameraPreview preview;
Button buttonClick;
Camera camera; //
Activity act;
Context ctx;
@Override
public void onCreate(Bundle savedInstanceState) { //
super.onCreate(savedInstanceState); //
ctx = this;
act = this;
requestWindowFeature(Window.FEATURE_NO_TITLE); //
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN); //
setContentView(R.layout.camera_layout); //
preview = new cameraPreview(this, (SurfaceView)findViewById(R.id.surfaceView));
preview.setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
((FrameLayout) findViewById(R.id.layout)).addView(preview);
preview.setKeepScreenOn(true);
preview.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View arg0) {
camera.takePicture(shutterCallback, rawCallback, jpegCallback);
}
});
Toast.makeText(ctx, getString(R.string.take_photo_help), Toast.LENGTH_LONG).show();
// buttonClick = (Button) findViewById(R.id.btnCapture);
//
// buttonClick.setOnClickListener(new OnClickListener() {
// public void onClick(View v) {
//// preview.camera.takePicture(shutterCallback, rawCallback, jpegCallback);
// camera.takePicture(shutterCallback, rawCallback, jpegCallback);
// }
// });
//
// buttonClick.setOnLongClickListener(new OnLongClickListener(){
// @Override
// public boolean onLongClick(View arg0) {
// camera.autoFocus(new AutoFocusCallback(){
// @Override
// public void onAutoFocus(boolean arg0, Camera arg1) {
// //camera.takePicture(shutterCallback, rawCallback, jpegCallback);
// }
// });
// return true;
// }
// });
}
@Override
protected void onResume() {
super.onResume();
int numCams = Camera.getNumberOfCameras();
if(numCams > 0){
try{
camera = Camera.open(0);
camera.startPreview();
preview.setCamera(camera);
} catch (RuntimeException ex){
Toast.makeText(ctx, getString(R.string.camera_not_found), Toast.LENGTH_LONG).show();
}
}
}
@Override
protected void onPause() {
if(camera != null) {
camera.stopPreview();
preview.setCamera(null);
camera.release();
camera = null;
}
super.onPause();
}
private void resetCam() {
camera.startPreview();
preview.setCamera(camera);
}
private void refreshGallery(File file) {
Intent mediaScanIntent = new Intent( Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
mediaScanIntent.setData(Uri.fromFile(file));
sendBroadcast(mediaScanIntent);
}
ShutterCallback shutterCallback = new ShutterCallback() {
public void onShutter() {
// Log.d(TAG, "onShutter'd");
}
};
PictureCallback rawCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
// Log.d(TAG, "onPictureTaken - raw");
}
};
PictureCallback jpegCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
new SaveImageTask().execute(data);
resetCam();
Log.d(TAG, "onPictureTaken - jpeg");
}
};
private class SaveImageTask extends AsyncTask<byte[], Void, Void> {
@Override
protected Void doInBackground(byte[]... data) {
FileOutputStream outStream = null;
// Write to SD Card
try {
File sdCard = Environment.getExternalStorageDirectory();
File dir = new File (sdCard.getAbsolutePath() + "/camtest");
dir.mkdirs();
String fileName = String.format("%d.jpg", System.currentTimeMillis());
File outFile = new File(dir, fileName);
outStream = new FileOutputStream(outFile);
outStream.write(data[0]);
outStream.flush();
outStream.close();
Log.d(TAG, "onPictureTaken - wrote bytes: " + data.length + " to " + outFile.getAbsolutePath());
refreshGallery(outFile);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
return null;
}
}
}
cameraPreview.java
package user.cameratest;
/**
* @author Jose Davis Nidhin
*/
import java.io.IOException;
import java.util.List;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
class cameraPreview extends ViewGroup implements SurfaceHolder.Callback { //
private final String TAG = "cameraPreview"; //
SurfaceView mSurfaceView; //
SurfaceHolder mHolder; //
Size mPreviewSize; //
List<Size> mSupportedPreviewSizes; //
Camera mCamera; //
cameraPreview(Context context, SurfaceView sv) { //
super(context); //
mSurfaceView = sv; //
// addView(mSurfaceView);
mHolder = mSurfaceView.getHolder(); //
mHolder.addCallback(this); //
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); //
}
public void setCamera(Camera camera) { //
mCamera = camera; //
if (mCamera != null) { //
mSupportedPreviewSizes = mCamera.getParameters().getSupportedPreviewSizes(); //
requestLayout(); //
// get Camera parameters
Camera.Parameters params = mCamera.getParameters(); //
List<String> focusModes = params.getSupportedFocusModes(); //
if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) { //
// set the focus mode
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO); //
// set Camera parameters
mCamera.setParameters(params); //
}
}
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { //
// We purposely disregard child measurements because act as a
// wrapper to a SurfaceView that centers the camera preview instead
// of stretching it.
final int width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec); //
final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec); //
setMeasuredDimension(width, height); //
if (mSupportedPreviewSizes != null) { //
mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width, height); //
}
}
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) { //
if (changed && getChildCount() > 0) {
final View child = getChildAt(0);
final int width = r - l;
final int height = b - t;
int previewWidth = width;
int previewHeight = height;
if (mPreviewSize != null) {
previewWidth = mPreviewSize.width;
previewHeight = mPreviewSize.height;
}
// Center the child SurfaceView within the parent.
if (width * previewHeight > height * previewWidth) {
final int scaledChildWidth = previewWidth * height / previewHeight;
child.layout((width - scaledChildWidth) / 2, 0,
(width + scaledChildWidth) / 2, height);
} else {
final int scaledChildHeight = previewHeight * width / previewWidth;
child.layout(0, (height - scaledChildHeight) / 2,
width, (height + scaledChildHeight) / 2);
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
try {
if (mCamera != null) {
mCamera.setPreviewDisplay(holder);
}
} catch (IOException exception) {
Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// Surface will be destroyed when we return, so stop the preview.
if (mCamera != null) {
mCamera.stopPreview();
}
}
private Size getOptimalPreviewSize(List<Size> sizes, int w, int h) {
final double ASPECT_TOLERANCE = 0.1;
double targetRatio = (double) w / h;
if (sizes == null) return null;
Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
// Try to find an size match aspect ratio and size
for (Size size : sizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
// Cannot find the one match the aspect ratio, ignore the requirement
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if(mCamera != null) {
Camera.Parameters parameters = mCamera.getParameters();
parameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height);
requestLayout();
mCamera.setParameters(parameters);
mCamera.startPreview();
}
}
}
camera_layout.xml
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:id="#+id/layout"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<SurfaceView
android:id="#+id/surfaceView"
android:layout_width="match_parent"
android:layout_height="match_parent" />
<!-- Button
android:id="#+id/btnCapture"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center_horizontal"
android:background="#drawable/round_button"
android:text="#string/btn_capture" /-->
</FrameLayout>

Are you requesting the permissions at runtime? See https://developer.android.com/training/permissions/requesting.html
For the camera, the permission is Manifest.permission.CAMERA.
Also, the android.hardware.Camera API is deprecated; you should use android.hardware.camera2 instead: https://developer.android.com/reference/android/hardware/camera2/package-summary.html
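To make the first point concrete, here is a minimal, untested sketch of a runtime CAMERA permission check you could drop into the asker's cameraActivity. The method name openCameraIfAllowed and the request code are invented for illustration, and the ContextCompat/ActivityCompat helpers assume the AndroidX (or older support) library is available:
import android.Manifest;
import android.content.pm.PackageManager;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;

// Inside cameraActivity (sketch only; camera and preview are the existing fields):
private static final int REQUEST_CAMERA = 1;

private void openCameraIfAllowed() {
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            != PackageManager.PERMISSION_GRANTED) {
        // Not granted yet: ask the user and wait for the callback below.
        ActivityCompat.requestPermissions(
                this, new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA);
        return;
    }
    camera = Camera.open(0);   // safe to open the camera now
    camera.startPreview();
    preview.setCamera(camera);
}

@Override
public void onRequestPermissionsResult(int requestCode,
                                       String[] permissions, int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    if (requestCode == REQUEST_CAMERA
            && grantResults.length > 0
            && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
        openCameraIfAllowed();   // permission granted, try again
    }
}
Call openCameraIfAllowed() from onResume() instead of calling Camera.open(0) directly; on Android 6.0+ the open call fails until the user has actually granted the permission.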

Related

Are Canvas.rotate and Canvas.drawBitmap broken, or is there something wrong with my code?

I'm trying to draw an image using Canvas.rotate and/or Canvas.drawBitmap, but every time I use it, the image appears in random locations, no matter the X and Y coordinates I set. Here is my code; let me know if this is an issue I should report or if there is just an error in my code:
import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Paint;
import android.graphics.SurfaceTexture;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.location.Address;
import android.location.Geocoder;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.media.Image;
import android.media.ImageReader;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.animation.Animation;
import android.view.animation.RotateAnimation;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.UUID;
public class Photo_Page extends AppCompatActivity implements SensorEventListener {
@Override
protected void onStart()
{
super.onStart();
GlobalClass application=(GlobalClass) getApplication();
TextView projectnameheader = (TextView) findViewById(R.id.projectnameheader2);
projectnameheader.setText(application.projectnameheader);
TextView projectnameoverlay = (TextView) findViewById(R.id.projectnameoverlay);
projectnameoverlay.setText(application.projectnameheader);
String currentDateTimeString = java.text.DateFormat.getDateTimeInstance().format(new Date());
Button addcodeandnote = (Button) findViewById(R.id.addcodebutton);
EditText entercode2 = (EditText) findViewById(R.id.entercode);
EditText enternote2 = (EditText) findViewById(R.id.enternote);
TextView codedisplay = (TextView) findViewById(R.id.code);
TextView notedisplay = (TextView) findViewById(R.id.note);
Button savecodebutton = (Button) findViewById(R.id.codesavebutton);
TextView dateandtime = (TextView) findViewById(R.id.dateandtime);
dateandtime.setText(currentDateTimeString);
addcodeandnote.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v)
{
if (addcodebuttonpressedtimes>=1) {
addcodebuttonpressedtimes=0;
entercode2.setVisibility(View.INVISIBLE);
enternote2.setVisibility(View.INVISIBLE);
savecodebutton.setVisibility(View.INVISIBLE);
}
else{
addcodebuttonpressedtimes +=1;
entercode2.setVisibility(View.VISIBLE);
enternote2.setVisibility(View.VISIBLE);
savecodebutton.setVisibility(View.VISIBLE);
savecodebutton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
code=entercode2.getText().toString();
note=enternote2.getText().toString();
codedisplay.setText(code);
notedisplay.setText(note);
Toast.makeText(Photo_Page.this, "Code and Note were saved successfully!", Toast.LENGTH_LONG).show();
entercode2.setVisibility(View.INVISIBLE);
enternote2.setVisibility(View.INVISIBLE);
savecodebutton.setVisibility(View.INVISIBLE);
addcodebuttonpressedtimes=0;
if (code==null) {
codedisplay.setVisibility(View.INVISIBLE);
}
else{
codedisplay.setVisibility(View.VISIBLE);
}
if (note==null) {
notedisplay.setVisibility(View.INVISIBLE);
}
else{
notedisplay.setVisibility(View.VISIBLE);
}
}
});
}
}
});
}
String m_path = Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_DCIM).getAbsolutePath();
public String directionNESW;
public List gpslist = new ArrayList();
private final static String TAG = MainActivity.class.getSimpleName();
public Calendar c = Calendar.getInstance();
public SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
public SimpleDateFormat sdftime = new SimpleDateFormat("HHmmss");
public String time;
public String date;
boolean laton = false;
boolean longon = false;
boolean alton = false;
public String names = "Address (GPS signal not found)";
public double latitude;
public double longitude;
public double altitude;
public boolean photowithcode = false;
private Button btnCapture;
private TextureView textureView2;
public String addcodebuttonpressed = "false";
public int addcodebuttonpressedtimes = 0;
public String code = "Code (Empty)";
public String note = "Note (Empty)";
public TextView degrees;
public TextView direction;
public ImageView compass;
public SensorManager sensorManager;
public Sensor accelerometerSensor, magnetometerSensor;
public float[] lastAccelerometer = new float[3];
public float[] lastMagnetometer = new float[3];
public float[] rotationMatrix = new float[9];
public float[] orientation = new float[3];
boolean isLastAccelerometerArrayCopied = false;
boolean isLastMagnetometerArrayCopied = false;
long lastUpdatedTime = 0;
float currentDegrees = 0f;
private ImageView compass2;
private float[] mGravity = new float[3];
private float[] mGeomagnetic = new float[3];
private float azimuth = 0f;
private float currectAzimuth = 0f;
private SensorManager mSensorManager;
//Check state orientation of output image
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static{
ORIENTATIONS.append(Surface.ROTATION_0,90);
ORIENTATIONS.append(Surface.ROTATION_90,0);
ORIENTATIONS.append(Surface.ROTATION_180,270);
ORIENTATIONS.append(Surface.ROTATION_270,180);
}
private String cameraId;
private CameraDevice cameraDevice;
private CameraCaptureSession cameraCaptureSessions;
private CaptureRequest.Builder captureRequestBuilder;
private Size imageDimension;
private ImageReader imageReader;
//Save to FILE
private File file;
private static final int REQUEST_CAMERA_PERMISSION = 200;
private boolean mFlashSupported;
private Handler mBackgroundHandler;
private HandlerThread mBackgroundThread;
CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
cameraDevice = camera;
createCameraPreview();
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
cameraDevice.close();
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int i) {
cameraDevice.close();
cameraDevice=null;
}
};
private TextView gpsdisplay;
public TextView altitudetext;
private LocationManager locationManager;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_photo_page);
compass2 = (ImageView)findViewById(R.id.compass);
degrees = (TextView)findViewById(R.id.degree);
direction = (TextView)findViewById(R.id.direction);
mSensorManager = (SensorManager)getSystemService(SENSOR_SERVICE);
sensorManager = (SensorManager) getSystemService(SENSOR_SERVICE);
accelerometerSensor = sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
magnetometerSensor = sensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
TextView address = (TextView) findViewById(R.id.address);
gpsdisplay = findViewById(R.id.gpscoords);
altitudetext = findViewById(R.id.altitude);
locationManager = (LocationManager) getSystemService(LOCATION_SERVICE);
if(ContextCompat.checkSelfPermission(Photo_Page.this, Manifest.permission.ACCESS_COARSE_LOCATION)!= PackageManager.PERMISSION_GRANTED &&
ContextCompat.checkSelfPermission(Photo_Page.this,Manifest.permission.ACCESS_FINE_LOCATION)!=PackageManager.PERMISSION_GRANTED);
{
ActivityCompat.requestPermissions(Photo_Page.this,new String[]{Manifest.permission.ACCESS_COARSE_LOCATION,Manifest.permission.ACCESS_FINE_LOCATION},69);
}
locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 10, 1, new LocationListener() {
@Override
public void onLocationChanged(Location location) {
latitude = location.getLatitude();
longitude = location.getLongitude();
altitude = location.getAltitude();
if ((alton && laton && longon) == false) {
gpsdisplay.setText(String.format("%.5f", latitude) + ", " + String.format("%.5f", longitude));
laton = true;
longon = true;
altitudetext.setText(String.format("%.2f", altitude) + " Meters");
alton = true;
}
Geocoder geocoder = new Geocoder(Photo_Page.this, Locale.getDefault());
try {
List<Address> addresses = geocoder.getFromLocation(latitude, longitude, 1);
setUpdata(addresses);
} catch (IOException e) {
e.printStackTrace();
}
}
private void setUpdata(List<Address> addresses) {
String add = addresses.get(0).getAddressLine(0);
String city = addresses.get(0).getLocality();
String state = addresses.get(0).getAdminArea();
String zip = addresses.get(0).getPostalCode();
names = add;
address.setText(names);
}
});
textureView2 = (TextureView)findViewById(R.id.textureView);
//From Java 1.4 , you can use keyword 'assert' to check expression true or false
assert textureView2 != null;
textureView2.setSurfaceTextureListener(textureListener);
btnCapture = (Button)findViewById(R.id.btnCapture);
Button btnCapturewithcode = (Button)findViewById(R.id.btnCapturewithcode);
btnCapture.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
takePicture();
}
});
btnCapturewithcode.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
photowithcode = true;
Toast.makeText(Photo_Page.this, time, Toast.LENGTH_SHORT).show();
takePicture();
}
});
}
private void takePicture() {
c = Calendar.getInstance();
time = sdftime.format(c.getTime());
date = sdf.format(c.getTime());
GlobalClass application = (GlobalClass) getApplication();
final File projectfile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), application.projectnameheader);
if (!projectfile.exists()) {
Log.d(TAG, "Folder doesn't exist, creating it...");
boolean rv = projectfile.mkdir();
}
String currentDateTimeString = java.text.DateFormat.getDateTimeInstance().format(new Date());
TextView dateandtime = (TextView) findViewById(R.id.dateandtime);
dateandtime.setText(currentDateTimeString);
gpsdisplay.setText(String.format("%.5f", latitude) + ", " + String.format("%.5f", longitude));
altitudetext.setText(String.format("%.2f", altitude) + " Meters");
if(cameraDevice == null)
return;
CameraManager manager = (CameraManager)getSystemService(Context.CAMERA_SERVICE);
try{
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
Size[] jpegSizes = null;
if(characteristics != null)
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
.getOutputSizes(ImageFormat.JPEG);
//Capture image with custom size
int width = 480;
int height = 640;
if(jpegSizes != null && jpegSizes.length > 0)
{
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
final ImageReader reader = ImageReader.newInstance(width,height,ImageFormat.JPEG,2);
List<Surface> outputSurface = new ArrayList<>(2);
outputSurface.add(reader.getSurface());
outputSurface.add(new Surface(textureView2.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
//Check orientation base on device
time = sdftime.format(c.getTime());
date = sdf.format(c.getTime());
int rotation = getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION,ORIENTATIONS.get(rotation));
file = new File(projectfile+"/"+(application.projectnameheader+"_"+date+"_"+time)+".jpg");
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader imageReader) {
Image image = null;
try{
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
}
catch (FileNotFoundException e)
{
e.printStackTrace();
}
catch (IOException e)
{
e.printStackTrace();
}
finally {
{
if(image != null)
image.close();
}
}
}
private void save(byte[] bytes) throws IOException {
OutputStream outputStream = null;
try{
outputStream = new FileOutputStream(file);
outputStream.write(bytes);
}finally {
if(outputStream != null)
outputStream.close();
}
}
};
reader.setOnImageAvailableListener(readerListener,mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
GlobalClass application = (GlobalClass) getApplication();
BitmapFactory.Options options = new BitmapFactory.Options();
options.inPreferredConfig = Bitmap.Config.ARGB_8888;
Bitmap bitmap = BitmapFactory.decodeFile(file.getAbsolutePath());
Bitmap dest = Bitmap.createBitmap(bitmap.getHeight(), bitmap.getWidth(), Bitmap.Config.ARGB_8888);
Canvas cs = new Canvas(dest);
cs.rotate(90,720 ,720);
Paint tPaint = new Paint();
tPaint.setTextSize(bitmap.getHeight()/25);
tPaint.setColor(Color.WHITE);
tPaint.setStyle(Paint.Style.FILL);
cs.drawBitmap(bitmap, 0f, 0f, null);
float height = tPaint.measureText("yY");
cs.rotate(270,720 ,720);
cs.drawText(application.projectnameheader, 25, bitmap.getHeight()+545+bitmap.getHeight()/25, tPaint);
cs.drawText(currentDateTimeString, 25, bitmap.getHeight()+615+bitmap.getHeight()/25, tPaint);
cs.drawText(String.format("%.0f",currectAzimuth) + "°", 1225, bitmap.getHeight()+690+bitmap.getHeight()/25, tPaint);
cs.drawText(directionNESW, 1235, bitmap.getHeight()+615+bitmap.getHeight()/25, tPaint);
cs.drawText(String.format("%.5f", latitude) + ", " + String.format("%.5f", longitude), 25, bitmap.getHeight()+690+bitmap.getHeight()/25, tPaint);
cs.drawText(String.format("%.2f", altitude) + " Meters", 25, bitmap.getHeight()+755+bitmap.getHeight()/25, tPaint);
cs.drawText(names, 25,bitmap.getHeight()+825+bitmap.getHeight()/25, tPaint);
Bitmap compassbitmap = BitmapFactory.decodeResource(getResources(), R.drawable.compass2);
cs.rotate(azimuth*-1,-190,2500);
cs.scale(0.25f,0.25f,-190,2500);
cs.drawBitmap(compassbitmap, 0, 0, null);
if (photowithcode==true) {
cs.drawText(code, 25, bitmap.getHeight()+895+bitmap.getHeight()/25, tPaint);
cs.drawText(note, 25, bitmap.getHeight()+970+bitmap.getHeight()/25, tPaint);
photowithcode=false;
}
try {
dest.compress(Bitmap.CompressFormat.JPEG, 100, new FileOutputStream(new File(projectfile+"/"+(application.projectnameheader+"_"+date+"_"+time)+"___WITHTEXT.jpg")));
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
gpslist.add(latitude + longitude);
Toast.makeText(Photo_Page.this, "Saved "+file, Toast.LENGTH_SHORT).show();
createCameraPreview();
}
};
cameraDevice.createCaptureSession(outputSurface, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
try{
cameraCaptureSession.capture(captureBuilder.build(),captureListener,mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
}
},mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void createCameraPreview() {
try{
SurfaceTexture texture = textureView2.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(imageDimension.getWidth(),imageDimension.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(surface);
cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
if(cameraDevice == null)
return;
cameraCaptureSessions = cameraCaptureSession;
updatePreview();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Toast.makeText(Photo_Page.this, "Changed", Toast.LENGTH_SHORT).show();
}
},null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void updatePreview() {
if(cameraDevice == null)
Toast.makeText(this, "Error", Toast.LENGTH_SHORT).show();
captureRequestBuilder.set(CaptureRequest.CONTROL_MODE,CaptureRequest.CONTROL_MODE_AUTO);
try{
cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(),null,mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void openCamera() {
CameraManager manager = (CameraManager)getSystemService(Context.CAMERA_SERVICE);
try{
cameraId = manager.getCameraIdList()[0];
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
assert map != null;
imageDimension = map.getOutputSizes(SurfaceTexture.class)[0];
//Check realtime permission if run higher API 23
if(ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED)
{
ActivityCompat.requestPermissions(this,new String[]{
Manifest.permission.CAMERA,
Manifest.permission.WRITE_EXTERNAL_STORAGE
},REQUEST_CAMERA_PERMISSION);
return;
}
manager.openCamera(cameraId,stateCallback,null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1) {
openCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int i, int i1) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == REQUEST_CAMERA_PERMISSION) {
if (grantResults[0] != PackageManager.PERMISSION_GRANTED) {
Toast.makeText(this, "You can't use camera without permission", Toast.LENGTH_SHORT).show();
finish();
}
}
}
@Override
protected void onResume() {
super.onResume();
mSensorManager.registerListener(this,mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD),
SensorManager.SENSOR_DELAY_GAME);
mSensorManager.registerListener(this,mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER),
SensorManager.SENSOR_DELAY_GAME);
startBackgroundThread();
if(textureView2.isAvailable())
openCamera();
else
textureView2.setSurfaceTextureListener(textureListener);
}
@Override
protected void onPause() {
stopBackgroundThread();
super.onPause();
mSensorManager.unregisterListener(this);
}
@Override
public void onSensorChanged(SensorEvent sensorEvent) {
final float alpha= 0.97f;
synchronized (this) {
if(sensorEvent.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
mGravity[0] = alpha*mGravity[0]+(1-alpha)*sensorEvent.values[0];
mGravity[1] = alpha*mGravity[1]+(1-alpha)*sensorEvent.values[1];
mGravity[2] = alpha*mGravity[2]+(1-alpha)*sensorEvent.values[2];
}
if(sensorEvent.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) {
mGeomagnetic[0] = alpha*mGeomagnetic[0]+(1-alpha)*sensorEvent.values[0];
mGeomagnetic[1] = alpha*mGeomagnetic[1]+(1-alpha)*sensorEvent.values[1];
mGeomagnetic[2] = alpha*mGeomagnetic[2]+(1-alpha)*sensorEvent.values[2];
}
float R[] = new float[9];
float I[] = new float[9];
boolean success = SensorManager.getRotationMatrix(R,I,mGravity,mGeomagnetic);
if (success){
float orientation[] = new float[3];
SensorManager.getOrientation(R,orientation);
azimuth = (float)Math.toDegrees(orientation[0]);
azimuth = (azimuth+360)%360;
//
Animation anim = new RotateAnimation(-currectAzimuth,-azimuth, Animation.RELATIVE_TO_SELF,0.5f,Animation.RELATIVE_TO_SELF,0.5f);
currectAzimuth = azimuth;
degrees.setText(String.format("%.0f",currectAzimuth) + "°");
anim.setDuration(500);
anim.setRepeatCount(0);
anim.setFillAfter(true);
compass2.startAnimation(anim);
}
}
}
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
}
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try{
mBackgroundThread.join();
mBackgroundThread= null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("Camera Background");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
}
My goal with this code is to draw a compass image on each picture as it is saved to a dedicated folder on the device. I tested this by taking pictures, and even without changing anything in my code the compass would move to a random location, sometimes not appearing at all.
You seem to have a lot of hard-coded positions for the centre of the rotations and the other drawn items, and those values may not suit the actual size of your bitmap-backed canvas.
It is probably best to make the centre of rotation the centre of the bitmap, i.e. bitmap.getWidth()/2f and bitmap.getHeight()/2f, and to make everything else relative to the size of the bitmap.
e.g.
cs.rotate(90, bitmap.getWidth()/2f, bitmap.getHeight()/2f);
You also rotate around a value that changes a lot depending on how you hold the phone and on fluctuations in the magnetic field. The magnetic sensor's readings are not that accurate, and again the centre of this azimuth rotation is an odd hard-coded location (the same goes for the 0.25f scale).
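A rough sketch of that suggestion for the compass overlay specifically, reusing the question's cs, azimuth and compassbitmap variables; the 20% size factor and 25px margin are purely illustrative, and everything is derived from the canvas size instead of hard-coded pixels:
// Compass overlay only (sketch): rotation and scale both pivot on the compass centre.
float compassSize = cs.getWidth() * 0.2f;                  // ~20% of the canvas width (example value)
float compassCx = compassSize / 2f + 25;                   // bottom-left corner, 25px margin
float compassCy = cs.getHeight() - compassSize / 2f - 25;

cs.save();
cs.rotate(-azimuth, compassCx, compassCy);                 // spin about the compass centre
float scale = compassSize / compassbitmap.getWidth();
cs.scale(scale, scale, compassCx, compassCy);              // shrink about the same pivot
cs.drawBitmap(compassbitmap,
        compassCx - compassbitmap.getWidth() / 2f,
        compassCy - compassbitmap.getHeight() / 2f, null);
cs.restore();
Because the pivot is the compass's own centre and the offsets come from the canvas dimensions, the compass stays anchored to the same corner of the image regardless of the capture resolution.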

How to crop only the face (instead of the whole body) in oval form using the Google Vision Face Detection API in Android

I want to make an app that takes photos of faces only, but when I save to storage I get the whole image instead of just the face. How can I crop the face and save it to storage with the help of the Google Vision face detection API?
Also, how do I use a Frame in my code to get the list of faces, and how can I convert a face into a bitmap and save it to my storage?
main.xml
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:id="#+id/topLayout"
android:orientation="vertical"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:keepScreenOn="true">
<com.google.android.gms.samples.vision.face.facetracker.ui.camera.CameraSourcePreview
android:id="#+id/preview"
android:layout_width="match_parent"
android:layout_height="match_parent">
<com.google.android.gms.samples.vision.face.facetracker.ui.camera.GraphicOverlay
android:id="#+id/faceOverlay"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</com.google.android.gms.samples.vision.face.facetracker.ui.camera.CameraSourcePreview>
</LinearLayout>
<android.support.v7.widget.AppCompatButton
android:id="#+id/take_pic_btn"
android:layout_gravity="bottom"
android:gravity="center"
android:background="#color/green"
android:layout_margin="10dp"
android:text="Take Image"
android:textStyle="bold"
android:textSize="20sp"
android:textAllCaps="false"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
</FrameLayout>
FaceTrackerActivity.java
public final class FaceTrackerActivity extends AppCompatActivity {
private static final String TAG = "FaceTracker";
private CameraSource mCameraSource = null;
private CameraSourcePreview mPreview;
private GraphicOverlay mGraphicOverlay;
private static final int RC_HANDLE_GMS = 9001;
// permission request codes need to be < 256
private static final int RC_HANDLE_CAMERA_PERM = 2;
private Button takePicButton;
FaceDetector detector;
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setContentView(R.layout.main);
mPreview = (CameraSourcePreview) findViewById(R.id.preview);
mGraphicOverlay = (GraphicOverlay) findViewById(R.id.faceOverlay);
takePicButton=(Button)findViewById(R.id.take_pic_btn);
int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
int gc = ActivityCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE);
if (rc == PackageManager.PERMISSION_GRANTED && gc == PackageManager.PERMISSION_GRANTED) {
createCameraSource();
} else {
requestCameraPermission();
}
takePicButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Toast.makeText(getApplicationContext(),"dilip",Toast.LENGTH_LONG).show();
captureImage();
}
});
}
private void requestCameraPermission() {
Log.w(TAG, "Camera permission is not granted. Requesting permission");
final String[] permissions = new String[]{Manifest.permission.CAMERA,Manifest.permission.WRITE_EXTERNAL_STORAGE};
if (!ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.CAMERA) && !ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.WRITE_EXTERNAL_STORAGE) ) {
ActivityCompat.requestPermissions(this, permissions, RC_HANDLE_CAMERA_PERM);
return;
}
final Activity thisActivity = this;
View.OnClickListener listener = new View.OnClickListener() {
@Override
public void onClick(View view) {
ActivityCompat.requestPermissions(thisActivity, permissions,
RC_HANDLE_CAMERA_PERM);
}
};
Snackbar.make(mGraphicOverlay, R.string.permission_camera_rationale,
Snackbar.LENGTH_INDEFINITE)
.setAction(R.string.ok, listener)
.show();
}
private void createCameraSource() {
Context context = getApplicationContext();
/* FaceDetector detector = new FaceDetector.Builder(context)
.setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
.build();
detector.setProcessor(
new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
.build());*/
detector= new FaceDetector.Builder(context)
.setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
.build();
MyFaceDetector myFaceDetector = new MyFaceDetector(detector);
detector.setProcessor(
new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
.build());
mCameraSource = new CameraSource.Builder(context, myFaceDetector)
.build();
if (!detector.isOperational()) {
}
mCameraSource = new CameraSource.Builder(context, detector)
.setRequestedPreviewSize(640, 480)
.setFacing(CameraSource.CAMERA_FACING_FRONT)
.setRequestedFps(10.0f)
.build();
}
/**
* Restarts the camera.
*/
@Override
protected void onResume() {
super.onResume();
startCameraSource();
}
/**
* Stops the camera.
*/
@Override
protected void onPause() {
super.onPause();
mPreview.stop();
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mCameraSource != null) {
mCameraSource.release();
}
}
/**
* Callback for the result from requesting permissions. This method
* is invoked for every call on {@link #requestPermissions(String[], int)}.
* <p>
* <strong>Note:</strong> It is possible that the permissions request interaction
* with the user is interrupted. In this case you will receive empty permissions
* and results arrays which should be treated as a cancellation.
* </p>
*
* @param requestCode The request code passed in {@link #requestPermissions(String[], int)}.
* @param permissions The requested permissions. Never null.
* @param grantResults The grant results for the corresponding permissions
* which is either {@link PackageManager#PERMISSION_GRANTED}
* or {@link PackageManager#PERMISSION_DENIED}. Never null.
* @see #requestPermissions(String[], int)
*/
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
if (requestCode != RC_HANDLE_CAMERA_PERM) {
Log.d(TAG, "Got unexpected permission result: " + requestCode);
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
return;
}
if (grantResults.length != 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED &&grantResults[1] == PackageManager.PERMISSION_GRANTED) {
Log.d(TAG, "Camera permission granted - initialize the camera source");
// we have permission, so create the camerasource
createCameraSource();
return;
}
Log.e(TAG, "Permission not granted: results len = " + grantResults.length +
" Result code = " + (grantResults.length > 0 ? grantResults[0] : "(empty)"));
DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
finish();
}
};
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Face Tracker sample")
.setMessage(R.string.no_camera_permission)
.setPositiveButton(R.string.ok, listener)
.show();
}
//==============================================================================================
// Camera Source Preview
//==============================================================================================
/**
* Starts or restarts the camera source, if it exists. If the camera source doesn't exist yet
* (e.g., because onResume was called before the camera source was created), this will be called
* again when the camera source is created.
*/
private void startCameraSource() {
// check that the device has play services available.
int code = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(
getApplicationContext());
if (code != ConnectionResult.SUCCESS) {
Dialog dlg =
GoogleApiAvailability.getInstance().getErrorDialog(this, code, RC_HANDLE_GMS);
dlg.show();
}
if (mCameraSource != null) {
try {
mPreview.start(mCameraSource, mGraphicOverlay);
} catch (IOException e) {
Log.e(TAG, "Unable to start camera source.", e);
mCameraSource.release();
mCameraSource = null;
}
}
}
//==============================================================================================
// Graphic Face Tracker
//==============================================================================================
/**
* Factory for creating a face tracker to be associated with a new face. The multiprocessor
* uses this factory to create face trackers as needed -- one for each individual.
*/
private class GraphicFaceTrackerFactory implements MultiProcessor.Factory<Face> {
@Override
public Tracker<Face> create(Face face) {
return new GraphicFaceTracker(mGraphicOverlay);
}
}
/**
* Face tracker for each detected individual. This maintains a face graphic within the app's
* associated face overlay.
*/
private class GraphicFaceTracker extends Tracker<Face> {
private GraphicOverlay mOverlay;
private FaceGraphic mFaceGraphic;
GraphicFaceTracker(GraphicOverlay overlay) {
mOverlay = overlay;
mFaceGraphic = new FaceGraphic(overlay);
}
/**
* Start tracking the detected face instance within the face overlay.
*/
@Override
public void onNewItem(int faceId, Face item) {
mFaceGraphic.setId(faceId);
}
@Override
public void onUpdate(FaceDetector.Detections<Face> detectionResults, Face face) {
mOverlay.add(mFaceGraphic);
mFaceGraphic.updateFace(face);
}
/**
* Hide the graphic when the corresponding face was not detected. This can happen for
* intermediate frames temporarily (e.g., if the face was momentarily blocked from
* view).
*/
@Override
public void onMissing(FaceDetector.Detections<Face> detectionResults) {
mOverlay.remove(mFaceGraphic);
}
/**
* Called when the face is assumed to be gone for good. Remove the graphic annotation from
* the overlay.
*/
@Override
public void onDone() {
mOverlay.remove(mFaceGraphic);
}
}
private void captureImage() {
mPreview.setDrawingCacheEnabled(true);
final Bitmap drawingCache = mPreview.getDrawingCache();
mCameraSource.takePicture(null, new CameraSource.PictureCallback() {
@Override
public void onPictureTaken(byte[] bytes) {
int orientation = Exif.getOrientation(bytes);
Bitmap temp = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
Bitmap picture = rotateImage(temp,orientation);
Bitmap overlay = Bitmap.createBitmap(mGraphicOverlay.getWidth(),mGraphicOverlay.getHeight(),picture.getConfig());
Canvas canvas = new Canvas(overlay);
Matrix matrix = new Matrix();
matrix.setScale((float)overlay.getWidth()/(float)picture.getWidth(),(float)overlay.getHeight()/(float)picture.getHeight());
// mirror by inverting scale and translating
matrix.preScale(-1, 1);
matrix.postTranslate(canvas.getWidth(), 0);
Paint paint = new Paint();
canvas.drawBitmap(picture,matrix,paint);
canvas.drawBitmap(drawingCache,0,0,paint);
try {
String mainpath = getExternalStorageDirectory() + separator + "MaskIt" + separator + "images" + separator;
File basePath = new File(mainpath);
if (!basePath.exists())
Log.d("CAPTURE_BASE_PATH", basePath.mkdirs() ? "Success": "Failed");
String path = mainpath + "photo_" + getPhotoTime() + ".jpg";
File captureFile = new File(path);
captureFile.createNewFile();
if (!captureFile.exists())
Log.d("CAPTURE_FILE_PATH", captureFile.createNewFile() ? "Success": "Failed");
FileOutputStream stream = new FileOutputStream(captureFile);
overlay.compress(Bitmap.CompressFormat.PNG, 100, stream);
stream.flush();
stream.close();
picture.recycle();
drawingCache.recycle();
mPreview.setDrawingCacheEnabled(false);
} catch (IOException e) {
e.printStackTrace();
}
}
private String getPhotoTime() {
DateFormat dateFormatter = new SimpleDateFormat("yyyyMMdd hhmmss");
dateFormatter.setLenient(false);
Date today = new Date();
String s = dateFormatter.format(today);
return s;
}
});
}
private Bitmap rotateImage(Bitmap bm, int i) {
Matrix matrix = new Matrix();
switch (i) {
case ExifInterface.ORIENTATION_NORMAL:
return bm;
case ExifInterface.ORIENTATION_FLIP_HORIZONTAL:
matrix.setScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_180:
matrix.setRotate(180);
break;
case ExifInterface.ORIENTATION_FLIP_VERTICAL:
matrix.setRotate(180);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_TRANSPOSE:
matrix.setRotate(90);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_90:
matrix.setRotate(90);
break;
case ExifInterface.ORIENTATION_TRANSVERSE:
matrix.setRotate(-90);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_270:
matrix.setRotate(-90);
break;
default:
return bm;
}
try {
Bitmap bmRotated = Bitmap.createBitmap(bm, 0, 0, bm.getWidth(), bm.getHeight(), matrix, true);
bm.recycle();
return bmRotated;
} catch (OutOfMemoryError e) {
e.printStackTrace();
return null;
}
}
}
CameraSourcePreview.java
package com.google.android.gms.samples.vision.face.facetracker.ui.camera;
import android.content.Context;
import android.content.res.Configuration;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.ViewGroup;
import com.google.android.gms.common.images.Size;
import com.google.android.gms.vision.CameraSource;
import java.io.IOException;
public class CameraSourcePreview extends ViewGroup {
private static final String TAG = "CameraSourcePreview";
private Context mContext;
private SurfaceView mSurfaceView;
private boolean mStartRequested;
private boolean mSurfaceAvailable;
private CameraSource mCameraSource;
private GraphicOverlay mOverlay;
public CameraSourcePreview(Context context, AttributeSet attrs) {
super(context, attrs);
mContext = context;
mStartRequested = false;
mSurfaceAvailable = false;
mSurfaceView = new SurfaceView(context);
mSurfaceView.getHolder().addCallback(new SurfaceCallback());
addView(mSurfaceView);
}
public void start(CameraSource cameraSource) throws IOException {
if (cameraSource == null) {
stop();
}
mCameraSource = cameraSource;
if (mCameraSource != null) {
mStartRequested = true;
startIfReady();
}
}
public void start(CameraSource cameraSource, GraphicOverlay overlay) throws IOException {
mOverlay = overlay;
start(cameraSource);
}
public void stop() {
if (mCameraSource != null) {
mCameraSource.stop();
}
}
public void release() {
if (mCameraSource != null) {
mCameraSource.release();
mCameraSource = null;
}
}
private void startIfReady() throws IOException {
if (mStartRequested && mSurfaceAvailable) {
mCameraSource.start(mSurfaceView.getHolder());
if (mOverlay != null) {
Size size = mCameraSource.getPreviewSize();
int min = Math.min(size.getWidth(), size.getHeight());
int max = Math.max(size.getWidth(), size.getHeight());
if (isPortraitMode()) {
// Swap width and height sizes when in portrait, since it will be rotated by
// 90 degrees
mOverlay.setCameraInfo(min, max, mCameraSource.getCameraFacing());
} else {
mOverlay.setCameraInfo(max, min, mCameraSource.getCameraFacing());
}
mOverlay.clear();
}
mStartRequested = false;
}
}
private class SurfaceCallback implements SurfaceHolder.Callback {
@Override
public void surfaceCreated(SurfaceHolder surface) {
mSurfaceAvailable = true;
try {
startIfReady();
} catch (IOException e) {
Log.e(TAG, "Could not start camera source.", e);
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surface) {
mSurfaceAvailable = false;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
int previewWidth = 320;
int previewHeight = 240;
if (mCameraSource != null) {
Size size = mCameraSource.getPreviewSize();
if (size != null) {
previewWidth = size.getWidth();
previewHeight = size.getHeight();
}
}
// Swap width and height sizes when in portrait, since it will be rotated 90 degrees
if (isPortraitMode()) {
int tmp = previewWidth;
previewWidth = previewHeight;
previewHeight = tmp;
}
final int viewWidth = right - left;
final int viewHeight = bottom - top;
int childWidth;
int childHeight;
int childXOffset = 0;
int childYOffset = 0;
float widthRatio = (float) viewWidth / (float) previewWidth;
float heightRatio = (float) viewHeight / (float) previewHeight;
// To fill the view with the camera preview, while also preserving the correct aspect ratio,
// it is usually necessary to slightly oversize the child and to crop off portions along one
// of the dimensions. We scale up based on the dimension requiring the most correction, and
// compute a crop offset for the other dimension.
if (widthRatio > heightRatio) {
childWidth = viewWidth;
childHeight = (int) ((float) previewHeight * widthRatio);
childYOffset = (childHeight - viewHeight) / 2;
} else {
childWidth = (int) ((float) previewWidth * heightRatio);
childHeight = viewHeight;
childXOffset = (childWidth - viewWidth) / 2;
}
for (int i = 0; i < getChildCount(); ++i) {
// One dimension will be cropped. We shift child over or up by this offset and adjust
// the size to maintain the proper aspect ratio.
getChildAt(i).layout(
-1 * childXOffset, -1 * childYOffset,
childWidth - childXOffset, childHeight - childYOffset);
}
try {
startIfReady();
} catch (IOException e) {
Log.e(TAG, "Could not start camera source.", e);
}
}
private boolean isPortraitMode() {
int orientation = mContext.getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
return false;
}
if (orientation == Configuration.ORIENTATION_PORTRAIT) {
return true;
}
Log.d(TAG, "isPortraitMode returning false by default");
return false;
}
}
GraphicOverlay.java
package com.google.android.gms.samples.vision.face.facetracker.ui.camera;
import android.content.Context;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.view.View;
import com.google.android.gms.vision.CameraSource;
import java.util.HashSet;
import java.util.Set;
public class GraphicOverlay extends View {
private final Object mLock = new Object();
private int mPreviewWidth;
private float mWidthScaleFactor = 1.0f;
private int mPreviewHeight;
private float mHeightScaleFactor = 1.0f;
private int mFacing = CameraSource.CAMERA_FACING_BACK;
private Set<Graphic> mGraphics = new HashSet<>();
public static abstract class Graphic {
private GraphicOverlay mOverlay;
public Graphic(GraphicOverlay overlay) {
mOverlay = overlay;
}
public abstract void draw(Canvas canvas);
public float scaleX(float horizontal) {
return horizontal * mOverlay.mWidthScaleFactor;
}
public float scaleY(float vertical) {
return vertical * mOverlay.mHeightScaleFactor;
}
public float translateX(float x) {
if (mOverlay.mFacing == CameraSource.CAMERA_FACING_FRONT) {
return mOverlay.getWidth() - scaleX(x);
} else {
return scaleX(x);
}
}
public float translateY(float y) {
return scaleY(y);
}
public void postInvalidate() {
mOverlay.postInvalidate();
}
}
public GraphicOverlay(Context context, AttributeSet attrs) {
super(context, attrs);
}
public void clear() {
synchronized (mLock) {
mGraphics.clear();
}
postInvalidate();
}
public void add(Graphic graphic) {
synchronized (mLock) {
mGraphics.add(graphic);
}
postInvalidate();
}
public void remove(Graphic graphic) {
synchronized (mLock) {
mGraphics.remove(graphic);
}
postInvalidate();
}
public void setCameraInfo(int previewWidth, int previewHeight, int facing) {
synchronized (mLock) {
mPreviewWidth = previewWidth;
mPreviewHeight = previewHeight;
mFacing = facing;
}
postInvalidate();
}
/**
* Draws the overlay with its associated graphic objects.
*/
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
synchronized (mLock) {
if ((mPreviewWidth != 0) && (mPreviewHeight != 0)) {
mWidthScaleFactor = (float) canvas.getWidth() / (float)mPreviewWidth;
mHeightScaleFactor = (float) canvas.getHeight() / (float) mPreviewHeight;
}
for (Graphic graphic : mGraphics) {
graphic.draw(canvas);
}
}
}
}
You should pass the captured image to the FaceDetector API and crop the face out afterwards.
fun getFace(context: Context, data: ByteArray): Bitmap? {
try {
val imageStream = ByteArrayInputStream(data)
var bitmap = BitmapFactory.decodeStream(imageStream)
if (bitmap.width > bitmap.height) {
val matrix = Matrix()
matrix.postRotate(270f)
if (bitmap.width > 1500) matrix.postScale(0.5f, 0.5f)
bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, true)
}
val faceDetector = FaceDetector.Builder(context).setProminentFaceOnly(true).setTrackingEnabled(false).build()
val frame = Frame.Builder().setBitmap(bitmap).build()
val faces = faceDetector.detect(frame)
var results: Bitmap? = null
for (i in 0 until faces.size()) {
val thisFace = faces.valueAt(i)
val x = thisFace.position.x
val y = thisFace.position.y
val x2 = x / 4 + thisFace.width
val y2 = y / 4 + thisFace.height
results = Bitmap.createBitmap(bitmap, x.toInt(), y.toInt(), x2.toInt(), y2.toInt())
}
return results
} catch (e: Exception) {
Log.e("GET_FACE", e.message)
}
return null
}
Source
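The snippet above crops a rectangle; the question also asks for an oval. Below is a hedged Java sketch (the class and method names FaceCropUtil/ovalCrop are invented for illustration): it masks the cropped face bitmap with an inscribed oval by painting the oval through a BitmapShader, leaving the corners transparent.
import android.graphics.Bitmap;
import android.graphics.BitmapShader;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.RectF;
import android.graphics.Shader;

public final class FaceCropUtil {
    // Returns a copy of 'face' with everything outside the inscribed oval transparent.
    public static Bitmap ovalCrop(Bitmap face) {
        Bitmap out = Bitmap.createBitmap(face.getWidth(), face.getHeight(),
                Bitmap.Config.ARGB_8888);
        Canvas canvas = new Canvas(out);
        Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
        // The shader fills the oval with the face pixels themselves.
        paint.setShader(new BitmapShader(face, Shader.TileMode.CLAMP, Shader.TileMode.CLAMP));
        canvas.drawOval(new RectF(0, 0, face.getWidth(), face.getHeight()), paint);
        return out;
    }
}
For example, pass the bitmap returned by getFace(...) into ovalCrop(...) inside onPictureTaken, and save the result with Bitmap.CompressFormat.PNG rather than JPEG so the transparent corners are preserved.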

Integrating ZBarScanner with an Android app

I am developing a mobile app that integrates with ZBarScanner. However, I get the errors "ZBarScannerActivity cannot be resolved to a type" and "ZBarConstants cannot be resolved to a variable". Please assist with the following code:
MainActivity.java:
package com.example.vscanner;
import net.sourceforge.zbar.Symbol;
import android.os.Bundle;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.view.Menu;
import android.view.View;
import android.widget.Toast;
public class MainActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
}
private static final int ZBAR_SCANNER_REQUEST = 0;
private static final int ZBAR_QR_SCANNER_REQUEST = 1;
public void launchScanner(View v) {
if (isCameraAvailable()) {
Intent intent = new Intent(this, ZBarScannerActivity.class);
startActivityForResult(intent, ZBAR_SCANNER_REQUEST);
} else {
Toast.makeText(this, "Rear Facing Camera Unavailable", Toast.LENGTH_SHORT).show();
}
}
public void launchQRScanner(View v) {
if (isCameraAvailable()) {
Intent intent = new Intent(this, ZBarScannerActivity.class);
intent.putExtra(ZBarConstants.SCAN_MODES, new int[]{Symbol.QRCODE});
startActivityForResult(intent, ZBAR_SCANNER_REQUEST);
} else {
Toast.makeText(this, "Rear Facing Camera Unavailable", Toast.LENGTH_SHORT).show();
}
}
public boolean isCameraAvailable() {
PackageManager pm = getPackageManager();
return pm.hasSystemFeature(PackageManager.FEATURE_CAMERA);
}
#Override
protected void onActivityResult(int requestCode, int resultCode, Intent data)
{
if (resultCode == RESULT_OK)
{
// Scan result is available by making a call to data.getStringExtra(ZBarConstants.SCAN_RESULT)
// Type of the scan result is available by making a call to data.getStringExtra(ZBarConstants.SCAN_RESULT_TYPE)
Toast.makeText(this, "Scan Result = " + data.getStringExtra(ZBarConstants.SCAN_RESULT), Toast.LENGTH_SHORT).show();
Toast.makeText(this, "Scan Result Type = " + data.getStringExtra(ZBarConstants.SCAN_RESULT_TYPE), Toast.LENGTH_SHORT).show();
// The value of type indicates one of the symbols listed in Advanced Options below.
} else if(resultCode == RESULT_CANCELED) {
Toast.makeText(this, "Camera unavailable", Toast.LENGTH_SHORT).show();
}
}
#Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
}
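launchScanner(View) and launchQRScanner(View) are meant to be wired up from the layout via android:onClick. A minimal activity_main.xml sketch; the button ids and labels are assumptions, only the method names come from the activity above. The classes below also assume the ZBar Android SDK (its jar and native libraries) is on the build path, since they import net.sourceforge.zbar and load the iconv native library.
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical" >
    <Button
        android:id="@+id/btn_scan"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:onClick="launchScanner"
        android:text="Scan barcode" />
    <Button
        android:id="@+id/btn_scan_qr"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:onClick="launchQRScanner"
        android:text="Scan QR code" />
</LinearLayout>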
Paste these 3 classes into your source folder.
ZBarConstants.java
package com.example.vscanner;
public interface ZBarConstants {
public static final String SCAN_MODES = "SCAN_MODES";
public static final String SCAN_RESULT = "SCAN_RESULT";
public static final String SCAN_RESULT_TYPE = "SCAN_RESULT_TYPE";
public static final String ERROR_INFO = "ERROR_INFO";
}
ZBarScannerActivity.java
package com.example.vscanner;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.hardware.Camera;
import android.os.Bundle;
import android.os.Handler;
import android.text.TextUtils;
import android.view.Window;
import android.view.WindowManager;
import net.sourceforge.zbar.Config;
import net.sourceforge.zbar.Image;
import net.sourceforge.zbar.ImageScanner;
import net.sourceforge.zbar.Symbol;
import net.sourceforge.zbar.SymbolSet;
public class ZBarScannerActivity extends Activity implements Camera.PreviewCallback, ZBarConstants {
private static final String TAG = "ZBarScannerActivity";
private CameraPreview mPreview;
private Camera mCamera;
private ImageScanner mScanner;
private Handler mAutoFocusHandler;
private boolean mPreviewing = true;
static {
System.loadLibrary("iconv");
}
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if(!isCameraAvailable()) {
// Cancel request if there is no rear-facing camera.
cancelRequest();
return;
}
// Hide the window title.
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
mAutoFocusHandler = new Handler();
// Create and configure the ImageScanner;
setupScanner();
// Create a RelativeLayout container that will hold a SurfaceView,
// and set it as the content of our activity.
mPreview = new CameraPreview(this, this, autoFocusCB);
setContentView(mPreview);
}
public void setupScanner() {
mScanner = new ImageScanner();
mScanner.setConfig(0, Config.X_DENSITY, 3);
mScanner.setConfig(0, Config.Y_DENSITY, 3);
int[] symbols = getIntent().getIntArrayExtra(SCAN_MODES);
if (symbols != null) {
mScanner.setConfig(Symbol.NONE, Config.ENABLE, 0);
for (int symbol : symbols) {
mScanner.setConfig(symbol, Config.ENABLE, 1);
}
}
}
#Override
protected void onResume() {
super.onResume();
// Open the default i.e. the first rear facing camera.
mCamera = Camera.open();
if(mCamera == null) {
// Cancel request if mCamera is null.
cancelRequest();
return;
}
mPreview.setCamera(mCamera);
mPreview.showSurfaceView();
mPreviewing = true;
}
#Override
protected void onPause() {
super.onPause();
// Because the Camera object is a shared resource, it's very
// important to release it when the activity is paused.
if (mCamera != null) {
mPreview.setCamera(null);
mCamera.cancelAutoFocus();
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
mCamera.release();
// According to Jason Kuang on http://stackoverflow.com/questions/6519120/how-to-recover-camera-preview-from-sleep,
// there might be surface recreation problems when the device goes to sleep. So let's just hide the
// SurfaceView here and recreate it on resume.
mPreview.hideSurfaceView();
mPreviewing = false;
mCamera = null;
}
}
public boolean isCameraAvailable() {
PackageManager pm = getPackageManager();
return pm.hasSystemFeature(PackageManager.FEATURE_CAMERA);
}
public void cancelRequest() {
Intent dataIntent = new Intent();
dataIntent.putExtra(ERROR_INFO, "Camera unavailable");
setResult(Activity.RESULT_CANCELED, dataIntent);
finish();
}
public void onPreviewFrame(byte[] data, Camera camera) {
Camera.Parameters parameters = camera.getParameters();
Camera.Size size = parameters.getPreviewSize();
Image barcode = new Image(size.width, size.height, "Y800");
barcode.setData(data);
int result = mScanner.scanImage(barcode);
if (result != 0) {
mCamera.cancelAutoFocus();
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
mPreviewing = false;
SymbolSet syms = mScanner.getResults();
for (Symbol sym : syms) {
String symData = sym.getData();
if (!TextUtils.isEmpty(symData)) {
Intent dataIntent = new Intent();
dataIntent.putExtra(SCAN_RESULT, symData);
dataIntent.putExtra(SCAN_RESULT_TYPE, String.valueOf(sym.getType())); // stored as a String so getStringExtra() in MainActivity can read it
setResult(Activity.RESULT_OK, dataIntent);
finish();
break;
}
}
}
}
private Runnable doAutoFocus = new Runnable() {
public void run() {
if(mCamera != null && mPreviewing) {
mCamera.autoFocus(autoFocusCB);
}
}
};
// Mimic continuous auto-focusing
Camera.AutoFocusCallback autoFocusCB = new Camera.AutoFocusCallback() {
public void onAutoFocus(boolean success, Camera camera) {
mAutoFocusHandler.postDelayed(doAutoFocus, 1000);
}
};
}
CameraPreview.java
package com.example.vscanner;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera.Size;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import java.io.IOException;
import java.util.List;
class CameraPreview extends ViewGroup implements SurfaceHolder.Callback {
private final String TAG = "CameraPreview";
SurfaceView mSurfaceView;
SurfaceHolder mHolder;
Size mPreviewSize;
List<Size> mSupportedPreviewSizes;
Camera mCamera;
PreviewCallback mPreviewCallback;
AutoFocusCallback mAutoFocusCallback;
CameraPreview(Context context, PreviewCallback previewCallback, AutoFocusCallback autoFocusCb) {
super(context);
mPreviewCallback = previewCallback;
mAutoFocusCallback = autoFocusCb;
mSurfaceView = new SurfaceView(context);
addView(mSurfaceView);
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = mSurfaceView.getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void setCamera(Camera camera) {
mCamera = camera;
if (mCamera != null) {
mSupportedPreviewSizes = mCamera.getParameters().getSupportedPreviewSizes();
requestLayout();
}
}
#Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
// We purposely disregard child measurements because we act as a
// wrapper to a SurfaceView that centers the camera preview instead
// of stretching it.
final int width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec);
final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
setMeasuredDimension(width, height);
if (mSupportedPreviewSizes != null) {
mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width, height);
}
}
#Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
if (changed && getChildCount() > 0) {
final View child = getChildAt(0);
final int width = r - l;
final int height = b - t;
int previewWidth = width;
int previewHeight = height;
if (mPreviewSize != null) {
previewWidth = mPreviewSize.width;
previewHeight = mPreviewSize.height;
}
// Center the child SurfaceView within the parent.
if (width * previewHeight > height * previewWidth) {
final int scaledChildWidth = previewWidth * height / previewHeight;
child.layout((width - scaledChildWidth) / 2, 0,
(width + scaledChildWidth) / 2, height);
} else {
final int scaledChildHeight = previewHeight * width / previewWidth;
child.layout(0, (height - scaledChildHeight) / 2,
width, (height + scaledChildHeight) / 2);
}
}
}
public void hideSurfaceView() {
mSurfaceView.setVisibility(View.INVISIBLE);
}
public void showSurfaceView() {
mSurfaceView.setVisibility(View.VISIBLE);
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
try {
if (mCamera != null) {
mCamera.setPreviewDisplay(holder);
}
} catch (IOException exception) {
Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// Surface will be destroyed when we return, so stop the preview.
if (mCamera != null) {
mCamera.cancelAutoFocus();
mCamera.stopPreview();
}
}
private Size getOptimalPreviewSize(List<Size> sizes, int w, int h) {
final double ASPECT_TOLERANCE = 0.1;
double targetRatio = (double) w / h;
if (sizes == null) return null;
Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
// Try to find a size that matches both the target aspect ratio and height
for (Size size : sizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
// Could not find a size matching the aspect ratio; ignore that requirement
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if (holder.getSurface() == null){
// preview surface does not exist
return;
}
if (mCamera != null) {
// Now that the size is known, set up the camera parameters and begin
// the preview.
Camera.Parameters parameters = mCamera.getParameters();
parameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height);
requestLayout();
mCamera.setParameters(parameters);
mCamera.setPreviewCallback(mPreviewCallback);
mCamera.startPreview();
mCamera.autoFocus(mAutoFocusCallback);
}
}
}

How does pinch zoom work with panning for image in Android

Goal
An activity is made to view an image; the user can pinch-zoom or pan it. The image starts centered on the screen. Pinch zoom should stay centered on the image's center, even after the image has been panned somewhere else on the screen.
The image to display is downloaded from a given URL, which is passed as an intent extra when the image-viewing activity is started.
Pinch zoom is implemented by postScale(), pan by postTranslate().
Problem
After panning the image somewhere, the pinch-zoom center is still at the center of the screen. I tried to track the center of the image after it has been moved, but my code doesn't work that way. Please give me some ideas.
The image downloading and panning work well.
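A common approach, sketched here before the full code: capture the midpoint of the two fingers when the second pointer goes down and use it as the pivot of postScale(), instead of the view's center. The names mirror the fields in the activity below and are otherwise assumptions:
// Sketch: remember the pinch midpoint when the second finger lands,
// then pivot postScale() on it instead of on the view center.
PointF mid = new PointF();
private void midPoint(PointF point, MotionEvent event) {
    point.set((event.getX(0) + event.getX(1)) / 2f,
              (event.getY(0) + event.getY(1)) / 2f);
}
// In MotionEvent.ACTION_POINTER_DOWN:
//     oldDist = spacing(event);
//     midPoint(mid, event);              // pinch center in view coordinates
//     savedMatrix.set(matrix);
//     mode = PINCH_ZOOM;
// In MotionEvent.ACTION_MOVE while mode == PINCH_ZOOM:
//     float scale = spacing(event) / oldDist;
//     matrix.set(savedMatrix);
//     matrix.postScale(scale, scale, mid.x, mid.y);   // zoom around the fingers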
Code
activity_image_viewer_layout.xml
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent" >
<LinearLayout
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:gravity="center"
android:background="#color/MyPureBlack" >
<LinearLayout
android:id="#+id/progressbar_wrapper"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:orientation="vertical" >
<ProgressBar
android:id="#+id/progressbar"
style="?android:attr/progressBarStyleHorizontal"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:max="100"
android:progress="0"
android:layout_marginLeft="20dp"
android:layout_marginRight="20dp"
android:layout_gravity="center" >
</ProgressBar>
</LinearLayout>
<ImageView
android:id="#+id/image_viewer"
android:visibility="gone"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:background="#color/MyPureBlack"
android:scaleType="matrix" >
</ImageView>
</LinearLayout>
</FrameLayout>
ActivityImageViewer.java
package com.com2us.hubapp.android;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLConnection;
import org.apache.http.util.ByteArrayBuffer;
import android.app.Activity;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.graphics.PointF;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.FloatMath;
import android.util.Log;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnTouchListener;
import android.view.ViewTreeObserver;
import android.view.ViewTreeObserver.OnGlobalLayoutListener;
import android.view.animation.AlphaAnimation;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
public class ActivityImageViewer extends Activity {
File imageFile = null;
// Matrices for pinch zoom and pan
Matrix matrix = new Matrix();
Matrix savedMatrix = new Matrix();
Matrix savedMatrixZoom = new Matrix();
// State of motion event
static final int NONE = 0;
static final int PAN = 1;
static final int PINCH_ZOOM = 2;
int mode = NONE;
// The first pointer down
PointF start = new PointF();
// The center of the image (Failed to track it when the image has been moved)
PointF centerOfImage = new PointF();
// oldDist is the Cartesian distance between the first two pointers when the second pointer goes down
float oldDist = 1f;
// MIN_SCALE/MAX_SCALE is the min/max scale factor
private final float MIN_SCALE = 0.5f;
private final float MAX_SCALE = 3.0f;
// TOUCH_SENSITIVE is the minimum Cartesian distance between the first two pointers that triggers the pinch zoom
private final float TOUCH_SENSITIVE = 10.0f;
private final float SPACING_LEFT_AND_RIGHT = 30.0f;
private final float SPACING_TOP_AND_BOTTOM = 30.0f;
// The ImageView widget
private ImageView image_viewer;
// The progress bar shows what current progress is before the image downloading is completed
private ProgressBar progressbar;
private LinearLayout progressbar_wrapper;
// An async task that downloads the image from a given URL
private DownloadFilesTask downloadFilesTask;
private class DownloadFilesTask extends AsyncTask<String, Integer, Bitmap> {
protected Bitmap doInBackground(String... urls) {
InputStream input = null;
OutputStream output = null;
try {
URL url = new URL(urls[0]);
URLConnection connection = url.openConnection();
connection.connect();
int lenghtOfFile = connection.getContentLength();
// download the file
InputStream is = connection.getInputStream();
BufferedInputStream bis = new BufferedInputStream(is, 8190);
ByteArrayBuffer baf = new ByteArrayBuffer(50);
int current = 0;
while ((current = bis.read()) != -1) {
baf.append((byte)current);
}
byte[] imageData = baf.toByteArray();
Bitmap bmp = BitmapFactory.decodeByteArray(imageData, 0, imageData.length);
//final int percent = (int) (total * 100 / lenghtOfFile);
//publishProgress(percent);
//lenghtOfFile
return bmp;
} catch (Exception e) {
} finally {
try {
if (output != null)
output.close();
output = null;
} catch (IOException e) {
}
try {
if (input != null)
input.close();
input = null;
} catch (IOException e) {
}
}
return null;
} // protected Bitmap doInBackground(String... urls) {}
protected void onProgressUpdate(Integer... progress) {
progressbar.setProgress(progress[0]);
}
protected void onPostExecute(Bitmap bmp) {
if (bmp != null) {
final AlphaAnimation animationAfter = new AlphaAnimation(0.0f, 1.0f);
animationAfter.setDuration(300);
animationAfter.setFillEnabled(true);
animationAfter.setFillAfter(true);
image_viewer.setAnimation(animationAfter);
image_viewer.setImageBitmap(bmp);
ViewTreeObserver viewTreeObserver = image_viewer.getViewTreeObserver();
viewTreeObserver.addOnGlobalLayoutListener(new OnGlobalLayoutListener() {
#Override
public void onGlobalLayout() {
Drawable drawable = image_viewer.getDrawable();
int dx = (image_viewer.getWidth() - drawable.getIntrinsicWidth()) / 2;
int dy = (image_viewer.getHeight() - drawable.getIntrinsicHeight()) / 2;
matrix.postTranslate(dx, dy);
image_viewer.setImageMatrix(matrix);
}
});
progressbar_wrapper.setVisibility(View.GONE);
image_viewer.setVisibility(View.VISIBLE);
} else {
android.os.Handler handler = new android.os.Handler();
handler.postDelayed(new Runnable() {
#Override
public void run() {
finish();
}
}, 2000);
}
} // End of protected void onPostExecute(Bitmap bmp) {}
} // End of private class DownloadFilesTask extends AsyncTask<String, Integer, Bitmap> {}
// These are activity life cycle handling
// onCreate
#Override
public void onCreate(Bundle savedInstanceState) {
//setTheme(R.style.HubTheme);
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_image_viewer);
progressbar_wrapper = (LinearLayout) findViewById(R.id.progressbar_wrapper);
image_viewer = (ImageView) findViewById(R.id.image_viewer);
progressbar = (ProgressBar) findViewById(R.id.progressbar);
image_viewer.setOnTouchListener(new MyOnTouchListener());
final String uriForImage = getIntent().getStringExtra("url");
downloadFilesTask = new DownloadFilesTask();
downloadFilesTask.execute(uriForImage);
}
// onStart
#Override
protected void onStart() {
super.onStart();
}
// onResume
#Override
protected void onResume() {
super.onResume();
}
// onPause
#Override
protected void onPause() {
super.onPause();
}
// onStop
#Override
protected void onStop() {
super.onStop();
}
// onRestart
#Override
protected void onRestart() {
super.onRestart();
}
// onDestroy
#Override
protected void onDestroy() {
super.onDestroy();
if (imageFile != null) {
try {
Drawable drawable = image_viewer.getDrawable();
BitmapDrawable bitmapDrawable = (BitmapDrawable) drawable;
Bitmap bitmap = bitmapDrawable.getBitmap();
bitmap.recycle();
drawable = null;
bitmapDrawable = null;
bitmap = null;
} catch (NullPointerException e) {
}
}
}
// onKeyDown
#Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
this.onBackPressed();
}
return true;
}
// onBackPressed
public void onBackPressed() {
finish();
}
// onConfigurationChanged
#Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
if (newConfig.orientation == Configuration.ORIENTATION_LANDSCAPE) {
} else if (newConfig.orientation == Configuration.ORIENTATION_PORTRAIT) {
}
}
// onLowMemory
#Override
public void onLowMemory() {
super.onLowMemory();
finish();
}
// Get the Cartesian distance between the first two pointers
private float spacing(MotionEvent event) {
float x = 0;
float y = 0;
try {
Method getX = MotionEvent.class.getMethod("getX", Integer.TYPE);
Method getY = MotionEvent.class.getMethod("getY", Integer.TYPE);
// x = event.getX(0) - event.getX(1);
// y = event.getY(0) - event.getY(1);
float x1 = (Float) getX.invoke(event, 0);
float x2 = (Float) getX.invoke(event, 1);
x = x1 - x2;
float y1 = (Float) getY.invoke(event, 0);
float y2 = (Float) getY.invoke(event, 1);
y = y1 - y2;
} catch (SecurityException e) {
} catch (NoSuchMethodException e) {
} catch (IllegalArgumentException e) {
} catch (IllegalAccessException e) {
} catch (InvocationTargetException e) {
}
return FloatMath.sqrt(x * x + y * y);
}
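// Note (sketch): on API level 5 and above the reflection above is unnecessary;
// the commented-out direct calls are enough, e.g.
//     float x = event.getX(0) - event.getX(1);
//     float y = event.getY(0) - event.getY(1);
//     return FloatMath.sqrt(x * x + y * y);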
// Some flags set manually for convenience
private final int MotionEvent_ACTION_MASK = 255; // that is 0xFF or 11111111
private final int MotionEvent_ACTION_POINTER_DOWN = 5; // that is 101
private final int MotionEvent_ACTION_POINTER_UP = 6; // that is 110
private class MyOnTouchListener implements OnTouchListener {
// onTouch
#Override
public boolean onTouch(View v, MotionEvent event) {
ImageView view = (ImageView) v;
Drawable drawable = view.getDrawable();
if (drawable == null)
return true;
switch (event.getAction() & MotionEvent_ACTION_MASK) {
case MotionEvent.ACTION_DOWN:
savedMatrix.set(matrix);
start.set(event.getX(), event.getY());
mode = PAN;
break;
case MotionEvent_ACTION_POINTER_DOWN:
oldDist = spacing(event);
if (oldDist > TOUCH_SENSITIVE) {
savedMatrix.set(matrix);
mode = PINCH_ZOOM;
}
break;
case MotionEvent.ACTION_UP:
case MotionEvent_ACTION_POINTER_UP:
mode = NONE;
break;
case MotionEvent.ACTION_MOVE:
if (mode == PAN) {
// /////////////////////////////////////////
matrix.set(savedMatrix);
float[] matrixValues = new float[9];
Rect viewRect = new Rect(view.getLeft(), view.getTop(), view.getRight(), view.getBottom());
matrix.getValues(matrixValues);
float currentY = matrixValues[Matrix.MTRANS_Y];
float currentX = matrixValues[Matrix.MTRANS_X];
float currentScale = matrixValues[Matrix.MSCALE_X];
float currentHeight = drawable.getIntrinsicHeight() * currentScale;
float currentWidth = drawable.getIntrinsicWidth() * currentScale;
float dx = event.getX() - start.x;
float dy = event.getY() - start.y;
float newX = currentX + dx;
float newY = currentY + dy;
RectF drawingRect = new RectF(newX, newY, newX + currentWidth, newY + currentHeight);
float diffUp = Math.min(viewRect.bottom - drawingRect.bottom, viewRect.top - drawingRect.top) - SPACING_TOP_AND_BOTTOM;
float diffDown = Math.max(viewRect.bottom - drawingRect.bottom, viewRect.top - drawingRect.top) + SPACING_TOP_AND_BOTTOM;
float diffLeft = Math.min(viewRect.left - drawingRect.left, viewRect.right - drawingRect.right) - SPACING_LEFT_AND_RIGHT;
float diffRight = Math.max(viewRect.left - drawingRect.left, viewRect.right - drawingRect.right) + SPACING_LEFT_AND_RIGHT;
if (diffUp > 0) {
dy += diffUp;
}
if (diffDown < 0) {
dy += diffDown;
}
if (diffLeft > 0) {
dx += diffLeft;
}
if (diffRight < 0) {
dx += diffRight;
}
matrix.postTranslate(dx, dy);
} else if (mode == PINCH_ZOOM) {
float newDist = spacing(event);
if (newDist > TOUCH_SENSITIVE) {
matrix.set(savedMatrix);
float scale = newDist / oldDist;
// Get the center of the image. (Failed to get it when image has been moved)
Rect viewRect = new Rect(view.getLeft(), view.getTop(), view.getRight(), view.getBottom());
centerOfImage.x = viewRect.centerX();
centerOfImage.y = viewRect.centerY();
float[] f = new float[9];
Matrix tmp = new Matrix(matrix);
tmp.postScale(scale, scale, centerOfImage.x, centerOfImage.y);
tmp.getValues(f);
float scaleX = f[Matrix.MSCALE_X];
if (scaleX < MIN_SCALE || scaleX > MAX_SCALE) {
matrix.set(savedMatrixZoom);
} else {
matrix.postScale(scale, scale, centerOfImage.x, centerOfImage.y);
savedMatrixZoom.set(matrix);
}
}
}
break;
}
view.setImageMatrix(matrix);
return true;
} // End of public boolean onTouch(View v, MotionEvent event) {}
} // End of private class MyOnTouchListener implements OnTouchListener {}
} // End of public class ActivityImageViewer extends Activity {}
You can use ScaleGestureDetector for pinch-to-zoom. Instead of implementing pinch-to-zoom from scratch, you can do something like the following:
public class MyCustomView extends View {
private ScaleGestureDetector mScaleDetector;
private float mScaleFactor = 1.f;
public MyCustomView(Context context){
...
// View code goes here
...
mScaleDetector = new ScaleGestureDetector(context, new ScaleListener());
}
#Override
public boolean onTouchEvent(MotionEvent ev) {
// Let the ScaleGestureDetector inspect all events.
mScaleDetector.onTouchEvent(ev);
return true;
}
#Override
public void onDraw(Canvas canvas) {
super.onDraw(canvas);
canvas.save();
canvas.scale(mScaleFactor, mScaleFactor);
...
// onDraw() code goes here
...
canvas.restore();
}
private class ScaleListener
extends ScaleGestureDetector.SimpleOnScaleGestureListener {
#Override
public boolean onScale(ScaleGestureDetector detector) {
mScaleFactor *= detector.getScaleFactor();
// Don't let the object get too small or too large.
mScaleFactor = Math.max(0.1f, Math.min(mScaleFactor, 5.0f));
invalidate();
return true;
}
}
}
Note: your translation (the panning) also belongs in the onDraw() method, alongside the scale.
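To add panning on top of that, here is a minimal sketch layered on the same custom view. mPosX/mPosY, mLastX/mLastY and the single-pointer drag handling are assumptions; getFocusX()/getFocusY() on ScaleGestureDetector give the pinch center:
private float mPosX, mPosY;        // accumulated pan offset
private float mLastX, mLastY;      // last touch position while dragging
@Override
public boolean onTouchEvent(MotionEvent ev) {
    mScaleDetector.onTouchEvent(ev);
    switch (ev.getActionMasked()) {
        case MotionEvent.ACTION_DOWN:
            mLastX = ev.getX();
            mLastY = ev.getY();
            break;
        case MotionEvent.ACTION_MOVE:
            if (!mScaleDetector.isInProgress()) {  // don't pan in the middle of a pinch
                mPosX += ev.getX() - mLastX;
                mPosY += ev.getY() - mLastY;
                invalidate();
            }
            mLastX = ev.getX();
            mLastY = ev.getY();
            break;
    }
    return true;
}
@Override
protected void onDraw(Canvas canvas) {
    super.onDraw(canvas);
    canvas.save();
    canvas.translate(mPosX, mPosY);  // pan
    canvas.scale(mScaleFactor, mScaleFactor,
            mScaleDetector.getFocusX(), mScaleDetector.getFocusY());  // zoom around the fingers
    // draw the image/content here
    canvas.restore();
}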

How to reset position of ImageView?

In my program I can move an image across the screen. Sometimes I lose sight of it and cannot find it. I want to add a function that puts the image back in its original position.
My own approach, which does not work (and the related question on how to make it work):
ImageView iv = current;
Matrix matrix = iv.getImageMatrix();
float[] values = new float[9];
matrix.getValues(values);
float a = values[Matrix.MTRANS_X];
float b = values[Matrix.MTRANS_Y];
matrix.postTranslate(-a, -b);
iv.setImageMatrix(matrix);
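A simpler route is to reset the matrix completely and re-fit the drawable to the view, instead of undoing only the translation (which leaves any scale in place). A minimal sketch, assuming the ImageView already has its drawable set and is using ScaleType.MATRIX:
void resetImage(ImageView iv) {
    Drawable d = iv.getDrawable();
    if (d == null) return;
    Matrix m = new Matrix();
    // Map the drawable's bounds onto the view's bounds, centered and scaled to fit.
    RectF src = new RectF(0, 0, d.getIntrinsicWidth(), d.getIntrinsicHeight());
    RectF dst = new RectF(0, 0, iv.getWidth(), iv.getHeight());
    m.setRectToRect(src, dst, Matrix.ScaleToFit.CENTER);
    iv.setImageMatrix(m);
}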
EDIT: Latest code
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.ExecutionException;
import ua.mirkvartir.android.frontend.UILApplication;
import android.app.Activity;
import android.app.Fragment;
import android.app.ProgressDialog;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.graphics.PointF;
import android.graphics.drawable.BitmapDrawable;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.view.MotionEventCompat;
import android.util.FloatMath;
import android.util.Log;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnTouchListener;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.Gallery;
import android.widget.ImageView;
import android.widget.ImageView.ScaleType;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.TextView;
public class ImageShowActivity extends Activity {
int p = 0;
Activity app;
ImageAdapterr ia;
ImageView imView;
int imgPos = 0;
Bitmap bm = null;
int geg = 90;
public int width = 0;
public int hight = 0;
Gallery gallery;
Matrix matrix = new Matrix();
Matrix shift = new Matrix();
private int INVALID_POINTER_ID = -1;
private int mActivePointerId = INVALID_POINTER_ID;
private ScaleGestureDetector mScaleDetector;
private float mLastTouchX = 0;
private float mLastTouchY = 0;
private float mPosX = 0;
private float mPosY = 0;
ImageView current;
public List<ImageView> images;
public int reset = 0;
HashMap<Integer, Bitmap> map = new HashMap<Integer, Bitmap>();
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.image_show);
app = this;
images = new ArrayList<ImageView>(
UILApplication.photo_buffer_big.size());
Log.d("images", UILApplication.photo_buffer_big.size() + "");
Log.d("images", images.size() + "");
gallery = (Gallery) findViewById(R.id.gallery);
// EDGES ARE INVISIBLE
gallery.setHorizontalFadingEdgeEnabled(false);
ia = new ImageAdapterr(this);
gallery.setAdapter(ia);
final int length = UILApplication.photo_buffer_big.size();
Button back_btn = (Button) findViewById(R.id.analitics_back_btn);
back_btn.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
finish();
}
});
final TextView img_counter_tv = (TextView) findViewById(R.id.img_counter);
img_counter_tv.setText(p + 1 + "/" + length);
Button nextButton = (Button) findViewById(R.id.next_btn);
nextButton.setOnClickListener(new OnClickListener() {
#Override
public void onClick(View v) {
if (p < length - 1) {
p++;
} else {
p = 0;
}
gallery.setSelection(p, true);
img_counter_tv.setText(p + 1 + "/" + length);
}
});
Button backButton = (Button) findViewById(R.id.back_btn);
backButton.setOnClickListener(new OnClickListener() {
#Override
public void onClick(View v) {
if (p == 0) {
p = length - 1;
} else {
p--;
}
gallery.setSelection(p, true);
img_counter_tv.setText(p + 1 + "/" + length);
}
});
}
public void rotateS(View v) {
ImageView iv = current;
Bitmap b = ((BitmapDrawable) iv.getDrawable()).getBitmap();
Matrix matrix = new Matrix();
matrix.postRotate(geg);
Bitmap bMapRotate = Bitmap.createBitmap(b, 0, 0, b.getWidth(),
b.getHeight(), matrix, true);
iv.setImageBitmap(bMapRotate);
}
public void extendS(View v) {
reset = 1;
Log.d("restart", "yes");
runOnUiThread(new Runnable() {
public void run() {
View viewToUpdate = gallery.getChildAt(p - gallery.getFirstVisiblePosition());
viewToUpdate.invalidate();
// ia.notifyDataSetChanged();
}
});
}
public static Bitmap resizeBitmap(Bitmap photo, float x, float y) {
try {
// get current bitmap width and height
int width = photo.getWidth();
int height = photo.getHeight();
// determine how much to scale
float scaleWidth = x / width;
float scaleHeight = y / height;
Log.d("aspect3", "w: " + scaleWidth + " h: " + scaleHeight);
// create the matrix for the manipulation
Matrix matrix = new Matrix();
// resize the bitmap
matrix.postScale(scaleWidth, scaleHeight);
// recreate the new bitmap
Bitmap resizebitmap = Bitmap.createBitmap(photo, 0, 0, width,
height, matrix, false);
return resizebitmap;
} catch (NullPointerException e) {
e.printStackTrace();
} catch (OutOfMemoryError e) {
e.printStackTrace();
System.gc();
}
return null;
}
class ImageAdapterr extends BaseAdapter {
/** The parent context */
private Context myContext;
/** Simple Constructor saving the 'parent' context. */
public ImageAdapterr(Context c) {
this.myContext = c;
}
Matrix savedMatrix = new Matrix();
/** Returns the amount of images we have defined. */
public int getCount() {
return UILApplication.photo_buffer_big.size();
}
/* Use the array-Positions as unique IDs */
public Object getItem(int position) {
return position;
}
public long getItemId(int position) {
return position;
}
/**
* Returns a new ImageView to be displayed, depending on the position
* passed.
*/
public View getView(final int position, View convertView,
ViewGroup parent) {
ImageView imView = new ImageView(this.myContext);
current = imView;
imgPos = position;
if (bm==null){
AsyncLoad imLoad = new AsyncLoad();
imLoad.execute();
try {
bm = imLoad.get();
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ExecutionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
if (bm != null) {
imView.setImageBitmap(bm);
} else if (bm == null) {
imView.setImageResource(R.drawable.logo);
}
/* Image should be scaled as width/height are set. */
imView.setScaleType(ImageView.ScaleType.FIT_CENTER);
/* Set the Width/Height of the ImageView. */
imView.setLayoutParams(new Gallery.LayoutParams(
LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
imView.setOnTouchListener(new OnTouchListener() {
private static final String TAG = "Touch";
// These matrices will be used to move and zoom image
PointF start = new PointF();
public PointF mid = new PointF();
// We can be in one of these 3 states
public static final int NONE = 0;
public static final int DRAG = 1;
public static final int ZOOM = 2;
public int mode = NONE;
float oldDist;
public boolean onTouch(View v, MotionEvent event) {
ImageView view = (ImageView) v;
view.setScaleType(ImageView.ScaleType.MATRIX);
switch (event.getAction() & MotionEvent.ACTION_MASK) {
case MotionEvent.ACTION_DOWN:
savedMatrix.set(matrix);
start.set(event.getX(), event.getY());
Log.d(TAG, "mode=DRAG");
mode = DRAG;
break;
case MotionEvent.ACTION_POINTER_DOWN:
oldDist = spacing(event);
Log.d(TAG, "oldDist=" + oldDist);
if (oldDist > 10f) {
savedMatrix.set(matrix);
midPoint(mid, event);
mode = ZOOM;
Log.d(TAG, "mode=ZOOM");
}
break;
case MotionEvent.ACTION_MOVE:
if (mode == DRAG) {
matrix.set(savedMatrix);
matrix.postTranslate(event.getX() - start.x,
event.getY() - start.y);
} else if (mode == ZOOM) {
float newDist = spacing(event);
Log.d(TAG, "newDist=" + newDist);
if (newDist > 10f) {
matrix.set(savedMatrix);
float scale = newDist / oldDist;
matrix.postScale(scale, scale, mid.x, mid.y);
}
}
break;
case MotionEvent.ACTION_UP:
case MotionEvent.ACTION_POINTER_UP:
mode = NONE;
Log.d(TAG, "mode=NONE");
break;
}
// Perform the transformation
Log.d("point",
(event.getX() - start.x) + " "
+ (event.getY() - start.y));
// Log.d("point",start.x +" "+start.y);
float[] values = new float[9];
matrix.getValues(values);
float a = values[Matrix.MTRANS_X];
float b = values[Matrix.MTRANS_Y];
Log.d("touch matrix", values[Matrix.MPERSP_0] + " "
+ values[Matrix.MPERSP_1] + " "
+ values[Matrix.MPERSP_2]);
Log.d("touch matrix scale", values[Matrix.MSCALE_X] + " "
+ values[Matrix.MSCALE_Y]);
Log.d("touch matrix scew", values[Matrix.MSKEW_X] + " "
+ values[Matrix.MSKEW_Y]);
Log.d("touch matrix trans", values[Matrix.MTRANS_X] + " "
+ values[Matrix.MTRANS_Y]);
if (reset == 1) {
matrix.reset();
savedMatrix.reset();
}
view.setImageMatrix(matrix);
reset = 0;
// images.set(position, view);
return true; // indicate event was handled
}
private float spacing(MotionEvent event) {
float x = event.getX(0) - event.getX(1);
float y = event.getY(0) - event.getY(1);
return FloatMath.sqrt(x * x + y * y);
}
private void midPoint(PointF point, MotionEvent event) {
float x = event.getX(0) + event.getX(1);
float y = event.getY(0) + event.getY(1);
point.set(x / 2, y / 2);
}
});
return imView;
}
/**
* Returns the size (0.0f to 1.0f) of the views depending on the
* 'offset' to the center.
*/
public float getScale(boolean focused, int offset) {
/* Formula: 1 / (2 ^ offset) */
return Math.max(0, 1.0f / (float) Math.pow(2, Math.abs(offset)));
}
}
class AsyncLoad extends AsyncTask<Void, Void, Bitmap> {
ProgressDialog pd;
#Override
protected void onPreExecute() {
pd = new ProgressDialog(app);
pd.setOwnerActivity(app);
pd.setTitle("Идет загрузка...");
pd.setCancelable(true);
pd.show();
}
#Override
protected Bitmap doInBackground(Void... arg0) {
// TODO Auto-generated method stub
try {
/*
* Open a new URL and get the InputStream to load data from it.
*/
URL aURL = new URL(UILApplication.photo_buffer_big.get(imgPos));
URLConnection conn = aURL.openConnection();
conn.connect();
InputStream is = conn.getInputStream();
/* Buffered is always good for a performance plus. */
BufferedInputStream bis = new BufferedInputStream(is);
/* Decode url-data to a bitmap. */
bm = BitmapFactory.decodeStream(bis);
bis.close();
is.close();
/* Apply the Bitmap to the ImageView that will be returned. */
// imView.setImageBitmap(bm);
} catch (IOException e) {
// imView.setImageResource(R.drawable.logo);
bm = null;
Log.e("DEBUGTAG", "Remote Image Exception", e);
}
map.put(imgPos, bm);
Log.d("map", map.size() + "");
return bm;
}
#Override
protected void onPostExecute(Bitmap arg0) {
pd.dismiss();
}
}
}
XML
<RelativeLayout
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:background="#drawable/top_tab_bg"
android:padding="10dp" >
<Button
android:id="#+id/analitics_back_btn"
style="#style/ButtonText"
android:layout_marginRight="5dip"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_toLeftOf="#+id/back_btn"
android:background="#drawable/btn_clk_selector"
android:text="Назад" />
<Button
android:id="#+id/btn_rotate"
style="#style/ButtonText"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginRight="5dip"
android:layout_toLeftOf="#+id/analitics_back_btn"
android:background="#drawable/btn_clk_selector"
android:onClick="rotateS"
android:text="Повернуть" />
<Button
android:id="#+id/btn_ex"
style="#style/ButtonText"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginRight="5dip"
android:layout_toLeftOf="#+id/btn_rotate"
android:background="#drawable/btn_clk_selector"
android:onClick="extendS"
android:text="рас" />
<Button
android:id="#+id/next_btn"
style="#style/ButtonText"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentRight="true"
android:layout_marginLeft="5dp"
android:background="#drawable/next_img_btn"
android:paddingRight="10dp" />
<TextView
android:id="#+id/img_counter"
style="#style/ButtonText"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_centerVertical="true"
android:layout_toLeftOf="#+id/next_btn"
android:text="1/10" />
<Button
android:id="#+id/back_btn"
style="#style/ButtonText"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginRight="5dp"
android:layout_toLeftOf="#+id/img_counter"
android:background="#drawable/back_img_btn" />
</RelativeLayout>
<RelativeLayout
android:id="#+id/gal"
android:layout_width="fill_parent"
android:layout_height="fill_parent" >
<Gallery
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="#+id/gallery"
android:adjustViewBounds="true"
android:spacing="10dp"
/>
</RelativeLayout>
</LinearLayout>
EDIT: Problem with the new code: the image is not reset right after extendS() is called; it is only reset after the extendS() call plus a tap on the screen. Also, the image is only partially reset: it goes back to its original position and scale, but at its original resolution, so it usually becomes much smaller than the screen, while I need it to fill the screen.
Use the code below:
Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.drawable.image);
RelativeLayout layout = (RelativeLayout) findViewById(R.id.layout);
LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(
LinearLayout.LayoutParams.FILL_PARENT,
LinearLayout.LayoutParams.WRAP_CONTENT);
imageView = new ImageView(this);
imageView.setLayoutParams(params);
imageView.setImageBitmap(bitmap);
layout.setGravity(Gravity.CENTER_VERTICAL | Gravity.TOP);
layout.addView(imageView);
This will position the ImageView at the center of the RelativeLayout that contains it.
EDIT: See the code above: I changed RelativeLayout.LayoutParams to LinearLayout.LayoutParams, because it is in fact a LinearLayout that contains the RelativeLayout. Now it should work.
Try using this in your code to place the ImageView wherever you want on the screen:
iconSMS.setImageResource(R.drawable.ic_launcher_smsmms);
LayoutParams paramsIconSMS = new LayoutParams(iconwidth,iconheight);
iconwidthspacing=(int)Math.round(((float)width/480)*(float)45);
iconheightspacing=(int)Math.round(((float)height/800)*(float)520);
paramsIconSMS.setMargins(iconwidthspacing, iconheightspacing, 0, 0);
iconSMS.setId(204);
iconSMS.setLayoutParams(paramsIconSMS);
Here,
iconwidth=(int)Math.round(width*iconwidthf);
iconheight=(int)Math.round(height*iconheightf);
and width and height variables are,
requestWindowFeature(Window.FEATURE_NO_TITLE);
DisplayMetrics metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
width = metrics.widthPixels;
height = metrics.heightPixels;
width and height give you the screen size of your phone in pixels.
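As a worked example (the screen size here is an assumption, not from the answer): on a 1080 × 1920 px screen, iconwidthspacing = round((1080 / 480) * 45) = 101 px and iconheightspacing = round((1920 / 800) * 520) = 1248 px, i.e. the 45 px and 520 px offsets designed against a 480 × 800 reference screen are scaled up proportionally.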
