Add a library to my Android project - Java

I want to use a library in my Android project.
When I add it as an external library, everything works.
But when I add the .jar to my "libs" folder, I get this error:
04-08 10:39:47.879: E/AndroidRuntime(13453): java.lang.RuntimeException: Unable to start activity ComponentInfo{fr.allbrary/eu.janmuller.android.simplecropimage.CropImage}: java.lang.NullPointerException
04-08 10:39:47.879: E/AndroidRuntime(13453): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2245)
04-08 10:39:47.879: E/AndroidRuntime(13453): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:2295)
04-08 10:39:47.879: E/AndroidRuntime(13453): at android.app.ActivityThread.access$700(ActivityThread.java:150)
04-08 10:39:47.879: E/AndroidRuntime(13453): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1280)
04-08 10:39:47.879: E/AndroidRuntime(13453): at android.os.Handler.dispatchMessage(Handler.java:99)
04-08 10:39:47.879: E/AndroidRuntime(13453): at android.os.Looper.loop(Looper.java:176)
04-08 10:39:47.879: E/AndroidRuntime(13453): at android.app.ActivityThread.main(ActivityThread.java:5279)
04-08 10:39:47.879: E/AndroidRuntime(13453): at java.lang.reflect.Method.invokeNative(Native Method)
04-08 10:39:47.879: E/AndroidRuntime(13453): at java.lang.reflect.Method.invoke(Method.java:511)
04-08 10:39:47.879: E/AndroidRuntime(13453): at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1102)
04-08 10:39:47.879: E/AndroidRuntime(13453): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:869)
04-08 10:39:47.879: E/AndroidRuntime(13453): at dalvik.system.NativeStart.main(Native Method)
04-08 10:39:47.879: E/AndroidRuntime(13453): Caused by: java.lang.NullPointerException
04-08 10:39:47.879: E/AndroidRuntime(13453): at eu.janmuller.android.simplecropimage.CropImage.onCreate(CropImage.java:167)
04-08 10:39:47.879: E/AndroidRuntime(13453): at android.app.Activity.performCreate(Activity.java:5267)
04-08 10:39:47.879: E/AndroidRuntime(13453): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1097)
04-08 10:39:47.879: E/AndroidRuntime(13453): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2209)
04-08 10:39:47.879: E/AndroidRuntime(13453): ... 11 more
Code:
/**
* The activity can crop specific region of interest from an image.
*/
public class CropImage extends MonitoredActivity {
final int IMAGE_MAX_SIZE = 1024;
private static final String TAG = "CropImage";
public static final String IMAGE_PATH = "image-path";
public static final String SCALE = "scale";
public static final String ORIENTATION_IN_DEGREES = "orientation_in_degrees";
public static final String ASPECT_X = "aspectX";
public static final String ASPECT_Y = "aspectY";
public static final String OUTPUT_X = "outputX";
public static final String OUTPUT_Y = "outputY";
public static final String SCALE_UP_IF_NEEDED = "scaleUpIfNeeded";
public static final String CIRCLE_CROP = "circleCrop";
public static final String RETURN_DATA = "return-data";
public static final String RETURN_DATA_AS_BITMAP = "data";
public static final String ACTION_INLINE_DATA = "inline-data";
// These are various options that can be specified in the intent.
private Bitmap.CompressFormat mOutputFormat = Bitmap.CompressFormat.JPEG;
private Uri mSaveUri = null;
private boolean mDoFaceDetection = true;
private boolean mCircleCrop = false;
private final Handler mHandler = new Handler();
private int mAspectX;
private int mAspectY;
private int mOutputX;
private int mOutputY;
private boolean mScale;
private CropImageView mImageView;
private ContentResolver mContentResolver;
private Bitmap mBitmap;
private String mImagePath;
boolean mWaitingToPick; // Whether we are waiting for the user to pick a face.
boolean mSaving; // Whether the "save" button is already clicked.
HighlightView mCrop;
// These options specify the output image size and whether we should
// scale the output to fit it (or just crop it).
private boolean mScaleUp = true;
private final BitmapManager.ThreadSet mDecodingThreads =
new BitmapManager.ThreadSet();
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
mContentResolver = getContentResolver();
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.cropimage);
mImageView = (CropImageView) findViewById(R.id.image);
showStorageToast(this);
Intent intent = getIntent();
Bundle extras = intent.getExtras();
if (extras != null) {
if (extras.getString(CIRCLE_CROP) != null) {
if (Build.VERSION.SDK_INT > Build.VERSION_CODES.HONEYCOMB) {
mImageView.setLayerType(View.LAYER_TYPE_SOFTWARE, null);
}
mCircleCrop = true;
mAspectX = 1;
mAspectY = 1;
}
mImagePath = extras.getString(IMAGE_PATH);
mSaveUri = getImageUri(mImagePath);
mBitmap = getBitmap(mImagePath);
if (extras.containsKey(ASPECT_X) && extras.get(ASPECT_X) instanceof Integer) {
mAspectX = extras.getInt(ASPECT_X);
} else {
throw new IllegalArgumentException("aspect_x must be integer");
}
if (extras.containsKey(ASPECT_Y) && extras.get(ASPECT_Y) instanceof Integer) {
mAspectY = extras.getInt(ASPECT_Y);
} else {
throw new IllegalArgumentException("aspect_y must be integer");
}
mOutputX = extras.getInt(OUTPUT_X);
mOutputY = extras.getInt(OUTPUT_Y);
mScale = extras.getBoolean(SCALE, true);
mScaleUp = extras.getBoolean(SCALE_UP_IF_NEEDED, true);
}
if (mBitmap == null) {
Log.d(TAG, "finish!!!");
finish();
return;
}
// Make UI fullscreen.
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
findViewById(R.id.discard).setOnClickListener(
new View.OnClickListener() {
public void onClick(View v) {
setResult(RESULT_CANCELED);
finish();
}
});
findViewById(R.id.save).setOnClickListener(
new View.OnClickListener() {
public void onClick(View v) {
try {
onSaveClicked();
} catch (Exception e) {
finish();
}
}
});
findViewById(R.id.rotateLeft).setOnClickListener(
new View.OnClickListener() {
public void onClick(View v) {
mBitmap = Util.rotateImage(mBitmap, -90);
RotateBitmap rotateBitmap = new RotateBitmap(mBitmap);
mImageView.setImageRotateBitmapResetBase(rotateBitmap, true);
mRunFaceDetection.run();
}
});
findViewById(R.id.rotateRight).setOnClickListener(
new View.OnClickListener() {
public void onClick(View v) {
mBitmap = Util.rotateImage(mBitmap, 90);
RotateBitmap rotateBitmap = new RotateBitmap(mBitmap);
mImageView.setImageRotateBitmapResetBase(rotateBitmap, true);
mRunFaceDetection.run();
}
});
startFaceDetection();
}
private Uri getImageUri(String path) {
return Uri.fromFile(new File(path));
}
private Bitmap getBitmap(String path) {
Uri uri = getImageUri(path);
InputStream in = null;
try {
in = mContentResolver.openInputStream(uri);
//Decode image size
BitmapFactory.Options o = new BitmapFactory.Options();
o.inJustDecodeBounds = true;
BitmapFactory.decodeStream(in, null, o);
in.close();
int scale = 1;
if (o.outHeight > IMAGE_MAX_SIZE || o.outWidth > IMAGE_MAX_SIZE) {
scale = (int) Math.pow(2, (int) Math.round(Math.log(IMAGE_MAX_SIZE / (double) Math.max(o.outHeight, o.outWidth)) / Math.log(0.5)));
}
BitmapFactory.Options o2 = new BitmapFactory.Options();
o2.inSampleSize = scale;
in = mContentResolver.openInputStream(uri);
Bitmap b = BitmapFactory.decodeStream(in, null, o2);
in.close();
return b;
} catch (FileNotFoundException e) {
Log.e(TAG, "file " + path + " not found");
} catch (IOException e) {
Log.e(TAG, "file " + path + " not found");
}
return null;
}
private void startFaceDetection() {
if (isFinishing()) {
return;
}
mImageView.setImageBitmapResetBase(mBitmap, true);
Util.startBackgroundJob(this, null,
"Please wait\u2026",
new Runnable() {
public void run() {
final CountDownLatch latch = new CountDownLatch(1);
final Bitmap b = mBitmap;
mHandler.post(new Runnable() {
public void run() {
if (b != mBitmap && b != null) {
mImageView.setImageBitmapResetBase(b, true);
mBitmap.recycle();
mBitmap = b;
}
if (mImageView.getScale() == 1F) {
mImageView.center(true, true);
}
latch.countDown();
}
});
try {
latch.await();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
mRunFaceDetection.run();
}
}, mHandler);
}
private void onSaveClicked() throws Exception {
// TODO this code needs to change to use the decode/crop/encode single
// step API so that the whole (possibly large) bitmap
// doesn't have to be read into memory
if (mSaving) return;
if (mCrop == null) {
return;
}
mSaving = true;
Rect r = mCrop.getCropRect();
int width = r.width();
int height = r.height();
// If we are circle cropping, we want alpha channel, which is the
// third param here.
Bitmap croppedImage;
try {
croppedImage = Bitmap.createBitmap(width, height,
mCircleCrop ? Bitmap.Config.ARGB_8888 : Bitmap.Config.RGB_565);
} catch (Exception e) {
throw e;
}
if (croppedImage == null) {
return;
}
{
Canvas canvas = new Canvas(croppedImage);
Rect dstRect = new Rect(0, 0, width, height);
canvas.drawBitmap(mBitmap, r, dstRect, null);
}
if (mCircleCrop) {
// OK, so what's all this about?
// Bitmaps are inherently rectangular but we want to return
// something that's basically a circle. So we fill in the
// area around the circle with alpha. Note the all-important
// PorterDuff.Mode.CLEAR.
Canvas c = new Canvas(croppedImage);
Path p = new Path();
p.addCircle(width / 2F, height / 2F, width / 2F,
Path.Direction.CW);
c.clipPath(p, Region.Op.DIFFERENCE);
c.drawColor(0x00000000, PorterDuff.Mode.CLEAR);
}
/* If the output is required to a specific size then scale or fill */
if (mOutputX != 0 && mOutputY != 0) {
if (mScale) {
/* Scale the image to the required dimensions */
Bitmap old = croppedImage;
croppedImage = Util.transform(new Matrix(),
croppedImage, mOutputX, mOutputY, mScaleUp);
if (old != croppedImage) {
old.recycle();
}
} else {
/* Don't scale the image; crop it to the requested size.
* Create a new image with the cropped image in the center and
* the extra space filled.
*/
// Don't scale the image but instead fill it so it's the
// required dimension
Bitmap b = Bitmap.createBitmap(mOutputX, mOutputY,
Bitmap.Config.RGB_565);
Canvas canvas = new Canvas(b);
Rect srcRect = mCrop.getCropRect();
Rect dstRect = new Rect(0, 0, mOutputX, mOutputY);
int dx = (srcRect.width() - dstRect.width()) / 2;
int dy = (srcRect.height() - dstRect.height()) / 2;
/* If the srcRect is too big, use the center part of it. */
srcRect.inset(Math.max(0, dx), Math.max(0, dy));
/* If the dstRect is too big, use the center part of it. */
dstRect.inset(Math.max(0, -dx), Math.max(0, -dy));
/* Draw the cropped bitmap in the center */
canvas.drawBitmap(mBitmap, srcRect, dstRect, null);
/* Set the cropped bitmap as the new bitmap */
croppedImage.recycle();
croppedImage = b;
}
}
// Return the cropped image directly or save it to the specified URI.
Bundle myExtras = getIntent().getExtras();
if (myExtras != null && (myExtras.getParcelable("data") != null
|| myExtras.getBoolean(RETURN_DATA))) {
Bundle extras = new Bundle();
extras.putParcelable(RETURN_DATA_AS_BITMAP, croppedImage);
setResult(RESULT_OK,
(new Intent()).setAction(ACTION_INLINE_DATA).putExtras(extras));
finish();
} else {
final Bitmap b = croppedImage;
Util.startBackgroundJob(this, null, getString(R.string.saving_image),
new Runnable() {
public void run() {
saveOutput(b);
}
}, mHandler);
}
}
private void saveOutput(Bitmap croppedImage) {
if (mSaveUri != null) {
OutputStream outputStream = null;
try {
outputStream = mContentResolver.openOutputStream(mSaveUri);
if (outputStream != null) {
croppedImage.compress(mOutputFormat, 90, outputStream);
}
} catch (IOException ex) {
Log.e(TAG, "Cannot open file: " + mSaveUri, ex);
setResult(RESULT_CANCELED);
finish();
return;
} finally {
Util.closeSilently(outputStream);
}
Bundle extras = new Bundle();
Intent intent = new Intent(mSaveUri.toString());
intent.putExtras(extras);
intent.putExtra(IMAGE_PATH, mImagePath);
intent.putExtra(ORIENTATION_IN_DEGREES, Util.getOrientationInDegree(this));
setResult(RESULT_OK, intent);
} else {
Log.e(TAG, "not defined image url");
}
croppedImage.recycle();
finish();
}
@Override
protected void onPause() {
super.onPause();
BitmapManager.instance().cancelThreadDecoding(mDecodingThreads);
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mBitmap != null) {
mBitmap.recycle();
}
}
Runnable mRunFaceDetection = new Runnable() {
@SuppressWarnings("hiding")
float mScale = 1F;
Matrix mImageMatrix;
FaceDetector.Face[] mFaces = new FaceDetector.Face[3];
int mNumFaces;
// For each face, we create a HighlightView for it.
private void handleFace(FaceDetector.Face f) {
PointF midPoint = new PointF();
int r = ((int) (f.eyesDistance() * mScale)) * 2;
f.getMidPoint(midPoint);
midPoint.x *= mScale;
midPoint.y *= mScale;
int midX = (int) midPoint.x;
int midY = (int) midPoint.y;
HighlightView hv = new HighlightView(mImageView);
int width = mBitmap.getWidth();
int height = mBitmap.getHeight();
Rect imageRect = new Rect(0, 0, width, height);
RectF faceRect = new RectF(midX, midY, midX, midY);
faceRect.inset(-r, -r);
if (faceRect.left < 0) {
faceRect.inset(-faceRect.left, -faceRect.left);
}
if (faceRect.top < 0) {
faceRect.inset(-faceRect.top, -faceRect.top);
}
if (faceRect.right > imageRect.right) {
faceRect.inset(faceRect.right - imageRect.right,
faceRect.right - imageRect.right);
}
if (faceRect.bottom > imageRect.bottom) {
faceRect.inset(faceRect.bottom - imageRect.bottom,
faceRect.bottom - imageRect.bottom);
}
hv.setup(mImageMatrix, imageRect, faceRect, mCircleCrop,
mAspectX != 0 && mAspectY != 0);
mImageView.add(hv);
}
// Create a default HighlightView if we found no face in the picture.
private void makeDefault() {
HighlightView hv = new HighlightView(mImageView);
int width = mBitmap.getWidth();
int height = mBitmap.getHeight();
Rect imageRect = new Rect(0, 0, width, height);
// make the default size about 4/5 of the width or height
int cropWidth = Math.min(width, height) * 4 / 5;
int cropHeight = cropWidth;
if (mAspectX != 0 && mAspectY != 0) {
if (mAspectX > mAspectY) {
cropHeight = cropWidth * mAspectY / mAspectX;
} else {
cropWidth = cropHeight * mAspectX / mAspectY;
}
}
int x = (width - cropWidth) / 2;
int y = (height - cropHeight) / 2;
RectF cropRect = new RectF(x, y, x + cropWidth, y + cropHeight);
hv.setup(mImageMatrix, imageRect, cropRect, mCircleCrop,
mAspectX != 0 && mAspectY != 0);
mImageView.mHighlightViews.clear(); // Thong added for rotate
mImageView.add(hv);
}
// Scale the image down for faster face detection.
private Bitmap prepareBitmap() {
if (mBitmap == null) {
return null;
}
// 256 pixels wide is enough.
if (mBitmap.getWidth() > 256) {
mScale = 256.0F / mBitmap.getWidth();
}
Matrix matrix = new Matrix();
matrix.setScale(mScale, mScale);
return Bitmap.createBitmap(mBitmap, 0, 0, mBitmap.getWidth(), mBitmap.getHeight(), matrix, true);
}
public void run() {
mImageMatrix = mImageView.getImageMatrix();
Bitmap faceBitmap = prepareBitmap();
mScale = 1.0F / mScale;
if (faceBitmap != null && mDoFaceDetection) {
FaceDetector detector = new FaceDetector(faceBitmap.getWidth(),
faceBitmap.getHeight(), mFaces.length);
mNumFaces = detector.findFaces(faceBitmap, mFaces);
}
if (faceBitmap != null && faceBitmap != mBitmap) {
faceBitmap.recycle();
}
mHandler.post(new Runnable() {
public void run() {
mWaitingToPick = mNumFaces > 1;
if (mNumFaces > 0) {
for (int i = 0; i < mNumFaces; i++) {
handleFace(mFaces[i]);
}
} else {
makeDefault();
}
mImageView.invalidate();
if (mImageView.mHighlightViews.size() == 1) {
mCrop = mImageView.mHighlightViews.get(0);
mCrop.setFocus(true);
}
if (mNumFaces > 1) {
Toast.makeText(CropImage.this,
"Multi face crop help",
Toast.LENGTH_SHORT).show();
}
}
});
}
};
public static final int NO_STORAGE_ERROR = -1;
public static final int CANNOT_STAT_ERROR = -2;
public static void showStorageToast(Activity activity) {
showStorageToast(activity, calculatePicturesRemaining(activity));
}
public static void showStorageToast(Activity activity, int remaining) {
String noStorageText = null;
if (remaining == NO_STORAGE_ERROR) {
String state = Environment.getExternalStorageState();
if (state.equals(Environment.MEDIA_CHECKING)) {
noStorageText = activity.getString(R.string.preparing_card);
} else {
noStorageText = activity.getString(R.string.no_storage_card);
}
} else if (remaining < 1) {
noStorageText = activity.getString(R.string.not_enough_space);
}
if (noStorageText != null) {
Toast.makeText(activity, noStorageText, 5000).show();
}
}
public static int calculatePicturesRemaining(Activity activity) {
try {
/*if (!ImageManager.hasStorage()) {
return NO_STORAGE_ERROR;
} else {*/
String storageDirectory = "";
String state = Environment.getExternalStorageState();
if (Environment.MEDIA_MOUNTED.equals(state)) {
storageDirectory = Environment.getExternalStorageDirectory().toString();
}
else {
storageDirectory = activity.getFilesDir().toString();
}
StatFs stat = new StatFs(storageDirectory);
float remaining = ((float) stat.getAvailableBlocks()
* (float) stat.getBlockSize()) / 400000F;
return (int) remaining;
//}
} catch (Exception ex) {
// if we can't stat the filesystem then we don't know how many
// pictures are remaining. it might be zero but just leave it
// blank since we really don't know.
return CANNOT_STAT_ERROR;
}
}
}
Why?

Add the jar file using the following procedure:
Right-click on the project ---> Build Path ---> Configure Build Path... ---> on the left side choose Java Build Path ---> Libraries ---> Add External JARs ---> OK ---> OK.
Hope this will resolve the issue.
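For what it's worth, with the old Eclipse/ADT toolchain (r17 and newer) any .jar you drop into libs/ is added to the build path automatically, so "Add External JARs" is mainly needed for jars kept outside the project. Either way the result is just a library entry in the project's .classpath file, roughly like the illustrative snippet below (the jar name is a placeholder, not taken from the question):
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
    <!-- entry produced by Add JARs / Add External JARs; ADT also picks up libs/*.jar on its own -->
    <classpathentry kind="lib" path="libs/simplecropimage.jar"/>
</classpath>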

This is a build path issue.
Make sure your bin folder is not included in your build path.
Right-click on your project > Properties > Build Path.
Make sure that the Honeycomb library is in your libs/ folder and not in your source folder.
Include the libraries in libs/ individually in the build path.
BTW, you may want to bring in the android-support-v4 library to get Ice Cream Sandwich support instead of the Honeycomb support library.
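One more thing to check, offered as an assumption rather than a confirmed diagnosis: simplecropimage ships its own resources (the activity inflates R.layout.cropimage and looks up R.id.image), and with Eclipse/ADT resources cannot travel inside a plain .jar, which is the kind of setup that shows up as a NullPointerException in the library activity's onCreate. A minimal sketch of the alternative, assuming the library source is checked out as an Android library project next to the app (the folder name below is hypothetical), is to reference it from project.properties instead of copying a .jar into libs/:
# project.properties of the app (Eclipse/ADT)
target=android-19
# reference the crop library as an Android library project; the relative path is an assumption
android.library.reference.1=../android-simple-crop-image-lib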

You can't simply add the jar files to your app. Go to the build path and add the external jars there; then it will work.

Related

Camera2 API error - the app does not take pictures

I am having an issue with the Camera2 API.
When I try to take a picture, nothing happens. The app does not crash; the error only appears when I try to take a picture.
Furthermore, I already requested all the permissions I need in another activity.
The following error occurs:
Access denied finding property "persist.vendor.camera.privapp.list"
I do not know whether it is related to the problem, but please use it as a reference.
I know that there are similar questions, but none of them worked for me.
I deleted some of the code. If that causes any inconvenience, please let me know.
Thank you in advance.
public class Camera extends AppCompatActivity implements TextureView.SurfaceTextureListener,
ActivityCompat.OnRequestPermissionsResultCallback {
private AutoFitTextureView mTextureView;
private static final int REQUEST_CAMERA_PERMISSION = 1;
private static final int REQUEST_WRITE_STORAGE = 2;
private static final int REQUEST_SAVE_IMAGE = 1002;
private static final int MAX_PREVIEW_WIDTH = 2220;
private static final int MAX_PREVIEW_HEIGHT = 1080;
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
private String mCameraId;
private CameraDevice mCameraDevice;
private CameraCaptureSession mCaptureSession;
private CaptureRequest.Builder mPreviewRequestBuilder;
private CaptureRequest mPreviewRequest;
private int mSensorOrientation;
private ImageReader mImageReader;
private Size mPreviewSize;
private File mImageFolder;
private Byte bytes;
private Bitmap bitmap;
private File mFile;
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
mTextureView = (AutoFitTextureView) findViewById(R.id.texture);
Button button02 = (Button)findViewById(R.id.button_take_picture);
button02.setOnClickListener(new Button.OnClickListener() {
@Override
public void onClick (View view){
takePicture();
}
}
);
}
private void setUpCameraOutputs ( int width, int height){
Activity activity = this;
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : manager.getCameraIdList()) {
CameraCharacteristics characteristics
= manager.getCameraCharacteristics(cameraId);
Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
mCameraId = cameraId;
StreamConfigurationMap map = characteristics.get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
continue;
}
Size largest = Collections.max(
Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
new CompareSizesByArea());
int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
boolean swappedDimensions = false;
switch (displayRotation) {
case Surface.ROTATION_0:
case Surface.ROTATION_180:
if (mSensorOrientation == 90 || mSensorOrientation == 270) {
swappedDimensions = true;
}
break;
case Surface.ROTATION_90:
case Surface.ROTATION_270:
if (mSensorOrientation == 0 || mSensorOrientation == 180) {
swappedDimensions = true;
}
break;
}
Point displaySize = new Point();
activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
int rotatedPreviewWidth = width;
int rotatedPreviewHeight = height;
int maxPreviewWidth = displaySize.x;
int maxPreviewHeight = displaySize.y;
if (swappedDimensions) {
rotatedPreviewWidth = height;
rotatedPreviewHeight = width;
maxPreviewWidth = displaySize.y;
maxPreviewHeight = displaySize.x;
}
if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
maxPreviewWidth = MAX_PREVIEW_WIDTH;
}
if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
maxPreviewHeight = MAX_PREVIEW_HEIGHT;
}
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
maxPreviewHeight, largest);
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
mTextureView.setAspectRatio(
mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
mTextureView.setAspectRatio(
mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
return;
}
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (NullPointerException e) {
}
}
protected void takePicture () {
if (null == mCameraDevice) {
Log.e("Camera", "cameraDevice is null");
return;
}
if (ContextCompat.checkSelfPermission(this,Manifest.permission.WRITE_EXTERNAL_STORAGE)!= PackageManager.PERMISSION_GRANTED) {
return;
}else{
if (shouldShowRequestPermissionRationale(Manifest.permission.WRITE_EXTERNAL_STORAGE)){
Toast.makeText(this,"We need your permission to record and save image",Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE},REQUEST_WRITE_STORAGE);
}
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraDevice.getId());
Size[] jpegSizes = null;
if (characteristics != null) {
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
}
int width = 640;
int height = 480;
if (jpegSizes != null && 0 < jpegSizes.length) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
final ImageReader mImageReader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 2);
List<Surface> outputSurfaces = new ArrayList<>(2);
outputSurfaces.add(mImageReader.getSurface());
outputSurfaces.add(new Surface(mTextureView.getSurfaceTexture()));
int rotation = getWindowManager().getDefaultDisplay().getRotation();
//Callback that is called when a new image is available from ImageReader
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
//try {
mFile = new File(getExternalFilesDir(null), "sample.jpg");
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
FileOutputStream fileOutputStream = null;
try {
fileOutputStream = new FileOutputStream(mFile);
fileOutputStream.write(bytes);
} catch (Exception e) {
e.printStackTrace();
}finally {
mImageReader.close();
if (null != fileOutputStream){
try {
fileOutputStream.close();
}catch (IOException e){
e.printStackTrace();
}
}
}
}
};
mImageReader.setOnImageAvailableListener(readerListener, null);
final CameraCaptureSession.CaptureCallback captureListener =
new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Toast.makeText(Camera.this, "Saved:" + mImageReader, Toast.LENGTH_SHORT).show();
createCameraPreviewSession();
}
};
mCameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(mPreviewRequestBuilder.build(), captureListener, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void createCameraPreviewSession () {
try {
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
Surface surface = new Surface(texture);
mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
mCameraDevice.createCaptureSession(Arrays.asList(surface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
if (null == mCameraDevice) {
return;
}
mCaptureSession = cameraCaptureSession;
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
mPreviewRequest = mPreviewRequestBuilder.build();
mCaptureSession.setRepeatingRequest(mPreviewRequest,
null, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(
@NonNull CameraCaptureSession cameraCaptureSession) {
}
}, null
);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
mCameraDevice = cameraDevice;
createCameraPreviewSession();
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int error) {
Log.d("debug","Camera error");
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
};
@RequiresApi(api = Build.VERSION_CODES.M)
private void openCamera ( int width, int height){
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
!= PackageManager.PERMISSION_GRANTED) {
Toast.makeText(this,"No permission to open camera",Toast.LENGTH_SHORT).show();
requestPermissions(new String[]{Manifest.permission.CAMERA},REQUEST_CAMERA_PERMISSION);
return;
}
setUpCameraOutputs(width, height);
CameraManager manager = (CameraManager) this.getSystemService(Context.CAMERA_SERVICE);
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
manager.openCamera(mCameraId, mStateCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
}
}
}
Could you try requesting the camera permission first, before trying to take the actual picture? Maybe that is why it is failing.
Remove this permission check from your takePicture() method:
if (ContextCompat.checkSelfPermission(this,Manifest.permission.WRITE_EXTERNAL_STORAGE)!= PackageManager.PERMISSION_GRANTED) {
return;
}else{
if (shouldShowRequestPermissionRationale(Manifest.permission.WRITE_EXTERNAL_STORAGE)){
Toast.makeText(this,"We need your permission to record and save image",Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE},REQUEST_WRITE_STORAGE);
}
Execute it first, before calling takePicture().
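A minimal sketch of that idea, assuming the activity and the REQUEST_CAMERA_PERMISSION constant from the question: check and request both permissions once, e.g. from onCreate(), and only allow takePicture() after they have been granted.
private static final String[] REQUIRED_PERMISSIONS = {
        Manifest.permission.CAMERA,
        Manifest.permission.WRITE_EXTERNAL_STORAGE
};

private boolean hasAllPermissions() {
    for (String permission : REQUIRED_PERMISSIONS) {
        if (ContextCompat.checkSelfPermission(this, permission)
                != PackageManager.PERMISSION_GRANTED) {
            return false;
        }
    }
    return true;
}

// Call this from onCreate() or onResume(), before the capture button can be pressed.
private void requestPermissionsIfNeeded() {
    if (!hasAllPermissions()) {
        ActivityCompat.requestPermissions(this, REQUIRED_PERMISSIONS, REQUEST_CAMERA_PERMISSION);
    }
}

@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
                                        @NonNull int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    if (requestCode == REQUEST_CAMERA_PERMISSION && hasAllPermissions()) {
        // Permissions granted: it is now safe to open the camera and take pictures.
    } else {
        Toast.makeText(this, "Camera and storage permissions are required", Toast.LENGTH_SHORT).show();
    }
}
That way takePicture() never has to return early because of a missing permission.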

java.lang.RuntimeException: An error occured while executing doInBackground() Caused by: java.lang.NullPointerException

I am trying to run my project from Android Studio, but I get a java.lang.RuntimeException while executing doInBackground() and the app stops. I have seen a lot of questions about this, but none of them worked.
Here is my code:
import...
public class LicensePlateProcessorAsync extends AsyncTask<LicensePlateProcessorParameter, String, Object> {
private static final String TAG = "LPRWorker";
private LicensePlateProcessorCallback listener;
private Object HandlerThread;
private Object LicensePlateProcessorParameter;
private Object String;
private java.lang.Object Object;
public LicensePlateProcessorAsync(LicensePlateProcessorCallback listener) {
this.listener = listener;
}
protected Object doInBackground(LicensePlateProcessorParameter...params) {
LicensePlateProcessorParameter param = params[0];
String path = param.getPath();
float fResizeRatio = param.getResizeRatio();
PreviewMode previewMode = param.getPreviewMode();
int iMedianBlurKernelSize = param.getMedianBlurKernelSize();
int iLineLength = param.getLineLength();
publishProgress("Loading file");
Mat mImage = null;
try {
ExifInterface exif = new ExifInterface(path);
int rotationCode = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
int rotationDegrees = 0;
if (rotationCode == ExifInterface.ORIENTATION_ROTATE_90) {
rotationDegrees = 90;
} else if (rotationCode == ExifInterface.ORIENTATION_ROTATE_180) {
rotationDegrees = 180;
} else if (rotationCode == ExifInterface.ORIENTATION_ROTATE_270) {
rotationDegrees = 270;
}
Matrix matrix = new Matrix();
if (rotationDegrees != 0) {
matrix.preRotate(rotationDegrees);
}
BitmapFactory.Options bmOptions = new BitmapFactory.Options();
bmOptions.inJustDecodeBounds = false;
Bitmap bitmapTmp = BitmapFactory.decodeFile(path, bmOptions);
Bitmap bitmap;
bitmap = Bitmap.createBitmap(
bitmapTmp, 0, 0,
bitmapTmp.getWidth(),
bitmapTmp.getHeight(),
matrix,
true);
mImage = new Mat();
Utils.bitmapToMat(bitmap, mImage);
bitmap.recycle();
} catch (IOException e) {
e.printStackTrace();
}
And the error is:
java.lang.RuntimeException: An error occured while executing doInBackground()
at android.os.AsyncTask$3.done(AsyncTask.java:299)
at java.util.concurrent.FutureTask.finishCompletion(FutureTask.java:352)
at java.util.concurrent.FutureTask.setException(FutureTask.java:219)
at java.util.concurrent.FutureTask.run(FutureTask.java:239)
at android.os.AsyncTask$SerialExecutor$1.run(AsyncTask.java:230)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1080)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:573)
at java.lang.Thread.run(Thread.java:856)
Caused by: java.lang.NullPointerException
at com.mhd_deeb.lprsvu.lpr_sit.LicensePlateProcessorAsync.doInBackground(LicensePlateProcessorAsync.java:73)
at com.mhd_deeb.lprsvu.lpr_sit.LicensePlateProcessorAsync.doInBackground(LicensePlateProcessorAsync.java:29)

getting a Bitmap from a Uri

protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_view_image);
imageView = (ImageView) findViewById(R.id.imageView);
Intent intent = getIntent();
Uri imageUri = intent.getData();
// Picasso.with(this).load(imageUri).into(imageView);
if(imageUri == null){
Log.d(TAG, "Check");
}
//BitmapFactory.Options options = new BitmapFactory.Options();
// options.inSampleSize = 8;
// bm = MediaStore.Images.Media.getBitmap(getContentResolver(),imageUri);
Bitmap bm = null;
try{
InputStream image;
try{
image = getContentResolver().openInputStream(imageUri);
bm = BitmapFactory.decodeStream(image);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
}
catch (Exception e){
e.printStackTrace();
}
//BitmapFactory.Options options = new BitmapFactory.Options();
// bm = BitmapFactory.decodeFile(file.getAbsolutePath(),options);
if ((bm == null)) {
prints("It doesn't work");
}
//Log.d(TAG,"Going to convert image.");
imageView.setImageBitmap(bm);
I've looked at a bunch of StackOverflow questions and tried them all, as you can see in the commented code. I'm trying to get a Bitmap that is unchanged from when the photo was taken; the pixel positions of the Bitmap are crucial for my goal. The image is high quality and possibly a large file. My Bitmap is always null, though I have made sure my Uri isn't. Any ideas?
I think the code below will help you
private void setImage(String path, CircleImageView img) {
options = new DisplayImageOptions.Builder()
.cacheInMemory(true)
.cacheOnDisk(true)
.considerExifParams(true)
.bitmapConfig(Bitmap.Config.RGB_565)
.showImageOnLoading(R.drawable.user)
.showImageForEmptyUri(R.drawable.user)
.showImageOnFail(R.drawable.user)
.build();
ImageLoader.getInstance().displayImage(path, img, options, new SimpleImageLoadingListener() {
@Override
public void onLoadingStarted(String imageUri, View view) {
super.onLoadingStarted(imageUri, view);
}
@Override
public void onLoadingFailed(String imageUri, View view, FailReason failReason) {
super.onLoadingFailed(imageUri, view, failReason);
}
@Override
public void onLoadingCancelled(String imageUri, View view) {
super.onLoadingCancelled(imageUri, view);
}
@Override
public void onLoadingComplete(String imageUri, View view, Bitmap loadedImage) {
super.onLoadingComplete(imageUri, view, loadedImage);
}
}, new ImageLoadingProgressListener() {
@Override
public void onProgressUpdate(String s, View view, int i, int i1) {
}
});
}
Use Universal Image Loader for the SimpleImageLoadingListener().
The file's resolution is too big to make a bitmap out of it. There is not enough memory to construct the bitmap, hence null is returned.
Try with a very small image file to see.
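If shrinking the source image is not an option, a common sketch (not from this answer; the helper name and maxSize parameter are mine) is a two-pass decode: read only the bounds first, compute an inSampleSize, then reopen the stream and decode at the reduced size.
private Bitmap decodeSampledBitmapFromUri(Uri uri, int maxSize) throws IOException {
    // Pass 1: read only the image dimensions.
    BitmapFactory.Options bounds = new BitmapFactory.Options();
    bounds.inJustDecodeBounds = true;
    InputStream in = getContentResolver().openInputStream(uri);
    BitmapFactory.decodeStream(in, null, bounds);
    if (in != null) in.close();

    // Pick a power-of-two sample size so the longer edge ends up within maxSize.
    int inSampleSize = 1;
    while (Math.max(bounds.outWidth, bounds.outHeight) / inSampleSize > maxSize) {
        inSampleSize *= 2;
    }

    // Pass 2: decode for real; the stream cannot be rewound, so reopen it.
    BitmapFactory.Options opts = new BitmapFactory.Options();
    opts.inSampleSize = inSampleSize;
    in = getContentResolver().openInputStream(uri);
    Bitmap bitmap = BitmapFactory.decodeStream(in, null, opts);
    if (in != null) in.close();
    return bitmap;
}
Calling it as decodeSampledBitmapFromUri(imageUri, 2048) keeps the longer edge around 2048 pixels. Be aware that downsampling changes pixel coordinates, so if exact pixel positions matter (as the question says), BitmapRegionDecoder is the usual way to read sub-regions at full resolution without holding the whole image in memory.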
You can get a Bitmap directly from a URI, if it points to your device storage, using the code below.
public Bitmap getBitmapFromURI(Uri uri){
String[] filePathColumn = {MediaStore.Images.Media.DATA};
Cursor cursor = getActivity().getContentResolver().query(uri, filePathColumn, null, null, null);
if(cursor!=null){
cursor.moveToFirst();
int columnIndex = cursor.getColumnIndex(filePathColumn[0]);
File f_image = new File(cursor.getString(columnIndex));
cursor.close();
BitmapFactory.Options o2 = new BitmapFactory.Options();
return BitmapFactory.decodeFile(f_image.getAbsolutePath(), o2);
}
return null;
}
In case the file is too large to load, you need to compress the image before showing it. For that you should use an AsyncTask as well.
I have made an image compressor task; you can use it for large images.
public class ConvertBase64Task extends AsyncTask<Void, Bitmap, Bitmap> {
private ByteArrayOutputStream baos = new ByteArrayOutputStream();
private File file;
private int CompressionRatio = 80; // You can change it to whatever ratio you want, from 0 to 100.
private boolean shouldrotate = true;
private ImageCompressiorListner imageCompressiorListner;
public ConvertBase64Task(File file) {
this.file = file;
}
public void setRotation(boolean isRotate) {
shouldrotate = isRotate;
}
public void setCompressionRatio(int Ratio) {
CompressionRatio = Ratio;
}
public void setImageCompressiorListner(ImageCompressiorListner imageCompressiorListner) {
this.imageCompressiorListner = imageCompressiorListner;
}
@Override
protected void onPreExecute() {
}
@Override
protected Bitmap doInBackground(Void... params) {
try {
//***** Fetching file
//*****Code for Orientation
Matrix matrix = new Matrix();
if (shouldrotate) {
ExifInterface exif1 = new ExifInterface(file.getAbsolutePath());
int orientation = exif1.getAttributeInt(
ExifInterface.TAG_ORIENTATION, 1);
Log.d("EXIF", "Exif: " + orientation);
if (orientation == 6) {
matrix.postRotate(90);
} else if (orientation == 3) {
matrix.postRotate(180);
} else if (orientation == 8) {
matrix.postRotate(270);
} else {
matrix.postRotate(0);
}
} else {
matrix.postRotate(0);
}
try {
BitmapFactory.Options option = new BitmapFactory.Options();
option.inJustDecodeBounds = true;
BitmapFactory.decodeFile(file.getAbsolutePath(), option);
int file_size = Integer.parseInt(String.valueOf(file.length() / 1024));
Log.e("ImageSize", "" + file_size);
int scale = 1;
if (file_size < 512) {
Log.e("image size is good", "image size is less");
} else if (file_size < 1024) {
Log.e("image size is 1 mb", "image size is heavy");
scale = 2;
} else if (file_size < 1536) {
Log.e("image size is 1.5 mb", "image size is heavy");
scale = 2;
} else if (file_size < 2048) {
Log.e("image size is 2 mb", "image size is heavy");
scale = 4;
} else {
Log.e("image size > 2 mb", "image size is heavy");
scale = 4;
}
Log.e("Scale", "Finaly Scaling with " + scale);
BitmapFactory.Options o2 = new BitmapFactory.Options();
o2.inSampleSize = scale;
Bitmap pickimg = BitmapFactory.decodeFile(file.getAbsolutePath(), o2);
if (pickimg.getWidth() > 1280 || pickimg.getHeight() > 1000) {
int width = pickimg.getWidth();
int height = pickimg.getHeight();
while (width > 1280 || height > 700) {
width = (width * 90) / 100;
height = (height * 90) / 100;
}
pickimg = Bitmap.createScaledBitmap(pickimg, width, height, true);
} else {
pickimg = Bitmap.createBitmap(pickimg, 0, 0, pickimg.getWidth(), pickimg.getHeight(), matrix, true); // rotating bitmap
}
pickimg.compress(Bitmap.CompressFormat.JPEG, CompressionRatio, baos);
return pickimg;
} catch (OutOfMemoryError e) {
e.printStackTrace();
return null;
} catch (Exception e) {
e.printStackTrace();
return null;
}
} catch (Throwable e) {
e.printStackTrace();
return null;
}
}
@Override
protected void onPostExecute(Bitmap result) {
super.onPostExecute(result);
if (result != null) {
if(imageCompressiorListner!=null){
imageCompressiorListner.onImageCompressed(result);
}
} else {
if(imageCompressiorListner!=null){
imageCompressiorListner.onError();
}
}
}
public interface ImageCompressiorListner {
void onImageCompressed(Bitmap bitmap);
void onError();
}
}
You can use it as below:
ConvertBase64Task task = new ConvertBase64Task(File Object of Image);
task.setImageCompressiorListner(new ConvertBase64Task.ImageCompressiorListner() {
@Override
public void onImageCompressed(Bitmap bitmap) {
}
@Override
public void onError() {
}
});
task.execute();
Hope this will help you out.

UI not showing up after switching to AsyncTask

I have recently switched over to AsyncTask after reading how efficient it is. It took some time porting my code, but I did it in the end. I have one AsyncTask that runs first, setting up the background image/color that it gets from a JSON object. After that, it sets up TableRows for all the other fields in the JSON file. The background changes, but the UI does not show up. I placed System.out.println calls inside my AsyncTask and I can see that the code has been executed (the onPostExecute method), but none of the UI shows up. I have no idea what the problem is, hence me posting this question.
Here is my AsyncTask:
class GetImageAsync extends AsyncTask<String, Void, Drawable> {
private final WeakReference<View> ViewReference;
private String data;
private Context context;
private boolean isImageView;
private boolean scale;
private int id = -1;
private int color = -1;
private int height = -1;
public GetImageAsync(TableLayout tr, String data, Context context) {
isImageView = false;
this.context = context;
this.data = data;
ViewReference = new WeakReference<View>(tr);
System.out.println("inside async");
}
public GetImageAsync(ImageView iv, int id, Context context) {
isImageView = true;
this.context = context;
this.id = id;
ViewReference = new WeakReference<View>(iv);
}
public GetImageAsync(ImageView imageView,String data, Context context, int height) {
System.out.println("profile async");
this.height = height;
isImageView = true;
this.context = context;
this.data = data;
ViewReference = new WeakReference<View>(imageView);
}
// Decode image in background.
@Override
protected Drawable doInBackground(String... params) {
System.out.println(id + " : " + isImageView);
if(isImageView && id != -1) {
return getImageFromResource();
} else {
if(data.startsWith("#")) {
color = Color.parseColor(data);
return null;
}
try {
return getImageFromWeb(data, scale);
} catch (MalformedURLException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
private Drawable getImageFromResource() {
return context.getResources().getDrawable(id);
}
private Drawable getImageFromWeb(String data, boolean b) throws MalformedURLException, IOException {
Bitmap img = BitmapFactory.decodeStream(new URL(data).openConnection().getInputStream());
if(height != -1) {
return new BitmapDrawable(context.getResources(), getCroppedBitmap(Bitmap.createScaledBitmap(
Bitmap.createScaledBitmap(img,
img.getWidth(),
img.getWidth(), true),
height * 2,
height * 2, true)));
}
return new BitmapDrawable(context.getResources(), img);
}
// Once complete, see if ImageView is still around and set bitmap.
@Override
protected void onPostExecute(Drawable bitmap) {
if(isImageView) { // this block does not run {
System.out.println("post "+isImageView);
System.out.println("iv");
if (ViewReference != null && bitmap != null) {
final ImageView imageView = (ImageView) ViewReference.get();
if (imageView != null) {
imageView.setImageDrawable(bitmap);
}
} // }
} else { //this block runs {
System.out.println("post "+isImageView);
if (ViewReference != null) {
final TableLayout tr = (TableLayout) ViewReference.get();
if (tr != null) {
if(color != -1) {
tr.setBackgroundColor(color);
} else {
tr.setBackground(bitmap);
}
}
}
} // }
}
public static Bitmap getCroppedBitmap(Bitmap bitmap) {
int w = bitmap.getWidth();
int h = bitmap.getHeight();
int radius = Math.min(h / 2, w / 2);
Bitmap output = Bitmap.createBitmap(w + 8, h + 8, Config.ARGB_8888);
Paint p = new Paint();
p.setAntiAlias(true);
Canvas c = new Canvas(output);
c.drawARGB(0, 0, 0, 0);
p.setStyle(Style.FILL);
c.drawCircle((w / 2) + 4, (h / 2) + 4, radius, p);
p.setXfermode(new PorterDuffXfermode(Mode.SRC_IN));
c.drawBitmap(bitmap, 4, 4, p);
p.setXfermode(null);
p.setStyle(Style.STROKE);
p.setColor(Color.BLACK);
p.setStrokeWidth(1);
c.drawCircle((w / 2) + 4, (h / 2) + 4, radius, p);
return output;
}
}
and here is how I create the UI:
private void createUI(JSONObject jObject) throws JSONException {
int absIndex = 0;
if (android.os.Build.VERSION.SDK_INT >= 16) {
new GetImageAsync(tr, jObject.getString(("bg")), getApplicationContext()).execute("");
System.out.println("after async");
/*if (bgcolor != -1) {
System.out.println("color: " + bgcolor);
tr.setBackgroundColor(bgcolor);
} else if (bgpic != null) {
tr.setBackground(bgpic);
}*/
}
for (int i = 0; i < keys.length; i++) {
values.add(i, new ArrayList<String>());
values.get(i).add(0, keys[i]);
values.get(i).add(1, jObject.getString(keys[i]));
}
String lastString = WhoIsLast(jObject);
trcard.setBackground(getResources()
.getDrawable(R.drawable.bg_card/* abc_menu_dropdown_panel_holo_light */));
trcard.addView(tlcard);
tr.setPadding(0, 0, 0, 0);
TableLayout.LayoutParams tableRowParams = new TableLayout.LayoutParams(
TableLayout.LayoutParams.MATCH_PARENT,
TableLayout.LayoutParams.WRAP_CONTENT);
int[] attrs = { android.R.attr.dividerVertical };
TypedArray typedArray = getApplicationContext().obtainStyledAttributes(
attrs);
Drawable divider = typedArray.getDrawable(0);
typedArray.recycle();
tableRowParams.setMargins(20, 20, 20, 0);
trcard.setLayoutParams(tableRowParams);
tlcard.setDividerDrawable(divider);
tlcard.setDividerPadding(4);
tableScrollView.addView(trcard);
for (int i = 0; i < keys.length; i++) {
String value = values.get(i).get(1);
if (value != "") {
String key = values.get(i).get(0);
boolean last = false;
if (i == keys.length || value.equals(lastString) || i == 0) {
last = true;
System.out.println(value +" = "+lastString);
}
insertElement(value, key, absIndex++, last, drawables[i]);
if (!last) {
absIndex++;
}
}
}
}
private String WhoIsLast(JSONObject j) throws JSONException {
if (j.getString("facebook").equals("")) {
return "";
}
if (j.getString("twitter").equals("")) {
return "facebook";
}
if (j.getString("email").equals("")) {
return "twitter";
}
if (j.getString("phone").equals("")) {
return "email";
}
return "";
}
int absIndex = 0;
private void insertElement(String data, String key, int i,
boolean b, Integer id) throws JSONException {
LayoutInflater inflater = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
View newRow = inflater.inflate(R.layout.row, null, false);
newRow.setLayoutParams(new TableRow.LayoutParams(
TableRow.LayoutParams.MATCH_PARENT,
TableRow.LayoutParams.WRAP_CONTENT));
TextView dataTextView = (TextView) newRow
.findViewById(R.id.rowTextView);
dataTextView.setText("\t " + data);
ImageView iv = (ImageView) newRow.findViewById(R.id.rowImageView);
newRow.setId(absIndex++);
newRow.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View view) {
// omitted
}
});
View v = new View(this);
v.setLayoutParams(new TableRow.LayoutParams(
TableRow.LayoutParams.MATCH_PARENT, 1));
v.setBackgroundColor(Color.argb(25, 111, 111, 111));
dataTextView.measure(MeasureSpec.UNSPECIFIED, MeasureSpec.UNSPECIFIED);
if (i == 0) {
System.out.println("before async iv");
new GetImageAsync(iv, jObject.getString("profilepic"), getApplicationContext(), dataTextView.getMeasuredHeight()).execute("");
System.out.println("after async iv");
//async.execute("");
dataTextView.setShadowLayer(3, 0, 0, Color.BLACK);
dataTextView.setTextColor(getResources().getColor(R.color.white));
} else {
new GetImageAsync(iv, id, getApplicationContext()).execute("");
}
if (i == 0) {
dataTextView.setTextSize(30);
tableScrollView.addView(newRow, i);
System.out.println("adding i=0null");
} else {
System.out.println("adding i = "+i +tlcard);
tlcard.addView(newRow, i - 1);
if (!b) {
tlcard.addView(v, i);
}
}
}
Edit: here is where I call the createUI method:
final Thread thread = new Thread(new Runnable() {
@Override
public void run() {
jObject = getJson("http://www.tabcards.com/req/androidapi/L2o30H8JlFMtFYHW3KLxkts20ztc5Be6Z6m6v315/json/"
+ value);
System.out.println(value);
if (jObject != null) {
runOnUiThread(new Runnable() {
@Override
public void run() {
try {
createUI(jObject);
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
}
});
thread.start();

Android: Camera surfaceview is blurry

I am building my own camera app using the Android Camera API. I have a working app, but the preview is not as sharp as in the default camera app. Why is this the case? Here is my code below.
public class showCamera extends SurfaceView implements SurfaceHolder.Callback {
private static final int PICTURE_SIZE_MAX_WIDTH =640;
private static final int PREVIEW_SIZE_MAX_WIDTH = 640;
//private Camera theCamera;
private SurfaceHolder holdMe;
private Camera theCamera;
int h;
int w;
public showCamera (Context context,Camera camera,int w,int h)
{
super(context);
theCamera = camera;
holdMe = getHolder();
holdMe.addCallback(this);
this.h=h;
this.w=w;
}
public showCamera(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
// TODO Auto-generated constructor stub
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
// TODO Auto-generated method stub
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
// TODO Auto-generated method stub
try {
theCamera.setPreviewDisplay(holder);
//setDisplayOrientation(theCamera,90);
if( (getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT ))
{ theCamera.setDisplayOrientation(90);
}
Camera.Parameters parameters = theCamera.getParameters();
Log.d(" " , " THIS IS THE FLASH MODE = " + parameters.getFlashMode()) ;
List<String> g= parameters.getSupportedFocusModes();
for(int j=0;j<g.size();j++)
{
Log.d(" " , " THIS IS focus modes =" + g.get(j)) ;
}
Size bestPreviewSize = determineBestPreviewSize(parameters);
Size bestPictureSize = determineBestPictureSize(parameters);
parameters.setPreviewSize(bestPreviewSize.width, bestPreviewSize.height);
parameters.setPictureSize(bestPictureSize.width, bestPictureSize.height);
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
theCamera.setParameters(parameters);
theCamera.startPreview();
} catch (IOException e) {
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
theCamera.stopPreview();
theCamera.release();
// TODO Auto-generated method stub
}
protected void setDisplayOrientation(Camera camera, int angle){
Method downPolymorphic;
try
{
downPolymorphic = camera.getClass().getMethod("setDisplayOrientation", new Class[] { int.class });
if (downPolymorphic != null)
downPolymorphic.invoke(camera, new Object[] { angle });
}
catch (Exception e1)
{
}
}
private Size determineBestPreviewSize(Camera.Parameters parameters) {
List<Size> sizes = parameters.getSupportedPreviewSizes();
return determineBestSize(sizes, PREVIEW_SIZE_MAX_WIDTH);
}
private Size determineBestPictureSize(Camera.Parameters parameters) {
List<Size> sizes = parameters.getSupportedPictureSizes();
return determineBestSize(sizes, PICTURE_SIZE_MAX_WIDTH);
}
protected Size determineBestSize(List<Size> sizes, int widthThreshold) {
Size bestSize = null;
for (Size currentSize : sizes) {
boolean isDesiredRatio = (currentSize.width / 4) == (currentSize.height / 3);
boolean isBetterSize = (bestSize == null || currentSize.width > bestSize.width);
boolean isInBounds = currentSize.width <= PICTURE_SIZE_MAX_WIDTH;
if (isDesiredRatio && isInBounds && isBetterSize) {
bestSize = currentSize;
}
}
if (bestSize == null) {
return sizes.get(0);
}
return bestSize;
}
AutoFocusCallback autoFocusCallback = new AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
Log.i("tag","this ran sdfgfhgjkldxbvnm,jhgfdkmn" );
}
};
}
MainActivity
public class MainActivity extends Activity implements OnClickListener {
private Camera cameraObject;
private showCamera showCamera;
int h;
int w = 1080;
LinearLayout Top, Buttom;
Button b;
public static Camera isCameraAvailiable() {
Camera object = null;
try {
object = Camera.open();
object.getParameters();
} catch (Exception e) {
}
return object;
}
AutoFocusCallback autoFocusCallback = new AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
Log.i("tag", "this ran sdfgfhgjkldxbvnm,jhgfdkmn");
}
};
private PictureCallback capturedIt = new PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
if (bitmap == null) {
Toast.makeText(getApplicationContext(), "not taken",
Toast.LENGTH_SHORT).show();
} else {
File pictureFile = MediaOutput();
if (pictureFile == null) {
Log.d("",
"Error creating media file, check storage permissions: ");
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
MediaStore.Images.Media.insertImage(getContentResolver(),
bitmap, "testing ", "");
Toast.makeText(getApplicationContext(), "taken",
Toast.LENGTH_SHORT).show();
fos.close();
} catch (FileNotFoundException e) {
Log.d("", "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d("", "Error accessing file: " + e.getMessage());
}
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.camlay);
Top = (LinearLayout) findViewById(R.id.top_bar);
Buttom = (LinearLayout) findViewById(R.id.but_bar);
b = (Button) findViewById(R.id.but_pic);
b.setOnClickListener(this);
Display display = getWindowManager().getDefaultDisplay();
Point size = new Point();
display.getSize(size);
int width = size.x;
int height = size.y;
h = (int) Math.round(0.8 * height);
Log.d(" ", " height " + h);
Log.d(" ", " width " + width);
Top.setLayoutParams(new LinearLayout.LayoutParams(width, (int) Math
.round(0.10 * height)));
Buttom.setLayoutParams(new LinearLayout.LayoutParams(width, (int) Math
.round(0.10 * height)));
cameraObject = isCameraAvailiable();
showCamera = new showCamera(this, cameraObject, width, h);
FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
preview.addView(showCamera, new FrameLayout.LayoutParams(width, h));
// preview.addView(showCamera);
}
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
switch (v.getId()) {
case R.id.but_pic:
// cameraObject.takePicture(null, null,capturedIt);
// parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
Camera.Parameters parameters = cameraObject.getParameters();
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
cameraObject.setParameters(parameters);
cameraObject.autoFocus(autoFocusCallback);
// cameraObject.stopPreview();
break;
}
}
private static File MediaOutput() {
File mediaStorageDir = new File(
Environment
.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
"MyCameraApp");
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
Log.d("MyCameraApp", "failed to create directory");
return null;
}
}
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss")
.format(new Date());
File mediaFile;
mediaFile = new File(mediaStorageDir.getPath() + File.separator
+ "IMG_" + timeStamp + ".jpg");
return mediaFile;
}
}
If you can point me in the right direction that would be great.
Have you ever tried to increase these values in your showCamera class?
private static final int PICTURE_SIZE_MAX_WIDTH = 640;
private static final int PREVIEW_SIZE_MAX_WIDTH = 640;
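Raising those limits lets determineBestSize() pick a higher-resolution preview and picture size. As a rough sketch (not from the original answer), you could also skip the width cap entirely and simply take the largest preview size the hardware reports:
// Sketch: choose the largest supported preview size instead of capping the width at 640.
private Camera.Size getLargestPreviewSize(Camera.Parameters parameters) {
    Camera.Size best = null;
    for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
        if (best == null || (long) size.width * size.height > (long) best.width * best.height) {
            best = size;
        }
    }
    return best;
}
Then pass the result to parameters.setPreviewSize(best.width, best.height) in surfaceCreated(); the same idea applies to getSupportedPictureSizes() for the capture size.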
