Currently I'm trying to work with the android camera and I got pretty far with my test project. It worked perfectly fine when tested on my HTC Desire S with Gingerbread Android. However after I updated to ICS pictures taken with the test app only show strange vertical lines (it's the exact same code).
Here is an example of the images that are created now, all of a sudden:
http://imageshack.us/photo/my-images/191/rebuilder1.jpg/
Here is my code (whole class):
package inter.rebuilder;
import inter.rebuilder.R;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.List;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.PixelFormat;
import android.graphics.Bitmap.CompressFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.ErrorCallback;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera.Size;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MotionEvent;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.View.OnClickListener;
import android.view.View.OnTouchListener;
public class CameraView extends Activity implements SurfaceHolder.Callback,
OnClickListener {
static final int FOTO_MODE = 0;
private static final String TAG = "CameraTest";
Camera mCamera;
boolean mPreviewRunning = false;
private Context mContext = this;
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
//boolean hasCam = checkCameraHardware(mContext);
Log.e(TAG, "onCreate");
Bundle extras = getIntent().getExtras();
getWindow().setFormat(PixelFormat.TRANSLUCENT);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.main);
mSurfaceView = (SurfaceView) findViewById(R.id.surface_camera);
mSurfaceView.setOnClickListener(this);
mSurfaceHolder = mSurfaceView.getHolder();
mSurfaceHolder.addCallback(this);
mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
#Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
super.onRestoreInstanceState(savedInstanceState);
}
Camera.PreviewCallback mPreviewCallback = new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
Camera.Parameters parameters = camera.getParameters();
int width = parameters.getPreviewSize().width;
int height = parameters.getPreviewSize().height;
ByteArrayOutputStream outstr = new ByteArrayOutputStream();
Rect rect = new Rect(0, 0, width, height);
YuvImage yuvimage=new YuvImage(data,ImageFormat.NV21,width,height,null);
yuvimage.compressToJpeg(rect, 100, outstr);
Bitmap bmp = BitmapFactory.decodeByteArray(outstr.toByteArray(), 0, outstr.size());
}
};
Camera.PictureCallback mPictureCallback = new Camera.PictureCallback() {
public void onPictureTaken(byte[] imageData, Camera c) {
if (imageData != null) {
Intent mIntent = new Intent();
storeByteImage(mContext, imageData, 100);
try {
mCamera.unlock();
mCamera.reconnect();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
setResult(FOTO_MODE, mIntent);
startCameraPreview();
//mCamera.startPreview();
//finish();
//Intent intent = new Intent(CameraView.this, AndroidBoxExample.class);
//CameraView.this.startActivity(intent);
}
}
};
private boolean checkCameraHardware(Context context) {
if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)){
// this device has a camera
return true;
} else {
// no camera on this device
return false;
}
}
protected void onResume() {
Log.e(TAG, "onResume");
super.onResume();
}
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
}
protected void onPause() {
Log.e(TAG, "onPause");
super.onPause();
}
protected void onStop() {
Log.e(TAG, "onStop");
super.onStop();
}
public void surfaceCreated(SurfaceHolder holder) {
Log.e(TAG, "surfaceCreated");
mCamera = Camera.open();
//mCamera.unlock();
//mCamera.setDisplayOrientation(180);
}
private void startCameraPreview() {
Camera.Parameters p = mCamera.getParameters();
p.setPictureFormat(PixelFormat.JPEG);
//p.setPreviewSize(w, h);
List<Size> list = p.getSupportedPreviewSizes();
Camera.Size size = list.get(0);
p.setPreviewSize(size.width, size.height);
mCamera.setParameters(p);
mCamera.startPreview();
mPreviewRunning = true;
}
private void startCameraPreview(SurfaceHolder holder) {
Camera.Parameters p = mCamera.getParameters();
//p.setPreviewSize(w, h);
List<Size> list = p.getSupportedPreviewSizes();
Camera.Size size = list.get(0);
p.setPreviewSize(size.width, size.height);
mCamera.setParameters(p);
try {
mCamera.setPreviewDisplay(holder);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
mCamera.startPreview();
mPreviewRunning = true;
//setCameraDisplayOrientation(this, 0, mCamera);
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
Log.e(TAG, "surfaceChanged");
// XXX stopPreview() will crash if preview is not running
if (mPreviewRunning) {
mCamera.stopPreview();
mPreviewRunning = false;
}
startCameraPreview(holder);
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.e(TAG, "surfaceDestroyed");
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
mPreviewRunning = false;
mCamera.release();
}
#Override
public void onConfigurationChanged(Configuration newConfig) {
// TODO Auto-generated method stub
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
mPreviewRunning = false;
mCamera.release();
super.onConfigurationChanged(newConfig);
}
#Override
public void onContentChanged() {
// TODO Auto-generated method stub
super.onContentChanged();
}
#Override
public void onContextMenuClosed(Menu menu) {
// TODO Auto-generated method stub
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
mPreviewRunning = false;
mCamera.release();
super.onContextMenuClosed(menu);
}
private SurfaceView mSurfaceView;
private SurfaceHolder mSurfaceHolder;
public void onClick(View arg0) {
mCamera.takePicture(null, mPictureCallback, mPictureCallback);
}
private static File createDir() throws IOException {
String nameDir = "rebuilder";
File extStorageDir = Environment.getExternalStorageDirectory();
File sdImageMainDirectory = extStorageDir; //new File("/sdcard");
File dirFile = new File(sdImageMainDirectory.getPath()+"/"+nameDir);
boolean fileExisted = dirFile.exists();
if(!fileExisted) {
dirFile.mkdirs();
}
return dirFile;
}
private static File createFile(String name, File dirFile) throws IOException {
int counter = 1;
String fileName = name + counter+".jpg";
File imageFile = new File(dirFile.getPath()+"/"+fileName);
while(imageFile.exists()) {
counter = counter + 1;
fileName = name + counter+".jpg";
imageFile = new File(dirFile.getPath()+"/"+fileName);
}
imageFile.createNewFile();
return imageFile;
}
static Bitmap image1 = null;
static Bitmap image2 = null;
public static void blendTest(Bitmap myImage) throws IOException {
if(image1 == null && image2 == null) {
image1 = myImage;
return;
}
if(image1 != null && image2 != null) {
image2 = null;
image1 = myImage;
return;
}
if(image1 != null && image2 == null) {
image2 = myImage;
}
int width = Math.min(image1.getWidth(), image2.getWidth());
int height = Math.min(image1.getHeight(), image2.getHeight());
int[][] pixels1 = new int[width][height];
int[][] pixels2 = new int[width][height];
for(int i = 0; i < width; i++) {
for(int j = 0; j < height; j++) {
pixels1[i][j] = image1.getPixel(i, j);
}
}
for(int i = 0; i < width; i++) {
for(int j = 0; j < height; j++) {
pixels2[i][j] = image2.getPixel(i, j);
}
}
Bitmap image3 = Bitmap.createBitmap(width, height, image1.getConfig());
for(int i = 0; i < width; i++) {
for(int j = 0; j < height; j++) {
int color1 = pixels1[i][j];
int color2 = pixels2[i][j];
int red1 = Color.red(color1);
int red2 = Color.red(color2);
int green1 = Color.green(color1);
int green2 = Color.green(color2);
int blue1 = Color.blue(color1);
int blue2 = Color.blue(color2);
int newColor = Color.rgb((red1 + red2)/2, (green1 + green2)/2, (blue1 + blue2)/2);
image3.setPixel(i, j, newColor);
}
}
File dirFile = createDir();
File newBlend = createFile("blend", dirFile);
FileOutputStream fileOutputStream = new FileOutputStream(newBlend);
BufferedOutputStream bos = new BufferedOutputStream(
fileOutputStream);
image3.compress(CompressFormat.JPEG, 100, bos);
bos.flush();
bos.close();
}
// public static void setCameraDisplayOrientation(Activity activity,
// int cameraId, android.hardware.Camera camera) {
// android.hardware.Camera.CameraInfo info =
// new android.hardware.Camera.CameraInfo();
// android.hardware.Camera.getCameraInfo(cameraId, info);
// int rotation = activity.getWindowManager().getDefaultDisplay()
// .getRotation();
// int degrees = 0;
// switch (rotation) {
// case Surface.ROTATION_0: degrees = 0; break;
// case Surface.ROTATION_90: degrees = 90; break;
// case Surface.ROTATION_180: degrees = 180; break;
// case Surface.ROTATION_270: degrees = 270; break;
// }
//
// int result;
// if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
// result = (info.orientation + degrees) % 360;
// result = (360 - result) % 360; // compensate the mirror
// } else { // back-facing
// result = (info.orientation - degrees + 360) % 360;
// }
// camera.setDisplayOrientation(result);
// }
public static boolean storeByteImage(Context mContext, byte[] imageData, int quality) {
FileOutputStream fileOutputStream = null;
try {
File dirFile = createDir();
File imageFile = createFile("rebuilder", dirFile);
BitmapFactory.Options options=new BitmapFactory.Options();
options.inSampleSize = 5; //5
options.inDither = false; // Disable Dithering mode
options.inPurgeable = true; // Tell to gc that whether it needs free
// memory, the Bitmap can be cleared
options.inInputShareable = true; // Which kind of reference will be
// used to recover the Bitmap
// data after being clear, when
// it will be used in the future
options.inTempStorage = new byte[32 * 1024];
options.inPreferredConfig = Bitmap.Config.RGB_565;
Bitmap myImage = BitmapFactory.decodeByteArray(imageData, 0,
imageData.length,options);
int orientation;
// others devices
if(myImage.getHeight() < myImage.getWidth()){
orientation = 90;
} else {
orientation = 0;
}
Bitmap bMapRotate;
if (orientation != 0) {
Matrix matrix = new Matrix();
matrix.postRotate(orientation);
bMapRotate = Bitmap.createBitmap(myImage, 0, 0, myImage.getWidth(),
myImage.getHeight(), matrix, true);
} else
bMapRotate = Bitmap.createScaledBitmap(myImage, myImage.getWidth(),
myImage.getHeight(), true);
//blendTest(myImage);
fileOutputStream = new FileOutputStream(imageFile);
BufferedOutputStream bos = new BufferedOutputStream(
fileOutputStream);
bMapRotate.compress(CompressFormat.JPEG, quality, bos);
if (bMapRotate != null) {
bMapRotate.recycle();
bMapRotate = null;
}
bos.flush();
bos.close();
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
System.out.println("SD Card not ready");
}
return true;
}
}
Camera Hardware permissions and orientation landscape are set in the android manifest.
Please help me here.
Did you install a custom ROM on your device? If so, that might be the issue.
Uncomment the method and all occurrences of it (as in the OP's code)
// public static void setCameraDisplayOrientation(Activity activity,
// int cameraId, android.hardware.Camera camera) {
// android.hardware.Camera.CameraInfo info =
// new android.hardware.Camera.CameraInfo();
// android.hardware.Camera.getCameraInfo(cameraId, info);
// int rotation = activity.getWindowManager().getDefaultDisplay()
// .getRotation();
// int degrees = 0;
// switch (rotation) {
// case Surface.ROTATION_0: degrees = 0; break;
// case Surface.ROTATION_90: degrees = 90; break;
// case Surface.ROTATION_180: degrees = 180; break;
// case Surface.ROTATION_270: degrees = 270; break;
// }
//
// int result;
// if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
// result = (info.orientation + degrees) % 360;
// result = (360 - result) % 360; // compensate the mirror
// } else { // back-facing
// result = (info.orientation - degrees + 360) % 360;
// }
// camera.setDisplayOrientation(result);
// }
Related
I have applied the answers from the following post, but I was unable to solve the problem.
OpenCV camera orientation issue
I am getting the following exception.
Exception locking surface
java.lang.IllegalArgumentException
at android.view.Surface.nativeLockCanvas(Native Method)
at android.view.Surface.lockCanvas(Surface.java:264)
at android.view.SurfaceView$4.internalLockCanvas(SurfaceView.java:825)
at android.view.SurfaceView$4.lockCanvas(SurfaceView.java:793)
at org.opencv.android.CameraBridgeViewBase.deliverAndDrawFrame(CameraBridgeViewBase.java:403)
at org.opencv.android.JavaCameraView$CameraWorker.run(JavaCameraView.java:365)
at java.lang.Thread.run(Thread.java:818)
Here is the OpenCV library function which throws the exception:
// Converts the delivered camera frame to a Bitmap and blits it onto the
// SurfaceView's canvas, centered (optionally scaled by mScale), then draws
// the FPS overlay.
// NOTE(review): getHolder().lockCanvas() throws IllegalArgumentException when
// the underlying Surface is no longer valid (destroyed, or its fixed buffer
// size no longer matches) — presumably the "Exception locking surface" crash
// reported above; confirm the surface is valid before locking.
protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
Mat modified;
// Let the listener transform the frame; fall back to the raw RGBA mat.
if (mListener != null) {
modified = mListener.onCameraFrame(frame);
} else {
modified = frame.rgba();
}
boolean bmpValid = true;
Canvas canvas = null ;
if (modified != null) {
try {
// Can fail on size/type mismatch between the mat and the cached bitmap.
Utils.matToBitmap(modified, mCacheBitmap);
canvas = getHolder().lockCanvas();
} catch(Exception e) {
Log.e(TAG, "Mat type: " + modified);
Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmpValid = false;
}
}
if (bmpValid && mCacheBitmap != null) {
// canvas is null when lockCanvas() failed or modified was null.
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
Log.d(TAG, "mStretch value: " + mScale);
if (mScale != 0) {
// Scaled draw: destination rect is the bitmap scaled by mScale, centered.
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2),
(int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2 + mScale*mCacheBitmap.getWidth()),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2 + mScale*mCacheBitmap.getHeight())), null);
} else {
// Unscaled draw: 1:1 blit, centered on the canvas.
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
(canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()), null);
}
if (mFpsMeter != null) {
mFpsMeter.measure();
mFpsMeter.draw(canvas, 20, 30);
}
getHolder().unlockCanvasAndPost(canvas);
}
}
}
Here is my main activity, where I use the OpenCV library to call this method:
package org.opencv.samples.facedetect;
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.graphics.Camera;
import android.media.AudioManager;
import android.net.ConnectivityManager;
import android.os.Build;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.provider.Settings;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import org.opencv.samples.facedetect.DetectionBasedTracker;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
public class FdActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2 , SharedPreferences.OnSharedPreferenceChangeListener {
private static final String TAG = "OCVSample::Activity";
private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);
public static final int JAVA_DETECTOR = 0;
public static final int NATIVE_DETECTOR = 1;
//private MenuItem mItemExit;
private MenuItem mItemSettings ;
private MenuItem showCounterValues ;
private TextView faceCounterTv ;
private Button resetButton ;
private Button savebtn ;
private Button quitButton ;
private Mat mRgba;
private Mat mGray;
private File mCascadeFile;
private CascadeClassifier mJavaDetector;
private DetectionBasedTracker mNativeDetector;
private int mDetectorType = JAVA_DETECTOR;
// private String[] mDetectorName;
private float mRelativeFaceSize = 0.2f;
private int mAbsoluteFaceSize = 0;
private float scaleFactor ;
private int minNeighbour ;
private int delayTime ;
private boolean isFaces_detect ;
private boolean isFaces_detect_pre ;
private boolean count_Face_Logic ;
private float countFace = 0.0f ;
private long startTime ;
private AudioManager mAudioManager ;
private static final int MY_PERMISSIONS_REQUEST_ACCOUNTS = 1;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
#Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS: {
Log.i(TAG, "OpenCV loaded successfully");
// Load native library after(!) OpenCV initialization
System.loadLibrary("detectionBasedTracker");
try {
// load cascade file from application resources
InputStream is = getResources().openRawResource(R.raw.haarcascade_frontalface_default);
File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
mCascadeFile = new File(cascadeDir, "haarcascade_frontalface_default.xml");
FileOutputStream os = new FileOutputStream(mCascadeFile);
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
os.close();
mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
if (mJavaDetector.empty()) {
Log.e(TAG, "Failed to load cascade classifier");
mJavaDetector = null;
} else
Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0);
cascadeDir.delete();
} catch (IOException e) {
e.printStackTrace();
Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
}
mOpenCvCameraView.enableView();
}
break;
default: {
super.onManagerConnected(status);
}
break;
}
}
};
public FdActivity() {
isFaces_detect = false ;
isFaces_detect_pre = false ;
count_Face_Logic = true ;
startTime = System.currentTimeMillis();
mAbsoluteFaceSize = 200 ;
scaleFactor = 1.2f ;
minNeighbour = 1 ;
delayTime = 1 ;
Log.i(TAG, "Instantiated new " + this.getClass());
}
/**
* Called when the activity is first created.
*/
#Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
//getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.face_detect_surface_view);
setSupportActionBar((Toolbar) findViewById(R.id.toolbar));
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.fd_activity_surface_view);
faceCounterTv = (TextView) findViewById(R.id.faceCountertv);
resetButton = (Button) findViewById(R.id.resetbtn);
savebtn = (Button) findViewById(R.id.savebtn);
quitButton = (Button) findViewById(R.id.quitbtn);
mAudioManager = (AudioManager) getSystemService(AUDIO_SERVICE);
resetButton.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View view) {
countFace = 0 ;
Toast.makeText(getApplicationContext() , "Reset the Face Counter" , Toast.LENGTH_LONG).show();
}
});
savebtn.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View view) {
saveFaceCounter();
Toast.makeText(getApplicationContext() , "Counter Value Saved" , Toast.LENGTH_SHORT).show();
}
});
quitButton.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View view) {
System.exit(0);
}
});
mOpenCvCameraView.setVisibility(CameraBridgeViewBase.VISIBLE);
// mOpenCvCameraView.setAlpha(0);
mOpenCvCameraView.setCameraIndex(1);
mOpenCvCameraView.setCvCameraViewListener(this);
//if (checkAndRequestPermissions()){
// Toast.makeText(getApplicationContext() , "OnCreate" , Toast.LENGTH_LONG).show();
//setSharedPreferences();
//}
// check current state first
// boolean state = isAirplaneMode();
// // toggle the state
// if (state)
// toggleAirplaneMode(0, state);
// else
// toggleAirplaneMode(1, state);
}
#Override
public void onPause() {
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
#Override
public void onResume() {
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
mOpenCvCameraView.disableView();
PreferenceManager.getDefaultSharedPreferences(this).unregisterOnSharedPreferenceChangeListener(this);
}
public void onCameraViewStarted(int width, int height) {
mGray = new Mat();
mRgba = new Mat();
}
public void onCameraViewStopped() {
mGray.release();
mRgba.release();
}
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
// Mat mRgbaT = mRgba.t();
// Core.flip(mRgba.t(), mRgbaT, 1);
// Imgproc.resize(mRgbaT, mRgbaT, mRgba.size());
mGray = inputFrame.gray();
//Core.transpose(mGray, mGray);
//Core.flip(mGray, mGray, 0);
if (mAbsoluteFaceSize == 0) {
int height = mGray.rows();
if (Math.round(height * mRelativeFaceSize) > 0) {
mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
}
mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
}
MatOfRect faces = new MatOfRect();
if (mDetectorType == JAVA_DETECTOR) {
if (mJavaDetector != null)
mJavaDetector.detectMultiScale(mGray, faces, scaleFactor, minNeighbour, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
} else if (mDetectorType == NATIVE_DETECTOR) {
if (mNativeDetector != null)
mNativeDetector.detect(mGray, faces);
} else {
Log.e(TAG, "Detection method is not selected!");
}
Rect[] facesArray = faces.toArray();
for (int i = 0; i < facesArray.length; i++) {
Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
}
countDetectedFace(faces);
runOnUiThread(new Runnable() {
#Override
public void run() {
faceCounterTv.setText(String.valueOf(countFace));
}
});
return mRgba;
}
public void countDetectedFace(MatOfRect faces){
// do{
// This block is to make sure the it only count face when it appears. e.g. : no detected face --> face --> no detected face (count as 1)
if (faces.empty()){
isFaces_detect = isFaces_detect_pre = false ;
}
else{
isFaces_detect = true ;
}
// Only count when previous frame = 0 and current frame = 1. Eliminate counting when successive frame have face detected
if ((isFaces_detect_pre == false) && (isFaces_detect == true) && (count_Face_Logic == true)){
countFace += 0.25 ; // four times it detect face equal to 1
startTime = System.currentTimeMillis(); // store new time value so that it do not count every miliseconds
isFaces_detect_pre = true ;
Log.d(TAG , String.valueOf(countFace));
}
if ((System.currentTimeMillis() - startTime) < delayTime){ // to make sure it doesnt count every frame, buffer of 1 seconds
count_Face_Logic = false ;
}
else{
count_Face_Logic = true ;
}
// }while(!isAppExit);
}
}
How can I get rid of this exception?
I have a custom camera application, and everything works fine. But whenever the app gets paused (when onPause or onDestroy is called), the camera is released; afterwards, when onResume is called and the capture button is clicked to take an image, my application crashes. How do I fix this? Please help me — thanks in advance.
CameraActivity Code
package com.example.skmishra.plates.Activities;
import android.app.ActionBar;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.hardware.Camera;
import android.hardware.SensorManager;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.MotionEvent;
import android.view.OrientationEventListener;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.ZoomControls;
import com.example.skmishra.plates.Asyncs.CameraAsync;
import com.example.skmishra.plates.CameraHandler;
import com.example.skmishra.plates.Library.Fonts;
import com.example.skmishra.plates.R;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
/**
* Created by skmishra on 12/28/2015.
*/
public class camera extends Activity {
private static final int RESULT_LOAD_IMAGE = 200 ;
private Camera mCamera=null;            // opened asynchronously by CameraAsync; null after release
private CameraHandler surface_view;     // preview SurfaceView wrapper
public static final int MEDIA_TYPE_IMAGE = 1;
public static final int MEDIA_TYPE_VIDEO = 2;
public static final String TAG = "Aloo";
int toRotate = 90;                      // JPEG rotation requested at capture time, set by the orientation listener
public int currentCameraID = 0;         // 0 = back camera
OrientationEventListener myOrientationEventListener;
private ZoomControls zoomControls;
private double mDist;
Boolean imageSwitchClicked = false;     // toggled by switchC(); true while the front camera is selected
Boolean mShowFlash = false;             // toggled by flash_onOf()
ImageView mSwitch_cam;
ImageView mFlashBut;
FrameLayout preview;                    // container the camera preview is added to
CameraAsync mCamAsync;                  // background task that opens the camera
ImageView imageGallery;
TextView raleway;
TextView headerCameraText;
Fonts mFonts;
int permCode=4;                         // set to 15 after the camera is torn down
Camera.Parameters params;               // cached camera parameters, applied in takePH()
String recievedType=null;               // purpose string passed in via the launching Intent
#Override
protected void onCreate(Bundle savedInstanceState) {
Log.e("I Called Thus "," cda");
mCamAsync=new CameraAsync(this);
mCamAsync.execute();
super.onCreate(savedInstanceState);
setContentView(R.layout.camera);
imageGallery = (ImageView) findViewById(R.id.select_gallery);
mFonts = new Fonts();
preview = (FrameLayout) findViewById(R.id.camera_preview);
mFlashBut = (ImageView) findViewById(R.id.flash);
mSwitch_cam = (ImageView) findViewById(R.id.white_switch);
raleway = (TextView) findViewById(R.id.textView2);
headerCameraText = (TextView) findViewById(R.id.imageHead);
// mFonts.setRalewayBold(this, headerCameraText);
Intent gets = getIntent();
recievedType = gets.getExtras().getString("recievedCameraPurpose");
handleHeaderText(recievedType);
mFonts.setRalewayBold(this, raleway);
myOrientationEventListener
= new OrientationEventListener(this, SensorManager.SENSOR_DELAY_NORMAL) {
#Override
public void onOrientationChanged(int arg0) {
int rotation = arg0;
if (rotation > 340) {
if (currentCameraID == 0) {
toRotate = 90;
} else {
toRotate =270;
Log.e("POSITION_TITLT", "-> Potrait Front camera");
}
} else if (rotation < 80 && rotation > 30) {
toRotate = 180;
Log.e("POSITION_TILT", "-> Landscape Right " + rotation);
} else if (rotation < 280 && rotation > 240) {
toRotate = 0;
Log.e("POSITION_TILT", "-> Landscape Left " + rotation);
}
}
};
if (myOrientationEventListener.canDetectOrientation()) {
myOrientationEventListener.enable();
} else {
Toast.makeText(this, "Can't DetectOrientation", Toast.LENGTH_LONG).show();
finish();
}
}
/** @return true if this device reports any camera hardware. */
private boolean checkifCamera(Context context) {
    // if/else returning true/false collapsed to a single boolean return.
    return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA);
}
/**
 * Releases any camera this activity already holds, then opens the default
 * (back) camera.
 *
 * @return the opened Camera, or null if opening failed (error is toasted)
 */
public Camera getCameraInstance() {
Camera c = null;
try {
// Release first: Camera.open() fails if we still hold the camera.
releaseCameraAndPreview();
c = Camera.open();
} catch (Exception e) {
Toast.makeText(this, "Print error" + e.getMessage(), Toast.LENGTH_LONG).show();
}
return c;
}
/**
 * Callback from CameraAsync once the camera has been opened off-thread:
 * stores the camera, builds the preview surface, and attaches it to the
 * preview container. (Name misspelling kept — callers depend on it.)
 */
public void onCompleteInstanceCameraAysnc(Camera camera)
{
mCamera = camera;
surface_view = new CameraHandler(this, mCamera);
params = mCamera.getParameters();
preview.addView(surface_view);
set_image_gallery();
}
/**
 * Toggles between front and back camera (button onClick handler). The switch
 * icon's alpha doubles as the "front camera selected" indicator.
 */
public void switchC(View view) {
if (!imageSwitchClicked) {
mSwitch_cam.setAlpha(1.0f);
imageSwitchClicked = true;
} else {
mSwitch_cam.setAlpha(0.5f);
imageSwitchClicked = false;
}
// setCameraID() is not defined in this listing — presumably updates
// currentCameraID; confirm against the full source.
setCameraID();
mCamera = surface_view.switchCamera();
// Re-read parameters: they belong to the newly selected camera.
params=mCamera.getParameters();
}
/**
 * Toggles the flash flag and button alpha (button onClick handler).
 * NOTE(review): the modified params are not pushed to the camera here —
 * they only take effect when takePH() calls mCamera.setParameters(params).
 */
public void flash_onOf(View view) {
if (!mShowFlash) {
params.setFlashMode(Camera.Parameters.FLASH_MODE_ON);
mFlashBut.setAlpha(1.0f);
mShowFlash = true;
} else {
params.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
mFlashBut.setAlpha(0.5f);
mShowFlash = false;
}
}
/**
 * Stops the preview and releases the camera, nulling mCamera so other code
 * can tell the camera is gone. Safe to call when the camera is already null.
 */
private void releaseCameraAndPreview() {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
// NOTE(review): lock() before release() is unnecessary — the app already
// owns the camera unless it was handed to a MediaRecorder; confirm.
mCamera.lock();
mCamera.release();
mCamera=null;
}
else
{
Log.e("Cert","Lerts");
}
}
#Override
protected void onResume() {
super.onResume();
}
#Override
protected void onDestroy() {
Log.e("LLL", "Dessssdccc");
super.onDestroy();
try {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.lock();
myOrientationEventListener.disable();
mCamera.release();
mCamera=null;
permCode=15;
} catch (Exception e) {
e.printStackTrace();
}
}
#Override
protected void onPause() {
Log.e("LLL", "Dessssdccc");
super.onPause();
try {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.lock();
myOrientationEventListener.disable();
mCamera.release();
mCamera=null;
permCode=15;
} catch (Exception e) {
e.printStackTrace();
}
}
/**
 * Capture-button handler: applies flash and rotation parameters, then takes
 * the picture (delivered to mPicture).
 * NOTE(review): mCamera is null after onPause() released it — this is where
 * the post-resume crash surfaces.
 */
public void takePH(View view) {
// Flash is only forced on for the back camera (front cameras rarely have one).
if(mShowFlash && !imageSwitchClicked)
{
params.setFlashMode(Camera.Parameters.FLASH_MODE_ON);
}
// "rotation" asks the camera to rotate/tag the JPEG per the tilt listener.
params.set("rotation", toRotate);
mCamera.setParameters(params);
mCamera.takePicture(null, null, mPicture);
}
Camera.PictureCallback mPicture = new Camera.PictureCallback() {
#Override
public void onPictureTaken(byte[] data, Camera camera) {
File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
if (pictureFile == null) {
Log.d(TAG, "Error creating media file, check storage permissions: ");
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.close();
} catch (FileNotFoundException e) {
Log.d(TAG, "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d(TAG, "Error accessing file: " + e.getMessage());
}
Intent i=new Intent(getApplicationContext(),ShowOut.class);
i.putExtra("purpose",recievedType);
i.putExtra("img-url",pictureFile.toString());
startActivity(i);
}
};
/**
 * Create a file Uri for saving an image or video.
 *
 * NOTE(review): getOutputMediaFile() can return null (unknown type, or the
 * output directory could not be created); Uri.fromFile(null) would then throw
 * a NullPointerException — confirm callers only pass valid media types.
 */
private static Uri getOutputMediaFileUri(int type) {
return Uri.fromFile(getOutputMediaFile(type));
}
/**
 * Builds a timestamped output File under Pictures/Plates for an image
 * (MEDIA_TYPE_IMAGE) or video (MEDIA_TYPE_VIDEO); returns null for unknown
 * types or when the directory cannot be created.
 */
private static File getOutputMediaFile(int type) {
    // To be safe, you should check that the SDCard is mounted
    // using Environment.getExternalStorageState() before doing this.
    File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
            Environment.DIRECTORY_PICTURES), "Plates");
    // Shared location that persists after the app is uninstalled.
    if (!mediaStorageDir.exists() && !mediaStorageDir.mkdirs()) {
        Log.d("MyCameraApp", "failed to create directory");
        return null;
    }
    // Timestamped file name, e.g. IMG_20240101_120000.jpg
    String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
    String fileName;
    if (type == MEDIA_TYPE_IMAGE) {
        fileName = "IMG_" + timeStamp + ".jpg";
    } else if (type == MEDIA_TYPE_VIDEO) {
        fileName = "VID_" + timeStamp + ".mp4";
    } else {
        return null;
    }
    return new File(mediaStorageDir.getPath() + File.separator + fileName);
}
/** Sets the camera header caption (or hides the gallery) per capture purpose. */
public void handleHeaderText(String type) {
    Log.e("Type",type);
    // Always clear first; unknown purposes leave the header empty.
    headerCameraText.setText("");
    if (type.equals("ADD_COVER_PLATES")) {
        headerCameraText.setText("Take a cover image for your plate");
    } else if (type.equals("ADD_PROFILE_USER")) {
        imageGallery.setVisibility(View.GONE);
    } else if (type.equals("PLATE_UPLOAD_SINGLETON")) {
        headerCameraText.setText("Click an image for a plate");
    }
}
/**
 * Flips the target camera id and picks the JPEG "rotation" hint used by
 * takePH(): 270 for the front camera, 90 for the back camera.
 */
public void setCameraID() {
    boolean wasFacingBack =
            (currentCameraID == Camera.CameraInfo.CAMERA_FACING_BACK);
    currentCameraID = wasFacingBack ? Camera.CameraInfo.CAMERA_FACING_FRONT
                                    : Camera.CameraInfo.CAMERA_FACING_BACK;
    toRotate = wasFacingBack ? 270 : 90;
}
#Override
public boolean onTouchEvent(MotionEvent event) {
// Get the pointer ID
Camera.Parameters params = mCamera.getParameters();
int action = event.getAction();
if (event.getPointerCount() > 1) {
// handle multi-touch events
if (action == MotionEvent.ACTION_POINTER_DOWN) {
mDist = getFingerSpacing(event);
} else if (action == MotionEvent.ACTION_MOVE && params.isZoomSupported()) {
mCamera.cancelAutoFocus();
handleZoom(event, params);
}
} else {
// handle single touch events
if (action == MotionEvent.ACTION_UP) {
handleFocus(event, params);
}
}
return true;
}
/**
 * Pinch-zoom: one zoom step per MOVE event in the pinch direction,
 * clamped to [0, maxZoom], then applied to the camera.
 */
private void handleZoom(MotionEvent event, Camera.Parameters params) {
    double currentDist = getFingerSpacing(event);
    int zoom = params.getZoom();
    if (currentDist > mDist && zoom < params.getMaxZoom()) {
        zoom++;          // fingers moving apart -> zoom in
    } else if (currentDist < mDist && zoom > 0) {
        zoom--;          // fingers moving together -> zoom out
    }
    mDist = currentDist; // remember baseline for the next MOVE event
    params.setZoom(zoom);
    mCamera.setParameters(params);
}
public void handleFocus(MotionEvent event, Camera.Parameters params) {
int pointerId = event.getPointerId(0);
int pointerIndex = event.findPointerIndex(pointerId);
// Get the pointer's current position
float x = event.getX(pointerIndex);
float y = event.getY(pointerIndex);
List<String> supportedFocusModes = params.getSupportedFocusModes();
if (supportedFocusModes != null && supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
mCamera.autoFocus(new Camera.AutoFocusCallback() {
#Override
public void onAutoFocus(boolean b, Camera camera) {
// currently set to auto-focus on single touch
}
});
}
}
/**
 * Returns the Euclidean distance between the first two pointers of a
 * multi-touch event.
 */
private double getFingerSpacing(MotionEvent event) {
    float dx = event.getX(0) - event.getX(1);
    float dy = event.getY(0) - event.getY(1);
    return Math.sqrt(dx * dx + dy * dy);
}
/**
 * Finds the most recently taken picture in MediaStore and shows it as the
 * gallery thumbnail.
 * FIX: query() can return null (provider unavailable) — previously an NPE;
 * the Cursor is now closed in a finally block instead of leaking when
 * decoding throws.
 */
public void set_image_gallery() {
    String[] projection = new String[]{
            MediaStore.Images.ImageColumns._ID,
            MediaStore.Images.ImageColumns.DATA,
            MediaStore.Images.ImageColumns.BUCKET_DISPLAY_NAME,
            MediaStore.Images.ImageColumns.DATE_TAKEN,
            MediaStore.Images.ImageColumns.MIME_TYPE
    };
    final Cursor cursor = getContentResolver()
            .query(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, projection, null,
                    null, MediaStore.Images.ImageColumns._ID + " DESC");
    if (cursor == null) {
        return;
    }
    try {
        if (cursor.moveToFirst()) {
            String imageLocation = cursor.getString(1); // DATA column
            File imageFile = new File(imageLocation);
            if (imageFile.exists()) { // TODO: is there a better way to do this?
                Bitmap bm = decodeFile(imageFile);
                imageGallery.setImageBitmap(bm);
            }
        }
    } finally {
        cursor.close(); // always release, even if decoding throws
    }
}
/**
 * Decodes an image file downsampled by the largest power-of-two factor that
 * keeps both dimensions >= 490px. Returns null if the file cannot be read.
 * FIX: both FileInputStreams were never closed (file-descriptor leak);
 * they are now closed in a finally block via a small helper.
 */
public Bitmap decodeFile(File f) {
    FileInputStream boundsIn = null;
    FileInputStream decodeIn = null;
    try {
        // Pass 1: decode only the image dimensions.
        BitmapFactory.Options o = new BitmapFactory.Options();
        o.inJustDecodeBounds = true;
        boundsIn = new FileInputStream(f);
        BitmapFactory.decodeStream(boundsIn, null, o);
        // The new size we want to scale to.
        final int REQUIRED_SIZE = 490;
        // Find the correct scale value; it must be a power of 2.
        int scale = 1;
        while (o.outWidth / scale / 2 >= REQUIRED_SIZE && o.outHeight / scale / 2 >= REQUIRED_SIZE) {
            scale *= 2;
        }
        // Pass 2: decode for real with the computed sample size.
        BitmapFactory.Options o2 = new BitmapFactory.Options();
        o2.inSampleSize = scale;
        decodeIn = new FileInputStream(f);
        return BitmapFactory.decodeStream(decodeIn, null, o2);
    } catch (FileNotFoundException e) {
        return null; // keep original contract: unreadable file -> null
    } finally {
        closeQuietly(boundsIn);
        closeQuietly(decodeIn);
    }
}

/** Best-effort close; the decode result is already determined by this point. */
private static void closeQuietly(java.io.Closeable c) {
    if (c != null) {
        try {
            c.close();
        } catch (IOException ignored) {
        }
    }
}
/** Opens the system gallery picker; the result arrives in onActivityResult(). */
public void imagePick(View view)
{
    Intent pickIntent = new Intent(Intent.ACTION_PICK);
    pickIntent.setType("image/*");
    startActivityForResult(pickIntent, RESULT_LOAD_IMAGE);
}
#Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == RESULT_LOAD_IMAGE && resultCode == RESULT_OK && null != data) {
Uri selectedImage = data.getData();
String[] filePathColumn = {MediaStore.Images.Media.DATA};
Cursor cursor = getContentResolver().query(selectedImage,
filePathColumn, null, null, null);
cursor.moveToFirst();
int columnIndex = cursor.getColumnIndex(filePathColumn[0]);
String picturePath = cursor.getString(columnIndex);
cursor.close();
Intent transfer=null;
if(recievedType.equals("ADD_COVER_PLATES")) {
transfer = new Intent(this, create_plates.class);
}
else if(recievedType.equals("PLATE_UPLOAD_SINGLETON"))
{
transfer=new Intent(this,plate_select_upload.class);
}
transfer.putExtra("imagUrl",picturePath);
startActivity(transfer);
}
}
}
CameraHandler code:
package com.example.skmishra.plates;
import android.content.Context;
import android.hardware.Camera;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import java.io.IOException;
/**
* Created by skmishra on 12/28/2015.
*/
public class CameraHandler extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder mHolder;
private Camera mCamera=null;
public int currentCameraID=0;
public CameraHandler(Context context,Camera camera) {
super(context);
mCamera=camera;
mHolder=getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_GPU);
}
#Override
public void surfaceCreated(SurfaceHolder holder) {
try {
if(mCamera==null)
{
mCamera=Camera.open();
}
mCamera.setPreviewDisplay(holder);
Camera.Parameters p = mCamera.getParameters();
}
catch (IOException e)
{
Log.d("--DS", "Error setting camera preview: " + e.getMessage());
}
}
#Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
fixOr();
if(mHolder.getSurface()==null)
{
return;
}
if (mHolder.getSurface() == null){
// preview surface does not exist
return;
}
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
} catch (Exception e){
Log.d("--DS", "Error starting camera preview: " + e.getMessage());
}
}
#Override
public void surfaceDestroyed(SurfaceHolder holder) {
// TODO Auto-generated method stub
mCamera.release();
mCamera = null;
}
public void fixOr()
{
mCamera.stopPreview();
mCamera.setDisplayOrientation(90);
mCamera.startPreview();
}
public Camera switchCamera() {
mCamera.stopPreview();
mCamera.release();
if(currentCameraID==Camera.CameraInfo.CAMERA_FACING_BACK)
{
currentCameraID = Camera.CameraInfo.CAMERA_FACING_FRONT;
}
else
{
currentCameraID=Camera.CameraInfo.CAMERA_FACING_BACK;
}
mCamera=Camera.open(currentCameraID);
fixOr();
try {
mCamera.setPreviewDisplay(mHolder);
} catch (IOException e) {
e.printStackTrace();
}
mCamera.startPreview();
return mCamera;
}
}
** UPDATE **
Stack trace:
Process: com.example.skmishra.plates, PID: 10575
java.lang.RuntimeException: Fail to connect to camera service
at android.hardware.Camera.<init>(Camera.java:545)
at android.hardware.Camera.open(Camera.java:403)
at com.example.skmishra.plates.CameraHandler.surfaceCreated(CameraHandler.java:35)
at android.view.SurfaceView.updateWindow(SurfaceView.java:599)
at android.view.SurfaceView.onWindowVisibilityChanged(SurfaceView.java:243)
at android.view.View.dispatchWindowVisibilityChanged(View.java:9034)
at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:1275)
at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:1275)
at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:1275)
at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:1275)
at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:1275)
at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:1319)
at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1062)
at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:5873)
at android.view.Choreographer$CallbackRecord.run(Choreographer.java:767)
at android.view.Choreographer.doCallbacks(Choreographer.java:580)
at android.view.Choreographer.doFrame(Choreographer.java:550)
at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:753)
at android.os.Handler.handleCallback(Handler.java:739)
at android.os.Handler.dispatchMessage(Handler.java:95)
at android.os.Looper.loop(Looper.java:135)
at android.app.ActivityThread.main(ActivityThread.java:5753)
at java.lang.reflect.Method.invoke(Native Method)
at java.lang.reflect.Method.invoke(Method.java:372)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1405)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1200)
After a lot of research, I finally found out what the problem was. The problem was with the FrameLayout: I had to remove the preview view in onPause and recreate it in onResume.
#Override
protected void onResume() {
super.onResume();
mCamAsync = new CameraAsync(this);//Async task to get the camera instance
mCamAsync.execute();
}
#Override
protected void onPause() {
super.onPause();
releaseCameraAndPreview();
preview = (FrameLayout) findViewById(R.id.camera_preview);
preview.removeViewAt(0);
}
** EDIT **
I also removed the execution of CameraAsync from onCreate, which means I now instantiate the camera only in onResume.
I am using the JavaCV library to record video on Android. They have provided a sample video-recording activity, but there is some bug: I could not figure out what I am doing wrong or what is missing that causes it.
package org.bytedeco.javacv.recordactivity;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.PowerManager;
import android.util.Log;
import android.view.Display;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import java.io.IOException;
import java.nio.ShortBuffer;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import static org.bytedeco.javacpp.opencv_core.*;
public class RecordActivity extends Activity implements OnClickListener {
private final static String CLASS_LABEL = "RecordActivity";
private final static String LOG_TAG = CLASS_LABEL;
private PowerManager.WakeLock mWakeLock;
private String ffmpeg_link = "/mnt/sdcard/stream.flv";
long startTime = 0;
boolean recording = false;
private volatile FFmpegFrameRecorder recorder;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 320;
private int imageHeight = 240;
private int frameRate = 30;
/* audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
volatile boolean runAudioThread = true;
/* video data getting thread */
private Camera cameraDevice;
private CameraView cameraView;
private IplImage yuvIplimage = null;
/* layout setting */
private final int bg_screen_bx = 232;
private final int bg_screen_by = 128;
private final int bg_screen_width = 700;
private final int bg_screen_height = 500;
private final int bg_width = 1123;
private final int bg_height = 715;
private final int live_width = 640;
private final int live_height = 480;
private int screenWidth, screenHeight;
private Button btnRecorderControl;
/** The number of seconds in the continuous record loop (or 0 to disable loop). */
final int RECORD_LENGTH = 10;
IplImage[] images;
long[] timestamps;
ShortBuffer[] samples;
int imagesIndex, samplesIndex;
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
setContentView(R.layout.main);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
initLayout();
}
#Override
protected void onResume() {
super.onResume();
if (mWakeLock == null) {
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
}
}
#Override
protected void onPause() {
super.onPause();
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
#Override
protected void onDestroy() {
super.onDestroy();
recording = false;
if (cameraView != null) {
cameraView.stopPreview();
}
if(cameraDevice != null) {
cameraDevice.stopPreview();
cameraDevice.release();
cameraDevice = null;
}
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
private void initLayout() {
/* get size of screen */
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
screenWidth = display.getWidth();
screenHeight = display.getHeight();
RelativeLayout.LayoutParams layoutParam = null;
LayoutInflater myInflate = null;
myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
RelativeLayout topLayout = new RelativeLayout(this);
setContentView(topLayout);
LinearLayout preViewLayout = (LinearLayout) myInflate.inflate(R.layout.main, null);
layoutParam = new RelativeLayout.LayoutParams(screenWidth, screenHeight);
topLayout.addView(preViewLayout, layoutParam);
/* add control button: start and stop */
btnRecorderControl = (Button) findViewById(R.id.recorder_control);
btnRecorderControl.setText("Start");
btnRecorderControl.setOnClickListener(this);
/* add camera view */
int display_width_d = (int) (1.0 * bg_screen_width * screenWidth / bg_width);
int display_height_d = (int) (1.0 * bg_screen_height * screenHeight / bg_height);
int prev_rw, prev_rh;
if (1.0 * display_width_d / display_height_d > 1.0 * live_width / live_height) {
prev_rh = display_height_d;
prev_rw = (int) (1.0 * display_height_d * live_width / live_height);
} else {
prev_rw = display_width_d;
prev_rh = (int) (1.0 * display_width_d * live_height / live_width);
}
layoutParam = new RelativeLayout.LayoutParams(prev_rw, prev_rh);
layoutParam.topMargin = (int) (1.0 * bg_screen_by * screenHeight / bg_height);
layoutParam.leftMargin = (int) (1.0 * bg_screen_bx * screenWidth / bg_width);
cameraDevice = Camera.open();
Log.i(LOG_TAG, "cameara open");
cameraView = new CameraView(this, cameraDevice);
topLayout.addView(cameraView, layoutParam);
Log.i(LOG_TAG, "cameara preview start: OK");
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() {
Log.w(LOG_TAG,"init recorder");
if (RECORD_LENGTH > 0) {
imagesIndex = 0;
images = new IplImage[RECORD_LENGTH * frameRate];
timestamps = new long[images.length];
for (int i = 0; i < images.length; i++) {
images[i] = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
timestamps[i] = -1;
}
} else if (yuvIplimage == null) {
yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
Log.i(LOG_TAG, "create yuvIplimage");
}
Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link);
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("flv");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);
Log.i(LOG_TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
}
public void startRecording() {
initRecorder();
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
runAudioThread = false;
try {
audioThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
audioRecordRunnable = null;
audioThread = null;
if (recorder != null && recording) {
if (RECORD_LENGTH > 0) {
Log.v(LOG_TAG,"Writing frames");
try {
int firstIndex = imagesIndex % samples.length;
int lastIndex = (imagesIndex - 1) % images.length;
if (imagesIndex <= images.length) {
firstIndex = 0;
lastIndex = imagesIndex - 1;
}
if ((startTime = timestamps[lastIndex] - RECORD_LENGTH * 1000000L) < 0) {
startTime = 0;
}
if (lastIndex < firstIndex) {
lastIndex += images.length;
}
for (int i = firstIndex; i <= lastIndex; i++) {
long t = timestamps[i % timestamps.length] - startTime;
if (t >= 0) {
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(images[i % images.length]);
}
}
firstIndex = samplesIndex % samples.length;
lastIndex = (samplesIndex - 1) % samples.length;
if (samplesIndex <= samples.length) {
firstIndex = 0;
lastIndex = samplesIndex - 1;
}
if (lastIndex < firstIndex) {
lastIndex += samples.length;
}
for (int i = firstIndex; i <= lastIndex; i++) {
recorder.record(samples[i % samples.length]);
}
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
recording = false;
Log.v(LOG_TAG,"Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
#Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
#Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
ShortBuffer audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
if (RECORD_LENGTH > 0) {
samplesIndex = 0;
samples = new ShortBuffer[RECORD_LENGTH * sampleAudioRateInHz * 2 / bufferSize + 1];
for (int i = 0; i < samples.length; i++) {
samples[i] = ShortBuffer.allocate(bufferSize);
}
} else {
audioData = ShortBuffer.allocate(bufferSize);
}
Log.d(LOG_TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while (runAudioThread) {
if (RECORD_LENGTH > 0) {
audioData = samples[samplesIndex++ % samples.length];
audioData.position(0).limit(0);
}
//Log.v(LOG_TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
audioData.limit(bufferReadResult);
if (bufferReadResult > 0) {
Log.v(LOG_TAG,"bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
// Why? Good question...
if (recording) {
if (RECORD_LENGTH <= 0) try {
recorder.record(audioData);
//Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
Log.v(LOG_TAG,"AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if (audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(LOG_TAG,"audioRecord released");
}
}
}
//---------------------------------------------
// camera thread, gets and encodes video data
//---------------------------------------------
class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraView(Context context, Camera camera) {
super(context);
Log.w("camera","camera view");
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}
#Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.v(LOG_TAG,"Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate);
Camera.Parameters camParams = mCamera.getParameters();
camParams.setPreviewSize(imageWidth, imageHeight);
Log.v(LOG_TAG,"Preview Framerate: " + camParams.getPreviewFrameRate());
camParams.setPreviewFrameRate(frameRate);
mCamera.setParameters(camParams);
startPreview();
}
#Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch (RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
public void startPreview() {
if (!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}
public void stopPreview() {
if (isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}
#Override
public void onPreviewFrame(byte[] data, Camera camera) {
if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
startTime = System.currentTimeMillis();
return;
}
if (RECORD_LENGTH > 0) {
int i = imagesIndex++ % images.length;
yuvIplimage = images[i];
timestamps[i] = 1000 * (System.currentTimeMillis() - startTime);
}
/* get video data */
if (yuvIplimage != null && recording) {
yuvIplimage.getByteBuffer().put(data);
if (RECORD_LENGTH <= 0) try {
Log.v(LOG_TAG,"Writing Frame");
long t = 1000 * (System.currentTimeMillis() - startTime);
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(yuvIplimage);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
#Override
public void onClick(View v) {
if (!recording) {
startRecording();
Log.w(LOG_TAG, "Start Button Pushed");
btnRecorderControl.setText("Stop");
} else {
// This will trigger the audio recording loop to stop and then set isRecorderStart = false;
stopRecording();
Log.w(LOG_TAG, "Stop Button Pushed");
btnRecorderControl.setText("Start");
}
}
}
I am getting this error
The type org.bytedeco.javacpp.avutil$AVFrame cannot be resolved. It is indirectly referenced from required .class files
at the following line in the above code:
if (RECORD_LENGTH <= 0) try {
recorder.record(audioData);
//Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
}
at recorder.record(audioData). I don't know what I am doing wrong here. New to JavaCV. Any help will be appreciated.
The error message means that a jar containing the class org.bytedeco.javacpp.avutil$AVFrame cannot be found in the java class path. I would suggest to check the class path, the version of javacv used, maybe there were bugs that are now solved, you never know. Actually there are some similar discussions on javacv's github account, which may or may not be connected to your problem: javacv github, javacv github.
What makes me wonder is why you are putting your class in the "org.bytedeco.javacv.recordactivity" package. "org.bytedeco.javacv" is JavaCV's own package; you just shouldn't put your own class into a package structure that does not belong to you. It can only cause you trouble in the future, for instance if JavaCV at some point decides to create a package and class with the same name.
I am building my own camera app using the android camera api. I have a working app but the preview is not as sharp as the default camera app. Why is this the case? Here is my code below.
public class showCamera extends SurfaceView implements SurfaceHolder.Callback {
private static final int PICTURE_SIZE_MAX_WIDTH =640;
private static final int PREVIEW_SIZE_MAX_WIDTH = 640;
//private Camera theCamera;
private SurfaceHolder holdMe;
private Camera theCamera;
int h;
int w;
public showCamera (Context context,Camera camera,int w,int h)
{
super(context);
theCamera = camera;
holdMe = getHolder();
holdMe.addCallback(this);
this.h=h;
this.w=w;
}
public showCamera(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
// TODO Auto-generated constructor stub
}
#Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
// TODO Auto-generated method stub
}
#Override
public void surfaceCreated(SurfaceHolder holder) {
// TODO Auto-generated method stub
try {
theCamera.setPreviewDisplay(holder);
//setDisplayOrientation(theCamera,90);
if( (getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT ))
{ theCamera.setDisplayOrientation(90);
}
Camera.Parameters parameters = theCamera.getParameters();
Log.d(" " , " THIS IS THE FLASH MODE = " + parameters.getFlashMode()) ;
List<String> g= parameters.getSupportedFocusModes();
for(int j=0;j<g.size();j++)
{
Log.d(" " , " THIS IS focus modes =" + g.get(j)) ;
}
Size bestPreviewSize = determineBestPreviewSize(parameters);
Size bestPictureSize = determineBestPictureSize(parameters);
parameters.setPreviewSize(bestPreviewSize.width, bestPreviewSize.height);
parameters.setPictureSize(bestPictureSize.width, bestPictureSize.height);
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
theCamera.setParameters(parameters);
theCamera.startPreview();
} catch (IOException e) {
}
}
#Override
public void surfaceDestroyed(SurfaceHolder holder) {
theCamera.stopPreview();
theCamera.release();
// TODO Auto-generated method stub
}
protected void setDisplayOrientation(Camera camera, int angle){
Method downPolymorphic;
try
{
downPolymorphic = camera.getClass().getMethod("setDisplayOrientation", new Class[] { int.class });
if (downPolymorphic != null)
downPolymorphic.invoke(camera, new Object[] { angle });
}
catch (Exception e1)
{
}
}
private Size determineBestPreviewSize(Camera.Parameters parameters) {
List<Size> sizes = parameters.getSupportedPreviewSizes();
return determineBestSize(sizes, PREVIEW_SIZE_MAX_WIDTH);
}
private Size determineBestPictureSize(Camera.Parameters parameters) {
List<Size> sizes = parameters.getSupportedPictureSizes();
return determineBestSize(sizes, PICTURE_SIZE_MAX_WIDTH);
}
protected Size determineBestSize(List<Size> sizes, int widthThreshold) {
Size bestSize = null;
for (Size currentSize : sizes) {
boolean isDesiredRatio = (currentSize.width / 4) == (currentSize.height / 3);
boolean isBetterSize = (bestSize == null || currentSize.width > bestSize.width);
boolean isInBounds = currentSize.width <= PICTURE_SIZE_MAX_WIDTH;
if (isDesiredRatio && isInBounds && isBetterSize) {
bestSize = currentSize;
}
}
if (bestSize == null) {
return sizes.get(0);
}
return bestSize;
}
AutoFocusCallback autoFocusCallback = new AutoFocusCallback() {
#Override
public void onAutoFocus(boolean success, Camera camera) {
Log.i("tag","this ran sdfgfhgjkldxbvnm,jhgfdkmn" );
}
};
}
MainActivity
public class MainActivity extends Activity implements OnClickListener {
private Camera cameraObject;
private showCamera showCamera;
int h;
int w = 1080;
LinearLayout Top, Buttom;
Button b;
public static Camera isCameraAvailiable() {
Camera object = null;
try {
object = Camera.open();
object.getParameters();
} catch (Exception e) {
}
return object;
}
AutoFocusCallback autoFocusCallback = new AutoFocusCallback() {
#Override
public void onAutoFocus(boolean success, Camera camera) {
Log.i("tag", "this ran sdfgfhgjkldxbvnm,jhgfdkmn");
}
};
private PictureCallback capturedIt = new PictureCallback() {
#Override
public void onPictureTaken(byte[] data, Camera camera) {
Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
if (bitmap == null) {
Toast.makeText(getApplicationContext(), "not taken",
Toast.LENGTH_SHORT).show();
} else {
File pictureFile = MediaOutput();
if (pictureFile == null) {
Log.d("",
"Error creating media file, check storage permissions: ");
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
MediaStore.Images.Media.insertImage(getContentResolver(),
bitmap, "testing ", "");
Toast.makeText(getApplicationContext(), "taken",
Toast.LENGTH_SHORT).show();
fos.close();
} catch (FileNotFoundException e) {
Log.d("", "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d("", "Error accessing file: " + e.getMessage());
}
}
}
};
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.camlay);
Top = (LinearLayout) findViewById(R.id.top_bar);
Buttom = (LinearLayout) findViewById(R.id.but_bar);
b = (Button) findViewById(R.id.but_pic);
b.setOnClickListener(this);
Display display = getWindowManager().getDefaultDisplay();
Point size = new Point();
display.getSize(size);
int width = size.x;
int height = size.y;
h = (int) Math.round(0.8 * height);
Log.d(" ", " height " + h);
Log.d(" ", " width " + width);
Top.setLayoutParams(new LinearLayout.LayoutParams(width, (int) Math
.round(0.10 * height)));
Buttom.setLayoutParams(new LinearLayout.LayoutParams(width, (int) Math
.round(0.10 * height)));
cameraObject = isCameraAvailiable();
showCamera = new showCamera(this, cameraObject, width, h);
FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
preview.addView(showCamera, new FrameLayout.LayoutParams(width, h));
// preview.addView(showCamera);
}
#Override
public void onClick(View v) {
// TODO Auto-generated method stub
switch (v.getId()) {
case R.id.but_pic:
// cameraObject.takePicture(null, null,capturedIt);
// parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
Camera.Parameters parameters = cameraObject.getParameters();
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
cameraObject.setParameters(parameters);
cameraObject.autoFocus(autoFocusCallback);
// cameraObject.stopPreview();
break;
}
}
/**
 * Returns a timestamped destination file for a new photo inside the public
 * Pictures/MyCameraApp directory, or null when the directory cannot be created.
 */
private static File MediaOutput() {
    File dir = new File(
            Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
            "MyCameraApp");
    // Lazily create the output directory on first use.
    if (!dir.exists() && !dir.mkdirs()) {
        Log.d("MyCameraApp", "failed to create directory");
        return null;
    }
    // Timestamp keeps successive captures from overwriting each other.
    String stamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
    return new File(dir, "IMG_" + stamp + ".jpg");
}
}
If you can point me in the right direction that would be great.
Have you tried increasing these values in your showCamera class?
private static final int PICTURE_SIZE_MAX_WIDTH = 640;
private static final int PREVIEW_SIZE_MAX_WIDTH = 640;
I think I'm doing something wrong with my paths, but I can't figure out what. I am testing on a Nexus 7, if that's relevant. Following the examples, I tried to copy the tessdata folder over to the SD card; however, it keeps telling me "unable to copy: file not found", and I don't quite understand why. (I have the Tesseract project as a library, and I have the tessdata folder copied into assets.)
All help is appreciated thanks!
Code:
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import android.app.Activity;
import android.content.Intent;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.Toast;
import com.googlecode.tesseract.android.TessBaseAPI;
public class MainActivity extends Activity {
private static ImageView imageView;
// protected static Bitmap bit;
protected static Bitmap mImageBitmap;
// protected static String DATA_PATH;
public static final String STORAGE_PATH = Environment.getExternalStorageDirectory().toString() + "/rjb";
protected static String tesspath = STORAGE_PATH + "/tessdata";
protected static String savepath = null;
protected static String TAG = "OCR";
protected static String lang = "eng";
// main method
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
// This is my image view for to show the image with
imageView = (ImageView) this.findViewById(R.id.imageView1);
// this is the take a photo button
Button photoButton = (Button) this.findViewById(R.id.button1);
//check if the rjb directory is there
//make it if its not
createmydir();
//if (!(new File(STORAGE_PATH + File.separator + "tessdata" + File.separator + lang + ".traineddata")).exists()) {
try {
AssetManager assetManager = this.getAssets();
//open the asset manager and open the traineddata path
InputStream in = assetManager.open("tessdata/eng.traineddata");
OutputStream out = new FileOutputStream(tesspath + "/eng.traineddata");
byte[] buf = new byte[8024];
int len;
while ((len = in.read(buf)) > 0) {
out.write(buf, 0, len);
}
in.close();
out.close();
} catch (IOException e) {
android.util.Log.e(TAG, "Was unable to copy " + lang
+ " traineddata " + e.toString());
android.util.Log.e(TAG, "IM PRINTING THE STACK TRACE");
e.printStackTrace();
}
//} else {
processImage(STORAGE_PATH + File.separator + "savedAndroid.jpg");
//}
photoButton.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
// CALL THE PICTURE (this works)
dispatchTakePictureIntent(0);
}
});
}
private void createmydir() {
File t = new File(STORAGE_PATH);
if(t.exists()) {
Toast.makeText(getApplicationContext(), "IM TOASTIN CAUSE IT EXISTS", Toast.LENGTH_LONG).show();
}
else {
t.mkdirs();
Toast.makeText(getApplicationContext(), "IM TOASTIN CUZ I MADE IT EXIST", Toast.LENGTH_LONG).show();
}
}
private void handleSmallCameraPhoto(Intent intent) {
Bundle extras = intent.getExtras();
mImageBitmap = (Bitmap) extras.get("data");
imageView.setImageBitmap(mImageBitmap);
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 4;
saveImageAndroid(mImageBitmap);
Bitmap bitmap = BitmapFactory.decodeFile(savepath, options);
ExifInterface exif;
try {
exif = new ExifInterface(savepath);
int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,ExifInterface.ORIENTATION_NORMAL);
int rotate = 0;
switch (exifOrientation) {
case ExifInterface.ORIENTATION_ROTATE_90:
rotate = 90;
break;
case ExifInterface.ORIENTATION_ROTATE_180:
rotate = 180;
break;
case ExifInterface.ORIENTATION_ROTATE_270:
rotate = 270;
break;
}
if (rotate != 0) {
int w = bitmap.getWidth();
int h = bitmap.getHeight();
// Setting pre rotate
Matrix mtx = new Matrix();
mtx.preRotate(rotate);
// Rotating Bitmap & convert to ARGB_8888, required by tess
bitmap = Bitmap.createBitmap(bitmap, 0, 0, w, h, mtx, false);
bitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true);
}
// DATA_PATH = getDataPath();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
handleSmallCameraPhoto(data);
}
// write bitmap to storage
// saves it as savedandroid.jpg
// saves location in savepath
private void saveImageAndroid(final Bitmap passedBitmap) {
try {
savepath = STORAGE_PATH + File.separator + "savedAndroid.jpg";
FileOutputStream mFileOutStream = new FileOutputStream(savepath);
passedBitmap.compress(Bitmap.CompressFormat.JPEG, 100,mFileOutStream);
mFileOutStream.flush();
mFileOutStream.close();
} catch (Exception e) {
e.printStackTrace();
}
}
private void dispatchTakePictureIntent(int actionCode) {
Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
startActivityForResult(takePictureIntent, actionCode);
}
private void processImage(final String filePath) {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 2;
options.inPurgeable = true;
Bitmap bitmap = BitmapFactory.decodeFile(filePath, options);
if (bitmap != null) {
/*
* was for rotating but no longer needed int width =
* bitmap.getWidth(); int height = bitmap.getHeight(); Matrix matrix
* = new Matrix(); matrix.postRotate(rotation); bitmap =
* Bitmap.createBitmap(bitmap, 0, 0, width, height, matrix, false);
* bitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true);
*/
TessBaseAPI baseApi = new TessBaseAPI();
baseApi.setDebug(true);
baseApi.init(STORAGE_PATH, "eng");
baseApi.setPageSegMode(100);
baseApi.setPageSegMode(7);
baseApi.setImage(bitmap);
String recognizedText = baseApi.getUTF8Text();
android.util.Log.i(TAG, "recognizedText: 1 " + recognizedText);
baseApi.end();
if (lang.equalsIgnoreCase("eng")) {
recognizedText = recognizedText
.replaceAll("[^a-zA-Z0-9]+", " ");
}
android.util.Log.i(TAG,
"recognizedText: 2 " + recognizedText.trim());
}
}
}
First, try placing the language files on the SD card manually and run simple OCR on a simple image (e.g. one containing just the word "ear"). If that works, then move on to copying the language files programmatically — that way you will know where the error actually is.