I am building my own camera app using the Android Camera API. The app works, but the preview is not as sharp as in the default camera app. Why is this the case? Here is my code:
public class showCamera extends SurfaceView implements SurfaceHolder.Callback {
private static final int PICTURE_SIZE_MAX_WIDTH =640;
private static final int PREVIEW_SIZE_MAX_WIDTH = 640;
//private Camera theCamera;
private SurfaceHolder holdMe;
private Camera theCamera;
int h;
int w;
public showCamera (Context context,Camera camera,int w,int h)
{
super(context);
theCamera = camera;
holdMe = getHolder();
holdMe.addCallback(this);
this.h=h;
this.w=w;
}
public showCamera(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
// TODO Auto-generated constructor stub
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
// TODO Auto-generated method stub
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
// TODO Auto-generated method stub
try {
theCamera.setPreviewDisplay(holder);
//setDisplayOrientation(theCamera,90);
if( (getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT ))
{ theCamera.setDisplayOrientation(90);
}
Camera.Parameters parameters = theCamera.getParameters();
Log.d(" " , " THIS IS THE FLASH MODE = " + parameters.getFlashMode()) ;
List<String> g= parameters.getSupportedFocusModes();
for(int j=0;j<g.size();j++)
{
Log.d(" " , " THIS IS focus modes =" + g.get(j)) ;
}
Size bestPreviewSize = determineBestPreviewSize(parameters);
Size bestPictureSize = determineBestPictureSize(parameters);
parameters.setPreviewSize(bestPreviewSize.width, bestPreviewSize.height);
parameters.setPictureSize(bestPictureSize.width, bestPictureSize.height);
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
theCamera.setParameters(parameters);
theCamera.startPreview();
} catch (IOException e) {
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
theCamera.stopPreview();
theCamera.release();
// TODO Auto-generated method stub
}
protected void setDisplayOrientation(Camera camera, int angle){
Method downPolymorphic;
try
{
downPolymorphic = camera.getClass().getMethod("setDisplayOrientation", new Class[] { int.class });
if (downPolymorphic != null)
downPolymorphic.invoke(camera, new Object[] { angle });
}
catch (Exception e1)
{
}
}
private Size determineBestPreviewSize(Camera.Parameters parameters) {
List<Size> sizes = parameters.getSupportedPreviewSizes();
return determineBestSize(sizes, PREVIEW_SIZE_MAX_WIDTH);
}
private Size determineBestPictureSize(Camera.Parameters parameters) {
List<Size> sizes = parameters.getSupportedPictureSizes();
return determineBestSize(sizes, PICTURE_SIZE_MAX_WIDTH);
}
protected Size determineBestSize(List<Size> sizes, int widthThreshold) {
Size bestSize = null;
for (Size currentSize : sizes) {
boolean isDesiredRatio = (currentSize.width / 4) == (currentSize.height / 3);
boolean isBetterSize = (bestSize == null || currentSize.width > bestSize.width);
boolean isInBounds = currentSize.width <= PICTURE_SIZE_MAX_WIDTH;
if (isDesiredRatio && isInBounds && isBetterSize) {
bestSize = currentSize;
}
}
if (bestSize == null) {
return sizes.get(0);
}
return bestSize;
}
AutoFocusCallback autoFocusCallback = new AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
Log.i("tag","this ran sdfgfhgjkldxbvnm,jhgfdkmn" );
}
};
}
And here is the MainActivity:
public class MainActivity extends Activity implements OnClickListener {
private Camera cameraObject;
private showCamera showCamera;
int h;
int w = 1080;
LinearLayout Top, Buttom;
Button b;
public static Camera isCameraAvailiable() {
Camera object = null;
try {
object = Camera.open();
object.getParameters();
} catch (Exception e) {
}
return object;
}
AutoFocusCallback autoFocusCallback = new AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
Log.i("tag", "this ran sdfgfhgjkldxbvnm,jhgfdkmn");
}
};
private PictureCallback capturedIt = new PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
if (bitmap == null) {
Toast.makeText(getApplicationContext(), "not taken",
Toast.LENGTH_SHORT).show();
} else {
File pictureFile = MediaOutput();
if (pictureFile == null) {
Log.d("",
"Error creating media file, check storage permissions: ");
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
MediaStore.Images.Media.insertImage(getContentResolver(),
bitmap, "testing ", "");
Toast.makeText(getApplicationContext(), "taken",
Toast.LENGTH_SHORT).show();
fos.close();
} catch (FileNotFoundException e) {
Log.d("", "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d("", "Error accessing file: " + e.getMessage());
}
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.camlay);
Top = (LinearLayout) findViewById(R.id.top_bar);
Buttom = (LinearLayout) findViewById(R.id.but_bar);
b = (Button) findViewById(R.id.but_pic);
b.setOnClickListener(this);
Display display = getWindowManager().getDefaultDisplay();
Point size = new Point();
display.getSize(size);
int width = size.x;
int height = size.y;
h = (int) Math.round(0.8 * height);
Log.d(" ", " height " + h);
Log.d(" ", " width " + width);
Top.setLayoutParams(new LinearLayout.LayoutParams(width, (int) Math
.round(0.10 * height)));
Buttom.setLayoutParams(new LinearLayout.LayoutParams(width, (int) Math
.round(0.10 * height)));
cameraObject = isCameraAvailiable();
showCamera = new showCamera(this, cameraObject, width, h);
FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
preview.addView(showCamera, new FrameLayout.LayoutParams(width, h));
// preview.addView(showCamera);
}
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
switch (v.getId()) {
case R.id.but_pic:
// cameraObject.takePicture(null, null,capturedIt);
// parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
Camera.Parameters parameters = cameraObject.getParameters();
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
cameraObject.setParameters(parameters);
cameraObject.autoFocus(autoFocusCallback);
// cameraObject.stopPreview();
break;
}
}
private static File MediaOutput() {
File mediaStorageDir = new File(
Environment
.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
"MyCameraApp");
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
Log.d("MyCameraApp", "failed to create directory");
return null;
}
}
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss")
.format(new Date());
File mediaFile;
mediaFile = new File(mediaStorageDir.getPath() + File.separator
+ "IMG_" + timeStamp + ".jpg");
return mediaFile;
}
}
If you can point me in the right direction that would be great.
Did you ever try to increase these values in your showCamera class?
private static final int PICTURE_SIZE_MAX_WIDTH = 640;
private static final int PREVIEW_SIZE_MAX_WIDTH = 640;
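Those two constants cap both the preview and the picture at 640 pixels wide, which is why the preview looks softer than the stock camera app. As a minimal sketch (assuming theCamera is your opened Camera instance), you could simply take the largest supported preview size instead of the largest one under the 640 cap:

private Camera.Size pickLargestPreviewSize(Camera.Parameters parameters) {
    Camera.Size best = null;
    // getSupportedPreviewSizes() always returns at least one element for an opened camera
    for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
        if (best == null || size.width * size.height > best.width * best.height) {
            best = size; // keep the size with the most pixels
        }
    }
    return best;
}

Call it in surfaceCreated() where determineBestPreviewSize() is used now, and do the same with getSupportedPictureSizes() if you also want full-resolution captures. Note also that determineBestSize() compares against PICTURE_SIZE_MAX_WIDTH rather than its widthThreshold parameter, so the preview is capped by the picture constant as well.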
Related
I am making my own camera app in Android. Whenever I capture an image the orientation looks okay in the app, but when I open the saved file from my external SD card the orientation is wrong.
This is my code:
public class CameraApp extends Activity implements SurfaceHolder.Callback {
private static final String TAG = "IMG_";
private static final String IMAGE_FOLDER = "/PhoneController/";
private static final String EXTENTION = ".jpg";
private String pictureName = "";
public String picNameToshare = "";
static final int FOTO_MODE = 0;
String speed;
String imageFilePath;
private Handler mHandler = new Handler();
public static String imageFilePath1;
FileOutputStream fos = null;
public Bitmap framebmpScaled;
public Bitmap SelecedFrmae;
public static Bitmap mergeBitmap;
public static Bitmap bmpfULL;
Camera camera;
Button button;
SurfaceView surfaceView;
SurfaceHolder surfaceHolder;
boolean previewing = false;
LayoutInflater controlInflater = null;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.camera);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
getWindow().setFormat(PixelFormat.UNKNOWN);
surfaceView = (SurfaceView) findViewById(R.id.camerapreview);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
button = (Button) findViewById(R.id.button);
button.setOnClickListener(buttonListener);
}
private OnClickListener buttonListener = new OnClickListener() {
public void onClick(View v) {
Handler myHandler = new Handler();
if (previewing) {
camera.stopPreview();
previewing = false;
Log.e("", "one");
}
if (camera != null) {
try {
camera.setPreviewDisplay(surfaceHolder);
camera.startPreview();
previewing = true;
myHandler.postDelayed(mMyRunnable, 2000); // called after 2 seconds
button.setText("Waiting...");
Log.e("", "two");
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
/*
* Handler myHandler = new Handler();
* myHandler.postDelayed(mMyRunnable, 5000); // called after 5
* seconds button.setText("Waiting...");
*/
}
};
private Runnable mMyRunnable = new Runnable() {
public void run() {
Camera.Parameters params = camera.getParameters();
params.set("rotation", 90);
camera.setParameters(params);
camera.takePicture(myShutterCallback, myPictureCallback_RAW,
myPictureCallback_JPG);
storePicture(mergeBitmap);
button.setText("Capture");
}
};
PictureCallback myPictureCallback_JPG = new PictureCallback() {
public void onPictureTaken(byte[] arg0, Camera arg1) {
// TODO Auto-generated method stub
Display display = getWindowManager().getDefaultDisplay();
final int ScreenWidth = display.getWidth();
final int ScreenHeight = display.getHeight();
Log.e("" + display.getWidth(), "" + display.getWidth());
Bitmap bitmapPicture = BitmapFactory.decodeByteArray(arg0, 0,
arg0.length);
Bitmap bmpScaled1 = Bitmap.createScaledBitmap(bitmapPicture,
ScreenWidth, ScreenHeight, true);
mergeBitmap = bmpScaled1;
showPicture();
}
};
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
// TODO Auto-generated method stub
if (previewing) {
camera.stopPreview();
previewing = false;
}
if (camera != null) {
try {
camera.setPreviewDisplay(surfaceHolder);
camera.startPreview();
previewing = true;
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
// TODO Auto-generated method stub
camera = Camera.open();
camera.setDisplayOrientation(90);
}
public void surfaceDestroyed(SurfaceHolder holder) {
// TODO Auto-generated method stub
camera.stopPreview();
camera.release();
camera = null;
previewing = false;
}
void storePicture(Bitmap bm) {
mHandler.post(new Runnable() {
public void run() {
// showPicture();
}
});
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss")
.format(new Date());
this.pictureName = TAG + timeStamp;
picNameToshare = this.pictureName;
this.imageFilePath = IMAGE_FOLDER + this.pictureName + EXTENTION;
this.imageFilePath = sanitizePath(this.imageFilePath);
try {
checkSDCard(this.imageFilePath);
fos = new FileOutputStream(this.imageFilePath);
if (fos != null) {
bm.compress(Bitmap.CompressFormat.JPEG, 85, fos);
Toast.makeText(this, "Image Saved", Toast.LENGTH_SHORT).show();
fos.close();
}
} catch (IOException ioe) {
Log.e(TAG, "CapturePicture : " + ioe.toString());
} catch (Exception e) {
Log.e(TAG, "CapturePicture : " + e.toString());
}
sendBroadcast(new Intent(
Intent.ACTION_MEDIA_MOUNTED,
Uri.parse("file://" + Environment.getExternalStorageDirectory())));
imageFilePath1 = this.imageFilePath;
}
/**
* Check that the SD card is mounted on the device
*
* @param path path of the image file
* @throws IOException
*/
void checkSDCard(String path) throws IOException {
String state = android.os.Environment.getExternalStorageState();
if (!state.equals(android.os.Environment.MEDIA_MOUNTED)) {
Toast.makeText(this,
"Please insert sdcard other wise image won't stored",
Toast.LENGTH_SHORT).show();
throw new IOException("SD Card is not mounted. It is " + state
+ ".");
}
// make sure the directory we plan to store the recording is exists
File directory = new File(path).getParentFile();
if (!directory.exists() && !directory.mkdirs()) {
throw new IOException("Path to file could not be created.");
}
}
private String sanitizePath(String path) {
if (!path.startsWith("/")) {
path = "/" + path;
}
return Environment.getExternalStorageDirectory().getAbsolutePath()
+ path;
}
}
Please help me.
Try using this:
Camera.Parameters parameters = camera.getParameters();
parameters.set("orientation", "portrait");
camera.setParameters(parameters);
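The "orientation" parameter is a legacy key and many devices ignore it. A more reliable route, sketched below, is the formula from the Camera.Parameters#setRotation documentation: it rotates the saved JPEG based on the sensor orientation plus the current device orientation. Here cameraId, camera, and deviceOrientationDegrees (the value from an OrientationEventListener) are assumed to already exist in your activity:

Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, info);
// snap the raw listener value to 0 / 90 / 180 / 270
int deviceOrientation = (deviceOrientationDegrees + 45) / 90 * 90 % 360;
int rotation;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
    rotation = (info.orientation - deviceOrientation + 360) % 360;
} else {
    rotation = (info.orientation + deviceOrientation) % 360;
}
Camera.Parameters parameters = camera.getParameters();
parameters.setRotation(rotation); // affects the captured JPEG, not the preview
camera.setParameters(parameters);

Note that params.set("rotation", 90) in mMyRunnable hard-codes the same mechanism; with the computed setRotation(...) in place you can drop it.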
Below is the code for a custom camera app I'm working on. Everything works fine except when images are taken with the flash on. With flash on, the preview shown to the user to accept the image looks right, but the image saved to the SD card is very dark (with only white objects slightly visible) and often just black. I've been trying to figure out the problem for days now. Any ideas what might be going on?
public class CustomCamera extends Activity implements SurfaceHolder.Callback{
Camera camera;
SurfaceView surfaceView;
SurfaceHolder surfaceHolder;
boolean previewing = false;
LayoutInflater controlInflater = null;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.main);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
getWindow().setFormat(PixelFormat.UNKNOWN);
surfaceView = (SurfaceView)findViewById(R.id.camerapreview);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
...
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
if(previewing){
camera.stopPreview();
previewing = false;
}
if (camera != null){
try {
Camera.Parameters parameters = camera.getParameters();
camera.setDisplayOrientation(90);
determineDisplayOrientation();
camera.setPreviewDisplay(surfaceHolder);
previewing = true;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB){
List<String> supportedFocusModes = parameters.getSupportedFocusModes();
if (supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)){
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
} else{
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
}
}
if (parameters.getSupportedWhiteBalance().contains(
Parameters.WHITE_BALANCE_AUTO)) {
parameters.setWhiteBalance(Parameters.WHITE_BALANCE_AUTO);
}
if (parameters.getSupportedSceneModes().contains(
Parameters.SCENE_MODE_AUTO)) {
parameters.setSceneMode(Parameters.SCENE_MODE_AUTO);
}
camera.setParameters(parameters);
camera.startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
flashButton.setBackgroundResource(R.drawable.auto_flash);
camera = Camera.open(CURRENT_CAMERA_ID);
Camera.Parameters camParameters = camera.getParameters();
flashSupported = getApplicationContext().getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH);
if ( flashSupported){
camParameters.setFlashMode(Parameters.FLASH_MODE_AUTO);
lastFlashMode = AUTO_FLASH;
camera.setParameters(camParameters);
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
camera.stopPreview();
camera.release();
camera = null;
previewing = false;
}
//click listener for button to take picture
public void takePicture(View view){
if (camera != null){
orientationListener.rememberOrientation();
camera.takePicture(myShutterCallback, myPictureCallback_RAW, myPictureCallback_JPG);
showPicPreviewScreen();
}
}
Camera.ShutterCallback myShutterCallback = new Camera.ShutterCallback() {
@Override
public void onShutter() {
}
};
Camera.PictureCallback myPictureCallback_RAW = new Camera.PictureCallback(){
@Override
public void onPictureTaken(byte[] arg0, Camera arg1) {
}
};
Camera.PictureCallback myPictureCallback_JPG = new Camera.PictureCallback(){
@Override
public void onPictureTaken(byte[] arg0, Camera arg1) {
bitmapPicture = BitmapFactory.decodeByteArray(arg0, 0, arg0.length);
int rotation = (displayOrientation + orientationListener.getRememberedOrientation() + layoutOrientation) % 360;
if (rotation != 0){
Log.e("rotaion", "not 0, rotating");
Bitmap oldBitmap = bitmapPicture;
Matrix matrix = new Matrix();
matrix.postRotate(rotation);
bitmapPicture = Bitmap.createBitmap(bitmapPicture, 0, 0, bitmapPicture.getWidth(), bitmapPicture.getHeight(), matrix, false);
oldBitmap.recycle();
}
camera.stopPreview();
previewing = false;
ImageSaver imageSaver = new ImageSaver();
imageSaver.saveImage(getApplicationContext(), bitmapPicture);
}
};
}
And here's the class that saves the image:
public class ImageSaver{
private Context context;
private String NameOfFolder = "/ProjectFolder";
String fileName;
String file_path;
public void saveImage(Context context,Bitmap ImageToSave){
this.context = context;
file_path = Environment.getExternalStorageDirectory().getAbsolutePath()+ NameOfFolder;
String CurrentDateAndTime= getCurrentDateAndTime();
File dir = new File(file_path);
if(!dir.exists()){
dir.mkdirs();
}
fileName = CurrentDateAndTime+ ".jpg";
File file = new File(dir, fileName);
try {
FileOutputStream fOut = new FileOutputStream(file);
ImageToSave.compress(Bitmap.CompressFormat.JPEG, 100, fOut); //85
fOut.flush();
fOut.close();
}
catch (FileNotFoundException e) {UnableToSave();}
catch (IOException e){UnableToSave();}
}
private String getCurrentDateAndTime() {
Calendar c = Calendar.getInstance();
SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
String formattedDate = df.format(c.getTime());
return formattedDate;
}
}
Thanks a lot!
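One thing worth ruling out before digging into flash timing: the pipeline decodes the JPEG into a Bitmap, rotates it, and re-compresses it, so it is hard to tell whether the darkness comes from the capture itself or from that round trip. A small debugging sketch (the folder name below is just an example, not part of the original code) that also writes the untouched JPEG bytes from onPictureTaken(byte[] data, Camera camera):

private void dumpOriginalJpeg(byte[] data) {
    // writes the camera's own JPEG with no Bitmap decode/re-compress in between
    File dir = new File(Environment.getExternalStorageDirectory(), "ProjectFolder");
    if (!dir.exists() && !dir.mkdirs()) {
        return; // can't create the output directory
    }
    File out = new File(dir, "raw_" + System.currentTimeMillis() + ".jpg");
    try {
        FileOutputStream fos = new FileOutputStream(out);
        fos.write(data);
        fos.close();
    } catch (IOException e) {
        Log.e("CustomCamera", "Could not dump raw JPEG", e);
    }
}

If the raw file is also dark, the problem is in the capture (flash/exposure) rather than in ImageSaver.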
Most OpenCV examples for Android are based on a CameraViewActivity that receives, processes, and displays the frames. Similar to that approach, I want to build a service that is started when the Android device boots. The service should access the camera and do some continuous image processing.
Can anyone tell me how to initialize the camera for OpenCV in this scenario? Or point me to any sample links?
I found several posts that explained how to do it. Basically you have to give the Android Camera an invisible dummy surface (a SurfaceTexture in the code below) and register callback buffers for the preview frames. Here's part of my code.
Here's an interface that I wrote, because we switch between local hardware and remote network cameras.
public interface ICamera {
boolean supportsRegionOfInterest();
void connect();
void release();
boolean isConnected();
}
Here's the code for the local camera that works without a visible Activity. It receives the frames in a separate thread.
public class HardwareCamera implements CameraAccess.ICamera,
Camera.PreviewCallback {
// see http://developer.android.com/guide/topics/media/camera.html for more
// details
private static final boolean USE_THREAD = true;
private final static String TAG = "HardwareCamera";
private final Context context;
private final int cameraIndex; // example: CameraInfo.CAMERA_FACING_FRONT or
// -1 for
// IP_CAM
private final CameraAccess user;
private Camera mCamera;
private int mFrameWidth;
private int mFrameHeight;
private CameraAccessFrame mCameraFrame;
private CameraHandlerThread mThread = null;
private SurfaceTexture texture = new SurfaceTexture(0);
// needed to avoid OpenCV error:
// "queueBuffer: BufferQueue has been abandoned!"
private byte[] mBuffer;
public HardwareCamera(Context context, CameraAccess user, int cameraIndex) {
this.context = context;
this.cameraIndex = cameraIndex;
this.user = user;
}
// private boolean checkCameraHardware() {
// if (context.getPackageManager().hasSystemFeature(
// PackageManager.FEATURE_CAMERA)) {
// // this device has a camera
// return true;
// } else {
// // no camera on this device
// return false;
// }
// }
public static Camera getCameraInstance(int facing) {
Camera c = null;
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
int cameraCount = Camera.getNumberOfCameras();
int index = -1;
for (int camIdx = 0; camIdx < cameraCount; camIdx++) {
Camera.getCameraInfo(camIdx, cameraInfo);
if (cameraInfo.facing == facing) {
try {
c = Camera.open(camIdx);
index = camIdx;
break;
} catch (RuntimeException e) {
Log.e(TAG,
String.format(
"Camera is not available (in use or does not exist). Facing: %s Index: %s Error: %s",
facing, camIdx, e.getMessage()));
continue;
}
}
}
if (c != null)
Log.d(TAG, String.format("Camera opened. Facing: %s Index: %s",
facing, index));
else
Log.e(TAG, "Could not find any camera matching facing: " + facing);
// returns null if camera is unavailable
return c;
}
private synchronized void connectLocalCamera() {
if (!user.isOpenCVLoaded())
return;
if (USE_THREAD) {
if (mThread == null) {
mThread = new CameraHandlerThread(this);
}
synchronized (mThread) {
mThread.openCamera();
}
} else {
oldConnectCamera();
}
user.onCameraInitialized(mFrameWidth, mFrameHeight);
}
private/* synchronized */void oldConnectCamera() {
// synchronized (this) {
if (true) {// checkCameraHardware()) {
mCamera = getCameraInstance(cameraIndex);
if (mCamera == null)
return;
Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
// Camera.Size previewSize = sizes.get(0);
Collections.sort(sizes, new PreviewSizeComparer());
Camera.Size previewSize = null;
for (Camera.Size s : sizes) {
if (s == null)
break;
previewSize = s;
}
// List<Integer> formats = params.getSupportedPictureFormats();
// params.setPreviewFormat(ImageFormat.NV21);
params.setPreviewSize(previewSize.width, previewSize.height);
mCamera.setParameters(params);
params = mCamera.getParameters();
mFrameWidth = params.getPreviewSize().width;
mFrameHeight = params.getPreviewSize().height;
int size = mFrameWidth * mFrameHeight;
size = size
* ImageFormat.getBitsPerPixel(params.getPreviewFormat())
/ 8;
this.mBuffer = new byte[size];
Log.d(TAG, "Created callback buffer of size (bytes): " + size);
Mat mFrame = new Mat(mFrameHeight + (mFrameHeight / 2),
mFrameWidth, CvType.CV_8UC1);
mCameraFrame = new CameraAccessFrame(mFrame, mFrameWidth,
mFrameHeight);
if (this.texture != null)
this.texture.release();
this.texture = new SurfaceTexture(0);
try {
mCamera.setPreviewTexture(texture);
mCamera.addCallbackBuffer(mBuffer);
mCamera.setPreviewCallbackWithBuffer(this);
mCamera.startPreview();
Log.d(TAG,
String.format(
"Camera preview started with %sx%s. Rendering to SurfaceTexture dummy while receiving preview frames.",
mFrameWidth, mFrameHeight));
} catch (Exception e) {
Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
// }
}
@Override
public synchronized void onPreviewFrame(byte[] frame, Camera arg1) {
mCameraFrame.put(frame);
if (user.isAutomaticReceive() || user.waitForReceive(500))
user.onPreviewFrame(mCameraFrame);
if (mCamera != null)
mCamera.addCallbackBuffer(mBuffer);
}
private class CameraAccessFrame implements CameraFrame {
private Mat mYuvFrameData;
private Mat mRgba;
private int mWidth;
private int mHeight;
private Bitmap mCachedBitmap;
private boolean mRgbaConverted;
private boolean mBitmapConverted;
@Override
public Mat gray() {
return mYuvFrameData.submat(0, mHeight, 0, mWidth);
}
@Override
public Mat rgba() {
if (!mRgbaConverted) {
Imgproc.cvtColor(mYuvFrameData, mRgba,
Imgproc.COLOR_YUV2BGR_NV12, 4);
mRgbaConverted = true;
}
return mRgba;
}
// @Override
// public Mat yuv() {
// return mYuvFrameData;
// }
@Override
public synchronized Bitmap toBitmap() {
if (mBitmapConverted)
return mCachedBitmap;
Mat rgba = this.rgba();
Utils.matToBitmap(rgba, mCachedBitmap);
mBitmapConverted = true;
return mCachedBitmap;
}
public CameraAccessFrame(Mat Yuv420sp, int width, int height) {
super();
mWidth = width;
mHeight = height;
mYuvFrameData = Yuv420sp;
mRgba = new Mat();
this.mCachedBitmap = Bitmap.createBitmap(width, height,
Bitmap.Config.ARGB_8888);
}
public synchronized void put(byte[] frame) {
mYuvFrameData.put(0, 0, frame);
invalidate();
}
public void release() {
mRgba.release();
mCachedBitmap.recycle();
}
public void invalidate() {
mRgbaConverted = false;
mBitmapConverted = false;
}
};
private class PreviewSizeComparer implements Comparator<Camera.Size> {
@Override
public int compare(Size arg0, Size arg1) {
if (arg0 != null && arg1 == null)
return -1;
if (arg0 == null && arg1 != null)
return 1;
if (arg0.width < arg1.width)
return -1;
else if (arg0.width > arg1.width)
return 1;
else
return 0;
}
}
private static class CameraHandlerThread extends HandlerThread {
Handler mHandler;
HardwareCamera owner;
CameraHandlerThread(HardwareCamera owner) {
super("CameraHandlerThread");
this.owner = owner;
start();
mHandler = new Handler(getLooper());
}
synchronized void notifyCameraOpened() {
notify();
}
void openCamera() {
mHandler.post(new Runnable() {
@Override
public void run() {
owner.oldConnectCamera();
notifyCameraOpened();
}
});
try {
wait();
} catch (InterruptedException e) {
Log.w(TAG, "wait was interrupted");
}
}
}
@Override
public boolean supportsRegionOfInterest() {
return false;
}
@Override
public void connect() {
connectLocalCamera();
}
@Override
public void release() {
synchronized (this) {
if (USE_THREAD) {
if (mThread != null) {
mThread.interrupt();
mThread = null;
}
}
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
try {
mCamera.setPreviewTexture(null);
} catch (IOException e) {
Log.e(TAG, "Could not release preview-texture from camera.");
}
mCamera.release();
Log.d(TAG, "Preview stopped and camera released");
}
mCamera = null;
if (mCameraFrame != null) {
mCameraFrame.release();
}
if (texture != null)
texture.release();
}
}
@Override
public boolean isConnected() {
return mCamera != null;
}
}
The camera frame class is taken from the OpenCV samples. It's responsible for converting the raw byte array into OpenCV Mat structures. The implementation of that interface (CameraAccessFrame) is in the code above.
public interface CameraFrame extends CvCameraViewFrame {
Bitmap toBitmap();
@Override
Mat rgba();
@Override
Mat gray();
}
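For the boot part of the question: the class above only needs a Context, so it can be driven from a plain Service started by a BOOT_COMPLETED receiver. A minimal sketch (CameraService and BootReceiver are illustrative names, not classes from the code above; the receiver needs the RECEIVE_BOOT_COMPLETED permission and a matching <receiver> entry in the manifest):

public class BootReceiver extends BroadcastReceiver {
    @Override
    public void onReceive(Context context, Intent intent) {
        if (Intent.ACTION_BOOT_COMPLETED.equals(intent.getAction())) {
            // start the long-running service that owns the HardwareCamera
            context.startService(new Intent(context, CameraService.class));
        }
    }
}

Inside CameraService you would load OpenCV, construct HardwareCamera with the service as the Context, call connect() once the library is ready, and call release() in onDestroy().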
I've written a camera application, but when I show the camera's live preview (before taking the picture), the preview is rotated 90 degrees! Here is my camera activity's code:
public class CameraActivity extends Activity{
public static final int MEDIA_TYPE_IMAGE = 1 ;
private Camera mCamera;
private CameraPreview mPreview;
Uri photoPath ;
protected void onStop()
{
super.onStop();
mCamera.release();
}
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera);
// Create an instance of Camera
mCamera = getCameraInstance();
// Create our Preview view and set it as the content of our activity.
mPreview = new CameraPreview(this, mCamera);
FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
ViewGroup.LayoutParams previewParam = preview.getLayoutParams() ;
Parameters cameraParam = mCamera.getParameters() ;
double ratio = (double)cameraParam.getPictureSize().height / (double)cameraParam.getPictureSize().width ;
// previewParam.height= cameraParam.getPictureSize().height / 5 ;
// previewParam.width = cameraParam.getPictureSize().width / 5 ;
Display display = getWindowManager().getDefaultDisplay();
Point size = new Point();
try
{
display.getSize(size);
}
catch(java.lang.NoSuchMethodError ignore)
{
size.x = display.getWidth();
size.y = display.getHeight() ;
}
int width = size.x;
int height = size.y;
previewParam.width = width;
previewParam.height = (int)(previewParam.width * ratio) ;
// preview.setLayoutParams(previewParam) ;
preview.addView(mPreview);
}
//Camera Classes here
private final static String TAG = "Navid";
public static Camera getCameraInstance()
{
Camera c = null ;
try
{
c = Camera.open() ;
}
catch(Exception e)
{
}
return c ;
}
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback
{
private SurfaceHolder mHolder ;
private Camera mCamera;
public CameraPreview(Context context , Camera camera)
{
super(context) ;
mCamera = camera ;
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder)
{
try
{
mCamera.setPreviewDisplay(holder);
mCamera.startPreview();
}
catch(IOException e)
{
Log.d(TAG,"Camera Preview Failed!: "+e.getMessage());
}
}
public void surfaceChanged(SurfaceHolder holder , int m , int n , int w)
{
}
public void surfaceDestroyed(SurfaceHolder holder)
{
}
}
private static File getOutputMediaFile(int type){
// To be safe, you should check that the SDCard is mounted
// using Environment.getExternalStorageState() before doing this.
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES), "MyCameraApp");
// This location works best if you want the created images to be shared
// between applications and persist after your app has been uninstalled.
// Create the storage directory if it does not exist
if (! mediaStorageDir.exists()){
if (! mediaStorageDir.mkdirs()){
Log.d("MyCameraApp", "failed to create directory");
return null;
}
}
// Create a media file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
File mediaFile;
if (type == MEDIA_TYPE_IMAGE){
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"IMG_"+ timeStamp + ".jpg");
}else {
return null;
}
return mediaFile;
}
//save the picture here
private PictureCallback mPicture = new PictureCallback() {
// public final static int MEDIA_TYPE_IMAGE = 1 ;
@Override
public void onPictureTaken(byte[] data, Camera camera) {
File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
photoPath = Uri.fromFile(pictureFile);
if (pictureFile == null){
Log.d("Errore Doorbin", "Error creating media file, check storage permissions: ");
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.close();
// these lines are for the gallery to scan the SDCard manually
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES), "MyCameraApp");
sendBroadcast(new Intent(Intent.ACTION_MEDIA_MOUNTED,
Uri.parse("file://"+ mediaStorageDir)));
// photoPath = Uri.fromFile(mediaStorageDir) ;
/* MediaScannerConnection.scanFile(CameraActivity.this,
new String[] { fos.toString() }, null,
new MediaScannerConnection.OnScanCompletedListener() {
public void onScanCompleted(String path, Uri uri) {
// code to execute when scanning is complete
}
});*/
// fos.close();
} catch (FileNotFoundException e) {
Log.d("Errore Doorbin", "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d("Errore Doorbin", "Error accessing file: " + e.getMessage());
}
catch (Exception e)
{
Log.d("Errore Doorbin", "errore Kolli dade!" + e.getMessage()) ;
}
}
};
public void capture(View v)
{
//mCamera.takePicture(null, null, mPicture);
//mCamera.release();
//mCamera = getCameraInstance() ;
//mCamera.startPreview();
TakePictureTask takePicture = new TakePictureTask() ;
takePicture.execute() ;
}
public void accept(View v)
{
Intent data = new Intent() ;
data.setData(photoPath) ;
setResult(RESULT_OK, data);
finish() ;
}
public void retake(View v)
{
Button button = (Button)findViewById(R.id.button_accept);
button.setVisibility(View.GONE);
button = (Button)findViewById(R.id.button_capture) ;
button.setVisibility(View.VISIBLE) ;
button = (Button)findViewById(R.id.button_retake);
button.setVisibility(View.GONE) ;
mCamera.startPreview();
}
/**
* A pretty basic example of an AsyncTask that takes the photo and
* then sleeps for a defined period of time before finishing. Upon
* finishing, it will restart the preview - Camera.startPreview().
*/
private class TakePictureTask extends AsyncTask<Void, Void, Void> {
@Override
protected void onPostExecute(Void result) {
// This returns the preview back to the live camera feed
Button button = (Button)findViewById(R.id.button_accept) ;
button.setVisibility(View.VISIBLE) ;
button = (Button)findViewById(R.id.button_retake) ;
button.setVisibility(View.VISIBLE);
button = (Button)findViewById(R.id.button_capture);
button.setVisibility(View.GONE);
//mCamera.startPreview();
}
@Override
protected Void doInBackground(Void... params) {
mCamera.takePicture(null, null, mPicture);
// Sleep for however long, you could store this in a variable and
// have it updated by a menu item which the user selects.
try {
Thread.sleep(3000); // 3 second preview
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}
}
}
In my application's manifest I've set this activity's orientation to portrait!
What is the problem? Why does the preview look like this?
This happens to be a bug in earlier versions of Android.
A workaround is to set the display orientation yourself, depending on the current orientation:
if (this.getResources().getConfiguration().orientation != Configuration.ORIENTATION_LANDSCAPE) {
camera.setDisplayOrientation(90);
lp.height = previewSurfaceHeight;
lp.width = (int) (previewSurfaceHeight / aspect);
} else {
camera.setDisplayOrientation(0);
lp.width = previewSurfaceWidth;
lp.height = (int) (previewSurfaceWidth / aspect);
}
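The snippet above assumes lp is the preview view's LayoutParams and aspect its aspect ratio. If you want something that also handles the front camera and unusual sensor mountings, the helper from the Camera#setDisplayOrientation documentation computes the preview rotation instead of hard-coding 90 (cameraId is whatever you passed to Camera.open):

public static void setCameraDisplayOrientation(Activity activity, int cameraId, Camera camera) {
    Camera.CameraInfo info = new Camera.CameraInfo();
    Camera.getCameraInfo(cameraId, info);
    int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
    int degrees = 0;
    switch (rotation) {
    case Surface.ROTATION_0:   degrees = 0;   break;
    case Surface.ROTATION_90:  degrees = 90;  break;
    case Surface.ROTATION_180: degrees = 180; break;
    case Surface.ROTATION_270: degrees = 270; break;
    }
    int result;
    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
        result = (info.orientation + degrees) % 360;
        result = (360 - result) % 360; // compensate for the front camera mirror
    } else {
        result = (info.orientation - degrees + 360) % 360;
    }
    camera.setDisplayOrientation(result);
}

Note that this only fixes the live preview; the saved JPEG still needs Parameters.setRotation() or a rotation applied in onPictureTaken().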
I am developing a camera application. When I launch my app it displays the back camera first, and there is a button that is supposed to switch to the front camera. When I click that button I get a NullPointerException, but when I run the front-camera code on its own it displays the front camera perfectly.
This is my Java code:
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
// getAspectRatio();
getWindow().setFormat(PixelFormat.UNKNOWN);
surfaceView = (SurfaceView)findViewById(R.id.camerapreview);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
Button buttonTakePicture = (Button)findViewById(R.id.takepicture);
buttonTakePicture.setOnClickListener(new Button.OnClickListener(){
@Override
public void onClick(View arg0) {
// TODO Auto-generated method stub
camera.takePicture(myShutterCallback, myPictureCallback_RAW, myPictureCallback_JPG);
}});
front=(Button)findViewById(R.id.front);
front.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
{
Intent font_intent=new Intent(AndroidCamera.this,MainActivity.class);
startActivity(font_intent);
}
}
});
ShutterCallback myShutterCallback = new ShutterCallback(){
@Override
public void onShutter() {
// TODO Auto-generated method stub
}};
PictureCallback myPictureCallback_RAW = new PictureCallback(){
@Override
public void onPictureTaken(byte[] arg0, Camera arg1) {
// TODO Auto-generated method stub
}};
PictureCallback myPictureCallback_JPG = new PictureCallback(){
@Override
public void onPictureTaken(byte[] arg0, Camera arg1)
{
// TODO Auto-generated method stub
bitmapPicture = BitmapFactory.decodeByteArray(arg0, 0, arg0.length);
Bitmap mBitmap2 = BitmapFactory.decodeResource(getResources(),currenteffect);
Bitmap newgerresized=getResizedBitmap(mBitmap2, bitmapPicture.getWidth(), bitmapPicture.getHeight());
Bitmap map = applyOverlayEffect(newgerresized, bitmapPicture);
String fieName = UUID.randomUUID().toString();
SaveImage(fieName, 100,map);
camera.startPreview();
}
};
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,int height) {
// TODO Auto-generated method stub
if(previewing)
{
camera.stopPreview();
previewing = false;
}
if (camera != null){
try
{
camera.setPreviewDisplay(surfaceHolder);
camera.startPreview();
previewing = true;
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
@Override
public void surfaceCreated(SurfaceHolder holder)
{
try{
// TODO Auto-generated method stub
camera = Camera.open();
mgr = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
mgr.setStreamMute(AudioManager.STREAM_SYSTEM, true);
}catch (Exception e) {
e.printStackTrace();
}finally
{
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder)
{
// TODO Auto-generated method stub
camera.stopPreview();
camera.release();
camera = null;
previewing = false;
}
For the front camera, this is my Java code:
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
int currentapiVersion = android.os.Build.VERSION.SDK_INT;
if (currentapiVersion > android.os.Build.VERSION_CODES.FROYO){
id=findFrontFacingCamera();
Log.d("TestLedActivity","L'id trovato e': "+id);
camera = Camera.open(id+1);
} else{
Log.d("TestLedActivity","La versione e' froyo");
camera = Camera.open();
}
preview=(SurfaceView)findViewById(R.id.surfaceView1);
// TODO Auto-generated method stub
previewHolder=preview.getHolder();
previewHolder.addCallback(surfaceCallback);
previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
i=(ImageView)findViewById(R.id.imageView1);
currenteffect=R.drawable.water;
mBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.water);
newgerresized=getResizedBitmap(mBitmap, width,height);
bi=Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
paint = new Paint();
paint.setAlpha(100);
canvas=new Canvas(bi);
canvas.setBitmap(bi);
canvas.drawBitmap(newgerresized, 0, 0, paint);
i.setImageBitmap(bi);
}
public static Bitmap getResizedBitmap(Bitmap bm, int newWidth, int newHeight)
{
BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
ByteArrayOutputStream stream = new ByteArrayOutputStream();
bm.compress(Bitmap.CompressFormat.PNG, 100, stream);
byte[] byteArray = stream.toByteArray();
BitmapFactory.decodeStream(new ByteArrayInputStream(byteArray), null, options);
int srcWidth = bm.getWidth();
int srcHeight = bm.getHeight();
int desiredWidth = newWidth;
int desiredHeight = newHeight;
int inSampleSize = 1;
while(srcWidth / 2 > desiredWidth)
{
srcWidth /= 2;
srcHeight /= 2;
inSampleSize *= 2;
}
float desiredWidthScale = (float) desiredWidth / srcWidth;
float desiredHeightScale = (float) desiredHeight / srcHeight;
// Decode with inSampleSize
options.inJustDecodeBounds = false;
options.inDither = false;
options.inSampleSize = inSampleSize;
options.inScaled = false;
options.inPreferredConfig = Bitmap.Config.ARGB_8888;
Bitmap sampledSrcBitmap = BitmapFactory.decodeStream(new ByteArrayInputStream(byteArray), null, options);
Matrix matrix = new Matrix();
matrix.postScale(desiredWidthScale, desiredHeightScale);
matrix.postRotate(Orientation);
Bitmap bmp = Bitmap.createBitmap(sampledSrcBitmap, 0, 0, sampledSrcBitmap.getWidth(), sampledSrcBitmap.getHeight(), matrix, true);
return bmp;
}
@Override
public void onResume() {
super.onResume();
}
@Override
public void onPause() {
if (inPreview) {
camera.stopPreview();
}
camera.release();
camera=null;
inPreview=false;
super.onPause();
}
private int findFrontFacingCamera() {
int idCamera=0;
// Look for front-facing camera, using the Gingerbread API.
// Java reflection is used for backwards compatibility with pre-Gingerbread APIs.
try {
Class<?> cameraClass = Class.forName("android.hardware.Camera");
Object cameraInfo = null;
Field field = null;
int cameraCount = 0;
Method getNumberOfCamerasMethod = cameraClass.getMethod( "getNumberOfCameras" );
if ( getNumberOfCamerasMethod != null ) {
cameraCount = (Integer) getNumberOfCamerasMethod.invoke( null, (Object[]) null );
}
Class<?> cameraInfoClass = Class.forName("android.hardware.Camera$CameraInfo");
if ( cameraInfoClass != null ) {
cameraInfo = cameraInfoClass.newInstance();
}
if ( cameraInfo != null ) {
field = cameraInfo.getClass().getField( "facing" );
}
Method getCameraInfoMethod = cameraClass.getMethod( "getCameraInfo", Integer.TYPE, cameraInfoClass );
if ( getCameraInfoMethod != null && cameraInfoClass != null && field != null ) {
for ( int camIdx = 0; camIdx < cameraCount; camIdx++ ) {
getCameraInfoMethod.invoke( null, camIdx, cameraInfo );
int facing = field.getInt( cameraInfo );
if ( facing == 1 ) { // Camera.CameraInfo.CAMERA_FACING_FRONT
try {
Method cameraOpenMethod = cameraClass.getMethod( "open", Integer.TYPE );
if ( cameraOpenMethod != null ) {
Log.d("TestLedActivity","Id frontale trovato: "+camIdx);
//camera = (Camera) cameraOpenMethod.invoke( null, camIdx );
idCamera=camIdx;
}
} catch (RuntimeException e) {
Log.e("TestLedActivity", "Camera failed to open: " + e.getLocalizedMessage());
}
}
}
}
}
// Ignore the bevy of checked exceptions the Java Reflection API throws - if it fails, who cares.
catch ( ClassNotFoundException e ) {Log.e("TestLedActivity", "ClassNotFoundException" + e.getLocalizedMessage());}
catch ( NoSuchMethodException e ) {Log.e("TestLedActivity", "NoSuchMethodException" + e.getLocalizedMessage());}
catch ( NoSuchFieldException e ) {Log.e("TestLedActivity", "NoSuchFieldException" + e.getLocalizedMessage());}
catch ( IllegalAccessException e ) {Log.e("TestLedActivity", "IllegalAccessException" + e.getLocalizedMessage());}
catch ( InvocationTargetException e ) {Log.e("TestLedActivity", "InvocationTargetException" + e.getLocalizedMessage());}
catch ( InstantiationException e ) {Log.e("TestLedActivity", "InstantiationException" + e.getLocalizedMessage());}
catch ( SecurityException e ) {Log.e("TestLedActivity", "SecurityException" + e.getLocalizedMessage());}
if ( camera == null ) {
Log.d("TestLedActivity","Devo aprire la camera dietro");
// Try using the pre-Gingerbread APIs to open the camera.
idCamera=0;
}
return idCamera;
}
private Camera.Size getBestPreviewSize(int width, int height,
Camera.Parameters parameters) {
Camera.Size result=null;
for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
if (size.width<=width && size.height<=height) {
if (result==null) {
result=size;
}
else {
int resultArea=result.width*result.height;
int newArea=size.width*size.height;
if (newArea>resultArea) {
result=size;
}
}
}
}
return(result);
}
SurfaceHolder.Callback surfaceCallback=new SurfaceHolder.Callback() {
public void surfaceCreated(SurfaceHolder holder) {
try {
camera.setPreviewDisplay(previewHolder);
}
catch (Throwable t) {
Log.e("PreviewDemo-surfaceCallback",
"Exception in setPreviewDisplay()", t);
Toast
.makeText(getApplicationContext(), t.getMessage(), Toast.LENGTH_LONG)
.show();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder,
int format, int width,
int height) {
Camera.Parameters parameters=camera.getParameters();
Camera.Size size=getBestPreviewSize(width, height,
parameters);
if (size!=null) {
//parameters.set("camera-id", 0);
parameters.setPreviewSize(size.width, size.height);
camera.setParameters(parameters);
camera.startPreview();
inPreview=true;
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// no-op
}
};
}
Please help me.
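Two things stand out. First, in the front-camera snippet, findFrontFacingCamera() returns the index of the front camera, but it is then opened with Camera.open(id + 1), which opens the camera after the one that was found (or throws if that index doesn't exist), so that is worth checking. Second, starting a whole new Activity for the front camera while the back-camera activity still holds the Camera can leave one of the two with a null camera. A sketch of switching inside the same activity instead (the fields surfaceHolder, camera, and previewing are taken from the first snippet; switchToFrontCamera is an illustrative name, not something from your code):

private void switchToFrontCamera() {
    if (camera != null) {
        camera.stopPreview();
        camera.release(); // the old camera must be released before open()
        camera = null;
    }
    int frontId = -1;
    Camera.CameraInfo info = new Camera.CameraInfo();
    for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
        Camera.getCameraInfo(i, info);
        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            frontId = i;
            break;
        }
    }
    if (frontId == -1) {
        return; // this device has no front camera
    }
    camera = Camera.open(frontId);
    try {
        camera.setPreviewDisplay(surfaceHolder);
        camera.startPreview();
        previewing = true;
    } catch (IOException e) {
        e.printStackTrace();
    }
}

This keeps a single SurfaceView and a single Camera field, which avoids the situation where the second activity's surfaceCreated() runs before the first activity has released the camera.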