Switching from the back camera to the front camera I get a NullPointerException - java

I am developing a camera application. When I launch my app it first displays the back camera, and I have a button that switches to the front camera when clicked. At that moment I get a NullPointerException, but when I run the front camera individually it displays the front camera perfectly.
This is my Java code:
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
// getAspectRatio();
getWindow().setFormat(PixelFormat.UNKNOWN);
surfaceView = (SurfaceView)findViewById(R.id.camerapreview);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
Button buttonTakePicture = (Button)findViewById(R.id.takepicture);
buttonTakePicture.setOnClickListener(new Button.OnClickListener(){
@Override
public void onClick(View arg0) {
// TODO Auto-generated method stub
camera.takePicture(myShutterCallback, myPictureCallback_RAW, myPictureCallback_JPG);
}});
front=(Button)findViewById(R.id.front);
front.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
{
Intent font_intent=new Intent(AndroidCamera.this,MainActivity.class);
startActivity(font_intent);
}
}
});
ShutterCallback myShutterCallback = new ShutterCallback(){
@Override
public void onShutter() {
// TODO Auto-generated method stub
}};
PictureCallback myPictureCallback_RAW = new PictureCallback(){
@Override
public void onPictureTaken(byte[] arg0, Camera arg1) {
// TODO Auto-generated method stub
}};
PictureCallback myPictureCallback_JPG = new PictureCallback(){
@Override
public void onPictureTaken(byte[] arg0, Camera arg1)
{
// TODO Auto-generated method stub
bitmapPicture = BitmapFactory.decodeByteArray(arg0, 0, arg0.length);
Bitmap mBitmap2 = BitmapFactory.decodeResource(getResources(),currenteffect);
Bitmap newgerresized=getResizedBitmap(mBitmap2, bitmapPicture.getWidth(), bitmapPicture.getHeight());
Bitmap map = applyOverlayEffect(newgerresized, bitmapPicture);
String fieName = UUID.randomUUID().toString();
SaveImage(fieName, 100,map);
camera.startPreview();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,int height) {
// TODO Auto-generated method stub
if(previewing)
{
camera.stopPreview();
previewing = false;
}
if (camera != null){
try
{
camera.setPreviewDisplay(surfaceHolder);
camera.startPreview();
previewing = true;
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
@Override
public void surfaceCreated(SurfaceHolder holder)
{
try{
// TODO Auto-generated method stub
camera = Camera.open();
mgr = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
mgr.setStreamMute(AudioManager.STREAM_SYSTEM, true);
}catch (Exception e) {
e.printStackTrace();
}finally
{
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder)
{
// TODO Auto-generated method stub
camera.stopPreview();
camera.release();
camera = null;
previewing = false;
}
This is my Java code for the front camera:
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
int currentapiVersion = android.os.Build.VERSION.SDK_INT;
if (currentapiVersion > android.os.Build.VERSION_CODES.FROYO){
id=findFrontFacingCamera();
Log.d("TestLedActivity","L'id trovato e': "+id);
camera = Camera.open(id+1);
} else{
Log.d("TestLedActivity","La versione e' froyo");
camera = Camera.open();
}
preview=(SurfaceView)findViewById(R.id.surfaceView1);
// TODO Auto-generated method stub
previewHolder=preview.getHolder();
previewHolder.addCallback(surfaceCallback);
previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
i=(ImageView)findViewById(R.id.imageView1);
currenteffect=R.drawable.water;
mBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.water);
newgerresized=getResizedBitmap(mBitmap, width,height);
bi=Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
paint = new Paint();
paint.setAlpha(100);
canvas=new Canvas(bi);
canvas.setBitmap(bi);
canvas.drawBitmap(newgerresized, 0, 0, paint);
i.setImageBitmap(bi);
}
public static Bitmap getResizedBitmap(Bitmap bm, int newWidth, int newHeight)
{
BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
ByteArrayOutputStream stream = new ByteArrayOutputStream();
bm.compress(Bitmap.CompressFormat.PNG, 100, stream);
byte[] byteArray = stream.toByteArray();
BitmapFactory.decodeStream(new ByteArrayInputStream(byteArray), null, options);
int srcWidth = bm.getWidth();
int srcHeight = bm.getHeight();
int desiredWidth = newWidth;
int desiredHeight = newHeight;
int inSampleSize = 1;
while(srcWidth / 2 > desiredWidth)
{
srcWidth /= 2;
srcHeight /= 2;
inSampleSize *= 2;
}
float desiredWidthScale = (float) desiredWidth / srcWidth;
float desiredHeightScale = (float) desiredHeight / srcHeight;
// Decode with inSampleSize
options.inJustDecodeBounds = false;
options.inDither = false;
options.inSampleSize = inSampleSize;
options.inScaled = false;
options.inPreferredConfig = Bitmap.Config.ARGB_8888;
Bitmap sampledSrcBitmap = BitmapFactory.decodeStream(new ByteArrayInputStream(byteArray), null, options);
Matrix matrix = new Matrix();
matrix.postScale(desiredWidthScale, desiredHeightScale);
matrix.postRotate(Orientation);
Bitmap bmp = Bitmap.createBitmap(sampledSrcBitmap, 0, 0, sampledSrcBitmap.getWidth(), sampledSrcBitmap.getHeight(), matrix, true);
return bmp;
}
@Override
public void onResume() {
super.onResume();
}
@Override
public void onPause() {
if (inPreview) {
camera.stopPreview();
}
camera.release();
camera=null;
inPreview=false;
super.onPause();
}
private int findFrontFacingCamera() {
int idCamera=0;
// Look for front-facing camera, using the Gingerbread API.
// Java reflection is used for backwards compatibility with pre-Gingerbread APIs.
try {
Class<?> cameraClass = Class.forName("android.hardware.Camera");
Object cameraInfo = null;
Field field = null;
int cameraCount = 0;
Method getNumberOfCamerasMethod = cameraClass.getMethod( "getNumberOfCameras" );
if ( getNumberOfCamerasMethod != null ) {
cameraCount = (Integer) getNumberOfCamerasMethod.invoke( null, (Object[]) null );
}
Class<?> cameraInfoClass = Class.forName("android.hardware.Camera$CameraInfo");
if ( cameraInfoClass != null ) {
cameraInfo = cameraInfoClass.newInstance();
}
if ( cameraInfo != null ) {
field = cameraInfo.getClass().getField( "facing" );
}
Method getCameraInfoMethod = cameraClass.getMethod( "getCameraInfo", Integer.TYPE, cameraInfoClass );
if ( getCameraInfoMethod != null && cameraInfoClass != null && field != null ) {
for ( int camIdx = 0; camIdx < cameraCount; camIdx++ ) {
getCameraInfoMethod.invoke( null, camIdx, cameraInfo );
int facing = field.getInt( cameraInfo );
if ( facing == 1 ) { // Camera.CameraInfo.CAMERA_FACING_FRONT
try {
Method cameraOpenMethod = cameraClass.getMethod( "open", Integer.TYPE );
if ( cameraOpenMethod != null ) {
Log.d("TestLedActivity","Id frontale trovato: "+camIdx);
//camera = (Camera) cameraOpenMethod.invoke( null, camIdx );
idCamera=camIdx;
}
} catch (RuntimeException e) {
Log.e("TestLedActivity", "Camera failed to open: " + e.getLocalizedMessage());
}
}
}
}
}
// Ignore the bevy of checked exceptions the Java Reflection API throws - if it fails, who cares.
catch ( ClassNotFoundException e ) {Log.e("TestLedActivity", "ClassNotFoundException" + e.getLocalizedMessage());}
catch ( NoSuchMethodException e ) {Log.e("TestLedActivity", "NoSuchMethodException" + e.getLocalizedMessage());}
catch ( NoSuchFieldException e ) {Log.e("TestLedActivity", "NoSuchFieldException" + e.getLocalizedMessage());}
catch ( IllegalAccessException e ) {Log.e("TestLedActivity", "IllegalAccessException" + e.getLocalizedMessage());}
catch ( InvocationTargetException e ) {Log.e("TestLedActivity", "InvocationTargetException" + e.getLocalizedMessage());}
catch ( InstantiationException e ) {Log.e("TestLedActivity", "InstantiationException" + e.getLocalizedMessage());}
catch ( SecurityException e ) {Log.e("TestLedActivity", "SecurityException" + e.getLocalizedMessage());}
if ( camera == null ) {
Log.d("TestLedActivity","Devo aprire la camera dietro");
// Try using the pre-Gingerbread APIs to open the camera.
idCamera=0;
}
return idCamera;
}
private Camera.Size getBestPreviewSize(int width, int height,
Camera.Parameters parameters) {
Camera.Size result=null;
for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
if (size.width<=width && size.height<=height) {
if (result==null) {
result=size;
}
else {
int resultArea=result.width*result.height;
int newArea=size.width*size.height;
if (newArea>resultArea) {
result=size;
}
}
}
}
return(result);
}
SurfaceHolder.Callback surfaceCallback=new SurfaceHolder.Callback() {
public void surfaceCreated(SurfaceHolder holder) {
try {
camera.setPreviewDisplay(previewHolder);
}
catch (Throwable t) {
Log.e("PreviewDemo-surfaceCallback",
"Exception in setPreviewDisplay()", t);
Toast
.makeText(getApplicationContext(), t.getMessage(), Toast.LENGTH_LONG)
.show();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder,
int format, int width,
int height) {
Camera.Parameters parameters=camera.getParameters();
Camera.Size size=getBestPreviewSize(width, height,
parameters);
if (size!=null) {
//parameters.set("camera-id", 0);
parameters.setPreviewSize(size.width, size.height);
camera.setParameters(parameters);
camera.startPreview();
inPreview=true;
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// no-op
}
};
}
Please help me.
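For reference, here is a minimal sketch of switching to the front camera inside the same activity instead of launching a second activity. It reuses the previewing flag, the surfaceHolder and the findFrontFacingCamera() helper shown above; treat it as a sketch, not a drop-in fix:
private void switchToFrontCamera() {
    // the camera that currently owns the preview must be stopped and released first
    if (camera != null) {
        if (previewing) {
            camera.stopPreview();
            previewing = false;
        }
        camera.release();
        camera = null;
    }
    try {
        int frontId = findFrontFacingCamera();
        camera = Camera.open(frontId);            // open by the id itself, not id + 1
        camera.setPreviewDisplay(surfaceHolder);  // reuse the existing preview surface
        camera.startPreview();
        previewing = true;
    } catch (Exception e) {
        Log.e("AndroidCamera", "Could not switch to the front camera", e);
    }
}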

Related

Camera2 API error - taking a picture does not work

I am having an issue with the Camera2 API.
When I try to take a picture, nothing happens; the app itself does not crash. The error only occurs when I try to take a picture.
Furthermore, I requested all the permissions I need in another activity.
The following error appears in the log:
Access denied finding property "persist.vendor.camera.privapp.list"
I do not know whether it is related to the problem, but please use it as a reference.
I know that there are similar questions, but none of them worked for me.
I deleted some code; if that causes any inconvenience, please let me know.
Thank you in advance.
public class Camera extends AppCompatActivity implements TextureView.SurfaceTextureListener,
ActivityCompat.OnRequestPermissionsResultCallback {
private AutoFitTextureView mTextureView;
private static final int REQUEST_CAMERA_PERMISSION = 1;
private static final int REQUEST_WRITE_STORAGE = 2;
private static final int REQUEST_SAVE_IMAGE = 1002;
private static final int MAX_PREVIEW_WIDTH = 2220;
private static final int MAX_PREVIEW_HEIGHT = 1080;
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
private String mCameraId;
private CameraDevice mCameraDevice;
private CameraCaptureSession mCaptureSession;
private CaptureRequest.Builder mPreviewRequestBuilder;
private CaptureRequest mPreviewRequest;
private int mSensorOrientation;
private ImageReader mImageReader;
private Size mPreviewSize;
private File mImageFolder;
private Byte bytes;
private Bitmap bitmap;
private File mFile;
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
mTextureView = (AutoFitTextureView) findViewById(R.id.texture);
Button button02 = (Button)findViewById(R.id.button_take_picture);
button02.setOnClickListener(new Button.OnClickListener() {
@Override
public void onClick (View view){
takePicture();
}
}
);
}
private void setUpCameraOutputs ( int width, int height){
Activity activity = this;
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : manager.getCameraIdList()) {
CameraCharacteristics characteristics
= manager.getCameraCharacteristics(cameraId);
Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
mCameraId = cameraId;
StreamConfigurationMap map = characteristics.get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
continue;
}
Size largest = Collections.max(
Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
new CompareSizesByArea());
int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
boolean swappedDimensions = false;
switch (displayRotation) {
case Surface.ROTATION_0:
case Surface.ROTATION_180:
if (mSensorOrientation == 90 || mSensorOrientation == 270) {
swappedDimensions = true;
}
break;
case Surface.ROTATION_90:
case Surface.ROTATION_270:
if (mSensorOrientation == 0 || mSensorOrientation == 180) {
swappedDimensions = true;
}
break;
}
Point displaySize = new Point();
activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
int rotatedPreviewWidth = width;
int rotatedPreviewHeight = height;
int maxPreviewWidth = displaySize.x;
int maxPreviewHeight = displaySize.y;
if (swappedDimensions) {
rotatedPreviewWidth = height;
rotatedPreviewHeight = width;
maxPreviewWidth = displaySize.y;
maxPreviewHeight = displaySize.x;
}
if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
maxPreviewWidth = MAX_PREVIEW_WIDTH;
}
if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
maxPreviewHeight = MAX_PREVIEW_HEIGHT;
}
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
maxPreviewHeight, largest);
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
mTextureView.setAspectRatio(
mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
mTextureView.setAspectRatio(
mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
return;
}
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (NullPointerException e) {
}
}
protected void takePicture () {
if (null == mCameraDevice) {
Log.e("Camera", "cameraDevice is null");
return;
}
if (ContextCompat.checkSelfPermission(this,Manifest.permission.WRITE_EXTERNAL_STORAGE)!= PackageManager.PERMISSION_GRANTED) {
return;
}else{
if (shouldShowRequestPermissionRationale(Manifest.permission.WRITE_EXTERNAL_STORAGE)){
Toast.makeText(this,"We need your permission to record and save image",Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE},REQUEST_WRITE_STORAGE);
}
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraDevice.getId());
Size[] jpegSizes = null;
if (characteristics != null) {
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
}
int width = 640;
int height = 480;
if (jpegSizes != null && 0 < jpegSizes.length) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
final ImageReader mImageReader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 2);
List<Surface> outputSurfaces = new ArrayList<>(2);
outputSurfaces.add(mImageReader.getSurface());
outputSurfaces.add(new Surface(mTextureView.getSurfaceTexture()));
int rotation = getWindowManager().getDefaultDisplay().getRotation();
//Callback that is called when a new image is available from ImageReader
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireNextImage(); // grab the JPEG that was just delivered to the ImageReader
mFile = new File(getExternalFilesDir(null), "sample.jpg");
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
FileOutputStream fileOutputStream = null;
try {
fileOutputStream = new FileOutputStream(mFile);
fileOutputStream.write(bytes);
} catch (Exception e) {
e.printStackTrace();
}finally {
mImageReader.close();
if (null != fileOutputStream){
try {
fileOutputStream.close();
}catch (IOException e){
e.printStackTrace();
}
}
}
}
};
mImageReader.setOnImageAvailableListener(readerListener, null);
final CameraCaptureSession.CaptureCallback captureListener =
new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Toast.makeText(Camera.this, "Saved:" + mImageReader, Toast.LENGTH_SHORT).show();
createCameraPreviewSession();
}
};
mCameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(mPreviewRequestBuilder.build(), captureListener, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void createCameraPreviewSession () {
try {
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
Surface surface = new Surface(texture);
mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
mCameraDevice.createCaptureSession(Arrays.asList(surface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
if (null == mCameraDevice) {
return;
}
mCaptureSession = cameraCaptureSession;
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
mPreviewRequest = mPreviewRequestBuilder.build();
mCaptureSession.setRepeatingRequest(mPreviewRequest,
null, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(
@NonNull CameraCaptureSession cameraCaptureSession) {
}
}, null
);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
mCameraDevice = cameraDevice;
createCameraPreviewSession();
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int error) {
Log.d("debug","Camera error");
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
};
@RequiresApi(api = Build.VERSION_CODES.M)
private void openCamera ( int width, int height){
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
!= PackageManager.PERMISSION_GRANTED) {
Toast.makeText(this,"No permission to open camera",Toast.LENGTH_SHORT).show();
requestPermissions(new String[]{Manifest.permission.CAMERA},REQUEST_CAMERA_PERMISSION);
return;
}
setUpCameraOutputs(width, height);
CameraManager manager = (CameraManager) this.getSystemService(Context.CAMERA_SERVICE);
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
manager.openCamera(mCameraId, mStateCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
}
}
}
Could you try requesting the camera permission first, before trying to take the actual picture? Maybe this is the reason it's crashing.
Remove this permission part from your takePicture() method:
if (ContextCompat.checkSelfPermission(this,Manifest.permission.WRITE_EXTERNAL_STORAGE)!= PackageManager.PERMISSION_GRANTED) {
return;
}else{
if (shouldShowRequestPermissionRationale(Manifest.permission.WRITE_EXTERNAL_STORAGE)){
Toast.makeText(this,"We need your permission to record and save image",Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE},REQUEST_WRITE_STORAGE);
}
You could execute it first, before calling takePicture().
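A minimal sketch of what that could look like, assuming the permissions are requested once up front (for example from onCreate) and the capture button only calls takePicture() after they have been granted; REQUEST_CAMERA_PERMISSION is the request code already defined in the activity:
private void requestPermissionsIfNeeded() {
    List<String> missing = new ArrayList<>();
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            != PackageManager.PERMISSION_GRANTED) {
        missing.add(Manifest.permission.CAMERA);
    }
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE)
            != PackageManager.PERMISSION_GRANTED) {
        missing.add(Manifest.permission.WRITE_EXTERNAL_STORAGE);
    }
    if (!missing.isEmpty()) {
        ActivityCompat.requestPermissions(this,
                missing.toArray(new String[0]), REQUEST_CAMERA_PERMISSION);
    }
}

@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
                                        @NonNull int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    // only enable the capture button / call takePicture() once everything was granted
}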

Getting a Bitmap from a Uri

protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_view_image);
imageView = (ImageView) findViewById(R.id.imageView);
Intent intent = getIntent();
Uri imageUri = intent.getData();
// Picasso.with(this).load(imageUri).into(imageView);
if(imageUri == null){
Log.d(TAG, "Check");
}
//BitmapFactory.Options options = new BitmapFactory.Options();
// options.inSampleSize = 8;
// bm = MediaStore.Images.Media.getBitmap(getContentResolver(),imageUri);
Bitmap bm = null;
try{
InputStream image;
try{
image = getContentResolver().openInputStream(imageUri);
bm = BitmapFactory.decodeStream(image);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
}
catch (Exception e){
e.printStackTrace();
}
//BitmapFactory.Options options = new BitmapFactory.Options();
// bm = BitmapFactory.decodeFile(file.getAbsolutePath(),options);
if ((bm == null)) {
prints("It doesn't work");
}
//Log.d(TAG,"Going to convert image.");
imageView.setImageBitmap(bm);
I've looked at a bunch of Stack Overflow questions and tried them all, as you can see in the commented code. I'm trying to get a Bitmap that is unchanged from when the photo was taken; the pixel positions of the Bitmap are crucial for my goal. The image is high quality and may be a large file. My Bitmap is always null, though I have made sure my Uri isn't. Any ideas?
I think the code below will help you
private void setImage(String path, CircleImageView img) {
options = new DisplayImageOptions.Builder()
.cacheInMemory(true)
.cacheOnDisk(true)
.considerExifParams(true)
.bitmapConfig(Bitmap.Config.RGB_565)
.showImageOnLoading(R.drawable.user)
.showImageForEmptyUri(R.drawable.user)
.showImageOnFail(R.drawable.user)
.build();
ImageLoader.getInstance().displayImage(path, img, options, new SimpleImageLoadingListener() {
@Override
public void onLoadingStarted(String imageUri, View view) {
super.onLoadingStarted(imageUri, view);
}
@Override
public void onLoadingFailed(String imageUri, View view, FailReason failReason) {
super.onLoadingFailed(imageUri, view, failReason);
}
@Override
public void onLoadingCancelled(String imageUri, View view) {
super.onLoadingCancelled(imageUri, view);
}
@Override
public void onLoadingComplete(String imageUri, View view, Bitmap loadedImage) {
super.onLoadingComplete(imageUri, view, loadedImage);
}
}, new ImageLoadingProgressListener() {
@Override
public void onProgressUpdate(String s, View view, int i, int i1) {
}
});
}
Use Universal Image Loader for the SimpleImageLoadingListener().
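Note that Universal Image Loader has to be initialized once before displayImage() is called, otherwise it will complain that it has not been initialized; a minimal sketch (context is any Context, e.g. your Application):
if (!ImageLoader.getInstance().isInited()) {
    ImageLoader.getInstance().init(ImageLoaderConfiguration.createDefault(context));
}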
The file's resolution is too big to make a bitmap out of it. There is not enough memory to construct the bitmap, and hence null is returned.
Try with a very small image file to confirm.
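If memory is the problem, you can also downsample while decoding instead of loading the full-resolution bitmap. Here is a sketch using the same ContentResolver stream as in the question; the factor 4 is only an example, and keep in mind that downsampling scales all pixel coordinates by the same factor:
private Bitmap decodeDownsampled(Uri imageUri) throws IOException {
    BitmapFactory.Options options = new BitmapFactory.Options();
    options.inSampleSize = 4; // decode at roughly 1/4 of the original width and height
    InputStream in = getContentResolver().openInputStream(imageUri);
    try {
        return BitmapFactory.decodeStream(in, null, options);
    } finally {
        in.close();
    }
}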
You can get a Bitmap directly from the URI, if it points to your device storage, with the code below.
public Bitmap getBitmapFromURI(Uri uri){
String[] filePathColumn = {MediaStore.Images.Media.DATA};
Cursor cursor = getActivity().getContentResolver().query(uri, filePathColumn, null, null, null);
if(cursor!=null){
cursor.moveToFirst();
int columnIndex = cursor.getColumnIndex(filePathColumn[0]);
File f_image = new File(cursor.getString(columnIndex));
cursor.close();
BitmapFactory.Options o2 = new BitmapFactory.Options();
return BitmapFactory.decodeFile(f_image.getAbsolutePath(), o2);
}
return null;
}
In case the file is too large to load, you need to compress the image before showing it. For that you should use an AsyncTask as well.
I have made an image compressor task that you can use for large images:
public class ConvertBase64Task extends AsyncTask<Void, Bitmap, Bitmap> {
private ByteArrayOutputStream baos = new ByteArrayOutputStream();
private File file;
private int CompressionRatio = 80; //You can change it by what ever ratio you want. in 0 to 100.
private boolean shouldrotate = true;
private ImageCompressiorListner imageCompressiorListner;
public ConvertBase64Task(File file) {
this.file = file;
}
public void setRotation(boolean isRotate) {
shouldrotate = isRotate;
}
public void setCompressionRatio(int Ratio) {
CompressionRatio = Ratio;
}
public void setImageCompressiorListner(ImageCompressiorListner imageCompressiorListner) {
this.imageCompressiorListner = imageCompressiorListner;
}
@Override
protected void onPreExecute(){
}
@Override
protected Bitmap doInBackground(Void... params) {
try {
//***** Fatching file
//*****Code for Orientation
Matrix matrix = new Matrix();
if (shouldrotate) {
ExifInterface exif1 = new ExifInterface(file.getAbsolutePath());
int orientation = exif1.getAttributeInt(
ExifInterface.TAG_ORIENTATION, 1);
Log.d("EXIF", "Exif: " + orientation);
if (orientation == 6) {
matrix.postRotate(90);
} else if (orientation == 3) {
matrix.postRotate(180);
} else if (orientation == 8) {
matrix.postRotate(270);
} else {
matrix.postRotate(0);
}
} else {
matrix.postRotate(0);
}
try {
BitmapFactory.Options option = new BitmapFactory.Options();
option.inJustDecodeBounds = true;
BitmapFactory.decodeFile(file.getAbsolutePath(), option);
int file_size = Integer.parseInt(String.valueOf(file.length() / 1024));
Log.e("ImageSize", "" + file_size);
int scale = 1;
if (file_size < 512) {
Log.e("image size is good", "image size is less");
} else if (file_size < 1024) {
Log.e("image size is 1 mb", "image size is heavy");
scale = 2;
} else if (file_size < 1536) {
Log.e("image size is 1.5 mb", "image size is heavy");
scale = 2;
} else if (file_size < 2048) {
Log.e("image size is 2 mb", "image size is heavy");
scale = 4;
} else {
Log.e("image size > 2 mb", "image size is heavy");
scale = 4;
}
Log.e("Scale", "Finaly Scaling with " + scale);
BitmapFactory.Options o2 = new BitmapFactory.Options();
o2.inSampleSize = scale;
Bitmap pickimg = BitmapFactory.decodeFile(file.getAbsolutePath(), o2);
if (pickimg.getWidth() > 1280 || pickimg.getHeight() > 1000) {
int width = pickimg.getWidth();
int height = pickimg.getHeight();
while (width > 1280 || height > 700) {
width = (width * 90) / 100;
height = (height * 90) / 100;
}
pickimg = Bitmap.createScaledBitmap(pickimg, width, height, true);
} else {
pickimg = Bitmap.createBitmap(pickimg, 0, 0, pickimg.getWidth(), pickimg.getHeight(), matrix, true); // rotating bitmap
}
pickimg.compress(Bitmap.CompressFormat.JPEG, CompressionRatio, baos);
return pickimg;
} catch (OutOfMemoryError e) {
e.printStackTrace();
return null;
} catch (Exception e) {
e.printStackTrace();
return null;
}
} catch (Throwable e) {
e.printStackTrace();
return null;
}
}
@Override
protected void onPostExecute(Bitmap result) {
super.onPostExecute(result);
if (result != null) {
if(imageCompressiorListner!=null){
imageCompressiorListner.onImageCompressed(result);
}
} else {
if(imageCompressiorListner!=null){
imageCompressiorListner.onError();
}
}
}
public interface ImageCompressiorListner {
void onImageCompressed(Bitmap bitmap);
void onError();
}
}
You can use this as below:
ConvertBase64Task task = new ConvertBase64Task(imageFile); // imageFile is the File object of your image
task.setImageCompressiorListner(new ConvertBase64Task.ImageCompressiorListner() {
@Override
public void onImageCompressed(Bitmap bitmap) {
}
@Override
public void onError() {
}
});
task.execute();
Hope this will help you out.
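Since the question starts from a content Uri rather than a File, one way to feed it into the task above is to copy the stream into a temporary file first. This is only a sketch; the file name picked_image.jpg is an arbitrary example:
private File copyUriToCache(Uri imageUri) throws IOException {
    File tempFile = new File(getCacheDir(), "picked_image.jpg");
    InputStream in = getContentResolver().openInputStream(imageUri);
    FileOutputStream out = new FileOutputStream(tempFile);
    byte[] buffer = new byte[8192];
    int read;
    while ((read = in.read(buffer)) != -1) {
        out.write(buffer, 0, read);
    }
    out.close();
    in.close();
    return tempFile;
}
Then pass copyUriToCache(imageUri) to the ConvertBase64Task constructor as shown above.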

Capture image in portrait but get an output of landscape in directory

I am making my own camera in Android. Whenever I capture an image, the orientation looks okay in the app, but when I open the saved file from my external SD card it comes out in landscape.
This is my code:
public class CameraApp extends Activity implements SurfaceHolder.Callback {
private static final String TAG = "IMG_";
private static final String IMAGE_FOLDER = "/PhoneController/";
private static final String EXTENTION = ".jpg";
private String pictureName = "";
public String picNameToshare = "";
static final int FOTO_MODE = 0;
String speed;
String imageFilePath;
private Handler mHandler = new Handler();
public static String imageFilePath1;
FileOutputStream fos = null;
public Bitmap framebmpScaled;
public Bitmap SelecedFrmae;
public static Bitmap mergeBitmap;
public static Bitmap bmpfULL;
Camera camera;
Button button;
SurfaceView surfaceView;
SurfaceHolder surfaceHolder;
boolean previewing = false;
LayoutInflater controlInflater = null;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.camera);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
getWindow().setFormat(PixelFormat.UNKNOWN);
surfaceView = (SurfaceView) findViewById(R.id.camerapreview);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
button = (Button) findViewById(R.id.button);
button.setOnClickListener(buttonListener);
}
private OnClickListener buttonListener = new OnClickListener() {
public void onClick(View v) {
Handler myHandler = new Handler();
if (previewing) {
camera.stopPreview();
previewing = false;
Log.e("", "one");
}
if (camera != null) {
try {
camera.setPreviewDisplay(surfaceHolder);
camera.startPreview();
previewing = true;
myHandler.postDelayed(mMyRunnable, 2000); // called after 2 seconds
button.setText("Waiting...");
Log.e("", "two");
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
/*
* Handler myHandler = new Handler();
* myHandler.postDelayed(mMyRunnable, 5000); // called after 5
* seconds button.setText("Waiting...");
*/
}
};
private Runnable mMyRunnable = new Runnable() {
public void run() {
Camera.Parameters params = camera.getParameters();
params.set("rotation", 90);
camera.setParameters(params);
camera.takePicture(myShutterCallback, myPictureCallback_RAW,
myPictureCallback_JPG);
storePicture(mergeBitmap);
button.setText("Capture");
}
};
PictureCallback myPictureCallback_JPG = new PictureCallback() {
public void onPictureTaken(byte[] arg0, Camera arg1) {
// TODO Auto-generated method stub
Display display = getWindowManager().getDefaultDisplay();
final int ScreenWidth = display.getWidth();
final int ScreenHeight = display.getHeight();
Log.e("" + display.getWidth(), "" + display.getWidth());
Bitmap bitmapPicture = BitmapFactory.decodeByteArray(arg0, 0,
arg0.length);
Bitmap bmpScaled1 = Bitmap.createScaledBitmap(bitmapPicture,
ScreenWidth, ScreenHeight, true);
mergeBitmap = bmpScaled1;
showPicture();
}
};
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
// TODO Auto-generated method stub
if (previewing) {
camera.stopPreview();
previewing = false;
}
if (camera != null) {
try {
camera.setPreviewDisplay(surfaceHolder);
camera.startPreview();
previewing = true;
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
// TODO Auto-generated method stub
camera = Camera.open();
camera.setDisplayOrientation(90);
}
public void surfaceDestroyed(SurfaceHolder holder) {
// TODO Auto-generated method stub
camera.stopPreview();
camera.release();
camera = null;
previewing = false;
}
void storePicture(Bitmap bm) {
mHandler.post(new Runnable() {
public void run() {
// showPicture();
}
});
String timeStamp = new SimpleDateFormat("yyyMMdd_HHmmss")
.format(new Date());
this.pictureName = TAG + timeStamp;
picNameToshare = this.pictureName;
this.imageFilePath = IMAGE_FOLDER + this.pictureName + EXTENTION;
this.imageFilePath = sanitizePath(this.imageFilePath);
try {
checkSDCard(this.imageFilePath);
fos = new FileOutputStream(this.imageFilePath);
if (fos != null) {
bm.compress(Bitmap.CompressFormat.JPEG, 85, fos);
Toast.makeText(this, "Image Saved", Toast.LENGTH_SHORT).show();
fos.close();
}
} catch (IOException ioe) {
Log.e(TAG, "CapturePicture : " + ioe.toString());
} catch (Exception e) {
Log.e(TAG, "CapturePicture : " + e.toString());
}
sendBroadcast(new Intent(
Intent.ACTION_MEDIA_MOUNTED,
Uri.parse("file://" + Environment.getExternalStorageDirectory())));
imageFilePath1 = this.imageFilePath;
}
/**
* Check the SDCard is mounted on device
*
* @param path of image file
* @throws IOException
*/
void checkSDCard(String path) throws IOException {
String state = android.os.Environment.getExternalStorageState();
if (!state.equals(android.os.Environment.MEDIA_MOUNTED)) {
Toast.makeText(this,
"Please insert sdcard other wise image won't stored",
Toast.LENGTH_SHORT).show();
throw new IOException("SD Card is not mounted. It is " + state
+ ".");
}
// make sure the directory we plan to store the recording is exists
File directory = new File(path).getParentFile();
if (!directory.exists() && !directory.mkdirs()) {
throw new IOException("Path to file could not be created.");
}
}
private String sanitizePath(String path) {
if (!path.startsWith("/")) {
path = "/" + path;
}
return Environment.getExternalStorageDirectory().getAbsolutePath()
+ path;
}
Please help me.
Try using this:
Camera.Parameters parameters = camera.getParameters();
parameters.set("orientation", "portrait");
camera.setParameters(parameters);
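The string parameters "orientation" and "rotation" are not honored by every device. An alternative worth trying, which is a different approach from the string parameters above, is Camera.Parameters.setRotation() with the value taken from the camera's own mounting orientation; a sketch for a back camera in an activity locked to portrait:
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(0, info);             // 0 = the back-facing camera used in this question
Camera.Parameters parameters = camera.getParameters();
parameters.setRotation(info.orientation);  // display rotation is 0 because the activity is portrait-only
camera.setParameters(parameters);
setRotation() only affects the captured JPEG, not the on-screen preview, which is already handled by setDisplayOrientation(90) in surfaceCreated().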

Android: Camera surfaceview is blurry

I am building my own camera app using the Android Camera API. I have a working app, but the preview is not as sharp as in the default camera app. Why is this the case? Here is my code below.
public class showCamera extends SurfaceView implements SurfaceHolder.Callback {
private static final int PICTURE_SIZE_MAX_WIDTH =640;
private static final int PREVIEW_SIZE_MAX_WIDTH = 640;
//private Camera theCamera;
private SurfaceHolder holdMe;
private Camera theCamera;
int h;
int w;
public showCamera (Context context,Camera camera,int w,int h)
{
super(context);
theCamera = camera;
holdMe = getHolder();
holdMe.addCallback(this);
this.h=h;
this.w=w;
}
public showCamera(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
// TODO Auto-generated constructor stub
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
// TODO Auto-generated method stub
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
// TODO Auto-generated method stub
try {
theCamera.setPreviewDisplay(holder);
//setDisplayOrientation(theCamera,90);
if( (getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT ))
{ theCamera.setDisplayOrientation(90);
}
Camera.Parameters parameters = theCamera.getParameters();
Log.d(" " , " THIS IS THE FLASH MODE = " + parameters.getFlashMode()) ;
List<String> g= parameters.getSupportedFocusModes();
for(int j=0;j<g.size();j++)
{
Log.d(" " , " THIS IS focus modes =" + g.get(j)) ;
}
Size bestPreviewSize = determineBestPreviewSize(parameters);
Size bestPictureSize = determineBestPictureSize(parameters);
parameters.setPreviewSize(bestPreviewSize.width, bestPreviewSize.height);
parameters.setPictureSize(bestPictureSize.width, bestPictureSize.height);
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
theCamera.setParameters(parameters);
theCamera.startPreview();
} catch (IOException e) {
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
theCamera.stopPreview();
theCamera.release();
// TODO Auto-generated method stub
}
protected void setDisplayOrientation(Camera camera, int angle){
Method downPolymorphic;
try
{
downPolymorphic = camera.getClass().getMethod("setDisplayOrientation", new Class[] { int.class });
if (downPolymorphic != null)
downPolymorphic.invoke(camera, new Object[] { angle });
}
catch (Exception e1)
{
}
}
private Size determineBestPreviewSize(Camera.Parameters parameters) {
List<Size> sizes = parameters.getSupportedPreviewSizes();
return determineBestSize(sizes, PREVIEW_SIZE_MAX_WIDTH);
}
private Size determineBestPictureSize(Camera.Parameters parameters) {
List<Size> sizes = parameters.getSupportedPictureSizes();
return determineBestSize(sizes, PICTURE_SIZE_MAX_WIDTH);
}
protected Size determineBestSize(List<Size> sizes, int widthThreshold) {
Size bestSize = null;
for (Size currentSize : sizes) {
boolean isDesiredRatio = (currentSize.width / 4) == (currentSize.height / 3);
boolean isBetterSize = (bestSize == null || currentSize.width > bestSize.width);
boolean isInBounds = currentSize.width <= PICTURE_SIZE_MAX_WIDTH;
if (isDesiredRatio && isInBounds && isBetterSize) {
bestSize = currentSize;
}
}
if (bestSize == null) {
return sizes.get(0);
}
return bestSize;
}
AutoFocusCallback autoFocusCallback = new AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
Log.i("tag","this ran sdfgfhgjkldxbvnm,jhgfdkmn" );
}
};
}
MainActivity
public class MainActivity extends Activity implements OnClickListener {
private Camera cameraObject;
private showCamera showCamera;
int h;
int w = 1080;
LinearLayout Top, Buttom;
Button b;
public static Camera isCameraAvailiable() {
Camera object = null;
try {
object = Camera.open();
object.getParameters();
} catch (Exception e) {
}
return object;
}
AutoFocusCallback autoFocusCallback = new AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
Log.i("tag", "this ran sdfgfhgjkldxbvnm,jhgfdkmn");
}
};
private PictureCallback capturedIt = new PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
if (bitmap == null) {
Toast.makeText(getApplicationContext(), "not taken",
Toast.LENGTH_SHORT).show();
} else {
File pictureFile = MediaOutput();
if (pictureFile == null) {
Log.d("",
"Error creating media file, check storage permissions: ");
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
MediaStore.Images.Media.insertImage(getContentResolver(),
bitmap, "testing ", "");
Toast.makeText(getApplicationContext(), "taken",
Toast.LENGTH_SHORT).show();
fos.close();
} catch (FileNotFoundException e) {
Log.d("", "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d("", "Error accessing file: " + e.getMessage());
}
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.camlay);
Top = (LinearLayout) findViewById(R.id.top_bar);
Buttom = (LinearLayout) findViewById(R.id.but_bar);
b = (Button) findViewById(R.id.but_pic);
b.setOnClickListener(this);
Display display = getWindowManager().getDefaultDisplay();
Point size = new Point();
display.getSize(size);
int width = size.x;
int height = size.y;
h = (int) Math.round(0.8 * height);
Log.d(" ", " height " + h);
Log.d(" ", " width " + width);
Top.setLayoutParams(new LinearLayout.LayoutParams(width, (int) Math
.round(0.10 * height)));
Buttom.setLayoutParams(new LinearLayout.LayoutParams(width, (int) Math
.round(0.10 * height)));
cameraObject = isCameraAvailiable();
showCamera = new showCamera(this, cameraObject, width, h);
FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
preview.addView(showCamera, new FrameLayout.LayoutParams(width, h));
// preview.addView(showCamera);
}
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
switch (v.getId()) {
case R.id.but_pic:
// cameraObject.takePicture(null, null,capturedIt);
// parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
Camera.Parameters parameters = cameraObject.getParameters();
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
cameraObject.setParameters(parameters);
cameraObject.autoFocus(autoFocusCallback);
// cameraObject.stopPreview();
break;
}
}
private static File MediaOutput() {
File mediaStorageDir = new File(
Environment
.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
"MyCameraApp");
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
Log.d("MyCameraApp", "failed to create directory");
return null;
}
}
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss")
.format(new Date());
File mediaFile;
mediaFile = new File(mediaStorageDir.getPath() + File.separator
+ "IMG_" + timeStamp + ".jpg");
return mediaFile;
}
}
If you can point me in the right direction that would be great.
Did you ever try increasing these values in your showCamera class?
private static final int PICTURE_SIZE_MAX_WIDTH = 640;
private static final int PREVIEW_SIZE_MAX_WIDTH = 640;
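With both constants capped at 640, determineBestSize() never picks a preview resolution wider than 640 pixels, which is why the preview looks soft on a high-resolution screen. Here is a sketch that instead picks the largest supported preview size (you could also match it to the SurfaceView's aspect ratio):
private Camera.Size largestPreviewSize(Camera.Parameters parameters) {
    Camera.Size best = null;
    for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
        if (best == null || size.width * size.height > best.width * best.height) {
            best = size;
        }
    }
    return best;
}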

Android + OpenCV: process camera frames in a service

Most OpenCV examples for Android are based on a CameraViewActivity that receives, processes and displays the frames. Similar to this approach, I want to build a service that is started when the Android device boots. The service should access the camera and do some continuous image processing.
Can anyone tell me how to initialize the camera for OpenCV in this scenario? Or link to any samples?
I found several posts that explain how to do it. Basically, you have to give the Android Camera an empty, invisible preview target (a dummy SurfaceTexture in my code below) and register callback buffers for the preview frames. Here's part of my code.
Here's an interface that I wrote, because we switch between local hardware and remote network cameras.
public interface ICamera {
boolean supportsRegionOfInterest();
void connect();
void release();
boolean isConnected();
}
Here's the code for the local camera that works without a visible Activity. It receives the frames in a separate thread.
public class HardwareCamera implements CameraAccess.ICamera,
Camera.PreviewCallback {
// see http://developer.android.com/guide/topics/media/camera.html for more
// details
private static final boolean USE_THREAD = true;
private final static String TAG = "HardwareCamera";
private final Context context;
private final int cameraIndex; // example: CameraInfo.CAMERA_FACING_FRONT or
// -1 for
// IP_CAM
private final CameraAccess user;
private Camera mCamera;
private int mFrameWidth;
private int mFrameHeight;
private CameraAccessFrame mCameraFrame;
private CameraHandlerThread mThread = null;
private SurfaceTexture texture = new SurfaceTexture(0);
// needed to avoid OpenCV error:
// "queueBuffer: BufferQueue has been abandoned!"
private byte[] mBuffer;
public HardwareCamera(Context context, CameraAccess user, int cameraIndex) {
this.context = context;
this.cameraIndex = cameraIndex;
this.user = user;
}
// private boolean checkCameraHardware() {
// if (context.getPackageManager().hasSystemFeature(
// PackageManager.FEATURE_CAMERA)) {
// // this device has a camera
// return true;
// } else {
// // no camera on this device
// return false;
// }
// }
public static Camera getCameraInstance(int facing) {
Camera c = null;
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
int cameraCount = Camera.getNumberOfCameras();
int index = -1;
for (int camIdx = 0; camIdx < cameraCount; camIdx++) {
Camera.getCameraInfo(camIdx, cameraInfo);
if (cameraInfo.facing == facing) {
try {
c = Camera.open(camIdx);
index = camIdx;
break;
} catch (RuntimeException e) {
Log.e(TAG,
String.format(
"Camera is not available (in use or does not exist). Facing: %s Index: %s Error: %s",
facing, camIdx, e.getMessage()));
continue;
}
}
}
if (c != null)
Log.d(TAG, String.format("Camera opened. Facing: %s Index: %s",
facing, index));
else
Log.e(TAG, "Could not find any camera matching facing: " + facing);
// returns null if camera is unavailable
return c;
}
private synchronized void connectLocalCamera() {
if (!user.isOpenCVLoaded())
return;
if (USE_THREAD) {
if (mThread == null) {
mThread = new CameraHandlerThread(this);
}
synchronized (mThread) {
mThread.openCamera();
}
} else {
oldConnectCamera();
}
user.onCameraInitialized(mFrameWidth, mFrameHeight);
}
private/* synchronized */void oldConnectCamera() {
// synchronized (this) {
if (true) {// checkCameraHardware()) {
mCamera = getCameraInstance(cameraIndex);
if (mCamera == null)
return;
Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
// Camera.Size previewSize = sizes.get(0);
Collections.sort(sizes, new PreviewSizeComparer());
Camera.Size previewSize = null;
for (Camera.Size s : sizes) {
if (s == null)
break;
previewSize = s;
}
// List<Integer> formats = params.getSupportedPictureFormats();
// params.setPreviewFormat(ImageFormat.NV21);
params.setPreviewSize(previewSize.width, previewSize.height);
mCamera.setParameters(params);
params = mCamera.getParameters();
mFrameWidth = params.getPreviewSize().width;
mFrameHeight = params.getPreviewSize().height;
int size = mFrameWidth * mFrameHeight;
size = size
* ImageFormat.getBitsPerPixel(params.getPreviewFormat())
/ 8;
this.mBuffer = new byte[size];
Log.d(TAG, "Created callback buffer of size (bytes): " + size);
Mat mFrame = new Mat(mFrameHeight + (mFrameHeight / 2),
mFrameWidth, CvType.CV_8UC1);
mCameraFrame = new CameraAccessFrame(mFrame, mFrameWidth,
mFrameHeight);
if (this.texture != null)
this.texture.release();
this.texture = new SurfaceTexture(0);
try {
mCamera.setPreviewTexture(texture);
mCamera.addCallbackBuffer(mBuffer);
mCamera.setPreviewCallbackWithBuffer(this);
mCamera.startPreview();
Log.d(TAG,
String.format(
"Camera preview started with %sx%s. Rendering to SurfaceTexture dummy while receiving preview frames.",
mFrameWidth, mFrameHeight));
} catch (Exception e) {
Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
// }
}
@Override
public synchronized void onPreviewFrame(byte[] frame, Camera arg1) {
mCameraFrame.put(frame);
if (user.isAutomaticReceive() || user.waitForReceive(500))
user.onPreviewFrame(mCameraFrame);
if (mCamera != null)
mCamera.addCallbackBuffer(mBuffer);
}
private class CameraAccessFrame implements CameraFrame {
private Mat mYuvFrameData;
private Mat mRgba;
private int mWidth;
private int mHeight;
private Bitmap mCachedBitmap;
private boolean mRgbaConverted;
private boolean mBitmapConverted;
@Override
public Mat gray() {
return mYuvFrameData.submat(0, mHeight, 0, mWidth);
}
@Override
public Mat rgba() {
if (!mRgbaConverted) {
Imgproc.cvtColor(mYuvFrameData, mRgba,
Imgproc.COLOR_YUV2BGR_NV12, 4);
mRgbaConverted = true;
}
return mRgba;
}
// @Override
// public Mat yuv() {
// return mYuvFrameData;
// }
@Override
public synchronized Bitmap toBitmap() {
if (mBitmapConverted)
return mCachedBitmap;
Mat rgba = this.rgba();
Utils.matToBitmap(rgba, mCachedBitmap);
mBitmapConverted = true;
return mCachedBitmap;
}
public CameraAccessFrame(Mat Yuv420sp, int width, int height) {
super();
mWidth = width;
mHeight = height;
mYuvFrameData = Yuv420sp;
mRgba = new Mat();
this.mCachedBitmap = Bitmap.createBitmap(width, height,
Bitmap.Config.ARGB_8888);
}
public synchronized void put(byte[] frame) {
mYuvFrameData.put(0, 0, frame);
invalidate();
}
public void release() {
mRgba.release();
mCachedBitmap.recycle();
}
public void invalidate() {
mRgbaConverted = false;
mBitmapConverted = false;
}
};
private class PreviewSizeComparer implements Comparator<Camera.Size> {
@Override
public int compare(Size arg0, Size arg1) {
if (arg0 != null && arg1 == null)
return -1;
if (arg0 == null && arg1 != null)
return 1;
if (arg0.width < arg1.width)
return -1;
else if (arg0.width > arg1.width)
return 1;
else
return 0;
}
}
private static class CameraHandlerThread extends HandlerThread {
Handler mHandler;
HardwareCamera owner;
CameraHandlerThread(HardwareCamera owner) {
super("CameraHandlerThread");
this.owner = owner;
start();
mHandler = new Handler(getLooper());
}
synchronized void notifyCameraOpened() {
notify();
}
void openCamera() {
mHandler.post(new Runnable() {
@Override
public void run() {
owner.oldConnectCamera();
notifyCameraOpened();
}
});
try {
wait();
} catch (InterruptedException e) {
Log.w(TAG, "wait was interrupted");
}
}
}
@Override
public boolean supportsRegionOfInterest() {
return false;
}
@Override
public void connect() {
connectLocalCamera();
}
@Override
public void release() {
synchronized (this) {
if (USE_THREAD) {
if (mThread != null) {
mThread.interrupt();
mThread = null;
}
}
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
try {
mCamera.setPreviewTexture(null);
} catch (IOException e) {
Log.e(TAG, "Could not release preview-texture from camera.");
}
mCamera.release();
Log.d(TAG, "Preview stopped and camera released");
}
mCamera = null;
if (mCameraFrame != null) {
mCameraFrame.release();
}
if (texture != null)
texture.release();
}
}
@Override
public boolean isConnected() {
return mCamera != null;
}
}
The camera frame is taken from the OpenCV samples. It's responsible for converting the raw byte array into OpenCV Mat structures. The implementation of the interface below (CameraAccessFrame) is inside the code above.
public interface CameraFrame extends CvCameraViewFrame {
Bitmap toBitmap();
@Override
Mat rgba();
@Override
Mat gray();
}
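To connect this back to the question, here is a minimal sketch of a started Service that owns the camera. The class name CameraService is hypothetical, and the CameraAccess frame consumer used by HardwareCamera is not part of the posted code, so its construction is only indicated in comments:
public class CameraService extends Service {
    private ICamera camera;

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        if (camera == null) {
            // wire up your CameraAccess implementation here, e.g.:
            // camera = new HardwareCamera(this, cameraAccess, Camera.CameraInfo.CAMERA_FACING_BACK);
            // camera.connect();
        }
        return START_STICKY; // restart the service if the system kills it
    }

    @Override
    public void onDestroy() {
        if (camera != null) {
            camera.release();
            camera = null;
        }
        super.onDestroy();
    }

    @Override
    public IBinder onBind(Intent intent) {
        return null; // started service, no binding needed
    }
}
Starting it at boot additionally needs a BroadcastReceiver for android.intent.action.BOOT_COMPLETED and the RECEIVE_BOOT_COMPLETED permission in the manifest.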
