No callbacks being called after Camera.takePicture() - java

After following the steps in the Android API documentation, I am not getting any callbacks after calling Camera.takePicture() (neither the shutter callback nor the image callback). I have declared the correct permissions in the manifest, I am not getting any exceptions, and I am able to use this code for recording videos. Similar questions have been about only Camera.PictureCallback not being called, but I haven't found any where Camera.ShutterCallback isn't called either. Here is my code:
private class CameraPreview extends SurfaceView implements SurfaceHolder.Callback,
        Camera.PictureCallback, Camera.ShutterCallback {

    protected SurfaceHolder mHolder;

    @SuppressWarnings("deprecation")
    public CameraPreview() {
        super(VideoActivity.this);
        mHolder = getHolder();
        mHolder.addCallback(this);
        // Deprecated setting is required for API < 11
        // Automatically set for API >= 11, no issue with continuing to use it
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        Log.d(TAG, "Surface is created");
        try {
            mCamera = getCamera();
            Log.d(TAG, "Camera created when surface was created");
            if (mCamera != null) {
                mCamera.lock();
                Camera.Parameters parameters = mCamera.getParameters();
                parameters.setPictureFormat(ImageFormat.JPEG);
                mCamera.setParameters(parameters);
                mCamera.setPreviewDisplay(mHolder);
                mCamera.startPreview();
                Log.d(TAG, "Preview Started");
            }
        } catch (IOException e) {
            Log.e(TAG, "IOException in setting camera preview: " + e.getMessage(), e);
        }
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) { }

    @Override
    public void onPictureTaken(byte[] data, Camera camera) {
        Log.d(TAG, "onPictureTaken called");
        File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
        if (pictureFile == null) {
            Log.e(TAG, "Error creating media file");
            return;
        }
        Log.d(TAG, "Writing to file: " + pictureFile);
        try {
            FileOutputStream fos = new FileOutputStream(pictureFile);
            fos.write(data);
            fos.close();
        } catch (FileNotFoundException e) {
            Log.e(TAG, "File not found: " + e.getMessage());
        } catch (IOException e) {
            Log.e(TAG, "Error accessing file: " + e.getMessage());
        }
    }

    @Override
    public void onShutter() {
        Log.d(TAG, "onShutter is called");
    }
}
An instance of that class is created in onStart() and stored in an instance variable called mPreview. Since mPreview implements the Camera callback interfaces, I can pass it directly as the callbacks. The CameraPreview class has been working for recording videos.
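Roughly how that wiring looks (the container id R.id.preview_container is a placeholder, since the actual layout isn't shown in this post):
@Override
protected void onStart() {
    super.onStart();
    mPreview = new CameraPreview();
    FrameLayout container = (FrameLayout) findViewById(R.id.preview_container);
    container.addView(mPreview);
}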
The logic for taking the picture:
mCamera.lock();
mCamera.takePicture(mPreview, null, mPreview);
Toast.makeText(VideoActivity.this, "Picture Taken",
        Toast.LENGTH_SHORT).show();
Log.d(TAG, "Picture Taken");
So my question is, why am I not getting any callbacks?

I figured out the issue a while ago and forgot to share it, so here it is:
I needed to call mCamera.reconnect() after mCamera.startPreview() and before mCamera.takePicture(). I believe this is because I was receiving callbacks from another process to trigger the picture, and reconnect() needs to be called after the camera has been used by another process.
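For anyone hitting the same symptom, this is a minimal sketch of the call order that fixed it for me; everything other than the Camera API calls is as in the code above:
mCamera.startPreview();
try {
    // Re-acquire the camera after it has been used by another process
    // (for example, a MediaRecorder); without this, takePicture() produced no callbacks.
    mCamera.reconnect();
} catch (IOException e) {
    Log.e(TAG, "reconnect failed: " + e.getMessage(), e);
}
mCamera.takePicture(mPreview /* shutter */, null /* raw */, mPreview /* jpeg */);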

Related

Custom Camera not saving the whole image

I have built an android custom camera in this way:
//CameraActivity.java
public class CameraActivity extends AppCompatActivity implements CameraPreview.OnCameraStatusListener {

    String path, TAG = "CameraActivity";
    private CameraPreview cameraPreview;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.activity_camera);

        Button takePhoto = findViewById(R.id.takePhoto);
        takePhoto.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Log.e(TAG, "===takePhoto()===");
                if (cameraPreview != null) {
                    cameraPreview.takePicture();
                }
            }
        });

        // Start camera
        initCameraPreview();
    }

    private void initCameraPreview() {
        Log.e(TAG, "===initCameraPreview()===");
        cameraPreview = findViewById(R.id.cameraPreview);
        cameraPreview.setOnCameraStatusListener(this);
    }

    @RequiresApi(api = Build.VERSION_CODES.N)
    @Override
    public void onCameraStopped(byte[] data) throws JSONException {
        Log.e("TAG", "===onCameraStopped===");
        File mSaveFile = getOutputMediaFile();
        saveToFile(data, mSaveFile);
    }

    public File getOutputMediaFile() {
        File mediaStorageDir =
                new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES), "CustomCameraPics");
        // Create the storage directory if it does not exist
        if (!mediaStorageDir.exists()) {
            if (!mediaStorageDir.mkdirs()) {
                Log.e(TAG, "failed to create directory");
                return null;
            }
        }
        long time = System.currentTimeMillis();
        File file = new File(mediaStorageDir.getPath() + File.separator + "myPic_" + time + ".png");
        path = file.getPath();
        Log.e("imagePath", path);
        return file;
    }

    @RequiresApi(api = Build.VERSION_CODES.N)
    public void saveToFile(byte[] bytes, File file) throws JSONException {
        Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
        try {
            FileOutputStream out = new FileOutputStream(file);
            bitmap.compress(Bitmap.CompressFormat.PNG, 90, out);
            out.flush();
            out.close();
            Log.e("Image", "Saved");
            Toast.makeText(CameraActivity.this, "Image Saved", Toast.LENGTH_SHORT).show();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
//activity_camera.xml
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    tools:context="com.sukshi.mycamera.CameraActivity">

    <RelativeLayout
        android:layout_width="match_parent"
        android:layout_height="match_parent">

        <com.shank.mycamera.CameraPreview
            android:id="@+id/cameraPreview"
            android:layout_width="match_parent"
            android:layout_height="match_parent" />

        <Button
            android:id="@+id/takePhoto"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:layout_alignParentBottom="true"
            android:layout_centerHorizontal="true"
            android:text="Take pic" />
    </RelativeLayout>
</RelativeLayout>
//CameraPreview.java
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {

    private static final String TAG = "CameraPreview";
    private OnCameraStatusListener listener;
    private Camera camera;
    private SurfaceHolder holder;

    private Camera.PictureCallback pictureCallback = new Camera.PictureCallback() {
        @Override
        public void onPictureTaken(byte[] data, Camera camera) {
            try {
                camera.stopPreview();
            } catch (Exception e) {
            }
            if (null != listener) {
                try {
                    listener.onCameraStopped(data);
                } catch (JSONException e) {
                    e.printStackTrace();
                }
            }
        }
    };

    public void takePicture() {
        Log.e(TAG, "===takePicture()===");
        if (camera != null) {
            try {
                camera.takePicture(null, null, pictureCallback);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    public CameraPreview(Context context, AttributeSet attrs) {
        super(context, attrs);
        holder = getHolder();
        holder.addCallback(this);
        holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    public interface OnCameraStatusListener {
        void onCameraStopped(byte[] data) throws JSONException;
    }

    public void setOnCameraStatusListener(OnCameraStatusListener listener) {
        this.listener = listener;
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        Log.e(TAG, "==surfaceCreated==");
        camera = getCameraInstance();
        try {
            camera.setPreviewDisplay(holder);
        } catch (Exception e) {
            Log.e(TAG, "Error setting camera preview", e);
            camera.release();
            camera = null;
        }
        //update()
        if (camera != null) {
            camera.startPreview();
            Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
            for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
                Camera.getCameraInfo(i, cameraInfo);
            }
        }
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        Log.e(TAG, "==surfaceChanged==");
        try {
            camera.stopPreview();
        } catch (Exception e) {
        }
        try {
            camera.setPreviewDisplay(holder);
            camera.startPreview();
        } catch (Exception e) {
            Log.e(TAG, "Error starting camera preview: " + e.getMessage());
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.e(TAG, "==surfaceDestroyed==");
        camera.release();
        camera = null;
    }

    public static Camera getCameraInstance() {
        Camera camera = null;
        int cameraId = 0;
        boolean frontCameraFound = false;
        try {
            Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
            for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
                Camera.getCameraInfo(i, cameraInfo);
                if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                    cameraId = i;
                    frontCameraFound = true;
                }
            }
            if (frontCameraFound) {
                camera = Camera.open(cameraId);
            } else {
                camera = Camera.open();
            }
            Camera.Parameters parameters = camera.getParameters();
            camera.setDisplayOrientation(90);
            parameters.setRotation(270);
            camera.setParameters(parameters);
        } catch (Exception e) {
            Log.e(TAG, "getCamera failed");
        }
        return camera;
    }
}
It saves the image when you press the 'TAKE PIC' button.
My requirement: the whole image that the user can see in the on-screen preview should be saved.
The problem I'm facing is that, on some phones, the saved image/bitmap doesn't contain everything that is shown to the user on screen.
As @CommonsWare has explained, there is no requirement that a captured picture be the same as the preview picture. You can skip camera.takePicture() altogether and, in your cameraPreview.takePicture(), save the latest preview frame as a bitmap.
The advantages of this approach are that you are guaranteed to save the same image the user sees in the preview, and that it happens with no delay.
The drawback is that the resolution and quality of the image returned from camera.takePicture() may be noticeably better.
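A minimal sketch of the preview-frame approach, assuming the default NV21 preview format; the field and method names (lastFrame, hookPreviewFrames(), lastFrameAsJpeg()) are illustrative and not part of the code above:
private byte[] lastFrame;  // most recent preview frame, NV21 by default

// Call this once after camera.startPreview() so every preview frame is kept.
private void hookPreviewFrames(Camera camera) {
    camera.setPreviewCallback(new Camera.PreviewCallback() {
        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
            lastFrame = data;
        }
    });
}

// Compress the latest frame to JPEG instead of calling takePicture().
private byte[] lastFrameAsJpeg(Camera camera) {
    Camera.Size size = camera.getParameters().getPreviewSize();
    YuvImage yuv = new YuvImage(lastFrame, ImageFormat.NV21,
            size.width, size.height, null);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    yuv.compressToJpeg(new Rect(0, 0, size.width, size.height), 90, out);
    return out.toByteArray();  // write these bytes to a file as before
}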
If you are not satisfied with the preview frame quality, you can use the same size for setPreviewSize() and setPictureSize(), provided that size appears in both getSupportedPreviewSizes() and getSupportedPictureSizes(); usually you can find a satisfactory pair.
If you are not satisfied with the preview frame resolution, at least choose the same aspect ratio for setPreviewSize() and setPictureSize(). Some devices produce really weird results when the two don't match.
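For instance, a sketch of selecting the largest supported picture size whose aspect ratio matches the current preview size (camera is assumed to be an open android.hardware.Camera):
Camera.Parameters params = camera.getParameters();
Camera.Size preview = params.getPreviewSize();
double previewRatio = (double) preview.width / preview.height;

// Keep the largest supported picture size whose aspect ratio matches the preview.
Camera.Size best = null;
for (Camera.Size s : params.getSupportedPictureSizes()) {
    double ratio = (double) s.width / s.height;
    if (Math.abs(ratio - previewRatio) < 0.01 && (best == null || s.width > best.width)) {
        best = s;
    }
}
if (best != null) {
    params.setPictureSize(best.width, best.height);
    camera.setParameters(params);
}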
But note that these steps cannot guarantee that the captured image will be the same as the previewed one, if only because takePicture() inevitably happens with some delay.
One more remark: if you target Android N and above, consider switching from the deprecated Camera API to the camera2 API.
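For orientation only, a minimal camera2 sketch of opening a camera (API 21+, CAMERA permission already granted; context is whatever Context you have at hand, and the session/request setup is elided):
CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
try {
    String cameraId = manager.getCameraIdList()[0];  // first reported camera
    manager.openCamera(cameraId, new CameraDevice.StateCallback() {
        @Override
        public void onOpened(CameraDevice device) {
            // build a CaptureRequest and CameraCaptureSession here
        }

        @Override
        public void onDisconnected(CameraDevice device) {
            device.close();
        }

        @Override
        public void onError(CameraDevice device, int error) {
            device.close();
        }
    }, null);  // null handler: callbacks arrive on the calling thread's looper
} catch (CameraAccessException e) {
    Log.e("Camera2Sketch", "camera2 open failed", e);
}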

Simple application of photo on Android Studio using Android tutorial of Camera

I want to create an application that takes a picture. I followed the Camera tutorial and tried three times, but I was unable to make it work. I added a button to take a picture, but the app is not working properly. Please help me out. Here is my code:
// CAMERA VIEW displays the picture in a FrameLayout
public class CameraView extends SurfaceView implements SurfaceHolder.Callback {

    private SurfaceHolder mHolder;
    private Camera mCamera;

    public CameraView(Context context, Camera camera) {
        super(context);
        mCamera = camera;
        mCamera.setDisplayOrientation(0);
        // get the holder and set this class as the callback, so we can get camera data here
        mHolder = getHolder();
        mHolder.addCallback(this);
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
    }

    @Override
    public void surfaceCreated(SurfaceHolder surfaceHolder) {
        try {
            // when the surface is created, we can set the camera to draw images in this surfaceholder
            mCamera.setPreviewDisplay(surfaceHolder);
            mCamera.startPreview();
        } catch (IOException e) {
            Log.d("ERROR", "Camera error on surfaceCreated " + e.getMessage());
        }
    }

    @Override
    public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i2, int i3) {
        // before changing the application orientation, you need to stop the preview, rotate and then start it again
        if (mHolder.getSurface() == null) // check if the surface is ready to receive camera data
            return;
        try {
            mCamera.stopPreview();
        } catch (Exception e) {
            // this will happen when you try to stop the camera if it's not running
        }
        // now, recreate the camera preview
        try {
            mCamera.setPreviewDisplay(mHolder);
            mCamera.startPreview();
        } catch (IOException e) {
            Log.d("ERROR", "Camera error on surfaceChanged " + e.getMessage());
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
        // our app has only one screen, so we'll destroy the camera in the surface
        // if you are using more screens, please move this code to your activity
        mCamera.stopPreview();
        mCamera.release();
    }
}
And this is my activity, which takes a picture and saves it:
public class MainActivity extends Activity implements OnClickListener {

    String TAG = "Main activity";
    private Camera mCamera = null;
    private CameraView mCameraView = null;

    private Camera.PictureCallback mPicture = new Camera.PictureCallback() {
        @Override
        public void onPictureTaken(byte[] data, Camera camera) {
            File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
            if (pictureFile == null) {
                Log.d(TAG, "Error creating media file, check storage permissions: ");
                return;
            }
            try {
                FileOutputStream fos = new FileOutputStream(pictureFile);
                fos.write(data);
                fos.close();
            } catch (FileNotFoundException e) {
                Log.d(TAG, "File not found: " + e.getMessage());
            } catch (IOException e) {
                Log.d(TAG, "Error accessing file: " + e.getMessage());
            }
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // full-screen app
        this.requestWindowFeature(Window.FEATURE_NO_TITLE);
        this.getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.activity_main);

        // build the dynamic UI
        final Button menu = (Button) findViewById(R.id.buttonMenu);
        menu.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Perform action on click
                Intent mainToMenu;
                mainToMenu = new Intent(MainActivity.this, MenuActivity.class);
                startActivity(mainToMenu);
            }
        });

        try {
            mCamera = Camera.open(); // you can use open(int) to use different cameras
        } catch (Exception e) {
            Log.d("ERROR", "Failed to get camera: " + e.getMessage());
        }

        if (mCamera != null) {
            mCameraView = new CameraView(this, mCamera); // create a SurfaceView to show camera data
            FrameLayout camera_view = (FrameLayout) findViewById(R.id.camera_view);
            camera_view.addView(mCameraView); // add the SurfaceView to the layout
        }

        // when I press a volume button
        final Button b = (Button) findViewById(R.id.button);
        b.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Perform action on click
                mCamera.takePicture(null, null, mPicture);
            }
        });
    }

    public static final int MEDIA_TYPE_IMAGE = 1;

    /** Create a File for saving an image or video */
    private static File getOutputMediaFile(int type) {
        // To be safe, you should check that the SDCard is mounted
        // using Environment.getExternalStorageState() before doing this.
        File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES), "FotoFoto");
        // This location works best if you want the created images to be shared
        // between applications and persist after your app has been uninstalled.

        // Create the storage directory if it does not exist
        if (!mediaStorageDir.exists()) {
            if (!mediaStorageDir.mkdirs()) {
                Log.d("MyCameraApp", "failed to create directory");
                return null;
            }
        }
        // Create a media file name
        String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
        File mediaFile;
        if (type == MEDIA_TYPE_IMAGE) {
            mediaFile = new File(mediaStorageDir.getPath() + File.separator + "IMG_" + timeStamp + ".jpg");
        } else {
            return null;
        }
        return mediaFile;
    }

    @Override
    public void onClick(View v) {
    }
}

Take picture with front camera in android service

I need to take a picture with the back camera in an Android service, but after reading the docs it seems you need a SurfaceView. Is it possible to take a picture without showing anything to the user?
Edit:
Will this work?
SurfaceTexture surfaceTexture = new SurfaceTexture(10);
Camera camera = Camera.open();
camera.getParameters().setPreviewSize(1, 1);
camera.setPreviewTexture(surfaceTexture);
camera.startPreview();
camera.takePicture(null, pictureCallback, null);
100% working: capture a picture from the front camera using a service.
public class MyService extends Service {

    @Nullable
    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @Override
    public void onCreate() {
        super.onCreate();
        CapturePhoto();
    }

    private void CapturePhoto() {
        Log.d("kkkk", "Preparing to take photo");
        Camera camera = null;
        Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
        int frontCamera = 1;
        //int backCamera = 0;
        Camera.getCameraInfo(frontCamera, cameraInfo);
        try {
            camera = Camera.open(frontCamera);
        } catch (RuntimeException e) {
            Log.d("kkkk", "Camera not available: " + 1);
            camera = null;
            //e.printStackTrace();
        }
        try {
            if (null == camera) {
                Log.d("kkkk", "Could not get camera instance");
            } else {
                Log.d("kkkk", "Got the camera, creating the dummy surface texture");
                try {
                    camera.setPreviewTexture(new SurfaceTexture(0));
                    camera.startPreview();
                } catch (Exception e) {
                    Log.d("kkkk", "Could not set the surface preview texture");
                    e.printStackTrace();
                }
                camera.takePicture(null, null, new Camera.PictureCallback() {
                    @Override
                    public void onPictureTaken(byte[] data, Camera camera) {
                        File pictureFileDir = new File("/sdcard/CaptureByService");
                        // Create the directory if it does not exist yet
                        if (!pictureFileDir.exists()) {
                            pictureFileDir.mkdirs();
                        }
                        // MM = month, HH = 24-hour clock, mm = minutes
                        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
                        String date = dateFormat.format(new Date());
                        String photoFile = "ServiceClickedPic_" + "_" + date + ".jpg";
                        String filename = pictureFileDir.getPath() + File.separator + photoFile;
                        File mainPicture = new File(filename);
                        try {
                            FileOutputStream fos = new FileOutputStream(mainPicture);
                            fos.write(data);
                            fos.close();
                            Log.d("kkkk", "image saved");
                        } catch (Exception error) {
                            Log.d("kkkk", "Image could not be saved");
                        }
                        camera.release();
                    }
                });
            }
        } catch (Exception e) {
            camera.release();
        }
    }
}
You can set the width and height of the SurfaceView to 1dp and its top margin to -10dp, so that it isn't displayed on screen but still functions normally; that way you can take a picture without showing the SurfaceView to the user:
<SurfaceView
android:layout_width="1dp"
android:layout_height="1dp"
android:layout_marginTop="-10dp"
...
/>
In that case you can just use a SurfaceTexture:
SurfaceTexture surfaceTexture = new SurfaceTexture(10);
Camera camera = Camera.open();
camera.getParameters().setPreviewSize(1, 1);
camera.setPreviewTexture(surfaceTexture);
Yes, it is possible. Call your camera callback method to take the picture from the camera, don't specify a size for the preview, and just start the preview; it will work.
Omit this step:
param.setPreviewSize(122,133);
or use
param.setPreviewSize(1, 1);
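Putting these pieces together, a minimal sketch of the whole no-UI capture sequence (the log tag and the callback body are illustrative):
try {
    Camera camera = Camera.open();                    // or Camera.open(cameraId)
    camera.setPreviewTexture(new SurfaceTexture(0));  // dummy texture, nothing is drawn on screen
    camera.startPreview();                            // no setPreviewSize() call needed
    camera.takePicture(null, null, new Camera.PictureCallback() {
        @Override
        public void onPictureTaken(byte[] data, Camera camera) {
            // write the JPEG bytes to a file here, then clean up
            camera.stopPreview();
            camera.release();
        }
    });
} catch (IOException | RuntimeException e) {
    Log.e("NoUiCapture", "capture failed", e);
}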

android java camera capture from asynctask

I am trying to write code in an AsyncTask that takes a picture from the camera and sends it to a server over UDP, 100 times. However, the PictureCallback isn't called. Can someone please help me?
This is what I tried:
public class MainAsyncTask extends AsyncTask<Void, String, Void> {

    protected static final String TAG = null;
    public MainActivity mainAct;

    public MainAsyncTask(MainActivity mainActivity) {
        super();
        this.mainAct = mainActivity;
    }

    @Override
    protected Void doInBackground(Void... params) {
        DatagramSocket clientSocket = null;
        InetAddress IPAddress = null;
        try {
            clientSocket = new DatagramSocket();
            IPAddress = InetAddress.getByName("192.168.1.15");
        } catch (Exception e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        byte[] data;
        DatagramPacket sendPacket;
        try {
            for (int i = 0; i < 100; i++) {
                publishProgress("");
                File file = new File(Environment.getExternalStorageDirectory() + File.separator + "img.jpg");
                while (!file.exists() || file.length() == 0) ;
                Bitmap screen = BitmapFactory.decodeFile(Environment.getExternalStorageDirectory() + File.separator + "img.jpg");
                ByteArrayOutputStream bytes = new ByteArrayOutputStream();
                screen.compress(Bitmap.CompressFormat.JPEG, 15, bytes);
                data = bytes.toByteArray();
                sendPacket = new DatagramPacket(data, data.length, IPAddress, 3107);
                clientSocket.send(sendPacket);
                file.delete();
            }
            clientSocket.close();
        } catch (Exception e) {
            // TODO Auto-generated catch block
            publishProgress(e.getMessage());
        }
        return null;
    }

    public static void takeSnapShots(MainActivity mainAct) {
        PictureCallback jpegCallback = new PictureCallback() {
            public void onPictureTaken(byte[] data, Camera camera) {
                FileOutputStream outStream = null;
                try {
                    outStream = new FileOutputStream(Environment.getExternalStorageDirectory() + File.separator + "img" + ".jpg");
                    outStream.write(data);
                    outStream.close();
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    camera.stopPreview();
                    camera.release();
                    camera = null;
                }
                Log.d(TAG, "onPictureTaken - jpeg");
            }
        };
        SurfaceView surface = new SurfaceView(mainAct.getApplicationContext());
        Camera camera = Camera.open();
        try {
            camera.setPreviewDisplay(surface.getHolder());
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        camera.startPreview();
        camera.takePicture(null, null, jpegCallback);
    }

    protected void onProgressUpdate(String... progress) {
        takeSnapShots(mainAct);
    }

    @Override
    protected void onPostExecute(Void result) {
    }
}
I don't think AsyncTask is the most convenient tool for this job.
You need a SurfaceView that is not simply created out of nowhere, but connected to the screen. You should initialize your camera only once, and you cannot call camera.takePicture() in a loop. You can call takePicture() from the onPictureTaken() callback, but you should also remember that you cannot work with sockets on the UI thread. Luckily, you can follow the Google recommendations:
the recommended way to access the camera is to open Camera on a separate thread.
and
Callbacks will be invoked on the event thread open(int) was called from.
If you open the camera in a new HandlerThread, as shown here, the picture callbacks will arrive on that background thread, which may also be used for networking.
Also, I recommend sending the JPEG buffer you receive from the camera directly. The overhead of saving the image to a file, reading the file back into a bitmap, and compressing that bitmap to another JPEG is likely far too much. To control the image size, choose an appropriate picture size; note that the size must be selected from the list of sizes supported by the specific camera.
public class CameraView extends SurfaceView
        implements SurfaceHolder.Callback, Camera.PictureCallback {

    private static final String TAG = "CameraView";
    private Camera camera;
    private HandlerThread cameraThread;
    private Handler handler;
    private boolean bCameraInitialized = false;
    private int picturesToTake = 0;

    public CameraView(Context context, AttributeSet attr) {
        super(context, attr);
        // install a SurfaceHolder.Callback so we get notified when the
        // underlying surface is created and destroyed.
        getHolder().addCallback(this);
    }

    @Override public void surfaceCreated(SurfaceHolder holder) {
        cameraThread = new HandlerThread("CameraHandlerThread");
        cameraThread.start();
        handler = new Handler(cameraThread.getLooper());
        handler.post(new Runnable() {
            @Override public void run() {
                openRearCamera();
                bCameraInitialized = false;
            }
        });
    }

    @Override public void surfaceDestroyed(SurfaceHolder holder) {
        if (camera != null) {
            Log.d(TAG, "Camera release");
            camera.release();
            camera = null;
            bCameraInitialized = false;
        }
    }

    // finalize the camera init now that we know the preview size
    @Override public void surfaceChanged(SurfaceHolder holder, int format, final int w, final int h) {
        Log.w(TAG, "surfaceChanged(" + w + ", " + h + ")");
        if (!bCameraInitialized) {
            cameraSetup(w, h);
            bCameraInitialized = true;
        }
    }

    private void openRearCamera() {
        if (camera != null) {
            Log.e(TAG, "openRearCamera(): camera is not null");
            return;
        }
        try {
            camera = Camera.open(0);
            Log.d(TAG, "Camera ready " + String.valueOf(camera));
        } catch (Throwable e) {
            Log.e(TAG, "openRearCamera(): Camera.open() failed", e);
        }
    }

    private void cameraSetup(int w, int h) {
        if (camera == null) {
            Log.e(TAG, "cameraSetup(): camera is null");
            return;
        }
        Log.d(TAG, "Camera setup");
        try {
            Camera.Parameters params = camera.getParameters();
            // still picture settings - be close to preview size
            Camera.Size pictureSize = params.getSupportedPictureSizes().get(0);
            params.setPictureSize(pictureSize.width, pictureSize.height);
            camera.setParameters(params);
            camera.setPreviewDisplay(getHolder());
            camera.startPreview();
        } catch (Throwable e) {
            Log.e(TAG, "Failed to finalize camera setup", e);
        }
    }

    private void sendJpeg(byte[] data) {
        try {
            DatagramSocket clientSocket = new DatagramSocket();
            InetAddress IPAddress = InetAddress.getByName("192.168.1.15");
            DatagramPacket sendPacket = new DatagramPacket(data, data.length, IPAddress, 3107);
            clientSocket.send(sendPacket);
            Log.d(TAG, "sent image");
        } catch (Exception e) {
            Log.e(TAG, "failed to send image", e);
        }
    }

    @Override public void onPictureTaken(byte[] data, Camera camera) {
        sendJpeg(data);
        camera.startPreview();
        takePictures(picturesToTake - 1);
    }

    public void takePictures(int n) {
        if (n > 0) {
            picturesToTake = n;
            Log.d(TAG, "take " + n + " images");
            camera.takePicture(null, null, this);
        } else {
            Log.d(TAG, "all images captured");
        }
    }
}
The class above is a compilation from several projects, with error checking reduced to a minimum for brevity, so it may require some fixes to compile. You simply add a <CameraView /> element to your activity layout and call its takePictures() method when the user clicks a button or something.
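For example, a sketch of the wiring inside an Activity, assuming the layout declares the view as @+id/cameraView and a button as @+id/captureButton (both ids are illustrative):
final CameraView cameraView = (CameraView) findViewById(R.id.cameraView);
Button capture = (Button) findViewById(R.id.captureButton);
capture.setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(View v) {
        cameraView.takePictures(100);  // capture and send 100 pictures, one after another
    }
});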
Do you call your AsyncTask like this? Just creating the AsyncTask is not enough.
new MainAsyncTask(ActivityContext).execute();
You can't do this
camera.setPreviewDisplay(surface.getHolder());
From the docs:
http://developer.android.com/reference/android/hardware/Camera.html#setPreviewDisplay(android.view.SurfaceHolder)
"The SurfaceHolder must already contain a surface when this method is called. If you are using SurfaceView, you will need to register a SurfaceHolder.Callback with addCallback(SurfaceHolder.Callback) and wait for surfaceCreated(SurfaceHolder) before calling setPreviewDisplay() or starting preview."
You'd have to do something like this:
SurfaceHolder surfaceHolder = surface.getHolder();
surfaceHolder.addCallback(new SurfaceHolder.Callback() {
    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        try {
            camera.setPreviewDisplay(holder);
            camera.startPreview();
            camera.takePicture(null, null, jpegCallback);
        } catch (IOException e) {
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {}
});

Taking pictures at regular intervals (Android Camera API)

So I'm trying to take multiple pictures at regular time intervals; however, I get a "takePicture Failed" exception after the first picture is previewed on the SurfaceView.
Here's my takePictures() method which is called when a button is pressed:
public void takePictures() {
    if (getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)) {
        camera = Camera.open();
        if (camera != null) {
            try {
                camera.setPreviewDisplay(surfaceView.getHolder());
                camera.startPreview();
                camera.takePicture(null, null, new CustomPictureCallbcak(this, cacheDir, imageView, 3, 5000));
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    } else {
        Toast.makeText(this, "No camera found.", Toast.LENGTH_SHORT).show();
    }
}
And here's the onPictureTaken() method of CustomPictureCallbcak:
@Override
public void onPictureTaken(byte[] data, Camera camera) {
    // get date info for the file name
    SimpleDateFormat sdf = new SimpleDateFormat("ddmmyyyyhhmmss");
    String date = sdf.format(new Date());
    String fileDir = createImageFileName(date);
    // write the image to cache
    writeImageToCache(fileDir, data);
    // display the file name in a toast notification
    Toast.makeText(c, fileDir, Toast.LENGTH_SHORT).show();
    // show the picture on the imageview
    imageView.setImageBitmap(BitmapFactory.decodeByteArray(data, 0, data.length));
    // retake images
    this.camera = camera;
    while (numOfImagesAlreadyTaken <= numOfImages) {
        Thread thread = new Thread() {
            @Override
            public void run() {
                // TODO Auto-generated method stub
                super.run();
                try {
                    numOfImagesAlreadyTaken++;
                    CustomPictureCallbcak.this.camera.stopPreview();
                    sleep(delay);
                    CustomPictureCallbcak.this.camera.takePicture(null, null, CustomPictureCallbcak.this);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        };
        thread.start();
    }
    Toast.makeText(c, "Total images taken: " + numOfImagesAlreadyTaken, Toast.LENGTH_SHORT).show();
    // release camera
    camera.release();
    camera = null;
}
As Aleksander Lidtke suggested, I created a single thread inside my takePictures() method and put a while loop inside it:
public void takePictures(final int numOfPictures, final int delay) {
    if (getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)) {
        Thread thread = new Thread() {
            @Override
            public void run() {
                // TODO Auto-generated method stub
                super.run();
                while (numOfPicturesAlreadyTaken <= numOfPictures) {
                    try {
                        camera = Camera.open();
                        camera.setPreviewDisplay(surfaceView.getHolder());
                        camera.startPreview();
                        camera.takePicture(null, null, new CustomPictureCallbcak(MainActivity.this, cacheDir, imageView));
                        numOfPicturesAlreadyTaken++;
                        sleep(delay);
                    } catch (Exception e) {
                        e.printStackTrace();
                        Log.d("TEST", e.getMessage());
                    }
                }
            }
        };
        thread.start();
    } else {
        Toast.makeText(this, "No camera found.", Toast.LENGTH_SHORT).show();
    }
}
