java.lang.IllegalStateException: failed to get surface - java

I am trying to create an app that enables the user to record their smartphone's screen.
This is my starting code:
import android.content.Context;
import android.content.Intent;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.MediaRecorder;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.os.Environment;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.DisplayMetrics;
import android.util.Log;
import android.widget.Button;
import android.widget.Toast;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Date;
public class MainActivity extends AppCompatActivity
{
private static final int CAST_PERMISSION_CODE = 22;
private DisplayMetrics mDisplayMetrics = new DisplayMetrics();
private MediaProjection mMediaProjection;
private VirtualDisplay mVirtualDisplay;
private MediaRecorder mMediaRecorder;
private MediaProjectionManager mProjectionManager;
private Button startButton;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
startButton = (Button) findViewById( R.id.recordButton );
mMediaRecorder = new MediaRecorder();
mProjectionManager = (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
getWindowManager().getDefaultDisplay().getMetrics(this.mDisplayMetrics);
prepareRecording();
startRecording();
}
private void startRecording() {
// If mMediaProjection is null that means we didn't get a context, let's ask the user
if (mMediaProjection == null) {
// This asks for user permissions to capture the screen
startActivityForResult(mProjectionManager.createScreenCaptureIntent(), CAST_PERMISSION_CODE);
return;
}
mVirtualDisplay = getVirtualDisplay();
mMediaRecorder.start();
}
private void stopRecording() {
if (mMediaRecorder != null) {
mMediaRecorder.stop();
mMediaRecorder.reset();
}
if (mVirtualDisplay != null) {
mVirtualDisplay.release();
}
if (mMediaProjection != null) {
mMediaProjection.stop();
}
prepareRecording();
}
public String getCurSysDate() {
return new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss").format(new Date());
}
private void prepareRecording() {
try {
mMediaRecorder.prepare();
} catch (Exception e) {
e.printStackTrace();
return;
}
final String directory = Environment.getExternalStorageDirectory() + File.separator + "Recordings";
if (!Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState())) {
Toast.makeText(this, "Failed to get External Storage", Toast.LENGTH_SHORT).show();
return;
}
final File folder = new File(directory);
boolean success = true;
if (!folder.exists()) {
success = folder.mkdir();
}
String filePath;
if (success) {
String videoName = ("capture_" + getCurSysDate() + ".mp4");
filePath = directory + File.separator + videoName;
} else {
Toast.makeText(this, "Failed to create Recordings directory", Toast.LENGTH_SHORT).show();
return;
}
int width = mDisplayMetrics.widthPixels;
int height = mDisplayMetrics.heightPixels;
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
mMediaRecorder.setVideoEncodingBitRate(512 * 1000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(width, height);
mMediaRecorder.setOutputFile(filePath);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode != CAST_PERMISSION_CODE) {
// Where did we get this request from ? -_-
//Log.w(TAG, "Unknown request code: " + requestCode);
return;
}
if (resultCode != RESULT_OK) {
Toast.makeText(this, "Screen Cast Permission Denied :(", Toast.LENGTH_SHORT).show();
return;
}
mMediaProjection = mProjectionManager.getMediaProjection(resultCode, data);
// TODO Register a callback that will listen onStop and release & prepare the recorder for next recording
// mMediaProjection.registerCallback(callback, null);
mVirtualDisplay = getVirtualDisplay();
mMediaRecorder.start();
}
private VirtualDisplay getVirtualDisplay()
{
int screenDensity = mDisplayMetrics.densityDpi;
int width = mDisplayMetrics.widthPixels;
int height = mDisplayMetrics.heightPixels;
return mMediaProjection.createVirtualDisplay(this.getClass().getSimpleName(), width, height, screenDensity, DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR, mMediaRecorder.getSurface(), null /*Callbacks*/, null /*Handler*/);
}
}
After showing a message that informs the user about the screen capture function, my app crashes.
java.lang.RuntimeException: Failure delivering result ResultInfo{who=null, request=22, result=-1, data=Intent { (has extras) }} to activity {gr.awm.clrecorder/gr.awm.clrecorder.MainActivity}: java.lang.IllegalStateException: failed to get surface
at android.app.ActivityThread.deliverResults(ActivityThread.java:3974)
at android.app.ActivityThread.handleSendResult(ActivityThread.java:4017)
at android.app.ActivityThread.access$1400(ActivityThread.java:172)
at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1471)
at android.os.Handler.dispatchMessage(Handler.java:102)
at android.os.Looper.loop(Looper.java:145)
at android.app.ActivityThread.main(ActivityThread.java:5832)
at java.lang.reflect.Method.invoke(Native Method)
at java.lang.reflect.Method.invoke(Method.java:372)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1399)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1194)
Caused by: java.lang.IllegalStateException: failed to get surface
at android.media.MediaRecorder.getSurface(Native Method)
at gr.awm.clrecorder.MainActivity.getVirtualDisplay(MainActivity.java:148)
at gr.awm.clrecorder.MainActivity.onActivityResult(MainActivity.java:135)
Is there a way to solve this issue? Any advice would be helpful and deeply appreciated.
Thanks in advance

Never mind the comment, by the way.
I dug into the documentation and your code and got the following results.
This is the order in which you call the mMediaRecorder methods to get a surface:
mMediaRecorder.prepare();
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
mMediaRecorder.setVideoEncodingBitRate(512 * 1000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(width, height);
mMediaRecorder.setOutputFile(filePath);
This is what the documentation says:
//Call this method before prepare().
setVideoEncodingBitRate(); //no exception thrown
//Must be called after setVideoSource(). Call this after setOutputFormat() but before prepare().
setVideoSize(width, height); //IllegalStateException if it is called after prepare() or before setOutputFormat()
//Call this only before setOutputFormat().
setAudioSource(); //IllegalStateException if it is called after setOutputFormat()
setVideoSource(); //IllegalStateException if it is called after setOutputFormat()
//Call this after setOutputFormat() and before prepare().
setVideoEncoder(); //IllegalStateException if it is called before setOutputFormat() or after prepare()
setAudioEncoder(); //IllegalStateException if it is called before setOutputFormat() or after prepare().
//Call this after setAudioSource()/setVideoSource() but before prepare().
setOutputFormat(); //IllegalStateException if it is called after prepare() or before setAudioSource()/setVideoSource().
//Call this after setOutputFormat() but before prepare().
setOutputFile(); //IllegalStateException if it is called before setOutputFormat() or after prepare()
//Must be called after setVideoSource(). Call this after setOutputFormat() but before prepare().
setVideoFrameRate(); //IllegalStateException if it is called after prepare() or before setOutputFormat().
//This method must be called after setting up the desired audio and video sources, encoders, file format, etc., but before start()
prepare() //IllegalStateException if it is called after start() or before setOutputFormat().
So in order to get the mMediaRecorder in a correct state you have to call the methods in this order:
setAudioSource()
setVideoSource()
setOutputFormat()
setAudioEncoder()
setVideoEncoder()
setVideoSize()
setVideoFrameRate()
setOutputFile()
setVideoEncodingBitRate()
prepare()
start()
I think I also got an undocumented error when I called the set*Encoder() methods before the set*Source() methods.
Edit: I thought I had working code, but I still get IllegalStateExceptions, although the code is in the order given in the documentation.
Edit 2: I got it working now. Other things that might not be working, and the corresponding error messages:
Permissions for external storage and microphone not set (add uses-permission entries in the manifest):
Android MediaRecorder start failed in invalid state 4
IllegalStateException [start called in an invalid state: 1] on restarting Android MediaRecorder
I had to create a directory the app could write to. I couldn't get external storage to work, so I used the data directory instead; but that is unrelated to the mMediaRecorder code.
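For reference, a sketch of pointing the recorder at app-private storage instead (getFilesDir() needs no storage permission; getCurSysDate() is the asker's helper, the rest is an assumption):
// Hedged sketch: write the recording to app-private storage instead of the
// external Recordings folder; no WRITE_EXTERNAL_STORAGE permission is needed.
File outDir = new File(getFilesDir(), "Recordings");
if (!outDir.exists() && !outDir.mkdirs()) {
    Toast.makeText(this, "Failed to create Recordings directory", Toast.LENGTH_SHORT).show();
    return;
}
String filePath = new File(outDir, "capture_" + getCurSysDate() + ".mp4").getAbsolutePath();
mMediaRecorder.setOutputFile(filePath);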
This code works:
private void prepareRecording() {
//Deal with FileDescriptor and Directory here
//Took audio out because emulator has no mic
//mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder.setVideoEncodingBitRate(512 * 1000);
//mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setVideoSize(width, height);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setOutputFile(filePath);
try {
mMediaRecorder.prepare();
} catch (Exception e) {
e.printStackTrace();
return;
}
//Field variable to hold surface object
//Deal with it as you see fit
surface = mMediaRecorder.getSurface();
}
Beware: although the above code works in creating the MediaRecorder correctly and writing to storage, it crashes the whole emulator when mMediaRecorder.stop() is called.
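For anyone hitting that crash: a defensive teardown, reusing the fields from the asker's stopRecording(), might look like this (a sketch, untested on the emulator):
// Hedged sketch: stop() throws a RuntimeException when no valid data was
// recorded, so guard it and release the projection resources afterwards.
private void stopRecordingSafely() {
    try {
        mMediaRecorder.stop();
    } catch (RuntimeException e) {
        e.printStackTrace(); // recording too short or surface already lost
    } finally {
        mMediaRecorder.reset(); // recorder can be configured again after reset()
        if (mVirtualDisplay != null) {
            mVirtualDisplay.release(); // release the display before the projection
            mVirtualDisplay = null;
        }
        if (mMediaProjection != null) {
            mMediaProjection.stop();
            mMediaProjection = null;
        }
    }
}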

Maybe you set a wrong video size or a wrong video source. Make sure mediaRecorder.prepare() has executed successfully before you ask for the surface.
I also had this problem; after checking all of the above, I fixed the issue.

I just got the same problem.
The problem only occurs once (after installing the game FOR THE FIRST TIME and after granting the permissions),
so I cleared the application's data (for the thousandth time) to reproduce the error, but it never happened again.
So what I did to figure this out was delete the folder from storage;
in your case the folder's name is String directory = "Recordings".
And this time I got to reproduce the error.
And what I did to fix it was make sure to create the folder once the "WRITE_EXTERNAL_STORAGE" permission is accepted and before running all the MediaRecorder configuration:
switch (requestCode) {
case REQUEST_PERMISSIONS: {
if ((grantResults.length > 0) && (grantResults[0] +
grantResults[1]) == PackageManager.PERMISSION_GRANTED) {
//onToggleScreenShare(mToggleButton);
File folder = new File(Environment.getExternalStorageDirectory() +
File.separator + "textingstories");
boolean success = true;
if (!folder.exists()) {
success = folder.mkdirs();
if (success) {
// Do something on success
StartRecord();
} else {
// Do something else on failure
}
}
else {
StartRecord();
}
}
break;
}
}
and for Android 10 you might want to add this to the manifest to make sure that the folder is created:
<manifest ... >
<application android:requestLegacyExternalStorage="true" ... >
...
</application>
</manifest>
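For reference, the runtime request that produces the grantResults checked above might look like this (a sketch using the support library's ActivityCompat; REQUEST_PERMISSIONS is the answerer's constant, and the two-element array matches grantResults[0] and grantResults[1]):
// Hedged sketch: request both permissions up front so the switch above
// receives two entries in grantResults.
ActivityCompat.requestPermissions(this,
    new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE,
        Manifest.permission.RECORD_AUDIO},
    REQUEST_PERMISSIONS);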

Related

Should I tap the NFC tag every time I want to write to it?

I'm an Android developer.
I have an idea to develop an application that writes data to NFC tags (only writing).
I have an EditText and a button; when I click the button, I write the data entered in the field to the NFC tag.
I searched a lot and tried several code samples.
The only problem is that my NFC tag sticks to the back of the phone, and the code I'm using tells me I have to tap the NFC tag to write to it.
Is there another way to detect the NFC tag stuck on the back of the phone when clicking the button?
I have two activities: a main activity that calls methods from my NFCManager.
I will share the NFCManager class below.
NFCManager class:
import android.app.Activity;
import android.app.PendingIntent;
import android.content.Intent;
import android.content.IntentFilter;
import android.nfc.NdefMessage;
import android.nfc.NdefRecord;
import android.nfc.NfcAdapter;
import android.nfc.Tag;
import android.nfc.tech.Ndef;
import android.nfc.tech.NdefFormatable;
import java.io.ByteArrayOutputStream;
import java.util.Locale;
public class NFCManager {
private Activity activity;
private NfcAdapter nfcAdpt;
public NFCManager(Activity activity) {
this.activity = activity;
}
public void verifyNFC() throws NFCNotSupported, NFCNotEnabled {
nfcAdpt = NfcAdapter.getDefaultAdapter(activity);
if (nfcAdpt == null)
throw new NFCNotSupported();
if (!nfcAdpt.isEnabled())
throw new NFCNotEnabled();
}
public void enableDispatch() {
Intent nfcIntent = new Intent(activity, getClass());
nfcIntent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP);
PendingIntent pendingIntent = PendingIntent.getActivity(activity, 0, nfcIntent, 0);
IntentFilter[] intentFiltersArray = new IntentFilter[] {};
String[][] techList = new String[][] { { android.nfc.tech.Ndef.class.getName() }, { android.nfc.tech.NdefFormatable.class.getName() } };
nfcAdpt.enableForegroundDispatch(activity, pendingIntent, intentFiltersArray, techList);
}
public void disableDispatch() {
nfcAdpt.disableForegroundDispatch(activity);
}
public static class NFCNotSupported extends Exception {
public NFCNotSupported() {
super();
}
}
public static class NFCNotEnabled extends Exception {
public NFCNotEnabled() {
super();
}
}
public void writeTag(Tag tag, NdefMessage message) {
if (tag != null) {
try {
Ndef ndefTag = Ndef.get(tag);
if (ndefTag == null) {
// Let's try to format the Tag in NDEF
NdefFormatable nForm = NdefFormatable.get(tag);
if (nForm != null) {
nForm.connect();
nForm.format(message);
nForm.close();
}
}
else {
ndefTag.connect();
ndefTag.writeNdefMessage(message);
ndefTag.close();
}
}
catch(Exception e) {
e.printStackTrace();
}
}
}
public NdefMessage createUriMessage(String content, String type) {
NdefRecord record = NdefRecord.createUri(type + content);
NdefMessage msg = new NdefMessage(new NdefRecord[]{record});
return msg;
}
public NdefMessage createTextMessage(String content) {
try {
// Get UTF-8 byte
byte[] lang = Locale.getDefault().getLanguage().getBytes("UTF-8");
byte[] text = content.getBytes("UTF-8"); // Content in UTF-8
int langSize = lang.length;
int textLength = text.length;
ByteArrayOutputStream payload = new ByteArrayOutputStream(1 + langSize + textLength);
payload.write((byte) (langSize & 0x1F));
payload.write(lang, 0, langSize);
payload.write(text, 0, textLength);
NdefRecord record = new NdefRecord(NdefRecord.TNF_WELL_KNOWN, NdefRecord.RTD_TEXT, new byte[0], payload.toByteArray());
return new NdefMessage(new NdefRecord[]{record});
}
catch (Exception e) {
e.printStackTrace();
}
return null;
}
public NdefMessage createExternalMessage(String content) {
NdefRecord externalRecord = NdefRecord.createExternal("com.survivingwithandroid", "data", content.getBytes());
NdefMessage ndefMessage = new NdefMessage(new NdefRecord[] { externalRecord });
return ndefMessage;
}
}
Methods from my MainActivity:
@Override
protected void onResume() {
super.onResume();
try {
nfcMger.verifyNFC();
//nfcMger.enableDispatch();
Intent nfcIntent = new Intent(this, getClass());
nfcIntent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP);
PendingIntent pendingIntent = PendingIntent.getActivity(this, 0, nfcIntent, 0);
IntentFilter[] intentFiltersArray = new IntentFilter[] {};
String[][] techList = new String[][] { { android.nfc.tech.Ndef.class.getName() }, { android.nfc.tech.NdefFormatable.class.getName() } };
NfcAdapter nfcAdpt = NfcAdapter.getDefaultAdapter(this);
nfcAdpt.enableForegroundDispatch(this, pendingIntent, intentFiltersArray, techList);
}
catch(NFCManager.NFCNotSupported nfcnsup) {
Snackbar.make(v, "NFC not supported", Snackbar.LENGTH_LONG).show();
}
catch(NFCManager.NFCNotEnabled nfcnEn) {
Snackbar.make(v, "NFC Not enabled", Snackbar.LENGTH_LONG).show();
}
}
@Override
protected void onPause() {
super.onPause();
nfcMger.disableDispatch();
}
@Override
public void onNewIntent(Intent intent) {
super.onNewIntent(intent);
Log.d("Nfc", "New intent");
// It is the time to write the tag
currentTag = intent.getParcelableExtra(NfcAdapter.EXTRA_TAG);
if (message != null) {
nfcMger.writeTag(currentTag, message);
dialog.dismiss();
Snackbar.make(v, "Tag written", Snackbar.LENGTH_LONG).show();
} else {
// Handle intent
}
}
There is another, much better way to get notified that a Tag has come into range, especially if you are writing to a Tag: enableReaderMode. But your use case is strange.
I'm not sure why you would want an NFC tag stuck on the back of a phone, because then it would act like very slow, very small permanent storage, when a file in the phone's memory would be far better.
Remember, once the Tag comes into range you get notified via enableForegroundDispatch or enableReaderMode that a Tag has come into range, and you are given a Tag object. As long as that Tag does not go out of range and you have stored the Tag object in the global scope of the activity, you can write to it as many times as you like and for as long as you like.
Therefore it should be possible, if complicated, to do what you want and write (or read) whenever the user clicks a button.
I've not tested whether a Tag object is usable after your app is put into the background and brought to the foreground again, but I think it is unlikely, because a background app might be closed, and closure would definitely invalidate the Tag object.
But there are 2 problems with your code.
First, calling connect and writing to your Tag should never be done on the UI thread, as it is IO-blocking and could be cancelled, which would mean the Tag has to be taken out of range and brought back into range again. Luckily, if you use enableReaderMode, you get notified on a separate thread.
Second, you should only call close on the Tag when you no longer want to write to it; at the moment you are calling close after you have written to it once.
So the following will probably work for you, with the limitation that the Tag has to come into range for the first time after the app has started.
Use enableReaderMode to get notified that the Tag initially comes into range, store the Tag object in the global Activity scope, and connect to it once in the enableReaderMode callback thread.
Then, from the UI, when the button is pressed, start a new Thread to write to the tag, as in the sketch below.
Never call close on the Tag object.
Note: I've not tested this, as it is a very strange use case.
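A minimal sketch of that approach (the class and method names are illustrative assumptions, not the asker's code, and it is untested for the reasons above):
import android.app.Activity;
import android.nfc.NdefMessage;
import android.nfc.NfcAdapter;
import android.nfc.Tag;
import android.nfc.tech.Ndef;
import java.io.IOException;
public class NfcWriteActivity extends Activity implements NfcAdapter.ReaderCallback {
    private NfcAdapter nfcAdapter;
    private Ndef ndefTag; // kept connected for the lifetime of the activity
    @Override
    protected void onResume() {
        super.onResume();
        nfcAdapter = NfcAdapter.getDefaultAdapter(this);
        if (nfcAdapter != null) {
            // The callback fires on a separate (non-UI) thread.
            nfcAdapter.enableReaderMode(this, this,
                NfcAdapter.FLAG_READER_NFC_A | NfcAdapter.FLAG_READER_SKIP_NDEF_CHECK,
                null);
        }
    }
    @Override
    protected void onPause() {
        super.onPause();
        if (nfcAdapter != null) {
            nfcAdapter.disableReaderMode(this);
        }
    }
    @Override
    public void onTagDiscovered(Tag tag) {
        ndefTag = Ndef.get(tag); // null if the tag is not NDEF-formatted
        if (ndefTag == null) return;
        try {
            ndefTag.connect(); // connect once; never call close()
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    // Call this from the button's OnClickListener.
    private void writeMessage(final NdefMessage message) {
        new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    if (ndefTag != null && ndefTag.isConnected()) {
                        ndefTag.writeNdefMessage(message); // off the UI thread
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }).start();
    }
}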

MediaRecorder stop failed Android

I use this code to prepare my MediaRecorder for recording video. After this I call the start() method, which doesn't crash; however, when I call the stop() method a crash occurs and a RuntimeException stop failed is raised. I also notice that the video file saved on the device is broken and is only 32B. I'm assuming I have an error somewhere in the setup method below. Notice that I am trying to record from the SurfaceView live preview which is displayed on screen (like Snapchat), not from the native camera app.
private void initRecorder(Surface surface) throws IOException {
// It is very important to unlock the camera before doing setCamera
// or it will results in a black preview
if(mCamera == null) {
mCamera = Camera.open();
mCamera.unlock();
}
if(mMediaRecorder == null) mMediaRecorder = new MediaRecorder();
mMediaRecorder.setPreviewDisplay(surface);
mMediaRecorder.setCamera(mCamera);
mMediaRecorder.setOnErrorListener(new MediaRecorder.OnErrorListener() {
@Override
public void onError(MediaRecorder mr, int what, int extra) {
Toast.makeText(getApplicationContext(),
Integer.toString(what) + "_____" + Integer.toString(extra), Toast.LENGTH_LONG)
.show(); }
});
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
// mMediaRecorder.setOutputFormat(8);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setVideoEncodingBitRate(512 * 1000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(640, 480);
mMediaRecorder.setOutputFile(getVideoFile());
try {
mMediaRecorder.prepare();
} catch (IllegalStateException e) {
// This is thrown if the previous calls are not called with the
// proper order
e.printStackTrace();
}
mInitSuccesful = true;
}
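One documented cause matches these symptoms: MediaRecorder.stop() intentionally throws a RuntimeException when no valid audio/video data has been received, which would also explain the 32-byte output file. A hedged guard around stop() (a sketch, not a confirmed fix for this setup):
try {
    mMediaRecorder.stop();
} catch (RuntimeException e) {
    // Documented behavior: stop() throws if no valid data was recorded.
    e.printStackTrace();
} finally {
    mMediaRecorder.reset();
    mMediaRecorder.release();
    mMediaRecorder = null;
}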

LBP Cascade (OpenCV) won't load on android studio emulator

I've been trying to get some object tracking under my belt, and I managed to get a nice LBP cascade tracker running in C++, using OpenCV 3.1.
I wanted to try to get this robot tracker running on a phone, so I'm trying to transition it over to Android Studio. Unfortunately, everything except the actual cascade loading is working. I can get the camera to pull up in the app, I can have it show the greyscale image instead of an RGB image, etc. It's just that the cascade won't load, so the whole thing won't work.
Specs: Android Studio 1.5.1 emulating an API 19 phone (using the x86 Google APIs), using OpenCV 3.1.0.
The CameraActivity code in question is here -
package <package name retracted for reasons>;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import android.app.Activity;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.SurfaceView;
import android.view.WindowManager;
import android.widget.Toast;
import java.util.Vector;
import <package name retracted here for reasons>.R;
public class CameraActivity extends Activity implements CameraBridgeViewBase.CvCameraViewListener2 {
private static final String TAG = "OCVSample::Activity";
private CameraBridgeViewBase mOpenCvCameraView;
private boolean mIsJavaCamera = true;
private MenuItem mItemSwitchCamera = null;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public CameraActivity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.camera_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
CascadeClassifier robot_cascade;
public void onCameraViewStarted(int width, int height) {
Log.d(TAG, "Trying to get robot cascade");
robot_cascade = new CascadeClassifier(Environment.getExternalStorageDirectory().getAbsolutePath() + "/cascade.xml");
String robot_cascade_name = Environment.getExternalStorageDirectory().getAbsolutePath() + "/cascade.xml";
Log.d(TAG, "location is "+robot_cascade_name);
if(robot_cascade.empty()){
Log.d(TAG, "--(!)Error loading robot cascade");
}
Log.d(TAG, "Made it through loading cascade!");
}
public void onCameraViewStopped() {
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
Mat frameGrey = new Mat();
Mat endFrame = new Mat();
endFrame = inputFrame.rgba();
MatOfRect robots = new MatOfRect();
Imgproc.cvtColor(inputFrame.rgba(), frameGrey, Imgproc.COLOR_BGRA2GRAY);
Imgproc.equalizeHist(frameGrey, frameGrey);
/*robot_cascade.detectMultiScale(frameGrey, robots, 1.2, 120, 0, new Size(200, 200), new Size(300, 300));
Log.d(TAG, "Found %x robots" + robots.toArray().length);
for (Rect rect : robots.toArray()) {
Imgproc.rectangle(endFrame, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
Log.d(TAG, "Robot at point ( %x , %x )"+(rect.x+rect.width/2)+(rect.y+rect.height/2));
} */
return endFrame;
//return inputFrame.rgba();
}
}
For right now I have the cascade.detectMultiScale commented out while it's not working. Running it yields:
01-26 21:07:42.085 2296-2296/? D/OCVSample::Activity: Trying to get robot cascade
01-26 21:07:42.085 2296-2296/? D/OCVSample::Activity: location is /storage/sdcard/cascade.xml
01-26 21:07:42.085 2296-2296/? D/OCVSample::Activity: --(!)Error loading robot cascade
01-26 21:07:42.085 2296-2296/? D/OCVSample::Activity: Made it through loading cascade!
And then it goes down to the onCameraFrame method happily, and currently just outputs what it's getting (I've been able to play with it to get grayscale out and so on)
The problem appears to be in the onCameraViewStarted method -
public void onCameraViewStarted(int width, int height) {
Log.d(TAG, "Trying to get robot cascade");
robot_cascade = new CascadeClassifier(Environment.getExternalStorageDirectory().getAbsolutePath() + "/cascade.xml");
String robot_cascade_name = Environment.getExternalStorageDirectory().getAbsolutePath() + "/cascade.xml";
Log.d(TAG, "location is "+robot_cascade_name);
if(robot_cascade.empty()){
Log.d(TAG, "--(!)Error loading robot cascade");
}
Log.d(TAG, "Made it through loading cascade!");
}
Robot cascade always comes up empty.
I am sure that the cascade.xml is actually on the emulated phone - if I check through adb, it says it's chilling right there, and the android device monitor also shows that it's there.
The only thing I can think of is that ADM says that the permissions are -rwxrwx---, but I've got the WRITE_EXTERNAL_STORAGE and READ_EXTERNAL_STORAGE permissions in the manifest file, so I would think that'd be fine. (I could be completely wrong though, please correct me if I am).
Just in case it's important, I did try to chmod the cascade.xml to be read/writable by any user (not just owner and group), but it kept giving me "Bad Mode" no matter what I tried. The SD card is mounted as read/writeable (because I was able to push the file onto it in the first place), and I was in su, so I have no idea why it won't let me do that.
Edit: chmod is behaving really strangely - it either does nothing and fails quietly, or it says that it's a read-only file system... even though I can make files and directories and delete them no problem.
Edit x2: Moved the file to /data/local, and it still doesn't work, but chmod worked on it, so now I have all permissions. Still trying to figure out why the cascade won't load, though.
So yeah, I can't load a cascade that is definitely on the emulated SD card.
So, I got it to work on the emulator (finally). Apparently the SD card on the emulator just doesn't allow chmod to work at all, so I moved the file to /data/local. Then I changed everything to read from ("./data/local/cascade.xml"), and added robot_cascade.load("./data/local/cascade.xml"); after the creation of the cascade classifier. Then I uncommented the stuff that was actually using the cascade, and it works beautifully.
So, the new onCameraViewStarted method -
public void onCameraViewStarted(int width, int height) {
Log.d(TAG, "Prog: Trying to get robot cascade");
File file = new File("./data/local/cascade.xml");
boolean fileExists = file.exists();
String fileDoesExist = String.valueOf(fileExists);
Log.d(TAG, "Prog: Does the cascade file exist? "+fileDoesExist);
robot_cascade = new CascadeClassifier("./data/local/cascade.xml");
robot_cascade.load("./data/local/cascade.xml");
String robot_cascade_name = "./data/local/cascade.xml";
Log.d(TAG, "Prog: location is "+robot_cascade_name);
if(robot_cascade.empty()){
Log.d(TAG, "Prog: --(!)Error loading robot cascade");
} else {
Log.d(TAG, "Prog: --Holy smite the cascade is actually there praise the sun");
}
Log.d(TAG, "Prog: Made it through loading cascade!");
}
Of course, now I'm trying to get it to work on the phone instead of emulated and that's not working, but that's a matter for a different question.
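A commonly used alternative that sidesteps SD-card permissions entirely is to ship the cascade under res/raw and copy it to app-private storage before loading it, as the OpenCV Android face-detection sample does. A sketch, assuming the file is bundled as res/raw/cascade.xml:
// Hedged sketch: copy res/raw/cascade.xml into app-private storage and load it
// from there (needs java.io.* imports and an Activity/Context for getDir()).
private CascadeClassifier loadCascadeFromRaw() {
    try {
        InputStream is = getResources().openRawResource(R.raw.cascade);
        File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
        File cascadeFile = new File(cascadeDir, "cascade.xml");
        FileOutputStream os = new FileOutputStream(cascadeFile);
        byte[] buffer = new byte[4096];
        int bytesRead;
        while ((bytesRead = is.read(buffer)) != -1) {
            os.write(buffer, 0, bytesRead);
        }
        is.close();
        os.close();
        return new CascadeClassifier(cascadeFile.getAbsolutePath());
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    }
}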

sound not playing in android > icecream sandwich

I was using the following code to play a sound. Everything worked fine before ICS, but on ICS and higher versions no sound is heard. There is no error, but no sound can be heard.
EDIT: Note, the following code is triggered by a broadcast receiver. The BroadcastReceiver invokes an AsyncTask, and in the onPostExecute method of that AsyncTask the following method is called.
What could the error possibly be?
public static void playSound(final Context context, final int volume,
Uri uri, final int stream, int maxTime, int tickTime) {
//stopPlaying();
/*
if (stream < 0 || stream > 100) {
throw new IllegalArgumentException(
"volume must be between 0 and 100 .Current volume "
+ volume);
}*/
final AudioManager mAudioManager = (AudioManager) context
.getSystemService(Context.AUDIO_SERVICE);
int deviceLocalVolume = getDeviceVolume(volume,
mAudioManager.getStreamMaxVolume(stream));
Log.d(TAG,
"device max volume = "
+ mAudioManager.getStreamMaxVolume(stream)
+ " for streamType " + stream);
Log.d(TAG, "playing sound " + uri.toString()
+ " with device local volume " + deviceLocalVolume);
final int oldVolume = mAudioManager.getStreamVolume(stream);
// set the volume to what we want it to be. In this case it's max volume
// for the alarm stream.
Log.d(Constants.APP_TAG, "setting device local volume to " + deviceLocalVolume);
mAudioManager.setStreamVolume(stream, deviceLocalVolume,
AudioManager.FLAG_REMOVE_SOUND_AND_VIBRATE);
final MediaPlayer mediaPlayer = new MediaPlayer();
golbalMMediaPlayer = mediaPlayer;
try {
final OnPreparedListener OnPreparedListener = new OnPreparedListener() {
@Override
public void onPrepared(final MediaPlayer mp) {
Log.d(TAG, "onMediaPlayercompletion listener");
mp.start();
countDownTimer.start();
}
};
mediaPlayer.setDataSource(context.getApplicationContext(), uri);
mediaPlayer.setAudioStreamType(stream);
mediaPlayer.setLooping(false);
mediaPlayer.setOnPreparedListener(OnPreparedListener);
mediaPlayer.setOnCompletionListener(new OnCompletionListener() {
@Override
public void onCompletion(MediaPlayer mp) {
Log.d(Constants.APP_TAG, "Entered onCompletion listener of mediaplayer");
mAudioManager.setStreamVolume(stream, oldVolume,
AudioManager.FLAG_REMOVE_SOUND_AND_VIBRATE);
try{
if(mediaPlayer != null && mediaPlayer.isPlaying()){
mediaPlayer.release();
}
}catch(Exception ex){
Log.e(Constants.APP_TAG, "error on oncompletion listener" ,ex);
}
}
});
CountDownTimer timer = new CountDownTimer(maxTime*1000, tickTime*1000) {
@Override
public void onTick(long millisUntilFinished) {
Log.d(TAG, "tick while playing sound ");
}
@Override
public void onFinish() {
Log.d(TAG, "timer finished");
stopPlaying();
}
};
countDownTimer = timer;
mediaPlayer.prepareAsync();
} catch (Exception e) {
Log.e(TAG, "problem while playing sound", e);
} finally {
}
}
LOGS:
07-01 00:00:00.030: D/beephourly(9500): device max volume = 7 for streamType 5
07-01 00:00:00.030: D/beephourly(9500): playing sound content://media/internal/audio/media/166 with device local volume 7
07-01 00:00:00.030: D/beephourly(9500): setting device local volume to 7
07-01 00:00:00.080: D/beephourly(9500): vibrating with pattern = [J#428bae20
07-01 00:00:00.090: D/beephourly(9500): will show normal notification
07-01 00:00:00.100: D/beephourly(9500): notification is enabled
07-01 00:00:00.100: D/usersettings(9500): hr = 0
07-01 00:00:00.110: D/beephourly(9500): onMediaPlayercompletion listener
07-01 00:00:00.451: D/beephourly(9500): tick while playing sound
07-01 00:00:20.460: D/beephourly(9500): timer finished
07-01 00:00:20.460: D/beephourly(9500): got request to stop playing
07-01 00:00:20.460: D/beephourly(9500): cancelling countdowntimer
07-01 00:00:20.460: D/beephourly(9500): releasing mediaplayer now
Try this:
Playing sound
public class PlaySound extends Activity implements OnTouchListener {
private SoundPool soundPool;
private int soundID;
boolean loaded = false;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
View view = findViewById(R.id.textView1);
view.setOnTouchListener(this);
// Set the hardware buttons to control the music
this.setVolumeControlStream(AudioManager.STREAM_MUSIC);
// Load the sound
soundPool = new SoundPool(10, AudioManager.STREAM_MUSIC, 0);
soundPool.setOnLoadCompleteListener(new OnLoadCompleteListener() {
@Override
public void onLoadComplete(SoundPool soundPool, int sampleId,
int status) {
loaded = true;
}
});
soundID = soundPool.load(this, R.raw.sound1, 1);
}
@Override
public boolean onTouch(View v, MotionEvent event) {
if (event.getAction() == MotionEvent.ACTION_DOWN) {
// Getting the user sound settings
AudioManager audioManager = (AudioManager) getSystemService(AUDIO_SERVICE);
float actualVolume = (float) audioManager
.getStreamVolume(AudioManager.STREAM_MUSIC);
float maxVolume = (float) audioManager
.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
float volume = actualVolume / maxVolume;
// Is the sound loaded already?
if (loaded) {
soundPool.play(soundID, volume, volume, 1, 0, 1f);
Log.e("Test", "Played sound");
}
}
return false;
}
}
Layout file :
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical" >
<TextView
android:id="@+id/textView1"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:text="Click on the screen to start playing" >
</TextView>
</LinearLayout>
Source link : http://www.vogella.com/tutorials/AndroidMedia/article.html#sound
Sometimes MediaPlayer objects have to be declared as a class-level variable, or they will be garbage collected by the Dalvik VM.
public final MediaPlayer mediaPlayer = new MediaPlayer();
private MediaPlayer mPlayer;
....
SoundPool sp = new SoundPool(5, AudioManager.STREAM_MUSIC, 0);
int iTmp = sp.load(getBaseContext(), R.raw.windows_8_notify, 1);
sp.play(iTmp, 1, 1, 0, 0, 1);
mPlayer = MediaPlayer.create(getBaseContext(), R.raw.windows_8_notify);
mPlayer.start();
mPlayer.setLooping(true);
First, where all your private fields are, before onCreate(), put the first line; then inside onCreate() start the music. Just make sure to change "windows_8_notify" to the name of the song you want.
I would wrap the call in a try/catch for IllegalStateException, run it through the debugger, and see what you get.
Things to try
Set boolean isPlaying = mp.isPlaying(); and check its value.
Try an mp.reset() before starting and see if it works.
Implement MediaPlayer.OnErrorListener and register it with the media player (see the sketch after this list).
See what error you get. This might be helpful.
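A minimal sketch of the error-listener suggestion, assuming the asker's mediaPlayer and TAG:
// Hedged sketch: surface MediaPlayer errors instead of failing silently.
mediaPlayer.setOnErrorListener(new MediaPlayer.OnErrorListener() {
    @Override
    public boolean onError(MediaPlayer mp, int what, int extra) {
        // Log what/extra to identify the failure (e.g. the (-38, 0) wrong-state error).
        Log.e(TAG, "MediaPlayer error: what=" + what + " extra=" + extra);
        return true; // true = handled; onCompletion will not be called
    }
});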
LOGS
...streamType 5
StreamType 5 means STREAM_NOTIFICATION.
(Called from notification?)
It should be STREAM_MUSIC (3)
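In the asker's playSound(), that means passing AudioManager.STREAM_MUSIC rather than STREAM_NOTIFICATION as the stream argument, e.g.:
// Hedged example: STREAM_MUSIC is 3, STREAM_NOTIFICATION is 5.
playSound(context, volume, uri, AudioManager.STREAM_MUSIC, maxTime, tickTime);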
To check that it's not an ICS/device-specific problem,
- place a sound file (sound_01.ogg or sound_01.mp3) under res/raw/ folder
- place buttons named start_button and stop_button in main_layout
and try this.
(I've checked this code on API 10 and API 19 emulators and the sounds are played.)
import android.app.Activity;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
public class MainActivity extends Activity
// implements MediaPlayer.OnPreparedListener
{
private MediaPlayer mediaPlayer;
private boolean isPrepared;
private boolean isPlaying;
private View start_button;
private View stop_button;
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.main_layout);
Init();
}
@Override
protected void onResume()
{
super.onResume();
Load();
}
@Override
protected void onPause()
{
super.onPause();
Unload();
}
private void Init()
{
setVolumeControlStream(AudioManager.STREAM_MUSIC);
start_button = findViewById(R.id.start_button);
stop_button = findViewById(R.id.stop_button);
start_button.setOnClickListener(new OnClickListener()
{
@Override
public void onClick(View v)
{
Play();
}
});
stop_button.setOnClickListener(new OnClickListener()
{
@Override
public void onClick(View v)
{
Stop();
}
});
}
private void Load()
{
Unload();
// load from resource (res/raw/xx.ogg or .mp3)
// It's better to use thread
mediaPlayer = MediaPlayer.create(MainActivity.this, R.raw.sound_01); // On success, prepare() will already have been called
// mediaPlayer.setOnPreparedListener(this); // cannot set this listener (MediaPlayer.create does not return before prepared)
isPrepared = true;
}
private void Unload()
{
isPrepared = false;
if (null != mediaPlayer)
{
mediaPlayer.release();
mediaPlayer = null;
}
}
// @Override
// public void onPrepared(MediaPlayer mp)
// {
// isPrepared = true;
// }
private void Play()
{
// If you got "start called in state xx" error, no sound will be heard.
// To reset this error, call reset(), setDataSource() and prepare()
// (for resources: call release() and create())
if (!isPrepared)
{
return;
}
mediaPlayer.start();
isPlaying = true;
}
private void Stop()
{
// Do not omit this check
// or you will get "called in wrong state" errors
// like "pause called in state 8"
// and error (-38, 0)
if (!isPlaying)
{
return;
}
isPlaying = false;
mediaPlayer.pause();
mediaPlayer.seekTo(0);
}
}
If it's ICS/device specific, these links may help. (A little old...)
after small sound is played, no sound will be heard
Issue 35861: Low Volume sound cut out - ICS Galaxy Note
audio focus bug
Issue 1908: No Audio with Android 4.0.4 ICS Galaxy Tab 10.1
device specific problem
No sound during calls (samsung galaxy s3 problem)
You might have a problem if you are using other AsyncTasks or the SerialExecutor in another task elsewhere in your program (and you may not even know it if you are using third-party SDKs).
See the post here:
https://code.google.com/p/android/issues/detail?id=20941
I'm suggesting this because your sound "tick" isn't working either. So it isn't necessarily a matter of the MediaPlayer executing with an incorrect setting; rather, some other task appears to be blocking it until that task stops, and it is probably a task that runs concurrently with when you expect to hear sound.
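If serialized AsyncTask execution is the culprit, a common workaround (API 11+) is to run the task on the parallel executor instead; a sketch with a hypothetical task class name:
// Hedged sketch: bypass the single-threaded SerialExecutor by running the
// AsyncTask on the shared thread pool (PlaySoundTask is a placeholder name).
PlaySoundTask task = new PlaySoundTask();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
    task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
} else {
    task.execute();
}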

Java.lang.Runtime exception: Take Picture failed?

I am taking pictures in the background inside my Android app. However, it gives an error:
02-09 15:22:12.061: E/cheeta(28633): timer testing
02-09 15:22:13.546: W/System.err(28633): java.lang.RuntimeException: takePicture failed
02-09 15:22:13.546: W/System.err(28633): at android.hardware.Camera.native_takePicture(Native Method)
02-09 15:22:13.546: W/System.err(28633): at android.hardware.Camera.takePicture(Camera.java:1194)
02-09 15:22:13.551: W/System.err(28633): at cam.sharp.MainActivity$MyTimerTask.run(MainActivity.java:69)
02-09 15:22:13.551: W/System.err(28633): at java.util.Timer$TimerImpl.run(Timer.java:284)
02-09 15:22:13.551: E/cheeta(28633): timer testing
02-09 15:22:15.051: W/System.err(28633): java.lang.RuntimeException: takePicture failed
02-09 15:22:15.051: W/System.err(28633): at android.hardware.Camera.native_takePicture(Native Method)
02-09 15:22:15.051: W/System.err(28633): at android.hardware.Camera.takePicture(Camera.java:1194)
02-09 15:22:15.051: W/System.err(28633): at cam.sharp.MainActivity$MyTimerTask.run(MainActivity.java:69)
02-09 15:22:15.051: W/System.err(28633): at java.util.Timer$TimerImpl.run(Timer.java:284)
02-09 15:22:15.051: E/cheeta(28633): timer testing
02-09 15:22:16.551: W/System.err(28633): java.lang.RuntimeException: takePicture failed
02-09 15:22:16.556: W/System.err(28633): at android.hardware.Camera.native_takePicture(Native Method)
02-09 15:22:16.556: W/System.err(28633): at android.hardware.Camera.takePicture(Camera.java:1194)
02-09 15:22:16.561: W/System.err(28633): at cam.sharp.MainActivity$MyTimerTask.run(MainActivity.java:69)
02-09 15:22:16.561: W/System.err(28633): at java.util.Timer$TimerImpl.run(Timer.java:284)
02-09 15:22:16.561: E/cheeta(28633): timer testing
I have two files.
MainActivity.java and CameraPreview.java
Here is the code for both.
MainActivity.java
package cam.sharp;
import java.io.File;
import java.io.FileOutputStream;
import java.util.Timer;
import java.util.TimerTask;
import android.app.Activity;
import android.content.Context;
import android.hardware.Camera;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.widget.FrameLayout;
import android.widget.Toast;
public class MainActivity extends Activity {
private int cameraId = 0;
private Camera mCamera;
private CameraPreview mPreview;
String fileName = "tempImage.jpeg";
File file;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
// Create an instance of Camera
mCamera = getCameraInstance(cameraId);
if (mCamera == null) {
Toast.makeText(
getApplicationContext(),
"The camera service is currently unavailable, please try again!",
Toast.LENGTH_LONG).show();
finish();
} else {
// Create our Preview view and set it as the content of our
// activity.
mPreview = new CameraPreview(this, mCamera);
FrameLayout frameLayout = (FrameLayout) findViewById(R.id.camera_preview);
frameLayout.addView(mPreview);
}
// start thread for these
MyTimerTask myTask = new MyTimerTask();
Timer myTimer = new Timer();
// public void schedule (TimerTask task, long delay, long period)
// Schedule a task for repeated fixed-delay execution after a specific
// delay.
//
// Parameters
// task the task to schedule.
// delay amount of time in milliseconds before first execution.
// period amount of time in milliseconds between subsequent executions.
myTimer.schedule(myTask, 3000, 1500);
}
class MyTimerTask extends TimerTask {
public void run() {
try {
mCamera.takePicture(null, null, null, mPictureCallback);
file = new File(getFilesDir(), fileName);
} catch (Exception e) {
e.printStackTrace();
}
Log.e("cheeta", "timer testing");
}
}
Camera.PictureCallback mPictureCallback = new Camera.PictureCallback() {
public void onPictureTaken(byte[] imageData, Camera c) {
Log.e("Callback TAG", "Here in jpeg Callback");
if (imageData != null) {
FileOutputStream outputStream;
try {
outputStream = openFileOutput(fileName,
Context.MODE_PRIVATE);
outputStream.write(imageData);
outputStream.close();
// Intent intent = new Intent(SnapScreen.this,
// PreviewScreen.class);
// if (fromMessageReview == true) {
// intent.putExtra("fromMessageReview", "true");
// }
// startActivity(intent);
// overridePendingTransition(R.anim.slide_in,
// R.anim.slide_out);
finish();
} catch (Exception e) {
e.printStackTrace();
}
}
}
};
@Override
protected void onDestroy() {
super.onDestroy();
releaseCamera();
}
/** A safe way to get an instance of the Camera object. */
public static Camera getCameraInstance(int cameraId) {
Camera c = null;
try {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
c = Camera.open(cameraId);
} else {
c = Camera.open();
}
} catch (Exception e) {
c = null;
}
return c; // returns null if camera is unavailable
}
private void releaseCamera() {
if (mCamera != null) {
mCamera.release(); // release the camera for other applications
mCamera = null;
}
}
}
CameraPreview.java
package cam.sharp;
import java.io.IOException;
import android.annotation.SuppressLint;
import android.content.Context;
import android.hardware.Camera;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
/** A basic Camera preview class */
@SuppressLint("ViewConstructor")
public class CameraPreview extends SurfaceView implements
SurfaceHolder.Callback {
private static final String TAG = "Camera Preview";
private SurfaceHolder mHolder;
public Camera mCamera;
@SuppressWarnings("deprecation")
@SuppressLint("NewApi")
public CameraPreview(Context context, Camera camera) {
super(context);
mCamera = camera;
mCamera.setDisplayOrientation(90);
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
// deprecated setting, but required on Android versions prior to 3.0
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, now tell the camera where to draw the
// preview.
try {
mCamera.setPreviewDisplay(holder);
mCamera.setDisplayOrientation(90);
mCamera.startPreview();
} catch (IOException e) {
Log.d(TAG, "Error setting camera preview: " + e.getMessage());
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// empty. Take care of releasing the Camera preview in your activity.
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
if (mHolder.getSurface() == null) {
// preview surface does not exist
return;
}
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e) {
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
} catch (Exception e) {
Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
}
Can anyone see what the issue is? I am calling mCamera.startPreview(); but it's still no use.
Thanks
You have 2 problems in your code:
First: in your onPictureTaken callback you are calling the finish() method, which in turn signals that the activity should be destroyed and calls the onDestroy() method, which in turn releases your camera. However your MainActivity is not destroyed (not really sure why, but through LogCat I found that onCreate() is only being called once, so I assumed the activity is not destroyed; a possible explanation might be that the Timer is controlled by a different thread and as such might not be aware that the MainActivity was destroyed, but I can't confirm). So your myTimer will continue to run, and when it gets to mCamera.takePicture(null, null, null, mPictureCallback); it will throw a NullPointerException, because the camera was already released and MainActivity.onCreate() wasn't called again to get a new instance for mCamera.
So, to solve the first problem:
Camera.PictureCallback mPictureCallback = new Camera.PictureCallback() {
public void onPictureTaken(byte[] imageData, Camera c) {
Log.e("Callback TAG", "Here in jpeg Callback");
if (imageData != null) {
FileOutputStream outputStream = null;
try {
outputStream = openFileOutput(fileName, Context.MODE_PRIVATE);
outputStream.write(imageData);
// Removed the finish call you had here
} catch (Exception e) {
e.printStackTrace();
} finally {
if (outputStream != null) try {
outputStream.close();
} catch (IOException ex) {
// TODO Auto-generated catch block
ex.printStackTrace();
}
}
}
}
};
Second: where you call your startPreview() method. According to the documentation of takePicture():
This method is only valid when preview is active (after
startPreview()). Preview will be stopped after the image is taken;
callers must call startPreview() again if they want to re-start
preview or take more pictures. This should not be called between
start() and stop().
You were only calling startPreview() once, when you create the camera, and because of problem 1, onCreate() in MainActivity is only called once. Since you have a timer taking pictures every 1.5 seconds, you should call startPreview() before calling takePicture(). So to solve this:
class MyTimerTask extends TimerTask {
public void run() {
try {
// Call startPreview before taking a picture
mCamera.startPreview();
mCamera.takePicture(null, null, null, mPictureCallback);
file = new File(getFilesDir(), fileName);
} catch (NullPointerException ne) {
ne.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
Log.e("cheeta", "timer testing");
}
}
After this the app continuously takes pictures and stores them. I never used a Timer like that, so I'm not sure of the cleanest way to make it stop. If you only want a small number of pictures taken within each call to the CameraPreview activity, I suggest you count the shots and cancel the TimerTask, like so:
class MyTimerTask extends TimerTask {
private int picturesTaken = 0;
@Override
public void run() {
mCamera.startPreview();
mCamera.takePicture(null, null, null, mPictureCallback);
if (++picturesTaken >= 5) { // take x pictures, then stop
cancel(); // TimerTask.cancel() prevents further executions
}
}
}
