I have applied the answers from the following posts, but I am still unable to solve the problem.
OpenCV camera orientation issue
I am getting the following exception.
Exception locking surface
java.lang.IllegalArgumentException
at android.view.Surface.nativeLockCanvas(Native Method)
at android.view.Surface.lockCanvas(Surface.java:264)
at android.view.SurfaceView$4.internalLockCanvas(SurfaceView.java:825)
at android.view.SurfaceView$4.lockCanvas(SurfaceView.java:793)
at org.opencv.android.CameraBridgeViewBase.deliverAndDrawFrame(CameraBridgeViewBase.java:403)
at org.opencv.android.JavaCameraView$CameraWorker.run(JavaCameraView.java:365)
at java.lang.Thread.run(Thread.java:818)
Here is the OpenCV library function that throws the exception:
/**
 * Converts the (possibly listener-modified) camera frame into {@code mCacheBitmap}
 * and blits it centred onto the SurfaceView's canvas, honouring the optional
 * scale factor {@code mScale} and drawing the FPS meter if one is attached.
 *
 * Fix (review): the original wrapped both matToBitmap() and lockCanvas() in one
 * try/catch whose log message blamed matToBitmap() for any failure — including
 * the IllegalArgumentException thrown by Surface.lockCanvas() during surface
 * teardown (the stack trace in this post). The two calls are now caught
 * separately so the log identifies the real failure; the drawing outcome is
 * unchanged (a null canvas is skipped either way).
 *
 * @param frame the current camera frame delivered by the worker thread
 */
protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
    Mat modified;
    if (mListener != null) {
        modified = mListener.onCameraFrame(frame);
    } else {
        modified = frame.rgba();
    }

    boolean bmpValid = true;
    Canvas canvas = null;
    if (modified != null) {
        try {
            Utils.matToBitmap(modified, mCacheBitmap);
        } catch (Exception e) {
            Log.e(TAG, "Mat type: " + modified);
            Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
            Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
            bmpValid = false;
        }
        if (bmpValid) {
            try {
                canvas = getHolder().lockCanvas();
            } catch (Exception e) {
                // lockCanvas() can throw IllegalArgumentException while the
                // surface is being destroyed (e.g. on pause/rotation); skip this frame.
                Log.e(TAG, "lockCanvas() throws an exception: " + e.getMessage());
            }
        }
    }

    if (bmpValid && mCacheBitmap != null) {
        if (canvas != null) {
            canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
            Log.d(TAG, "mStretch value: " + mScale);
            if (mScale != 0) {
                // Scaled blit, centred on the canvas.
                canvas.drawBitmap(mCacheBitmap, new Rect(0, 0, mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
                        new Rect((int) ((canvas.getWidth() - mScale * mCacheBitmap.getWidth()) / 2),
                                (int) ((canvas.getHeight() - mScale * mCacheBitmap.getHeight()) / 2),
                                (int) ((canvas.getWidth() - mScale * mCacheBitmap.getWidth()) / 2 + mScale * mCacheBitmap.getWidth()),
                                (int) ((canvas.getHeight() - mScale * mCacheBitmap.getHeight()) / 2 + mScale * mCacheBitmap.getHeight())), null);
            } else {
                // Unscaled blit, centred on the canvas.
                canvas.drawBitmap(mCacheBitmap, new Rect(0, 0, mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
                        new Rect((canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
                                (canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
                                (canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
                                (canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()), null);
            }
            if (mFpsMeter != null) {
                mFpsMeter.measure();
                mFpsMeter.draw(canvas, 20, 30);
            }
            getHolder().unlockCanvasAndPost(canvas);
        }
    }
}
Here is my main activity, where I use the OpenCV library that calls this method:
package org.opencv.samples.facedetect;
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.graphics.Camera;
import android.media.AudioManager;
import android.net.ConnectivityManager;
import android.os.Build;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.provider.Settings;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import org.opencv.samples.facedetect.DetectionBasedTracker;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
public class FdActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2 , SharedPreferences.OnSharedPreferenceChangeListener {
private static final String TAG = "OCVSample::Activity";
private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);
public static final int JAVA_DETECTOR = 0;
public static final int NATIVE_DETECTOR = 1;
//private MenuItem mItemExit;
private MenuItem mItemSettings ;
private MenuItem showCounterValues ;
private TextView faceCounterTv ;
private Button resetButton ;
private Button savebtn ;
private Button quitButton ;
private Mat mRgba;
private Mat mGray;
private File mCascadeFile;
private CascadeClassifier mJavaDetector;
private DetectionBasedTracker mNativeDetector;
private int mDetectorType = JAVA_DETECTOR;
// private String[] mDetectorName;
private float mRelativeFaceSize = 0.2f;
private int mAbsoluteFaceSize = 0;
private float scaleFactor ;
private int minNeighbour ;
private int delayTime ;
private boolean isFaces_detect ;
private boolean isFaces_detect_pre ;
private boolean count_Face_Logic ;
private float countFace = 0.0f ;
private long startTime ;
private AudioManager mAudioManager ;
private static final int MY_PERMISSIONS_REQUEST_ACCOUNTS = 1;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
#Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS: {
Log.i(TAG, "OpenCV loaded successfully");
// Load native library after(!) OpenCV initialization
System.loadLibrary("detectionBasedTracker");
try {
// load cascade file from application resources
InputStream is = getResources().openRawResource(R.raw.haarcascade_frontalface_default);
File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
mCascadeFile = new File(cascadeDir, "haarcascade_frontalface_default.xml");
FileOutputStream os = new FileOutputStream(mCascadeFile);
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
os.close();
mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
if (mJavaDetector.empty()) {
Log.e(TAG, "Failed to load cascade classifier");
mJavaDetector = null;
} else
Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0);
cascadeDir.delete();
} catch (IOException e) {
e.printStackTrace();
Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
}
mOpenCvCameraView.enableView();
}
break;
default: {
super.onManagerConnected(status);
}
break;
}
}
};
public FdActivity() {
isFaces_detect = false ;
isFaces_detect_pre = false ;
count_Face_Logic = true ;
startTime = System.currentTimeMillis();
mAbsoluteFaceSize = 200 ;
scaleFactor = 1.2f ;
minNeighbour = 1 ;
delayTime = 1 ;
Log.i(TAG, "Instantiated new " + this.getClass());
}
/**
* Called when the activity is first created.
*/
#Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
//getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.face_detect_surface_view);
setSupportActionBar((Toolbar) findViewById(R.id.toolbar));
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.fd_activity_surface_view);
faceCounterTv = (TextView) findViewById(R.id.faceCountertv);
resetButton = (Button) findViewById(R.id.resetbtn);
savebtn = (Button) findViewById(R.id.savebtn);
quitButton = (Button) findViewById(R.id.quitbtn);
mAudioManager = (AudioManager) getSystemService(AUDIO_SERVICE);
resetButton.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View view) {
countFace = 0 ;
Toast.makeText(getApplicationContext() , "Reset the Face Counter" , Toast.LENGTH_LONG).show();
}
});
savebtn.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View view) {
saveFaceCounter();
Toast.makeText(getApplicationContext() , "Counter Value Saved" , Toast.LENGTH_SHORT).show();
}
});
quitButton.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View view) {
System.exit(0);
}
});
mOpenCvCameraView.setVisibility(CameraBridgeViewBase.VISIBLE);
// mOpenCvCameraView.setAlpha(0);
mOpenCvCameraView.setCameraIndex(1);
mOpenCvCameraView.setCvCameraViewListener(this);
//if (checkAndRequestPermissions()){
// Toast.makeText(getApplicationContext() , "OnCreate" , Toast.LENGTH_LONG).show();
//setSharedPreferences();
//}
// check current state first
// boolean state = isAirplaneMode();
// // toggle the state
// if (state)
// toggleAirplaneMode(0, state);
// else
// toggleAirplaneMode(1, state);
}
#Override
public void onPause() {
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
#Override
public void onResume() {
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
mOpenCvCameraView.disableView();
PreferenceManager.getDefaultSharedPreferences(this).unregisterOnSharedPreferenceChangeListener(this);
}
public void onCameraViewStarted(int width, int height) {
mGray = new Mat();
mRgba = new Mat();
}
public void onCameraViewStopped() {
mGray.release();
mRgba.release();
}
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
// Mat mRgbaT = mRgba.t();
// Core.flip(mRgba.t(), mRgbaT, 1);
// Imgproc.resize(mRgbaT, mRgbaT, mRgba.size());
mGray = inputFrame.gray();
//Core.transpose(mGray, mGray);
//Core.flip(mGray, mGray, 0);
if (mAbsoluteFaceSize == 0) {
int height = mGray.rows();
if (Math.round(height * mRelativeFaceSize) > 0) {
mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
}
mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
}
MatOfRect faces = new MatOfRect();
if (mDetectorType == JAVA_DETECTOR) {
if (mJavaDetector != null)
mJavaDetector.detectMultiScale(mGray, faces, scaleFactor, minNeighbour, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
} else if (mDetectorType == NATIVE_DETECTOR) {
if (mNativeDetector != null)
mNativeDetector.detect(mGray, faces);
} else {
Log.e(TAG, "Detection method is not selected!");
}
Rect[] facesArray = faces.toArray();
for (int i = 0; i < facesArray.length; i++) {
Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
}
countDetectedFace(faces);
runOnUiThread(new Runnable() {
#Override
public void run() {
faceCounterTv.setText(String.valueOf(countFace));
}
});
return mRgba;
}
public void countDetectedFace(MatOfRect faces){
// do{
// This block is to make sure the it only count face when it appears. e.g. : no detected face --> face --> no detected face (count as 1)
if (faces.empty()){
isFaces_detect = isFaces_detect_pre = false ;
}
else{
isFaces_detect = true ;
}
// Only count when previous frame = 0 and current frame = 1. Eliminate counting when successive frame have face detected
if ((isFaces_detect_pre == false) && (isFaces_detect == true) && (count_Face_Logic == true)){
countFace += 0.25 ; // four times it detect face equal to 1
startTime = System.currentTimeMillis(); // store new time value so that it do not count every miliseconds
isFaces_detect_pre = true ;
Log.d(TAG , String.valueOf(countFace));
}
if ((System.currentTimeMillis() - startTime) < delayTime){ // to make sure it doesnt count every frame, buffer of 1 seconds
count_Face_Logic = false ;
}
else{
count_Face_Logic = true ;
}
// }while(!isAppExit);
}
}
How can I get rid of this exception?
Related
I have a custom camera application and everything works fine. But whenever the app is paused (when onPause or onDestroy is called) the camera is released, and afterwards, when onResume is called and the capture button is clicked to take an image, my application crashes. How do I fix this? Please help me — thanks in advance.
CameraActivity Code
package com.example.skmishra.plates.Activities;
import android.app.ActionBar;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.hardware.Camera;
import android.hardware.SensorManager;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.MotionEvent;
import android.view.OrientationEventListener;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.ZoomControls;
import com.example.skmishra.plates.Asyncs.CameraAsync;
import com.example.skmishra.plates.CameraHandler;
import com.example.skmishra.plates.Library.Fonts;
import com.example.skmishra.plates.R;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
/**
* Created by skmishra on 12/28/2015.
*/
public class camera extends Activity {
private static final int RESULT_LOAD_IMAGE = 200 ;
private Camera mCamera=null;
private CameraHandler surface_view;
public static final int MEDIA_TYPE_IMAGE = 1;
public static final int MEDIA_TYPE_VIDEO = 2;
public static final String TAG = "Aloo";
int toRotate = 90;
public int currentCameraID = 0;
OrientationEventListener myOrientationEventListener;
private ZoomControls zoomControls;
private double mDist;
Boolean imageSwitchClicked = false;
Boolean mShowFlash = false;
ImageView mSwitch_cam;
ImageView mFlashBut;
FrameLayout preview;
CameraAsync mCamAsync;
ImageView imageGallery;
TextView raleway;
TextView headerCameraText;
Fonts mFonts;
int permCode=4;
Camera.Parameters params;
String recievedType=null;
#Override
protected void onCreate(Bundle savedInstanceState) {
Log.e("I Called Thus "," cda");
mCamAsync=new CameraAsync(this);
mCamAsync.execute();
super.onCreate(savedInstanceState);
setContentView(R.layout.camera);
imageGallery = (ImageView) findViewById(R.id.select_gallery);
mFonts = new Fonts();
preview = (FrameLayout) findViewById(R.id.camera_preview);
mFlashBut = (ImageView) findViewById(R.id.flash);
mSwitch_cam = (ImageView) findViewById(R.id.white_switch);
raleway = (TextView) findViewById(R.id.textView2);
headerCameraText = (TextView) findViewById(R.id.imageHead);
// mFonts.setRalewayBold(this, headerCameraText);
Intent gets = getIntent();
recievedType = gets.getExtras().getString("recievedCameraPurpose");
handleHeaderText(recievedType);
mFonts.setRalewayBold(this, raleway);
myOrientationEventListener
= new OrientationEventListener(this, SensorManager.SENSOR_DELAY_NORMAL) {
#Override
public void onOrientationChanged(int arg0) {
int rotation = arg0;
if (rotation > 340) {
if (currentCameraID == 0) {
toRotate = 90;
} else {
toRotate =270;
Log.e("POSITION_TITLT", "-> Potrait Front camera");
}
} else if (rotation < 80 && rotation > 30) {
toRotate = 180;
Log.e("POSITION_TILT", "-> Landscape Right " + rotation);
} else if (rotation < 280 && rotation > 240) {
toRotate = 0;
Log.e("POSITION_TILT", "-> Landscape Left " + rotation);
}
}
};
if (myOrientationEventListener.canDetectOrientation()) {
myOrientationEventListener.enable();
} else {
Toast.makeText(this, "Can't DetectOrientation", Toast.LENGTH_LONG).show();
finish();
}
}
private boolean checkifCamera(Context context) {
if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)) {
return true;
} else {
return false;
}
}
public Camera getCameraInstance() {
Camera c = null;
try {
releaseCameraAndPreview();
c = Camera.open();
} catch (Exception e) {
Toast.makeText(this, "Print error" + e.getMessage(), Toast.LENGTH_LONG).show();
}
return c;
}
public void onCompleteInstanceCameraAysnc(Camera camera)
{
mCamera = camera;
surface_view = new CameraHandler(this, mCamera);
params = mCamera.getParameters();
preview.addView(surface_view);
set_image_gallery();
}
public void switchC(View view) {
if (!imageSwitchClicked) {
mSwitch_cam.setAlpha(1.0f);
imageSwitchClicked = true;
} else {
mSwitch_cam.setAlpha(0.5f);
imageSwitchClicked = false;
}
setCameraID();
mCamera = surface_view.switchCamera();
params=mCamera.getParameters();
}
public void flash_onOf(View view) {
if (!mShowFlash) {
params.setFlashMode(Camera.Parameters.FLASH_MODE_ON);
mFlashBut.setAlpha(1.0f);
mShowFlash = true;
} else {
params.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
mFlashBut.setAlpha(0.5f);
mShowFlash = false;
}
}
private void releaseCameraAndPreview() {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.lock();
mCamera.release();
mCamera=null;
}
else
{
Log.e("Cert","Lerts");
}
}
#Override
protected void onResume() {
super.onResume();
}
#Override
protected void onDestroy() {
Log.e("LLL", "Dessssdccc");
super.onDestroy();
try {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.lock();
myOrientationEventListener.disable();
mCamera.release();
mCamera=null;
permCode=15;
} catch (Exception e) {
e.printStackTrace();
}
}
#Override
protected void onPause() {
Log.e("LLL", "Dessssdccc");
super.onPause();
try {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.lock();
myOrientationEventListener.disable();
mCamera.release();
mCamera=null;
permCode=15;
} catch (Exception e) {
e.printStackTrace();
}
}
public void takePH(View view) {
if(mShowFlash && !imageSwitchClicked)
{
params.setFlashMode(Camera.Parameters.FLASH_MODE_ON);
}
params.set("rotation", toRotate);
mCamera.setParameters(params);
mCamera.takePicture(null, null, mPicture);
}
Camera.PictureCallback mPicture = new Camera.PictureCallback() {
#Override
public void onPictureTaken(byte[] data, Camera camera) {
File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
if (pictureFile == null) {
Log.d(TAG, "Error creating media file, check storage permissions: ");
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.close();
} catch (FileNotFoundException e) {
Log.d(TAG, "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d(TAG, "Error accessing file: " + e.getMessage());
}
Intent i=new Intent(getApplicationContext(),ShowOut.class);
i.putExtra("purpose",recievedType);
i.putExtra("img-url",pictureFile.toString());
startActivity(i);
}
};
/**
* Create a file Uri for saving an image or video
*/
private static Uri getOutputMediaFileUri(int type) {
return Uri.fromFile(getOutputMediaFile(type));
}
/**
* Create a File for saving an image or video
*/
private static File getOutputMediaFile(int type) {
// To be safe, you should check that the SDCard is mounted
// using Environment.getExternalStorageState() before doing this.
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES), "Plates");
// This location works best if you want the created images to be shared
// between applications and persist after your app has been uninstalled.
// Create the storage directory if it does not exist
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
Log.d("MyCameraApp", "failed to create directory");
return null;
}
}
// Create a media file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
File mediaFile;
if (type == MEDIA_TYPE_IMAGE) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"IMG_" + timeStamp + ".jpg");
} else if (type == MEDIA_TYPE_VIDEO) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"VID_" + timeStamp + ".mp4");
} else {
return null;
}
return mediaFile;
}
public void handleHeaderText(String type) {
Log.e("Type",type);
headerCameraText.setText("");
if (type.equals("ADD_COVER_PLATES")) {
headerCameraText.setText("Take a cover image for your plate");
}
else if(type.equals("ADD_PROFILE_USER"))
{
imageGallery.setVisibility(View.GONE);
}
else if(type.equals("PLATE_UPLOAD_SINGLETON")) {
headerCameraText.setText("Click an image for a plate");
}
}
public void setCameraID() {
if (currentCameraID == Camera.CameraInfo.CAMERA_FACING_BACK) {
currentCameraID = Camera.CameraInfo.CAMERA_FACING_FRONT;
toRotate = 270;
} else {
currentCameraID = Camera.CameraInfo.CAMERA_FACING_BACK;
toRotate = 90;
}
}
#Override
public boolean onTouchEvent(MotionEvent event) {
// Get the pointer ID
Camera.Parameters params = mCamera.getParameters();
int action = event.getAction();
if (event.getPointerCount() > 1) {
// handle multi-touch events
if (action == MotionEvent.ACTION_POINTER_DOWN) {
mDist = getFingerSpacing(event);
} else if (action == MotionEvent.ACTION_MOVE && params.isZoomSupported()) {
mCamera.cancelAutoFocus();
handleZoom(event, params);
}
} else {
// handle single touch events
if (action == MotionEvent.ACTION_UP) {
handleFocus(event, params);
}
}
return true;
}
private void handleZoom(MotionEvent event, Camera.Parameters params) {
int maxZoom = params.getMaxZoom();
int zoom = params.getZoom();
double newDist = getFingerSpacing(event);
if (newDist > mDist) {
//zoom in
if (zoom < maxZoom)
zoom++;
} else if (newDist < mDist) {
//zoom out
if (zoom > 0)
zoom--;
}
mDist = newDist;
params.setZoom(zoom);
mCamera.setParameters(params);
}
public void handleFocus(MotionEvent event, Camera.Parameters params) {
int pointerId = event.getPointerId(0);
int pointerIndex = event.findPointerIndex(pointerId);
// Get the pointer's current position
float x = event.getX(pointerIndex);
float y = event.getY(pointerIndex);
List<String> supportedFocusModes = params.getSupportedFocusModes();
if (supportedFocusModes != null && supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
mCamera.autoFocus(new Camera.AutoFocusCallback() {
#Override
public void onAutoFocus(boolean b, Camera camera) {
// currently set to auto-focus on single touch
}
});
}
}
/**
* Determine the space between the first two fingers
*/
private double getFingerSpacing(MotionEvent event) {
// ...
float x = event.getX(0) - event.getX(1);
float y = event.getY(0) - event.getY(1);
double pres;
pres = Math.sqrt(x * x + y * y);
return pres;
}
public void set_image_gallery() {
// Find the last picture
String[] projection = new String[]{
MediaStore.Images.ImageColumns._ID,
MediaStore.Images.ImageColumns.DATA,
MediaStore.Images.ImageColumns.BUCKET_DISPLAY_NAME,
MediaStore.Images.ImageColumns.DATE_TAKEN,
MediaStore.Images.ImageColumns.MIME_TYPE
};
final Cursor cursor = getContentResolver()
.query(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, projection, null,
null,MediaStore.Images.ImageColumns._ID + " DESC");
// Put it in the image view
if (cursor.moveToFirst()) {
String imageLocation = cursor.getString(1);
File imageFile = new File(imageLocation);
if (imageFile.exists()) { // TODO: is there a better way to do this?
Bitmap bm=decodeFile(imageFile);
imageGallery.setImageBitmap(bm);
}
}
cursor.close();
}
public Bitmap decodeFile(File f) {
try {
//Decode image size
BitmapFactory.Options o = new BitmapFactory.Options();
o.inJustDecodeBounds = true;
BitmapFactory.decodeStream(new FileInputStream(f), null, o);
//The new size we want to scale to
final int REQUIRED_SIZE = 490;
//Find the correct scale value. It should be the power of 2.
int scale = 1;
while (o.outWidth / scale / 2 >= REQUIRED_SIZE && o.outHeight / scale / 2 >= REQUIRED_SIZE)
scale *= 2;
//Decode with inSampleSize
BitmapFactory.Options o2 = new BitmapFactory.Options();
o2.inSampleSize = scale;
return BitmapFactory.decodeStream(new FileInputStream(f), null, o2);
} catch (FileNotFoundException e) {
}
return null;
}
public void imagePick(View view)
{
Intent photoPickerIntent = new Intent(Intent.ACTION_PICK);
photoPickerIntent.setType("image/*");
startActivityForResult(photoPickerIntent, RESULT_LOAD_IMAGE);
}
#Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == RESULT_LOAD_IMAGE && resultCode == RESULT_OK && null != data) {
Uri selectedImage = data.getData();
String[] filePathColumn = {MediaStore.Images.Media.DATA};
Cursor cursor = getContentResolver().query(selectedImage,
filePathColumn, null, null, null);
cursor.moveToFirst();
int columnIndex = cursor.getColumnIndex(filePathColumn[0]);
String picturePath = cursor.getString(columnIndex);
cursor.close();
Intent transfer=null;
if(recievedType.equals("ADD_COVER_PLATES")) {
transfer = new Intent(this, create_plates.class);
}
else if(recievedType.equals("PLATE_UPLOAD_SINGLETON"))
{
transfer=new Intent(this,plate_select_upload.class);
}
transfer.putExtra("imagUrl",picturePath);
startActivity(transfer);
}
}
}
Camera Handler Code
package com.example.skmishra.plates;
import android.content.Context;
import android.hardware.Camera;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import java.io.IOException;
/**
* Created by skmishra on 12/28/2015.
*/
public class CameraHandler extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder mHolder;
private Camera mCamera=null;
public int currentCameraID=0;
public CameraHandler(Context context,Camera camera) {
super(context);
mCamera=camera;
mHolder=getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_GPU);
}
#Override
public void surfaceCreated(SurfaceHolder holder) {
try {
if(mCamera==null)
{
mCamera=Camera.open();
}
mCamera.setPreviewDisplay(holder);
Camera.Parameters p = mCamera.getParameters();
}
catch (IOException e)
{
Log.d("--DS", "Error setting camera preview: " + e.getMessage());
}
}
#Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
fixOr();
if(mHolder.getSurface()==null)
{
return;
}
if (mHolder.getSurface() == null){
// preview surface does not exist
return;
}
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
} catch (Exception e){
Log.d("--DS", "Error starting camera preview: " + e.getMessage());
}
}
#Override
public void surfaceDestroyed(SurfaceHolder holder) {
// TODO Auto-generated method stub
mCamera.release();
mCamera = null;
}
public void fixOr()
{
mCamera.stopPreview();
mCamera.setDisplayOrientation(90);
mCamera.startPreview();
}
public Camera switchCamera() {
mCamera.stopPreview();
mCamera.release();
if(currentCameraID==Camera.CameraInfo.CAMERA_FACING_BACK)
{
currentCameraID = Camera.CameraInfo.CAMERA_FACING_FRONT;
}
else
{
currentCameraID=Camera.CameraInfo.CAMERA_FACING_BACK;
}
mCamera=Camera.open(currentCameraID);
fixOr();
try {
mCamera.setPreviewDisplay(mHolder);
} catch (IOException e) {
e.printStackTrace();
}
mCamera.startPreview();
return mCamera;
}
}
UPDATE **
StackTrace
Process: com.example.skmishra.plates, PID: 10575
java.lang.RuntimeException: Fail to connect to camera service
at android.hardware.Camera.<init>(Camera.java:545)
at android.hardware.Camera.open(Camera.java:403)
at com.example.skmishra.plates.CameraHandler.surfaceCreated(CameraHandler.java:35)
at android.view.SurfaceView.updateWindow(SurfaceView.java:599)
at android.view.SurfaceView.onWindowVisibilityChanged(SurfaceView.java:243)
at android.view.View.dispatchWindowVisibilityChanged(View.java:9034)
at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:1275)
at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:1275)
at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:1275)
at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:1275)
at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:1275)
at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:1319)
at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1062)
at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:5873)
at android.view.Choreographer$CallbackRecord.run(Choreographer.java:767)
at android.view.Choreographer.doCallbacks(Choreographer.java:580)
at android.view.Choreographer.doFrame(Choreographer.java:550)
at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:753)
at android.os.Handler.handleCallback(Handler.java:739)
at android.os.Handler.dispatchMessage(Handler.java:95)
at android.os.Looper.loop(Looper.java:135)
at android.app.ActivityThread.main(ActivityThread.java:5753)
at java.lang.reflect.Method.invoke(Native Method)
at java.lang.reflect.Method.invoke(Method.java:372)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1405)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1200)
After a lot of research, I finally found out what the problem was: it was the FrameLayout. I had to remove it in onPause and recreate it in onResume.
#Override
protected void onResume() {
super.onResume();
mCamAsync = new CameraAsync(this);//Async task to get the camera instance
mCamAsync.execute();
}
#Override
protected void onPause() {
super.onPause();
releaseCameraAndPreview();
preview = (FrameLayout) findViewById(R.id.camera_preview);
preview.removeViewAt(0);
}
** EDIT **
I also removed the execution of CameraAsync from onCreate; that means I instantiate the camera only in onResume.
This is the MainActivity.java
package com.test.webservertest;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.speech.tts.TextToSpeech;
import android.speech.tts.TextToSpeech.OnInitListener;
import android.support.v7.app.ActionBarActivity;
import android.util.AndroidRuntimeException;
import android.util.Xml;
import android.view.Menu;
import android.view.MenuItem;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import android.content.Context;
import android.view.View.OnClickListener;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.util.EncodingUtils;
import org.json.JSONObject;
import org.json.JSONTokener;
import java.io.DataOutputStream;
import java.net.HttpURLConnection;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.Socket;
import java.net.URL;
import java.net.UnknownHostException;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.Locale;
import java.net.URLConnection;
import java.net.HttpURLConnection;
import java.io.*;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.logging.Logger;
import android.widget.Button;
import android.widget.TextView;
public class MainActivity extends ActionBarActivity
{
private static final int MY_DATA_CHECK_CODE = 0;
public static MainActivity currentActivity;
TextToSpeech mTts;
private String targetURL;
private String urlParameters;
private Button btnClick;
private String clicking = "clicked";
private final String[] ipaddresses = new String[2];
private final Integer[] ipports = new Integer[2];
private TextView text;
private Socket socket;
#Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
//setContentView(new SingleTouchEventView(this, null));
/*ipaddresses[0] = "10.0.0.3";
ipaddresses[1] = "10.0.0.2";
ipports[0] = 8098;
ipports[1] = 8088;*/
//addListenerOnButton();
currentActivity = this;
initTTS();
}
public void addListenerOnButton() {
/*btnClick = (Button) findViewById(R.id.checkipbutton);
btnClick.setOnClickListener(new OnClickListener()
{
#Override
public void onClick(View arg0)
{
text = (TextView) findViewById(R.id.textView2);
try
{
} catch (Exception e)
{
text.setText("Connection Failed");
}
}
});*/
}
#Override
public boolean onCreateOptionsMenu(Menu menu)
{
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
#Override
public boolean onOptionsItemSelected(MenuItem item)
{
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings)
{
return true;
}
return super.onOptionsItemSelected(item);
}
/**
* Dispatch onStart() to all fragments. Ensure any created loaders are
* now started.
*/
#Override
protected void onStart()
{
super.onStart();
TextToSpeechServer.main(null);
}
#Override
protected void onStop() {
super.onStop();
}
public void initTTS() {
Intent checkIntent = new Intent();
checkIntent.setAction(TextToSpeech.Engine.ACTION_CHECK_TTS_DATA);
startActivityForResult(checkIntent, MY_DATA_CHECK_CODE);
}
#Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if(requestCode == MY_DATA_CHECK_CODE) {
if(resultCode == TextToSpeech.Engine.CHECK_VOICE_DATA_PASS) {
mTts = new TextToSpeech(getApplicationContext(), new OnInitListener() {
#Override
public void onInit(int i) {
if(i == TextToSpeech.SUCCESS) {
int result = mTts.setLanguage(Locale.US);
if(result == TextToSpeech.LANG_AVAILABLE
|| result == TextToSpeech.LANG_COUNTRY_AVAILABLE) {
mTts.setPitch(1);
mTts.speak(textforthespeacch, TextToSpeech.QUEUE_FLUSH, null);
}
}
}
});
} else {
Intent installIntent = new Intent();
installIntent.setAction(TextToSpeech.Engine.ACTION_INSTALL_TTS_DATA);
startActivity(installIntent);
}
}
}
public static String textforthespeacch = "";
public static void TextToSpeak(String text) {
textforthespeacch = text;
}
private boolean is_start = true;
public class SingleTouchEventView extends View {
private Paint paint = new Paint();
private Path path = new Path();
private Path circlePath = new Path();
public SingleTouchEventView(Context context, AttributeSet attrs) {
super(context, attrs);
paint.setAntiAlias(true);
paint.setStrokeWidth(6f);
paint.setColor(Color.BLACK);
paint.setStyle(Paint.Style.STROKE);
paint.setStrokeJoin(Paint.Join.ROUND);
}
#Override
protected void onDraw(Canvas canvas) {
canvas.drawPath(path, paint);
canvas.drawPath(circlePath, paint);
}
#Override
public boolean onTouchEvent(MotionEvent event)
{
float eventX = event.getX();
float eventY = event.getY();
float lastdownx = 0;
float lastdowny = 0;
switch (event.getAction())
{
case MotionEvent.ACTION_DOWN:
path.moveTo(eventX, eventY);
circlePath.addCircle(eventX, eventY, 50, Path.Direction.CW);
lastdownx = eventX;
lastdowny = eventY;
Thread t = new Thread(new Runnable()
{
#Override
public void run()
{
byte[] response = null;
if (is_start == true)
{
response = Get("http://10.0.0.2:8098/?cmd=start");
is_start = false;
}
else
{
response = Get("http://10.0.0.2:8098/?cmd=stop");
is_start = true;
}
if (response!=null)
{
String a = null;
try
{
a = new String(response,"UTF-8");
textforthespeacch = a;
MainActivity.currentActivity.initTTS();
} catch (UnsupportedEncodingException e)
{
e.printStackTrace();
}
Logger.getLogger("MainActivity(inside thread)").info(a);
}
}
});
t.start();
return true;
case MotionEvent.ACTION_MOVE:
path.lineTo(eventX, eventY);
break;
case MotionEvent.ACTION_UP:
// nothing to do
circlePath.reset();
break;
default:
return false;
}
// Schedules a repaint.
invalidate();
return true;
}
}
private byte[] Get(String urlIn)
{
URL url = null;
String urlStr = urlIn;
if (urlIn!=null)
urlStr=urlIn;
try
{
url = new URL(urlStr);
} catch (MalformedURLException e)
{
e.printStackTrace();
return null;
}
HttpURLConnection urlConnection = null;
try
{
urlConnection = (HttpURLConnection) url.openConnection();
InputStream in = new BufferedInputStream(urlConnection.getInputStream());
byte[] buf=new byte[10*1024];
int szRead = in.read(buf);
byte[] bufOut;
if (szRead==10*1024)
{
throw new AndroidRuntimeException("the returned data is bigger than 10*1024.. we don't handle it..");
}
else
{
bufOut = Arrays.copyOf(buf, szRead);
}
return bufOut;
}
catch (IOException e)
{
e.printStackTrace();
return null;
}
finally
{
if (urlConnection!=null)
urlConnection.disconnect();
}
}
}
It was working fine. Maybe I need to enable something on my Android device?
My Android device is connected now to the PC via USB and working fine.
When I run my program in debug mode and touch the screen, it never stops at:
#Override
public boolean onTouchEvent(MotionEvent event)
{
float eventX = event.getX();
I added a breakpoint on the line `float eventX = event.getX();`,
but touching the screen does nothing.
You are not registering the OnTouchListener at all. First register an OnTouchListener, either with an anonymous class or by implementing the interface in your class.
From your code, you need the listener for the SingleTouchEventView view. Just add "implements View.OnTouchListener" to the class, because you have already overridden the onTouch method in your code.
You have to change your styles.xml when you remove "extends ActionBarActivity"!
Check this change to the appTheme.
I have created a counter from the class CountdownTimer.java, and I'd like to adjust this timer so that it stores a coordinate, say, every 2 seconds. This should be made general at a later point; for now I'm happy if it stores one coordinate each second.
The problem I am having is that the timer is too fast: after pressing Start, it prints all the coordinates within 1 second!
I printed the results to a file with a timestamp, which is how I know it is too fast.
So here is my code:
package com.bosch.uadclient;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.osmdroid.util.GeoPoint;
import android.annotation.SuppressLint;
import android.app.AlarmManager;
import android.app.Fragment;
import android.app.PendingIntent;
import android.content.Intent;
import android.os.Bundle;
import android.os.CountDownTimer;
import android.os.Environment;
import android.os.SystemClock;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.example.uadclient.R;
public class PartyFragment extends Fragment {
private MyCountdownTimer countDownTimer;
private long pointsCollected;
private boolean timerHasStarted = false;
private TextView text;
private TextView EdittextTime;
private TextView EdittextIntervall;
private TextView timeElapsedView;
private ArrayList<String> arrayList = new ArrayList<String>();
private long mPauseTime;
private long startTime;
private long interval;
private int point = 0;
private Button buttonTimer;
// LocationProviderService locService;
String s1 = "";
String s2 = "";
String lon = "";
String lat = "";
#Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
LinearLayout rr = (LinearLayout) inflater.inflate(
R.layout.capturefragment, container, false);
EdittextTime = (TextView) rr.findViewById(R.id.editText1);
EdittextIntervall = (TextView) rr.findViewById(R.id.EditText01);
text = (TextView) rr.findViewById(R.id.timer);
timeElapsedView = (TextView) rr.findViewById(R.id.pointsCollected);
buttonTimer = (Button) rr.findViewById(R.id.button);
buttonTimer.setOnClickListener(new OnClickListener() {
#Override
public void onClick(View v) {
Fragment fragment = new FindFragment();
android.app.FragmentTransaction ft = getFragmentManager()
.beginTransaction();
switch (v.getId()) {
case R.id.button: {
String text0 = EdittextTime.getText().toString();
point = Integer.valueOf(text0);
long capture = point;
// Upload
String text1 = EdittextIntervall.getText().toString();
int sendinterval = Integer.valueOf(text1);
int sendintervall = sendinterval;
// Timer is self iniciated
startTime = 20000; // value in ms
interval = 1000;
countDownTimer = new MyCountdownTimer(startTime, interval);
text.setText(text.getText() + String.valueOf(startTime));
// ReadData and Parse File of coordinates
readfromFile();
String str1;
str1 = s1.toString();
readfromFile1();
String str2;
str2 = s2.toString();
GeoPoint geopoint = new GeoPoint(Double.parseDouble(str1),
Double.parseDouble(str2));
int AmountPoints = (int) ((startTime / 1000) / point);
if ((sendintervall > 0) && (capture > 0)) {
if (!timerHasStarted) {
countDownTimer.start();
timerHasStarted = true;
buttonTimer.setText("Tracing Started");
while (AmountPoints > 0) {
countDownTimer.onTick(1000, geopoint);
AmountPoints--;
}
} else {
countDownTimer.cancel();
timerHasStarted = false;
buttonTimer.setText("RESET");
}
} else {
System.out.println("Please type in all numbers");
}
}
buttonTimer.setText("Tracing Stopped");
ft.commit();
}
}
});
return rr;
}
// CountDownTimer class
public class MyCountdownTimer extends CountDownTimer {
public MyCountdownTimer(long startTime, long interval) {
super(startTime, interval);
}
public void onTick(long millisUntilFinished, GeoPoint geopoint) {
pointsCollected = (int) ((startTime / 1000) / point);
try {
storeGPS(geopoint);
} catch (IOException e) {
System.out.println("Point can not be written");
e.printStackTrace();
}
}
/**
* Pause the countdown.
*/
public long pause() {
long mStopTimeInFuture = 0;
mPauseTime = mStopTimeInFuture - SystemClock.elapsedRealtime();
boolean mPaused = true;
return mPauseTime;
}
#Override
public void onFinish() {
text.setText("Points are sent!");
timeElapsedView.setText("Points collected: "
+ String.valueOf(pointsCollected));
}
#SuppressLint("SimpleDateFormat")
public void storeGPS(GeoPoint gPt) throws IOException {
String stringbb = gPt.toString();
// SD card Access needed as a lot of points are saved
File mediaDir = new File(Environment.getExternalStorageDirectory()
.getPath());
if (!mediaDir.exists()) {
mediaDir.mkdir();
}
SimpleDateFormat sdf = new SimpleDateFormat("HH:mm:ss ");
String uhrzeit = sdf.format(new Date());
File file = new File(mediaDir, "PointData.txt");
// file.createNewFile();
FileOutputStream fos = new FileOutputStream(file, true);
fos.write(uhrzeit.getBytes());
fos.write(stringbb.getBytes());
fos.write(System.getProperty("line.separator").getBytes());
fos.close();
System.out.println("Your file has been written");
}
#Override
public void onTick(long millisUntilFinished) {
text.setText("Time in seconds remain:" + millisUntilFinished / 1000);
pointsCollected = (int) (startTime / 1000 / point);
timeElapsedView.setText("Points collected: "
+ String.valueOf(pointsCollected));
}
}
}
Here is the output of the written file:
19:26:11 9891183,52113450,0
19:26:11 9891183,52113450,0
19:26:11 9891183,52113450,0
19:26:11 9891183,52113450,0
19:26:11 9891183,52113450,0
19:26:11 9891183,52113450,0
19:26:11 9891183,52113450,0
19:26:11 9891183,52113450,0
19:26:11 9893183,52113450,0
19:26:11 9892283,52113450,0
Best Wishes, Marita
Try this code. Change the schedule method's arguments as per your need:
schedule(TimerTask task, long delay, long period)
import java.util.*;
public class TimerDemo {

    /**
     * Schedules a recurring task that prints every 100 ms after an initial
     * 100 ms delay: schedule(TimerTask task, long delay, long period).
     */
    public static void main(String[] args) {
        // FIX: the work must live in a TimerTask's run() method. The original
        // declared run() on TimerDemo itself (where Timer never invokes it)
        // and referenced an undefined TimerSchedulePeriod class.
        TimerTask tasknew = new TimerTask() {
            @Override
            public void run() {
                System.out.println("timer working");
            }
        };
        Timer timer = new Timer();
        // scheduling the task at interval
        timer.schedule(tasknew, 100, 100);
    }
}
I have found a solution to my problem. Though I am not sure whether this is thread-safe?
try {
storeGPS(geopoint);
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} catch (IOException e) {
System.out.println("Point can not be written");
e.printStackTrace();
}
There is a problem with the code of my Android application.
I tried to assign multiple actions to a single button. Here's the code:
import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
public class MainActivity extends Activity implements OnClickListener {
AudioRecord audioRecord;
private Thread recordingThread = null;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
start_button = (Button) findViewById(R.id.button1);
stop_button = (Button) findViewById(R.id.button2);
start_button.setOnClickListener(this);
stop_button.setOnClickListener(this);
createAudioRecorder();
}
private void createAudioRecorder() {
int sampleRate = 16000;
int channelConfig = AudioFormat.CHANNEL_IN_MONO;
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
int minInternalBufferSize = AudioRecord.getMinBufferSize(sampleRate,
channelConfig, audioFormat);
int internalBufferSize = minInternalBufferSize * 4;
Log.d(TAG, "minInternalBufferSize = " + minInternalBufferSize
+ ", internalBufferSize = " + internalBufferSize
+ ", myBufferSize = " + myBufferSize);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate, channelConfig, audioFormat, internalBufferSize);
}
#Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.button1:
reading = true;
startRecorder();
break;
case R.id.button2:
reading = false;
stopRecorder();
break;
}
}
private void startRecorder() {
recordingThread = new Thread(new Runnable() {
#Override
public void run() {
AudioData();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
protected void AudioData() {
audioRecord.startRecording();
byte data[] = new byte[myBufferSize];
byte[] myBuffer = new byte[myBufferSize];
int readCount = 0;
int totalCount = 0;
while (reading) {
readCount = audioRecord.read(myBuffer, 0, myBufferSize);
data = myBuffer;
totalCount += readCount;
}
}
private void stopRecorder() {
if (null != audioRecord) {
reading = false;
audioRecord.stop();
audioRecord.release();
audioRecord = null;
recordingThread = null;
}
}
#Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
}
When I run this code it works the first time, but not on subsequent attempts.
The logs show that the error (a NullPointerException) is somewhere around here:
audioRecord.startRecording();
Help solve the problem!
When button2 is clicked, stopRecorder() is called, which assigns null to audioRecord. Therefore, the next time button1 is clicked, the new thread that is spawned will throw a NullPointerException when it calls AudioData().
Perhaps you can eliminate createAudioRecorder() and move its logic to the beginning of AudioData().
Also, I would rename AudioData(), which looks like a class name, to recordAudioData(). That would conform better to standard Java naming conventions.
I want to ask a question about improving my frame rate. I am sending frames from an Android device using preview callbacks with a buffer, and receiving them on the server side. I am getting everything fine, but the frame rate is terribly slow. Can you please guide me? I am also doing the YUV-to-JPEG conversion on Android; it works fine, but it makes everything terribly slow.
Thanks
Regards
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.net.Socket;
import java.net.UnknownHostException;
import android.app.Activity;
import android.app.ActivityManager;
import android.app.ActivityManager.RunningServiceInfo;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.hardware.Camera;
import android.hardware.Camera.ErrorCallback;
import android.hardware.Camera.Size;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.hardware.Camera.PreviewCallback;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.FrameLayout;
import android.widget.Toast;
import android.app.Service;
import android.graphics.ImageFormat;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.net.Uri;
import android.os.IBinder;
import android.provider.MediaStore;
import android.view.LayoutInflater;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.net.InetAddress;
import java.sql.Date;
import java.text.SimpleDateFormat;
import java.util.List;
import android.widget.TextView;
public class SdfJuliaActivity extends Activity implements SensorEventListener
{
private static String TAG = "SDFJulia";
/*
* WiFi Methods
* ------------
* Please do not delete any method, just adjust them if necessary.
*
* Problems:
* - if server is not available / running and app tries to connect,
* it crashes the emulator. Works fine on smartphone, though.
*
*/
protected static DataOutputStream os;
protected static Socket socket;
private static String address = "192.168.2.102:4444"; //also default IP address
private static String ipAddress;
private static int port;
//Connect to given ip address on given port
protected static boolean connect(String serverName, int port)
{
Log.d(TAG, "Connecting to " + serverName + ":" + port);
try
{
socket = new Socket(serverName,port);
os = new DataOutputStream(socket.getOutputStream());
}
catch (UnknownHostException e)
{
Log.d(TAG, "Unknown Host Exception while connecting: " + e);
return false;
}
catch (IOException e)
{
Log.d(TAG, "IO Exception while connecting: " + e);
return false;
}
if (!socket.isConnected()) { return false; }
return true;
}
//Close the current connection
protected static void disconnect()
{
if (!socket.isClosed())
{
try { socket.close(); }
catch (IOException e) {}
}
}
//Check if we are connected
protected static boolean connected()
{
//Check if socket is open
if (socket.isClosed())
{ return false; }
//Check if socket is actually connected to a remote host
if (!socket.isConnected())
{ return false; }
//We are connected!
return true;
}
//Send a single frame to the server
//YUV-Image
protected static void sendFrame(byte[] data)
{
//Check if a frame is actually available
if (data != null)
{
try
{
os.writeInt(data.length);
os.write(data, 0, data.length);
os.flush();
}
catch (IOException e) {};
}
}
//Send all sensor data to the server
protected static void sendData()
{
try
{
os.writeFloat(orientation_x);
os.writeFloat(orientation_y);
os.writeFloat(orientation_z);
os.writeDouble(gps_longitude);
os.writeDouble(gps_latitude);
os.writeDouble(gps_altitude);
}
catch (IOException e) {};
}
public static String getAddress()
{ return address; };
/*
* Sensor Stuff
* ------------
*/
SensorManager sensorManager = null;
private static float accelerometer_x;
private static float accelerometer_y;
private static float accelerometer_z;
private static float orientation_x;
private static float orientation_y;
private static float orientation_z;
//New sensor data available
public void onSensorChanged(SensorEvent event)
{
synchronized (this)
{
switch (event.sensor.getType())
{
case Sensor.TYPE_ACCELEROMETER:
accelerometer_x = event.values[0];
accelerometer_y = event.values[1];
accelerometer_z = event.values[2];
break;
case Sensor.TYPE_ORIENTATION:
orientation_x = event.values[0];
orientation_y = event.values[1];
orientation_z = event.values[2];
break;
}
}
}
//Sensor accuracy has changed (unused)
public void onAccuracyChanged(Sensor sensor, int accuracy) {}
private static double gps_latitude;
private static double gps_longitude;
private static double gps_altitude;
private static long gps_lastfix;
/*
* Camera Methods
* --------------
*
*/
private Camera mCamera;
SurfaceView mPreview;
private void startVideo()
{
SurfaceHolder videoCaptureViewHolder = null;
try { mCamera = Camera.open(); }
catch (Exception e) { Log.d(TAG,"Can not open camera. In use?"); };
mCamera.setErrorCallback
(
new ErrorCallback()
{ public void onError(int error, Camera camera) {} }
);
Camera.Parameters parameters = mCamera.getParameters();
final int previewWidth = parameters.getPreviewSize().width;
final int previewHeight = parameters.getPreviewSize().height;
parameters.setPreviewFrameRate(30);
mCamera.setParameters(parameters);
parameters = mCamera.getParameters();
Log.d(TAG,"Format: " + parameters.getPreviewFormat());
Log.d(TAG,"FPS: " + parameters.getPreviewFrameRate());
if (null != mPreview)
{ videoCaptureViewHolder = mPreview.getHolder(); }
try { mCamera.setPreviewDisplay(videoCaptureViewHolder); }
catch (Throwable t) {}
//Get Preview Size to set the data buffers to it
Size previewSize=mCamera.getParameters().getPreviewSize();
//Set the Buffer Size according to Preview Size
int dataBufferSize=(int)(previewSize.height*previewSize.width*
(ImageFormat.getBitsPerPixel(mCamera.getParameters().getPreviewFormat())/8.0));
mCamera.addCallbackBuffer(new byte[dataBufferSize]);
mCamera.addCallbackBuffer(new byte[dataBufferSize]);
mCamera.addCallbackBuffer(new byte[dataBufferSize]);
//This method is called for every preview frame
//we use it to transmit both the current preview frame as well as the sensor data
mCamera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback()
{
int fpsCount = 0;
boolean test = true;
public void onPreviewFrame(byte[] data, Camera camera)
{
//Check if connection is present; if not, try to reconnect
if (!connected())
{ connect(ipAddress, port); }
else
{
//Prediction step XXX
//Log.d(TAG, "Transmitting data.");
//Convert image to YUV
/*YuvImage image = new YuvImage(data,ImageFormat.NV21,previewWidth,previewHeight,null);
Rect rect = new Rect(0,0,previewWidth,previewHeight);
ByteArrayOutputStream oas = new ByteArrayOutputStream();
image.compressToJpeg(rect,100,oas);
byte[] imageJPG = oas.toByteArray();*/
//if (test)
//{ Log.d(TAG,"Length: " + imageJPG.length); test = false; };
//Send the current frame to the server
sendFrame(data);
//Send the corresponding sensor data to the server
sendData();
camera.addCallbackBuffer(data);
}
}
});
try { mCamera.startPreview(); }
catch (Throwable e)
{
mCamera.release();
mCamera = null;
return;
}
}
private void stopVideo()
{
if (mCamera == null)
{ return; }
try
{
mCamera.stopPreview();
mCamera.setPreviewDisplay(null);
mCamera.setPreviewCallback(null);
mCamera.release();
}
catch (IOException e)
{
e.printStackTrace();
return;
}
mCamera = null;
}
/* Input Validation
* ----------------
*/
//Check if user specified address is valid
private static boolean checkAddress(String userinput)
{
String [] part = userinput.split("\\:");
if (part.length != 2)
{ return false; }
if (!validIP(part[0]))
{ return false; }
int i = Integer.parseInt(part[1]);
if (i < 1 || i > 10000)
{ return false; }
return true;
}
//Check for valid IP address
private static boolean validIP (String userinput)
{
String [] part = userinput.split("\\.");
if (part.length != 4)
{
Log.d(TAG,"Invalid ip address length.");
return false;
}
for (String s : part)
{
int i = Integer.parseInt(s);
if (i < 0 || i > 255)
{
Log.d(TAG,"Invalid ip address values.");
return false;
}
}
return true;
}
/*
* Activity Creation
* -----------------
*/
#Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
//Always use portrait view
//Otherwise activity gets destroyed when orientation is changed --> network & video stream is lost
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
setContentView(R.layout.main);
socket = new Socket();
/*
* Sensors
* -------
* This registers our app to receive the latest sensor data.
*/
sensorManager = (SensorManager) getSystemService(SENSOR_SERVICE);
sensorManager.registerListener(this, sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER), sensorManager.SENSOR_DELAY_FASTEST);
sensorManager.registerListener(this, sensorManager.getDefaultSensor(Sensor.TYPE_ORIENTATION), sensorManager.SENSOR_DELAY_FASTEST);
/*
* GPS
* ---
* This registers our app to receive the latest GPS data.
*/
LocationManager locationManager = (LocationManager) this.getSystemService(Context.LOCATION_SERVICE);
LocationListener locationListener = new LocationListener()
{
public void onLocationChanged(Location location)
{
gps_latitude = location.getLatitude();
gps_longitude = location.getLongitude();
gps_altitude = location.getAltitude();
gps_lastfix = System.currentTimeMillis();
}
public void onStatusChanged(String provider, int status, Bundle extras) {}
public void onProviderEnabled(String provider) {}
public void onProviderDisabled(String provider) {}
};
locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 0, 0, locationListener);
final Button serviceButton = (Button) findViewById(R.id.btn_serviceButton);
final EditText ipText = (EditText) findViewById(R.id.text_ipAddress);
ipText.setText(address);
//CameraStuff here first
mPreview = (SurfaceView) findViewById(R.id.cameraView);
SurfaceHolder videoCaptureViewHolder = mPreview.getHolder();
videoCaptureViewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
videoCaptureViewHolder.addCallback(new Callback()
{
public void surfaceDestroyed(SurfaceHolder holder) { stopVideo(); }
public void surfaceCreated(SurfaceHolder holder) {}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}
});
//Click Listener for Button
//starts & stops the service
serviceButton.setOnClickListener(new View.OnClickListener()
{
public void onClick(View v)
{
if (connected())
{
Toast.makeText(getApplicationContext(), "Connection terminated.", Toast.LENGTH_SHORT).show();
serviceButton.setText("Connect");
//Stop the app
stopVideo(); //Stop video & sending data
disconnect(); //Stop network services
}
else
{
address = ipText.getText().toString();
String [] part = ipText.getText().toString().split("\\:");
//Validate input
ipAddress = part[0];
port = Integer.parseInt(part[1]);
//int port = 4444;
if (!checkAddress(address))
{
//Invalid ip address specified
Toast.makeText(getApplicationContext(), "Invalid address specified.", Toast.LENGTH_SHORT).show();
Log.d(TAG,"Invalid address specified.");
}
else
{
if (!connect(ipAddress, port))
{
//Connection failed
Toast.makeText(getApplicationContext(), "Could not connect.", Toast.LENGTH_SHORT).show();
Log.d(TAG,"Could not connect.");
}
else
{
//Connection successful
Log.d(TAG,"Connected.");
Toast.makeText(getApplicationContext(), "Connection successful.", Toast.LENGTH_SHORT).show();
serviceButton.setText("Disconnect");
//Start video & sending data
startVideo();
}
}
}
}
});
}
//Don't do anything if configuration is changed (e.g. phone has been rotated)
#Override
public void onConfigurationChanged(Configuration newConfig) {}
}
You must call getSupportedPreviewFpsRange() on android.hardware.Camera.Parameters, and you will get a list of the supported FPS ranges.
Select the proper one and pass it to setPreviewFpsRange().
All of that must be done before the startPreview() call.
You can refer to the SDK guide.