I am using the JavaCV library to record video on Android. They have provided a sample video-recording activity, but there is some bug in it that I could not figure out; I don't know what I am doing wrong or what is missing that causes it.
package org.bytedeco.javacv.recordactivity;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.PowerManager;
import android.util.Log;
import android.view.Display;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import java.io.IOException;
import java.nio.ShortBuffer;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import static org.bytedeco.javacpp.opencv_core.*;
public class RecordActivity extends Activity implements OnClickListener {
private final static String CLASS_LABEL = "RecordActivity";
private final static String LOG_TAG = CLASS_LABEL;
private PowerManager.WakeLock mWakeLock;
private String ffmpeg_link = "/mnt/sdcard/stream.flv";
long startTime = 0;
boolean recording = false;
private volatile FFmpegFrameRecorder recorder;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 320;
private int imageHeight = 240;
private int frameRate = 30;
/* audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
volatile boolean runAudioThread = true;
/* video data getting thread */
private Camera cameraDevice;
private CameraView cameraView;
private IplImage yuvIplimage = null;
/* layout setting */
private final int bg_screen_bx = 232;
private final int bg_screen_by = 128;
private final int bg_screen_width = 700;
private final int bg_screen_height = 500;
private final int bg_width = 1123;
private final int bg_height = 715;
private final int live_width = 640;
private final int live_height = 480;
private int screenWidth, screenHeight;
private Button btnRecorderControl;
/** The number of seconds in the continuous record loop (or 0 to disable loop). */
final int RECORD_LENGTH = 10;
IplImage[] images;
long[] timestamps;
ShortBuffer[] samples;
int imagesIndex, samplesIndex;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
setContentView(R.layout.main);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
initLayout();
}
@Override
protected void onResume() {
super.onResume();
if (mWakeLock == null) {
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
}
}
@Override
protected void onPause() {
super.onPause();
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
protected void onDestroy() {
super.onDestroy();
recording = false;
if (cameraView != null) {
cameraView.stopPreview();
}
if(cameraDevice != null) {
cameraDevice.stopPreview();
cameraDevice.release();
cameraDevice = null;
}
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
private void initLayout() {
/* get size of screen */
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
screenWidth = display.getWidth();
screenHeight = display.getHeight();
RelativeLayout.LayoutParams layoutParam = null;
LayoutInflater myInflate = null;
myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
RelativeLayout topLayout = new RelativeLayout(this);
setContentView(topLayout);
LinearLayout preViewLayout = (LinearLayout) myInflate.inflate(R.layout.main, null);
layoutParam = new RelativeLayout.LayoutParams(screenWidth, screenHeight);
topLayout.addView(preViewLayout, layoutParam);
/* add control button: start and stop */
btnRecorderControl = (Button) findViewById(R.id.recorder_control);
btnRecorderControl.setText("Start");
btnRecorderControl.setOnClickListener(this);
/* add camera view */
int display_width_d = (int) (1.0 * bg_screen_width * screenWidth / bg_width);
int display_height_d = (int) (1.0 * bg_screen_height * screenHeight / bg_height);
int prev_rw, prev_rh;
if (1.0 * display_width_d / display_height_d > 1.0 * live_width / live_height) {
prev_rh = display_height_d;
prev_rw = (int) (1.0 * display_height_d * live_width / live_height);
} else {
prev_rw = display_width_d;
prev_rh = (int) (1.0 * display_width_d * live_height / live_width);
}
layoutParam = new RelativeLayout.LayoutParams(prev_rw, prev_rh);
layoutParam.topMargin = (int) (1.0 * bg_screen_by * screenHeight / bg_height);
layoutParam.leftMargin = (int) (1.0 * bg_screen_bx * screenWidth / bg_width);
cameraDevice = Camera.open();
Log.i(LOG_TAG, "cameara open");
cameraView = new CameraView(this, cameraDevice);
topLayout.addView(cameraView, layoutParam);
Log.i(LOG_TAG, "cameara preview start: OK");
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() {
Log.w(LOG_TAG,"init recorder");
if (RECORD_LENGTH > 0) {
imagesIndex = 0;
images = new IplImage[RECORD_LENGTH * frameRate];
timestamps = new long[images.length];
for (int i = 0; i < images.length; i++) {
images[i] = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
timestamps[i] = -1;
}
} else if (yuvIplimage == null) {
yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
Log.i(LOG_TAG, "create yuvIplimage");
}
Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link);
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("flv");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);
Log.i(LOG_TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
}
public void startRecording() {
initRecorder();
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
runAudioThread = false;
try {
audioThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
audioRecordRunnable = null;
audioThread = null;
if (recorder != null && recording) {
if (RECORD_LENGTH > 0) {
Log.v(LOG_TAG,"Writing frames");
try {
int firstIndex = imagesIndex % images.length;
int lastIndex = (imagesIndex - 1) % images.length;
if (imagesIndex <= images.length) {
firstIndex = 0;
lastIndex = imagesIndex - 1;
}
if ((startTime = timestamps[lastIndex] - RECORD_LENGTH * 1000000L) < 0) {
startTime = 0;
}
if (lastIndex < firstIndex) {
lastIndex += images.length;
}
for (int i = firstIndex; i <= lastIndex; i++) {
long t = timestamps[i % timestamps.length] - startTime;
if (t >= 0) {
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(images[i % images.length]);
}
}
firstIndex = samplesIndex % samples.length;
lastIndex = (samplesIndex - 1) % samples.length;
if (samplesIndex <= samples.length) {
firstIndex = 0;
lastIndex = samplesIndex - 1;
}
if (lastIndex < firstIndex) {
lastIndex += samples.length;
}
for (int i = firstIndex; i <= lastIndex; i++) {
recorder.record(samples[i % samples.length]);
}
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
recording = false;
Log.v(LOG_TAG,"Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
ShortBuffer audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
if (RECORD_LENGTH > 0) {
samplesIndex = 0;
samples = new ShortBuffer[RECORD_LENGTH * sampleAudioRateInHz * 2 / bufferSize + 1];
for (int i = 0; i < samples.length; i++) {
samples[i] = ShortBuffer.allocate(bufferSize);
}
} else {
audioData = ShortBuffer.allocate(bufferSize);
}
Log.d(LOG_TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while (runAudioThread) {
if (RECORD_LENGTH > 0) {
audioData = samples[samplesIndex++ % samples.length];
audioData.position(0).limit(0);
}
//Log.v(LOG_TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
audioData.limit(bufferReadResult);
if (bufferReadResult > 0) {
Log.v(LOG_TAG,"bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
// Why? Good question...
if (recording) {
if (RECORD_LENGTH <= 0) try {
recorder.record(audioData);
//Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
Log.v(LOG_TAG,"AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if (audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(LOG_TAG,"audioRecord released");
}
}
}
//---------------------------------------------
// camera thread, gets and encodes video data
//---------------------------------------------
class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraView(Context context, Camera camera) {
super(context);
Log.w("camera","camera view");
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.v(LOG_TAG,"Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate);
Camera.Parameters camParams = mCamera.getParameters();
camParams.setPreviewSize(imageWidth, imageHeight);
Log.v(LOG_TAG,"Preview Framerate: " + camParams.getPreviewFrameRate());
camParams.setPreviewFrameRate(frameRate);
mCamera.setParameters(camParams);
startPreview();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch (RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
public void startPreview() {
if (!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}
public void stopPreview() {
if (isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
startTime = System.currentTimeMillis();
return;
}
if (RECORD_LENGTH > 0) {
int i = imagesIndex++ % images.length;
yuvIplimage = images[i];
timestamps[i] = 1000 * (System.currentTimeMillis() - startTime);
}
/* get video data */
if (yuvIplimage != null && recording) {
yuvIplimage.getByteBuffer().put(data);
if (RECORD_LENGTH <= 0) try {
Log.v(LOG_TAG,"Writing Frame");
long t = 1000 * (System.currentTimeMillis() - startTime);
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(yuvIplimage);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
@Override
public void onClick(View v) {
if (!recording) {
startRecording();
Log.w(LOG_TAG, "Start Button Pushed");
btnRecorderControl.setText("Stop");
} else {
// This will trigger the audio recording loop to stop and then set isRecorderStart = false;
stopRecording();
Log.w(LOG_TAG, "Stop Button Pushed");
btnRecorderControl.setText("Start");
}
}
}
I am getting this error
The type org.bytedeco.javacpp.avutil$AVFrame cannot be resolved. It is indirectly referenced from required .class files
at the following line in the above code
if (RECORD_LENGTH <= 0) try {
recorder.record(audioData);
//Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
}
at recorder.record(audioData). I don't know what I am doing wrong here. I'm new to JavaCV. Any help will be appreciated.
The error message means that a jar containing the class org.bytedeco.javacpp.avutil$AVFrame cannot be found on the Java classpath. I would suggest checking the classpath and the version of javacv you are using; maybe there were bugs that have since been fixed, you never know. There are also some similar discussions on javacv's GitHub issue tracker, which may or may not be connected to your problem.
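If you want to verify at runtime which javacpp classes actually made it into your APK, here is a small diagnostic sketch (the class name is taken from the error message above; everything else is arbitrary):
// Call this from onCreate() to check whether the jar containing
// org.bytedeco.javacpp.avutil is really on the runtime classpath.
private void checkJavaCppClasspath() {
    try {
        Class.forName("org.bytedeco.javacpp.avutil$AVFrame");
        Log.i(LOG_TAG, "avutil$AVFrame resolved fine");
    } catch (ClassNotFoundException e) {
        Log.e(LOG_TAG, "avutil$AVFrame is not on the classpath", e);
    }
}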
What makes me wonder is why you are putting your class in the "org.bytedeco.javacv.recordactivity" package. "org.bytedeco.javacv" is javacv's own package; you just don't put your own classes into a package structure that does not belong to you. It can only cause you trouble in the future, for instance if javacv at some point decides to create a package and class with the same name.
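For example, declare a namespace of your own (com.example.recorder below is just a placeholder) and only import the library:
package com.example.recorder; // your own namespace, not org.bytedeco.*
import org.bytedeco.javacv.FFmpegFrameRecorder; // library classes stay in theirs
public class RecordActivity extends Activity implements OnClickListener {
    // ... same code as in the question ...
}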
Related
I have a problem. I capture sound from the device's microphone in Android Studio and record it as PCM. Since the recording is raw PCM, I cannot play it on the device. I'm trying to convert it to MP3 for playback. I tried different libraries but couldn't get any of them to work. Can you help me?
My recording page code:
import android.Manifest;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.Toast;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
public class MainActivity extends AppCompatActivity {
private static final int AUDIO_SOURCE = MediaRecorder.AudioSource.MIC;
private static final int SAMPLE_RATE = 44100;
private static final int CHANNEL_CONFIG = AudioFormat.CHANNEL_IN_MONO;
private static final int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
private static final int BUFFER_SIZE = AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNEL_CONFIG, AUDIO_FORMAT);
private static final int REQUEST_RECORD_AUDIO_PERMISSION = 200;
private AudioRecord audioRecord;
private boolean isRecording = false;
private Thread recordingThread;
private String filePath;
private static final int FILTER_LOW_FREQ = 100;
private static final int FILTER_HIGH_FREQ = 20000;
private int bufferSize;
private int sampleRate;
private int channelConfig;
private int audioFormat;
private static final int MAX_VOLUME = 32767;
final int MAX_FREQ = SAMPLE_RATE / 2;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
}
public void but_recPressed(View V) {
startRecording();
Toast.makeText(getApplicationContext(), "Kayıt Başlatıldı", Toast.LENGTH_SHORT).show();
}
public void but_stopPressed(View V) {
stopRecording();
Toast.makeText(this, "Kayıt Durduruldu", Toast.LENGTH_SHORT).show();
File file = new File(filePath);
if (file.exists()) {
Toast.makeText(this, "Kaydedilen Dosya Yolu: " + filePath, Toast.LENGTH_LONG).show();
} else {
Toast.makeText(this, "Kaydedilen Dosya Bulunamadı!", Toast.LENGTH_LONG).show();
}
}
private void startRecording() {
if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.RECORD_AUDIO}, REQUEST_RECORD_AUDIO_PERMISSION);
} else {
audioRecord = new AudioRecord(AUDIO_SOURCE, SAMPLE_RATE, CHANNEL_CONFIG, AUDIO_FORMAT, BUFFER_SIZE);
audioRecord.startRecording();
isRecording = true;
new Thread(new Runnable() {
@Override
public void run() {
writeAudioDataToFile();
short[] buffer = new short[BUFFER_SIZE]; // the bufferSize field is never initialized; use the static constant
while (isRecording) {
Toast.makeText(getApplicationContext(), "döngüdeyim", Toast.LENGTH_LONG).show();
int read = audioRecord.read(buffer, 0, BUFFER_SIZE);
for (int i = 0; i < read; i++) {
Toast.makeText(getApplicationContext(), "Fordayım", Toast.LENGTH_LONG).show();
int frequency = buffer[i];
if (frequency >= FILTER_LOW_FREQ && frequency <= FILTER_HIGH_FREQ) {
int volume = (int) (frequency / MAX_FREQ * MAX_VOLUME);
buffer[i] = (short) (buffer[i] * volume);
// Do something with the filtered frequency
Toast.makeText(getApplicationContext(), "Frekans Yakalandı", Toast.LENGTH_LONG).show();
}
else{
int volume = (int) (frequency / MAX_FREQ * MAX_VOLUME);
buffer[i] = (short) (buffer[i] * volume);
Toast.makeText(getApplicationContext(), "Frekans Yakalanmadı", Toast.LENGTH_LONG).show();
}
}
}
}
}).start();
}
}
private void stopRecording() {
try {
isRecording = false;
audioRecord.stop();
audioRecord.release();
audioRecord = null;
recordingThread = null;
} catch (Exception e) {
e.printStackTrace();
Log.e("MainActivity", "Error while recording audio: " + e.getMessage());
}
}
public void but_folderPressed(View v) {
Intent intent = new Intent(this, list.class);
startActivity(intent);
}
private void writeAudioDataToFile() {
byte data[] = new byte[BUFFER_SIZE];
filePath = getExternalCacheDir().getAbsolutePath();
filePath += "/" + System.currentTimeMillis() + ".sesimvar" +".pcm";
try (FileOutputStream os = new FileOutputStream(filePath)) {
while (isRecording) {
int read = audioRecord.read(data, 0, BUFFER_SIZE);
if (AudioRecord.ERROR_INVALID_OPERATION != read) {
os.write(data);
}
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
I tried FFmpeg and the LAME library. I couldn't add the LAME library properly, and FFmpeg, on the other hand, produced errors.
Did How to convert .pcm file to .wav or .mp3? not help you out?
For the future, it would help if you posted the stack trace with the thrown exception or error.
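In case that link goes stale: the usual fix for "recorded PCM won't play" is to prepend a 44-byte RIFF/WAVE header so ordinary players accept the file. Below is a minimal sketch, assuming 16-bit mono PCM at the 44100 Hz used above; PcmToWav is a hypothetical helper class, not an Android API.
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.StandardCharsets;
public final class PcmToWav {
    // Prepends a canonical 44-byte WAV header to raw 16-bit mono PCM.
    public static void convert(String pcmPath, String wavPath, int sampleRate) throws IOException {
        try (FileInputStream in = new FileInputStream(pcmPath);
             FileOutputStream out = new FileOutputStream(wavPath)) {
            int channels = 1, bits = 16;
            int byteRate = sampleRate * channels * bits / 8;
            long dataLen = in.getChannel().size();
            ByteBuffer h = ByteBuffer.allocate(44).order(ByteOrder.LITTLE_ENDIAN);
            h.put("RIFF".getBytes(StandardCharsets.US_ASCII)).putInt((int) (dataLen + 36));
            h.put("WAVE".getBytes(StandardCharsets.US_ASCII));
            h.put("fmt ".getBytes(StandardCharsets.US_ASCII));
            h.putInt(16);                              // PCM format-chunk size
            h.putShort((short) 1);                     // audio format 1 = linear PCM
            h.putShort((short) channels);
            h.putInt(sampleRate).putInt(byteRate);
            h.putShort((short) (channels * bits / 8)); // block align
            h.putShort((short) bits);
            h.put("data".getBytes(StandardCharsets.US_ASCII)).putInt((int) dataLen);
            out.write(h.array());
            byte[] buf = new byte[8192];
            for (int n; (n = in.read(buf)) > 0; ) {
                out.write(buf, 0, n);                  // copy the PCM payload unchanged
            }
        }
    }
}
Alternatively, AudioTrack can play the raw PCM directly if you pass it the same sample rate, channel mask, and encoding used for recording; the WAV wrapper is only needed for MediaPlayer and ordinary file players.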
I have a custom camera application, and everything works fine. But whenever the app is paused (when onPause or onDestroy is called) the camera is released, and afterwards, when onResume is called and the capture button is clicked to take an image, my application crashes. How do I fix this? Please help me, thanks in advance.
CameraActivity Code
package com.example.skmishra.plates.Activities;
import android.app.ActionBar;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.hardware.Camera;
import android.hardware.SensorManager;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.MotionEvent;
import android.view.OrientationEventListener;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.ZoomControls;
import com.example.skmishra.plates.Asyncs.CameraAsync;
import com.example.skmishra.plates.CameraHandler;
import com.example.skmishra.plates.Library.Fonts;
import com.example.skmishra.plates.R;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
/**
* Created by skmishra on 12/28/2015.
*/
public class camera extends Activity {
private static final int RESULT_LOAD_IMAGE = 200 ;
private Camera mCamera=null;
private CameraHandler surface_view;
public static final int MEDIA_TYPE_IMAGE = 1;
public static final int MEDIA_TYPE_VIDEO = 2;
public static final String TAG = "Aloo";
int toRotate = 90;
public int currentCameraID = 0;
OrientationEventListener myOrientationEventListener;
private ZoomControls zoomControls;
private double mDist;
Boolean imageSwitchClicked = false;
Boolean mShowFlash = false;
ImageView mSwitch_cam;
ImageView mFlashBut;
FrameLayout preview;
CameraAsync mCamAsync;
ImageView imageGallery;
TextView raleway;
TextView headerCameraText;
Fonts mFonts;
int permCode=4;
Camera.Parameters params;
String recievedType=null;
@Override
protected void onCreate(Bundle savedInstanceState) {
Log.e("I Called Thus "," cda");
mCamAsync=new CameraAsync(this);
mCamAsync.execute();
super.onCreate(savedInstanceState);
setContentView(R.layout.camera);
imageGallery = (ImageView) findViewById(R.id.select_gallery);
mFonts = new Fonts();
preview = (FrameLayout) findViewById(R.id.camera_preview);
mFlashBut = (ImageView) findViewById(R.id.flash);
mSwitch_cam = (ImageView) findViewById(R.id.white_switch);
raleway = (TextView) findViewById(R.id.textView2);
headerCameraText = (TextView) findViewById(R.id.imageHead);
// mFonts.setRalewayBold(this, headerCameraText);
Intent gets = getIntent();
recievedType = gets.getExtras().getString("recievedCameraPurpose");
handleHeaderText(recievedType);
mFonts.setRalewayBold(this, raleway);
myOrientationEventListener
= new OrientationEventListener(this, SensorManager.SENSOR_DELAY_NORMAL) {
@Override
public void onOrientationChanged(int arg0) {
int rotation = arg0;
if (rotation > 340) {
if (currentCameraID == 0) {
toRotate = 90;
} else {
toRotate =270;
Log.e("POSITION_TITLT", "-> Potrait Front camera");
}
} else if (rotation < 80 && rotation > 30) {
toRotate = 180;
Log.e("POSITION_TILT", "-> Landscape Right " + rotation);
} else if (rotation < 280 && rotation > 240) {
toRotate = 0;
Log.e("POSITION_TILT", "-> Landscape Left " + rotation);
}
}
};
if (myOrientationEventListener.canDetectOrientation()) {
myOrientationEventListener.enable();
} else {
Toast.makeText(this, "Can't DetectOrientation", Toast.LENGTH_LONG).show();
finish();
}
}
private boolean checkifCamera(Context context) {
if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)) {
return true;
} else {
return false;
}
}
public Camera getCameraInstance() {
Camera c = null;
try {
releaseCameraAndPreview();
c = Camera.open();
} catch (Exception e) {
Toast.makeText(this, "Print error" + e.getMessage(), Toast.LENGTH_LONG).show();
}
return c;
}
public void onCompleteInstanceCameraAysnc(Camera camera)
{
mCamera = camera;
surface_view = new CameraHandler(this, mCamera);
params = mCamera.getParameters();
preview.addView(surface_view);
set_image_gallery();
}
public void switchC(View view) {
if (!imageSwitchClicked) {
mSwitch_cam.setAlpha(1.0f);
imageSwitchClicked = true;
} else {
mSwitch_cam.setAlpha(0.5f);
imageSwitchClicked = false;
}
setCameraID();
mCamera = surface_view.switchCamera();
params=mCamera.getParameters();
}
public void flash_onOf(View view) {
if (!mShowFlash) {
params.setFlashMode(Camera.Parameters.FLASH_MODE_ON);
mFlashBut.setAlpha(1.0f);
mShowFlash = true;
} else {
params.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
mFlashBut.setAlpha(0.5f);
mShowFlash = false;
}
}
private void releaseCameraAndPreview() {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.lock();
mCamera.release();
mCamera=null;
}
else
{
Log.e("Cert","Lerts");
}
}
@Override
protected void onResume() {
super.onResume();
}
@Override
protected void onDestroy() {
Log.e("LLL", "Dessssdccc");
super.onDestroy();
try {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.lock();
myOrientationEventListener.disable();
mCamera.release();
mCamera=null;
permCode=15;
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
protected void onPause() {
Log.e("LLL", "Dessssdccc");
super.onPause();
try {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.lock();
myOrientationEventListener.disable();
mCamera.release();
mCamera=null;
permCode=15;
} catch (Exception e) {
e.printStackTrace();
}
}
public void takePH(View view) {
if(mShowFlash && !imageSwitchClicked)
{
params.setFlashMode(Camera.Parameters.FLASH_MODE_ON);
}
params.set("rotation", toRotate);
mCamera.setParameters(params);
mCamera.takePicture(null, null, mPicture);
}
Camera.PictureCallback mPicture = new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
if (pictureFile == null) {
Log.d(TAG, "Error creating media file, check storage permissions: ");
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.close();
} catch (FileNotFoundException e) {
Log.d(TAG, "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d(TAG, "Error accessing file: " + e.getMessage());
}
Intent i=new Intent(getApplicationContext(),ShowOut.class);
i.putExtra("purpose",recievedType);
i.putExtra("img-url",pictureFile.toString());
startActivity(i);
}
};
/**
* Create a file Uri for saving an image or video
*/
private static Uri getOutputMediaFileUri(int type) {
return Uri.fromFile(getOutputMediaFile(type));
}
/**
* Create a File for saving an image or video
*/
private static File getOutputMediaFile(int type) {
// To be safe, you should check that the SDCard is mounted
// using Environment.getExternalStorageState() before doing this.
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES), "Plates");
// This location works best if you want the created images to be shared
// between applications and persist after your app has been uninstalled.
// Create the storage directory if it does not exist
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
Log.d("MyCameraApp", "failed to create directory");
return null;
}
}
// Create a media file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
File mediaFile;
if (type == MEDIA_TYPE_IMAGE) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"IMG_" + timeStamp + ".jpg");
} else if (type == MEDIA_TYPE_VIDEO) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"VID_" + timeStamp + ".mp4");
} else {
return null;
}
return mediaFile;
}
public void handleHeaderText(String type) {
Log.e("Type",type);
headerCameraText.setText("");
if (type.equals("ADD_COVER_PLATES")) {
headerCameraText.setText("Take a cover image for your plate");
}
else if(type.equals("ADD_PROFILE_USER"))
{
imageGallery.setVisibility(View.GONE);
}
else if(type.equals("PLATE_UPLOAD_SINGLETON")) {
headerCameraText.setText("Click an image for a plate");
}
}
public void setCameraID() {
if (currentCameraID == Camera.CameraInfo.CAMERA_FACING_BACK) {
currentCameraID = Camera.CameraInfo.CAMERA_FACING_FRONT;
toRotate = 270;
} else {
currentCameraID = Camera.CameraInfo.CAMERA_FACING_BACK;
toRotate = 90;
}
}
@Override
public boolean onTouchEvent(MotionEvent event) {
// Get the pointer ID
Camera.Parameters params = mCamera.getParameters();
int action = event.getAction();
if (event.getPointerCount() > 1) {
// handle multi-touch events
if (action == MotionEvent.ACTION_POINTER_DOWN) {
mDist = getFingerSpacing(event);
} else if (action == MotionEvent.ACTION_MOVE && params.isZoomSupported()) {
mCamera.cancelAutoFocus();
handleZoom(event, params);
}
} else {
// handle single touch events
if (action == MotionEvent.ACTION_UP) {
handleFocus(event, params);
}
}
return true;
}
private void handleZoom(MotionEvent event, Camera.Parameters params) {
int maxZoom = params.getMaxZoom();
int zoom = params.getZoom();
double newDist = getFingerSpacing(event);
if (newDist > mDist) {
//zoom in
if (zoom < maxZoom)
zoom++;
} else if (newDist < mDist) {
//zoom out
if (zoom > 0)
zoom--;
}
mDist = newDist;
params.setZoom(zoom);
mCamera.setParameters(params);
}
public void handleFocus(MotionEvent event, Camera.Parameters params) {
int pointerId = event.getPointerId(0);
int pointerIndex = event.findPointerIndex(pointerId);
// Get the pointer's current position
float x = event.getX(pointerIndex);
float y = event.getY(pointerIndex);
List<String> supportedFocusModes = params.getSupportedFocusModes();
if (supportedFocusModes != null && supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
mCamera.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean b, Camera camera) {
// currently set to auto-focus on single touch
}
});
}
}
/**
* Determine the space between the first two fingers
*/
private double getFingerSpacing(MotionEvent event) {
// ...
float x = event.getX(0) - event.getX(1);
float y = event.getY(0) - event.getY(1);
double pres;
pres = Math.sqrt(x * x + y * y);
return pres;
}
public void set_image_gallery() {
// Find the last picture
String[] projection = new String[]{
MediaStore.Images.ImageColumns._ID,
MediaStore.Images.ImageColumns.DATA,
MediaStore.Images.ImageColumns.BUCKET_DISPLAY_NAME,
MediaStore.Images.ImageColumns.DATE_TAKEN,
MediaStore.Images.ImageColumns.MIME_TYPE
};
final Cursor cursor = getContentResolver()
.query(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, projection, null,
null,MediaStore.Images.ImageColumns._ID + " DESC");
// Put it in the image view
if (cursor.moveToFirst()) {
String imageLocation = cursor.getString(1);
File imageFile = new File(imageLocation);
if (imageFile.exists()) { // TODO: is there a better way to do this?
Bitmap bm=decodeFile(imageFile);
imageGallery.setImageBitmap(bm);
}
}
cursor.close();
}
public Bitmap decodeFile(File f) {
try {
//Decode image size
BitmapFactory.Options o = new BitmapFactory.Options();
o.inJustDecodeBounds = true;
BitmapFactory.decodeStream(new FileInputStream(f), null, o);
//The new size we want to scale to
final int REQUIRED_SIZE = 490;
//Find the correct scale value. It should be the power of 2.
int scale = 1;
while (o.outWidth / scale / 2 >= REQUIRED_SIZE && o.outHeight / scale / 2 >= REQUIRED_SIZE)
scale *= 2;
//Decode with inSampleSize
BitmapFactory.Options o2 = new BitmapFactory.Options();
o2.inSampleSize = scale;
return BitmapFactory.decodeStream(new FileInputStream(f), null, o2);
} catch (FileNotFoundException e) {
}
return null;
}
public void imagePick(View view)
{
Intent photoPickerIntent = new Intent(Intent.ACTION_PICK);
photoPickerIntent.setType("image/*");
startActivityForResult(photoPickerIntent, RESULT_LOAD_IMAGE);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == RESULT_LOAD_IMAGE && resultCode == RESULT_OK && null != data) {
Uri selectedImage = data.getData();
String[] filePathColumn = {MediaStore.Images.Media.DATA};
Cursor cursor = getContentResolver().query(selectedImage,
filePathColumn, null, null, null);
cursor.moveToFirst();
int columnIndex = cursor.getColumnIndex(filePathColumn[0]);
String picturePath = cursor.getString(columnIndex);
cursor.close();
Intent transfer=null;
if(recievedType.equals("ADD_COVER_PLATES")) {
transfer = new Intent(this, create_plates.class);
}
else if(recievedType.equals("PLATE_UPLOAD_SINGLETON"))
{
transfer=new Intent(this,plate_select_upload.class);
}
transfer.putExtra("imagUrl",picturePath);
startActivity(transfer);
}
}
}
Camera Handler Code
package com.example.skmishra.plates;
import android.content.Context;
import android.hardware.Camera;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import java.io.IOException;
/**
* Created by skmishra on 12/28/2015.
*/
public class CameraHandler extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder mHolder;
private Camera mCamera=null;
public int currentCameraID=0;
public CameraHandler(Context context,Camera camera) {
super(context);
mCamera=camera;
mHolder=getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_GPU);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
if(mCamera==null)
{
mCamera=Camera.open();
}
mCamera.setPreviewDisplay(holder);
Camera.Parameters p = mCamera.getParameters();
}
catch (IOException e)
{
Log.d("--DS", "Error setting camera preview: " + e.getMessage());
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
fixOr();
if(mHolder.getSurface()==null)
{
return;
}
if (mHolder.getSurface() == null){
// preview surface does not exist
return;
}
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
} catch (Exception e){
Log.d("--DS", "Error starting camera preview: " + e.getMessage());
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
// TODO Auto-generated method stub
mCamera.release();
mCamera = null;
}
public void fixOr()
{
mCamera.stopPreview();
mCamera.setDisplayOrientation(90);
mCamera.startPreview();
}
public Camera switchCamera() {
mCamera.stopPreview();
mCamera.release();
if(currentCameraID==Camera.CameraInfo.CAMERA_FACING_BACK)
{
currentCameraID = Camera.CameraInfo.CAMERA_FACING_FRONT;
}
else
{
currentCameraID=Camera.CameraInfo.CAMERA_FACING_BACK;
}
mCamera=Camera.open(currentCameraID);
fixOr();
try {
mCamera.setPreviewDisplay(mHolder);
} catch (IOException e) {
e.printStackTrace();
}
mCamera.startPreview();
return mCamera;
}
}
** UPDATE **
StackTrace
Process: com.example.skmishra.plates, PID: 10575
java.lang.RuntimeException: Fail to connect to camera service
at android.hardware.Camera.<init>(Camera.java:545)
at android.hardware.Camera.open(Camera.java:403)
at com.example.skmishra.plates.CameraHandler.surfaceCreated(CameraHandler.java:35)
at android.view.SurfaceView.updateWindow(SurfaceView.java:599)
at android.view.SurfaceView.onWindowVisibilityChanged(SurfaceView.java:243)
at android.view.View.dispatchWindowVisibilityChanged(View.java:9034)
at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:1275)
at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:1275)
at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:1275)
at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:1275)
at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:1275)
at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:1319)
at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1062)
at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:5873)
at android.view.Choreographer$CallbackRecord.run(Choreographer.java:767)
at android.view.Choreographer.doCallbacks(Choreographer.java:580)
at android.view.Choreographer.doFrame(Choreographer.java:550)
at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:753)
at android.os.Handler.handleCallback(Handler.java:739)
at android.os.Handler.dispatchMessage(Handler.java:95)
at android.os.Looper.loop(Looper.java:135)
at android.app.ActivityThread.main(ActivityThread.java:5753)
at java.lang.reflect.Method.invoke(Native Method)
at java.lang.reflect.Method.invoke(Method.java:372)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1405)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1200)
After a lot of research, I finally found out what the problem was. The problem was with the FrameLayout: I had to remove it in onPause and recreate it in onResume.
@Override
protected void onResume() {
super.onResume();
mCamAsync = new CameraAsync(this);//Async task to get the camera instance
mCamAsync.execute();
}
@Override
protected void onPause() {
super.onPause();
releaseCameraAndPreview();
preview = (FrameLayout) findViewById(R.id.camera_preview);
preview.removeViewAt(0);
}
** EDIT **
I also removed the execution of CameraAsync from onCreate, which means I instantiate the camera only in onResume.
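So onCreate now only does the view setup; a sketch of what mine looks like after the change (the findViewById calls and intent handling stay exactly as before):
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.camera);
    // ... findViewById / font / intent-extra setup unchanged ...
    // No CameraAsync here any more: onResume() starts it, so the camera
    // is (re)acquired every time the activity returns to the foreground.
}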
I'm trying to convert 3GP to MP3 using LAME. I get a byte buffer after decoding the file with the built-in Android decoder. After this I write it into a raw file with an OutputStream and then create the MP3 from it using LAME. But it doesn't work; there is only noise in the file. Here is my code. Thanks for any help in advance.
public class MainActivityLame extends Activity {
static {
System.loadLibrary("mp3lame");
}
private native void initEncoder(int numChannels, int sampleRate, int bitRate, int mode, int quality);
private native void destroyEncoder();
private native int encodeFile(String sourcePath, String targetPath);
public static final int NUM_CHANNELS = 1;
public static final int SAMPLE_RATE = 44100;
public static final int BITRATE = 128;
public static final int MODE = 1;
public static final int QUALITY = 2;
private AudioRecord mRecorder;
private short[] mBuffer;
private File mRawFile;
private File mEncodedFile;
private TextView mTextViewFile;
private String strFile;
private AudioTrack audioTrack;
private int mChannels;
private ShortBuffer mDecodedSamples;
private ByteBuffer mDecodedBytes;
private int mFileSize;
private int bufferSize;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
mTextViewFile = (TextView) findViewById(R.id.textViewFile);
initRecorder();
initEncoder(NUM_CHANNELS, SAMPLE_RATE, BITRATE, MODE, QUALITY);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
public void onClickFolder(View view) throws IOException {
Intent questionIntent = new Intent(MainActivityLame.this, MyListActivity.class);
startActivityForResult(questionIntent, 1);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == RESULT_OK) {
String itemName = data.getStringExtra(MyListActivity.ITEM);
mTextViewFile.setText(itemName);
} else {
mTextViewFile.setText("");
}
}
@Override
public void onDestroy() {
mRecorder.release();
destroyEncoder();
super.onDestroy();
}
private void initRecorder() {
bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
mBuffer = new short[bufferSize];
mRecorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize);
}
private File getFile(final String suffix) {
Time time = new Time();
time.setToNow();
return new File(Environment.getExternalStorageDirectory(), time.format("%Y%m%d%H%M%S") + "." + suffix);
}
public void onClickExample(View view) throws IOException { // setDataSource(String) throws IOException
strFile = mTextViewFile.getText().toString().trim();
MediaExtractor extractor = new MediaExtractor();
MediaFormat format = null;
extractor.setDataSource(strFile);
int numTracks = extractor.getTrackCount();
for (int i = 0; i < numTracks; ++i) {
format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("audio/")) {
extractor.selectTrack(i);
break;
}
}
MediaCodec codec = MediaCodec.createDecoderByType(format.getString(MediaFormat.KEY_MIME));
codec.configure(format, null, null, 0);
codec.start();
int decodedSamplesSize = 0;
byte[] decodedSamples = null;
ByteBuffer[] inputBuffers = codec.getInputBuffers();
ByteBuffer[] outputBuffers = codec.getOutputBuffers();
int sample_size;
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
long presentation_time;
int tot_size_read = 0;
boolean done_reading = false;
mDecodedBytes = ByteBuffer.allocate(1 << 20);
while (true) {
int inputBufferId = codec.dequeueInputBuffer(100);
if (!done_reading && inputBufferId >= 0) {
sample_size = extractor.readSampleData(inputBuffers[inputBufferId], 0);
if (sample_size < 0) {
codec.queueInputBuffer(inputBufferId, 0, 0, -1, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
done_reading = true;
} else {
presentation_time = extractor.getSampleTime();
codec.queueInputBuffer(inputBufferId, 0, sample_size, presentation_time, 0);
extractor.advance();
tot_size_read += sample_size;
}
}
int outputBufferId = codec.dequeueOutputBuffer(info, 100);
if (outputBufferId >= 0 && info.size > 0) {
if (decodedSamplesSize < info.size) {
decodedSamplesSize = info.size;
decodedSamples = new byte[decodedSamplesSize];
}
outputBuffers[outputBufferId].get(decodedSamples, 0, info.size);
outputBuffers[outputBufferId].clear();
if (mDecodedBytes.remaining() < info.size) {
int position = mDecodedBytes.position();
int newSize = (int) ((position * (1.0 * mFileSize / tot_size_read)) * 1.2);
if (newSize - position < info.size + 5 * (1 << 20)) {
newSize = position + info.size + 5 * (1 << 20);
}
ByteBuffer newDecodedBytes = null;
int retry = 10;
while (retry > 0) {
try {
newDecodedBytes = ByteBuffer.allocate(newSize);
break;
} catch (OutOfMemoryError oome) {
retry--;
}
}
if (retry == 0) {
break;
}
mDecodedBytes.rewind();
newDecodedBytes.put(mDecodedBytes);
mDecodedBytes = newDecodedBytes;
mDecodedBytes.position(position);
}
mDecodedBytes.put(decodedSamples, 0, info.size);
codec.releaseOutputBuffer(outputBufferId, false);
} else if (outputBufferId == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = codec.getOutputBuffers();
} else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// Subsequent data will conform to new format.
format = codec.getOutputFormat();
}
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
}
extractor.release();
extractor = null;
codec.stop();
codec.release();
codec = null;
mDecodedBytes.rewind();
mDecodedBytes.order(ByteOrder.LITTLE_ENDIAN);
mDecodedSamples = mDecodedBytes.asShortBuffer();
mRawFile = getFile("raw");
OutputStream output = null;
try {
output = new BufferedOutputStream(new FileOutputStream(mRawFile));
try {
output.write(mDecodedBytes.array());
} catch (IOException e) {
e.printStackTrace();
}
} catch (IOException e) {
Toast.makeText(MainActivityLame.this, e.getMessage(), Toast.LENGTH_SHORT).show();
} finally {
if (output != null) {
try {
output.flush();
} catch (IOException e) {
Toast.makeText(MainActivityLame.this, e.getMessage(), Toast.LENGTH_SHORT).show();
} finally {
try {
output.close();
} catch (IOException e) {
Toast.makeText(MainActivityLame.this, e.getMessage(), Toast.LENGTH_SHORT).show();
}
}
}
}
mEncodedFile = getFile("mp3");
int result = encodeFile(mRawFile.getAbsolutePath(), mEncodedFile.getAbsolutePath());
if (result == 0) {
Toast.makeText(MainActivityLame.this, "Encoded to " + mEncodedFile.getName(), Toast.LENGTH_SHORT).show();
}
}
}
Most OpenCV examples for Android are based on a CameraViewActivity that receives, processes, and displays the frames. Similar to that approach, I want to build a service that is started when the Android device boots. The service should access the camera and do some continuous image processing.
Can anyone tell me how to initialize the camera with OpenCV for this scenario, or link to any samples?
I found several posts that explained how to do it. Basically you have to create an empty, invisible SurfaceView (in my code, a dummy SurfaceTexture) for the Android Camera, plus buffers for the preview frames. Here's part of my code.
Here's an interface that I wrote, because we switch between local hardware and remote network cameras.
public interface ICamera {
boolean supportsRegionOfInterest();
void connect();
void release();
boolean isConnected();
}
Here's the code for the local camera that works without a visible Activity. It receives the frames in a separate thread.
public class HardwareCamera implements CameraAccess.ICamera,
Camera.PreviewCallback {
// see http://developer.android.com/guide/topics/media/camera.html for more
// details
private static final boolean USE_THREAD = true;
private final static String TAG = "HardwareCamera";
private final Context context;
private final int cameraIndex; // example: CameraInfo.CAMERA_FACING_FRONT, or -1 for IP_CAM
private final CameraAccess user;
private Camera mCamera;
private int mFrameWidth;
private int mFrameHeight;
private CameraAccessFrame mCameraFrame;
private CameraHandlerThread mThread = null;
private SurfaceTexture texture = new SurfaceTexture(0);
// needed to avoid OpenCV error:
// "queueBuffer: BufferQueue has been abandoned!"
private byte[] mBuffer;
public HardwareCamera(Context context, CameraAccess user, int cameraIndex) {
this.context = context;
this.cameraIndex = cameraIndex;
this.user = user;
}
// private boolean checkCameraHardware() {
// if (context.getPackageManager().hasSystemFeature(
// PackageManager.FEATURE_CAMERA)) {
// // this device has a camera
// return true;
// } else {
// // no camera on this device
// return false;
// }
// }
public static Camera getCameraInstance(int facing) {
Camera c = null;
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
int cameraCount = Camera.getNumberOfCameras();
int index = -1;
for (int camIdx = 0; camIdx < cameraCount; camIdx++) {
Camera.getCameraInfo(camIdx, cameraInfo);
if (cameraInfo.facing == facing) {
try {
c = Camera.open(camIdx);
index = camIdx;
break;
} catch (RuntimeException e) {
Log.e(TAG,
String.format(
"Camera is not available (in use or does not exist). Facing: %s Index: %s Error: %s",
facing, camIdx, e.getMessage()));
continue;
}
}
}
if (c != null)
Log.d(TAG, String.format("Camera opened. Facing: %s Index: %s",
facing, index));
else
Log.e(TAG, "Could not find any camera matching facing: " + facing);
// returns null if camera is unavailable
return c;
}
private synchronized void connectLocalCamera() {
if (!user.isOpenCVLoaded())
return;
if (USE_THREAD) {
if (mThread == null) {
mThread = new CameraHandlerThread(this);
}
synchronized (mThread) {
mThread.openCamera();
}
} else {
oldConnectCamera();
}
user.onCameraInitialized(mFrameWidth, mFrameHeight);
}
private/* synchronized */void oldConnectCamera() {
// synchronized (this) {
if (true) {// checkCameraHardware()) {
mCamera = getCameraInstance(cameraIndex);
if (mCamera == null)
return;
Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
// Camera.Size previewSize = sizes.get(0);
Collections.sort(sizes, new PreviewSizeComparer());
Camera.Size previewSize = null;
for (Camera.Size s : sizes) {
if (s == null)
break;
previewSize = s;
}
// List<Integer> formats = params.getSupportedPictureFormats();
// params.setPreviewFormat(ImageFormat.NV21);
params.setPreviewSize(previewSize.width, previewSize.height);
mCamera.setParameters(params);
params = mCamera.getParameters();
mFrameWidth = params.getPreviewSize().width;
mFrameHeight = params.getPreviewSize().height;
int size = mFrameWidth * mFrameHeight;
size = size
* ImageFormat.getBitsPerPixel(params.getPreviewFormat())
/ 8;
this.mBuffer = new byte[size];
Log.d(TAG, "Created callback buffer of size (bytes): " + size);
Mat mFrame = new Mat(mFrameHeight + (mFrameHeight / 2),
mFrameWidth, CvType.CV_8UC1);
mCameraFrame = new CameraAccessFrame(mFrame, mFrameWidth,
mFrameHeight);
if (this.texture != null)
this.texture.release();
this.texture = new SurfaceTexture(0);
try {
mCamera.setPreviewTexture(texture);
mCamera.addCallbackBuffer(mBuffer);
mCamera.setPreviewCallbackWithBuffer(this);
mCamera.startPreview();
Log.d(TAG,
String.format(
"Camera preview started with %sx%s. Rendering to SurfaceTexture dummy while receiving preview frames.",
mFrameWidth, mFrameHeight));
} catch (Exception e) {
Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
// }
}
@Override
public synchronized void onPreviewFrame(byte[] frame, Camera arg1) {
mCameraFrame.put(frame);
if (user.isAutomaticReceive() || user.waitForReceive(500))
user.onPreviewFrame(mCameraFrame);
if (mCamera != null)
mCamera.addCallbackBuffer(mBuffer);
}
private class CameraAccessFrame implements CameraFrame {
private Mat mYuvFrameData;
private Mat mRgba;
private int mWidth;
private int mHeight;
private Bitmap mCachedBitmap;
private boolean mRgbaConverted;
private boolean mBitmapConverted;
@Override
public Mat gray() {
return mYuvFrameData.submat(0, mHeight, 0, mWidth);
}
@Override
public Mat rgba() {
if (!mRgbaConverted) {
Imgproc.cvtColor(mYuvFrameData, mRgba,
Imgproc.COLOR_YUV2BGR_NV12, 4);
mRgbaConverted = true;
}
return mRgba;
}
// @Override
// public Mat yuv() {
// return mYuvFrameData;
// }
@Override
public synchronized Bitmap toBitmap() {
if (mBitmapConverted)
return mCachedBitmap;
Mat rgba = this.rgba();
Utils.matToBitmap(rgba, mCachedBitmap);
mBitmapConverted = true;
return mCachedBitmap;
}
public CameraAccessFrame(Mat Yuv420sp, int width, int height) {
super();
mWidth = width;
mHeight = height;
mYuvFrameData = Yuv420sp;
mRgba = new Mat();
this.mCachedBitmap = Bitmap.createBitmap(width, height,
Bitmap.Config.ARGB_8888);
}
public synchronized void put(byte[] frame) {
mYuvFrameData.put(0, 0, frame);
invalidate();
}
public void release() {
mRgba.release();
mCachedBitmap.recycle();
}
public void invalidate() {
mRgbaConverted = false;
mBitmapConverted = false;
}
};
private class PreviewSizeComparer implements Comparator<Camera.Size> {
@Override
public int compare(Size arg0, Size arg1) {
if (arg0 != null && arg1 == null)
return -1;
if (arg0 == null && arg1 != null)
return 1;
if (arg0.width < arg1.width)
return -1;
else if (arg0.width > arg1.width)
return 1;
else
return 0;
}
}
private static class CameraHandlerThread extends HandlerThread {
Handler mHandler;
HardwareCamera owner;
CameraHandlerThread(HardwareCamera owner) {
super("CameraHandlerThread");
this.owner = owner;
start();
mHandler = new Handler(getLooper());
}
synchronized void notifyCameraOpened() {
notify();
}
void openCamera() {
mHandler.post(new Runnable() {
@Override
public void run() {
owner.oldConnectCamera();
notifyCameraOpened();
}
});
try {
wait();
} catch (InterruptedException e) {
Log.w(TAG, "wait was interrupted");
}
}
}
@Override
public boolean supportsRegionOfInterest() {
return false;
}
@Override
public void connect() {
connectLocalCamera();
}
@Override
public void release() {
synchronized (this) {
if (USE_THREAD) {
if (mThread != null) {
mThread.interrupt();
mThread = null;
}
}
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
try {
mCamera.setPreviewTexture(null);
} catch (IOException e) {
Log.e(TAG, "Could not release preview-texture from camera.");
}
mCamera.release();
Log.d(TAG, "Preview stopped and camera released");
}
mCamera = null;
if (mCameraFrame != null) {
mCameraFrame.release();
}
if (texture != null)
texture.release();
}
}
@Override
public boolean isConnected() {
return mCamera != null;
}
}
The camera frame is taken from the OpenCV samples. It's responsible for converting the raw byte array into OpenCV Mat structures. The implementation of that interface is inside the code above.
public interface CameraFrame extends CvCameraViewFrame {
Bitmap toBitmap();
@Override
Mat rgba();
@Override
Mat gray();
}
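The boot part of the question isn't covered by the code above. Here is a minimal sketch of how such a service could be started at boot; BootReceiver and CameraService are hypothetical names, and the manifest additionally needs the RECEIVE_BOOT_COMPLETED permission plus a receiver entry filtered on ACTION_BOOT_COMPLETED.
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
public class BootReceiver extends BroadcastReceiver {
    @Override
    public void onReceive(Context context, Intent intent) {
        if (Intent.ACTION_BOOT_COMPLETED.equals(intent.getAction())) {
            // CameraService would own an ICamera instance and call connect() on it.
            context.startService(new Intent(context, CameraService.class));
        }
    }
}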
Currently I'm trying to work with the Android camera, and I've gotten pretty far with my test project. It worked perfectly fine when tested on my HTC Desire S running Gingerbread. However, after I updated to ICS, pictures taken with the test app only show strange vertical lines (it's the exact same code).
Here is the kind of image that is created now all of a sudden:
http://imageshack.us/photo/my-images/191/rebuilder1.jpg/
Here is my code (whole class):
package inter.rebuilder;
import inter.rebuilder.R;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.List;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.PixelFormat;
import android.graphics.Bitmap.CompressFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.ErrorCallback;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera.Size;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MotionEvent;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.View.OnClickListener;
import android.view.View.OnTouchListener;
public class CameraView extends Activity implements SurfaceHolder.Callback,
OnClickListener {
static final int FOTO_MODE = 0;
private static final String TAG = "CameraTest";
Camera mCamera;
boolean mPreviewRunning = false;
private Context mContext = this;
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
//boolean hasCam = checkCameraHardware(mContext);
Log.e(TAG, "onCreate");
Bundle extras = getIntent().getExtras();
getWindow().setFormat(PixelFormat.TRANSLUCENT);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.main);
mSurfaceView = (SurfaceView) findViewById(R.id.surface_camera);
mSurfaceView.setOnClickListener(this);
mSurfaceHolder = mSurfaceView.getHolder();
mSurfaceHolder.addCallback(this);
mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
super.onRestoreInstanceState(savedInstanceState);
}
Camera.PreviewCallback mPreviewCallback = new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
Camera.Parameters parameters = camera.getParameters();
int width = parameters.getPreviewSize().width;
int height = parameters.getPreviewSize().height;
ByteArrayOutputStream outstr = new ByteArrayOutputStream();
Rect rect = new Rect(0, 0, width, height);
YuvImage yuvimage=new YuvImage(data,ImageFormat.NV21,width,height,null);
yuvimage.compressToJpeg(rect, 100, outstr);
Bitmap bmp = BitmapFactory.decodeByteArray(outstr.toByteArray(), 0, outstr.size());
}
};
Camera.PictureCallback mPictureCallback = new Camera.PictureCallback() {
public void onPictureTaken(byte[] imageData, Camera c) {
if (imageData != null) {
Intent mIntent = new Intent();
storeByteImage(mContext, imageData, 100);
try {
mCamera.unlock();
mCamera.reconnect();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
setResult(FOTO_MODE, mIntent);
startCameraPreview();
//mCamera.startPreview();
//finish();
//Intent intent = new Intent(CameraView.this, AndroidBoxExample.class);
//CameraView.this.startActivity(intent);
}
}
};
private boolean checkCameraHardware(Context context) {
if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)){
// this device has a camera
return true;
} else {
// no camera on this device
return false;
}
}
protected void onResume() {
Log.e(TAG, "onResume");
super.onResume();
}
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
}
protected void onPause() {
Log.e(TAG, "onPause");
super.onPause();
}
protected void onStop() {
Log.e(TAG, "onStop");
super.onStop();
}
public void surfaceCreated(SurfaceHolder holder) {
Log.e(TAG, "surfaceCreated");
mCamera = Camera.open();
//mCamera.unlock();
//mCamera.setDisplayOrientation(180);
}
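// Restarts the preview after a capture, using the first preview size the
// camera driver reports as supported.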
private void startCameraPreview() {
Camera.Parameters p = mCamera.getParameters();
p.setPictureFormat(PixelFormat.JPEG);
//p.setPreviewSize(w, h);
List<Size> list = p.getSupportedPreviewSizes();
Camera.Size size = list.get(0);
p.setPreviewSize(size.width, size.height);
mCamera.setParameters(p);
mCamera.startPreview();
mPreviewRunning = true;
}
private void startCameraPreview(SurfaceHolder holder) {
Camera.Parameters p = mCamera.getParameters();
//p.setPreviewSize(w, h);
List<Size> list = p.getSupportedPreviewSizes();
Camera.Size size = list.get(0);
p.setPreviewSize(size.width, size.height);
mCamera.setParameters(p);
try {
mCamera.setPreviewDisplay(holder);
} catch (IOException e) {
e.printStackTrace();
}
mCamera.startPreview();
mPreviewRunning = true;
//setCameraDisplayOrientation(this, 0, mCamera);
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
Log.e(TAG, "surfaceChanged");
// XXX stopPreview() will crash if preview is not running
if (mPreviewRunning) {
mCamera.stopPreview();
mPreviewRunning = false;
}
startCameraPreview(holder);
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.e(TAG, "surfaceDestroyed");
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
mPreviewRunning = false;
mCamera.release();
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
mPreviewRunning = false;
mCamera.release();
super.onConfigurationChanged(newConfig);
}
@Override
public void onContentChanged() {
super.onContentChanged();
}
@Override
public void onContextMenuClosed(Menu menu) {
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
mPreviewRunning = false;
mCamera.release();
super.onContextMenuClosed(menu);
}
private SurfaceView mSurfaceView;
private SurfaceHolder mSurfaceHolder;
public void onClick(View arg0) {
// only the JPEG callback is needed; passing mPictureCallback as the raw
// callback as well would fire onPictureTaken twice per shot
mCamera.takePicture(null, null, mPictureCallback);
}
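// Storage helpers: createDir() makes (or reuses) a "rebuilder" directory on
// external storage; createFile() returns the next unused "<name>N.jpg" inside it.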
private static File createDir() throws IOException {
String nameDir = "rebuilder";
File extStorageDir = Environment.getExternalStorageDirectory();
File sdImageMainDirectory = extStorageDir; //new File("/sdcard");
File dirFile = new File(sdImageMainDirectory.getPath()+"/"+nameDir);
boolean fileExisted = dirFile.exists();
if(!fileExisted) {
dirFile.mkdirs();
}
return dirFile;
}
private static File createFile(String name, File dirFile) throws IOException {
int counter = 1;
String fileName = name + counter+".jpg";
File imageFile = new File(dirFile.getPath()+"/"+fileName);
while(imageFile.exists()) {
counter = counter + 1;
fileName = name + counter+".jpg";
imageFile = new File(dirFile.getPath()+"/"+fileName);
}
imageFile.createNewFile();
return imageFile;
}
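// blendTest() keeps the last two captured bitmaps; every second call averages
// them pixel-by-pixel and writes the result as "blendN.jpg" via the helpers above.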
static Bitmap image1 = null;
static Bitmap image2 = null;
public static void blendTest(Bitmap myImage) throws IOException {
if(image1 == null && image2 == null) {
image1 = myImage;
return;
}
if(image1 != null && image2 != null) {
image2 = null;
image1 = myImage;
return;
}
if(image1 != null && image2 == null) {
image2 = myImage;
}
int width = Math.min(image1.getWidth(), image2.getWidth());
int height = Math.min(image1.getHeight(), image2.getHeight());
int[][] pixels1 = new int[width][height];
int[][] pixels2 = new int[width][height];
for(int i = 0; i < width; i++) {
for(int j = 0; j < height; j++) {
pixels1[i][j] = image1.getPixel(i, j);
}
}
for(int i = 0; i < width; i++) {
for(int j = 0; j < height; j++) {
pixels2[i][j] = image2.getPixel(i, j);
}
}
Bitmap image3 = Bitmap.createBitmap(width, height, image1.getConfig());
for(int i = 0; i < width; i++) {
for(int j = 0; j < height; j++) {
int color1 = pixels1[i][j];
int color2 = pixels2[i][j];
int red1 = Color.red(color1);
int red2 = Color.red(color2);
int green1 = Color.green(color1);
int green2 = Color.green(color2);
int blue1 = Color.blue(color1);
int blue2 = Color.blue(color2);
int newColor = Color.rgb((red1 + red2)/2, (green1 + green2)/2, (blue1 + blue2)/2);
image3.setPixel(i, j, newColor);
}
}
File dirFile = createDir();
File newBlend = createFile("blend", dirFile);
FileOutputStream fileOutputStream = new FileOutputStream(newBlend);
BufferedOutputStream bos = new BufferedOutputStream(
fileOutputStream);
image3.compress(CompressFormat.JPEG, 100, bos);
bos.flush();
bos.close();
}
// public static void setCameraDisplayOrientation(Activity activity,
// int cameraId, android.hardware.Camera camera) {
// android.hardware.Camera.CameraInfo info =
// new android.hardware.Camera.CameraInfo();
// android.hardware.Camera.getCameraInfo(cameraId, info);
// int rotation = activity.getWindowManager().getDefaultDisplay()
// .getRotation();
// int degrees = 0;
// switch (rotation) {
// case Surface.ROTATION_0: degrees = 0; break;
// case Surface.ROTATION_90: degrees = 90; break;
// case Surface.ROTATION_180: degrees = 180; break;
// case Surface.ROTATION_270: degrees = 270; break;
// }
//
// int result;
// if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
// result = (info.orientation + degrees) % 360;
// result = (360 - result) % 360; // compensate the mirror
// } else { // back-facing
// result = (info.orientation - degrees + 360) % 360;
// }
// camera.setDisplayOrientation(result);
// }
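// Decodes the captured JPEG (downsampled via inSampleSize to save memory),
// rotates landscape-shaped images 90 degrees, and saves the result as
// "rebuilderN.jpg" on external storage.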
public static boolean storeByteImage(Context mContext, byte[] imageData, int quality) {
FileOutputStream fileOutputStream = null;
try {
File dirFile = createDir();
File imageFile = createFile("rebuilder", dirFile);
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 5; // downsample by 5x to keep memory use low
options.inDither = false; // disable dithering
options.inPurgeable = true; // let the system reclaim the pixel memory under pressure
options.inInputShareable = true; // share the input buffer instead of copying it
options.inTempStorage = new byte[32 * 1024];
options.inPreferredConfig = Bitmap.Config.RGB_565;
Bitmap myImage = BitmapFactory.decodeByteArray(imageData, 0,
imageData.length,options);
int orientation;
// heuristic: camera JPEGs that come back landscape-shaped get rotated to portrait
if (myImage.getHeight() < myImage.getWidth()) {
orientation = 90;
} else {
orientation = 0;
}
Bitmap bMapRotate;
if (orientation != 0) {
Matrix matrix = new Matrix();
matrix.postRotate(orientation);
bMapRotate = Bitmap.createBitmap(myImage, 0, 0, myImage.getWidth(),
myImage.getHeight(), matrix, true);
} else {
// no rotation needed; keep the decoded bitmap at its original size
bMapRotate = Bitmap.createScaledBitmap(myImage, myImage.getWidth(),
myImage.getHeight(), true);
}
//blendTest(myImage);
fileOutputStream = new FileOutputStream(imageFile);
BufferedOutputStream bos = new BufferedOutputStream(
fileOutputStream);
bMapRotate.compress(CompressFormat.JPEG, quality, bos);
if (bMapRotate != null) {
bMapRotate.recycle();
bMapRotate = null;
}
bos.flush();
bos.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
System.out.println("SD Card not ready");
}
// note: true is returned even when saving fails
return true;
}
}
Camera hardware permissions and landscape orientation are set in the Android manifest.
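For reference, the relevant manifest entries look roughly like this (the activity name is assumed to match the class above):
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-feature android:name="android.hardware.camera" />
<activity android:name=".CameraView" android:screenOrientation="landscape" />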
Please help me here.
Did you install a custom ROM on your device? If yes, then this might be the issue.
Uncomment the method and every place it is called (referring to the OP's code):
public static void setCameraDisplayOrientation(Activity activity,
        int cameraId, android.hardware.Camera camera) {
    android.hardware.Camera.CameraInfo info =
            new android.hardware.Camera.CameraInfo();
    android.hardware.Camera.getCameraInfo(cameraId, info);
    int rotation = activity.getWindowManager().getDefaultDisplay()
            .getRotation();
    int degrees = 0;
    switch (rotation) {
        case Surface.ROTATION_0: degrees = 0; break;
        case Surface.ROTATION_90: degrees = 90; break;
        case Surface.ROTATION_180: degrees = 180; break;
        case Surface.ROTATION_270: degrees = 270; break;
    }

    int result;
    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
        result = (info.orientation + degrees) % 360;
        result = (360 - result) % 360; // compensate the mirror
    } else { // back-facing
        result = (info.orientation - degrees + 360) % 360;
    }
    camera.setDisplayOrientation(result);
}
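Once uncommented, the method still has to be invoked. In the OP's code that means re-enabling the commented call at the end of startCameraPreview(SurfaceHolder); a minimal sketch (assuming the back-facing camera, id 0):

private void startCameraPreview(SurfaceHolder holder) {
    // ... existing parameter and preview-display setup as above ...
    mCamera.startPreview();
    mPreviewRunning = true;
    setCameraDisplayOrientation(this, 0, mCamera); // 0 = back-facing camera
}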