I want to keep the buffered portion of a video that is playing in a VideoView. I found out that VideoView's surfaceDestroyed callback contains a release(true) call, which releases the mMediaPlayer instance, according to this question:
VideoView onResume loses buffered portion of the video
I don't want to hack the internal API directly, so I decided to write my own SurfaceHolder.Callback and swap it into the mSHCallback field via reflection, so that release(true) is never called.
But something strange happens: when I call VideoView.start() in my surfaceCreated override, I get the "Can't play this video" message multiple times, yet the audio keeps playing through to the end, even after I switch to another activity.
Here is my custom callback:
package com.tirkx.aos;
import android.util.Log;
import android.widget.MediaController;
import android.media.MediaPlayer;
import android.view.SurfaceHolder;
import android.widget.VideoView;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
public class CustomVidViewCallbackLoader implements SurfaceHolder.Callback
{
private final String TAG = "CustomCallbackLoader";
private Map<String, Field> PrivateFields;
private Map<String, Method> PrivateMethods;
private VideoView VideoView;
boolean VideoOpened;
public CustomVidViewCallbackLoader(VideoView videoView)
{
VideoView = videoView;
PrivateFields = new HashMap<String, Field>();
PrivateMethods = new HashMap<String, Method>();
Class VideoViewClass = videoView.getClass();
//Reflection things
try
{
PrivateFields.put("mSHCallback", VideoViewClass.getDeclaredField("mSHCallback"));
PrivateFields.put("mSurfaceHolder", VideoViewClass.getDeclaredField("mSurfaceHolder"));
PrivateFields.put("mMediaController", VideoViewClass.getDeclaredField("mMediaController"));
PrivateFields.put("mMediaPlayer", VideoViewClass.getDeclaredField("mMediaPlayer"));
PrivateFields.put("mSurfaceWidth", VideoViewClass.getDeclaredField("mSurfaceWidth"));
PrivateFields.put("mSurfaceHeight", VideoViewClass.getDeclaredField("mSurfaceHeight"));
PrivateFields.put("mTargetState", VideoViewClass.getDeclaredField("mTargetState"));
PrivateFields.put("mVideoWidth", VideoViewClass.getDeclaredField("mVideoWidth"));
PrivateFields.put("mVideoHeight", VideoViewClass.getDeclaredField("mVideoHeight"));
PrivateFields.put("mSeekWhenPrepared", VideoViewClass.getDeclaredField("mSeekWhenPrepared"));
PrivateMethods.put("openVideo", VideoViewClass.getDeclaredMethod("openVideo"));
for(Map.Entry<String, Method> m : PrivateMethods.entrySet())
m.getValue().setAccessible(true);
for(Map.Entry<String, Field> f : PrivateFields.entrySet())
f.getValue().setAccessible(true);
}
catch(Exception ex)
{
ex.printStackTrace();
}
try
{
SurfaceHolder.Callback SHCallback = (SurfaceHolder.Callback) PrivateFields
.get("mSHCallback").get(videoView);
if (SHCallback != null)
{
videoView.getHolder().removeCallback(SHCallback);
PrivateFields.get("mSHCallback").set(videoView, this);
videoView.getHolder().addCallback((SurfaceHolder.Callback) PrivateFields
.get("mSHCallback").get(videoView));
}
}
catch(Exception ex)
{
ex.printStackTrace();
}
}
@Override
public void surfaceCreated(SurfaceHolder holder)
{
try
{
PrivateFields.get("mSurfaceHolder").set(VideoView, holder);
if(!VideoOpened)
{
PrivateMethods.get("openVideo").invoke(VideoView);
VideoOpened = true;
}
else
{
VideoView.start();
}
}
catch(Exception ex)
{
ex.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
{
try
{
//PrivateFields.get("mSurfaceHolder").set(VideoView, holder);
int mVideoWidth = (int) PrivateFields.get("mVideoWidth").get(VideoView);
int mVideoHeight = (int) PrivateFields.get("mVideoHeight").get(VideoView);
int mTargetState = (int) PrivateFields.get("mTargetState").get(VideoView);
int mSeekWhenPrepared = (int) PrivateFields.get("mSeekWhenPrepared").get(VideoView);
MediaPlayer mMediaPlayer = (MediaPlayer) PrivateFields.get("mMediaPlayer").get(VideoView);
PrivateFields.get("mSurfaceWidth").set(VideoView, width);
PrivateFields.get("mSurfaceHeight").set(VideoView, height);
boolean isValidState = (mTargetState == 3);
boolean hasValidSize = (mVideoWidth == width && mVideoHeight == height);
if(mMediaPlayer != null && isValidState && hasValidSize)
{
if(mSeekWhenPrepared != 0)
VideoView.seekTo(mSeekWhenPrepared);
VideoView.start();
}
}
catch(Exception ex)
{
ex.printStackTrace();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder)
{
try
{
PrivateFields.get("mSurfaceHolder").set(VideoView, null);
MediaController mediaController = (MediaController) PrivateFields.get("mMediaController")
.get(VideoView);
if(mediaController != null)
mediaController.hide();
VideoView.pause();
}
catch(Exception ex)
{
ex.printStackTrace();
}
}
public void setMediaPlayerOnBufferedChanged(MediaPlayer mediaPlayer)
{
if (mediaPlayer != null)
{
mediaPlayer.setOnBufferingUpdateListener(new MediaPlayer.OnBufferingUpdateListener()
{
@Override
public void onBufferingUpdate(MediaPlayer mp, int percent)
{
Log.i(TAG, "-->" + percent);
}
});
}
}
}
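One approach worth trying here (an assumption on my part, not something confirmed in the original post): instead of calling VideoView.start() again in surfaceCreated, hand the recreated surface back to the MediaPlayer that is still alive via setDisplay(), so the player and its buffer survive the surface teardown. A rough, untested sketch that reuses the reflection helpers from the class above:
@Override
public void surfaceCreated(SurfaceHolder holder)
{
    try
    {
        PrivateFields.get("mSurfaceHolder").set(VideoView, holder);
        MediaPlayer mp = (MediaPlayer) PrivateFields.get("mMediaPlayer").get(VideoView);
        if (mp == null)
        {
            // First creation: let VideoView build the player as usual.
            PrivateMethods.get("openVideo").invoke(VideoView);
        }
        else
        {
            // Surface was recreated: re-attach it instead of restarting playback.
            mp.setDisplay(holder);
            mp.start();
        }
    }
    catch (Exception ex)
    {
        ex.printStackTrace();
    }
}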
Here is the code of my fragment. I am using MediaRecorder for recording.
This code works when I use only a single camera, either the front camera or the back camera, but when I switch cameras using the camera ID, MediaRecorder throws an "Invalid preview surface" exception. Where am I wrong?
Please help me. Thanks in advance.
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.media.MediaPlayer;
import android.media.MediaRecorder;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.TextClock;
import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import static android.provider.MediaStore.Files.FileColumns.MEDIA_TYPE_IMAGE;
import static android.provider.MediaStore.Files.FileColumns.MEDIA_TYPE_VIDEO;
import static mms.dweb.buzzcutz.ApplicationContext.TAG;
// ButterKnife imports (assumed; the original snippet omits them)
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
public class VideoRecordFragmentB extends BaseFragment {
@BindView(R.id.record_btn_frb)
ImageView record_btn;
@BindView(R.id.cameraSwitch)
ImageView cameraSwitch;
@BindView(R.id.mTextField)
TextClock mTextField;
int camId = 0;
@BindView(R.id.camera_preview)
FrameLayout preview;
MediaPlayer mediaPlayer;
String catId;
Boolean back = true;
private View fragmentView;
private MainActivity mainActivity;
private Camera mCamera;
private CameraPreview mPreview;
private MediaRecorder mMediaRecorder;
private boolean isRecording = false;
private CategoryResponce.Responsedata categoryData;
private static Uri getOutputMediaFileUri(int type) {
return Uri.fromFile(getOutputMediaFile(type));
}
/**
* Create a File for saving an image or video
*/
private static File getOutputMediaFile(int type) {
// To be safe, you should check that the SDCard is mounted
// using Environment.getExternalStorageState() before doing this.
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES), "Buzzcut");
// This location works best if you want the created images to be shared
// between applications and persist after your app has been uninstalled.
// Create the storage directory if it does not exist
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
Log.d("MyCameraApp", "failed to create directory");
return null;
}
}
// Create a media file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
File mediaFile;
if (type == MEDIA_TYPE_IMAGE) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"IMG_" + timeStamp + ".jpg");
} else if (type == MEDIA_TYPE_VIDEO) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"VID_" + timeStamp + ".mp4");
} else {
return null;
}
return mediaFile;
}
@OnClick(R.id.cameraSwitch)
void changeCam() {
if (camId == 0) {
camId = 1;
} else {
camId = 0;
}
managView();
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
fragmentView =
inflater.inflate(R.layout.fragment_video_record_fragment_b, container, false);
ButterKnife.bind(this, fragmentView);
initializeView();
return fragmentView;
}
@Override
public void initializeView() {
super.initializeView();
if (getActivity() instanceof MainActivity) {
mainActivity = (MainActivity) getActivity();
}
managView();
}
private void managView() {
mCamera = getCameraInstance(camId);
mPreview = new CameraPreview(getContext(), mCamera);
preview.addView(mPreview);
Bundle bundle = getArguments();
categoryData = (CategoryResponce.Responsedata)
bundle.getSerializable("CategoryResponceData");
catId = categoryData.getId();
Bundle bundle1 = new Bundle();
bundle1.putSerializable("CategoryResponceData", categoryData);
record_btn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (isRecording) {
// stop recording and release camera
mMediaRecorder.stop(); // stop the recording
releaseMediaRecorder(); // release the MediaRecorder object
mCamera.lock();// take camera access back from MediaRecorder
record_btn.setImageResource(R.drawable.record_btn);
// inform the user that recording has stopped
isRecording = false;
Bundle bundle = new Bundle();
bundle.putString("fileUri",
getOutputMediaFileUri(MEDIA_TYPE_VIDEO).toString());
bundle.putSerializable("CategoryResponceData", categoryData);
mainActivity.replaceFragment(FragmentNames.VideoUploadFragment, bundle, false, false);
} else {
// initialize video camera
if (prepareVideoRecorder(camId)) {
// Camera is available and unlocked, MediaRecorder is prepared,
// now you can start recording
record_btn.setImageResource(R.drawable.recoder);
mMediaRecorder.start();
// inform the user that recording has started
isRecording = true;
} else {
// prepare didn't work, release the camera
releaseMediaRecorder();
// inform user
}
}
}
});
}
@Override
public void onStop() {
super.onStop();
}
public Camera getCameraInstance(int camid) {
releaseCameraAndPreview();
Camera c = null;
try {
c = Camera.open(camid); // attempt to get a Camera instance
} catch (Exception e) {
// Camera is not available (in use or does not exist)
}
return c; // returns null if camera is unavailable
}
private void releaseCameraAndPreview() {
preview.removeAllViews();
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
private boolean prepareVideoRecorder(int camid) {
mCamera = getCameraInstance(camid);
mMediaRecorder = new MediaRecorder();
mCamera.unlock();
mMediaRecorder.setCamera(mCamera);
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mMediaRecorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH));
mMediaRecorder.setOutputFile(getOutputMediaFile(MEDIA_TYPE_VIDEO).toString());
mMediaRecorder.setPreviewDisplay(mPreview.getHolder().getSurface());
try {
mMediaRecorder.prepare();
} catch (IllegalStateException e) {
Log.d(TAG, "IllegalStateException preparing MediaRecorder: " + e.getMessage());
releaseMediaRecorder();
return false;
} catch (IOException e) {
Log.d(TAG, "IOException preparing MediaRecorder: " + e.getMessage());
releaseMediaRecorder();
return false;
}
return true;
}
@Override
public void onPause() {
super.onPause();
releaseMediaRecorder(); // if you are using MediaRecorder, release it first
releaseCamera(); // release the camera immediately on pause event
}
private void releaseMediaRecorder() {
if (mMediaRecorder != null) {
mMediaRecorder.reset(); // clear recorder configuration
mMediaRecorder.release(); // release the recorder object
mMediaRecorder = null;
mCamera.lock(); // lock camera for later use
}
}
private void releaseCamera() {
if (mCamera != null) {
mCamera.release(); // release the camera for other applications
mCamera = null;
}
}
}
Here is the CameraPreview class. I use it to display the camera preview.
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder mHolder;
private Camera mCamera;
private CamcorderProfile camcorderProfile;
public CameraPreview(Context context, Camera camera) {
super(context);
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
try {
mCamera.setPreviewDisplay(holder);
mCamera.setDisplayOrientation(90);
mCamera.startPreview();
} catch (IOException e) {
Log.d(TAG, "Error setting camera preview: " + e.getMessage());
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if (mHolder.getSurface() == null){
return;
}
try {
mCamera.stopPreview();
} catch (Exception e){
}
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.setDisplayOrientation(90);
mCamera.startPreview();
} catch (Exception e){
Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
}
Anyway, please initialize the view in onResume:
@Override
public void onResume() {
super.onResume();
initializeView();
}
Have you declared the permission in the manifest?
<uses-permission android:name="android.permission.RECORD_AUDIO" />
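Besides RECORD_AUDIO, recording video with the Camera and MediaRecorder APIs normally also needs the following entries; these are assumptions about your setup, since the manifest is not shown:
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />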
For most versions this works fine, but on API 24-25 the MEDIA_ERROR_SERVER_DIED error occurs immediately when recording starts, and you are forced to finish the recording.
If there is a specific reason for this, can it be handled so that the recording is not interrupted?
In case it helps, I attach the code below. Thanks!
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.os.BatteryManager;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.OrientationEventListener;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import javax.inject.Inject;
public class MainCameraFragment extends BaseFragment implements SurfaceHolder.Callback,
MediaRecorder.OnErrorListener, MediaRecorder.OnInfoListener {
private static final String TAG = "MainCameraFragment";
private static final float MIN_BATTERY_CHARGE = 0.1f;
private Camera camera = null;
private int cameraId = -1;
private MediaRecorder recorder = null;
private boolean inPreview = false;
private Action<RecordingError> recordingErrorAction = null;
/** Surface view container. */
private FrameLayout container;
/** Surface view instance. */
private SurfaceView surfaceView;
/** Orientation listener. */
private OrientationEventListener orientationEventListener;
/** Start recording time. */
private long startRecordingTime = 0;
/** Last video path. */
private File lastVideoPath;
/** Recording manager instance. */
@Inject
RecordingManager recordingManager;
/** Settings manager instance. */
@Inject
SettingsManager settingsManager;
/** Do on resume. */
private Runnable doOnResume;
public enum RecordingError {
ILLEGAL_MEDIA_RECORDER_STATE,
NOT_ENOUGH_MEMORY,
NOT_ENOUGH_BATTERY,
}
/** Start preview from camera. */
private void startPreview() {
if (camera != null && !inPreview) {
try {
camera.setPreviewDisplay(surfaceView.getHolder());
Camera.Parameters parameters = camera.getParameters();
final List<Camera.Size> supportedSizes = parameters.getSupportedPreviewSizes();
Camera.Size bestFitSize = camera.new Size(0, 0);
for( Camera.Size size : supportedSizes ) {
if( size.width <= surfaceView.getWidth() && size.height <= surfaceView.getHeight() ) {
bestFitSize = size;
break;
}
}
if( bestFitSize.width == 0 && bestFitSize.height == 0 ) {
bestFitSize = supportedSizes.get(0);
}
parameters.setPreviewSize(bestFitSize.width, bestFitSize.height);
camera.setParameters(parameters);
camera.startPreview();
inPreview = true;
} catch (IOException e) {
Log.e(TAG, "Error setting camera preview: " + e.getMessage());
}
}
}
/** Stop camera preview. */
private void stopPreview() {
if (camera != null && inPreview) {
try {
camera.stopPreview();
} catch (Exception e) {
Log.i(TAG, "Error setting camera preview: " + e.getMessage());
}
inPreview = false;
}
}
/** Init camera. */
private void initCamera() {
if (camera == null) {
cameraId = recordingManager.getCameraId();
if (cameraId >= 0) {
try {
camera = Camera.open(cameraId);
setupCameraDisplayOrientation();
lockAutoFocus(camera, settingsManager.isLockAutoFocus());
lockAutoExposure(camera, settingsManager.isLockAutoExposure());
}
catch (Exception e) {
Log.e(TAG, e.getMessage(), e);
}
} else {
Log.e(TAG, "Can't find camera id");
}
}
}
/** Release camera. */
private void releaseCamera() {
if (camera != null) {
camera.release();
camera = null;
cameraId = -1;
}
}
/** @return recording state. */
public boolean isRecording() {
return recorder != null;
}
/** Start recording. */
public void startRecording(final Action<RecordingError> recordingErrorAction, boolean micUsage) {
if (camera != null && !isRecording()) {
camera.stopPreview();
final RecordingError recordingError = checkEnoughs();
if (recordingError != null) {
if (recordingErrorAction != null) {
recordingErrorAction.act(recordingError);
}
return;
}
startRecordingTime = System.currentTimeMillis();
final CamcorderProfile profile = recordingManager.getCamcorderProfile(cameraId, camera.getParameters());
stopPreview();
camera.unlock();
MediaRecorder recorder = null;
try {
recorder = new MediaRecorder();
recorder.setCamera(camera);
recorder.setAudioSource(MediaRecorder.AudioSource.VOICE_COMMUNICATION);
recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
recorder.setProfile(profile);
final String ts = new SimpleDateFormat("yyyyMMdd_HHmmss", Locale.US).format(new Date());
lastVideoPath = new File(recordingManager.getRecordsDirPath(), "Video_" + ts + ".mp4");
recorder.setOutputFile(lastVideoPath.getAbsolutePath());
final int orientationHint = getRecorderOrientationHint();
if (orientationHint != -1) {
recorder.setOrientationHint(orientationHint);
}
recorder.setMaxFileSize(Long.MAX_VALUE);
recorder.setMaxDuration(Integer.MAX_VALUE);
stopPreview();
recorder.setPreviewDisplay(surfaceView.getHolder().getSurface());
recorder.setOnErrorListener(this);
recorder.setOnInfoListener(this);
recorder.prepare();
recorder.start();
this.recordingErrorAction = recordingErrorAction;
} catch (IOException e) {
Log.d(TAG, "Error " + e.getMessage(), e);
if (recorder != null) {
recorder.release();
recorder = null;
}
} finally {
this.recorder = recorder;
}
}
}
/** Stop recording. */
public void stopRecording() {
if (isRecording()) {
final MediaRecorder recorder = this.recorder;
this.recorder = null;
recorder.stop();
recorder.reset();
recorder.release();
this.recordingErrorAction = null;
try {
camera.reconnect();
startPreview();
} catch (IOException e) {
Log.w(TAG, e.getMessage(), e);
}
}
}
@Override
public void onError(final MediaRecorder mr, final int what, final int extra) {
Log.e(TAG, "Media recorder error: " + what + " extra: " + extra);
if (recordingErrorAction != null) {
recordingErrorAction.act(RecordingError.ILLEGAL_MEDIA_RECORDER_STATE);
}
}
}
You can catch the MEDIA_ERROR_SERVER_DIED message inside the onError method and try to re-instantiate your recorder.
public void onError(final MediaRecorder mr, final int what, final int extra) {
Log.e(TAG, "Media recorder error: " + what + " extra: " + extra);
if (what == MediaPlayer.MEDIA_ERROR_SERVER_DIED) {
Log.i(TAG, "MediaPlayer died, restarting");
recorder.release();
recorder = new MediaRecorder();
}
if (recordingErrorAction != null) {
recordingErrorAction.act(RecordingError.ILLEGAL_MEDIA_RECORDER_STATE);
}
}
In my sample code above I just re-instantiate the recorder via the line recorder = new MediaRecorder();. It will probably need its settings applied again afterwards.
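For what it's worth, here is a rough, untested sketch of what that re-initialization could look like in the fragment above; it reuses names from that code (camera, cameraId, recorder, recordingManager, lastVideoPath, surfaceView) and assumes the same settings are still wanted:
private void restartRecorderAfterServerDied() {
    try {
        recorder.release();                       // drop the dead recorder
        camera.reconnect();                       // take the camera back from it
        final CamcorderProfile profile =
                recordingManager.getCamcorderProfile(cameraId, camera.getParameters());
        camera.unlock();                          // hand the camera to the new recorder
        recorder = new MediaRecorder();
        recorder.setCamera(camera);
        recorder.setAudioSource(MediaRecorder.AudioSource.VOICE_COMMUNICATION);
        recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
        recorder.setProfile(profile);
        // Write to a fresh file, the same way startRecording() does.
        final String ts = new SimpleDateFormat("yyyyMMdd_HHmmss", Locale.US).format(new Date());
        lastVideoPath = new File(recordingManager.getRecordsDirPath(), "Video_" + ts + ".mp4");
        recorder.setOutputFile(lastVideoPath.getAbsolutePath());
        recorder.setPreviewDisplay(surfaceView.getHolder().getSurface());
        recorder.setOnErrorListener(this);
        recorder.setOnInfoListener(this);
        recorder.prepare();
        recorder.start();
    } catch (IOException e) {
        Log.e(TAG, "Could not restart recorder: " + e.getMessage(), e);
    }
}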
I have a custom camera application and everything works fine. But whenever the app gets paused (when onPause or onDestroy is called) the camera is released, and afterwards, when onResume is called and the capture button is clicked to take an image, my application crashes. How do I fix this? Please help me, thanks in advance.
CameraActivity Code
package com.example.skmishra.plates.Activities;
import android.app.ActionBar;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.hardware.Camera;
import android.hardware.SensorManager;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.MotionEvent;
import android.view.OrientationEventListener;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.ZoomControls;
import com.example.skmishra.plates.Asyncs.CameraAsync;
import com.example.skmishra.plates.CameraHandler;
import com.example.skmishra.plates.Library.Fonts;
import com.example.skmishra.plates.R;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
/**
* Created by skmishra on 12/28/2015.
*/
public class camera extends Activity {
private static final int RESULT_LOAD_IMAGE = 200 ;
private Camera mCamera=null;
private CameraHandler surface_view;
public static final int MEDIA_TYPE_IMAGE = 1;
public static final int MEDIA_TYPE_VIDEO = 2;
public static final String TAG = "Aloo";
int toRotate = 90;
public int currentCameraID = 0;
OrientationEventListener myOrientationEventListener;
private ZoomControls zoomControls;
private double mDist;
Boolean imageSwitchClicked = false;
Boolean mShowFlash = false;
ImageView mSwitch_cam;
ImageView mFlashBut;
FrameLayout preview;
CameraAsync mCamAsync;
ImageView imageGallery;
TextView raleway;
TextView headerCameraText;
Fonts mFonts;
int permCode=4;
Camera.Parameters params;
String recievedType=null;
@Override
protected void onCreate(Bundle savedInstanceState) {
Log.e("I Called Thus "," cda");
mCamAsync=new CameraAsync(this);
mCamAsync.execute();
super.onCreate(savedInstanceState);
setContentView(R.layout.camera);
imageGallery = (ImageView) findViewById(R.id.select_gallery);
mFonts = new Fonts();
preview = (FrameLayout) findViewById(R.id.camera_preview);
mFlashBut = (ImageView) findViewById(R.id.flash);
mSwitch_cam = (ImageView) findViewById(R.id.white_switch);
raleway = (TextView) findViewById(R.id.textView2);
headerCameraText = (TextView) findViewById(R.id.imageHead);
// mFonts.setRalewayBold(this, headerCameraText);
Intent gets = getIntent();
recievedType = gets.getExtras().getString("recievedCameraPurpose");
handleHeaderText(recievedType);
mFonts.setRalewayBold(this, raleway);
myOrientationEventListener
= new OrientationEventListener(this, SensorManager.SENSOR_DELAY_NORMAL) {
@Override
public void onOrientationChanged(int arg0) {
int rotation = arg0;
if (rotation > 340) {
if (currentCameraID == 0) {
toRotate = 90;
} else {
toRotate =270;
Log.e("POSITION_TITLT", "-> Potrait Front camera");
}
} else if (rotation < 80 && rotation > 30) {
toRotate = 180;
Log.e("POSITION_TILT", "-> Landscape Right " + rotation);
} else if (rotation < 280 && rotation > 240) {
toRotate = 0;
Log.e("POSITION_TILT", "-> Landscape Left " + rotation);
}
}
};
if (myOrientationEventListener.canDetectOrientation()) {
myOrientationEventListener.enable();
} else {
Toast.makeText(this, "Can't DetectOrientation", Toast.LENGTH_LONG).show();
finish();
}
}
private boolean checkifCamera(Context context) {
if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)) {
return true;
} else {
return false;
}
}
public Camera getCameraInstance() {
Camera c = null;
try {
releaseCameraAndPreview();
c = Camera.open();
} catch (Exception e) {
Toast.makeText(this, "Print error" + e.getMessage(), Toast.LENGTH_LONG).show();
}
return c;
}
public void onCompleteInstanceCameraAysnc(Camera camera)
{
mCamera = camera;
surface_view = new CameraHandler(this, mCamera);
params = mCamera.getParameters();
preview.addView(surface_view);
set_image_gallery();
}
public void switchC(View view) {
if (!imageSwitchClicked) {
mSwitch_cam.setAlpha(1.0f);
imageSwitchClicked = true;
} else {
mSwitch_cam.setAlpha(0.5f);
imageSwitchClicked = false;
}
setCameraID();
mCamera = surface_view.switchCamera();
params=mCamera.getParameters();
}
public void flash_onOf(View view) {
if (!mShowFlash) {
params.setFlashMode(Camera.Parameters.FLASH_MODE_ON);
mFlashBut.setAlpha(1.0f);
mShowFlash = true;
} else {
params.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
mFlashBut.setAlpha(0.5f);
mShowFlash = false;
}
}
private void releaseCameraAndPreview() {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.lock();
mCamera.release();
mCamera=null;
}
else
{
Log.e("Cert","Lerts");
}
}
#Override
protected void onResume() {
super.onResume();
}
#Override
protected void onDestroy() {
Log.e("LLL", "Dessssdccc");
super.onDestroy();
try {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.lock();
myOrientationEventListener.disable();
mCamera.release();
mCamera=null;
permCode=15;
} catch (Exception e) {
e.printStackTrace();
}
}
#Override
protected void onPause() {
Log.e("LLL", "Dessssdccc");
super.onPause();
try {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.lock();
myOrientationEventListener.disable();
mCamera.release();
mCamera=null;
permCode=15;
} catch (Exception e) {
e.printStackTrace();
}
}
public void takePH(View view) {
if(mShowFlash && !imageSwitchClicked)
{
params.setFlashMode(Camera.Parameters.FLASH_MODE_ON);
}
params.set("rotation", toRotate);
mCamera.setParameters(params);
mCamera.takePicture(null, null, mPicture);
}
Camera.PictureCallback mPicture = new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
if (pictureFile == null) {
Log.d(TAG, "Error creating media file, check storage permissions: ");
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.close();
} catch (FileNotFoundException e) {
Log.d(TAG, "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d(TAG, "Error accessing file: " + e.getMessage());
}
Intent i=new Intent(getApplicationContext(),ShowOut.class);
i.putExtra("purpose",recievedType);
i.putExtra("img-url",pictureFile.toString());
startActivity(i);
}
};
/**
* Create a file Uri for saving an image or video
*/
private static Uri getOutputMediaFileUri(int type) {
return Uri.fromFile(getOutputMediaFile(type));
}
/**
* Create a File for saving an image or video
*/
private static File getOutputMediaFile(int type) {
// To be safe, you should check that the SDCard is mounted
// using Environment.getExternalStorageState() before doing this.
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES), "Plates");
// This location works best if you want the created images to be shared
// between applications and persist after your app has been uninstalled.
// Create the storage directory if it does not exist
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
Log.d("MyCameraApp", "failed to create directory");
return null;
}
}
// Create a media file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
File mediaFile;
if (type == MEDIA_TYPE_IMAGE) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"IMG_" + timeStamp + ".jpg");
} else if (type == MEDIA_TYPE_VIDEO) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"VID_" + timeStamp + ".mp4");
} else {
return null;
}
return mediaFile;
}
public void handleHeaderText(String type) {
Log.e("Type",type);
headerCameraText.setText("");
if (type.equals("ADD_COVER_PLATES")) {
headerCameraText.setText("Take a cover image for your plate");
}
else if(type.equals("ADD_PROFILE_USER"))
{
imageGallery.setVisibility(View.GONE);
}
else if(type.equals("PLATE_UPLOAD_SINGLETON")) {
headerCameraText.setText("Click an image for a plate");
}
}
public void setCameraID() {
if (currentCameraID == Camera.CameraInfo.CAMERA_FACING_BACK) {
currentCameraID = Camera.CameraInfo.CAMERA_FACING_FRONT;
toRotate = 270;
} else {
currentCameraID = Camera.CameraInfo.CAMERA_FACING_BACK;
toRotate = 90;
}
}
@Override
public boolean onTouchEvent(MotionEvent event) {
// Get the pointer ID
Camera.Parameters params = mCamera.getParameters();
int action = event.getAction();
if (event.getPointerCount() > 1) {
// handle multi-touch events
if (action == MotionEvent.ACTION_POINTER_DOWN) {
mDist = getFingerSpacing(event);
} else if (action == MotionEvent.ACTION_MOVE && params.isZoomSupported()) {
mCamera.cancelAutoFocus();
handleZoom(event, params);
}
} else {
// handle single touch events
if (action == MotionEvent.ACTION_UP) {
handleFocus(event, params);
}
}
return true;
}
private void handleZoom(MotionEvent event, Camera.Parameters params) {
int maxZoom = params.getMaxZoom();
int zoom = params.getZoom();
double newDist = getFingerSpacing(event);
if (newDist > mDist) {
//zoom in
if (zoom < maxZoom)
zoom++;
} else if (newDist < mDist) {
//zoom out
if (zoom > 0)
zoom--;
}
mDist = newDist;
params.setZoom(zoom);
mCamera.setParameters(params);
}
public void handleFocus(MotionEvent event, Camera.Parameters params) {
int pointerId = event.getPointerId(0);
int pointerIndex = event.findPointerIndex(pointerId);
// Get the pointer's current position
float x = event.getX(pointerIndex);
float y = event.getY(pointerIndex);
List<String> supportedFocusModes = params.getSupportedFocusModes();
if (supportedFocusModes != null && supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
mCamera.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean b, Camera camera) {
// currently set to auto-focus on single touch
}
});
}
}
/**
* Determine the space between the first two fingers
*/
private double getFingerSpacing(MotionEvent event) {
// ...
float x = event.getX(0) - event.getX(1);
float y = event.getY(0) - event.getY(1);
double pres;
pres = Math.sqrt(x * x + y * y);
return pres;
}
public void set_image_gallery() {
// Find the last picture
String[] projection = new String[]{
MediaStore.Images.ImageColumns._ID,
MediaStore.Images.ImageColumns.DATA,
MediaStore.Images.ImageColumns.BUCKET_DISPLAY_NAME,
MediaStore.Images.ImageColumns.DATE_TAKEN,
MediaStore.Images.ImageColumns.MIME_TYPE
};
final Cursor cursor = getContentResolver()
.query(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, projection, null,
null,MediaStore.Images.ImageColumns._ID + " DESC");
// Put it in the image view
if (cursor.moveToFirst()) {
String imageLocation = cursor.getString(1);
File imageFile = new File(imageLocation);
if (imageFile.exists()) { // TODO: is there a better way to do this?
Bitmap bm=decodeFile(imageFile);
imageGallery.setImageBitmap(bm);
}
}
cursor.close();
}
public Bitmap decodeFile(File f) {
try {
//Decode image size
BitmapFactory.Options o = new BitmapFactory.Options();
o.inJustDecodeBounds = true;
BitmapFactory.decodeStream(new FileInputStream(f), null, o);
//The new size we want to scale to
final int REQUIRED_SIZE = 490;
//Find the correct scale value. It should be the power of 2.
int scale = 1;
while (o.outWidth / scale / 2 >= REQUIRED_SIZE && o.outHeight / scale / 2 >= REQUIRED_SIZE)
scale *= 2;
//Decode with inSampleSize
BitmapFactory.Options o2 = new BitmapFactory.Options();
o2.inSampleSize = scale;
return BitmapFactory.decodeStream(new FileInputStream(f), null, o2);
} catch (FileNotFoundException e) {
}
return null;
}
public void imagePick(View view)
{
Intent photoPickerIntent = new Intent(Intent.ACTION_PICK);
photoPickerIntent.setType("image/*");
startActivityForResult(photoPickerIntent, RESULT_LOAD_IMAGE);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == RESULT_LOAD_IMAGE && resultCode == RESULT_OK && null != data) {
Uri selectedImage = data.getData();
String[] filePathColumn = {MediaStore.Images.Media.DATA};
Cursor cursor = getContentResolver().query(selectedImage,
filePathColumn, null, null, null);
cursor.moveToFirst();
int columnIndex = cursor.getColumnIndex(filePathColumn[0]);
String picturePath = cursor.getString(columnIndex);
cursor.close();
Intent transfer=null;
if(recievedType.equals("ADD_COVER_PLATES")) {
transfer = new Intent(this, create_plates.class);
}
else if(recievedType.equals("PLATE_UPLOAD_SINGLETON"))
{
transfer=new Intent(this,plate_select_upload.class);
}
transfer.putExtra("imagUrl",picturePath);
startActivity(transfer);
}
}
}
Camera Handler Code
package com.example.skmishra.plates;
import android.content.Context;
import android.hardware.Camera;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import java.io.IOException;
/**
* Created by skmishra on 12/28/2015.
*/
public class CameraHandler extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder mHolder;
private Camera mCamera=null;
public int currentCameraID=0;
public CameraHandler(Context context,Camera camera) {
super(context);
mCamera=camera;
mHolder=getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_GPU);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
if(mCamera==null)
{
mCamera=Camera.open();
}
mCamera.setPreviewDisplay(holder);
Camera.Parameters p = mCamera.getParameters();
}
catch (IOException e)
{
Log.d("--DS", "Error setting camera preview: " + e.getMessage());
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
fixOr();
if (mHolder.getSurface() == null) {
// preview surface does not exist
return;
}
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
} catch (Exception e){
Log.d("--DS", "Error starting camera preview: " + e.getMessage());
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
// TODO Auto-generated method stub
mCamera.release();
mCamera = null;
}
public void fixOr()
{
mCamera.stopPreview();
mCamera.setDisplayOrientation(90);
mCamera.startPreview();
}
public Camera switchCamera() {
mCamera.stopPreview();
mCamera.release();
if(currentCameraID==Camera.CameraInfo.CAMERA_FACING_BACK)
{
currentCameraID = Camera.CameraInfo.CAMERA_FACING_FRONT;
}
else
{
currentCameraID=Camera.CameraInfo.CAMERA_FACING_BACK;
}
mCamera=Camera.open(currentCameraID);
fixOr();
try {
mCamera.setPreviewDisplay(mHolder);
} catch (IOException e) {
e.printStackTrace();
}
mCamera.startPreview();
return mCamera;
}
}
** UPDATE **
StackTrace
Process: com.example.skmishra.plates, PID: 10575
java.lang.RuntimeException: Fail to connect to camera service
at android.hardware.Camera.<init>(Camera.java:545)
at android.hardware.Camera.open(Camera.java:403)
at com.example.skmishra.plates.CameraHandler.surfaceCreated(CameraHandler.java:35)
at android.view.SurfaceView.updateWindow(SurfaceView.java:599)
at android.view.SurfaceView.onWindowVisibilityChanged(SurfaceView.java:243)
at android.view.View.dispatchWindowVisibilityChanged(View.java:9034)
at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:1275)
at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:1275)
at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:1275)
at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:1275)
at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:1275)
at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:1319)
at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1062)
at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:5873)
at android.view.Choreographer$CallbackRecord.run(Choreographer.java:767)
at android.view.Choreographer.doCallbacks(Choreographer.java:580)
at android.view.Choreographer.doFrame(Choreographer.java:550)
at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:753)
at android.os.Handler.handleCallback(Handler.java:739)
at android.os.Handler.dispatchMessage(Handler.java:95)
at android.os.Looper.loop(Looper.java:135)
at android.app.ActivityThread.main(ActivityThread.java:5753)
at java.lang.reflect.Method.invoke(Native Method)
at java.lang.reflect.Method.invoke(Method.java:372)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1405)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1200)
After a lot of research, I finally found out what the problem was. The problem was with the FrameLayout: I had to remove it in onPause and recreate it in onResume.
@Override
protected void onResume() {
super.onResume();
mCamAsync = new CameraAsync(this);//Async task to get the camera instance
mCamAsync.execute();
}
@Override
protected void onPause() {
super.onPause();
releaseCameraAndPreview();
preview = (FrameLayout) findViewById(R.id.camera_preview);
preview.removeViewAt(0);
}
** EDIT **
I also removed the execution of CameraAsync from onCreate, which means I instantiate the camera only in onResume.
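In other words, onCreate now only wires up the views, roughly like this (a sketch of the change described above; the remaining view lookups and the orientation listener stay as they were):
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.camera);
    imageGallery = (ImageView) findViewById(R.id.select_gallery);
    preview = (FrameLayout) findViewById(R.id.camera_preview);
    // ... the other findViewById calls and the OrientationEventListener as before ...
    // No CameraAsync here any more; the camera is created only in onResume.
}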
I am new to Android development and I am trying out the Camera with a SurfaceTexture. The onFrameAvailable() callback is never called. Please suggest a solution; the code is below.
What is missing here? I am not sure whether I have made the correct call to setOnFrameAvailableListener().
package com.example.cameratest;
import com.example.test.R;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.Menu;
import android.view.View;
import android.graphics.SurfaceTexture;
import android.graphics.SurfaceTexture.OnFrameAvailableListener;
import android.hardware.Camera;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.opengl.*;
import android.util.Log;
import android.view.Surface;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.concurrent.locks.ReentrantLock;
public class MainActivity extends Activity implements OnFrameAvailableListener {
private static final String TAG = "CameraToMpegTest";
private static final boolean VERBOSE = true; // lots of logging
// where to put the output file (note: /sdcard requires WRITE_EXTERNAL_STORAGE permission)
private static final long DURATION_SEC = 8;
// camera state
private Camera mCamera;
private static SurfaceTexture mSurfaceTexture;
private int[] mGlTextures = null;
private Object mFrameSyncObject = new Object();
private boolean mFrameAvailable = false;
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
}
public void startCamera(View v) {
try {
this.initCamera(0);
this.StartCamera();
} catch (Throwable throwable) {
throwable.printStackTrace();
}
}
private void StartCamera() {
try {
mCamera.startPreview();
long startWhen = System.nanoTime();
long desiredEnd = startWhen + DURATION_SEC * 1000000000L;
int frameCount = 0;
while (System.nanoTime() < desiredEnd) {
// Feed any pending encoder output into the muxer.
awaitNewImage();
}
} finally {
// release everything we grabbed
releaseCamera();
}
}
/**
* Stops camera preview, and releases the camera to the system.
*/
private void releaseCamera() {
if (VERBOSE) Log.d(TAG, "releasing camera");
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
private void initCamera(int cameraId) {
mCamera = Camera.open(cameraId);
if (mCamera == null) {
Log.d(TAG, "No front-facing camera found; opening default");
mCamera = Camera.open(); // opens first back-facing camera
}
if (mCamera == null) {
throw new RuntimeException("Unable to open camera");
}
Camera.Parameters parms = mCamera.getParameters();
parms.setPreviewSize(640, 480);
mGlTextures = new int[1];
GLES20.glGenTextures(1, mGlTextures, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mGlTextures[0]);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_CLAMP_TO_EDGE);
mSurfaceTexture = new SurfaceTexture(mGlTextures[0]);
try {
mCamera.setPreviewTexture(mSurfaceTexture);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
mSurfaceTexture.setOnFrameAvailableListener(MainActivity.this);
}
public void awaitNewImage() {
final int TIMEOUT_MS = 4500;
synchronized (mFrameSyncObject) {
while (!mFrameAvailable) {
try {
// Wait for onFrameAvailable() to signal us. Use a timeout to avoid
// stalling the test if it doesn't arrive.
if (VERBOSE) Log.i(TAG, "Waiting for Frame in Thread");
mFrameSyncObject.wait(TIMEOUT_MS);
if (!mFrameAvailable) {
// TODO: if "spurious wakeup", continue while loop
throw new RuntimeException("Camera frame wait timed out");
}
} catch (InterruptedException ie) {
// shouldn't happen
throw new RuntimeException(ie);
}
}
mFrameAvailable = false;
}
}
@Override
public void onFrameAvailable(SurfaceTexture st) {
if (VERBOSE) Log.d(TAG, "new frame available");
synchronized (mFrameSyncObject) {
if (mFrameAvailable) {
throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
}
mFrameAvailable = true;
mFrameSyncObject.notifyAll();
}
}
}
I think you have to call SurfaceTexture.updateTexImage() after your onFrameAvailable() callback, to tell the camera "I've used your last frame, give me another one."
(Sorry, my English cannot provide a better explanation.)
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
...
surfaceTexture.updateTexImage();
}
I had the same problem; it seems I had forgotten to call updateTexImage().
Use the method setOnFrameAvailableListener(@Nullable OnFrameAvailableListener listener, @Nullable Handler handler) instead of setOnFrameAvailableListener(@Nullable OnFrameAvailableListener listener).
In your case, you can modify the code as follows:
HandlerThread frameUpdateThread = new HandlerThread("frameUpdateThread");
frameUpdateThread.start();
mSurfaceTexture.setOnFrameAvailableListener(MainActivity.this, new Handler(frameUpdateThread.getLooper()));
In my understanding, onFrameAvailable should be delivered on a separate thread. With that I am not facing the issue; also make sure updateTexImage() is called after the frame is received.
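Putting those two points together, a minimal sketch (it assumes the fields from the question, mSurfaceTexture and the activity as the listener, API 21+ for the two-argument setOnFrameAvailableListener, and imports for android.os.Handler and android.os.HandlerThread):
// Deliver onFrameAvailable() on its own thread rather than the main thread,
// which in the question is blocked inside the while loop of StartCamera().
HandlerThread frameThread = new HandlerThread("frameUpdateThread");
frameThread.start();
mSurfaceTexture.setOnFrameAvailableListener(this, new Handler(frameThread.getLooper()));

// After a frame has been signalled (for example at the end of awaitNewImage(),
// on the thread that owns the GL context), consume it so the next one can arrive:
mSurfaceTexture.updateTexImage();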
I made a Bluetooth printer application (Android based) for printing some text on a Datecs DPP-350 printer. The program uses Datecs' external library, including the BluetoothConnector and RFComm packages. It works nicely; here's the code:
package com.myapp.MobilePrinter1;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import com.djarum.MobilePrinter1.BluetoothConnector;
import com.datecs.api.card.FinancialCard;
import com.datecs.api.printer.Printer;
import com.datecs.api.printer.PrinterInformation;
import com.datecs.api.printer.ProtocolAdapter;
import com.datecs.api.printer.ProtocolAdapter.Channel;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.content.DialogInterface.OnDismissListener;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import android.os.Handler;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
public class MobilePrinter1Activity extends Activity {
public static final String CONNECTION_STRING = "connection_string";
private final Handler mHandler = new Handler();
private final Thread mConnectThread = new Thread() {
@Override
public void run() {
String connectionString = "bth://00:01:90:E6:40:52";
showProgress("Connecting");
if (connectionString.startsWith("bth://")) {
String address = connectionString.substring(6);
connectBth(address);
} else {
throw new IllegalArgumentException("Unsupported connection string");
}
dismissProgress();
}
void connectBth(String address) {
//setPrinterInfo(R.drawable.help, address);
try {
mBthConnector = BluetoothConnector.getConnector(MobilePrinter1Activity.this);
mBthConnector.connect(address);
mPrinter = getPrinter(
mBthConnector.getInputStream(),
mBthConnector.getOutputStream());
} catch (IOException e) {
//error(R.drawable.bluetooth, e.getMessage());
return;
}
mPrinterInfo = getPrinterInfo();
}
Printer getPrinter(InputStream in, OutputStream out) throws IOException {
ProtocolAdapter adapter = new ProtocolAdapter(in, out);
Printer printer = null;
if (adapter.isProtocolEnabled()) {
Channel channel = adapter.getChannel(ProtocolAdapter.CHANNEL_PRINTER);
InputStream newIn = channel.getInputStream();
OutputStream newOut = channel.getOutputStream();
printer = new Printer(newIn, newOut);
} else {
printer = new Printer(in, out);
}
return printer;
}
PrinterInformation getPrinterInfo() {
PrinterInformation pi = null;
try {
pi = mPrinter.getInformation();
//setPrinterInfo(R.drawable.printer, pi.getName());
} catch (IOException e) {
e.printStackTrace();
}
return pi;
}
};
private BluetoothConnector mBthConnector;
private Printer mPrinter;
private PrinterInformation mPrinterInfo;
private ProgressDialog mProgressDialog;
private BluetoothConnector mConnector;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
try {
mConnector = BluetoothConnector.getConnector(this);
} catch (IOException e) {
Toast.makeText(this, e.getMessage(), Toast.LENGTH_SHORT).show();
finish();
}
findViewById(R.id.button1).setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
printText();
}
});
findViewById(R.id.button2).setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
printBarcode();
}
});
findViewById(R.id.button3).setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
printImage();
}
});
}
public void printText() {
new Thread() {
@Override
public void run() {
//showProgress(R.string.printing_text);
doPrintText2();
dismissProgress();
}
}.start();
}
public void printBarcode() {
new Thread() {
@Override
public void run() {
//showProgress(R.string.printing_text);
doPrintBarcode();
dismissProgress();
}
}.start();
}
public void printImage() {
new Thread() {
@Override
public void run() {
//showProgress(R.string.printing_text);
doPrintImage();
dismissProgress();
}
}.start();
}
@Override
protected void onStart() {
super.onStart();
mConnectThread.start();
}
@Override
protected void onStop() {
super.onStop();
if (mBthConnector != null) {
try {
mBthConnector.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private void showProgress(final String text) {
mHandler.post(new Runnable() {
@Override
public void run() {
mProgressDialog = ProgressDialog.show(
MobilePrinter1Activity.this,
"Please wait",
text,
true);
}
});
}
private void showProgress(int resId) {
showProgress(getString(resId));
}
private void dismissProgress() {
mHandler.post(new Runnable() {
@Override
public void run() {
mProgressDialog.dismiss();
}
});
}
private void doPrintSelfTest() {
try {
mPrinter.printSelfTest();
} catch (IOException e) {
//error(R.drawable.selftest, getString(R.string.failed_print_self_test) + ". " +
//e.getMessage());
}
}
private void doPrintText2() {
EditText EditText1;
EditText1=(EditText)findViewById(R.id.editText1);
String temp;
try {
mPrinter.reset();
mPrinter.printTaggedText(EditText1.getText().toString());
//mPrinter.printTaggedText("Testing Testing!!");
mPrinter.feedPaper(110);
} catch (IOException e) {
//error(R.drawable.text, getString(R.string.failed_print_text) + ". " +
//e.getMessage());
}
}
private void doPrintBarcode() {
EditText EditText1;
EditText1=(EditText)findViewById(R.id.editText1);
try {
mPrinter.reset();
mPrinter.setBarcode(Printer.ALIGN_CENTER, false, 2, Printer.HRI_BOTH, 100);
mPrinter.printBarcode(Printer.BARCODE_CODE128, EditText1.getText().toString());
mPrinter.feedPaper(38);
mPrinter.feedPaper(110);
} catch (IOException e) {
//error(R.drawable.barcode, getString(R.string.failed_print_barcode) + ". " +
//e.getMessage());
}
}
private void doPrintImage() {
Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.drawable.logo_djarum);
final int width = bitmap.getWidth();
final int height = bitmap.getHeight();
final int[] argb = new int[width * height];
bitmap.getPixels(argb, 0, width, 0, 0, width, height);
try {
mPrinter.reset();
mPrinter.printImage(argb, width, height, Printer.ALIGN_LEFT, true);
mPrinter.feedPaper(110);
} catch (IOException e) {
Toast.makeText(MobilePrinter1Activity.this, e.getMessage(), 1).show();
}
}
private void dialog(final int id, final String title, final String msg) {
mHandler.post(new Runnable() {
@Override
public void run() {
AlertDialog dlg = new AlertDialog.Builder(MobilePrinter1Activity.this)
.setTitle(title)
.setMessage(msg)
.create();
dlg.setIcon(id);
dlg.show();
}
});
}
private void error(final int resIconId, final String message) {
mHandler.post(new Runnable() {
@Override
public void run() {
AlertDialog dlg = new AlertDialog.Builder(MobilePrinter1Activity.this)
.setTitle("Error")
.setMessage(message)
.create();
dlg.setIcon(resIconId);
dlg.setOnDismissListener(new OnDismissListener() {
@Override
public void onDismiss(DialogInterface dialog) {
MobilePrinter1Activity.this.finish();
}
});
dlg.show();
}
});
}
private void setPrinterInfo(final int resIconId, final String text) {
mHandler.post(new Runnable() {
@Override
public void run() {
//((ImageView)findViewById(R.id.icon)).setImageResource(resIconId);
//((TextView)findViewById(R.id.name)).setText(text);
}
});
}
}
The main problem now is how to call this program from PhoneGap. I've tried using DroidGap, but it gives me an error when I start the printer's thread. Does anyone know how to solve this? Many thanks.
I don't think you can invoke many native APIs from the standard Android browser (except a few such as location and contacts), but what is possible is the other way round: embed a WebView in a native app (which can host your thread code above) and invoke that code from a JavaScript event using the JavascriptInterface APIs of the Android platform, which is pretty straightforward.
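A minimal sketch of that approach, with hypothetical names (the R.id.webview id and the asset page are placeholders) and reusing printText() from the activity above; it needs android.webkit.WebView and android.webkit.JavascriptInterface:
// Bridge object whose annotated public methods become callable from page JavaScript.
class PrinterBridge {
    @JavascriptInterface            // required for exposure on API 17+
    public void print() {
        printText();                // runs the existing printing thread
    }
}

WebView webView = (WebView) findViewById(R.id.webview);   // hypothetical layout id
webView.getSettings().setJavaScriptEnabled(true);
webView.addJavascriptInterface(new PrinterBridge(), "PrinterBridge");
webView.loadUrl("file:///android_asset/index.html");      // hypothetical asset page
// In that page: <button onclick="PrinterBridge.print()">Print</button>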