How to set camera live feed as wallpaper in Android - Java

I am trying to make an Android application that sets the camera's live feed as the wallpaper. I am almost done with the coding. When I click the set-wallpaper button, the live wallpaper chooser opens, but as soon as I select my app in it, the application crashes. I know apps like this already exist, but it is a client's demand.
WelcomeActivity.java
package com.wallpaper.transparenthighdefcamera;
import android.app.Activity;
import android.content.ComponentName;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ImageButton;
public class WelcomeActivity extends Activity {
ImageButton imgButt;
public WelcomeActivity()
{
}
protected void onCreate(Bundle bundle)
{
super.onCreate(bundle);
setContentView(R.layout.activity_main);
imgButt=(ImageButton) findViewById(R.id.setButton);
imgButt.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View arg0) {
// TODO Auto-generated method stub
Intent intent = new Intent();
if (android.os.Build.VERSION.SDK_INT >= 16)
{
intent.setAction("android.service.wallpaper.CHANGE_LIVE_WALLPAPER");
intent.putExtra("android.service.wallpaper.extra.LIVE_WALLPAPER_COMPONENT", new ComponentName("com.wallpaper.transparenthighdefcamera", "com.wallpaper.transparenthighdefcamera.TransparentWallpaperService"));
} else
intent.setAction("android.service.wallpaper.LIVE_WALLPAPER_CHOOSER");
intent.putExtra("android.service.wallpaper.extra.LIVE_WALLPAPER_COMPONENT", new ComponentName("com.wallpaper.transparenthighdefcamera", "com.wallpaper.transparenthighdefcamera.TransparentWallpaperService"));
//}
startActivity(intent);
}
});
}
}
TransparentWallpaperService.java:
package com.wallpaper.transparenthighdefcamera;
import com.wallpaper.transparenthighdefcamera.GenericaCamera;
import android.service.wallpaper.WallpaperService;
import android.view.SurfaceHolder;
public class TransparentWallpaperService extends WallpaperService {
private class MyWallpaperEngine extends android.service.wallpaper.WallpaperService.Engine
{
GenericaCamera GC;
final TransparentWallpaperService this$0;
public void onCreate(SurfaceHolder surfaceholder)
{
if (GC == null)
{
try
{
if (TransparentWallpaperService.existing != null)
{
TransparentWallpaperService.existing.destroyExisting();
}
}
catch (Exception exception) { }
GC = new GenericaCamera(surfaceholder, getBaseContext());
TransparentWallpaperService.existing = GC;
}
super.onCreate(surfaceholder);
}
private MyWallpaperEngine()
{
super();
this$0 = TransparentWallpaperService.this;
}
MyWallpaperEngine(MyWallpaperEngine mywallpaperengine)
{
this();
}
}
public static GenericaCamera existing;
public TransparentWallpaperService()
{
}
public void onCreate()
{
super.onCreate();
}
public android.service.wallpaper.WallpaperService.Engine onCreateEngine()
{
return new MyWallpaperEngine(null);
}
public void onDestroy()
{
super.onDestroy();
}
}
GenericaCamera.java:
package com.wallpaper.transparenthighdefcamera;
import java.util.Iterator;
import android.content.Context;
import android.hardware.Camera;
import android.view.SurfaceHolder;
import android.widget.Toast;
public class GenericaCamera implements android.view.SurfaceHolder.Callback {
private static boolean isPreviewRunning = false;
private Camera cameraDevice;
private SurfaceHolder cameraSurfaceHolder;
private Context context;
public GenericaCamera(SurfaceHolder surfaceholder, Context context1)
{
cameraDevice = null;
cameraSurfaceHolder = null;
context = context1;
cameraSurfaceHolder = surfaceholder;
cameraSurfaceHolder.setType(3);
cameraSurfaceHolder.addCallback(this);
}
private static android.hardware.Camera.Size getBestPreviewSize(int i, int j, android.hardware.Camera.Parameters parameters)
{
android.hardware.Camera.Size size = null;
Iterator iterator = parameters.getSupportedPreviewSizes().iterator();
do
{
android.hardware.Camera.Size size1;
do
{
if (!iterator.hasNext())
{
return size;
}
size1 = (android.hardware.Camera.Size)iterator.next();
} while (size1.width > i || size1.height > j);
if (size == null)
{
size = size1;
} else
{
int k = size.width * size.height;
if (size1.width * size1.height > k)
{
size = size1;
}
}
} while (true);
}
public void destroyExisting()
{
if (cameraDevice != null)
{
cameraDevice.stopPreview();
cameraDevice.setPreviewCallback(null);
cameraDevice.release();
cameraDevice = null;
}
isPreviewRunning = false;
}
public void surfaceChanged(SurfaceHolder surfaceholder, int i, int j, int k)
{
if (cameraDevice != null)
{
if (isPreviewRunning)
{
cameraDevice.stopPreview();
}
android.hardware.Camera.Parameters parameters = cameraDevice.getParameters();
android.hardware.Camera.Size size = getBestPreviewSize(j, k, parameters);
if (size != null)
{
parameters.setPreviewSize(size.width, size.height);
}
cameraDevice.setParameters(parameters);
cameraDevice.startPreview();
isPreviewRunning = true;
}
}
public void surfaceCreated(SurfaceHolder surfaceholder)
{
try
{
if (cameraDevice == null)
{
cameraDevice = Camera.open();
cameraDevice.setDisplayOrientation(90);
cameraDevice.setPreviewDisplay(cameraSurfaceHolder);
}
cameraDevice.startPreview();
return;
}
catch (Exception exception)
{
Toast.makeText(context, "Can't create preview!", 1).show();
exception.printStackTrace();
return;
}
}
public void surfaceDestroyed(SurfaceHolder surfaceholder)
{
if (cameraDevice == null)
{
return;
} else
{
destroyExisting();
return;
}
}
}
AndroidManifest.xml
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.wallpaper.transparenthighdefcamera"
android:versionCode="1"
android:versionName="1.0" >
<uses-sdk
android:minSdkVersion="8"
android:targetSdkVersion="18" />
<supports-screens android:anyDensity="true" android:smallScreens="true" android:normalScreens="true" android:largeScreens="true" android:xlargeScreens="true" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.SET_WALLPAPER" />
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.software.live_wallpaper" android:required="true" />
<application
android:allowBackup="true"
android:icon="#drawable/ic_launcher"
android:label="#string/app_name"
android:theme="#style/AppTheme" >
<activity
android:name="com.wallpaper.transparenthighdefcamera.WelcomeActivity"
android:label="#string/app_name" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<service android:name=".services.TransparentCameraHelperService" />
<service android:name=".services.TransparentWallpaperService" android:permission="android.permission.BIND_WALLPAPER" android:enabled="true">
<intent-filter>
<action android:name="android.service.wallpaper.WallpaperService" />
</intent-filter>
<meta-data android:name="android.service.wallpaper" android:resource="@xml/wallpaper" />
</service>
</application>
</manifest>

Related

RuntimeException: startPreview failed in while loop

I know there are similar questions already, but mine is a bit different. I am working on an Android app that uses a Service to record video in the background. The objective is to record video and save a clip to a local directory every 10 seconds or so, so I run record_start and record_stop in a loop with a 10-second delay. The issue is that I hit the error after the 3rd or 4th iteration: the loop successfully starts and stops the MediaRecorder two or three times and stores 10-second clips in the folder, but after that I get the error below.
Thanks.
Any kind of help is appreciated.
Here's the error
I/MediaRecorderJNI: prepare: surface=0x7911d86000
D/START: >>>>>>>>>PREPARED>>>>>>>>>>
D/START: >>>>>>>>>STARTED>>>>>>>>>>
D/STOP: >>>>>>>>>>>>STOPPED>>>>>>>>>>
W/.workingthreads: type=1400 audit(0.0:38226): avc: denied { read } for name="u:object_r:vendor_camera_prop:s0" dev="tmpfs" ino=861 scontext=u:r:untrusted_app_25:s0:c512,c768 tcontext=u:object_r:vendor_camera_prop:s0 tclass=file permissive=0
E/libc: Access denied finding property "vendor.camera.aux.packagelist"
I/MediaRecorderJNI: prepare: surface=0x7911d86000
D/START: >>>>>>>>>PREPARED>>>>>>>>>>
D/START: >>>>>>>>>STARTED>>>>>>>>>>
D/STOP: >>>>>>>>>>>>STOPPED>>>>>>>>>>
W/.workingthreads: type=1400 audit(0.0:38237): avc: denied { read } for name="u:object_r:vendor_camera_prop:s0" dev="tmpfs" ino=861 scontext=u:r:untrusted_app_25:s0:c512,c768 tcontext=u:object_r:vendor_camera_prop:s0 tclass=file permissive=0
E/libc: Access denied finding property "vendor.camera.aux.packagelist"
D/AndroidRuntime: Shutting down VM
E/AndroidRuntime: FATAL EXCEPTION: main
Process: com.example.workingthreads, PID: 24210
java.lang.RuntimeException: Unable to start service com.example.workingthreads.RecorderService@290e84c with Intent { flg=0x10000000 cmp=com.example.workingthreads/.RecorderService }: java.lang.RuntimeException: startPreview failed
at android.app.ActivityThread.handleServiceArgs(ActivityThread.java:3713)
at android.app.ActivityThread.access$1600(ActivityThread.java:202)
at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1696)
at android.os.Handler.dispatchMessage(Handler.java:107)
at android.os.Looper.loop(Looper.java:198)
at android.app.ActivityThread.main(ActivityThread.java:6729)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
Caused by: java.lang.RuntimeException: startPreview failed
at android.hardware.Camera.startPreview(Native Method)
at com.example.workingthreads.RecorderService.startRecording(RecorderService.java:133)
at com.example.workingthreads.RecorderService.looprun(RecorderService.java:88)
at com.example.workingthreads.RecorderService.onStartCommand(RecorderService.java:68)
at android.app.ActivityThread.handleServiceArgs(ActivityThread.java:3694)
at android.app.ActivityThread.access$1600(ActivityThread.java:202) 
at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1696) 
at android.os.Handler.dispatchMessage(Handler.java:107) 
at android.os.Looper.loop(Looper.java:198) 
at android.app.ActivityThread.main(ActivityThread.java:6729) 
at java.lang.reflect.Method.invoke(Native Method) 
at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493) 
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858) 
I/Process: Sending signal. PID: 24210 SIG: 9
MainActivity.java
public class MainActivity extends Activity implements SurfaceHolder.Callback{
private static final int REQUEST_EXTERNAL_STORAGE = 1;
private static final int REQUEST_ID_MULTIPLE_PERMISSIONS = 2;
private static final String[] PERMISSIONS_STORAGE = {
Manifest.permission.READ_EXTERNAL_STORAGE,
Manifest.permission.WRITE_EXTERNAL_STORAGE,
RECORD_AUDIO,
CAMERA,
};
public SurfaceView msurfaceView ;
public SurfaceHolder msurfaceHolder;
Surface final_surface;
@Override
public void onCreate(Bundle savedInstanceState) {
File folder = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/" + "VidRecord");
folder.mkdirs();
verifyStoragePermissions(MainActivity.this);
if (!Permissions()) {
Log.d("PERMISSION>>>>>>>>>>", ">>>>>>>>>>>>>>DENIED>>>>>>>>");
}
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
msurfaceView = (SurfaceView) findViewById(R.id.surface_camera);
msurfaceHolder = msurfaceView.getHolder();
msurfaceHolder.addCallback(this);
msurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void startRecordingService(View v) {
Intent serviceIntent = new Intent(this, DoingTwoThings.class);
ContextCompat.startForegroundService(this, serviceIntent);
}
public static void verifyStoragePermissions(Activity activity) {
// Check if we have write permission
int permission = ActivityCompat.checkSelfPermission(activity, Manifest.permission.WRITE_EXTERNAL_STORAGE);
if (permission != PackageManager.PERMISSION_GRANTED) {
// We don't have permission so prompt the user
ActivityCompat.requestPermissions(
activity,
PERMISSIONS_STORAGE,
REQUEST_EXTERNAL_STORAGE
);
}
}
private boolean Permissions(){
int permissionWRITE_EXTERNAL_STORAGE = ContextCompat.checkSelfPermission(this, android.Manifest.permission.WRITE_EXTERNAL_STORAGE);
int permissionRECORD = ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO);
int permissionCAMERA = ContextCompat.checkSelfPermission(this, CAMERA);
List<String> listPermissionsNeeded = new ArrayList<>();
if (permissionWRITE_EXTERNAL_STORAGE != PackageManager.PERMISSION_GRANTED) {
listPermissionsNeeded.add(android.Manifest.permission.WRITE_EXTERNAL_STORAGE);
}
if (permissionRECORD != PackageManager.PERMISSION_GRANTED) {
listPermissionsNeeded.add(android.Manifest.permission.RECORD_AUDIO);
}
if (permissionCAMERA != PackageManager.PERMISSION_GRANTED) {
listPermissionsNeeded.add(CAMERA);
}
if (!listPermissionsNeeded.isEmpty()) {
ActivityCompat.requestPermissions(this, listPermissionsNeeded.toArray(new String[listPermissionsNeeded.size()]), REQUEST_ID_MULTIPLE_PERMISSIONS);
return false;
}
return true;
}
@Override
public void surfaceCreated(@NonNull SurfaceHolder surfaceHolder) {
// final_surface = msurfaceHolder.getSurface();
}
@Override
public void surfaceChanged(@NonNull SurfaceHolder surfaceHolder, int i, int i1, int i2) {
}
@Override
public void surfaceDestroyed(@NonNull SurfaceHolder surfaceHolder) {
}
}
RecorderService.java
public class RecorderService extends Service {
private static final String TAG = "RecorderService";
private SurfaceView mSurfaceView;
private SurfaceHolder mSurfaceHolder;
private static Camera mServiceCamera;
private boolean mRecordingStatus;
private MediaRecorder mMediaRecorder;
String VideoSavePathInDevice = null;
Random random;
String RandomAudioFileName = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
@Override
public void onCreate() {
mRecordingStatus = false;
// mServiceCamera = CameraRecorder.mCamera;
mServiceCamera = Camera.open(1);
mSurfaceView = CameraRecorder.mSurfaceView;
mSurfaceHolder = CameraRecorder.mSurfaceHolder;
super.onCreate();
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
super.onStartCommand(intent, flags, startId);
if (mRecordingStatus == false)
// startRecording();
looprun();
return START_STICKY;
}
@Override
public void onDestroy() {
stopRecording();
mRecordingStatus = false;
super.onDestroy();
}
public void looprun() {
while (true) {
SystemClock.sleep(500);
startRecording();
SystemClock.sleep(3000);
stopRecording();
}
}
public boolean startRecording() {
try {
Toast.makeText(getBaseContext(), "Recording Started", Toast.LENGTH_SHORT).show();
mServiceCamera = Camera.open(1);
Camera.Parameters params = mServiceCamera.getParameters();
// CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
mServiceCamera.setParameters(params);
Camera.Parameters p = mServiceCamera.getParameters();
final List<Camera.Size> listPreviewSize = p.getSupportedPreviewSizes();
Camera.Size optimalPreviewSize = getOptimalPreviewSize(listPreviewSize, 400, 400);
//
// for (Camera.Size size : listPreviewSize) {
// Log.i(TAG, String.format("Supported Preview Size (%d, %d)", size.width, size.height));
// }
// Camera.Size previewSize = listPreviewSize.get(8);
// Log.d(">>>>>PREVIEW SIZE>>>>>", String.valueOf(optimalPreviewSize));
if (optimalPreviewSize != null) {
p.setPreviewSize(optimalPreviewSize.width, optimalPreviewSize.height);
Log.d(">>>>>PREVIEW SIZE>>>>>", String.valueOf(optimalPreviewSize));
mServiceCamera.setParameters(p);
}
// p.setPreviewSize(previewSize.width, previewSize.height);
// mServiceCamera.setParameters(p);
try {
mServiceCamera.setPreviewDisplay(mSurfaceHolder);
mServiceCamera.startPreview();
// try {
// mServiceCamera.startPreview();
// }catch (Exception e)
// {
// Log.e(TAG, e.getMessage());
// }
}
catch (IOException e) {
Log.e(TAG, e.getMessage());
e.printStackTrace();
}
String name = CreateRandomAudioFileName(5);
VideoSavePathInDevice =
Environment.getExternalStorageDirectory().getAbsolutePath() + "/" + "VidRecord" + "/" +
name + "VideoRecording.mp4";
mServiceCamera.unlock();
mMediaRecorder = new MediaRecorder();
mMediaRecorder.setCamera(mServiceCamera);
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setOutputFile(VideoSavePathInDevice);
mMediaRecorder.setPreviewDisplay(mSurfaceHolder.getSurface());
mMediaRecorder.prepare();
Log.d("START", ">>>>>>>>>PREPARED>>>>>>>>>>");
SystemClock.sleep(500);
mMediaRecorder.start();
Log.d("START", ">>>>>>>>>STARTED>>>>>>>>>>");
//
// SystemClock.sleep(5000);
//
// mMediaRecorder.stop();
// Log.d("STOP", ">>>>>>>>>STOPPED>>>>>>>>>>");
//
//
// SystemClock.sleep(1000);
// mMediaRecorder.start();
// Log.d("START", ">>>>>>>>>STARTED AGAIN>>>>>>>>>>");
// mRecordingStatus = true;
return true;
}
catch (IllegalStateException e) {
Log.d(TAG," " , e);
e.printStackTrace();
return false;
} catch (IOException e) {
Log.d(TAG, e.getMessage());
e.printStackTrace();
return false;
}
}
public void stopRecording() {
Log.d("STOP", ">>>>>>>>>>>>STOPPED>>>>>>>>>>");
Toast.makeText(getBaseContext(), "Recording Stopped", Toast.LENGTH_SHORT).show();
try {
mServiceCamera.reconnect();
} catch (IOException e) {
e.printStackTrace();
}
try {
mMediaRecorder.stop();
} catch (RuntimeException e) {
Log.d(TAG," " , e);
e.printStackTrace();
}
mMediaRecorder.reset();
mServiceCamera.stopPreview();
mMediaRecorder.release();
mServiceCamera.release();
mServiceCamera = null;
}
public String CreateRandomAudioFileName(int string){
random = new Random();
StringBuilder stringBuilder = new StringBuilder( string );
int i = 0 ;
while(i < string ) {
stringBuilder.append(RandomAudioFileName.
charAt(random.nextInt(RandomAudioFileName.length())));
i++ ;
}
return stringBuilder.toString();
}
private Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes, int width, int height)
{
Camera.Size optimalSize = null;
final double ASPECT_TOLERANCE = 0.1;
double targetRatio = (double) height / width;
// Try to find a size match which suits the whole screen minus the menu on the left.
for (Camera.Size size : sizes)
{
if (size.height != width) continue;
double ratio = (double) size.width / size.height;
if (ratio <= targetRatio + ASPECT_TOLERANCE && ratio >= targetRatio - ASPECT_TOLERANCE)
{
optimalSize = size;
}
}
// If we cannot find the one that matches the aspect ratio, ignore the requirement.
if (optimalSize == null)
{
// TODO : Backup in case we don't get a size.
}
return optimalSize;
}
}
AndroidManifest.xml
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.example.workingthreads">
<uses-permission android:name="android.permission.CAMERA"/>
<uses-permission android:name="android.permission.camera"/>
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.RECORD_VIDEO"/>
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.MANAGE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.WRITE_INTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.STORAGE" />
<uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
<application
android:allowBackup="true"
android:icon="#mipmap/ic_launcher"
android:label="#string/app_name"
android:roundIcon="#mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="#style/Theme.WorkingThreads">
<activity
android:name=".CameraRecorder"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<service android:name=".RecorderService"/>
</application>
</manifest>
activity_main.xml
<RelativeLayout
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<SurfaceView
android:id="#+id/surface_camera"
android:layout_marginTop="50dp"
android:layout_width="200dp"
android:layout_height="100dp"
android:layout_marginLeft="60dp"
android:layout_weight="1" />
<Button
android:id="#+id/btnStart"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="StartVid"
android:onClick="startRecordingService"
android:layout_centerHorizontal="true"
android:layout_marginTop="300dp"
/>
</RelativeLayout>

Pass a new activity as main activity to render first

This is the ClassifierActivity.java file, which is rendered by default:
package org.tensorflow.lite.examples.classification;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.Typeface;
import android.media.ImageReader.OnImageAvailableListener;
import android.os.SystemClock;
import android.util.Size;
import android.util.TypedValue;
import android.widget.Toast;
import java.io.IOException;
import java.util.List;
import org.tensorflow.lite.examples.classification.env.BorderedText;
import org.tensorflow.lite.examples.classification.env.Logger;
import org.tensorflow.lite.examples.classification.tflite.Classifier;
import org.tensorflow.lite.examples.classification.tflite.Classifier.Device;
import org.tensorflow.lite.examples.classification.tflite.Classifier.Model;
public class ClassifierActivity extends CameraActivity implements OnImageAvailableListener {
private static final Logger LOGGER = new Logger();
private static final Size DESIRED_PREVIEW_SIZE = new Size(640, 480);
private static final float TEXT_SIZE_DIP = 10;
private Bitmap rgbFrameBitmap = null;
private long lastProcessingTimeMs;
private Integer sensorOrientation;
public Classifier classifier;
private BorderedText borderedText;
/** Input image size of the model along x axis. */
private int imageSizeX;
/** Input image size of the model along y axis. */
private int imageSizeY;
@Override
protected int getLayoutId() {
return R.layout.camera_connection_fragment;
}
@Override
protected Size getDesiredPreviewFrameSize() {
return DESIRED_PREVIEW_SIZE;
}
@Override
public void onPreviewSizeChosen(final Size size, final int rotation) {
final float textSizePx =
TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
borderedText = new BorderedText(textSizePx);
borderedText.setTypeface(Typeface.MONOSPACE);
recreateClassifier(getModel(), getDevice(), getNumThreads());
if (classifier == null) {
LOGGER.e("No classifier on preview!");
return;
}
previewWidth = size.getWidth();
previewHeight = size.getHeight();
sensorOrientation = rotation - getScreenOrientation();
LOGGER.i("Camera orientation relative to screen canvas: %d", sensorOrientation);
LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
}
@Override
protected void processImage() {
rgbFrameBitmap.setPixels(getRgbBytes(), 0, previewWidth, 0, 0, previewWidth, previewHeight);
final int cropSize = Math.min(previewWidth, previewHeight);
runInBackground(
new Runnable() {
@Override
public void run() {
if (classifier != null) {
final long startTime = SystemClock.uptimeMillis();
final List<Classifier.Recognition> results =
classifier.recognizeImage(rgbFrameBitmap, sensorOrientation);
lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
LOGGER.v("Detect: %s", results);
runOnUiThread(
new Runnable() {
@Override
public void run() {
showResultsInBottomSheet(results);
showFrameInfo(previewWidth + "x" + previewHeight);
showCropInfo(imageSizeX + "x" + imageSizeY);
showCameraResolution(cropSize + "x" + cropSize);
showRotationInfo(String.valueOf(sensorOrientation));
showInference(lastProcessingTimeMs + "ms");
}
});
}
readyForNextImage();
}
});
}
@Override
protected void onInferenceConfigurationChanged() {
if (rgbFrameBitmap == null) {
// Defer creation until we're getting camera frames.
return;
}
final Device device = getDevice();
final Model model = getModel();
final int numThreads = getNumThreads();
runInBackground(() -> recreateClassifier(model, device, numThreads));
}
private void recreateClassifier(Model model, Device device, int numThreads) {
if (classifier != null) {
LOGGER.d("Closing classifier.");
classifier.close();
classifier = null;
}
if (device == Device.GPU && model == Model.QUANTIZED) {
LOGGER.d("Not creating classifier: GPU doesn't support quantized models.");
runOnUiThread(
() -> {
Toast.makeText(this, "GPU does not yet supported quantized models.", Toast.LENGTH_LONG)
.show();
});
return;
}
try {
LOGGER.d(
"Creating classifier (model=%s, device=%s, numThreads=%d)", model, device, numThreads);
classifier = Classifier.create(this, model, device, numThreads);
} catch (IOException e) {
LOGGER.e(e, "Failed to create classifier.");
}
// Updates the input image size.
imageSizeX = classifier.getImageSizeX();
imageSizeY = classifier.getImageSizeY();
}
}
I created a new activity named Main.java, and I want this activity to render first and then launch ClassifierActivity.java via an intent when a button is clicked:
package org.tensorflow.lite.examples.classification;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Intent;
import android.media.MediaPlayer;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore;
import android.view.View;
import android.widget.Button;
import android.widget.VideoView;
public class Main extends AppCompatActivity {
VideoView videoView;
private Button btn;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
// hide title bar
getSupportActionBar().hide();
// set button on click to scan where open the camera
btn=(Button)findViewById(R.id.button);
btn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
openActivity();
}
});
videoView = findViewById(R.id.videoview);
Uri uri = Uri.parse("android.resource://"+getPackageName()+"/"+R.raw.turkey);
videoView.setVideoURI(uri);
videoView.start();
videoView.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer mp) {
mp.setLooping(true);
}
});
}
protected void openActivity(){
Intent i = new Intent(this, ClassifierActivity.class);
startActivity(i);
}
@Override
protected void onPostResume() {
videoView.resume();
super.onPostResume();
}
@Override
protected void onRestart() {
videoView.start();
super.onRestart();
}
@Override
protected void onPause() {
videoView.suspend();
super.onPause();
}
@Override
protected void onDestroy() {
videoView.stopPlayback();
super.onDestroy();
}
}
This is the old AndroidManifest.xml (the app runs successfully):
<activity
android:name=".ClassifierActivity"
android:label="@string/activity_name_classification"
android:screenOrientation="portrait"
android:exported="true" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
And I want to run Main first, so I changed android:name=".ClassifierActivity" in the old AndroidManifest.xml to android:name=".Main" (now the app stops running):
<activity
android:name=".Main"
android:label="@string/activity_name_classification"
android:screenOrientation="portrait"
android:exported="true" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
All activities need to be defined in the manifest.
You are replacing the original Activity with the new one and now the old one is not defined. So you just need to add it back.
<application
...
/>
<activity
android:name=".ClassifierActivity"
android:label="@string/activity_name_classification"
android:screenOrientation="portrait"
android:exported="true" >
</activity>
<activity
android:name=".Main"
android:label="@string/activity_name_classification"
android:screenOrientation="portrait"
android:exported="true" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>

Detecting incoming call and opening a service in background on top of the dialer app

After detecting an incoming call, I open a Messenger-like chat icon on top of the call screen. But I am facing two issues:
1. The incoming call is not detected when my application is closed (not running even in the background).
2. When my phone is locked, the chat icon does not appear; it hides behind the dialer app on an incoming call.
I am using a BroadcastReceiver to receive the incoming call via the PhonecallReceiver class, whose methods are implemented in the CallReceiver class, and on detecting an incoming call I start the ChatHeadService, which opens a chat-like icon. I have attached screenshots of how the chat icon appears. I have been facing this problem for the past 6 months and have not been able to solve it. Any help would be appreciated.
compileSdkVersion 23
buildToolsVersion '27.0.3'
targetSdkVersion 23
I tested the app on two devices, one on API level 18 and one on API level 26. On API level 18 the app worked fine and both of the above issues were fixed, but on API level 26 it didn't work and the chat icon stayed hidden behind the dialer app.
I am facing the following error on an incoming call on Oreo (API 26).
06-13 16:22:23.969 1238-4375/? W/BroadcastQueue: Permission Denial: receiving Intent { act=android.intent.action.PHONE_STATE flg=0x1000010 (has extras) } to com.skype.m2/com.skype.nativephone.connector.NativePhoneCallReceiver requires android.permission.READ_PHONE_STATE due to sender android (uid 1000)
(Screenshots attached for API level 26 and API level 18.)
AndroidManifest.xml
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
package="com.tarun.notifyme2">
<uses-sdk
android:minSdkVersion="16"
android:targetSdkVersion="23" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.READ_PHONE_STATE" />
<uses-permission android:name="android.permission.CALL_PHONE" />
<uses-permission android:name="android.permission.PROCESS_OUTGOING_CALLS" />
<uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
<uses-permission android:name="android.permission.Settings.ACTION_MANAGE_OVERLAY_PERMISSION" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission
android:name="android.permission.MODIFY_PHONE_STATE"
tools:ignore="ProtectedPermissions" />
<application
android:allowBackup="true"
android:enabled="true"
android:icon="#drawable/app_icon"
android:label="#string/app_name"
android:supportsRtl="true"
android:theme="#style/AppTheme">
<activity android:name=".SignUp">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<activity android:name=".SendNoti" />
<receiver android:name=".CallReceiver"
android:enabled="true">
<intent-filter android:priority="1000">
<action android:name="android.intent.action.PHONE_STATE" />
</intent-filter>
<intent-filter>
<action android:name="android.intent.action.NEW_OUTGOING_CALL" />
</intent-filter>
</receiver>
<service
android:name=".ChatHeadService"
android:exported="true"
android:enabled="true"/>
<service android:name=".FirebaseMessagingService">
<intent-filter>
<action android:name="com.google.firebase.MESSAGING_EVENT" />
</intent-filter>
</service>
<service android:name=".FirebaseInstanceIDService">
<intent-filter>
<action android:name="com.google.firebase.INSTANCE_ID_EVENT" />
</intent-filter>
</service>
<activity
android:name=".MainActivity"
android:label="@string/title_activity_main"
android:theme="@style/AppTheme.NoActionBar" />
<activity android:name=".MainChat" />
<activity android:name=".ChatRoom" />
<activity android:name=".Feedback" />
</application>
</manifest>
PhonecallReceiver.java
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.telephony.TelephonyManager;
import java.util.Date;
public abstract class PhonecallReceiver extends BroadcastReceiver
{
private static int lastState = TelephonyManager.CALL_STATE_IDLE;
private static Date callStartTime;
private static boolean isIncoming;
private static String savedNumber;
@Override
public void onReceive(Context context, Intent intent)
{
try
{
if (intent.getAction().equals("android.intent.action.NEW_OUTGOING_CALL"))
{
savedNumber = intent.getExtras().getString("android.intent.extra.PHONE_NUMBER");
}
else
{
String stateStr = intent.getExtras().getString(TelephonyManager.EXTRA_STATE);
String number = intent.getExtras().getString(TelephonyManager.EXTRA_INCOMING_NUMBER);
int state = 0;
if(stateStr.equals(TelephonyManager.EXTRA_STATE_IDLE))
{
state = TelephonyManager.CALL_STATE_IDLE;
}
else if(stateStr.equals(TelephonyManager.EXTRA_STATE_OFFHOOK))
{
state = TelephonyManager.CALL_STATE_OFFHOOK;
}
else if(stateStr.equals(TelephonyManager.EXTRA_STATE_RINGING))
{
state = TelephonyManager.CALL_STATE_RINGING;
}
onCallStateChanged(context, state, number);
}
}
catch (Exception e)
{
e.printStackTrace();
}
}
//Derived classes should override these to respond to specific events of interest
protected void onIncomingCallStarted(Context ctx, String number, Date start){}
protected void onIncomingCallEnded(Context ctx, String number, Date start, Date end){}
public void onCallStateChanged(Context context, int state, String number)
{
if(lastState == state)
{
//No change, debounce extras
return;
}
switch (state)
{
case TelephonyManager.CALL_STATE_RINGING:
isIncoming = true;
callStartTime = new Date();
savedNumber = number;
onIncomingCallStarted(context, number, callStartTime);
break;
case TelephonyManager.CALL_STATE_OFFHOOK:
if (isIncoming)
{
onIncomingCallEnded(context,savedNumber,callStartTime,new Date());
}
case TelephonyManager.CALL_STATE_IDLE:
if(isIncoming)
{
onIncomingCallEnded(context, savedNumber, callStartTime, new Date());
}
}
lastState = state;
}
}
CallReceiver.java
import android.app.Activity;
import android.app.Dialog;
import android.app.Notification;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.drawable.ColorDrawable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.Window;
import android.widget.Button;
import android.widget.Toast;
import android.os.Handler;
import java.util.Date;
public class CallReceiver extends PhonecallReceiver
{
Context context;
@Override
protected void onIncomingCallStarted(final Context ctx, String number, Date start)
{
Toast.makeText(ctx,"New Incoming Call"+ number,Toast.LENGTH_LONG).show();
context = ctx;
final Intent intent = new Intent(context, ChatHeadService.class);
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
intent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP);
intent.putExtra("phone_no",number);
SharedPreferences.Editor editor = ctx.getSharedPreferences("Notify", Context.MODE_PRIVATE).edit();
editor.putString("incomingNo",number);
editor.commit();
new Handler().postDelayed(new Runnable()
{
@Override
public void run()
{
//start service which opens a chat icon after 2 seconds wait
context.startService(intent);
}
},2000);
}
@Override
protected void onIncomingCallEnded(Context ctx, String number, Date start, Date end)
{
final Intent intent = new Intent(context, ChatHeadService.class);
ctx.stopService(intent);
Toast.makeText(ctx,"Bye Bye"+ number,Toast.LENGTH_LONG).show();
}
}
ChatHeadService.java
import android.app.Service;
import android.content.Intent;
import android.graphics.PixelFormat;
import android.os.IBinder;
import android.util.Log;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import android.view.WindowManager;
import android.widget.ImageView;
import android.widget.Toast;
public class ChatHeadService extends Service {
private WindowManager windowManager;
private ImageView chatHead;
WindowManager.LayoutParams params;
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
int res = super.onStartCommand(intent, flags, startId);
return res;
}
@Override
public void onCreate() {
super.onCreate();
windowManager = (WindowManager) getSystemService(WINDOW_SERVICE);
chatHead = new ImageView(this);
chatHead.setImageResource(R.drawable.bell2);
chatHead.setClickable(true);
params= new WindowManager.LayoutParams(
WindowManager.LayoutParams.WRAP_CONTENT,
WindowManager.LayoutParams.WRAP_CONTENT,
WindowManager.LayoutParams.TYPE_PHONE,
WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE,
PixelFormat.TRANSLUCENT);
params.gravity = Gravity.TOP | Gravity.LEFT;
params.x = 0;
params.y = 400;
windowManager.addView(chatHead, params);
chatHead.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
startActivity(new Intent(ChatHeadService.this, SendNoti.class)
.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK));
stopSelf();
}
});
//this code is for dragging the chat head
chatHead.setOnTouchListener(new View.OnTouchListener() {
private int initialX;
private int initialY;
private float initialTouchX;
private float initialTouchY;
int flag=0;
@Override
public boolean onTouch(View v, MotionEvent event) {
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
initialX = params.x;
initialY = params.y;
initialTouchX = event.getRawX();
initialTouchY = event.getRawY();
if(flag==3){
flag=1;
return true;
}else{
flag=1;
return false;
}
case MotionEvent.ACTION_UP:
if(flag==3){
flag=2;
return true;
}else{
flag=2;
return false;
}
case MotionEvent.ACTION_MOVE:
flag=3;
params.x = initialX
+ (int) (event.getRawX() - initialTouchX);
params.y = initialY
+ (int) (event.getRawY() - initialTouchY);
windowManager.updateViewLayout(chatHead, params);
return true;
default:
Toast.makeText(getApplicationContext(),"You ckiced the imageview",Toast.LENGTH_LONG).show();
Log.i("tag","You clicked the imageview");
/*
Intent i = new Intent(view.getContext(),SendNoti.class);
startActivity(i);
stopSelf();*/
return true;
}
}
});
/*
Snackbar.make(chatHead, "Replace with your own action", Snackbar.LENGTH_LONG)
.setAction("Action", null).show();*/
}
#Override
public void onDestroy() {
super.onDestroy();
if (chatHead != null)
windowManager.removeView(chatHead);
stopSelf();
}
#Override
public IBinder onBind(Intent intent) {
// TODO Auto-generated method stub
return null;
}
}
Some time ago I found this example. I've only added detection of whether the call is incoming or outgoing. Pass your data to the service via the intent and use it to perform the service's work. It should work on API 23; on newer versions I can't guarantee that.
public class CallReceiver extends BroadcastReceiver {
private final static String TAG = "CallReceiver";
private static PhoneCallStartEndDetector listener;
private String outgoingSavedNumber;
protected Context savedContext;
@Override
public void onReceive(Context context, Intent intent) {
this.savedContext = context;
if (listener == null) {
listener = new PhoneCallStartEndDetector();
}
String phoneState = intent.getStringExtra(TelephonyManager.EXTRA_STATE);
if (phoneState == null) {
listener.setOutgoingNumber(intent.getStringExtra(Intent.EXTRA_PHONE_NUMBER));
} else if (phoneState.equals(TelephonyManager.EXTRA_STATE_RINGING)) {
listener.setOutgoingNumber(intent.getStringExtra(TelephonyManager.EXTRA_INCOMING_NUMBER));
}
TelephonyManager telephony = (TelephonyManager) context.getSystemService(Context.TELEPHONY_SERVICE);
telephony.listen(listener, PhoneStateListener.LISTEN_CALL_STATE);
}
//Deals with actual events
private class PhoneCallStartEndDetector extends PhoneStateListener {
int lastState = TelephonyManager.CALL_STATE_IDLE;
boolean isIncoming;
boolean isOutgoing;
String savedNumber; //because the passed incoming is only valid in ringing
private PhoneCallStartEndDetector() {
}
//The outgoing number is only sent via a separate intent, so we need to store it out of band
private void setOutgoingNumber(String number) {
savedNumber = number;
}
Intent serviceIntent = new Intent(savedContext, YourService.class);
//Incoming call- goes from IDLE to RINGING when it rings, to OFFHOOK when it's answered, to IDLE when its hung up
//Outgoing call- goes from IDLE to OFFHOOK when it dials out, to IDLE when hung up
@Override
public void onCallStateChanged(int state, String incomingNumber) {
super.onCallStateChanged(state, incomingNumber);
if (lastState == state) {
//No change, debounce extras
return;
}
switch (state) {
case TelephonyManager.CALL_STATE_RINGING:
isIncoming = true;
savedNumber = incomingNumber;
serviceIntent.putExtra("label", value);
savedContext.startService(serviceIntent);
break;
case TelephonyManager.CALL_STATE_OFFHOOK:
//Transitions of ringing->offhook are pickups of incoming calls. Nothing done on them
if (lastState != TelephonyManager.CALL_STATE_RINGING) {
if (!isOutgoing) {
isOutgoing = true;
}
if (!savedNumber.equals("")) {
serviceIntent.putExtra("label", value);
savedContext.startService(serviceIntent);
}
}
break;
case TelephonyManager.CALL_STATE_IDLE:
//Went to idle- this is the end of a call. What type depends on previous state(s)
if (lastState == TelephonyManager.CALL_STATE_RINGING) {
//Ring but no pickup- a miss
savedContext.stopService(serviceIntent);
} else if (isIncoming) {
savedContext.stopService(serviceIntent);
} else {
if (isOutgoing) {
savedContext.stopService(serviceIntent);
isOutgoing = false;
}
}
break;
}
lastState = state;
}
}
}
Register this receiver in the manifest; this should work on API 25:
<receiver
android:name=".calls.CallReceiver"
android:enabled="true">
<intent-filter android:priority="-1">
<action android:name="android.intent.action.PHONE_STATE" />
<action android:name="android.intent.action.NEW_OUTGOING_CALL"/>
</intent-filter>
</receiver>
Or register the BroadcastReceiver in code; this should work on API 26:
IntentFilter intentFilter = new IntentFilter();
intentFilter.addAction("android.intent.action.PHONE_STATE");
CallReceiver receiver = new CallReceiver();
registerReceiver(receiver, intentFilter);
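For context, here is a sketch of where that dynamic registration could live; the Activity name is hypothetical and not from the original answer. Registering in onCreate() and unregistering in onDestroy() ties the receiver's lifetime to the component:
import android.app.Activity;
import android.content.IntentFilter;
import android.os.Bundle;
public class CallMonitorActivity extends Activity {
    // Hypothetical Activity; only shows where dynamic registration could go.
    private CallReceiver receiver;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        receiver = new CallReceiver();
        IntentFilter intentFilter = new IntentFilter();
        intentFilter.addAction("android.intent.action.PHONE_STATE");
        registerReceiver(receiver, intentFilter);   // dynamic registration still works on API 26+
    }
    @Override
    protected void onDestroy() {
        unregisterReceiver(receiver);   // release the receiver together with the component
        super.onDestroy();
    }
}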
Of course, to use this code you need to hold the permission. In the manifest, for API levels below 23:
<uses-permission android:name="android.permission.READ_PHONE_STATE"/>
And for API 23 and newer, ask the user for the permission at runtime:
Manifest.permission.READ_PHONE_STATE
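A minimal sketch of that runtime request from an Activity; the request-code constant and method name are illustrative, and ContextCompat/ActivityCompat come from the support library or AndroidX, whichever the project uses:
// Request READ_PHONE_STATE at runtime (API 23+) before relying on the receiver.
private static final int REQUEST_PHONE_STATE = 42;   // arbitrary request code
private void requestPhoneStatePermission(Activity activity) {
    if (ContextCompat.checkSelfPermission(activity, Manifest.permission.READ_PHONE_STATE)
            != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(activity,
                new String[]{Manifest.permission.READ_PHONE_STATE},
                REQUEST_PHONE_STATE);
    }
    // The user's choice arrives in the Activity's onRequestPermissionsResult(...) callback.
}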
Call this method after the call ends:
private void alert(Context ctx) {
StringBuffer sb = new StringBuffer();
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.READ_CALL_LOG) != PackageManager.PERMISSION_GRANTED) {
// TODO: Consider calling
// ActivityCompat#requestPermissions
// here to request the missing permissions, and then overriding
// public void onRequestPermissionsResult(int requestCode, String[] permissions,
// int[] grantResults)
// to handle the case where the user grants the permission. See the documentation
// for ActivityCompat#requestPermissions for more details.
return;
}
Cursor cur = getContentResolver().query(CallLog.Calls.CONTENT_URI,
null, null, null, CallLog.Calls.DATE + " DESC limit 1;");
//Cursor cur = getContentResolver().query( CallLog.Calls.CONTENT_URI,null, null,null, android.provider.CallLog.Calls.DATE + " DESC");
int number = cur.getColumnIndex( CallLog.Calls.NUMBER );
int duration = cur.getColumnIndex( CallLog.Calls.DURATION);
int type = cur.getColumnIndex(CallLog.Calls.TYPE);
int date = cur.getColumnIndex(CallLog.Calls.DATE);
sb.append( "Call Details : \n");
phNumber = null;
callDuration = null;
callType = null;
callDate = null;
String dir = null;
String callDayTime = null;
while ( cur.moveToNext() ) {
phNumber = cur.getString( number );
callDuration = cur.getString( duration );
callType = cur.getString( type );
callDate = cur.getString( date );
callDayTime = new Date(Long.valueOf(callDate)).toString();
int dircode = Integer.parseInt(callType);
switch (dircode) {
case CallLog.Calls.OUTGOING_TYPE:
dir = "OUTGOING";
break;
case CallLog.Calls.INCOMING_TYPE:
dir = "INCOMING";
break;
case CallLog.Calls.MISSED_TYPE:
dir = "MISSED";
break;
}
// sb.append( "\nPhone Number:--- "+phNumber +" \nCall duration in sec :--- "+callDuration );
sb.append("\nPhone Number:--- " + phNumber + " \nCall Type:--- " + dir + " \nCall Date:--- " + callDayTime + " \nCall duration in sec :--- " + callDuration);
sb.append("\n----------------------------------");
Log.e("dir",dir);
}
cur.close();
callType=dir;
callDate=callDayTime;
Log.e("call ",phNumber+" duration"+callDuration+" type "+callType+" date "+callDate);
startactivity(ctx);
}
It will give you the last call's details.

Android code can't access phone camera...what's wrong?

I'm having trouble getting the camera preview to work. Any ideas what's going on? I don't think my code can connect/find the camera on my phone... I'm very new at programming.
Here are the error messages:
07-31 22:10:29.940 26291-26291/user.cameratest W/CameraBase: An error occurred while connecting to camera: 0
07-31 22:10:30.168 26291-26320/user.cameratest E/Surface: getSlotFromBufferLocked: unknown buffer: 0xb9bfa3e8
07-31 22:10:30.191 26291-26291/user.cameratest W/CameraBase: An error occurred while connecting to camera: 0
07-31 22:10:36.957 26291-26320/user.cameratest E/Surface: getSlotFromBufferLocked: unknown buffer: 0xb9c89198
07-31 22:10:40.458 26291-26320/user.cameratest E/Surface: getSlotFromBufferLocked: unknown buffer: 0xb9b47570
Here is the code:
AndroidManifest.xml
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="user.cameratest">
<uses-feature android:name="android.hardware.camera" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.hardware.camera.autofocus" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<!--uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" /-->
<application
android:icon="#mipmap/ic_launcher"
android:label="#string/app_name" >
<activity
android:label="#string/app_name"
android:name=".cameraActivity"
android:screenOrientation="landscape" >
<intent-filter >
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
cameraActivity.java
package user.cameratest;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.ShutterCallback;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup.LayoutParams;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.FrameLayout;
import android.widget.Toast;
public class cameraActivity extends Activity { //
private static final String TAG = "cameraActivity";
cameraPreview preview;
Button buttonClick;
Camera camera; //
Activity act;
Context ctx;
@Override
public void onCreate(Bundle savedInstanceState) { //
super.onCreate(savedInstanceState); //
ctx = this;
act = this;
requestWindowFeature(Window.FEATURE_NO_TITLE); //
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN); //
setContentView(R.layout.camera_layout); //
preview = new cameraPreview(this, (SurfaceView)findViewById(R.id.surfaceView));
preview.setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
((FrameLayout) findViewById(R.id.layout)).addView(preview);
preview.setKeepScreenOn(true);
preview.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View arg0) {
camera.takePicture(shutterCallback, rawCallback, jpegCallback);
}
});
Toast.makeText(ctx, getString(R.string.take_photo_help), Toast.LENGTH_LONG).show();
// buttonClick = (Button) findViewById(R.id.btnCapture);
//
// buttonClick.setOnClickListener(new OnClickListener() {
// public void onClick(View v) {
//// preview.camera.takePicture(shutterCallback, rawCallback, jpegCallback);
// camera.takePicture(shutterCallback, rawCallback, jpegCallback);
// }
// });
//
// buttonClick.setOnLongClickListener(new OnLongClickListener(){
// @Override
// public boolean onLongClick(View arg0) {
// camera.autoFocus(new AutoFocusCallback(){
// @Override
// public void onAutoFocus(boolean arg0, Camera arg1) {
// //camera.takePicture(shutterCallback, rawCallback, jpegCallback);
// }
// });
// return true;
// }
// });
}
@Override
protected void onResume() {
super.onResume();
int numCams = Camera.getNumberOfCameras();
if(numCams > 0){
try{
camera = Camera.open(0);
camera.startPreview();
preview.setCamera(camera);
} catch (RuntimeException ex){
Toast.makeText(ctx, getString(R.string.camera_not_found), Toast.LENGTH_LONG).show();
}
}
}
@Override
protected void onPause() {
if(camera != null) {
camera.stopPreview();
preview.setCamera(null);
camera.release();
camera = null;
}
super.onPause();
}
private void resetCam() {
camera.startPreview();
preview.setCamera(camera);
}
private void refreshGallery(File file) {
Intent mediaScanIntent = new Intent( Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
mediaScanIntent.setData(Uri.fromFile(file));
sendBroadcast(mediaScanIntent);
}
ShutterCallback shutterCallback = new ShutterCallback() {
public void onShutter() {
// Log.d(TAG, "onShutter'd");
}
};
PictureCallback rawCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
// Log.d(TAG, "onPictureTaken - raw");
}
};
PictureCallback jpegCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
new SaveImageTask().execute(data);
resetCam();
Log.d(TAG, "onPictureTaken - jpeg");
}
};
private class SaveImageTask extends AsyncTask<byte[], Void, Void> {
@Override
protected Void doInBackground(byte[]... data) {
FileOutputStream outStream = null;
// Write to SD Card
try {
File sdCard = Environment.getExternalStorageDirectory();
File dir = new File (sdCard.getAbsolutePath() + "/camtest");
dir.mkdirs();
String fileName = String.format("%d.jpg", System.currentTimeMillis());
File outFile = new File(dir, fileName);
outStream = new FileOutputStream(outFile);
outStream.write(data[0]);
outStream.flush();
outStream.close();
Log.d(TAG, "onPictureTaken - wrote bytes: " + data.length + " to " + outFile.getAbsolutePath());
refreshGallery(outFile);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
return null;
}
}
}
cameraPreview.java
package user.cameratest;
/**
* @author Jose Davis Nidhin
*/
import java.io.IOException;
import java.util.List;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
class cameraPreview extends ViewGroup implements SurfaceHolder.Callback { //
private final String TAG = "cameraPreview"; //
SurfaceView mSurfaceView; //
SurfaceHolder mHolder; //
Size mPreviewSize; //
List<Size> mSupportedPreviewSizes; //
Camera mCamera; //
cameraPreview(Context context, SurfaceView sv) { //
super(context); //
mSurfaceView = sv; //
// addView(mSurfaceView);
mHolder = mSurfaceView.getHolder(); //
mHolder.addCallback(this); //
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); //
}
public void setCamera(Camera camera) { //
mCamera = camera; //
if (mCamera != null) { //
mSupportedPreviewSizes = mCamera.getParameters().getSupportedPreviewSizes(); //
requestLayout(); //
// get Camera parameters
Camera.Parameters params = mCamera.getParameters(); //
List<String> focusModes = params.getSupportedFocusModes(); //
if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) { //
// set the focus mode
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO); //
// set Camera parameters
mCamera.setParameters(params); //
}
}
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { //
// We purposely disregard child measurements because act as a
// wrapper to a SurfaceView that centers the camera preview instead
// of stretching it.
final int width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec); //
final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec); //
setMeasuredDimension(width, height); //
if (mSupportedPreviewSizes != null) { //
mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width, height); //
}
}
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) { //
if (changed && getChildCount() > 0) {
final View child = getChildAt(0);
final int width = r - l;
final int height = b - t;
int previewWidth = width;
int previewHeight = height;
if (mPreviewSize != null) {
previewWidth = mPreviewSize.width;
previewHeight = mPreviewSize.height;
}
// Center the child SurfaceView within the parent.
if (width * previewHeight > height * previewWidth) {
final int scaledChildWidth = previewWidth * height / previewHeight;
child.layout((width - scaledChildWidth) / 2, 0,
(width + scaledChildWidth) / 2, height);
} else {
final int scaledChildHeight = previewHeight * width / previewWidth;
child.layout(0, (height - scaledChildHeight) / 2,
width, (height + scaledChildHeight) / 2);
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
try {
if (mCamera != null) {
mCamera.setPreviewDisplay(holder);
}
} catch (IOException exception) {
Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// Surface will be destroyed when we return, so stop the preview.
if (mCamera != null) {
mCamera.stopPreview();
}
}
private Size getOptimalPreviewSize(List<Size> sizes, int w, int h) {
final double ASPECT_TOLERANCE = 0.1;
double targetRatio = (double) w / h;
if (sizes == null) return null;
Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
// Try to find an size match aspect ratio and size
for (Size size : sizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
// Cannot find the one match the aspect ratio, ignore the requirement
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if(mCamera != null) {
Camera.Parameters parameters = mCamera.getParameters();
parameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height);
requestLayout();
mCamera.setParameters(parameters);
mCamera.startPreview();
}
}
}
camera_layout.xml
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:id="#+id/layout"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<SurfaceView
android:id="#+id/surfaceView"
android:layout_width="match_parent"
android:layout_height="match_parent" />
<!-- Button
android:id="#+id/btnCapture"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center_horizontal"
android:background="@drawable/round_button"
android:text="@string/btn_capture" /-->
</FrameLayout>
Are you asking for the permissions? See:
https://developer.android.com/training/permissions/requesting.html
For the camera the permission is Manifest.permission.CAMERA.
Also, android.hardware.camera is deprecated; you should use android.hardware.camera2: https://developer.android.com/reference/android/hardware/camera2/package-summary.html
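For reference, a rough sketch of opening a camera with camera2 from an Activity on API 23+ (the method name and callback bodies are illustrative; a real implementation still needs a capture session and a preview Surface):
// Uses android.hardware.camera2.CameraManager / CameraDevice / CameraAccessException.
// Assumes the CAMERA runtime permission has already been requested.
private void openCameraWithCamera2() {
    CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
        String cameraId = manager.getCameraIdList()[0];   // typically the back camera
        if (checkSelfPermission(Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) {
            manager.openCamera(cameraId, new CameraDevice.StateCallback() {
                @Override public void onOpened(CameraDevice camera) {
                    // create a CameraCaptureSession with the preview Surface here
                }
                @Override public void onDisconnected(CameraDevice camera) { camera.close(); }
                @Override public void onError(CameraDevice camera, int error) { camera.close(); }
            }, null);   // null handler = deliver callbacks on the main thread
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}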

My Live Wallpaper won't compile and run

I've followed the instructions in this tutorial: http://code.tutsplus.com/tutorials/create-a-live-wallpaper-on-android-using-an-animated-gif--cms-23088
But I have had a few errors and am unable to run my project.
This is all my code:
My manifest:
<service
android:name=".GIFWallpaperService"
android:enabled="true"
android:label="Raindrops In Paris"
android:permission="android.permission.BIND_WALLPAPER" >
<intent-filter>
<action android:name="android.service.wallpaper.WallpaperService"/>
</intent-filter>
<meta-data
android:name="android.service.wallpaper"
android:resource="@xml/wallpaper" >
</meta-data>
</service>
<uses-feature
android:name="android.software.live_wallpaper"
android:required="true" >
</uses-feature>
My Java class:
package com.gacafw.gina.raindropsinparis;
import android.graphics.Canvas;
import android.graphics.Movie;
import android.os.Handler;
import android.service.wallpaper.WallpaperService;
import android.util.Log;
import android.view.SurfaceHolder;
import java.io.IOException;
public class GIFWallpaperService extends WallpaperService {
@Override
public WallpaperService.Engine onCreateEngine() {
try {
Movie movie = Movie.decodeStream(
getResources().getAssets().open("rainDropAna.gif"));
return new GIFWallpaperEngine(movie);
}catch(IOException e){
Log.d("GIF", "Could not load asset");
return null;
}
}
private Runnable drawGIF = new Runnable() {
public void run() {
draw();
}
};
private void draw() {
if (visible) {
Canvas canvas = holder.lockCanvas();
canvas.save();
// Adjust size and position so that
// the image looks good on your screen
canvas.scale(3f, 3f);
movie.draw(canvas, -100, 0);
canvas.restore();
holder.unlockCanvasAndPost(canvas);
movie.setTime((int) (System.currentTimeMillis() % movie.duration()));
handler.removeCallbacks(drawGIF);
handler.postDelayed(drawGIF, frameDuration);
}
}
@Override
public void onVisibilityChanged(boolean visible) {
this.visible = visible;
if (visible) {
handler.post(drawGIF);
} else {
handler.removeCallbacks(drawGIF);
}
}
private class GIFWallpaperEngine extends WallpaperService.Engine {
private final int frameDuration = 20;
private SurfaceHolder holder;
private Movie movie;
private boolean visible;
private Handler handler;
public GIFWallpaperEngine(Movie movie) {
this.movie = movie;
handler = new Handler();
}
@Override
public void onCreate(SurfaceHolder surfaceHolder) {
super.onCreate(surfaceHolder);
this.holder = surfaceHolder;
}
@Override
public void onDestroy() {
super.onDestroy();
handler.removeCallbacks(drawGIF);
}
}
}
My wallpaper.xml
<?xml version="1.0" encoding="UTF-8"?>
<wallpaper
xmlns:android="http://schemas.android.com/apk/res/android"
android:label="Raindrops In Paris"
android:thumbnail="@drawable/ic_launcher">
</wallpaper>
My current errors:
The variables visible, holder, movie, handler in the draw() and onVisibilityChanged() are giving the error Cannot Resolve Symbol. I assume this is because they are out of scope in these methods?
I think I interpreted the instructions wrong but I can't figure out where I went wrong.
The tutorial contains an error: where it says "Add the following code to the GIFWallpaperService class:", it should say to add it to the GIFWallpaperEngine class.
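Concretely, once drawGIF, draw() and onVisibilityChanged() are moved inside the engine, the class would look roughly like this (a sketch assembled from the code posted in the question, not the tutorial's exact listing), so that holder, movie, visible and handler are all in scope:
private class GIFWallpaperEngine extends WallpaperService.Engine {
    private final int frameDuration = 20;
    private SurfaceHolder holder;
    private Movie movie;
    private boolean visible;
    private Handler handler;
    public GIFWallpaperEngine(Movie movie) {
        this.movie = movie;
        handler = new Handler();
    }
    @Override
    public void onCreate(SurfaceHolder surfaceHolder) {
        super.onCreate(surfaceHolder);
        this.holder = surfaceHolder;
    }
    // drawGIF, draw() and onVisibilityChanged() belong here, inside the engine.
    private final Runnable drawGIF = new Runnable() {
        public void run() {
            draw();
        }
    };
    private void draw() {
        if (visible) {
            Canvas canvas = holder.lockCanvas();
            canvas.save();
            // Adjust size and position so that the image looks good on your screen.
            canvas.scale(3f, 3f);
            movie.draw(canvas, -100, 0);
            canvas.restore();
            holder.unlockCanvasAndPost(canvas);
            movie.setTime((int) (System.currentTimeMillis() % movie.duration()));
            handler.removeCallbacks(drawGIF);
            handler.postDelayed(drawGIF, frameDuration);
        }
    }
    @Override
    public void onVisibilityChanged(boolean visible) {
        this.visible = visible;
        if (visible) {
            handler.post(drawGIF);
        } else {
            handler.removeCallbacks(drawGIF);
        }
    }
    @Override
    public void onDestroy() {
        super.onDestroy();
        handler.removeCallbacks(drawGIF);
    }
}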
I had the same problem. I created an Activity and passed an intent to run the wallpaper. Here is your answer:
public class SetWallpaperActivity extends Activity {
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_login);
Intent intent = new Intent(
WallpaperManager.ACTION_CHANGE_LIVE_WALLPAPER);
intent.putExtra(WallpaperManager.EXTRA_LIVE_WALLPAPER_COMPONENT,
new ComponentName(this, GIFWallpaperService.class));
startActivity(intent);
}
}
