I'm working on a basic application that tells the user the number of contours in the video feed in real time using OpenCV. I have this working, but about a minute in, the application closes and there are no logcat errors. I'm a new developer, but through some research I believe this is a memory-leak issue; I think it has something to do with setting the text on each video frame. Here is my code:
import java.util.ArrayList;
import java.util.List;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.JavaCameraView;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.SurfaceView;
import android.view.WindowManager;
import android.widget.TextView;
public class MainActivity extends Activity implements CvCameraViewListener2 {
static {
if (!OpenCVLoader.initDebug()) {
Log.d("ERROR", "Unable to load OpenCV");
}
else {
Log.d("SUCCESS", "OpenCV loaded");
}
}
private TextView contourCount;
//TAG for log
private static final String TAG = "Contour Count";
//Mats for image processing
private Mat mRgba;
private Mat mGray;
//Declare the cameraview
private JavaCameraView openCvCameraView;
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
//OpenCv
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
//Turn on cameraview
openCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
//OnResume handling
@Override
public void onResume()
{
super.onResume();
openCvCameraView.enableView();
}
//OnCreate tasks
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
//Keep the screen on while app is running
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
//Set layout and initialize cameraView
setContentView(R.layout.fragment_main);
openCvCameraView = (JavaCameraView) findViewById(R.id.HelloOpenCvView);
openCvCameraView.setVisibility(SurfaceView.VISIBLE);
openCvCameraView.setCvCameraViewListener(this);
openCvCameraView.enableView();
}
//Handling onPause tasks - Disable Camera
@Override
public void onPause()
{
super.onPause();
if (openCvCameraView != null)
openCvCameraView.disableView();
}
//Handling onDestroy tasks - Disable Camera
public void onDestroy() {
super.onDestroy();
if (openCvCameraView != null)
openCvCameraView.disableView();
}
//New Mats for first onFrame
public void onCameraViewStarted(int width, int height) {
//Initialization of variables used for video analysis
mGray = new Mat();
mRgba = new Mat();
}
//Release the Mats when camera is stopped
public void onCameraViewStopped() {
mGray.release();
mRgba.release();
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
mGray = inputFrame.gray();
Mat imageHSV = new Mat(mRgba.size(), Core.DEPTH_MASK_8U);
Mat imageBlurr = new Mat(mRgba.size(), Core.DEPTH_MASK_8U);
Mat imageA = new Mat(mRgba.size(), Core.DEPTH_MASK_ALL);
Imgproc.cvtColor(mRgba, imageHSV, Imgproc.COLOR_BGR2GRAY);
Imgproc.GaussianBlur(imageHSV, imageBlurr, new Size(5,5), 0);
Imgproc.adaptiveThreshold(imageBlurr, imageA, 255,Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY,7, 5);
List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
Imgproc.findContours(imageA, contours, new Mat(), Imgproc.RETR_EXTERNAL,Imgproc.CHAIN_APPROX_SIMPLE);
setContours(contours.size());
return mRgba;
}
public void setContours(final int level) {
runOnUiThread(new Runnable() {
@Override
public void run() {
contourCount = (TextView) findViewById(R.id.contourLevel);
contourCount.setText("" + level);
}
});
}
}
The same issue occurs if I declare contourCount in onCreate(). I'm not sure if it has something to do with making level final in the method call for setContours(). I have also tried to solve this with a handler declared in onCreate(), sending it a message with contours.size() from onCameraFrame(), but that causes the same issue. What am I doing wrong? How can I avoid this in the future? I am new to this, so please be detailed in explaining the issue and how to avoid it.
Thank you in advance! :)
EDIT SOLVED:
Figured it out. Thanks to everyone who contributed. Here is the working code for anyone interested, or for those who hit the same issue in the future!
The fix comes from releasing the Mats used in onCameraFrame.
import java.util.ArrayList;
import java.util.List;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.JavaCameraView;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.SurfaceView;
import android.view.WindowManager;
import android.widget.TextView;
public class MainActivity extends Activity implements CvCameraViewListener2 {
static {
if (!OpenCVLoader.initDebug()) {
Log.d("ERROR", "Unable to load OpenCV");
}
else {
Log.d("SUCCESS", "OpenCV loaded");
}
}
//TAG for log
private static final String TAG = "Contour Count";
//Mats for image processing
private Mat mRgba;
private Mat mGray;
private Mat imageHSV;
private Mat imageBlurr;
private Mat imageA;
private Mat hierarchy;
//Declare the cameraview
private JavaCameraView openCvCameraView;
private static String level;
private static TextView contourCount;
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
//OpenCv
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
//Turn on cameraview
openCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
//OnResume handling
@Override
public void onResume()
{
super.onResume();
openCvCameraView.enableView();
}
//OnCreate tasks
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
//Keep the screen on while app is running
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
//Set layout and initialize cameraView
setContentView(R.layout.fragment_main);
openCvCameraView = (JavaCameraView) findViewById(R.id.HelloOpenCvView);
openCvCameraView.setVisibility(SurfaceView.VISIBLE);
openCvCameraView.setCvCameraViewListener(this);
openCvCameraView.enableView();
contourCount = (TextView) findViewById(R.id.contourLevel);
}
//Handling onPause tasks - Disable Camera
@Override
public void onPause()
{
super.onPause();
if (openCvCameraView != null)
openCvCameraView.disableView();
}
//Handling onDestroy tasks - Disable Camera
public void onDestroy() {
super.onDestroy();
if (openCvCameraView != null)
openCvCameraView.disableView();
}
//New Mats for first onFrame
public void onCameraViewStarted(int width, int height) {
//Initialization of variables used for video analysis
mGray = new Mat();
mRgba = new Mat();
}
//Release the Mats when camera is stopped
public void onCameraViewStopped() {
mGray.release();
mRgba.release();
}
//onCameraFrame image processing
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
imageHSV = new Mat(mRgba.size(), Core.DEPTH_MASK_8U);
imageBlurr = new Mat(mRgba.size(), Core.DEPTH_MASK_8U);
imageA = new Mat(mRgba.size(), Core.DEPTH_MASK_ALL);
hierarchy = new Mat();
Imgproc.cvtColor(mRgba, imageHSV, Imgproc.COLOR_BGR2GRAY);
Imgproc.GaussianBlur(imageHSV, imageBlurr, new Size(5,5), 0);
Imgproc.adaptiveThreshold(imageBlurr, imageA, 255,Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY,7, 5);
List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
Imgproc.findContours(imageA, contours, hierarchy, Imgproc.RETR_EXTERNAL,Imgproc.CHAIN_APPROX_SIMPLE);
//Imgproc.findContours(imageA, contours, new Mat(), Imgproc.RETR_LIST,Imgproc.CHAIN_APPROX_SIMPLE);
level = "" + contours.size();
setContour();
imageHSV.release();
imageBlurr.release();
imageA.release();
hierarchy.release();
return mRgba;
}
public void setContour() {
runOnUiThread(new Runnable() {
@Override
public void run() {
contourCount.setText(level);
}
});
}
}
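One more release that is easy to overlook (an extra note for completeness, not part of the fix above): each MatOfPoint that findContours puts into the contours list also wraps native memory, which is only freed when the garbage collector gets around to running finalizers. Releasing them explicitly at the end of onCameraFrame keeps the per-frame footprint flat:
for (MatOfPoint contour : contours) {
    contour.release(); // MatOfPoint extends Mat, so it holds native memory too
}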
In the past, when creating memory-intensive apps, I've had to manually increase the amount of memory the app has access to. There may not necessarily be an OOM error; the app may simply be extremely memory intensive.
Try adding this to your application tag in your manifest:
android:largeHeap="true"
Hopefully this helps :)
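For reference, a minimal sketch of where that attribute goes in AndroidManifest.xml (the icon/label resources and activity name here are placeholders, not from the original project):
<application
    android:largeHeap="true"
    android:icon="@mipmap/ic_launcher"
    android:label="@string/app_name">
    <activity android:name=".MainActivity" />
</application>
Keep in mind that largeHeap only raises the memory ceiling; it does not fix a leak, so releasing the per-frame Mats as shown above is still the real solution.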
Related
ML Kit by Google (without Firebase) is new, so I'm having trouble. I'm trying to follow this example: https://developers.google.com/ml-kit/vision/object-detection/custom-models/android
The app opens fine, and the camera works (as in, I can see things). But the actual detection doesn't seem to work.
Am I missing part of the code to actually detect the object? Or is it an issue with the implementation of CameraX or ImageInput?
package com.example.mlkitobjecttest;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.camera.core.Camera;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.CameraX;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.Preview;
import androidx.camera.core.impl.PreviewConfig;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.camera.view.PreviewView;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.lifecycle.LifecycleOwner;
import android.content.pm.PackageManager;
import android.graphics.Rect;
import android.media.Image;
import android.os.Bundle;
import android.text.Layout;
import android.util.Rational;
import android.util.Size;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.mlkit.common.model.LocalModel;
import com.google.mlkit.vision.common.InputImage;
import com.google.mlkit.vision.objects.DetectedObject;
import com.google.mlkit.vision.objects.ObjectDetection;
import com.google.mlkit.vision.objects.ObjectDetector;
import com.google.mlkit.vision.objects.custom.CustomObjectDetectorOptions;
import org.w3c.dom.Text;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
public class MainActivity extends AppCompatActivity {
private class YourAnalyzer implements ImageAnalysis.Analyzer {
@Override
@androidx.camera.core.ExperimentalGetImage
public void analyze(ImageProxy imageProxy) {
Image mediaImage = imageProxy.getImage();
if (mediaImage != null) {
InputImage image =
InputImage.fromMediaImage(mediaImage, imageProxy.getImageInfo().getRotationDegrees());
// Pass image to an ML Kit Vision API
// ...
LocalModel localModel =
new LocalModel.Builder()
.setAssetFilePath("mobilenet_v1_1.0_128_quantized_1_default_1.tflite")
// or .setAbsoluteFilePath(absolute file path to tflite model)
.build();
CustomObjectDetectorOptions customObjectDetectorOptions =
new CustomObjectDetectorOptions.Builder(localModel)
.setDetectorMode(CustomObjectDetectorOptions.SINGLE_IMAGE_MODE)
.enableMultipleObjects()
.enableClassification()
.setClassificationConfidenceThreshold(0.5f)
.setMaxPerObjectLabelCount(3)
.build();
ObjectDetector objectDetector =
ObjectDetection.getClient(customObjectDetectorOptions);
objectDetector
.process(image)
.addOnFailureListener(new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception e) {
//Toast.makeText(getApplicationContext(), "Fail. Sad!", Toast.LENGTH_SHORT).show();
//textView.setText("Fail. Sad!");
imageProxy.close();
}
})
.addOnSuccessListener(new OnSuccessListener<List<DetectedObject>>() {
@Override
public void onSuccess(List<DetectedObject> results) {
for (DetectedObject detectedObject : results) {
Rect box = detectedObject.getBoundingBox();
for (DetectedObject.Label label : detectedObject.getLabels()) {
String text = label.getText();
int index = label.getIndex();
float confidence = label.getConfidence();
textView.setText(text);
}}
imageProxy.close();
}
});
}
//ImageAnalysis.Builder.fromConfig(new ImageAnalysisConfig).setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST);
}
}
PreviewView prevView;
private ListenableFuture<ProcessCameraProvider> cameraProviderFuture;
private ExecutorService executor = Executors.newSingleThreadExecutor();
TextView textView;
private int REQUEST_CODE_PERMISSIONS = 101;
private String[] REQUIRED_PERMISSIONS = new String[]{"android.permission.CAMERA"};
/* @NonNull
@Override
public CameraXConfig getCameraXConfig() {
return CameraXConfig.Builder.fromConfig(Camera2Config.defaultConfig())
.setCameraExecutor(ContextCompat.getMainExecutor(this))
.build();
}
*/
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
prevView = findViewById(R.id.viewFinder);
textView = findViewById(R.id.scan_button);
if(allPermissionsGranted()){
startCamera();
}else{
ActivityCompat.requestPermissions(this, REQUIRED_PERMISSIONS, REQUEST_CODE_PERMISSIONS);
}
}
private void startCamera() {
cameraProviderFuture = ProcessCameraProvider.getInstance(this);
cameraProviderFuture.addListener(new Runnable() {
@Override
public void run() {
try {
ProcessCameraProvider cameraProvider = cameraProviderFuture.get();
bindPreview(cameraProvider);
} catch (ExecutionException | InterruptedException e) {
// No errors need to be handled for this Future.
// This should never be reached.
}
}
}, ContextCompat.getMainExecutor(this));
}
void bindPreview(@NonNull ProcessCameraProvider cameraProvider) {
Preview preview = new Preview.Builder()
.build();
CameraSelector cameraSelector = new CameraSelector.Builder()
.requireLensFacing(CameraSelector.LENS_FACING_BACK)
.build();
preview.setSurfaceProvider(prevView.createSurfaceProvider());
ImageAnalysis imageAnalysis =
new ImageAnalysis.Builder()
.setTargetResolution(new Size(1280, 720))
.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
.build();
imageAnalysis.setAnalyzer(ContextCompat.getMainExecutor(this), new YourAnalyzer());
Camera camera = cameraProvider.bindToLifecycle((LifecycleOwner)this, cameraSelector, preview, imageAnalysis);
}
private boolean allPermissionsGranted() {
for(String permission: REQUIRED_PERMISSIONS){
if(ContextCompat.checkSelfPermission(this, permission) != PackageManager.PERMISSION_GRANTED){
return false;
}
}
return true;
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
if(requestCode == REQUEST_CODE_PERMISSIONS){
if(allPermissionsGranted()){
startCamera();
} else{
Toast.makeText(this, "Permissions not granted by the user.", Toast.LENGTH_SHORT).show();
this.finish();
}
}
}
}
Nothing is detected because you defined the wrong path to the tflite model file. Your emulator or physical device cannot resolve the given path, as it doesn't exist on the mobile device: C:\\Users\\dude\\Documents\\mlkitobjecttest\\app\\src\\main\\assets\\mobilenet_v1_1.0_128_quantized_1_default_1.tflite
Copy your model mobilenet_v1_1.0_128_quantized_1_default_1.tflite into the assets directory under your app's src/main directory.
If you do not have that directory, just create a new one named assets.
At the end the model should sit at app/src/main/assets/mobilenet_v1_1.0_128_quantized_1_default_1.tflite inside your project.
After that, fix the LocalModel initialization code:
LocalModel localModel =
new LocalModel.Builder()
.setAssetFilePath("mobilenet_v1_1.0_128_quantized_1_default_1.tflite")
// or .setAbsoluteFilePath(absolute file path to tflite model)
.build();
Update: one more issue found
The ImageAnalysis instance was not bound to the CameraProvider:
...
ImageAnalysis imageAnalysis = ...
Camera camera = cameraProvider.bindToLifecycle((LifecycleOwner)this, cameraSelector, preview); // imageAnalysis is not used
To fix it, just pass the imageAnalysis variable as the last argument to the bindToLifecycle method:
Camera camera = cameraProvider.bindToLifecycle((LifecycleOwner)this, cameraSelector, preview, imageAnalysis);
Second update: another issue found
ML Kit cannot process an image if it is closed while processing is still running or right before it starts. I'm talking about the imageProxy.close() call inside public void analyze(ImageProxy imageProxy).
Java documentation of the close() method:
/**
* Free up this frame for reuse.
* <p>
* After calling this method, calling any methods on this {@code Image} will
* result in an {@link IllegalStateException}, and attempting to read from
* or write to {@link ByteBuffer ByteBuffers} returned by an earlier
* {@link Plane#getBuffer} call will have undefined behavior. If the image
* was obtained from {@link ImageWriter} via
* {@link ImageWriter#dequeueInputImage()}, after calling this method, any
* image data filled by the application will be lost and the image will be
* returned to {@link ImageWriter} for reuse. Images given to
* {@link ImageWriter#queueInputImage queueInputImage()} are automatically
* closed.
* </p>
*/
To fix that, move imageProxy.close() into the failure and success listeners:
objectDetector
.process(image)
.addOnFailureListener(new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception e) {
Toast.makeText(getApplicationContext(), "Fail. Sad!", Toast.LENGTH_LONG).show();
...
imageProxy.close();
}
})
.addOnSuccessListener(new OnSuccessListener<List<DetectedObject>>() {
@Override
public void onSuccess(List<DetectedObject> results) {
Toast.makeText(getBaseContext(), "Success...", Toast.LENGTH_LONG).show();
...
imageProxy.close();
}
});
The fixed solution was tested with an image classification model from TensorFlow, and the test was successful.
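Two side notes, not from the original answer: building the LocalModel and ObjectDetector inside analyze() recreates them on every frame, so it is cheaper to build the detector once and reuse it; and for a live CameraX feed, ML Kit also offers STREAM_MODE, which is intended for consecutive frames. A sketch combining both (the variable name is mine):
// Build once, e.g. in onCreate(), and reuse across frames instead of per analyze() call
// (localModel is the same LocalModel built from the asset path above).
ObjectDetector streamDetector = ObjectDetection.getClient(
        new CustomObjectDetectorOptions.Builder(localModel)
                .setDetectorMode(CustomObjectDetectorOptions.STREAM_MODE)
                .enableClassification()
                .setClassificationConfidenceThreshold(0.5f)
                .build());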
I was trying to run this code to take a photo and recognize the photo's owner. First the user clicks the Train button, takes a picture, and enters their name; after that they click the Recognize button and take a picture. If the picture was saved, the app recognizes the face; if not, it shows an unknown person, for example. The app runs well, but it sometimes crashes with this error:
FATAL EXCEPTION: main
Process: com.facedetection.app, PID: 7442
java.lang.RuntimeException: Unable to stop activity {com.facedetection.app/com.facedetection.app.TrainActivity}: CvException [org.opencv.core.CvException: cv::Exception: OpenCV(4.0.0-pre) E:\AssemCourses\opencv-master\modules\core\src\matrix.cpp:235: error: (-215:Assertion failed) s >= 0 in function 'setSize'
]
at android.app.ActivityThread.performDestroyActivity(ActivityThread.java:4852)
at android.app.ActivityThread.handleDestroyActivity(ActivityThread.java:4915)
at android.app.ActivityThread.access$1600(ActivityThread.java:211)
at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1759)
at android.os.Handler.dispatchMessage(Handler.java:102)
at android.os.Looper.loop(Looper.java:145)
at android.app.ActivityThread.main(ActivityThread.java:6946)
at java.lang.reflect.Method.invoke(Native Method)
at java.lang.reflect.Method.invoke(Method.java:372)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1404)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1199)
Caused by: CvException [org.opencv.core.CvException: cv::Exception: OpenCV(4.0.0-pre) E:\AssemCourses\opencv-master\modules\core\src\matrix.cpp:235: error: (-215:Assertion failed) s >= 0 in function 'setSize'
]
at org.opencv.face.FaceRecognizer.train_0(Native Method)
at org.opencv.face.FaceRecognizer.train(FaceRecognizer.java:133)
at com.facedetection.app.TrainActivity.trainfaces(TrainActivity.java:95)
at com.facedetection.app.TrainActivity.onStop(TrainActivity.java:351)
at android.app.Instrumentation.callActivityOnStop(Instrumentation.java:1305)
at android.app.Activity.performStop(Activity.java:6777)
at android.app.ActivityThread.performDestroyActivity(ActivityThread.java:4847)
Here is the class mentioned in the error logcat:
Class TrainActivity.java:
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.DialogInterface;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.text.InputType;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.facebook.stetho.Stetho;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Size;
import org.opencv.face.Face;
import org.opencv.face.FaceRecognizer;
import org.opencv.face.LBPHFaceRecognizer;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import static org.opencv.objdetect.Objdetect.CASCADE_SCALE_IMAGE;
/**
* Created by Assem Abozaid on 6/2/2018.
*/
public class TrainActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2 {
private static String TAG = TrainActivity.class.getSimpleName();
private CameraBridgeViewBase openCVCamera;
private Mat rgba,gray;
private CascadeClassifier classifier;
private MatOfRect faces;
private static final int PERMS_REQUEST_CODE = 123;
private ArrayList<Mat> images;
private ArrayList<String> imagesLabels;
private Storage local;
private String[] uniqueLabels;
FaceRecognizer recognize;
private boolean trainfaces() {
if(images.isEmpty())
return false;
List<Mat> imagesMatrix = new ArrayList<>();
for (int i = 0; i < images.size(); i++)
imagesMatrix.add(images.get(i));
Set<String> uniqueLabelsSet = new HashSet<>(imagesLabels); // Get all unique labels
uniqueLabels = uniqueLabelsSet.toArray(new String[uniqueLabelsSet.size()]); // Convert to String array, so we can read the values from the indices
int[] classesNumbers = new int[uniqueLabels.length];
for (int i = 0; i < classesNumbers.length; i++)
classesNumbers[i] = i + 1; // Create incrementing list for each unique label starting at 1
int[] classes = new int[imagesLabels.size()];
for (int i = 0; i < imagesLabels.size(); i++) {
String label = imagesLabels.get(i);
for (int j = 0; j < uniqueLabels.length; j++) {
if (label.equals(uniqueLabels[j])) {
classes[i] = classesNumbers[j]; // Insert corresponding number
break;
}
}
}
Mat vectorClasses = new Mat(classes.length, 1, CvType.CV_32SC1); // CV_32S == int
vectorClasses.put(0, 0, classes); // Copy int array into a vector
recognize = LBPHFaceRecognizer.create(3,8,8,8,200);
recognize.train(imagesMatrix, vectorClasses);
if(SaveImage())
return true;
return false;
}
public void showLabelsDialog() {
Set<String> uniqueLabelsSet = new HashSet<>(imagesLabels); // Get all unique labels
if (!uniqueLabelsSet.isEmpty()) {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Select label:");
builder.setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
images.remove(images.size()-1);
}
});
builder.setCancelable(false); // Prevent the user from closing the dialog
String[] uniqueLabels = uniqueLabelsSet.toArray(new String[uniqueLabelsSet.size()]); // Convert to String array for ArrayAdapter
Arrays.sort(uniqueLabels); // Sort labels alphabetically
final ArrayAdapter<String> arrayAdapter = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, uniqueLabels) {
@Override
public @NonNull
View getView(int position, @Nullable View convertView, @NonNull ViewGroup parent) {
TextView textView = (TextView) super.getView(position, convertView, parent);
if (getResources().getBoolean(R.bool.isTablet))
textView.setTextSize(20); // Make text slightly bigger on tablets compared to phones
else
textView.setTextSize(18); // Increase text size a little bit
return textView;
}
};
ListView mListView = new ListView(this);
mListView.setAdapter(arrayAdapter); // Set adapter, so the items actually show up
builder.setView(mListView); // Set the ListView
final AlertDialog dialog = builder.show(); // Show dialog and store in final variable, so it can be dismissed by the ListView
mListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
dialog.dismiss();
addLabel(arrayAdapter.getItem(position));
Log.i(TAG, "Labels Size "+imagesLabels.size()+"");
}
});
} else {
showEnterLabelDialog();
}
}
private void showEnterLabelDialog() {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Please enter your name:");
final EditText input = new EditText(this);
input.setInputType(InputType.TYPE_CLASS_TEXT);
builder.setView(input);
builder.setPositiveButton("Submit", null); // Set up positive button, but do not provide a listener, so we can check the string before dismissing the dialog
builder.setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
images.remove(images.size()-1);
}
});
builder.setCancelable(false); // User has to input a name
AlertDialog dialog = builder.create();
// Source: http://stackoverflow.com/a/7636468/2175837
dialog.setOnShowListener(new DialogInterface.OnShowListener() {
@Override
public void onShow(final DialogInterface dialog) {
Button mButton = ((AlertDialog) dialog).getButton(AlertDialog.BUTTON_POSITIVE);
mButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
String string = input.getText().toString().trim();
if (!string.isEmpty()) { // Make sure the input is valid
// If input is valid, dismiss the dialog and add the label to the array
dialog.dismiss();
addLabel(string);
}
}
});
}
});
// Show keyboard, so the user can start typing straight away
dialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_VISIBLE);
dialog.show();
}
private void addLabel(String string) {
String label = string.substring(0, 1).toUpperCase(Locale.US) + string.substring(1).trim().toLowerCase(Locale.US); // Make sure that the name is always uppercase and rest is lowercase
imagesLabels.add(label); // Add label to list of labels
Log.i(TAG, "Label: " + label);
}
public boolean SaveImage() {
File path = new File(Environment.getExternalStorageDirectory(), "TrainedData");
path.mkdirs();
String filename = "lbph_trained_data.xml";
File file = new File(path, filename);
recognize.save(file.toString());
if(file.exists())
return true;
return false;
}
public void cropedImages(Mat mat) {
Rect rect_Crop=null;
for(Rect face: faces.toArray()) {
rect_Crop = new Rect(face.x, face.y, face.width, face.height);
}
Mat croped = new Mat(mat, rect_Crop);
images.add(croped);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.train_main);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
Stetho.initializeWithDefaults(this);
if (hasPermissions()){
Toast.makeText(this, "Permission Granted", Toast.LENGTH_SHORT).show();
Log.i(TAG, "Permission Granted Before");
}
else {
requestPerms();
}
openCVCamera = (CameraBridgeViewBase)findViewById(R.id.java_camera_view);
openCVCamera.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_FRONT);
openCVCamera.setVisibility(SurfaceView.VISIBLE);
openCVCamera.setCvCameraViewListener(this);
local = new Storage(this);
Button detect = (Button)findViewById(R.id.take_picture_button);
detect.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if(gray.total() == 0)
Toast.makeText(getApplicationContext(), "Can't Detect Faces", Toast.LENGTH_SHORT).show();
classifier.detectMultiScale(gray,faces,1.1,3,0|CASCADE_SCALE_IMAGE, new Size(30,30));
if(!faces.empty()) {
if(faces.toArray().length > 1)
Toast.makeText(getApplicationContext(), "Mutliple Faces Are not allowed", Toast.LENGTH_SHORT).show();
else {
if(gray.total() == 0) {
Log.i(TAG, "Empty gray image");
return;
}
cropedImages(gray);
showLabelsDialog();
Toast.makeText(getApplicationContext(), "Face Detected", Toast.LENGTH_SHORT).show();
}
}else
Toast.makeText(getApplicationContext(), "Unknown Face", Toast.LENGTH_SHORT).show();
}
});
}
@SuppressLint("WrongConstant")
private boolean hasPermissions(){
int res = 0;
//string array of permissions,
String[] permissions = new String[]{Manifest.permission.CAMERA};
for (String perms : permissions){
res = checkCallingOrSelfPermission(perms);
if (!(res == PackageManager.PERMISSION_GRANTED)){
return false;
}
}
return true;
}
private void requestPerms(){
String[] permissions = new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE,Manifest.permission.READ_EXTERNAL_STORAGE};
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M){
requestPermissions(permissions,PERMS_REQUEST_CODE);
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
boolean allowed = true;
switch (requestCode){
case PERMS_REQUEST_CODE:
for (int res : grantResults){
// if user granted all permissions.
allowed = allowed && (res == PackageManager.PERMISSION_GRANTED);
}
break;
default:
// if user not granted permissions.
allowed = false;
break;
}
if (allowed){
//user granted all permissions we can perform our task.
Log.i(TAG, "Permission has been added");
}
else {
// we will give warning to user that they haven't granted permissions.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA) || shouldShowRequestPermissionRationale(Manifest.permission.WRITE_EXTERNAL_STORAGE) ||
shouldShowRequestPermissionRationale(Manifest.permission.READ_EXTERNAL_STORAGE)){
Toast.makeText(this, "Permission Denied.", Toast.LENGTH_SHORT).show();
}
}
}
}
private BaseLoaderCallback callbackLoader = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch(status) {
case BaseLoaderCallback.SUCCESS:
faces = new MatOfRect();
openCVCamera.enableView();
images = local.getListMat("images");
imagesLabels = local.getListString("imagesLabels");
break;
default:
super.onManagerConnected(status);
break;
}
}
};
@Override
protected void onPause() {
super.onPause();
if(openCVCamera != null)
openCVCamera.disableView();
}
@Override
protected void onStop() {
super.onStop();
if (images != null && imagesLabels != null) {
local.putListMat("images", images);
local.putListString("imagesLabels", imagesLabels);
Log.i(TAG, "Images have been saved");
if(trainfaces()) {
images.clear();
imagesLabels.clear();
}
}
}
@Override
protected void onDestroy(){
super.onDestroy();
if(openCVCamera != null)
openCVCamera.disableView();
}
@Override
protected void onResume(){
super.onResume();
if(OpenCVLoader.initDebug()) {
Log.i(TAG, "System Library Loaded Successfully");
callbackLoader.onManagerConnected(BaseLoaderCallback.SUCCESS);
} else {
Log.i(TAG, "Unable To Load System Library");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION, this, callbackLoader);
}
}
@Override
public void onCameraViewStarted(int width, int height) {
rgba = new Mat();
gray = new Mat();
classifier = FileUtils.loadXMLS(this, "lbpcascade_frontalface_improved.xml");
}
@Override
public void onCameraViewStopped() {
rgba.release();
gray.release();
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
Mat mGrayTmp = inputFrame.gray();
Mat mRgbaTmp = inputFrame.rgba();
int orientation = openCVCamera.getScreenOrientation();
if (openCVCamera.isEmulator()) // Treat emulators as a special case
Core.flip(mRgbaTmp, mRgbaTmp, 1); // Flip along y-axis
else {
switch (orientation) { // RGB image
case ActivityInfo.SCREEN_ORIENTATION_PORTRAIT:
case ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT:
Core.flip(mRgbaTmp, mRgbaTmp, 0); // Flip along x-axis
break;
case ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE:
case ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE:
Core.flip(mRgbaTmp, mRgbaTmp, 1); // Flip along y-axis
break;
}
switch (orientation) { // Grayscale image
case ActivityInfo.SCREEN_ORIENTATION_PORTRAIT:
Core.transpose(mGrayTmp, mGrayTmp); // Rotate image
Core.flip(mGrayTmp, mGrayTmp, -1); // Flip along both axis
break;
case ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT:
Core.transpose(mGrayTmp, mGrayTmp); // Rotate image
break;
case ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE:
Core.flip(mGrayTmp, mGrayTmp, 1); // Flip along y-axis
break;
case ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE:
Core.flip(mGrayTmp, mGrayTmp, 0); // Flip along x-axis
break;
}
}
gray = mGrayTmp;
rgba = mRgbaTmp;
Imgproc.resize(gray, gray, new Size(200,200.0f/ ((float)gray.width()/ (float)gray.height())));
return rgba;
}
}
The error lines mentioned in the logcat refer to these Java code lines:
In TrainActivity.java:
recognize = LBPHFaceRecognizer.create(3,8,8,8,200);
recognize.train(imagesMatrix, vectorClasses);
And
if(trainfaces()) {
images.clear();
imagesLabels.clear();
}
Do you guys have any idea about this problem and how to fix it?
PS: I am a beginner with OpenCV.
You can solve it by resetting the preferences in onCreate of TrainActivity:
// -------------------------------
// reset preferences
local = new Storage(this);
images = new ArrayList<>();
imagesLabels = new ArrayList<>();
local.putListMat("images", images);
local.putListString("imagesLabels", imagesLabels);
// -------------------------------
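As an extra safeguard (my suggestion, not part of the original answer): the "s >= 0 in function 'setSize'" assertion inside FaceRecognizer.train() is typically triggered by empty or corrupt training Mats restored from storage, so a defensive check before training also prevents the onStop() crash:
private boolean safeToTrain() {
    if (images == null || images.isEmpty())
        return false; // nothing to train on
    for (Mat image : images) {
        if (image == null || image.empty())
            return false; // an empty Mat is what trips the setSize assertion
    }
    return images.size() == imagesLabels.size(); // every image needs a label
}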
I am creating an Android camera app using OpenCV. First I made it so that when I click the start button, it starts taking pictures, taking one picture every second and storing it. That works perfectly for me.
Then I made it trickier by forcing it to take 5 pictures per second, and that also works well.
But I ran into a problem when I pushed it to 20 pictures per second. It did not work; the app hangs as soon as I click the start button. I don't know why, but I think the problem is in the threading.
Can someone help me figure out how to do this?
I have two Java files in my project; here is the code.
package org.opencv.samples.tutorial3;
import java.io.FileOutputStream;
import java.util.List;
import org.opencv.android.JavaCameraView;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.Size;
import android.util.AttributeSet;
import android.util.Log;
public class Tutorial3View extends JavaCameraView implements PictureCallback {
private static final String TAG = "Sample::Tutorial3View";
private String mPictureFileName;
public Tutorial3View(Context context, AttributeSet attrs) {
super(context, attrs);
}
public List<String> getEffectList() {
return mCamera.getParameters().getSupportedColorEffects();
}
public boolean isEffectSupported() {
return (mCamera.getParameters().getColorEffect() != null);
}
public String getEffect() {
return mCamera.getParameters().getColorEffect();
}
public void setEffect(String effect) {
Camera.Parameters params = mCamera.getParameters();
params.setColorEffect(effect);
mCamera.setParameters(params);
}
public List<Size> getResolutionList() {
return mCamera.getParameters().getSupportedPreviewSizes();
}
public void setResolution(Size resolution) {
disconnectCamera();
mMaxHeight = resolution.height;
mMaxWidth = resolution.width;
connectCamera(getWidth(), getHeight());
}
public Size getResolution() {
return mCamera.getParameters().getPreviewSize();
}
public void takePicture(final String fileName) {
Log.i(TAG, "Taking picture");
this.mPictureFileName = fileName;
// Postview and jpeg are sent in the same buffers if the queue is not empty when performing a capture.
// Clear up buffers to avoid mCamera.takePicture to be stuck because of a memory issue
mCamera.setPreviewCallback(null);
// PictureCallback is implemented by the current class
mCamera.takePicture(null, null, this);
}
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Log.i(TAG, "Saving a bitmap to file");
// The camera preview was automatically stopped. Start it again.
mCamera.startPreview();
mCamera.setPreviewCallback(this);
// Write the image in a file (in jpeg format)
try {
FileOutputStream fos = new FileOutputStream(mPictureFileName);
fos.write(data);
fos.close();
} catch (java.io.IOException e) {
Log.e("PictureDemo", "Exception in photoCallback", e);
}
}
}
And the second one is:
package org.opencv.samples.tutorial3;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.ListIterator;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.hardware.Camera.Size;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.SubMenu;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnTouchListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.Toast;
public class Tutorial3Activity extends Activity implements CvCameraViewListener2 {
private static final String TAG = "OCVSample::Activity";
private Tutorial3View mOpenCvCameraView;
private List<Size> mResolutionList;
private MenuItem[] mEffectMenuItems;
private SubMenu mColorEffectsMenu;
private MenuItem[] mResolutionMenuItems;
private SubMenu mResolutionMenu;
Thread thread;
Button start,stop;
boolean loop=false;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public Tutorial3Activity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.tutorial3_surface_view);
start=(Button)findViewById(R.id.button2) ;
start.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
loop=true;
thread = new Thread(new task());
thread.start();
}
});
stop=(Button) findViewById(R.id.button);
stop.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
loop=false;
}
});
mOpenCvCameraView = (Tutorial3View) findViewById(R.id.tutorial3_activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
}
public void onCameraViewStopped() {
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
return inputFrame.rgba();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
List<String> effects = mOpenCvCameraView.getEffectList();
if (effects == null) {
Log.e(TAG, "Color effects are not supported by device!");
return true;
}
mColorEffectsMenu = menu.addSubMenu("Color Effect");
mEffectMenuItems = new MenuItem[effects.size()];
int idx = 0;
ListIterator<String> effectItr = effects.listIterator();
while(effectItr.hasNext()) {
String element = effectItr.next();
mEffectMenuItems[idx] = mColorEffectsMenu.add(1, idx, Menu.NONE, element);
idx++;
}
mResolutionMenu = menu.addSubMenu("Resolution");
mResolutionList = mOpenCvCameraView.getResolutionList();
mResolutionMenuItems = new MenuItem[mResolutionList.size()];
ListIterator<Size> resolutionItr = mResolutionList.listIterator();
idx = 0;
while(resolutionItr.hasNext()) {
Size element = resolutionItr.next();
mResolutionMenuItems[idx] = mResolutionMenu.add(2, idx, Menu.NONE,
Integer.valueOf(element.width).toString() + "x" + Integer.valueOf(element.height).toString());
idx++;
}
return true;
}
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item.getGroupId() == 1)
{
mOpenCvCameraView.setEffect((String) item.getTitle());
Toast.makeText(this, mOpenCvCameraView.getEffect(), Toast.LENGTH_SHORT).show();
}
else if (item.getGroupId() == 2)
{
int id = item.getItemId();
Size resolution = mResolutionList.get(id);
mOpenCvCameraView.setResolution(resolution);
resolution = mOpenCvCameraView.getResolution();
String caption = Integer.valueOf(resolution.width).toString() + "x" + Integer.valueOf(resolution.height).toString();
Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();
}
return true;
}
class task implements Runnable {
public void run() {
while (loop) {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
String currentDateandTime = sdf.format(new Date());
final String fileName = Environment.getExternalStorageDirectory().getPath() +
"/sample_picture_" + currentDateandTime + ".jpg";
mOpenCvCameraView.takePicture(fileName);
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(Tutorial3Activity.this, fileName + " saved", Toast.LENGTH_SHORT).show();
}
});
try {
Thread.sleep(200);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
}
}
Note: the above code works well for taking 5 pictures per second, but when I change Thread.sleep(200) to Thread.sleep(50) to take 20 pictures per second, it hangs.
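A possible direction (my sketch, not from the original post): Camera.takePicture() stops the preview and has to restart it for every shot, so it is unlikely to sustain 20 captures per second no matter how the threading is arranged. Since a CvCameraViewListener2 already receives a Mat for every preview frame, saving frames from onCameraFrame scales much better:
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    Mat rgba = inputFrame.rgba();
    if (loop) { // reuse the existing start/stop flag
        Mat bgr = new Mat();
        // imwrite expects BGR channel order, so convert before saving
        Imgproc.cvtColor(rgba, bgr, Imgproc.COLOR_RGBA2BGR);
        String fileName = Environment.getExternalStorageDirectory().getPath()
                + "/sample_picture_" + System.currentTimeMillis() + ".jpg";
        Imgcodecs.imwrite(fileName, bgr); // needs import org.opencv.imgcodecs.Imgcodecs
        bgr.release();
    }
    return rgba;
}
JPEG encoding on the camera thread can still drop frames at 20 fps, so handing the Mats off to a background writer thread would be the next refinement.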
I'm developing a little application for Android to display the GPS location with a bitmap. I used some example code written for Google Maps, but I have to do it with Mapsforge, and unfortunately I haven't found any example code or anything else about how to write this drawing operation. Could anybody help me?
My code is:
package hu.uniobuda.nik.k44hvx;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import org.mapsforge.android.maps.GeoPoint;
import org.mapsforge.android.maps.ItemizedOverlay;
import org.mapsforge.android.maps.MapActivity;
import org.mapsforge.android.maps.MapController;
import org.mapsforge.android.maps.MapView;
import org.mapsforge.android.maps.MapViewMode;
import org.mapsforge.android.maps.Overlay;
import org.mapsforge.android.maps.OverlayItem;
import org.mapsforge.android.maps.Projection;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.drawable.Drawable;
import android.location.Geocoder;
import android.location.Address;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.text.Html;
import android.widget.TextView;
import android.widget.Toast;
public class Night_WatchActivity extends MapActivity {
MapView mapview;
public MapController mc;
private LocationManager locman;
private LocationListener loclis;
List<Overlay> mapOverlays;
static Drawable pointDefaultMarker;
MapOverlay pointsOverlay;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mapview = new MapView(this);
mapview.setClickable(true);
mapview.setBuiltInZoomControls(true);
mapview.setMapViewMode(MapViewMode.MAPNIK_TILE_DOWNLOAD);
// mapview.setMapViewMode(MapViewMode.CANVAS_RENDERER);
//mapview.setMapFile("/sdcard/budapest.map");
mc = mapview.getController();
mc.setCenter(new GeoPoint(47.499619,19.046406));
mc.setZoom(17);
setContentView(mapview);
locman = (LocationManager) getSystemService(Context.LOCATION_SERVICE);
// loclis = new GPSLocationListener();
if(pointDefaultMarker==null){pointDefaultMarker = getResources().getDrawable(R.drawable.kepont);}
mapOverlays = mapview.getOverlays();
pointsOverlay = new MapOverlay(pointDefaultMarker);
mapOverlays.add(pointsOverlay);
locman.requestLocationUpdates(LocationManager.GPS_PROVIDER, 0, 0, loclis);
}
@Override
protected void onResume() {
locman.requestLocationUpdates(LocationManager.GPS_PROVIDER, 10*1000, 10, loclis);
super.onResume();
}
@Override
protected void onPause() {
locman.removeUpdates(loclis);
super.onPause();
}
private class GPSLocationListener implements LocationListener {
public void onLocationChanged(Location location) {
if(location!=null){
GeoPoint gp = new GeoPoint(
(int)(location.getLatitude()*1E6),
(int)(location.getLongitude()*1E6));
OverlayItem item = new OverlayItem(gp,null,null);
pointsOverlay.justOneOverlay(item);
mc.setZoom(16);
mc.setCenter(gp);
mapview.invalidate();
String address = PointToLocation(gp);
Toast.makeText(getBaseContext(),
address,
Toast.LENGTH_SHORT).show();
}
}
public void onProviderDisabled(String provider) {
}
public void onProviderEnabled(String provider) {
}
public void onStatusChanged(String provider, int status, Bundle extras) {
}
public String PointToLocation(GeoPoint hely){
String s ="";
Geocoder gcd = new Geocoder(getBaseContext(), Locale.getDefault());
try{
List<Address> adresses = gcd.getFromLocation(
hely.getLatitudeE6() / 1E6,
hely.getLongitudeE6()/1E6 , 1);
if(adresses.size()>0){
for(int i = 0; i < adresses.get(0).getMaxAddressLineIndex(); i++){
s += adresses.get(0).getAddressLine(i) + " ";
}
}
}catch(IOException exception){ exception.printStackTrace();}
return s;
}
}
class MapOverlay extends ItemizedOverlay<OverlayItem>{
private GeoPoint toDraw;
public GeoPoint getToDraw() {
return toDraw;
}
public void setToDraw(GeoPoint toDraw) {
this.toDraw = toDraw;
}
Paint p = new Paint();
private ArrayList<OverlayItem> overlays = new ArrayList<OverlayItem>();
private boolean drawLines = false;
public MapOverlay(Drawable defaultMarker) {
super(boundCenter(defaultMarker));
populate();
}
public void addOverlay(OverlayItem overlay){
overlays.add(overlay);
populate();
}
public void justOneOverlay(OverlayItem newOverlay){
overlays.clear();
addOverlay(newOverlay);
}
@Override
protected OverlayItem createItem(int i) {
return overlays.get(i);
}
@Override
public int size() {
return overlays.size();
}
public boolean isDrawLines() {
return drawLines;
}
public void setDrawLines(boolean drawLines) {
this.drawLines = drawLines;
}
@Override
protected void drawOverlayBitmap(Canvas canvas, Point drawPosition,
Projection projection, byte drawZoomLevel) {
Point ScreenPoint = new Point();
mapview.getProjection().toPixels(toDraw, ScreenPoint);
Bitmap bmp = BitmapFactory.decodeResource(getResources(), R.drawable.kepont);
canvas.drawBitmap(bmp,ScreenPoint.x,ScreenPoint.y-24, null);
}
}
}
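A hedged sketch of the draw override, assuming the Projection parameter in this Mapsforge version exposes toPixels(GeoPoint, Point) the same way mapview.getProjection() does: decode the marker bitmap once instead of on every draw call, and guard against drawing before the first GPS fix arrives:
private Bitmap markerBitmap; // decode once (e.g. in the constructor) rather than per frame

@Override
protected void drawOverlayBitmap(Canvas canvas, Point drawPosition,
        Projection projection, byte drawZoomLevel) {
    if (toDraw == null)
        return; // no GPS fix yet, nothing to draw
    Point screenPoint = new Point();
    // use the Projection handed to the callback instead of reaching back into the MapView
    projection.toPixels(toDraw, screenPoint);
    canvas.drawBitmap(markerBitmap, screenPoint.x, screenPoint.y - 24, null);
}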
I'm making live wallpapers for Android and I want the user to be able to choose options. The options menu is shown, but there is a problem: when I change any option and go back to the wallpaper screen, the wallpaper doesn't update with the new options. Why?
My WallpaperService code:
public MyWallpaperEngine(WallpaperService ws)
{
context = ws;
prefs = LiveWallpaperService.this.getSharedPreferences(SHARED_PREFS_NAME, 0);
OnSharedPreferenceChangeListener listener
= new SharedPreferences.OnSharedPreferenceChangeListener() {
public void onSharedPreferenceChanged(SharedPreferences prefs, String key)
{
if(key != null){
if(key.equals("BACKREPEAT")){
if(BACKREPEAT)
BACKREPEAT = false;
else
BACKREPEAT = true;
}
}
}
};
prefs.registerOnSharedPreferenceChangeListener(listener);
handler.post(drawRunner);
}
Update:
I have done it as it was written in the example, but the result hasn't changed.
LiveWallpaperService code:
package com.samples;
import android.content.SharedPreferences;
import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Handler;
import android.preference.PreferenceManager;
import android.service.wallpaper.WallpaperService;
import android.util.Log;
import android.view.SurfaceHolder;
public class LiveWallpaperService extends WallpaperService
{
public static final String SHARED_PREFS_NAME = "leoSettings01";
@Override
public void onCreate() {
super.onCreate();
}
@Override
public void onDestroy() {
super.onDestroy();
}
@Override
public Engine onCreateEngine() {
return new MyWallpaperEngine();
}
private class MyWallpaperEngine extends Engine implements
SharedPreferences.OnSharedPreferenceChangeListener {
private final Handler handler = new Handler();
int draw_x = 0;
int draw_y = 0;
//...
boolean BACKREPEAT = false;
private final Runnable drawRunner = new Runnable() {
@Override
public void run()
{
draw();
}
};
private boolean visible = true;
private SharedPreferences prefs;
MyWallpaperEngine()
{
prefs = LiveWallpaperService.this.getSharedPreferences(SHARED_PREFS_NAME, 0);
prefs.registerOnSharedPreferenceChangeListener(this);
onSharedPreferenceChanged(prefs, null);
handler.post(drawRunner);
}
public void onSharedPreferenceChanged(SharedPreferences prefs, String key) {
if(key != null)
{
Log.v("key:", key); //no message!
if(key.equals("BACKREPEAT")){
if(BACKREPEAT)
BACKREPEAT = false;
else
BACKREPEAT = true;
}
}
}
@Override
public void onVisibilityChanged(boolean visible) {
this.visible = visible;
if (visible) {
handler.post(drawRunner);
} else {
handler.removeCallbacks(drawRunner);
}
}
@Override
public void onSurfaceDestroyed(SurfaceHolder holder) {
super.onSurfaceDestroyed(holder);
this.visible = false;
handler.removeCallbacks(drawRunner);
}
private void draw()
{
SurfaceHolder holder = getSurfaceHolder();
Canvas canvas = null;
try
{
canvas = holder.lockCanvas();
//...draw draw draw
}
finally
{
if (canvas != null)
holder.unlockCanvasAndPost(canvas);
}
handler.removeCallbacks(drawRunner);
if (visible)
{
handler.postDelayed(drawRunner, DELAY);
}
}
}
}
prefs.xml:
<?xml version="1.0" encoding="utf-8"?>
<PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android">
<PreferenceCategory android:title="General">
<CheckBoxPreference
android:title="Animation repeat"
android:key="BACKREPEAT"
android:defaultValue="false"
/>
</PreferenceCategory>
</PreferenceScreen>
Your wallpaper will never restart after coming back from the settings screen; the only method invoked will be your Engine's onVisibilityChanged. If your Engine correctly implements SharedPreferences.OnSharedPreferenceChangeListener, then onSharedPreferenceChanged should be called too.
Please check whether you implemented OnSharedPreferenceChangeListener exactly this way:
http://developer.android.com/resources/samples/CubeLiveWallpaper/src/com/example/android/livecubes/cube2/CubeWallpaper2.html
If you did and it is still not working, please post your entire WallpaperService and settings preference activity code here.
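Two things worth checking along those lines (assumptions on my part, since the settings activity isn't shown). First, in the original snippet the listener was a local variable; SharedPreferences holds its listeners weakly, so a listener nothing else references can be garbage collected and silently stop firing. Implementing the listener on the Engine itself, as the updated code does, avoids that. Second, the Cube sample points its preference activity at the same named prefs file the Engine reads; if the settings screen writes to the default file instead, onSharedPreferenceChanged never fires. A minimal sketch of such an activity:
public class WallpaperSettings extends PreferenceActivity {
    @Override
    protected void onCreate(Bundle icicle) {
        // Write to the same prefs file ("leoSettings01") the Engine listens to;
        // otherwise changes land in the default file and the listener never fires.
        getPreferenceManager().setSharedPreferencesName(
                LiveWallpaperService.SHARED_PREFS_NAME);
        super.onCreate(icicle);
        addPreferencesFromResource(R.xml.prefs);
    }
}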