MLKit Object Detection is not detecting objects - java

ML Kit by Google (without Firebase) is new, so I'm having trouble. I'm trying to follow the example here: https://developers.google.com/ml-kit/vision/object-detection/custom-models/android
The app opens fine and the camera works (as in, I can see the preview). But the actual detection doesn't seem to do anything.
Am I missing part of the code to actually detect the object? Or is it an issue with the implementation of CameraX or InputImage?
package com.example.mlkitobjecttest;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.camera.core.Camera;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.CameraX;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.Preview;
import androidx.camera.core.impl.PreviewConfig;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.camera.view.PreviewView;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.lifecycle.LifecycleOwner;
import android.content.pm.PackageManager;
import android.graphics.Rect;
import android.media.Image;
import android.os.Bundle;
import android.text.Layout;
import android.util.Rational;
import android.util.Size;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.mlkit.common.model.LocalModel;
import com.google.mlkit.vision.common.InputImage;
import com.google.mlkit.vision.objects.DetectedObject;
import com.google.mlkit.vision.objects.ObjectDetection;
import com.google.mlkit.vision.objects.ObjectDetector;
import com.google.mlkit.vision.objects.custom.CustomObjectDetectorOptions;
import org.w3c.dom.Text;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
public class MainActivity extends AppCompatActivity {
private class YourAnalyzer implements ImageAnalysis.Analyzer {
@Override
@androidx.camera.core.ExperimentalGetImage
public void analyze(ImageProxy imageProxy) {
Image mediaImage = imageProxy.getImage();
if (mediaImage != null) {
InputImage image =
InputImage.fromMediaImage(mediaImage, imageProxy.getImageInfo().getRotationDegrees());
// Pass image to an ML Kit Vision API
// ...
LocalModel localModel =
new LocalModel.Builder()
.setAssetFilePath("mobilenet_v1_1.0_128_quantized_1_default_1.tflite")
// or .setAbsoluteFilePath(absolute file path to tflite model)
.build();
CustomObjectDetectorOptions customObjectDetectorOptions =
new CustomObjectDetectorOptions.Builder(localModel)
.setDetectorMode(CustomObjectDetectorOptions.SINGLE_IMAGE_MODE)
.enableMultipleObjects()
.enableClassification()
.setClassificationConfidenceThreshold(0.5f)
.setMaxPerObjectLabelCount(3)
.build();
ObjectDetector objectDetector =
ObjectDetection.getClient(customObjectDetectorOptions);
objectDetector
.process(image)
.addOnFailureListener(new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception e) {
//Toast.makeText(getApplicationContext(), "Fail. Sad!", Toast.LENGTH_SHORT).show();
//textView.setText("Fail. Sad!");
imageProxy.close();
}
})
.addOnSuccessListener(new OnSuccessListener<List<DetectedObject>>() {
@Override
public void onSuccess(List<DetectedObject> results) {
for (DetectedObject detectedObject : results) {
Rect box = detectedObject.getBoundingBox();
for (DetectedObject.Label label : detectedObject.getLabels()) {
String text = label.getText();
int index = label.getIndex();
float confidence = label.getConfidence();
textView.setText(text);
}}
imageProxy.close();
}
});
}
//ImageAnalysis.Builder.fromConfig(new ImageAnalysisConfig).setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST);
}
}
PreviewView prevView;
private ListenableFuture<ProcessCameraProvider> cameraProviderFuture;
private ExecutorService executor = Executors.newSingleThreadExecutor();
TextView textView;
private int REQUEST_CODE_PERMISSIONS = 101;
private String[] REQUIRED_PERMISSIONS = new String[]{"android.permission.CAMERA"};
/* @NonNull
@Override
public CameraXConfig getCameraXConfig() {
return CameraXConfig.Builder.fromConfig(Camera2Config.defaultConfig())
.setCameraExecutor(ContextCompat.getMainExecutor(this))
.build();
}
*/
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
prevView = findViewById(R.id.viewFinder);
textView = findViewById(R.id.scan_button);
if(allPermissionsGranted()){
startCamera();
}else{
ActivityCompat.requestPermissions(this, REQUIRED_PERMISSIONS, REQUEST_CODE_PERMISSIONS);
}
}
private void startCamera() {
cameraProviderFuture = ProcessCameraProvider.getInstance(this);
cameraProviderFuture.addListener(new Runnable() {
@Override
public void run() {
try {
ProcessCameraProvider cameraProvider = cameraProviderFuture.get();
bindPreview(cameraProvider);
} catch (ExecutionException | InterruptedException e) {
// No errors need to be handled for this Future.
// This should never be reached.
}
}
}, ContextCompat.getMainExecutor(this));
}
void bindPreview(@NonNull ProcessCameraProvider cameraProvider) {
Preview preview = new Preview.Builder()
.build();
CameraSelector cameraSelector = new CameraSelector.Builder()
.requireLensFacing(CameraSelector.LENS_FACING_BACK)
.build();
preview.setSurfaceProvider(prevView.createSurfaceProvider());
ImageAnalysis imageAnalysis =
new ImageAnalysis.Builder()
.setTargetResolution(new Size(1280, 720))
.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
.build();
imageAnalysis.setAnalyzer(ContextCompat.getMainExecutor(this), new YourAnalyzer());
Camera camera = cameraProvider.bindToLifecycle((LifecycleOwner)this, cameraSelector, preview, imageAnalysis);
}
private boolean allPermissionsGranted() {
for(String permission: REQUIRED_PERMISSIONS){
if(ContextCompat.checkSelfPermission(this, permission) != PackageManager.PERMISSION_GRANTED){
return false;
}
}
return true;
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
if(requestCode == REQUEST_CODE_PERMISSIONS){
if(allPermissionsGranted()){
startCamera();
} else{
Toast.makeText(this, "Permissions not granted by the user.", Toast.LENGTH_SHORT).show();
this.finish();
}
}
}
}

Nothing is detected because you defined the wrong path to the tflite model file. Your emulator or physical device cannot resolve the given path because it doesn't exist on the mobile device: C:\\Users\\dude\\Documents\\mlkitobjecttest\\app\\src\\main\\assets\\mobilenet_v1_1.0_128_quantized_1_default_1.tflite
Copy your model mobilenet_v1_1.0_128_quantized_1_default_1.tflite into the assets directory under your app's src/main directory.
If you do not have that directory, just create a new one named assets.
At the end it should look like this:
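That is, the .tflite file should end up directly inside src/main/assets, matching the name passed to setAssetFilePath (directory sketch):
app/
  src/
    main/
      assets/
        mobilenet_v1_1.0_128_quantized_1_default_1.tflite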
After that, fix the LocalModel initialization code:
LocalModel localModel =
new LocalModel.Builder()
.setAssetFilePath("mobilenet_v1_1.0_128_quantized_1_default_1.tflite")
// or .setAbsoluteFilePath(absolute file path to tflite model)
.build();
Update: one more issue found
The ImageAnalysis instance was not bound to the CameraProvider:
...
ImageAnalysis imageAnalysis = ...
Camera camera = cameraProvider.bindToLifecycle((LifecycleOwner)this, cameraSelector, preview); // imageAnalysis is not used
To fix it, pass the imageAnalysis variable as the last argument to the bindToLifecycle method:
Camera camera = cameraProvider.bindToLifecycle((LifecycleOwner)this, cameraSelector, preview, imageAnalysis);
Second update: another issue found
ML Kit cannot process an image if it is closed while processing is in progress or right before processing starts. I'm talking about the imageProxy.close() call inside public void analyze(ImageProxy imageProxy).
Java documentation of close() method:
/**
* Free up this frame for reuse.
* <p>
* After calling this method, calling any methods on this {@code Image} will
* result in an {@link IllegalStateException}, and attempting to read from
* or write to {@link ByteBuffer ByteBuffers} returned by an earlier
* {@link Plane#getBuffer} call will have undefined behavior. If the image
* was obtained from {@link ImageWriter} via
* {@link ImageWriter#dequeueInputImage()}, after calling this method, any
* image data filled by the application will be lost and the image will be
* returned to {@link ImageWriter} for reuse. Images given to
* {@link ImageWriter#queueInputImage queueInputImage()} are automatically
* closed.
* </p>
*/
To fix that, move imageProxy.close() into the failure and success listeners:
objectDetector
.process(image)
.addOnFailureListener(new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception e) {
Toast.makeText(getApplicationContext(), "Fail. Sad!", Toast.LENGTH_LONG).show();
...
imageProxy.close();
}
})
.addOnSuccessListener(new OnSuccessListener<List<DetectedObject>>() {
@Override
public void onSuccess(List<DetectedObject> results) {
Toast.makeText(getBaseContext(), "Success...", Toast.LENGTH_LONG).show();
...
imageProxy.close();
}
});
The fixed solution was tested with an image classification model from TensorFlow, and the test was successful.
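Putting the three fixes together, a condensed analyzer could look like the sketch below. This is illustrative only; it additionally builds the LocalModel and ObjectDetector once as fields instead of on every frame, and closes the proxy in an addOnCompleteListener, which is equivalent to closing it in both listeners. The log tag and lambda style are my own choices, not part of the original code.
private class YourAnalyzer implements ImageAnalysis.Analyzer {

    // Built once instead of on every frame (an optimization, not required for correctness).
    private final LocalModel localModel = new LocalModel.Builder()
            .setAssetFilePath("mobilenet_v1_1.0_128_quantized_1_default_1.tflite")
            .build();
    private final ObjectDetector objectDetector = ObjectDetection.getClient(
            new CustomObjectDetectorOptions.Builder(localModel)
                    .setDetectorMode(CustomObjectDetectorOptions.SINGLE_IMAGE_MODE)
                    .enableMultipleObjects()
                    .enableClassification()
                    .setClassificationConfidenceThreshold(0.5f)
                    .setMaxPerObjectLabelCount(3)
                    .build());

    @Override
    @androidx.camera.core.ExperimentalGetImage
    public void analyze(ImageProxy imageProxy) {
        Image mediaImage = imageProxy.getImage();
        if (mediaImage == null) {
            imageProxy.close();
            return;
        }
        InputImage image = InputImage.fromMediaImage(
                mediaImage, imageProxy.getImageInfo().getRotationDegrees());
        objectDetector.process(image)
                .addOnSuccessListener(results -> {
                    for (DetectedObject detectedObject : results) {
                        for (DetectedObject.Label label : detectedObject.getLabels()) {
                            textView.setText(label.getText());
                        }
                    }
                })
                .addOnFailureListener(e -> android.util.Log.e("YourAnalyzer", "Detection failed", e))
                // Release the frame only after ML Kit has finished with it.
                .addOnCompleteListener(task -> imageProxy.close());
    }
}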

Related

Android Studio CameraX Module

I am working on an app that can open the camera to take a picture and then show it in an ImageView in the next activity, or select a picture from a file and then view it in an ImageView in the next activity.
I have a Java class named CameraActivity for my app.
Below is the code I want to use. It was working before in another app, but after an Android Studio update it is not working in the new app.
I guess it's deprecated. I need this code to work. Please help me with the necessary changes.
package com.example.myapplication;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.camera.core.CameraX;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageCaptureConfig;
import androidx.camera.core.Preview;
import androidx.camera.core.PreviewConfig;
import androidx.lifecycle.LifecycleOwner;
import android.annotation.SuppressLint;
import android.content.Intent;
import android.graphics.Matrix;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Rational;
import android.util.Size;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.Toast;
import java.io.File;
public class CameraActivity extends AppCompatActivity {
TextureView viewFinder;
ImageView imgCapture;
private static final int CAMERA_REQUEST = 1888;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera);
viewFinder=findViewById(R.id.viewFinder);
imgCapture = findViewById(R.id.imgCapture);
startCamera();
}
@SuppressLint("RestrictedApi")
public void startCamera(){
CameraX.unbindAll();
Rational aspectRatio = new Rational(viewFinder.getWidth(),viewFinder.getHeight());
Size screen = new Size(viewFinder.getWidth(),viewFinder.getHeight());
PreviewConfig previewConfig = new PreviewConfig.Builder().setTargetAspectRatio(aspectRatio).setTargetResolution(screen).build();
Preview preview = new Preview(previewConfig);
preview.setOnPreviewOutputUpdateListener(new Preview.OnPreviewOutputUpdateListener() {
@Override
public void onUpdated(Preview.PreviewOutput output) {
ViewGroup parent = (ViewGroup)viewFinder.getParent();
parent.removeView(viewFinder);
parent.addView(viewFinder,0);
viewFinder.setSurfaceTexture(output.getSurfaceTexture());
updateTransform();
}
});
ImageCaptureConfig imageCaptureConfig = new ImageCaptureConfig.Builder().setCaptureMode(ImageCapture.CaptureMode.MIN_LATENCY).setTargetRotation(getWindowManager().getDefaultDisplay().getRotation()).build();
final ImageCapture imageCapture = new ImageCapture(imageCaptureConfig);
imgCapture.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
File file = new File(Environment.getExternalStorageDirectory()+"/"+System.currentTimeMillis()+".jpg");
imageCapture.takePicture(file, new ImageCapture.OnImageSavedListener() {
@SuppressLint("ShowToast")
@Override
public void onImageSaved(@NonNull File file) {
Toast.makeText(getApplicationContext(),"Pic Captured at" + file.getAbsolutePath(), Toast.LENGTH_SHORT).show();
Intent intent = new Intent(CameraActivity.this,ShowPhotoActivity.class);
intent.putExtra("path",file.getAbsoluteFile()+"");
startActivity(intent);
}
@Override
public void onError(@NonNull ImageCapture.UseCaseError useCaseError, @NonNull String message, @Nullable Throwable cause) {
Toast.makeText(getApplicationContext(),"Pic Captured Failed! " + message, Toast.LENGTH_SHORT).show();
if(cause!=null){
cause.printStackTrace();
}
}
});
}
});
CameraX.bindToLifecycle((LifecycleOwner)this, preview, imageCapture);
}
public void updateTransform(){
Matrix mx = new Matrix();
float w = viewFinder.getMeasuredWidth();
float h = viewFinder.getMeasuredHeight();
float cx = w/2f;
float cy = h/2f;
int rotationDgr = 90;
int rotation = (int)viewFinder.getRotation();
switch(rotation){
case Surface.ROTATION_0:
rotationDgr = 0;
break;
case Surface.ROTATION_90:
rotationDgr = 90;
break;
case Surface.ROTATION_180:
rotationDgr = 180;
break;
case Surface.ROTATION_270:
rotationDgr = 270;
break;
default:
return;
}
mx.postRotate((float)rotationDgr,cx,cy);
viewFinder.setTransform(mx);
}
}
I am attaching the images below to help with identifying the errors.
Use these dependencies.
// CameraX
implementation "androidx.camera:camera-core:1.2.0-beta02"
// link UI's lifecycle with camera-x (It will handle by itself)
implementation "androidx.camera:camera-lifecycle:1.2.0-beta02"
// Get camera-x's previewView
implementation "androidx.camera:camera-view:1.2.0-beta02"
// For bokeh & else
implementation "androidx.camera:camera-extensions:1.2.0-beta02"
// Added for choosing best quality Camera Selection
implementation 'androidx.camera:camera-camera2:1.1.0'
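Note that the question's code uses the pre-1.0 CameraX API (PreviewConfig, ImageCaptureConfig, Preview.OnPreviewOutputUpdateListener), which no longer exists in these artifacts. Below is a rough sketch of the equivalent flow with the current API; it assumes the TextureView in the layout is replaced by a PreviewView with the same id, and reuses imgCapture and ShowPhotoActivity from the question.
// Sketch only. Additional imports needed: androidx.camera.core.*, androidx.camera.lifecycle.ProcessCameraProvider,
// androidx.camera.view.PreviewView, androidx.core.content.ContextCompat,
// com.google.common.util.concurrent.ListenableFuture, java.util.concurrent.ExecutionException.
private void startCamera() {
    PreviewView previewView = findViewById(R.id.viewFinder);
    ListenableFuture<ProcessCameraProvider> future = ProcessCameraProvider.getInstance(this);
    future.addListener(() -> {
        try {
            ProcessCameraProvider cameraProvider = future.get();

            Preview preview = new Preview.Builder().build();
            preview.setSurfaceProvider(previewView.getSurfaceProvider());

            ImageCapture imageCapture = new ImageCapture.Builder()
                    .setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
                    .build();

            cameraProvider.unbindAll();
            cameraProvider.bindToLifecycle(this, CameraSelector.DEFAULT_BACK_CAMERA, preview, imageCapture);

            imgCapture.setOnClickListener(v -> {
                // App-specific storage avoids the WRITE_EXTERNAL_STORAGE permission.
                File file = new File(getExternalFilesDir(null), System.currentTimeMillis() + ".jpg");
                ImageCapture.OutputFileOptions options =
                        new ImageCapture.OutputFileOptions.Builder(file).build();
                imageCapture.takePicture(options, ContextCompat.getMainExecutor(this),
                        new ImageCapture.OnImageSavedCallback() {
                            @Override
                            public void onImageSaved(@NonNull ImageCapture.OutputFileResults results) {
                                Intent intent = new Intent(CameraActivity.this, ShowPhotoActivity.class);
                                intent.putExtra("path", file.getAbsolutePath());
                                startActivity(intent);
                            }

                            @Override
                            public void onError(@NonNull ImageCaptureException exception) {
                                Toast.makeText(getApplicationContext(),
                                        "Pic Capture Failed! " + exception.getMessage(),
                                        Toast.LENGTH_SHORT).show();
                            }
                        });
            });
        } catch (ExecutionException | InterruptedException e) {
            e.printStackTrace();
        }
    }, ContextCompat.getMainExecutor(this));
}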

CameraXLivePreview: Can not create image processor: Object Detection

I'm creating an app using ML Kit and I finally got it built, but it's not working and it keeps throwing errors; I can't understand why they keep occurring. Here is my code and the error, please help me.
I just copied the ML Kit quickstart sample, using only the pose detector, and it says "Invalid model name". I don't know why it says that. Can you guys help me?
/*
* Copyright 2020 Google LLC. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.pose;
import androidx.camera.core.ImageAnalysis;
import androidx.lifecycle.ViewModelProvider;
import androidx.lifecycle.ViewModelProvider.AndroidViewModelFactory;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.appcompat.app.AppCompatActivity;
import android.util.Log;
import android.util.Size;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.ArrayAdapter;
import android.widget.CompoundButton;
import android.widget.ImageView;
import android.widget.Toast;
import android.widget.ToggleButton;
import androidx.camera.core.CameraInfoUnavailableException;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.Preview;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.camera.view.PreviewView;
import androidx.core.app.ActivityCompat;
import androidx.core.app.ActivityCompat.OnRequestPermissionsResultCallback;
import androidx.core.content.ContextCompat;
import com.google.android.gms.common.annotation.KeepName;
import com.google.mlkit.common.MlKitException;
import com.google.mlkit.common.model.LocalModel;
import com.example.pose.CameraXViewModel;
import com.example.pose.GraphicOverlay;
import com.google.mlkit.vision.demo.R;
import com.example.pose.VisionImageProcessor;
import com.example.pose.posedetector.PoseDetectorProcessor;
import com.example.pose.preference.PreferenceUtils;
import com.google.mlkit.vision.pose.PoseDetectorOptions;
import java.util.ArrayList;
import java.util.List;
/** Live preview demo app for ML Kit APIs using CameraX. */
@KeepName
@RequiresApi(VERSION_CODES.LOLLIPOP)
public final class CameraXLivePreviewActivity extends AppCompatActivity
implements OnRequestPermissionsResultCallback,
OnItemSelectedListener,
CompoundButton.OnCheckedChangeListener {
private static final String TAG = "CameraXLivePreview";
private static final int PERMISSION_REQUESTS = 1;
private static final String OBJECT_DETECTION = "Object Detection";
private static final String POSE_DETECTION = "Pose Detection";
private static final String STATE_SELECTED_MODEL = "selected_model";
private static final String STATE_LENS_FACING = "lens_facing";
private PreviewView previewView;
private GraphicOverlay graphicOverlay;
@Nullable private ProcessCameraProvider cameraProvider;
@Nullable private Preview previewUseCase;
@Nullable private ImageAnalysis analysisUseCase;
@Nullable private VisionImageProcessor imageProcessor;
private boolean needUpdateGraphicOverlayImageSourceInfo;
private String selectedModel = OBJECT_DETECTION;
private int lensFacing = CameraSelector.LENS_FACING_BACK;
private CameraSelector cameraSelector;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.d(TAG, "onCreate");
if (VERSION.SDK_INT < VERSION_CODES.LOLLIPOP) {
Toast.makeText(
getApplicationContext(),
"CameraX is only supported on SDK version >=21. Current SDK version is "
+ VERSION.SDK_INT,
Toast.LENGTH_LONG)
.show();
return;
}
if (savedInstanceState != null) {
selectedModel = savedInstanceState.getString(STATE_SELECTED_MODEL, OBJECT_DETECTION);
lensFacing = savedInstanceState.getInt(STATE_LENS_FACING, CameraSelector.LENS_FACING_BACK);
}
cameraSelector = new CameraSelector.Builder().requireLensFacing(lensFacing).build();
setContentView(R.layout.activity_vision_camerax_live_preview);
previewView = findViewById(R.id.preview_view);
if (previewView == null) {
Log.d(TAG, "previewView is null");
}
graphicOverlay = findViewById(R.id.graphic_overlay);
if (graphicOverlay == null) {
Log.d(TAG, "graphicOverlay is null");
}
ToggleButton facingSwitch = findViewById(R.id.facing_switch);
facingSwitch.setOnCheckedChangeListener(this);
new ViewModelProvider(this, AndroidViewModelFactory.getInstance(getApplication()))
.get(CameraXViewModel.class)
.getProcessCameraProvider()
.observe(
this,
provider -> {
cameraProvider = provider;
if (allPermissionsGranted()) {
bindAllCameraUseCases();
}
});
if (!allPermissionsGranted()) {
getRuntimePermissions();
}
}
@Override
protected void onSaveInstanceState(@NonNull Bundle bundle) {
super.onSaveInstanceState(bundle);
bundle.putString(STATE_SELECTED_MODEL, selectedModel);
bundle.putInt(STATE_LENS_FACING, lensFacing);
}
@Override
public synchronized void onItemSelected(AdapterView<?> parent, View view, int pos, long id) {
// An item was selected. You can retrieve the selected item using
// parent.getItemAtPosition(pos)
selectedModel = parent.getItemAtPosition(pos).toString();
Log.d(TAG, "Selected model: " + selectedModel);
bindAnalysisUseCase();
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
// Do nothing.
}
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
Log.d(TAG, "Set facing");
if (cameraProvider == null) {
return;
}
int newLensFacing =
lensFacing == CameraSelector.LENS_FACING_FRONT
? CameraSelector.LENS_FACING_BACK
: CameraSelector.LENS_FACING_FRONT;
CameraSelector newCameraSelector =
new CameraSelector.Builder().requireLensFacing(newLensFacing).build();
try {
if (cameraProvider.hasCamera(newCameraSelector)) {
lensFacing = newLensFacing;
cameraSelector = newCameraSelector;
bindAllCameraUseCases();
return;
}
} catch (CameraInfoUnavailableException e) {
// Falls through
}
Toast.makeText(
getApplicationContext(),
"This device does not have lens with facing: " + newLensFacing,
Toast.LENGTH_SHORT)
.show();
}
@Override
public void onResume() {
super.onResume();
bindAllCameraUseCases();
}
@Override
protected void onPause() {
super.onPause();
if (imageProcessor != null) {
imageProcessor.stop();
}
}
@Override
public void onDestroy() {
super.onDestroy();
if (imageProcessor != null) {
imageProcessor.stop();
}
}
private void bindAllCameraUseCases() {
if (cameraProvider != null) {
// As required by CameraX API, unbinds all use cases before trying to re-bind any of them.
cameraProvider.unbindAll();
bindPreviewUseCase();
bindAnalysisUseCase();
}
}
private void bindPreviewUseCase() {
if (!PreferenceUtils.isCameraLiveViewportEnabled(this)) {
return;
}
if (cameraProvider == null) {
return;
}
if (previewUseCase != null) {
cameraProvider.unbind(previewUseCase);
}
previewUseCase = new Preview.Builder().build();
previewUseCase.setSurfaceProvider(previewView.createSurfaceProvider());
cameraProvider.bindToLifecycle(/* lifecycleOwner= */ this, cameraSelector, previewUseCase);
}
private void bindAnalysisUseCase() {
if (cameraProvider == null) {
return;
}
if (analysisUseCase != null) {
cameraProvider.unbind(analysisUseCase);
}
if (imageProcessor != null) {
imageProcessor.stop();
}
try {
switch (selectedModel) {
case POSE_DETECTION:
PoseDetectorOptions poseDetectorOptions =
PreferenceUtils.getPoseDetectorOptionsForLivePreview(this);
boolean shouldShowInFrameLikelihood =
PreferenceUtils.shouldShowPoseDetectionInFrameLikelihoodLivePreview(this);
imageProcessor =
new PoseDetectorProcessor(this, poseDetectorOptions, shouldShowInFrameLikelihood);
break;
default:
throw new IllegalStateException("Invalid model name");
}
} catch (Exception e) {
Log.e(TAG, "Can not create image processor: " + selectedModel, e);
Toast.makeText(
getApplicationContext(),
"Can not create image processor: " + e.getLocalizedMessage(),
Toast.LENGTH_LONG)
.show();
return;
}
ImageAnalysis.Builder builder = new ImageAnalysis.Builder();
Size targetAnalysisSize = PreferenceUtils.getCameraXTargetAnalysisSize(this);
if (targetAnalysisSize != null) {
builder.setTargetResolution(targetAnalysisSize);
}
analysisUseCase = builder.build();
needUpdateGraphicOverlayImageSourceInfo = true;
analysisUseCase.setAnalyzer(
// imageProcessor.processImageProxy will use another thread to run the detection underneath,
// thus we can just run the analyzer itself on the main thread.
ContextCompat.getMainExecutor(this),
imageProxy -> {
if (needUpdateGraphicOverlayImageSourceInfo) {
boolean isImageFlipped = lensFacing == CameraSelector.LENS_FACING_FRONT;
int rotationDegrees = imageProxy.getImageInfo().getRotationDegrees();
if (rotationDegrees == 0 || rotationDegrees == 180) {
graphicOverlay.setImageSourceInfo(
imageProxy.getWidth(), imageProxy.getHeight(), isImageFlipped);
} else {
graphicOverlay.setImageSourceInfo(
imageProxy.getHeight(), imageProxy.getWidth(), isImageFlipped);
}
needUpdateGraphicOverlayImageSourceInfo = false;
}
try {
imageProcessor.processImageProxy(imageProxy, graphicOverlay);
} catch (MlKitException e) {
Log.e(TAG, "Failed to process image. Error: " + e.getLocalizedMessage());
Toast.makeText(getApplicationContext(), e.getLocalizedMessage(), Toast.LENGTH_SHORT)
.show();
}
});
cameraProvider.bindToLifecycle(/* lifecycleOwner= */ this, cameraSelector, analysisUseCase);
}
private String[] getRequiredPermissions() {
try {
PackageInfo info =
this.getPackageManager()
.getPackageInfo(this.getPackageName(), PackageManager.GET_PERMISSIONS);
String[] ps = info.requestedPermissions;
if (ps != null && ps.length > 0) {
return ps;
} else {
return new String[0];
}
} catch (Exception e) {
return new String[0];
}
}
private boolean allPermissionsGranted() {
for (String permission : getRequiredPermissions()) {
if (!isPermissionGranted(this, permission)) {
return false;
}
}
return true;
}
private void getRuntimePermissions() {
List<String> allNeededPermissions = new ArrayList<>();
for (String permission : getRequiredPermissions()) {
if (!isPermissionGranted(this, permission)) {
allNeededPermissions.add(permission);
}
}
if (!allNeededPermissions.isEmpty()) {
ActivityCompat.requestPermissions(
this, allNeededPermissions.toArray(new String[0]), PERMISSION_REQUESTS);
}
}
@Override
public void onRequestPermissionsResult(
int requestCode, String[] permissions, int[] grantResults) {
Log.i(TAG, "Permission granted!");
if (allPermissionsGranted()) {
bindAllCameraUseCases();
}
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
private static boolean isPermissionGranted(Context context, String permission) {
if (ContextCompat.checkSelfPermission(context, permission)
== PackageManager.PERMISSION_GRANTED) {
Log.i(TAG, "Permission granted: " + permission);
return true;
}
Log.i(TAG, "Permission NOT granted: " + permission);
return false;
}
}
E/CameraXLivePreview: Can not create image processor: Object Detection
java.lang.IllegalStateException: Invalid model name
at com.example.pose.CameraXLivePreviewActivity.bindAnalysisUseCase(CameraXLivePreviewActivity.java:271)
at com.example.pose.CameraXLivePreviewActivity.bindAllCameraUseCases(CameraXLivePreviewActivity.java:229)
at com.example.pose.CameraXLivePreviewActivity.onCheckedChanged(CameraXLivePreviewActivity.java:187)
at android.widget.CompoundButton.setChecked(CompoundButton.java:218)
at android.widget.ToggleButton.setChecked(ToggleButton.java:81)
at android.widget.CompoundButton.toggle(CompoundButton.java:137)
at android.widget.CompoundButton.performClick(CompoundButton.java:142)
at android.view.View.performClickInternal(View.java:7425)
at android.view.View.access$3600(View.java:810)
at android.view.View$PerformClick.run(View.java:28305)
at android.os.Handler.handleCallback(Handler.java:938)
at android.os.Handler.dispatchMessage(Handler.java:99)
at android.os.Looper.loop(Looper.java:223)
at android.app.ActivityThread.main(ActivityThread.java:7656)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:592)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:947)
Please change this line so that the feature selector selects pose detection:
private String selectedModel = OBJECT_DETECTION;
to
private String selectedModel = POSE_DETECTION;

Recycler View Showing Only One Item

Ok, so I was recently working on a small project for my own personal use, and along the way I implemented a small bit of code that did not work. So, as most people would, I deleted it. But later I experienced the same glitch that the earlier code was causing, yet when I checked, it was not there! I may just be plain stupid, blind, or maybe both, but I keep getting only one card in the RecyclerView to show up! Feel free to call me stupid if you can find an answer, and I thank you very much in advance.
P.S. Yes, I am very new to Java, so remember this before you decide to roast me.
My MainActivity:
package com.example.whateverthefuckyouwant;
//Imports, in case you couldn't tell
import android.net.Uri;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.helper.ItemTouchHelper;
import android.util.Log;
import android.widget.TextView;
import android.widget.Toast;
import com.google.android.gms.common.api.GoogleApiClient;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Arrays;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GoogleApiAvailability;
import com.google.api.client.extensions.android.http.AndroidHttp;
import com.google.api.client.googleapis.extensions.android.gms.auth.GoogleAccountCredential;
import com.google.api.client.googleapis.extensions.android.gms.auth.GooglePlayServicesAvailabilityIOException;
import com.google.api.client.googleapis.extensions.android.gms.auth.UserRecoverableAuthIOException;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.client.util.ExponentialBackOff;
import com.google.api.services.sheets.v4.SheetsScopes;
import com.google.api.services.sheets.v4.model.*;
import android.Manifest;
import android.accounts.AccountManager;
import android.app.Activity;
import android.app.Dialog;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.text.TextUtils;
import android.text.method.ScrollingMovementMethod;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import pub.devrel.easypermissions.AfterPermissionGranted;
import pub.devrel.easypermissions.EasyPermissions;
public class MainActivity extends AppCompatActivity implements EasyPermissions.PermissionCallbacks {
GoogleAccountCredential mCredential;
ProgressDialog mProgress;
static final int REQUEST_ACCOUNT_PICKER = 1000;
static final int REQUEST_AUTHORIZATION = 1001;
static final int REQUEST_GOOGLE_PLAY_SERVICES = 1002;
static final int REQUEST_PERMISSION_GET_ACCOUNTS = 1003;
private static final String PREF_ACCOUNT_NAME = "accountName";
private static final String[] SCOPES = {SheetsScopes.SPREADSHEETS_READONLY };
private RecyclerView mRecyclerView;
private RecyclerView.Adapter mAdapter;
private RecyclerView.LayoutManager mLayoutManager;
//ArrayList holding data from the Google Sheets API
ArrayList<Team> teamAF;
//Number of cards that the user has cycled through
int cardNumber = 1;
//Integers for direction of swipe. 8 = Right, 4 = Left
int RIGHT = 8;
int LEFT = 4;
boolean first = true;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
// Initialize credentials and service object.
mCredential = GoogleAccountCredential.usingOAuth2(
getApplicationContext(), Arrays.asList(SCOPES))
.setBackOff(new ExponentialBackOff());
//Create Recycler View
mRecyclerView = (RecyclerView) findViewById(R.id.my_recycler_view);
//Specific size of Cards
mRecyclerView.setHasFixedSize(true);
//Design choices using LinearLayoutManager
mLayoutManager = new LinearLayoutManager(this);
mRecyclerView.setLayoutManager(mLayoutManager);
//Create teamAF array list
teamAF = new ArrayList<>();
//Adds another card, so as to make an infinite number of cards
teamAF.add(new Team());
//Creating and setting mAdapter with teamAF
mAdapter = new MyAdapter(teamAF);
mRecyclerView.setAdapter(mAdapter);
//Swipe code for cardView
ItemTouchHelper.SimpleCallback simpleItemTouchCallback = new ItemTouchHelper.SimpleCallback(0, ItemTouchHelper.LEFT | ItemTouchHelper.RIGHT) {
//Don't know what onMove does
@Override
public boolean onMove(RecyclerView recyclerView, RecyclerView.ViewHolder viewHolder, RecyclerView.ViewHolder target) {
Toast.makeText(MainActivity.this, "on Move", Toast.LENGTH_SHORT).show();
return false;
}
// Direction 4 = left, Direction 8 = right
@Override
public void onSwiped(RecyclerView.ViewHolder viewHolder, int swipeDir) {
//Prints swipeDir integer into console
Log.e(".", swipeDir + "");
//Sets variables to null, so they can be reset later
Team t = null;
//String s is reset to a string of code that enables it to check the string in the API with your answer.
String s = "";
//Code for answer. swipeDir == Left(4); Right(8).
if (swipeDir == LEFT) {
//Sets s
s = ((TextView) viewHolder.itemView.findViewById(R.id.left)).getText().toString();
} else if (swipeDir == RIGHT) {
//Checks the answer swiped with the API
s = ((TextView) viewHolder.itemView.findViewById(R.id.right)).getText().toString();
}
//Removes card from array
t = teamAF.remove(mRecyclerView.getChildAdapterPosition(viewHolder.itemView));
//Adds some nice animation effect
mAdapter.notifyItemRemoved(mRecyclerView.getChildAdapterPosition(viewHolder.itemView));
//Sends a toast message saying "correct" or "incorrect"
if(t.checkAnswer(s)) {
Toast.makeText(MainActivity.this, "Correct!", Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(MainActivity.this, "Incorrect!", Toast.LENGTH_SHORT).show();
}
if(first) {
//This make the recyclerView shuffle
Collections.shuffle(teamAF, new Random());
first = false;
}
//Adding one, so the cards can go on forever
cardNumber++;
//Calls the methods
mAdapter = new MyAdapter(teamAF);
mRecyclerView.setAdapter(mAdapter);
//This is a 'benchmark' type 'achievement'
if (cardNumber == 50) {
Toast.makeText(getApplicationContext(), "You Reached 50 Points Without Losing! You're Doing Great!", Toast.LENGTH_SHORT).show();
} else if (cardNumber == 100) {
Toast.makeText(getApplicationContext(), "You Reached 100 Points Without Losing! You're A Genius!", Toast.LENGTH_SHORT).show();
}
}
};
//Allows the swiping inside the recyclerView
ItemTouchHelper itemTouchHelper = new ItemTouchHelper(simpleItemTouchCallback);
itemTouchHelper.attachToRecyclerView(mRecyclerView);
//Calls the google API
mProgress = new ProgressDialog(this);
mProgress.setMessage("Loading...");
getResultsFromApi();
}
//The green was commented from the site
/**
* Attempt to call the API, after verifying that all the preconditions are
* satisfied. The preconditions are: Google Play Services installed, an
* account was selected and the device currently has online access. If any
* of the preconditions are not satisfied, the app will prompt the user as
* appropriate.
*/
private void getResultsFromApi() {
if (! isGooglePlayServicesAvailable()) {
acquireGooglePlayServices();
} else if (mCredential.getSelectedAccountName() == null) {
chooseAccount();
} else if (! isDeviceOnline()) {
System.out.println("No network connection available.");
} else {
new MakeRequestTask(mCredential).execute();
}
}
/**
* Attempts to set the account used with the API credentials. If an account
* name was previously saved it will use that one; otherwise an account
* picker dialog will be shown to the user. Note that the setting the
* account to use with the credentials object requires the app to have the
* GET_ACCOUNTS permission, which is requested here if it is not already
* present. The AfterPermissionGranted annotation indicates that this
* function will be rerun automatically whenever the GET_ACCOUNTS permission
* is granted.
*/
@AfterPermissionGranted(REQUEST_PERMISSION_GET_ACCOUNTS)
private void chooseAccount() {
if (EasyPermissions.hasPermissions(
this, Manifest.permission.GET_ACCOUNTS)) {
String accountName = getPreferences(Context.MODE_PRIVATE)
.getString(PREF_ACCOUNT_NAME, null);
if (accountName != null) {
mCredential.setSelectedAccountName(accountName);
getResultsFromApi();
} else {
// Start a dialog from which the user can choose an account
startActivityForResult(
mCredential.newChooseAccountIntent(),
REQUEST_ACCOUNT_PICKER);
}
} else {
// Request the GET_ACCOUNTS permission via a user dialog
EasyPermissions.requestPermissions(
this,
"This app needs to access your Google account (via Contacts).",
REQUEST_PERMISSION_GET_ACCOUNTS,
Manifest.permission.GET_ACCOUNTS);
}
}
/**
* Called when an activity launched here (specifically, AccountPicker
* and authorization) exits, giving you the requestCode you started it with,
* the resultCode it returned, and any additional data from it.
* @param requestCode code indicating which activity result is incoming.
* @param resultCode code indicating the result of the incoming
* activity result.
* @param data Intent (containing result data) returned by incoming
* activity result.
*/
@Override
protected void onActivityResult(
int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
switch(requestCode) {
case REQUEST_GOOGLE_PLAY_SERVICES:
if (resultCode != RESULT_OK) {
} else {
getResultsFromApi();
}
break;
case REQUEST_ACCOUNT_PICKER:
if (resultCode == RESULT_OK && data != null &&
data.getExtras() != null) {
String accountName =
data.getStringExtra(AccountManager.KEY_ACCOUNT_NAME);
if (accountName != null) {
SharedPreferences settings =
getPreferences(Context.MODE_PRIVATE);
SharedPreferences.Editor editor = settings.edit();
editor.putString(PREF_ACCOUNT_NAME, accountName);
editor.apply();
mCredential.setSelectedAccountName(accountName);
getResultsFromApi();
}
}
break;
case REQUEST_AUTHORIZATION:
if (resultCode == RESULT_OK) {
getResultsFromApi();
}
break;
}
}
/**
* Respond to requests for permissions at runtime for API 23 and above.
* @param requestCode The request code passed in
* requestPermissions(android.app.Activity, String, int, String[])
* @param permissions The requested permissions. Never null.
* @param grantResults The grant results for the corresponding permissions
* which is either PERMISSION_GRANTED or PERMISSION_DENIED. Never null.
*/
@Override
public void onRequestPermissionsResult(int requestCode,
@NonNull String[] permissions,
@NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
EasyPermissions.onRequestPermissionsResult(
requestCode, permissions, grantResults, this);
}
/**
* Callback for when a permission is granted using the EasyPermissions
* library.
* @param requestCode The request code associated with the requested
* permission
* @param list The requested permission list. Never null.
*/
@Override
public void onPermissionsGranted(int requestCode, List<String> list) {
// Do nothing.
}
/**
* Callback for when a permission is denied using the EasyPermissions
* library.
* @param requestCode The request code associated with the requested
* permission
* @param list The requested permission list. Never null.
*/
@Override
public void onPermissionsDenied(int requestCode, List<String> list) {
// Do nothing.
}
/**
* Checks whether the device currently has a network connection.
* @return true if the device has a network connection, false otherwise.
*/
private boolean isDeviceOnline() {
ConnectivityManager connMgr =
(ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
NetworkInfo networkInfo = connMgr.getActiveNetworkInfo();
return (networkInfo != null && networkInfo.isConnected());
}
/**
* Check that Google Play services APK is installed and up to date.
* @return true if Google Play Services is available and up to
* date on this device; false otherwise.
*/
private boolean isGooglePlayServicesAvailable() {
GoogleApiAvailability apiAvailability =
GoogleApiAvailability.getInstance();
final int connectionStatusCode =
apiAvailability.isGooglePlayServicesAvailable(this);
return connectionStatusCode == ConnectionResult.SUCCESS;
}
/**
* Attempt to resolve a missing, out-of-date, invalid or disabled Google
* Play Services installation via a user dialog, if possible.
*/
private void acquireGooglePlayServices() {
GoogleApiAvailability apiAvailability =
GoogleApiAvailability.getInstance();
final int connectionStatusCode =
apiAvailability.isGooglePlayServicesAvailable(this);
if (apiAvailability.isUserResolvableError(connectionStatusCode)) {
showGooglePlayServicesAvailabilityErrorDialog(connectionStatusCode);
}
}
/**
* Display an error dialog showing that Google Play Services is missing
* or out of date.
* @param connectionStatusCode code describing the presence (or lack of)
* Google Play Services on this device.
*/
void showGooglePlayServicesAvailabilityErrorDialog(
final int connectionStatusCode) {
GoogleApiAvailability apiAvailability = GoogleApiAvailability.getInstance();
Dialog dialog = apiAvailability.getErrorDialog(
MainActivity.this,
connectionStatusCode,
REQUEST_GOOGLE_PLAY_SERVICES);
dialog.show();
}
/**
* An asynchronous task that handles the Google Sheets API call.
* Placing the API calls in their own task ensures the UI stays responsive.
*/
private class MakeRequestTask extends AsyncTask<Void, Void, List<String>> {
private com.google.api.services.sheets.v4.Sheets mService = null;
private Exception mLastError = null;
MakeRequestTask(GoogleAccountCredential credential) {
HttpTransport transport = AndroidHttp.newCompatibleTransport();
JsonFactory jsonFactory = JacksonFactory.getDefaultInstance();
mService = new com.google.api.services.sheets.v4.Sheets.Builder(
transport, jsonFactory, credential)
.setApplicationName("Google Sheets API Android Quickstart")
.build();
}
/**
* Background task to call Google Sheets API.
* @param params no parameters needed for this task.
*/
@Override
protected List<String> doInBackground(Void... params) {
try {
return getDataFromApi();
} catch (Exception e) {
mLastError = e;
cancel(true);
return null;
}
}
/**
* Fetch a list of names and majors of students in a sample spreadsheet:
* https://docs.google.com/spreadsheets/d/1BxiMVs0XRA5nFMdKvBdBZjgmUUqptlbs74OgvE2upms/edit
* @return List of names and majors
*/
private List<String> getDataFromApi() throws IOException {
String spreadsheetId = "1KlflsDsBn0lhGT87OuRuy_RGOuBP2jBU5zpqn_UIYT0";
String range = "Sheet1!A2:B";
List<String> results = new ArrayList<String>();
ValueRange response = this.mService.spreadsheets().values()
.get(spreadsheetId, range)
.execute();
List<List<Object>> values = response.getValues();
for (List row : values) {
//Obtains the row from the Google Sheet
teamAF.add(new Team(row.get(0).toString(), row.get(1).toString()));
}
//Shuffle the cards with the data on them.
Collections.shuffle(teamAF, new Random());
/*for(String s : results) {
Log.e("HERE", s);
}*/
return results;
}
@Override
protected void onPreExecute() {
//mOutputText.setText("");
mProgress.show();
}
@Override
protected void onPostExecute(List<String> output) {
mProgress.hide();
if (output == null || output.size() == 0) {
//mOutputText.setText("No results returned.");
} else {
output.add(0, "Data retrieved using the Google Sheets API:");
//mOutputText.setText(TextUtils.join("\n", output));
}
}
@Override
protected void onCancelled() {
mProgress.hide();
if (mLastError != null) {
if (mLastError instanceof GooglePlayServicesAvailabilityIOException) {
showGooglePlayServicesAvailabilityErrorDialog(
((GooglePlayServicesAvailabilityIOException) mLastError)
.getConnectionStatusCode());
} else if (mLastError instanceof UserRecoverableAuthIOException) {
startActivityForResult(
((UserRecoverableAuthIOException) mLastError).getIntent(),
MainActivity.REQUEST_AUTHORIZATION);
} else {
//mOutputText.setText("The following error occurred:\n"
//+ mLastError.getMessage());
}
} else {
//mOutputText.setText("Request cancelled.");
}
}
}
}
My Adapter:
package com.example.whateverthefuckyouwant;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.Random;
/**
* Created by ebuttikofer20 on 1/20/17.
*/
public class MyAdapter extends RecyclerView.Adapter<MyAdapter.ViewHolder> {
private ArrayList<Team> mDataset;
// Provide a reference to the views for each data item
// Complex data items may need more than one view per item, and
// you provide access to all the views for a data item in a view holder
public static class ViewHolder extends RecyclerView.ViewHolder {
// each data item is just a string in this case
public TextView mTextView;
public TextView left;
public TextView right;
public ViewHolder(View v) {
super(v);
mTextView = (TextView) v.findViewById(R.id.info_text);
left = (TextView) v.findViewById(R.id.left);
right = (TextView) v.findViewById(R.id.right);
}
}
// Provide a suitable constructor (depends on the kind of dataset)
public MyAdapter(ArrayList<Team> myDataset) {
mDataset = myDataset;
}
// Create new views (invoked by the layout manager)
@Override
public MyAdapter.ViewHolder onCreateViewHolder(ViewGroup parent,
int viewType) {
// create a new view
View v = LayoutInflater.from(parent.getContext())
.inflate(R.layout.my_text_view, parent, false);
// set the view's size, margins, paddings and layout parameters
ViewHolder vh = new ViewHolder(v);
return vh;
}
// Replace the contents of a view (invoked by the layout manager)
@Override
public void onBindViewHolder(ViewHolder holder, int position) {
// - get element from your data at this position
// - replace the contents of the view with that element
holder.mTextView.setText(mDataset.get(position).getTeamName());
Random gen = new Random();
if(gen.nextBoolean()) {
lefter = mDataset.get(position).getTeamNumber();
righter = mDataset.get(gen.nextInt(mDataset.size())).getTeamNumber();
holder.left.setText(lefter);
holder.right.setText(righter);
} else {
righter = mDataset.get(
position).getTeamNumber();
lefter = mDataset.get(gen.nextInt(mDataset.size())).getTeamNumber();
holder.right.setText(righter);
holder.left.setText(lefter);
}
}
String righter;
String lefter;
public String getLeft() {
return lefter;
}
public String getRighter() {
return righter;
}
// Return the size of your dataset (invoked by the layout manager)
@Override
public int getItemCount() {
return mDataset.size();
}
}
Set your parent layout's height in your R.layout.my_text_view file to wrap_content.
I had the same challenge, and I had to set my parent layout's height to android:layout_height="wrap_content".
That worked very well for me.

android opencv camera app hanging

I am creating an Android camera app using OpenCV. First I made it so that when I click the start button, it starts taking pictures, taking one picture every second and storing it in storage. That works perfectly for me.
Then I tried to make it a bit trickier by forcing it to take 5 pictures in a second, and it works well.
But I ran into a problem when I pushed it to 20 pictures in 1 second. It did not work; the app hangs as soon as I click the start button. I don't know why, but I think the problem is in the threading.
Can someone help me figure out how to do it?
I have two Java files in my project, and here is the code.
package org.opencv.samples.tutorial3;
import java.io.FileOutputStream;
import java.util.List;
import org.opencv.android.JavaCameraView;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.Size;
import android.util.AttributeSet;
import android.util.Log;
public class Tutorial3View extends JavaCameraView implements PictureCallback {
private static final String TAG = "Sample::Tutorial3View";
private String mPictureFileName;
public Tutorial3View(Context context, AttributeSet attrs) {
super(context, attrs);
}
public List<String> getEffectList() {
return mCamera.getParameters().getSupportedColorEffects();
}
public boolean isEffectSupported() {
return (mCamera.getParameters().getColorEffect() != null);
}
public String getEffect() {
return mCamera.getParameters().getColorEffect();
}
public void setEffect(String effect) {
Camera.Parameters params = mCamera.getParameters();
params.setColorEffect(effect);
mCamera.setParameters(params);
}
public List<Size> getResolutionList() {
return mCamera.getParameters().getSupportedPreviewSizes();
}
public void setResolution(Size resolution) {
disconnectCamera();
mMaxHeight = resolution.height;
mMaxWidth = resolution.width;
connectCamera(getWidth(), getHeight());
}
public Size getResolution() {
return mCamera.getParameters().getPreviewSize();
}
public void takePicture(final String fileName) {
Log.i(TAG, "Taking picture");
this.mPictureFileName = fileName;
// Postview and jpeg are sent in the same buffers if the queue is not empty when performing a capture.
// Clear up buffers to avoid mCamera.takePicture to be stuck because of a memory issue
mCamera.setPreviewCallback(null);
// PictureCallback is implemented by the current class
mCamera.takePicture(null, null, this);
}
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Log.i(TAG, "Saving a bitmap to file");
// The camera preview was automatically stopped. Start it again.
mCamera.startPreview();
mCamera.setPreviewCallback(this);
// Write the image in a file (in jpeg format)
try {
FileOutputStream fos = new FileOutputStream(mPictureFileName);
fos.write(data);
fos.close();
} catch (java.io.IOException e) {
Log.e("PictureDemo", "Exception in photoCallback", e);
}
}
}
And the second one is:
package org.opencv.samples.tutorial3;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.ListIterator;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.hardware.Camera.Size;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.SubMenu;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnTouchListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.Toast;
public class Tutorial3Activity extends Activity implements CvCameraViewListener2 {
private static final String TAG = "OCVSample::Activity";
private Tutorial3View mOpenCvCameraView;
private List<Size> mResolutionList;
private MenuItem[] mEffectMenuItems;
private SubMenu mColorEffectsMenu;
private MenuItem[] mResolutionMenuItems;
private SubMenu mResolutionMenu;
Thread thread;
Button start,stop;
boolean loop=false;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public Tutorial3Activity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.tutorial3_surface_view);
start=(Button)findViewById(R.id.button2) ;
start.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
loop=true;
thread = new Thread(new task());
thread.start();
}
});
stop=(Button) findViewById(R.id.button);
stop.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
loop=false;
}
});
mOpenCvCameraView = (Tutorial3View) findViewById(R.id.tutorial3_activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
}
public void onCameraViewStopped() {
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
return inputFrame.rgba();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
List<String> effects = mOpenCvCameraView.getEffectList();
if (effects == null) {
Log.e(TAG, "Color effects are not supported by device!");
return true;
}
mColorEffectsMenu = menu.addSubMenu("Color Effect");
mEffectMenuItems = new MenuItem[effects.size()];
int idx = 0;
ListIterator<String> effectItr = effects.listIterator();
while(effectItr.hasNext()) {
String element = effectItr.next();
mEffectMenuItems[idx] = mColorEffectsMenu.add(1, idx, Menu.NONE, element);
idx++;
}
mResolutionMenu = menu.addSubMenu("Resolution");
mResolutionList = mOpenCvCameraView.getResolutionList();
mResolutionMenuItems = new MenuItem[mResolutionList.size()];
ListIterator<Size> resolutionItr = mResolutionList.listIterator();
idx = 0;
while(resolutionItr.hasNext()) {
Size element = resolutionItr.next();
mResolutionMenuItems[idx] = mResolutionMenu.add(2, idx, Menu.NONE,
Integer.valueOf(element.width).toString() + "x" + Integer.valueOf(element.height).toString());
idx++;
}
return true;
}
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item.getGroupId() == 1)
{
mOpenCvCameraView.setEffect((String) item.getTitle());
Toast.makeText(this, mOpenCvCameraView.getEffect(), Toast.LENGTH_SHORT).show();
}
else if (item.getGroupId() == 2)
{
int id = item.getItemId();
Size resolution = mResolutionList.get(id);
mOpenCvCameraView.setResolution(resolution);
resolution = mOpenCvCameraView.getResolution();
String caption = Integer.valueOf(resolution.width).toString() + "x" + Integer.valueOf(resolution.height).toString();
Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();
}
return true;
}
class task implements Runnable {
public void run() {
while (loop) {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
String currentDateandTime = sdf.format(new Date());
final String fileName = Environment.getExternalStorageDirectory().getPath() +
"/sample_picture_" + currentDateandTime + ".jpg";
mOpenCvCameraView.takePicture(fileName);
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(Tutorial3Activity.this, fileName + " saved", Toast.LENGTH_SHORT).show();
}
});
try {
thread.sleep(200);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
}
}
Note: The above code works well for taking 5 pictures in 1 second, but when I change thread.sleep(200) to thread.sleep(50) to take 20 pictures in 1 second, it hangs.

Memory Leak Issue with setText (Possibly?)

I'm working on a basic application that tells the user the number of contours in the video feed in real time using OpenCV. I have this working, but about a minute into running, the application closes and there are no logcat errors. I'm a new developer, but through some research I believe this is a memory-leak issue. I think it has something to do with setting the text on each video frame. Here is my code:
import java.util.ArrayList;
import java.util.List;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.JavaCameraView;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.SurfaceView;
import android.view.WindowManager;
import android.widget.TextView;
public class MainActivity extends Activity implements CvCameraViewListener2 {
static {
if (!OpenCVLoader.initDebug()) {
Log.d("ERROR", "Unable to load OpenCV");
}
else {
Log.d("SUCCESS", "OpenCV loaded");
}
}
private TextView contourCount;
//TAG for log
private static final String TAG = "Contour Count";
//Mats for image processing
private Mat mRgba;
private Mat mGray;
//Declare the cameraview
private JavaCameraView openCvCameraView;
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
//OpenCv
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
//Turn on cameraview
openCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
//OnResume handling
@Override
public void onResume()
{
super.onResume();
openCvCameraView.enableView();
}
//OnCreate tasks
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
//Keep the screen on while app is running
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
//Set layout and initialize cameraView
setContentView(R.layout.fragment_main);
openCvCameraView = (JavaCameraView) findViewById(R.id.HelloOpenCvView);
openCvCameraView.setVisibility(SurfaceView.VISIBLE);
openCvCameraView.setCvCameraViewListener(this);
openCvCameraView.enableView();
}
//Handling onPause tasks - Disable Camera
@Override
public void onPause()
{
super.onPause();
if (openCvCameraView != null)
openCvCameraView.disableView();
}
//Handling onDestroy tasks - Disable Camera
public void onDestroy() {
super.onDestroy();
if (openCvCameraView != null)
openCvCameraView.disableView();
}
//New Mats for first onFrame
public void onCameraViewStarted(int width, int height) {
//Initialization of variables used for video analysis
mGray = new Mat();
mRgba = new Mat();
}
//Release the Mats when camera is stopped
public void onCameraViewStopped() {
mGray.release();
mRgba.release();
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
mGray = inputFrame.gray();
Mat imageHSV = new Mat(mRgba.size(), Core.DEPTH_MASK_8U);
Mat imageBlurr = new Mat(mRgba.size(), Core.DEPTH_MASK_8U);
Mat imageA = new Mat(mRgba.size(), Core.DEPTH_MASK_ALL);
Imgproc.cvtColor(mRgba, imageHSV, Imgproc.COLOR_BGR2GRAY);
Imgproc.GaussianBlur(imageHSV, imageBlurr, new Size(5,5), 0);
Imgproc.adaptiveThreshold(imageBlurr, imageA, 255,Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY,7, 5);
List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
Imgproc.findContours(imageA, contours, new Mat(), Imgproc.RETR_EXTERNAL,Imgproc.CHAIN_APPROX_SIMPLE);
setContours(contours.size());
return mRgba;
}
public void setContours(final int level) {
runOnUiThread(new Runnable() {
@Override
public void run() {
contourCount = (TextView) findViewById(R.id.contourLevel);
contourCount.setText("" + level);
}
});
}
}
The same issue occurs if I declare contourCount in onCreate(). I'm not sure if it has something to do with making level final in the method call for setContours(). I have also tried to solve this with a Handler declared in onCreate(), sending it a message containing contours.size() from onCameraFrame(), but that causes the same issue. What am I doing wrong? How can I avoid this in the future? I'm new to this, so please be detailed in explaining the issue and how to avoid it.
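For reference, the Handler attempt looked roughly like the sketch below (reconstructed after the fact, so the contourHandler name and the message code are placeholders rather than my exact code):
// Requires: import android.os.Handler; import android.os.Looper; import android.os.Message;
private Handler contourHandler;

// In onCreate(), after setContentView(...):
contourHandler = new Handler(Looper.getMainLooper()) {
    @Override
    public void handleMessage(Message msg) {
        // msg.arg1 carries the contour count computed on the camera thread
        contourCount = (TextView) findViewById(R.id.contourLevel);
        contourCount.setText(String.valueOf(msg.arg1));
    }
};

// In onCameraFrame(), instead of calling setContours(contours.size()):
contourHandler.obtainMessage(0, contours.size(), 0).sendToTarget();
This crashed in exactly the same way, which makes sense in hindsight: the problem was not how the text reached the UI thread, but the Mats being allocated on every frame (see the edit below).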
Thank you in advance! :)
EDIT SOLVED:
Figured it out. Thanks to everyone who contributed. Here is the working code for anyone interested or those with the same issue in the future!
The fix comes from releasing the Mats used in onCameraFrame().
import java.util.ArrayList;
import java.util.List;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.JavaCameraView;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.SurfaceView;
import android.view.WindowManager;
import android.widget.TextView;
public class MainActivity extends Activity implements CvCameraViewListener2 {
static {
if (!OpenCVLoader.initDebug()) {
Log.d("ERROR", "Unable to load OpenCV");
}
else {
Log.d("SUCCESS", "OpenCV loaded");
}
}
//TAG for log
private static final String TAG = "Contour Count";
//Mats for image processing
private Mat mRgba;
private Mat mGray;
private Mat imageHSV;
private Mat imageBlurr;
private Mat imageA;
private Mat hierarchy;
//Declare the cameraview
private JavaCameraView openCvCameraView;
private static String level;
private static TextView contourCount;
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
//OpenCv
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
//Turn on cameraview
openCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
//OnResume handling
@Override
public void onResume()
{
super.onResume();
openCvCameraView.enableView();
}
//OnCreate tasks
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
//Keep the screen on while app is running
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
//Set layout and initialize cameraView
setContentView(R.layout.fragment_main);
openCvCameraView = (JavaCameraView) findViewById(R.id.HelloOpenCvView);
openCvCameraView.setVisibility(SurfaceView.VISIBLE);
openCvCameraView.setCvCameraViewListener(this);
openCvCameraView.enableView();
contourCount = (TextView) findViewById(R.id.contourLevel);
}
//Handling onPause tasks - Disable Camera
@Override
public void onPause()
{
super.onPause();
if (openCvCameraView != null)
openCvCameraView.disableView();
}
//Handling onDestroy tasks - Disable Camera
public void onDestroy() {
super.onDestroy();
if (openCvCameraView != null)
openCvCameraView.disableView();
}
//New Mats for first onFrame
public void onCameraViewStarted(int width, int height) {
//Initialization of variables used for video analysis
mGray = new Mat();
mRgba = new Mat();
}
//Release the Mats when camera is stopped
public void onCameraViewStopped() {
mGray.release();
mRgba.release();
}
//onCameraFrame image processing
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
imageHSV = new Mat(mRgba.size(), Core.DEPTH_MASK_8U);
imageBlurr = new Mat(mRgba.size(), Core.DEPTH_MASK_8U);
imageA = new Mat(mRgba.size(), Core.DEPTH_MASK_ALL);
hierarchy = new Mat();
Imgproc.cvtColor(mRgba, imageHSV, Imgproc.COLOR_BGR2GRAY);
Imgproc.GaussianBlur(imageHSV, imageBlurr, new Size(5,5), 0);
Imgproc.adaptiveThreshold(imageBlurr, imageA, 255,Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY,7, 5);
List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
Imgproc.findContours(imageA, contours, hierarchy, Imgproc.RETR_EXTERNAL,Imgproc.CHAIN_APPROX_SIMPLE);
//Imgproc.findContours(imageA, contours, new Mat(), Imgproc.RETR_LIST,Imgproc.CHAIN_APPROX_SIMPLE);
level = "" + contours.size();
setContour();
imageHSV.release();
imageBlurr.release();
imageA.release();
hierarchy.release();
return mRgba;
}
public void setContour() {
runOnUiThread(new Runnable() {
@Override
public void run() {
contourCount.setText(level);
}
});
}
}
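A further refinement (a sketch, not the code I ended up with) is to avoid the per-frame allocations entirely: create the working Mats once in onCameraViewStarted() and release them in onCameraViewStopped(). OpenCV reuses a destination Mat's buffer when its size and type do not change, so the same objects can be written to every frame:
// Sketch: allocate once, reuse every frame, release when the camera stops.
public void onCameraViewStarted(int width, int height) {
    mRgba = new Mat();
    imageHSV = new Mat();
    imageBlurr = new Mat();
    imageA = new Mat();
    hierarchy = new Mat();
}

public void onCameraViewStopped() {
    mRgba.release();
    imageHSV.release();
    imageBlurr.release();
    imageA.release();
    hierarchy.release();
}

public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    Imgproc.cvtColor(mRgba, imageHSV, Imgproc.COLOR_BGR2GRAY);
    Imgproc.GaussianBlur(imageHSV, imageBlurr, new Size(5, 5), 0);
    Imgproc.adaptiveThreshold(imageBlurr, imageA, 255,
            Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY, 7, 5);
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(imageA, contours, hierarchy,
            Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    level = "" + contours.size();
    setContour();
    return mRgba;
}
Either way, the key point is the same as in the working code above: any Mat that gets fresh native memory each frame has to be released before the next frame arrives, because the Java garbage collector does not track that native allocation.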
In the past, when creating memory-intensive apps, I've had to manually increase the amount of memory the app has access to. There may not necessarily be an OOM error; the app may simply be extremely memory intensive.
Try adding this to the application tag in your manifest:
android:largeHeap="true"
Hopefully this helps :)
