Syntax error on token "(", delete this token - java

I'm working on an app for my last year of A-Levels and I'm trying to make a colour averager. I can make the app take a picture and display it in an ImageView, but I can't seem to get the pixels from the image. I've tried "imageViewName".getPixels (and .getPixels on anything else I could think of), but the opening bracket after getPixels gets the error "Syntax error on token "(", delete this token", and the closing bracket gets the same. The error occurs here, quite close to the end of the code: photoViewForCrop.getPixels(pixels[], x, y, imageWidth, imageHeight);
Here's my activity that takes the photo and saves it to the SD card. Please ignore that it's called 'UploadPhotoActivity'; not sure what I was thinking.
Any imports or ints that aren't used in the code were added by my teacher when he was messing with it (and not helping at all).
package com.colours.javerager;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import android.app.ActionBar;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.Toast;
public class UploadPhotoActivity extends Activity {
//Activity request codes
private static final int CAMERA_CAPTURE_IMAGE_REQUEST_CODE = 100;
public static final int MEDIA_TYPE_IMAGE = 1;
//directory for file names
private static final String IMAGE_DIRECTORY_NAME = "JAverager";
private Uri fileUri; //file url to store
private ImageView photoViewForCrop;
private Button takePhotoButton;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_upload_photo);
// Show the Up button in the action bar.
//setupActionBar();
ActionBar actionBar = getActionBar();
// hide the action bar
actionBar.hide();
photoViewForCrop = (ImageView) findViewById(R.id.photoViewForCrop);
takePhotoButton = (Button) findViewById(R.id.takePhotoButton);
/**
* Capture image button click event
*/
takePhotoButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// capture picture
captureImage();
}
});
if (!isDeviceSupportCamera()) {
Toast.makeText(getApplicationContext(),
"Sorry! Your device doesn't support camera",
Toast.LENGTH_LONG).show();
// will close the app if the device doesn't have a camera
finish();
}
}
//checks if device has a camera
private boolean isDeviceSupportCamera() {
if (getApplicationContext().getPackageManager().hasSystemFeature(
PackageManager.FEATURE_CAMERA)) {
// this device has a camera
return true;
} else {
// no camera on this device
return false;
}
}
private void captureImage() {
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
fileUri = getOutputMediaFileUri(MEDIA_TYPE_IMAGE);
intent.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);
// start the image capture Intent
startActivityForResult(intent, CAMERA_CAPTURE_IMAGE_REQUEST_CODE);
}
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
// save file url in bundle as it will be null on screen orientation
// changes
outState.putParcelable("file_uri", fileUri);
}
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
super.onRestoreInstanceState(savedInstanceState);
// get the file url
fileUri = savedInstanceState.getParcelable("file_uri");
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
// if the result is capturing Image
if (requestCode == CAMERA_CAPTURE_IMAGE_REQUEST_CODE) {
if (resultCode == RESULT_OK) {
// successfully captured the image
// display it in image view
previewCapturedImage();
} else if (resultCode == RESULT_CANCELED) {
// user cancelled Image capture
Toast.makeText(getApplicationContext(),
"User cancelled image capture", Toast.LENGTH_SHORT)
.show();
} else {
// failed to capture image
Toast.makeText(getApplicationContext(),
"Sorry! Failed to capture image", Toast.LENGTH_SHORT)
.show();
}
}
}
/**
* Display image from a path to ImageView
*/
private void previewCapturedImage() {
try {
photoViewForCrop.setVisibility(View.VISIBLE);
// bitmap factory
BitmapFactory.Options options = new BitmapFactory.Options();
// downsizing image as it throws OutOfMemory Exception for larger
// images
options.inSampleSize = 2;
final Bitmap bitmap = BitmapFactory.decodeFile(fileUri.getPath(),
options);
photoViewForCrop.setImageBitmap(bitmap);
} catch (NullPointerException e) {
e.printStackTrace();
}
}
public Uri getOutputMediaFileUri(int type) {
return Uri.fromFile(getOutputMediaFile(type));
}
/**
* returning image
*/
private static File getOutputMediaFile(int type) {
// External sdcard location
File mediaStorageDir = new File(
Environment
.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
IMAGE_DIRECTORY_NAME);
// Create the storage directory if it does not exist
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
Log.d(IMAGE_DIRECTORY_NAME, "Oops! Failed create "
+ IMAGE_DIRECTORY_NAME + " directory");
return null;
}
}
// Create a media file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss",
Locale.getDefault()).format(new Date());
File mediaFile;
if (type == MEDIA_TYPE_IMAGE) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator
+ "JAverager_" + timeStamp + ".jpg");
} else {
return null;
}
return mediaFile;
}
int imageWidth = 640;
int imageHeight = 480;
int[] pixels = new int[307200];
int x = 1;
int y = 1;
photoViewForCrop.getPixels(pixels[], x, y, imageWidth, imageHeight);
int pixelTotal= 307200;
int valuethispixel;
int currentBlue;
int currentRed;
int currentGreen;
int totalBlue = 0;
int totalRed = 0;
int totalGreen = 0;
int currentPixel = 1;
int tempNum;
int avRed;
int avGreen;
int avBlue;
{
while(1 < pixelTotal){
tempNum = (Integer) pixels[currentPixel];
currentBlue = Color.blue(tempNum);
currentRed = Color.red(tempNum);
currentGreen = Color.green(tempNum);
totalBlue = totalBlue + currentBlue;
totalRed = totalRed + currentRed;
totalGreen = totalGreen + currentGreen;
currentPixel = currentPixel + 1;
}
totalBlue = totalBlue / pixelTotal;
totalRed = totalRed / pixelTotal;
totalGreen = totalGreen / pixelTotal;
}
};

ImageView does not implement the functionality you want.
Perhaps you should look at PixelGrabber (http://docs.oracle.com/javase/7/docs/api/java/awt/image/PixelGrabber.html).

Everything after the getOutputMediaFile method is not inside a method; that's why your method call and the while loop definition after it won't compile.
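To make that concrete, here is a minimal sketch of one way to do it (the method name averageColour is made up for illustration, and it assumes you already have the decoded Bitmap from previewCapturedImage): the averaging logic lives inside a method, and the pixels come from Bitmap.getPixels rather than from the ImageView.
// Sketch: average colour of a decoded Bitmap.
// Call this from a method, e.g. right after photoViewForCrop.setImageBitmap(bitmap);
private int averageColour(Bitmap bitmap) {
    int width = bitmap.getWidth();
    int height = bitmap.getHeight();
    int[] pixels = new int[width * height];
    // Bitmap.getPixels(pixels, offset, stride, x, y, width, height)
    bitmap.getPixels(pixels, 0, width, 0, 0, width, height);
    long totalRed = 0, totalGreen = 0, totalBlue = 0;
    for (int pixel : pixels) {
        totalRed += Color.red(pixel);
        totalGreen += Color.green(pixel);
        totalBlue += Color.blue(pixel);
    }
    int count = pixels.length;
    return Color.rgb((int) (totalRed / count), (int) (totalGreen / count), (int) (totalBlue / count));
}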

Related

I followed a tutorial on YouTube for an Android app, and the app keeps crashing right after it gets an image.

This is the main activity code for the app:
package com.example.projectflower;
import android.Manifest;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.media.ThumbnailUtils;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import com.example.projectflower.ml.Model;
import org.tensorflow.lite.DataType;
import org.tensorflow.lite.support.tensorbuffer.TensorBuffer;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
public class MainActivity extends AppCompatActivity {
Button camera, gallery;
ImageView imageView;
TextView result;
int imageSize = 256;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
camera = findViewById(R.id.button);
gallery = findViewById(R.id.button2);
result = findViewById(R.id.result);
imageView = findViewById(R.id.imageView);
camera.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (checkSelfPermission(Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) {
Intent cameraIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
startActivityForResult(cameraIntent, 3);
} else {
requestPermissions(new String[]{Manifest.permission.CAMERA}, 100);
}
}
});
gallery.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent cameraIntent = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
startActivityForResult(cameraIntent, 1);
}
});
}
public void classifyImage(Bitmap image){
try {
Model model = Model.newInstance(getApplicationContext());
// Creates inputs for reference.
TensorBuffer inputFeature0 = TensorBuffer.createFixedSize(new int[]{1, 32, 32, 3}, DataType.FLOAT32);
ByteBuffer byteBuffer = ByteBuffer.allocateDirect(4 * imageSize * imageSize * 3);
byteBuffer.order(ByteOrder.nativeOrder());
int[] intValues = new int[imageSize * imageSize];
image.getPixels(intValues, 0, image.getWidth(), 0, 0, image.getWidth(), image.getHeight());
int pixel = 0;
//iterate over each pixel and extract R, G, and B values. Add those values individually to the byte buffer.
for(int i = 0; i < imageSize; i ++){
for(int j = 0; j < imageSize; j++){
int val = intValues[pixel++]; // RGB
byteBuffer.putFloat(((val >> 16) & 0xFF) * (1.f / 255));
byteBuffer.putFloat(((val >> 8) & 0xFF) * (1.f /255));
byteBuffer.putFloat((val & 0xFF) * (1.f / 255));
}
}
inputFeature0.loadBuffer(byteBuffer);
// Runs model inference and gets result.
Model.Outputs outputs = model.process(inputFeature0);
TensorBuffer outputFeature0 = outputs.getOutputFeature0AsTensorBuffer();
float[] confidences = outputFeature0.getFloatArray();
// find the index of the class with the biggest confidence.
int maxPos = 0;
float maxConfidence = 0;
for (int i = 0; i < confidences.length; i++) {
if (confidences[i] > maxConfidence) {
maxConfidence = confidences[i];
maxPos = i;
}
}
String[] classes = {"american footballs", "baseballs","basketballs", "socer balls", "tennis balls"};
result.setText(classes[maxPos]);
// Releases model resources if no longer used.
model.close();
} catch (IOException e) {
// TODO Handle the exception
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
if(resultCode == RESULT_OK){
if(requestCode == 3){
Bitmap image = (Bitmap) data.getExtras().get("data");
int dimension = Math.min(image.getWidth(), image.getHeight());
image = ThumbnailUtils.extractThumbnail(image, dimension, dimension);
imageView.setImageBitmap(image);
image = Bitmap.createScaledBitmap(image, imageSize, imageSize, false);
classifyImage(image);
}else{
Uri dat = data.getData();
Bitmap image = null;
try {
image = MediaStore.Images.Media.getBitmap(this.getContentResolver(), dat);
} catch (IOException e) {
e.printStackTrace();
}
imageView.setImageBitmap(image);
image = Bitmap.createScaledBitmap(image, imageSize, imageSize, false);
classifyImage(image);
}
}
super.onActivityResult(requestCode, resultCode, data);
}
}
The app crashes after selecting an image and I cannot figure out why. When the debugger gets to the line if(resultCode == RESULT_OK), it shows the right values, but the app still crashes instantly. My images are of size 256 * 256 * 3. I've also tried running it on my phone, but it still doesn't work.
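One thing worth checking (an assumption on my part, based only on the code shown): the input tensor is created with shape {1, 32, 32, 3}, but imageSize is 256, so the ByteBuffer holds 4 * 256 * 256 * 3 bytes and loadBuffer can throw because the buffer size does not match the tensor shape. A sketch of keeping the two consistent, assuming the model really takes 32x32 input (otherwise change the createFixedSize shape instead):
// Derive everything from one constant so the buffer always matches the tensor shape.
int imageSize = 32; // must equal the model's input width/height
TensorBuffer inputFeature0 =
        TensorBuffer.createFixedSize(new int[]{1, imageSize, imageSize, 3}, DataType.FLOAT32);
ByteBuffer byteBuffer = ByteBuffer.allocateDirect(4 * imageSize * imageSize * 3);
byteBuffer.order(ByteOrder.nativeOrder());
// Scale the picked/captured bitmap to the same size before reading its pixels.
Bitmap scaled = Bitmap.createScaledBitmap(image, imageSize, imageSize, false);
int[] intValues = new int[imageSize * imageSize];
scaled.getPixels(intValues, 0, imageSize, 0, 0, imageSize, imageSize);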

Separate the images coming from the SD card and the device's internal memory in MediaStore

I am trying to access all the images available through MediaStore, but I would like to separate them into two groups: external and internal memory, where internal memory is the Android device's primary storage and external would be something like an SD card. How can I achieve this?
My code is below.
I have this activity:
package com.projects.timely.gallery;
import android.content.Intent;
import android.os.Bundle;
import android.os.Environment;
import android.view.View;
import android.view.ViewGroup;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import com.projects.timely.R;
@SuppressWarnings("ConstantConditions")
public class StorageViewer extends AppCompatActivity implements View.OnClickListener {
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_storage_view);
setSupportActionBar(findViewById(R.id.toolbar));
getSupportActionBar().setTitle("Select Storage");
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
ViewGroup v_internalStorage = findViewById(R.id.internal_storage);
v_internalStorage.setOnClickListener(this);
ViewGroup v_externalStorage = findViewById(R.id.external_storage);
v_externalStorage.setOnClickListener(this);
boolean ext_str_mounted
= Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED);
v_externalStorage.setVisibility(ext_str_mounted ? View.VISIBLE : View.GONE);
}
@Override
public void onClick(View v) {
if (v.getId() == R.id.internal_storage) {
startActivity(new Intent(this, ImageDirectory.class)
.putExtra(ImageDirectory.STORAGE_ACCESS_ROOT, ImageDirectory.INTERNAL));
} else {
startActivity(new Intent(this, ImageDirectory.class)
.putExtra(ImageDirectory.STORAGE_ACCESS_ROOT, ImageDirectory.EXTERNAL));
}
finish();
}
@Override
public boolean onSupportNavigateUp() {
super.onBackPressed();
return true;
}
}
That triggers this code, which is executed within an activity, in a file called ImageDirectory.java:
@Override
@SuppressLint("InlinedApi")
public void run() {
String root_extra = getIntent().getStringExtra(STORAGE_ACCESS_ROOT);
Uri storageUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
if (root_extra != null) {
storageUri = root_extra.equals(EXTERNAL) ? MediaStore.Images.Media.EXTERNAL_CONTENT_URI
: MediaStore.Images.Media.INTERNAL_CONTENT_URI;
}
String[] projection = {
MediaStore.Images.Media._ID,
MediaStore.Images.Media.BUCKET_DISPLAY_NAME,
MediaStore.Images.Media.SIZE,
MediaStore.Images.Media.DISPLAY_NAME};
Cursor imgCursor = getApplicationContext()
.getContentResolver()
.query(storageUri, projection, null, null, null);
int bucketId = imgCursor.getColumnIndexOrThrow(MediaStore.Images.Media._ID);
int imgSize = imgCursor.getColumnIndexOrThrow(MediaStore.Images.Media.SIZE);
int name = imgCursor.getColumnIndexOrThrow(MediaStore.Images.Media.DISPLAY_NAME);
int bucketName
= imgCursor.getColumnIndexOrThrow(MediaStore.Images.Media.BUCKET_DISPLAY_NAME);
List<String> dirName = new ArrayList<>();
while (imgCursor.moveToNext()) {
long id = imgCursor.getLong(bucketId);
int size = imgCursor.getInt(imgSize);
String fileName = imgCursor.getString(name);
String folderName = imgCursor.getString(bucketName);
// Updated this part, replaced with storageUri
Uri contentUri
= ContentUris.withAppendedId(storageUri, id);
Image currentImage = new Image(contentUri, size, fileName, folderName);
int directoryIndex = linearSearch(dirName, folderName);
// if search result (directoryIndex) passes this test, then it means that there is
// no such directory in list of directory names
if (directoryIndex < 0) {
imageDirectoryList.add(new ArrayList<>());
dirName.add(folderName);
directoryIndex = linearSearch(dirName, folderName);
if (directoryIndex >= 0)
imageDirectoryList.get(directoryIndex).add(currentImage);
} else {
imageDirectoryList.get(directoryIndex).add(currentImage);
}
}
imgCursor.close();
runOnUiThread(() -> {
imageAdapter.notifyDataSetChanged();
doViewUpdate();
});
}
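One way to tell the two apart (an assumption on my part; it needs API 29+, where MediaStore exposes volume names) is to query a single collection and split rows by volume: the device's built-in storage is the "primary" external volume, while an SD card shows up under its own volume name. A rough sketch in the same style as the run() method above:
// Sketch, API 29+: VOLUME_NAME tells you which physical volume each image is on.
String[] volumeProjection = {
        MediaStore.Images.Media._ID,
        MediaStore.Images.Media.DISPLAY_NAME,
        MediaStore.Images.Media.VOLUME_NAME};
Uri collection = MediaStore.Images.Media.getContentUri(MediaStore.VOLUME_EXTERNAL);
Cursor cursor = getContentResolver().query(collection, volumeProjection, null, null, null);
if (cursor != null) {
    int idCol = cursor.getColumnIndexOrThrow(MediaStore.Images.Media._ID);
    int volumeCol = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.VOLUME_NAME);
    while (cursor.moveToNext()) {
        Uri imageUri = ContentUris.withAppendedId(collection, cursor.getLong(idCol));
        boolean onDeviceStorage = MediaStore.VOLUME_EXTERNAL_PRIMARY.equals(cursor.getString(volumeCol));
        // add imageUri to the "device storage" list or the "SD card" list here
    }
    cursor.close();
}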

Face Recognition with OpenCV, NDK and Android

I was trying to run this code to take a photo and recognize the photo's owner. First the user clicks the Train button, takes a picture and enters their name; after that they click the Recognize button and take a picture. If that face was saved, the app recognizes it; if not, it shows something like "Unknown Face". The app runs fine most of the time, but it sometimes crashes with this error:
FATAL EXCEPTION: main
Process: com.facedetection.app, PID: 7442
java.lang.RuntimeException: Unable to stop activity {com.facedetection.app/com.facedetection.app.TrainActivity}: CvException [org.opencv.core.CvException: cv::Exception: OpenCV(4.0.0-pre) E:\AssemCourses\opencv-master\modules\core\src\matrix.cpp:235: error: (-215:Assertion failed) s >= 0 in function 'setSize'
]
at android.app.ActivityThread.performDestroyActivity(ActivityThread.java:4852)
at android.app.ActivityThread.handleDestroyActivity(ActivityThread.java:4915)
at android.app.ActivityThread.access$1600(ActivityThread.java:211)
at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1759)
at android.os.Handler.dispatchMessage(Handler.java:102)
at android.os.Looper.loop(Looper.java:145)
at android.app.ActivityThread.main(ActivityThread.java:6946)
at java.lang.reflect.Method.invoke(Native Method)
at java.lang.reflect.Method.invoke(Method.java:372)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1404)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1199)
Caused by: CvException [org.opencv.core.CvException: cv::Exception: OpenCV(4.0.0-pre) E:\AssemCourses\opencv-master\modules\core\src\matrix.cpp:235: error: (-215:Assertion failed) s >= 0 in function 'setSize'
]
at org.opencv.face.FaceRecognizer.train_0(Native Method)
at org.opencv.face.FaceRecognizer.train(FaceRecognizer.java:133)
at com.facedetection.app.TrainActivity.trainfaces(TrainActivity.java:95)
at com.facedetection.app.TrainActivity.onStop(TrainActivity.java:351)
at android.app.Instrumentation.callActivityOnStop(Instrumentation.java:1305)
at android.app.Activity.performStop(Activity.java:6777)
at android.app.ActivityThread.performDestroyActivity(ActivityThread.java:4847)
Here is the class mentioned in the error logcat:
Class TrainActivity.java:
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.DialogInterface;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.text.InputType;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.facebook.stetho.Stetho;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Size;
import org.opencv.face.Face;
import org.opencv.face.FaceRecognizer;
import org.opencv.face.LBPHFaceRecognizer;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import static org.opencv.objdetect.Objdetect.CASCADE_SCALE_IMAGE;
/**
* Created by Assem Abozaid on 6/2/2018.
*/
public class TrainActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2 {
private static String TAG = TrainActivity.class.getSimpleName();
private CameraBridgeViewBase openCVCamera;
private Mat rgba,gray;
private CascadeClassifier classifier;
private MatOfRect faces;
private static final int PERMS_REQUEST_CODE = 123;
private ArrayList<Mat> images;
private ArrayList<String> imagesLabels;
private Storage local;
private String[] uniqueLabels;
FaceRecognizer recognize;
private boolean trainfaces() {
if(images.isEmpty())
return false;
List<Mat> imagesMatrix = new ArrayList<>();
for (int i = 0; i < images.size(); i++)
imagesMatrix.add(images.get(i));
Set<String> uniqueLabelsSet = new HashSet<>(imagesLabels); // Get all unique labels
uniqueLabels = uniqueLabelsSet.toArray(new String[uniqueLabelsSet.size()]); // Convert to String array, so we can read the values from the indices
int[] classesNumbers = new int[uniqueLabels.length];
for (int i = 0; i < classesNumbers.length; i++)
classesNumbers[i] = i + 1; // Create incrementing list for each unique label starting at 1
int[] classes = new int[imagesLabels.size()];
for (int i = 0; i < imagesLabels.size(); i++) {
String label = imagesLabels.get(i);
for (int j = 0; j < uniqueLabels.length; j++) {
if (label.equals(uniqueLabels[j])) {
classes[i] = classesNumbers[j]; // Insert corresponding number
break;
}
}
}
Mat vectorClasses = new Mat(classes.length, 1, CvType.CV_32SC1); // CV_32S == int
vectorClasses.put(0, 0, classes); // Copy int array into a vector
recognize = LBPHFaceRecognizer.create(3,8,8,8,200);
recognize.train(imagesMatrix, vectorClasses);
if(SaveImage())
return true;
return false;
}
public void showLabelsDialog() {
Set<String> uniqueLabelsSet = new HashSet<>(imagesLabels); // Get all unique labels
if (!uniqueLabelsSet.isEmpty()) {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Select label:");
builder.setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
images.remove(images.size()-1);
}
});
builder.setCancelable(false); // Prevent the user from closing the dialog
String[] uniqueLabels = uniqueLabelsSet.toArray(new String[uniqueLabelsSet.size()]); // Convert to String array for ArrayAdapter
Arrays.sort(uniqueLabels); // Sort labels alphabetically
final ArrayAdapter<String> arrayAdapter = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, uniqueLabels) {
@Override
public @NonNull
View getView(int position, @Nullable View convertView, @NonNull ViewGroup parent) {
TextView textView = (TextView) super.getView(position, convertView, parent);
if (getResources().getBoolean(R.bool.isTablet))
textView.setTextSize(20); // Make text slightly bigger on tablets compared to phones
else
textView.setTextSize(18); // Increase text size a little bit
return textView;
}
};
ListView mListView = new ListView(this);
mListView.setAdapter(arrayAdapter); // Set adapter, so the items actually show up
builder.setView(mListView); // Set the ListView
final AlertDialog dialog = builder.show(); // Show dialog and store in final variable, so it can be dismissed by the ListView
mListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
dialog.dismiss();
addLabel(arrayAdapter.getItem(position));
Log.i(TAG, "Labels Size "+imagesLabels.size()+"");
}
});
} else {
showEnterLabelDialog();
}
}
private void showEnterLabelDialog() {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Please enter your name:");
final EditText input = new EditText(this);
input.setInputType(InputType.TYPE_CLASS_TEXT);
builder.setView(input);
builder.setPositiveButton("Submit", null); // Set up positive button, but do not provide a listener, so we can check the string before dismissing the dialog
builder.setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
images.remove(images.size()-1);
}
});
builder.setCancelable(false); // User has to input a name
AlertDialog dialog = builder.create();
// Source: http://stackoverflow.com/a/7636468/2175837
dialog.setOnShowListener(new DialogInterface.OnShowListener() {
@Override
public void onShow(final DialogInterface dialog) {
Button mButton = ((AlertDialog) dialog).getButton(AlertDialog.BUTTON_POSITIVE);
mButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
String string = input.getText().toString().trim();
if (!string.isEmpty()) { // Make sure the input is valid
// If input is valid, dismiss the dialog and add the label to the array
dialog.dismiss();
addLabel(string);
}
}
});
}
});
// Show keyboard, so the user can start typing straight away
dialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_VISIBLE);
dialog.show();
}
private void addLabel(String string) {
String label = string.substring(0, 1).toUpperCase(Locale.US) + string.substring(1).trim().toLowerCase(Locale.US); // Make sure that the name is always uppercase and rest is lowercase
imagesLabels.add(label); // Add label to list of labels
Log.i(TAG, "Label: " + label);
}
public boolean SaveImage() {
File path = new File(Environment.getExternalStorageDirectory(), "TrainedData");
path.mkdirs();
String filename = "lbph_trained_data.xml";
File file = new File(path, filename);
recognize.save(file.toString());
if(file.exists())
return true;
return false;
}
public void cropedImages(Mat mat) {
Rect rect_Crop=null;
for(Rect face: faces.toArray()) {
rect_Crop = new Rect(face.x, face.y, face.width, face.height);
}
Mat croped = new Mat(mat, rect_Crop);
images.add(croped);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.train_main);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
Stetho.initializeWithDefaults(this);
if (hasPermissions()){
Toast.makeText(this, "Permission Granted", Toast.LENGTH_SHORT).show();
Log.i(TAG, "Permission Granted Before");
}
else {
requestPerms();
}
openCVCamera = (CameraBridgeViewBase)findViewById(R.id.java_camera_view);
openCVCamera.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_FRONT);
openCVCamera.setVisibility(SurfaceView.VISIBLE);
openCVCamera.setCvCameraViewListener(this);
local = new Storage(this);
Button detect = (Button)findViewById(R.id.take_picture_button);
detect.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if(gray.total() == 0)
Toast.makeText(getApplicationContext(), "Can't Detect Faces", Toast.LENGTH_SHORT).show();
classifier.detectMultiScale(gray,faces,1.1,3,0|CASCADE_SCALE_IMAGE, new Size(30,30));
if(!faces.empty()) {
if(faces.toArray().length > 1)
Toast.makeText(getApplicationContext(), "Mutliple Faces Are not allowed", Toast.LENGTH_SHORT).show();
else {
if(gray.total() == 0) {
Log.i(TAG, "Empty gray image");
return;
}
cropedImages(gray);
showLabelsDialog();
Toast.makeText(getApplicationContext(), "Face Detected", Toast.LENGTH_SHORT).show();
}
}else
Toast.makeText(getApplicationContext(), "Unknown Face", Toast.LENGTH_SHORT).show();
}
});
}
@SuppressLint("WrongConstant")
private boolean hasPermissions(){
int res = 0;
//string array of permissions,
String[] permissions = new String[]{Manifest.permission.CAMERA};
for (String perms : permissions){
res = checkCallingOrSelfPermission(perms);
if (!(res == PackageManager.PERMISSION_GRANTED)){
return false;
}
}
return true;
}
private void requestPerms(){
String[] permissions = new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE,Manifest.permission.READ_EXTERNAL_STORAGE};
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M){
requestPermissions(permissions,PERMS_REQUEST_CODE);
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
boolean allowed = true;
switch (requestCode){
case PERMS_REQUEST_CODE:
for (int res : grantResults){
// if user granted all permissions.
allowed = allowed && (res == PackageManager.PERMISSION_GRANTED);
}
break;
default:
// if user not granted permissions.
allowed = false;
break;
}
if (allowed){
//user granted all permissions we can perform our task.
Log.i(TAG, "Permission has been added");
}
else {
// we will give warning to user that they haven't granted permissions.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA) || shouldShowRequestPermissionRationale(Manifest.permission.WRITE_EXTERNAL_STORAGE) ||
shouldShowRequestPermissionRationale(Manifest.permission.READ_EXTERNAL_STORAGE)){
Toast.makeText(this, "Permission Denied.", Toast.LENGTH_SHORT).show();
}
}
}
}
private BaseLoaderCallback callbackLoader = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch(status) {
case BaseLoaderCallback.SUCCESS:
faces = new MatOfRect();
openCVCamera.enableView();
images = local.getListMat("images");
imagesLabels = local.getListString("imagesLabels");
break;
default:
super.onManagerConnected(status);
break;
}
}
};
@Override
protected void onPause() {
super.onPause();
if(openCVCamera != null)
openCVCamera.disableView();
}
@Override
protected void onStop() {
super.onStop();
if (images != null && imagesLabels != null) {
local.putListMat("images", images);
local.putListString("imagesLabels", imagesLabels);
Log.i(TAG, "Images have been saved");
if(trainfaces()) {
images.clear();
imagesLabels.clear();
}
}
}
@Override
protected void onDestroy(){
super.onDestroy();
if(openCVCamera != null)
openCVCamera.disableView();
}
@Override
protected void onResume(){
super.onResume();
if(OpenCVLoader.initDebug()) {
Log.i(TAG, "System Library Loaded Successfully");
callbackLoader.onManagerConnected(BaseLoaderCallback.SUCCESS);
} else {
Log.i(TAG, "Unable To Load System Library");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION, this, callbackLoader);
}
}
@Override
public void onCameraViewStarted(int width, int height) {
rgba = new Mat();
gray = new Mat();
classifier = FileUtils.loadXMLS(this, "lbpcascade_frontalface_improved.xml");
}
@Override
public void onCameraViewStopped() {
rgba.release();
gray.release();
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
Mat mGrayTmp = inputFrame.gray();
Mat mRgbaTmp = inputFrame.rgba();
int orientation = openCVCamera.getScreenOrientation();
if (openCVCamera.isEmulator()) // Treat emulators as a special case
Core.flip(mRgbaTmp, mRgbaTmp, 1); // Flip along y-axis
else {
switch (orientation) { // RGB image
case ActivityInfo.SCREEN_ORIENTATION_PORTRAIT:
case ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT:
Core.flip(mRgbaTmp, mRgbaTmp, 0); // Flip along x-axis
break;
case ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE:
case ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE:
Core.flip(mRgbaTmp, mRgbaTmp, 1); // Flip along y-axis
break;
}
switch (orientation) { // Grayscale image
case ActivityInfo.SCREEN_ORIENTATION_PORTRAIT:
Core.transpose(mGrayTmp, mGrayTmp); // Rotate image
Core.flip(mGrayTmp, mGrayTmp, -1); // Flip along both axis
break;
case ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT:
Core.transpose(mGrayTmp, mGrayTmp); // Rotate image
break;
case ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE:
Core.flip(mGrayTmp, mGrayTmp, 1); // Flip along y-axis
break;
case ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE:
Core.flip(mGrayTmp, mGrayTmp, 0); // Flip along x-axis
break;
}
}
gray = mGrayTmp;
rgba = mRgbaTmp;
Imgproc.resize(gray, gray, new Size(200,200.0f/ ((float)gray.width()/ (float)gray.height())));
return rgba;
}
}
The error lines mentioned in the logcat refer to these Java code lines:
In TrainActivity.java:
recognize = LBPHFaceRecognizer.create(3,8,8,8,200);
recognize.train(imagesMatrix, vectorClasses);
And
if(trainfaces()) {
images.clear();
imagesLabels.clear();
}
Do you guys have any idea about this problem and how to fix it?
PS: I am a beginner with OpenCV.
You can solve it by resetting the stored preferences in onCreate of TrainActivity:
// -------------------------------
// reset preferences
local = new Storage(this);
images = new ArrayList<>();
imagesLabels = new ArrayList<>();
local.putListMat("images", images);
local.putListString("imagesLabels", imagesLabels);
// -------------------------------
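In addition (this is my assumption about where the bad data comes from, not something the asker confirmed), you could guard onStop so training only runs when the restored lists actually contain matching data:
// Optional guard: skip training on empty or mismatched restored lists.
@Override
protected void onStop() {
    super.onStop();
    if (images != null && imagesLabels != null
            && !images.isEmpty() && images.size() == imagesLabels.size()) {
        local.putListMat("images", images);
        local.putListString("imagesLabels", imagesLabels);
        Log.i(TAG, "Images have been saved");
        if (trainfaces()) {
            images.clear();
            imagesLabels.clear();
        }
    }
}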

How to get RecyclerView image data in MainActivity

What I am trying to do is get images from the gallery and the camera and place them in a RecyclerView.
Now the main part comes after an image is placed in the RecyclerView:
can anyone tell me how I can get the images shown in the RecyclerView back to the MainActivity when I click my upload button?
Thank you in advance.
My main activity:
package www.welkinfort.com.pphc;
import android.app.DatePickerDialog;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.DatePicker;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.Spinner;
import android.widget.TextView;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;
import butterknife.ButterKnife;
public class XenImageUploading extends AppCompatActivity implements AdapterView.OnItemSelectedListener, View.OnClickListener {
ArrayAdapter<String> des_dataAdapter;
ArrayList<String> discription_list = new ArrayList<>();
//@BindView(R.id.selct_prject_spinner)
Spinner select_pro_spn;
TextView datetextTextView;
Calendar myCalendar;
static Bitmap rotatedBitmap;
static Bitmap finalrotatedBitmap;
DatePickerDialog.OnDateSetListener date;
static final int REQUEST_TAKE_PHOTO = 1;
private int SELECT_FILE = 2;
private static String bitmap_overdraw_str;
private static ImageButton cameraclick;
private static ImageButton galleryclick;
private static ImageButton videoclick;
private static String mCurrentPhotoPath;
private static RecyclerView recyclerView;
ArrayList<Data_Model> arrayList;
ImageView image_view;
MyAdapter m;
Bitmap finalbitmap;
static String clickpath;
int i = 0;
String path ;
private static final String TAG = "XenImageUploading";
static File photoFile_1 = null;
private String userChoosenTask;
private Uri fileUri;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.xen_image_upload);
ButterKnife.bind(this);
setTitle("XEN IMAGE UPLOAD");
recyclerView = (RecyclerView) findViewById(R.id.recycler_view);
cameraclick = (ImageButton) findViewById(R.id.camera_btn);
galleryclick = (ImageButton) findViewById(R.id.gallery_btn);
image_view = (ImageView) findViewById(R.id.image_view);
arrayList = new ArrayList<>();
Log.d("oncreate", "set adapter");
bitmap_overdraw_str = "Lat:" + "aaaaaaaa" + "\nLong:" + "aaaaaaaa" + "\nDate:" + "aaaaaaaa";
recyclerView.setLayoutManager(new GridLayoutManager(this, 3));
///////////////////////////////////////////////////////////////////
//select_pro_spn = (Spinner) findViewById(R.id.selct_prject_spinner);
//datetextTextView = (TextView) findViewById(R.id.selctdate__txtv);
discription_list.add("Traffic light broken/not working");
discription_list.add("Traffic light pole hit by vehicle");
discription_list.add("No electricity connection");
discription_list.add("Traffic light not visible/partially visible");
// select_pro_spn.setOnItemSelectedListener(this);
// Creating adapter for spinner
des_dataAdapter = new ArrayAdapter<String>(this, android.R.layout.simple_spinner_item, discription_list);
// Drop down layout style - list view with radio button
des_dataAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
// attaching data adapter to spinner
// select_pro_spn.setAdapter(des_dataAdapter);
myCalendar = Calendar.getInstance();
date = new DatePickerDialog.OnDateSetListener() {
@Override
public void onDateSet(DatePicker view, int year, int monthOfYear,
int dayOfMonth) {
// TODO Auto-generated method stub
myCalendar.set(Calendar.YEAR, year);
myCalendar.set(Calendar.MONTH, monthOfYear);
myCalendar.set(Calendar.DAY_OF_MONTH, dayOfMonth);
updateLabel();
}
};
// datetextTextView.setOnClickListener(new View.OnClickListener() {
//
// @Override
// public void onClick(View v) {
// // TODO Auto-generated method stub
// new DatePickerDialog(XenImageUploading.this, date, myCalendar
// .get(Calendar.YEAR), myCalendar.get(Calendar.MONTH),
// myCalendar.get(Calendar.DAY_OF_MONTH)).show();
// }
// });
cameraclick.setOnClickListener(this);
galleryclick.setOnClickListener(this);
recyclerView.setOnClickListener(this);
}
private void updateLabel() {
String myFormat = "MM-dd-yyyy"; //In which you need put here
SimpleDateFormat sdf = new SimpleDateFormat(myFormat, Locale.US);
datetextTextView.setText(sdf.format(myCalendar.getTime()));
}
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
switch (parent.getId()) {
// case R.id.selct_prject_spinner:
// String selected_intersection = parent.getSelectedItem().toString();
// Log.e("Selected item ", selected_intersection);
// //parent.notifyAll();
// break;
}
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.camera_btn:
selectImage();
break;
case R.id.recycler_view:
getImageall();
break;
case R.id.gallery_btn:
Intent i = new Intent(
Intent.ACTION_PICK,
android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
i.putExtra(Intent.EXTRA_ALLOW_MULTIPLE, true);
i.setAction(Intent.ACTION_GET_CONTENT);
startActivityForResult(i, SELECT_FILE);
break;
}
}
private void getImageall() {
}
private void selectImage() {
takepicture();
}
public void takepicture() {
Log.d(TAG, "takepicture");
Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
if (takePictureIntent.resolveActivity(getPackageManager()) != null) {
try {
photoFile_1 = createImageFile();
} catch (IOException ex) {
// Error occurred while creating the File
}
// Continue only if the File was successfully created
if (photoFile_1 != null) {
takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT,
Uri.fromFile(photoFile_1));
startActivityForResult(takePictureIntent, REQUEST_TAKE_PHOTO);
}
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == REQUEST_TAKE_PHOTO && resultCode == RESULT_OK && null != data) {
// Save Image To Gallery
Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
File f = new File(mCurrentPhotoPath);
Uri contentUri = Uri.fromFile(f);
mediaScanIntent.setData(contentUri);
this.sendBroadcast(mediaScanIntent);
////////////////setting image to adapter on capturing///////////////////////////
clickpath = mCurrentPhotoPath;
Bitmap bitmap = BitmapUtility.decodeSampledBitmapFromResource(clickpath, 560, 300);
setCameraDisplayOrientation(bitmap);
try {
FileOutputStream fos = new FileOutputStream(photoFile_1);
finalrotatedBitmap.compress(Bitmap.CompressFormat.JPEG, 90, fos);
fos.close();
} catch (FileNotFoundException e) {
Log.d(TAG, "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d(TAG, "Error accessing file: " + e.getMessage());
}
Data_Model data_model = new Data_Model();
data_model.setImage(finalrotatedBitmap);
image_view.setImageBitmap(finalrotatedBitmap);
arrayList.add(data_model);
m = new MyAdapter(this, arrayList);
recyclerView.setAdapter(m);
m.notifyDataSetChanged();
}
if (requestCode == SELECT_FILE && resultCode == RESULT_OK && null != data) {
InputStream stream = null;
Uri uri = data.getData();
//for (int i =0 ; i<numberOfImages ;i++){
getImagePath(uri);
Data_Model data_model = new Data_Model();
data_model.setImage(finalbitmap);
image_view.setImageBitmap(finalbitmap);
arrayList.add(data_model);
m = new MyAdapter(this, arrayList);
recyclerView.setAdapter(m);
m.notifyDataSetChanged();
// }
}
}
public String getImagePath(Uri uri) {
Cursor cursor = getContentResolver().query(uri, null, null, null, null);
cursor.moveToFirst();
String document_id = cursor.getString(0);
document_id = document_id.substring(document_id.lastIndexOf(":") + 1);
cursor.close();
cursor = getContentResolver().query(
android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
null, MediaStore.Images.Media._ID + " = ? ", new String[]{document_id}, null);
cursor.moveToFirst();
path = cursor.getString(cursor.getColumnIndex(MediaStore.Images.Media.DATA));
cursor.close();
finalbitmap = BitmapUtility.decodeSampledBitmapFromResource(path, 560, 300);
//targetImage = (ImageView)findViewById(R.id.imageView1);
image_view.setImageBitmap(finalbitmap);
return path;
}
public static void setCameraDisplayOrientation(Bitmap fileresult) {
Log.e(TAG, "setCameraDisplayOrientation");
BitmapFactory.Options bounds = new BitmapFactory.Options();
bounds.inJustDecodeBounds = true;
BitmapFactory.decodeFile(clickpath, bounds);
BitmapFactory.Options opts = new BitmapFactory.Options();
Bitmap bm = BitmapFactory.decodeFile(clickpath, opts);
ExifInterface exif = null;
try {
exif = new ExifInterface(clickpath);
} catch (IOException e) {
e.printStackTrace();
}
String orientString = exif.getAttribute(ExifInterface.TAG_ORIENTATION);
int orientation = orientString != null ? Integer.parseInt(orientString) : ExifInterface.ORIENTATION_NORMAL;
int rotationAngle = 0;
if (orientation == ExifInterface.ORIENTATION_ROTATE_90) rotationAngle = 90;
if (orientation == ExifInterface.ORIENTATION_ROTATE_180) rotationAngle = 180;
if (orientation == ExifInterface.ORIENTATION_ROTATE_270) rotationAngle = 270;
Matrix matrix = new Matrix();
matrix.setRotate(rotationAngle, (float) bm.getWidth() / 2, (float) bm.getHeight() / 2);
rotatedBitmap = Bitmap.createBitmap(bm, 0, 0, bounds.outWidth, bounds.outHeight, matrix, true);
finalrotatedBitmap = AddTextonBitmap.textAsBitmap(rotatedBitmap, bitmap_overdraw_str);
}
private File createImageFile() throws IOException {
// Create an image file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
String imageFileName = "JPEG_" + timeStamp + "_";
File storageDir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES);
File image = File.createTempFile(
imageFileName, /* prefix */
".jpg", /* suffix */
storageDir /* directory */
);
// Save a file: path for use with ACTION_VIEW intents
mCurrentPhotoPath = image.getAbsolutePath();
return image;
}
}
Adapter class
package www.welkinfort.com.pphc;
import android.content.Context;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import java.util.ArrayList;
/**
* Created by Admin on 13-Jul-17.
*/
class MyAdapter extends RecyclerView.Adapter<MyAdapter.Myviewholder> {
private static final String TAG = "Adapter";
static Data_Model m;
XenImageUploading xenImageUploading;
ArrayList<Data_Model> arrayList;
private Context mContext;
public MyAdapter(XenImageUploading xenImageUploading, ArrayList<Data_Model> arrayList) {
this.arrayList = arrayList;
this.xenImageUploading = xenImageUploading;
}
@Override
public Myviewholder onCreateViewHolder(ViewGroup parent, int viewType) {
View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.abc, parent, false);
Myviewholder myviewholder = new Myviewholder(view);
Log.d("myactivty ", "oncreateViewHolder");
return myviewholder;
}
@Override
public void onBindViewHolder(final Myviewholder holder, final int position) {
m = arrayList.get(position);
Log.d(" myactivty", "onBindviewholder" + position);
holder.imageView.setImageBitmap(m.getImage());
}
@Override
public int getItemCount() {
return arrayList == null ? 0 : arrayList.size();
}
public class Myviewholder extends RecyclerView.ViewHolder {
// public View view;
public ImageView imageView;
public Myviewholder(View itemView) {
super(itemView);
imageView = (ImageView) itemView.findViewById(R.id.image);
}
}
}
Try this: create one method in your adapter like this:
public ArrayList<Data_Model> getArray() {
return arrayList;
}
Now, on your button click, just call this method:
upload.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
ArrayList<Data_Model> arrayList= adapter.getArray();
}
});
Ask me in case of any query.
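For reference, in the activity posted above the adapter field is called m, so the wiring would look roughly like this (the button id upload_btn is hypothetical):
// Hypothetical upload button; the adapter field in XenImageUploading is m.
ImageButton upload = (ImageButton) findViewById(R.id.upload_btn);
upload.setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(View v) {
        ArrayList<Data_Model> images = m.getArray();
        // images now holds every Data_Model (and its Bitmap) shown in the RecyclerView
    }
});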

Integrating ZBarScanner with an Android app

I am developing a mobile app to integrate with ZBarScanner. However, I am met with the errors "ZBarScannerActivity cannot be resolved to a type" and "ZBarConstants cannot be resolved to a variable". Please assist with the following code:
MainActivity.java:
package com.example.vscanner;
import net.sourceforge.zbar.Symbol;
import android.os.Bundle;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.view.Menu;
import android.view.View;
import android.widget.Toast;
public class MainActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
}
private static final int ZBAR_SCANNER_REQUEST = 0;
private static final int ZBAR_QR_SCANNER_REQUEST = 1;
public void launchScanner(View v) {
if (isCameraAvailable()) {
Intent intent = new Intent(this, ZBarScannerActivity.class);
startActivityForResult(intent, ZBAR_SCANNER_REQUEST);
} else {
Toast.makeText(this, "Rear Facing Camera Unavailable", Toast.LENGTH_SHORT).show();
}
}
public void launchQRScanner(View v) {
if (isCameraAvailable()) {
Intent intent = new Intent(this, ZBarScannerActivity.class);
intent.putExtra(ZBarConstants.SCAN_MODES, new int[]{Symbol.QRCODE});
startActivityForResult(intent, ZBAR_SCANNER_REQUEST);
} else {
Toast.makeText(this, "Rear Facing Camera Unavailable", Toast.LENGTH_SHORT).show();
}
}
public boolean isCameraAvailable() {
PackageManager pm = getPackageManager();
return pm.hasSystemFeature(PackageManager.FEATURE_CAMERA);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data)
{
if (resultCode == RESULT_OK)
{
// Scan result is available by making a call to data.getStringExtra(ZBarConstants.SCAN_RESULT)
// Type of the scan result is available by making a call to data.getStringExtra(ZBarConstants.SCAN_RESULT_TYPE)
Toast.makeText(this, "Scan Result = " + data.getStringExtra(ZBarConstants.SCAN_RESULT), Toast.LENGTH_SHORT).show();
Toast.makeText(this, "Scan Result Type = " + data.getStringExtra(ZBarConstants.SCAN_RESULT_TYPE), Toast.LENGTH_SHORT).show();
// The value of type indicates one of the symbols listed in Advanced Options below.
} else if(resultCode == RESULT_CANCELED) {
Toast.makeText(this, "Camera unavailable", Toast.LENGTH_SHORT).show();
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
}
Paste these 3 classes into your source folder.
ZBarConstants.java
package com.example.vscanner;
public interface ZBarConstants {
public static final String SCAN_MODES = "SCAN_MODES";
public static final String SCAN_RESULT = "SCAN_RESULT";
public static final String SCAN_RESULT_TYPE = "SCAN_RESULT_TYPE";
public static final String ERROR_INFO = "ERROR_INFO";
}
ZBarScannerActivity.java
package com.example.vscanner;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.hardware.Camera;
import android.os.Bundle;
import android.os.Handler;
import android.text.TextUtils;
import android.view.Window;
import android.view.WindowManager;
import net.sourceforge.zbar.Config;
import net.sourceforge.zbar.Image;
import net.sourceforge.zbar.ImageScanner;
import net.sourceforge.zbar.Symbol;
import net.sourceforge.zbar.SymbolSet;
public class ZBarScannerActivity extends Activity implements Camera.PreviewCallback, ZBarConstants {
private static final String TAG = "ZBarScannerActivity";
private CameraPreview mPreview;
private Camera mCamera;
private ImageScanner mScanner;
private Handler mAutoFocusHandler;
private boolean mPreviewing = true;
static {
System.loadLibrary("iconv");
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if(!isCameraAvailable()) {
// Cancel request if there is no rear-facing camera.
cancelRequest();
return;
}
// Hide the window title.
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
mAutoFocusHandler = new Handler();
// Create and configure the ImageScanner;
setupScanner();
// Create a RelativeLayout container that will hold a SurfaceView,
// and set it as the content of our activity.
mPreview = new CameraPreview(this, this, autoFocusCB);
setContentView(mPreview);
}
public void setupScanner() {
mScanner = new ImageScanner();
mScanner.setConfig(0, Config.X_DENSITY, 3);
mScanner.setConfig(0, Config.Y_DENSITY, 3);
int[] symbols = getIntent().getIntArrayExtra(SCAN_MODES);
if (symbols != null) {
mScanner.setConfig(Symbol.NONE, Config.ENABLE, 0);
for (int symbol : symbols) {
mScanner.setConfig(symbol, Config.ENABLE, 1);
}
}
}
@Override
protected void onResume() {
super.onResume();
// Open the default i.e. the first rear facing camera.
mCamera = Camera.open();
if(mCamera == null) {
// Cancel request if mCamera is null.
cancelRequest();
return;
}
mPreview.setCamera(mCamera);
mPreview.showSurfaceView();
mPreviewing = true;
}
@Override
protected void onPause() {
super.onPause();
// Because the Camera object is a shared resource, it's very
// important to release it when the activity is paused.
if (mCamera != null) {
mPreview.setCamera(null);
mCamera.cancelAutoFocus();
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
mCamera.release();
// According to Jason Kuang on http://stackoverflow.com/questions/6519120/how-to-recover-camera-preview-from-sleep,
// there might be surface recreation problems when the device goes to sleep. So lets just hide it and
// recreate on resume
mPreview.hideSurfaceView();
mPreviewing = false;
mCamera = null;
}
}
public boolean isCameraAvailable() {
PackageManager pm = getPackageManager();
return pm.hasSystemFeature(PackageManager.FEATURE_CAMERA);
}
public void cancelRequest() {
Intent dataIntent = new Intent();
dataIntent.putExtra(ERROR_INFO, "Camera unavailable");
setResult(Activity.RESULT_CANCELED, dataIntent);
finish();
}
public void onPreviewFrame(byte[] data, Camera camera) {
Camera.Parameters parameters = camera.getParameters();
Camera.Size size = parameters.getPreviewSize();
Image barcode = new Image(size.width, size.height, "Y800");
barcode.setData(data);
int result = mScanner.scanImage(barcode);
if (result != 0) {
mCamera.cancelAutoFocus();
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
mPreviewing = false;
SymbolSet syms = mScanner.getResults();
for (Symbol sym : syms) {
String symData = sym.getData();
if (!TextUtils.isEmpty(symData)) {
Intent dataIntent = new Intent();
dataIntent.putExtra(SCAN_RESULT, symData);
dataIntent.putExtra(SCAN_RESULT_TYPE, sym.getType());
setResult(Activity.RESULT_OK, dataIntent);
finish();
break;
}
}
}
}
private Runnable doAutoFocus = new Runnable() {
public void run() {
if(mCamera != null && mPreviewing) {
mCamera.autoFocus(autoFocusCB);
}
}
};
// Mimic continuous auto-focusing
Camera.AutoFocusCallback autoFocusCB = new Camera.AutoFocusCallback() {
public void onAutoFocus(boolean success, Camera camera) {
mAutoFocusHandler.postDelayed(doAutoFocus, 1000);
}
};
}
CameraPreview.java
package com.example.vscanner;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera.Size;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import java.io.IOException;
import java.util.List;
class CameraPreview extends ViewGroup implements SurfaceHolder.Callback {
private final String TAG = "CameraPreview";
SurfaceView mSurfaceView;
SurfaceHolder mHolder;
Size mPreviewSize;
List<Size> mSupportedPreviewSizes;
Camera mCamera;
PreviewCallback mPreviewCallback;
AutoFocusCallback mAutoFocusCallback;
CameraPreview(Context context, PreviewCallback previewCallback, AutoFocusCallback autoFocusCb) {
super(context);
mPreviewCallback = previewCallback;
mAutoFocusCallback = autoFocusCb;
mSurfaceView = new SurfaceView(context);
addView(mSurfaceView);
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = mSurfaceView.getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void setCamera(Camera camera) {
mCamera = camera;
if (mCamera != null) {
mSupportedPreviewSizes = mCamera.getParameters().getSupportedPreviewSizes();
requestLayout();
}
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
// We purposely disregard child measurements because we act as a
// wrapper to a SurfaceView that centers the camera preview instead
// of stretching it.
final int width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec);
final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
setMeasuredDimension(width, height);
if (mSupportedPreviewSizes != null) {
mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width, height);
}
}
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
if (changed && getChildCount() > 0) {
final View child = getChildAt(0);
final int width = r - l;
final int height = b - t;
int previewWidth = width;
int previewHeight = height;
if (mPreviewSize != null) {
previewWidth = mPreviewSize.width;
previewHeight = mPreviewSize.height;
}
// Center the child SurfaceView within the parent.
if (width * previewHeight > height * previewWidth) {
final int scaledChildWidth = previewWidth * height / previewHeight;
child.layout((width - scaledChildWidth) / 2, 0,
(width + scaledChildWidth) / 2, height);
} else {
final int scaledChildHeight = previewHeight * width / previewWidth;
child.layout(0, (height - scaledChildHeight) / 2,
width, (height + scaledChildHeight) / 2);
}
}
}
public void hideSurfaceView() {
mSurfaceView.setVisibility(View.INVISIBLE);
}
public void showSurfaceView() {
mSurfaceView.setVisibility(View.VISIBLE);
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
try {
if (mCamera != null) {
mCamera.setPreviewDisplay(holder);
}
} catch (IOException exception) {
Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// Surface will be destroyed when we return, so stop the preview.
if (mCamera != null) {
mCamera.cancelAutoFocus();
mCamera.stopPreview();
}
}
private Size getOptimalPreviewSize(List<Size> sizes, int w, int h) {
final double ASPECT_TOLERANCE = 0.1;
double targetRatio = (double) w / h;
if (sizes == null) return null;
Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
// Try to find a preview size that matches the aspect ratio and size
for (Size size : sizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
// Cannot find one that matches the aspect ratio, so ignore the requirement
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if (holder.getSurface() == null){
// preview surface does not exist
return;
}
if (mCamera != null) {
// Now that the size is known, set up the camera parameters and begin
// the preview.
Camera.Parameters parameters = mCamera.getParameters();
parameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height);
requestLayout();
mCamera.setParameters(parameters);
mCamera.setPreviewCallback(mPreviewCallback);
mCamera.startPreview();
mCamera.autoFocus(mAutoFocusCallback);
}
}
}
