Why is Google's ML Kit face detection crashing on .process()? - java

I am creating a face detector app that detects faces in real time and identifies landmarks on them. The landmark detection works perfectly fine; however, my real-time face detection isn't working at all.
I followed the instructions in Google's ML Kit documentation (https://developers.google.com/ml-kit/vision/face-detection/android), but I am really struggling to get real-time face detection working.
In my debugger, the code fails at faceDetector.process(image).addOnSuccessListener() and goes into onFailure() instead.
This is my code for the real-time face detection part (I have commented some parts and reduced redundancy):
@Override
//process method to detect faces frame by frame in real-time face detection
public void process(@NonNull Frame frame) {
int width = frame.getSize().getWidth();
int height = frame.getSize().getHeight();
InputImage image = InputImage.fromByteArray(
frame.getData(),
/* image width */width,
/* image height */height,
//if camera is facing front rotate image 90, else 270 degrees
(cameraFacing == Facing.FRONT) ? 90 : 270,
InputImage.IMAGE_FORMAT_YUV_420_888 // or IMAGE_FORMAT_YV12
);
FaceDetectorOptions faceDetectorOptions = new FaceDetectorOptions.Builder()
.setContourMode(FaceDetectorOptions.CONTOUR_MODE_ALL)
//setting contour mode to detect all facial contours in real time
.build();
FaceDetector faceDetector = FaceDetection.getClient(faceDetectorOptions);
faceDetector.process(image).addOnSuccessListener(new OnSuccessListener<List<Face>>() {
@Override
public void onSuccess(@NonNull List<Face> faces) {
imageView.setImageBitmap(null);
Bitmap bitmap = Bitmap.createBitmap(height, width, Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(bitmap);
Paint dotPaint = new Paint();
dotPaint.setColor(Color.YELLOW);
dotPaint.setStyle(Paint.Style.FILL);
dotPaint.setStrokeWidth(6f);
Paint linePaint = new Paint();
linePaint.setColor(Color.GREEN);
linePaint.setStyle(Paint.Style.STROKE);
linePaint.setStrokeWidth(4f);
//looping through each face to detect each contour
for (Face face : faces) {
List<PointF> faceContours = face.getContour(
FaceContour.FACE
).getPoints();
for (int i = 0; i < faceContours.size(); i++) {
PointF faceContour = null;
if (i != (faceContours.size() - 1)) {
faceContour = faceContours.get(i);
canvas.drawLine(
faceContour.x, faceContour.y, faceContours.get(i + 1).x, faceContours.get(i + 1).y, linePaint
);
} else {//if at the last point, draw to the first point
canvas.drawLine(faceContour.x, faceContour.y, faceContours.get(0).x, faceContours.get(0).y, linePaint);
}
canvas.drawCircle(faceContour.x, faceContour.y, 4f, dotPaint);
}//end inner loop
List<PointF> leftEyebrowTopCountours = face.getContour(
FaceContour.LEFT_EYEBROW_TOP).getPoints();
for (int i = 0; i < leftEyebrowTopCountours.size(); i++) {
PointF leftEyebrowTopContour = leftEyebrowTopCountours.get(i);
if (i != (leftEyebrowTopCountours.size() - 1))
canvas.drawLine(leftEyebrowTopContour.x, leftEyebrowTopContour.y, leftEyebrowTopCountours.get(i + 1).x, leftEyebrowTopCountours.get(i + 1).y, linePaint);
canvas.drawCircle(leftEyebrowTopContour.x, leftEyebrowTopContour.y, 4f, dotPaint);
}
}
}
Side note: I am using a Pixel 2 (API 29) emulator. I left out the repetitive code since I am just looping through the remaining contours the same way.
Full code for reference:
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.android.material.bottomsheet.BottomSheetBehavior;
import com.google.mlkit.vision.common.InputImage;
import com.google.mlkit.vision.face.Face;
import com.google.mlkit.vision.face.FaceContour;
import com.google.mlkit.vision.face.FaceDetection;
import com.google.mlkit.vision.face.FaceDetector;
import com.google.mlkit.vision.face.FaceDetectorOptions;
import com.google.mlkit.vision.face.FaceLandmark;
import com.otaliastudios.cameraview.CameraView;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.frame.FrameProcessor;
import com.theartofdev.edmodo.cropper.CropImage;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
public class MainActivity extends AppCompatActivity implements FrameProcessor {
private Facing cameraFacing = Facing.FRONT;
private ImageView imageView;
private CameraView faceDetectionCameraView;
private RecyclerView bottomSheetRecyclerView;
private BottomSheetBehavior bottomSheetBehavior;
private ArrayList<FaceDetectionModel> faceDetectionModels;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Toolbar toolbar = findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
faceDetectionModels = new ArrayList<>();
bottomSheetBehavior = BottomSheetBehavior.from(findViewById(R.id.bottom_sheet));
imageView = findViewById(R.id.face_detection_image_view);
faceDetectionCameraView = findViewById(R.id.face_detection_camera_view);
Button toggle = findViewById(R.id.face_detection_cam_toggle_button);
FrameLayout bottomSheetButton = findViewById(R.id.bottom_sheet_button);
bottomSheetRecyclerView = findViewById(R.id.bottom_sheet_recycler_view);
faceDetectionCameraView.setFacing(cameraFacing);
faceDetectionCameraView.setLifecycleOwner(MainActivity.this);
faceDetectionCameraView.addFrameProcessor(MainActivity.this);
toggle.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
cameraFacing = (cameraFacing == Facing.FRONT) ? Facing.BACK : Facing.FRONT;
faceDetectionCameraView.setFacing(cameraFacing);
}
});
bottomSheetButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
CropImage.activity().start(MainActivity.this);
}
});
bottomSheetRecyclerView.setLayoutManager(new LinearLayoutManager(MainActivity.this));
bottomSheetRecyclerView.setAdapter(new FaceDetectionAdapter(faceDetectionModels, MainActivity.this));
}
@Override
protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if(requestCode == CropImage.CROP_IMAGE_ACTIVITY_REQUEST_CODE){
CropImage.ActivityResult result = CropImage.getActivityResult(data);
if(resultCode == RESULT_OK){
Uri imageUri = result.getUri();
try {
analyseImage(MediaStore.Images.Media.getBitmap(getContentResolver(), imageUri));
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
private void analyseImage(Bitmap bitmap) {
if(bitmap == null){
Toast.makeText(this, "There was an error", Toast.LENGTH_SHORT).show();
}
//imageView.setImageBitmap(null);
faceDetectionModels.clear();
Objects.requireNonNull(bottomSheetRecyclerView.getAdapter()).notifyDataSetChanged();
bottomSheetBehavior.setState(BottomSheetBehavior.STATE_COLLAPSED);
showProgress();
InputImage firebaseInputImage = InputImage.fromBitmap(bitmap, 0);
FaceDetectorOptions options =
new FaceDetectorOptions.Builder()
.setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_ACCURATE)
.setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_ALL)
.setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_ALL)
.build();
FaceDetector faceDetector = FaceDetection.getClient(options);
faceDetector.process(firebaseInputImage)
.addOnSuccessListener(new OnSuccessListener<List<Face>>() {
@Override
public void onSuccess(@NonNull List<Face> faces) {
Bitmap mutableImage = bitmap.copy(Bitmap.Config.ARGB_8888, true);
detectFaces(faces, mutableImage);
imageView.setImageBitmap(mutableImage);
hideProgress();
bottomSheetRecyclerView.getAdapter().notifyDataSetChanged();
bottomSheetBehavior.setState(BottomSheetBehavior.STATE_EXPANDED);
}
})
.addOnFailureListener(new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception e) {
Toast.makeText(MainActivity.this, "There was an error", Toast.LENGTH_SHORT).show();
hideProgress();
}
});
}
private void detectFaces(List<Face> faces, Bitmap bitmap) {
if(faces == null || bitmap == null) {
Toast.makeText(this, "There was an error", Toast.LENGTH_SHORT).show();
return;
}
Canvas canvas = new Canvas(bitmap);
Paint facePaint = new Paint();
facePaint.setColor(Color.GREEN);
facePaint.setStyle(Paint.Style.STROKE);
facePaint.setStrokeWidth(5f);
Paint faceTextPaint = new Paint();
faceTextPaint.setColor(Color.BLUE);
faceTextPaint.setTextSize(30f);
faceTextPaint.setTypeface(Typeface.SANS_SERIF);
Paint landmarkPaint = new Paint();
landmarkPaint.setColor(Color.YELLOW);
landmarkPaint.setStyle(Paint.Style.FILL);
landmarkPaint.setStrokeWidth(8f);
for(int i = 0; i < faces.size(); i++){
canvas.drawRect(faces.get(i).getBoundingBox(), facePaint);
canvas.drawText("Face" + i,
(faces.get(i).getBoundingBox().centerX()
-(faces.get(i).getBoundingBox().width() >> 1) + 8f),
(faces.get(i).getBoundingBox().centerY() + (faces.get(i).getBoundingBox().height() >> 1) - 8f), facePaint);
Face face = faces.get(i); //get one face
if(face.getLandmark(FaceLandmark.LEFT_EYE) != null){
FaceLandmark leftEye = face.getLandmark(FaceLandmark.LEFT_EYE);
//Now we have our left eye, we draw a little circle
canvas.drawCircle(leftEye.getPosition().x, leftEye.getPosition().y, 8f, landmarkPaint);
}
if(face.getLandmark(FaceLandmark.RIGHT_EYE) != null){
FaceLandmark rightEye = face.getLandmark(FaceLandmark.RIGHT_EYE);
//Now we have our left eye, we draw a little circle
canvas.drawCircle(rightEye.getPosition().x, rightEye.getPosition().y, 8f, landmarkPaint);
}
if(face.getLandmark(FaceLandmark.NOSE_BASE) != null){
FaceLandmark noseBase = face.getLandmark(FaceLandmark.NOSE_BASE);
//Now we have our left eye, we draw a little circle
canvas.drawCircle(noseBase.getPosition().x, noseBase.getPosition().y, 8f, landmarkPaint);
}
if(face.getLandmark(FaceLandmark.LEFT_EAR) != null){
FaceLandmark leftEar = face.getLandmark(FaceLandmark.LEFT_EAR);
//Now we have our left eye, we draw a little circle
canvas.drawCircle(leftEar.getPosition().x, leftEar.getPosition().y, 8f, landmarkPaint);
}
if(face.getLandmark(FaceLandmark.RIGHT_EAR) != null){
FaceLandmark rightEar = face.getLandmark(FaceLandmark.RIGHT_EAR);
//Now we have our left eye, we draw a little circle
canvas.drawCircle(rightEar.getPosition().x, rightEar.getPosition().y, 8f, landmarkPaint);
}
if(face.getLandmark(FaceLandmark.MOUTH_LEFT) != null && face.getLandmark(FaceLandmark.MOUTH_BOTTOM) != null && face.getLandmark(FaceLandmark.MOUTH_RIGHT) != null){
FaceLandmark mouthLeft = face.getLandmark(FaceLandmark.MOUTH_LEFT);
FaceLandmark mouthRight = face.getLandmark(FaceLandmark.MOUTH_RIGHT);
FaceLandmark mouthBottom = face.getLandmark(FaceLandmark.MOUTH_BOTTOM);
//Now we have our left eye, we draw a little circle
canvas.drawLine(mouthLeft.getPosition().x, mouthLeft.getPosition().y, mouthBottom.getPosition().x, mouthBottom.getPosition().y, landmarkPaint);
canvas.drawLine(mouthBottom.getPosition().x, mouthBottom.getPosition().y, mouthRight.getPosition().x, mouthRight.getPosition().y, landmarkPaint);
}
faceDetectionModels.add(new FaceDetectionModel(i, "Smiling probability"
+ face.getSmilingProbability()));
faceDetectionModels.add(new FaceDetectionModel(i, "Left eye open probability"
+ face.getLeftEyeOpenProbability()));
faceDetectionModels.add(new FaceDetectionModel(i, "Right eye open probability"
+ face.getRightEyeOpenProbability()));
}
}
private void showProgress() {
findViewById(R.id.bottom_sheet_button_img).setVisibility(View.GONE);
findViewById(R.id.bottom_sheet_butotn_progress_bar).setVisibility(View.VISIBLE);
}
private void hideProgress() {
findViewById(R.id.bottom_sheet_button_img).setVisibility(View.VISIBLE);
findViewById(R.id.bottom_sheet_butotn_progress_bar).setVisibility(View.GONE);
}
//real-time detection starts HERE
@Override
public void process(@NonNull Frame frame) {
//setting up width and frame height
int width = frame.getSize().getWidth();
int height = frame.getSize().getHeight();
byte[] byteArray = frame.getData();
InputImage image = InputImage.fromByteArray(
//frame.getData()
byteArray,
width,
height,
//rotation
(cameraFacing == Facing.FRONT) ? 90 : 270,
//image format
InputImage.IMAGE_FORMAT_YV12 // or IMAGE_FORMAT_NV21
);
//Contour mode all is real time contour detection
FaceDetectorOptions realTimeOpts = new FaceDetectorOptions.Builder()
.setContourMode(FaceDetectorOptions.CONTOUR_MODE_ALL)
.build();
FaceDetector faceDetector = FaceDetection.getClient(realTimeOpts);
faceDetector.process(image).addOnSuccessListener(new OnSuccessListener<List<Face>>() {
@Override
public void onSuccess(@NonNull List<Face> faces) {
//don't have image yet set to null first
imageView.setImageBitmap(null);
//bitmap stores pixels of image
Bitmap bitmap = Bitmap.createBitmap(height, width, Bitmap.Config.ARGB_8888);
//canvas hold the draw calls -- write into the bitmap
Canvas canvas = new Canvas(bitmap);
//paint specifies what the canvas should draw
Paint dotPaint = new Paint();
dotPaint.setColor(Color.YELLOW);
dotPaint.setStyle(Paint.Style.FILL);
dotPaint.setStrokeWidth(6f);
Paint linePaint = new Paint();
linePaint.setColor(Color.GREEN);
linePaint.setStyle(Paint.Style.STROKE);
linePaint.setStrokeWidth(4f);
for (Face face : faces) {
//fetching contours
List<PointF> faceContours = face.getContour(
FaceContour.FACE
).getPoints();
for (int i = 0; i < faceContours.size(); i++) {
PointF faceContour = faceContours.get(i);
if (i != (faceContours.size() - 1)) {
canvas.drawLine(
//if not at last index, continue drawing to next index
faceContour.x, faceContour.y, faceContours.get(i + 1).x, faceContours.get(i + 1).y, linePaint
);
} else {
return;
}
//always draw circle
canvas.drawCircle(faceContour.x, faceContour.y, 4f, dotPaint);
}//end inner loop
List<PointF> leftEyebrowTopCountours = face.getContour(
FaceContour.LEFT_EYEBROW_TOP).getPoints();
for (int i = 0; i < leftEyebrowTopCountours.size(); i++) {
PointF leftEyebrowTopContour = leftEyebrowTopCountours.get(i);
if (i != (leftEyebrowTopCountours.size() - 1)) {
canvas.drawLine(leftEyebrowTopContour.x, leftEyebrowTopContour.y, leftEyebrowTopCountours.get(i + 1).x, leftEyebrowTopCountours.get(i + 1).y, linePaint);
}else{
return;
}
canvas.drawCircle(leftEyebrowTopContour.x, leftEyebrowTopContour.y, 4f, dotPaint);
}
List<PointF> rightEyebrowTopCountours = face.getContour(
FaceContour.RIGHT_EYEBROW_TOP).getPoints();
for (int i = 0; i < rightEyebrowTopCountours.size(); i++) {
PointF rightEyebrowContour = rightEyebrowTopCountours.get(i);
if (i != (rightEyebrowTopCountours.size() - 1)) {
canvas.drawLine(rightEyebrowContour.x, rightEyebrowContour.y, rightEyebrowTopCountours.get(i + 1).x, rightEyebrowTopCountours.get(i + 1).y, linePaint);
}else{
return;
}
canvas.drawCircle(rightEyebrowContour.x, rightEyebrowContour.y, 4f, dotPaint);
}
List<PointF> rightEyebrowBottomCountours = face.getContour(
FaceContour.RIGHT_EYEBROW_BOTTOM).getPoints();
for (int i = 0; i < rightEyebrowBottomCountours.size(); i++) {
PointF rightEyebrowBottomContour = rightEyebrowBottomCountours.get(i);
if (i != (rightEyebrowBottomCountours.size() - 1)) {
canvas.drawLine(rightEyebrowBottomContour.x, rightEyebrowBottomContour.y, rightEyebrowBottomCountours.get(i + 1).x, rightEyebrowBottomCountours.get(i + 1).y, linePaint);
}else{
return;
}
canvas.drawCircle(rightEyebrowBottomContour.x, rightEyebrowBottomContour.y, 4f, dotPaint);
}
List<PointF> leftEyeContours = face.getContour(
FaceContour.LEFT_EYE).getPoints();
for (int i = 0; i < leftEyeContours.size(); i++) {
PointF leftEyeContour = leftEyeContours.get(i);
if (i != (leftEyeContours.size() - 1)) {
canvas.drawLine(leftEyeContour.x, leftEyeContour.y, leftEyeContours.get(i + 1).x, leftEyeContours.get(i + 1).y, linePaint);
} else {
return;
}
canvas.drawCircle(leftEyeContour.x, leftEyeContour.y, 4f, dotPaint);
}
List<PointF> rightEyeContours = face.getContour(
FaceContour.RIGHT_EYE).getPoints();
for (int i = 0; i < rightEyeContours.size(); i++) {
PointF rightEyeContour = rightEyeContours.get(i);
if (i != (rightEyeContours.size() - 1)) {
canvas.drawLine(rightEyeContour.x, rightEyeContour.y, rightEyeContours.get(i + 1).x, rightEyeContours.get(i + 1).y, linePaint);
} else {
return;
}
canvas.drawCircle(rightEyeContour.x, rightEyeContour.y, 4f, dotPaint);
}
List<PointF> upperLipTopContour = face.getContour(
FaceContour.UPPER_LIP_TOP).getPoints();
for (int i = 0; i < upperLipTopContour.size(); i++) {
PointF upperLipContour = upperLipTopContour.get(i);
if (i != (upperLipTopContour.size() - 1)) {
canvas.drawLine(upperLipContour.x, upperLipContour.y,
upperLipTopContour.get(i + 1).x,
upperLipTopContour.get(i + 1).y, linePaint);
}else{
return;
}
canvas.drawCircle(upperLipContour.x, upperLipContour.y, 4f, dotPaint);
}
List<PointF> upperLipBottomContour = face.getContour(
FaceContour.UPPER_LIP_BOTTOM).getPoints();
for (int i = 0; i < upperLipBottomContour.size(); i++) {
PointF upBottom = upperLipBottomContour.get(i);
if (i != (upperLipBottomContour.size() - 1)) {
canvas.drawLine(upBottom.x, upBottom.y, upperLipBottomContour.get(i + 1).x, upperLipBottomContour.get(i + 1).y, linePaint);
}else{
return;
}
canvas.drawCircle(upBottom.x, upBottom.y, 4f, dotPaint);
}
List<PointF> lowerLipTopContour = face.getContour(
FaceContour.LOWER_LIP_TOP).getPoints();
for (int i = 0; i < lowerLipTopContour.size(); i++) {
PointF lowerTop = lowerLipTopContour.get(i);
if (i != (lowerLipTopContour.size() - 1)) {
canvas.drawLine(lowerTop.x, lowerTop.y, lowerLipTopContour.get(i + 1).x, lowerLipTopContour.get(i + 1).y, linePaint);
}
else{
return;
}
canvas.drawCircle(lowerTop.x, lowerTop.y, 4f, dotPaint);
}
List<PointF> lowerLipBottomContour = face.getContour(
FaceContour.LOWER_LIP_BOTTOM).getPoints();
for (int i = 0; i < lowerLipBottomContour.size(); i++) {
PointF lowerBottom = lowerLipBottomContour.get(i);
if (i != (lowerLipBottomContour.size() - 1)) {
canvas.drawLine(lowerBottom.x, lowerBottom.y, lowerLipBottomContour.get(i + 1).x, lowerLipBottomContour.get(i + 1).y, linePaint);
}else{
return;
}
canvas.drawCircle(lowerBottom.x, lowerBottom.y, 4f, dotPaint);
}
List<PointF> noseBridgeContours = face.getContour(
FaceContour.NOSE_BRIDGE).getPoints();
for (int i = 0; i < noseBridgeContours.size(); i++) {
PointF noseBridge = noseBridgeContours.get(i);
if (i != (noseBridgeContours.size() - 1)) {
canvas.drawLine(noseBridge.x, noseBridge.y, noseBridgeContours.get(i + 1).x, noseBridgeContours.get(i + 1).y, linePaint);
}else{
return;
}
canvas.drawCircle(noseBridge.x, noseBridge.y, 4f, dotPaint);
}
List<PointF> noseBottomContours = face.getContour(
FaceContour.NOSE_BOTTOM).getPoints();
for (int i = 0; i < noseBottomContours.size(); i++) {
PointF noseBottom = noseBottomContours.get(i);
if (i != (noseBottomContours.size() - 1)) {
canvas.drawLine(noseBottom.x, noseBottom.y, noseBottomContours.get(i + 1).x, noseBottomContours.get(i + 1).y, linePaint);
}else{
return;
}
canvas.drawCircle(noseBottom.x, noseBottom.y, 4f, dotPaint);
//facing front flip image
if (cameraFacing == Facing.FRONT) {
//Flip image!
Matrix matrix = new Matrix();
matrix.preScale(-1f, 1f);
Bitmap flippedBitmap = Bitmap.createBitmap(bitmap, 0, 0,
bitmap.getWidth(), bitmap.getHeight(),
matrix, true);
imageView.setImageBitmap(flippedBitmap);
} else {
imageView.setImageBitmap(bitmap);
}
}//end outer loop
canvas.save();
}
}
}).addOnFailureListener(new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception e) {
imageView.setImageBitmap(null);
}
});
}
}
Edit: I am now getting this error:
2021-04-27 19:12:05.335 538-1065/system_process E/JavaBinder: *** Uncaught remote exception! (Exceptions are not yet supported across processes.)
java.lang.RuntimeException: android.os.RemoteException: Couldn't get ApplicationInfo for package android.frameworks.sensorservice@1.0::ISensorManager
at android.os.Parcel.writeException(Parcel.java:2158)
at android.os.Binder.execTransactInternal(Binder.java:1178)
at android.os.Binder.execTransact(Binder.java:1123)
Caused by: android.os.RemoteException: Couldn't get ApplicationInfo for package android.frameworks.sensorservice@1.0::ISensorManager
at com.android.server.pm.PackageManagerService$PackageManagerNative.getTargetSdkVersionForPackage(PackageManagerService.java:23957)
at android.content.pm.IPackageManagerNative$Stub.onTransact(IPackageManagerNative.java:255)
at android.os.Binder.execTransactInternal(Binder.java:1159)
at android.os.Binder.execTransact(Binder.java:1123) 
Thank you so much!!

If you want to use camera streaming output with ML Kit, you can use the CameraX ImageAnalysis use case. It produces android.media.Image objects in YUV_420_888 format, which can be converted directly to an ML Kit InputImage.
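A minimal sketch of that approach (my own illustration, not official sample code): it assumes it lives in your Activity, that faceDetector is the detector built in your question, and that the returned use case still gets bound to the lifecycle via ProcessCameraProvider (not shown here).
import android.annotation.SuppressLint;
import androidx.camera.core.ImageAnalysis;
import androidx.core.content.ContextCompat;
import com.google.mlkit.vision.common.InputImage;

// Builds an ImageAnalysis use case that feeds each camera frame to faceDetector.
@SuppressLint("UnsafeOptInUsageError") // for ImageProxy.getImage(); name varies by camera-core version
private ImageAnalysis buildAnalysis() {
    ImageAnalysis analysis = new ImageAnalysis.Builder()
            .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
            .build();
    analysis.setAnalyzer(ContextCompat.getMainExecutor(this), imageProxy -> {
        android.media.Image mediaImage = imageProxy.getImage();
        if (mediaImage == null) {
            imageProxy.close();
            return;
        }
        // CameraX delivers YUV_420_888 frames; fromMediaImage accepts them
        // directly, together with the frame's rotation.
        InputImage image = InputImage.fromMediaImage(
                mediaImage, imageProxy.getImageInfo().getRotationDegrees());
        faceDetector.process(image)
                .addOnSuccessListener(faces -> { /* draw contours as before */ })
                .addOnFailureListener(Throwable::printStackTrace)
                // Close the frame only after processing completes, otherwise
                // the analyzer stops receiving new frames.
                .addOnCompleteListener(task -> imageProxy.close());
    });
    return analysis;
}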
Alternatively, you can also use the CameraXSource library that ML Kit has just published. The sample code is here. This eliminates the boilerplate code for setting up CameraX use cases and creates ML Kit inputs for you internally from the CameraX output. Note that this is still a beta SDK. We are looking forward to your feedback.
In order to use the API, you need to add the following dependency into your app:
implementation 'com.google.mlkit:camera:16.0.0-beta1'

Related

Printing out a bitmap QR code image with Brother Label Printer SDK prints out a blank label

I need to be able to print out a bitmap QR Code using my Brother QL-720NW.
As of right now, I'm able to generate a QR code bitmap and display it properly in an ImageView. On a button press, the user needs to be able to print that QR code bitmap from the Brother label printer.
I am able to make a connection to the printer, but I can only print out blank labels that do not show the QR code. How can I fix this so that the bitmap appears on the printed label properly?
Method for printing bitmap:
void printImage(Bitmap bitmap) {
// Specify printer
final Printer printer = new Printer();
PrinterInfo settings = printer.getPrinterInfo();
settings.ipAddress = "192.168.2.149";
settings.workPath = "/storage/emulated/0/Download";
settings.printerModel = PrinterInfo.Model.QL_720NW;
settings.port = PrinterInfo.Port.NET;
settings.orientation = PrinterInfo.Orientation.LANDSCAPE;
//settings.paperSize = PrinterInfo.PaperSize.CUSTOM;
settings.align = PrinterInfo.Align.CENTER;
settings.valign = PrinterInfo.VAlign.MIDDLE;
settings.printMode = PrinterInfo.PrintMode.ORIGINAL;
settings.numberOfCopies = 1;
settings.labelNameIndex = LabelInfo.QL700.W62RB.ordinal();
settings.isAutoCut = true;
settings.isCutAtEnd = false;
printer.setPrinterInfo(settings);
// Connect, then print
new Thread(new Runnable() {
@Override
public void run() {
if (printer.startCommunication()) {
Log.e("Tag: ", "Connection made.");
PrinterStatus result = printer.printImage(bitmap);
Log.e("Tag: ", "Printing!");
if (result.errorCode != PrinterInfo.ErrorCode.ERROR_NONE) {
Log.d("TAG", "ERROR - " + result.errorCode);
}
printer.endCommunication();
}
else {
Log.e("Tag: ", "Cannot make a connection.");
}
}
}).start();
}
Generating bitmap:
Bitmap encodeAsBitmap(String str) throws WriterException {
QRCodeWriter writer = new QRCodeWriter();
BitMatrix bitMatrix = writer.encode(str, BarcodeFormat.QR_CODE, 100, 100);
int w = bitMatrix.getWidth();
int h = bitMatrix.getHeight();
int[] pixels = new int[w * h];
for (int y = 0; y < h; y++) {
for (int x = 0; x < w; x++) {
pixels[y * w + x] = bitMatrix.get(x, y) ? Color.BLACK : Color.WHITE;
}
}
Bitmap bitmap = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
bitmap.setPixels(pixels, 0, w, 0, 0, w, h);
return bitmap;
}
Solved it: I was using LabelInfo.QL700.W62RB.ordinal() for the labelNameIndex when I should have been using LabelInfo.QL700.W62.ordinal().
Works perfectly now!
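For anyone comparing: the only change needed relative to the printImage() method above was this one setting (presumably W62RB did not match the media actually loaded in the printer):
settings.labelNameIndex = LabelInfo.QL700.W62.ordinal(); // was W62RB.ordinal()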

Detecting if the Bitmap I have is black and white or a color image

I have an image URI and I am getting the Bitmap from that URI using the code below:
Bitmap bitmap = null;
try {
bitmap = MediaStore.Images.Media.getBitmap(getContext().getContentResolver(), uri);
} catch (IOException e) {
e.printStackTrace();
}
Now I want to check if the Bitmap is a black and white or a color image.
(There will either be a black/white image or a colored one)
How can I do this?
I am using Java on Android.
I created an extension function in Kotlin; you can use this or write a similar function in Java with the same logic:
fun Bitmap.isColored(): Boolean{
for (x in 0 until this.width){
for (y in 0 until this.height){
val color = this.getColor(x,y)
val blue = color.blue()
val red = color.red()
val green = color.green()
if(blue!=red || blue!=green || red!=green){
return true
}
}
}
return false
}
To use the extension function, just call isColored = yourBitmap.isColored().
Edit: the equivalent Java function:
@RequiresApi(Build.VERSION_CODES.Q)
boolean isColored(Bitmap bitmap){
for (int x = 0; x< bitmap.getWidth(); x++){
for (int y = 0 ;y < bitmap.getHeight(); y++){
Color color = bitmap.getColor(x,y);
float blue = color.blue();
float red = color.red();
float green = color.green();
Log.d( "blue: " , Float.toString(blue));
Log.d( "red: " , Float.toString(red));
Log.d( "green: " , Float.toString(green));
if(blue!=red || blue!=green || red!=green){
return true;
}
}
}
return false;
}
Edit 2: Main activity in java showing its use
public class MainActivity2 extends AppCompatActivity {
@RequiresApi(api = Build.VERSION_CODES.Q)
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main2);
InputStream ins= getResources().openRawResource(
getResources().getIdentifier(
"image",
"raw", getPackageName()
)
);
Bitmap bitmap = BitmapFactory.decodeStream(ins);
Log.d( "isColored: ", "" + isColored(bitmap));
}
@RequiresApi(Build.VERSION_CODES.Q)
boolean isColored(Bitmap bitmap){
for (int x = 0; x< bitmap.getWidth(); x++){
for (int y = 0 ;y < bitmap.getHeight(); y++){
Color color = bitmap.getColor(x,y);
float blue = color.blue();
float red = color.red();
float green = color.green();
Log.d( "blue: " , Float.toString(blue));
Log.d( "red: " , Float.toString(red));
Log.d( "green: " , Float.toString(green));
if(blue!=red || blue!=green || red!=green){
return true;
}
}
}
return false;
}
}

The orientation of the rear camera is inverse portrait

I'm trying to switch the camera with a button, and I'm successful in that, but the problem is that the preview of the rear camera is in inverse portrait. I've tried setDisplayOrientation(), but nothing changes... Maybe I've put it on the wrong line. This is the code; with the button I call the startCamera() method:
public class MainRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
public static final String TAG = "MainRenderer";
public static final int DEVICE_ORIENTATION_PORTRAIT = 0;
public static final int DEVICE_ORIENTATION_INVERSE_PORTRAIT = 1;
public static final int DEVICE_ORIENTATION_LANDSCAPE = 2;
public static final int DEVICE_ORIENTATION_INVERSE_LANDSCAPE = 3;
private Camera.CameraInfo cameraInfo;
public volatile int deviceOrientation = DEVICE_ORIENTATION_PORTRAIT;
private FSDK.HTracker tracker;
private int[] textures;
private Camera camera;
private SurfaceTexture surfaceTexture;
private boolean updateSurfaceTexture = false;
private FSDK.FSDK_Features[] trackingFeatures;
private MR.MaskFeatures maskCoords;
private int[] isMaskTexture1Created = new int[]{0};
private int[] isMaskTexture2Created = new int[]{0};
private int width;
private int height;
private ByteBuffer pixelBuffer;
private FSDK.HImage cameraImage = new FSDK.HImage();
private FSDK.FSDK_IMAGEMODE cameraImageMode = new FSDK.FSDK_IMAGEMODE();
private FSDK.HImage snapshotImage = new FSDK.HImage();
private FSDK.FSDK_IMAGEMODE snapshotImageMode = new FSDK.FSDK_IMAGEMODE();
private MainView mainView;
private MainActivity mainActivity;
private volatile boolean isResizeCalled = false;
private volatile boolean isResized = false;
public long IDs[] = new long[MR.MAX_FACES];
public long face_count[] = new long[1];
private long frameCount = 0;
private long startTime = 0;
private AtomicBoolean isTakingSnapshot = new AtomicBoolean(false);
public static final int[][] MASKS = new int[][]{
{R.raw.lips_pink, R.drawable.lips_pink, R.drawable.lips_pink_normal, MR.SHIFT_TYPE_NO},
{R.raw.lips_purple, R.drawable.lips_purple, R.drawable.lips_purple_normal, MR.SHIFT_TYPE_NO},
{R.raw.lips_red, R.drawable.lips_red, R.drawable.lips_red_normal, MR.SHIFT_TYPE_NO},
};
private int mask = 0;
private int maskLoaded = 0;
private volatile boolean isMaskChanged = false;
private boolean inPreview = false;
public void changeMask(int i) {
mask += i;
isMaskChanged = true;
}
public MainRenderer(MainView view) {
tracker = Application.tracker;
mainView = view;
mainActivity = (MainActivity) mainView.getContext();
trackingFeatures = new FSDK.FSDK_Features[MR.MAX_FACES];
for (int i = 0; i < MR.MAX_FACES; ++i) {
trackingFeatures[i] = new FSDK.FSDK_Features();
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j] = new FSDK.TPoint();
}
}
maskCoords = new MR.MaskFeatures();
}
public void close() {
updateSurfaceTexture = false;
surfaceTexture.release();
camera.stopPreview();
camera.release();
camera = null;
deleteTex();
}
public void startCamera() {
if (inPreview) {
camera.stopPreview();
inPreview = false;
}
//NB: if you don't release the current camera before switching, your app will crash
camera.release();
//swap the id of the camera to be used
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
cameraInfo.facing = Camera.CameraInfo.CAMERA_FACING_FRONT;
} else {
cameraInfo.facing = Camera.CameraInfo.CAMERA_FACING_BACK;
}
camera = Camera.open(cameraInfo.facing);
//Code snippet for this method from somewhere on android developers, i forget where
//setCameraDisplayOrientation(mainActivity, cameraInfo.facing, camera);
try {
//this step is critical or the preview on the new camera will not know where to render to
camera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
e.printStackTrace();
}
camera.startPreview();
inPreview = true;
}
public void onSurfaceCreated(GL10 unused, EGLConfig config) { //call opengl functions only inside these functions!
Log.d(TAG, "surfaceCreated");
isResizeCalled = false;
isResized = false;
initTex();
loadMask(mask);
surfaceTexture = new SurfaceTexture(textures[0]);
surfaceTexture.setOnFrameAvailableListener(this);
// Find the ID of the camera
int cameraId = 0;
boolean frontCameraFound = false;
cameraInfo = new Camera.CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
}
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
cameraId = i;
frontCameraFound = true;
}
}
if (frontCameraFound) {
camera = Camera.open(cameraId);
} else {
camera = Camera.open();
}
try {
camera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
e.printStackTrace();
}
GLES11.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); //background color
}
private byte[] readBytes(InputStream inputStream) throws IOException {
ByteArrayOutputStream byteBuffer = new ByteArrayOutputStream();
int bufferSize = 16384;
byte[] buffer = new byte[bufferSize];
int len;
while ((len = inputStream.read(buffer)) != -1) {
byteBuffer.write(buffer, 0, len);
}
return byteBuffer.toByteArray();
}
// must be called from the thread with OpenGL context!
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
public void loadMask(int maskNumber) {
GLES11.glDisable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
Log.d(TAG, "Loading mask...");
int[] mask = MASKS[maskNumber];
if (isMaskTexture1Created[0] > 0) {
GLES11.glDeleteTextures(1, textures, 1);
}
if (isMaskTexture2Created[0] > 0) {
GLES11.glDeleteTextures(1, textures, 2);
}
isMaskTexture1Created[0] = 0;
isMaskTexture2Created[0] = 0;
InputStream stream = mainView.getResources().openRawResource(mask[0]);
int res = MR.LoadMaskCoordsFromStream(stream, maskCoords);
try {
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (res != FSDK.FSDKE_OK) {
Log.e(TAG, "Error loading mask coords from stream: " + res);
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
return;
}
BitmapFactory.Options bitmapDecodingOptions = new BitmapFactory.Options();
bitmapDecodingOptions.inScaled = false; // to load original image without scaling
FSDK.HImage img1 = new FSDK.HImage();
if (mask[1] == -1) { // if no image
FSDK.CreateEmptyImage(img1);
} else {
stream = mainView.getResources().openRawResource(mask[1]);
byte[] data = null;
try {
data = readBytes(stream);
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (data != null) {
res = FSDK.LoadImageFromPngBufferWithAlpha(img1, data, data.length);
Log.d(TAG, "Load mask image of size " + data.length + " with result " + res);
int[] w = new int[]{0};
int[] h = new int[]{0};
FSDK.GetImageWidth(img1, w);
FSDK.GetImageHeight(img1, h);
Log.d(TAG, "Mask image size: " + w[0] + " x " + h[0]);
} else {
Log.w(TAG, "Error loading mask image, using empty image");
FSDK.CreateEmptyImage(img1);
}
}
FSDK.HImage img2 = new FSDK.HImage();
if (mask[2] == -1) { // if no normal image
FSDK.CreateEmptyImage(img2);
} else {
stream = mainView.getResources().openRawResource(mask[2]);
byte[] data = null;
try {
data = readBytes(stream);
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (data != null) {
res = FSDK.LoadImageFromPngBufferWithAlpha(img2, data, data.length);
Log.d(TAG, "Load mask normal image of size " + data.length + " with result " + res);
int[] w = new int[]{0};
int[] h = new int[]{0};
FSDK.GetImageWidth(img2, w);
FSDK.GetImageHeight(img2, h);
Log.d(TAG, "Mask normal image size: " + w[0] + " x " + h[0]);
} else {
Log.w(TAG, "Error loading mask normal image, using empty image");
FSDK.CreateEmptyImage(img2);
}
}
res = MR.LoadMask(img1, img2, textures[1], textures[2], isMaskTexture1Created, isMaskTexture2Created);
FSDK.FreeImage(img1);
FSDK.FreeImage(img2);
Log.d(TAG, "Mask loaded with result " + res + " texture1Created:" + isMaskTexture1Created[0] + " texture2Created:" + isMaskTexture2Created[0]);
Log.d(TAG, "Mask textures: " + textures[1] + " " + textures[2]);
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
}
public void onDrawFrame(GL10 unused) { //call opengl functions only inside these functions!
GLES11.glClear(GLES11.GL_COLOR_BUFFER_BIT);
if (!isResized) {
return;
}
synchronized (this) {
if (updateSurfaceTexture) {
surfaceTexture.updateTexImage();
updateSurfaceTexture = false;
}
}
if (isMaskChanged) {
maskLoaded = mask;
loadMask(mask);
isMaskChanged = false;
}
int rotation = 1;
// First, drawing without mask to get image buffer
int res = MR.DrawGLScene(textures[0], 0, trackingFeatures, rotation, MR.SHIFT_TYPE_NO, textures[1], textures[2], maskCoords, 0, 0, width, height);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error in the first MR.DrawGLScene call: " + res);
}
GLES11.glReadPixels(0, 0, width, height, GLES11.GL_RGBA, GLES11.GL_UNSIGNED_BYTE, pixelBuffer);
face_count[0] = 0;
processCameraImage();
// Second, drawing with mask atop of image
res = MR.DrawGLScene(textures[0], (int) face_count[0], trackingFeatures, rotation, MASKS[maskLoaded][3], textures[1], textures[2], maskCoords, isMaskTexture1Created[0], isMaskTexture2Created[0], width, height);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error in the second MR.DrawGLScene call: " + res);
}
// Save snapshot if needed
if (isTakingSnapshot.compareAndSet(true, false)) {
GLES11.glReadPixels(0, 0, width, height, GLES11.GL_RGBA, GLES11.GL_UNSIGNED_BYTE, pixelBuffer);
snapshotImageMode.mode = FSDK.FSDK_IMAGEMODE.FSDK_IMAGE_COLOR_32BIT;
res = FSDK.LoadImageFromBuffer(snapshotImage, pixelBuffer.array(), width, height, width * 4, snapshotImageMode);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error loading snapshot image to FaceSDK: " + res);
} else {
FSDK.MirrorImage(snapshotImage, false);
String galleryPath = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES).getAbsolutePath();
final String filename = galleryPath + "/MirrorRealityDemo" + System.currentTimeMillis() + ".png";
res = FSDK.SaveImageToFile(snapshotImage, filename);
Log.d(TAG, "saving snapshot to " + filename);
FSDK.FreeImage(snapshotImage);
if (FSDK.FSDKE_OK == res) {
mainActivity.runOnUiThread(new Runnable() {
@Override
public void run() {
Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
File f = new File(filename);
Uri contentUri = Uri.fromFile(f);
mediaScanIntent.setData(contentUri);
mainActivity.sendBroadcast(mediaScanIntent);
Toast.makeText(mainActivity, "Saved successfully", Toast.LENGTH_SHORT).show();
}
});
}
}
}
// Show fps
++frameCount;
long timeCurrent = System.currentTimeMillis();
if (startTime == 0) startTime = timeCurrent;
long diff = timeCurrent - startTime;
if (diff >= 3000) {
final float fps = frameCount / (diff / 1000.0f);
frameCount = 0;
startTime = 0;
final TextView fpsTextView = mainActivity.fpsTextView();
mainActivity.fpsTextView().post(new Runnable() {
@Override
public void run() {
if (!mainActivity.isFinishing()) {
fpsTextView.setText(fps + " FPS");
}
}
});
}
}
private void processCameraImage() {
//clear previous features
for (int i = 0; i < MR.MAX_FACES; ++i) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j].x = 0;
trackingFeatures[i].features[j].y = 0;
}
}
cameraImageMode.mode = FSDK.FSDK_IMAGEMODE.FSDK_IMAGE_COLOR_32BIT;
int res = FSDK.LoadImageFromBuffer(cameraImage, pixelBuffer.array(), width, height, width * 4, cameraImageMode);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error loading camera image to FaceSDK: " + res);
return;
}
FSDK.MirrorImage(cameraImage, false);
int[] widthByReference = new int[1];
int[] heightByReference = new int[1];
FSDK.GetImageWidth(cameraImage, widthByReference);
FSDK.GetImageHeight(cameraImage, heightByReference);
int width = widthByReference[0];
int height = heightByReference[0];
int rotation = 0;
if (deviceOrientation == DEVICE_ORIENTATION_INVERSE_PORTRAIT) {
rotation = 2;
} else if (deviceOrientation == DEVICE_ORIENTATION_LANDSCAPE) {
rotation = 3;
} else if (deviceOrientation == DEVICE_ORIENTATION_INVERSE_LANDSCAPE) {
rotation = 1;
}
if (rotation > 0) {
FSDK.HImage rotated = new FSDK.HImage();
FSDK.CreateEmptyImage(rotated);
FSDK.RotateImage90(cameraImage, rotation, rotated);
FSDK.FeedFrame(tracker, 0, rotated, face_count, IDs);
FSDK.FreeImage(rotated);
} else {
FSDK.FeedFrame(tracker, 0, cameraImage, face_count, IDs);
}
for (int i = 0; i < (int) face_count[0]; ++i) {
FSDK.GetTrackerFacialFeatures(tracker, 0, IDs[i], trackingFeatures[i]);
if (rotation > 0) {
if (rotation == 1) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
int x = trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].x = trackingFeatures[i].features[j].y;
trackingFeatures[i].features[j].y = height - 1 - x;
}
} else if (rotation == 2) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j].x = width - 1 - trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].y = height - 1 - trackingFeatures[i].features[j].y;
}
} else {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
int x = trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].x = width - 1 - trackingFeatures[i].features[j].y;
trackingFeatures[i].features[j].y = x;
}
}
}
}
FSDK.FreeImage(cameraImage);
}
public void onSurfaceChanged(GL10 unused, int width, int height) { //call opengl functions only inside these functions!
Log.d(TAG, "surfaceChanged");
if (!isResizeCalled) {
isResizeCalled = true;
mainView.resizeForPerformance(width, height);
return;
}
GLES11.glViewport(0, 0, width, height);
Camera.Parameters param = camera.getParameters();
List<Camera.Size> psize = param.getSupportedPreviewSizes();
if (psize.size() > 0) {
int i = 0;
int optDistance = Integer.MAX_VALUE;
Log.d(TAG, "Choosing preview resolution closer to " + width + " x " + height);
double neededScale = height / (double) width;
for (int j = 0; j < psize.size(); ++j) {
double scale = psize.get(j).width / (double) psize.get(j).height;
int distance = (int) (10000 * Math.abs(scale - neededScale));
Log.d(TAG, "Choosing preview resolution, probing " + psize.get(j).width + " x " + psize.get(j).height + " distance: " + distance);
if (distance < optDistance) {
i = j;
optDistance = distance;
} else if (distance == optDistance) {
// try to avoid too low resolution
if ((psize.get(i).width < 300 || psize.get(i).height < 300)
&& psize.get(j).width > psize.get(i).width && psize.get(j).height > psize.get(i).height) {
i = j;
}
}
}
Log.d(TAG, "Using optimal preview size: " + psize.get(i).width + " x " + psize.get(i).height);
param.setPreviewSize(psize.get(i).width, psize.get(i).height);
// adjusting viewport to camera aspect ratio
int viewportHeight = (int) (width * (psize.get(i).width * 1.0f / psize.get(i).height));
GLES11.glViewport(0, 0, width, viewportHeight);
this.width = width;
this.height = viewportHeight;
pixelBuffer = ByteBuffer.allocateDirect(this.width * this.height * 4).order(ByteOrder.nativeOrder());
}
param.set("orientation", "landscape");
camera.setParameters(param);
camera.startPreview();
inPreview = true;
isResized = true;
}
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
private void initTex() {
textures = new int[3];
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
GLES11.glEnable(GL10.GL_TEXTURE_2D);
GLES11.glGenTextures(3, textures, 0);
GLES11.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_WRAP_S, GLES11.GL_CLAMP_TO_EDGE);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_WRAP_T, GLES11.GL_CLAMP_TO_EDGE);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_MIN_FILTER, GLES11.GL_NEAREST);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_MAG_FILTER, GLES11.GL_NEAREST);
}
private void deleteTex() {
GLES11.glDeleteTextures(3, textures, 0);
}
public synchronized void onFrameAvailable(SurfaceTexture st) {
updateSurfaceTexture = true;
mainView.requestRender();
}
public synchronized void snapshot() {
isTakingSnapshot.set(true);
}
}
Any type of help is appreciated ... thank you so much
Here is the preview that I see.
I have tried to make sense of this, but there is little information on what API level you're trying to target; that may also be the problem, or the problem may be the test device.
But let's rewrite the camera display code:
Method 1:
public void setCameraDisplayOrientation(android.hardware.Camera camera) {
Camera.Parameters parameters = camera.getParameters();
android.hardware.Camera.CameraInfo camInfo =
new android.hardware.Camera.CameraInfo();
android.hardware.Camera.getCameraInfo(getBackFacingCameraId(), camInfo);
Display display = ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
int rotation = display.getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int result;
if (camInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (camInfo.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else { // back-facing
result = (camInfo.orientation - degrees + 360) % 360;
}
camera.setDisplayOrientation(result);
}
Call the setCameraDisplayOrientation() method in the onSurfaceCreated callback as follows:
@Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
camera = Camera.open();
setCameraDisplayOrientation(camera); // matches the one-argument method defined above
}
Method 2:
Get the phone's orientation by using a sensor; this makes life much easier because the orientation will be handled properly (a sketch follows the link below).
You can find more about this within this link: Is Android's CameraInfo.orientation correctly documented? Incorrectly implemented?
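A minimal sketch of that sensor-based approach (my own illustration; renderer and the DEVICE_ORIENTATION_* constants are the ones from the question's MainRenderer, and the exact degree-to-constant mapping is an assumption you may need to adjust):
// In the Activity, e.g. in onCreate(); uses android.view.OrientationEventListener
OrientationEventListener orientationListener = new OrientationEventListener(this) {
    @Override
    public void onOrientationChanged(int degrees) {
        if (degrees == ORIENTATION_UNKNOWN) return; // device flat on a table, etc.
        if (degrees >= 315 || degrees < 45) {
            renderer.deviceOrientation = MainRenderer.DEVICE_ORIENTATION_PORTRAIT;
        } else if (degrees < 135) {
            renderer.deviceOrientation = MainRenderer.DEVICE_ORIENTATION_INVERSE_LANDSCAPE;
        } else if (degrees < 225) {
            renderer.deviceOrientation = MainRenderer.DEVICE_ORIENTATION_INVERSE_PORTRAIT;
        } else {
            renderer.deviceOrientation = MainRenderer.DEVICE_ORIENTATION_LANDSCAPE;
        }
    }
};
if (orientationListener.canDetectOrientation()) {
    orientationListener.enable(); // call disable() in onPause() to stop updates
}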
Method 3:
Edit your code like this:
public class MainRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
public static final String TAG = "MainRenderer";
public static final int DEVICE_ORIENTATION_PORTRAIT = 0;
public static final int DEVICE_ORIENTATION_INVERSE_PORTRAIT = 1;
public static final int DEVICE_ORIENTATION_LANDSCAPE = 2;
public static final int DEVICE_ORIENTATION_INVERSE_LANDSCAPE = 3;
private Camera.CameraInfo cameraInfo;
public volatile int deviceOrientation = DEVICE_ORIENTATION_PORTRAIT;
private FSDK.HTracker tracker;
private int[] textures;
private Camera camera;
private SurfaceTexture surfaceTexture;
private boolean updateSurfaceTexture = false;
private FSDK.FSDK_Features[] trackingFeatures;
private MR.MaskFeatures maskCoords;
private int[] isMaskTexture1Created = new int[]{0};
private int[] isMaskTexture2Created = new int[]{0};
private int width;
private int height;
private ByteBuffer pixelBuffer;
private FSDK.HImage cameraImage = new FSDK.HImage();
private FSDK.FSDK_IMAGEMODE cameraImageMode = new FSDK.FSDK_IMAGEMODE();
private FSDK.HImage snapshotImage = new FSDK.HImage();
private FSDK.FSDK_IMAGEMODE snapshotImageMode = new FSDK.FSDK_IMAGEMODE();
private MainView mainView;
private MainActivity mainActivity;
private volatile boolean isResizeCalled = false;
private volatile boolean isResized = false;
public long IDs[] = new long[MR.MAX_FACES];
public long face_count[] = new long[1];
private long frameCount = 0;
private long startTime = 0;
private AtomicBoolean isTakingSnapshot = new AtomicBoolean(false);
public static final int[][] MASKS = new int[][]{
{R.raw.lips_pink, R.drawable.lips_pink, R.drawable.lips_pink_normal, MR.SHIFT_TYPE_NO},
{R.raw.lips_purple, R.drawable.lips_purple, R.drawable.lips_purple_normal, MR.SHIFT_TYPE_NO},
{R.raw.lips_red, R.drawable.lips_red, R.drawable.lips_red_normal, MR.SHIFT_TYPE_NO},
};
private int mask = 0;
private int maskLoaded = 0;
private volatile boolean isMaskChanged = false;
private boolean inPreview = false;
public void changeMask(int i) {
mask += i;
isMaskChanged = true;
}
public MainRenderer(MainView view) {
tracker = Application.tracker;
mainView = view;
mainActivity = (MainActivity) mainView.getContext();
trackingFeatures = new FSDK.FSDK_Features[MR.MAX_FACES];
for (int i = 0; i < MR.MAX_FACES; ++i) {
trackingFeatures[i] = new FSDK.FSDK_Features();
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j] = new FSDK.TPoint();
}
}
maskCoords = new MR.MaskFeatures();
}
public void close() {
updateSurfaceTexture = false;
surfaceTexture.release();
camera.stopPreview();
camera.release();
camera = null;
deleteTex();
}
public void startCamera() {
if (inPreview) {
camera.stopPreview();
inPreview = false;
}
//NB: if you don't release the current camera before switching, your app will crash
camera.release();
//swap the id of the camera to be used
//Below I have tried to change the camera facing
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
cameraInfo.facing = Camera.CameraInfo.CAMERA_FACING_BACK;
} else {
cameraInfo.facing = Camera.CameraInfo.CAMERA_FACING_FRONT;
}
camera = Camera.open(cameraInfo.facing);
//Code snippet for this method from somewhere on android developers, i forget where
//setCameraDisplayOrientation(mainActivity, cameraInfo.facing, camera);
try {
//this step is critical or the preview on the new camera will not know where to render to
camera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
e.printStackTrace();
}
camera.startPreview();
inPreview = true;
}
public void onSurfaceCreated(GL10 unused, EGLConfig config) { //call opengl functions only inside these functions!
Log.d(TAG, "surfaceCreated");
isResizeCalled = false;
isResized = false;
initTex();
loadMask(mask);
surfaceTexture = new SurfaceTexture(textures[0]);
surfaceTexture.setOnFrameAvailableListener(this);
// Find the ID of the camera
int cameraId = 0;
boolean frontCameraFound = false;
cameraInfo = new Camera.CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
}
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
cameraId = i;
frontCameraFound = true;
}
}
if (frontCameraFound) {
camera = Camera.open(cameraId);
} else {
camera = Camera.open();
}
try {
camera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
e.printStackTrace();
}
GLES11.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); //background color
}
private byte[] readBytes(InputStream inputStream) throws IOException {
ByteArrayOutputStream byteBuffer = new ByteArrayOutputStream();
int bufferSize = 16384;
byte[] buffer = new byte[bufferSize];
int len;
while ((len = inputStream.read(buffer)) != -1) {
byteBuffer.write(buffer, 0, len);
}
return byteBuffer.toByteArray();
}
// must be called from the thread with OpenGL context!
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
public void loadMask(int maskNumber) {
GLES11.glDisable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
Log.d(TAG, "Loading mask...");
int[] mask = MASKS[maskNumber];
if (isMaskTexture1Created[0] > 0) {
GLES11.glDeleteTextures(1, textures, 1);
}
if (isMaskTexture2Created[0] > 0) {
GLES11.glDeleteTextures(1, textures, 2);
}
isMaskTexture1Created[0] = 0;
isMaskTexture2Created[0] = 0;
InputStream stream = mainView.getResources().openRawResource(mask[0]);
int res = MR.LoadMaskCoordsFromStream(stream, maskCoords);
try {
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (res != FSDK.FSDKE_OK) {
Log.e(TAG, "Error loading mask coords from stream: " + res);
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
return;
}
BitmapFactory.Options bitmapDecodingOptions = new BitmapFactory.Options();
bitmapDecodingOptions.inScaled = false; // to load original image without scaling
FSDK.HImage img1 = new FSDK.HImage();
if (mask[1] == -1) { // if no image
FSDK.CreateEmptyImage(img1);
} else {
stream = mainView.getResources().openRawResource(mask[1]);
byte[] data = null;
try {
data = readBytes(stream);
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (data != null) {
res = FSDK.LoadImageFromPngBufferWithAlpha(img1, data, data.length);
Log.d(TAG, "Load mask image of size " + data.length + " with result " + res);
int[] w = new int[]{0};
int[] h = new int[]{0};
FSDK.GetImageWidth(img1, w);
FSDK.GetImageHeight(img1, h);
Log.d(TAG, "Mask image size: " + w[0] + " x " + h[0]);
} else {
Log.w(TAG, "Error loading mask image, using empty image");
FSDK.CreateEmptyImage(img1);
}
}
FSDK.HImage img2 = new FSDK.HImage();
if (mask[2] == -1) { // if no normal image
FSDK.CreateEmptyImage(img2);
} else {
stream = mainView.getResources().openRawResource(mask[2]);
byte[] data = null;
try {
data = readBytes(stream);
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (data != null) {
res = FSDK.LoadImageFromPngBufferWithAlpha(img2, data, data.length);
Log.d(TAG, "Load mask normal image of size " + data.length + " with result " + res);
int[] w = new int[]{0};
int[] h = new int[]{0};
FSDK.GetImageWidth(img2, w);
FSDK.GetImageHeight(img2, h);
Log.d(TAG, "Mask normal image size: " + w[0] + " x " + h[0]);
} else {
Log.w(TAG, "Error loading mask normal image, using empty image");
FSDK.CreateEmptyImage(img2);
}
}
res = MR.LoadMask(img1, img2, textures[1], textures[2], isMaskTexture1Created, isMaskTexture2Created);
FSDK.FreeImage(img1);
FSDK.FreeImage(img2);
Log.d(TAG, "Mask loaded with result " + res + " texture1Created:" + isMaskTexture1Created[0] + " texture2Created:" + isMaskTexture2Created[0]);
Log.d(TAG, "Mask textures: " + textures[1] + " " + textures[2]);
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
}
public void onDrawFrame(GL10 unused) { //call opengl functions only inside these functions!
GLES11.glClear(GLES11.GL_COLOR_BUFFER_BIT);
if (!isResized) {
return;
}
synchronized (this) {
if (updateSurfaceTexture) {
surfaceTexture.updateTexImage();
updateSurfaceTexture = false;
}
}
if (isMaskChanged) {
maskLoaded = mask;
loadMask(mask);
isMaskChanged = false;
}
int rotation = 1;
// First, drawing without mask to get image buffer
int res = MR.DrawGLScene(textures[0], 0, trackingFeatures, rotation, MR.SHIFT_TYPE_NO, textures[1], textures[2], maskCoords, 0, 0, width, height);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error in the first MR.DrawGLScene call: " + res);
}
GLES11.glReadPixels(0, 0, width, height, GLES11.GL_RGBA, GLES11.GL_UNSIGNED_BYTE, pixelBuffer);
face_count[0] = 0;
processCameraImage();
// Second, drawing with mask atop of image
res = MR.DrawGLScene(textures[0], (int) face_count[0], trackingFeatures, rotation, MASKS[maskLoaded][3], textures[1], textures[2], maskCoords, isMaskTexture1Created[0], isMaskTexture2Created[0], width, height);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error in the second MR.DrawGLScene call: " + res);
}
// Save snapshot if needed
if (isTakingSnapshot.compareAndSet(true, false)) {
GLES11.glReadPixels(0, 0, width, height, GLES11.GL_RGBA, GLES11.GL_UNSIGNED_BYTE, pixelBuffer);
snapshotImageMode.mode = FSDK.FSDK_IMAGEMODE.FSDK_IMAGE_COLOR_32BIT;
res = FSDK.LoadImageFromBuffer(snapshotImage, pixelBuffer.array(), width, height, width * 4, snapshotImageMode);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error loading snapshot image to FaceSDK: " + res);
} else {
FSDK.MirrorImage(snapshotImage, false);
String galleryPath = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES).getAbsolutePath();
final String filename = galleryPath + "/MirrorRealityDemo" + System.currentTimeMillis() + ".png";
res = FSDK.SaveImageToFile(snapshotImage, filename);
Log.d(TAG, "saving snapshot to " + filename);
FSDK.FreeImage(snapshotImage);
if (FSDK.FSDKE_OK == res) {
mainActivity.runOnUiThread(new Runnable() {
@Override
public void run() {
Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
File f = new File(filename);
Uri contentUri = Uri.fromFile(f);
mediaScanIntent.setData(contentUri);
mainActivity.sendBroadcast(mediaScanIntent);
Toast.makeText(mainActivity, "Saved successfully", Toast.LENGTH_SHORT).show();
}
});
}
}
}
// Show fps
++frameCount;
long timeCurrent = System.currentTimeMillis();
if (startTime == 0) startTime = timeCurrent;
long diff = timeCurrent - startTime;
if (diff >= 3000) {
final float fps = frameCount / (diff / 1000.0f);
frameCount = 0;
startTime = 0;
final TextView fpsTextView = mainActivity.fpsTextView();
mainActivity.fpsTextView().post(new Runnable() {
@Override
public void run() {
if (!mainActivity.isFinishing()) {
fpsTextView.setText(fps + " FPS");
}
}
});
}
}
private void processCameraImage() {
//clear previous features
for (int i = 0; i < MR.MAX_FACES; ++i) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j].x = 0;
trackingFeatures[i].features[j].y = 0;
}
}
cameraImageMode.mode = FSDK.FSDK_IMAGEMODE.FSDK_IMAGE_COLOR_32BIT;
int res = FSDK.LoadImageFromBuffer(cameraImage, pixelBuffer.array(), width, height, width * 4, cameraImageMode);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error loading camera image to FaceSDK: " + res);
return;
}
FSDK.MirrorImage(cameraImage, false);
int[] widthByReference = new int[1];
int[] heightByReference = new int[1];
FSDK.GetImageWidth(cameraImage, widthByReference);
FSDK.GetImageHeight(cameraImage, heightByReference);
int width = widthByReference[0];
int height = heightByReference[0];
int rotation = 0;
if (deviceOrientation == DEVICE_ORIENTATION_INVERSE_PORTRAIT) {
rotation = 2;
} else if (deviceOrientation == DEVICE_ORIENTATION_LANDSCAPE) {
rotation = 3;
} else if (deviceOrientation == DEVICE_ORIENTATION_INVERSE_LANDSCAPE) {
rotation = 1;
}
if (rotation > 0) {
FSDK.HImage rotated = new FSDK.HImage();
FSDK.CreateEmptyImage(rotated);
FSDK.RotateImage90(cameraImage, rotation, rotated);
FSDK.FeedFrame(tracker, 0, rotated, face_count, IDs);
FSDK.FreeImage(rotated);
} else {
FSDK.FeedFrame(tracker, 0, cameraImage, face_count, IDs);
}
for (int i = 0; i < (int) face_count[0]; ++i) {
FSDK.GetTrackerFacialFeatures(tracker, 0, IDs[i], trackingFeatures[i]);
if (rotation > 0) {
if (rotation == 1) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
int x = trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].x = trackingFeatures[i].features[j].y;
trackingFeatures[i].features[j].y = height - 1 - x;
}
} else if (rotation == 2) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j].x = width - 1 - trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].y = height - 1 - trackingFeatures[i].features[j].y;
}
} else {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
int x = trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].x = width - 1 - trackingFeatures[i].features[j].y;
trackingFeatures[i].features[j].y = x;
}
}
}
}
FSDK.FreeImage(cameraImage);
}
public void onSurfaceChanged(GL10 unused, int width, int height) { //call opengl functions only inside these functions!
Log.d(TAG, "surfaceChanged");
if (!isResizeCalled) {
isResizeCalled = true;
mainView.resizeForPerformance(width, height);
return;
}
GLES11.glViewport(0, 0, width, height);
Camera.Parameters param = camera.getParameters();
List<Camera.Size> psize = param.getSupportedPreviewSizes();
if (psize.size() > 0) {
int i = 0;
int optDistance = Integer.MAX_VALUE;
Log.d(TAG, "Choosing preview resolution closer to " + width + " x " + height);
double neededScale = height / (double) width;
for (int j = 0; j < psize.size(); ++j) {
double scale = psize.get(j).width / (double) psize.get(j).height;
int distance = (int) (10000 * Math.abs(scale - neededScale));
Log.d(TAG, "Choosing preview resolution, probing " + psize.get(j).width + " x " + psize.get(j).height + " distance: " + distance);
if (distance < optDistance) {
i = j;
optDistance = distance;
} else if (distance == optDistance) {
// try to avoid too low resolution
if ((psize.get(i).width < 300 || psize.get(i).height < 300)
&& psize.get(j).width > psize.get(i).width && psize.get(j).height > psize.get(i).height) {
i = j;
}
}
}
Log.d(TAG, "Using optimal preview size: " + psize.get(i).width + " x " + psize.get(i).height);
param.setPreviewSize(psize.get(i).width, psize.get(i).height);
// adjusting viewport to camera aspect ratio
int viewportHeight = (int) (width * (psize.get(i).width * 1.0f / psize.get(i).height));
GLES11.glViewport(0, 0, width, viewportHeight);
this.width = width;
this.height = viewportHeight;
pixelBuffer = ByteBuffer.allocateDirect(this.width * this.height * 4).order(ByteOrder.nativeOrder());
}
param.set("orientation", "landscape");
camera.setParameters(param);
camera.startPreview();
inPreview = true;
isResized = true;
}
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
private void initTex() {
textures = new int[3];
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
GLES11.glEnable(GL10.GL_TEXTURE_2D);
GLES11.glGenTextures(3, textures, 0);
GLES11.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_WRAP_S, GLES11.GL_CLAMP_TO_EDGE);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_WRAP_T, GLES11.GL_CLAMP_TO_EDGE);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_MIN_FILTER, GLES11.GL_NEAREST);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_MAG_FILTER, GLES11.GL_NEAREST);
}
private void deleteTex() {
GLES11.glDeleteTextures(3, textures, 0);
}
public synchronized void onFrameAvailable(SurfaceTexture st) {
updateSurfaceTexture = true;
mainView.requestRender();
}
public synchronized void snapshot() {
isTakingSnapshot.set(true);
}
}
This is sometimes due to Android not knowing which orientation the previous activity came from. Try setting the orientation of the activity with
android:screenOrientation="nosensor"
or try the following:
Add the orientation attribute to the activity in the manifest:
android:screenOrientation=["unspecified" | "behind" |
"landscape" | "portrait" |
"reverseLandscape" | "reversePortrait" |
"sensorLandscape" | "sensorPortrait" |
"userLandscape" | "userPortrait" |
"sensor" | "fullSensor" | "nosensor" |
"user" | "fullUser" | "locked"]
So in your case it would be:
<activity android:name=".yourCameractivity"
....
android:screenOrientation="sensorPortrait"/>
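If you prefer to apply the same lock at runtime rather than in the manifest, here is a minimal sketch (YourCameraActivity is a placeholder for your own activity class):
import android.app.Activity;
import android.content.pm.ActivityInfo;
import android.os.Bundle;

public class YourCameraActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Equivalent to android:screenOrientation="nosensor" in the manifest:
        // the sensor is ignored, so device rotation cannot recreate the
        // activity while the camera preview is running.
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_NOSENSOR);
    }
}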

How to use Sony SmartEyeGlass for Scanning QR Codes?

I am trying to develop an app for SmartEyeGlass, using the ZBar library, that scans QR codes. The app is based on the sample camera extension, but it doesn't work and I can't see what the problem is. Here is my code:
private void cameraEventOperation(CameraEvent event) {
if (event.getErrorStatus() != 0) {
Log.d(Constants.LOG_TAG, "error code = " + event.getErrorStatus());
return;
}
if(event.getIndex() != 0){
Log.d(Constants.LOG_TAG, "not oparate this event");
return;
}
Bitmap bitmap = null;
byte[] data = null;
if ((event.getData() != null) && ((event.getData().length) > 0)) {
data = event.getData();
bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
data1= data;
/* Instance barcode scanner */
scanner = new ImageScanner();
scanner.setConfig(Symbol.QRCODE, Config.X_DENSITY, 2);
scanner.setConfig(Symbol.QRCODE, Config.Y_DENSITY, 2);
Image barcode = new Image(width, height, "Y800");
barcode.setData(data1);
QRCodeStatus= scanner.scanImage(barcode);
if (QRCodeStatus != 0) {
SymbolSet syms = scanner.getResults();
for (Symbol kasa : syms) {
strValueOfScannedQR = String.valueOf(kasa.getData());
intValueOfScannedQR = Integer.valueOf(kasa.getData());
}
}
}
if (bitmap == null) {
Log.d(Constants.LOG_TAG, "bitmap == null");
return;
}
if (saveToSdcard) {
String fileName = saveFilePrefix + String.format("%04d", saveFileIndex) + ".jpg";
new SavePhotoTask(saveFolder,fileName).execute(data);
saveFileIndex++;
}
if (recordingMode == SmartEyeglassControl.Intents.CAMERA_MODE_STILL) {
Bitmap basebitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
basebitmap.setDensity(DisplayMetrics.DENSITY_DEFAULT);
Canvas canvas = new Canvas(basebitmap);
Rect rect = new Rect(0, 0, width, height);
Paint paint = new Paint();
paint.setStyle(Paint.Style.FILL);
canvas.drawBitmap(bitmap, rect, rect, paint);
utils.showBitmap(basebitmap);
return;
}
Log.d(Constants.LOG_TAG, "Camera frame was received : #" + saveFileIndex);
updateDisplay();
}
private void updateDisplay()
{
Bitmap displayBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
displayBitmap.setDensity(DisplayMetrics.DENSITY_DEFAULT);
Canvas canvas = new Canvas(displayBitmap);
Paint paint = new Paint();
paint.setStyle(Paint.Style.FILL);
paint.setTextSize(16);
paint.setColor(Color.WHITE);
// Update layout according to the camera mode
switch (recordingMode) {
case SmartEyeglassControl.Intents.CAMERA_MODE_STILL:
canvas.drawText("Tap to capture : STILL", pointX, pointY, paint);
break;
case SmartEyeglassControl.Intents.CAMERA_MODE_STILL_TO_FILE:
canvas.drawText("Tap to capture : STILL TO FILE", pointX, pointY, paint);
break;
case SmartEyeglassControl.Intents.CAMERA_MODE_JPG_STREAM_HIGH_RATE:
if (cameraStarted) {
canvas.drawText("Frame Number: " + Integer.toString(saveFileIndex), pointBaseX, (pointY * 1), paint);
canvas.drawText("Value of QR: " + strValueOfScannedQR, pointBaseX, (pointY * 2), paint);
canvas.drawText("Data1=" + data1, pointBaseX, (pointY * 3), paint);
canvas.drawText("QR status " + QRCodeStatus, pointBaseX, (pointY * 4), paint);
}
else {
canvas.drawText("Tap to start JPEG Stream.", pointBaseX, pointY, paint);
}
break;
case SmartEyeglassControl.Intents.CAMERA_MODE_JPG_STREAM_LOW_RATE:
if (cameraStarted) {
canvas.drawText("JPEG Streaming...", pointBaseX, pointY, paint);
canvas.drawText("Tap to stop.", pointBaseX, (pointY * 2), paint);
canvas.drawText("Frame Number: " + Integer.toString(saveFileIndex), pointBaseX, (pointY * 3), paint);
} else {
canvas.drawText("Tap to start JPEG Stream.", pointBaseX, pointY, paint);
}
break;
default:
canvas.drawText("wrong recording type.", pointBaseX, pointY, paint);
}
utils.showBitmap(displayBitmap);
}
}
The problem seems to be the data1 you are passing to the barcode.setData method: it holds the compressed JPEG frame (that is why you decode it with BitmapFactory), not the raw pixel data that an Image declared as "Y800" expects. You should probably derive the pixel data from the decoded bitmap instead.
This question is more closely related to the QR code scanning library you are using, so please also tag it with the relevant tag for that library to get support for it. Also check the library's API reference for what the parameter of setData is expected to contain.
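For illustration, here is a minimal sketch of that conversion, assuming the standard net.sourceforge.zbar bindings; the toY800 helper is introduced here and is not part of the library:
// Convert the decoded ARGB bitmap into the raw 8-bit luminance
// buffer ("Y800") that the ZBar Image was declared with.
private static byte[] toY800(Bitmap bitmap) {
    int w = bitmap.getWidth();
    int h = bitmap.getHeight();
    int[] argb = new int[w * h];
    bitmap.getPixels(argb, 0, w, 0, 0, w, h);
    byte[] luma = new byte[w * h];
    for (int i = 0; i < argb.length; i++) {
        int r = (argb[i] >> 16) & 0xFF;
        int g = (argb[i] >> 8) & 0xFF;
        int b = argb[i] & 0xFF;
        // integer approximation of the BT.601 luma transform
        luma[i] = (byte) ((77 * r + 150 * g + 29 * b) >> 8);
    }
    return luma;
}
It would then be used inside cameraEventOperation after the bitmap has been decoded, sizing the Image from the bitmap itself rather than from the width/height fields:
Image barcode = new Image(bitmap.getWidth(), bitmap.getHeight(), "Y800");
barcode.setData(toY800(bitmap));
QRCodeStatus = scanner.scanImage(barcode);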

Crop Face Using Image Detection in Android

I am trying to detect faces and crop the face region as a rectangular image. I have the face detection part working, but I haven't found any help on how to crop the face. Please have a look at my code:
public class FaceDetect extends Activity {
private MyImageView mIV;
private Bitmap mFaceBitmap;
private int mFaceWidth = 200;
private int mFaceHeight = 200;
int cropXinit = 0;
int cropYint = 0;
int cropXend = 0;
int cropYend = 0;
Bitmap cropedBitmap;
Bitmap b;
private static final int MAX_FACES = 1;
private static String TAG = "FaceDetect";
private static boolean DEBUG = false;
protected static final int GUIUPDATE_SETFACE = 999;
protected Handler mHandler = new Handler() {
// @Override
public void handleMessage(Message msg) {
mIV.invalidate();
super.handleMessage(msg);
}
};
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mIV = new MyImageView(this);
setContentView(mIV, new LayoutParams(LayoutParams.WRAP_CONTENT,
LayoutParams.WRAP_CONTENT));
// load the photo
b = ChooseActivity.bitmap;
mFaceBitmap = b.copy(Bitmap.Config.RGB_565, true);
b.recycle();
mFaceWidth = mFaceBitmap.getWidth();
mFaceHeight = mFaceBitmap.getHeight();
mIV.setImageBitmap(mFaceBitmap);
mIV.invalidate();
setFace();
}
public void setFace() {
FaceDetector fd;
FaceDetector.Face[] faces = new FaceDetector.Face[MAX_FACES];
PointF eyescenter = new PointF();
float eyesdist = 0.0f;
int[] fpx = null;
int[] fpy = null;
int count = 0;
try {
fd = new FaceDetector(mFaceWidth, mFaceHeight, MAX_FACES);
count = fd.findFaces(mFaceBitmap, faces);
} catch (Exception e) {
Log.e(TAG, "setFace(): " + e.toString());
return;
}
// check if we detect any faces
if (count > 0) {
fpx = new int[count * 2];
fpy = new int[count * 2];
for (int i = 0; i < count; i++) {
try {
faces[i].getMidPoint(eyescenter);
eyesdist = faces[i].eyesDistance();
// set up left eye location
fpx[2 * i] = (int) (eyescenter.x - eyesdist / 2);
fpy[2 * i] = (int) eyescenter.y;
// set up right eye location
fpx[2 * i + 1] = (int) (eyescenter.x + eyesdist / 2);
fpy[2 * i + 1] = (int) eyescenter.y;
if (DEBUG)
Log.e(TAG,
"setFace(): face "
+ i
+ ": confidence = "
+ faces[i].confidence()
+ ", eyes distance = "
+ faces[i].eyesDistance()
+ ", pose = ("
+ faces[i]
.pose(FaceDetector.Face.EULER_X)
+ ","
+ faces[i]
.pose(FaceDetector.Face.EULER_Y)
+ ","
+ faces[i]
.pose(FaceDetector.Face.EULER_Z)
+ ")" + ", eyes midpoint = ("
+ eyescenter.x + "," + eyescenter.y
+ ")");
} catch (Exception e) {
Log.e(TAG, "setFace(): face " + i + ": " + e.toString());
}
}
}
mIV.setDisplayPoints(fpx, fpy, count * 2, 1);
// if(eyescenter.x -eyesdist >= 0)
// {
// cropXinit = (int) (eyescenter.x -eyesdist) ;
// }
// else
// {
// cropXinit = 0;
// }
// if(eyescenter.x +eyesdist <= mFaceWidth)
// {
// cropXend = (int) (eyescenter.x +eyesdist) ;
// }
// else
// {
// cropXend = mFaceWidth;
// }
// if(eyescenter.y +eyesdist*2 <= mFaceHeight)
// {
// cropYend = (int) (eyescenter.y +eyesdist*2) ;
// }
// else
// {
// cropYend = mFaceHeight;
// }
// if(eyescenter.y -eyesdist >= 0)
// {
// cropYint = (int) (eyescenter.y -eyesdist) ;
// }
// else
// {
// cropYint = 0;
// }
// mIV.setImageBitmap(Bitmap.createBitmap(mFaceBitmap,cropXinit,cropYint,cropXend,cropYend));
}
}
createBitmap(Bitmap source, int x, int y, int width, int height) takes a start X, a start Y, a width, and a height, not an end X and end Y. If you change your commented-out code to the following, it should work (note that your variable is named cropYint, not cropYinit):
mIV.setImageBitmap(Bitmap.createBitmap(mFaceBitmap, cropXinit, cropYint, cropXend - cropXinit, cropYend - cropYint));
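For reference, the clamping logic from the commented-out block can be written more compactly with Math.max/Math.min. A sketch using the question's own variables, meant to run at the end of setFace() for the single-face case:
// Clamp the crop rectangle around the eyes midpoint to the bitmap bounds.
cropXinit = Math.max(0, (int) (eyescenter.x - eyesdist));
cropXend = Math.min(mFaceWidth, (int) (eyescenter.x + eyesdist));
cropYint = Math.max(0, (int) (eyescenter.y - eyesdist));
cropYend = Math.min(mFaceHeight, (int) (eyescenter.y + eyesdist * 2));
// createBitmap takes (x, y, width, height), so convert the end
// coordinates into a width and a height.
cropedBitmap = Bitmap.createBitmap(mFaceBitmap,
        cropXinit, cropYint,
        cropXend - cropXinit, cropYend - cropYint);
mIV.setImageBitmap(cropedBitmap);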
