BitmapFactory.decodeFile returns null after cropping in Android - Java

This line of code always returns null after cropping:
this.bmp = BitmapFactory.decodeFile(this.imageFileUri.getPath());
This is my crop method:
private void performCrop() {
    this.imageFileUri = Uri.fromFile(this.file);
    Intent var1 = new Intent("com.android.camera.action.CROP");
    var1.setDataAndType(this.imageFileUri, "image/*");
    System.out.println(this.imageFileUri.getPath());
    var1.putExtra("crop", "true");
    var1.putExtra("scale", true);
    var1.putExtra("return-data", true);
    // var1.putExtra("output", this.imageFileUri);
    var1.putExtra(MediaStore.EXTRA_OUTPUT, this.imageFileUri);
    this.startActivityForResult(var1, 1);
}
I have also tried using a custom method to return the Bitmap, but it still returns null. This is the method:
public static Bitmap decodeFile(String path) {
    try {
        if (path == null) {
            return null;
        }
        // decode image size only
        BitmapFactory.Options o = new BitmapFactory.Options();
        o.inJustDecodeBounds = true;
        BitmapFactory.decodeFile(path, o);
        // Find the correct scale value. It should be a power of 2.
        final int REQUIRED_SIZE = 70;
        int width_tmp = o.outWidth, height_tmp = o.outHeight;
        int scale = 1;
        while (width_tmp / 2 >= REQUIRED_SIZE && height_tmp / 2 >= REQUIRED_SIZE) {
            width_tmp /= 2;
            height_tmp /= 2;
            scale *= 2;
        }
        // decode the file with the computed inSampleSize
        BitmapFactory.Options o2 = new BitmapFactory.Options();
        o2.inSampleSize = scale;
        Bitmap bm = BitmapFactory.decodeFile(path, o2);
        if (bm == null) {
            return null;
        }
        // rotate according to the EXIF orientation tag
        ExifInterface exif = new ExifInterface(path);
        int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, 1);
        Log.e("ExifInterface", "rotation = " + orientation);
        Matrix m = new Matrix();
        if (orientation == ExifInterface.ORIENTATION_ROTATE_180) {
            m.postRotate(180);
        } else if (orientation == ExifInterface.ORIENTATION_ROTATE_90) {
            m.postRotate(90);
        } else if (orientation == ExifInterface.ORIENTATION_ROTATE_270) {
            m.postRotate(270);
        } else {
            return bm;
        }
        return Bitmap.createBitmap(bm, 0, 0, bm.getWidth(), bm.getHeight(), m, true);
    } catch (Exception e) {
        return null;
    }
}
And calling the above method this way:
this.bmp = MyClass.decodeFile(this.imageFileUri.getPath());
Kindly assist!
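For reference, a minimal sketch of the onActivityResult side, assuming the crop activity honors MediaStore.EXTRA_OUTPUT and request code 1 used above. Note that "return-data" and EXTRA_OUTPUT generally should not be combined: many crop implementations skip writing the output file when asked to return the data inline, which could explain decodeFile returning null.
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == 1 && resultCode == RESULT_OK) {
        // With EXTRA_OUTPUT set, the cropped image is written to imageFileUri;
        // decode it from there rather than from the returned Intent.
        this.bmp = BitmapFactory.decodeFile(this.imageFileUri.getPath());
        if (this.bmp == null) {
            Log.e("Crop", "decodeFile returned null for " + this.imageFileUri.getPath());
        }
    }
}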

Related

Compressing multiple images before uploading them to firebase android

I am working on an application where users have to upload multiple images. The problem I am facing is their size: since users can upload several images at once, uploading through the application takes a long time because of the large file sizes, which users do not like, and it also makes my application and database heavy. Can you please guide me on how I can compress multiple images before uploading them to Firebase?
CODE TO COMPRESS IMAGES
galleryResultLauncher = registerForActivityResult(new ActivityResultContracts.StartActivityForResult(), new ActivityResultCallback<ActivityResult>() {
    @Override
    public void onActivityResult(ActivityResult result) {
        if (result.getResultCode() == RESULT_OK && null != result.getData()) {
            if (result.getData().getClipData() != null) {
                // this part is to select multiple images
                int countofImages = result.getData().getClipData().getItemCount();
                for (int i = 0; i < countofImages; i++) {
                    if (uri.size() < 11) {
                        Uri imageuri = result.getData().getClipData().getItemAt(i).getUri();
                        imageList.add(new CustomModel(getfilename(imageuri), imageuri));
                    } else {
                        Toast.makeText(getContext(), "Can't select more than 11 images", Toast.LENGTH_SHORT).show();
                    }
                }
                // then notify the adapter
                adapter.notifyDataSetChanged();
                rentSell3Binding.totalphotos.setText("Photos (" + imageList.size() + ")");
            } else {
                // this part is to select a single image
                if (uri.size() < 11) {
                    Uri imageuri = result.getData().getData();
                    // and add it to the ArrayList
                    imageList.add(new CustomModel(getfilename(imageuri), imageuri));
                } else {
                    Toast.makeText(getContext(), "Can't select more than 11 images", Toast.LENGTH_SHORT).show();
                }
                // notify the adapter
                adapter.notifyDataSetChanged();
                rentSell3Binding.totalphotos.setText("Photos (" + imageList.size() + ")");
            }
        } else {
            Toast.makeText(getContext(), "You haven't selected any images", Toast.LENGTH_SHORT).show();
        }
    }
});
MODEL CLASS
public class CustomModel {
    String imageName;
    Uri imageURI;

    public CustomModel(String imageName, Uri imageURI) {
        this.imageName = imageName;
        this.imageURI = imageURI;
    }

    public String getImageName() {
        return imageName;
    }

    public void setImageName(String imageName) {
        this.imageName = imageName;
    }

    public Uri getImageURI() {
        return imageURI;
    }

    public void setImageURI(Uri imageURI) {
        this.imageURI = imageURI;
    }
}
It throws a NullPointerException on this line:
File file = new File(SiliCompressor.with(getContext()).compress(FileUtils.getPath(getContext(), imageuri), new File(getContext().getCacheDir(), "temp")));
Yes, you can compress your image file with the following function:
@Throws(IOException::class)
fun File.compressImage(
    reqWidth: Float,
    reqHeight: Float,
    compressFormat: Bitmap.CompressFormat,
    quality: Int,
    destinationPath: String
): File {
    var fileOutputStream: FileOutputStream? = null
    val file = File(destinationPath).parentFile ?: return File(destinationPath)
    if (!file.exists()) {
        file.mkdirs()
    }
    try {
        fileOutputStream = FileOutputStream(destinationPath)
        // write the compressed bitmap at the destination specified by destinationPath.
        decodeSampledBitmapFromFile(this, reqWidth, reqHeight)?.compress(
            compressFormat,
            quality,
            fileOutputStream
        )
    } finally {
        if (fileOutputStream != null) {
            fileOutputStream.flush()
            fileOutputStream.close()
        }
    }
    return File(destinationPath)
}
@Throws(IOException::class)
private fun decodeSampledBitmapFromFile(
    imageFile: File,
    reqWidth: Float,
    reqHeight: Float
): Bitmap? {
    // First decode with inJustDecodeBounds=true to check dimensions
    val options = BitmapFactory.Options()
    options.inJustDecodeBounds = true
    var bmp: Bitmap? = BitmapFactory.decodeFile(imageFile.absolutePath, options)
    var actualHeight = options.outHeight
    var actualWidth = options.outWidth
    var imgRatio = actualWidth.toFloat() / actualHeight.toFloat()
    val maxRatio = reqWidth / reqHeight
    if (actualHeight > reqHeight || actualWidth > reqWidth) {
        when {
            // If height is greater
            imgRatio < maxRatio -> {
                imgRatio = reqHeight / actualHeight
                actualWidth = (imgRatio * actualWidth).toInt()
                actualHeight = reqHeight.toInt()
            }
            // If width is greater
            imgRatio > maxRatio -> {
                imgRatio = reqWidth / actualWidth
                actualHeight = (imgRatio * actualHeight).toInt()
                actualWidth = reqWidth.toInt()
            }
            else -> {
                actualHeight = reqHeight.toInt()
                actualWidth = reqWidth.toInt()
            }
        }
    }
    // Calculate inSampleSize
    options.inSampleSize = calculateInSampleSize(options, actualWidth, actualHeight)
    options.inJustDecodeBounds = false
    if (bmp != null && canUseForInBitmap(bmp, options)) {
        // inBitmap only works with mutable bitmaps, so force the decoder to
        // return mutable bitmaps.
        options.inMutable = true
        options.inBitmap = bmp
    }
    options.inTempStorage = ByteArray(16 * 1024)
    try {
        bmp = BitmapFactory.decodeFile(imageFile.absolutePath, options)
    } catch (exception: OutOfMemoryError) {
        exception.printStackTrace()
    }
    var scaledBitmap: Bitmap? = null
    try {
        scaledBitmap = Bitmap.createBitmap(actualWidth, actualHeight, Bitmap.Config.ARGB_8888)
    } catch (exception: OutOfMemoryError) {
        exception.printStackTrace()
    }
    val ratioX = actualWidth / options.outWidth.toFloat()
    val ratioY = actualHeight / options.outHeight.toFloat()
    val middleX = actualWidth / 2.0f
    val middleY = actualHeight / 2.0f
    val scaleMatrix = Matrix()
    scaleMatrix.setScale(ratioX, ratioY, middleX, middleY)
    val canvas = Canvas(scaledBitmap!!)
    canvas.setMatrix(scaleMatrix)
    canvas.drawBitmap(
        bmp!!, middleX - bmp.width / 2,
        middleY - bmp.height / 2, Paint(Paint.FILTER_BITMAP_FLAG)
    )
    bmp.recycle()
    val exif: ExifInterface
    try {
        exif = ExifInterface(imageFile.absolutePath)
        val orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, 0)
        val matrix = Matrix()
        if (orientation == 6) {
            matrix.postRotate(90f)
        } else if (orientation == 3) {
            matrix.postRotate(180f)
        } else if (orientation == 8) {
            matrix.postRotate(270f)
        }
        scaledBitmap = Bitmap.createBitmap(
            scaledBitmap, 0, 0, scaledBitmap.width,
            scaledBitmap.height, matrix, true
        )
    } catch (e: IOException) {
        e.printStackTrace()
    }
    return scaledBitmap
}
private fun calculateInSampleSize(
    options: BitmapFactory.Options,
    reqWidth: Int,
    reqHeight: Int
): Int {
    // Raw height and width of image
    val height = options.outHeight
    val width = options.outWidth
    var inSampleSize = 1
    if (height > reqHeight || width > reqWidth) {
        inSampleSize *= 2
        val halfHeight = height / 2
        val halfWidth = width / 2
        // Calculate the largest inSampleSize value that is a power of 2 and keeps both
        // height and width larger than the requested height and width.
        while (halfHeight / inSampleSize >= reqHeight && halfWidth / inSampleSize >= reqWidth) {
            inSampleSize *= 2
        }
    }
    return inSampleSize
}
/**
 * Ref: https://developer.android.com/topic/performance/graphics/manage-memory#kotlin
 */
private fun canUseForInBitmap(candidate: Bitmap, targetOptions: BitmapFactory.Options): Boolean {
    return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
        // From Android 4.4 (KitKat) onward we can re-use if the byte size of
        // the new bitmap is smaller than the reusable bitmap candidate
        // allocation byte count.
        val width: Int = targetOptions.outWidth / targetOptions.inSampleSize
        val height: Int = targetOptions.outHeight / targetOptions.inSampleSize
        val byteCount: Int = width * height * getBytesPerPixel(candidate.config)
        byteCount <= candidate.allocationByteCount
    } else {
        // On earlier versions, the dimensions must match exactly and the inSampleSize must be 1
        candidate.width == targetOptions.outWidth &&
                candidate.height == targetOptions.outHeight &&
                targetOptions.inSampleSize == 1
    }
}

/**
 * A helper function to return the byte usage per pixel of a bitmap based on its configuration.
 */
private fun getBytesPerPixel(config: Bitmap.Config): Int {
    return when (config) {
        Bitmap.Config.ARGB_8888 -> 4
        Bitmap.Config.RGB_565, Bitmap.Config.ARGB_4444 -> 2
        Bitmap.Config.ALPHA_8 -> 1
        else -> 1
    }
}
@Throws(IOException::class)
fun Context.createImageFile(): File {
    // Create an image file name
    val storageDir: File? = getExternalFilesDir(Environment.DIRECTORY_PICTURES)
    return File.createTempFile(
        "JPEG_${System.currentTimeMillis()}_", /* prefix */
        ".jpg", /* suffix */
        storageDir /* directory */
    )
}
Then simply call the following to create the compressed image file; here file is your image file:
file.compressImage(
    1024f,
    1024f,
    Bitmap.CompressFormat.JPEG,
    70,
    context.createImageFile().path
)
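Since the compression decodes and re-encodes the bitmap on disk, it is worth running it off the main thread. A minimal sketch using kotlinx.coroutines; the compressAll helper and its parameters are my own, and it assumes a CoroutineScope you already have, such as viewModelScope:
fun compressAll(context: Context, files: List<File>, scope: CoroutineScope) {
    scope.launch {
        val compressed = withContext(Dispatchers.IO) {
            files.map { original ->
                // reuse the compressImage extension defined above
                original.compressImage(
                    1024f,
                    1024f,
                    Bitmap.CompressFormat.JPEG,
                    70,
                    context.createImageFile().path
                )
            }
        }
        // back on the main thread: hand `compressed` over to your Firebase upload code
    }
}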

The orientation of the rear camera is inverse portrait

I'm trying to switch the camera with a button, and I'm also successful in that, but the problem is that the preview of the rear camera is in inverse portrait. I've tried setDisplayOrientation, but no changes; maybe I've put it in the wrong line. This is the code, and with the button I call the startCamera method:
public class MainRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
public static final String TAG = "MainRenderer";
public static final int DEVICE_ORIENTATION_PORTRAIT = 0;
public static final int DEVICE_ORIENTATION_INVERSE_PORTRAIT = 1;
public static final int DEVICE_ORIENTATION_LANDSCAPE = 2;
public static final int DEVICE_ORIENTATION_INVERSE_LANDSCAPE = 3;
private Camera.CameraInfo cameraInfo;
public volatile int deviceOrientation = DEVICE_ORIENTATION_PORTRAIT;
private FSDK.HTracker tracker;
private int[] textures;
private Camera camera;
private SurfaceTexture surfaceTexture;
private boolean updateSurfaceTexture = false;
private FSDK.FSDK_Features[] trackingFeatures;
private MR.MaskFeatures maskCoords;
private int[] isMaskTexture1Created = new int[]{0};
private int[] isMaskTexture2Created = new int[]{0};
private int width;
private int height;
private ByteBuffer pixelBuffer;
private FSDK.HImage cameraImage = new FSDK.HImage();
private FSDK.FSDK_IMAGEMODE cameraImageMode = new FSDK.FSDK_IMAGEMODE();
private FSDK.HImage snapshotImage = new FSDK.HImage();
private FSDK.FSDK_IMAGEMODE snapshotImageMode = new FSDK.FSDK_IMAGEMODE();
private MainView mainView;
private MainActivity mainActivity;
private volatile boolean isResizeCalled = false;
private volatile boolean isResized = false;
public long IDs[] = new long[MR.MAX_FACES];
public long face_count[] = new long[1];
private long frameCount = 0;
private long startTime = 0;
private AtomicBoolean isTakingSnapshot = new AtomicBoolean(false);
public static final int[][] MASKS = new int[][]{
{R.raw.lips_pink, R.drawable.lips_pink, R.drawable.lips_pink_normal, MR.SHIFT_TYPE_NO},
{R.raw.lips_purple, R.drawable.lips_purple, R.drawable.lips_purple_normal, MR.SHIFT_TYPE_NO},
{R.raw.lips_red, R.drawable.lips_red, R.drawable.lips_red_normal, MR.SHIFT_TYPE_NO},
};
private int mask = 0;
private int maskLoaded = 0;
private volatile boolean isMaskChanged = false;
private boolean inPreview = false;
public void changeMask(int i) {
mask += i;
isMaskChanged = true;
}
public MainRenderer(MainView view) {
tracker = Application.tracker;
mainView = view;
mainActivity = (MainActivity) mainView.getContext();
trackingFeatures = new FSDK.FSDK_Features[MR.MAX_FACES];
for (int i = 0; i < MR.MAX_FACES; ++i) {
trackingFeatures[i] = new FSDK.FSDK_Features();
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j] = new FSDK.TPoint();
}
}
maskCoords = new MR.MaskFeatures();
}
public void close() {
updateSurfaceTexture = false;
surfaceTexture.release();
camera.stopPreview();
camera.release();
camera = null;
deleteTex();
}
public void startCamera() {
if (inPreview) {
camera.stopPreview();
inPreview = false;
}
//NB: if you don't release the current camera before switching, your app will crash
camera.release();
//swap the id of the camera to be used
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
cameraInfo.facing = Camera.CameraInfo.CAMERA_FACING_FRONT;
} else {
cameraInfo.facing = Camera.CameraInfo.CAMERA_FACING_BACK;
}
camera = Camera.open(cameraInfo.facing);
//Code snippet for this method from somewhere on android developers, i forget where
//setCameraDisplayOrientation(mainActivity, cameraInfo.facing, camera);
try {
//this step is critical or the preview on the new camera will not know where to render to
camera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
e.printStackTrace();
}
camera.startPreview();
inPreview = true;
}
public void onSurfaceCreated(GL10 unused, EGLConfig config) { //call opengl functions only inside these functions!
Log.d(TAG, "surfaceCreated");
isResizeCalled = false;
isResized = false;
initTex();
loadMask(mask);
surfaceTexture = new SurfaceTexture(textures[0]);
surfaceTexture.setOnFrameAvailableListener(this);
// Find the ID of the camera
int cameraId = 0;
boolean frontCameraFound = false;
cameraInfo = new Camera.CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
}
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
cameraId = i;
frontCameraFound = true;
}
}
if (frontCameraFound) {
camera = Camera.open(cameraId);
} else {
camera = Camera.open();
}
try {
camera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
e.printStackTrace();
}
GLES11.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); //background color
}
private byte[] readBytes(InputStream inputStream) throws IOException {
ByteArrayOutputStream byteBuffer = new ByteArrayOutputStream();
int bufferSize = 16384;
byte[] buffer = new byte[bufferSize];
int len;
while ((len = inputStream.read(buffer)) != -1) {
byteBuffer.write(buffer, 0, len);
}
return byteBuffer.toByteArray();
}
// must be called from the thread with OpenGL context!
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
public void loadMask(int maskNumber) {
GLES11.glDisable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
Log.d(TAG, "Loading mask...");
int[] mask = MASKS[maskNumber];
if (isMaskTexture1Created[0] > 0) {
GLES11.glDeleteTextures(1, textures, 1);
}
if (isMaskTexture2Created[0] > 0) {
GLES11.glDeleteTextures(1, textures, 2);
}
isMaskTexture1Created[0] = 0;
isMaskTexture2Created[0] = 0;
InputStream stream = mainView.getResources().openRawResource(mask[0]);
int res = MR.LoadMaskCoordsFromStream(stream, maskCoords);
try {
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (res != FSDK.FSDKE_OK) {
Log.e(TAG, "Error loading mask coords from stream: " + res);
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
return;
}
BitmapFactory.Options bitmapDecodingOptions = new BitmapFactory.Options();
bitmapDecodingOptions.inScaled = false; // to load original image without scaling
FSDK.HImage img1 = new FSDK.HImage();
if (mask[1] == -1) { // if no image
FSDK.CreateEmptyImage(img1);
} else {
stream = mainView.getResources().openRawResource(mask[1]);
byte[] data = null;
try {
data = readBytes(stream);
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (data != null) {
res = FSDK.LoadImageFromPngBufferWithAlpha(img1, data, data.length);
Log.d(TAG, "Load mask image of size " + data.length + " with result " + res);
int[] w = new int[]{0};
int[] h = new int[]{0};
FSDK.GetImageWidth(img1, w);
FSDK.GetImageHeight(img1, h);
Log.d(TAG, "Mask image size: " + w[0] + " x " + h[0]);
} else {
Log.w(TAG, "Error loading mask image, using empty image");
FSDK.CreateEmptyImage(img1);
}
}
FSDK.HImage img2 = new FSDK.HImage();
if (mask[2] == -1) { // if no normal image
FSDK.CreateEmptyImage(img2);
} else {
stream = mainView.getResources().openRawResource(mask[2]);
byte[] data = null;
try {
data = readBytes(stream);
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
if (data != null) {
res = FSDK.LoadImageFromPngBufferWithAlpha(img2, data, data.length);
Log.d(TAG, "Load mask normal image of size " + data.length + " with result " + res);
int[] w = new int[]{0};
int[] h = new int[]{0};
FSDK.GetImageWidth(img2, w);
FSDK.GetImageHeight(img2, h);
Log.d(TAG, "Mask normal image size: " + w[0] + " x " + h[0]);
} else {
Log.w(TAG, "Error loading mask normal image, using empty image");
FSDK.CreateEmptyImage(img2);
}
}
res = MR.LoadMask(img1, img2, textures[1], textures[2], isMaskTexture1Created, isMaskTexture2Created);
FSDK.FreeImage(img1);
FSDK.FreeImage(img2);
Log.d(TAG, "Mask loaded with result " + res + " texture1Created:" + isMaskTexture1Created[0] + " texture2Created:" + isMaskTexture2Created[0]);
Log.d(TAG, "Mask textures: " + textures[1] + " " + textures[2]);
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
}
public void onDrawFrame(GL10 unused) { //call opengl functions only inside these functions!
GLES11.glClear(GLES11.GL_COLOR_BUFFER_BIT);
if (!isResized) {
return;
}
synchronized (this) {
if (updateSurfaceTexture) {
surfaceTexture.updateTexImage();
updateSurfaceTexture = false;
}
}
if (isMaskChanged) {
maskLoaded = mask;
loadMask(mask);
isMaskChanged = false;
}
int rotation = 1;
// First, drawing without mask to get image buffer
int res = MR.DrawGLScene(textures[0], 0, trackingFeatures, rotation, MR.SHIFT_TYPE_NO, textures[1], textures[2], maskCoords, 0, 0, width, height);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error in the first MR.DrawGLScene call: " + res);
}
GLES11.glReadPixels(0, 0, width, height, GLES11.GL_RGBA, GLES11.GL_UNSIGNED_BYTE, pixelBuffer);
face_count[0] = 0;
processCameraImage();
// Second, drawing with mask atop of image
res = MR.DrawGLScene(textures[0], (int) face_count[0], trackingFeatures, rotation, MASKS[maskLoaded][3], textures[1], textures[2], maskCoords, isMaskTexture1Created[0], isMaskTexture2Created[0], width, height);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error in the second MR.DrawGLScene call: " + res);
}
// Save snapshot if needed
if (isTakingSnapshot.compareAndSet(true, false)) {
GLES11.glReadPixels(0, 0, width, height, GLES11.GL_RGBA, GLES11.GL_UNSIGNED_BYTE, pixelBuffer);
snapshotImageMode.mode = FSDK.FSDK_IMAGEMODE.FSDK_IMAGE_COLOR_32BIT;
res = FSDK.LoadImageFromBuffer(snapshotImage, pixelBuffer.array(), width, height, width * 4, snapshotImageMode);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error loading snapshot image to FaceSDK: " + res);
} else {
FSDK.MirrorImage(snapshotImage, false);
String galleryPath = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES).getAbsolutePath();
final String filename = galleryPath + "/MirrorRealityDemo" + System.currentTimeMillis() + ".png";
res = FSDK.SaveImageToFile(snapshotImage, filename);
Log.d(TAG, "saving snapshot to " + filename);
FSDK.FreeImage(snapshotImage);
if (FSDK.FSDKE_OK == res) {
mainActivity.runOnUiThread(new Runnable() {
@Override
public void run() {
Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
File f = new File(filename);
Uri contentUri = Uri.fromFile(f);
mediaScanIntent.setData(contentUri);
mainActivity.sendBroadcast(mediaScanIntent);
Toast.makeText(mainActivity, "Saved successfully", Toast.LENGTH_SHORT).show();
}
});
}
}
}
// Show fps
++frameCount;
long timeCurrent = System.currentTimeMillis();
if (startTime == 0) startTime = timeCurrent;
long diff = timeCurrent - startTime;
if (diff >= 3000) {
final float fps = frameCount / (diff / 1000.0f);
frameCount = 0;
startTime = 0;
final TextView fpsTextView = mainActivity.fpsTextView();
mainActivity.fpsTextView().post(new Runnable() {
@Override
public void run() {
if (!mainActivity.isFinishing()) {
fpsTextView.setText(fps + " FPS");
}
}
});
}
}
private void processCameraImage() {
//clear previous features
for (int i = 0; i < MR.MAX_FACES; ++i) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j].x = 0;
trackingFeatures[i].features[j].y = 0;
}
}
cameraImageMode.mode = FSDK.FSDK_IMAGEMODE.FSDK_IMAGE_COLOR_32BIT;
int res = FSDK.LoadImageFromBuffer(cameraImage, pixelBuffer.array(), width, height, width * 4, cameraImageMode);
if (FSDK.FSDKE_OK != res) {
Log.e(TAG, "Error loading camera image to FaceSDK: " + res);
return;
}
FSDK.MirrorImage(cameraImage, false);
int[] widthByReference = new int[1];
int[] heightByReference = new int[1];
FSDK.GetImageWidth(cameraImage, widthByReference);
FSDK.GetImageHeight(cameraImage, heightByReference);
int width = widthByReference[0];
int height = heightByReference[0];
int rotation = 0;
if (deviceOrientation == DEVICE_ORIENTATION_INVERSE_PORTRAIT) {
rotation = 2;
} else if (deviceOrientation == DEVICE_ORIENTATION_LANDSCAPE) {
rotation = 3;
} else if (deviceOrientation == DEVICE_ORIENTATION_INVERSE_LANDSCAPE) {
rotation = 1;
}
if (rotation > 0) {
FSDK.HImage rotated = new FSDK.HImage();
FSDK.CreateEmptyImage(rotated);
FSDK.RotateImage90(cameraImage, rotation, rotated);
FSDK.FeedFrame(tracker, 0, rotated, face_count, IDs);
FSDK.FreeImage(rotated);
} else {
FSDK.FeedFrame(tracker, 0, cameraImage, face_count, IDs);
}
for (int i = 0; i < (int) face_count[0]; ++i) {
FSDK.GetTrackerFacialFeatures(tracker, 0, IDs[i], trackingFeatures[i]);
if (rotation > 0) {
if (rotation == 1) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
int x = trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].x = trackingFeatures[i].features[j].y;
trackingFeatures[i].features[j].y = height - 1 - x;
}
} else if (rotation == 2) {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
trackingFeatures[i].features[j].x = width - 1 - trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].y = height - 1 - trackingFeatures[i].features[j].y;
}
} else {
for (int j = 0; j < FSDK.FSDK_FACIAL_FEATURE_COUNT; ++j) {
int x = trackingFeatures[i].features[j].x;
trackingFeatures[i].features[j].x = width - 1 - trackingFeatures[i].features[j].y;
trackingFeatures[i].features[j].y = x;
}
}
}
}
FSDK.FreeImage(cameraImage);
}
public void onSurfaceChanged(GL10 unused, int width, int height) { //call opengl functions only inside these functions!
Log.d(TAG, "surfaceChanged");
if (!isResizeCalled) {
isResizeCalled = true;
mainView.resizeForPerformance(width, height);
return;
}
GLES11.glViewport(0, 0, width, height);
Camera.Parameters param = camera.getParameters();
List<Camera.Size> psize = param.getSupportedPreviewSizes();
if (psize.size() > 0) {
int i = 0;
int optDistance = Integer.MAX_VALUE;
Log.d(TAG, "Choosing preview resolution closer to " + width + " x " + height);
double neededScale = height / (double) width;
for (int j = 0; j < psize.size(); ++j) {
double scale = psize.get(j).width / (double) psize.get(j).height;
int distance = (int) (10000 * Math.abs(scale - neededScale));
Log.d(TAG, "Choosing preview resolution, probing " + psize.get(j).width + " x " + psize.get(j).height + " distance: " + distance);
if (distance < optDistance) {
i = j;
optDistance = distance;
} else if (distance == optDistance) {
// try to avoid too low resolution
if ((psize.get(i).width < 300 || psize.get(i).height < 300)
&& psize.get(j).width > psize.get(i).width && psize.get(j).height > psize.get(i).height) {
i = j;
}
}
}
Log.d(TAG, "Using optimal preview size: " + psize.get(i).width + " x " + psize.get(i).height);
param.setPreviewSize(psize.get(i).width, psize.get(i).height);
// adjusting viewport to camera aspect ratio
int viewportHeight = (int) (width * (psize.get(i).width * 1.0f / psize.get(i).height));
GLES11.glViewport(0, 0, width, viewportHeight);
this.width = width;
this.height = viewportHeight;
pixelBuffer = ByteBuffer.allocateDirect(this.width * this.height * 4).order(ByteOrder.nativeOrder());
}
param.set("orientation", "landscape");
camera.setParameters(param);
camera.startPreview();
inPreview = true;
isResized = true;
}
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
private void initTex() {
textures = new int[3];
GLES11.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
GLES11.glEnable(GL10.GL_TEXTURE_2D);
GLES11.glGenTextures(3, textures, 0);
GLES11.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_WRAP_S, GLES11.GL_CLAMP_TO_EDGE);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_WRAP_T, GLES11.GL_CLAMP_TO_EDGE);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_MIN_FILTER, GLES11.GL_NEAREST);
GLES11.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES11.GL_TEXTURE_MAG_FILTER, GLES11.GL_NEAREST);
}
private void deleteTex() {
GLES11.glDeleteTextures(3, textures, 0);
}
public synchronized void onFrameAvailable(SurfaceTexture st) {
updateSurfaceTexture = true;
mainView.requestRender();
}
public synchronized void snapshot() {
isTakingSnapshot.set(true);
}
}
Any type of help is appreciated, thank you so much. Here is the preview that I see:
I have tried to make sense of this, but there is little information on which API level you're targeting; that may be the problem, or it may be the test device. But let's rewrite the camera display code.
Method 1:
public void setCameraDisplayOrientation(android.hardware.Camera camera) {
    Camera.Parameters parameters = camera.getParameters();
    android.hardware.Camera.CameraInfo camInfo = new android.hardware.Camera.CameraInfo();
    android.hardware.Camera.getCameraInfo(getBackFacingCameraId(), camInfo);
    Display display = ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
    int rotation = display.getRotation();
    int degrees = 0;
    switch (rotation) {
        case Surface.ROTATION_0:
            degrees = 0;
            break;
        case Surface.ROTATION_90:
            degrees = 90;
            break;
        case Surface.ROTATION_180:
            degrees = 180;
            break;
        case Surface.ROTATION_270:
            degrees = 270;
            break;
    }
    int result;
    if (camInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
        result = (camInfo.orientation + degrees) % 360;
        result = (360 - result) % 360; // compensate the mirror
    } else { // back-facing
        result = (camInfo.orientation - degrees + 360) % 360;
    }
    camera.setDisplayOrientation(result);
}
Call the setCameraDisplayOrientation() method in the surfaceCreated callback as follows:
@Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
    camera = Camera.open();
    setCameraDisplayOrientation(camera);
}
Method 2: get the phone's orientation by using a sensor; this makes life much easier because the orientation will be handled properly, as sketched below. You can find more about this within this link: Is Android's CameraInfo.orientation correctly documented? Incorrectly implemented?
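For illustration, a minimal sketch of the sensor approach using OrientationEventListener. The renderer field and the threshold mapping are my own assumptions; the DEVICE_ORIENTATION_* constants are the ones defined in your MainRenderer, and the exact angle-to-constant mapping may need swapping depending on the device's natural orientation:
// In the activity: feed the device orientation to the renderer via a sensor listener.
OrientationEventListener orientationListener =
        new OrientationEventListener(this, SensorManager.SENSOR_DELAY_NORMAL) {
    @Override
    public void onOrientationChanged(int degrees) {
        if (degrees == ORIENTATION_UNKNOWN) return;
        // Map the raw clockwise angle to the renderer's DEVICE_ORIENTATION_* constants.
        if (degrees >= 315 || degrees < 45) {
            renderer.deviceOrientation = MainRenderer.DEVICE_ORIENTATION_PORTRAIT;
        } else if (degrees < 135) {
            renderer.deviceOrientation = MainRenderer.DEVICE_ORIENTATION_INVERSE_LANDSCAPE;
        } else if (degrees < 225) {
            renderer.deviceOrientation = MainRenderer.DEVICE_ORIENTATION_INVERSE_PORTRAIT;
        } else {
            renderer.deviceOrientation = MainRenderer.DEVICE_ORIENTATION_LANDSCAPE;
        }
    }
};
// enable in onResume(), disable in onPause()
if (orientationListener.canDetectOrientation()) {
    orientationListener.enable();
}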
Method 3: edit your startCamera() method as shown below. Only the camera-facing block changes; the rest of the MainRenderer class stays exactly the same as your code above.
public void startCamera() {
    if (inPreview) {
        camera.stopPreview();
        inPreview = false;
    }
    //NB: if you don't release the current camera before switching, your app will crash
    camera.release();
    //Below I have tried to change the camera facing
    if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
        cameraInfo.facing = Camera.CameraInfo.CAMERA_FACING_BACK;
    } else {
        cameraInfo.facing = Camera.CameraInfo.CAMERA_FACING_FRONT;
    }
    camera = Camera.open(cameraInfo.facing);
    try {
        //this step is critical or the preview on the new camera will not know where to render to
        camera.setPreviewTexture(surfaceTexture);
    } catch (IOException e) {
        e.printStackTrace();
    }
    camera.startPreview();
    inPreview = true;
}
This is sometimes due to Android not knowing what orientation the previous activity is coming from. Try setting the orientation of the activity with
android:screenOrientation="nosensor"
or try the following:
Add the orientation attribute in the manifest
android:screenOrientation=["unspecified" | "behind" |
"landscape" | "portrait" |
"reverseLandscape" | "reversePortrait" |
"sensorLandscape" | "sensorPortrait" |
"userLandscape" | "userPortrait" |
"sensor" | "fullSensor" | "nosensor" |
"user" | "fullUser" | "locked"]
So in your case it will be
<activity android:name=".yourCameractivity"
....
android:screenOrientation="sensorPortrait"/>

How to use Sony SmartEyeGlass for Scanning QR Code?

I am trying to develop an app for SmartEyeGlass that scans QR codes using the ZBar library. The app is based on the sample camera extension, but it doesn't work and I can't see what the problem is. Here is my code:
private void cameraEventOperation(CameraEvent event) {
if (event.getErrorStatus() != 0) {
Log.d(Constants.LOG_TAG, "error code = " + event.getErrorStatus());
return;
}
if(event.getIndex() != 0){
Log.d(Constants.LOG_TAG, "not oparate this event");
return;
}
Bitmap bitmap = null;
byte[] data = null;
if ((event.getData() != null) && ((event.getData().length) > 0)) {
data = event.getData();
bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
data1= data;
/* Instance barcode scanner */
scanner = new ImageScanner();
scanner.setConfig(Symbol.QRCODE, Config.X_DENSITY, 2);
scanner.setConfig(Symbol.QRCODE, Config.Y_DENSITY, 2);
Image barcode = new Image(width, height, "Y800");
barcode.setData(data1);
QRCodeStatus= scanner.scanImage(barcode);
if (QRCodeStatus != 0) {
SymbolSet syms = scanner.getResults();
for (Symbol kasa : syms) {
strValueOfScannedQR = String.valueOf(kasa.getData());
intValueOfScannedQR = Integer.valueOf(kasa.getData());
}
}
}
if (bitmap == null) {
Log.d(Constants.LOG_TAG, "bitmap == null");
return;
}
if (saveToSdcard == true) {
String fileName = saveFilePrefix + String.format("%04d", saveFileIndex) + ".jpg";
new SavePhotoTask(saveFolder,fileName).execute(data);
saveFileIndex++;
}
if (recordingMode == SmartEyeglassControl.Intents.CAMERA_MODE_STILL) {
Bitmap basebitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
basebitmap.setDensity(DisplayMetrics.DENSITY_DEFAULT);
Canvas canvas = new Canvas(basebitmap);
Rect rect = new Rect(0, 0, width, height);
Paint paint = new Paint();
paint.setStyle(Paint.Style.FILL);
canvas.drawBitmap(bitmap, rect, rect, paint);
utils.showBitmap(basebitmap);
return;
}
Log.d(Constants.LOG_TAG, "Camera frame was received : #" + saveFileIndex);
updateDisplay();
}
private void updateDisplay()
{
Bitmap displayBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
displayBitmap.setDensity(DisplayMetrics.DENSITY_DEFAULT);
Canvas canvas = new Canvas(displayBitmap);
Paint paint = new Paint();
paint.setStyle(Paint.Style.FILL);
paint.setTextSize(16);
paint.setColor(Color.WHITE);
// Update layout according to the camera mode
switch (recordingMode) {
case SmartEyeglassControl.Intents.CAMERA_MODE_STILL:
canvas.drawText("Tap to capture : STILL", pointX, pointY, paint);
break;
case SmartEyeglassControl.Intents.CAMERA_MODE_STILL_TO_FILE:
canvas.drawText("Tap to capture : STILL TO FILE", pointX, pointY, paint);
break;
case SmartEyeglassControl.Intents.CAMERA_MODE_JPG_STREAM_HIGH_RATE:
if (cameraStarted) {
canvas.drawText("Frame Number: " + Integer.toString(saveFileIndex), pointBaseX, (pointY * 1), paint);
canvas.drawText("Value of QR: " + strValueOfScannedQR, pointBaseX, (pointY * 2), paint);
canvas.drawText("Data1=" + data1, pointBaseX, (pointY * 3), paint);
canvas.drawText("QR status " + QRCodeStatus, pointBaseX, (pointY * 4), paint);
}
else {
canvas.drawText("Tap to start JPEG Stream.", pointBaseX, pointY, paint);
}
break;
case SmartEyeglassControl.Intents.CAMERA_MODE_JPG_STREAM_LOW_RATE:
if (cameraStarted) {
canvas.drawText("JPEG Streaming...", pointBaseX, pointY, paint);
canvas.drawText("Tap to stop.", pointBaseX, (pointY * 2), paint);
canvas.drawText("Frame Number: " + Integer.toString(saveFileIndex), pointBaseX, (pointY * 3), paint);
} else {
canvas.drawText("Tap to start JPEG Stream.", pointBaseX, pointY, paint);
}
break;
default:
canvas.drawText("wrong recording type.", pointBaseX, pointY, paint);
}
utils.showBitmap(displayBitmap);
}
}
The problem seems to be that you are passing data1 as the parameter to the barcode.setData method. You should probably pass the decoded image data instead, e.g. barcode.setData(bitmap); a sketch of one possible conversion follows below.
This question is rather related to the QR code scanning library that you are using, so please also tag it with the relevant tag for that library to get support for it. Please also check what parameter the setData method expects in the API reference of your QR code scanning library.
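For what it's worth, ZBar's "Y800" format expects raw 8-bit grayscale pixels rather than compressed JPEG bytes, so passing the raw camera data directly will likely fail. A minimal sketch of converting the decoded Bitmap to a Y800 buffer; the helper name toY800 is my own, and the luminance weights are the usual BT.601 approximation:
// Convert a decoded Bitmap to a Y800 (8-bit grayscale) buffer for ZBar.
private static byte[] toY800(Bitmap bitmap) {
    int w = bitmap.getWidth();
    int h = bitmap.getHeight();
    int[] argb = new int[w * h];
    bitmap.getPixels(argb, 0, w, 0, 0, w, h);
    byte[] y800 = new byte[w * h];
    for (int i = 0; i < argb.length; i++) {
        int p = argb[i];
        int r = (p >> 16) & 0xFF, g = (p >> 8) & 0xFF, b = p & 0xFF;
        y800[i] = (byte) ((r * 299 + g * 587 + b * 114) / 1000);
    }
    return y800;
}

// Usage in cameraEventOperation, using the bitmap's own dimensions:
// Image barcode = new Image(bitmap.getWidth(), bitmap.getHeight(), "Y800");
// barcode.setData(toY800(bitmap));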

Save Mat (openCV) to SharedPreferences Android

I'm writing an app which uses KNearest. I wrote code to train the model, but after every app restart I must train the data again, so I would like to save the training data to SharedPreferences once and use it afterwards.
I know that I must convert the Mat to byte[] and then to String, but decoding is not working; I get this error:
(layout == ROW_SAMPLE && responses.rows == nsamples) || (layout == COL_SAMPLE && responses.cols == nsamples)
in function void cv::ml::TrainDataImpl::setData(cv::InputArray, int, cv::InputArray, cv::InputArray, cv::InputArray, cv::InputArray, cv::InputArray, cv::InputArray)
Code:
protected Void doInBackground(Void... args) {
    // Constants.TRAIN_SAMPLES = 10
    Mat trainData = new Mat(0, 200 * 200, CvType.CV_32FC1); // 0 x 40 000
    Mat trainClasses = new Mat(Constants.TRAIN_SAMPLES, 1, CvType.CV_32FC1); // 10 x 1
    float[] myint = new float[Constants.TRAIN_SAMPLES + 1];
    for (i = 1; i <= Constants.TRAIN_SAMPLES; i++)
        myint[i] = (float) i;
    trainClasses.put(0, 0, myint);
    KNearest knn = KNearest.create();
    String val = " ";
    val = sharedPref.getString("key", " ");
    // empty SharedPreferences
    if (val.equals(" ")) {
        // get all images from external storage
        for (i = 1; i <= Constants.TRAIN_SAMPLES; i++) {
            String photoPath = Environment.getExternalStorageDirectory().toString() + "/ramki/ramka_" + i + ".png";
            BitmapFactory.Options options = new BitmapFactory.Options();
            options.inPreferredConfig = Bitmap.Config.ARGB_8888;
            Bitmap bitmap = BitmapFactory.decodeFile(photoPath, options);
            Utils.bitmapToMat(bitmap, img);
            if (img.channels() == 3) {
                Imgproc.cvtColor(img, img, Imgproc.COLOR_RGB2GRAY);
            } else if (img.channels() == 4) {
                Imgproc.cvtColor(img, img, Imgproc.COLOR_RGBA2GRAY);
            }
            Imgproc.resize(img, img, new Size(200, 200));
            img.convertTo(img, CvType.CV_32FC1);
            img = img.reshape(1, 1); // 1 x 40 000 (200 x 200)
            trainData.push_back(img);
            publishProgress(i);
        }
        trainData.convertTo(trainData, CvType.CV_8U);
        // save this trainData (Mat) to SharedPreferences
        saveMatToPref(trainData);
    } else {
        // get trainData from SharedPreferences
        val = sharedPref.getString("key", " ");
        byte[] data = Base64.decode(val, Base64.DEFAULT);
        trainData.convertTo(trainData, CvType.CV_8U);
        trainData.put(0, 0, data);
    }
    trainData.convertTo(trainData, CvType.CV_32FC1);
    knn.train(trainData, Ml.ROW_SAMPLE, trainClasses);
    trainClasses.release();
    trainData.release();
    img.release();
    onPostExecute();
    return null;
}
public void saveMatToPref(Mat mat) {
    if (mat.isContinuous()) {
        int cols = mat.cols();
        int rows = mat.rows();
        byte[] data = new byte[cols * rows];
        // here, data contains {0,0,0,0,0,0 ..... } 400 000 items
        mat.get(0, 0, data);
        String dataString = new String(Base64.encode(data, Base64.DEFAULT));
        SharedPreferences.Editor mEdit1 = sharedPref.edit();
        mEdit1.putString("key", dataString);
        mEdit1.commit();
    } else {
        Log.i(TAG, "Mat not continuous.");
    }
}
When I decode, my trainData looks like this:
Mat [ 0*40000*CV_32FC1 ..]
but it should be:
Mat [ 10*40000*CV_32FC1 ..]
Can anybody help me encode and decode the Mat? Thanks for any help.
As @Miki mentioned, the problem was in the types. Now it works, but only with a Mat of around 200 x 40 000 in my case; if it's bigger, I get an OutOfMemoryError...
String val = " ";
val = sharedPref.getString("key", " ");
// empty SharedPreferences
if ( ! val.equals(" ")) {
// get all images from external storage
for (i = 1; i <= Constants.TRAIN_SAMPLES; i++) {
String photoPath = Environment.getExternalStorageDirectory().toString() + "/ramki/ramka_" + i + ".png";
BitmapFactory.Options options = new BitmapFactory.Options();
options.inPreferredConfig = Bitmap.Config.ARGB_8888;
Bitmap bitmap = BitmapFactory.decodeFile(photoPath, options);
Utils.bitmapToMat(bitmap, img);
if (img.channels() == 3) {
Imgproc.cvtColor(img, img, Imgproc.COLOR_RGB2GRAY);
} else if (img.channels() == 4) {
Imgproc.cvtColor(img, img, Imgproc.COLOR_RGBA2GRAY);
}
Imgproc.resize(img, img, new Size(200, 200));
img.convertTo(img, CvType.CV_32FC1);
img = img.reshape(1, 1); // 1 x 40 000 ( 200x200 )
trainData.push_back(img);
publishProgress(i);
}
// save this trainData (Mat) to SharedPreferences
saveMatToPref(trainData);
} else {
// get trainData from SharedPreferences
val = sharedPref.getString("key", " ");
byte[] data = Base64.decode(val, Base64.DEFAULT);
trainData = new Mat(Constants.TRAIN_SAMPLES, 200 * 200, CvType.CV_32FC1);
float[] f = toFloatArray(data);
trainData.put(0, 0, f);
}
knn.train(trainData, Ml.ROW_SAMPLE, trainClasses);
public void saveMatToPref(Mat mat) {
    if (mat.isContinuous()) {
        int size = (int) (mat.total() * mat.channels());
        float[] data = new float[size];
        mat.get(0, 0, data);
        byte[] b = FloatArray2ByteArray(data);
        String dataString = new String(Base64.encode(b, Base64.DEFAULT));
        SharedPreferences.Editor mEdit1 = sharedPref.edit();
        mEdit1.putString("key", dataString);
        mEdit1.commit();
    } else {
        Log.i(TAG, "Mat not continuous.");
    }
}

private static float[] toFloatArray(byte[] bytes) {
    ByteBuffer buffer = ByteBuffer.wrap(bytes);
    FloatBuffer fb = buffer.asFloatBuffer();
    float[] floatArray = new float[fb.limit()];
    fb.get(floatArray);
    return floatArray;
}

public static byte[] FloatArray2ByteArray(float[] values) {
    ByteBuffer buffer = ByteBuffer.allocate(4 * values.length);
    for (float value : values)
        buffer.putFloat(value);
    return buffer.array();
}
If someone has a better solution, please add it. One possibility is sketched below.
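For instance, a sketch of my own (untested) that avoids building the giant Base64 String by writing the raw float data to a file in internal storage instead of SharedPreferences; the file name traindata.bin is arbitrary:
// Write the Mat's float data straight to a file in internal storage.
// This avoids the huge Base64 String that can trigger the OutOfMemoryError.
public static void saveMatToFile(Context context, Mat mat) throws IOException {
    if (!mat.isContinuous()) {
        throw new IllegalArgumentException("Mat not continuous.");
    }
    int size = (int) (mat.total() * mat.channels());
    float[] data = new float[size];
    mat.get(0, 0, data);
    File f = new File(context.getFilesDir(), "traindata.bin");
    try (DataOutputStream out = new DataOutputStream(
            new BufferedOutputStream(new FileOutputStream(f)))) {
        // store the shape first so the Mat can be rebuilt on load
        out.writeInt(mat.rows());
        out.writeInt(mat.cols());
        for (float v : data) {
            out.writeFloat(v);
        }
    }
}

public static Mat loadMatFromFile(Context context) throws IOException {
    File f = new File(context.getFilesDir(), "traindata.bin");
    try (DataInputStream in = new DataInputStream(
            new BufferedInputStream(new FileInputStream(f)))) {
        int rows = in.readInt();
        int cols = in.readInt();
        float[] data = new float[rows * cols];
        for (int i = 0; i < data.length; i++) {
            data[i] = in.readFloat();
        }
        Mat mat = new Mat(rows, cols, CvType.CV_32FC1);
        mat.put(0, 0, data);
        return mat;
    }
}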

Capturing images crashing on HTC devices

Hi, I am developing an app that captures images and emails them. Capturing images works fine on a Samsung Galaxy and a Sony Ericsson Xperia, but it's not working on HTC devices. Does anyone know a reason why? Here is my code for capturing images:
try {
    String fileName = Image_name + ".jpg";
    // create parameters for the Intent with the filename
    ContentValues values = new ContentValues();
    values.put(MediaStore.Images.Media.TITLE, fileName);
    values.put(MediaStore.Images.Media.DESCRIPTION, "Image capture by camera");
    // imageUri is the current activity attribute; define and save it for later usage (also in onSaveInstanceState)
    outuri = getContentResolver().insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values);
    Intent cameraIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
    //outuri = Uri.fromFile(photo);
    cameraIntent.putExtra(MediaStore.EXTRA_OUTPUT, outuri);
    cameraIntent.putExtra("return-data", true);
    startActivityForResult(cameraIntent, 2);
} catch (Exception e) {
    Toast.makeText(preview.this, "" + e, Toast.LENGTH_LONG).show();
}
And here is the code that I use to retrieve the images:
path = convertImageUriToFile(outuri, preview.this).getAbsolutePath();
arr.add(path);
try {
    bitmap = getImage(path);
} catch (IOException e) {
    e.printStackTrace();
}
public static File convertImageUriToFile(Uri imageUri, Activity activity) {
    Cursor cursor = null;
    try {
        String[] proj = {MediaStore.Images.Media.DATA, MediaStore.Images.Media._ID, MediaStore.Images.ImageColumns.ORIENTATION};
        cursor = activity.managedQuery(imageUri, proj, // Which columns to return
                null, // WHERE clause; which rows to return (all rows)
                null, // WHERE clause selection arguments (none)
                null); // Order-by clause (ascending by name)
        int file_ColumnIndex = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
        int orientation_ColumnIndex = cursor.getColumnIndexOrThrow(MediaStore.Images.ImageColumns.ORIENTATION);
        if (cursor.moveToFirst()) {
            String orientation = cursor.getString(orientation_ColumnIndex);
            return new File(cursor.getString(file_ColumnIndex));
        }
        return null;
    } finally {
        if (cursor != null) {
            cursor.close();
        }
    }
}
public Bitmap getImage(String path) throws IOException {
    // first pass: read only the image bounds
    BitmapFactory.Options options = new BitmapFactory.Options();
    options.inJustDecodeBounds = true;
    BitmapFactory.decodeFile(path, options);
    int srcWidth = options.outWidth;
    int srcHeight = options.outHeight;
    int[] newWH = new int[2];
    newWH[0] = 1024;
    newWH[1] = (1024 * srcHeight) / srcWidth;
    int inSampleSize = 1;
    while (srcWidth / 2 > newWH[0]) {
        srcWidth /= 2;
        srcHeight /= 2;
        inSampleSize *= 2;
    }
    // second pass: decode with inSampleSize
    options.inJustDecodeBounds = false;
    options.inDither = false;
    options.inSampleSize = inSampleSize;
    options.inScaled = false;
    options.inPreferredConfig = Bitmap.Config.ARGB_8888;
    Bitmap sampledSrcBitmap = BitmapFactory.decodeFile(path, options);
    ExifInterface exif = new ExifInterface(path);
    String s = exif.getAttribute(ExifInterface.TAG_ORIENTATION);
    System.out.println("Orientation>>>>>>>>>>>>>>>>>>>>" + s);
    Matrix matrix = new Matrix();
    float rotation = rotationForImage(preview.this, Uri.fromFile(new File(path)));
    if (rotation != 0f) {
        matrix.preRotate(rotation);
    }
    int newh = (w * sampledSrcBitmap.getHeight()) / sampledSrcBitmap.getWidth();
    Bitmap r = Bitmap.createScaledBitmap(sampledSrcBitmap, w, newh, true);
    Bitmap resizedBitmap = Bitmap.createBitmap(r, 0, 0, w, newh, matrix, true);
    return resizedBitmap;
}
Well, there's a known bug with Intent.putExtra(MediaStore.EXTRA_OUTPUT) on some devices, and it causes a crash in the app; a common workaround is sketched below.
Check out the answer I got when asking the same question: https://stackoverflow.com/a/10613299/1056359
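For illustration, one commonly used workaround (a sketch, untested; behavior varies between camera apps) is to skip EXTRA_OUTPUT on the affected devices and read the result from the returned Intent instead:
// Launch the capture without EXTRA_OUTPUT so the buggy camera app doesn't crash.
Intent cameraIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
startActivityForResult(cameraIntent, 2);

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == 2 && resultCode == RESULT_OK && data != null) {
        if (data.getData() != null) {
            // Some camera apps return a content Uri to the captured image.
            outuri = data.getData();
        } else if (data.getExtras() != null) {
            // Others only return a small thumbnail in the "data" extra;
            // you would then have to save it to a file yourself.
            Bitmap thumbnail = (Bitmap) data.getExtras().get("data");
        }
    }
}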
