I am working on an application where users have to upload multiple images. The problem I am facing is their size: because users can upload several images at once, the upload takes a long time due to the large file sizes, which users do not like, and it also makes my application and database heavy. Can you please guide me on how to compress multiple images before uploading them to Firebase?
CODE TO COMPRESS IMAGES
galleryResultLauncher = registerForActivityResult(new ActivityResultContracts.StartActivityForResult(), new ActivityResultCallback<ActivityResult>() {
    @Override
    public void onActivityResult(ActivityResult result) {
        if (result.getResultCode() == RESULT_OK && null != result.getData()) {
            if (result.getData().getClipData() != null) {
                int countofImages = result.getData().getClipData().getItemCount();
                // this part is to select multiple images
                for (int i = 0; i < countofImages; i++) {
                    if (uri.size() < 11) {
                        Uri imageuri = result.getData().getClipData().getItemAt(i).getUri();
                        imageList.add(new CustomModel(getfilename(imageuri), imageuri));
                    } else {
                        Toast.makeText(getContext(), "Can't select more than 11 images", Toast.LENGTH_SHORT).show();
                    }
                }
                // then notify the adapter
                adapter.notifyDataSetChanged();
                rentSell3Binding.totalphotos.setText("Photos (" + imageList.size() + ")");
            } else {
                // this part is to select a single image
                if (uri.size() < 11) {
                    Uri imageuri = result.getData().getData();
                    // and add it to the ArrayList
                    imageList.add(new CustomModel(getfilename(imageuri), imageuri));
                } else {
                    Toast.makeText(getContext(), "Can't select more than 11 images", Toast.LENGTH_SHORT).show();
                }
                // notify the adapter
                adapter.notifyDataSetChanged();
                rentSell3Binding.totalphotos.setText("Photos (" + imageList.size() + ")");
            }
        } else {
            Toast.makeText(getContext(), "You haven't selected any images", Toast.LENGTH_SHORT).show();
        }
    }
});
MODEL CLASS
public class CustomModel {

    String imageName;
    Uri imageURI;

    public CustomModel(String imageName, Uri imageURI) {
        this.imageName = imageName;
        this.imageURI = imageURI;
    }

    public String getImageName() {
        return imageName;
    }

    public void setImageName(String imageName) {
        this.imageName = imageName;
    }

    public Uri getImageURI() {
        return imageURI;
    }

    public void setImageURI(Uri imageURI) {
        this.imageURI = imageURI;
    }
}
THROWING EXCEPTION
I get a NullPointerException on this line:
File file = new File(SiliCompressor.with(getContext()).compress(FileUtils.getPath(getContext(), imageuri), new File(getContext().getCacheDir(), "temp")));
Yes, you can compress your image file with the following functions:
@Throws(IOException::class)
fun File.compressImage(
    reqWidth: Float,
    reqHeight: Float,
    compressFormat: Bitmap.CompressFormat,
    quality: Int,
    destinationPath: String
): File {
    var fileOutputStream: FileOutputStream? = null
    val file = File(destinationPath).parentFile ?: return File(destinationPath)
    if (!file.exists()) {
        file.mkdirs()
    }
    try {
        fileOutputStream = FileOutputStream(destinationPath)
        // write the compressed bitmap at the destination specified by destinationPath.
        decodeSampledBitmapFromFile(this, reqWidth, reqHeight)?.compress(
            compressFormat,
            quality,
            fileOutputStream
        )
    } finally {
        if (fileOutputStream != null) {
            fileOutputStream.flush()
            fileOutputStream.close()
        }
    }
    return File(destinationPath)
}
@Throws(IOException::class)
private fun decodeSampledBitmapFromFile(
    imageFile: File,
    reqWidth: Float,
    reqHeight: Float
): Bitmap? {
    // First decode with inJustDecodeBounds=true to check dimensions
    val options = BitmapFactory.Options()
    options.inJustDecodeBounds = true
    var bmp: Bitmap? = BitmapFactory.decodeFile(imageFile.absolutePath, options)
    var actualHeight = options.outHeight
    var actualWidth = options.outWidth
    var imgRatio = actualWidth.toFloat() / actualHeight.toFloat()
    val maxRatio = reqWidth / reqHeight
    if (actualHeight > reqHeight || actualWidth > reqWidth) {
        when {
            // If height is greater
            imgRatio < maxRatio -> {
                imgRatio = reqHeight / actualHeight
                actualWidth = (imgRatio * actualWidth).toInt()
                actualHeight = reqHeight.toInt()
            }
            // If width is greater
            imgRatio > maxRatio -> {
                imgRatio = reqWidth / actualWidth
                actualHeight = (imgRatio * actualHeight).toInt()
                actualWidth = reqWidth.toInt()
            }
            else -> {
                actualHeight = reqHeight.toInt()
                actualWidth = reqWidth.toInt()
            }
        }
    }
    // Calculate inSampleSize
    options.inSampleSize = calculateInSampleSize(options, actualWidth, actualHeight)
    options.inJustDecodeBounds = false
    if (bmp != null && canUseForInBitmap(bmp, options)) {
        // inBitmap only works with mutable bitmaps, so force the decoder to
        // return mutable bitmaps.
        options.inMutable = true
        options.inBitmap = bmp
    }
    options.inTempStorage = ByteArray(16 * 1024)
    try {
        bmp = BitmapFactory.decodeFile(imageFile.absolutePath, options)
    } catch (exception: OutOfMemoryError) {
        exception.printStackTrace()
    }
    var scaledBitmap: Bitmap? = null
    try {
        scaledBitmap = Bitmap.createBitmap(actualWidth, actualHeight, Bitmap.Config.ARGB_8888)
    } catch (exception: OutOfMemoryError) {
        exception.printStackTrace()
    }
    val ratioX = actualWidth / options.outWidth.toFloat()
    val ratioY = actualHeight / options.outHeight.toFloat()
    val middleX = actualWidth / 2.0f
    val middleY = actualHeight / 2.0f
    val scaleMatrix = Matrix()
    scaleMatrix.setScale(ratioX, ratioY, middleX, middleY)
    val canvas = Canvas(scaledBitmap!!)
    canvas.setMatrix(scaleMatrix)
    canvas.drawBitmap(
        bmp!!, middleX - bmp.width / 2,
        middleY - bmp.height / 2, Paint(Paint.FILTER_BITMAP_FLAG)
    )
    bmp.recycle()
    val exif: ExifInterface
    try {
        exif = ExifInterface(imageFile.absolutePath)
        val orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, 0)
        val matrix = Matrix()
        if (orientation == 6) {
            matrix.postRotate(90f)
        } else if (orientation == 3) {
            matrix.postRotate(180f)
        } else if (orientation == 8) {
            matrix.postRotate(270f)
        }
        scaledBitmap = Bitmap.createBitmap(
            scaledBitmap, 0, 0, scaledBitmap.width,
            scaledBitmap.height, matrix, true
        )
    } catch (e: IOException) {
        e.printStackTrace()
    }
    return scaledBitmap
}
private fun calculateInSampleSize(
    options: BitmapFactory.Options,
    reqWidth: Int,
    reqHeight: Int
): Int {
    // Raw height and width of image
    val height = options.outHeight
    val width = options.outWidth
    var inSampleSize = 1
    if (height > reqHeight || width > reqWidth) {
        inSampleSize *= 2
        val halfHeight = height / 2
        val halfWidth = width / 2
        // Calculate the largest inSampleSize value that is a power of 2 and keeps both
        // height and width larger than the requested height and width.
        while (halfHeight / inSampleSize >= reqHeight && halfWidth / inSampleSize >= reqWidth) {
            inSampleSize *= 2
        }
    }
    return inSampleSize
}

/**
 * Ref: https://developer.android.com/topic/performance/graphics/manage-memory#kotlin
 */
private fun canUseForInBitmap(candidate: Bitmap, targetOptions: BitmapFactory.Options): Boolean {
    return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
        // From Android 4.4 (KitKat) onward we can re-use if the byte size of
        // the new bitmap is smaller than the reusable bitmap candidate
        // allocation byte count.
        val width: Int = targetOptions.outWidth / targetOptions.inSampleSize
        val height: Int = targetOptions.outHeight / targetOptions.inSampleSize
        val byteCount: Int = width * height * getBytesPerPixel(candidate.config)
        byteCount <= candidate.allocationByteCount
    } else {
        // On earlier versions, the dimensions must match exactly and the inSampleSize must be 1
        candidate.width == targetOptions.outWidth &&
                candidate.height == targetOptions.outHeight &&
                targetOptions.inSampleSize == 1
    }
}

/**
 * A helper function to return the byte usage per pixel of a bitmap based on its configuration.
 */
private fun getBytesPerPixel(config: Bitmap.Config): Int {
    return when (config) {
        Bitmap.Config.ARGB_8888 -> 4
        Bitmap.Config.RGB_565, Bitmap.Config.ARGB_4444 -> 2
        Bitmap.Config.ALPHA_8 -> 1
        else -> 1
    }
}

@Throws(IOException::class)
fun Context.createImageFile(): File {
    // Create an image file name
    val storageDir: File? = getExternalFilesDir(Environment.DIRECTORY_PICTURES)
    return File.createTempFile(
        "JPEG_${System.currentTimeMillis()}_", /* prefix */
        ".jpg", /* suffix */
        storageDir /* directory */
    )
}
And simply call the following method to create the compressed image file. Here, file is your image file:
file.compressImage(
    1024f,
    1024f,
    Bitmap.CompressFormat.JPEG,
    70,
    context.createImageFile().path
)
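If you also want to tie this back to the original multi-image/Firebase question, here is a rough Java sketch of the same idea without the Kotlin extension: read each selected content Uri, decode it downsampled, re-encode it as JPEG into the cache directory, and upload the small file instead of the original. The "images/" storage path, the 1024 px target and the 70% quality are assumptions; adjust them to your project.
// Sketch only: compress each selected Uri and upload the result to Firebase Storage.
// Assumes Firebase Storage is already set up; path, size and quality values are placeholders.
private void compressAndUpload(Context context, List<Uri> uris) throws IOException {
    StorageReference root = FirebaseStorage.getInstance().getReference().child("images");
    for (Uri uri : uris) {
        // First pass: read only the bounds so we can pick a sample size.
        BitmapFactory.Options bounds = new BitmapFactory.Options();
        bounds.inJustDecodeBounds = true;
        InputStream in = context.getContentResolver().openInputStream(uri);
        BitmapFactory.decodeStream(in, null, bounds);
        if (in != null) in.close();

        // Second pass: decode downsampled (the decoder rounds inSampleSize down to a power of 2).
        BitmapFactory.Options opts = new BitmapFactory.Options();
        opts.inSampleSize = Math.max(1, Math.max(bounds.outWidth, bounds.outHeight) / 1024);
        in = context.getContentResolver().openInputStream(uri);
        Bitmap bitmap = BitmapFactory.decodeStream(in, null, opts);
        if (in != null) in.close();
        if (bitmap == null) continue;

        // Re-encode as JPEG at 70% quality into the cache dir.
        File compressed = new File(context.getCacheDir(), "upload_" + System.currentTimeMillis() + ".jpg");
        FileOutputStream out = new FileOutputStream(compressed);
        bitmap.compress(Bitmap.CompressFormat.JPEG, 70, out);
        out.flush();
        out.close();
        bitmap.recycle();

        // Upload the compressed file instead of the original Uri.
        root.child(compressed.getName()).putFile(Uri.fromFile(compressed));
    }
}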
Related
I have an image URI and I am getting the Bitmap from that URI using the code below:
Bitmap bitmap = null;
try {
    bitmap = MediaStore.Images.Media.getBitmap(getContext().getContentResolver(), uri);
} catch (IOException e) {
    e.printStackTrace();
}
Now I want to check if the Bitmap is a black and white or a color image.
(There will either be a black/white image or a colored one)
How can I do this?
I am using Java on Android.
I created an extension function in Kotlin; you can use this, or write a similar function in Java with the same logic:
fun Bitmap.isColored(): Boolean {
    for (x in 0 until this.width) {
        for (y in 0 until this.height) {
            val color = this.getColor(x, y)
            val blue = color.blue()
            val red = color.red()
            val green = color.green()
            if (blue != red || blue != green || red != green) {
                return true
            }
        }
    }
    return false
}
To use the extension function, just call isColored = yourBitmap.isColored().
Edit: java function
@RequiresApi(Build.VERSION_CODES.Q)
boolean isColored(Bitmap bitmap) {
    for (int x = 0; x < bitmap.getWidth(); x++) {
        for (int y = 0; y < bitmap.getHeight(); y++) {
            Color color = bitmap.getColor(x, y);
            float blue = color.blue();
            float red = color.red();
            float green = color.green();
            Log.d("blue: ", Float.toString(blue));
            Log.d("red: ", Float.toString(red));
            Log.d("green: ", Float.toString(green));
            if (blue != red || blue != green || red != green) {
                return true;
            }
        }
    }
    return false;
}
Edit 2: Main activity in java showing its use
public class MainActivity2 extends AppCompatActivity {

    @RequiresApi(api = Build.VERSION_CODES.Q)
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main2);

        InputStream ins = getResources().openRawResource(
                getResources().getIdentifier(
                        "image",
                        "raw", getPackageName()
                )
        );
        Bitmap bitmap = BitmapFactory.decodeStream(ins);
        Log.d("isColored: ", "" + isColored(bitmap));
    }

    @RequiresApi(Build.VERSION_CODES.Q)
    boolean isColored(Bitmap bitmap) {
        for (int x = 0; x < bitmap.getWidth(); x++) {
            for (int y = 0; y < bitmap.getHeight(); y++) {
                Color color = bitmap.getColor(x, y);
                float blue = color.blue();
                float red = color.red();
                float green = color.green();
                Log.d("blue: ", Float.toString(blue));
                Log.d("red: ", Float.toString(red));
                Log.d("green: ", Float.toString(green));
                if (blue != red || blue != green || red != green) {
                    return true;
                }
            }
        }
        return false;
    }
}
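Note that Bitmap.getColor() is only available from API 29 (hence the @RequiresApi(Q) annotations above). On older devices you can read the packed int from getPixel() and compare the channels with the static Color helpers; a small sketch of that variant:
// Works below API 29: compare the packed RGB channels returned by getPixel().
boolean isColoredCompat(Bitmap bitmap) {
    for (int x = 0; x < bitmap.getWidth(); x++) {
        for (int y = 0; y < bitmap.getHeight(); y++) {
            int pixel = bitmap.getPixel(x, y);
            int red = Color.red(pixel);
            int green = Color.green(pixel);
            int blue = Color.blue(pixel);
            if (red != green || red != blue || green != blue) {
                return true;
            }
        }
    }
    return false;
}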
I was getting out of memory errors loading custom images. I read https://developer.android.com/training/displaying-bitmaps/load-bitmap.html for assistance.
I'm following the example to decode the stream to get size information first, then decode. Still crashing on that first decoding. Is there a way around this?
java.lang.OutOfMemoryError: Failed to allocate a 48771084 byte allocation with 16776928 free bytes and 25MB until OOM
BackgroundImageManager.java, line 84
1 dalvik.system.VMRuntime.newNonMovableArray Native Method
2 android.graphics.BitmapFactory.nativeDecodeStream Native Method
3 android.graphics.BitmapFactory.decodeStreamInternal BitmapFactory.java, line 882
4 android.graphics.BitmapFactory.decodeStream BitmapFactory.java, line 858
5 android.graphics.BitmapFactory.decodeStream BitmapFactory.java, line 896
6 com.myapp.Utils.BackgroundImageManager.background BackgroundImageManager.java, line 8
public class BackgroundImageManager {

    private final static String TAG = BackgroundImageManager.class.getSimpleName();
    private static InputStream currentBackgroundImage;

    public static int calculateInSampleSize(
            BitmapFactory.Options options, int reqWidth, int reqHeight) {
        // Raw height and width of image
        final int height = options.outHeight;
        final int width = options.outWidth;
        int inSampleSize = 1;
        if (height > reqHeight || width > reqWidth) {
            final int halfHeight = height / 2;
            final int halfWidth = width / 2;
            // Calculate the largest inSampleSize value that is a power of 2 and keeps both
            // height and width larger than the requested height and width.
            while ((halfHeight / inSampleSize) >= reqHeight
                    && (halfWidth / inSampleSize) >= reqWidth) {
                inSampleSize *= 2;
            }
        }
        Log.v("Biscuit-Sample", String.valueOf(inSampleSize));
        if (inSampleSize < 4) {
            inSampleSize = 4;
        }
        return inSampleSize;
    }

    public static Drawable background(Context context, Store store) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        String bgUri = null;
        int bgId = 0;
        if (store != null) {
            bgUri = store.backgroundImageURI;
            bgId = store.backgroundImageNumber;
        }
        if (currentBackgroundImage != null) {
            try {
                currentBackgroundImage.close();
                Log.v(TAG, "Current background image closed.");
            } catch (IOException e) {
                Log.e(TAG, "Could not close background image.", e);
            }
        }
        if (bgUri != null && !bgUri.isEmpty()) {
            try {
                BitmapFactory.Options options = new BitmapFactory.Options();
                options.inJustDecodeBounds = true;
                Activity activity = (Activity) context;
                Display display = activity.getWindowManager().getDefaultDisplay();
                Point size = new Point();
                display.getSize(size);
                int width = size.x;
                int height = size.y;
                BitmapFactory.decodeStream( context.getContentResolver().openInputStream(Uri.parse(bgUri)) );
                options.inSampleSize = BackgroundImageManager.calculateInSampleSize(options, width, height);
                Bitmap bitmap = BitmapFactory.decodeStream( context.getContentResolver().openInputStream(Uri.parse(bgUri)) );
                Drawable d = new BitmapDrawable(context.getResources(), bitmap);
                return d;
            } catch (FileNotFoundException e) {
                Log.e(TAG, "Custom background image file could not be found.", e);
            } catch (IOException e) {
                Log.e(TAG, "Could not close custom background image after creating drawable", e);
            }
        }
        if (bgId != 0) {
            try {
                return context.getResources().getDrawable(bgId);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        return context.getResources().getDrawable(R.drawable.bg_default);
    }
To handle bitmaps you can use one of the many open-source libraries available, e.g. Fresco.
As to your issue: first, you are decoding the same bitmap twice.
BitmapFactory.decodeStream( context.getContentResolver().openInputStream(Uri.parse(bgUri)) );
options.inSampleSize = BackgroundImageManager.calculateInSampleSize(options, width, height);
Bitmap bitmap = BitmapFactory.decodeStream( context.getContentResolver().openInputStream(Uri.parse(bgUri)) );
This is probably a wrong copy/paste. In the first line the bitmap is decoded and never used. Delete the first BitmapFactory.decodeStream.
The problem lies here:
Bitmap bitmap = BitmapFactory.decodeStream( context.getContentResolver().openInputStream(Uri.parse(bgUri)) );
It should be:
Bitmap bitmap = BitmapFactory.decodeStream( context.getContentResolver().openInputStream(Uri.parse(bgUri)), null, options);
The options object has to be passed to the method call in order to be used.
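Putting both observations together: the bounds-only pass is still useful, but the options object has to be passed to it (so the measured dimensions are actually captured) and inJustDecodeBounds has to be reset before the real decode. A sketch of the corrected sequence:
BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
// First pass: only fills options.outWidth/outHeight, returns null.
BitmapFactory.decodeStream(context.getContentResolver().openInputStream(Uri.parse(bgUri)), null, options);
options.inSampleSize = BackgroundImageManager.calculateInSampleSize(options, width, height);
options.inJustDecodeBounds = false;
// Second pass: the actual decode, downsampled by inSampleSize.
Bitmap bitmap = BitmapFactory.decodeStream(context.getContentResolver().openInputStream(Uri.parse(bgUri)), null, options);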
A better way to manage images is with the Picasso library, because it manages cache and RAM and therefore avoids OutOfMemory crashes.
Example: Picasso.with(Context).load("your_url").into(yourImageView);
More info here:
Picasso library
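Since the crash comes from decoding the full-size bitmap, it also helps to let Picasso downscale the image for you with resize()/centerInside(); the target size below is just an example:
// Let Picasso decode a downscaled bitmap instead of the full-size one.
Picasso.with(context)
        .load(Uri.parse(bgUri))
        .resize(1080, 1920)   // pick a size that fits your layout
        .centerInside()
        .into(yourImageView);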
I'm writing an app which uses KNearest. I wrote code to train the model, but every time the app restarts I have to train the data again, so I would like to save the training data to SharedPreferences once and reuse it afterwards.
I know that I must convert the Mat to byte[] and then to String, but decoding is not working; I get this error:
(layout == ROW_SAMPLE && responses.rows == nsamples)
|| (layout == COL_SAMPLE && responses.cols == nsamples)
in function void cv::ml::TrainDataImpl::setData(cv::InputArray,
int, cv::InputArray, cv::InputArray,
cv::InputArray, cv::InputArray, cv::InputArray, cv::InputArray)
Code:
protected Void doInBackground(Void... args) {
    // Constants.TRAIN_SAMPLES = 10
    Mat trainData = new Mat(0, 200 * 200, CvType.CV_32FC1); // 0 x 40 000
    Mat trainClasses = new Mat(Constants.TRAIN_SAMPLES, 1, CvType.CV_32FC1); // 10 x 1
    float[] myint = new float[Constants.TRAIN_SAMPLES + 1];
    for (i = 1; i <= Constants.TRAIN_SAMPLES; i++)
        myint[i] = (float) i;
    trainClasses.put(0, 0, myint);
    KNearest knn = KNearest.create();

    String val = " ";
    val = sharedPref.getString("key", " ");
    // empty SharedPreferences
    if (val.equals(" ")) {
        // get all images from external storage
        for (i = 1; i <= Constants.TRAIN_SAMPLES; i++) {
            String photoPath = Environment.getExternalStorageDirectory().toString() + "/ramki/ramka_" + i + ".png";
            BitmapFactory.Options options = new BitmapFactory.Options();
            options.inPreferredConfig = Bitmap.Config.ARGB_8888;
            Bitmap bitmap = BitmapFactory.decodeFile(photoPath, options);
            Utils.bitmapToMat(bitmap, img);
            if (img.channels() == 3) {
                Imgproc.cvtColor(img, img, Imgproc.COLOR_RGB2GRAY);
            } else if (img.channels() == 4) {
                Imgproc.cvtColor(img, img, Imgproc.COLOR_RGBA2GRAY);
            }
            Imgproc.resize(img, img, new Size(200, 200));
            img.convertTo(img, CvType.CV_32FC1);
            img = img.reshape(1, 1); // 1 x 40 000 ( 200x200 )
            trainData.push_back(img);
            publishProgress(i);
        }
        trainData.convertTo(trainData, CvType.CV_8U);
        // save this trainData (Mat) to SharedPreferences
        saveMatToPref(trainData);
    } else {
        // get trainData from SharedPreferences
        val = sharedPref.getString("key", " ");
        byte[] data = Base64.decode(val, Base64.DEFAULT);
        trainData.convertTo(trainData, CvType.CV_8U);
        trainData.put(0, 0, data);
    }
    trainData.convertTo(trainData, CvType.CV_32FC1);

    knn.train(trainData, Ml.ROW_SAMPLE, trainClasses);
    trainClasses.release();
    trainData.release();
    img.release();
    onPostExecute();
    return null;
}

public void saveMatToPref(Mat mat) {
    if (mat.isContinuous()) {
        int cols = mat.cols();
        int rows = mat.rows();
        byte[] data = new byte[cols * rows];
        // there, data contains {0,0,0,0,0,0 ..... } 400 000 items
        mat.get(0, 0, data);
        String dataString = new String(Base64.encode(data, Base64.DEFAULT));
        SharedPreferences.Editor mEdit1 = sharedPref.edit();
        mEdit1.putString("key", dataString);
        mEdit1.commit();
    } else {
        Log.i(TAG, "Mat not continuous.");
    }
}
When I decode, my trainData looks like this:
Mat [ 0*40000*CV_32FC1 ..]
but it should be:
Mat [ 10*40000*CV_32FC1 ..]
Can anybody help me to encode and decode the Mat? Thanks for any help.
As @Miki mentioned, the problem was in the types. Now it works, but only with a Mat of around 200 x 40 000 in my case; if it's bigger, I get an OutOfMemory exception...
String val = " ";
val = sharedPref.getString("key", " ");
// empty SharedPreferences
if ( ! val.equals(" ")) {
    // get all images from external storage
    for (i = 1; i <= Constants.TRAIN_SAMPLES; i++) {
        String photoPath = Environment.getExternalStorageDirectory().toString() + "/ramki/ramka_" + i + ".png";
        BitmapFactory.Options options = new BitmapFactory.Options();
        options.inPreferredConfig = Bitmap.Config.ARGB_8888;
        Bitmap bitmap = BitmapFactory.decodeFile(photoPath, options);
        Utils.bitmapToMat(bitmap, img);
        if (img.channels() == 3) {
            Imgproc.cvtColor(img, img, Imgproc.COLOR_RGB2GRAY);
        } else if (img.channels() == 4) {
            Imgproc.cvtColor(img, img, Imgproc.COLOR_RGBA2GRAY);
        }
        Imgproc.resize(img, img, new Size(200, 200));
        img.convertTo(img, CvType.CV_32FC1);
        img = img.reshape(1, 1); // 1 x 40 000 ( 200x200 )
        trainData.push_back(img);
        publishProgress(i);
    }
    // save this trainData (Mat) to SharedPreferences
    saveMatToPref(trainData);
} else {
    // get trainData from SharedPreferences
    val = sharedPref.getString("key", " ");
    byte[] data = Base64.decode(val, Base64.DEFAULT);
    trainData = new Mat(Constants.TRAIN_SAMPLES, 200 * 200, CvType.CV_32FC1);
    float[] f = toFloatArray(data);
    trainData.put(0, 0, f);
}
knn.train(trainData, Ml.ROW_SAMPLE, trainClasses);

public void saveMatToPref(Mat mat) {
    if (mat.isContinuous()) {
        int size = (int) (mat.total() * mat.channels());
        float[] data = new float[size];
        byte[] b = new byte[size];
        mat.get(0, 0, data);
        b = FloatArray2ByteArray(data);
        String dataString = new String(Base64.encode(b, Base64.DEFAULT));
        SharedPreferences.Editor mEdit1 = sharedPref.edit();
        mEdit1.putString("key", dataString);
        mEdit1.commit();
    } else {
        Log.i(TAG, "Mat not continuous.");
    }
}

private static float[] toFloatArray(byte[] bytes) {
    ByteBuffer buffer = ByteBuffer.wrap(bytes);
    FloatBuffer fb = buffer.asFloatBuffer();
    float[] floatArray = new float[fb.limit()];
    fb.get(floatArray);
    return floatArray;
}

public static byte[] FloatArray2ByteArray(float[] values) {
    ByteBuffer buffer = ByteBuffer.allocate(4 * values.length);
    for (float value : values)
        buffer.putFloat(value);
    return buffer.array();
}
If someone has a better solution, please add it.
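One possible alternative (my own suggestion, not part of the accepted fix above) is to skip Base64 and SharedPreferences entirely and stream the float data to a private binary file, which avoids building the huge String in memory. A rough sketch, with the file name and dimensions as placeholders:
// Sketch: persist the Mat's float data to a private binary file instead of SharedPreferences.
public void saveMatToFile(Context context, Mat mat) throws IOException {
    float[] data = new float[(int) (mat.total() * mat.channels())];
    mat.get(0, 0, data);
    DataOutputStream out = new DataOutputStream(
            new BufferedOutputStream(context.openFileOutput("traindata.bin", Context.MODE_PRIVATE)));
    for (float f : data) {
        out.writeFloat(f);
    }
    out.close();
}

public Mat loadMatFromFile(Context context, int rows, int cols) throws IOException {
    float[] data = new float[rows * cols];
    DataInputStream in = new DataInputStream(
            new BufferedInputStream(context.openFileInput("traindata.bin")));
    for (int i = 0; i < data.length; i++) {
        data[i] = in.readFloat();
    }
    in.close();
    Mat mat = new Mat(rows, cols, CvType.CV_32FC1);
    mat.put(0, 0, data);
    return mat;
}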
I've already read a lot of posts and still can't solve this.
I have an app with a problem related to OOM. It shows a picture stored on a server (PNG, approximately 1.33 MB, 3072*1728 pixels). My system heap size is 125 MB,
obtained with: long maxMemory = rt.maxMemory();
I start a thread (AsyncTask) to decode the image and use a handler to manage it.
I also downscale the picture when it is too big, and the image does show on the local device.
But it still reports Grow heap (frag case) to 10.389MB for 2654224-byte allocation,
and it takes 2~10 seconds to collect the garbage.
That's a bad user experience. Has anyone met the same problem? Any suggestions?
public class DownloadImageTask extends AsyncTask<String, Void, Bitmap> {

    public static String TAG = "DownloadImageTask";

    public enum DOWNLOAD_STATE {
        eIdle, eProgressing, eError, eSuccess,
    }

    private DOWNLOAD_STATE state = DOWNLOAD_STATE.eIdle;
    private Bitmap dlBitmap;

    public DownloadImageTask(String imageUrl) {
        execute(imageUrl);
    }

    public DOWNLOAD_STATE getDownloadState() {
        return state;
    }

    public Bitmap getDownloadBitmap() {
        return dlBitmap;
    }

    public Drawable getDownloadDrawable() {
        @SuppressWarnings("deprecation")
        Drawable dlDrawable = new BitmapDrawable(dlBitmap);
        return dlDrawable;
    }

    protected Bitmap doInBackground(String... urls) {
        String urldisplay = urls[0];
        try {
            dlBitmap = getBitmap(urldisplay);
            if (dlBitmap == null)
                state = DOWNLOAD_STATE.eError;
            else
                state = DOWNLOAD_STATE.eSuccess;
        } catch (Exception e) {
            dlBitmap = null;
            state = DOWNLOAD_STATE.eError;
        }
        return dlBitmap;
    }

    protected void onPostExecute(Bitmap result) {
        result = null;
    }

    public Bitmap getBitmap(String urldisplay) {
        try {
            Log.d(TAG, "getBitmap File");
            InputStream in = new java.net.URL(urldisplay).openStream();
            if (in == null)
                Log.d(TAG, "in null");
            // first decode: get the width & height of the picture without loading it into memory
            BitmapFactory.Options opts = new BitmapFactory.Options();
            opts.inJustDecodeBounds = true;
            BitmapFactory.decodeStream(in, null, opts);
            //in.close();
            // compute the sample size (a power of 2 is best)
            int sampleSize = computeSampleSize(opts, -1, 1920 * 1080); // monitor limitation
            Log.d(TAG, "samplesize =" + sampleSize);
            // second decode: set the sample size and generate the thumbnail
            in = new java.net.URL(urldisplay).openStream();
            opts = new BitmapFactory.Options();
            opts.inPreferredConfig = Bitmap.Config.RGB_565;
            opts.inSampleSize = sampleSize;
            opts.inInputShareable = true;
            opts.inPurgeable = true;
            Bitmap bmp = BitmapFactory.decodeStream(in, null, opts);
            in.close();
            return bmp;
        } catch (Exception err) {
            Log.e(TAG, "error: " + err.toString());
            return null;
        }
    }

    public static int computeSampleSize(BitmapFactory.Options options,
                                        int minSideLength, int maxNumOfPixels) {
        Log.d(TAG, "computeSampleSize");
        int initialSize = computeInitialSampleSize(options, minSideLength,
                maxNumOfPixels);
        int roundedSize;
        if (initialSize <= 8) {
            roundedSize = 1;
            while (roundedSize < initialSize) {
                roundedSize <<= 1;
            }
        } else {
            roundedSize = (initialSize + 7) / 8 * 8;
        }
        return roundedSize;
    }

    private static int computeInitialSampleSize(BitmapFactory.Options options,
                                                int minSideLength, int maxNumOfPixels) {
        Log.d(TAG, "computeInitialSampleSize");
        try {
            double w = options.outWidth;
            double h = options.outHeight;
            int lowerBound = (maxNumOfPixels == -1) ? 1 : (int) Math.ceil(Math
                    .sqrt(w * h / maxNumOfPixels));
            int upperBound = (minSideLength == -1) ? 128 : (int) Math.min(
                    Math.floor(w / minSideLength), Math.floor(h / minSideLength));
            if (upperBound < lowerBound) {
                // return the larger one when there is no overlapping zone.
                return lowerBound;
            }
            if ((maxNumOfPixels == -1) && (minSideLength == -1)) {
                return 1;
            } else if (minSideLength == -1) {
                return lowerBound;
            } else {
                return upperBound;
            }
        } catch (Exception e) {
            Log.d(TAG, "computeInitialSampleSize err:" + e.toString());
            return 1;
        }
    }
}
And I show the picture in an ImageSwitcher:
myImageSwitcher.setImageDrawable(task.getDownloadDrawable());
Hi, I am developing an app that captures images and emails them. Capturing images works fine on a Samsung Galaxy and a Sony Ericsson Xperia, but it's not working on HTC devices. Does anyone know why? Here is my code for capturing images:
try {
    String fileName = Image_name + ".jpg";
    // create parameters for Intent with filename
    ContentValues values = new ContentValues();
    values.put(MediaStore.Images.Media.TITLE, fileName);
    values.put(MediaStore.Images.Media.DESCRIPTION, "Image capture by camera");
    // imageUri is the current activity attribute, define and save it for later usage (also in onSaveInstanceState)
    outuri = getContentResolver().insert(
            MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values);
    Intent cameraIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
    //outuri = Uri.fromFile(photo);
    cameraIntent.putExtra(MediaStore.EXTRA_OUTPUT, outuri);
    cameraIntent.putExtra("return-data", true);
    startActivityForResult(cameraIntent, 2);
} catch (Exception e) {
    Toast.makeText(preview.this, "" + e, Toast.LENGTH_LONG).show();
}
And here is the code that I use to retrieve the images:
path = convertImageUriToFile(outuri, preview.this).getAbsolutePath();
arr.add(path);
try {
    bitmap = getImage(path);
} catch (IOException e) {
    e.printStackTrace();
}

public static File convertImageUriToFile(Uri imageUri, Activity activity) {
    Cursor cursor = null;
    try {
        String[] proj = {MediaStore.Images.Media.DATA, MediaStore.Images.Media._ID, MediaStore.Images.ImageColumns.ORIENTATION};
        cursor = activity.managedQuery(imageUri, proj, // Which columns to return
                null, // WHERE clause; which rows to return (all rows)
                null, // WHERE clause selection arguments (none)
                null); // Order-by clause (ascending by name)
        int file_ColumnIndex = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
        int orientation_ColumnIndex = cursor.getColumnIndexOrThrow(MediaStore.Images.ImageColumns.ORIENTATION);
        if (cursor.moveToFirst()) {
            String orientation = cursor.getString(orientation_ColumnIndex);
            return new File(cursor.getString(file_ColumnIndex));
        }
        return null;
    } finally {
        if (cursor != null) {
            cursor.close();
        }
    }
}

public Bitmap getImage(String path) throws IOException {
    BitmapFactory.Options options = new BitmapFactory.Options();
    options.inJustDecodeBounds = true;
    BitmapFactory.decodeFile(path, options);
    Bitmap targetBitmap = null;

    int srcWidth = options.outWidth;
    int srcHeight = options.outHeight;
    int[] newWH = new int[2];
    newWH[0] = 1024;
    newWH[1] = (1024 * srcHeight) / srcWidth;

    int inSampleSize = 1;
    while (srcWidth / 2 > newWH[0]) {
        srcWidth /= 2;
        srcHeight /= 2;
        inSampleSize *= 2;
    }
    // float desiredScale = (float) newWH[0] / srcWidth;

    // Decode with inSampleSize
    options.inJustDecodeBounds = false;
    options.inDither = false;
    options.inSampleSize = inSampleSize;
    options.inScaled = false;
    options.inPreferredConfig = Bitmap.Config.ARGB_8888;
    Bitmap sampledSrcBitmap = BitmapFactory.decodeFile(path, options);

    ExifInterface exif = new ExifInterface(path);
    String s = exif.getAttribute(ExifInterface.TAG_ORIENTATION);
    System.out.println("Orientation>>>>>>>>>>>>>>>>>>>>" + s);

    Matrix matrix = new Matrix();
    float rotation = rotationForImage(preview.this, Uri.fromFile(new File(path)));
    if (rotation != 0f) {
        matrix.preRotate(rotation);
    }

    int newh = (w * sampledSrcBitmap.getHeight()) / sampledSrcBitmap.getWidth();
    Bitmap r = Bitmap.createScaledBitmap(sampledSrcBitmap, w, newh, true);
    Bitmap resizedBitmap = Bitmap.createBitmap(
            r, 0, 0, w, newh, matrix, true);
    return resizedBitmap;
}
}
Well, there's a known bug with Intent.putExtra(MediaStore.EXTRA_OUTPUT) on some devices, and it causes a crash in the camera app.
Check out the answer I got when asking the same question: https://stackoverflow.com/a/10613299/1056359
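I don't know the exact workaround in the linked answer, but a defensive pattern that is often used (an assumption on my part) is not to rely solely on EXTRA_OUTPUT: in onActivityResult, fall back to whatever Uri the camera app actually returned before querying your MediaStore entry:
// Sketch (assumption, not taken from the linked answer): prefer the Uri returned
// by the camera app when it ignores or mishandles EXTRA_OUTPUT.
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == 2 && resultCode == RESULT_OK) {
        Uri resultUri = outuri;                       // the Uri we asked the camera to write to
        if (data != null && data.getData() != null) {
            resultUri = data.getData();               // what the camera actually returned
        }
        // ...continue with convertImageUriToFile(resultUri, this) as before
    }
}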