Why is my Bitmap null when feeding through tensorflow pipeline? - java

Using an AR glass' API to get its camera frames to do object detection
I tried converting the frames (ByteBuffer) to a bitmap and feeding it into my object detection pipeline. I realised there was no detection going on, and after debugging I found that my bitmap is being read as null. Is it possible to feed the ByteBuffer straight into the interpreter and do object detection? If not — my byte buffer is valid and tested; simply converting it to a bitmap produces a toast message saying it is invalid and null.
package com.example.jjsdkcameratest;
import androidx.appcompat.app.AppCompatActivity;
import java.io.IOException;
import java.nio.ByteBuffer;
import android.app.Activity;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.RectF;
import android.os.Bundle;
import android.os.FileUtils;
import android.util.Log;
import android.view.SurfaceView;
import android.widget.Button;
import com.example.jjsdkcameratest.ml.SsdMobilenetV11Metadata1;
import com.jorjin.jjsdk.camera.CameraManager;
import com.jorjin.jjsdk.camera.FrameListener;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.widget.ImageView;
import android.widget.Toast;
import org.tensorflow.lite.DataType;
import org.tensorflow.lite.support.common.FileUtil;
import org.tensorflow.lite.support.image.ImageProcessor;
import org.tensorflow.lite.support.image.TensorImage;
import org.tensorflow.lite.support.image.ops.ResizeOp;
import org.tensorflow.lite.support.image.ImageProcessor;
import org.tensorflow.lite.support.image.TensorImage;
import org.tensorflow.lite.support.tensorbuffer.TensorBuffer;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
public class MainActivity extends Activity {
private CameraManager cameraManager;
private SurfaceView cameraSurface;
private Context context;
private ImageView imageView;
private SsdMobilenetV11Metadata1 model;
private ImageProcessor imageProcessor;
private Paint paint;
private List<String> labels;
private List<Integer> colors = Arrays.asList(Color.BLUE, Color.GREEN, Color.CYAN, Color.GRAY, Color.BLACK, Color.DKGRAY, Color.MAGENTA, Color.YELLOW, Color.RED);
private FrameListener frameListener = (buffer, width, height, format) -> {
// Access the data buffer of the previewing frame here
byte[] bytes = buffer.array();
if (bytes == null || bytes.length == 0) {
Log.e("MainActivity", "Frame data is invalid or empty");
Toast.makeText(context, "Error: Frame data not valid", Toast.LENGTH_SHORT).show();
return;
}
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
if (bitmap == null) {
Log.e("MainActivity", "Failed to decode frame data into a bitmap");
Toast.makeText(context, "Error: Failed to decode frame data into a bitmap", Toast.LENGTH_SHORT).show();
}
// Creates inputs for reference.
try {
TensorImage image = TensorImage.fromBitmap(bitmap);
image = imageProcessor.process(image);
// Runs model inference and gets result.
SsdMobilenetV11Metadata1.Outputs outputs = model.process(image);
float[] locations = outputs.getLocationsAsTensorBuffer().getFloatArray();
float[] classes = outputs.getClassesAsTensorBuffer().getFloatArray();
float[] scores = outputs.getScoresAsTensorBuffer().getFloatArray();
float[] numberOfDetections = outputs.getNumberOfDetectionsAsTensorBuffer().getFloatArray();
Bitmap mutable = bitmap.copy(Bitmap.Config.ARGB_8888, true);
Canvas canvas = new Canvas(mutable);
int h = mutable.getHeight();
int w = mutable.getWidth();
paint.setTextSize(h/15f);
paint.setStrokeWidth(h/85f);
int x = 0;
for (int index = 0; index < scores.length; index++) {
float fl = scores[index];
x = index;
x *= 4;
if (fl > 0.5) {
paint.setColor(colors.get(index));
paint.setStyle(Paint.Style.STROKE);
canvas.drawRect(new RectF(locations[x+1]*w, locations[x]*h, locations[x+3]*w, locations[x+2]*h), paint);
paint.setStyle(Paint.Style.FILL);
canvas.drawText(labels.get((int) classes[index]) + " " + Float.toString(fl), locations[x+1]*w, locations[x]*h, paint);
}
}
if (imageView != null) {
imageView.setImageBitmap(bitmap);
}
} catch (NullPointerException npe) {
Log.e("MainActivity", "Null pointer exception occurred", npe);
Toast.makeText(this, "Null pointer exception occurred", Toast.LENGTH_SHORT);
}
};
public ImageView getImageView() {
if (imageView == null) {
imageView = findViewById(R.id.image_view);
}
return imageView;
}
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
try {
labels = FileUtil.loadLabels(this, "labels.txt");
} catch (IOException e) {
e.printStackTrace();
}
context = this;
getImageView();
cameraSurface = findViewById(R.id.surface_camera);
cameraManager = new CameraManager(context);
cameraManager.addSurfaceHolder(cameraSurface.getHolder());
cameraManager.setCameraFrameListener(frameListener);
cameraManager.setResolutionIndex(0);
cameraManager.startCamera(CameraManager.COLOR_FORMAT_RGBA);
imageProcessor = new ImageProcessor.Builder().add(new ResizeOp(300, 300, ResizeOp.ResizeMethod.BILINEAR)).build();
try {
SsdMobilenetV11Metadata1 model = SsdMobilenetV11Metadata1.newInstance(this);
} catch (IOException e) {
e.printStackTrace();
}
}
#Override
protected void onDestroy() {
super.onDestroy();
// Releases model resources if no longer used.
model.close();
cameraManager.stopCamera();
}
}

Related

Flappy Bird Game I am getting error while adding pipe

I have a problem. I'm working on a Flappy Bird game.
I added the bird, tested it, and there was no problem.
But when I test after adding the pipes, I encounter the error below.
It gives an error on line 76, and I could not solve it.
Since my Java coding is not very good, I searched a lot for the error but couldn't find the cause. That's why I felt the need to open a topic here. I would be very happy if those who have knowledge could review it and share information about the problem.
GameView.java
package com.woxiapps.hopbird;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.os.Handler;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.logging.LogRecord;
public class GameView extends View {
private Bird bird;
private android.os.Handler handler;
private Runnable r;
private ArrayList<Pipe> arrPipes;
private int sumpipe, distance;
public GameView(Context context, #Nullable AttributeSet attrs) {
super(context, attrs);
initBird();
initPipe();
handler = new Handler();
r = new Runnable() {
#Override
public void run() {
invalidate();
}
};
}
private void initPipe() {
sumpipe = 6;
distance = 300*Constants.SCREEN_HEIGHT/1920;
arrPipes = new ArrayList<>();
for (int i = 0; i < sumpipe/2; i++){
if (i < sumpipe/2) {
this.arrPipes.add(new Pipe(Constants.SCREEN_WIDTH+i*((Constants.SCREEN_WIDTH+200*Constants.SCREEN_WIDTH/1080)/(sumpipe/2)),
0,200*Constants.SCREEN_WIDTH/1080, Constants.SCREEN_WIDTH/2));
this.arrPipes.get(this.arrPipes.size()-1).setBm(BitmapFactory.decodeResource(this.getResources(),R.drawable.pipe2));
this.arrPipes.get(this.arrPipes.size()-1).randomY();
}else {
this.arrPipes.add(new Pipe(this.arrPipes.get(i-sumpipe/2).getX(),this.arrPipes.get(i-sumpipe/2).getY()
+this.arrPipes.get(i-sumpipe/2).getHeight() + this.distance, 200*Constants.SCREEN_WIDTH/1080, Constants.SCREEN_HEIGHT/2));
this.arrPipes.get(this.arrPipes.size()-1).setBm(BitmapFactory.decodeResource(this.getResources(), R.drawable.pipe1));
}
}
}
private void initBird() {
bird = new Bird();
bird.setWidth(100*Constants.SCREEN_WIDTH/1080);
bird.setHeight(100*Constants.SCREEN_HEIGHT/1920);
bird.setX(100*Constants.SCREEN_WIDTH/1080);
bird.setY(Constants.SCREEN_HEIGHT/2-bird.getHeight()/2);
ArrayList<Bitmap> ArrBms = new ArrayList<>();
ArrBms.add(BitmapFactory.decodeResource(this.getResources(),R.drawable.bird1));
ArrBms.add(BitmapFactory.decodeResource(this.getResources(),R.drawable.bird2));
bird.setArrBms(ArrBms);
}
public void draw(Canvas canvas){
super.draw(canvas);
bird.draw(canvas);
for (int i = 0; i < sumpipe; i++) {
if (this.arrPipes.get(i).getX() < -arrPipes.get(i).getWidth()){
this.arrPipes.get(i).setX(Constants.SCREEN_WIDTH);
if(i < sumpipe/2){
arrPipes.get(i).randomY();
}else {
arrPipes.get(i).setY(this.arrPipes.get(i-sumpipe/2).getY()
+this.arrPipes.get(i-sumpipe/2).getHeight() + this.distance);
}
}
this.arrPipes.get(i).draw(canvas);
}
handler.postDelayed(r, 10);
}
#Override
public boolean onTouchEvent(MotionEvent event) {
if (event.getAction() == MotionEvent.ACTION_DOWN) {
bird.setDrop(-15);
}
return true;
}
}
enter image description here

Label number 3 mismatch the shape on axis 1

I'm currently trying to create an image classification Android app using a TensorFlow Lite model. When I open the Android App and try to perform classification, I keep getting this error message
java.lang.IllegalArgumentException: Label number 3 mismatch the shape on axis 1
Here's the content inside my label file
0 A
1 B
2 C
And here's the code of my Classifier class:
package com.ukzn.signchat;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Bitmap;
import android.media.Image;
import android.util.Log;
import androidx.camera.core.ImageProxy;
import org.tensorflow.lite.DataType;
import org.tensorflow.lite.Interpreter;
import org.tensorflow.lite.support.common.FileUtil;
import org.tensorflow.lite.support.common.TensorProcessor;
import org.tensorflow.lite.support.common.ops.NormalizeOp;
import org.tensorflow.lite.support.image.ImageProcessor;
import org.tensorflow.lite.support.image.TensorImage;
import org.tensorflow.lite.support.image.ops.ResizeOp;
import org.tensorflow.lite.support.image.ops.ResizeWithCropOrPadOp;
import org.tensorflow.lite.support.image.ops.Rot90Op;
import org.tensorflow.lite.support.label.TensorLabel;
import org.tensorflow.lite.support.tensorbuffer.TensorBuffer;
import java.io.IOException;
import java.nio.MappedByteBuffer;
import java.util.List;
import java.util.Map;
public class Classifier {
private Context context;
Interpreter tflite;
final String ASSOCIATED_AXIS_LABELS = "labels.txt";
List<String> associatedAxisLabels = null;
public Classifier(Context context) {
this.context = context;
// load labels to a List<String>
try {
associatedAxisLabels = FileUtil.loadLabels(context, ASSOCIATED_AXIS_LABELS);
} catch (IOException e) {
Log.e("tfliteSupport", "Error reading label file", e);
}
// load model to interpreter
try {
MappedByteBuffer tfliteModel = FileUtil.loadMappedFile(context, "model.tflite");
tflite = new Interpreter(tfliteModel);
} catch (IOException e) {
Log.e("tfliteSupport", "Error reading model", e);
}
}
public String classify(ImageProxy image) {
#SuppressLint("UnsafeExperimentalUsageError")
Image img = image.getImage();
Bitmap bitmap = Utils.toBitmap(img);
int rotation = Utils.getImageRotation(image);
int width = bitmap.getWidth();
int height = bitmap.getHeight();
int size = height > width ? width : height;
ImageProcessor imageProcessor = new ImageProcessor.Builder()
.add(new ResizeWithCropOrPadOp(size, size))
.add(new ResizeOp(224, 224, ResizeOp.ResizeMethod.BILINEAR)) // changed from 128x128
.add(new Rot90Op(rotation))
.build();
TensorImage tensorImage = new TensorImage(DataType.UINT8);
tensorImage.load(bitmap);
tensorImage = imageProcessor.process(tensorImage);
TensorBuffer probabilityBuffer = TensorBuffer.createFixedSize(new int[]{1, 224, 224, 3}, DataType.UINT8);
if (null != tflite) {
tflite.run(tensorImage.getBuffer(), probabilityBuffer.getBuffer());
}
TensorProcessor probabilityProcessor = new TensorProcessor.Builder().add(new NormalizeOp(0, 255)).build();
String result = "";
if (null != associatedAxisLabels) {
// Map of labels and their corresponding probability
TensorLabel labels = new TensorLabel(associatedAxisLabels, probabilityProcessor.process(probabilityBuffer));
// Create a map to access the result based on label
Map<String, Float> floatMap = labels.getMapWithFloatValue();
result = Utils.writeResults(floatMap);
}
return result;
}
}
The classifier is probably based on the MobileNet label format, which requires labels to start from 1. Since your file has 0, 1 and 2, and the 0 entry is ignored, the label count no longer matches the tensor shape on axis 1.

BebopVideoView to Mat

I have come up against a big problem.
I'm trying to convert BebopVideoView to a Mat.
(BebopVideoView is Parrot drone source code.)
But I have been failing at this for several days.
Here is the code.
package com.hyeonjung.dronecontroll.view;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.os.Environment;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import com.parrot.arsdk.arcontroller.ARCONTROLLER_STREAM_CODEC_TYPE_ENUM;
import com.parrot.arsdk.arcontroller.ARControllerCodec;
import com.parrot.arsdk.arcontroller.ARFrame;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
public class BebopVideoView extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "BebopVideoView";
private static final String VIDEO_MIME_TYPE = "video/avc";
private static final int VIDEO_DEQUEUE_TIMEOUT = 33000;
private MediaCodec mMediaCodec;
private Lock mReadyLock;
private boolean mIsCodecConfigured = false;
private ByteBuffer mSpsBuffer;
private ByteBuffer mPpsBuffer;
private ByteBuffer[] mBuffers;
private static final int VIDEO_WIDTH = 640;
private static final int VIDEO_HEIGHT = 368;
public byte[] a;
public Mat k;
public BebopVideoView(Context context) {
super(context);
customInit();
}
public BebopVideoView(Context context, AttributeSet attrs) {
super(context, attrs);
customInit();
}
public BebopVideoView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
customInit();
}
private void customInit() {
mReadyLock = new ReentrantLock();
getHolder().addCallback(this);
}
public void displayFrame(ARFrame frame) {
mReadyLock.lock();
if ((mMediaCodec != null)) {
if (mIsCodecConfigured) {
// Here we have either a good PFrame, or an IFrame
int index = -1;
try {
index = mMediaCodec.dequeueInputBuffer(VIDEO_DEQUEUE_TIMEOUT);
} catch (IllegalStateException e) {
Log.e(TAG, "Error while dequeue input buffer");
}
if (index >= 0) {
ByteBuffer b;
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
b = mMediaCodec.getInputBuffer(index); // fill inputBuffer with valid data
}
else {
b = mBuffers[index]; // fill inputBuffer with valid data
b.clear();
}
if (b != null) {
b.put(frame.getByteData(), 0, frame.getDataSize()); //write to b.
getMat(frame);
saveMat(k);
}
try {
mMediaCodec.queueInputBuffer(index, 0, frame.getDataSize(), 0, 0); //end of stream
} catch (IllegalStateException e) {
Log.e(TAG, "Error while queue input buffer");
}
}
}
// Try to display previous frame
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int outIndex;
try {
outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);
while (outIndex >= 0) {
mMediaCodec.releaseOutputBuffer(outIndex, true);
outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);
}
} catch (IllegalStateException e) {
Log.e(TAG, "Error while dequeue input buffer (outIndex)");
}
}
mReadyLock.unlock();
}
public void configureDecoder(ARControllerCodec codec) {
mReadyLock.lock();
if (codec.getType() == ARCONTROLLER_STREAM_CODEC_TYPE_ENUM.ARCONTROLLER_STREAM_CODEC_TYPE_H264) {
ARControllerCodec.H264 codecH264 = codec.getAsH264();
mSpsBuffer = ByteBuffer.wrap(codecH264.getSps().getByteData());
mPpsBuffer = ByteBuffer.wrap(codecH264.getPps().getByteData());
}
if ((mMediaCodec != null) && (mSpsBuffer != null)) {
configureMediaCodec();
}
mReadyLock.unlock();
}
private void configureMediaCodec() {
MediaFormat format = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT);
format.setByteBuffer("csd-0", mSpsBuffer);
format.setByteBuffer("csd-1", mPpsBuffer);
mMediaCodec.configure(format, getHolder().getSurface(), null, 0);
mMediaCodec.start();
if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.LOLLIPOP) {
mBuffers = mMediaCodec.getInputBuffers();
}
mIsCodecConfigured = true;
}
private void initMediaCodec(String type) {
try {
mMediaCodec = MediaCodec.createDecoderByType(type);
} catch (IOException e) {
Log.e(TAG, "Exception", e);
}
if ((mMediaCodec != null) && (mSpsBuffer != null)) {
configureMediaCodec();
}
}
private void releaseMediaCodec() {
if (mMediaCodec != null) {
if (mIsCodecConfigured) {
mMediaCodec.stop();
mMediaCodec.release();
}
mIsCodecConfigured = false;
mMediaCodec = null;
}
}
#Override
public void surfaceCreated(SurfaceHolder holder) {
mReadyLock.lock();
initMediaCodec(VIDEO_MIME_TYPE);
mReadyLock.unlock();
}
#Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}
#Override
public void surfaceDestroyed(SurfaceHolder holder) {
mReadyLock.lock();
releaseMediaCodec();
mReadyLock.unlock();
}
public void getMat(ARFrame frame) {
k = new Mat();
k.get(150, 150, frame.getByteData());
k.put(150, 150, frame.getByteData());
//or
//byte[] a= new byte[b.remaining()];
//b.get(a);
//k.get(150, 150, a);
//k.put(150, 150, a);
}
public void saveMat (Mat mat) {
Mat mIntermediateMat = new Mat(150, 150, CvType.CV_8UC1);
Imgproc.cvtColor(mat, mIntermediateMat, Imgproc.COLOR_GRAY2BGR);
File path = new File(Environment.getExternalStorageDirectory() + "/data");
path.mkdirs();
File file = new File(path, "image.png");
String filename = file.toString();
Boolean bool = Imgcodecs.imwrite(filename, mIntermediateMat);
if (bool)
Log.i(TAG, "SUCCESS writing image to external storage");
else
Log.i(TAG, "Fail writing image to external storage");
}
}
I think I can get image-related data from ByteBuffer b or frame.getByteData().
I inspected ByteBuffer b and frame.getByteData().
They contained char-typed data with a range of -128 to 127.
I then checked the result of getMat and saveMat, and the result was NULL (Mat k).
What is wrong?
Please help me T.T
If you use a TextureView you can simply grab a bitmap of it and convert it to a Mat. You need to use the TextureView's provided surface rather than the typical SurfaceView holder. This will require some additional refactoring of the mediaCodec lifecycle, but is a fairly trivial change.
public class BebopVideoView extends TextureView implements TextureView.SurfaceTextureListener {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
this.surface = new Surface(surface);
surfaceCreated = true;
}
...
}
And inside of configureMediaCodec use the class level surface captured in onSurfaceTextureAvailable instead....
mediaCodec.configure(format, surface, null, 0);
With a couple other minor tweaks you now have a lot more control over the view. You can do things like setTransform() and more importantly in your case getBitmap:
Mat mat = new Mat();
Utils.bitmapToMat(getBitmap(), mat);

Show The Desired/Specific Frequency

I've made an Android app that visualizes the frequency spectrum of recorded sound. I'm using the package from www.netlib.org/fftpack/jfftpack.tgz to perform the FFT. Each line of the spectrum covers a frequency band given by the formula ((frequency_sample/2)/samples). It is easy to identify the first, second, and third spectrum lines that occur, but above the fifth line it is hard to determine which frequency a line corresponds to.
I want to know whether a specific frequency (for example 200 Hz) occurs, and show the result as a toast or something like that. Is there any good source library I could refer to for this?
Thanks :)
package com.example.agita.stethoscopeandroid;
/**
* Created by Agita on 5/21/2015.
*/
import java.io.BufferedOutputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import ca.uol.aig.fftpack.RealDoubleFFT;
public class RekamActivity extends Activity implements OnClickListener {
Button startRecordingButton, stopRecordingButton;
TextView statusText;
File recordingFile;
boolean isRecording = false;
int frequency = 8000;
int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
private RealDoubleFFT transformer;
int blockSize = 256;
RecordAudio recordTask;
ImageView imageView;
Bitmap bitmap;
Canvas canvas;
Paint paint;
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.rekam);
statusText = (TextView) this.findViewById(R.id.StatusTextView);
startRecordingButton = (Button) this .findViewById(R.id.StartRecordingButton);
stopRecordingButton = (Button) this .findViewById(R.id.StopRecordingButton);
startRecordingButton.setOnClickListener(this);
stopRecordingButton.setOnClickListener(this);
stopRecordingButton.setEnabled(false);
transformer = new RealDoubleFFT(blockSize);
imageView = (ImageView) this.findViewById(R.id.ImageView01);
bitmap = Bitmap.createBitmap(256, 100, Bitmap.Config.ARGB_8888);
canvas = new Canvas(bitmap);
paint = new Paint();
paint.setColor(Color.GREEN);
imageView.setImageBitmap(bitmap);
String recordedAudioFile = getRecoredAudioFile();
File path = new File(Environment.getExternalStorageDirectory()
.getAbsolutePath() + "/Datarekaman/");
path.mkdirs();
try {
recordingFile = File.createTempFile("recording"+recordedAudioFile, ".wav", path);
} catch (IOException e) {
throw new RuntimeException("Couldn't create file on SD card", e);
}
}
private String getRecoredAudioFile() {
String returnAudio;
java.util.Date date= new java.util.Date();
returnAudio = new SimpleDateFormat("yyyyMMdd_HHmmss").format(date.getTime());
return returnAudio;
}
public void onClick(View v) {
if (v == startRecordingButton) {
record();
} else if (v == stopRecordingButton) {
stopRecording();
}
}
public void record() {
startRecordingButton.setEnabled(false);
stopRecordingButton.setEnabled(true);
recordTask = new RecordAudio();
recordTask.execute();
}
public void stopRecording() {
isRecording = false;
}
private class RecordAudio extends AsyncTask<Void, double[], Void> {
#Override
protected Void doInBackground(Void... params) {
isRecording = true;
try {
DataOutputStream dos = new DataOutputStream(
new BufferedOutputStream(new FileOutputStream(
recordingFile)));
int bufferSize = AudioRecord.getMinBufferSize(frequency,
channelConfiguration, audioEncoding);
AudioRecord audioRecord = new AudioRecord(
MediaRecorder.AudioSource.MIC, frequency,
channelConfiguration, audioEncoding, bufferSize);
short[] buffer = new short[blockSize];
double[] toTransform = new double[blockSize];
audioRecord.startRecording();
while (isRecording) {
int bufferReadResult = audioRecord.read(buffer, 0,
blockSize);
for (int i = 0; i < blockSize && i < bufferReadResult; i++) {
dos.writeShort(buffer[i]);
toTransform[i] = (double) buffer[i]/32768.0;
}
transformer.ft(toTransform);
publishProgress(toTransform);
}
audioRecord.stop();
dos.close();
} catch (Throwable t) {
Log.e("AudioRecord", "Recording Failed");
}
return null;
}
/*protected void onProgressUpdate(Integer... progress) {
statusText.setText(progress[0].toString());
}*/
protected void onProgressUpdate (double[]... toTransform) {
canvas.drawColor(Color.BLACK);
for (int i = 0; i < toTransform[0].length; i++) {
int x;
x = i;
int downy = (int) (100 - (toTransform[0][i] * 10));
int upy = 100;
//canvas.drawRect(x * 3, downy, x * 3 + 4, upy, paint);
canvas.drawLine(x, downy, x, upy, paint);
imageView.invalidate();
}
}
protected void onPostExecute(Void result) {
startRecordingButton.setEnabled(true);
stopRecordingButton.setEnabled(false);
}
}
}

image quality becomes bad after many drawBitmap's

My program draws a bitmap on the live wallpaper canvas.
It works, but after some time the image quality becomes very bad (http://img855.imageshack.us/img855/9756/deviceq.png).
Any ideas why?
package com.tripr;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.util.List;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.Paint.Align;
import android.location.Address;
import android.location.Geocoder;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.os.Handler;
import android.service.wallpaper.WallpaperService;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
/*
* This animated wallpaper draws a rotating wireframe cube.
*/
public class MyWallpaperService extends WallpaperService{
private final String TAG = "tripr";
#Override
public void onCreate() {
super.onCreate();
}
#Override
public void onDestroy() {
super.onDestroy();
}
#Override
public Engine onCreateEngine() {
return new CubeEngine(this);
}
class CubeEngine extends Engine implements LocationListener{
private MyWallpaperService mws;
private float xOffset;
private float xStep;
private int xPixels;
private String lastPhotoUrl = "";
private LocationManager lm;
private Bitmap bmp = null;
private String locationName;
private String status = "waiting for location update...";
private final Handler mHandler = new Handler();
private final Runnable mDrawBtmp = new Runnable() {
public void run() {
drawFrame();
}
};
private boolean mVisible;
/**
 * Wires up the engine: grabs the location service, subscribes to network
 * location updates, and kicks off an initial photo fetch from the last
 * known position on a background thread.
 */
CubeEngine(MyWallpaperService mymws) {
    mws = mymws;
    lm = (LocationManager) getSystemService(Context.LOCATION_SERVICE);
    requestLocationUpdates();
    Location lastKnown = lm.getLastKnownLocation(LocationManager.NETWORK_PROVIDER);
    new MyThread(lastKnown).start();
}
//taken from http://p-xr.com/android-tutorial-how-to-parse-read-json-data-into-a-android-listview/
//taken from http://p-xr.com/android-tutorial-how-to-parse-read-json-data-into-a-android-listview/
/**
 * Fetches the given URL and parses the response body as a JSON object.
 * Returns null when the request, the read or the parse fails (all failures
 * are logged rather than thrown).
 */
private JSONObject getJSONfromURL(String url) {
    InputStream is = null;
    // HTTP GET
    try {
        HttpClient httpclient = new DefaultHttpClient();
        HttpGet httpget = new HttpGet(url);
        HttpResponse response = httpclient.execute(httpget);
        HttpEntity entity = response.getEntity();
        is = entity.getContent();
    } catch (Exception e) {
        Log.e(TAG, "Error in http connection " + e.toString());
    }
    // FIX: the original fell through with a null stream and relied on the
    // resulting NullPointerException being swallowed below; bail out early.
    if (is == null) {
        return null;
    }
    // Convert the response to a string.
    String result = "";
    try {
        BufferedReader reader = new BufferedReader(new InputStreamReader(is, "iso-8859-1"), 8);
        StringBuilder sb = new StringBuilder();
        String line;
        while ((line = reader.readLine()) != null) {
            sb.append(line).append("\n");
        }
        result = sb.toString();
    } catch (Exception e) {
        Log.e(TAG, "Error converting result " + e.toString());
    } finally {
        // FIX: close the stream even when reading fails part-way through.
        try {
            is.close();
        } catch (Exception ignored) {
            // best effort — nothing useful to do if close fails
        }
    }
    // Parse the string into a JSON object.
    try {
        return new JSONObject(result);
    } catch (JSONException e) {
        Log.e(TAG, "Error parsing data " + e.toString());
        return null;
    }
}
/** Builds the Flickr photo-search REST URL for the given position and radius (km). */
String getFlickrUrl(double lat, double lon, double radius) {
    StringBuilder url = new StringBuilder("http://api.flickr.com/services/rest/?");
    url.append("method=flickr.photos.search");
    url.append("&api_key=a6d9db5ff2885dd2f8949590e7a44762");
    url.append("&tags=architecture");
    url.append("&lat=").append(lat);
    url.append("&lon=").append(lon);
    url.append("&radius=").append(radius);
    url.append("&radius_units=km");
    url.append("&extras=geo%2Curl_z%2Ctags");
    url.append("&per_page=250");
    url.append("&format=json");
    url.append("&sort=interestingness-desc");
    url.append("&nojsoncallback=1");
    return url.toString();
}
/**
 * Subscribes to network-provider location updates (every 30 s / 10 m).
 * If the provider is disabled we still register — updates will start
 * flowing if the user enables it later — and show a status message.
 */
void requestLocationUpdates() {
    boolean networkEnabled = lm.isProviderEnabled(LocationManager.NETWORK_PROVIDER);
    if (!networkEnabled) {
        status = "locating over network disabled";
        //bmp = null;
    }
    lm.requestLocationUpdates(LocationManager.NETWORK_PROVIDER, 1000 * 30, 10, this);
}
// Unsubscribes this engine from location callbacks (called when the
// wallpaper becomes invisible, to save battery).
void removeUpdates(){
lm.removeUpdates(this);
}
#Override
public void onLocationChanged(Location location) {
MyThread myThread = new MyThread(location);
myThread.start();
}
#Override
public void onProviderDisabled(String provider) {}
#Override
public void onProviderEnabled(String provider) {}
#Override
public void onStatusChanged(String provider, int status, Bundle extras) {}
#Override
public void onDestroy() {
super.onDestroy();
mHandler.removeCallbacks(mDrawBtmp);
}
#Override
public void onVisibilityChanged(boolean visible) {
mVisible = visible;
if (visible) {
requestLocationUpdates();
drawFrame();
} else {
mHandler.removeCallbacks(mDrawBtmp);
removeUpdates();
}
}
#Override
public void onSurfaceChanged(SurfaceHolder holder, int format, int width, int height) {
super.onSurfaceChanged(holder, format, width, height);
drawFrame();
}
#Override
public void onSurfaceCreated(SurfaceHolder holder) {
super.onSurfaceCreated(holder);
}
#Override
public void onSurfaceDestroyed(SurfaceHolder holder) {
super.onSurfaceDestroyed(holder);
mVisible = false;
mHandler.removeCallbacks(mDrawBtmp);
}
#Override
public void onOffsetsChanged(float xOffset, float yOffset,
float xStep, float yStep, int xPixels, int yPixels) {
/*Log.d(TAG, "onOffsetsChanged");
Log.d(TAG, "xOffset: " + String.valueOf(xOffset));
Log.d(TAG, "yOffset: " + String.valueOf(yOffset));
Log.d(TAG, "xStep: " + String.valueOf(xStep));
Log.d(TAG, "yStep: " + String.valueOf(yStep));
Log.d(TAG, "xPixels: " + String.valueOf(xPixels));
Log.d(TAG, "yPixels: " + String.valueOf(yPixels));
//Log.d(TAG, String.valueOf((xOffset / xStep)));
Log.d(TAG, " ");*/
this.xPixels = xPixels;
this.xStep = xStep;
this.xOffset = xOffset;
drawFrame();
}
/*
* Store the position of the touch event so we can use it for drawing later
*/
#Override
public void onTouchEvent(MotionEvent event) {
super.onTouchEvent(event);
}
/*
* Draw one frame of the animation. This method gets called repeatedly
* by posting a delayed Runnable. You can do any drawing you want in
* here. This example draws a wireframe cube.
*/
/*
 * Draws a single frame onto the wallpaper surface, then schedules the next
 * redraw (5 fps) for as long as the wallpaper is visible.
 */
void drawFrame() {
    final SurfaceHolder holder = getSurfaceHolder();
    Canvas canvas = null;
    try {
        canvas = holder.lockCanvas();
        if (canvas != null) {
            // Render the photo / status text.
            drawBmp(canvas);
        }
    } finally {
        // Always return the canvas, even if drawing threw.
        if (canvas != null) {
            holder.unlockCanvasAndPost(canvas);
        }
    }
    // Reschedule the next redraw.
    mHandler.removeCallbacks(mDrawBtmp);
    if (mVisible) {
        mHandler.postDelayed(mDrawBtmp, 1000 / 5);
    }
}
private class MyThread extends Thread {
Location loc;
MyThread(Location loc){
this.loc = loc;
}
public synchronized void run(){ // bringt synchronized was? weil wir kreieren ja immer eine neue intanz...
try{
if (loc == null){
Log.d(TAG, "location is null");
return;
}
<SNIP>
//the main code, update `status` and `bmp`
}
/**
 * Renders the current photo (scaled to the canvas height and panned by the
 * launcher offset) or, while no photo is available, the status text; the
 * resolved location name is overlaid on top.
 */
void drawBmp(Canvas canvas) {
    canvas.drawColor(Color.BLACK);
    if (bmp != null) {
        // FIX: never write the scaled result back into the `bmp` field.
        // The old code re-scaled the already-scaled bitmap over and over;
        // the accumulated resampling error is exactly the quality
        // degradation seen after a while. Scale from the pristine bitmap
        // into a local every frame instead.
        Bitmap photo = bmp;
        if (photo.getHeight() != canvas.getHeight()) {
            float ratio = (float) photo.getWidth() / (float) photo.getHeight();
            photo = Bitmap.createScaledBitmap(bmp, (int) (canvas.getHeight() * ratio), canvas.getHeight(), true);
        }
        int x;
        if (photo.getWidth() >= canvas.getWidth()) {
            // Pan a wide photo with the launcher scroll offset.
            x = -1 * (int) (xOffset * (photo.getWidth() - canvas.getWidth()));
        } else {
            // FIX: centre a narrow photo. The original computed
            // (bmpWidth - canvasWidth)/2, which is negative here and pushed
            // the image off the left edge instead of centring it.
            x = (canvas.getWidth() - photo.getWidth()) / 2;
        }
        canvas.drawBitmap(photo, x, 0, null);
    } else if (status != null) {
        Paint textPaint = new Paint();
        textPaint.setColor(Color.WHITE);
        textPaint.setAntiAlias(true);
        textPaint.setTextSize(25);
        textPaint.setAlpha(120);
        textPaint.setTextAlign(Align.CENTER);
        canvas.drawText(status, canvas.getWidth() / 2, canvas.getHeight() / 2, textPaint);
    }
    if (locationName != null) {
        Paint textPaint = new Paint();
        textPaint.setColor(Color.BLACK);
        textPaint.setAntiAlias(true);
        textPaint.setTextSize(30);
        textPaint.setAlpha(150);
        textPaint.setTextAlign(Align.LEFT);
        canvas.drawText(locationName, 0, 70, textPaint);
    }
}
}
}
In drawBmp() you repeatedly take your bitmap (bmp), scale it (the call to createScaledBitmap) and then assign it back to bmp. Over time all these scaling operations are going to result in the artifacts that you see.
To solve this, store the original bitmap in a different variable (e.g., private Bitmap originalImage;) and create the scaled bitmap from original.
bmp = Bitmap.createScaledBitmap(originalImage, (int)(canvas.getHeight()*ratio), canvas.getHeight(), true);

Categories