Show The Desired/Specific Frequency - java

I've made an Android app that visualizes the frequency spectrum of recorded sound. I'm using the package from www.netlib.org/fftpack/jfftpack.tgz to perform the FFT. We know that each line of the spectrum covers the frequency range given by this formula: "((frequency_sample/2)/samples)". It's easy to identify the first, second, and third spectrum lines that occur, but above the fifth line it's hard to determine which frequency a line corresponds to.
I want to know whether a specific frequency (for example 200 Hz) occurs, and show the result as a toast or something similar. Is there a good library or source I could refer to in order to do this?
Thanks :)
package com.example.agita.stethoscopeandroid;
/**
* Created by Agita on 5/21/2015.
*/
import java.io.BufferedOutputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import ca.uol.aig.fftpack.RealDoubleFFT;
public class RekamActivity extends Activity implements OnClickListener {
Button startRecordingButton, stopRecordingButton;
TextView statusText;
File recordingFile;
boolean isRecording = false;
int frequency = 8000;
int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
private RealDoubleFFT transformer;
int blockSize = 256;
RecordAudio recordTask;
ImageView imageView;
Bitmap bitmap;
Canvas canvas;
Paint paint;
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.rekam);
statusText = (TextView) this.findViewById(R.id.StatusTextView);
startRecordingButton = (Button) this .findViewById(R.id.StartRecordingButton);
stopRecordingButton = (Button) this .findViewById(R.id.StopRecordingButton);
startRecordingButton.setOnClickListener(this);
stopRecordingButton.setOnClickListener(this);
stopRecordingButton.setEnabled(false);
transformer = new RealDoubleFFT(blockSize);
imageView = (ImageView) this.findViewById(R.id.ImageView01);
bitmap = Bitmap.createBitmap(256, 100, Bitmap.Config.ARGB_8888);
canvas = new Canvas(bitmap);
paint = new Paint();
paint.setColor(Color.GREEN);
imageView.setImageBitmap(bitmap);
String recordedAudioFile = getRecoredAudioFile();
File path = new File(Environment.getExternalStorageDirectory()
.getAbsolutePath() + "/Datarekaman/");
path.mkdirs();
try {
recordingFile = File.createTempFile("recording"+recordedAudioFile, ".wav", path);
} catch (IOException e) {
throw new RuntimeException("Couldn't create file on SD card", e);
}
}
private String getRecoredAudioFile() {
String returnAudio;
java.util.Date date= new java.util.Date();
returnAudio = new SimpleDateFormat("yyyyMMdd_HHmmss").format(date.getTime());
return returnAudio;
}
public void onClick(View v) {
if (v == startRecordingButton) {
record();
} else if (v == stopRecordingButton) {
stopRecording();
}
}
public void record() {
startRecordingButton.setEnabled(false);
stopRecordingButton.setEnabled(true);
recordTask = new RecordAudio();
recordTask.execute();
}
public void stopRecording() {
isRecording = false;
}
private class RecordAudio extends AsyncTask<Void, double[], Void> {
#Override
protected Void doInBackground(Void... params) {
isRecording = true;
try {
DataOutputStream dos = new DataOutputStream(
new BufferedOutputStream(new FileOutputStream(
recordingFile)));
int bufferSize = AudioRecord.getMinBufferSize(frequency,
channelConfiguration, audioEncoding);
AudioRecord audioRecord = new AudioRecord(
MediaRecorder.AudioSource.MIC, frequency,
channelConfiguration, audioEncoding, bufferSize);
short[] buffer = new short[blockSize];
double[] toTransform = new double[blockSize];
audioRecord.startRecording();
while (isRecording) {
int bufferReadResult = audioRecord.read(buffer, 0,
blockSize);
for (int i = 0; i < blockSize && i < bufferReadResult; i++) {
dos.writeShort(buffer[i]);
toTransform[i] = (double) buffer[i]/32768.0;
}
transformer.ft(toTransform);
publishProgress(toTransform);
}
audioRecord.stop();
dos.close();
} catch (Throwable t) {
Log.e("AudioRecord", "Recording Failed");
}
return null;
}
/*protected void onProgressUpdate(Integer... progress) {
statusText.setText(progress[0].toString());
}*/
protected void onProgressUpdate (double[]... toTransform) {
canvas.drawColor(Color.BLACK);
for (int i = 0; i < toTransform[0].length; i++) {
int x;
x = i;
int downy = (int) (100 - (toTransform[0][i] * 10));
int upy = 100;
//canvas.drawRect(x * 3, downy, x * 3 + 4, upy, paint);
canvas.drawLine(x, downy, x, upy, paint);
imageView.invalidate();
}
}
protected void onPostExecute(Void result) {
startRecordingButton.setEnabled(true);
stopRecordingButton.setEnabled(false);
}
}
}

Related

Why is my Bitmap null when feeding through tensorflow pipeline?

Using an AR glass' API to get its camera frames to do object detection
I tried converting the frames (ByteBuffer) to a bitmap and feeding it into my object detection pipeline. I realised there was no detection going on and debugged to find that my bitmap is being read as null. Is it possible to input the ByteBuffer straight into the interpreter and do object detection? If not: my byte buffer is valid and tested, but converting it to a bitmap shows a toast message saying it is invalid and null.
package com.example.jjsdkcameratest;
import androidx.appcompat.app.AppCompatActivity;
import java.io.IOException;
import java.nio.ByteBuffer;
import android.app.Activity;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.RectF;
import android.os.Bundle;
import android.os.FileUtils;
import android.util.Log;
import android.view.SurfaceView;
import android.widget.Button;
import com.example.jjsdkcameratest.ml.SsdMobilenetV11Metadata1;
import com.jorjin.jjsdk.camera.CameraManager;
import com.jorjin.jjsdk.camera.FrameListener;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.widget.ImageView;
import android.widget.Toast;
import org.tensorflow.lite.DataType;
import org.tensorflow.lite.support.common.FileUtil;
import org.tensorflow.lite.support.image.ImageProcessor;
import org.tensorflow.lite.support.image.TensorImage;
import org.tensorflow.lite.support.image.ops.ResizeOp;
import org.tensorflow.lite.support.image.ImageProcessor;
import org.tensorflow.lite.support.image.TensorImage;
import org.tensorflow.lite.support.tensorbuffer.TensorBuffer;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
public class MainActivity extends Activity {
private CameraManager cameraManager;
private SurfaceView cameraSurface;
private Context context;
private ImageView imageView;
private SsdMobilenetV11Metadata1 model;
private ImageProcessor imageProcessor;
private Paint paint;
private List<String> labels;
private List<Integer> colors = Arrays.asList(Color.BLUE, Color.GREEN, Color.CYAN, Color.GRAY, Color.BLACK, Color.DKGRAY, Color.MAGENTA, Color.YELLOW, Color.RED);
private FrameListener frameListener = (buffer, width, height, format) -> {
// Access the data buffer of the previewing frame here
byte[] bytes = buffer.array();
if (bytes == null || bytes.length == 0) {
Log.e("MainActivity", "Frame data is invalid or empty");
Toast.makeText(context, "Error: Frame data not valid", Toast.LENGTH_SHORT).show();
return;
}
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
if (bitmap == null) {
Log.e("MainActivity", "Failed to decode frame data into a bitmap");
Toast.makeText(context, "Error: Failed to decode frame data into a bitmap", Toast.LENGTH_SHORT).show();
}
// Creates inputs for reference.
try {
TensorImage image = TensorImage.fromBitmap(bitmap);
image = imageProcessor.process(image);
// Runs model inference and gets result.
SsdMobilenetV11Metadata1.Outputs outputs = model.process(image);
float[] locations = outputs.getLocationsAsTensorBuffer().getFloatArray();
float[] classes = outputs.getClassesAsTensorBuffer().getFloatArray();
float[] scores = outputs.getScoresAsTensorBuffer().getFloatArray();
float[] numberOfDetections = outputs.getNumberOfDetectionsAsTensorBuffer().getFloatArray();
Bitmap mutable = bitmap.copy(Bitmap.Config.ARGB_8888, true);
Canvas canvas = new Canvas(mutable);
int h = mutable.getHeight();
int w = mutable.getWidth();
paint.setTextSize(h/15f);
paint.setStrokeWidth(h/85f);
int x = 0;
for (int index = 0; index < scores.length; index++) {
float fl = scores[index];
x = index;
x *= 4;
if (fl > 0.5) {
paint.setColor(colors.get(index));
paint.setStyle(Paint.Style.STROKE);
canvas.drawRect(new RectF(locations[x+1]*w, locations[x]*h, locations[x+3]*w, locations[x+2]*h), paint);
paint.setStyle(Paint.Style.FILL);
canvas.drawText(labels.get((int) classes[index]) + " " + Float.toString(fl), locations[x+1]*w, locations[x]*h, paint);
}
}
if (imageView != null) {
imageView.setImageBitmap(bitmap);
}
} catch (NullPointerException npe) {
Log.e("MainActivity", "Null pointer exception occurred", npe);
Toast.makeText(this, "Null pointer exception occurred", Toast.LENGTH_SHORT);
}
};
public ImageView getImageView() {
if (imageView == null) {
imageView = findViewById(R.id.image_view);
}
return imageView;
}
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
try {
labels = FileUtil.loadLabels(this, "labels.txt");
} catch (IOException e) {
e.printStackTrace();
}
context = this;
getImageView();
cameraSurface = findViewById(R.id.surface_camera);
cameraManager = new CameraManager(context);
cameraManager.addSurfaceHolder(cameraSurface.getHolder());
cameraManager.setCameraFrameListener(frameListener);
cameraManager.setResolutionIndex(0);
cameraManager.startCamera(CameraManager.COLOR_FORMAT_RGBA);
imageProcessor = new ImageProcessor.Builder().add(new ResizeOp(300, 300, ResizeOp.ResizeMethod.BILINEAR)).build();
try {
SsdMobilenetV11Metadata1 model = SsdMobilenetV11Metadata1.newInstance(this);
} catch (IOException e) {
e.printStackTrace();
}
}
#Override
protected void onDestroy() {
super.onDestroy();
// Releases model resources if no longer used.
model.close();
cameraManager.stopCamera();
}
}

Java Android Studio game randomly ends when conditions are not met

I am trying to create a game in Android Studio that is similar to Flappy Bird. I'm following a YouTube tutorial as I am new to Java, and I believe I've followed it very closely.
My problem is that sometimes the game will terminate early, despite the gameOver conditions not being met. I have a function "updateAndDrawTubes" that draws the pipes to the canvas and also checks whether the player is touching the pipes. When the player does touch the pipe, the game finishes and goes to the gameOver screen, displaying the score. However, I am finding that even if there is no collision, sometimes the game will finish and go to the gameOver screen early.
My code is a mess, so apologies for that but any help would really be appreciated as I've exhausted all other options.
GameEngine.java
package com.example.mymainuniproj;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.Log;
import java.util.ArrayList;
import java.util.Random;
/**
 * Core game loop logic: moves/draws the background, tubes and player, and
 * performs collision and scoring checks each frame.
 */
public class GameEngine {

    BackgroundImage backgroundImage;
    Player player;

    // 0 = not started, 1 = playing, 2 = game over.
    static int gameState;

    ArrayList<Tube> tubes;
    Random random;
    int score;
    int scoringTube;   // index of the next tube the player must clear
    Paint scorePaint;

    public GameEngine() {
        backgroundImage = new BackgroundImage();
        player = new Player();
        gameState = 0;
        tubes = new ArrayList<>();
        random = new Random();
        // Spawn the tubes off the right edge, evenly spaced.
        for (int i = 0; i < AppConstants.numberOfTubes; i++) {
            int tubeX = AppConstants.SCREEN_WIDTH + i * AppConstants.distanceBetweenTubes;
            int topTubeOffsetY = AppConstants.minTubeOffsetY
                    + random.nextInt(AppConstants.maxTubeOffsetY - AppConstants.minTubeOffsetY + 1);
            tubes.add(new Tube(tubeX, topTubeOffsetY));
        }
        score = 0;
        scoringTube = 0;
        scorePaint = new Paint();
        scorePaint.setColor(Color.RED);
        scorePaint.setTextSize(100);
        scorePaint.setTextAlign(Paint.Align.LEFT);
    }

    /**
     * Moves and draws the tubes, then checks for collision and scoring.
     *
     * BUG FIX: the original collision condition was
     *     overlapX && aboveGap || belowGap
     * Since && binds tighter than ||, `belowGap` alone could end the game even
     * when the tube was nowhere near the player — which is exactly the random
     * early game-over reported. The vertical checks must only apply while the
     * player horizontally overlaps the tube: overlapX && (aboveGap || belowGap).
     */
    public void updateAndDrawTubes(Canvas canvas) {
        if (gameState == 1) {
            Tube current = tubes.get(scoringTube);
            boolean overlapX = current.getTubeX()
                    < player.getPlayerX() + AppConstants.getBitmapBank().getPlayerWidth();
            boolean hitsTopTube = current.getTopTubeOffsetY() > player.getPlayerY();
            boolean hitsBottomTube = current.getBottomTubeY()
                    < player.getPlayerY() + AppConstants.getBitmapBank().getPlayerHeight();
            if (overlapX && (hitsTopTube || hitsBottomTube)) {
                // Game over: hand the score to the GameOver screen.
                gameState = 2;
                Context context = AppConstants.gameActivityContext;
                Intent intent = new Intent(context, GameOver.class);
                intent.putExtra("score", score);
                context.startActivity(intent);
                ((Activity) context).finish();
            } else if (current.getTubeX()
                    < player.getPlayerX() - AppConstants.getBitmapBank().getTubeWidth()) {
                // Tube fully passed: score it and advance to the next tube.
                score++;
                scoringTube++;
                if (scoringTube > AppConstants.numberOfTubes - 1) {
                    scoringTube = 0;
                }
            }
            for (int i = 0; i < AppConstants.numberOfTubes; i++) {
                // Recycle a tube that scrolled fully off the left edge:
                // move it back to the right with a fresh random gap position.
                if (tubes.get(i).getTubeX() < -AppConstants.getBitmapBank().getTubeWidth()) {
                    tubes.get(i).setTubeX(tubes.get(i).getTubeX()
                            + AppConstants.numberOfTubes * AppConstants.distanceBetweenTubes);
                    int topTubeOffsetY = AppConstants.minTubeOffsetY
                            + random.nextInt(AppConstants.maxTubeOffsetY - AppConstants.minTubeOffsetY + 1);
                    tubes.get(i).setTopTubeOffsetY(topTubeOffsetY);
                }
                tubes.get(i).setTubeX(tubes.get(i).getTubeX() - AppConstants.tubeVelocity);
                canvas.drawBitmap(AppConstants.getBitmapBank().getTubeTop(),
                        tubes.get(i).getTubeX(), tubes.get(i).getTopTubeY(), null);
                canvas.drawBitmap(AppConstants.getBitmapBank().getTubeBottom(),
                        tubes.get(i).getTubeX(), tubes.get(i).getBottomTubeY(), null);
            }
            canvas.drawText("Score: " + score, 0, 110, scorePaint);
        }
    }

    /** Scrolls the background and draws a second copy to cover the seam. */
    public void updateAndDrawBackgroundImage(Canvas canvas) {
        backgroundImage.setX(backgroundImage.getX() - backgroundImage.getVelocity());
        if (backgroundImage.getX() < -AppConstants.getBitmapBank().getBackgroundWidth()) {
            backgroundImage.setX(0);
        }
        canvas.drawBitmap(AppConstants.getBitmapBank().getBackground(),
                backgroundImage.getX(), backgroundImage.getY(), null);
        if (backgroundImage.getX()
                < -(AppConstants.getBitmapBank().getBackgroundWidth() - AppConstants.SCREEN_WIDTH)) {
            canvas.drawBitmap(AppConstants.getBitmapBank().getBackground(),
                    backgroundImage.getX() + AppConstants.getBitmapBank().getBackgroundWidth(),
                    backgroundImage.getY(), null);
        }
    }

    /** Applies gravity to the player and draws the current animation frame. */
    public void updateAndDrawPlayer(Canvas canvas) {
        if (gameState == 1) {
            if (player.getPlayerY() < (AppConstants.SCREEN_HEIGHT
                    - AppConstants.getBitmapBank().getPlayerHeight()) || player.getVelocity() < 0) {
                player.setVelocity(player.getVelocity() + AppConstants.gravity);
                player.setPlayerY(player.getPlayerY() + player.getVelocity());
            }
        }
        int currentFrame = player.getCurrentFrame();
        canvas.drawBitmap(AppConstants.getBitmapBank().getPlayer(currentFrame),
                player.getPlayerX(), player.getPlayerY(), null);
        currentFrame++;
        if (currentFrame > player.maxFrame) {
            currentFrame = 0;
        }
        player.setCurrentFrame(currentFrame);
    }
}
GameOver.java
package com.example.mymainuniproj;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.os.PersistableBundle;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
public class GameOver extends AppCompatActivity {
TextView tvScore, tvPersonalBest;
#Override
public void onCreate(#Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.game_over);
int score = getIntent().getExtras().getInt("score");
SharedPreferences pref = getSharedPreferences("MyPref",0);
int scoreSP = pref.getInt("scoreSP",0);
SharedPreferences.Editor editor = pref.edit();
//update high score
if(score > scoreSP){
scoreSP = score;
editor.putInt("scoreSP",scoreSP);
editor.commit();
}
tvScore = findViewById(R.id.tvScore);
//tvPersonalBest = findViewById(R.id.tvPersonalScore);
tvScore.setText(""+score);
//tvPersonalBest.setText(""+scoreSP);*/
}
public void showToastOnClick(View view){
String text = "";
switch (view.getId()){
case R.id.buttonRestart: restart(); break;
}
Toast.makeText(getApplicationContext(), text, Toast.LENGTH_SHORT).show();
}
public void restart(){
Intent intent = new Intent(GameOver.this , GameActivity.class);
startActivity(intent);
finish();
}
public void exit(View view){
finish();
}
}
AppConstants.java
package com.example.mymainuniproj;
import android.content.Context;
import android.util.DisplayMetrics;
import android.view.Display;
import android.view.WindowManager;
/**
 * Static holder for game-wide constants and shared singletons.
 * initialization(Context) must run before the game starts.
 */
public class AppConstants {

    static BitmapBank bitmapBank;   // shared bitmap cache
    static GameEngine gameEngine;   // shared game engine
    static int SCREEN_WIDTH, SCREEN_HEIGHT;
    static int gravity;
    static int VELOCITY_WHEN_JUMPED;
    static int gapBetweenTopAndBottomTubes;
    static int numberOfTubes;
    static int tubeVelocity;
    static int minTubeOffsetY;
    static int maxTubeOffsetY;
    static int distanceBetweenTubes;
    static Context gameActivityContext;

    // Static-only utility class: prevent instantiation.
    private AppConstants() {
    }

    /** Captures screen size, loads bitmaps, derives constants, builds the engine. */
    public static void initialization(Context context) {
        setScreenSize(context);
        bitmapBank = new BitmapBank(context.getResources());
        setGameConstants();   // depends on SCREEN_WIDTH/HEIGHT set above
        gameEngine = new GameEngine();
    }

    /** Derives tuning constants; SCREEN_WIDTH/HEIGHT must already be set. */
    public static void setGameConstants() {
        AppConstants.gravity = 3;
        AppConstants.VELOCITY_WHEN_JUMPED = -40;
        gapBetweenTopAndBottomTubes = 800;
        AppConstants.numberOfTubes = 2;
        AppConstants.tubeVelocity = 12;
        AppConstants.minTubeOffsetY = (int) (AppConstants.gapBetweenTopAndBottomTubes / 2.0);
        AppConstants.maxTubeOffsetY = AppConstants.SCREEN_HEIGHT
                - AppConstants.minTubeOffsetY - AppConstants.gapBetweenTopAndBottomTubes;
        AppConstants.distanceBetweenTubes = AppConstants.SCREEN_WIDTH * 3 / 4;
    }

    /** Returns the shared BitmapBank instance. */
    public static BitmapBank getBitmapBank() {
        return bitmapBank;
    }

    /** Returns the shared GameEngine instance. */
    public static GameEngine getGameEngine() {
        return gameEngine;
    }

    /** Reads the display metrics and stores screen width and height. */
    private static void setScreenSize(Context context) {
        WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
        Display display = wm.getDefaultDisplay();
        DisplayMetrics metrics = new DisplayMetrics();
        display.getMetrics(metrics);
        int width = metrics.widthPixels;
        int height = metrics.heightPixels;
        AppConstants.SCREEN_WIDTH = width;
        AppConstants.SCREEN_HEIGHT = height;
    }
}
This is my first time posting anywhere like this so please go easy on me if the format is bad or if I'm missing anything important!
If there is any other parts of the code that would help please just let me know and I'll try to be quick!
Thank you!

BebopVideoView to Mat

I have come up against a big problem.
I'm trying to convert BebopVideoView output to a Mat.
(BebopVideoView is from the Parrot drone source code.)
But I have been failing at this for several days.
Here is the code.
package com.hyeonjung.dronecontroll.view;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.os.Environment;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import com.parrot.arsdk.arcontroller.ARCONTROLLER_STREAM_CODEC_TYPE_ENUM;
import com.parrot.arsdk.arcontroller.ARControllerCodec;
import com.parrot.arsdk.arcontroller.ARFrame;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
public class BebopVideoView extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "BebopVideoView";
private static final String VIDEO_MIME_TYPE = "video/avc";
private static final int VIDEO_DEQUEUE_TIMEOUT = 33000;
private MediaCodec mMediaCodec;
private Lock mReadyLock;
private boolean mIsCodecConfigured = false;
private ByteBuffer mSpsBuffer;
private ByteBuffer mPpsBuffer;
private ByteBuffer[] mBuffers;
private static final int VIDEO_WIDTH = 640;
private static final int VIDEO_HEIGHT = 368;
public byte[] a;
public Mat k;
public BebopVideoView(Context context) {
super(context);
customInit();
}
public BebopVideoView(Context context, AttributeSet attrs) {
super(context, attrs);
customInit();
}
public BebopVideoView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
customInit();
}
private void customInit() {
mReadyLock = new ReentrantLock();
getHolder().addCallback(this);
}
public void displayFrame(ARFrame frame) {
mReadyLock.lock();
if ((mMediaCodec != null)) {
if (mIsCodecConfigured) {
// Here we have either a good PFrame, or an IFrame
int index = -1;
try {
index = mMediaCodec.dequeueInputBuffer(VIDEO_DEQUEUE_TIMEOUT);
} catch (IllegalStateException e) {
Log.e(TAG, "Error while dequeue input buffer");
}
if (index >= 0) {
ByteBuffer b;
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
b = mMediaCodec.getInputBuffer(index); // fill inputBuffer with valid data
}
else {
b = mBuffers[index]; // fill inputBuffer with valid data
b.clear();
}
if (b != null) {
b.put(frame.getByteData(), 0, frame.getDataSize()); //write to b.
getMat(frame);
saveMat(k);
}
try {
mMediaCodec.queueInputBuffer(index, 0, frame.getDataSize(), 0, 0); //end of stream
} catch (IllegalStateException e) {
Log.e(TAG, "Error while queue input buffer");
}
}
}
// Try to display previous frame
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int outIndex;
try {
outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);
while (outIndex >= 0) {
mMediaCodec.releaseOutputBuffer(outIndex, true);
outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);
}
} catch (IllegalStateException e) {
Log.e(TAG, "Error while dequeue input buffer (outIndex)");
}
}
mReadyLock.unlock();
}
public void configureDecoder(ARControllerCodec codec) {
mReadyLock.lock();
if (codec.getType() == ARCONTROLLER_STREAM_CODEC_TYPE_ENUM.ARCONTROLLER_STREAM_CODEC_TYPE_H264) {
ARControllerCodec.H264 codecH264 = codec.getAsH264();
mSpsBuffer = ByteBuffer.wrap(codecH264.getSps().getByteData());
mPpsBuffer = ByteBuffer.wrap(codecH264.getPps().getByteData());
}
if ((mMediaCodec != null) && (mSpsBuffer != null)) {
configureMediaCodec();
}
mReadyLock.unlock();
}
private void configureMediaCodec() {
MediaFormat format = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT);
format.setByteBuffer("csd-0", mSpsBuffer);
format.setByteBuffer("csd-1", mPpsBuffer);
mMediaCodec.configure(format, getHolder().getSurface(), null, 0);
mMediaCodec.start();
if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.LOLLIPOP) {
mBuffers = mMediaCodec.getInputBuffers();
}
mIsCodecConfigured = true;
}
private void initMediaCodec(String type) {
try {
mMediaCodec = MediaCodec.createDecoderByType(type);
} catch (IOException e) {
Log.e(TAG, "Exception", e);
}
if ((mMediaCodec != null) && (mSpsBuffer != null)) {
configureMediaCodec();
}
}
private void releaseMediaCodec() {
if (mMediaCodec != null) {
if (mIsCodecConfigured) {
mMediaCodec.stop();
mMediaCodec.release();
}
mIsCodecConfigured = false;
mMediaCodec = null;
}
}
#Override
public void surfaceCreated(SurfaceHolder holder) {
mReadyLock.lock();
initMediaCodec(VIDEO_MIME_TYPE);
mReadyLock.unlock();
}
#Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}
#Override
public void surfaceDestroyed(SurfaceHolder holder) {
mReadyLock.lock();
releaseMediaCodec();
mReadyLock.unlock();
}
public void getMat(ARFrame frame) {
k = new Mat();
k.get(150, 150, frame.getByteData());
k.put(150, 150, frame.getByteData());
//or
//byte[] a= new byte[b.remaining()];
//b.get(a);
//k.get(150, 150, a);
//k.put(150, 150, a);
}
public void saveMat (Mat mat) {
Mat mIntermediateMat = new Mat(150, 150, CvType.CV_8UC1);
Imgproc.cvtColor(mat, mIntermediateMat, Imgproc.COLOR_GRAY2BGR);
File path = new File(Environment.getExternalStorageDirectory() + "/data");
path.mkdirs();
File file = new File(path, "image.png");
String filename = file.toString();
Boolean bool = Imgcodecs.imwrite(filename, mIntermediateMat);
if (bool)
Log.i(TAG, "SUCCESS writing image to external storage");
else
Log.i(TAG, "Fail writing image to external storage");
}
}
I think I can get image-related data from ByteBuffer b or frame.getByteData().
I inspected ByteBuffer b and frame.getByteData().
The data was of char type with a range of -128 to 127.
I then checked the result of getMat and saveMat, and the result was NULL (Mat k).
What is wrong?
Please help me T.T
If you use a TextureView you can simply grab a bitmap of it and convert it to a Mat. You need to use the TextureView's provided surface rather than the typical SurfaceView holder. This will require some additional refactoring of the mediaCodec lifecycle, but is a fairly trivial change.
public class BebopVideoView extends TextureView implements TextureView.SurfaceTextureListener {
#Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
this.surface = new Surface(surface);
surfaceCreated = true;
}
...
}
And inside of configureMediaCodec use the class level surface captured in onSurfaceTextureAvailable instead....
mediaCodec.configure(format, surface, null, 0);
With a couple other minor tweaks you now have a lot more control over the view. You can do things like setTransform() and more importantly in your case getBitmap:
Mat mat = new Mat();
Utils.bitmapToMat(getBitmap(), mat);

How can I get an array whose values were set in a method of another class

In MainActivity I made a timer: after clicking the button, every 3 seconds it shows the value of available memory and writes it into an array. After writing has finished, the second button "see chart" becomes clickable; clicking it moves to the next activity (the second class).
The second class creates a canvas with lines, and should draw a chart showing available memory over time. But I can't pass the array from the first class to the second class.
I need to pass the array "Masiv", filled in the run() method, to my second class.
First class
package foxstrot.p3;
import android.annotation.TargetApi;
import android.app.ActivityManager;
import android.content.Intent;
import android.graphics.Canvas;
import android.os.Build;
import android.support.v7.app.ActionBarActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.TimeUnit;
public class MainActivity extends ActionBarActivity {
private Timer timer;
private MyTimerTask myTimerTask;
private Intent i;
long[] Masiv = new long[50];
int k;
private long freeMemory;
private Canvas canvas;
private G.DrawView d;
private int t = 0;
private long j;
private TextView tv;
private long r;
private Button b2;
MyTimerTask myTimerTask1 = new MyTimerTask();
int ii = 0;
ArrayList <Integer> AL = new ArrayList<Integer>() ;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
tv = (TextView) findViewById(R.id.textView);
b2 = (Button)findViewById(R.id.b2);
b2.setClickable(false);
//myTimerTask1.getMas();
//actManager.getMemoryInfo(memInfo);
//freeMemory = memInfo.availMem;
//tv.setText("freeMemory: " + freeMemory);
//Canvas canvas = new Canvas();
/*G g = new G();
G.DrawView d = g.new DrawView(this);*/
}
public void start(View v) {
timer = new Timer();
myTimerTask = new MyTimerTask();
timer.schedule(myTimerTask, 3000, 3000);
}
public void chart(View v){
i = new Intent(this,G.class);
startActivity(i);
}
#Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
#Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
class MyTimerTask extends TimerTask {
#TargetApi(Build.VERSION_CODES.JELLY_BEAN)
long getFreeMemory(){
ActivityManager actManager = (ActivityManager) getSystemService(ACTIVITY_SERVICE);
ActivityManager.MemoryInfo memInfo = new ActivityManager.MemoryInfo();
actManager.getMemoryInfo(memInfo);
freeMemory = memInfo.availMem;
return freeMemory;
}
runOnUiThread(new Runnable() {
#Override
public void run() {
r = getFreeMemory();
k = (int)r/100000;
//setMas(k)
AL.add(k);
tv.setText("Free memory : " + k);
t++;
if(t == 5){
cancel();
b2.setClickable(true);
}
}
});
}
public void setMas(long p){
//if(ii==5){}
//Log.d("LOG_TAG", "SetMy_Array[0]: " + k + "SetMy_Array[1]: " + k);
ii++;
}
/*public long getMas(){
Log.d("LOG_TAG", "GetMy_Array[0]: " + k + "GetMy_Array[1]: " + k);
return k;
}*/
}
}
Second class
package foxstrot.p3;
import android.annotation.TargetApi;
import android.app.Activity;
import android.app.ActivityManager;
import android.content.Context;
import android.content.Intent;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.TimeUnit;
/**
* Created by Admin on 14.04.2015.
*/
public class G extends Activity {
int ii;
private long[] My_Array ;
private MainActivity mainActivity = new MainActivity();
//private MainActivity.MyTimerTask myTimerTask = mainActivity.new MyTimerTask();
//private long[] G_mas = myTimerTask.getMas();
ArrayList<Integer> AL = new ArrayList<Integer>() ;
Intent intent;
private boolean drawGraph = true;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(new DrawView(this));
intent = getIntent();
//AL = intent.getIntegerArrayListExtra("Joker");
//Log.d("LOG_TAG","Al: " + AL.get(0) + "AL: " + AL.get(0));
//My_Array = myTimerTask.getMas();
//Log.d("LOG_TAG","My_Array[0]: " + myTimerTask.getMas() + "My_Array[1]: " + myTimerTask.getMas());
/*for(int i=0;i<G_mas.length;i++){
max = Math.max(max,G_mas[i]);
min = G_mas[i];
min = Math.min(min, G_mas[i]);
}*/
}
public class DrawView extends View{
Paint p;
Rect rect;
int uN = 0;
int uT = 0;
int y1 = 0;
int x1 = 0;
int n = 2000;
int t = 0;
public DrawView(Context context){
super(context);
p = new Paint();
rect = new Rect();
}
#Override
public void onDraw(Canvas canvas) {
final int cHeight = canvas.getHeight();
final int cWidth = canvas.getWidth();
Paint paint = new Paint();
paint.setColor(Color.WHITE);
canvas.drawPaint(paint);
paint.setColor(Color.BLACK);
paint.setTextSize(30);
canvas.drawLine(80,0,80,cHeight-60,paint);
canvas.drawLine(80,cHeight-60,cWidth,cHeight-60,paint);
for(int i=0;i<50;i++){
canvas.drawText(uT + "" , 10 , 25 + y1, paint);
uT = uT + 3;
y1 = y1 + (cHeight/50);
}
for(int i=0;i<9;i++){
canvas.drawText("|"+ uN, 75 + x1, cHeight - 25 , paint);
uN = uN + 1000;
x1 = x1 + (cWidth/9)-25;
/*int joker = 10;
canvas.drawPoint(20+ joker,20,paint);
joker = joker + 40;*/
}
paint.setStrokeWidth(10);
/*for(int i=0;i<My_Array.length;i++){
long xc = (My_Array[i] * (cWidth-80)) / 8000 ;
int yc = (t * (cHeight-60))/147;
t = t + 3;
canvas.drawPoint(xc, yc, paint);
Log.d("LOG_TAG","xc: " + xc + "yc: " + yc);
}*/
/*Paint paint2 = new Paint();
paint2.setColor(Color.RED);
canvas.drawPoint(cWidth, cHeight, paint2);*/
}
}
}
Just use putExtra() and getLongArrayExtra().
Documentation: putExtra(String name, long[] value) and getLongArrayExtra(String name)
In MainActivity.java:
// Launches the chart Activity (G), passing the recorded samples as a long[]
// Intent extra. `i` and `Masiv` are fields declared elsewhere in this class.
public void chart(View v){
i = new Intent(this,G.class);
// The extra name must match the one used by getLongArrayExtra() in G.onCreate().
i.putExtra("foxstrot.p3.masiv", Masiv);
startActivity(i);
}
In G.java:
private boolean drawGraph = true;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(new DrawView(this));
intent = getIntent();
My_Array = intent.getLongArrayExtra("foxstrot.p3.masiv");
//...................

Image quality becomes bad after many drawBitmap() calls

My program draws a bitmap on the live-wallpaper canvas.
It works, but after some time the image quality becomes very bad (http://img855.imageshack.us/img855/9756/deviceq.png).
Any ideas why?
package com.tripr;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.util.List;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.Paint.Align;
import android.location.Address;
import android.location.Geocoder;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.os.Handler;
import android.service.wallpaper.WallpaperService;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
/*
* This animated wallpaper draws a rotating wireframe cube.
*/
public class MyWallpaperService extends WallpaperService{
private final String TAG = "tripr";
#Override
public void onCreate() {
// Redundant override — only delegates to the superclass; kept as-is.
super.onCreate();
}
#Override
public void onDestroy() {
// Redundant override — only delegates to the superclass; kept as-is.
super.onDestroy();
}
#Override
public Engine onCreateEngine() {
// Factory hook: the wallpaper framework requests a new rendering engine.
return new CubeEngine(this);
}
class CubeEngine extends Engine implements LocationListener{
private MyWallpaperService mws;
private float xOffset;
private float xStep;
private int xPixels;
private String lastPhotoUrl = "";
private LocationManager lm;
private Bitmap bmp = null;
private String locationName;
private String status = "waiting for location update...";
private final Handler mHandler = new Handler();
private final Runnable mDrawBtmp = new Runnable() {
public void run() {
drawFrame();
}
};
private boolean mVisible;
// Engine setup: start listening for network-based location fixes, then kick
// off an initial photo fetch from the last known position.
CubeEngine(MyWallpaperService mymws) {
mws = mymws;
lm = (LocationManager)getSystemService(Context.LOCATION_SERVICE);
requestLocationUpdates();
// getLastKnownLocation() may return null — MyThread.run() checks for that.
MyThread myThread = new MyThread(lm.getLastKnownLocation(LocationManager.NETWORK_PROVIDER));
myThread.start();
}
//taken from http://p-xr.com/android-tutorial-how-to-parse-read-json-data-into-a-android-listview/
//taken from http://p-xr.com/android-tutorial-how-to-parse-read-json-data-into-a-android-listview/
/**
 * Fetches {@code url} over HTTP GET and parses the response body as JSON.
 * Returns {@code null} if the request, read, or parse fails (errors are logged).
 */
private JSONObject getJSONfromURL(String url){
    InputStream is = null;
    String result = "";
    JSONObject jArray = null;
    // HTTP GET
    try {
        HttpClient httpclient = new DefaultHttpClient();
        HttpGet httpget = new HttpGet(url);
        HttpResponse response = httpclient.execute(httpget);
        HttpEntity entity = response.getEntity();
        is = entity.getContent();
    } catch (Exception e) {
        Log.e(TAG, "Error in http connection " + e.toString());
    }
    // FIX: the original fell through with is == null after a failed request
    // and crashed with a NullPointerException in the reader below.
    if (is == null) {
        return null;
    }
    // Convert the response stream to a string.
    BufferedReader reader = null;
    try {
        reader = new BufferedReader(new InputStreamReader(is, "iso-8859-1"), 8);
        StringBuilder sb = new StringBuilder();
        String line = null;
        while ((line = reader.readLine()) != null) {
            sb.append(line + "\n");
        }
        result = sb.toString();
    } catch (Exception e) {
        Log.e(TAG, "Error converting result " + e.toString());
    } finally {
        // FIX: always release the connection; the original leaked the stream
        // whenever the read threw. Closing the reader closes the stream too.
        try {
            if (reader != null) {
                reader.close();
            } else {
                is.close();
            }
        } catch (Exception e) {
            Log.e(TAG, "Error closing stream " + e.toString());
        }
    }
    // Parse the string into a JSON object.
    try {
        jArray = new JSONObject(result);
    } catch (JSONException e) {
        Log.e(TAG, "Error parsing data " + e.toString());
    }
    return jArray;
}
/**
 * Builds the Flickr photo-search REST URL for the given position and search
 * radius (km). Produces exactly the same query string as before, assembled
 * with a StringBuilder instead of one long concatenation chain.
 */
String getFlickrUrl(double lat, double lon, double radius){
    StringBuilder url = new StringBuilder("http://api.flickr.com/services/rest/?");
    url.append("method=flickr.photos.search");
    url.append("&api_key=a6d9db5ff2885dd2f8949590e7a44762");
    url.append("&tags=architecture");
    url.append("&lat=").append(lat);
    url.append("&lon=").append(lon);
    url.append("&radius=").append(radius);
    url.append("&radius_units=km");
    url.append("&extras=geo%2Curl_z%2Ctags");
    url.append("&per_page=250");
    url.append("&format=json");
    url.append("&sort=interestingness-desc");
    url.append("&nojsoncallback=1");
    return url.toString();
}
// Subscribes to network-provider location fixes (at most every 30 s / 10 m).
void requestLocationUpdates(){
if (!(lm.isProviderEnabled(LocationManager.NETWORK_PROVIDER))){
status = "locating over network disabled";
//bmp = null;
}
// NOTE(review): updates are requested even when the provider is disabled —
// this looks intentional (the subscription starts delivering once the user
// re-enables the provider), but confirm.
lm.requestLocationUpdates(LocationManager.NETWORK_PROVIDER, 1000*30, 10, this);
}
// Stops location callbacks; called when the wallpaper becomes invisible.
void removeUpdates(){
lm.removeUpdates(this);
}
#Override
public void onLocationChanged(Location location) {
// Every new fix triggers a background photo fetch; MyThread updates
// `status`/`bmp`, which the next drawFrame() pass picks up.
MyThread myThread = new MyThread(location);
myThread.start();
}
// Remaining LocationListener callbacks — intentionally no-ops here.
#Override
public void onProviderDisabled(String provider) {}
#Override
public void onProviderEnabled(String provider) {}
#Override
public void onStatusChanged(String provider, int status, Bundle extras) {}
#Override
public void onDestroy() {
super.onDestroy();
// Cancel any queued redraw so the Runnable cannot fire after teardown.
mHandler.removeCallbacks(mDrawBtmp);
}
#Override
public void onVisibilityChanged(boolean visible) {
// Visible: resume location updates and redraw immediately.
// Hidden: stop both redraws and location updates to save battery.
mVisible = visible;
if (visible) {
requestLocationUpdates();
drawFrame();
} else {
mHandler.removeCallbacks(mDrawBtmp);
removeUpdates();
}
}
#Override
public void onSurfaceChanged(SurfaceHolder holder, int format, int width, int height) {
super.onSurfaceChanged(holder, format, width, height);
// Redraw right away so the new surface size is reflected immediately.
drawFrame();
}
#Override
public void onSurfaceCreated(SurfaceHolder holder) {
// Redundant override — only delegates to the superclass; kept as-is.
super.onSurfaceCreated(holder);
}
#Override
public void onSurfaceDestroyed(SurfaceHolder holder) {
super.onSurfaceDestroyed(holder);
// Mark not-visible and cancel pending redraws so nothing draws on a dead surface.
mVisible = false;
mHandler.removeCallbacks(mDrawBtmp);
}
#Override
public void onOffsetsChanged(float xOffset, float yOffset,
float xStep, float yStep, int xPixels, int yPixels) {
/*Log.d(TAG, "onOffsetsChanged");
Log.d(TAG, "xOffset: " + String.valueOf(xOffset));
Log.d(TAG, "yOffset: " + String.valueOf(yOffset));
Log.d(TAG, "xStep: " + String.valueOf(xStep));
Log.d(TAG, "yStep: " + String.valueOf(yStep));
Log.d(TAG, "xPixels: " + String.valueOf(xPixels));
Log.d(TAG, "yPixels: " + String.valueOf(yPixels));
//Log.d(TAG, String.valueOf((xOffset / xStep)));
Log.d(TAG, " ");*/
this.xPixels = xPixels;
this.xStep = xStep;
this.xOffset = xOffset;
drawFrame();
}
/*
* Store the position of the touch event so we can use it for drawing later
*/
#Override
public void onTouchEvent(MotionEvent event) {
// Touch input is not used; delegate to the default handling.
super.onTouchEvent(event);
}
/*
* Draw one frame of the animation. This method gets called repeatedly
* by posting a delayed Runnable. You can do any drawing you want in
* here. This example draws a wireframe cube.
*/
// Renders one frame onto the wallpaper surface, then re-arms the redraw
// timer (5 fps) while the wallpaper is visible.
void drawFrame() {
    final SurfaceHolder holder = getSurfaceHolder();
    Canvas frame = null;
    try {
        frame = holder.lockCanvas();
        if (frame != null) {
            // Paint the current photo / status text.
            drawBmp(frame);
        }
    } finally {
        // Always post the canvas back, even if drawing threw.
        if (frame != null) {
            holder.unlockCanvasAndPost(frame);
        }
    }
    // Cancel any pending run first so at most one callback is ever queued,
    // then schedule the next frame if we are still visible.
    mHandler.removeCallbacks(mDrawBtmp);
    if (mVisible) {
        mHandler.postDelayed(mDrawBtmp, 1000 / 5);
    }
}
private class MyThread extends Thread {
Location loc;
MyThread(Location loc){
this.loc = loc;
}
public synchronized void run(){ // bringt synchronized was? weil wir kreieren ja immer eine neue intanz...
try{
if (loc == null){
Log.d(TAG, "location is null");
return;
}
<SNIP>
//the main code, update `status` and `bmp`
}
/**
 * Paints the current photo ({@code bmp}) onto the wallpaper canvas, or a
 * status line while no photo is available, plus the resolved location name.
 */
void drawBmp(Canvas canvas) {
    canvas.drawColor(Color.BLACK);
    if (bmp != null) {
        // FIX: scale into a LOCAL instead of assigning back to `bmp`.
        // The original overwrote the field with the scaled result, so any
        // later rescale (e.g. after a surface-size change) resampled an
        // already-resampled image — the repeated scaling compounded and
        // visibly degraded the picture. Keeping `bmp` pristine avoids that.
        // (Even better: cache the scaled copy in a second field so it is not
        // recomputed every frame.)
        Bitmap toDraw = bmp;
        if (toDraw.getHeight() != canvas.getHeight()) {
            float ratio = (float) toDraw.getWidth() / (float) toDraw.getHeight();
            Log.d(TAG, "scaling");
            toDraw = Bitmap.createScaledBitmap(
                    toDraw, (int) (canvas.getHeight() * ratio), canvas.getHeight(), true);
        }
        // Horizontal placement: pan with the launcher offset when the image is
        // wider than the screen; otherwise use the original half-difference
        // offset. NOTE(review): (bmpW - canvasW)/2 is negative for a narrower
        // image, which shifts it LEFT rather than centring it — behavior kept,
        // but confirm it is intended.
        int x;
        if (toDraw.getWidth() >= canvas.getWidth()) {
            x = -1 * (int) (xOffset * (toDraw.getWidth() - canvas.getWidth()));
        } else {
            x = (toDraw.getWidth() - canvas.getWidth()) / 2;
        }
        canvas.drawBitmap(toDraw, x, 0, null);
    } else if (status != null) {
        // No photo yet: show the status message centred on screen.
        Paint textPaint = new Paint();
        textPaint.setColor(Color.WHITE);
        textPaint.setAntiAlias(true);
        textPaint.setTextSize(25);
        textPaint.setAlpha(120);
        textPaint.setTextAlign(Align.CENTER);
        canvas.drawText(status, canvas.getWidth() / 2, canvas.getHeight() / 2, textPaint);
    }
    if (locationName != null) {
        // Overlay the resolved location name near the top-left corner.
        // (The original's unused getTextBounds() call was removed.)
        Paint textPaint = new Paint();
        textPaint.setColor(Color.BLACK);
        textPaint.setAntiAlias(true);
        textPaint.setTextSize(30);
        textPaint.setAlpha(150);
        textPaint.setTextAlign(Align.LEFT);
        canvas.drawText(locationName, 0, 70, textPaint);
    }
}
}
}
In drawBmp() you repeatedly take your bitmap (bmp), scale it (the call to createScaledBitmap), and then assign the result back to bmp. Over time, all of these compounding scaling operations produce the artifacts you see.
To solve this, store the original bitmap in a separate variable (e.g., private Bitmap originalImage;) and always create the scaled bitmap from that pristine original:
bmp = Bitmap.createScaledBitmap(originalImage, (int)(canvas.getHeight()*ratio), canvas.getHeight(), true);

Categories