Faster way of displaying offscreen bitmap using android/java - java

I have started learning Android and Java using the Android Studio beta. As a first simple test app I am trying to get a basic Mandelbrot renderer working. I have gotten it to display, but now I want it to run faster. Can anyone give advice on the following?
The docs say Canvas.drawBitmap is deprecated. What should I use instead? What is the fastest way to show a bitmap onscreen?
How can I show the progress of the calculations? If I uncomment the two lines marked "update display after each line has been calculated", nothing updates during the calculation, and the extra calls to canvas.drawBitmap really slow it all down (79 seconds compared to 31 seconds without them).
Is there anything I can do to speed up the general math calls?
I have tried to keep it as simple as possible for this example.
Many thanks for any tips for a newbie. I don't want to learn bad habits from the start if possible.
The layout has a single ImageView aligned to the screen. The full code is:
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Point;
import android.os.Bundle;
import android.os.SystemClock;
import android.view.Display;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.ImageView;
import java.text.DecimalFormat;
import java.util.Random;
import static android.graphics.Color.argb;
import static android.graphics.Color.rgb;
public class myActivity extends Activity {
myView myview;
Bitmap bmp;
Canvas myCanvas;
ImageView img;
Paint paint;
Boolean started=false;
Integer ih,iw,i,redraws,fw,fh;
long startms,finishms;
Random rnd;
double xmin,xmax,ymin,ymax,padding,bailout,bailout_squared,stepx,stepy,x,y,magnitude;
double realiters,tweenval;
Integer col,colval1,colval2;
Integer iterations,maxiterations,superx,supery,samplepixels,square_samplepixels,xloop,yloop;
double zr,zi,cr,ci,xloopdouble,yloopdouble;
int[] colorpalette = new int[256];
int[] pixels;
int r,g,b,r1,g1,b1,r2,g2,b2,totalr,totalg,totalb;
private void init(){
//screen size
Display display = getWindowManager().getDefaultDisplay();
Point sizePoint = new Point();
paint = new Paint();
display.getSize(sizePoint);
iw=sizePoint.x;
ih=sizePoint.y;
//pixels array
fw=iw;
fh=ih;
pixels=new int[fw*fh];
//create bitmap
bmp=Bitmap.createBitmap(iw, ih, Bitmap.Config.RGB_565);
//create canvas
myCanvas =new Canvas();
myCanvas.setBitmap(bmp);
img = (ImageView) findViewById(R.id.imageView1);
rnd = new Random();
}
// calculates and displays the Mandelbrot fractal
private void Mandelbrot(){
startms= SystemClock.uptimeMillis();
//coordinates
// xmin=-1.6345100402832;
// xmax=-1.63043992784288;
// ymin=-0.00209962230258512;
// ymax=0.00209259351094558;
xmin=-2.3;
xmax=2.3;
ymin=-2.1;
ymax=2.1;
fw=iw;
fh=ih;
//adjust coords to match screen aspect
if (iw<ih) {
padding=(xmax-xmin)/iw*(ih-iw);
ymin=ymin-padding/2.0;
ymax=ymax+padding/2.0;
} else {
padding=(ymax-ymin)/ih*(iw-ih);
xmin=xmin-padding/2.0;
xmax=xmax+padding/2.0;
}
bailout=8.0; //needs to be higher than default 2 for the CPM coloring to be smooth
bailout_squared=bailout*bailout;
maxiterations=64;
samplepixels=1;
square_samplepixels=samplepixels*samplepixels;
//random color palette
for (col=0;col<256;col++){
colorpalette[col]=android.graphics.Color.argb(255,rnd.nextInt(256),rnd.nextInt(256),rnd.nextInt(256));
}
stepx=(xmax-xmin)/fw/samplepixels;
stepy=(ymax-ymin)/fh/samplepixels;
for (yloop=0;yloop<fh;yloop++){
for (xloop=0;xloop<fw;xloop++){
totalr=0;
totalg=0;
totalb=0;
r=0;
g=0;
b=0;
xloopdouble=(double)xloop;
yloopdouble=(double)yloop;
for (supery=0;supery<samplepixels;supery++)
{
for (superx=0;superx<samplepixels;superx++)
{
cr = xmin+xloopdouble/(double)fw*(xmax-xmin)+(stepx*(double)superx);
ci = ymin+yloopdouble/(double)fh*(ymax-ymin)+(stepy*(double)supery);
zr = 0.0;
zi = 0.0;
magnitude=0.0;
for(iterations=0; iterations<maxiterations; iterations++)
{
i=iterations;
x = (zr * zr - zi * zi) + cr;
y = (zi * zr + zr * zi) + ci;
magnitude=(x * x + y * y);
if(magnitude>bailout_squared) break;
zr = x;
zi = y;
}
if (iterations>=maxiterations) {
r=0;
g=0;
b=0;
} else {
//CPM smooth colors
realiters=iterations+1-((Math.log(Math.log(Math.sqrt(magnitude)))/Math.log(2.0)));
colval1=(int) Math.floor(realiters % 255);
colval2=(colval1+1) % 255;
tweenval=realiters-Math.floor(realiters);
r1=Color.red(colorpalette[colval1]);
g1=Color.green(colorpalette[colval1]);
b1=Color.blue(colorpalette[colval1]);
r2=Color.red(colorpalette[colval2]);
g2=Color.green(colorpalette[colval2]);
b2=Color.blue(colorpalette[colval2]);
r=(int) (r1+((r2-r1)*tweenval));
g=(int) (g1+((g2-g1)*tweenval));
b=(int) (b1+((b2-b1)*tweenval));
}
totalr=totalr+r;
totalg=totalg+g;
totalb=totalb+b;
}
}
r=(int) totalr/square_samplepixels;
g=(int) totalg/square_samplepixels;
b=(int) totalb/square_samplepixels;
//update pixels array
pixels[xloop+yloop*fw]=rgb(r, g, b);
}
//update display after each line has been calculated
//myCanvas.drawBitmap(pixels,0,fw,0,0,fw,fh,false,null);
//if (img != null) img.invalidate();
}
myCanvas.drawBitmap(pixels,0,fw,0,0,fw,fh,false,null);
finishms=SystemClock.uptimeMillis();
}
private void updateTimeTaken(){
//turn antialiasing on
paint.setAntiAlias(true);
// draw some text using FILL style
paint.setStyle(Paint.Style.FILL);
paint.setTextSize(30);
DecimalFormat myFormatter = new DecimalFormat("#,###,###");
paint.setColor(Color.BLACK);
myCanvas.drawText("Time taken = " + myFormatter.format(finishms - startms) + " ms", 15, 45, paint);
paint.setColor(Color.WHITE);
myCanvas.drawText("Time taken = " + myFormatter.format(finishms - startms) + " ms", 14, 44, paint);
paint.setColor(Color.BLACK);
myCanvas.drawText("Screen size = " + String.valueOf(iw) + " x " + String.valueOf(ih), 15, 85, paint);
paint.setColor(Color.WHITE);
myCanvas.drawText("Screen size = " + String.valueOf(iw) + " x " + String.valueOf(ih), 14, 84, paint);
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
//fullscreen no menu
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
redraws=0;
super.onCreate(savedInstanceState);
myview = new myView(this);
setContentView(myview);
init();
started=true;
Mandelbrot();
updateTimeTaken();
}
private class myView extends View{
public myView(Context context){
super(context);
}
@Override protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
//draw off screen bitmap to screen
if (started==true){
canvas.drawBitmap(bmp,0,0,paint);
}
}
}
}

Related

Loading Bay Lights Detection with OpenCV

I am not a programmer, but I am trying to make a simple Android app. It is supposed to recognize which light is currently on (RED or GREEN). I did a lot of research and followed a few tutorials; this was one of my guides on how to do it. My case is much simpler because the lights are always stationary and the user has to aim the camera at them. I draw a blue box of adjustable size on the screen, and the program only looks for red and green lights inside that area. But the accuracy of the detection is not as good as I would like. What can I do to improve it, without using deep learning, which is black magic to me at the moment? :) I thought my next step would be creating a mask with red and green circles and comparing which circle has the brighter pixels, so that I could decide which light is on, similar to that tutorial.
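Something like this is what I have in mind for that comparison (just a rough sketch; grayScaleRED and green_greyScale are the masked grayscale Mats produced in onCameraFrame in the code below):
// compare overall brightness inside the red-masked and green-masked regions;
// whichever mean is higher would be taken as the light that is currently on
Scalar redMean = Core.mean(grayScaleRED);
Scalar greenMean = Core.mean(green_greyScale);
boolean redIsOn = redMean.val[0] > greenMean.val[0];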
Below is the code I have so far, and this is how the program looks. You can see that the app is a bit laggy in that video, but after I started using a submat instead of the whole Mat it isn't laggy any more.
If anyone can help me with this somehow, I would be very grateful.
What if I wanted to use deep learning? How would I start?
package com.gavreel.jrs.baylightalarm;
import android.content.pm.ActivityInfo;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.SurfaceView;
import android.view.WindowManager;
import android.widget.SeekBar;
import android.widget.TextView;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
public class MainActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2{
SeekBar seekBar;
double areaOfIntrestSize ;
Mat mRgba;
Mat grayScaleRED;
Mat imgHSV;
Mat imgREDThreshold;
Mat imgREDThreshold2;
Mat greenThreshold ;
Mat red_Threshold;
Mat red_rgb_image;
Mat green_rgb;
Mat green_greyScale;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS: {
mOpenCvCameraView.enableView();
}
break;
default: {
super.onManagerConnected(status);
}
break;
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
seekBar = findViewById(R.id.seekBar);
seekBar.setMax(20);
seekBar.setProgress(10);
areaOfIntrestSize = 0.04 * seekBar.getProgress();
seekBar.setOnSeekBarChangeListener(
new SeekBar.OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int i, boolean b) {
if(seekBar.getProgress()==0) seekBar.setProgress(1);
areaOfIntrestSize = 0.04 * seekBar.getProgress();
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
}
});
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.CV_cameraView);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
mOpenCvCameraView.setCameraIndex(0);
}
@Override
protected void onResume() {
super.onResume();
super.onResume();
if (!OpenCVLoader.initDebug()) {
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION, this, mLoaderCallback);
} else {
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
@Override
public void onCameraViewStarted(int width, int height) {
grayScaleRED = new Mat();
mRgba = new Mat();
imgHSV = new Mat();
imgREDThreshold = new Mat();
imgREDThreshold2 = new Mat();
greenThreshold = new Mat();
red_Threshold= new Mat();
red_rgb_image = new Mat();
green_rgb = new Mat();
green_greyScale = new Mat();
}
@Override
protected void onPause() {
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onCameraViewStopped() {
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
Size orig = mRgba.size();
double offx = 0.5 * (1.0-areaOfIntrestSize) * orig.width;
double offy = 0.5 * (1.0-areaOfIntrestSize) * orig.height;
// Mat cropped = Mat.zeros(mRgba.size(),mRgba.type());
//Imgproc.rectangle(cropped,new Point(offx,offy), new Point(orig.width-offx,orig.height-offy), new Scalar(255,255,255),-1);
Imgproc.rectangle(mRgba,new Point(offx,offy), new Point(orig.width-offx,orig.height-offy), new Scalar(48,151,255),8);
Mat cropped = mRgba.submat((int)offy,(int)(orig.height-offy),(int) offx,(int)(orig.width-offx));
/*
float zoom = 0.2f;
Size orig = mRgba.size();
double offx = 0.5 * (1.0-zoom) * orig.width;
double offy = 0.5 * (1.0-zoom) * orig.height;
Mat cropped2 = mRgba.submat((int)offy,(int)(orig.height-offy),(int) offx,(int)(orig.width-offx));
// resize to original:
Imgproc.resize(cropped, cropped, orig);
Imgproc.rectangle(mRgba,new Point(offx,offy), new Point(orig.width-offx,orig.height-offy), new Scalar(255),8);
*/
Imgproc.cvtColor(cropped,imgHSV,Imgproc.COLOR_RGBA2RGB);
Imgproc.cvtColor(imgHSV,imgHSV,Imgproc.COLOR_RGB2HSV);
Imgproc.medianBlur(imgHSV,imgHSV,3);
Core.inRange(imgHSV, new Scalar(45,60,60),new Scalar(75,255,255),greenThreshold); // green
Core.inRange(imgHSV, new Scalar(0,100,100),new Scalar(3,255,255), imgREDThreshold2); // red
Core.inRange(imgHSV, new Scalar(170,100,100), new Scalar(179,255,255), imgREDThreshold); // red
imgHSV.release();
Core.addWeighted(imgREDThreshold2,1.0, imgREDThreshold,1.0,0.0,red_Threshold); // redThreshold + redThreshold2
imgREDThreshold.release();
imgREDThreshold2.release();
cropped.copyTo(green_rgb,greenThreshold);
cropped.copyTo(red_rgb_image,red_Threshold);
cropped.release();
Imgproc.cvtColor(red_rgb_image,grayScaleRED,Imgproc.COLOR_RGBA2GRAY);
Imgproc.cvtColor(green_rgb,green_greyScale,Imgproc.COLOR_RGBA2GRAY);
circleDetection(grayScaleRED,offx,offy,new Scalar(255));
circleDetection(green_greyScale,offx,offy,new Scalar( 50,180,50));
// Imgproc.cvtColor(result,grayScaleLD,Imgproc.COLOR_RGBA2GRAY);
// Imgproc.threshold(grayScaleLD,brighestRed,150,255,Imgproc.THRESH_BINARY);
//Core.MinMaxLocResult minMaxLocResultBlur = Core.minMaxLoc(grayScaleLD);
// mRgba.copyTo(light,brighestRed);
green_greyScale.release();
grayScaleRED.release();
greenThreshold.release();
red_Threshold.release();
red_rgb_image.release();
green_rgb.release();
return mRgba;
}
void circleDetection ( Mat mGrey,double xoffset , double yoffset, Scalar scalarColor ){
Mat circles = new Mat();
Imgproc.GaussianBlur(mGrey,mGrey,new Size(5,5),0,0);
Mat kelner = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_ELLIPSE,new Size(2,2));
Imgproc.erode(mGrey,mGrey,kelner);
Imgproc.dilate(mGrey,mGrey,kelner);
Imgproc.threshold(mGrey,mGrey,0,255, Imgproc.THRESH_OTSU);
Imgproc.HoughCircles(mGrey,circles,Imgproc.CV_HOUGH_GRADIENT, 1, 200, 120, 10, 2, 30);
double x = 0.0;
double y = 0.0;
int r = 0;
for( int i = 0; i < circles.cols(); i++ ) {
double[] data = circles.get(0, i);
for (int j = 0; j < data.length; j++) {
x = data[0];
y = data[1];
r = (int) data[2];
}
Imgproc.circle(mRgba,new Point(x+xoffset,y+yoffset),r,scalarColor,6);
}
circles.release();
kelner.release();
mGrey.release();
}
}

Android doubling number of touches?

This Android app is acting as if I had double-tapped it each time I touch the screen. Can anyone see why this is happening? I have it log "touch # " + [the number of touches], and it logs "touch 1" and "touch 2" at the same time, then "touch 3" and "touch 4" together, and so on.
package com.example.mondrianmaker;
import java.util.ArrayList;
import java.util.Random;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.drawable.BitmapDrawable;
import android.os.Bundle;
import android.util.Log;
import android.view.Display;
import android.view.MotionEvent;
import android.view.Window;
import android.view.WindowManager;
import android.widget.LinearLayout;
import android.widget.TextView;
public class MainActivity extends Activity {
int width, height, x, y, color, touch;
ArrayList<Rectangle> rectangles, childsRect;
Bitmap bg;
Canvas canvas;
Display display;
LinearLayout ll;
Point size;
Paint paint;
Random rn;
Rectangle parentRect;
TextView text, info;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//***FULL SCREEN***
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
//---FULL SCREEN---
setContentView(R.layout.activity_main);
display = getWindowManager().getDefaultDisplay();
size = new Point();
display.getSize(size);
width = size.x;
height = size.y;
paint = new Paint(); //paint to color rectangle OBLI
bg = Bitmap.createBitmap(480, 800, Bitmap.Config.ARGB_8888);
canvas = new Canvas(bg);
ll = (LinearLayout) findViewById(R.id.mondrian);
rectangles = new ArrayList<Rectangle>();
text = (TextView) findViewById(R.id.coordinates);
info = (TextView) findViewById(R.id.info);
parentRect = new Rectangle(0, 0, 480, 800); //create super parent rectangle
rectangles.add(parentRect); //add super parent rectangle to list
touch = 0;
}
@Override
public boolean onTouchEvent(MotionEvent event) {
//Get Coordinate
x = (int) event.getX();
y = (int) event.getY();
//set text view to clicked coordinates and number of rectangles on list
text.setText(x + ", " + y + " rectangles: "+rectangles.size());
//set rectangle color *randomly
setColor();
paint.setColor(color);
//get rectangle clicked
parentRect = getRectParent(x, y);
//make children of rectangle clicked
childsRect = makeChilds(parentRect, x, y);
addChildsToRectanglesDeleteParentRect(childsRect, parentRect);
//check on childs
// text.setText(childsRect.get(0) + " " + childsRect.get(1));
//get onTouch action
switch (event.getAction()) {
// case MotionEvent.ACTION_DOWN:
// text.setText(x + ", " + y);
// ll.setBackgroundDrawable(new BitmapDrawable(bg));
// case MotionEvent.ACTION_MOVE:
// ll.setBackgroundDrawable(new BitmapDrawable(bg));
// System.out.println("22222");
case MotionEvent.ACTION_UP:
touch++;
Log.i("info", "touch # " + touch);
// ll.setBackgroundDrawable(new BitmapDrawable(bg));
// System.out.println("33333");
}
return false;
}
public void addChildsToRectanglesDeleteParentRect(ArrayList<Rectangle> childsRect, Rectangle parentRect){
rectangles.remove(parentRect);
rectangles.add(childsRect.get(0));
rectangles.add(childsRect.get(1));
int i = 0;
Log.i("info", "length= "+rectangles.size());
for(Rectangle e : rectangles){
Log.i("info", i+"--> "+ e.toString());
i++;
}
}
public ArrayList<Rectangle> makeChilds(Rectangle parent, int x, int y){
ArrayList<Rectangle> defaults = new ArrayList<Rectangle>();
defaults.add(new Rectangle(0,0,240,800));
defaults.add(new Rectangle(240,0, 480, 800));
ArrayList<Rectangle> childs = new ArrayList<Rectangle>();
for(Rectangle rect : rectangles){
if(x>=rect.getX1() && x<=rect.getX2() && y>=rect.getY1() && y<=rect.getY2()){
childs.add(new Rectangle(rect.getX1(), rect.getY1(), rect.getX2()/2, rect.getY2()/2));
childs.add(new Rectangle(rect.getX2()/2, rect.getY2()/2, rect.getX2(), rect.getY2()));
}
}
if(childs.size()>0){
return childs;
}else{
info.setText(x + ", " + y + " " + parent + "no childs for this parent");
return defaults;
}
}
public Rectangle getRectParent(int x, int y){
for(Rectangle g : rectangles){
if (x > g.getX1() && x < g.getX2() && y > g.getY1() && y < g.getY2()){
info.setText(info.getText() + "\n " + g.getX1()+"_x=" + x +"_"+g.getX2() +"\n"+g.getY1()+"_y="+y+"_"+g.getY2());
return g;
}
}
return null;
}
public void setColor(){
rn = new Random();
switch ((int) Math.floor(rn.nextDouble() * 5)) {
case 0:
color = Color.BLACK;
break;
case 1:
color = Color.RED;
break;
case 2:
color = Color.YELLOW;
break;
case 3:
color = Color.WHITE;
break;
case 4:
color = Color.GREEN;
break;
}
}
}
You're probably getting an ACTION_DOWN and an ACTION_MOVE and counting both.
Try moving your ++ to inside the switch case for ACTION_DOWN.
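For example, a minimal sketch of that change (the return true is an extra assumption so the rest of the gesture keeps being delivered; everything else in onTouchEvent stays as it is):
@Override
public boolean onTouchEvent(MotionEvent event) {
    switch (event.getAction()) {
        case MotionEvent.ACTION_DOWN:
            // count only the initial finger-down, not the MOVE/UP events that follow
            touch++;
            Log.i("info", "touch # " + touch);
            break;
    }
    return true;
}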
I tested your code and it works without any issue; however, you can try using an if statement instead. Also make sure your switch statement returns true or uses break; (though it works without either of them here). Here is how you can use an if statement:
if(event.getAction() == MotionEvent.ACTION_UP){
// up is called
touch++;
Log.i("info", "touch # " + touch);
return true;
}

Android: Audio Recording with voice level visualization

I need to create an Android application that records voice while showing a voice (sound) level visualization.
I have already created an audio recording application, but I cannot add the sound level visualization. How can I do it?
Please, can someone help by giving a suggestion, a sample tutorial link, or code?
Create an XML layout activity_recording.xml like this:
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="180dp"
android:layout_alignParentBottom="true"
android:background="#231f20" >
<ali.visualiser.VisualizerView
android:id="#+id/visualizer"
android:layout_width="220dp"
android:layout_height="75dp"
android:layout_centerHorizontal="true"
android:layout_margin="5dp" />
<TextView
android:id="#+id/txtRecord"
android:layout_width="wrap_content"
android:layout_height="40dp"
android:layout_alignParentBottom="true"
android:layout_centerHorizontal="true"
android:layout_marginBottom="25dp"
android:gravity="center"
android:text="Start Recording"
android:textColor="#android:color/white"
android:textSize="30sp" />
</RelativeLayout>
Create a custom VisualizerView as given below:
package ali.visualiser;
import java.util.ArrayList;
import java.util.List;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.view.View;
public class VisualizerView extends View {
private static final int LINE_WIDTH = 1; // width of visualizer lines
private static final int LINE_SCALE = 75; // scales visualizer lines
private List<Float> amplitudes; // amplitudes for line lengths
private int width; // width of this View
private int height; // height of this View
private Paint linePaint; // specifies line drawing characteristics
// constructor
public VisualizerView(Context context, AttributeSet attrs) {
super(context, attrs); // call superclass constructor
linePaint = new Paint(); // create Paint for lines
linePaint.setColor(Color.GREEN); // set color to green
linePaint.setStrokeWidth(LINE_WIDTH); // set stroke width
}
// called when the dimensions of the View change
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
width = w; // new width of this View
height = h; // new height of this View
amplitudes = new ArrayList<Float>(width / LINE_WIDTH);
}
// clear all amplitudes to prepare for a new visualization
public void clear() {
amplitudes.clear();
}
// add the given amplitude to the amplitudes ArrayList
public void addAmplitude(float amplitude) {
amplitudes.add(amplitude); // add newest to the amplitudes ArrayList
// if the power lines completely fill the VisualizerView
if (amplitudes.size() * LINE_WIDTH >= width) {
amplitudes.remove(0); // remove oldest power value
}
}
// draw the visualizer with scaled lines representing the amplitudes
@Override
public void onDraw(Canvas canvas) {
int middle = height / 2; // get the middle of the View
float curX = 0; // start curX at zero
// for each item in the amplitudes ArrayList
for (float power : amplitudes) {
float scaledHeight = power / LINE_SCALE; // scale the power
curX += LINE_WIDTH; // increase X by LINE_WIDTH
// draw a line representing this item in the amplitudes ArrayList
canvas.drawLine(curX, middle + scaledHeight / 2, curX, middle
- scaledHeight / 2, linePaint);
}
}
}
Create the RecordingActivity class as given below:
package ali.visualiser;
import java.io.File;
import java.io.IOException;
import android.app.Activity;
import android.media.MediaRecorder;
import android.media.MediaRecorder.OnErrorListener;
import android.media.MediaRecorder.OnInfoListener;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.TextView;
public class RecordingActivity extends Activity {
public static final String DIRECTORY_NAME_TEMP = "AudioTemp";
public static final int REPEAT_INTERVAL = 40;
private TextView txtRecord;
VisualizerView visualizerView;
private MediaRecorder recorder = null;
File audioDirTemp;
private boolean isRecording = false;
private Handler handler; // Handler for updating the visualizer
// private boolean recording; // are we currently recording?
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_recording);
visualizerView = (VisualizerView) findViewById(R.id.visualizer);
txtRecord = (TextView) findViewById(R.id.txtRecord);
txtRecord.setOnClickListener(recordClick);
audioDirTemp = new File(Environment.getExternalStorageDirectory(),
DIRECTORY_NAME_TEMP);
if (audioDirTemp.exists()) {
deleteFilesInDir(audioDirTemp);
} else {
audioDirTemp.mkdirs();
}
// create the Handler for visualizer update
handler = new Handler();
}
OnClickListener recordClick = new OnClickListener() {
@Override
public void onClick(View v) {
if (!isRecording) {
// isRecording = true;
txtRecord.setText("Stop Recording");
recorder = new MediaRecorder();
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
recorder.setOutputFile(audioDirTemp + "/audio_file"
+ ".mp3");
OnErrorListener errorListener = null;
recorder.setOnErrorListener(errorListener);
OnInfoListener infoListener = null;
recorder.setOnInfoListener(infoListener);
try {
recorder.prepare();
recorder.start();
isRecording = true; // we are currently recording
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
handler.post(updateVisualizer);
} else {
txtRecord.setText("Start Recording");
releaseRecorder();
}
}
};
private void releaseRecorder() {
if (recorder != null) {
isRecording = false; // stop recording
handler.removeCallbacks(updateVisualizer);
visualizerView.clear();
recorder.stop();
recorder.reset();
recorder.release();
recorder = null;
}
}
public static boolean deleteFilesInDir(File path) {
if( path.exists() ) {
File[] files = path.listFiles();
if (files == null) {
return true;
}
for(int i=0; i<files.length; i++) {
if(files[i].isDirectory()) {
}
else {
files[i].delete();
}
}
}
return true;
}
@Override
protected void onDestroy() {
super.onDestroy();
releaseRecorder();
}
// updates the visualizer every 50 milliseconds
Runnable updateVisualizer = new Runnable() {
@Override
public void run() {
if (isRecording) // if we are already recording
{
// get the current amplitude
int x = recorder.getMaxAmplitude();
visualizerView.addAmplitude(x); // update the VisualizeView
visualizerView.invalidate(); // refresh the VisualizerView
// update in 40 milliseconds
handler.postDelayed(this, REPEAT_INTERVAL);
}
}
};
}
Result
This is how it looks:
https://www.youtube.com/watch?v=BoFG6S02GH0
When it reaches the end, the animation continues as expected: erasing the beginning of the graph.
I like Ali's answer, but here's a simpler version that performs much better. The real speed comes from making the view class's onDraw method as fast as possible. Store the correct values in memory first by doing any computations not required for drawing outside the draw loop, and pass fully populated structures to draw routines to allow the hardware to optimize drawing many lines.
I launched my RecordingActivity and set it full screen, but you can create a layout resource or add the view anywhere.
Activity:
public class RecordingActivity extends Activity {
private VisualizerView visualizerView;
private MediaRecorder recorder = new MediaRecorder();
private Handler handler = new Handler();
final Runnable updater = new Runnable() {
public void run() {
handler.postDelayed(this, 1);
int maxAmplitude = recorder.getMaxAmplitude();
if (maxAmplitude != 0) {
visualizerView.addAmplitude(maxAmplitude);
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_recording);
visualizerView = (VisualizerView) findViewById(R.id.visualizer);
try {
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
recorder.setOutputFile("/dev/null");
recorder.prepare();
recorder.start();
} catch (IllegalStateException | IOException ignored) {
}
}
@Override
protected void onDestroy() {
super.onDestroy();
handler.removeCallbacks(updater);
recorder.stop();
recorder.reset();
recorder.release();
}
@Override
public void onWindowFocusChanged(boolean hasFocus) {
super.onWindowFocusChanged(hasFocus);
handler.post(updater);
}
}
View:
public class VisualizerView extends View {
private static final int MAX_AMPLITUDE = 32767;
private float[] amplitudes;
private float[] vectors;
private int insertIdx = 0;
private Paint pointPaint;
private Paint linePaint;
private int width;
private int height;
public VisualizerView(Context context, AttributeSet attrs) {
super(context, attrs);
linePaint = new Paint();
linePaint.setColor(Color.GREEN);
linePaint.setStrokeWidth(1);
pointPaint = new Paint();
pointPaint.setColor(Color.BLUE);
pointPaint.setStrokeWidth(1);
}
@Override
protected void onSizeChanged(int width, int h, int oldw, int oldh) {
this.width = width;
height = h;
amplitudes = new float[this.width * 2]; // xy for each point across the width
vectors = new float[this.width * 4]; // xxyy for each line across the width
}
/**
* modifies draw arrays. cycles back to zero when amplitude samples reach max screen size
*/
public void addAmplitude(int amplitude) {
invalidate();
float scaledHeight = ((float) amplitude / MAX_AMPLITUDE) * (height - 1);
int ampIdx = insertIdx * 2;
amplitudes[ampIdx++] = insertIdx; // x
amplitudes[ampIdx] = scaledHeight; // y
int vectorIdx = insertIdx * 4;
vectors[vectorIdx++] = insertIdx; // x0
vectors[vectorIdx++] = 0; // y0
vectors[vectorIdx++] = insertIdx; // x1
vectors[vectorIdx] = scaledHeight; // y1
// insert index must be shorter than screen width
insertIdx = ++insertIdx >= width ? 0 : insertIdx;
}
@Override
public void onDraw(Canvas canvas) {
canvas.drawLines(vectors, linePaint);
canvas.drawPoints(amplitudes, pointPaint);
}
}
If you're using the MediaRecorder class and a visualization based on peak amplitude is OK, you can use the getMaxAmplitude() method to continuously poll for the "maximum absolute amplitude that was sampled since the last call".
Scale that amplitude down into an index that determines how many of your app's graphical volume bars to light up, and you're set.
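A rough sketch of that polling approach (recorder is assumed to be a started MediaRecorder, and NUM_BARS/updateBars() are placeholders for your own UI, not part of any Android API):
private static final int NUM_BARS = 10; // hypothetical number of volume bars in the UI
private final Handler pollHandler = new Handler();
private final Runnable amplitudePoller = new Runnable() {
    @Override
    public void run() {
        // getMaxAmplitude() returns the peak since the last call, in the range 0..32767
        int amp = recorder.getMaxAmplitude();
        int litBars = Math.round((amp / 32767f) * NUM_BARS);
        updateBars(litBars); // placeholder: light up this many bars in your layout
        pollHandler.postDelayed(this, 50); // poll again in ~50 ms
    }
};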
My approach to this is based on activedecay's and Ali's answers, and I added display DPI scaling, since dp is scaled by the screen density, so 1 pixel at 320 dpi is not 1 pixel at 420 dpi. I had the problem that the visualizer was not moving at the same rate on different screens.
I also haven't found out why the canvas doesn't start drawing from the beginning of the view on API 28 only, but it doesn't look bad in any way.
Info about dpi scaling:
Android Developers/Support different pixel densities
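For reference, a tiny sketch of the density conversion the stroke calculation below relies on (160 dpi is the Android baseline density; the helper name is just illustrative):
// 1 dp corresponds to (densityDpi / 160) physical pixels
static float dpToPx(float dp, int densityDpi) {
    return dp * (densityDpi / 160f);
}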
package com.example.mediarecorderdemo.views;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.util.Log;
import android.view.View;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import static com.example.mediarecorderdemo.RecordingActivity.DEBUG;
public class VisualizerView extends View {
private static final int MAX_AMPLITUDE = 32767;
private ArrayList<Float> amplitudes;
private Paint linePaint;
private int width;
private int height;
private int density;
private float stroke;
public VisualizerView(Context context, @Nullable AttributeSet attrs) {
super(context, attrs);
density = this.getResources().getDisplayMetrics().densityDpi; //Get the display DPI
linePaint = new Paint();
linePaint.setColor(Color.GREEN);
linePaint.setAntiAlias(true); //Add AntiAlias for displaying strokes that are less than 1
}
@Override
protected void onSizeChanged(int w, int h, int oldW, int oldH) {
width = w;
height = h;
amplitudes = new ArrayList<>(width * 2);
stroke =(width * ((float)density / 160)) / 1000; //Calculate actual pixel size for the view based on view width and dpi
linePaint.setStrokeWidth(stroke);
}
/**
* Add a new value of int to the visualizer array
* @param amplitude Int value
*/
public void addAmplitude(int amplitude){
invalidate();
float scaledHeight = ((float) amplitude / MAX_AMPLITUDE) * (height -1);
amplitudes.add(scaledHeight);
}
/**
* Clears Visualization
*/
public void clear(){
amplitudes.clear();
}
@Override
protected void onDraw(Canvas canvas) {
int middle = height / 2; // get the middle of the View
float curX = 0; // start curX at zero
// for each item in the amplitudes ArrayList
for (float power : amplitudes) {
// draw a line representing this item in the amplitudes ArrayList
canvas.drawLine(curX, middle + power / 2, curX, middle
- power / 2, linePaint);
curX += stroke; // increase X by line width
}
}
}

Android edittext gets cut off

I created a custom EditText view whose onDraw is meant to draw a red margin line. The problem is that when you scroll down, the line disappears. This code simply draws a margin whose width is pre-defined in the values folder and then colors it with a pre-defined color from colors.xml. Here's my code:
package com.lioncode.notepad;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.widget.EditText;
public class LinedEditText extends EditText {
private static Paint linePaint;
static {
linePaint = new Paint();
linePaint.setColor(Color.DKGRAY);
linePaint.setStyle(Style.STROKE);
}
public LinedEditText(Context context, AttributeSet attributes) {
super(context, attributes);
init();
}
private Paint marginPaint;
private float margin;
private int paperColor;
private void init() {
Resources myResources = getResources();
marginPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
marginPaint.setColor(myResources.getColor(R.color.notepad_margin));
paperColor = myResources.getColor(R.color.notepad_paper);
margin = myResources.getDimension(R.dimen.notepaper_margin);
}
@Override
protected void onDraw(Canvas canvas) {
Rect bounds = new Rect();
int firstLineY = getLineBounds(0, bounds);
int lineHeight = getLineHeight();
int totalLines = Math.max(getLineCount(), getHeight() / lineHeight);
for (int i = 0; i < totalLines; i++) {
int lineY = firstLineY + i * lineHeight;
canvas.drawLine(bounds.left, lineY, bounds.right, lineY, linePaint);
}
// draw margin
canvas.drawLine(margin, 0, margin, getMeasuredHeight(), marginPaint);
// move the text from along the margin
canvas.save();
canvas.translate(margin, 0);
super.onDraw(canvas);
}
}
Replace totalLines with a high number the user will probably never reach.
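For example, a minimal sketch of that change inside the onDraw above (9999 is just an arbitrarily large line count, not a meaningful value):
// draw far more ruled lines than the user is ever likely to scroll to,
// so the lines are still drawn after scrolling down
int totalLines = 9999;
for (int i = 0; i < totalLines; i++) {
    int lineY = firstLineY + i * lineHeight;
    canvas.drawLine(bounds.left, lineY, bounds.right, lineY, linePaint);
}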

Android is trying to use recycled image

I'm getting
Canvas: trying to use a recycled bitmap android.graphics.Bitmap@4057a3a8
every time I try to show one image.
When I delete bmp.recycle() everything works fine, but I don't use this bitmap anywhere else in my code, so I don't understand where the problem is.
package com.example.photobooth;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import android.os.Bundle;
import android.os.Environment;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Rect;
import android.util.DisplayMetrics;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnTouchListener;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.ImageView;
public class EditorActivity extends Activity implements OnClickListener{
String path = null;
private int screen_height;
private int screen_width;
private Bitmap setUpImage(Bitmap image) {
int min_side = Math.min(screen_height, screen_width);
float scale_factor = (float) (((float) min_side / image.getWidth()) * 1.5);
float[] scalef = { scale_factor, scale_factor };
Bitmap scaled_image = ImageUtilities.scaleImage(image, scalef);
return scaled_image;
}
private void setUp() {
Bundle b = getIntent().getExtras();
if (b != null) {
path = b.getString("path");
}
DisplayMetrics metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
this.screen_height = metrics.heightPixels;
this.screen_width = metrics.widthPixels;
int min_measure = Math.min(screen_width, screen_height);
// Make ImageView square
ImageView img = (ImageView) findViewById(R.id.photo_holder);
android.view.ViewGroup.LayoutParams lp = img.getLayoutParams();
lp.height = min_measure;
img.setLayoutParams(lp);
Bitmap bmp = BitmapFactory.decodeFile(path);
final Bitmap ready_image = setUpImage(bmp);
bmp.recycle();
ImageView iv = (ImageView) findViewById(R.id.photo_holder);
iv.setImageBitmap(ready_image);
// set up touch event for imageview(photo_holder)
img.setOnTouchListener(new OnTouchListener() {
float touch_x, touch_y, scrolled_x = 0.0f, scrolled_y = 0.0f;
public boolean onTouch(View v, MotionEvent event) {
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
touch_x = event.getX();
touch_y = event.getY();
break;
case MotionEvent.ACTION_MOVE:
float cur_x = event.getX();
float cur_y = event.getY();
float scroll_x = -cur_x + touch_x;
float scroll_y = -cur_y + touch_y;
scrolled_x += scroll_x;
scrolled_y += scroll_y;
if (scrolled_x > (ready_image.getWidth() - screen_width)/2
|| scrolled_x < -(ready_image.getWidth() - screen_width)/2){
scrolled_x -= scroll_x;
scroll_x = 0;
}
if (scrolled_y > (ready_image.getHeight() - screen_width)/2
|| scrolled_y < -(ready_image.getHeight() - screen_width)/2){
scrolled_y -= scroll_y;
scroll_y = 0;
}
v.scrollBy((int) (scroll_x),
(int) (scroll_y));
touch_x = cur_x;
touch_y = cur_y;
break;
}
return true;
}
});
//Set up buttons
Button btn = (Button)findViewById(R.id.save);
btn.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
ImageView img = (ImageView)findViewById(R.id.photo_holder);
int scroll_x = img.getScrollX();
int scroll_y = img.getScrollY();
int left = (ready_image.getWidth() - screen_width)/2
+ scroll_x;
int top = (ready_image.getHeight() - screen_width)/2
+ scroll_y;
int right = left + screen_width;
int bottom = top + screen_width;
Rect r = new Rect(left, top, right, bottom);
Bitmap croped_image = ImageUtilities.cropImage(ready_image,
r,
screen_width,
screen_width);
String path_to_folder = Environment.getExternalStorageDirectory()
.getAbsolutePath();
String pic_path = path_to_folder + File.separator + MainActivity.app_name;
File f = new File(pic_path);
File picture = null;
try {
picture = File.createTempFile("photo_", ".jpg", f);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
FileOutputStream fos = new FileOutputStream(picture);
croped_image.compress(Bitmap.CompressFormat.JPEG, 100, fos);
fos.close();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
});
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (requestWindowFeature(Window.FEATURE_NO_TITLE))
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_editor);
setUp();
}
public void onClick(View v) {
// TODO Auto-generated method stub
}
}
bmp is recycled in the setUp() method. ImageUtilities is:
package com.example.photobooth;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.PorterDuff.Mode;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.RectF;
public class ImageUtilities {
public static Bitmap getRoundedCornerBitmap(Context context, Bitmap input,
int pixels, int w, int h, boolean squareTL, boolean squareTR,
boolean squareBL, boolean squareBR, boolean border) {
Bitmap output = Bitmap.createBitmap(w, h, Config.ARGB_8888);
Canvas canvas = new Canvas(output);
final float densityMultiplier = context.getResources()
.getDisplayMetrics().density;
final int color = 0xff424242;
final Paint paint = new Paint();
final Rect rect = new Rect(0, 0, w, h);
final RectF rectF = new RectF(rect);
// make sure that our rounded corner is scaled appropriately
final float roundPx = pixels * densityMultiplier;
paint.setAntiAlias(true);
canvas.drawARGB(0, 0, 0, 0);
paint.setColor(color);
canvas.drawRoundRect(rectF, roundPx, roundPx, paint);
// draw rectangles over the corners we want to be square
if (squareTL) {
canvas.drawRect(0, 0, w / 2, h / 2, paint);
}
if (squareTR) {
canvas.drawRect(w / 2, 0, w, h / 2, paint);
}
if (squareBL) {
canvas.drawRect(0, h / 2, w / 2, h, paint);
}
if (squareBR) {
canvas.drawRect(w / 2, h / 2, w, h, paint);
}
paint.setXfermode(new PorterDuffXfermode(Mode.SRC_IN));
canvas.drawBitmap(input, 0, 0, paint);
if (border) {
paint.setStyle(Paint.Style.STROKE);
paint.setColor(Color.WHITE);
paint.setStrokeWidth(3);
canvas.drawRoundRect(rectF, roundPx, roundPx, paint);
}
return output;
}
public static Bitmap cropImage(Bitmap origina_bmp, Rect rec, int w, int h) {
Bitmap target_bitmap = Bitmap.createBitmap(w, h,
Bitmap.Config.ARGB_8888);
target_bitmap.setDensity(origina_bmp.getDensity());
Canvas canvas = new Canvas(target_bitmap);
canvas.drawBitmap(origina_bmp, new Rect(rec.left, rec.top, rec.right,
rec.bottom), new Rect(0, 0, w, h), null);
return target_bitmap;
}
public static Bitmap makeSquareImage(Bitmap original_image, int size){
int min_side = Math.min(original_image.getWidth(),
original_image.getHeight());
int side_size = ImageUtilities.get2del(min_side);
int crop_to;
Bitmap croped_image = null;
if (min_side == original_image.getWidth()){
crop_to = (original_image.getHeight() - side_size) / 2;
croped_image = ImageUtilities.cropImage(original_image, new Rect(
0, crop_to, original_image.getWidth(),
original_image.getHeight() - crop_to), size, size);
}else{
crop_to = (original_image.getWidth() - side_size) / 2;
croped_image = ImageUtilities.cropImage(original_image, new Rect(
crop_to, 0, original_image.getWidth() - crop_to,
original_image.getHeight()), size, size);
}
return croped_image;
}
public static int get2del(int num) {
while (num % 2 != 0)
num++;
return num;
}
public static Bitmap scaleImage(Bitmap originalBMP, float[] scaleFactor) {
Matrix scaleMat = new Matrix();
scaleMat.postScale(scaleFactor[0], scaleFactor[1]);
Bitmap scaledImage = Bitmap.createBitmap(originalBMP, 0, 0,
originalBMP.getWidth(), originalBMP.getHeight(), scaleMat,
false);
return scaledImage;
}
}
so, as far as I can see, it doesn't hold on to the original bitmap.
If I write bmp = null instead of bmp.recycle(), everything is OK, but I wonder why the application crashes otherwise.
What is ImageUtilities? Maybe scaleImage reuses the same image.
Does your program work correctly if you do:
bmp = null;
instead of
bmp.recycle();
?
The official documentation of recycle says:
"This is an advanced call, and normally need not be called, since the normal GC process will free up this memory when there are no more references to this bitmap. "
So using "bmp = null" should be better than "bmp.recycle()".
