Loading Bay Lights Detection with OpenCV - Java

I am not a programmer, but I am trying to make a simple app on Android. It is supposed to recognize which light is currently on (RED or GREEN). I did a lot of research and followed a few tutorials; this was one of my guides on how to do it. My case is much simpler, as the light is always stationary and the user has to aim the camera at the lights. I drew a blue box with an adjustable size on the screen, and the program looks for red and green lights only in that area. But the accuracy of the detection is not as good as I would like. What can I do to improve it, without using deep learning, which is black magic to me at the moment? :) I thought my next step would be creating a mask with red and green circles and comparing which circle has brighter pixels; that way I could decide which light is on. Something similar to that tutorial.
Here is the code I have so far. This is how the program looks. You can see that the app is a bit laggy in this video, but after I started using a submat instead of the whole Mat, it isn't laggy anymore.
If anyone can help me with this, I would be very grateful.
What if I wanted to use deep learning? How would I start?
package com.gavreel.jrs.baylightalarm;
import android.content.pm.ActivityInfo;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.SurfaceView;
import android.view.WindowManager;
import android.widget.SeekBar;
import android.widget.TextView;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
public class MainActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2{
SeekBar seekBar;
double areaOfIntrestSize ;
Mat mRgba;
Mat grayScaleRED;
Mat imgHSV;
Mat imgREDThreshold;
Mat imgREDThreshold2;
Mat greenThreshold ;
Mat red_Threshold;
Mat red_rgb_image;
Mat green_rgb;
Mat green_greyScale;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS: {
mOpenCvCameraView.enableView();
}
break;
default: {
super.onManagerConnected(status);
}
break;
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
seekBar = findViewById(R.id.seekBar);
seekBar.setMax(20);
seekBar.setProgress(10);
areaOfIntrestSize = 0.04 * seekBar.getProgress();
seekBar.setOnSeekBarChangeListener(
new SeekBar.OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int i, boolean b) {
if(seekBar.getProgress()==0) seekBar.setProgress(1);
areaOfIntrestSize = 0.04 * seekBar.getProgress();
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
}
});
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.CV_cameraView);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
mOpenCvCameraView.setCameraIndex(0);
}
@Override
protected void onResume() {
super.onResume();
if (!OpenCVLoader.initDebug()) {
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION, this, mLoaderCallback);
} else {
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
@Override
public void onCameraViewStarted(int width, int height) {
grayScaleRED = new Mat();
mRgba = new Mat();
imgHSV = new Mat();
imgREDThreshold = new Mat();
imgREDThreshold2 = new Mat();
greenThreshold = new Mat();
red_Threshold= new Mat();
red_rgb_image = new Mat();
green_rgb = new Mat();
green_greyScale = new Mat();
}
@Override
protected void onPause() {
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onCameraViewStopped() {
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
Size orig = mRgba.size();
double offx = 0.5 * (1.0-areaOfIntrestSize) * orig.width;
double offy = 0.5 * (1.0-areaOfIntrestSize) * orig.height;
// Mat cropped = Mat.zeros(mRgba.size(),mRgba.type());
//Imgproc.rectangle(cropped,new Point(offx,offy), new Point(orig.width-offx,orig.height-offy), new Scalar(255,255,255),-1);
Imgproc.rectangle(mRgba,new Point(offx,offy), new Point(orig.width-offx,orig.height-offy), new Scalar(48,151,255),8);
Mat cropped = mRgba.submat((int)offy,(int)(orig.height-offy),(int) offx,(int)(orig.width-offx));
/*
float zoom = 0.2f;
Size orig = mRgba.size();
double offx = 0.5 * (1.0-zoom) * orig.width;
double offy = 0.5 * (1.0-zoom) * orig.height;
Mat cropped2 = mRgba.submat((int)offy,(int)(orig.height-offy),(int) offx,(int)(orig.width-offx));
// resize to original:
Imgproc.resize(cropped, cropped, orig);
Imgproc.rectangle(mRgba,new Point(offx,offy), new Point(orig.width-offx,orig.height-offy), new Scalar(255),8);
*/
Imgproc.cvtColor(cropped,imgHSV,Imgproc.COLOR_RGBA2RGB);
Imgproc.cvtColor(imgHSV,imgHSV,Imgproc.COLOR_RGB2HSV);
Imgproc.medianBlur(imgHSV,imgHSV,3);
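// note: red wraps around hue 0 in OpenCV's 0-179 hue scale, hence the two red ranges below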
Core.inRange(imgHSV, new Scalar(45,60,60),new Scalar(75,255,255),greenThreshold); // green
Core.inRange(imgHSV, new Scalar(0,100,100),new Scalar(3,255,255), imgREDThreshold2); // red
Core.inRange(imgHSV, new Scalar(170,100,100), new Scalar(179,255,255), imgREDThreshold); // red
imgHSV.release();
Core.addWeighted(imgREDThreshold2,1.0, imgREDThreshold,1.0,0.0,red_Threshold); // redThreshold + redThreshold2
imgREDThreshold.release();
imgREDThreshold2.release();
cropped.copyTo(green_rgb,greenThreshold);
cropped.copyTo(red_rgb_image,red_Threshold);
cropped.release();
Imgproc.cvtColor(red_rgb_image,grayScaleRED,Imgproc.COLOR_RGBA2GRAY);
Imgproc.cvtColor(green_rgb,green_greyScale,Imgproc.COLOR_RGBA2GRAY);
circleDetection(grayScaleRED,offx,offy,new Scalar(255));
circleDetection(green_greyScale,offx,offy,new Scalar( 50,180,50));
// Imgproc.cvtColor(result,grayScaleLD,Imgproc.COLOR_RGBA2GRAY);
// Imgproc.threshold(grayScaleLD,brighestRed,150,255,Imgproc.THRESH_BINARY);
//Core.MinMaxLocResult minMaxLocResultBlur = Core.minMaxLoc(grayScaleLD);
// mRgba.copyTo(light,brighestRed);
green_greyScale.release();
grayScaleRED.release();
greenThreshold.release();
red_Threshold.release();
red_rgb_image.release();
green_rgb.release();
return mRgba;
}
void circleDetection ( Mat mGrey,double xoffset , double yoffset, Scalar scalarColor ){
Mat circles = new Mat();
Imgproc.GaussianBlur(mGrey,mGrey,new Size(5,5),0,0);
Mat kelner = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_ELLIPSE,new Size(2,2));
Imgproc.erode(mGrey,mGrey,kelner);
Imgproc.dilate(mGrey,mGrey,kelner);
Imgproc.threshold(mGrey,mGrey,0,255, Imgproc.THRESH_OTSU);
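// HoughCircles arguments: dp=1, minDist=200 px between centers, Canny high threshold=120, accumulator threshold=10, radius 2-30 px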
Imgproc.HoughCircles(mGrey,circles,Imgproc.CV_HOUGH_GRADIENT, 1, 200, 120, 10, 2, 30);
double x = 0.0;
double y = 0.0;
int r = 0;
for (int i = 0; i < circles.cols(); i++) {
// each detected circle is stored as (x, y, radius)
double[] data = circles.get(0, i);
x = data[0];
y = data[1];
r = (int) data[2];
Imgproc.circle(mRgba, new Point(x + xoffset, y + yoffset), r, scalarColor, 6);
}
circles.release();
kelner.release();
mGrey.release();
}
}
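A minimal sketch of the mask-comparison idea mentioned above, reusing the greenThreshold and red_Threshold binary masks already computed in onCameraFrame (MIN_LIT_PIXELS is a hypothetical noise threshold you would tune for your setup):
// Sketch: decide which light is on by counting lit pixels in each mask.
private static final int MIN_LIT_PIXELS = 50; // hypothetical tuning value
private String detectActiveLight(Mat greenMask, Mat redMask) {
int greenPixels = Core.countNonZero(greenMask);
int redPixels = Core.countNonZero(redMask);
if (greenPixels < MIN_LIT_PIXELS && redPixels < MIN_LIT_PIXELS) {
return "NONE"; // neither mask has enough lit pixels
}
return greenPixels > redPixels ? "GREEN" : "RED";
}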

Related

I can't view the full image when trying to display camera output in full screen

I have had this issue for a few days already and I'm not sure how to fix it. I have code to open the camera and display the output in full screen, but the displayed image is not complete; part of it is missing. I added this function, and it fixed the image, but the display is no longer full screen.
Here are the two images before and after adding the function:
Before
After
The function I added:
private void configureTransform(int viewWidth, int viewHeight) {
if (null == mTextureView || null == mPreviewSize) {
return;
}
int rotation = getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
mTextureView.setTransform(matrix);
}
My full code for the camera is here:
package com.example.myapplication;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import android.animation.ValueAnimator;
import android.annotation.SuppressLint;
import android.app.Fragment;
import android.content.Context;
import android.content.DialogInterface;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Point;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.ImageReader;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.DisplayMetrics;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import static android.content.ContentValues.TAG;
import static java.lang.Math.abs;
import static java.lang.Math.pow;
public class camera_ocr extends AppCompatActivity {
static int CAMERA_FRONT_BACK = 0;
private MeteringRectangle[] mAFRegions = AutoFocusHelperLib.getZeroWeightRegion();
private MeteringRectangle[] mAERegions = AutoFocusHelperLib.getZeroWeightRegion();
private Rect mCropRegion;
private Boolean cameraReady = false;
private static String versionName = "V7.7.5";
private int flag = 0;
private int[] position;
private View linearLayout;
private static final String TAG = "Camera Error";
private int[] focusMode = {};
private CameraCharacteristics cameraChar;
private Fragment fragment;
private Bitmap output;
private static int totalRotation;
ImageButton camera;
ImageButton flashLight ;
Button btnSnapPhoto;
private static Boolean displayResult = false;
Boolean cameraGranted = false;
public static String licenseKey ;
ValueAnimator animation = null ;
private int[] boxPosition = new int[4];
private float [] cardPosation = new float[8];
private static final int CAMERA_PERMISSION = 123;
private String mCameraId;
private Size mPreviewSize;
private CaptureRequest.Builder mCaptureRequestBuilder;
private HandlerThread mBackgroundHandlerThread;
private Handler mBackgroundHandler;
private static SparseIntArray ORIENTATIONS = new SparseIntArray();
private float screenWidth ;
private float screenHeight ;
CameraCharacteristics characteristics;
private CaptureRequest mPreviewRequest;
private CameraCaptureSession mCaptureSession;
private int mSensorOrientation;
private CaptureRequest.Builder mPreviewRequestBuilder;
private int mState = STATE_PREVIEW;
private ImageReader mImageReader;
private float cameraFocusDistance = 4.5f ;
static {
ORIENTATIONS.append(Surface.ROTATION_0, 0);
ORIENTATIONS.append(Surface.ROTATION_90, 90);
ORIENTATIONS.append(Surface.ROTATION_180, 180);
ORIENTATIONS.append(Surface.ROTATION_270, 270);
}
private static CameraDevice mCameraDevice;
/**
* Camera state: Showing camera preview.
*/
private static final int STATE_PREVIEW = 0;
/**
* Camera state: Waiting for the focus to be locked.
*/
private static final int STATE_WAITING_LOCK = 1;
/**
* Camera state: Waiting for the exposure to be precapture state.
*/
private static final int STATE_WAITING_PRECAPTURE = 2;
/**
* Camera state: Waiting for the exposure state to be something other than precapture.
*/
private static final int STATE_WAITING_NON_PRECAPTURE = 3;
/**
* Camera state: Picture was taken.
*/
private static final int STATE_PICTURE_TAKEN = 4;
/**
* Max preview width that is guaranteed by Camera2 API
*/
private static final int MAX_PREVIEW_WIDTH = 1920;
/**
* Max preview height that is guaranteed by Camera2 API
*/
private static final int MAX_PREVIEW_HEIGHT = 1080;
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
CameraManager manager = null ;
private AutoFitTextureViewLib mTextureView;
@SuppressLint("MissingPermission")
private void openCamera(int width, int height, int camera_front_back) {
setUpCameraOutputs(width, height, camera_front_back);
configureTransform(width, height);
manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
// getCurrentPhoto();
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
}
}
static class CompareSizesByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
// We cast here to ensure the multiplications won't overflow
return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
(long) rhs.getWidth() * rhs.getHeight());
}
}
private static Size chooseOptimalSize(Size[] choices, int textureViewWidth,
int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) {
// Collect the supported resolutions that are at least as big as the preview Surface
List<Size> bigEnough = new ArrayList<>();
// Collect the supported resolutions that are smaller than the preview Surface
List<Size> notBigEnough = new ArrayList<>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
for (Size option : choices) {
if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight &&
option.getHeight() == option.getWidth() * h / w) {
if (option.getWidth() >= textureViewWidth &&
option.getHeight() >= textureViewHeight) {
bigEnough.add(option);
} else {
notBigEnough.add(option);
}
}
}
// Pick the smallest of those big enough. If there is no one big enough, pick the
// largest of those not big enough.
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else if (notBigEnough.size() > 0) {
return Collections.max(notBigEnough, new CompareSizesByArea());
} else {
Log.e(TAG, "Couldn't find any suitable preview size");
return choices[0];
}
}
private void setUpCameraOutputs(int width, int height, int camera_front_back) {
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
String cameraId = manager.getCameraIdList()[camera_front_back];
characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
return;
}
// For still image captures, we use the largest available size.
Size largest = Collections.max(
Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
new CompareSizesByArea());
mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
ImageFormat.JPEG, /*maxImages*/2);
// Find out if we need to swap dimension to get the preview size relative to sensor
// coordinate.
int displayRotation = getWindowManager().getDefaultDisplay().getRotation();
//noinspection ConstantConditions
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
boolean swappedDimensions = false;
switch (displayRotation) {
case Surface.ROTATION_0:
case Surface.ROTATION_180:
if (mSensorOrientation == 90 || mSensorOrientation == 270) {
swappedDimensions = true;
}
break;
case Surface.ROTATION_90:
case Surface.ROTATION_270:
if (mSensorOrientation == 0 || mSensorOrientation == 180) {
swappedDimensions = true;
}
break;
default:
Log.e(TAG, "Display rotation is invalid: " + displayRotation);
}
Point displaySize = new Point();
getWindowManager().getDefaultDisplay().getSize(displaySize);
int rotatedPreviewWidth = width;
int rotatedPreviewHeight = height;
int maxPreviewWidth = displaySize.x;
int maxPreviewHeight = displaySize.y;
if (swappedDimensions) {
rotatedPreviewWidth = height;
rotatedPreviewHeight = width;
maxPreviewWidth = displaySize.y;
maxPreviewHeight = displaySize.x;
}
if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
maxPreviewWidth = MAX_PREVIEW_WIDTH;
}
if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
maxPreviewHeight = MAX_PREVIEW_HEIGHT;
}
// Danger, W.R.! Attempting to use too large a preview size could exceed the camera
// bus' bandwidth limitation, resulting in gorgeous previews but the storage of
// garbage capture data.
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
maxPreviewHeight, largest);
// We fit the aspect ratio of TextureView to the size of preview we picked.
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
mTextureView.setAspectRatio(
mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
mTextureView.setAspectRatio(
mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
mCropRegion = AutoFocusHelperLib.cropRegionForZoom(characteristics,
CameraConstantsLib.ZOOM_REGION_DEFAULT);
mCameraId = cameraId;
return;
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (NullPointerException e) {
// Currently an NPE is thrown when the Camera2API is used but not supported on the
// device this code runs.
}
}
private void configureTransform(int viewWidth, int viewHeight) {
if (null == mTextureView || null == mPreviewSize) {
return;
}
int rotation = getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / mPreviewSize.getHeight(),
(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
} else if (Surface.ROTATION_180 == rotation) {
matrix.postRotate(180, centerX, centerY);
}
mTextureView.setTransform(matrix);
}
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
// This method is called when the camera is opened. We start camera preview here.
mCameraOpenCloseLock.release();
mCameraDevice = cameraDevice;
createCameraPreviewSession();
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int error) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
finish();
}
};
private void createCameraPreviewSession() {
try {
final int[] x = new int[1];
final int[] y = new int[1];
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
// We configure the size of default buffer to be the size of camera preview we want.
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
// This is the output Surface we need to start preview.
Surface surface = new Surface(texture);
// We set up a CaptureRequest.Builder with the output Surface.
mPreviewRequestBuilder
= mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
// Here, we create a CameraCaptureSession for camera preview.
mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
// The camera is already closed
if (null == mCameraDevice) {
return;
}
// GET SCREEN SIZE
MeteringRectangle screenSize[] = mPreviewRequestBuilder.get(CaptureRequest.CONTROL_AF_REGIONS);
// When the session is ready, we start displaying the preview.
mCaptureSession = cameraCaptureSession;
long loopTime = System.currentTimeMillis();
long time = System.currentTimeMillis();
// GET SCREEN WIDTH
DisplayMetrics displayMetrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(displayMetrics);
screenWidth = screenSize[0].getWidth();
screenHeight = screenSize[0].getHeight();
// CREATE NEW FOCUS POINT
// testing
for(int i = 0; i < 1 ; i++) {
setManualFocusAt(x[0], y[0]);
// GET CURRENT FOCUS POINT
MeteringRectangle currentFocusArea[] = mPreviewRequestBuilder.get(CaptureRequest.CONTROL_AF_REGIONS);
time = System.currentTimeMillis();
// WAIT FOR FOCUS TO READY
while((System.currentTimeMillis() - time) < 800){
Log.d("focus time:" , Float.toString(System.currentTimeMillis() - time));
Log.e("focus time:" , Float.toString(System.currentTimeMillis() - time));
Log.wtf("focus time:" , Float.toString(System.currentTimeMillis() - time));
};
}
long x = System.currentTimeMillis() - loopTime;
Log.d("focus time:" , Float.toString(x));
cameraReady = true;
}
@Override
public void onConfigureFailed(
@NonNull CameraCaptureSession cameraCaptureSession) {
}
}, null
);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
* Initiate a still image capture.
*/
void setManualFocusAt(int x, int y) {
int mDisplayOrientation = getWindowManager().getDefaultDisplay().getRotation();
float points[] = new float[2];
points[0] = (float) x / mTextureView.getWidth();
points[1] = (float) y / mTextureView.getHeight();
Matrix rotationMatrix = new Matrix();
rotationMatrix.setRotate(mDisplayOrientation, 0.5f, 0.5f);
rotationMatrix.mapPoints(points);
if (mPreviewRequestBuilder != null) {
// mIsManualFocusing = true;
updateManualFocus(points[0], points[1]);
if (mCaptureSession != null) {
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
CaptureRequest.CONTROL_AF_TRIGGER_START);
mCaptureSession.capture(mPreviewRequestBuilder.build(), null, mBackgroundHandler);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
CaptureRequest.CONTROL_AF_TRIGGER_IDLE);
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(),
null, mBackgroundHandler);
} catch (CameraAccessException | IllegalStateException e) {
Log.e(TAG, "Failed to set manual focus.", e);
}
}
// resumeAutoFocusAfterManualFocus();
}
}
void updateManualFocus(float x, float y) {
@SuppressWarnings("ConstantConditions")
int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
mAFRegions = AutoFocusHelperLib.afRegionsForNormalizedCoord(x, y, mCropRegion, sensorOrientation);
mAERegions = AutoFocusHelperLib.aeRegionsForNormalizedCoord(x, y, mCropRegion, sensorOrientation);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, mAFRegions);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, mAERegions);
// mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
// fixe distance focuse // testing
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,CaptureRequest.CONTROL_AF_MODE_OFF);
mPreviewRequestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, cameraFocusDistance);
}
/**
* Lock the focus as the first step for a still image capture.
*/
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
if (null != mCaptureSession) {
mCaptureSession.close();
mCaptureSession = null;
}
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
if (null != mImageReader) {
mImageReader.close();
mImageReader = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
mCameraOpenCloseLock.release();
}
}
private final TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
openCamera(width, height, CAMERA_FRONT_BACK);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture texture) {
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera_ocr);
mTextureView = findViewById(R.id.textureView);
}
@Override
protected void onResume(){
super.onResume();
if (mTextureView.isAvailable()) {
openCamera(mTextureView.getWidth(), mTextureView.getHeight(),CAMERA_FRONT_BACK);
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
}
It looks like you got height and width backwards on this line:
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
Try the following and you should get the correct result:
RectF bufferRect = new RectF(0, 0, mPreviewSize.getWidth(), mPreviewSize.getHeight());

How to make a Google Maps Marker with a photo inside a round speech-bubble?

I've searched a lot but have not found it yet. How do I make a Google Maps Marker with a photo? Example:
I thought I would put two markers in the same place, one with the image of the marker and another with the photo, but I don't think that is the best way to do it. Can someone help me?
This answer is similar to my other answer here; however, this one is different due to the round bubble around the image.
First, ensure you have the latest version of Picasso:
dependencies {
compile 'com.squareup.picasso:picasso:2.71828'
//....
}
Here is a transformation that creates the round bubble and bottom triangle around the image:
import android.graphics.Bitmap;
import android.graphics.BitmapShader;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.Shader;
public class CircleBubbleTransformation implements com.squareup.picasso.Transformation {
private static final int photoMargin = 30;
private static final int margin = 20;
private static final int triangleMargin = 10;
@Override
public Bitmap transform(final Bitmap source) {
int size = Math.min(source.getWidth(), source.getHeight());
float r = size/2f;
Bitmap output = Bitmap.createBitmap(size+triangleMargin, size+triangleMargin, Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(output);
Paint paintBorder = new Paint();
paintBorder.setAntiAlias(true);
paintBorder.setColor(Color.parseColor("#333030"));
paintBorder.setStrokeWidth(margin);
canvas.drawCircle(r, r, r-margin, paintBorder);
Paint trianglePaint = new Paint(Paint.ANTI_ALIAS_FLAG);
trianglePaint.setStrokeWidth(2);
trianglePaint.setColor(Color.parseColor("#333030"));
trianglePaint.setStyle(Paint.Style.FILL_AND_STROKE);
trianglePaint.setAntiAlias(true);
Path triangle = new Path();
triangle.setFillType(Path.FillType.EVEN_ODD);
triangle.moveTo(size-margin, size / 2);
triangle.lineTo(size/2, size+triangleMargin);
triangle.lineTo(margin, size/2);
triangle.close();
canvas.drawPath(triangle, trianglePaint);
Paint paint = new Paint();
paint.setAntiAlias(true);
paint.setShader(new BitmapShader(source, Shader.TileMode.CLAMP, Shader.TileMode.CLAMP));
canvas.drawCircle(r, r, r-photoMargin, paint);
if (source != output) {
source.recycle();
}
return output;
}
@Override
public String key() {
return "circlebubble";
}
}
In this example I get the LatLng from a PlaceAutocompleteFragment:
PlaceAutocompleteFragment placeAutoComplete = (PlaceAutocompleteFragment) getFragmentManager().findFragmentById(R.id.place_autocomplete);
placeAutoComplete.setOnPlaceSelectedListener(new PlaceSelectionListener() {
@Override
public void onPlaceSelected(Place place) {
Log.d("Maps", "Place selected: " + place.getName());
mLatLng = place.getLatLng();
Picasso.get()
.load(user_photo_url)
.resize(200,200)
.centerCrop()
.transform(new CircleBubbleTransformation())
.into(mTarget);
}
@Override
public void onError(Status status) {
Log.d("Maps", "An error occurred: " + status);
}
});
Define the Target:
Target mTarget = new Target() {
@Override
public void onBitmapLoaded(Bitmap bitmap, Picasso.LoadedFrom from) {
Marker driver_marker = mMap.addMarker(new MarkerOptions()
.position(mLatLng)
.icon(BitmapDescriptorFactory.fromBitmap(bitmap))
.title("test")
.snippet("test address")
);
}
@Override
public void onBitmapFailed(Exception ex, Drawable errorDrawable) {
Log.d("picasso", "onBitmapFailed");
}
@Override
public void onPrepareLoad(Drawable placeHolderDrawable) {
}
};
The result:

Faster way of displaying an offscreen bitmap using Android/Java

I have started to learn Android and Java using the Android Studio beta. As a first simple test app, I am trying to get a basic Mandelbrot renderer working. I have gotten it to display, but now I want it to be faster. Can anyone give advice on the following?
The docs say canvas.drawBitmap is deprecated. What should I use instead? What is the fastest way to show a bitmap on screen?
How can I show the progress of the calculations? If I uncomment the two lines marked "update display after each line has been calculated", there is no updating during the calculations, and the extra calls to canvas.drawBitmap really slow it all down (79 seconds compared to 31 seconds without them).
Is there anything I can do to speed up the general math calls?
I have tried to keep it as simple as possible for this example.
Many thanks for any tips for a newbie. I don't want to learn bad habits from the start if possible.
The layout has a single ImageView aligned to the screen. The full code is:
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Point;
import android.os.Bundle;
import android.os.SystemClock;
import android.view.Display;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.ImageView;
import java.text.DecimalFormat;
import java.util.Random;
import static android.graphics.Color.argb;
import static android.graphics.Color.rgb;
public class myActivity extends Activity {
myView myview;
Bitmap bmp;
Canvas myCanvas;
ImageView img;
Paint paint;
Boolean started=false;
Integer ih,iw,i,redraws,fw,fh;
long startms,finishms;
Random rnd;
double xmin,xmax,ymin,ymax,padding,bailout,bailout_squared,stepx,stepy,x,y,magnitude;
double realiters,tweenval;
Integer col,colval1,colval2;
Integer iterations,maxiterations,superx,supery,samplepixels,square_samplepixels,xloop,yloop;
double zr,zi,cr,ci,xloopdouble,yloopdouble;
int[] colorpalette = new int[256];
int[] pixels;
int r,g,b,r1,g1,b1,r2,g2,b2,totalr,totalg,totalb;
private void init(){
//screen size
Display display = getWindowManager().getDefaultDisplay();
Point sizePoint = new Point();
paint = new Paint();
display.getSize(sizePoint);
iw=sizePoint.x;
ih=sizePoint.y;
//pixels array
fw=iw;
fh=ih;
pixels=new int[fw*fh];
//create bitmap
bmp=Bitmap.createBitmap(iw, ih, Bitmap.Config.RGB_565);
//create canvas
myCanvas =new Canvas();
myCanvas.setBitmap(bmp);
img = (ImageView) findViewById(R.id.imageView1);
rnd = new Random();
}
// calculates and displays the Mandelbrot fractal
private void Mandelbrot(){
startms= SystemClock.uptimeMillis();
//coordinates
// xmin=-1.6345100402832;
// xmax=-1.63043992784288;
// ymin=-0.00209962230258512;
// ymax=0.00209259351094558;
xmin=-2.3;
xmax=2.3;
ymin=-2.1;
ymax=2.1;
fw=iw;
fh=ih;
//adjust coords to match screen aspect
if (iw<ih) {
padding=(xmax-xmin)/iw*(ih-iw);
ymin=ymin-padding/2.0;
ymax=ymax+padding/2.0;
} else {
padding=(ymax-ymin)/ih*(iw-ih);
xmin=xmin-padding/2.0;
xmax=xmax+padding/2.0;
}
bailout=8.0; //needs to be higher than default 2 for the CPM coloring to be smooth
bailout_squared=bailout*bailout;
maxiterations=64;
samplepixels=1;
square_samplepixels=samplepixels*samplepixels;
//random color palette
for (col=0;col<256;col++){
colorpalette[col]=android.graphics.Color.argb(255,rnd.nextInt(256),rnd.nextInt(256),rnd.nextInt(256));
}
stepx=(xmax-xmin)/fw/samplepixels;
stepy=(ymax-ymin)/fh/samplepixels;
for (yloop=0;yloop<fh;yloop++){
for (xloop=0;xloop<fw;xloop++){
totalr=0;
totalg=0;
totalb=0;
r=0;
g=0;
b=0;
xloopdouble=(double)xloop;
yloopdouble=(double)yloop;
for (supery=0;supery<samplepixels;supery++)
{
for (superx=0;superx<samplepixels;superx++)
{
cr = xmin+xloopdouble/(double)fw*(xmax-xmin)+(stepx*(double)superx);
ci = ymin+yloopdouble/(double)fh*(ymax-ymin)+(stepy*(double)supery);
zr = 0.0;
zi = 0.0;
magnitude=0.0;
for(iterations=0; iterations<maxiterations; iterations++)
{
i=iterations;
x = (zr * zr - zi * zi) + cr;
y = (zi * zr + zr * zi) + ci;
magnitude=(x * x + y * y);
if(magnitude>bailout_squared) break;
zr = x;
zi = y;
}
if (iterations>=maxiterations) {
r=0;
g=0;
b=0;
} else {
//CPM smooth colors
realiters=iterations+1-((Math.log(Math.log(Math.sqrt(magnitude)))/Math.log(2.0)));
colval1=(int) Math.floor(realiters % 255);
colval2=(colval1+1) % 255;
tweenval=realiters-Math.floor(realiters);
r1=Color.red(colorpalette[colval1]);
g1=Color.green(colorpalette[colval1]);
b1=Color.blue(colorpalette[colval1]);
r2=Color.red(colorpalette[colval2]);
g2=Color.green(colorpalette[colval2]);
b2=Color.blue(colorpalette[colval2]);
r=(int) (r1+((r2-r1)*tweenval));
g=(int) (g1+((g2-g1)*tweenval));
b=(int) (b1+((b2-b1)*tweenval));
}
totalr=totalr+r;
totalg=totalg+g;
totalb=totalb+b;
}
}
r=(int) totalr/square_samplepixels;
g=(int) totalg/square_samplepixels;
b=(int) totalb/square_samplepixels;
//update pixels array
pixels[xloop+yloop*fw]=rgb(r, g, b);
}
//update display after each line has been calculated
//myCanvas.drawBitmap(pixels,0,fw,0,0,fw,fh,false,null);
//if (img != null) img.invalidate();
}
myCanvas.drawBitmap(pixels,0,fw,0,0,fw,fh,false,null);
finishms=SystemClock.uptimeMillis();
}
private void updateTimeTaken(){
//turn antialiasing on
paint.setAntiAlias(true);
// draw some text using FILL style
paint.setStyle(Paint.Style.FILL);
paint.setTextSize(30);
DecimalFormat myFormatter = new DecimalFormat("#,###,###");
paint.setColor(Color.BLACK);
myCanvas.drawText("Time taken = " + myFormatter.format(finishms - startms) + " ms", 15, 45, paint);
paint.setColor(Color.WHITE);
myCanvas.drawText("Time taken = " + myFormatter.format(finishms - startms) + " ms", 14, 44, paint);
paint.setColor(Color.BLACK);
myCanvas.drawText("Screen size = " + String.valueOf(iw) + " x " + String.valueOf(ih), 15, 85, paint);
paint.setColor(Color.WHITE);
myCanvas.drawText("Screen size = " + String.valueOf(iw) + " x " + String.valueOf(ih), 14, 84, paint);
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
//fullscreen no menu
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
redraws=0;
super.onCreate(savedInstanceState);
myview = new myView(this);
setContentView(myview);
init();
started=true;
Mandelbrot();
updateTimeTaken();
}
private class myView extends View{
public myView(Context context){
super(context);
}
@Override protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
//draw off screen bitmap to screen
if (started==true){
canvas.drawBitmap(bmp,0,0,paint);
}
}
}
}
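On the progress question above, a common pattern (a sketch reusing the fields from this activity, not tested against this exact code) is to run the calculation on a background thread and only post an invalidate() to the UI thread after each finished line, instead of drawing synchronously:
// Sketch: compute on a worker thread so the UI thread stays free to redraw.
new Thread(new Runnable() {
@Override
public void run() {
Mandelbrot(); // fills the pixels array line by line
}
}).start();
// inside the per-line loop, instead of calling myCanvas.drawBitmap directly:
myview.post(new Runnable() {
@Override
public void run() {
myCanvas.drawBitmap(pixels, 0, fw, 0, 0, fw, fh, false, null);
myview.invalidate(); // schedules onDraw with the updated bitmap
}
});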

Android: Audio Recording with voice level visualization

I need to create an Android application for recording voice while showing a sound-level visualization.
I have already created an audio recording application, but I cannot add the sound-level visualization. How can I do it?
Please, can someone help by giving a suggestion, a sample tutorial link, or code?
Create an XML layout activity_recording.xml like this:
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="180dp"
android:layout_alignParentBottom="true"
android:background="#231f20" >
<ali.visualiser.VisualizerView
android:id="#+id/visualizer"
android:layout_width="220dp"
android:layout_height="75dp"
android:layout_centerHorizontal="true"
android:layout_margin="5dp" />
<TextView
android:id="#+id/txtRecord"
android:layout_width="wrap_content"
android:layout_height="40dp"
android:layout_alignParentBottom="true"
android:layout_centerHorizontal="true"
android:layout_marginBottom="25dp"
android:gravity="center"
android:text="Start Recording"
android:textColor="#android:color/white"
android:textSize="30sp" />
</RelativeLayout>
Create a custom VisualizerView as given below:
package ali.visualiser;
import java.util.ArrayList;
import java.util.List;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.view.View;
public class VisualizerView extends View {
private static final int LINE_WIDTH = 1; // width of visualizer lines
private static final int LINE_SCALE = 75; // scales visualizer lines
private List<Float> amplitudes; // amplitudes for line lengths
private int width; // width of this View
private int height; // height of this View
private Paint linePaint; // specifies line drawing characteristics
// constructor
public VisualizerView(Context context, AttributeSet attrs) {
super(context, attrs); // call superclass constructor
linePaint = new Paint(); // create Paint for lines
linePaint.setColor(Color.GREEN); // set color to green
linePaint.setStrokeWidth(LINE_WIDTH); // set stroke width
}
// called when the dimensions of the View change
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
width = w; // new width of this View
height = h; // new height of this View
amplitudes = new ArrayList<Float>(width / LINE_WIDTH);
}
// clear all amplitudes to prepare for a new visualization
public void clear() {
amplitudes.clear();
}
// add the given amplitude to the amplitudes ArrayList
public void addAmplitude(float amplitude) {
amplitudes.add(amplitude); // add newest to the amplitudes ArrayList
// if the power lines completely fill the VisualizerView
if (amplitudes.size() * LINE_WIDTH >= width) {
amplitudes.remove(0); // remove oldest power value
}
}
// draw the visualizer with scaled lines representing the amplitudes
@Override
public void onDraw(Canvas canvas) {
int middle = height / 2; // get the middle of the View
float curX = 0; // start curX at zero
// for each item in the amplitudes ArrayList
for (float power : amplitudes) {
float scaledHeight = power / LINE_SCALE; // scale the power
curX += LINE_WIDTH; // increase X by LINE_WIDTH
// draw a line representing this item in the amplitudes ArrayList
canvas.drawLine(curX, middle + scaledHeight / 2, curX, middle
- scaledHeight / 2, linePaint);
}
}
}
Create the RecordingActivity class as given below:
package ali.visualiser;
import java.io.File;
import java.io.IOException;
import android.app.Activity;
import android.media.MediaRecorder;
import android.media.MediaRecorder.OnErrorListener;
import android.media.MediaRecorder.OnInfoListener;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.TextView;
public class RecordingActivity extends Activity {
public static final String DIRECTORY_NAME_TEMP = "AudioTemp";
public static final int REPEAT_INTERVAL = 40;
private TextView txtRecord;
VisualizerView visualizerView;
private MediaRecorder recorder = null;
File audioDirTemp;
private boolean isRecording = false;
private Handler handler; // Handler for updating the visualizer
// private boolean recording; // are we currently recording?
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_recording);
visualizerView = (VisualizerView) findViewById(R.id.visualizer);
txtRecord = (TextView) findViewById(R.id.txtRecord);
txtRecord.setOnClickListener(recordClick);
audioDirTemp = new File(Environment.getExternalStorageDirectory(),
DIRECTORY_NAME_TEMP);
if (audioDirTemp.exists()) {
deleteFilesInDir(audioDirTemp);
} else {
audioDirTemp.mkdirs();
}
// create the Handler for visualizer update
handler = new Handler();
}
OnClickListener recordClick = new OnClickListener() {
@Override
public void onClick(View v) {
if (!isRecording) {
// isRecording = true;
txtRecord.setText("Stop Recording");
recorder = new MediaRecorder();
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
recorder.setOutputFile(audioDirTemp + "/audio_file"
+ ".mp3");
OnErrorListener errorListener = null;
recorder.setOnErrorListener(errorListener);
OnInfoListener infoListener = null;
recorder.setOnInfoListener(infoListener);
try {
recorder.prepare();
recorder.start();
isRecording = true; // we are currently recording
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
handler.post(updateVisualizer);
} else {
txtRecord.setText("Start Recording");
releaseRecorder();
}
}
};
private void releaseRecorder() {
if (recorder != null) {
isRecording = false; // stop recording
handler.removeCallbacks(updateVisualizer);
visualizerView.clear();
recorder.stop();
recorder.reset();
recorder.release();
recorder = null;
}
}
public static boolean deleteFilesInDir(File path) {
if( path.exists() ) {
File[] files = path.listFiles();
if (files == null) {
return true;
}
for(int i=0; i<files.length; i++) {
if(files[i].isDirectory()) {
}
else {
files[i].delete();
}
}
}
return true;
}
@Override
protected void onDestroy() {
super.onDestroy();
releaseRecorder();
}
// updates the visualizer every REPEAT_INTERVAL (40) milliseconds
Runnable updateVisualizer = new Runnable() {
@Override
public void run() {
if (isRecording) // if we are already recording
{
// get the current amplitude
int x = recorder.getMaxAmplitude();
visualizerView.addAmplitude(x); // update the VisualizeView
visualizerView.invalidate(); // refresh the VisualizerView
// update in 40 milliseconds
handler.postDelayed(this, REPEAT_INTERVAL);
}
}
};
}
Result
This is how it looks:
https://www.youtube.com/watch?v=BoFG6S02GH0
When it reaches the end, the animation continues as expected: erasing the beginning of the graph.
I like Ali's answer, but here's a simpler version that performs much better. The real speed comes from making the view class's onDraw method as fast as possible. Store the correct values in memory first by doing any computations not required for drawing outside the draw loop, and pass fully populated structures to draw routines to allow the hardware to optimize drawing many lines.
I launched my RecordingActivity and set it full screen, but you can create a layout resource or add the view anywhere.
Activity:
public class RecordingActivity extends Activity {
private VisualizerView visualizerView;
private MediaRecorder recorder = new MediaRecorder();
private Handler handler = new Handler();
final Runnable updater = new Runnable() {
public void run() {
handler.postDelayed(this, 1);
int maxAmplitude = recorder.getMaxAmplitude();
if (maxAmplitude != 0) {
visualizerView.addAmplitude(maxAmplitude);
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_recording);
visualizerView = (VisualizerView) findViewById(R.id.visualizer);
try {
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
recorder.setOutputFile("/dev/null");
recorder.prepare();
recorder.start();
} catch (IllegalStateException | IOException ignored) {
}
}
@Override
protected void onDestroy() {
super.onDestroy();
handler.removeCallbacks(updater);
recorder.stop();
recorder.reset();
recorder.release();
}
@Override
public void onWindowFocusChanged(boolean hasFocus) {
super.onWindowFocusChanged(hasFocus);
handler.post(updater);
}
}
View:
public class VisualizerView extends View {
private static final int MAX_AMPLITUDE = 32767;
private float[] amplitudes;
private float[] vectors;
private int insertIdx = 0;
private Paint pointPaint;
private Paint linePaint;
private int width;
private int height;
public VisualizerView(Context context, AttributeSet attrs) {
super(context, attrs);
linePaint = new Paint();
linePaint.setColor(Color.GREEN);
linePaint.setStrokeWidth(1);
pointPaint = new Paint();
pointPaint.setColor(Color.BLUE);
pointPaint.setStrokeWidth(1);
}
@Override
protected void onSizeChanged(int width, int h, int oldw, int oldh) {
this.width = width;
height = h;
amplitudes = new float[this.width * 2]; // xy for each point across the width
vectors = new float[this.width * 4]; // xxyy for each line across the width
}
/**
* modifies draw arrays. cycles back to zero when amplitude samples reach max screen size
*/
public void addAmplitude(int amplitude) {
invalidate();
float scaledHeight = ((float) amplitude / MAX_AMPLITUDE) * (height - 1);
int ampIdx = insertIdx * 2;
amplitudes[ampIdx++] = insertIdx; // x
amplitudes[ampIdx] = scaledHeight; // y
int vectorIdx = insertIdx * 4;
vectors[vectorIdx++] = insertIdx; // x0
vectors[vectorIdx++] = 0; // y0
vectors[vectorIdx++] = insertIdx; // x1
vectors[vectorIdx] = scaledHeight; // y1
// insert index must be shorter than screen width
insertIdx = ++insertIdx >= width ? 0 : insertIdx;
}
@Override
public void onDraw(Canvas canvas) {
canvas.drawLines(vectors, linePaint);
canvas.drawPoints(amplitudes, pointPaint);
}
}
If you're using the MediaRecorder class and visualization based on peak amplitude is OK, you can use the getMaxAmplitude() method to continuously poll for the "maximum absolute amplitude that was sampled since the last call".
Scale that amplitude down into an index that determines how many of your app's graphical volume bars to light up and you're set.
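A rough sketch of that polling loop (NUM_BARS and barView.setLitBars() are hypothetical names standing in for whatever your UI provides):
// Sketch: poll the recorder periodically and map amplitude (0..32767)
// to a number of lit volume bars.
final int NUM_BARS = 10;
final Handler handler = new Handler();
handler.post(new Runnable() {
@Override
public void run() {
int amplitude = recorder.getMaxAmplitude(); // max since last call, 0..32767
int lit = Math.min(NUM_BARS, amplitude * NUM_BARS / 32767);
barView.setLitBars(lit); // hypothetical view method
handler.postDelayed(this, 100); // poll every 100 ms
}
});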
My approach to this is based on activedecay's and Ali's answers, and I added display DPI scaling, since dp is scaled by the screen density: 1 pixel at 320 dpi is not 1 pixel at 420 dpi. I had the problem that the visualizer was not moving at the same rate on different screens.
I also haven't found out why the canvas doesn't start drawing from the beginning of the view on API 28 only, but it doesn't look bad in any way.
Info about dpi scaling:
Android Developers/Support different pixel densities
package com.example.mediarecorderdemo.views;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.util.Log;
import android.view.View;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import static com.example.mediarecorderdemo.RecordingActivity.DEBUG;
public class VisualizerView extends View {
private static final int MAX_AMPLITUDE = 32767;
private ArrayList<Float> amplitudes;
private Paint linePaint;
private int width;
private int height;
private int density;
private float stroke;
public VisualizerView(Context context, @Nullable AttributeSet attrs) {
super(context, attrs);
density = this.getResources().getDisplayMetrics().densityDpi; //Get the display DPI
linePaint = new Paint();
linePaint.setColor(Color.GREEN);
linePaint.setAntiAlias(true); //Add AntiAlias for displaying strokes that are less than 1
}
@Override
protected void onSizeChanged(int w, int h, int oldW, int oldH) {
width = w;
height = h;
amplitudes = new ArrayList<>(width * 2);
stroke =(width * ((float)density / 160)) / 1000; //Calculate actual pixel size for the view based on view width and dpi
linePaint.setStrokeWidth(stroke);
}
/**
* Add a new value of int to the visualizer array
* #param amplitude Int value
*/
public void addAmplitude(int amplitude){
invalidate();
float scaledHeight = ((float) amplitude / MAX_AMPLITUDE) * (height -1);
amplitudes.add(scaledHeight);
}
/**
* Clears Visualization
*/
public void clear(){
amplitudes.clear();
}
@Override
protected void onDraw(Canvas canvas) {
int middle = height / 2; // get the middle of the View
float curX = 0; // start curX at zero
// for each item in the amplitudes ArrayList
for (float power : amplitudes) {
// draw a line representing this item in the amplitudes ArrayList
canvas.drawLine(curX, middle + power / 2, curX, middle
- power / 2, linePaint);
curX += stroke; // increase X by line width
}
}
}

Android is trying to use a recycled image

I'm getting
Canvas: trying to use a recycled bitmap android.graphics.Bitmap@4057a3a8
every time I try to show one image.
Image
When I delete bmp.recycle() everything goes well, but I don't use this image in my code, so I don't understand where the problem is.
package com.example.photobooth;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import android.os.Bundle;
import android.os.Environment;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Rect;
import android.util.DisplayMetrics;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnTouchListener;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.ImageView;
public class EditorActivity extends Activity implements OnClickListener{
String path = null;
private int screen_height;
private int screen_width;
private Bitmap setUpImage(Bitmap image) {
int min_side = Math.min(screen_height, screen_width);
float scale_factor = (float) (((float) min_side / image.getWidth()) * 1.5);
float[] scalef = { scale_factor, scale_factor };
Bitmap scaled_image = ImageUtilities.scaleImage(image, scalef);
return scaled_image;
}
private void setUp() {
Bundle b = getIntent().getExtras();
if (b != null) {
path = b.getString("path");
}
DisplayMetrics metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
this.screen_height = metrics.heightPixels;
this.screen_width = metrics.widthPixels;
int min_measure = Math.min(screen_width, screen_height);
// Make ImageView square
ImageView img = (ImageView) findViewById(R.id.photo_holder);
android.view.ViewGroup.LayoutParams lp = img.getLayoutParams();
lp.height = min_measure;
img.setLayoutParams(lp);
Bitmap bmp = BitmapFactory.decodeFile(path);
final Bitmap ready_image = setUpImage(bmp);
bmp.recycle();
ImageView iv = (ImageView) findViewById(R.id.photo_holder);
iv.setImageBitmap(ready_image);
// set up touch event for imageview(photo_holder)
img.setOnTouchListener(new OnTouchListener() {
float touch_x, touch_y, scrolled_x = 0.0f, scrolled_y = 0.0f;
public boolean onTouch(View v, MotionEvent event) {
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
touch_x = event.getX();
touch_y = event.getY();
break;
case MotionEvent.ACTION_MOVE:
float cur_x = event.getX();
float cur_y = event.getY();
float scroll_x = -cur_x + touch_x;
float scroll_y = -cur_y + touch_y;
scrolled_x += scroll_x;
scrolled_y += scroll_y;
if (scrolled_x > (ready_image.getWidth() - screen_width)/2
|| scrolled_x < -(ready_image.getWidth() - screen_width)/2){
scrolled_x -= scroll_x;
scroll_x = 0;
}
if (scrolled_y > (ready_image.getHeight() - screen_width)/2
|| scrolled_y < -(ready_image.getHeight() - screen_width)/2){
scrolled_y -= scroll_y;
scroll_y = 0;
}
v.scrollBy((int) (scroll_x),
(int) (scroll_y));
touch_x = cur_x;
touch_y = cur_y;
break;
}
return true;
}
});
//Set up buttons
Button btn = (Button)findViewById(R.id.save);
btn.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
ImageView img = (ImageView)findViewById(R.id.photo_holder);
int scroll_x = img.getScrollX();
int scroll_y = img.getScrollY();
int left = (ready_image.getWidth() - screen_width)/2
+ scroll_x;
int top = (ready_image.getHeight() - screen_width)/2
+ scroll_y;
int right = left + screen_width;
int bottom = top + screen_width;
Rect r = new Rect(left, top, right, bottom);
Bitmap croped_image = ImageUtilities.cropImage(ready_image,
r,
screen_width,
screen_width);
String path_to_folder = Environment.getExternalStorageDirectory()
.getAbsolutePath();
String pic_path = path_to_folder + File.separator + MainActivity.app_name;
File f = new File(pic_path);
File picture = null;
try {
picture = File.createTempFile("photo_", ".jpg", f);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
FileOutputStream fos = new FileOutputStream(picture);
croped_image.compress(Bitmap.CompressFormat.JPEG, 100, fos);
fos.close();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
});
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (requestWindowFeature(Window.FEATURE_NO_TITLE))
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_editor);
setUp();
}
public void onClick(View v) {
// TODO Auto-generated method stub
}
}
bmp is recycled in the setUp() method.
ImageUtilities is:
package com.example.photobooth;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.PorterDuff.Mode;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.RectF;
public class ImageUtilities {
public static Bitmap getRoundedCornerBitmap(Context context, Bitmap input,
int pixels, int w, int h, boolean squareTL, boolean squareTR,
boolean squareBL, boolean squareBR, boolean border) {
Bitmap output = Bitmap.createBitmap(w, h, Config.ARGB_8888);
Canvas canvas = new Canvas(output);
final float densityMultiplier = context.getResources()
.getDisplayMetrics().density;
final int color = 0xff424242;
final Paint paint = new Paint();
final Rect rect = new Rect(0, 0, w, h);
final RectF rectF = new RectF(rect);
// make sure that our rounded corner is scaled appropriately
final float roundPx = pixels * densityMultiplier;
paint.setAntiAlias(true);
canvas.drawARGB(0, 0, 0, 0);
paint.setColor(color);
canvas.drawRoundRect(rectF, roundPx, roundPx, paint);
// draw rectangles over the corners we want to be square
if (squareTL) {
canvas.drawRect(0, 0, w / 2, h / 2, paint);
}
if (squareTR) {
canvas.drawRect(w / 2, 0, w, h / 2, paint);
}
if (squareBL) {
canvas.drawRect(0, h / 2, w / 2, h, paint);
}
if (squareBR) {
canvas.drawRect(w / 2, h / 2, w, h, paint);
}
paint.setXfermode(new PorterDuffXfermode(Mode.SRC_IN));
canvas.drawBitmap(input, 0, 0, paint);
if (border) {
paint.setStyle(Paint.Style.STROKE);
paint.setColor(Color.WHITE);
paint.setStrokeWidth(3);
canvas.drawRoundRect(rectF, roundPx, roundPx, paint);
}
return output;
}
public static Bitmap cropImage(Bitmap origina_bmp, Rect rec, int w, int h) {
Bitmap target_bitmap = Bitmap.createBitmap(w, h,
Bitmap.Config.ARGB_8888);
target_bitmap.setDensity(origina_bmp.getDensity());
Canvas canvas = new Canvas(target_bitmap);
canvas.drawBitmap(origina_bmp, new Rect(rec.left, rec.top, rec.right,
rec.bottom), new Rect(0, 0, w, h), null);
return target_bitmap;
}
public static Bitmap makeSquareImage(Bitmap original_image, int size){
int min_side = Math.min(original_image.getWidth(),
original_image.getHeight());
int side_size = ImageUtilities.get2del(min_side);
int crop_to;
Bitmap croped_image = null;
if (min_side == original_image.getWidth()){
crop_to = (original_image.getHeight() - side_size) / 2;
croped_image = ImageUtilities.cropImage(original_image, new Rect(
0, crop_to, original_image.getWidth(),
original_image.getHeight() - crop_to), size, size);
}else{
crop_to = (original_image.getWidth() - side_size) / 2;
croped_image = ImageUtilities.cropImage(original_image, new Rect(
crop_to, 0, original_image.getWidth() - crop_to,
original_image.getHeight()), size, size);
}
return croped_image;
}
public static int get2del(int num) {
while (num % 2 != 0)
num++;
return num;
}
public static Bitmap scaleImage(Bitmap originalBMP, float[] scaleFactor) {
Matrix scaleMat = new Matrix();
scaleMat.postScale(scaleFactor[0], scaleFactor[1]);
Bitmap scaledImage = Bitmap.createBitmap(originalBMP, 0, 0,
originalBMP.getWidth(), originalBMP.getHeight(), scaleMat,
false);
return scaledImage;
}
}
So it doesn't.
If I write bmp = null instead of bmp.recycle(), everything is OK, but I wonder why the application crashes in the second case.
What is ImageUtilities? Maybe scaleImage reuses the same image.
Does your program work correctly if you do:
bmp = null;
instead of
bmp.recycle();
?
The official documentation of recycle says:
"This is an advanced call, and normally need not be called, since the normal GC process will free up this memory when there are no more references to this bitmap. "
So using "bmp = null" should be better than "bmp.recycle()".
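A likely cause worth checking (documented framework behavior, not something visible in the code above): Bitmap.createBitmap(source, ...) may return the source bitmap itself when no transformation is applied, so scaleImage() can hand back the very object that is then recycled. A defensive sketch:
Bitmap bmp = BitmapFactory.decodeFile(path);
final Bitmap ready_image = setUpImage(bmp);
// Bitmap.createBitmap() may return its source unchanged, so only
// recycle the original if a new bitmap was actually created.
if (ready_image != bmp) {
bmp.recycle();
}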
