How to optimize dlib landmark detection? - java

How to optimize dlib landmark detection?
A 160x120 bitmap takes about 7 seconds to process.
I want to get this down to 50–100 ms.
My code:
/**
 * Per-frame camera callback: converts the frame to a downscaled bitmap,
 * runs dlib landmark detection on it, and returns the unmodified RGBA
 * frame for display.
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    ArrayList<android.graphics.Point> points = new ArrayList<>();
    try {
        long startTime = System.currentTimeMillis();
        // matToBitmap() returns null on conversion failure; skip detection
        // then instead of passing null into the native detector.
        Bitmap bmp = matToBitmap(mRgba);
        if (bmp != null) {
            points = LandmarkDetection.getLandmark(bmp, this, landmarkPath);
        }
        long endTime = System.currentTimeMillis();
        Log.i(TAG + "Time cost: ", String.valueOf((endTime - startTime) / 1000f) + " sec");
        //drawPoint(points);
        Log.i(TAG, "size = " + String.valueOf(points.size()));
    } catch (Exception e) {
        Log.i(TAG, "bitmap error! " + e.getMessage());
    }
    return mRgba;
}
private Bitmap matToBitmap(#NonNull Mat mat) {
Bitmap bmp;
try {
Mat resized = new Mat();
Imgproc.resize(mat, resized, new Size(160, 120));
bmp = Bitmap.createBitmap(resized.cols(), resized.rows(), Bitmap.Config.ARGB_8888);
Utils.matToBitmap(resized, bmp);
}catch(Exception e) {
Log.e(TAG + ":matToBitmap", e.getMessage());
return null;
}
return bmp;
}
And LandmarkDetection class(This method takes all the time):
public static ArrayList<Point> getLandmark(#NonNull Bitmap bmp, Context context, String landmarkPath) {
mFaceDet = new FaceDet(landmarkPath);
Log.i(AndroidLauncher.TAG, String.valueOf(new File(context.getExternalCacheDir() + "/shape_predictor_68_face_landmarks.dat").exists()));
Log.i(AndroidLauncher.TAG, "Ширина: " + String.valueOf(bmp.getWidth()) + "\nВысота: " + String.valueOf(bmp.getHeight()));
results = mFaceDet.detect(bmp);
if (results != null) {
for (final VisionDetRet ret : results) {
landmarks = ret.getFaceLandmarks();
}
}
return landmarks;
}
What's wrong with my code?

A lot of things can optimize your code:
Do not construct the face_detector and shape_predictor for every face detection — that alone can take several seconds. You can share one shape_predictor across all your threads, but each thread should have its own face_detector.
The mFaceDet code is not shown, so it is unclear — maybe you are resizing the image there or doing other expensive operations.
http://dlib.net/faq.html#Whyisdlibslow

Related

How to calculate mean and standard deviation of an image file

I'm new to programming and I'm working on an image-processing project. I have a video file, and I'm trying to compute the mean and standard deviation across its frames (height, width, and number of frames). The image data (imgfile_500) is stored in a Mat.
here is my code
public class Image_conv {
public static void main(String[] args) {
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
String filePath = "C:\\Video_500.h264";
if (!Paths.get(filePath).toFile().exists()){
System.out.println("File " + filePath + " does not exist!");
return;
}
VideoCapture video500 = new VideoCapture(filePath);
if (!video500.isOpened()) {
System.out.println("Error! video500 can't be opened!");
return;
}
int ntime=20;
int fps= 60;
int ds_fac=4;
int nf = (ntime*fps);
int wd_ds = 480/ds_fac;
int hg_ds = 640/ds_fac;
int vsize = wd_ds*hg_ds*nf;
Mat frame = new Mat(480,640,CvType.CV_64FC3);
Mat frame500 = new Mat(480,640,CvType.CV_64FC3);
Mat imgfile_500 = new Mat();
if (video500.isOpened()) {
while(true){
if (video500.read(frame))
{
Imgproc.cvtColor(frame, frame500, Imgproc.COLOR_RGB2GRAY);
//System.out.println(frame500.size());
Imgproc.pyrDown( frame500, frame500, new Size( frame500.cols()/2, frame500.rows()/2 ) );
Imgproc.pyrDown( frame500, frame500, new Size( frame500.cols()/2, frame500.rows()/2 ) );
// Imgcodecs.imwrite(i+"led500.jpg", frame500);
// Push a Mat back into MatVector
imgfile_500.push_back(frame500);
}else break;
}
}
//System.out.println(imgfile_500.size());
}
}

record and save video stream use opencv in java

My question is: how do I record and save video for a specified duration — for example, after two hours the app should stop recording and save the file into a folder?
public class per1 {
public static void main(String[] args) {
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
Scanner scan = new Scanner(System.in);
VideoCapture camera = new VideoCapture(0);
String cc = String.valueOf(camera.get(Videoio.CAP_PROP_FOURCC));
int fps = (int) camera.get(Videoio.CAP_PROP_FPS);
int width = (int) camera.get(Videoio.CAP_PROP_FRAME_WIDTH);
int height = (int) camera.get(Videoio.CAP_PROP_FRAME_HEIGHT);
final Size frameSize = new Size((int) camera.get(Videoio.CAP_PROP_FRAME_WIDTH), (int) camera.get(Videoio.CAP_PROP_FRAME_HEIGHT));
VideoWriter save = new VideoWriter("D:/video.mpg", Videoio.CAP_PROP_FOURCC, fps, frameSize, true);
if (camera.isOpened()) {
System.out.println("ON");
Mat framecam = new Mat();
boolean cekframe = camera.read(framecam);
System.out.println("cekframe " + cekframe);
try {
while (cekframe) {
camera.read(framecam);
save.write(framecam);
}
Thread.sleep(4000);
} catch (Exception e) {
System.out.println("OFF \n" + e);
}
camera.release();
save.release();
System.exit(1);
System.out.println("DOne");
}
}

OpenCV - Android : java.lang.IllegalArgumentException: bmp == null

I'm trying to capture an image from the JavaCameraView and load the captured image into another activity and is supposed to be processed (Hough Circles).
private void takePhoto(final Mat rgba) {
// Determine the path and metadata for the photo.
final long currentTimeMillis = System.currentTimeMillis();
final String appName = getString(R.string.app_name);
final String galleryPath =
Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES).toString();
final String albumPath = galleryPath + File.separator +
appName;
final String photoPath = albumPath + File.separator +
currentTimeMillis + LabActivity.PHOTO_FILE_EXTENSION;
final ContentValues values = new ContentValues();
values.put(MediaStore.MediaColumns.DATA, photoPath);
values.put(Images.Media.MIME_TYPE,
LabActivity.PHOTO_MIME_TYPE);
values.put(Images.Media.TITLE, appName);
values.put(Images.Media.DESCRIPTION, appName);
values.put(Images.Media.DATE_TAKEN, currentTimeMillis);
// Ensure that the album directory exists.
File album = new File(albumPath);
if (!album.isDirectory() && !album.mkdirs()) {
Log.e(TAG, "Failed to create album directory at " +
albumPath);
onTakePhotoFailed();
return;
}
/*
// Try to create the photo.
Imgproc.cvtColor(rgba, mBgr, Imgproc.COLOR_RGBA2BGR, 3);
if (!Imgcodecs.imwrite(photoPath, mBgr)) {
Log.e(TAG, "Failed to save photo to " + photoPath);
onTakePhotoFailed();
}
Log.d(TAG, "Photo saved successfully to " + photoPath);
*/
Mat grayMat = new Mat();
Mat cannyEdges = new Mat();
Mat lines = new Mat();
Imgproc.cvtColor(rgba, mBgr, Imgproc.COLOR_RGBA2BGR, 3);
//Converting the image to grayscale
Imgproc.cvtColor(mBgr, grayMat, Imgproc.COLOR_BGR2GRAY);
Imgproc.Canny(grayMat, cannyEdges, 10, 100);
Imgproc.HoughLinesP(cannyEdges, lines, 1, Math.PI / 180, 50, 20, 20);
Mat houghLines = new Mat();
houghLines.create(cannyEdges.rows(), cannyEdges.cols(), CvType.CV_8UC1);
//Drawing lines on the image
for (int i = 0; i < lines.cols(); i++) {
double[] points = lines.get(0, i);
double x1, y1, x2, y2;
x1 = points[0];
y1 = points[1];
x2 = points[2];
y2 = points[3];
Point pt1 = new Point(x1, y1);
Point pt2 = new Point(x2, y2);
//Drawing lines on an image
Imgproc.line(houghLines, pt1, pt2, new Scalar(255, 0, 0), 1);
}
//Converting Mat back to Bitmap
Utils.matToBitmap(houghLines, currentBitmap);
Log.d(TAG, "Photo saved successfully to " + photoPath);
// Try to insert the photo into the MediaStore.
Uri uri;
try {
uri = getContentResolver().insert(
Images.Media.EXTERNAL_CONTENT_URI, values);
} catch (final Exception e) {
Log.e(TAG, "Failed to insert photo into MediaStore");
e.printStackTrace();
// Since the insertion failed, delete the photo.
File photo = new File(photoPath);
if (!photo.delete()) {
Log.e(TAG, "Failed to delete non-inserted photo");
}
onTakePhotoFailed();
return;
}
// Open the photo in LabActivity.
final Intent intent = new Intent(this, LabActivity.class);
intent.putExtra(LabActivity.EXTRA_PHOTO_URI, uri);
intent.putExtra(LabActivity.EXTRA_PHOTO_DATA_PATH,
photoPath);
runOnUiThread(new Runnable() {
#Override
public void run() {
startActivity(intent);
}
});
}
The error occurs after i click the capture option.
12-07 00:15:45.420 9205-9933/? E/AndroidRuntime﹕ FATAL EXCEPTION: Thread-8672
Process: com.example.alexies.cameratesting, PID: 9205
java.lang.IllegalArgumentException: bmp == null
at org.opencv.android.Utils.matToBitmap(Utils.java:122)
at org.opencv.android.Utils.matToBitmap(Utils.java:132)
at com.example.alexies.cameratesting.MainActivity.takePhoto(MainActivity.java:380)
currentBitmap is null in your code.
Either you didn't copy the part where it's assigned a bitmap value or it's never assigned. If there's some part of your code missing please add it in your question, if not, your problem is that you never get the bitmap.
EDIT
You never initialize currentBitmap. The docs state that the provided bitmap must be the same size as the Mat object (your houghLines) and the type of your bitmap should be ARGB_8888 or RGB_565.

Tapping android screen solves media skipping, how to fix?

I wrote an Android app that plays multi-track audio files and it works completely in the simulator. On the device, it plays for a few seconds and then starts skipping and popping every few seconds. If I continuously tap the screen in the dead space of the app, the skipping doesn't occur and then recurs about 5 seconds after screen tapping ceases. I presume that this has something to do with thread priority, but I log the thread priority in the play loop and it never changes.
I'm hoping that somebody can tell me either:
a hack where I can simulate a screen tap every second so that I can run a beta test without the app skipping
explain a way to debug activity/thread/etc priority when it seems that my thread priority isn't changing when it seems like it is.
Here is how the player code is executed:
private class DecodeOperation extends AsyncTask<Void, Void, Void> {
#Override
protected Void doInBackground(Void... values) {
AudioTrackPlayer.this.decodeLoop();
return null;
}
#Override
protected void onPreExecute() {
}
#Override
protected void onProgressUpdate(Void... values) {
}
}
Here is the relevant player code:
/**
 * Pulls compressed audio from mUrlString with MediaExtractor, decodes it
 * through MediaCodec, and streams the PCM output into an AudioTrack until
 * output EOS, a stalled stream, or an external stop request (doStop).
 * Runs on a background thread (see DecodeOperation); all state callbacks
 * go through mDelegateHandler.
 */
private void decodeLoop()
{
ByteBuffer[] codecInputBuffers;
ByteBuffer[] codecOutputBuffers;
// extractor gets information about the stream
extractor = new MediaExtractor();
try {
extractor.setDataSource(this.mUrlString);
} catch (Exception e) {
mDelegateHandler.onRadioPlayerError(AudioTrackPlayer.this);
return;
}
// Assumes track 0 is the audio track — TODO confirm for multi-track containers.
MediaFormat format = extractor.getTrackFormat(0);
String mime = format.getString(MediaFormat.KEY_MIME);
// the actual decoder
codec = MediaCodec.createDecoderByType(mime);
codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
codec.start();
codecInputBuffers = codec.getInputBuffers();
codecOutputBuffers = codec.getOutputBuffers();
// get the sample rate to configure AudioTrack
int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
Log.i(LOG_TAG,"mime "+mime);
Log.i(LOG_TAG,"sampleRate "+sampleRate);
// create our AudioTrack instance
// NOTE(review): the buffer is sized at exactly getMinBufferSize — the
// minimum at which the class can operate, with no headroom against
// underruns. A multiple of that value would reduce audible skipping.
audioTrack = new AudioTrack(
AudioManager.STREAM_MUSIC,
sampleRate,
AudioFormat.CHANNEL_OUT_5POINT1,
AudioFormat.ENCODING_PCM_16BIT,
AudioTrack.getMinBufferSize (
sampleRate,
AudioFormat.CHANNEL_OUT_5POINT1,
AudioFormat.ENCODING_PCM_16BIT
),
AudioTrack.MODE_STREAM
);
// start playing, we will feed you later
audioTrack.play();
extractor.selectTrack(0);
// start decoding
final long kTimeOutUs = 10000;
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
boolean sawInputEOS = false;
boolean sawOutputEOS = false;
// Abort after this many consecutive dequeue attempts with no output —
// treated as a stalled or broken stream (reported as an error below).
int noOutputCounter = 0;
int noOutputCounterLimit = 50;
while (!sawOutputEOS && noOutputCounter < noOutputCounterLimit && !doStop) {
//Log.i(LOG_TAG, "loop ");
noOutputCounter++;
if (!sawInputEOS) {
// inputBufIndex and bufIndexCheck are fields, not locals.
inputBufIndex = codec.dequeueInputBuffer(kTimeOutUs);
bufIndexCheck++;
// Log.d(LOG_TAG, " bufIndexCheck " + bufIndexCheck);
if (inputBufIndex >= 0) {
ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
int sampleSize =
extractor.readSampleData(dstBuf, 0 /* offset */);
//Log.d(LOG_TAG, "SampleLength = " + String.valueOf(sampleSize));
long presentationTimeUs = 0;
if (sampleSize < 0) {
Log.d(LOG_TAG, "saw input EOS.");
sawInputEOS = true;
// An (empty) buffer must still be queued to deliver the EOS flag.
sampleSize = 0;
} else {
presentationTimeUs = extractor.getSampleTime();
}
// can throw illegal state exception (???)
codec.queueInputBuffer(
inputBufIndex,
0 /* offset */,
sampleSize,
presentationTimeUs,
sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
if (!sawInputEOS) {
extractor.advance();
}
}
else
{
Log.e(LOG_TAG, "inputBufIndex " +inputBufIndex);
}
}
int res = codec.dequeueOutputBuffer(info, kTimeOutUs);
if (res >= 0) {
//Log.d(LOG_TAG, "got frame, size " + info.size + "/" + info.presentationTimeUs);
if (info.size > 0) {
// Real output arrived; reset the stall detector.
noOutputCounter = 0;
}
int outputBufIndex = res;
ByteBuffer buf = codecOutputBuffers[outputBufIndex];
final byte[] chunk = new byte[info.size];
buf.get(chunk);
buf.clear();
// Blocking write: paces this loop at playback speed.
audioTrack.write(chunk,0,chunk.length);
if(this.mState != State.Playing)
{
mDelegateHandler.onRadioPlayerPlaybackStarted(AudioTrackPlayer.this);
}
this.mState = State.Playing;
}
// NOTE(review): the brace above appears to close the "res >= 0" block
// too early — outputBufIndex below would then be out of scope and the
// trailing "} else if" chain no longer pairs with the if above. Verify
// brace placement against the compiling original; releaseOutputBuffer
// and the EOS check likely belong INSIDE the res >= 0 block.
codec.releaseOutputBuffer(outputBufIndex, false /* render */);
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d(LOG_TAG, "saw output EOS.");
sawOutputEOS = true;
}
} else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
codecOutputBuffers = codec.getOutputBuffers();
Log.d(LOG_TAG, "output buffers have changed.");
} else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat oformat = codec.getOutputFormat();
Log.d(LOG_TAG, "output format has changed to " + oformat);
} else {
Log.d(LOG_TAG, "dequeueOutputBuffer returned " + res);
}
}
Log.d(LOG_TAG, "stopping...");
relaxResources(true);
this.mState = State.Stopped;
doStop = true;
// attempt reconnect
if(sawOutputEOS)
{
try {
AudioTrackPlayer.this.play();
return;
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
// Distinguish a stalled stream (error) from a clean stop.
if(noOutputCounter >= noOutputCounterLimit)
{
mDelegateHandler.onRadioPlayerError(AudioTrackPlayer.this);
}
else
{
mDelegateHandler.onRadioPlayerStopped(AudioTrackPlayer.this);
}
}
Have you monitored the CPU frequency while your application is running? The CPU governor is probably scaling the CPU up on touch and scaling back down on a timer. Increasing the priority on your background thread to THREAD_PRIORITY_DEFAULT will probably fix the issue, the default priority for AsyncTask is quite low and not appropriate for Audio.
You could also increase the size of the AudioTrack's buffer to some multiple of the value returned by getMinBufferSize, that method only returns the minimum possible buffer for the Class to operate, it does not guarantee smooth playback.

Draw multiple shapes using loop - Android Canvas

I am very new to android development and have been trying to draw a square comprised of multiple smaller rectangles of different colours... Like a Mosaic essentially.
Basically at the moment I am reading values from a file which assigns the colour to the smaller Rects. I am using a pair of nested for loops to try to draw the small Rects sequentially, line by line. However when the program finishes there is only one small Rect drawn which is the last one to be drawn and its colour corresponds to the first value read from the file.
Here is some of my code to show you what I mean:
// NOTE(review): two bugs here explain why only one rectangle is drawn:
// 1) The BufferedReader is re-created on every iteration, so bf.readLine()
//    always returns the FIRST line of the file — every cell gets the colour
//    of the first value.
// 2) mDrawable is a single field overwritten on each iteration, so by the
//    time onDraw() runs, only the LAST rectangle created survives.
public SnapshotDraw(Context context) {
super(context);
for(int a = 0; a < 63; a++){
for(int b = 0; b < 63; b++){
fileName = PREFIX + "2" + EXTENSION;
try {
// Re-opened per cell — see note above; the reader is also never closed.
bf = new BufferedReader(new FileReader(fileName));
tokens = new StringTokenizer(bf.readLine(), " \n");
weight = Byte.parseByte(tokens.nextToken());
// Pixel offsets of this cell within the mosaic grid.
x_scalar = b*MAG;
y_scalar = a*MAG;
mDrawable = new ShapeDrawable(new RectShape());
mDrawable.getPaint().setColor(colour.getColour(weight));
mDrawable.setBounds((X_OFFSET + x_scalar), (Y_OFFSET + y_scalar), ((MAG + X_OFFSET) + x_scalar), ((MAG + Y_OFFSET) + y_scalar));
} catch (FileNotFoundException ex) {
Logger.getLogger(NetworkUtilities.class.getName()).log(Level.SEVERE, null, ex);
} catch (IOException ex) {
Logger.getLogger(NetworkUtilities.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
}
// Draws only the single ShapeDrawable the constructor left in mDrawable —
// i.e. the last rectangle built in the loops above, hence one visible square.
protected void onDraw(Canvas canvas) {
mDrawable.draw(canvas);
}
This except is from a class which extends View and is called inside an onCreate() method in an Activity.
I would appreciate any guidance in this and thanks in advance!!
Cheers.
You are constructing the BufferedReader inside the loops, so bf.readLine() will always return the same line. Try moving bf and tokens (be aware that the use of StringTokenizer is discouraged) out of the loops.
Ok I got it sorted! Here is what I did to solve it:
public SnapshotDraw(Context context) {
super(context);
setFocusable(true);
mBitmap = Bitmap.createBitmap(475, 720, Bitmap.Config.ALPHA_8);
}
#Override
protected void onDraw(Canvas canvas) {
canvas.drawColor(Color.BLACK);
Paint p = new Paint();
float y = 10;
try {
fileName = PREFIX + "2" + EXTENSION;
bf = new BufferedReader(new FileReader(fileName));
for(int a = 0; a < 63; a++){
tokens = bf.readLine().split(" \n");
for(int b = 0; b < 63; b++){
weight = Byte.parseByte(tokens[b]);
x_scalar = b*MAG;
y_scalar = a*MAG;
p.setColor(new Colour().getColour(weight));
canvas.drawRect((X_OFFSET + x_scalar), (Y_OFFSET + y_scalar), ((MAG + X_OFFSET) + x_scalar), ((MAG + Y_OFFSET) + y_scalar), p);
}
}
} catch (FileNotFoundException ex) {
Logger.getLogger(NetworkUtilities.class.getName()).log(Level.SEVERE, null, ex);
} catch (IOException ex) {
Logger.getLogger(NetworkUtilities.class.getName()).log(Level.SEVERE, null, ex);
}
canvas.drawBitmap(mBitmap, 10, y, p);
}
Much the same as before but changed the way I draw to the Bitmap. It looks beautiful btw!!

Categories