CountdownTimer in Java: Every x seconds do something - java

I have created a counter from the class CountdownTimer.java and I'd like to adjust this timer so that it stores a coordinate, say, every 2 seconds. This should become configurable later; for now I'm happy if it stores one coordinate per second.
The problem I am having is that the timer is too fast: after pressing Start, it prints all the coordinates within one second!
I have written the results to a file with a time stamp, which is how I know it is too fast.
So here is my code:
package com.bosch.uadclient;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.osmdroid.util.GeoPoint;
import android.annotation.SuppressLint;
import android.app.AlarmManager;
import android.app.Fragment;
import android.app.PendingIntent;
import android.content.Intent;
import android.os.Bundle;
import android.os.CountDownTimer;
import android.os.Environment;
import android.os.SystemClock;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.example.uadclient.R;
public class PartyFragment extends Fragment {
private MyCountdownTimer countDownTimer;
private long pointsCollected;
private boolean timerHasStarted = false;
private TextView text;
private TextView EdittextTime;
private TextView EdittextIntervall;
private TextView timeElapsedView;
private ArrayList<String> arrayList = new ArrayList<String>();
private long mPauseTime;
private long startTime;
private long interval;
private int point = 0;
private Button buttonTimer;
// LocationProviderService locService;
String s1 = "";
String s2 = "";
String lon = "";
String lat = "";
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
LinearLayout rr = (LinearLayout) inflater.inflate(
R.layout.capturefragment, container, false);
EdittextTime = (TextView) rr.findViewById(R.id.editText1);
EdittextIntervall = (TextView) rr.findViewById(R.id.EditText01);
text = (TextView) rr.findViewById(R.id.timer);
timeElapsedView = (TextView) rr.findViewById(R.id.pointsCollected);
buttonTimer = (Button) rr.findViewById(R.id.button);
buttonTimer.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
Fragment fragment = new FindFragment();
android.app.FragmentTransaction ft = getFragmentManager()
.beginTransaction();
switch (v.getId()) {
case R.id.button: {
String text0 = EdittextTime.getText().toString();
point = Integer.valueOf(text0);
long capture = point;
// Upload
String text1 = EdittextIntervall.getText().toString();
int sendinterval = Integer.valueOf(text1);
int sendintervall = sendinterval;
// Timer is self-initiated
startTime = 20000; // value in ms
interval = 1000;
countDownTimer = new MyCountdownTimer(startTime, interval);
text.setText(text.getText() + String.valueOf(startTime));
// ReadData and Parse File of coordinates
readfromFile();
String str1;
str1 = s1.toString();
readfromFile1();
String str2;
str2 = s2.toString();
GeoPoint geopoint = new GeoPoint(Double.parseDouble(str1),
Double.parseDouble(str2));
int AmountPoints = (int) ((startTime / 1000) / point);
if ((sendintervall > 0) && (capture > 0)) {
if (!timerHasStarted) {
countDownTimer.start();
timerHasStarted = true;
buttonTimer.setText("Tracing Started");
while (AmountPoints > 0) {
countDownTimer.onTick(1000, geopoint);
AmountPoints--;
}
} else {
countDownTimer.cancel();
timerHasStarted = false;
buttonTimer.setText("RESET");
}
} else {
System.out.println("Please type in all numbers");
}
}
buttonTimer.setText("Tracing Stopped");
ft.commit();
}
}
});
return rr;
}
// CountDownTimer class
public class MyCountdownTimer extends CountDownTimer {
public MyCountdownTimer(long startTime, long interval) {
super(startTime, interval);
}
public void onTick(long millisUntilFinished, GeoPoint geopoint) {
pointsCollected = (int) ((startTime / 1000) / point);
try {
storeGPS(geopoint);
} catch (IOException e) {
System.out.println("Point can not be written");
e.printStackTrace();
}
}
/**
* Pause the countdown.
*/
public long pause() {
long mStopTimeInFuture = 0;
mPauseTime = mStopTimeInFuture - SystemClock.elapsedRealtime();
boolean mPaused = true;
return mPauseTime;
}
@Override
public void onFinish() {
text.setText("Points are sent!");
timeElapsedView.setText("Points collected: "
+ String.valueOf(pointsCollected));
}
@SuppressLint("SimpleDateFormat")
public void storeGPS(GeoPoint gPt) throws IOException {
String stringbb = gPt.toString();
// SD card Access needed as a lot of points are saved
File mediaDir = new File(Environment.getExternalStorageDirectory()
.getPath());
if (!mediaDir.exists()) {
mediaDir.mkdir();
}
SimpleDateFormat sdf = new SimpleDateFormat("HH:mm:ss ");
String uhrzeit = sdf.format(new Date());
File file = new File(mediaDir, "PointData.txt");
// file.createNewFile();
FileOutputStream fos = new FileOutputStream(file, true);
fos.write(uhrzeit.getBytes());
fos.write(stringbb.getBytes());
fos.write(System.getProperty("line.separator").getBytes());
fos.close();
System.out.println("Your file has been written");
}
@Override
public void onTick(long millisUntilFinished) {
text.setText("Time in seconds remain:" + millisUntilFinished / 1000);
pointsCollected = (int) (startTime / 1000 / point);
timeElapsedView.setText("Points collected: "
+ String.valueOf(pointsCollected));
}
}
}
Here is the output of the written file:
19:26:11 9891183,52113450,0
19:26:11 9891183,52113450,0
19:26:11 9891183,52113450,0
19:26:11 9891183,52113450,0
19:26:11 9891183,52113450,0
19:26:11 9891183,52113450,0
19:26:11 9891183,52113450,0
19:26:11 9891183,52113450,0
19:26:11 9893183,52113450,0
19:26:11 9892283,52113450,0
Best Wishes, Marita

Try this code. Change the arguments of the schedule method as per your need:
schedule(TimerTask task, long delay, long period)
import java.util.Timer;
import java.util.TimerTask;
public class TimerDemo {
    public static void main(String[] args) {
        // create the timer task that performs the periodic work
        TimerTask task = new TimerTask() {
            @Override
            public void run() {
                System.out.println("timer working");
            }
        };
        Timer timer = new Timer();
        // schedule the task: first run after 100 ms, then every 100 ms
        timer.schedule(task, 100, 100);
    }
}
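For the asker's use case, the same call with a 2000 ms period stores a point every 2 seconds (a sketch, assuming task wraps the coordinate-storing work from the question):
// first run after 2 seconds, then every 2 seconds
timer.schedule(task, 2000, 2000);
Note that java.util.Timer runs the task on its own background thread, so any view updates made from the task must be posted back to the UI thread.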

I have found a solution to my problem, though I am not sure whether it is thread-safe:
try {
storeGPS(geopoint);
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} catch (IOException e) {
System.out.println("Point can not be written");
e.printStackTrace();
}
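An alternative that avoids blocking the timer callback with Thread.sleep() is to let a scheduler do the pacing. The following is only a sketch using a plain-Java ScheduledExecutorService (the 2-second interval and 20-second duration are assumptions taken from the question's values); on Android, any per-tick work that touches views would still need to be posted back to the UI thread.
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
public class PeriodicCaptureDemo {
    public static void main(String[] args) {
        final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        // run the capture task every 2 seconds, starting immediately,
        // without blocking the calling thread with Thread.sleep()
        scheduler.scheduleAtFixedRate(new Runnable() {
            @Override
            public void run() {
                System.out.println("capture point at " + System.currentTimeMillis());
            }
        }, 0, 2, TimeUnit.SECONDS);
        // stop after roughly 20 seconds, matching startTime in the question
        scheduler.schedule(new Runnable() {
            @Override
            public void run() {
                scheduler.shutdown();
            }
        }, 20, TimeUnit.SECONDS);
    }
}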

Related

Media Player Notification In Android

I have created an Android app with a music player activity, and I want a media-player notification to be shown while music is playing, with pause/stop controls and an image.
Like this:
But I do not know how to do it! Please, someone help me 😢
My code:
player_ui.java
package com.musicwala.djaman;
import android.app.Activity;
import android.os.Bundle;
import android.widget.TextView;
import android.widget.SeekBar;
import android.widget.Button;
import android.media.MediaPlayer;
import android.net.Uri;
import java.io.IOException;
import android.widget.SearchView.OnCloseListener;
import java.util.Timer;
import java.util.TimerTask;
import android.media.PlaybackParams;
import android.graphics.PorterDuff;
import android.view.View;
import android.support.v4.app.NotificationCompat;
import android.content.ContentResolver;
import android.app.NotificationManager;
import android.content.Context;
//import wseemann.media.FFmpegMediaMetadataRetriever;
import android.graphics.BitmapFactory;
import android.graphics.Bitmap;
import android.media.MediaMetadataRetriever;
import android.media.Image;
import android.widget.ImageView;
public class play_ui extends Activity
{
static MediaPlayer mp;
TextView songtext;
String path;
SeekBar sb;
Button pause;
Button previous;
Button next;
Thread updateSeekBar;
@Override
protected void onCreate(Bundle savedInstanceState)
{
// TODO: Implement this method
super.onCreate(savedInstanceState);
setContentView(R.layout.music_player_ui);
songtext = (TextView) findViewById(R.id.txtSongLabel);
songtext.setSelected(true);
String name = getIntent().getStringExtra("file");
path = (String) getIntent().getStringExtra("path");
songtext.setText(name);
pause = (Button) findViewById(R.id.pause);
previous = (Button)findViewById(R.id.previous);
next = (Button)findViewById(R.id.next);
//final SeekBar seekbar = (SeekBar) findViewById(R.id.seekBar);
sb=(SeekBar)findViewById(R.id.seekBar);
MediaMetadataRetriever mmr = new MediaMetadataRetriever();
byte[] rawArt;
Bitmap art = null;
BitmapFactory.Options bfo=new BitmapFactory.Options();
mmr.setDataSource(path);
rawArt = mmr.getEmbeddedPicture();
final ImageView image = (ImageView) findViewById(R.id.album_art);
// if rawArt is null then no cover art is embedded in the file or is not
// recognized as such.
if (null != rawArt)
art = BitmapFactory.decodeByteArray(rawArt, 0, rawArt.length, bfo);
image.setImageBitmap(art);
// Code that uses the cover art retrieved below.
/* updateSeekBar=new Thread(){
@Override
public void run(){
int totalDuration = mp.getDuration();
int currentPosition = 0;
while(currentPosition < totalDuration){
try{
sleep(500);
currentPosition=mp.getCurrentPosition();
sb.setProgress(currentPosition);
}
catch (InterruptedException e){
}
}
}
};*/
updateSeekBar = new Thread() {
@Override
public void run() {
int runtime = mp.getDuration();
int currentPosition = 0;
int adv = 0;
while ((adv = ((adv = runtime - currentPosition) < 500)?adv:500) > 2) {
try {
currentPosition = mp.getCurrentPosition();
if (sb != null) {
sb.setProgress(currentPosition);
}
sleep(adv);
} catch (InterruptedException e) {
e.printStackTrace();
} catch (IllegalStateException e) {
sb.setProgress(runtime);
break;
}
}
}
};
if(mp != null){
mp.stop();
mp.release();
}
//int pos = 0;
mp = new MediaPlayer();
try
{
mp.setDataSource(path);
mp.prepare();
//sb=(SeekBar)findViewById(R.id.seekBar);
//sb.setMax(mp.getDuration());
}
catch (IOException e)
{}
catch (IllegalArgumentException e)
{}
catch (SecurityException e)
{}
catch (IllegalStateException e)
{}
mp.start();
//Find the seek bar by Id (which you have to create in layout)
// Set seekBar max with length of audio
// You need a Timer variable to set progress with position of audio
sb.setMax(mp.getDuration());
updateSeekBar.start();
sb.getProgressDrawable().setColorFilter(getResources().getColor(R.color.colorPrimary), PorterDuff.Mode.MULTIPLY);
sb.getThumb().setColorFilter(getResources().getColor(R.color.colorPrimary), PorterDuff.Mode.SRC_IN);
sb.setOnSeekBarChangeListener(new
SeekBar.OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int i,
boolean b) {
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
mp.seekTo(seekBar.getProgress());
}
});
pause.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
sb.setMax(mp.getDuration());
if(mp.isPlaying()){
pause.setBackgroundResource(R.drawable.ic_play_arrow_black_24dp);
mp.pause();
}
else {
pause.setBackgroundResource(R.drawable.pause);
mp.start();
}
}
});
}
}
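For reference, one common way to get such controls is a NotificationCompat notification with action buttons. The sketch below is only an illustration, not a complete solution: the broadcast action string is made up, the BroadcastReceiver that would react to it is not shown, and android.content.Intent plus android.app.PendingIntent would also need to be imported.
private void showPlayerNotification(String songName, Bitmap albumArt) {
    // hypothetical broadcast action; a matching BroadcastReceiver is assumed and not shown
    Intent pauseIntent = new Intent("com.musicwala.djaman.ACTION_PAUSE");
    PendingIntent pausePending = PendingIntent.getBroadcast(this, 0, pauseIntent, 0);
    NotificationCompat.Builder builder = new NotificationCompat.Builder(this)
            .setSmallIcon(R.drawable.pause)   // reuse an existing drawable
            .setLargeIcon(albumArt)           // cover art decoded earlier with MediaMetadataRetriever
            .setContentTitle("Now playing")
            .setContentText(songName)
            .setOngoing(true)                 // keep the notification while music plays
            .addAction(R.drawable.pause, "Pause", pausePending);
    NotificationManager nm = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
    nm.notify(1, builder.build());
}
Calling this right after mp.start() in onCreate() would show the notification when playback begins; tapping the Pause action then fires the broadcast for the receiver to pause the MediaPlayer.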

How to change screen orientation from landscape to portrait?

I have applied the answers from the following post but was unable to solve the problem.
OpenCV camera orientation issue
I am getting the following exception.
Exception locking surface
java.lang.IllegalArgumentException
at android.view.Surface.nativeLockCanvas(Native Method)
at android.view.Surface.lockCanvas(Surface.java:264)
at android.view.SurfaceView$4.internalLockCanvas(SurfaceView.java:825)
at android.view.SurfaceView$4.lockCanvas(SurfaceView.java:793)
at org.opencv.android.CameraBridgeViewBase.deliverAndDrawFrame(CameraBridgeViewBase.java:403)
at org.opencv.android.JavaCameraView$CameraWorker.run(JavaCameraView.java:365)
at java.lang.Thread.run(Thread.java:818)
Here is the OpenCV library function which throws the exception:
protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
Mat modified;
if (mListener != null) {
modified = mListener.onCameraFrame(frame);
} else {
modified = frame.rgba();
}
boolean bmpValid = true;
Canvas canvas = null ;
if (modified != null) {
try {
Utils.matToBitmap(modified, mCacheBitmap);
canvas = getHolder().lockCanvas();
} catch(Exception e) {
Log.e(TAG, "Mat type: " + modified);
Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmpValid = false;
}
}
if (bmpValid && mCacheBitmap != null) {
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
Log.d(TAG, "mStretch value: " + mScale);
if (mScale != 0) {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2),
(int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2 + mScale*mCacheBitmap.getWidth()),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2 + mScale*mCacheBitmap.getHeight())), null);
} else {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
(canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()), null);
}
if (mFpsMeter != null) {
mFpsMeter.measure();
mFpsMeter.draw(canvas, 20, 30);
}
getHolder().unlockCanvasAndPost(canvas);
}
}
}
Here is my main activity, which uses the OpenCV library that calls this method:
package org.opencv.samples.facedetect;
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.graphics.Camera;
import android.media.AudioManager;
import android.net.ConnectivityManager;
import android.os.Build;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.provider.Settings;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import org.opencv.samples.facedetect.DetectionBasedTracker;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
public class FdActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2 , SharedPreferences.OnSharedPreferenceChangeListener {
private static final String TAG = "OCVSample::Activity";
private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);
public static final int JAVA_DETECTOR = 0;
public static final int NATIVE_DETECTOR = 1;
//private MenuItem mItemExit;
private MenuItem mItemSettings ;
private MenuItem showCounterValues ;
private TextView faceCounterTv ;
private Button resetButton ;
private Button savebtn ;
private Button quitButton ;
private Mat mRgba;
private Mat mGray;
private File mCascadeFile;
private CascadeClassifier mJavaDetector;
private DetectionBasedTracker mNativeDetector;
private int mDetectorType = JAVA_DETECTOR;
// private String[] mDetectorName;
private float mRelativeFaceSize = 0.2f;
private int mAbsoluteFaceSize = 0;
private float scaleFactor ;
private int minNeighbour ;
private int delayTime ;
private boolean isFaces_detect ;
private boolean isFaces_detect_pre ;
private boolean count_Face_Logic ;
private float countFace = 0.0f ;
private long startTime ;
private AudioManager mAudioManager ;
private static final int MY_PERMISSIONS_REQUEST_ACCOUNTS = 1;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS: {
Log.i(TAG, "OpenCV loaded successfully");
// Load native library after(!) OpenCV initialization
System.loadLibrary("detectionBasedTracker");
try {
// load cascade file from application resources
InputStream is = getResources().openRawResource(R.raw.haarcascade_frontalface_default);
File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
mCascadeFile = new File(cascadeDir, "haarcascade_frontalface_default.xml");
FileOutputStream os = new FileOutputStream(mCascadeFile);
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
os.close();
mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
if (mJavaDetector.empty()) {
Log.e(TAG, "Failed to load cascade classifier");
mJavaDetector = null;
} else
Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0);
cascadeDir.delete();
} catch (IOException e) {
e.printStackTrace();
Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
}
mOpenCvCameraView.enableView();
}
break;
default: {
super.onManagerConnected(status);
}
break;
}
}
};
public FdActivity() {
isFaces_detect = false ;
isFaces_detect_pre = false ;
count_Face_Logic = true ;
startTime = System.currentTimeMillis();
mAbsoluteFaceSize = 200 ;
scaleFactor = 1.2f ;
minNeighbour = 1 ;
delayTime = 1 ;
Log.i(TAG, "Instantiated new " + this.getClass());
}
/**
* Called when the activity is first created.
*/
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
//getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.face_detect_surface_view);
setSupportActionBar((Toolbar) findViewById(R.id.toolbar));
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.fd_activity_surface_view);
faceCounterTv = (TextView) findViewById(R.id.faceCountertv);
resetButton = (Button) findViewById(R.id.resetbtn);
savebtn = (Button) findViewById(R.id.savebtn);
quitButton = (Button) findViewById(R.id.quitbtn);
mAudioManager = (AudioManager) getSystemService(AUDIO_SERVICE);
resetButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
countFace = 0 ;
Toast.makeText(getApplicationContext() , "Reset the Face Counter" , Toast.LENGTH_LONG).show();
}
});
savebtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
saveFaceCounter();
Toast.makeText(getApplicationContext() , "Counter Value Saved" , Toast.LENGTH_SHORT).show();
}
});
quitButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
System.exit(0);
}
});
mOpenCvCameraView.setVisibility(CameraBridgeViewBase.VISIBLE);
// mOpenCvCameraView.setAlpha(0);
mOpenCvCameraView.setCameraIndex(1);
mOpenCvCameraView.setCvCameraViewListener(this);
//if (checkAndRequestPermissions()){
// Toast.makeText(getApplicationContext() , "OnCreate" , Toast.LENGTH_LONG).show();
//setSharedPreferences();
//}
// check current state first
// boolean state = isAirplaneMode();
// // toggle the state
// if (state)
// toggleAirplaneMode(0, state);
// else
// toggleAirplaneMode(1, state);
}
@Override
public void onPause() {
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume() {
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
mOpenCvCameraView.disableView();
PreferenceManager.getDefaultSharedPreferences(this).unregisterOnSharedPreferenceChangeListener(this);
}
public void onCameraViewStarted(int width, int height) {
mGray = new Mat();
mRgba = new Mat();
}
public void onCameraViewStopped() {
mGray.release();
mRgba.release();
}
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
// Mat mRgbaT = mRgba.t();
// Core.flip(mRgba.t(), mRgbaT, 1);
// Imgproc.resize(mRgbaT, mRgbaT, mRgba.size());
mGray = inputFrame.gray();
//Core.transpose(mGray, mGray);
//Core.flip(mGray, mGray, 0);
if (mAbsoluteFaceSize == 0) {
int height = mGray.rows();
if (Math.round(height * mRelativeFaceSize) > 0) {
mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
}
mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
}
MatOfRect faces = new MatOfRect();
if (mDetectorType == JAVA_DETECTOR) {
if (mJavaDetector != null)
mJavaDetector.detectMultiScale(mGray, faces, scaleFactor, minNeighbour, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
} else if (mDetectorType == NATIVE_DETECTOR) {
if (mNativeDetector != null)
mNativeDetector.detect(mGray, faces);
} else {
Log.e(TAG, "Detection method is not selected!");
}
Rect[] facesArray = faces.toArray();
for (int i = 0; i < facesArray.length; i++) {
Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
}
countDetectedFace(faces);
runOnUiThread(new Runnable() {
@Override
public void run() {
faceCounterTv.setText(String.valueOf(countFace));
}
});
return mRgba;
}
public void countDetectedFace(MatOfRect faces){
// do{
// This block makes sure it only counts a face when it appears, e.g.: no detected face --> face --> no detected face (count as 1)
if (faces.empty()){
isFaces_detect = isFaces_detect_pre = false ;
}
else{
isFaces_detect = true ;
}
// Only count when the previous frame had no face and the current frame has one. This eliminates counting when successive frames have a face detected
if ((isFaces_detect_pre == false) && (isFaces_detect == true) && (count_Face_Logic == true)){
countFace += 0.25 ; // four face detections count as 1
startTime = System.currentTimeMillis(); // store a new time value so that it does not count every millisecond
isFaces_detect_pre = true ;
Log.d(TAG , String.valueOf(countFace));
}
if ((System.currentTimeMillis() - startTime) < delayTime){ // make sure it does not count every frame; buffer of 1 second
count_Face_Logic = false ;
}
else{
count_Face_Logic = true ;
}
// }while(!isAppExit);
}
}
How can I get rid of this exception?

Implementing next button in audio player android

I have been trying to implement a next-song button in my audio player app. I copied some code from a tutorial but it's not working. The button for the next song is btnNext and the method is cde(); it's the last method in the code. The button gets clicked but the next song is not played; the current song keeps playing. How do I fix this?
package com.example.dell_1.myapp3;
import android.app.Activity;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.media.MediaMetadataRetriever;
import android.media.MediaPlayer;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore;
import android.util.Log;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static android.R.attr.path;
public class PlayListActivity extends Activity {
private String[] mAudioPath;
private MediaPlayer mMediaPlayer;
private String[] mMusicList;
int currentPosition = 0;
private List<String> songs = new ArrayList<>();
MediaMetadataRetriever metaRetriver;
byte[] art;
ImageView album_art;
TextView album;
TextView artist;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_play_list);
mMediaPlayer = new MediaPlayer();
ListView mListView = (ListView) findViewById(R.id.list);
mMusicList = getAudioList();
ArrayAdapter<String> mAdapter = new ArrayAdapter<>(this,
android.R.layout.simple_list_item_1, mMusicList);
mListView.setAdapter(mAdapter);
mListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> arg0, View view, int arg2,
long arg3) {
try {
playSong(mAudioPath[arg2]);
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
});
}
private String[] getAudioList() {
final Cursor mCursor = getContentResolver().query(
MediaStore.Audio.Media.EXTERNAL_CONTENT_URI,
new String[]{MediaStore.Audio.Media.DISPLAY_NAME, MediaStore.Audio.Media.DATA}, null, null,
"LOWER(" + MediaStore.Audio.Media.TITLE + ") ASC");
int count = mCursor.getCount();
String[] songs = new String[count];
mAudioPath = new String[count];
int i = 0;
if (mCursor.moveToFirst()) {
do {
songs[i] = mCursor.getString(mCursor.getColumnIndexOrThrow(MediaStore.Audio.Media.DISPLAY_NAME));
mAudioPath[i] = mCursor.getString(mCursor.getColumnIndexOrThrow(MediaStore.Audio.Media.DATA));
i++;
} while (mCursor.moveToNext());
}
mCursor.close();
return songs;
}
private void playSong(String path) throws IllegalArgumentException,
IllegalStateException, IOException {
setContentView(R.layout.activity_android_building_music_player);
Log.d("ringtone", "playSong :: " + path);
mMediaPlayer.reset();
mMediaPlayer.setDataSource(path);
//mMediaPlayer.setLooping(true);
mMediaPlayer.prepare();
mMediaPlayer.start();
acv(path);
abc();
cde();
}
public void acv(String path) {
getInit();
metaRetriver = new MediaMetadataRetriever();
metaRetriver.setDataSource(path);
try {
art = metaRetriver.getEmbeddedPicture();
Bitmap songImage = BitmapFactory.decodeByteArray(art, 0, art.length);
album_art.setImageBitmap(songImage);
album.setText(metaRetriver
.extractMetadata(MediaMetadataRetriever.METADATA_KEY_ALBUM));
artist.setText(metaRetriver
.extractMetadata(MediaMetadataRetriever.METADATA_KEY_ARTIST));
} catch (Exception e) {
album_art.setBackgroundColor(Color.GRAY);
album.setText("Unknown Album");
artist.setText("Unknown Artist");
}
}
public void getInit() {
album_art = (ImageView) findViewById(R.id.coverart1);
album = (TextView) findViewById(R.id.Album);
artist = (TextView) findViewById(R.id.artist_name);
}
public void abc() {
ImageButton btnPlay1 = (ImageButton) findViewById(R.id.btnPlay1);
btnPlay1.setBackgroundColor(Color.TRANSPARENT);
btnPlay1.setOnClickListener(
new View.OnClickListener() {
public void onClick(View v) {
if (mMediaPlayer.isPlaying()) {
mMediaPlayer.pause();
} else {
mMediaPlayer.start();
}
}
});
}
public void cde() {
ImageButton btnNext = (ImageButton) findViewById(R.id.btnNext); //this is the button for playing next song.
btnNext.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg0) {
try {
currentPosition=currentPosition+1;
playSong(path + songs.get(currentPosition));
} catch (IOException ex) {
ex.printStackTrace();
}
}
});
}
}
Add this in the onCreate method:
Bundle bundle = getIntent().getExtras();
position = bundle.getInt("position");
And change the next button listener to:
btnNext.setOnClickListener(new View.OnClickListener() { // this is the next-song button
    @Override
    public void onClick(View arg0) {
        if (mMediaPlayer != null && mMediaPlayer.isPlaying()) {
            mMediaPlayer.stop();
        }
        try {
            // reset before setting a new data source, then play the next track
            Uri uri = Uri.parse(mAudioPath[position + 1]);
            mMediaPlayer.reset();
            mMediaPlayer.setDataSource(getApplicationContext(), uri);
            mMediaPlayer.prepare();
            mMediaPlayer.start();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
});
int currentPosition = 0;
if (++currentPosition >= songs.size()) {
currentPosition = 0;
} else
try {
playSong(path + songs.get(currentPosition));
} catch (IOException ex) {
ex.printStackTrace();
}
}
The above code is your code from the onClick method.
As you can see, you are initializing the currentPosition inside onClick.
So to show you what this implies:
onClick -> position = 0 -> position++ (position = 1) -> playSong(songUri)
When you want:
onClick -> position++ -> playSong(songUri)
So, before setting the onClickListener, you add:
currentPosition = 0;
currentPosition is declared in the class now, so make sure you add it. It should look like this:
int currentPosition;
// ...other code
public void cde() {
    // ...code here
    currentPosition = 0;
    // ...set the onClickListener
}
Remove int currentPosition = 0; from the onClick method.
I assume there is a position 0 as well. Here is the refactored code that would handle that:
try {
playSong(path + songs.get(currentPosition));
if (++currentPosition >= songs.size()) {
currentPosition = 0;
}
} catch (IOException ex) {
ex.printStackTrace();
}
The above code also addresses another issue you would be likely to meet: song 0 would never play on the first round.
Another thing you want to check for (not giving you the code for it, as it is easy) is to not play or allow the next song if there are no songs. If songs.size() == 0, it would never play but would keep setting the position to 0 over and over.
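Putting those pieces together, a rough sketch of the corrected cde() could look like this (it keeps the field names from the question; songs must actually be populated and path must be a valid directory string for playSong() to succeed):
public void cde() {
    ImageButton btnNext = (ImageButton) findViewById(R.id.btnNext);
    btnNext.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View arg0) {
            if (songs.isEmpty()) {
                return; // nothing to play
            }
            try {
                // play the current song first, then advance and wrap around
                playSong(path + songs.get(currentPosition));
                if (++currentPosition >= songs.size()) {
                    currentPosition = 0;
                }
            } catch (IOException ex) {
                ex.printStackTrace();
            }
        }
    });
}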

two actions for one button android (android)

There is a problem with the code for my Android application: I tried to assign multiple actions to a single button. Here's the code:
import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
public class MainActivity extends Activity implements OnClickListener {
AudioRecord audioRecord;
private Thread recordingThread = null;
// Fields used below but missing from the pasted code (names taken from the usage, values assumed):
private Button start_button;
private Button stop_button;
private volatile boolean reading = false;
private static final String TAG = "MainActivity";
private int myBufferSize = 8 * 1024;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
start_button = (Button) findViewById(R.id.button1);
stop_button = (Button) findViewById(R.id.button2);
start_button.setOnClickListener(this);
stop_button.setOnClickListener(this);
createAudioRecorder();
}
private void createAudioRecorder() {
int sampleRate = 16000;
int channelConfig = AudioFormat.CHANNEL_IN_MONO;
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
int minInternalBufferSize = AudioRecord.getMinBufferSize(sampleRate,
channelConfig, audioFormat);
int internalBufferSize = minInternalBufferSize * 4;
Log.d(TAG, "minInternalBufferSize = " + minInternalBufferSize
+ ", internalBufferSize = " + internalBufferSize
+ ", myBufferSize = " + myBufferSize);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate, channelConfig, audioFormat, internalBufferSize);
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.button1:
reading = true;
startRecorder();
break;
case R.id.button2:
reading = false;
stopRecorder();
break;
}
}
private void startRecorder() {
recordingThread = new Thread(new Runnable() {
@Override
public void run() {
AudioData();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
protected void AudioData() {
audioRecord.startRecording();
byte data[] = new byte[myBufferSize];
byte[] myBuffer = new byte[myBufferSize];
int readCount = 0;
int totalCount = 0;
while (reading) {
readCount = audioRecord.read(myBuffer, 0, myBufferSize);
data = myBuffer;
totalCount += readCount;
}
}
private void stopRecorder() {
if (null != audioRecord) {
reading = false;
audioRecord.stop();
audioRecord.release();
audioRecord = null;
recordingThread = null;
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
}
When I run this code, the first time it works, but not the following times.
The logs show that the error is somewhere here (NullPointerException):
audioRecord.startRecording();
Help solve the problem!
When button2 is clicked, stopRecorder() is called, which assigns null to audioRecord. Therefore, the next time button1 is clicked, the new thread that is spawned will throw a NullPointerException when it calls AudioData().
Perhaps you can eliminate createAudioRecorder() and move its logic to the beginning of AudioData().
Also, I would rename AudioData(), which looks like a class name, to recordAudioData(). That would conform better to standard Java naming conventions.
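As a rough sketch of that suggestion (it calls createAudioRecorder() again at the start of AudioData() rather than inlining its body, which has the same effect; the field names are the ones from the question):
protected void AudioData() {
    // rebuild the recorder on every run, so a recorder released by stopRecorder() is never reused
    createAudioRecorder();
    audioRecord.startRecording();
    byte[] myBuffer = new byte[myBufferSize];
    while (reading) {
        int readCount = audioRecord.read(myBuffer, 0, myBufferSize);
        // ...process readCount bytes from myBuffer here...
    }
}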

My app freezes, how to implement Threads in it?

Could you help me figure out how to implement threads in this, so it won't freeze while it waits for the answer from the server?
I've tried for 5 hours or so; I simply can't find a way to run it in a thread and then return the result to set the text with tv.setText().
package zee.rhs.dk;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.Socket;
import java.net.UnknownHostException;
import android.app.Activity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
public class AndroidClientActivity extends Activity {
private String ip = "90.184.254.246";
private int port = 8081;
private String line;
private TextView tv;
private Button btn;
private Socket socket;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
tv = (TextView) findViewById(R.id.tv);
btn = (Button) findViewById(R.id.btn);
btn.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
BufferedReader in = null;
PrintWriter out = null;
try {
socket = new Socket(ip, port);
out = new PrintWriter(socket.getOutputStream(), true);
out.println("update");
in = new BufferedReader(new InputStreamReader(socket
.getInputStream()));
while ((line = in.readLine()) == null) {
}
tv.setText(line);
} catch (UnknownHostException e) {
Toast.makeText(AndroidClientActivity.this,
"Can't reach ip: " + ip, Toast.LENGTH_LONG).show();
e.printStackTrace();
} catch (IOException e) {
Toast.makeText(AndroidClientActivity.this,
"Accept failed at port: " + port, Toast.LENGTH_LONG)
.show();
e.printStackTrace();
} finally {
out.close();
}
}
});
}
}
AsyncTask is what you're looking for. From the help page:
private class DownloadFilesTask extends AsyncTask<URL, Integer, Long> {
protected Long doInBackground(URL... urls) {
int count = urls.length;
long totalSize = 0;
for (int i = 0; i < count; i++) {
totalSize += Downloader.downloadFile(urls[i]);
publishProgress((int) ((i / (float) count) * 100));
}
return totalSize;
}
protected void onProgressUpdate(Integer... progress) {
setProgressPercent(progress[0]);
}
protected void onPostExecute(Long result) {
showDialog("Downloaded " + result + " bytes");
}
}
Remember that doInBackground runs in a separate thread, and onPostExecute runs in the UI thread after doInBackground completes.
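Applied to the question's code, a rough sketch might look like the following (the field names ip, port and tv are taken from the question; the Toast-based error reporting is left out for brevity):
private class UpdateTask extends AsyncTask<Void, Void, String> {
    @Override
    protected String doInBackground(Void... params) {
        try {
            Socket socket = new Socket(ip, port);
            PrintWriter out = new PrintWriter(socket.getOutputStream(), true);
            out.println("update");
            BufferedReader in = new BufferedReader(new InputStreamReader(socket.getInputStream()));
            String result = in.readLine();   // blocks, but off the UI thread
            socket.close();
            return result;
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }
    }
    @Override
    protected void onPostExecute(String result) {
        if (result != null) {
            tv.setText(result);              // back on the UI thread
        }
    }
}
The button's onClick would then just call new UpdateTask().execute();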
Put the code that is responsible for the request to the server into a separate thread:
Thread thread = new Thread() {
    @Override
    public void run() {
        try {
            // put your socket operations here;
            // any TextView update must be posted back with runOnUiThread()
        } catch (Exception e) {
            // handle the exception if you need to
        }
    }
};
thread.start();
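Since views can only be touched from the UI thread, the result would then be handed back, for example:
runOnUiThread(new Runnable() {
    @Override
    public void run() {
        tv.setText(line);
    }
});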
