How to keep track of audio playback position? - java

I created a thread to play an mp3 file in Java by converting it to an array of bytes.
I'm wondering if I can keep track of the current play position as the mp3 is being played.
First, I set up my music stream like so:
try {
AudioInputStream in = AudioSystem.getAudioInputStream(file);
musicInputStream = AudioSystem.getAudioInputStream(MUSIC_FORMAT, in);
DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, MUSIC_FORMAT);
musicDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
musicDataLine.open(MUSIC_FORMAT);
musicDataLine.start();
startMusicThread();
} catch(Exception e) {
e.printStackTrace();
}
Next, my music thread looks like this:
private class MusicThread extends Thread {
byte musicBuffer[] = new byte[BUFFER_SIZE];
public void run() {
try {
int musicCount = 0;
while(writeOutput){
if(writeMusic && (musicCount = musicInputStream.read(musicBuffer, 0, musicBuffer.length)) > 0){
musicDataLine.write(musicBuffer, 0, musicCount);
}
}
} catch (Exception e) {
System.out.println("AudioStream Exception - Music Thread"+e);
e.printStackTrace();
}
}
}
I thought of one possibility, to create another thread with a timer that slowly ticks down, second by second, to show the remaining amount of time for the mp3 song. But that doesn't seem like a good solution at all.

Your int musicCount (the return value from AudioInputStream.read(...)) tells you the number of bytes read, so with a small computation you can always figure out your place in the stream. (DataLine has some methods to do some of the math for you, but they can't always be used...see below.)
int musicCount = 0;
int totalBytes = 0;
while ( loop stuff ) {
// accumulate it
// and do whatever you need with it
totalBytes += musicCount;
musicDataLine.write(...);
}
To get the number of seconds elapsed, you can do the following things:
AudioFormat fmt = musicInputStream.getFormat();
long framesRead = totalBytes / fmt.getFrameSize();
long totalFrames = musicInputStream.getFrameLength();
double totalSeconds = (double) totalFrames / fmt.getSampleRate();
double elapsedSeconds =
((double) framesRead / (double) totalFrames) * totalSeconds;
So you'd just compute the elapsed time each loop and put it wherever you need it to go. Note that the accuracy of this depends somewhat on the size of your buffer: the smaller the buffer, the more accurate the position.
Also, Clip has some methods to query this for you (but you'd probably have to change what you're doing a lot).
These methods (get(Long)FramePosition/getMicrosecondPosition) are inherited from DataLine, so you can call them on the SourceDataLine as well if you don't want to do the math yourself. However, you basically need to make a new line for every file you play, so it depends on how you're using the line. (Personally I'd rather just do the division myself, since asking the line is kind of opaque.)
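For instance, a minimal sketch of asking the line directly (reusing the field name from the question; remember the position counts from when the line was opened, so a line reused across files keeps accumulating):
long framesPlayed = musicDataLine.getLongFramePosition();
double secondsPlayed = musicDataLine.getMicrosecondPosition() / 1000000.0;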
BTW:
musicDataLine.open(MUSIC_FORMAT);
You should open the line with your own buffer size specified, using the (AudioFormat, int) overload. SourceDataLine.write(...) only blocks when its internal buffer is full, so if it's a different size from your byte array, sometimes your loop is blocking, other times it's just spinning.
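For example, a minimal sketch, assuming BUFFER_SIZE is the same constant used for the byte array in the music thread:
musicDataLine.open(MUSIC_FORMAT, BUFFER_SIZE);
// or size the buffer for roughly 10 position updates per second:
// int bufferSize = MUSIC_FORMAT.getFrameSize() * (int) (MUSIC_FORMAT.getSampleRate() / 10);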
MCVE for good measure:
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.io.*;
import java.util.*;
import javax.sound.sampled.*;
public class SimplePlaybackProgress
extends WindowAdapter implements Runnable, ActionListener {
class AudioPlayer extends Thread {
volatile boolean shouldPlay = true;
final int bufferSize;
final AudioFormat fmt;
final AudioInputStream audioIn;
final SourceDataLine audioOut;
final long frameSize;
final long totalFrames;
final double sampleRate;
AudioPlayer(File file)
throws UnsupportedAudioFileException,
IOException,
LineUnavailableException {
audioIn = AudioSystem.getAudioInputStream(file);
fmt = audioIn.getFormat();
bufferSize = fmt.getFrameSize() * 8192;
frameSize = fmt.getFrameSize();
totalFrames = audioIn.getFrameLength();
sampleRate = fmt.getSampleRate();
try {
audioOut = AudioSystem.getSourceDataLine(audioIn.getFormat());
audioOut.open(fmt, bufferSize);
} catch (LineUnavailableException x) {
try {
audioIn.close();
} catch(IOException suppressed) {
// Java 7+
// x.addSuppressed(suppressed);
}
throw x;
}
}
@Override
public void run() {
final byte[] buffer = new byte[bufferSize];
long framePosition = 0;
try {
audioOut.start();
while (shouldPlay) {
int bytesRead = audioIn.read(buffer);
if (bytesRead < 0) {
break;
}
int bytesWritten = audioOut.write(buffer, 0, bytesRead);
if (bytesWritten != bytesRead) {
// shouldn't happen
throw new RuntimeException(String.format(
"read: %d, wrote: %d", bytesRead, bytesWritten));
}
framePosition += bytesRead / frameSize;
// or
// framePosition = audioOut.getLongFramePosition();
updateProgressBar(framePosition);
}
audioOut.drain();
audioOut.stop();
} catch (Throwable x) {
showErrorMessage(x);
} finally {
updateProgressBar(0);
try {
audioIn.close();
} catch (IOException x) {
showErrorMessage(x);
}
audioOut.close();
}
}
void updateProgressBar(
final long framePosition) {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
double fractionalProgress =
(double) framePosition / (double) totalFrames;
int progressValue = (int) Math.round(
fractionalProgress * theProgressBar.getMaximum());
theProgressBar.setValue(progressValue);
int secondsElapsed = (int) Math.round(
(double) framePosition / sampleRate);
int minutes = secondsElapsed / 60;
int seconds = secondsElapsed % 60;
theProgressBar.setString(String.format(
"%d:%02d", minutes, seconds));
}
});
}
void stopPlaybackAndDrain() throws InterruptedException {
shouldPlay = false;
this.join();
}
}
/* * */
public static void main(String[] args) {
SwingUtilities.invokeLater(new SimplePlaybackProgress());
}
JFrame theFrame;
JButton theButton;
JProgressBar theProgressBar;
// this should only ever have 1 thing in it...
// multithreaded code with poor behavior just bugs me,
// even for improbable cases, so the queue makes it more robust
final Queue<AudioPlayer> thePlayerQueue = new ArrayDeque<AudioPlayer>();
@Override
public void run() {
theFrame = new JFrame("Playback Progress");
theFrame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
theButton = new JButton("Open");
theProgressBar = new JProgressBar(
SwingConstants.HORIZONTAL, 0, 1000);
theProgressBar.setStringPainted(true);
theProgressBar.setString("0:00");
Container contentPane = theFrame.getContentPane();
((JPanel) contentPane).setBorder(
BorderFactory.createEmptyBorder(8, 8, 8, 8));
contentPane.add(theButton, BorderLayout.WEST);
contentPane.add(theProgressBar, BorderLayout.CENTER);
theFrame.pack();
theFrame.setResizable(false);
theFrame.setLocationRelativeTo(null);
theFrame.setVisible(true);
theButton.addActionListener(this);
theFrame.addWindowListener(this);
}
@Override
public void actionPerformed(ActionEvent ae) {
JFileChooser dialog = new JFileChooser();
int option = dialog.showOpenDialog(theFrame);
if (option == JFileChooser.APPROVE_OPTION) {
File file = dialog.getSelectedFile();
try {
enqueueNewPlayer(new AudioPlayer(file));
} catch (UnsupportedAudioFileException x) { // ew, Java 6
showErrorMessage(x); //
} catch (IOException x) { //
showErrorMessage(x); //
} catch (LineUnavailableException x) { //
showErrorMessage(x); //
} //
}
}
@Override
public void windowClosing(WindowEvent we) {
stopEverything();
}
void enqueueNewPlayer(final AudioPlayer newPlayer) {
// stopPlaybackAndDrain calls join
// so we want to do it off the EDT
new Thread() {
@Override
public void run() {
synchronized (thePlayerQueue) {
stopEverything();
newPlayer.start();
thePlayerQueue.add(newPlayer);
}
}
}.start();
}
void stopEverything() {
synchronized (thePlayerQueue) {
while (!thePlayerQueue.isEmpty()) {
try {
thePlayerQueue.remove().stopPlaybackAndDrain();
} catch (InterruptedException x) {
// shouldn't happen
showErrorMessage(x);
}
}
}
}
void showErrorMessage(Throwable x) {
x.printStackTrace(System.out);
String errorMsg = String.format(
"%s:%n\"%s\"", x.getClass().getSimpleName(), x.getMessage());
JOptionPane.showMessageDialog(theFrame, errorMsg);
}
}
For Clip, you'd just have something like a Swing timer (or other side-thread) and query it however often:
new javax.swing.Timer(100, new ActionListener() {
@Override
public void actionPerformed(ActionEvent ae) {
long usPosition = theClip.getMicrosecondPosition();
// put it somewhere
}
}).start();
Related:
How to calculate the level/amplitude/db of audio signal in java?
How to make waveform rendering more interesting?

Related

Increase rate at which samples are taken from AudioRecord

Hey, so I am trying to record data from the audio device and graph it. It is recording from a piezo that outputs in the 16 kHz to 19 kHz range. Below I will have the relevant code. My current issue is that the data isn't being calculated or read at a fast enough rate. I am no sound engineer and a lot of the FFT work was just pulled from multiple resources.
Here is a picture of what I am talking about. As you can see, at first I was getting only one data point from the first R wave in the QRS complex. Then I changed my buffer size to the minimum buffer size * 500, and that seemed to give me two data points. I would like to have several in that really short amount of time to get an accurate reading.
TL;DR: What do I change so that I can get more frequency readings per second?
https://dl2.pushbulletusercontent.com/mugoZmNLGtbCta4Si5Pu4RUdJOgMqILK/Screenshot_20160705-094146.png
And my code...
The buffer is being added to a list that has already been declared. The FFT method I am using takes a short[].
public int audioSource = MediaRecorder.AudioSource.MIC;
public int channelConfig = AudioFormat.CHANNEL_IN_MONO;
public int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
public AudioRecord audioRecord = null;
public int blockSize = 256; // deal with this many samples at a time
public int sampleRate = 44100; // Sample rate in Hz
public void audioRecordLoop() throws Exception {
new Thread(new Runnable() {
@Override
public void run() {
Log.e(TAG, "start audioRecordLoop");
int bufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioEncoding);
audioRecord = new AudioRecord(audioSource, sampleRate, channelConfig, audioEncoding, bufferSize * 500);
if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
Log.e(TAG, "AudioRecord init failed");
return;
}
final short[] buffer = new short[blockSize];
audioRecord.startRecording();
int len = 0;
while (isRecording == true) {
len = audioRecord.read(buffer, 0, blockSize);
shortList.add(buffer);
if (len < 0) {
Log.e(TAG, "read error " + len);
return;
}
}
if (audioRecord != null)
audioRecord.release();
}
}).start();
}
to calculate
public void calcFrequency() {
new Thread(new Runnable() {
@Override
public void run() {
FrequencyScanner frequencyScanner = new FrequencyScanner();
while (isRecording) {
if (shortList != null && shortList.size() > 0) {
short[] shorts = shortList.get(0);
final double frequencys = frequencyScanner.extractFrequency(shorts, sampleRate);
frequencyList.add(frequencys);
}
}
}
}).start();
}
then to graph
public void graphFrequenecy() {
new Thread(new Runnable() {
@Override
public void run() {
while (isRecording) {
try {
if (frequencyList != null && frequencyList.size() > 0) {
final double frequency = frequencyList.get(0);
if (frequency > 13000) {
Log.d(TAG, "run: " + frequency);
runOnUiThread(new Runnable() {
@Override
public void run() {
series.appendData(new DataPoint(series.getHighestValueX() + 1, frequency), true, 3000);
}
});
}
frequencyList.remove(0);
}
} catch (Exception e) {
e.printStackTrace();
}
try {
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
}).start();
}

Need get correct AVI Player class to play all AVI video files inside android application

I am trying to find a correct AVI video player class to play AVI files, since my current AVI player class does not seem to work well anymore.
Some AVI files play correctly, but others do not.
If anyone knows of an AVI video player class that plays all AVI files correctly,
please help me.
Thank you.
P.S.:
I don't want to send an intent to a third-party application to play the AVI file.
Below is the code I currently use to play AVI files:
AVI Player.java
package runnable;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import iterface.IVideoSink;
public class VideoPlayer implements Runnable {
private final int FPS = 24;
/**
* String section
*/
private boolean IS_ALIVE = true;
private long LAST_FRAME_TIME;
/**
* Data section
*/
private ArrayList<IVideoSink> mAlVideoSinks;
/**
* Others section
*/
private BufferedInputStream mBufferedInputStream;
public VideoPlayer(String filename) {
mAlVideoSinks = new ArrayList<IVideoSink>();
try {
mBufferedInputStream = new BufferedInputStream(new FileInputStream(filename));
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
public void addVideoSink(IVideoSink videoSink) {
synchronized (mAlVideoSinks) {
mAlVideoSinks.add(videoSink);
}
}
public void removeVideoSink(IVideoSink videoSink) {
synchronized (mAlVideoSinks) {
if (mAlVideoSinks.contains(videoSink))
mAlVideoSinks.remove(videoSink);
}
}
@Override
public void run() {
int count = 0;
while (IS_ALIVE) {
if (LAST_FRAME_TIME == 0) {
LAST_FRAME_TIME = System.currentTimeMillis();
}
try {
long currentTime = System.currentTimeMillis();
if (currentTime - LAST_FRAME_TIME < 1000 / FPS) {
Thread.sleep(1000 / FPS - (currentTime - LAST_FRAME_TIME));
}
LAST_FRAME_TIME = System.currentTimeMillis();
int b0 = mBufferedInputStream.read();
if (b0 == -1) break;
int b1 = mBufferedInputStream.read();
int b2 = mBufferedInputStream.read();
int b3 = mBufferedInputStream.read();
count = b0 + (b1 << 8) + (b2 << 16) + (b3 << 24);
byte[] buffer = new byte[count];
int readCount = mBufferedInputStream.read(buffer, 0, count);
for (IVideoSink videoSink : mAlVideoSinks) {
videoSink.onFrame(buffer, null);
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
try {
mBufferedInputStream.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
for (IVideoSink videoSink : mAlVideoSinks) {
videoSink.onVideoEnd();
}
}
public void stop() {
IS_ALIVE = false;
}
}
PCM Player.java
package runnable;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import java.util.ArrayList;
public class PCMPlayer implements Runnable {
/**
* String section
*/
private boolean IS_ALIVE = true;
/**
* Data section
*/
private ArrayList<byte[]> mAlBuffers = new ArrayList<byte[]>();
/**
* Other section
*/
private AudioTrack mAudioTrack;
public PCMPlayer() {
}
@Override
public void run() {
int bufSize = AudioTrack.getMinBufferSize(8000,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT);
mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
8000,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT,
bufSize,
AudioTrack.MODE_STREAM);
mAudioTrack.play();
while (IS_ALIVE) {
byte[] buffer = null;
boolean dataFlag = true;
while (dataFlag) {
synchronized (mAlBuffers) {
if (mAlBuffers.size() > 0) {
buffer = mAlBuffers.remove(0);
} else {
dataFlag = false;
break;
}
}
mAudioTrack.write(buffer, 0, buffer.length);
}
try {
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
mAudioTrack.stop();
mAudioTrack.release();
}
public void writePCM(byte[] pcm) {
synchronized (mAlBuffers) {
byte[] buffer = new byte[pcm.length];
System.arraycopy(pcm, 0, buffer, 0, buffer.length);
mAlBuffers.add(buffer);
}
}
public void stop() {
IS_ALIVE = false;
}
}
I don't know your intentions, but in your place I would use a library for that, for example jVLC.
The problem is that AVI is just a container for sound and video data, which can be encoded with any codec. The sound can be MP3, Ogg, or whatever, and the video can be MPEG, DivX, or Xvid of several versions.
You cannot count on all AVIs containing the same sound/video formats. That is why some of your AVIs cannot be played by your current program.
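If you just want to see which codecs a particular AVI actually contains, here is a minimal diagnostic sketch using Android's MediaExtractor (API 16+); whether a given AVI parses at all still depends on the device's media support, so treat this as an aid rather than a player:
import android.media.MediaExtractor;
import android.media.MediaFormat;
import java.io.IOException;
public class ContainerInspector {
    // Prints the MIME type of every track (e.g. video/avc, audio/mpeg) in the file.
    public static void printTrackMimeTypes(String filePath) throws IOException {
        MediaExtractor extractor = new MediaExtractor();
        try {
            extractor.setDataSource(filePath);
            for (int i = 0; i < extractor.getTrackCount(); i++) {
                MediaFormat format = extractor.getTrackFormat(i);
                System.out.println("Track " + i + ": " + format.getString(MediaFormat.KEY_MIME));
            }
        } finally {
            extractor.release();
        }
    }
}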

JSlider issue audio player

I am trying to build an audio player with an integrated JSlider, which updates the interface every microsecond.
In order to do so I am using the following:
sliderTime.setMinimum(0);
sliderTime.setMaximum((int) audioClip.getMicrosecondPosition());
I have the feeling that this is not the best implementation out there (any suggestions to improve it are highly appreciated).
By the way, the issue I am facing is that for the first second the JSlider does not update.
Please find MCVE below:
It plays only uncompressed WAV files.
Main
public class Main
{
public static void main(final String[] args)
{
SwingUtilities.invokeLater(new Runnable()
{
@Override
public void run()
{
JFrame f = new JFrame();
PlayerView pw = new PlayerView();
Border border = new EmptyBorder(15,15,15,15);
pw.setBorder(border);
f.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
f.getContentPane().setLayout(new BorderLayout());
f.getContentPane().add(pw, BorderLayout.CENTER);
f.pack();
f.setLocationRelativeTo(null);
f.setVisible(true);
}
});
}
}
AudioPlayer
public class AudioPlayer implements LineListener
{
private SimpleDateFormat dateFormater = new SimpleDateFormat("HH:mm:ss.SSS");
private TimeZone timeZone = Calendar.getInstance().getTimeZone();
public static final int REWIND_IN_MICROSECONDS = 3000000;
public static final int FORWARD_IN_MICROSECONDS = 3000000;
private boolean playCompleted;
private boolean isStopped;
private boolean isPaused;
private boolean isRewinded;
private boolean isForwarded;
private Clip audioClip;
public Clip getAudioClip()
{
return audioClip;
}
public void load(String audioFilePath) throws UnsupportedAudioFileException, IOException, LineUnavailableException
{
File encodedFile = new File(audioFilePath);
AudioInputStream pcmStream = AudioSystem.getAudioInputStream(encodedFile);
AudioFormat format =pcmStream.getFormat();
DataLine.Info info = new DataLine.Info(Clip.class, format);
audioClip = (Clip) AudioSystem.getLine(info);
audioClip.addLineListener(this);
audioClip.open(pcmStream);
}
public long getClipMicroSecondLength()
{
return audioClip.getMicrosecondLength();
}
public long getClipMicroSecondPosition()
{
return audioClip.getMicrosecondPosition();
}
public String getClipLengthString()
{
long yourmilliseconds = audioClip.getMicrosecondLength() / 1_000;
Date resultdate = new Date(yourmilliseconds);
dateFormater.setTimeZone(TimeZone.getTimeZone(timeZone.getDisplayName(false, TimeZone.SHORT)));
return dateFormater.format(resultdate);
}
public void play() throws IOException
{
audioClip.start();
playCompleted = false;
isStopped = false;
while (!playCompleted)
{
try
{
Thread.sleep(30);
}
catch (InterruptedException ex)
{
if (isStopped)
{
audioClip.stop();
break;
}
else if (isPaused)
{
audioClip.stop();
}
else if (isRewinded)
{
if( audioClip.getMicrosecondPosition() <= REWIND_IN_MICROSECONDS)
{
audioClip.setMicrosecondPosition(0);
isRewinded =false;
}
else
{
audioClip.setMicrosecondPosition(audioClip.getMicrosecondPosition() - REWIND_IN_MICROSECONDS);
isRewinded =false;
}
}
else if (isForwarded)
{
if((audioClip.getMicrosecondLength() - audioClip.getMicrosecondPosition()) >= FORWARD_IN_MICROSECONDS)
{
audioClip.setMicrosecondPosition(audioClip.getMicrosecondPosition() + FORWARD_IN_MICROSECONDS);
isForwarded =false;
}
else
{
audioClip.stop();
isForwarded =false;
}
}
else
{
audioClip.start();
}
}
}
audioClip.close();
}
public void stop()
{
isStopped = true;
}
public void pause()
{
isPaused = true;
}
public void resume()
{
isPaused = false;
}
public void rewind()
{
isRewinded = true;
}
public void forward()
{
isForwarded = true;
}
@Override
public void update(LineEvent event)
{
Type type = event.getType();
if (type == Type.STOP)
{
if (isStopped || !isPaused)
{
playCompleted = true;
}
}
}
}
PlayingTimer
public class PlayingTimer extends Thread
{
private SimpleDateFormat dateFormater = new SimpleDateFormat("HH:mm:ss.SSS");
private TimeZone timeZone = Calendar.getInstance().getTimeZone();
private boolean isRunning = false;
private boolean isPause = false;
private boolean isReset = false;
private boolean isRewinded = false;
private boolean isForwarded = false;
private long startTime;
private long pauseTime;
private long rewindTime;
private long forwardTime;
private JLabel labelRecordTime;
private JSlider slider;
private Clip audioClip;
public void setAudioClip(Clip audioClip)
{
this.audioClip = audioClip;
}
public PlayingTimer(JLabel labelRecordTime, JSlider slider)
{
this.labelRecordTime = labelRecordTime;
this.slider = slider;
dateFormater.setTimeZone(TimeZone.getTimeZone(timeZone.getDisplayName(false, TimeZone.SHORT)));
}
public void run()
{
isRunning = true;
startTime = System.currentTimeMillis();
while (isRunning)
{
try
{
Thread.sleep(30);
if (!isPause)
{
if (audioClip != null && audioClip.isRunning())
{
long currentMicros = audioClip.getMicrosecondPosition();
// Compute the progress as a value between 0.0 and 1.0
double progress =
(double)currentMicros / audioClip.getMicrosecondLength();
// Compute the slider value to indicate the progress
final int sliderValue = (int)(progress * slider.getMaximum());
// Update the slider with the new value, on the Event Dispatch Thread
SwingUtilities.invokeLater(new Runnable()
{
@Override
public void run()
{
labelRecordTime.setText(toTimeString());
slider.setValue(sliderValue);
}
});
}
}
else
{
pauseTime += 30;
}
}
catch (InterruptedException ex)
{
if (isReset)
{
slider.setValue(0);
labelRecordTime.setText("00:00:00.000");
isRunning = false;
break;
}
if (isRewinded)
{
if( audioClip.getMicrosecondPosition() <= AudioPlayer.REWIND_IN_MICROSECONDS)
{
//go back to start
rewindTime += audioClip.getMicrosecondPosition() / 1_000;
}
else
{
rewindTime += 3000;
}
isRewinded =false;
}
if (isForwarded)
{
if((audioClip.getMicrosecondLength()- audioClip.getMicrosecondPosition()) <= AudioPlayer.FORWARD_IN_MICROSECONDS)
{
forwardTime -= (audioClip.getMicrosecondLength()- audioClip.getMicrosecondPosition())/1_000;
}
else
{
forwardTime -= 3000;
}
isForwarded=false;
}
}
}
}
public void reset()
{
isReset = true;
isRunning = false;
}
public void rewind()
{
isRewinded = true;
}
public void forward()
{
isForwarded = true;
}
public void pauseTimer()
{
isPause = true;
}
public void resumeTimer()
{
isPause = false;
}
private String toTimeString()
{
long now = System.currentTimeMillis();
Date resultdate = new Date(now - startTime - pauseTime - rewindTime - forwardTime);
return dateFormater.format(resultdate);
}
}
PlayerView
public class PlayerView extends JPanel implements ActionListener
{
private static final int BUTTON_HEIGTH =60;
private static final int BUTTON_WIDTH =120;
private AudioPlayer player = new AudioPlayer();
private Thread playbackThread;
private PlayingTimer timer;
private boolean isPlaying = false;
private boolean isPause = false;
private String audioFilePath;
private String lastOpenPath;
private JLabel labelFileName;
private JLabel labelTimeCounter;
private JLabel labelDuration;
private JButton buttonOpen;
private JButton buttonPlay;
private JButton buttonPause;
private JButton buttonRewind;
private JButton buttonForward;
private JSlider sliderTime;
private Dimension buttonDimension = new Dimension(BUTTON_WIDTH,BUTTON_HEIGTH);
public PlayerView()
{
setLayout(new BorderLayout());
labelFileName = new JLabel("File Loaded:");
labelTimeCounter = new JLabel("00:00:00.000");
labelDuration = new JLabel("00:00:00.000");
sliderTime = new JSlider(0, 1000, 0);
sliderTime.setValue(0);
sliderTime.setEnabled(false);
buttonOpen = new JButton("Open");
buttonOpen.setPreferredSize(buttonDimension);
buttonOpen.addActionListener(this);
buttonPlay = new JButton("Play");
buttonPlay.setEnabled(false);
buttonPlay.setPreferredSize(buttonDimension);
buttonPlay.addActionListener(this);
buttonPause = new JButton("Pause");
buttonPause.setEnabled(false);
buttonPause.setPreferredSize(buttonDimension);
buttonPause.addActionListener(this);
buttonRewind = new JButton("Rewind");
buttonRewind.setEnabled(false);
buttonRewind.setPreferredSize(buttonDimension);
buttonRewind.addActionListener(this);
buttonForward= new JButton("Forward");
buttonForward.setEnabled(false);
buttonForward.setPreferredSize(buttonDimension);
buttonForward.addActionListener(this);
init();
}
public void enableButtonPlay()
{
buttonPlay.setEnabled(true);
}
@Override
public void actionPerformed(ActionEvent event)
{
Object source = event.getSource();
if (source instanceof JButton)
{
JButton button = (JButton) source;
if (button == buttonOpen)
{
openFile();
}
else if (button == buttonPlay)
{
if (!isPlaying)
{
playBack();
}
else
{
stopPlaying();
}
}
else if (button == buttonPause)
{
if (!isPause)
{
pausePlaying();
}
else
{
resumePlaying();
}
}
else if (button == buttonRewind)
{
if (!isPause)
{
rewind();
}
}
else if (button == buttonForward)
{
if (!isPause)
{
forward();
}
}
}
}
public void openFile(String path)
{
audioFilePath = path ;
if (isPlaying || isPause)
{
stopPlaying();
while (player.getAudioClip().isRunning())
{
try
{
Thread.sleep(100);
}
catch (InterruptedException ex)
{
ex.printStackTrace();
}
}
}
playBack();
}
private void openFile()
{
JFileChooser fileChooser = null;
if (lastOpenPath != null && !lastOpenPath.equals(""))
{
fileChooser = new JFileChooser(lastOpenPath);
}
else
{
fileChooser = new JFileChooser();
}
FileFilter wavFilter = new FileFilter()
{
@Override
public String getDescription()
{
return "Sound file (*.WAV)";
}
@Override
public boolean accept(File file)
{
if (file.isDirectory())
{
return true;
}
else
{
return file.getName().toLowerCase().endsWith(".wav");
}
}
};
fileChooser.setFileFilter(wavFilter);
fileChooser.setDialogTitle("Open Audio File");
fileChooser.setAcceptAllFileFilterUsed(false);
int userChoice = fileChooser.showOpenDialog(this);
if (userChoice == JFileChooser.APPROVE_OPTION)
{
audioFilePath = fileChooser.getSelectedFile().getAbsolutePath();
lastOpenPath = fileChooser.getSelectedFile().getParent();
if (isPlaying || isPause)
{
stopPlaying();
while (player.getAudioClip().isRunning())
{
try
{
Thread.sleep(100);
}
catch (InterruptedException ex)
{
ex.printStackTrace();
}
}
}
playBack();
}
}
private void playBack()
{
timer = new PlayingTimer(labelTimeCounter, sliderTime);
timer.start();
isPlaying = true;
playbackThread = new Thread(new Runnable()
{
@Override
public void run()
{
try
{
buttonPlay.setText("Stop");
buttonPlay.setEnabled(true);
buttonRewind.setEnabled(true);
buttonForward.setEnabled(true);
buttonPause.setText("Pause");
buttonPause.setEnabled(true);
player.load(audioFilePath);
timer.setAudioClip(player.getAudioClip());
labelFileName.setText("Playing File: " + ((File)new File(audioFilePath)).getName());
sliderTime.setMinimum(0);
sliderTime.setMaximum((int)player.getClipMicroSecondLength());
labelDuration.setText(player.getClipLengthString());
player.play();
labelFileName.setText("File Loaded: " + ((File)new File(audioFilePath)).getName());
resetControls();
}
catch (UnsupportedAudioFileException ex)
{
JOptionPane.showMessageDialog(
PlayerView.this,
"The audio format is unsupported!",
"Error",
JOptionPane.ERROR_MESSAGE);
resetControls();
}
catch (LineUnavailableException ex)
{
JOptionPane.showMessageDialog(
PlayerView.this,
"Could not play the audio file because line is unavailable!",
"Error",
JOptionPane.ERROR_MESSAGE);
resetControls();
}
catch (IOException ex)
{
JOptionPane.showMessageDialog(
PlayerView.this,
"I/O error while playing the audio file!",
"Error",
JOptionPane.ERROR_MESSAGE);
resetControls();
}
}
});
playbackThread.start();
}
private void stopPlaying()
{
isPause = false;
buttonPause.setText(" Pause ");
buttonPause.setEnabled(false);
buttonRewind.setEnabled(false);
buttonForward.setEnabled(false);
timer.reset();
timer.interrupt();
player.stop();
playbackThread.interrupt();
}
private void pausePlaying()
{
labelFileName.setText("File Loaded: " + ((File)new File(audioFilePath)).getName());
buttonRewind.setEnabled(false);
buttonForward.setEnabled(false);
buttonPause.setText("Resume");
isPause = true;
player.pause();
timer.pauseTimer();
playbackThread.interrupt();
}
private void resumePlaying()
{
labelFileName.setText("Playing File: " + ((File)new File(audioFilePath)).getName());
buttonPause.setText(" Pause ");
buttonRewind.setEnabled(true);
buttonForward.setEnabled(true);
isPause = false;
player.resume();
timer.resumeTimer();
playbackThread.interrupt();
}
private void rewind()
{
player.rewind();
timer.rewind();
timer.interrupt();
playbackThread.interrupt();
}
private void forward()
{
player.forward();
timer.forward();
timer.interrupt();
playbackThread.interrupt();
}
private void resetControls()
{
timer.reset();
timer.interrupt();
isPlaying = false;
buttonPlay.setText("Play");
buttonPause.setEnabled(false);
buttonRewind.setEnabled(false);
buttonForward.setEnabled(false);
}
private void init()
{
add(labelFileName, BorderLayout.NORTH);
add(labelTimeCounter, BorderLayout.WEST);
add(labelDuration, BorderLayout.EAST);
add(sliderTime, BorderLayout.CENTER);
JPanel buttonContainer =new JPanel();
add(buttonContainer, BorderLayout.SOUTH);
buttonContainer.add(buttonOpen);
buttonContainer.add(buttonPlay);
buttonContainer.add(buttonPause);
buttonContainer.add(buttonRewind);
buttonContainer.add(buttonForward);
}
}
Okay, so, the issue with Clip. Here is an MCVE that, from the way you've described the problem, may reproduce it:
class TestFramePosition {
public static void main(String[] a) throws Exception {
File file = new File(a.length > 0 ? a[0] : "path/to/file.extension");
AudioInputStream ais = AudioSystem.getAudioInputStream(file);
final Clip clip = AudioSystem.getClip();
clip.open(ais);
clip.start();
new Thread(new Runnable() {
@Override
public void run() {
while(clip.isRunning()) {
try {
System.out.println(clip.getMicrosecondPosition());
Thread.sleep(1000 / 10);
} catch(InterruptedException ignored) {}
}
}
}).start();
System.in.read();
System.exit(0);
}
}
I was unable to reproduce it on OSX 10.6.8 and Windows XP, but you may run that code to see if it does on your particular platform.
So, the issue here is that, as I said in comments, since sound playback is dependent on platform-specific stuff, classes like Clip will have varied implementations. These will behave slightly differently.
For example, I found that when a Clip is done playing, the Clip on my Mac computer (a com.sun.media.sound.MixerClip) returns 0 for the position, while the Clip on my Windows computer (a com.sun.media.sound.DirectAudioDevice$DirectClip) returns the maximum value for the position. Just another small example of implementations being programmed differently.
The issue is that the contract for these methods is defined a little vaguely; specifically, the position is defined as 'the number of sample frames captured by, or rendered from, the line since it was opened'. This means it may not accurately represent the playback position; rather, it reflects the amount of data read or written.
I did spend a while yesterday perusing JDK source code, but I was unable to find anything that would point towards the behavior you are seeing.
Anyway, what it comes down to is whether you are OK with slightly anomalous behavioral differences from platform to platform. What you are seeing may be a bug, and if the above MCVE reproduces it, you may report it; however, I would not personally expect it to get fixed in any timely manner, because this is a section of the JDK that does not get a lot of attention. Also, it is gradually being superseded by JavaFX.
Some other things:
You are sharing state between threads without synchronization. This leads to memory consistency errors. You should read the concurrency tutorials, specifically the section on synchronization.
You should always cap the frame rate when working with Swing. Swing will not paint at 1000 FPS; it will merge repaints aggressively. Updating the slider at this rate was just flooding the EDT.
You may want to use a SourceDataLine instead, because it gives you much greater control over the buffering behavior. The downside is that you have to basically reimplement the functionality of Clip.
Here is an MCVE demonstrating a playback loop to power a JSlider.
This example doesn't demonstrate seeking. Also, since AudioInputStream does not generally support mark operations, seeking backwards is a bit of a hassle. A backwards seek process is (a rough sketch follows the steps):
Stop the current playback and discard it.
Create a new AudioInputStream and seek forwards.
Start the new playback.
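A rough sketch of that process (file and targetFrame are illustrative names, not from the MCVE below):
// reopen the stream and skip forward to an absolute frame position;
// AudioInputStream.skip takes a byte count, so convert frames to bytes first
AudioInputStream fresh = AudioSystem.getAudioInputStream(file);
long bytesToSkip = targetFrame * fresh.getFormat().getFrameSize();
long skipped = 0;
while (skipped < bytesToSkip) {
    long n = fresh.skip(bytesToSkip - skipped);
    if (n <= 0) break; // skip may skip fewer bytes than requested
    skipped += n;
}
// then hand 'fresh' to a new playback loop in place of the old stream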
Also, if you are planning to use the JSlider to seek, you will probably run into an issue where calling setValue on a JSlider causes it to fire a ChangeEvent, so you can't update the slider's value programmatically and also listen to it without some rejiggering. This is really a question of its own, so if you experience this problem I recommend you ask a new question.
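One common workaround, sketched here only to illustrate the point (the updatingSlider flag is made up, imports from javax.swing.event are assumed, and everything runs on the EDT): ignore the ChangeEvents you triggered yourself.
private boolean updatingSlider = false; // true while playback code moves the slider
void setSliderFromPlayback(int value) {
    updatingSlider = true;
    slider.setValue(value);
    updatingSlider = false;
}
// the listener then reacts only to user-driven changes
slider.addChangeListener(new ChangeListener() {
    @Override
    public void stateChanged(ChangeEvent e) {
        if (updatingSlider || slider.getValueIsAdjusting()) {
            return;
        }
        // the user moved the slider: map its value back to a frame/microsecond position and seek
    }
});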
import javax.sound.sampled.*;
import javax.swing.*;
import java.awt.event.*;
import java.awt.Dimension;
import java.awt.BorderLayout;
import java.io.File;
import java.io.IOException;
public class PlaybackSlider implements Runnable, ActionListener {
public static void main(String[] args) {
SwingUtilities.invokeLater(new PlaybackSlider());
}
JButton open;
JButton play;
JSlider slider;
JLabel label;
File file;
PlaybackLoop player;
@Override
public void run() {
JFrame frame = new JFrame("Playback Slider");
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
JPanel content = new JPanel(new BorderLayout()) {
@Override
public Dimension getPreferredSize() {
Dimension pref = super.getPreferredSize();
pref.width = 480;
return pref;
}
};
slider = new JSlider(JSlider.HORIZONTAL, 0, 1000, 0);
content.add(slider, BorderLayout.CENTER);
JToolBar bar = new JToolBar(JToolBar.HORIZONTAL);
bar.setFloatable(false);
content.add(bar, BorderLayout.SOUTH);
open = new JButton("Open");
play = new JButton("Play");
open.addActionListener(this);
play.addActionListener(this);
label = new JLabel("");
bar.add(open);
bar.add(new JLabel(" "));
bar.add(play);
bar.add(new JLabel(" "));
bar.add(label);
frame.setContentPane(content);
frame.pack();
frame.setResizable(false);
frame.setLocationRelativeTo(null);
frame.setVisible(true);
}
@Override
public void actionPerformed(ActionEvent ae) {
Object source = ae.getSource();
if(source == open) {
File f = getFile();
if(f != null) {
file = f;
label.setText(file.getName());
stop();
}
}
if(source == play) {
if(file != null) {
if(player != null) {
stop();
} else {
start();
}
}
}
}
File getFile() {
JFileChooser diag = new JFileChooser();
int choice = diag.showOpenDialog(null);
if(choice == JFileChooser.APPROVE_OPTION) {
return diag.getSelectedFile();
} else {
return null;
}
}
void start() {
try {
player = new PlaybackLoop(file);
new Thread(player).start();
play.setText("Stop");
} catch(Exception e) {
player = null;
showError("the file couldn't be played", e);
}
}
void stop() {
if(player != null) {
player.stop();
}
}
void showError(String msg, Throwable cause) {
JOptionPane.showMessageDialog(null,
"There was an error because " + msg +
(cause == null ? "." : "\n(" + cause + ").")
);
}
class PlaybackLoop implements Runnable {
AudioInputStream in;
SourceDataLine line;
AudioFormat fmt;
int bufferSize;
boolean stopped;
PlaybackLoop(File file) throws Exception {
try {
in = AudioSystem.getAudioInputStream(file);
fmt = in.getFormat();
bufferSize = (int)(fmt.getFrameSize() * (fmt.getSampleRate() / 15));
line = AudioSystem.getSourceDataLine(fmt);
line.open(fmt, bufferSize);
} catch(Exception e) {
if(in != null)
in.close();
if(line != null)
line.close();
throw e;
}
}
void stop() {
synchronized(this) {
this.stopped = true;
}
}
@Override
public void run() {
line.start();
byte[] buf = new byte[bufferSize];
try {
try {
int b;
long elapsed = 0;
long total = in.getFrameLength();
for(;;) {
synchronized(this) {
if(stopped) {
break;
}
}
b = in.read(buf, 0, buf.length);
if(b < 0) {
break;
}
elapsed += b / fmt.getFrameSize();
updateSlider(elapsed, total);
line.write(buf, 0, b);
}
} finally {
line.close();
in.close();
}
} catch(IOException e) {
e.printStackTrace(System.err);
showError("there was a problem during playback", e);
}
endOnEDT();
}
void updateSlider(double elapsed, double total) {
final double amt = elapsed / total;
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
slider.setValue((int)Math.round(slider.getMaximum() * amt));
}
});
}
void endOnEDT() {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
player = null;
slider.setValue(0);
play.setText("Play");
}
});
}
}
}
I assume you want to use the JSlider as a progress bar, and that at the moment you set the max value, the current position happens to be at the end of the audioClip. (Are you dealing with Clip or AudioClip? AudioClip doesn't have a way to read its position AFAIK.) If you are using Clip, it would be safer to set the max with audioClip.getMicrosecondLength().
Since the audio has to play on a different thread than the one where the JSlider is being updated, I'd recommend making your audioClip a volatile variable. That might help with cross-thread weirdness that sometimes occurs.
Thread.sleep(1) at best can only update every millisecond. On some systems (older Windows) the method's reliance on the system clock means the actual updates are as slow as 16 millis apart. But updating the JSlider at more than 60 fps is probably moot. Screen monitors often are set to 60Hz, and there's only so much the human eye can take in.
Also there is only so much the ear can discern in terms of timing. For example, it is hard to tell if two percussive events happen at the same time if there is less than a couple milliseconds difference.
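Putting those suggestions together, a minimal sketch reusing the question's field names (and assuming the clip is already loaded):
private volatile Clip audioClip; // written by the playback thread, read by the Swing timer
...
sliderTime.setMinimum(0);
// the microsecond length divided down to milliseconds fits in an int for clips of ordinary length
sliderTime.setMaximum((int) (audioClip.getMicrosecondLength() / 1000));
new javax.swing.Timer(33, new ActionListener() { // ~30 updates per second is plenty
    @Override
    public void actionPerformed(ActionEvent e) {
        sliderTime.setValue((int) (audioClip.getMicrosecondPosition() / 1000));
    }
}).start();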
There are several issues with your code.
As Phil Freihofner pointed out, the sleep(1) and the treatment of the isRunning and isPause fields look highly dubious. To some extent, this is unrelated to your actual question, but worth noting here, because it may also cause problems later.
Regardless of that, the approach that Zoran Regvart showed is basically the way to go. The code in the given form may have suffered from some rounding issues. However, the general idea for cases like this is always the same:
You have a source interval [minA...maxA]
You have a target interval [minB...maxB]
You want a mapping between the two
In this case, it's a good practice to normalize the intervals. That is, to map the value from the source interval to a value between 0.0 and 1.0, and then map this normalized value to the target interval.
In the most generic form, this can be written as
long minA = ...
long maxA = ...
long a = ... // The current value in the source interval
int minB = ...
int maxB = ...
int b; // The value to compute in the target interval
// Map the first value to a value between 0.0 and 1.0
double normalized = (double)(a - minA)/(maxA-minA);
b = (int)(minB + normalized * (maxB - minB));
Fortunately, your "min" values are all zero here, so it's a bit simpler. Here is an MCVE (with some dummy classes). The most relevant part is the updateSlider method at the bottom.
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JSlider;
import javax.swing.SwingUtilities;
public class SliderMappingTest
{
public static void main(String[] args)
{
SwingUtilities.invokeLater(new Runnable()
{
@Override
public void run()
{
createAndShowGUI();
}
});
}
private static void createAndShowGUI()
{
JFrame f = new JFrame();
f.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
final JButton startButton = new JButton("Start");
final JSlider progressSlider = new JSlider(0, 1000, 0);
startButton.addActionListener(new ActionListener()
{
@Override
public void actionPerformed(ActionEvent e)
{
startButton.setEnabled(false);
SliderMappingDummyAudioClip audioClip =
new SliderMappingDummyAudioClip();
SliderMappingDummyPlayer player =
new SliderMappingDummyPlayer(progressSlider, audioClip);
player.start();
}
});
f.getContentPane().setLayout(new GridLayout());
f.getContentPane().add(startButton);
f.getContentPane().add(progressSlider);
f.pack();
f.setLocationRelativeTo(null);
f.setVisible(true);
}
}
class SliderMappingDummyAudioClip
{
private long startMicros;
void start()
{
startMicros = System.nanoTime() / 1000L;
}
long getMicrosecondLength()
{
// 10 seconds
return 10L * 1000L * 1000L;
}
long getMicrosecondPosition()
{
return (System.nanoTime() / 1000L) - startMicros;
}
public boolean isRunning()
{
return getMicrosecondPosition() <= getMicrosecondLength();
}
}
class SliderMappingDummyPlayer
{
private final SliderMappingDummyAudioClip audioClip;
private final JSlider slider;
SliderMappingDummyPlayer(
JSlider slider,
SliderMappingDummyAudioClip audioClip)
{
this.slider = slider;
this.audioClip = audioClip;
}
void start()
{
Thread t = new Thread(new Runnable()
{
@Override
public void run()
{
doRun();
}
});
t.setDaemon(true);
t.start();
}
private void doRun()
{
audioClip.start();
while (audioClip.isRunning())
{
updateSlider();
try
{
Thread.sleep(30);
}
catch (InterruptedException ex)
{
Thread.currentThread().interrupt();
return;
}
}
}
private void updateSlider()
{
long currentMicros = audioClip.getMicrosecondPosition();
// Compute the progress as a value between 0.0 and 1.0
double progress =
(double)currentMicros / audioClip.getMicrosecondLength();
// Compute the slider value to indicate the progress
final int sliderValue = (int)(progress * slider.getMaximum());
System.out.println("update "+progress);
// Update the slider with the new value, on the Event Dispatch Thread
SwingUtilities.invokeLater(new Runnable()
{
@Override
public void run()
{
slider.setValue(sliderValue);
}
});
}
}
Are you sure you want to set the maximum to the current position?
How about mapping longs to ints by division:
long coefficient = Math.max(1, clip.getMicrosecondLength() / Integer.MAX_VALUE); // avoid a zero divisor for clips shorter than ~36 minutes
slider.setMinimum(0);
slider.setMaximum((int) (clip.getMicrosecondLength() / coefficient));
...
slider.setValue((int) (clip.getMicrosecondPosition() / coefficient));

Huge delay when socketing in java

(UPDATED CODE)
I'm trying to make clients communicate with a server (I've made simple client-server apps before, like a chatroom). The communication works, but there is a huge delay (I send coordinates from the client to the server): over 10 seconds, sometimes even more. What could be the problem?
The client:
public class GameComponent extends Canvas implements Runnable {
private static final long serialVersionUID = 1L;
private static final int WIDTH = 320;
private static final int HEIGHT = 240;
private static final int SCALE = 2;
private boolean running;
private JFrame frame;
Thread thread;
public static final int GRID_W = 16;
public static final int GRID_H = 16;
private Socket socket;
private DataInputStream reader;
private DataOutputStream writer;
private HashMap<Integer, OtherPlayer> oPlayers;
private ArrayList<OtherPlayer> opList;
private int maxID = 1;
private int ID;
Player player;
public GameComponent() {
//GUI code..
oPlayers = new HashMap<Integer, OtherPlayer>(); //Hash map to be able to get players by their ID's
opList = new ArrayList<OtherPlayer>(); //And an array list for easier drawing
setUpNetworking();
start();
}
public void start() {
if (running)
return;
running = true;
thread = new Thread(this);
player = new Player(GRID_W * 2, GRID_H * 2);
thread.start();
}
public void stop() {
if (!running)
return;
running = false;
}
public void run() { //The main loop, ticks 60 times every second
long lastTime = System.nanoTime();
double nsPerTick = 1000000000D / 60D;
int frames = 0;
int ticks = 0;
long lastTimer = System.currentTimeMillis();
double delta = 0;
while (running) {
long now = System.nanoTime();
delta += (now - lastTime) / nsPerTick;
lastTime = now;
boolean shouldRender = true;
while (delta >= 1) {
ticks++;
tick(delta);
delta -= 1;
shouldRender = true;
}
try {
Thread.sleep(2);
} catch (InterruptedException e) {
e.printStackTrace();
}
if (shouldRender) {
frames++;
render();
}
if (System.currentTimeMillis() - lastTimer >= 1000) {
lastTimer += 1000;
frames = 0;
ticks = 0;
}
}
}
private void tick(double delta) { //main logic
player.move();
try {
writer.writeInt(ID); //I send the player data here (id, x, y)
writer.writeInt(player.getX());
writer.writeInt(player.getY());
writer.flush();
} catch (IOException e) {
e.printStackTrace();
}
}
private void render(Graphics2D g2d) {
//rendering the stuff
for (OtherPlayer i : opList) { //drawing a black rectangle for every other player
g2d.fillRect(i.getX(), i.getY(), GRID_W, GRID_H);
}
}
private void render() {
//more rendering...
}
public static void main(String[] args) {
new GameComponent();
}
class TKeyListener implements KeyListener {
//movement methods...
}
private void setUpNetworking() { //This is where I make my message reader and data IO
try {
socket = new Socket("127.0.0.1", 5099);
reader = new DataInputStream(socket.getInputStream());
writer = new DataOutputStream(socket.getOutputStream());
Thread rT = new Thread(new msgReader());
rT.start();
} catch (Exception e) {
e.printStackTrace();
}
}
class msgReader implements Runnable { //where I read messages
public void run() {
try {
ID = reader.readInt(); //when I connect, I get an id from the server
while(true) { //my main loop
int oid = reader.readInt(); //get the read data id
int ox, oy;
ox = reader.readInt(); //get the read player's x and y
oy = reader.readInt();
if (oid != ID){ //If not reading myself
if (oPlayers.containsKey(oid)) { //If a player with this id exists
OtherPlayer op = (OtherPlayer) oPlayers.get(oid);
op.setX(ox); //set it's x, y
op.setY(oy);
} else { //if it doesn't exist, create him
OtherPlayer op = new OtherPlayer(ox, oy);
opList.add(op);
oPlayers.put(oid, op);
}
}
maxID = reader.readInt(); //Allways read the highest current id from server
}
} catch(Exception ex) {
ex.printStackTrace();
}
}
}
}
And the server:
public class ServerBase {
ServerSocket serverSocket;
ArrayList<DataOutputStream> clients;
private int id = 1;
SyncSend ss = new SyncSend();
class ClientHandler implements Runnable {
private Socket soc;
private DataInputStream reader;
private int x;
private int y;
private int id;
private boolean run = true;
public ClientHandler(Socket s) {
soc = s;
try {
reader = new DataInputStream(soc.getInputStream());
} catch (IOException e) {
e.printStackTrace();
}
}
public void run() {
try {
while (run) {
id = reader.readInt();
x = reader.readInt();
y = reader.readInt();
if (id == 2)
System.out.println("x: " + x + " y: " + y);
int[] tmb = {id, x, y};
ss.sendEveryone(tmb);
}
} catch (Exception e) {
run = false;
clients.remove(this);
}
}
}
class SyncSend {
public synchronized void sendEveryone(int[] a) throws SocketException {
ArrayList<DataOutputStream> cl = (ArrayList<DataOutputStream>) clients.clone();
Iterator<DataOutputStream> it = cl.iterator();
while(it.hasNext()){
try {
DataOutputStream writer = (DataOutputStream) it.next();
writer.writeInt(a[0]);
writer.writeInt(a[1]);
writer.writeInt(a[2]);
writer.writeInt(id-1);
writer.flush();
} catch (Exception ex) {
throw new SocketException();
}
}
}
}
public void init() {
clients = new ArrayList<DataOutputStream>();
try {
serverSocket = new ServerSocket(5099);
while(true) {
Socket clientSocket = serverSocket.accept();
DataOutputStream clientWriter = new DataOutputStream(clientSocket.getOutputStream());
clients.add(clientWriter);
clientWriter.writeInt(id);
id++;
Thread t = new Thread(new ClientHandler(clientSocket));
t.start();
}
} catch (Exception e) {
e.printStackTrace();
}
}
public static void main(String[] args) {
new ServerBase().init();
}
}
What causes the delay? I've been searching for the reason for hours now, but with no success.
You most likely need to call flush() on the client-side. Even if this is not your current problem, it is probably a good idea.
Streams may buffer their content, meaning they may not send the data to its destination (whether that be a disk or over the wire to a server) the instant you call write (or writeInt in this case). Instead, they may wait until they get a sufficient amount of data to make the transfer "worth it". If they did not behave in this way, they would end up making lots of inefficient, smaller transfers. The downside to all of this is that you may need to call flush to tell the stream that you are done sending data for a while and that the stream should go ahead and initiate the transfer.
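For illustration, a small sketch of that idea using the question's own fields: wrap the socket stream in a BufferedOutputStream and flush once per complete update, so each position message leaves the JVM as soon as it is fully written.
DataOutputStream writer = new DataOutputStream(
        new BufferedOutputStream(socket.getOutputStream()));
...
writer.writeInt(ID); // one complete update: id, x, y
writer.writeInt(player.getX());
writer.writeInt(player.getY());
writer.flush(); // push the buffered 12 bytes onto the wire now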
Try to put your code into several threads wherever you can and then run the threads. I mean, you don't need to wait on each socket; simply run all of them at the same time... or something like this :)
For example, in port scanners you should use many threads to speed up the scanning...
Be aware that your call to ss.sendEveryone(tmb) is synchronized on the ss object. I am assuming this is a static variable somewhere that holds a reference to all of the clients. This means that if there are several clients sending data at the same time, a lot of calls to sendEveryone will happen all at once and they will all line up in a queue waiting for the others to finish, before those threads can go back and read more data from the client again.
As a diagnostic exercise, you may want to remove this call and see if you still have your problem.

OggStreamer-Thread class (to play music in my game) only works 1/3 of the time

First of all, I love you guys! This is THE best site for finding answers to weird and difficult programming questions. This is the first problem I have not been able to find a solution for on this site, so thanks for that.
So, I have a runnable Game class and a runnable OggStreamer class, so the music can run in its own separate thread; I pass the OggStreamer as a parameter for the Game class to use. When I run the game from my IDE the OggStreamer always works, but when I've exported it to a .jar file, it only works about one time in three when I start it up. And it's not that the first piece of music fails to start and a later one then plays; it doesn't work at all until I've started the game a few times.
Have any of you good people had a similar problem? I could understand it if it didn't work at all, which would indicate something wrong with the file references to the music inside the jar file... but it DOES work, just not consistently.
NOTE: This is my first attempt at game programming, and I know it's not very pretty and that I am a total newbie :) There are many things I'd change about the general design, but I'm using this as a project to help me understand the problems I'll encounter when I sit down to design a real framework for my next game.
Start.class
public static void main(String[] args) throws InterruptedException{
ExecutorService threadExecutor = Executors.newCachedThreadPool();
OggStreamer musicStreamer = new OggStreamer(); // the music streamer shared with the game
Game game = new Game(musicStreamer);
game.init(); // loads stuff
threadExecutor.execute( musicStreamer ); // start task1
threadExecutor.execute( game ); // start task2
threadExecutor.shutdown();
}
OggStreamer.class
public class OggStreamer implements Runnable{
private URL url;
private AudioInputStream stream;
private AudioInputStream decodedStream;
private AudioFormat format;
private AudioFormat decodedFormat;
private boolean stop, running;
String filename = "";
SourceDataLine line = null;
public OggStreamer() {
this.stop = true;
this.running = true;
this.url = null;
}
public void run() {
while(running){
while (!this.stop) {
System.out.println("Playing Loop");
try {
// Get AudioInputStream from given file.
this.stream = AudioSystem.getAudioInputStream(this.url);
this.decodedStream = null;
if (this.stream != null) {
this.format = this.stream.getFormat();
this.decodedFormat = new AudioFormat(
AudioFormat.Encoding.PCM_SIGNED,
this.format.getSampleRate(), 16,
this.format.getChannels(),
this.format.getChannels() * 2,
this.format.getSampleRate(), false);
// Get AudioInputStream that will be decoded by underlying
// VorbisSPI
this.decodedStream = AudioSystem.getAudioInputStream(
this.decodedFormat, this.stream);
}else{
JOptionPane.showMessageDialog(null, "Stream = null!");
}
} catch (Exception e) {
// Do nothing
System.out.println("Could not get or decode audiostream");
}
line = null;
try {
line = this.getSourceDataLine(this.decodedFormat);
FloatControl volume = (FloatControl)line.getControl(FloatControl.Type.MASTER_GAIN);
volume.setValue(1);
} catch (LineUnavailableException lue) {
// Do nothing
JOptionPane.showMessageDialog(null, "Line is unavailable!");
}
if (line != null) {
try {
byte[] data = new byte[4096];
// Start
line.start();
int nBytesRead = 0;
while (nBytesRead != -1) {
nBytesRead = this.decodedStream.read(data, 0,
data.length);
if (nBytesRead != -1) {
line.write(data, 0, nBytesRead);
}
if (this.stop) {
break;
}
}
// Stop
line.drain();
line.stop();
line.close();
} catch (IOException io) {
// Do nothing
JOptionPane.showMessageDialog(null, "Line cannot start!");
}
}
}
}
}
private SourceDataLine getSourceDataLine(AudioFormat audioFormat)
throws LineUnavailableException {
SourceDataLine res = null;
DataLine.Info info = new DataLine.Info(SourceDataLine.class,
audioFormat);
res = (SourceDataLine) AudioSystem.getLine(info);
res.open(audioFormat);
return res;
}
public void startLoop(String filenameString) {
this.filename = filenameString;
System.out.println("Starting loop with: "+filenameString);
this.url = this.getClass().getResource(filenameString);
this.stop = false;
}
public void stopLoop() {
System.out.println("Stopping loop");
try {
if(this.decodedStream!=null)this.decodedStream.close();
if(this.stream!=null)this.stream.close();
} catch (IOException e) {
}
this.stop = true;
this.url = null;
}
public boolean isStop() {
return stop;
}
public void setStop(boolean stop) {
this.stop = stop;
}
public URL getUrl() {
return url;
}
public void setUrl(String string) {
this.filename = string;
this.url = this.getClass().getResource(string);
}
public String getFilename() {
return filename;
}
}
The Game class uses the OggStreamer class like this:
if(musicStreamer!=null && !musicStreamer.getFilename().equals("/snm/sound/oggs/Prelude.ogg")){
if(!musicStreamer.isStop())musicStreamer.stopLoop();
musicStreamer.startLoop("/snm/sound/oggs/Prelude.ogg");
}
