How to play .wav files in java without lag for games - java

I have a game in which I want to play audio. I want to be able to declare a ClipPlayer object with a path name as its argument; the constructor should load the sound, and then a single call on that object should play it without lag. This is the code I am currently using for sound:
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;
import javax.sound.sampled.UnsupportedAudioFileException;
public class ClipPlayer {
AudioInputStream in;
AudioFormat decodedFormat;
AudioInputStream din;
AudioFormat baseFormat;
SourceDataLine line;
private boolean loop;
private BufferedInputStream stream;
// private ByteArrayInputStream stream;
/**
* recreate the stream
*
*/
public void reset() {
try {
stream.reset();
in = AudioSystem.getAudioInputStream(stream);
din = AudioSystem.getAudioInputStream(decodedFormat, in);
line = getLine(decodedFormat);
} catch (Exception e) {
e.printStackTrace();
}
}
public void close() {
try {
line.close();
din.close();
in.close();
} catch (IOException e) {
}
}
ClipPlayer(String filename, boolean loop) {
this(filename);
this.loop = loop;
}
ClipPlayer(String filename) {
this.loop = false;
try {
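// Note: Object.class.getResourceAsStream() resolves names relative to java.lang,
// so this only works with an absolute classpath path (e.g. "/sounds/shot.wav").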
InputStream raw = Object.class.getResourceAsStream(filename);
stream = new BufferedInputStream(raw);
// ByteArrayOutputStream out = new ByteArrayOutputStream();
// byte[] buffer = new byte[1024];
// int read = raw.read(buffer);
// while( read > 0 ) {
// out.write(buffer, 0, read);
// read = raw.read(buffer);
// }
// stream = new ByteArrayInputStream(out.toByteArray());
in = AudioSystem.getAudioInputStream(stream);
din = null;
if (in != null) {
baseFormat = in.getFormat();
decodedFormat = new AudioFormat(
AudioFormat.Encoding.PCM_SIGNED, baseFormat
.getSampleRate(), 16, baseFormat.getChannels(),
baseFormat.getChannels() * 2, baseFormat
.getSampleRate(), false);
din = AudioSystem.getAudioInputStream(decodedFormat, in);
line = getLine(decodedFormat);
}
} catch (UnsupportedAudioFileException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} catch (LineUnavailableException e) {
e.printStackTrace();
}
}
private SourceDataLine getLine(AudioFormat audioFormat)
throws LineUnavailableException {
SourceDataLine res = null;
DataLine.Info info = new DataLine.Info(SourceDataLine.class,
audioFormat);
res = (SourceDataLine) AudioSystem.getLine(info);
res.open(audioFormat);
return res;
}
public void play() {
try {
boolean firstTime = true;
while (firstTime || loop) {
firstTime = false;
byte[] data = new byte[4096];
if (line != null) {
line.start();
int nBytesRead = 0;
while (nBytesRead != -1) {
nBytesRead = din.read(data, 0, data.length);
if (nBytesRead != -1)
line.write(data, 0, nBytesRead);
}
line.drain();
line.stop();
line.close();
reset();
}
}
} catch (IOException e) {
e.printStackTrace();
}
}
}

Declare, load and cache the sounds as class attributes. Then when it is time to hear them, play them in a Clip.
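A minimal sketch of that approach (the class name and resource-path handling are placeholders, not part of the question's code): open each Clip once up front, then rewind and restart it whenever the game needs the sound, so no decoding or disk I/O happens at play time.
import java.io.BufferedInputStream;
import java.io.InputStream;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;

public class CachedSound {
    private Clip clip;

    // Load and open the clip once, e.g. while the game is loading.
    public CachedSound(String resourcePath) throws Exception {
        InputStream raw = CachedSound.class.getResourceAsStream(resourcePath);
        try (AudioInputStream ais =
                 AudioSystem.getAudioInputStream(new BufferedInputStream(raw))) {
            clip = AudioSystem.getClip();
            clip.open(ais); // the whole sound now lives in memory
        }
    }

    // Cheap enough to call from the game loop.
    public void play() {
        if (clip.isRunning()) {
            clip.stop();
        }
        clip.setFramePosition(0); // rewind
        clip.start();             // returns immediately; playback is asynchronous
    }
}
One instance per sound effect, created once and reused, is the point; opening lines and decoding inside play() is what introduces the delay.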

Related

How do I stop audio lag in java?

I am making a Skype-like program. I have a "VoiceChat" class to handle the voice chat connections, a "ClientAudio" class to handle the client audio output, and a "ClientAudioRec" class to handle receiving audio from the server and playing it. In the "ClientAudio" class I store the audio data in a buffer until it is full, to try to reduce lag, and then send it to the server. With two people in the call it lags unless the buffer is at 1000. When I add another person to the call it lags so much that you can't understand anyone. But I don't want to enlarge the buffer every time someone else joins, because 1000 already adds roughly a two-second delay. If anyone knows how to reduce the lag, or make it almost instant, that would really help. Thank you in advance!
VoiceChat (Server Side)
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.Socket;
public class VoiceChat extends Thread {
private Socket s;
private ObjectOutputStream out;
private ObjectInputStream in;
private Thread acceptThread;
private int VoicePort;
private int num;
public VoiceChat(Socket sVoice, int VoicePort, Thread acceptThread) {
s = sVoice;
this.VoicePort = VoicePort;
this.acceptThread = acceptThread;
num = chat.threads.indexOf(this.acceptThread)+1;
try {
out = new ObjectOutputStream(s.getOutputStream());
in = new ObjectInputStream(s.getInputStream());
if(VoicePort <= 65511) {
chat.users1_Voice.add(out);
}else {
chat.users2.add(out);
}
} catch (IOException e) {
System.out.println("ERROR2");
e.printStackTrace();
}
}
public void run() {
int bytesRead = 0;
byte[] inBytes = new byte[100];
while(bytesRead != -1) {
try {
bytesRead = in.read(inBytes, 0, inBytes.length);
}catch (IOException e) {
System.out.println("ERROR2");
if(VoicePort <= 65511) {
chat.users1_Voice.remove(out);
}else {
chat.users2.remove(out);
}
}
if(bytesRead > 0) {
System.out.println(chat.users1_Voice);
send(inBytes, bytesRead, out);
System.out.println("SENDING DATA");
}
}
}
public void send(byte[] soundData, int bytesRead, ObjectOutputStream out) {
if(VoicePort <= 65511) {
for(ObjectOutputStream o : chat.users1_Voice) {
try {
if(o != out) {
o.write(soundData, 0, bytesRead);
o.flush();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}else {
for(ObjectOutputStream o : chat.users2) {
try {
if(o != out) {
o.write(soundData, 0, bytesRead);
o.flush();
}
} catch (IOException e) {
}
}
}
}
}
ClientAudio
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.Socket;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.TargetDataLine;
public class ClientAudio implements Runnable {
private ObjectOutputStream out = null;
private AudioFormat af;
private TargetDataLine microphone;
ClientAudio(AudioFormat af) {
this.af = af;
}
public void run() {
try {
client.voiceSocket = new Socket("***IP***", client.VoicePort);
Thread car = new Thread(new ClientAudioRec(af));
car.start();
out = new ObjectOutputStream(client.voiceSocket.getOutputStream());
DataLine.Info info = new DataLine.Info(TargetDataLine.class, af);
microphone = (TargetDataLine)AudioSystem.getLine(info);
microphone.open(af);
microphone.start();
int bytesRead = 0;
int offset = 0;
byte[] soundData = new byte[1000];
while(bytesRead != -1 && client.callRunning == true) {
bytesRead = microphone.read(soundData, 0, soundData.length);
if(bytesRead >= 0) {
offset += bytesRead;
if(offset == soundData.length) {
out.write(soundData, 0, bytesRead);
offset = 0;
}
}
}
microphone.close();
try {
client.voiceSocket.close();
} catch (IOException e1) {
e1.printStackTrace();
}
System.out.println("Client Audio Stopped Because Client Disconnected From Call");
} catch (IOException | LineUnavailableException e) {
microphone.close();
try {
client.voiceSocket.close();
} catch (IOException e1) {
System.out.println("Plug Microphone In...");
}
System.out.println("Client Audio Stopped...");
}
}
}
ClientAudioRec
import java.io.IOException;
import java.io.ObjectInputStream;
import java.net.Socket;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;
public class ClientAudioRec implements Runnable {
private SourceDataLine inSpeaker = null;
private AudioFormat audioformat;
private ObjectInputStream in;
ClientAudioRec(AudioFormat af) {
audioformat = af;
try {
in = new ObjectInputStream(client.voiceSocket.getInputStream());
} catch (IOException e) {
e.printStackTrace();
}
}
public void run() {
DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioformat);
try {
inSpeaker = (SourceDataLine)AudioSystem.getLine(info);
inSpeaker.open(audioformat);
} catch (LineUnavailableException e1) {
System.out.println("ERROR");
e1.printStackTrace();
}
int bytesRead = 0;
byte[] inSound = new byte[100000];
inSpeaker.start();
while(bytesRead != -1 && client.callRunning == true) {
try{bytesRead = in.read(inSound, 0, inSound.length);} catch (Exception e){
inSpeaker.close();
System.out.println("Speaker Closed");
}
if(bytesRead >= 0) {
inSpeaker.write(inSound, 0, bytesRead);
}
}
inSpeaker.close();
System.out.println("Speaker Closed Because Client Disconneted From Call");
}
}
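One knob that directly affects latency with javax.sound.sampled, independent of the network code above: both TargetDataLine and SourceDataLine can be opened with an explicit internal buffer size. A minimal loopback sketch, with the format and buffer size chosen only for illustration:
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;
import javax.sound.sampled.TargetDataLine;

public class LowLatencyLines {
    public static void main(String[] args) throws LineUnavailableException {
        AudioFormat af = new AudioFormat(16000.0f, 16, 1, true, false);

        // roughly 1/8 second of audio: frameSize * frameRate / 8 (illustrative value)
        int bufferBytes = (int) (af.getFrameSize() * af.getFrameRate() / 8);

        TargetDataLine mic = AudioSystem.getTargetDataLine(af);
        mic.open(af, bufferBytes);       // capture line with a small internal buffer

        SourceDataLine speaker = AudioSystem.getSourceDataLine(af);
        speaker.open(af, bufferBytes);   // playback line with a matching buffer

        mic.start();
        speaker.start();

        // Read in chunks smaller than the line buffer so neither side blocks for long.
        byte[] chunk = new byte[bufferBytes / 4];
        int n;
        while ((n = mic.read(chunk, 0, chunk.length)) > 0) {
            speaker.write(chunk, 0, n);  // loopback here; would be the network send/receive instead
        }
    }
}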

Send audio over UDP

I was looking for a tutorial on sending microphone data, but I haven't found one.
I read Oracle's tutorial on opening lines and I am able to record the audio to a ByteArrayOutputStream, but now I have two problems.
First: how do I play the recorded audio?
Second: if I am recording it to a ByteArrayOutputStream, how would I send it dynamically?
I suppose I would send the data array, but would it be too processor-intensive to write to a ByteArrayOutputStream every time I receive it, or could I do it differently?
Current code:
import java.io.ByteArrayOutputStream;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.TargetDataLine;
public class MicrophoneRecorder {
static boolean stopped = false;
public static void main(String[] args) {
AudioFormat format = new AudioFormat(8000.0f, 16, 1, true, true);
TargetDataLine line = null;
DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
if (!AudioSystem.isLineSupported(info)) {
System.out.println("Not supported!");
}
try {
line = (TargetDataLine) AudioSystem.getLine(info);
line.open(format);
} catch (LineUnavailableException ex) {
ex.printStackTrace();
}
ByteArrayOutputStream out = new ByteArrayOutputStream();
int numBytesRead = 0;
byte[] data = new byte[line.getBufferSize() / 5];
line.start();
new Thread(new Runnable() {
@Override
public void run() {
try {
Thread.sleep(3000);
} catch (InterruptedException e) {
e.printStackTrace();
}
stopped = true;
}
}).start();
while (!stopped) {
numBytesRead = line.read(data, 0, data.length);
out.write(data, 0, numBytesRead);
}
}
}
Thanks for any help given.
Sincerely, Roberto Anić Banić
P.S.
I have seen this, but it doesn't work: http://javasolution.blogspot.com/2007/04/voice-chat-using-java.html
P.P.S.
Is UDP a good solution, or should I use RTSP?
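For the first problem, the bytes captured into the ByteArrayOutputStream can be wrapped back into an AudioInputStream and played through a Clip. A minimal sketch (class and method names are just placeholders), usable as RecordedPlayback.play(out.toByteArray(), format) with the out and format from the code above:
import java.io.ByteArrayInputStream;
import java.io.IOException;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;
import javax.sound.sampled.LineUnavailableException;

public class RecordedPlayback {
    // Plays raw PCM bytes (e.g. ByteArrayOutputStream.toByteArray()) captured in the given format.
    public static Clip play(byte[] audioBytes, AudioFormat format)
            throws LineUnavailableException, IOException {
        AudioInputStream ais = new AudioInputStream(
                new ByteArrayInputStream(audioBytes),
                format,
                audioBytes.length / format.getFrameSize());
        Clip clip = AudioSystem.getClip();
        clip.open(ais); // Clip copies the data into memory, so the stream is no longer needed
        clip.start();
        return clip;
    }
}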
Here is some sample code that helped me stream and consume audio via UDP. You can change the infinite loop to limit the duration of the audio stream. Below is the client and server code. The audio input comes from the microphone.
server:
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;
import javax.sound.sampled.TargetDataLine;
public class Sender {
public static void main(String[] args) throws IOException {
AudioFormat format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 44100, 16, 2, 4, 44100, true);
TargetDataLine microphone;
SourceDataLine speakers;
try {
microphone = AudioSystem.getTargetDataLine(format);
DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
microphone = (TargetDataLine) AudioSystem.getLine(info);
microphone.open(format);
ByteArrayOutputStream out = new ByteArrayOutputStream();
int numBytesRead;
int CHUNK_SIZE = 1024;
byte[] data = new byte[microphone.getBufferSize() / 5];
microphone.start();
DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, format);
speakers = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
speakers.open(format);
speakers.start();
// Configure the ip and port
String hostname = "localhost";
int port = 5555;
InetAddress address = InetAddress.getByName(hostname);
DatagramSocket socket = new DatagramSocket();
byte[] buffer = new byte[1024];
for(;;) {
numBytesRead = microphone.read(data, 0, CHUNK_SIZE);
// bytesRead += numBytesRead;
// write the mic data to a stream for use later
out.write(data, 0, numBytesRead);
// write mic data to stream for immediate playback
speakers.write(data, 0, numBytesRead);
DatagramPacket request = new DatagramPacket(data,numBytesRead, address, port);
socket.send(request);
}
} catch (LineUnavailableException e) {
e.printStackTrace();
}
}}
client:
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.SocketTimeoutException;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;
import javax.sound.sampled.TargetDataLine;
public class UdpClient {
public static void main(String[] args) throws LineUnavailableException {
AudioFormat format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 44100, 16, 2, 4, 44100, true);
TargetDataLine microphone;
SourceDataLine speakers;
microphone = AudioSystem.getTargetDataLine(format);
DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
microphone = (TargetDataLine) AudioSystem.getLine(info);
microphone.open(format);
ByteArrayOutputStream out = new ByteArrayOutputStream();
int numBytesRead;
int CHUNK_SIZE = 1024;
byte[] data = new byte[microphone.getBufferSize() / 5];
microphone.start();
int bytesRead = 0;
DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, format);
speakers = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
speakers.open(format);
speakers.start();
String hostname = "localhost";
int port = 5555;
try {
InetAddress address = InetAddress.getByName(hostname);
DatagramSocket socket = new DatagramSocket();
DatagramSocket serverSocket = new DatagramSocket(5555);
byte[] receiveData = new byte[1024];
byte[] sendData = new byte[1024];
while (true) {
byte[] buffer = new byte[1024];
DatagramPacket response = new DatagramPacket(buffer, buffer.length);
serverSocket.receive(response);
// use getLength(): only that many bytes of the packet buffer are valid
out.write(response.getData(), 0, response.getLength());
speakers.write(response.getData(), 0, response.getLength());
String quote = new String(buffer, 0, response.getLength());
System.out.println(quote);
System.out.println();
//Thread.sleep(10000);
}
} catch (SocketTimeoutException ex) {
System.out.println("Timeout error: " + ex.getMessage());
ex.printStackTrace();
} catch (IOException ex) {
System.out.println("Client error: " + ex.getMessage());
ex.printStackTrace();
}/* catch (InterruptedException ex) {
ex.printStackTrace();
}*/
}}
Here's an implementation of sending audio over UDP.
Below is the client and server code. Basically the client code sends captured audio to the server, which plays it on receiving. The client can also play the captured audio.
Client code: VUClient.java
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.io.*;
import java.net.*;
import javax.sound.sampled.*;
public class VUClient extends JFrame {
boolean stopaudioCapture = false;
ByteArrayOutputStream byteOutputStream;
AudioFormat adFormat;
TargetDataLine targetDataLine;
AudioInputStream InputStream;
SourceDataLine sourceLine;
Graphics g;
public static void main(String args[]) {
new VUClient();
}
public VUClient() {
final JButton capture = new JButton("Capture");
final JButton stop = new JButton("Stop");
final JButton play = new JButton("Playback");
capture.setEnabled(true);
stop.setEnabled(false);
play.setEnabled(false);
capture.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
capture.setEnabled(false);
stop.setEnabled(true);
play.setEnabled(false);
captureAudio();
}
});
getContentPane().add(capture);
stop.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
capture.setEnabled(true);
stop.setEnabled(false);
play.setEnabled(true);
stopaudioCapture = true;
targetDataLine.close();
}
});
getContentPane().add(stop);
play.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
playAudio();
}
});
getContentPane().add(play);
getContentPane().setLayout(new FlowLayout());
setTitle("Capture/Playback Demo");
setDefaultCloseOperation(EXIT_ON_CLOSE);
setSize(400, 100);
getContentPane().setBackground(Color.white);
setVisible(true);
g = (Graphics) this.getGraphics();
}
private void captureAudio() {
try {
adFormat = getAudioFormat();
DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, adFormat);
targetDataLine = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
targetDataLine.open(adFormat);
targetDataLine.start();
Thread captureThread = new Thread(new CaptureThread());
captureThread.start();
} catch (Exception e) {
StackTraceElement stackEle[] = e.getStackTrace();
for (StackTraceElement val : stackEle) {
System.out.println(val);
}
System.exit(0);
}
}
private void playAudio() {
try {
byte audioData[] = byteOutputStream.toByteArray();
InputStream byteInputStream = new ByteArrayInputStream(audioData);
AudioFormat adFormat = getAudioFormat();
InputStream = new AudioInputStream(byteInputStream, adFormat, audioData.length / adFormat.getFrameSize());
DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, adFormat);
sourceLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
sourceLine.open(adFormat);
sourceLine.start();
Thread playThread = new Thread(new PlayThread());
playThread.start();
} catch (Exception e) {
System.out.println(e);
System.exit(0);
}
}
private AudioFormat getAudioFormat() {
float sampleRate = 16000.0F;
int sampleInbits = 16;
int channels = 1;
boolean signed = true;
boolean bigEndian = false;
return new AudioFormat(sampleRate, sampleInbits, channels, signed, bigEndian);
}
class CaptureThread extends Thread {
byte tempBuffer[] = new byte[10000];
public void run() {
byteOutputStream = new ByteArrayOutputStream();
stopaudioCapture = false;
try {
DatagramSocket clientSocket = new DatagramSocket(8786);
InetAddress IPAddress = InetAddress.getByName("127.0.0.1");
while (!stopaudioCapture) {
int cnt = targetDataLine.read(tempBuffer, 0, tempBuffer.length);
if (cnt > 0) {
DatagramPacket sendPacket = new DatagramPacket(tempBuffer, tempBuffer.length, IPAddress, 9786);
clientSocket.send(sendPacket);
byteOutputStream.write(tempBuffer, 0, cnt);
}
}
byteOutputStream.close();
} catch (Exception e) {
System.out.println("CaptureThread::run()" + e);
System.exit(0);
}
}
}
class PlayThread extends Thread {
byte tempBuffer[] = new byte[10000];
public void run() {
try {
int cnt;
while ((cnt = InputStream.read(tempBuffer, 0, tempBuffer.length)) != -1) {
if (cnt > 0) {
sourceLine.write(tempBuffer, 0, cnt);
}
}
// sourceLine.drain();
// sourceLine.close();
} catch (Exception e) {
System.out.println(e);
System.exit(0);
}
}
}
}
Server code: VUServer.java
import java.io.*;
import java.net.*;
import javax.sound.sampled.*;
public class VUServer {
ByteArrayOutputStream byteOutputStream;
AudioFormat adFormat;
TargetDataLine targetDataLine;
AudioInputStream InputStream;
SourceDataLine sourceLine;
private AudioFormat getAudioFormat() {
float sampleRate = 16000.0F;
int sampleInbits = 16;
int channels = 1;
boolean signed = true;
boolean bigEndian = false;
return new AudioFormat(sampleRate, sampleInbits, channels, signed, bigEndian);
}
public static void main(String args[]) {
new VUServer().runVOIP();
}
public void runVOIP() {
try {
DatagramSocket serverSocket = new DatagramSocket(9786);
byte[] receiveData = new byte[10000];
while (true) {
DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);
serverSocket.receive(receivePacket);
System.out.println("RECEIVED: " + receivePacket.getAddress().getHostAddress() + " " + receivePacket.getPort());
try {
byte audioData[] = receivePacket.getData();
InputStream byteInputStream = new ByteArrayInputStream(audioData);
AudioFormat adFormat = getAudioFormat();
InputStream = new AudioInputStream(byteInputStream, adFormat, audioData.length / adFormat.getFrameSize());
DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, adFormat);
sourceLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
sourceLine.open(adFormat);
sourceLine.start();
Thread playThread = new Thread(new PlayThread());
playThread.start();
} catch (Exception e) {
System.out.println(e);
System.exit(0);
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
class PlayThread extends Thread {
byte tempBuffer[] = new byte[10000];
public void run() {
try {
int cnt;
while ((cnt = InputStream.read(tempBuffer, 0, tempBuffer.length)) != -1) {
if (cnt > 0) {
sourceLine.write(tempBuffer, 0, cnt);
}
}
// sourceLine.drain();
// sourceLine.close();
} catch (Exception e) {
System.out.println(e);
System.exit(0);
}
}
}
}

java - for loop is executing more times than it should - metronome program

I am creating a metronome program, and the for loop is executing one more time than it should.
public class Tempo {
String file;
int bpm;
public Tempo(int bpm, String file){
this.bpm=bpm;
this.file=file;
}
public void tempoPlay () throws InterruptedException{
new Play(file).start();
Thread.sleep(60000/bpm);
}
public static void main(String[] args) throws InterruptedException {
Tempo t = new Tempo(120, "C:\\Users\\Korisnik\\Desktop\\dome3.wav");
for(int i=0;i<20;i++){
t.tempoPlay();
}
}
}
The first beat is rapidly followed by the second one, but after that the timing evens out. I've counted that it plays 21 beats, but it should play 20.
Here's the Play class:
import java.io.File;
import java.io.IOException;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.FloatControl;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;
import javax.sound.sampled.UnsupportedAudioFileException;
class Play extends Thread {
private String filename;
private Position curPosition;
private final int EXTERNAL_BUFFER_SIZE = 524288; // 512 KB
enum Position {
LEFT, RIGHT, NORMAL
};
public Play(String wavfile) {
filename = wavfile;
curPosition = Position.NORMAL;
}
public Play(String wavfile, Position p) {
filename = wavfile;
curPosition = p;
}
@Override
public void run() {
File soundFile = new File(filename);
if (!soundFile.exists()) {
System.err.println("Wave file not found: " + filename);
return;
}
AudioInputStream audioInputStream = null;
try {
audioInputStream = AudioSystem.getAudioInputStream(soundFile);
} catch (UnsupportedAudioFileException e1) {
e1.printStackTrace();
return;
} catch (IOException e1) {
e1.printStackTrace();
return;
}
AudioFormat format = audioInputStream.getFormat();
SourceDataLine auline = null;
DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
try {
auline = (SourceDataLine) AudioSystem.getLine(info);
auline.open(format);
} catch (LineUnavailableException e) {
e.printStackTrace();
return;
} catch (Exception e) {
e.printStackTrace();
return;
}
if (auline.isControlSupported(FloatControl.Type.PAN)) {
FloatControl pan = (FloatControl) auline
.getControl(FloatControl.Type.PAN);
if (curPosition == Position.RIGHT) {
pan.setValue(1.0f);
} else if (curPosition == Position.LEFT) {
pan.setValue(-1.0f);
}
}
auline.start();
int nBytesRead = 0;
byte[] abData = new byte[EXTERNAL_BUFFER_SIZE];
try {
while (nBytesRead != -1) {
nBytesRead = audioInputStream.read(abData, 0, abData.length);
if (nBytesRead >= 0) {
auline.write(abData, 0, nBytesRead);
}
}
} catch (IOException e) {
e.printStackTrace();
return;
} finally {
auline.drain();
auline.close();
}
}
}
Shot in the dark: It might be worthwhile to read the file into memory completely for testing purposes. A guess as to what might be happening is that the I/O from reading the file is interfering with the timing of the playback.
You might be able to get away with this to test.
Tempo t = new Tempo(120, "C:\\Users\\Korisnik\\Desktop\\dome3.wav");
t.tempoPlay(); // ignore this
Thread.sleep(10);
for(int i=0;i<20;i++){
t.tempoPlay();
}
Or better yet, have Tempo cache the file contents in memory before playing the sound, along the lines of the sketch below.
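A minimal sketch of that caching idea (class name hypothetical): decode and open the clip once in the constructor, then rewind and restart it on each beat, so no file I/O happens inside the timing loop.
import java.io.File;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;

public class CachedTempo {
    private Clip click;
    private final int bpm;

    public CachedTempo(int bpm, String file) throws Exception {
        this.bpm = bpm;
        // Done once, up front: decode the file and load it fully into the Clip.
        try (AudioInputStream ais = AudioSystem.getAudioInputStream(new File(file))) {
            click = AudioSystem.getClip();
            click.open(ais);
        }
    }

    public void tempoPlay() throws InterruptedException {
        if (click.isRunning()) {
            click.stop();
        }
        click.setFramePosition(0);  // rewind to the start of the sample
        click.start();              // returns immediately; playback is asynchronous
        Thread.sleep(60000 / bpm);  // 120 bpm -> 500 ms between beats
    }

    public static void main(String[] args) throws Exception {
        CachedTempo t = new CachedTempo(120, "C:\\Users\\Korisnik\\Desktop\\dome3.wav");
        for (int i = 0; i < 20; i++) {
            t.tempoPlay();
        }
    }
}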

getting mp3 audio signal as array in java

I have been trying to get the audio stream of an MP3 file as an array of floating-point values. I have obtained an array with the sample code below, but I am not sure whether I can use it for an FFT, because it does not match (or even resemble) the array I got from C++ code that uses LAME.
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;
import javax.sound.sampled.UnsupportedAudioFileException;
import javazoom.spi.mpeg.sampled.file.MpegAudioFileReader;
import org.tritonus.share.sampled.FloatSampleTools;
public class onjava {
public static void main(String[] args) {
try {
File file = new File("mymp3file.mp3");
MpegAudioFileReader mpegAudioFileReader = new MpegAudioFileReader();
int fl = mpegAudioFileReader.getAudioFileFormat(file).getFrameLength();
System.out.println(fl);
AudioInputStream in= AudioSystem.getAudioInputStream(file);
AudioInputStream din = null;
AudioFormat baseFormat = in.getFormat();
onjava oj = new onjava();
AudioFormat decodedFormat =
new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
baseFormat.getSampleRate(),
16,
baseFormat.getChannels(),
baseFormat.getChannels() * 2,
baseFormat.getSampleRate(),
false);
int len = (int)file.length();
din = AudioSystem.getAudioInputStream(decodedFormat, in);
oj.rawplay(decodedFormat, din, len, fl);
in.close();
} catch (IOException e) {
e.printStackTrace();
} catch (LineUnavailableException e) {
e.printStackTrace();
} catch (UnsupportedAudioFileException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
private void rawplay(AudioFormat targetFormat, AudioInputStream din , int len, int frameLength)
throws IOException, LineUnavailableException {
byte[] data = new byte[len-1];
float[] floatArray = new float[len-1];
SourceDataLine line = getLine(targetFormat);
File textFile = new File("outputfile.txt");
PrintStream printStream = new PrintStream(textFile);
System.setOut(printStream);
if (line != null) {
// Start
line.start();
int nBytesRead = 0, nBytesWritten = 0;
while (nBytesRead != -1) {
nBytesRead = din.read(data, 0, 8000);
if (nBytesRead != -1) {
//Please tell me if something is wrong with the arguments passed below
FloatSampleTools.byte2floatGeneric(data, 0, targetFormat.getFrameSize(), floatArray, 0, 1000, targetFormat);
for (int i = 0; i < nBytesRead; i++) {
if(floatArray[i] != 0.0)
System.out.println(floatArray[i]);
}
}
}
// Stop
line.drain();
line.stop();
line.close();
din.close();
}
}
private SourceDataLine getLine(AudioFormat audioFormat)
throws LineUnavailableException {
SourceDataLine res = null;
DataLine.Info info = new DataLine.Info(SourceDataLine.class,
audioFormat);
res = (SourceDataLine) AudioSystem.getLine(info);
res.open(audioFormat);
return res;
}
}
Please let me know if anything is wrong in the above code, and whether any other pure-Java API is available for processing MP3 files. I need an array of floats from the MP3 audio stream.
Also let me know about a pure-Java implementation of LAME, if one is available.
Thanks!
javazoom.spi.mpeg.sampled.file.MpegAudioFileReader and org.tritonus.share.sampled.FloatSampleTools cannot be resolved. Please specify which libraries you used for those classes.
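For reference, the conversion that FloatSampleTools.byte2floatGeneric performs for the 16-bit signed little-endian PCM produced by decodedFormat above can also be done by hand, which is useful for sanity-checking the float values. A minimal sketch (helper name hypothetical):
public final class Pcm16 {
    // Converts 16-bit signed little-endian PCM bytes into floats in [-1.0, 1.0).
    // Only the first byteCount bytes of the buffer are converted.
    public static float[] toFloats(byte[] pcm, int byteCount) {
        float[] samples = new float[byteCount / 2];
        for (int i = 0; i < samples.length; i++) {
            int lo = pcm[2 * i] & 0xFF;   // low byte, treated as unsigned
            int hi = pcm[2 * i + 1];      // high byte, keeps the sign
            samples[i] = ((hi << 8) | lo) / 32768.0f;
        }
        return samples;
    }
}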

How to play .wav files with java

I am trying to play a *.wav file with Java. I want it to do the following:
When a button is pressed, play a short beep sound.
I have googled it, but most of the code wasn't working. Can someone give me a simple code snippet to play a .wav file?
Finally I managed to do the following and it works fine
import java.io.File;
import java.io.IOException;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;
public class MakeSound {
private final int BUFFER_SIZE = 128000;
private File soundFile;
private AudioInputStream audioStream;
private AudioFormat audioFormat;
private SourceDataLine sourceLine;
/**
* @param filename the name of the file that is going to be played
*/
public void playSound(String filename){
String strFilename = filename;
try {
soundFile = new File(strFilename);
} catch (Exception e) {
e.printStackTrace();
System.exit(1);
}
try {
audioStream = AudioSystem.getAudioInputStream(soundFile);
} catch (Exception e){
e.printStackTrace();
System.exit(1);
}
audioFormat = audioStream.getFormat();
DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
try {
sourceLine = (SourceDataLine) AudioSystem.getLine(info);
sourceLine.open(audioFormat);
} catch (LineUnavailableException e) {
e.printStackTrace();
System.exit(1);
} catch (Exception e) {
e.printStackTrace();
System.exit(1);
}
sourceLine.start();
int nBytesRead = 0;
byte[] abData = new byte[BUFFER_SIZE];
while (nBytesRead != -1) {
try {
nBytesRead = audioStream.read(abData, 0, abData.length);
} catch (IOException e) {
e.printStackTrace();
}
if (nBytesRead >= 0) {
@SuppressWarnings("unused")
int nBytesWritten = sourceLine.write(abData, 0, nBytesRead);
}
}
sourceLine.drain();
sourceLine.close();
}
}
Here is the most elegant form I could come up with without using sun.*:
import java.io.*;
import javax.sound.sampled.*;
try {
File yourFile;
AudioInputStream stream;
AudioFormat format;
DataLine.Info info;
Clip clip;
stream = AudioSystem.getAudioInputStream(yourFile);
format = stream.getFormat();
info = new DataLine.Info(Clip.class, format);
clip = (Clip) AudioSystem.getLine(info);
clip.open(stream);
clip.start();
}
catch (Exception e) {
//whatevers
}
Shortest form (without having to install random libraries)?
public static void play(String filename)
{
try
{
Clip clip = AudioSystem.getClip();
clip.open(AudioSystem.getAudioInputStream(new File(filename)));
clip.start();
}
catch (Exception exc)
{
exc.printStackTrace(System.out);
}
}
The only problem is that there is no good way to make this method block so the data can be closed and disposed of after the *.wav finishes.
clip.drain() is documented as blocking, but it isn't here, because the clip is not yet running right after start().
The only working (but ugly) way I found is:
// ...
clip.start();
while (!clip.isRunning())
Thread.sleep(10);
while (clip.isRunning())
Thread.sleep(10);
clip.close();
You can use an event listener to close the clip after it is played
import java.io.File;
import javax.sound.sampled.*;
public void play(File file)
{
try
{
final Clip clip = (Clip)AudioSystem.getLine(new Line.Info(Clip.class));
clip.addLineListener(new LineListener()
{
@Override
public void update(LineEvent event)
{
if (event.getType() == LineEvent.Type.STOP)
clip.close();
}
});
clip.open(AudioSystem.getAudioInputStream(file));
clip.start();
}
catch (Exception exc)
{
exc.printStackTrace(System.out);
}
}
The snippet here works fine; tested with a Windows system sound:
public static void main(String[] args) {
AePlayWave aw = new AePlayWave( "C:\\WINDOWS\\Media\\tada.wav" );
aw.start();
}
A class that will play a WAV file, blocking until the sound has finished playing:
class Sound implements Playable {
private final Path wavPath;
private final CyclicBarrier barrier = new CyclicBarrier(2);
Sound(final Path wavPath) {
this.wavPath = wavPath;
}
@Override
public void play() throws LineUnavailableException, IOException, UnsupportedAudioFileException {
try (final AudioInputStream audioIn = AudioSystem.getAudioInputStream(wavPath.toFile());
final Clip clip = AudioSystem.getClip()) {
listenForEndOf(clip);
clip.open(audioIn);
clip.start();
waitForSoundEnd();
}
}
private void listenForEndOf(final Clip clip) {
clip.addLineListener(event -> {
if (event.getType() == LineEvent.Type.STOP) waitOnBarrier();
});
}
private void waitOnBarrier() {
try {
barrier.await();
} catch (final InterruptedException ignored) {
} catch (final BrokenBarrierException e) {
throw new RuntimeException(e);
}
}
private void waitForSoundEnd() {
waitOnBarrier();
}
}
Another way of doing it with AudioInputStream:
import java.io.File;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;
import javax.sound.sampled.Line;
import javax.sound.sampled.LineEvent;
import javax.sound.sampled.LineListener;
import javax.swing.JDialog;
import javax.swing.JFileChooser;
public class CoreJavaSound extends Object implements LineListener {
File soundFile;
JDialog playingDialog;
Clip clip;
public static void main(String[] args) throws Exception {
CoreJavaSound s = new CoreJavaSound();
}
public CoreJavaSound() throws Exception {
JFileChooser chooser = new JFileChooser();
chooser.showOpenDialog(null);
soundFile = chooser.getSelectedFile();
System.out.println("Playing " + soundFile.getName());
Line.Info linfo = new Line.Info(Clip.class);
Line line = AudioSystem.getLine(linfo);
clip = (Clip) line;
clip.addLineListener(this);
AudioInputStream ais = AudioSystem.getAudioInputStream(soundFile);
clip.open(ais);
clip.start();
}
public void update(LineEvent le) {
LineEvent.Type type = le.getType();
if (type == LineEvent.Type.OPEN) {
System.out.println("OPEN");
} else if (type == LineEvent.Type.CLOSE) {
System.out.println("CLOSE");
System.exit(0);
} else if (type == LineEvent.Type.START) {
System.out.println("START");
if (playingDialog != null) playingDialog.setVisible(true); // guard: the dialog is never created in this example
} else if (type == LineEvent.Type.STOP) {
System.out.println("STOP");
if (playingDialog != null) playingDialog.setVisible(false);
clip.close();
}
}
}
A solution without the reflective DataLine.Info lookup (DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat)), since Java reflection decreases performance.
To run: java playsound absoluteFilePathTo/file.wav
import javax.sound.sampled.*;
import java.io.*;
public class playsound {
public static void main (String args[]) throws Exception {
playSound (args[0]);
}
public static void playSound (String filename) throws Exception {
AudioInputStream audioStream = AudioSystem.getAudioInputStream(new File(filename));
int BUFFER_SIZE = 128000;
AudioFormat audioFormat = null;
SourceDataLine sourceLine = null;
audioFormat = audioStream.getFormat();
sourceLine = AudioSystem.getSourceDataLine(audioFormat);
sourceLine.open(audioFormat);
sourceLine.start();
int nBytesRead = 0;
byte[] abData = new byte[BUFFER_SIZE];
while (nBytesRead != -1) {
try {
nBytesRead =
audioStream.read(abData, 0, abData.length);
} catch (IOException e) {
e.printStackTrace();
}
if (nBytesRead >= 0) {
int nBytesWritten = sourceLine.write(abData, 0, nBytesRead);
}
}
sourceLine.drain();
sourceLine.close();
}
}
You can use AudioStream this way as well:
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JPanel;
import sun.audio.AudioPlayer;
import sun.audio.AudioStream;
public class AudioWizz extends JPanel implements ActionListener {
private static final long serialVersionUID = 1L; //you like your cereal and the program likes their "serial"
static AudioWizz a;
static JButton playBuddon;
static JFrame frame;
public static void main(String arguments[]){
frame= new JFrame("AudioWizz");
frame.setSize(300,300);
frame.setVisible(true);
a= new AudioWizz();
playBuddon= new JButton("PUSH ME");
playBuddon.setBounds(10,10,80,30);
playBuddon.addActionListener(a);
frame.add(playBuddon);
frame.add(a);
}
public void actionPerformed(ActionEvent e){ //an eventListener
if (e.getSource() == playBuddon) {
try {
InputStream in = new FileInputStream("*.wav");
AudioStream sound = new AudioStream(in);
AudioPlayer.player.start(sound);
} catch(FileNotFoundException e1) {
e1.printStackTrace();
} catch (IOException e1) {
e1.printStackTrace();
}
}
}
}
I took @greenLizard's code and made it more robust.
I closed the AudioInputStream.
I used a BufferedInputStream, because AudioSystem.getAudioInputStream was occasionally throwing an IOException when it couldn't reset the input stream and start over.
Hopefully, there are no more exceptions to be found.
Here's the modified code. ErrorDisplayDialog shows an exception as a JDialog in a Java Swing application; just replace it with e.printStackTrace() if you don't need that.
private void playWavFile(String fileName) {
InputStream inputStream = getClass().getResourceAsStream(fileName);
BufferedInputStream bufferedInputStream = new BufferedInputStream(
inputStream);
AudioInputStream audioStream = null;
AudioFormat audioFormat = null;
try {
audioStream = AudioSystem.getAudioInputStream(bufferedInputStream);
audioFormat = audioStream.getFormat();
} catch (UnsupportedAudioFileException e) {
new ErrorDisplayDialog(view.getFrame(),
"UnsupportedAudioFileException", e);
return;
} catch (IOException e) {
new ErrorDisplayDialog(view.getFrame(), "IOException", e);
return;
}
DataLine.Info info = new DataLine.Info(SourceDataLine.class,
audioFormat);
SourceDataLine sourceLine;
try {
sourceLine = (SourceDataLine) AudioSystem.getLine(info);
sourceLine.open(audioFormat);
} catch (LineUnavailableException e) {
new ErrorDisplayDialog(view.getFrame(), "LineUnavailableException",
e);
return;
}
sourceLine.start();
int nBytesRead = 0;
byte[] abData = new byte[128000];
while (nBytesRead != -1) {
try {
nBytesRead = audioStream.read(abData, 0, abData.length);
} catch (IOException e) {
new ErrorDisplayDialog(view.getFrame(), "IOException", e);
return;
}
if (nBytesRead >= 0) {
sourceLine.write(abData, 0, nBytesRead);
}
}
sourceLine.drain();
sourceLine.close();
try {
audioStream.close();
} catch (IOException e) {
new ErrorDisplayDialog(view.getFrame(), "IOException", e);
}
}
