Continuous audio stream in Java

I am learning to use the sound API of Java. I've watched a tutorial on YouTube where the instructor simply creates SourceDataLine and TargetDataLine instances and uses them in separate threads. He calls the threads one after another with a Thread.sleep() method in between. Within that sleeping period, the required sound is captured and then the sound is heard.
Now, in the program below, I've tried to extend the idea and tried to achieve a continuous stream of audio. That is, I will speak and the sound will be heard automatically. But it cannot be achieved. I know I am at wrong as I'm still a newbie in this regard. What changes should I make and where? It won't be a problem if there is a satisfying delay between recording and playing the sound.
P.S. I will try to use this with OpenCV video sharing in another program. If you know something about that, please feel free to share it. Thanks!
import javax.sound.sampled.*;
import java.io.ByteArrayOutputStream;
public class Main {
// Shared phase flag: true while the capture thread should record,
// false while the playback thread should drain the buffer.
public boolean recording = true;
// Crude call counter used by change() to decide when to flip phases.
public int rate = 0;
public static void main(String[] args) throws Exception{
// 44.1 kHz, 16-bit, stereo, 4-byte frames, little-endian signed PCM.
AudioFormat format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 44100, 16, 2, 4, 44100, false);
// Shared buffer: the record thread appends, the play thread replays.
final ByteArrayOutputStream out = new ByteArrayOutputStream();
DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
final TargetDataLine targetLine = (TargetDataLine) AudioSystem.getLine(info);
info = new DataLine.Info(SourceDataLine.class, format);
final SourceDataLine sourceLine = (SourceDataLine) AudioSystem.getLine(info);
Main m = new Main();
// NOTE(review): these constructors start their own threads as a side effect.
new record(m, format, out, targetLine);
new play(m, format, out, sourceLine);
}
// Capture one chunk from the mic into `out`.
// Blocks in wait() while `recording` is false; change() wakes it via notifyAll().
synchronized public void record(TargetDataLine targetLine, ByteArrayOutputStream out){
while(!recording){
try{
wait();
}catch(Exception e){
System.out.println(e);
}
}
byte[] data = new byte[targetLine.getBufferSize()/5];
int readBytes;
readBytes = targetLine.read(data, 0, data.length);
out.write(data, 0, readBytes);
}
// Play back everything captured so far. Blocks while `recording` is true.
// NOTE(review): `out` is never reset, so every playback phase replays the
// entire history from the start — audio repeats and the lag grows unboundedly.
synchronized public void play(SourceDataLine sourceLine, ByteArrayOutputStream out){
while(recording){
try{
wait();
}catch(Exception e){
System.out.println(e);
}
}
sourceLine.write(out.toByteArray(), 0, out.size());
}
// Flip between record/play phases every 6000+ calls; returns 1 on a flip, 0 otherwise.
// NOTE(review): `rate` counts method invocations, not time or bytes, so the
// phase length depends entirely on thread scheduling; both worker threads also
// busy-spin on this method. This is the core reason the synchronization
// "doesn't seem to work".
synchronized public int change(){
rate++;
if(rate > 6000 && recording){
rate = 0;
recording = false;
notifyAll();
return 1;
}
else if(rate > 6000 && !recording){
rate = 0;
recording = true;
notifyAll();
return 1;
}
return 0;
}
}
// Capture-thread wrapper: starts itself from its constructor and repeatedly
// asks Main.record() to pull microphone data into the shared stream.
// NOTE(review): the lowercase name `record` collides with the Java 16+
// `record` keyword when used as a type name — rename to e.g. Recorder.
class record implements Runnable{
private Main m;
private AudioFormat format;
private ByteArrayOutputStream out;
final TargetDataLine targetLine;
public record(Main m, AudioFormat format, ByteArrayOutputStream out, TargetDataLine targetLine) throws Exception{
this.m = m;
this.format = format;
this.out = out;
this.targetLine = targetLine;
targetLine.open();
System.out.println("Started recording...");
// NOTE(review): starting a thread from a constructor leaks `this`
// before construction completes.
new Thread(this).start();
}
// NOTE(review): '#Override' is a paste artifact — Java annotations use '@Override'.
#Override
public void run() {
targetLine.start();
while(true){
m.record(targetLine, out);
// change() returns 1 only on a phase flip; stop capturing during playback.
while(m.change() == 1) targetLine.stop();
targetLine.start();
}
}
}
// Playback-thread wrapper: starts itself from its constructor and repeatedly
// asks Main.play() to write the shared buffer to the speakers.
// NOTE(review): lowercase class name; prefer e.g. Player.
class play implements Runnable{
private Main m;
private AudioFormat format;
private ByteArrayOutputStream out;
final SourceDataLine sourceLine;
public play(Main m, AudioFormat format, ByteArrayOutputStream out, SourceDataLine sourceLine) throws Exception{
this.m = m;
this.format = format;
this.out = out;
this.sourceLine = sourceLine;
sourceLine.open();
System.out.println("Started playing...");
// NOTE(review): thread started from constructor — leaks `this`.
new Thread(this).start();
}
// NOTE(review): '#Override' is a paste artifact — should be '@Override'.
#Override
public void run() {
sourceLine.start();
while(true){
m.play(sourceLine, out);
// change() returns 1 only on a phase flip; stop playback while recording.
while(m.change() == 1) sourceLine.stop();
sourceLine.start();
}
}
}
Edit:
I can get two streams run one after another as follows, but I have to hard-code the threads. I wrote four threads individually. How can I write efficient code, i.e. make use of the two earlier threads and record-play sound continuously? My synchronization doesn't seem to work.
import javax.sound.sampled.*;
import java.io.ByteArrayOutputStream;
import java.math.BigInteger;
public class Main {
// Hard-coded two-cycle record/play demo: record 4 s, play 4 s, repeat once.
// NOTE(review): the four threads are near-duplicates; the author asks how to
// avoid this — reuse one runnable and a bounded queue instead of a BAOS.
public static void main(String[] args) throws Exception{
// 44.1 kHz, 16-bit, stereo, 4-byte frames, little-endian signed PCM.
AudioFormat format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 44100, 16, 2, 4, 44100, false);
DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
final SourceDataLine sourceLine = (SourceDataLine) AudioSystem.getLine(info);
sourceLine.open();
info = new DataLine.Info(TargetDataLine.class, format);
final TargetDataLine targetLine = (TargetDataLine) AudioSystem.getLine(info);
targetLine.open();
// Cycle-1 buffer: capture appends here, playback replays the whole thing.
final ByteArrayOutputStream out = new ByteArrayOutputStream();
Thread record = new Thread(){
// NOTE(review): '#Override' is a paste artifact — should be '@Override'.
#Override
public void run(){
targetLine.start();
byte[] data = new byte[targetLine.getBufferSize()/5];
int readBytes;
while(true){
readBytes = targetLine.read(data, 0, data.length);
out.write(data, 0, readBytes);
}
}
};
Thread play = new Thread(){
#Override
public void run(){
sourceLine.start();
// NOTE(review): this loop rewrites the entire buffer forever —
// the same 4 s of audio replays repeatedly until the line is stopped.
while(true){
sourceLine.write(out.toByteArray(), 0, out.toByteArray().length);
}
}
};
// Cycle-2 buffer and threads (duplicates of the above using lambdas).
final ByteArrayOutputStream out1 = new ByteArrayOutputStream();
Thread record1 = new Thread(() -> {
targetLine.start();
byte[] data = new byte[targetLine.getBufferSize()/5];
int readBytes;
while(true){
readBytes = targetLine.read(data, 0, data.length);
out1.write(data, 0, readBytes);
}
});
Thread play1 = new Thread(() -> {
sourceLine.start();
while(true){
sourceLine.write(out1.toByteArray(), 0, out1.toByteArray().length);
}
});
// Cycle 1: record 4 s, then play 4 s.
record.start();
System.out.println("Recording...");
Thread.sleep(4000);
// NOTE(review): drain() after stop() on a TargetDataLine discards little;
// stop/close here also leaves the `record` thread blocked forever.
targetLine.stop();
targetLine.drain();
targetLine.close();
play.start();
Thread.sleep(4000);
System.out.println("Playing...");
sourceLine.stop();
sourceLine.drain();
sourceLine.close();
// Cycle 2: reopen both lines and repeat with the second pair of threads.
targetLine.open();
sourceLine.open();
record1.start();
System.out.println("Recording...");
Thread.sleep(4000);
targetLine.stop();
targetLine.close();
play1.start();
Thread.sleep(4000);
System.out.println("Playing...");
sourceLine.stop();
sourceLine.close();
}
}

I know the question is a couple of years old, but in case anyone else is also unsure, I'll give this a stab. I don't believe you need to have your TargetDataLine and SourceDataLine in separate threads if you just want to record audio and simultaneously play that audio back instantly (as quickly as the Java Sound API will allow, anyway...).
If you don't want your program to hang then you'll need at least one thread for reading in audio, and outputting audio to your system. If you use 2 threads I believe the delay would be quite noticeable.
Below is a simple implementation for real-time recording and playback in a single thread. When I tested this with a microphone and speakers there is some latency, I'd guess < 1000ms. There's probably a much better way to do this with lower latency if you do some research. This may also be of interest: How to synchronize a TargetDataLine and SourceDataLine in Java (Synchronize audio recording and playback)
import javax.sound.sampled.*;
public class Main {
    /**
     * Real-time microphone-to-speaker loopback in a single worker thread:
     * read a small chunk from the TargetDataLine (mic) and immediately write
     * it to the SourceDataLine (speakers).
     */
    public static void main(String[] args) throws Exception {
        // 44.1 kHz, 16-bit, stereo, 4-byte frames, little-endian signed PCM.
        AudioFormat format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 44100, 16, 2, 4, 44100, false);
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        final SourceDataLine sourceLine = (SourceDataLine) AudioSystem.getLine(info);
        sourceLine.open();
        info = new DataLine.Info(TargetDataLine.class, format);
        final TargetDataLine targetLine = (TargetDataLine) AudioSystem.getLine(info);
        targetLine.open();
        // Small chunk keeps latency low; must be a multiple of the 4-byte frame size.
        byte[] data = new byte[1024];
        sourceLine.start();
        targetLine.start();
        Thread thread = new Thread() {
            @Override // fixed: original had the paste artifact '#Override'
            public void run() {
                while (true) {
                    // BUG FIX: write only the bytes actually captured —
                    // read() may return fewer than data.length.
                    int n = targetLine.read(data, 0, data.length);
                    sourceLine.write(data, 0, n);
                }
            }
        };
        thread.start();
    }
}

Related

How to capture pc sound and send it via sockets using java

I am trying to capture audio from pc(from speaker/headphones) and send it via socket(UDP, if possible) to another computer, which must play it back. I have found some code to do this:
Server:
import javax.sound.sampled.*;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.ServerSocket;
import java.net.Socket;
public class Server {
    ServerSocket MyService;
    Socket clientSocket = null;
    InputStream input;
    AudioFormat audioFormat;
    SourceDataLine sourceDataLine;
    byte tempBuffer[] = new byte[10000];
    static Mixer.Info[] mixerInfo = AudioSystem.getMixerInfo();

    /**
     * Accepts a single TCP client on port 500 and plays the raw PCM bytes it
     * sends on the default speaker line.
     * NOTE(review): port 500 is privileged on Unix-like systems — confirm
     * this is intended; ports >= 1024 avoid needing elevated rights.
     *
     * @throws LineUnavailableException if no speaker line supports the format
     */
    Server() throws LineUnavailableException {
        try {
            audioFormat = getAudioFormat();
            DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, audioFormat);
            sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
            sourceDataLine.open(audioFormat);
            sourceDataLine.start();
            MyService = new ServerSocket(500);
            clientSocket = MyService.accept();
            input = new BufferedInputStream(clientSocket.getInputStream());
            // BUG FIX: use the number of bytes actually read as the write
            // length. The original always wrote 10000 bytes, replaying stale
            // buffer contents whenever a read returned less than a full buffer.
            int count;
            while ((count = input.read(tempBuffer)) != -1) {
                sourceDataLine.write(tempBuffer, 0, count);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** 8 kHz, 8-bit, mono, signed, little-endian PCM — must match the client. */
    private AudioFormat getAudioFormat() {
        float sampleRate = 8000.0F;
        int sampleSizeInBits = 8;
        int channels = 1;
        boolean signed = true;
        boolean bigEndian = false;
        return new AudioFormat(
                sampleRate,
                sampleSizeInBits,
                channels,
                signed,
                bigEndian);
    }

    public static void main(String s[]) throws LineUnavailableException {
        Server s2 = new Server();
    }
}
Client:
import javax.sound.sampled.*;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.net.Socket;
public class Client {
    boolean stopCapture = false;
    AudioFormat audioFormat;
    TargetDataLine targetDataLine;
    BufferedOutputStream out = null;
    BufferedInputStream in = null;
    Socket sock = null;

    public static void main(String[] args) {
        Client tx = new Client();
        tx.captureAudio();
    }

    /** Connects to the server, opens the default microphone line, starts the capture thread. */
    private void captureAudio() {
        try {
            sock = new Socket("192.168.1.38", 500);
            out = new BufferedOutputStream(sock.getOutputStream());
            in = new BufferedInputStream(sock.getInputStream());
            audioFormat = getAudioFormat();
            DataLine.Info dataLineInfo = new DataLine.Info(
                    TargetDataLine.class, audioFormat);
            // BUG FIX: don't pick a mixer by hard-coded index (mixerInfo[2]) —
            // that mixer need not support a TargetDataLine with this format,
            // which is exactly the reported IllegalArgumentException
            // ("Line unsupported"). Let AudioSystem pick any capable mixer.
            targetDataLine = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
            targetDataLine.open(audioFormat);
            targetDataLine.start();
            Thread captureThread = new CaptureThread();
            captureThread.start();
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(0);
        }
    }

    /** Reads mic data and streams it to the server until stopCapture is set. */
    class CaptureThread extends Thread {
        byte tempBuffer[] = new byte[10000];

        @Override // fixed: original had the paste artifact '#Override'
        public void run() {
            stopCapture = false;
            try {
                while (!stopCapture) {
                    int cnt = targetDataLine.read(tempBuffer, 0,
                            tempBuffer.length);
                    // BUG FIX: send only the bytes actually captured; the
                    // original wrote the whole (possibly stale) buffer.
                    if (cnt > 0) {
                        out.write(tempBuffer, 0, cnt);
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
                System.exit(0);
            }
        }
    }

    /** 8 kHz, 8-bit, mono, signed, little-endian PCM — must match the server. */
    private AudioFormat getAudioFormat() {
        float sampleRate = 8000.0F;
        int sampleSizeInBits = 8;
        int channels = 1;
        boolean signed = true;
        boolean bigEndian = false;
        return new AudioFormat(sampleRate, sampleSizeInBits, channels, signed,
                bigEndian);
    }
}
But client throws
java.lang.IllegalArgumentException: Line unsupported: interface TargetDataLine supporting format PCM_SIGNED 8000.0 Hz, 8 bit, mono, 1 bytes/frame,
at java.desktop/com.sun.media.sound.DirectAudioDevice.getLine(DirectAudioDevice.java:175)
at Client.captureAudio(Client.java:28)
at Client.main(Client.java:15)
and I do not know what to do (I know it is not a UDP socket, but I first want to have some code that works). Thanks in advance.

Capture 8kHz, 16-bit Linear Samples with 4 frames of 20ms audio in each RTP Packet

I need to write simple Java Client program to capture live audio streaming.
Requirement
RTP Audio Packets.
8kHz, 16-bit Linear Samples (Linear PCM).
4 frames of 20ms audio will be sent in each RTP Packet.
After some search I found sample code on internet to capture the audio but it play beep sound.
Code
import java.io.ByteArrayInputStream;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.SourceDataLine;
public class Server {
    AudioInputStream audioInputStream;
    static AudioInputStream ais;
    static AudioFormat format;
    static boolean status = true;
    static int port = 31007;
    static int sampleRate = 44100;
    static DataLine.Info dataLineInfo;
    static SourceDataLine sourceDataLine;

    /**
     * Listens for UDP datagrams on {@code port} and plays each payload on the
     * default speaker line as 44.1 kHz, 16-bit, stereo, little-endian PCM.
     */
    public static void main(String args[]) throws Exception
    {
        System.out.println("Server started at port:"+port);
        @SuppressWarnings("resource") // fixed: original had the paste artifact '#SuppressWarnings'
        DatagramSocket serverSocket = new DatagramSocket(port);
        /*
         * Formula for lag = (byte_size/sample_rate)*2
         * Byte size 9728 will produce ~ 0.45 seconds of lag. Voice slightly broken.
         * Byte size 1400 will produce ~ 0.06 seconds of lag. Voice extremely broken.
         * Byte size 4000 will produce ~ 0.18 seconds of lag. Voice slightly more broken then 9728.
         */
        byte[] receiveData = new byte[4096];
        format = new AudioFormat(sampleRate, 16, 2, true, false);
        dataLineInfo = new DataLine.Info(SourceDataLine.class, format);
        sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
        sourceDataLine.open(format);
        sourceDataLine.start();
        DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);
        while (status == true)
        {
            System.out.println("Reciving Packets");
            serverSocket.receive(receivePacket);
            // BUG FIX: play only the bytes actually received. The original
            // wrapped the buffer in an AudioInputStream that was never read
            // (dead code, built from the buffer BEFORE the first receive) and
            // then wrote the full 4096-byte array, appending stale garbage
            // after every short packet.
            toSpeaker(java.util.Arrays.copyOf(receivePacket.getData(), receivePacket.getLength()));
        }
        sourceDataLine.drain();
        sourceDataLine.close();
    }

    /** Writes raw PCM bytes straight to the speaker line. */
    public static void toSpeaker(byte soundbytes[]) {
        try
        {
            System.out.println("At the speaker");
            sourceDataLine.write(soundbytes, 0, soundbytes.length);
        } catch (Exception e) {
            System.out.println("Not working in speakers...");
            e.printStackTrace();
        }
    }
}
I think I am not able to find the proper format to capture the packets sent in the given format.
Can anyone help me find the proper AudioFormat to capture this audio stream, or point me to a link about the same? Thanks... :)
Answer
float sampleRate = 8000;
int sampleSizeInBits = 16;
int channels = 1;
boolean signed = true;
boolean bigEndian = true;
AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
UDP + RTP Packet Format
While buffering minus 12 bytes from data as it contains RTP header information.
receivePacket = new DatagramPacket(receiveData, receiveData.length);
byte[] packet = new byte[receivePacket.getLength() - 12];
serverSocket.receive(receivePacket);
packet = Arrays.copyOfRange(receivePacket.getData(), 12, receivePacket.getLength());
hope this will help you in future or feel free to correct if its wrong Thanks..
You can try this implementation of Client and Server based on Datagram Sockets. It uses a mono 8000Hz 16bit signed big endian audio format. Server is running on port number 9786, while the client is using port number 8786. I guess the code is quite simple to understand.
Server:
import java.io.*;
import java.net.*;
import javax.sound.sampled.*;
public class Server {
    ByteArrayOutputStream byteOutputStream;
    AudioFormat adFormat;
    TargetDataLine targetDataLine;
    AudioInputStream InputStream;
    SourceDataLine sourceLine;

    /** 8 kHz, 16-bit, mono, signed, big-endian PCM — must match the client. */
    private AudioFormat getAudioFormat() {
        float sampleRate = 8000.0F;
        int sampleSizeInBits = 16;
        int channels = 1;
        boolean signed = true;
        boolean bigEndian = true;
        return new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
    }

    public static void main(String args[]) {
        new Server().runVOIP();
    }

    /** Receives audio datagrams on UDP port 9786 and plays them as they arrive. */
    public void runVOIP() {
        try {
            DatagramSocket serverSocket = new DatagramSocket(9786);
            byte[] receiveData = new byte[4096];
            // BUG FIX: open and start the playback line ONCE, outside the
            // receive loop. The original opened a fresh SourceDataLine and
            // spawned a new PlayThread for every datagram, leaking lines and
            // threads and chopping up the audio.
            adFormat = getAudioFormat();
            DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, adFormat);
            sourceLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
            sourceLine.open(adFormat);
            sourceLine.start();
            while (true) {
                DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);
                serverSocket.receive(receivePacket);
                System.out.println("RECEIVED: " + receivePacket.getAddress().getHostAddress() + " " + receivePacket.getPort());
                // Play only the bytes actually received.
                sourceLine.write(receivePacket.getData(), 0, receivePacket.getLength());
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Retained for source compatibility; no longer used now that playback
    // happens inline in runVOIP().
    class PlayThread extends Thread {
        byte tempBuffer[] = new byte[4096];

        public void run() {
            try {
                int cnt;
                while ((cnt = InputStream.read(tempBuffer, 0, tempBuffer.length)) != -1) {
                    if (cnt > 0) {
                        sourceLine.write(tempBuffer, 0, cnt);
                    }
                }
            } catch (Exception e) {
                System.out.println(e);
                System.exit(0);
            }
        }
    }
}
Client:
import java.io.*;
import java.net.*;
import javax.sound.sampled.*;
public class Client {
    boolean stopaudioCapture = false;
    ByteArrayOutputStream byteOutputStream;
    AudioFormat adFormat;
    TargetDataLine targetDataLine;
    AudioInputStream InputStream;
    SourceDataLine sourceLine;

    public static void main(String args[]) {
        new Client();
    }

    /** Opens the microphone and starts streaming captured audio to the server. */
    public Client() {
        captureAudio();
    }

    /** 8 kHz, 16-bit, mono, signed, big-endian PCM — must match the server. */
    private AudioFormat getAudioFormat() {
        float sampleRate = 8000.0F;
        int sampleSizeInBits = 16;
        int channels = 1;
        boolean signed = true;
        boolean bigEndian = true;
        return new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
    }

    /** Opens the default mic line for the shared format and spawns CaptureThread. */
    private void captureAudio() {
        try {
            adFormat = getAudioFormat();
            DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, adFormat);
            targetDataLine = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
            targetDataLine.open(adFormat);
            targetDataLine.start();
            Thread captureThread = new Thread(new CaptureThread());
            captureThread.start();
        } catch (Exception e) {
            StackTraceElement stackEle[] = e.getStackTrace();
            for (StackTraceElement val : stackEle) {
                System.out.println(val);
            }
            System.exit(0);
        }
    }

    /** Reads mic chunks and sends each one to 127.0.0.1:9786 over UDP. */
    class CaptureThread extends Thread {
        byte tempBuffer[] = new byte[4096];

        @Override // fixed: original had the paste artifact '#Override'
        public void run() {
            stopaudioCapture = false;
            try {
                DatagramSocket clientSocket = new DatagramSocket(8786);
                InetAddress IPAddress = InetAddress.getByName("127.0.0.1");
                int cnt;
                while (!stopaudioCapture) {
                    cnt = targetDataLine.read(tempBuffer, 0, tempBuffer.length);
                    if (cnt > 0) {
                        // BUG FIX: send only the cnt bytes actually captured;
                        // the original always sent the full 4096-byte buffer,
                        // padding short reads with stale data.
                        DatagramPacket sendPacket = new DatagramPacket(tempBuffer, cnt, IPAddress, 9786);
                        clientSocket.send(sendPacket);
                    }
                }
            } catch (Exception e) {
                System.out.println("CaptureThread::run()" + e);
                System.exit(0);
            }
        }
    }
}

Send audio over UDP

So, I was looking for a microphone data sending tut, but I haven't found any.
So I read Oracles tut about line opening and I am able to record the audio to a ByteArrayOutputStream, but now I have 2 problems!
First:
How to play the recorded audio.
Second: if I am recording it to a BAOS, how would I dynamically send it?
I suppose I would send the data array, but would it be too processor-intensive to write to a BAOS every time I receive it, or could I do it differently?
Current code:
import java.io.ByteArrayOutputStream;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.TargetDataLine;
public class MicrophoneRecorder {
    // Set by the stopper thread after ~3 s to end the capture loop.
    static boolean stopped = false;

    /**
     * Records roughly 3 seconds of 8 kHz, 16-bit, mono, big-endian PCM from
     * the default microphone into an in-memory buffer, then cleans up.
     */
    public static void main(String[] args) {
        AudioFormat format = new AudioFormat(8000.0f, 16, 1, true, true);
        TargetDataLine line = null;
        DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
        // BUG FIX: bail out when no matching line exists or the line cannot be
        // opened. The original printed a message and fell through, hitting a
        // NullPointerException at line.getBufferSize().
        if (!AudioSystem.isLineSupported(info)) {
            System.out.println("Not supported!");
            return;
        }
        try {
            line = (TargetDataLine) AudioSystem.getLine(info);
            line.open(format);
        } catch (LineUnavailableException ex) {
            ex.printStackTrace();
            return;
        }
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        int numBytesRead = 0;
        byte[] data = new byte[line.getBufferSize() / 5];
        line.start();
        // Stopper thread: flips `stopped` after 3 seconds.
        new Thread(new Runnable() {
            @Override // fixed: original had the paste artifact '#Override'
            public void run() {
                try {
                    Thread.sleep(3000);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                stopped = true;
            }
        }).start();
        while (!stopped) {
            numBytesRead = line.read(data, 0, data.length);
            out.write(data, 0, numBytesRead);
        }
        // Captured audio now sits in `out`, ready to play back or send.
        // Release the audio resource explicitly instead of relying on JVM exit.
        line.stop();
        line.close();
    }
}
Thanks for any help given.
Sincerely, Roberto Anić Banić
P.S.
Seen this, doesn't work http://javasolution.blogspot.com/2007/04/voice-chat-using-java.html
P.P.S.
Is UDP a good solution, or should I use RTSP?
Here is a sample code that helped me in order to stream and consuming audio via UDP. You can changed the infinite loop in order to limit the duration of the audio stream. Below is the client and server code. The audio input is from microphone.
server:
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;
import javax.sound.sampled.TargetDataLine;
public class Sender {
    /**
     * Captures microphone audio, monitors it on the local speakers, and
     * streams each captured chunk to localhost:5555 over UDP.
     */
    public static void main(String[] args) throws IOException {
        // 44.1 kHz, 16-bit, stereo, 4-byte frames, big-endian signed PCM.
        AudioFormat format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 44100, 16, 2, 4, 44100, true);
        TargetDataLine microphone;
        SourceDataLine speakers;
        try {
            // BUG FIX: removed the dead call to AudioSystem.getTargetDataLine(format)
            // whose result the original immediately overwrote below.
            DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
            microphone = (TargetDataLine) AudioSystem.getLine(info);
            microphone.open(format);
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            int numBytesRead;
            int CHUNK_SIZE = 1024;
            byte[] data = new byte[microphone.getBufferSize() / 5];
            microphone.start();
            DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, format);
            speakers = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
            speakers.open(format);
            speakers.start();
            // Configure the ip and port
            String hostname = "localhost";
            int port = 5555;
            InetAddress address = InetAddress.getByName(hostname);
            DatagramSocket socket = new DatagramSocket();
            for (;;) {
                numBytesRead = microphone.read(data, 0, CHUNK_SIZE);
                // write the mic data to a stream for use later
                out.write(data, 0, numBytesRead);
                // write mic data to stream for immediate playback
                speakers.write(data, 0, numBytesRead);
                // send exactly the captured bytes to the receiver
                DatagramPacket request = new DatagramPacket(data, numBytesRead, address, port);
                socket.send(request);
            }
        } catch (LineUnavailableException e) {
            e.printStackTrace();
        }
    }
}
client:
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.SocketTimeoutException;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;
import javax.sound.sampled.TargetDataLine;
public class UdpClient {
    /**
     * Receives PCM audio datagrams on UDP port 5555 and plays them on the
     * default speaker line (44.1 kHz, 16-bit, stereo, big-endian).
     */
    public static void main(String[] args) throws LineUnavailableException {
        AudioFormat format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 44100, 16, 2, 4, 44100, true);
        // BUG FIX (cleanup): removed the microphone line the original opened
        // and started but never read from — it only held an audio device open.
        SourceDataLine speakers;
        DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, format);
        speakers = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
        speakers.open(format);
        speakers.start();
        // Accumulates everything received, mirroring the original behavior.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try {
            DatagramSocket serverSocket = new DatagramSocket(5555);
            while (true) {
                byte[] buffer = new byte[1024];
                DatagramPacket response = new DatagramPacket(buffer, buffer.length);
                serverSocket.receive(response);
                // BUG FIX: use the datagram's actual length. The original wrote
                // response.getData().length (always the full 1024-byte buffer),
                // so short packets were padded with garbage — a classic cause
                // of noisy/broken streamed audio.
                int len = response.getLength();
                out.write(response.getData(), 0, len);
                speakers.write(response.getData(), 0, len);
                String quote = new String(buffer, 0, response.getLength());
                System.out.println(quote);
                System.out.println();
            }
        } catch (SocketTimeoutException ex) {
            System.out.println("Timeout error: " + ex.getMessage());
            ex.printStackTrace();
        } catch (IOException ex) {
            System.out.println("Client error: " + ex.getMessage());
            ex.printStackTrace();
        }
    }
}
Here's an implementation of sending audio over UDP.
Below is the client and server code. Basically the client code sends captured audio to the server, which plays it on receiving. The client can also play the captured audio.
Client code: VUClient.java
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.io.*;
import java.net.*;
import javax.sound.sampled.*;
// Swing GUI client: Capture streams mic audio to 127.0.0.1:9786 over UDP
// (also keeping a local copy), Stop ends capture, Playback replays the copy.
public class VUClient extends JFrame {
boolean stopaudioCapture = false;
// Local copy of everything captured, used by Playback.
ByteArrayOutputStream byteOutputStream;
AudioFormat adFormat;
TargetDataLine targetDataLine;
// NOTE(review): field named after the java.io.InputStream type — confusing; rename.
AudioInputStream InputStream;
SourceDataLine sourceLine;
// NOTE(review): assigned in the constructor but never used.
Graphics g;
public static void main(String args[]) {
new VUClient();
}
// Builds the three-button UI and wires the capture/stop/playback actions.
public VUClient() {
final JButton capture = new JButton("Capture");
final JButton stop = new JButton("Stop");
final JButton play = new JButton("Playback");
capture.setEnabled(true);
stop.setEnabled(false);
play.setEnabled(false);
capture.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
capture.setEnabled(false);
stop.setEnabled(true);
play.setEnabled(false);
captureAudio();
}
});
getContentPane().add(capture);
stop.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
capture.setEnabled(true);
stop.setEnabled(false);
play.setEnabled(true);
stopaudioCapture = true;
targetDataLine.close();
}
});
getContentPane().add(stop);
play.addActionListener(new ActionListener() {
// NOTE(review): '#Override' is a paste artifact — should be '@Override'.
#Override
public void actionPerformed(ActionEvent e) {
playAudio();
}
});
getContentPane().add(play);
getContentPane().setLayout(new FlowLayout());
setTitle("Capture/Playback Demo");
setDefaultCloseOperation(EXIT_ON_CLOSE);
setSize(400, 100);
getContentPane().setBackground(Color.white);
setVisible(true);
g = (Graphics) this.getGraphics();
}
// Opens the default mic line for the shared format and spawns CaptureThread.
private void captureAudio() {
try {
adFormat = getAudioFormat();
DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, adFormat);
targetDataLine = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
targetDataLine.open(adFormat);
targetDataLine.start();
Thread captureThread = new Thread(new CaptureThread());
captureThread.start();
} catch (Exception e) {
StackTraceElement stackEle[] = e.getStackTrace();
for (StackTraceElement val : stackEle) {
System.out.println(val);
}
System.exit(0);
}
}
// Replays the locally captured bytes through a fresh SourceDataLine.
private void playAudio() {
try {
byte audioData[] = byteOutputStream.toByteArray();
InputStream byteInputStream = new ByteArrayInputStream(audioData);
AudioFormat adFormat = getAudioFormat();
InputStream = new AudioInputStream(byteInputStream, adFormat, audioData.length / adFormat.getFrameSize());
DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, adFormat);
sourceLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
sourceLine.open(adFormat);
sourceLine.start();
Thread playThread = new Thread(new PlayThread());
playThread.start();
} catch (Exception e) {
System.out.println(e);
System.exit(0);
}
}
// 16 kHz, 16-bit, mono, signed, little-endian PCM — must match VUServer.
private AudioFormat getAudioFormat() {
float sampleRate = 16000.0F;
int sampleInbits = 16;
int channels = 1;
boolean signed = true;
boolean bigEndian = false;
return new AudioFormat(sampleRate, sampleInbits, channels, signed, bigEndian);
}
// Reads mic chunks, sends them to the server, and keeps a local copy.
class CaptureThread extends Thread {
byte tempBuffer[] = new byte[10000];
public void run() {
byteOutputStream = new ByteArrayOutputStream();
stopaudioCapture = false;
try {
DatagramSocket clientSocket = new DatagramSocket(8786);
InetAddress IPAddress = InetAddress.getByName("127.0.0.1");
while (!stopaudioCapture) {
int cnt = targetDataLine.read(tempBuffer, 0, tempBuffer.length);
if (cnt > 0) {
// NOTE(review): sends tempBuffer.length (full 10000 bytes) rather than
// cnt — short reads are padded with stale data on the wire.
DatagramPacket sendPacket = new DatagramPacket(tempBuffer, tempBuffer.length, IPAddress, 9786);
clientSocket.send(sendPacket);
byteOutputStream.write(tempBuffer, 0, cnt);
}
}
byteOutputStream.close();
} catch (Exception e) {
System.out.println("CaptureThread::run()" + e);
System.exit(0);
}
}
}
// Streams the AudioInputStream built in playAudio() to the speaker line.
class PlayThread extends Thread {
byte tempBuffer[] = new byte[10000];
public void run() {
try {
int cnt;
while ((cnt = InputStream.read(tempBuffer, 0, tempBuffer.length)) != -1) {
if (cnt > 0) {
sourceLine.write(tempBuffer, 0, cnt);
}
}
// sourceLine.drain();
// sourceLine.close();
} catch (Exception e) {
System.out.println(e);
System.exit(0);
}
}
}
}
Server code: VUServer.java
import java.io.*;
import java.net.*;
import javax.sound.sampled.*;
public class VUServer {
    ByteArrayOutputStream byteOutputStream;
    AudioFormat adFormat;
    TargetDataLine targetDataLine;
    AudioInputStream InputStream;
    SourceDataLine sourceLine;

    /** 16 kHz, 16-bit, mono, signed, little-endian PCM — must match VUClient. */
    private AudioFormat getAudioFormat() {
        float sampleRate = 16000.0F;
        int sampleInbits = 16;
        int channels = 1;
        boolean signed = true;
        boolean bigEndian = false;
        return new AudioFormat(sampleRate, sampleInbits, channels, signed, bigEndian);
    }

    public static void main(String args[]) {
        new VUServer().runVOIP();
    }

    /** Receives audio datagrams on UDP port 9786 and plays them as they arrive. */
    public void runVOIP() {
        try {
            DatagramSocket serverSocket = new DatagramSocket(9786);
            byte[] receiveData = new byte[10000];
            // BUG FIX: open and start the speaker line ONCE, outside the
            // receive loop. The original created a new SourceDataLine and a
            // new PlayThread for every datagram, leaking lines/threads and
            // breaking the audio into stuttering fragments.
            adFormat = getAudioFormat();
            DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, adFormat);
            sourceLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
            sourceLine.open(adFormat);
            sourceLine.start();
            while (true) {
                DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);
                serverSocket.receive(receivePacket);
                System.out.println("RECEIVED: " + receivePacket.getAddress().getHostAddress() + " " + receivePacket.getPort());
                // Play only the bytes actually received.
                sourceLine.write(receivePacket.getData(), 0, receivePacket.getLength());
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Retained for source compatibility; no longer used now that playback
    // happens inline in runVOIP().
    class PlayThread extends Thread {
        byte tempBuffer[] = new byte[10000];

        public void run() {
            try {
                int cnt;
                while ((cnt = InputStream.read(tempBuffer, 0, tempBuffer.length)) != -1) {
                    if (cnt > 0) {
                        sourceLine.write(tempBuffer, 0, cnt);
                    }
                }
                // sourceLine.drain();
                // sourceLine.close();
            } catch (Exception e) {
                System.out.println(e);
                System.exit(0);
            }
        }
    }
}

Java Sound Recorder, doesn't work

This code does not work. I added some System.out.println("Start capturing...3"); statements to understand where the bug is, and I saw that the bug is in the line.open(format); command. Why am I getting a bug?
import javax.sound.sampled.*;
import java.io.*;
public class JavaSoundRecorder {
    // record duration, in milliseconds
    static final long RECORD_TIME = 4000;
    // Output WAV file; AudioSystem.write streams into it until the line closes.
    File wavFile = new File("C:\\Users\\kecia\\R\\RecordAudio.wav");
    AudioFileFormat.Type fileType = AudioFileFormat.Type.WAVE;
    // The capture line; null until start() successfully opens it.
    TargetDataLine line;

    /** 16 kHz, 8-bit, stereo, signed, big-endian PCM. */
    AudioFormat getAudioFormat()
    {
        float sampleRate = 16000;
        //8000,11025,16000,22050,44100
        int sampleSizeInBits = 8;
        //8,16
        int channels = 2;
        //1,2
        boolean signed = true;
        //true,false
        boolean bigEndian = true;
        //true,false
        return new AudioFormat(
                sampleRate,
                sampleSizeInBits,
                channels,
                signed,
                bigEndian);
    }

    /**
     * Opens the capture line and records to the WAV file. Blocks inside
     * AudioSystem.write() until finish() stops and closes the line.
     * NOTE(review): if line.open(format) throws here, it is usually because
     * no mixer supports this exact format — try 16-bit or a different rate.
     */
    void start() {
        try {
            System.out.println("Start capturing...1");
            AudioFormat format = getAudioFormat();
            DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
            System.out.println("Start capturing...2");
            // checks if system supports the data line
            if (!AudioSystem.isLineSupported(info)) {
                System.out.println("Line not supported");
                System.exit(0);
            }
            System.out.println("Start capturing...3");
            line = (TargetDataLine) AudioSystem.getLine(info);
            System.out.println("Start capturing...4");
            line.open(format);
            System.out.println("Start capturing...5");
            line.start(); // start capturing
            System.out.println("Start capturing...6");
            AudioInputStream ais = new AudioInputStream(line);
            System.out.println("Start recording...");
            // start recording; returns when finish() closes the line
            AudioSystem.write(ais, fileType, wavFile);
        } catch (LineUnavailableException ex) {
            ex.printStackTrace();
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
    }

    /** Stops the recording; safe to call even if the line never opened. */
    void finish() {
        // BUG FIX: the stopper thread can fire while `line` is still null
        // (e.g. open() failed or is still in progress), which NPE'd here.
        if (line != null) {
            line.stop();
            line.close();
        }
        System.out.println("END");
    }

    public static void main(String[] args) {
        final JavaSoundRecorder recorder = new JavaSoundRecorder();
        // creates a new thread that waits for a specified
        // of time before stopping
        Thread stopper = new Thread(new Runnable() {
            public void run() {
                try {
                    Thread.sleep(RECORD_TIME);
                } catch (InterruptedException ex) {
                    ex.printStackTrace();
                }
                recorder.finish();
            }
        });
        stopper.start();
        recorder.start();
    }
}
See my response to a similar question here:
Sound recording not working in java
That works perfectly for me. It captures sound from the mic and saves it into a file.
It's also easy to use.

Java Sound not very clear when streaming

Hi I've been writing a chat client and wanted to test the Java Sound API. I've managed to get sound working from the mic to the speakers on different computers via UDP. However the sound isn't very clear. To check whether this was because of lost packets etc in the UDP protocol I wrote a small test for the sound to go to the speakers on the same machine as the mic. The sound isn't any different which makes me think I have some settings wrong for reading or writing the sound. Can anybody have a look at my code and tell me how to make the sound clearer?
package test;
import java.awt.*;
import java.awt.event.*;
import java.io.*;
import javax.sound.sampled.*;
import javax.swing.*;
@SuppressWarnings("serial")
public class VoiceTest extends JFrame {

    private JButton chat = new JButton("Voice");
    private GUIListener gl = new GUIListener();
    private IncomingSoundListener isl = new IncomingSoundListener();
    private OutgoingSoundListener osl = new OutgoingSoundListener();

    // These flags are read and written from different threads, so they must be
    // volatile or the worker threads may never observe an update.
    private volatile boolean inVoice = true;
    private volatile boolean outVoice = false;

    AudioFormat format = getAudioFormat();

    // Hand-off buffer between the mic thread (writer) and the speaker thread
    // (reader). ByteArrayOutputStream is NOT thread-safe, so every access is
    // synchronized on the stream itself.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();

    /** Builds the test window and starts the playback (incoming-sound) thread. */
    public VoiceTest() throws IOException {
        super("Test");
        //new Thread(tl).start();
        new Thread(isl).start();
        Container contentPane = this.getContentPane();
        this.setSize(200, 100);
        this.setLocationRelativeTo(null);
        this.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        chat.setBounds(10, 10, 80, 30);
        chat.addActionListener(gl);
        contentPane.add(chat);
        this.setVisible(true);
    }

    /**
     * Capture/playback format: 8 kHz, 16-bit, mono, signed, little-endian PCM.
     * Raising the sample rate (e.g. to 16000) improves clarity at the cost of
     * bandwidth.
     */
    private AudioFormat getAudioFormat() {
        float sampleRate = 8000.0F;
        int sampleSizeBits = 16;
        int channels = 1;
        boolean signed = true;
        boolean bigEndian = false;
        //AudioFormat.Encoding.ULAW
        return new AudioFormat(sampleRate, sampleSizeBits, channels, signed, bigEndian);
    }

    /** Toggles the mic-capture thread on and off via the single Voice/Mute button. */
    class GUIListener implements ActionListener {
        @Override
        public void actionPerformed(ActionEvent actionevent) {
            String action = actionevent.getActionCommand();
            switch (action) {
                case "Mute":
                    outVoice = false;
                    chat.setText("Voice");
                    break;
                case "Voice":
                    new Thread(osl).start();
                    outVoice = true;
                    chat.setText("Mute");
                    break;
            }
        }
    }

    /** Drains the shared buffer and plays the captured PCM on the speaker line. */
    class IncomingSoundListener implements Runnable {
        @Override
        public void run() {
            try {
                System.out.println("Listening for incoming sound");
                DataLine.Info speakerInfo = new DataLine.Info(SourceDataLine.class, format);
                SourceDataLine speaker = (SourceDataLine) AudioSystem.getLine(speakerInfo);
                speaker.open(format);
                speaker.start();
                int frameSize = format.getFrameSize();
                while (inVoice) {
                    byte[] data;
                    synchronized (baos) {
                        data = baos.toByteArray();
                        baos.reset();
                    }
                    if (data.length == 0) {
                        // Nothing captured yet: sleep briefly instead of
                        // busy-spinning, which starved the mic thread and
                        // produced choppy audio.
                        Thread.sleep(10);
                        continue;
                    }
                    // Write only whole frames; a partial frame would shift the
                    // byte alignment of every following 16-bit sample (noise).
                    // The original code wrapped the bytes in an AudioInputStream
                    // whose frame count was wrongly given in BYTES (data.length
                    // instead of data.length / frameSize), corrupting playback.
                    int usable = data.length - (data.length % frameSize);
                    if (usable > 0) {
                        speaker.write(data, 0, usable);
                    }
                }
                speaker.drain();
                speaker.close();
                System.out.println("Stopped listening for incoming sound");
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /** Reads the mic and appends the captured bytes to the shared buffer. */
    class OutgoingSoundListener implements Runnable {
        @Override
        public void run() {
            try {
                System.out.println("Listening for outgoing sound");
                DataLine.Info micInfo = new DataLine.Info(TargetDataLine.class, format);
                TargetDataLine mic = (TargetDataLine) AudioSystem.getLine(micInfo);
                mic.open(format);
                byte tmpBuff[] = new byte[mic.getBufferSize() / 5];
                mic.start();
                while (outVoice) {
                    int count = mic.read(tmpBuff, 0, tmpBuff.length);
                    if (count > 0) {
                        synchronized (baos) {
                            baos.write(tmpBuff, 0, count);
                        }
                    }
                }
                mic.drain();
                mic.close();
                System.out.println("Stopped listening for outgoing sound");
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * @param args unused
     * @throws IOException propagated from the constructor
     */
    public static void main(String[] args) throws IOException {
        new VoiceTest();
    }
}
You should try higher sampling rates and find an acceptable quality-to-size ratio for your audio stream.
Checking the AudioFormat reference is also a good start for getting the idea.
Try changing local variables in your getAudioFormat() method to this:
private AudioFormat getAudioFormat() {
float sampleRate = 16000.0F;
int sampleSizeBits = 16;
int channels = 1;
...
}
This is equivalent to a 256 kbps mono audio stream (16000 samples/s × 16 bits × 1 channel).

Categories