Why does the applet shut the server down? - java

I am implementing a recording system using a Java applet for my project, but I am facing one problem when closing the applet: closing the applet also shuts the server down. Is there any way to resolve this issue? I want to continue working on my project after completing the recording part, but after recording, when I close the applet, it stops the server as well, so I have to restart the server again.
Any help or suggestions?
Code:
public class Main extends JPanel implements ActionListener {
public Main() {
setLayout(new BorderLayout());
EmptyBorder eb = new EmptyBorder(5, 5, 5, 5);
SoftBevelBorder sbb = new SoftBevelBorder(SoftBevelBorder.LOWERED);
setBorder(new EmptyBorder(5, 5, 5, 5));
JPanel p1 = new JPanel();
// p1.setLayout(new BoxLayout(p1, BoxLayout.X_AXIS));
JPanel p2 = new JPanel();
p2.setBorder(sbb);
p2.setLayout(new BoxLayout(p2, BoxLayout.X_AXIS));
JPanel buttonsPanel = new JPanel();
buttonsPanel.setBorder(new EmptyBorder(10, 0, 5, 0));
radioGroup1 = new CheckboxGroup();
radio1 = new Checkbox("Record : ", radioGroup1,true);
p2.add(radio1);
playB = addButton("Play", buttonsPanel, false);
captB = addButton("Record", buttonsPanel, true);
closeA = addButton("Close", buttonsPanel, true);
p2.add(buttonsPanel);
p1.add(p2);
add(p1);
}
public void open() {
}
public void close() {
if (playback.thread != null) {
playB.doClick(0);
}
if (capture.thread != null) {
captB.doClick(0);
}
}
private JButton addButton(String name, JPanel p, boolean state) {
JButton b = new JButton(name);
b.addActionListener(this);
b.setEnabled(state);
p.add(b);
return b;
}
public void actionPerformed(ActionEvent e) {
Object obj = e.getSource();
if (obj.equals(playB)) {
if (playB.getText().startsWith("Play")) {
playback.start();
captB.setEnabled(false);
playB.setText("Stop");
} else {
playback.stop();
captB.setEnabled(true);
playB.setText("Play");
}
} else if (obj.equals(captB)) {
if (captB.getText().startsWith("Record")) {
capture.start();
playB.setEnabled(false);
captB.setText("Stop");
} else {
capture.stop();
playB.setEnabled(true);
}
}
else if(obj.equals(closeA)) {
System.exit(0);
}
}
public class Playback implements Runnable {
SourceDataLine line;
Thread thread;
public void start() {
errStr = null;
thread = new Thread(this);
thread.setName("Playback");
thread.start();
}
public void stop() {
thread = null;
}
private void shutDown(String message) {
if ((errStr = message) != null) {
System.err.println(errStr);
}
if (thread != null) {
thread = null;
captB.setEnabled(true);
playB.setText("Play");
}
}
public void run() {
AudioFormat format = getAudioFormat();
try {
audioInputStream = AudioSystem.getAudioInputStream(wavFile);
} catch (UnsupportedAudioFileException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
} catch (IOException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
AudioInputStream playbackInputStream = AudioSystem.getAudioInputStream(format,
audioInputStream);
if (playbackInputStream == null) {
shutDown("Unable to convert stream of format " + audioInputStream + " to format " + format);
return;
}
// get and open the source data line for playback.
try {
line = (SourceDataLine) AudioSystem.getLine(info);
line.open(format, bufSize);
} catch (LineUnavailableException ex) {
shutDown("Unable to open the line: " + ex);
return;
}
// play back the captured audio data
int frameSizeInBytes = format.getFrameSize();
int bufferLengthInFrames = line.getBufferSize() / 8;
int bufferLengthInBytes = bufferLengthInFrames * frameSizeInBytes;
byte[] data = new byte[bufferLengthInBytes];
int numBytesRead = 0;
// start the source data line
line.start();
while (thread != null) {
try {
if ((numBytesRead = playbackInputStream.read(data)) == -1) {
break;
}
int numBytesRemaining = numBytesRead;
while (numBytesRemaining > 0) {
numBytesRemaining -= line.write(data, 0, numBytesRemaining);
}
} catch (Exception e) {
shutDown("Error during playback: " + e);
break;
}
}
// we reached the end of the stream.
// let the data play out, then
// stop and close the line.
if (thread != null) {
line.drain();
}
line.stop();
line.close();
line = null;
shutDown(null);
}
} // End class Playback
/**
* Reads data from the input channel and writes to the output stream
*/
class Capture implements Runnable {
TargetDataLine line;
Thread thread;
public void start() {
errStr = null;
thread = new Thread(this);
thread.setName("Capture");
thread.start();
}
public void stop() {
thread = null;
line.close();
//thread.stop();
}
private void shutDown(String message) {
if ((errStr = message) != null && thread != null) {
thread = null;
playB.setEnabled(true);
captB.setText("Record");
System.err.println(errStr);
}
}
public void run() {
duration = 0;
audioInputStream = null;
Playback pb = new Playback();
AudioFormat format = pb.getAudioFormat();
DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
// get and open the target data line for capture.
try {
line = (TargetDataLine) AudioSystem.getLine(info);
// line.open(format, line.getBufferSize());
line.open(format);
line.start();
// saving audio file
AudioInputStream ais = new AudioInputStream(line);
// start recording
AudioSystem.write(ais, fileType, wavFile);
} catch (LineUnavailableException ex) {
shutDown("Unable to open the line: " + ex);
return;
} catch (SecurityException ex) {
shutDown(ex.toString());
//JavaSound.showInfoDialog();
return;
} catch (Exception ex) {
shutDown(ex.toString());
return;
}
// we reached the end of the stream.
// stop and close the line.
line.stop();
line.close();
line = null;
// stop and close the output stream
try {
out.flush();
out.close();
} catch (IOException ex) {
ex.printStackTrace();
}
// load bytes into the audio input stream for playback
byte audioBytes[] = out.toByteArray();
ByteArrayInputStream bais = new ByteArrayInputStream(audioBytes);
audioInputStream = new AudioInputStream(bais, format, audioBytes.length / frameSizeInBytes);
long milliseconds = (long) ((audioInputStream.getFrameLength() * 1000) / format
.getFrameRate());
duration = milliseconds / 1000.0;
try {
audioInputStream.reset();
} catch (Exception ex) {
ex.printStackTrace();
return;
}
}
} // End class Capture
}

This code doesn't look right:
else if(obj.equals(closeA)) {
System.exit(0);
}
This will cause the JVM to shut down. I would have thought that you just want the applet to be in a stopped state.
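A minimal sketch of what the Close handler could do instead, assuming the Main panel from the question above (close(), closeA and the capture/playback threads are the question's own members); this is an illustration, not a tested drop-in:

import java.awt.Window;
import javax.swing.SwingUtilities;

// Hypothetical helper for the Main panel: call this from the Close branch
// of actionPerformed() instead of System.exit(0).
private void closeRecorder() {
    close();                                   // stops the playback/capture threads (see close() above)
    Window w = SwingUtilities.getWindowAncestor(this);
    if (w != null) {
        w.dispose();                           // disposes only the recorder window/frame
    }
    // No System.exit(0): a server running in the same JVM keeps running.
}

If the panel is hosted directly inside the applet, simply stopping the capture/playback and hiding the panel (setVisible(false)) achieves the same effect without touching the JVM.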

Related

Java Voice Chat Error

I am making a voice chat program. I have two servers, one for voice and one for messages. When I connect two people I get the error below. I have attached the client code, the ClientAudio code, and the client receive code. Thank you in advance.
java.io.StreamCorruptedException: invalid type code: 00
at java.io.ObjectInputStream$BlockDataInputStream.readBlockHeader(ObjectInputStream.java:2508)
at java.io.ObjectInputStream$BlockDataInputStream.refill(ObjectInputStream.java:2543)
at java.io.ObjectInputStream$BlockDataInputStream.read(ObjectInputStream.java:2702)
at java.io.ObjectInputStream.read(ObjectInputStream.java:865)
at client.chat$ClientAudioRec.run(chat.java:388)
at java.lang.Thread.run(Thread.java:745)
The error is thrown on:
try {
bytesRead = ((ObjectInput) i2).read(inSound, 0, inSound.length);
} catch (Exception e) {
e.printStackTrace();
}
Code
public class Client implements Runnable { // CLIENT
private String msg;
public void run() {
try {
s1 = new Socket(ipAddress, port);
s2 = new Socket(ipAddress, 1210);
o1 = new ObjectOutputStream(s1.getOutputStream());
o1.writeObject(name);
serverListModel.addElement(name);
i1 = new ObjectInputStream(s1.getInputStream());
Thread voice = new Thread(new ClientAudio());
voice.start();
while(true) {
msg = (String) i1.readObject();
String[] namePart = msg.split("-");
if(namePart[0].equals("AddName") && !namePart[1].equals(name) && !serverListModel.contains(namePart[1])) {
serverListModel.addElement(namePart[1]);
}
if(namePart[0].equals("RemoveName") && !namePart[1].equals(name)) {
serverListModel.removeElement(namePart[1]);
}
if(!msg.equals(null) && !namePart[0].equals("AddName") && !namePart[0].equals("RemoveName")) {
chatWindow.append(msg+"\n");
}
}
} catch (IOException | ClassNotFoundException e) {
chatWindow.append("Server Closed");
e.printStackTrace();
try {
s1.close();
} catch (IOException e1) {
e1.printStackTrace();
}
mainWindow(true);
}
}
}
public class ClientAudio implements Runnable { // CLIENT AUDIO
public void run() {
try {
o2 = new ObjectOutputStream(s2.getOutputStream());
System.out.println("AUDIO");
int bytesRead = 0;
byte[] soundData = new byte[1];
Thread car = new Thread(new ClientAudioRec());
car.start();
while(true) {
bytesRead = mic.read(soundData, 0, bytesRead);
if(bytesRead >= 0) {
o2.write(soundData, 0, bytesRead);
}
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
public class ClientAudioRec implements Runnable { // CLIENT AUDIO REC
public void run() {
i2 = new ObjectInputStream(s2.getInputStream());
System.out.println("REC");
SourceDataLine inSpeaker = null;
DataLine.Info info = new DataLine.Info(SourceDataLine.class, af);
try {
inSpeaker = (SourceDataLine)AudioSystem.getLine(info);
inSpeaker.open(af);
} catch (LineUnavailableException e1) {
System.out.println("ERROR 22");
e1.printStackTrace();
}
int bytesRead = 0;
byte[] inSound = new byte[100];
inSpeaker.start();
while(bytesRead != -1)
{
try{
bytesRead = ((ObjectInput) i2).read(inSound, 0, inSound.length);
} catch (Exception e){
e.printStackTrace();
}
if(bytesRead >= 0)
{
inSpeaker.write(inSound, 0, bytesRead);
}
}
}
}
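The StreamCorruptedException generally means the bytes being read no longer line up with the object-stream framing the ObjectInputStream expects. One simplification worth trying (a sketch only, with arbitrary buffer sizes and variable names borrowed from the code above; not verified against the full program) is to send the raw PCM over the audio socket with plain streams instead of object streams, on both the sending and receiving side:

// Hypothetical sender side: stream raw PCM from the microphone over s2
// without ObjectOutputStream, so there is no serialization framing to corrupt.
OutputStream audioOut = new BufferedOutputStream(s2.getOutputStream());
byte[] soundData = new byte[1024];                       // larger than the 1-byte buffer above
int n;
while ((n = mic.read(soundData, 0, soundData.length)) > 0) {
    audioOut.write(soundData, 0, n);
    audioOut.flush();
}

// Hypothetical receiver side: read raw bytes and feed them to the speaker line.
InputStream audioIn = new BufferedInputStream(s2.getInputStream());
byte[] inSound = new byte[1024];
int read;
while ((read = audioIn.read(inSound, 0, inSound.length)) != -1) {
    inSpeaker.write(inSound, 0, read);
}

Any server code that relays the audio socket would need the same change, since it must also stop wrapping that socket in object streams.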

How can I print sound as a byte array (or an int array)?

I'm making anti-phase sound in Java (the anti-phase signal is the reflected wave: the x-coordinate is unchanged, but the y-coordinate is flipped upside down).
Before reflecting the sound wave, I have to get a byte array (or int array) from the sound.
I get the sound from my laptop's microphone.
Here is my code (I found the original code, which records sound to a file, on the web and modified it a little):
public class NoiseController extends Thread{
private TargetDataLine line;
private AudioInputStream audioInputStream;
public NoiseController(TargetDataLine line) {
this.line = line;
this.audioInputStream = new AudioInputStream(line);
}
public void start() {
line.start();
super.start();
}
public void stopRecording() {
line.stop();
line.close();
}
public void run() {
try {
int packet;
while((packet = audioInputStream.read()) != -1)
System.out.println(packet);
}
catch(IOException ioe) {
ioe.getStackTrace();
}
}
public static void main(String[] args) {
AudioFormat audioFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 44100.0F, 16, 2, 4, 44100.0F, false);
DataLine.Info info = new DataLine.Info(TargetDataLine.class, audioFormat);
TargetDataLine targetDataLine = null;
try {
targetDataLine = (TargetDataLine)AudioSystem.getLine(info);
targetDataLine.open(audioFormat);
}
catch(LineUnavailableException lue) {
out("unable to get a recording line");
lue.printStackTrace();
System.exit(-1);
}
AudioFileFormat.Type targetType = AudioFileFormat.Type.WAVE;
NoiseController recorder = new NoiseController(targetDataLine);
System.out.println(targetDataLine);
System.out.println(targetType);
out("Press ENTER to start the recording.");
try {
System.in.read();
}
catch(IOException ioe) {
ioe.printStackTrace();
}
recorder.start();
out("Recording...");
out("Press ENTER to stop the recording.");
try {
System.in.read();
System.in.read();
}
catch(IOException ioe) {
ioe.getStackTrace();
}
recorder.stopRecording();
out("Recording stopped.");
}
private static void out(String msg) {
System.out.println(msg);
}
}
However, the console doesn't print anything while recording.
It shows just:
com.sun.media.sound.DirectAudioDevice$DirectTDL@25154f
WAVE
Press ENTER to start the recording.
Recording...
Press ENTER to stop the recording.
Recording stopped.
If I change run() to use AudioSystem.write(stream, fileType, out);
instead of
int packet;
while((packet = audioInputStream.read()) != -1)
System.out.println(packet);
the program saves a WAV file.
What is wrong in my program?
You are not printing the exception, as Uwe Allner said.
I've also tried to correct it, and I think the result should look like this:
public class NoiseController extends Thread {
private final TargetDataLine line;
private final AudioInputStream audioInputStream;
public NoiseController(final TargetDataLine line) {
this.line = line;
this.audioInputStream = new AudioInputStream(line);
}
@Override
public void start() {
line.start();
super.start();
}
public void stopRecording() {
line.stop();
line.close();
try {
audioInputStream.close();
} catch (final IOException e) {
e.printStackTrace();
}
}
@Override
public void run() {
try {
final int bufferSize = 1024;
int read = 0;
final byte[] frame = new byte[bufferSize];
while ((read = audioInputStream.read(frame)) != -1 && line.isOpen()) {
// only the first read bytes are valid
System.out.println(Arrays.toString(frame));
}
} catch (final IOException ioe) {
ioe.printStackTrace();
}
}
public static void main(final String[] args) {
final AudioFormat audioFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 44100.0F, 16, 2, 4, 44100.0F, false);
final DataLine.Info info = new DataLine.Info(TargetDataLine.class, audioFormat);
TargetDataLine targetDataLine = null;
try {
targetDataLine = (TargetDataLine) AudioSystem.getLine(info);
targetDataLine.open(audioFormat);
} catch (final LineUnavailableException lue) {
out("unable to get a recording line");
lue.printStackTrace();
System.exit(-1);
}
final AudioFileFormat.Type targetType = AudioFileFormat.Type.WAVE;
final NoiseController recorder = new NoiseController(targetDataLine);
System.out.println(targetDataLine);
System.out.println(targetType);
out("Press ENTER to start the recording.");
try {
System.in.read();
} catch (final IOException ioe) {
ioe.printStackTrace();
}
recorder.start();
out("Recording...");
out("Press ENTER to stop the recording.");
try {
System.in.read();
System.in.read();
} catch (final IOException ioe) {
ioe.printStackTrace();
}
recorder.stopRecording();
out("Recording stopped.");
}
private static void out(final String msg) {
System.out.println(msg);
}
}
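Since the original goal was an anti-phase (inverted) signal, here is a minimal sketch of the inversion step, assuming the 16-bit signed, little-endian PCM format configured above (a hypothetical helper, not part of the corrected class):

// Invert 16-bit signed little-endian PCM samples in place.
// Each sample is two bytes; negating the sample value flips the waveform.
static void invertPcm16Le(byte[] frame, int validBytes) {
    for (int i = 0; i + 1 < validBytes; i += 2) {
        int lo = frame[i] & 0xFF;                       // low byte, unsigned
        int hi = frame[i + 1];                          // high byte keeps its sign
        short sample = (short) ((hi << 8) | lo);
        short inverted = (short) (sample == Short.MIN_VALUE ? Short.MAX_VALUE : -sample);
        frame[i] = (byte) (inverted & 0xFF);
        frame[i + 1] = (byte) ((inverted >> 8) & 0xFF);
    }
}

Calling invertPcm16Le(frame, read) inside the run() loop above, before printing or writing the buffer, would give the flipped samples; only the first read bytes of the buffer are valid audio.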

I am unable to record voice using the following applet program in Java

I GOT THIS CODE FROM HERE
I am using the following program to record voice, but when I click on Record, the voice is not recorded. I am not getting any errors. What might be the problem?
//imports
//Main class
public class AudioApplet extends JApplet implements ActionListener, ChangeListener, ItemListener {
//declarations
public void init()
{
setLayout(null);
JLabel recorder = new JLabel("Recorder");
JLabel fileName = new JLabel("Please Enter File Name");
JLabel server = new JLabel("Listen From Server");
JLabel status = new JLabel("Status...");
fnametxt = new JTextField("FileName");
servercombo = new JComboBox();
statustxt = new JTextField("Check your status here...");
record = new JButton("Record");
play = new JButton("Play");
pause = new JButton("Pause");
stop = new JButton("Stop");
send = new JButton("Upload");
listen = new JButton("Listen");
save = new JButton("Save");
progress = new JSlider(0, audioLength, 0);
time = new JLabel("0:00");
mute = new JToggleButton("Mute");
vol1 = new JLabel("Volume -");
vol2 = new JLabel("+");
volslider = new JSlider(0,100);
volslider.setToolTipText("Volume");
volslider.setPaintTicks(true);
volslider.setMinorTickSpacing(10);
//properties related to size
add(recorder);
add(record);
add(play);
add(pause);
add(stop);
add(save);
add(fileName);
add(fnametxt);
add(send);
add(server);
add(servercombo);
add(listen);
add(status);
add(statustxt);
add(progress);
add(time);
add(vol1);
add(volslider);
add(vol2);
add(mute);
record.setEnabled(true);
pause.setEnabled(true);
play.setEnabled(true);
stop.setEnabled(true);
save.setEnabled(true);
send.setEnabled(true);
listen.setEnabled(true);
record.addActionListener(this);
play.addActionListener(this);
pause.addActionListener(this);
stop.addActionListener(this);
save.addActionListener(this);
send.addActionListener(this);
listen.addActionListener(this);
mute.addActionListener(this);
progress.addChangeListener(this);
volslider.addChangeListener(this);
servercombo.addItemListener(this);
}//End of init method
//***************************************************/
//******* StateChanged method for ChangeListener*****/
//***************************************************/
public void stateChanged(ChangeEvent e) {
if (e.getSource()==volslider) {
volumeControl();
}else {
int value = progress.getValue();
time.setText(value / 1000 + "." + (value % 1000) / 100);
}
}
public void itemStateChanged(ItemEvent ie) {
msg = " Listening from server [buffering]...";
statustxt.setText(msg);
listenAudio();
}
//***************************************************/
//***** ActionPerformed method for ActionListener****/
//***************************************************/
public void actionPerformed(ActionEvent e) {
if(e.getSource()==record){
msg = " Capturing audio from mic.....";
statustxt.setText(msg);
record.setEnabled(false);
pause.setEnabled(true);
stop.setEnabled(true);
play.setEnabled(false);
save.setEnabled(true);
if(paused)
{
resumeRecord();
}
else
{
recordAudio();
}
}
else if (e.getSource()==play) {
msg = " Playing recorded audio.....";
statustxt.setText(msg);
stop.setEnabled(true);
if(first)
{
playAudio();
}
else
{
resumePlay();
}
}
else if (e.getSource()==pause) {
msg = "Paused....";
statustxt.setText(msg);
record.setEnabled(true);
pause.setEnabled(true);
pauseAudio();
first=false;
}
else if (e.getSource()==stop) {
msg = " Action stopped by user.....";
statustxt.setText(msg);
progress.setValue(0);
record.setEnabled(true);
stop.setEnabled(false);
play.setEnabled(true);
running = false;
stopAudio();
}
else if (e.getSource()==save) {
msg = " Saving file to user's System....";
statustxt.setText(msg);
saveAudio();
}
else if (e.getSource()==send) {
msg = " Sending recorded file to server...";
statustxt.setText(msg);
uploadAudio();
}
else if(e.getSource()==listen){
msg = " Listening from server [buffering]...";
statustxt.setText(msg);
//code for listen audio
}
else {
muteControl();
}
}
//******************************************/
//************** Method Declarations ****/
//******************************************/
private void recordAudio() {
first=true;
try {
final AudioFileFormat.Type fileType = AudioFileFormat.Type.AU;
final AudioFormat format = getFormat();
DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
line = (TargetDataLine)AudioSystem.getLine(info);
line.open(format);
line.start();
Runnable runner = new Runnable() {
int bufferSize = (int) format.getSampleRate()* format.getFrameSize();
byte buffer[] = new byte[bufferSize];
public void run() {
out = new ByteArrayOutputStream();
running = true;
try {
while (running) {
int count = line.read(buffer, 0, buffer.length);
if (count > 0) {
out.write(buffer, 0, count);
InputStream input = new ByteArrayInputStream(buffer);
final AudioInputStream ais = new AudioInputStream(input, format, buffer.length /format.getFrameSize());
}
}
out.close();
}catch (IOException e) {
System.exit(-1);
}
}
};
Thread recordThread = new Thread(runner);
recordThread.start();
}catch(LineUnavailableException e) {
System.err.println("Line Unavailable:"+ e);
e.printStackTrace();
System.exit(-2);
}
catch (Exception e) {
System.out.println("Direct Upload Error");
e.printStackTrace();
}
}//End of RecordAudio method
private void playAudio() {
try{
byte audio[] = out.toByteArray();
InputStream input = new ByteArrayInputStream(audio);
final AudioFormat format = getFormat();
final AudioInputStream ais = new AudioInputStream(input, format, audio.length /format.getFrameSize());
DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
sline = (SourceDataLine)AudioSystem.getLine(info);
sline.open(format);
sline.start();
Float audioLen = (audio.length / format.getFrameSize()) * format.getFrameRate();
Runnable runner = new Runnable() {
int bufferSize = (int) format.getSampleRate() * format.getFrameSize();
byte buffer[] = new byte[bufferSize];
public void run() {
try {
int count;
synchronized(lock){
while((count = ais.read( buffer, 0, buffer.length)) != -1) {
while(paused) {
if(sline.isRunning()) {
sline.stop();
}
try{
lock.wait();
}
catch(InterruptedException e) {
}
}
if(!sline.isRunning()) {
sline.start();
}
if(count > 0) {
sline.write(buffer, 0, count);
}
}
}
first=true;
sline.drain();
sline.close();
}catch(IOException e) {
System.err.println("I/O problems:" + e);
System.exit(-3);
}
}
};
Thread playThread = new Thread(runner);
playThread.start();
}catch(LineUnavailableException e) {
System.exit(-4);
}
}//End of PlayAudio method
private void resumeRecord(){
synchronized(lock) {
paused = false;
lock.notifyAll();
first = true;
}
}//End of ResumeRecord method
private void stopAudio() {
if (sline != null) {
sline.stop();
sline.close();
}else {
line.stop();
line.close();
}
}//End of StopAudio method
private void resumePlay(){
synchronized(lock) {
paused = false;
lock.notifyAll();
System.out.println("inside resumeplay method");
}
}//End of ResumePlay method
private void pauseAudio(){
paused = true;
}
private void saveAudio() {
Thread thread = new saveThread();
thread.start();
}
private void uploadAudio() {
Thread th= new uploadThread();
th.start();
}
private void listenAudio() {
Thread thread = new listenThread();
thread.start();
}
private AudioFormat getFormat() {
Encoding encoding = AudioFormat.Encoding.PCM_SIGNED;
float sampleRate = 44100.0F;
int sampleSizeInBits = 16;
int channels = 2;
int frameSize = 4;
float frameRate = 44100.0F;
boolean bigEndian = false;
return new AudioFormat(encoding, sampleRate, sampleSizeInBits, channels, frameSize, frameRate, bigEndian);
}//End of getAudioFormat method
class saveThread extends Thread {
public void run(){
AudioFileFormat.Type fileType = AudioFileFormat.Type.WAVE;
FileDialog fd = new FileDialog(new Frame(), "Save as WAVE", FileDialog.SAVE);
fd.setFile("*.wav");
fd.setVisible(true);
String name = fd.getDirectory() + fd.getFile();
File file = new File(name);
try{
byte audio[] = out.toByteArray();
InputStream input = new ByteArrayInputStream(audio);
final AudioFormat format = getFormat();
final AudioInputStream ais = new AudioInputStream(input, format, audio.length /format.getFrameSize());
AudioSystem.write(ais,fileType,file);
}catch (Exception e){
e.printStackTrace();
}
}
}//End of inner class saveThread
class uploadThread extends Thread{
public void run(){
AudioFileFormat.Type fileType = AudioFileFormat.Type.AU;
try{
line.flush();
line.close();
}
catch(Exception e){
e.printStackTrace();
System.err.println("Error during upload");
}
}
}//End of inner class uploadThread
class listenThread extends Thread{
public void run() {
try {
URL upload=new URL("http://localhost:8080/TapasApplet/upload");
HttpURLConnection conn = (HttpURLConnection) upload.openConnection();
conn.setRequestMethod("POST");
conn.setDoOutput(true);
conn.setDoInput(true);
conn.setUseCaches(false);
conn.setDefaultUseCaches(false);
conn.setChunkedStreamingMode(1000);
conn.setRequestProperty("Content-Type", "application/octet-stream");
InputStream is = conn.getInputStream();
BufferedReader br = new BufferedReader(new InputStreamReader(is));
String serfile = br.readLine();
while(line != null){
//un complete code here
serfile=br.readLine();
}
} catch (IOException e) {
System.err.println("Error in UserThread run() method");
e.printStackTrace();
}
}
}
public void volumeControl() {
try {
if(AudioSystem.isLineSupported(Port.Info.LINE_OUT))
{
lineIn = (Port)AudioSystem.getLine(Port.Info.LINE_OUT);
lineIn.open();
}
else if(AudioSystem.isLineSupported(Port.Info.HEADPHONE))
{
lineIn = (Port)AudioSystem.getLine(Port.Info.HEADPHONE);
lineIn.open();
}
else if(AudioSystem.isLineSupported(Port.Info.SPEAKER))
{
lineIn = (Port)AudioSystem.getLine(Port.Info.SPEAKER);
lineIn.open();
}
else
{
System.out.println("Unable to get Output Port");
return;
}
final FloatControl controlIn = (FloatControl)lineIn.getControl(FloatControl.Type.VOLUME);
final float volume = 100 * (controlIn.getValue() / controlIn.getMaximum());
System.out.println(volume);
int sliderValue=volslider.getValue();
controlIn.setValue((float)sliderValue / 100);
} catch (Exception e) {
System.out.println(" VOLUME control: exception = " + e);
}
}//End of volumeControl method
public void muteControl() {
BooleanControl mControl;
try {
mControl = (BooleanControl) sline.getControl(BooleanControl.Type.MUTE);
if (mControl.getValue() == true)
{
mControl.setValue(false);
}
else
{
mControl.setValue(true);
}
} catch (Exception e) {
System.out.println(" MUTE control: exception = " + e);
}
}
}//End of main class AudioBroadcast
Thanks in advance...
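One way to narrow this down is to check whether capture works at all outside the applet sandbox; unsigned applets are typically not granted the recording permission (AudioPermission "record"), and the resulting exception may only show up in the Java console. A minimal standalone test (hypothetical, separate from the applet code, using the same 44.1 kHz stereo format):

import javax.sound.sampled.*;

// If this prints growing byte counts when run as a plain application but the
// applet stays silent, the problem is more likely applet permissions than the
// capture logic itself.
public class CaptureTest {
    public static void main(String[] args) throws Exception {
        AudioFormat format = new AudioFormat(44100.0f, 16, 2, true, false);   // signed, little-endian
        DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
        TargetDataLine line = (TargetDataLine) AudioSystem.getLine(info);
        line.open(format);
        line.start();
        byte[] buffer = new byte[(int) format.getSampleRate() * format.getFrameSize()];  // ~1 second
        long total = 0;
        long end = System.currentTimeMillis() + 3000;    // capture for about 3 seconds
        while (System.currentTimeMillis() < end) {
            int count = line.read(buffer, 0, buffer.length);
            total += count;
            System.out.println("captured bytes so far: " + total);
        }
        line.stop();
        line.close();
    }
}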

Audio on J2ME, I don't know what is wrong

I want to do two tasks at the same time:
Play an audio file.
Read its raw data to do something with it.
Here is my code:
String wavPath = "file:///" + currentPath + fileName;
FileConnection fc;
try {
fc = (FileConnection) Connector.open( wavPath );
if ( !fc.exists() ) {
throw new IOException( "File does not exists." );
}
InputStream is = fc.openInputStream();
// to do something with raw data as print samples of it
Player player = Manager.createPlayer( wavPath );
player.realize();
player.prefetch();
player.start();
} catch ( IOException e1 ) {
e1.printStackTrace();
}
But nothing runs; the audio file doesn't play. If I remove the line:
InputStream is = fc.openInputStream();
the audio file plays very well. But I want to do the two tasks at the same time, and I don't know how to do it. Can anybody help me?
I have tried using two threads, but it still doesn't work: the audio file plays (thread 1) but thread 2 does not run:
new Thread( new Runnable() {
public void run() {
try {
Manager.createPlayer( "file:///E:/" + fileName ).start();
} catch ( MediaException e ) {
e.printStackTrace();
} catch ( IOException e ) {
e.printStackTrace();
}
}
}).start();
new Thread( new Runnable() {
public void run() {
FileConnection fc;
try {
fc = (FileConnection) Connector.open( "file:///E:/" + fileName );
InputStream is = fc.openInputStream();
byte[] b = new byte[10];
int length = is.read( b, 0, 10 );
for ( int i = 0; i < length; i++ ) {
form.append( b[i] + "" );
}
} catch ( IOException e ) {
e.printStackTrace();
}
}
}).start();
Why don't you use threading?
Create a thread to play the WAV file,
and use another thread to read the file.
Refer here for further details on threading in J2ME:
public class View_PlayMidlet extends MIDlet {
PlayerThread musicPlayer = new PlayerThread();
public void startApp() {
String fileName = "file://e:/abcd.wav";
musicPlayer.setPlayableFile(fileName);
FileConnection fc = null;
InputStream is = null;
String fileContent = null;
try {
fc = (FileConnection) Connector.open("file:///E:/" + fileName);
is = fc.openInputStream();
byte[] b = new byte[10];
int length = is.read(b, 0, 10);
fileContent = new String(b);
// display the content of fileContent variable in a form.
} catch (Exception e) {
e.printStackTrace();
} finally {
if (is != null) {
try {
is.close();
} catch (IOException ex) {
}
}
if (fc != null) {
try {
fc.close();
} catch (IOException ex) {
}
}
}
// by this time the file is displayed & you can start playing the file.
Thread t = new Thread(musicPlayer);
t.start();
}
public void pauseApp() {
}
public void destroyApp(boolean unconditional) {
}
}
public class PlayerThread implements Runnable {
private String fileName;
private Player player;
public PlayerThread() {
}
public void run() {
try {
player = Manager.createPlayer(fileName);
player.prefetch();
player.start();
} catch (Exception e) {
e.printStackTrace();
}
}
public void stopMusic() {
try {
player.stop();
} catch (Exception e) {
e.printStackTrace();
}
}
public void setPlayableFile(String fileName) {
this.fileName=fileName;
}
}
This is roughly what you are looking for.
Your Player instances are garbage collected. Also, file access during playback will likely be 'implementation specific', meaning that it will work on some models and not on others. So read the data first, copy it to another file, etc., if you want something solid.
You can always go the DataSource path too.
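A small sketch of the garbage-collection point above: keep a reference to the Player (for example in a field) so it stays reachable while playing, and release it explicitly when finished. These are hypothetical helper methods, assuming the MMAPI Manager/Player classes already used in the question:

// Hold on to the Player so it is not garbage collected mid-playback.
Player player;                                           // field, not a throwaway local

void playWav(String url) {                               // e.g. "file:///E:/abcd.wav"
    try {
        player = Manager.createPlayer(url);
        player.realize();
        player.prefetch();
        player.start();                                  // 'player' stays reachable
    } catch (Exception e) {
        e.printStackTrace();
    }
}

void stopWav() {
    if (player != null) {
        try {
            player.stop();
            player.close();                              // releases the underlying file handle
        } catch (Exception e) {
            e.printStackTrace();
        }
        player = null;
    }
}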

Xuggler screenRecording code affecting sound recording

I am working on an application for screencasting with audio. Screen recording with sound is working, but the issue is that if I record for 5 minutes, the generated video file is 5 minutes long while the generated audio file is only 4 minutes 45 seconds. So basically the audio and video are not in sync: the audio file's duration is shorter than the video file's.
Audio and video recording each run in a separate thread, but something is still wrong.
VideoCapturing code:
public void run() {
setVideoParameters();
FRAME_RATE = frameRate;
// let's make a IMediaWriter to write the file.
writer = ToolFactory.makeWriter(movieFile.getName());
screenBounds = new Rectangle(RecorderSettings.m_CapRectX,
RecorderSettings.m_CapRecY,
(int) RecorderSettings.m_CapRectWidth,
(int) RecorderSettings.m_CapRecHeight);
// We tell it we're going to add one video stream, with id 0,
// at position 0, and that it will have a fixed frame rate of
// FRAME_RATE.
// ScreenWidth && ScreenHeight multiplied by 3/4 to reduce pixel to 3/4
// of actual.
// writer.addVideoStream(0, 0, ICodec.ID.CODEC_ID_MPEG4,
// screenBounds.width , screenBounds.height );
writer.addVideoStream(0, 0, vcodec.getID(),
(screenBounds.width * upperLimit) / lowerLimit,
(screenBounds.height * upperLimit) / lowerLimit);
// To have start time of recording
startTime = System.nanoTime();
while (isStopProceesBtnClk) {
try {
if (!isStopProceesBtnClk) {
break;
} else {
synchronized (this) {
while (isPauseProceesBtnClk) {
try {
// catches starting time of pause.
pauseStartTime = System.nanoTime();
wait();
} catch (Exception e) {
e.printStackTrace();
}
}
}
BufferedImage screen = getDesktopScreenshot();
// convert to the right image type
BufferedImage bgrScreen = convertToType(screen, BufferedImage.TYPE_3BYTE_BGR);
// encode the image to stream #0
if (totalPauseTime > 0) {
writer.encodeVideo(0, bgrScreen, (System.nanoTime() - startTime)- totalPauseTime, TimeUnit.NANOSECONDS);
} else {
writer.encodeVideo(0, bgrScreen, System.nanoTime() - startTime, TimeUnit.NANOSECONDS);
}
// sleep for frame rate milliseconds
try {
Thread.sleep((long) (1000 / FRAME_RATE));
} catch (InterruptedException e) {
// ignore
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
try {
writer.close();
writer = null;
Runtime.getRuntime().gc();
} catch (Exception e) {
// ignore errors
}
// tell the writer to close and write the trailer if needed
}
public static BufferedImage convertToType(BufferedImage sourceImage, int targetType) {
BufferedImage image;
// if the source image is already the target type, return the source
// image
if (sourceImage.getType() == targetType) {
image = sourceImage;
}
// otherwise create a new image of the target type and draw the new
// image
else {
image = new BufferedImage(sourceImage.getWidth(), sourceImage.getHeight(), targetType);
if (true) {
int x = MouseInfo.getPointerInfo().getLocation().x - 25;
int y = MouseInfo.getPointerInfo().getLocation().y - 37;
Graphics2D graphics2D = sourceImage.createGraphics();// getGraphics().drawImage(m_MouseIcon,
// x, y, 48, 48, null);
graphics2D.drawImage(SimpleWebBrowserExample.m_MouseIcon, x, y,
48, 48, null);
}
image.getGraphics().drawImage(sourceImage, 0, 0, null);
}
return image;
}
private BufferedImage getDesktopScreenshot() {
try {
// Robot captures screen shot
Robot robot = new Robot();
Rectangle captureSize = new Rectangle(screenBounds);
return robot.createScreenCapture(captureSize);
} catch (AWTException e) {
e.printStackTrace();
return null;
}
}
AudioCapturing Code:
public void run() {
init();
DataLine.Info info = new DataLine.Info(TargetDataLine.class,audioFormat,(int) (m_AudioFreq * sampleSizeInBytes));
try
{
m_TargetLine = (TargetDataLine) AudioSystem.getLine(info);
m_TargetLine.open(audioFormat, info.getMaxBufferSize());
}
catch(Exception exp){
exp.printStackTrace();
}
AudioFileFormat.Type targetType = AudioFileFormat.Type.WAVE;
try
{
m_outputFile = new File(bufferFileName);
while (m_outputFile.exists() && !m_outputFile.delete())
{
m_outputFile = BerylsUtility.getNextFile(m_outputFile);
}
FileOutputStream outFileStream = new FileOutputStream(m_outputFile);
audioOutStream = new BufferedOutputStream(outFileStream,memoryBufferSize);
}
catch (FileNotFoundException fe){
System.out.println("FileNotFoundException in VoiceCapturing.java :: " + fe);
}
catch (OutOfMemoryError oe){
System.out.println("OutOfMemoryError in VoiceCapturing.java " + oe);
}
while (isStopProceesBtnClk) {
try {
if (!isStopProceesBtnClk) {
break;
} else {
synchronized (this) {
while (isPauseProceesBtnClk) {
try {
wait();
} catch (Exception e) {
e.printStackTrace();
}
}
}
try
{
m_TargetLine.start();
int cnt = m_TargetLine.read(tempBuffer,0,tempBuffer.length);
if(cnt > 0){
audioOutStream.write(tempBuffer,0,cnt);
}
}
catch (Exception e){
System.out.println("Exception in VoiceCapturing.java :: " + e);
}
/*finally{
finish();
}*/
}
} catch (Exception e) {
e.printStackTrace();
}
}
finish();
}
public synchronized void finish()
{
try
{
System.out.println("AudioFinish");
audioOutStream.close();
FileInputStream audioInAgain = new FileInputStream(m_outputFile);
long sampleBytes = m_outputFile.length();
long sizeOfFrame = (long) m_SampleRate * m_Channels / 8;
BufferedInputStream buffAudioIn = new BufferedInputStream(audioInAgain, memoryBufferSize);
AudioInputStream a_input = new AudioInputStream(buffAudioIn, audioFormat, sampleBytes / sizeOfFrame);
while (m_AudioFile.exists() && !m_AudioFile.canWrite())
{
m_AudioFile = BerylsUtility.getNextFile(m_AudioFile);
}
AudioSystem.write(a_input, m_targetType, m_AudioFile);
buffAudioIn.close();
m_outputFile.delete();
}
catch (Exception e)
{
e.printStackTrace();
}
}
Could someone guide me on this?
Thanks.
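One way to see where the time is going (a rough diagnostic sketch, not a Xuggler call; it reuses the audioFormat and cnt variables from the capture code above) is to compare the wall-clock recording time with the audio time implied by the bytes actually captured:

// If wall-clock time runs ahead of audio time, the TargetDataLine is dropping
// samples (buffer overruns) rather than the video writer mis-timing frames.
long startNanos = System.nanoTime();
long bytesCaptured = 0;
float bytesPerSecond = audioFormat.getFrameRate() * audioFormat.getFrameSize();

// inside the capture loop, after each successful read of 'cnt' bytes:
bytesCaptured += cnt;
double audioSeconds = bytesCaptured / (double) bytesPerSecond;
double wallSeconds = (System.nanoTime() - startNanos) / 1e9;
System.out.printf("audio %.2fs vs wall clock %.2fs (drift %.2fs)%n",
        audioSeconds, wallSeconds, wallSeconds - audioSeconds);

If the drift grows steadily, one suspect is the pause handling: while the audio thread waits, the TargetDataLine keeps capturing until its internal buffer overflows and silently drops data, whereas the video path compensates for pauses with its timestamp arithmetic. Stopping and flushing the line while paused is worth trying.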
