Problem in Java programming on Windows 7 (working well on Windows XP) - java

I am capturing video from a webcam connected to my PC. I am using the following code to do so:
import java.util.*;
import javax.media.*;
import javax.media.protocol.*;
import javax.media.control.*;
import javax.media.format.*;
import java.awt.*;
/**
* This is the primary class to run. It gathers an image stream and drives the processing.
*
*/
public class jmfcam05v
{
DataSource dataSource;
PushBufferStream pbs;
Vector camImgSize = new Vector();
Vector camCapDevice = new Vector();
Vector camCapFormat = new Vector();
int camFPS;
int camImgSel;
Processor processor = null;
DataSink datasink = null;
/**
* Main method to instance and run class
*
*/
public static void main(String[] args)
{
jmfcam05v jmfcam = new jmfcam05v();
}
/**
* Constructor and processing method for image stream from a cam
*
*/
public jmfcam05v()
{
// Select webcam format
fetchDeviceFormats();
camImgSel=0; // first format, or otherwise as desired
camFPS = 20; // framerate
// Setup data source
fetchDeviceDataSource();
createPBDSource();
createProcessor(dataSource);
startCapture();
try{Thread.sleep(90000);}catch(Exception e){} // capture for 90 seconds
stopCapture();
}
/**
* Gathers info on a camera
*
*/
boolean fetchDeviceFormats()
{
Vector deviceList = CaptureDeviceManager.getDeviceList(new VideoFormat(null));
CaptureDeviceInfo CapDevice = null;
Format CapFormat = null;
String type = "N/A";
CaptureDeviceInfo deviceInfo=null;boolean VideoFormatMatch=false;
for(int i=0;i<deviceList.size();i++)
{
// search for video device
deviceInfo = (CaptureDeviceInfo)deviceList.elementAt(i);
if(deviceInfo.getName().indexOf("vfw:")<0)continue;
Format deviceFormat[] = deviceInfo.getFormats();
for (int f=0;f<deviceFormat.length;f++)
{
if(deviceFormat[f] instanceof RGBFormat)type="RGB";
if(deviceFormat[f] instanceof YUVFormat)type="YUV";
if(deviceFormat[f] instanceof JPEGFormat)type="JPG";
Dimension size = ((VideoFormat)deviceFormat[f]).getSize();
camImgSize.addElement(type+" "+size.width+"x"+size.height);
CapDevice = deviceInfo;
camCapDevice.addElement(CapDevice);
//System.out.println("Video device = " + deviceInfo.getName());
CapFormat = (VideoFormat)deviceFormat[f];
camCapFormat.addElement(CapFormat);
//System.out.println("Video format = " + deviceFormat[f].toString());
VideoFormatMatch=true; // at least one
}
}
if(VideoFormatMatch==false)
{
if(deviceInfo!=null)System.out.println(deviceInfo);
System.out.println("Video Format not found");
return false;
}
return true;
}
/**
* Finds a camera and sets it up
*
*/
void fetchDeviceDataSource()
{
CaptureDeviceInfo CapDevice = (CaptureDeviceInfo)camCapDevice.elementAt(camImgSel);
System.out.println("Video device = " + CapDevice.getName());
Format CapFormat = (Format)camCapFormat.elementAt(camImgSel);
System.out.println("Video format = " + CapFormat.toString());
MediaLocator loc = CapDevice.getLocator();
try
{
dataSource = Manager.createDataSource(loc);
}
catch(Exception e){}
try
{
// request camFPS frames per second, subject to the rates the camera supports; this is the frequency at which Windows requests the stream
FormatControl formCont=((CaptureDevice)dataSource).getFormatControls()[0];
VideoFormat formatVideoNew = new VideoFormat(null,null,-1,null,(float)camFPS);
formCont.setFormat(CapFormat.intersects(formatVideoNew));
}
catch(Exception e){}
}
/**
* Gets a stream from the camera (and sets debug)
*
*/
void createPBDSource()
{
try
{
pbs=((PushBufferDataSource)dataSource).getStreams()[0];
}
catch(Exception e){}
}
public void createProcessor(DataSource datasource)
{
FileTypeDescriptor ftd = new FileTypeDescriptor(FileTypeDescriptor.MSVIDEO);
Format[] formats = new Format[] {new VideoFormat(VideoFormat.INDEO50)};
ProcessorModel pm = new ProcessorModel(datasource, formats, ftd);
try
{
processor = Manager.createRealizedProcessor(pm);
}
catch(Exception me)
{
System.out.println(me);
// Make sure the capture devices are released
datasource.disconnect();
return;
}
}
private void startCapture()
{
// Get the processor's output, create a DataSink and connect the two.
DataSource outputDS = processor.getDataOutput();
try
{
MediaLocator ml = new MediaLocator("file:capture.avi");
datasink = Manager.createDataSink(outputDS, ml);
datasink.open();
datasink.start();
}catch (Exception e)
{
System.out.println(e);
}
processor.start();
System.out.println("Started saving...");
}
private void pauseCapture()
{
processor.stop();
}
private void resumeCapture()
{
processor.start();
}
private void stopCapture()
{
// Stop the capture and the file writer (DataSink)
processor.stop();
processor.close();
datasink.close();
processor = null;
System.out.println("Done saving.");
}
}
This program works well on Windows XP (desktop), but when I try to run it on Windows 7 (laptop) it gives me the following error:
run: Video Format not found
Exception in thread "main" java.lang.ArrayIndexOutOfBoundsException: 0 >= 0
at java.util.Vector.elementAt(Vector.java:427)
at jmfcam05v.fetchDeviceDataSource(jmfcam05v.java:112)
at jmfcam05v.<init>(jmfcam05v.java:49)
at jmfcam05v.main(jmfcam05v.java:34) Java Result: 1 BUILD SUCCESSFUL (total time: 0 seconds)
My program is not detecting the built-in webcam on my laptop, nor is it detecting an external webcam. I am using JMF to capture the video, and all my webcams are VfW supported.
Please help me to solve this issue.

Are you mixing 32-bit and 64-bit installs? I had a similar problem under Windows 7, and it was due to 64-bit incompatibilities between Windows 7, the JRE and JMF. In short, JMF is 32-bit only and won't recognize devices if your JRE is 64-bit.
After following these instructions, I was able to recognize my camera and avoid the "Video Format not found" error, as well as JMStudio not detecting the video capture device.
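If you are not sure which JRE the program actually runs under, a quick check of the JVM's bitness can confirm it. A minimal sketch (assuming a Sun/Oracle JRE, which exposes the sun.arch.data.model property; os.arch is a portable fallback):
public class JvmBitnessCheck {
    public static void main(String[] args) {
        // Sun/Oracle-specific property: "32" or "64"
        System.out.println("Data model: " + System.getProperty("sun.arch.data.model", "unknown"));
        // Portable fallback: e.g. "x86" (32-bit) or "amd64" (64-bit)
        System.out.println("JVM arch:   " + System.getProperty("os.arch"));
    }
}
If it prints 64, running the program on a 32-bit JRE is the usual workaround, since JMF only ships 32-bit native capture support.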

Is it possible that Windows 7 security is preventing you from accessing the device, so that the list comes up empty by the time fetchDeviceDataSource() is called?
You can try turning off UAC and see if that fixes your problem.
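One way to confirm whether the device list is really empty is to dump everything JMF knows about, without the "vfw:" filter used in fetchDeviceFormats(). A minimal diagnostic sketch (assuming JMF is installed and its registry is on the classpath):
import java.util.Vector;
import javax.media.CaptureDeviceInfo;
import javax.media.CaptureDeviceManager;

public class ListCaptureDevices {
    public static void main(String[] args) {
        // Passing null returns every registered capture device, regardless of format.
        Vector devices = CaptureDeviceManager.getDeviceList(null);
        if (devices.isEmpty()) {
            System.out.println("JMF sees no capture devices at all.");
        }
        for (Object o : devices) {
            CaptureDeviceInfo info = (CaptureDeviceInfo) o;
            System.out.println(info.getName() + " -> " + info.getLocator());
        }
    }
}
If this list is empty even with UAC disabled, the problem is at the JMF/driver level (for example the 32/64-bit mismatch described above), not in the program itself.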

Related

Raspberry pi with java application high CPU usage

I have a Java application running on my Raspberry Pi, but it crashes most of the time. Whenever it crashes, Java usually shows very high CPU usage (> 100%). How my application works: I have an RFID reader that reads tags, and whenever a tag is read, a messageReceived method is called. That method stores the read tags in a specific set. Then I create a new thread which listens to a socket; while the socket is open and the set has changed, the thread calls some JavaFX methods to open new screens. However, when I deploy the application to my Raspberry Pi, it crashes randomly with high CPU usage from Java. Feel free to ask any questions if I forgot to explain anything.
Edit 1: My thread class is below.
Edit 2: My question now is: why do I have such high CPU usage, and how can I fix it?
public class RFIDThread implements Runnable {
/**
* The socket for the connection to the LLRP Reader
*/
private Socket socket;
private JSONArray valid_tags;
private JSONArray found_tags;
private TagsListController controller;
private RFIDSet rfidset;
/**
* Thread for constant reading of the stream
*
* @param socket
* @param controller
* @param tags
*/
public RFIDThread(Socket socket, TagsListController controller, JSONArray tags, RFIDSet rfidset) {
this.socket = socket;
this.controller = controller;
this.rfidset = rfidset;
this.found_tags = new JSONArray();
this.valid_tags = tags;
}
/**
* Runnable for this thread.
* First get all the found tags from the xml controller
* Then loop over the rfid set to find any new tags.
* If there are any, display them.
*/
@Override
public void run() {
CopyOnWriteArrayList<Tag> originalSet = new CopyOnWriteArrayList<>();
originalSet.addAll(rfidset.getSet());
boolean started = true;
if (socket.isConnected()) {
while (!socket.isClosed()) {
CopyOnWriteArrayList<Tag> set = new CopyOnWriteArrayList<>();
set.addAll(rfidset.getSet());
if(started || !originalSet.equals(set)) {
started = false;
CopyOnWriteArrayList<String> found_set = new CopyOnWriteArrayList<>();
found_set.addAll(controller.found_tags_list.getItems());
this.found_tags.clear();
this.found_tags.addAll(found_set);
for (Tag found_tag : set) {
if (found_tags.indexOf(found_tag.getId()) < 0) {
Integer index = valid_tags.indexOf(found_tag.getId());
if (index >= 0) {
Platform.runLater(() -> controller.showValid(found_tag.getId()));
} else {
Platform.runLater(() -> controller.showError(found_tag.getId()));
}
found_tags.add(found_tag.getId());
pause(5000);
}
}
originalSet = set;
pause(5000);
}
}
}
}
/**
* Close the socket
*/
public void shutdown() {
try {
this.socket.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private void pause(long ms) {
try {
Thread.sleep(ms);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
Try moving the pause(5000); call outside the if (started || !originalSet.equals(set)) { block.
High CPU usage is usually a tight loop with no pause, I/O, or waiting. In your case, whenever originalSet.equals(set) is true you never pause.
You may prefer to just use:
if (started || !originalSet.equals(set)) {
// ...
} else {
pause(0);
}
or similar.
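Applied to the run() loop from the question, the first suggestion (moving the pause outside the if) looks roughly like this (a sketch only; 5000 ms is the delay already used in the question, and the tag-processing body is unchanged):
// The pause now happens on every iteration, so the thread no longer spins
// when the set has not changed between polls.
while (!socket.isClosed()) {
    CopyOnWriteArrayList<Tag> set = new CopyOnWriteArrayList<>(rfidset.getSet());
    if (started || !originalSet.equals(set)) {
        started = false;
        // ... process newly found tags exactly as before ...
        originalSet = set;
    }
    pause(5000); // always sleep, whether or not anything changed
}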

Force stop Java Files.copy() running on external thread

The answer here seemed to be a valid solution before Java 8:
How to cancel Files.copy() in Java?
But now it doesn't work, because ExtendedCopyOption.INTERRUPTIBLE is private.
Basically, I need to download a file from some given URL and save it to my local file-system using Files.copy().
Currently, I am using a JavaFX Service because I need to show the progress in a ProgressBar.
However, I don't know how to stop the thread running Files.copy() if the operation takes too long.
Using Thread.stop() is not wanted, and even Thread.interrupt() fails.
I also want the operation to terminate gracefully if the internet connection becomes unavailable.
To test the case when no internet connection is available, I'm removing my ethernet cable and putting it back after 3 seconds.
Unfortunately, Files.copy() returns only when I put back the ethernet cable, while I would like it to fail immediately.
As I can see, internally Files.copy() is running a loop, which prevents the thread from exiting.
Tester(Downloading OBS Studio exe):
/**
* @author GOXR3PLUS
*
*/
public class TestDownloader extends Application {
/**
* @param args
*/
public static void main(String[] args) {
launch(args);
}
@Override
public void start(Stage primaryStage) throws Exception {
// Block From exiting
Platform.setImplicitExit(false);
// Try to download the File from URL
new DownloadService().startDownload(
"https://github.com/jp9000/obs-studio/releases/download/17.0.2/OBS-Studio-17.0.2-Small-Installer.exe",
System.getProperty("user.home") + File.separator + "Desktop" + File.separator + "OBS-Studio-17.0.2-Small-Installer.exe");
}
}
DownloadService:
Using @sillyfly's comment about FileChannel and removing Files.copy seems to work, but only when calling Thread.interrupt(); it still does not exit when the internet is not available.
import java.io.File;
import java.net.URL;
import java.net.URLConnection;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.file.StandardOpenOption;
import java.util.logging.Level;
import java.util.logging.Logger;
import javafx.concurrent.Service;
import javafx.concurrent.Task;
/**
* JavaFX Service which is Capable of Downloading Files from the Internet to the
* LocalHost
*
* @author GOXR3PLUS
*
*/
public class DownloadService extends Service<Boolean> {
// -----
private long totalBytes;
private boolean succeeded = false;
private volatile boolean stopThread;
// CopyThread
private Thread copyThread = null;
// ----
private String urlString;
private String destination;
/**
* The logger of the class
*/
private static final Logger LOGGER = Logger.getLogger(DownloadService.class.getName());
/**
* Constructor
*/
public DownloadService() {
setOnFailed(f -> System.out.println("Failed with value: " + super.getValue()+" , Copy Thread is Alive? "+copyThread.isAlive()));
setOnSucceeded(s -> System.out.println("Succeeded with value: " + super.getValue()+" , Copy Thread is Alive? "+copyThread.isAlive()));
setOnCancelled(c -> System.out.println("Cancelled with value: " + super.getValue()+" , Copy Thread is Alive? "+copyThread.isAlive()));
}
/**
* Start the Download Service
*
* @param urlString
* The source File URL
* @param destination
* The destination File
*/
public void startDownload(String urlString, String destination) {
if (!super.isRunning()) {
this.urlString = urlString;
this.destination = destination;
totalBytes = 0;
restart();
}
}
@Override
protected Task<Boolean> createTask() {
return new Task<Boolean>() {
@Override
protected Boolean call() throws Exception {
// Succeeded boolean
succeeded = true;
// URL and LocalFile
URL urlFile = new URL(java.net.URLDecoder.decode(urlString, "UTF-8"));
File destinationFile = new File(destination);
try {
// Open the connection and get totalBytes
URLConnection connection = urlFile.openConnection();
totalBytes = Long.parseLong(connection.getHeaderField("Content-Length"));
// --------------------- Copy the File to External Thread-----------
copyThread = new Thread(() -> {
// Start File Copy
try (FileChannel zip = FileChannel.open(destinationFile.toPath(), StandardOpenOption.CREATE,
StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE)) {
zip.transferFrom(Channels.newChannel(connection.getInputStream()), 0, Long.MAX_VALUE);
// Files.copy(dl.openStream(), fl.toPath(),StandardCopyOption.REPLACE_EXISTING)
} catch (Exception ex) {
stopThread = true;
LOGGER.log(Level.WARNING, "DownloadService failed", ex);
}
System.out.println("Copy Thread exited...");
});
// Set to Daemon
copyThread.setDaemon(true);
// Start the Thread
copyThread.start();
// -------------------- End of Copy the File to External Thread-------
// ---------------------------Check the %100 Progress--------------------
long outPutFileLength;
long previousLength = 0;
int failCounter = 0;
// While Loop
while ((outPutFileLength = destinationFile.length()) < totalBytes && !stopThread) {
// Check the previous length
if (previousLength != outPutFileLength) {
previousLength = outPutFileLength;
failCounter = 0;
} else
++failCounter;
// 2 Seconds passed without response
if (failCounter == 40 || stopThread)
break;
// Update Progress
super.updateProgress((outPutFileLength * 100) / totalBytes, 100);
System.out.println("Current Bytes:" + outPutFileLength + " ,|, TotalBytes:" + totalBytes
+ " ,|, Current Progress: " + (outPutFileLength * 100) / totalBytes + " %");
// Sleep
try {
Thread.sleep(50);
} catch (InterruptedException ex) {
LOGGER.log(Level.WARNING, "", ex);
}
}
// 2 Seconds passed without response
if (failCounter == 40)
succeeded = false;
// --------------------------End of Check the %100 Progress--------------------
} catch (Exception ex) {
succeeded = false;
// Stop the External Thread which is updating the %100
// progress
stopThread = true;
LOGGER.log(Level.WARNING, "DownloadService failed", ex);
}
//----------------------Finally------------------------------
System.out.println("Trying to interrupt[shoot with an assault rifle] the copy Thread");
// ---FORCE STOP COPY FILES
if (copyThread != null && copyThread.isAlive()) {
copyThread.interrupt();
System.out.println("Done an interrupt to the copy Thread");
// Run a Looping checking if the copyThread has stopped...
while (copyThread.isAlive()) {
System.out.println("Copy Thread is still Alive,refusing to die.");
Thread.sleep(50);
}
}
System.out.println("Download Service exited:[Value=" + succeeded + "] Copy Thread is Alive? "
+ (copyThread == null ? "" : copyThread.isAlive()));
//---------------------- End of Finally------------------------------
return succeeded;
}
};
}
}
Interesting questions:
1-> What does java.lang.Thread.interrupt() do?
I strongly encourage you to use a FileChannel.
It has the transferFrom() method which returns immediately when the thread running it is interrupted.
(The Javadoc here says that it should raise a ClosedByInterruptException, but it doesn't.)
try (FileChannel channel = FileChannel.open(Paths.get(...), StandardOpenOption.CREATE,
StandardOpenOption.WRITE)) {
channel.transferFrom(Channels.newChannel(new URL(...).openStream()), 0, Long.MAX_VALUE);
}
It also has the potential to perform much better than its java.io alternative.
(However, it turns out that the implementation of Files.copy() may elect to delegate to this method instead of actually performing the copy by itself.)
Here's an example of a reusable JavaFX Service that lets you fetch a resource from the internet and save it to your local file-system, with automatic graceful termination if the operation takes too long.
The service task (spawned by createTask()) is the user of the file-channel API.
A separate ScheduledExecutorService is used to handle the time constraint.
Always stick to the good practices for extending Service.
If you choose to use such a high-level method, you won't be able to track the progress of the task.
If the connection becomes unavailable, transferFrom() should eventually return without throwing an exception.
To start the service (may be done from any thread):
DownloadService downloadService = new DownloadService();
downloadService.setRemoteResourceLocation(new URL("http://speedtest.ftp.otenet.gr/files/test1Gb.db"));
downloadService.setPathToLocalResource(Paths.get("C:", "test1Gb.db"));
downloadService.start();
and then to cancel it (otherwise it will be automatically cancelled after the time expires):
downloadService.cancel();
Note that the same service can be reused, just be sure to reset it before starting again:
downloadService.reset();
Here is the DownloadService class:
public class DownloadService extends Service<Void> {
private static final long TIME_BUDGET = 2; // In seconds
private final ScheduledExecutorService watchdogService =
Executors.newSingleThreadScheduledExecutor(new ThreadFactory() {
private final ThreadFactory delegate = Executors.defaultThreadFactory();
@Override
public Thread newThread(Runnable r) {
Thread thread = delegate.newThread(r);
thread.setDaemon(true);
return thread;
}
});
private Future<?> watchdogThread;
private final ObjectProperty<URL> remoteResourceLocation = new SimpleObjectProperty<>();
private final ObjectProperty<Path> pathToLocalResource = new SimpleObjectProperty<>();
public final URL getRemoteResourceLocation() {
return remoteResourceLocation.get();
}
public final void setRemoteResourceLocation(URL remoteResourceLocation) {
this.remoteResourceLocation.set(remoteResourceLocation);
}
public ObjectProperty<URL> remoteResourceLocationProperty() {
return remoteResourceLocation;
}
public final Path getPathToLocalResource() {
return pathToLocalResource.get();
}
public final void setPathToLocalResource(Path pathToLocalResource) {
this.pathToLocalResource.set(pathToLocalResource);
}
public ObjectProperty<Path> pathToLocalResourceProperty() {
return pathToLocalResource;
}
@Override
protected Task<Void> createTask() {
final Path pathToLocalResource = getPathToLocalResource();
final URL remoteResourceLocation = getRemoteResourceLocation();
if (pathToLocalResource == null) {
throw new IllegalStateException("pathToLocalResource property value is null");
}
if (remoteResourceLocation == null) {
throw new IllegalStateException("remoteResourceLocation property value is null");
}
return new Task<Void>() {
@Override
protected Void call() throws IOException {
try (FileChannel channel = FileChannel.open(pathToLocalResource, StandardOpenOption.CREATE,
StandardOpenOption.WRITE)) {
channel.transferFrom(Channels.newChannel(remoteResourceLocation.openStream()), 0, Long.MAX_VALUE);
}
return null;
}
};
}
@Override
protected void running() {
watchdogThread = watchdogService.schedule(() -> {
Platform.runLater(() -> cancel());
}, TIME_BUDGET, TimeUnit.SECONDS);
}
@Override
protected void succeeded() {
watchdogThread.cancel(false);
}
@Override
protected void cancelled() {
watchdogThread.cancel(false);
}
@Override
protected void failed() {
watchdogThread.cancel(false);
}
}
There is one important aspect not covered by the other answers/comments, and that is a wrong assumption of yours:
What I want is it to fail immediately when no internet connection is there.
It is not that easy. The TCP stack/state machine is actually a pretty complicated thing, and depending on your context (OS type, TCP stack implementation, kernel parameters, ...), there can be situations where a network partition takes place and a sender doesn't notice for 15 minutes or more. Listen here for more details on that.
In other words, "just pulling the plug" is in no way equal to "immediately breaking" your existing TCP connection. And just for the record: you don't need to pull cables manually to simulate network outages. In a reasonable test setup, tools like iptables (i.e. firewalls) can do that for you.
You seem to need an asynchronous/cancellable HTTP GET, which can be tough.
The problem is that if a read stalls waiting for more data (the cable is pulled), it won't return until either the socket dies or new data comes in.
There are a few paths you could follow: tinkering with socket factories to set a good timeout, using an HTTP client with timeouts, and others.
I would have a look at Apache HttpComponents, which has non-blocking HTTP based on Java NIO sockets.
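One concrete way to combine the timeout idea with the FileChannel approach shown above is to put explicit timeouts on the URLConnection, so a stalled read fails with a SocketTimeoutException instead of hanging until the TCP stack gives up. A sketch (the URL, the file name, and the 5-second values are placeholders, not from the original code):
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class TimedDownload {
    public static void main(String[] args) throws Exception {
        URLConnection connection = new URL("http://example.com/file.bin").openConnection();
        connection.setConnectTimeout(5_000); // fail if the connection cannot be established within 5 s
        connection.setReadTimeout(5_000);    // fail if no data arrives for 5 s during the transfer
        try (InputStream in = connection.getInputStream();
             FileChannel channel = FileChannel.open(Paths.get("file.bin"),
                     StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
            channel.transferFrom(Channels.newChannel(in), 0, Long.MAX_VALUE);
        }
    }
}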

Why do I lose the console output?

I have this code in a JUnit test:
public class CvsCommandTest {
...
@Test
public void test() {
PServerConnection con = new PServerConnection(root);
GlobalOptions globalOptions = new GlobalOptions();
globalOptions.setCVSRoot(root.toString());
Client client = new Client(con, new StandardAdminHandler());
client.setLocalPath(LOCAL_PATH);
client.getEventManager().addCVSListener(new BasicListener());
CheckoutCommand checkoutCmd = new CheckoutCommand();
checkoutCmd.setBuilder(null);
checkoutCmd.setModule("Outils");
try {
client.getConnection().open();
LOG.info("CVS checkout : " + checkoutCmd.getCVSCommand());
boolean successCheckout = client.executeCommand(checkoutCmd,globalOptions );
LOG.info("Checkout COMPLETED : " + successCheckout);
...
The output, while debugging, is:
[INFO] fr.package.CvsCommandTest - CVS checkout : checkout
-N Outils
cvs checkout: Updating Outils
The first line is my log and the second comes from the listener, but I don't get the rest of my logs.
The BasicListener is defined this way:
import java.io.PrintStream;
import org.netbeans.lib.cvsclient.event.CVSAdapter;
import org.netbeans.lib.cvsclient.event.MessageEvent;
public class BasicListener extends CVSAdapter {
/** Stores a tagged line */
private final StringBuffer taggedLine = new StringBuffer();
/**
* Called when the server wants to send a message to be displayed to the
* user. The message is only for information purposes and clients can choose
* to ignore these messages if they wish.
*
* @param e
* the event
*/
public void messageSent(MessageEvent e) {
String line = e.getMessage();
PrintStream stream = e.isError() ? System.err : System.out;
if (e.isTagged()) {
String message = MessageEvent.parseTaggedMessage(taggedLine, line);
if (message != null) {
stream.println(message);
}
} else {
stream.println(line);
}
stream.close();
}
}
What have I missed?
Turned comment into answer:
stream.close() on System.out --> good night...
Explanation:
Since he is using System.out to print his log messages, when he closes it (see the end of public void messageSent(MessageEvent e): stream.close();), System.out is closed and cannot be used anymore, so good night to System.out.
The solution is:
Remove the stream.close(); call.
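The corrected messageSent() is then identical to the one in the question, just without the close:
public void messageSent(MessageEvent e) {
    String line = e.getMessage();
    PrintStream stream = e.isError() ? System.err : System.out;
    if (e.isTagged()) {
        String message = MessageEvent.parseTaggedMessage(taggedLine, line);
        if (message != null) {
            stream.println(message);
        }
    } else {
        stream.println(line);
    }
    // no stream.close() here: closing System.out/System.err would silence all further output
}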

Rectify JMF Video Capture

I saw the following code on stackoverflow.com, submitted by you, about JMF, and copied it to my system:
import Logging.LogRunner; //Logging.LogRunner is absent, so an error occurs
import java.util.Vector;
import java.awt.*;
import java.awt.event.*;
import javax.media.*;
import javax.media.format.*;
import javax.media.protocol.*;
import javax.media.datasink.*;
import javax.swing.JButton;
import javax.swing.JOptionPane;
/**
* Records audio and video from a usb camera and saves it to disk. Essentially what is happening
* is we are creating two video streams and an audio stream. We use two video streams
* so one can be sent to the file, and the other can be sent to the gui to show the
* user what they are currently recording. This class is configured to use a certain
* audio and video format, but this can easily be changed to fit different needs.
* This class was created with
* help from http://www.codeproject.com/KB/audio-video/Java_Video_Capture.aspx.
* Please refer to that site for more information.
* @author dvargo
*/
public class VideoRecorder
{
/**
* The source of the video stream
*/
CaptureDeviceInfo device = null;
/**
* The location of media content
*/
MediaLocator ml = null;
/**
* MediaHandler for rendering and controlling time based media data
*/
Player player = null;
/**
* The screen that will display the video as it is being recorded
*/
Component videoScreen = null;
/**
* The Processor processes data and creates an output in the destination
* format required
*/
Processor processor = null;
/**
* takes a DataSource as input and renders the output to a specified destination
*/
DataSink dataSink = null;
/**
*
*/
TheDataSinkListener dataSinkListener = null;
/**
* The form for the video and other gui components
*/
Frame frm = null;
/**
* A reference to the Starter class.
*/
VideoStarter theForm = null; //VideoStarter does not exist, so an error occurs
/**
* Used to determine if something went wrong
*/
boolean error = false;
/**
* Basic Constructor to begin recording video to a specified file path. This
* constructor initializes everything needed to begin recording.
* @param saveFileTo The path and file name of where to save the video.
* @param inForm Reference to the Starter class that initiated this recording.
*/
public VideoRecorder(String saveFileTo,VideoStarter inForm)
{
theForm = inForm;
try
{
//gets a list of devices that support the given video format
Vector deviceList = CaptureDeviceManager.getDeviceList(new YUVFormat());
//if we couldn't find anything to record from
if(deviceList.size() == 0)
{
LogRunner.dialogMessage(this.getClass(),"No video capture devices could be found.");
error = true;
return;
}
// get video device - the first one is almost always the only available camera
device = (CaptureDeviceInfo) deviceList.firstElement();
ml = device.getLocator();
//create a source from the device
DataSource ods = null;
ods = Manager.createDataSource(ml);
/*
* Clone the video source so it can be displayed and used to capture
* the video at the same time. Trying to use the same source for two
* purposes would cause a "source is in use" error
*/
DataSource cloneableDS = Manager.createCloneableDataSource(ods);
DataSource PlayerVidDS = cloneableDS;
// The video capture code will use the clone which is controlled by the player
DataSource CaptureVidDS = ((javax.media.protocol.SourceCloneable) cloneableDS).createClone();
/*
* Display video by starting the player on the source clonable data source
* the clones are fed data stopping the player will stop the video flow
* to the clone data source
*/
player = Manager.createRealizedPlayer(PlayerVidDS);
player.start();
// get an audio device and create an audio data source
deviceList = CaptureDeviceManager.getDeviceList(new javax.media.format.AudioFormat(null));
device = (CaptureDeviceInfo) deviceList.firstElement();
ml = device.getLocator();
DataSource audioDataSource = Manager.createDataSource(ml);
// merge audio and video data sources
DataSource mixedDataSource = null;
DataSource dsArray[] = new DataSource[2];
dsArray[0] = CaptureVidDS; // this is a cloned datasource and is controlled by the master clonable data source
dsArray[1] = audioDataSource;
try
{
mixedDataSource = javax.media.Manager.createMergingDataSource(dsArray);
}
catch (Exception e)
{
//exception handling here
error = true;
System.out.println("Error 1");
e.printStackTrace();
}
// setup output file format to msvideo
FileTypeDescriptor outputType = new FileTypeDescriptor(FileTypeDescriptor.MSVIDEO);
// setup output video and audio data format
Format outputFormat[] = new Format[2];
//outputFormat[0] = new VideoFormat(VideoFormat.RGB);
outputFormat[0] = new VideoFormat(VideoFormat.YUV);
outputFormat[1] = new AudioFormat(AudioFormat.LINEAR);
// create a new processor
ProcessorModel processorModel = new ProcessorModel(mixedDataSource, outputFormat, outputType);
try
{
processor = Manager.createRealizedProcessor(processorModel);
}
catch (Exception e) {
// exception handling here
error = true;
System.out.println("Error 2");
e.printStackTrace();
}
try
{
// get the output of the processor to be used as the datasink input
DataSource source = processor.getDataOutput();
// create a File protocol MediaLocator with the location of the file to which bits are to be written
MediaLocator mediadestination = new MediaLocator("file:" + saveFileTo);
// create a datasink to create the video file
dataSink = Manager.createDataSink(source, mediadestination);
// create a listener to control the datasink
dataSinkListener = new TheDataSinkListener();
dataSink.addDataSinkListener(dataSinkListener);
dataSink.open();
// now start the datasink and processor
dataSink.start();
processor.start();
}
catch (Exception e)
{
// exception handling here
error = true;
System.out.println("Error 3");
e.printStackTrace();
}
//set up the gui
createGui();
}
catch (Exception e)
{
System.out.println("Error 4");
LogRunner.getLogger().warning("Error recording video - " + e.getMessage());
e.printStackTrace();
error = true;
}
}
/**
* Flag that determines if something went wrong
* @return True if something did go wrong, false if everything is fine.
*/
public boolean getError()
{
return error;
}
/**
* Creates the gui used to display what is currently being recorded.
*/
private void createGui()
{
videoScreen = player.getVisualComponent();
frm = new Frame("Recording");
frm.setSize(300, 300);
frm.setLayout(new BorderLayout());
frm.add(videoScreen,BorderLayout.CENTER);
JButton close = new JButton("Click Here when done");
frm.add(close,BorderLayout.SOUTH);
close.setVisible(true);
close.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(ActionEvent e) {
// Stop the processor doing the movie capture first
processor.stop();
processor.close();
// Closing the processor will end the data stream to the data sink.
// Wait for the end of stream to occur before closing the datasink
dataSinkListener.waitEndOfStream(10);
dataSink.close();
// stop and close the player which closes the video data source
player.stop();
player.close();
theForm.done();
// dispose of the frame and close the application
frm.setVisible(false);
}
});
frm.setAlwaysOnTop(true);
frm.setUndecorated(true);
frm.setLocationRelativeTo(theForm.mainWindow);
frm.setVisible(true);
}
}
/**
*
* Control the ending of the program prior to closing the data sink
*/
class TheDataSinkListener implements DataSinkListener {
boolean endOfStream = false;
// Flag the ending of the data stream
public void dataSinkUpdate(DataSinkEvent event)
{
if (event instanceof javax.media.datasink.EndOfStreamEvent)
{
endOfStream = true;
}
}
/**
* Cause the current thread to sleep if the data stream is still available.
* This makes certain that JMF threads are done prior to closing the data sink
* and finalizing the output file
*/
public void waitEndOfStream(long checkTimeMs) {
while (!endOfStream)
{
try
{
//Thread.currentThread().sleep(checkTimeMs);
Thread.sleep(checkTimeMs);
}
catch (InterruptedException ie)
{
System.out.println("Error 5");
ie.printStackTrace();
}
}
}
public static void main(String []args)
{
new VideoRecorder("d:/yusuf/abc.mpg",theForm);
}
}
But where can I get Logging.LogRunner and VideoStarter? Without these, my program shows errors during compilation. The following error messages are shown:
1.
symbol: variable LogRunner
location: class VideoRecorder
F:\Tutorials\Java Tutorial\Programs\VideoRecorder\src\VideoRecorder.java:206: error: cannot find symbol
LogRunner.getLogger().warning("Error recording video - " + e.getMessage());
2.
public VideoRecorder(String saveFileTo,VideoStarter inForm)
^
symbol: class VideoStarter
location: class VideoRecorder
Please help
I'm the one that actually wrote that. Sorry for the delayed reply, as I'm sure you're far past this, but it may be useful to someone in the future. LogRunner is a class we use to write our log messages out. You can just remove it, or use whatever you'd like for logging.
The VideoStarter is just a JWindow that starts the video recording process. It is also not needed for this class to work.
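For anyone who wants to keep the logging calls rather than delete them, a minimal stand-in for the two LogRunner calls used in the code might look like this (the class and method names here are invented for illustration, assuming java.util.logging and a Swing dialog are acceptable replacements):
import java.util.logging.Logger;
import javax.swing.JOptionPane;

// Hypothetical replacement for the missing Logging.LogRunner helper.
public class LogRunnerShim {
    private static final Logger LOGGER = Logger.getLogger(LogRunnerShim.class.getName());

    // Stands in for LogRunner.getLogger().warning(...)
    public static Logger getLogger() {
        return LOGGER;
    }

    // Stands in for LogRunner.dialogMessage(this.getClass(), "...")
    public static void dialogMessage(Class<?> source, String message) {
        JOptionPane.showMessageDialog(null, message, source.getSimpleName(), JOptionPane.WARNING_MESSAGE);
    }
}
VideoStarter can likewise be replaced with any Swing/AWT window that starts the recording and provides a done() callback and a mainWindow field, since those are the only members the recorder touches.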

MP3 won't stream with JMF

The basic idea is to access an .mp3 file and send it through an RTP stream to another client, who will want to play that song.
Here is RTPServer.java, which I found online and modified to my liking.
package server;
import java.net.InetAddress;
import javax.media.rtp.*;
import javax.media.rtp.rtcp.*;
import javax.media.*;
import javax.media.protocol.*;
import javax.media.control.*;
public class RTPServer implements ControllerListener, Runnable {
private boolean realized = false;
private boolean configured = false;
private String ipAddress;
Processor p;
MediaLocator src;
public static void main (String[] args) {
RTPServer rtp = new RTPServer("192.168.1.101", "04 - Blue.mp3");
Thread t = new Thread(rtp);
t.start();
}
public RTPServer(String ip, String song) {
ipAddress = ip;
String srcFile = "Muzika\\" + song;
src = new MediaLocator("file:" + srcFile);
}
private void setTrackFormat(Processor p) {
// Get the tracks from the processor
TrackControl [] tracks = p.getTrackControls();
// Do we have at least one track?
if (tracks == null || tracks.length < 1) {
System.out.println("Couldn't find tracks in processor");
System.exit(1);
}
// Set the output content descriptor to RAW_RTP
// This will limit the supported formats reported from
// Track.getSupportedFormats to only valid RTP formats.
ContentDescriptor cd = new ContentDescriptor(ContentDescriptor.RAW_RTP);
p.setContentDescriptor(cd);
Format supported[];
Format chosen;
boolean atLeastOneTrack = false;
// Program the tracks.
for (int i = 0; i < tracks.length; i++) {
Format format = tracks[i].getFormat();
System.out.println("Trenutni format je " +format.getEncoding());
if (tracks[i].isEnabled()) {
supported = tracks[i].getSupportedFormats();
for (int n = 0; n < supported.length; n++)
System.out.println("Supported format: " + supported[n]);
// We've set the output content to the RAW_RTP.
// So all the supported formats should work with RTP.
// We'll just pick the first one.
if (supported.length > 0) {
chosen = supported[0]; // this is where I tried changing formats
tracks[i].setFormat(chosen);
System.err.println("Track " + i + " is set to transmit as: " +chosen);
atLeastOneTrack = true;
} else
tracks[i].setEnabled(false);
} else
tracks[i].setEnabled(false);
}
}
private void transmit(Processor p) {
try {
DataSource output = p.getDataOutput();
PushBufferDataSource pbds = (PushBufferDataSource) output;
RTPManager rtpMgr = RTPManager.newInstance();
SessionAddress localAddr, destAddr;
SendStream sendStream;
int port = 42050;
SourceDescription srcDesList[];
localAddr = new SessionAddress( InetAddress.getLocalHost(), port);
InetAddress ipAddr = InetAddress.getByName(ipAddress);
destAddr = new SessionAddress( ipAddr, port);
rtpMgr.initialize(localAddr);
rtpMgr.addTarget(destAddr);
sendStream = rtpMgr.createSendStream(output, 0);
sendStream.start();
System.err.println( "Created RTP session: " + ipAddress + " " + port);
p.start();
} catch(Exception e) {
e.printStackTrace();
}
}
public synchronized void controllerUpdate(ControllerEvent evt) {
if (evt instanceof RealizeCompleteEvent) {
realized = true;
} else if (evt instanceof ConfigureCompleteEvent) {
configured = true;
} else if (evt instanceof EndOfMediaEvent) {
System.exit(0);
} else {
// System.out.println(evt.toString());
}
}
public void run() {
try {
p = Manager.createProcessor(src);
p.addControllerListener(this);
p.configure();
while (! configured) {
try {
Thread.sleep(100L);
} catch (InterruptedException e) {
// ignore
}
}
setTrackFormat(p);
p.setContentDescriptor(new ContentDescriptor(ContentDescriptor.RAW_RTP));
p.realize();
while (! realized) {
try {
Thread.sleep(100L);
} catch (InterruptedException e) {
// ignore
}
}
transmit(p);
} catch(Exception e) {
e.printStackTrace();
System.exit(1);
}
}
}
And here is the receiving end, RTPClient:
package client;
import javax.media.*;
public class RTPClient implements ControllerListener, Runnable {
Player p;
MediaLocator src;
public static void main(String[] args) {
RTPClient rtp = new RTPClient("192.168.1.100");
Thread t = new Thread(rtp);
t.start();
}
public RTPClient(String ip) {
String srcUrl = "rtp://" + ip + ":42050/audio/1";
DataSink sink;
src = new MediaLocator(srcUrl);
}
public void run() {
try {
p = Manager.createPlayer(src);
p.addControllerListener(this);
p.start();
} catch(Exception e) {
e.printStackTrace();
System.exit(1);
}
}
public synchronized void controllerUpdate(ControllerEvent evt) {
if (evt instanceof EndOfMediaEvent) {
System.exit(0);
} else {
System.out.println(evt.toString());
}
}
}
I figured out that it successfully sends whatever file I choose, but when I send an .mp3, the client won't play it. I get:
RTP Handler internal error:
javax.media.ControllerErrorEvent[source=com.sun.media.content.unknown.Handler@9ed927,message=Internal
module com.sun.media.BasicRendererModule@1386000: failed to handle a data
format change!]
The interesting thing is that a .wav is sent perfectly. So my guess was that the problem is the format set prior to sending. I tried changing the format to some other supported format, but then I get a bunch of other errors.
Failed to build a graph for the given custom options.
Failed to realize: com.sun.media.ProcessEngine@eee36c
Cannot build a flow graph with the customized options:
Unable to transcode format: mpegaudio, 48000.0 Hz, 16-bit, Stereo, LittleEndian, Signed, 20000.0 frame rate, FrameSize=11264 bits
to: ULAW/rtp, 8000.0 Hz, 8-bit, Stereo
outputting to: RAW/RTP
Error: Unable to realize com.sun.media.ProcessEngine@eee36c
Finally, I opened JMStudio (the built-in app for sending/receiving media streams in JMF), and when I try to stream an .mp3, I get the exact same error as when running my app. JMF is set up fine; I checked PATH and CLASSPATH, and I also installed the mp3plugin, which is set up correctly as well. Everything seems fine, but it just doesn't work! At least .mp3 does not.
So, how can I make .mp3 "go to the other end"?
Solved.
All I had to do was add these lines to the constructor for the sender/receiver.
Format input1 = new AudioFormat(AudioFormat.MPEGLAYER3);
Format input2 = new AudioFormat(AudioFormat.MPEG);
Format output = new AudioFormat(AudioFormat.LINEAR);
PlugInManager.addPlugIn(
"com.sun.media.codec.audio.mp3.JavaDecoder",
new Format[]{input1, input2},
new Format[]{output},
PlugInManager.CODEC);
Might help somebody else with this problem :)
Still don't know why JMStudio isn't working... Not that I care anymore.
My environment cannot detect the newly added plugin, so I have to hardcode the codec into the track. It works, but the mp3 playback stutters; .wav is perfectly fine though.
javax.media.Codec codec = (javax.media.Codec) (Class.forName(plugins.get(0)).newInstance());
com.sun.media.codec.audio.mp3.JavaDecoder decoder = new com.sun.media.codec.audio.mp3.JavaDecoder();
Codec[] cc = new Codec[2];
cc[0] = codec;
cc[1] = decoder;
try {
tracks[0].setCodecChain(cc);
} catch (UnsupportedPlugInException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (NotConfiguredError e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
There are a couple of things to do to make the code in question work:
Put mp3plugin.jar on the classpath. It is an MP3 plugin for JMF. You may find it online.
Put the following code in the main method to register the newly added plugin.
Format input1 = new AudioFormat(AudioFormat.MPEGLAYER3);
Format input2 = new AudioFormat(AudioFormat.MPEG);
Format output = new AudioFormat(AudioFormat.LINEAR);
PlugInManager.addPlugIn(
"com.sun.media.codec.audio.mp3.JavaDecoder",
new Format[]{input1, input2},
new Format[]{output},
PlugInManager.CODEC);
Set the track format to AudioFormat.DVI_RTP in RTPServer.java to convert your mp3 music to a format that RTPClient can play.
Before
if (supported.length > 0) {
chosen = supported[0]; // this is where I tried changing formats
tracks[i].setFormat(chosen);
System.err.println("Track " + i + " is set to transmit as: " +chosen);
atLeastOneTrack = true;
} else
After (replace "chosen" with "new AudioFormat(AudioFormat.DVI_RTP)"):
if (supported.length > 0) {
chosen = supported[0]; // this is where I tried changing formats
tracks[i].setFormat(new AudioFormat(AudioFormat.DVI_RTP));
atLeastOneTrack = true;
} else
Then everything should work just fine.
Here is my RTPServer
import java.io.File;
import java.io.IOException;
import java.net.InetAddress;
import java.net.MalformedURLException;
import javax.media.rtp.*;
import javax.media.rtp.rtcp.*;
import javax.media.*;
import javax.media.protocol.*;
import javax.media.control.*;
import javax.media.format.AudioFormat;
public class RTPServerMP3 implements ControllerListener {
private String ipAddress;
Processor p;
public static void main(String[] args) throws NoProcessorException, IOException {
Format input1 = new AudioFormat(AudioFormat.MPEGLAYER3);
Format input2 = new AudioFormat(AudioFormat.MPEG);
Format output = new AudioFormat(AudioFormat.LINEAR);
PlugInManager.addPlugIn(
"com.sun.media.codec.audio.mp3.JavaDecoder",
new Format[]{input1, input2},
new Format[]{output},
PlugInManager.CODEC);
RTPServerMP3 rtp = new RTPServerMP3("192.168.1.86");
rtp.p = Manager.createProcessor(new MediaLocator((new File( "roar_of_future.mp3")).toURL()));
rtp.p.addControllerListener(rtp);
rtp.p.configure();
}
public RTPServerMP3(String ip) throws MalformedURLException {
ipAddress = ip;
}
private void setTrackFormat(Processor p) {
// Get the tracks from the processor
TrackControl[] tracks = p.getTrackControls();
// Do we have atleast one track?
if (tracks == null || tracks.length < 1) {
System.out.println("Couldn't find tracks in processor");
System.exit(1);
}
// Set the output content descriptor to RAW_RTP
// This will limit the supported formats reported from
// Track.getSupportedFormats to only valid RTP formats.
ContentDescriptor cd = new ContentDescriptor(ContentDescriptor.RAW_RTP);
p.setContentDescriptor(cd);
Format supported[];
Format chosen;
boolean atLeastOneTrack = false;
// Program the tracks.
for (int i = 0; i < tracks.length; i++) {
Format format = tracks[i].getFormat();
System.out.println("seeing format " + format.getEncoding() + " for track " + i);
if (tracks[i].isEnabled()) {
supported = tracks[i].getSupportedFormats();
for (int n = 0; n < supported.length; n++)
System.out.println("Supported format: " + supported[n]);
// We've set the output content to the RAW_RTP.
// So all the supported formats should work with RTP.
// We'll just pick the first one.
if (supported.length > 0) {
chosen = supported[0]; // this is where I tried changing formats
tracks[i].setFormat(new AudioFormat(AudioFormat.DVI_RTP));
System.err.println("Track " + i + " is set to transmit as: " + chosen);
atLeastOneTrack = true;
} else
tracks[i].setEnabled(false);
} else
tracks[i].setEnabled(false);
}
}
private void transmit(Processor p) {
try {
DataSource output = p.getDataOutput();
PushBufferDataSource pbds = (PushBufferDataSource) output;
RTPManager rtpMgr = RTPManager.newInstance();
SessionAddress localAddr, destAddr;
SendStream sendStream;
int port = 49150;
SourceDescription srcDesList[];
localAddr = new SessionAddress(InetAddress.getLocalHost(), port/2+10);
InetAddress ipAddr = InetAddress.getByName(ipAddress);
destAddr = new SessionAddress(ipAddr, port);
rtpMgr.initialize(localAddr);
rtpMgr.addTarget(destAddr);
sendStream = rtpMgr.createSendStream(output, 0);
sendStream.start();
System.err.println("Created RTP session: " + ipAddress + " " + port);
p.start();
} catch (Exception e) {
e.printStackTrace();
}
}
public synchronized void controllerUpdate(ControllerEvent evt) {
if (evt instanceof RealizeCompleteEvent) {
transmit(p);
} else if (evt instanceof ConfigureCompleteEvent) {
setTrackFormat(p);
p.setContentDescriptor(new ContentDescriptor(ContentDescriptor.RAW_RTP));
p.realize();
} else if (evt instanceof EndOfMediaEvent) {
System.exit(0);
}
}
}
Here is my RTPClient
import java.io.IOException;
import javax.media.*;
public class RTPClientMP3 {
public static void main(String[] args) throws NoPlayerException, CannotRealizeException, IOException {
String srcUrl = "rtp://192.168.1.86:49150/audio/1";
MediaLocator src = new MediaLocator(srcUrl);
Player player = Manager.createRealizedPlayer(src);
player.start();
}
}
