A JMF problem — could everyone please take a look at the code below?
/**
 * Builds {@code processor} from the configured capture locators and programs its
 * tracks for RTP transmission.
 * <p>
 * The DataSource can be audio-only, video-only, or a merge of both when the two
 * capture devices are available. Tracks are restricted to RAW_RTP output and each
 * enabled track is set to the format requested by {@code sessionDescription}.
 *
 * @return {@code null} on success, otherwise a short error message describing
 *         the first failure encountered.
 */
private String createProcessor() {
    DataSource audioDS = null;
    DataSource videoDS = null;
    DataSource mergeDS = null;
    StateListener stateListener = new StateListener();
    // Create the DataSource: it can be a 'video' DataSource, an 'audio'
    // DataSource, or a combination of audio and video by merging both.
    if (videoLocator == null && audioLocator == null)
        return "Locator is null";
    if (audioLocator != null) {
        try {
            // Create the 'audio' DataSource.
            audioDS = javax.media.Manager.createDataSource(audioLocator);
        } catch (Exception e) {
            System.out.println("-> Couldn't connect to audio capture device");
        }
    }
    if (videoLocator != null) {
        try {
            // Create the 'video' DataSource.
            videoDS = javax.media.Manager.createDataSource(videoLocator);
        } catch (Exception e) {
            System.out.println("-> Couldn't connect to video capture device");
        }
    }
    // FIX: the original fell through with both sources null (exceptions above are
    // only logged) and later called createProcessor(null) -> NullPointerException.
    if (audioDS == null && videoDS == null)
        return "Couldn't create a DataSource from the capture devices";
    if (videoDS != null && audioDS != null) {
        try {
            // Merge the 'audio' and 'video' DataSources into one.
            mergeDS = javax.media.Manager.createMergingDataSource(new DataSource[] {audioDS, videoDS});
        } catch (Exception e) {
            System.out.println("-> Couldn't connect to audio or video capture device");
        }
        // FIX: only use the merged source if it was actually created; the original
        // passed a possibly-null mergeDS to createProcessor -> NullPointerException.
        if (mergeDS != null) {
            try {
                // Create the processor from the merging DataSource.
                processor = javax.media.Manager.createProcessor(mergeDS);
            } catch (NoProcessorException npe) {
                return "Couldn't create processor";
            } catch (IOException ioe) {
                return "IOException creating processor";
            }
        }
    }
    // Fall back to a single-source processor if the merge path didn't produce one.
    if (processor == null) {
        try {
            if (audioDS != null)
                // Create the processor from the 'audio' DataSource.
                processor = javax.media.Manager.createProcessor(audioDS);
            else
                // Create the processor from the 'video' DataSource
                // (videoDS is non-null here thanks to the guard above).
                processor = javax.media.Manager.createProcessor(videoDS);
        } catch (NoProcessorException npe) {
            return "Couldn't create processor";
        } catch (IOException ioe) {
            return "IOException creating processor";
        }
    }
    // Wait for the processor to reach the Configured state.
    boolean result = stateListener.waitForState(processor, Processor.Configured);
    if (result == false)
        return "Couldn't configure processor";
    // Get the tracks from the processor.
    TrackControl[] tracks = processor.getTrackControls();
    // Do we have at least one track?
    if (tracks == null || tracks.length < 1)
        return "Couldn't find tracks in processor";
    // Set the output content descriptor to RAW_RTP. This limits the formats
    // reported by Track.getSupportedFormats to valid RTP formats only.
    ContentDescriptor cd = new ContentDescriptor(ContentDescriptor.RAW_RTP);
    processor.setContentDescriptor(cd);
    Format supported[];
    Format chosen = null;
    boolean atLeastOneTrack = false;
    // Program the tracks: pick, per track, a supported RTP format that matches
    // the encoding requested in the session description.
    for (int i = 0; i < tracks.length; i++) {
        chosen = null;
        Format format = tracks[i].getFormat();
        if (tracks[i].isEnabled()) {
            supported = tracks[i].getSupportedFormats();
            // We've set the output content to RAW_RTP, so every supported
            // format here should work with RTP.
            if (supported.length > 0) {
                for (int j = 0; j < supported.length; j++) {
                    if (supported[j] instanceof VideoFormat) {
                        // For video formats, double-check the sizes since not
                        // all formats work in all sizes.
                        if (sessionDescription.getVideoFormat() != null)
                            if (supported[j].toString().toLowerCase().indexOf(
                                    sessionDescription.getVideoFormat().toLowerCase()) != -1)
                                chosen = checkForVideoSizes(tracks[i].getFormat(),
                                        supported[j]);
                    } else {
                        if (sessionDescription.getAudioFormat() != null)
                            if (supported[j].toString().toLowerCase().indexOf(
                                    sessionDescription.getAudioFormat().toLowerCase()) != -1)
                                chosen = supported[j];
                    }
                }
                if (chosen != null) {
                    tracks[i].setFormat(chosen);
                    System.err.println("Track " + i + " is set to transmit as:");
                    System.err.println("  " + chosen);
                    atLeastOneTrack = true;
                }
            } else
                tracks[i].setEnabled(false);
        } else
            tracks[i].setEnabled(false);
    }
    if (!atLeastOneTrack)
        return "Couldn't set any of the tracks to a valid RTP format";
    // Realize the processor. This internally creates a flow graph and attempts
    // to create an output datasource for the RTP audio/video frames.
    // NOTE(review): this is the point the original poster flagged as failing
    // ("error here"); if it hangs, see the missing wait timeout in waitForState.
    result = stateListener.waitForState(processor, Controller.Realized);
    if (result == false)
        return "Couldn't realize processor";
    // Set the JPEG quality to 0.25 (the original comment said .5 but the code
    // has always passed 0.25f; comment fixed to match the behavior).
    setJPEGQuality(processor, 0.25f);
    // Get the output data source of the processor.
    dataOutput = processor.getDataOutput();
    return null;
}
/**
 * Drives the processor to the requested state and blocks until it is reached
 * or a failure event is reported by the controller listener.
 *
 * @param p     the processor to transition
 * @param state the target state, {@code Processor.Configured} or
 *              {@code Processor.Realized}
 * @return {@code true} if the state was reached, {@code false} on failure or
 *         interruption
 */
public synchronized boolean waitForState(Processor p, int state) {
    p.addControllerListener(this);
    failed = false;
    // Kick off the asynchronous state transition.
    if (state == Processor.Configured) {
        p.configure();
    } else if (state == Processor.Realized) {
        p.realize();
    }
    // Wait until an event confirms success, or a failure event arrives
    // (see the StateListener inner class, which notifies on getStateLock()).
    while (p.getState() < state && !failed) {
        synchronized (getStateLock()) {
            try {
                // FIX: wait with a timeout. The state check above runs outside
                // the lock, so a notify fired in that window would be lost and
                // an untimed wait() could block forever — the classic cause of
                // this code hanging at the Realized transition. The timeout
                // forces a periodic re-check of the state.
                getStateLock().wait(100);
            } catch (InterruptedException ie) {
                // FIX: restore the interrupt status instead of swallowing it.
                Thread.currentThread().interrupt();
                return false;
            }
        }
    }
    return !failed;
}
Where exactly is the problem occurring?