
/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jitsi.impl.neomedia;
import java.awt.*;
import java.util.*;
import java.util.List;
import java.util.regex.*;
import javax.media.*;
import javax.media.control.*;
import javax.media.format.*;
import javax.media.protocol.*;
import org.jitsi.impl.neomedia.control.*;
import org.jitsi.impl.neomedia.device.*;
import org.jitsi.impl.neomedia.rtcp.*;
import org.jitsi.impl.neomedia.rtp.*;
import org.jitsi.impl.neomedia.rtp.remotebitrateestimator.*;
import org.jitsi.impl.neomedia.rtp.sendsidebandwidthestimation.*;
import org.jitsi.impl.neomedia.transform.*;
import org.jitsi.impl.neomedia.transform.fec.*;
import org.jitsi.service.configuration.*;
import org.jitsi.service.libjitsi.*;
import org.jitsi.service.neomedia.*;
import org.jitsi.service.neomedia.QualityControl;
import org.jitsi.service.neomedia.control.*;
import org.jitsi.service.neomedia.control.KeyFrameControl;
import org.jitsi.service.neomedia.device.*;
import org.jitsi.service.neomedia.format.*;
import org.jitsi.service.neomedia.rtp.*;
import org.jitsi.util.*;
import org.jitsi.util.event.*;
import org.jitsi.utils.concurrent.*;
import org.jitsi.utils.logging.*;
/**
* Extends MediaStreamImpl in order to provide an implementation of
* VideoMediaStream.
*
* @author Lyubomir Marinov
* @author Sebastien Vincent
* @author George Politis
*/
public class VideoMediaStreamImpl
extends MediaStreamImpl
implements VideoMediaStream
{
/**
* The Logger used by the VideoMediaStreamImpl class and
* its instances for logging output.
*/
private static final Logger logger
= Logger.getLogger(VideoMediaStreamImpl.class);
/**
* The indicator which determines whether RTCP feedback Picture Loss
* Indication messages are to be used.
*/
private static final boolean USE_RTCP_FEEDBACK_PLI = true;
/**
* The RecurringRunnableExecutor to be utilized by the
* MediaStreamImpl class and its instances.
*/
private static final RecurringRunnableExecutor
recurringRunnableExecutor = new RecurringRunnableExecutor(
VideoMediaStreamImpl.class.getSimpleName());
/**
* Extracts and returns the maximum send and receive resolutions from the
* image attribute.
*
* @param imgattr send/recv resolution string
* @return an array of maximum resolutions (the first element is send, the
* second is recv). Elements may be null if the image attribute is not
* present or if the resolution is a wildcard.
*/
public static java.awt.Dimension[] parseSendRecvResolution(String imgattr)
{
java.awt.Dimension res[] = new java.awt.Dimension[2];
String token = null;
Pattern pSendSingle = Pattern.compile("send \\[x=[0-9]+,y=[0-9]+\\]");
Pattern pRecvSingle = Pattern.compile("recv \\[x=[0-9]+,y=[0-9]+\\]");
Pattern pSendRange = Pattern.compile(
"send \\[x=\\[[0-9]+(-|:)[0-9]+\\],y=\\[[0-9]+(-|:)[0-9]+\\]\\]");
Pattern pRecvRange = Pattern.compile(
"recv \\[x=\\[[0-9]+(-|:)[0-9]+\\],y=\\[[0-9]+(-|:)[0-9]+\\]\\]");
Pattern pNumeric = Pattern.compile("[0-9]+");
Matcher mSingle = null;
Matcher mRange = null;
Matcher m = null;
/* resolution (width and height) can be in one of four forms
*
* - single value [x=1920,y=1200]
* - range of values [x=[800:1024],y=[600:768]]
* - fixed range of values [x=[800,1024],y=[600,768]]
* - range of values with step [x=[800:32:1024],y=[600:32:768]]
*
* For the moment we only support the first two forms.
*/
/* send part */
mSingle = pSendSingle.matcher(imgattr);
mRange = pSendRange.matcher(imgattr);
if(mSingle.find())
{
int val[] = new int[2];
int i = 0;
token = imgattr.substring(mSingle.start(), mSingle.end());
m = pNumeric.matcher(token);
while(m.find() && i < 2)
{
val[i] = Integer.parseInt(token.substring(m.start(), m.end()));
i++;
}
res[0] = new java.awt.Dimension(val[0], val[1]);
}
else if(mRange.find()) /* try with range */
{
/* we have two values for width and two for height (min:max) */
int val[] = new int[4];
int i = 0;
token = imgattr.substring(mRange.start(), mRange.end());
m = pNumeric.matcher(token);
while(m.find() && i < 4)
{
val[i] = Integer.parseInt(token.substring(m.start(), m.end()));
i++;
}
res[0] = new java.awt.Dimension(val[1], val[3]);
}
/* recv part */
mSingle = pRecvSingle.matcher(imgattr);
mRange = pRecvRange.matcher(imgattr);
if(mSingle.find())
{
int val[] = new int[2];
int i = 0;
token = imgattr.substring(mSingle.start(), mSingle.end());
m = pNumeric.matcher(token);
while(m.find() && i < 2)
{
val[i] = Integer.parseInt(token.substring(m.start(), m.end()));
i++;
}
res[1] = new java.awt.Dimension(val[0], val[1]);
}
else if(mRange.find()) /* try with range */
{
/* we have two values for width and two for height (min:max) */
int val[] = new int[4];
int i = 0;
token = imgattr.substring(mRange.start(), mRange.end());
m = pNumeric.matcher(token);
while(m.find() && i < 4)
{
val[i] = Integer.parseInt(token.substring(m.start(), m.end()));
i++;
}
res[1] = new java.awt.Dimension(val[1], val[3]);
}
return res;
}
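/*
* Usage sketch (illustrative only, not part of the original file): for a
* typical SDP image attribute such as
*
* "recv [x=640,y=360] send [x=[640:1280],y=[360:720]]"
*
* this method yields a send maximum of 1280x720 (the upper bound of the
* send range) and a recv maximum of 640x360 (the single recv value):
*
* Dimension[] res = parseSendRecvResolution(
* "recv [x=640,y=360] send [x=[640:1280],y=[360:720]]");
* Dimension sendMax = res[0]; // 1280x720
* Dimension recvMax = res[1]; // 640x360
*/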
/**
* Selects the VideoFormat from the list of supported formats of a
* specific video DataSource which has a size as close as possible
* to a specific size and sets it as the format of the specified video
* DataSource.
*
* @param videoDS the video DataSource which is to have its
* supported formats examined and its format changed to the
* VideoFormat which is as close as possible to the specified
* preferredWidth and preferredHeight
* @param preferredWidth the width of the VideoFormat to be
* selected
* @param preferredHeight the height of the VideoFormat to be
* selected
* @return the size of the VideoFormat from the list of supported
* formats of videoDS which is as close as possible to
* preferredWidth and preferredHeight and which has been
* set as the format of videoDS
*/
public static Dimension selectVideoSize(
DataSource videoDS,
final int preferredWidth, final int preferredHeight)
{
if (videoDS == null)
return null;
FormatControl formatControl
= (FormatControl) videoDS.getControl(FormatControl.class.getName());
if (formatControl == null)
return null;
Format[] formats = formatControl.getSupportedFormats();
final int count = formats.length;
if (count < 1)
return null;
VideoFormat selectedFormat = null;
if (count == 1)
selectedFormat = (VideoFormat) formats[0];
else
{
class FormatInfo
{
public final double difference;
public final Dimension dimension;
public final VideoFormat format;
public FormatInfo(Dimension size)
{
this.format = null;
this.dimension = size;
this.difference = getDifference(this.dimension);
}
public FormatInfo(VideoFormat format)
{
this.format = format;
this.dimension = format.getSize();
this.difference = getDifference(this.dimension);
}
private double getDifference(Dimension size)
{
int width = (size == null) ? 0 : size.width;
double xScale;
if (width == 0)
xScale = Double.POSITIVE_INFINITY;
else if (width == preferredWidth)
xScale = 1;
else
xScale = (preferredWidth / (double) width);
int height = (size == null) ? 0 : size.height;
double yScale;
if (height == 0)
yScale = Double.POSITIVE_INFINITY;
else if (height == preferredHeight)
yScale = 1;
else
yScale = (preferredHeight / (double) height);
return Math.abs(1 - Math.min(xScale, yScale));
}
}
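/*
* Worked example (illustrative): with preferredWidth=1280 and
* preferredHeight=720, a 640x360 format gives xScale = 1280/640 = 2.0 and
* yScale = 720/360 = 2.0, hence difference = |1 - min(2.0, 2.0)| = 1.0,
* while a 1280x720 format gives difference = 0 and is selected outright
* by the loop below.
*/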
FormatInfo[] infos = new FormatInfo[count];
for (int i = 0; i < count; i++)
{
FormatInfo info
= infos[i]
= new FormatInfo((VideoFormat) formats[i]);
if (info.difference == 0)
{
selectedFormat = info.format;
break;
}
}
if (selectedFormat == null)
{
Arrays.sort(infos, new Comparator<FormatInfo>()
{
public int compare(FormatInfo info0, FormatInfo info1)
{
return
Double.compare(info0.difference, info1.difference);
}
});
selectedFormat = infos[0].format;
}
/*
* If videoDS states that it supports any size, use the size that we
* support which is closest to (or smaller than) the preferred one.
*/
if ((selectedFormat != null)
&& (selectedFormat.getSize() == null))
{
VideoFormat currentFormat
= (VideoFormat) formatControl.getFormat();
Dimension currentSize = null;
int width = preferredWidth;
int height = preferredHeight;
// Try to preserve the aspect ratio
if (currentFormat != null)
currentSize = currentFormat.getSize();
// sort supported resolutions by aspect
FormatInfo[] supportedInfos
= new FormatInfo[
DeviceConfiguration.SUPPORTED_RESOLUTIONS.length];
for (int i = 0; i < supportedInfos.length; i++)
{
supportedInfos[i]
= new FormatInfo(
DeviceConfiguration.SUPPORTED_RESOLUTIONS[i]);
}
Arrays.sort(supportedInfos, new Comparator<FormatInfo>()
{
public int compare(FormatInfo info0, FormatInfo info1)
{
return
Double.compare(info0.difference, info1.difference);
}
});
FormatInfo preferredFormat
= new FormatInfo(
new Dimension(preferredWidth, preferredHeight));
Dimension closestAspect = null;
// Let's choose the closest size to the preferred one, finding
// the first suitable aspect
for(FormatInfo supported : supportedInfos)
{
// find the first matching aspect
if(preferredFormat.difference > supported.difference)
continue;
else if(closestAspect == null)
closestAspect = supported.dimension;
if(supported.dimension.height <= preferredHeight
&& supported.dimension.width <= preferredWidth)
{
currentSize = supported.dimension;
}
}
if(currentSize == null)
currentSize = closestAspect;
if ((currentSize.width > 0) && (currentSize.height > 0))
{
width = currentSize.width;
height = currentSize.height;
}
selectedFormat
= (VideoFormat)
new VideoFormat(
null,
new Dimension(width, height),
Format.NOT_SPECIFIED,
null,
Format.NOT_SPECIFIED)
.intersects(selectedFormat);
}
}
Format setFormat = formatControl.setFormat(selectedFormat);
return
(setFormat instanceof VideoFormat)
? ((VideoFormat) setFormat).getSize()
: null;
}
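/*
* Usage sketch (hypothetical caller, not part of the original file):
*
* DataSource videoDS = ...; // e.g. a capture DataSource
* Dimension actual = selectVideoSize(videoDS, 1280, 720);
* // 'actual' is the size the DataSource format was actually set to,
* // or null if videoDS exposes no FormatControl or no formats.
*/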
/**
* The VideoListener which handles VideoEvents from the
* MediaDeviceSession of this instance and fires respective
* VideoEvents from this VideoMediaStream to its
* VideoListeners.
*/
private VideoListener deviceSessionVideoListener;
/**
* The KeyFrameControl of this VideoMediaStream.
*/
private KeyFrameControl keyFrameControl;
/**
* The negotiated output size of the video stream. The original capture
* device stream may need to be scaled to this size.
*/
private Dimension outputSize;
/**
* The QualityControl of this VideoMediaStream.
*/
private final QualityControlImpl qualityControl = new QualityControlImpl();
/**
* The instance that is aware of all of the {@link RTPEncodingDesc} of the
* remote endpoint.
*/
private final MediaStreamTrackReceiver mediaStreamTrackReceiver
= new VideoMediaStreamTrackReceiver(this);
/**
* The transformer which handles outgoing rtx (RFC-4588) packets for this
* {@link VideoMediaStreamImpl}.
*/
private final RtxTransformer rtxTransformer = new RtxTransformer(this);
/**
* The transformer which handles incoming and outgoing FEC packets for this
* {@link VideoMediaStreamImpl}.
*/
private TransformEngineWrapper<FECTransformEngine> fecTransformEngineWrapper
= new TransformEngineWrapper<>();
/**
* The instance that terminates RRs and REMBs.
*/
private final RTCPReceiverFeedbackTermination rtcpFeedbackTermination
= new RTCPReceiverFeedbackTermination(this);
/**
* The {@link PaddingTermination} which terminates incoming RTP padding for
* this {@link VideoMediaStreamImpl}.
*/
private final PaddingTermination paddingTermination = new PaddingTermination();
/**
* The RemoteBitrateEstimator which computes bitrate estimates for
* the incoming RTP streams.
*/
private final RemoteBitrateEstimatorWrapper remoteBitrateEstimator
= new RemoteBitrateEstimatorWrapper(
new RemoteBitrateObserver()
{
@Override
public void onReceiveBitrateChanged(
Collection<Long> ssrcs,
long bitrate)
{
VideoMediaStreamImpl.this
.remoteBitrateEstimatorOnReceiveBitrateChanged(
ssrcs,
bitrate);
}
}, getDiagnosticContext());
/**
* The facility which aids this instance in managing a list of
* VideoListeners and firing VideoEvents to them.
*
* Since the videoNotifierSupport of this
* VideoMediaStreamImpl just forwards the VideoEvents of
* the associated VideoMediaDeviceSession at the time of this
* writing, it does not make sense to have videoNotifierSupport
* executing asynchronously because it does not know whether it has to wait
* for the delivery of the VideoEvents and thus it has to default
* to waiting anyway.
*/
private final VideoNotifierSupport videoNotifierSupport
= new VideoNotifierSupport(this, true);
/**
* The {@link BandwidthEstimator} which estimates the available bandwidth
* from this endpoint to the remote peer.
*/
private BandwidthEstimatorImpl bandwidthEstimator;
/**
* The {@link CachingTransformer} which caches outgoing/incoming packets
* from/to this {@link VideoMediaStreamImpl}.
*/
private CachingTransformer cachingTransformer;
/**
* Whether the remote end supports RTCP FIR.
*/
private boolean supportsFir = false;
/**
* Whether the remote end supports RTCP PLI.
*/
private boolean supportsPli = false;
/**
* Initializes a new VideoMediaStreamImpl instance which will use
* the specified MediaDevice for both capture and playback of video
* exchanged via the specified StreamConnector.
*
* @param connector the StreamConnector the new instance is to use
* for sending and receiving video
* @param device the MediaDevice the new instance is to use for
* both capture and playback of video exchanged via the specified
* StreamConnector
* @param srtpControl an already created control which is used to control
* the SRTP operations
*/
public VideoMediaStreamImpl(StreamConnector connector, MediaDevice device,
SrtpControl srtpControl)
{
super(connector, device, srtpControl);
recurringRunnableExecutor.registerRecurringRunnable(rtcpFeedbackTermination);
}
/**
* {@inheritDoc}
*/
@Override
public RtxTransformer getRtxTransformer()
{
return rtxTransformer;
}
/**
* {@inheritDoc}
*/
@Override
protected TransformEngineWrapper<FECTransformEngine> getFecTransformEngine()
{
return this.fecTransformEngineWrapper;
}
/**
* {@inheritDoc}
*/
@Override
protected void setFecTransformEngine(FECTransformEngine fecTransformEngine)
{
this.fecTransformEngineWrapper.setWrapped(fecTransformEngine);
}
/**
* Sets the value of the flag which indicates whether the remote end
* supports RTCP FIR or not.
* @param supportsFir the value to set.
*/
public void setSupportsFir(boolean supportsFir)
{
this.supportsFir = supportsFir;
}
/**
* Sets the value of the flag which indicates whether the remote end
* supports RTCP PLI or not.
* @param supportsPli the value to set.
*/
public void setSupportsPli(boolean supportsPli)
{
this.supportsPli = supportsPli;
}
/**
* Sets the value of the flag which indicates whether the remote end
* supports RTCP REMB or not.
* @param supportsRemb the value to set.
*/
public void setSupportsRemb(boolean supportsRemb)
{
remoteBitrateEstimator.setSupportsRemb(supportsRemb);
}
/**
* @return {@code true} iff the remote end supports RTCP FIR.
*/
public boolean supportsFir()
{
return supportsFir;
}
/**
* @return {@code true} iff the remote end supports RTCP PLI.
*/
public boolean supportsPli()
{
return supportsPli;
}
/**
* Sets the remote SSRC.
*
* @param ssrc remote SSRC
*/
@Override
protected void addRemoteSourceID(long ssrc)
{
super.addRemoteSourceID(ssrc);
MediaDeviceSession deviceSession = getDeviceSession();
if (deviceSession instanceof VideoMediaDeviceSession)
((VideoMediaDeviceSession) deviceSession).setRemoteSSRC(ssrc);
}
/**
* Adds a specific VideoListener to this VideoMediaStream
* in order to receive notifications when visual/video Components
* are being added and removed.
*
* Adding a listener which has already been added does nothing, i.e. it is
* not added more than once and thus does not receive one and the same
* VideoEvent multiple times.
*
* @param listener the VideoListener to be notified when
* visual/video Components are being added or removed in this
* VideoMediaStream
*/
public void addVideoListener(VideoListener listener)
{
videoNotifierSupport.addVideoListener(listener);
}
/**
* {@inheritDoc}
*/
@Override
public void close()
{
try
{
super.close();
}
finally
{
if (cachingTransformer != null)
{
recurringRunnableExecutor.deRegisterRecurringRunnable(
cachingTransformer);
}
if (bandwidthEstimator != null)
{
recurringRunnableExecutor
.deRegisterRecurringRunnable(bandwidthEstimator);
}
if (rtcpFeedbackTermination != null)
{
recurringRunnableExecutor
.deRegisterRecurringRunnable(rtcpFeedbackTermination);
}
}
}
/**
* Performs any optional configuration on a specific
* RTPConnectorOutputStream of an RTPManager to be used by
* this MediaStreamImpl.
*
* @param dataOutputStream the RTPConnectorOutputStream to be used
* by an RTPManager of this MediaStreamImpl and to be
* configured
*/
@Override
protected void configureDataOutputStream(
RTPConnectorOutputStream dataOutputStream)
{
super.configureDataOutputStream(dataOutputStream);
/*
* XXX Android's current video CaptureDevice is based on MediaRecorder,
* which gives no control over the number and the size of the packets.
* Frame dropping is not implemented because it is hard, since
* MediaRecorder produces already-encoded video.
*/
if (!OSUtils.IS_ANDROID)
{
int maxBandwidth
= NeomediaServiceUtils
.getMediaServiceImpl()
.getDeviceConfiguration()
.getVideoRTPPacingThreshold();
// Ignore the case of maxBandwidth > 1000, because in this case
// setMaxPacketsPerMillis fails. Effectively, this means that no
// pacing is performed when the user deliberately set the setting to
// over 1000 (1MByte/s according to the GUI). This is probably close
// to what the user expects, and makes more sense than failing with
// an exception.
// TODO: proper handling of maxBandwidth values >1000
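// Illustrative arithmetic (not in the original source): with a pacing
// threshold of, say, 256 kbytes/s, the call below becomes
// setMaxPacketsPerMillis(1, 1000 / 256), i.e. at most one packet every
// ~3 ms, which caps the packet rate at roughly the configured bandwidth
// for MTU-sized packets.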
if (maxBandwidth <= 1000)
{
// At most one packet every (1000 / maxBandwidth) milliseconds (the
// setting is expressed per second).
dataOutputStream.setMaxPacketsPerMillis(1, 1000 / maxBandwidth);
}
}
}
/**
* Performs any optional configuration on the BufferControl of the
* specified RTPManager which is to be used as the
* RTPManager of this MediaStreamImpl.
*
* @param rtpManager the RTPManager which is to be used by this
* MediaStreamImpl
* @param bufferControl the BufferControl of rtpManager on
* which any optional configuration is to be performed
*/
@Override
protected void configureRTPManagerBufferControl(
StreamRTPManager rtpManager,
BufferControl bufferControl)
{
super.configureRTPManagerBufferControl(rtpManager, bufferControl);
bufferControl.setBufferLength(BufferControl.MAX_VALUE);
}
/**
* {@inheritDoc}
*/
@Override
public MediaStreamTrackReceiver getMediaStreamTrackReceiver()
{
return mediaStreamTrackReceiver;
}
/**
* Notifies this MediaStream that the MediaDevice (and
* respectively the MediaDeviceSession with it) which this instance
* uses for capture and playback of media has been changed. Makes sure that
* the VideoListeners of this instance get VideoEvents for
* the new/current VideoMediaDeviceSession and not for the old one.
*
* Note: this overridden method gets executed in the
* MediaStreamImpl constructor. As a consequence we cannot assume
* proper initialization of the fields specific to
* VideoMediaStreamImpl.
*
* @param oldValue the MediaDeviceSession with the
* MediaDevice this instance used to work with
* @param newValue the MediaDeviceSession with the
* MediaDevice this instance is to work with
* @see MediaStreamImpl#deviceSessionChanged(MediaDeviceSession,
* MediaDeviceSession)
*/
@Override
protected void deviceSessionChanged(
MediaDeviceSession oldValue,
MediaDeviceSession newValue)
{
super.deviceSessionChanged(oldValue, newValue);
if (oldValue instanceof VideoMediaDeviceSession)
{
VideoMediaDeviceSession oldVideoMediaDeviceSession
= (VideoMediaDeviceSession) oldValue;
if (deviceSessionVideoListener != null)
oldVideoMediaDeviceSession.removeVideoListener(
deviceSessionVideoListener);
/*
* The oldVideoMediaDeviceSession is being disconnected from this
* VideoMediaStreamImpl so do not let it continue using its
* keyFrameControl.
*/
oldVideoMediaDeviceSession.setKeyFrameControl(null);
}
if (newValue instanceof VideoMediaDeviceSession)
{
VideoMediaDeviceSession newVideoMediaDeviceSession
= (VideoMediaDeviceSession) newValue;
if (deviceSessionVideoListener == null)
{
deviceSessionVideoListener = new VideoListener()
{
/**
* {@inheritDoc}
*
* Notifies that a visual Component depicting video
* was reported added by the provider this listener is added
* to.
*/
public void videoAdded(VideoEvent e)
{
if (fireVideoEvent(
e.getType(),
e.getVisualComponent(),
e.getOrigin(),
true))
e.consume();
}
/**
* {@inheritDoc}
*
* Notifies that a visual Component depicting video
* was reported removed by the provider this listener is
* added to.
*/
public void videoRemoved(VideoEvent e)
{
videoAdded(e);
}
/**
* {@inheritDoc}
*
* Notifies that a visual Component depicting video
* was reported updated by the provider this listener is
* added to.
*/
public void videoUpdate(VideoEvent e)
{
fireVideoEvent(e, true);
}
};
}
newVideoMediaDeviceSession.addVideoListener(
deviceSessionVideoListener);
newVideoMediaDeviceSession.setOutputSize(outputSize);
AbstractRTPConnector rtpConnector = getRTPConnector();
if (rtpConnector != null)
newVideoMediaDeviceSession.setConnector(rtpConnector);
newVideoMediaDeviceSession.setRTCPFeedbackPLI(
USE_RTCP_FEEDBACK_PLI);
/*
* The newVideoMediaDeviceSession is being connected to this
* VideoMediaStreamImpl so the key frame-related logic will be
* controlled by the keyFrameControl of this VideoMediaStreamImpl.
*/
newVideoMediaDeviceSession.setKeyFrameControl(getKeyFrameControl());
}
}
/**
* Notifies the VideoListeners registered with this
* VideoMediaStream about a specific type of change in the
* availability of a specific visual Component depicting video.
*
* @param type the type of change as defined by VideoEvent in the
* availability of the specified visual Component depicting video
* @param visualComponent the visual Component depicting video
* which has been added or removed in this VideoMediaStream
* @param origin {@link VideoEvent#LOCAL} if the origin of the video is
* local (e.g. it is being locally captured); {@link VideoEvent#REMOTE} if
* the origin of the video is remote (e.g. a remote peer is streaming it)
* @param wait true if the call is to wait till the specified
* VideoEvent has been delivered to the VideoListeners;
* otherwise, false
* @return true if this event and, more specifically, the visual
* Component it describes have been consumed and should be
* considered owned, referenced (which is important because
* Components belong to a single Container at a time);
* otherwise, false
*/
protected boolean fireVideoEvent(
int type, Component visualComponent, int origin,
boolean wait)
{
if (logger.isTraceEnabled())
logger
.trace(
"Firing VideoEvent with type "
+ VideoEvent.typeToString(type)
+ " and origin "
+ VideoEvent.originToString(origin));
return
videoNotifierSupport.fireVideoEvent(
type, visualComponent, origin,
wait);
}
/**
* Notifies the VideoListeners registered with this instance about
* a specific VideoEvent.
*
* @param event the VideoEvent to be fired to the
* VideoListeners registered with this instance
* @param wait true if the call is to wait till the specified
* VideoEvent has been delivered to the VideoListeners;
* otherwise, false
*/
protected void fireVideoEvent(VideoEvent event, boolean wait)
{
videoNotifierSupport.fireVideoEvent(event, wait);
}
/**
* Implements {@link VideoMediaStream#getKeyFrameControl()}.
*
* {@inheritDoc}
* @see VideoMediaStream#getKeyFrameControl()
*/
public KeyFrameControl getKeyFrameControl()
{
if (keyFrameControl == null)
keyFrameControl = new KeyFrameControlAdapter();
return keyFrameControl;
}
/**
* Gets the visual Component, if any, depicting the video streamed
* from the local peer to the remote peer.
*
* @return the visual Component depicting the local video if local
* video is actually being streamed from the local peer to the remote peer;
* otherwise, null
*/
public Component getLocalVisualComponent()
{
MediaDeviceSession deviceSession = getDeviceSession();
return
(deviceSession instanceof VideoMediaDeviceSession)
? ((VideoMediaDeviceSession) deviceSession)
.getLocalVisualComponent()
: null;
}
/**
* The priority of the video is 5, which is meant to be higher than that of
* other threads and lower than that of the audio thread.
* @return video priority.
*/
@Override
protected int getPriority()
{
return 5;
}
/**
* Gets the QualityControl of this VideoMediaStream.
*
* @return the QualityControl of this VideoMediaStream
*/
public QualityControl getQualityControl()
{
return qualityControl;
}
/**
* {@inheritDoc}
*/
@Override
public RemoteBitrateEstimatorWrapper getRemoteBitrateEstimator()
{
return remoteBitrateEstimator;
}
/**
* Gets the visual Component where video from the remote peer is
* being rendered or null if no video is currently being rendered.
*
* @return the visual Component where video from the remote peer is
* being rendered or null if no video is currently being rendered
* @see VideoMediaStream#getVisualComponent()
*/
@Deprecated
public Component getVisualComponent()
{
List<Component> visualComponents = getVisualComponents();
return visualComponents.isEmpty() ? null : visualComponents.get(0);
}
/**
* Gets the visual Components rendering the ReceiveStream
* corresponding to the given ssrc.
*
* @param ssrc the SSRC of the receive stream whose visual
* Component we're looking for
* @return the visual Component rendering the
* ReceiveStream corresponding to the given ssrc
*/
public Component getVisualComponent(long ssrc)
{
MediaDeviceSession deviceSession = getDeviceSession();
return
(deviceSession instanceof VideoMediaDeviceSession)
? ((VideoMediaDeviceSession) deviceSession).getVisualComponent(
ssrc)
: null;
}
/**
* Gets a list of the visual Components where video from the remote
* peer is being rendered.
*
* @return a list of the visual Components where video from the
* remote peer is being rendered
* @see VideoMediaStream#getVisualComponents()
*/
public List<Component> getVisualComponents()
{
MediaDeviceSession deviceSession = getDeviceSession();
List<Component> visualComponents;
if (deviceSession instanceof VideoMediaDeviceSession)
{
visualComponents
= ((VideoMediaDeviceSession) deviceSession)
.getVisualComponents();
}
else
visualComponents = Collections.emptyList();
return visualComponents;
}
/**
* Handles attributes contained in MediaFormat.
*
* @param format the MediaFormat to handle the attributes of
* @param attrs the attributes Map to handle
*/
@Override
protected void handleAttributes(
MediaFormat format,
Map<String, String> attrs)
{
/*
* Iterate over the specified attributes and handle those of them which
* we recognize.
*/
if(attrs != null)
{
/*
* The width and height attributes are separate but they have to be
* collected into a Dimension in order to be handled.
*/
String width = null;
String height = null;
for(Map.Entry<String, String> attr : attrs.entrySet())
{
String key = attr.getKey();
String value = attr.getValue();
if(key.equals("rtcp-fb"))
{
// if (value.equals("nack pli"))
// USE_PLI = true;
}
else if(key.equals("imageattr"))
{
/*
* If the width and height attributes have been collected
* into outputSize, do not override the Dimension they have
* specified.
*/
if((attrs.containsKey("width")
|| attrs.containsKey("height"))
&& (outputSize != null))
{
continue;
}
Dimension res[] = parseSendRecvResolution(value);
if(res != null)
{
setOutputSize(res[1]);
qualityControl.setRemoteSendMaxPreset(
new QualityPreset(res[0]));
qualityControl.setRemoteReceiveResolution(outputSize);
((VideoMediaDeviceSession)getDeviceSession())
.setOutputSize(outputSize);
}
}
else if(key.equals("CIF"))
{
Dimension dim = new Dimension(352, 288);
if((outputSize == null)
|| ((outputSize.width < dim.width)
&& (outputSize.height < dim.height)))
{
setOutputSize(dim);
((VideoMediaDeviceSession)getDeviceSession())
.setOutputSize(outputSize);
}
}
else if(key.equals("QCIF"))
{
Dimension dim = new Dimension(176, 144);
if((outputSize == null)
|| ((outputSize.width < dim.width)
&& (outputSize.height < dim.height)))
{
setOutputSize(dim);
((VideoMediaDeviceSession)getDeviceSession())
.setOutputSize(outputSize);
}
}
else if(key.equals("VGA")) // X-Lite sends it.
{
Dimension dim = new Dimension(640, 480);
if((outputSize == null)
|| ((outputSize.width < dim.width)
&& (outputSize.height < dim.height)))
{
// X-Lite does not display anything if we send 640x480.
setOutputSize(dim);
((VideoMediaDeviceSession)getDeviceSession())
.setOutputSize(outputSize);
}
}
else if(key.equals("CUSTOM"))
{
String args[] = value.split(",");
if(args.length < 3)
continue;
try
{
Dimension dim
= new Dimension(
Integer.parseInt(args[0]),
Integer.parseInt(args[1]));
if((outputSize == null)
|| ((outputSize.width < dim.width)
&& (outputSize.height < dim.height)))
{
setOutputSize(dim);
((VideoMediaDeviceSession)getDeviceSession())
.setOutputSize(outputSize);
}
}
catch(Exception e)
{
// Ignore a malformed CUSTOM attribute value.
}
}
else if (key.equals("width"))
{
width = value;
if(height != null)
{
setOutputSize(
new Dimension(
Integer.parseInt(width),
Integer.parseInt(height)));
((VideoMediaDeviceSession)getDeviceSession())
.setOutputSize(outputSize);
}
}
else if (key.equals("height"))
{
height = value;
if(width != null)
{
setOutputSize(
new Dimension(
Integer.parseInt(width),
Integer.parseInt(height)));
((VideoMediaDeviceSession)getDeviceSession())
.setOutputSize(outputSize);
}
}
}
}
}
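/*
* Illustrative example (hypothetical input, not from the original file):
* an attribute map such as
*
* { "imageattr" -> "send [x=1280,y=720] recv [x=640,y=360]" }
*
* makes outputSize become 640x360 (taken from the recv part) while the
* remote send maximum preset is derived from 1280x720 (the send part).
*/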
/**
* Moves the origin of a partial desktop streaming MediaDevice.
*
* @param x new x coordinate origin
* @param y new y coordinate origin
*/
public void movePartialDesktopStreaming(int x, int y)
{
MediaDeviceImpl dev = (MediaDeviceImpl) getDevice();
if (!DeviceSystem.LOCATOR_PROTOCOL_IMGSTREAMING.equals(
dev.getCaptureDeviceInfoLocatorProtocol()))
{
return;
}
DataSource captureDevice = getDeviceSession().getCaptureDevice();
Object imgStreamingControl
= captureDevice.getControl(ImgStreamingControl.class.getName());
if (imgStreamingControl == null)
return;
// Perform the screen detection with a point that lies inside a real
// screen, i.e. with x and y both greater than or equal to 0.
ScreenDevice screen
= NeomediaServiceUtils.getMediaServiceImpl().getScreenForPoint(
new Point((x < 0) ? 0 : x, (y < 0) ? 0 : y));
if (screen != null)
{
Rectangle bounds = ((ScreenDeviceImpl) screen).getBounds();
((ImgStreamingControl) imgStreamingControl).setOrigin(
0,
screen.getIndex(),
x - bounds.x, y - bounds.y);
}
}
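/*
* Illustrative example (assumed screen layout, not from the original
* file): with two 1920x1080 screens side by side, moving to x=2000, y=100
* resolves to the second screen, whose bounds start at (1920, 0), so the
* origin passed to the ImgStreamingControl is (2000 - 1920, 100 - 0) =
* (80, 100), i.e. coordinates relative to that screen.
*/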
/**
* Notifies this VideoMediaStreamImpl that
* {@link #remoteBitrateEstimator} has computed a new bitrate estimate for
* the incoming streams.
*
* @param ssrcs the SSRCs of the incoming RTP streams for which the new
* estimate was computed
* @param bitrate the new bitrate estimate
*/
private void remoteBitrateEstimatorOnReceiveBitrateChanged(
Collection<Long> ssrcs,
long bitrate)
{
// TODO Auto-generated method stub
}
/**
* Removes a specific VideoListener from this
* VideoMediaStream in order to no longer receive
* notifications when visual/video Components are being added and
* removed.
*
* @param listener the VideoListener to no longer be notified when
* visual/video Components are being added or removed in this
* VideoMediaStream
*/
public void removeVideoListener(VideoListener listener)
{
videoNotifierSupport.removeVideoListener(listener);
}
/**
* Notifies this MediaStream implementation that its
* RTPConnector instance has changed from a specific old value to a
* specific new value. Allows extenders to override and perform additional
* processing after this MediaStream has changed its
* RTPConnector instance.
*
* @param oldValue the RTPConnector of this MediaStream
* implementation before it got changed to newValue
* @param newValue the current RTPConnector of this
* MediaStream which replaced oldValue
* @see MediaStreamImpl#rtpConnectorChanged(AbstractRTPConnector,
* AbstractRTPConnector)
*/
@Override
protected void rtpConnectorChanged(
AbstractRTPConnector oldValue,
AbstractRTPConnector newValue)
{
super.rtpConnectorChanged(oldValue, newValue);
if (newValue != null)
{
MediaDeviceSession deviceSession = getDeviceSession();
if (deviceSession instanceof VideoMediaDeviceSession)
{
((VideoMediaDeviceSession) deviceSession)
.setConnector(newValue);
}
}
}
/**
* {@inheritDoc}
*/
@Override
protected void setLocalSourceID(long localSourceID)
{
super.setLocalSourceID(localSourceID);
MediaDeviceSession deviceSession = getDeviceSession();
if (deviceSession instanceof VideoMediaDeviceSession)
{
((VideoMediaDeviceSession) deviceSession).setLocalSSRC(
localSourceID);
}
}
/**
* Sets the size/resolution of the video to be output by this instance.
*
* @param outputSize the size/resolution of the video to be output by this
* instance
*/
private void setOutputSize(Dimension outputSize)
{
this.outputSize = outputSize;
}
/**
* Updates the QualityControl of this VideoMediaStream.
*
* @param advancedParams parameters of advanced attributes that may affect
* quality control
*/
public void updateQualityControl(Map<String, String> advancedParams)
{
for(Map.Entry<String, String> entry : advancedParams.entrySet())
{
if(entry.getKey().equals("imageattr"))
{
Dimension res[] = parseSendRecvResolution(entry.getValue());
if(res != null)
{
qualityControl.setRemoteSendMaxPreset(
new QualityPreset(res[0]));
qualityControl.setRemoteReceiveResolution(res[1]);
setOutputSize(res[1]);
((VideoMediaDeviceSession)getDeviceSession())
.setOutputSize(outputSize);
}
}
}
}
/**
* {@inheritDoc}
*/
@Override
protected CachingTransformer createCachingTransformer()
{
if (cachingTransformer == null)
{
cachingTransformer = new CachingTransformer(this);
recurringRunnableExecutor.registerRecurringRunnable(
cachingTransformer);
}
return cachingTransformer;
}
/**
* {@inheritDoc}
*/
@Override
protected RetransmissionRequesterImpl createRetransmissionRequester()
{
ConfigurationService cfg = LibJitsi.getConfigurationService();
if (cfg != null && cfg.getBoolean(REQUEST_RETRANSMISSIONS_PNAME, false))
{
return new RetransmissionRequesterImpl(this);
}
return null;
}
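/*
* Illustrative note (not part of the original file): NACK-based
* retransmission requesting is opt-in. A deployment would enable it by
* setting the boolean property named by REQUEST_RETRANSMISSIONS_PNAME
* (defined in VideoMediaStream) to true via the ConfigurationService,
* e.g. cfg.setProperty(REQUEST_RETRANSMISSIONS_PNAME, true); otherwise
* this method returns null and no requester is installed.
*/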
/**
* {@inheritDoc}
*/
@Override
protected TransformEngine getRTCPTermination()
{
return rtcpFeedbackTermination;
}
/**
* {@inheritDoc}
*/
@Override
protected PaddingTermination getPaddingTermination()
{
return paddingTermination;
}
/**
* {@inheritDoc}
*/
@Override
public BandwidthEstimator getOrCreateBandwidthEstimator()
{
if (bandwidthEstimator == null)
{
bandwidthEstimator = new BandwidthEstimatorImpl(this);
recurringRunnableExecutor
.registerRecurringRunnable(bandwidthEstimator);
if (logger.isDebugEnabled())
{
logger.debug(
"Creating a BandwidthEstimator for stream " + this);
}
}
return bandwidthEstimator;
}
}