package io.antmedia.muxer;
import static io.antmedia.muxer.IAntMediaStreamHandler.BROADCAST_STATUS_BROADCASTING;
import static org.bytedeco.ffmpeg.global.avcodec.AV_CODEC_ID_AAC;
import static org.bytedeco.ffmpeg.global.avcodec.AV_CODEC_ID_H264;
import static org.bytedeco.ffmpeg.global.avcodec.AV_CODEC_ID_H265;
import static org.bytedeco.ffmpeg.global.avcodec.AV_CODEC_ID_PNG;
import static org.bytedeco.ffmpeg.global.avcodec.AV_PKT_FLAG_KEY;
import static org.bytedeco.ffmpeg.global.avutil.AVMEDIA_TYPE_ATTACHMENT;
import static org.bytedeco.ffmpeg.global.avutil.AVMEDIA_TYPE_AUDIO;
import static org.bytedeco.ffmpeg.global.avutil.AVMEDIA_TYPE_DATA;
import static org.bytedeco.ffmpeg.global.avutil.AVMEDIA_TYPE_SUBTITLE;
import static org.bytedeco.ffmpeg.global.avutil.AVMEDIA_TYPE_VIDEO;
import static org.bytedeco.ffmpeg.global.avutil.AV_PIX_FMT_YUV420P;
import static org.bytedeco.ffmpeg.global.avutil.AV_SAMPLE_FMT_FLTP;
import static org.bytedeco.ffmpeg.global.avutil.av_channel_layout_default;
import static org.bytedeco.ffmpeg.global.avutil.av_free;
import static org.bytedeco.ffmpeg.global.avutil.av_malloc;
import static org.bytedeco.ffmpeg.global.avutil.av_rescale_q;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Deque;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentSkipListSet;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.mina.core.buffer.IoBuffer;
import org.bytedeco.ffmpeg.avcodec.AVCodecContext;
import org.bytedeco.ffmpeg.avcodec.AVCodecParameters;
import org.bytedeco.ffmpeg.avcodec.AVPacket;
import org.bytedeco.ffmpeg.avformat.AVFormatContext;
import org.bytedeco.ffmpeg.avformat.AVStream;
import org.bytedeco.ffmpeg.avutil.AVChannelLayout;
import org.bytedeco.ffmpeg.avutil.AVRational;
import org.bytedeco.javacpp.BytePointer;
import org.json.simple.JSONObject;
import org.red5.codec.AVCVideo;
import org.red5.codec.HEVCVideo;
import org.red5.codec.IAudioStreamCodec;
import org.red5.codec.IStreamCodecInfo;
import org.red5.codec.IVideoStreamCodec;
import org.red5.io.object.DataTypes;
import org.red5.io.object.Input;
import org.red5.server.api.IConnection;
import org.red5.server.api.IContext;
import org.red5.server.api.scope.IScope;
import org.red5.server.api.stream.IBroadcastStream;
import org.red5.server.api.stream.IStreamCapableConnection;
import org.red5.server.api.stream.IStreamPacket;
import org.red5.server.net.rtmp.event.AudioData;
import org.red5.server.net.rtmp.event.CachedEvent;
import org.red5.server.net.rtmp.event.Notify;
import org.red5.server.net.rtmp.event.VideoData;
import org.red5.server.net.rtmp.event.VideoData.ExVideoPacketType;
import org.red5.server.net.rtmp.event.VideoData.FrameType;
import org.red5.server.net.rtmp.message.Constants;
import org.red5.server.stream.ClientBroadcastStream;
import org.red5.server.stream.IRecordingListener;
import org.red5.server.stream.consumer.FileConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import io.antmedia.AntMediaApplicationAdapter;
import io.antmedia.AppSettings;
import io.antmedia.EncoderSettings;
import io.antmedia.RecordType;
import io.antmedia.analytic.model.KeyFrameStatsEvent;
import io.antmedia.analytic.model.PublishStatsEvent;
import io.antmedia.datastore.db.DataStore;
import io.antmedia.datastore.db.IDataStoreFactory;
import io.antmedia.datastore.db.types.Broadcast;
import io.antmedia.datastore.db.types.BroadcastUpdate;
import io.antmedia.datastore.db.types.Endpoint;
import io.antmedia.eRTMP.HEVCDecoderConfigurationParser;
import io.antmedia.eRTMP.HEVCVideoEnhancedRTMP;
import io.antmedia.logger.LoggerUtils;
import io.antmedia.muxer.parser.AACConfigParser;
import io.antmedia.muxer.parser.AACConfigParser.AudioObjectTypes;
import io.antmedia.muxer.parser.Parser;
import io.antmedia.muxer.parser.SPSParser;
import io.antmedia.muxer.parser.codec.AACAudio;
import io.antmedia.plugin.PacketFeeder;
import io.antmedia.plugin.api.IPacketListener;
import io.antmedia.plugin.api.StreamParametersInfo;
import io.antmedia.rest.model.Result;
import io.antmedia.settings.ServerSettings;
import io.antmedia.storage.StorageClient;
import io.vertx.core.Vertx;
public class MuxAdaptor implements IRecordingListener, IEndpointStatusListener {
public static final int STAT_UPDATE_PERIOD_MS = 10000;
public static final String ADAPTIVE_SUFFIX = "_adaptive";
private static Logger logger = LoggerFactory.getLogger(MuxAdaptor.class);
protected ConcurrentLinkedQueue<IStreamPacket> streamPacketQueue = new ConcurrentLinkedQueue<>();
protected AtomicBoolean isPipeReaderJobRunning = new AtomicBoolean(false);
private AtomicBoolean isBufferedWriterRunning = new AtomicBoolean(false);
protected List<Muxer> muxerList = Collections.synchronizedList(new ArrayList<>());
protected boolean deleteHLSFilesOnExit = true;
protected boolean deleteDASHFilesOnExit = true;
private int videoStreamIndex;
protected int audioStreamIndex;
protected boolean previewOverwrite = false;
protected volatile boolean enableVideo = false;
protected volatile boolean enableAudio = false;
boolean firstAudioPacketSkipped = false;
boolean firstVideoPacketSkipped = false;
private long packetPollerId = -1;
private ConcurrentSkipListSet<IStreamPacket> bufferQueue = new ConcurrentSkipListSet<>((a, b) -> Long.compare(a.getTimestamp(), b.getTimestamp()));
private volatile boolean stopRequestExist = false;
public static final int RECORDING_ENABLED_FOR_STREAM = 1;
public static final int RECORDING_DISABLED_FOR_STREAM = -1;
public static final int RECORDING_NO_SET_FOR_STREAM = 0;
protected static final long WAIT_TIME_MILLISECONDS = 5;
protected AtomicBoolean isRecording = new AtomicBoolean(false);
protected ClientBroadcastStream broadcastStream;
protected boolean mp4MuxingEnabled;
protected boolean webMMuxingEnabled;
protected boolean addDateTimeToMp4FileName;
protected boolean hlsMuxingEnabled;
protected boolean dashMuxingEnabled;
protected boolean objectDetectionEnabled;
protected ConcurrentHashMap<String, Boolean> isHealthCheckStartedMap = new ConcurrentHashMap<>();
protected ConcurrentHashMap<String, Integer> errorCountMap = new ConcurrentHashMap<>();
protected ConcurrentHashMap<String, Integer> retryCounter = new ConcurrentHashMap<>();
protected ConcurrentHashMap<String, String> statusMap = new ConcurrentHashMap<>();
protected int rtmpEndpointRetryLimit;
protected int healthCheckPeriodMS;
protected boolean webRTCEnabled = false;
protected StorageClient storageClient;
protected String hlsTime;
protected String hlsListSize;
protected String hlsPlayListType;
protected String dashSegDuration;
protected String dashFragmentDuration;
protected String targetLatency;
List adaptiveResolutionList = null;
protected DataStore dataStore;
/**
* By default, the first video key frame should be checked
* and this flag should be set to true.
* If the first video key frame should not be checked,
* set this flag to true in advance.
*/
private boolean firstKeyFrameReceivedChecked = false;
private long lastKeyFramePTS = 0;
protected String streamId;
protected long startTime;
protected IScope scope;
private String oldQuality;
private IAntMediaStreamHandler appAdapter;
protected List<EncoderSettings> encoderSettingsList;
protected static boolean isStreamSource = false;
private int previewCreatePeriod;
private double latestSpeed;
private long lastQualityUpdateTime = 0;
private Broadcast broadcast;
protected AppSettings appSettings;
private int previewHeight;
private int lastFrameTimestamp;
private int maxAnalyzeDurationMS = 1000;
protected boolean generatePreview = true;
private int firstReceivedFrameTimestamp = -1;
protected int totalIngestedVideoPacketCount = 0;
private long bufferTimeMs = 0;
protected ServerSettings serverSettings;
/**
* Packet times kept in order to calculate streaming health.
* Each PacketTime entry holds the packet time and the system time at that moment.
*
*/
private Deque<PacketTime> packetTimeList = new ConcurrentLinkedDeque<>();
private long lastDTS = -1;
private int overflowCount = 0;
public boolean addID3Data(String data) {
for (Muxer muxer : muxerList) {
if(muxer instanceof HLSMuxer) {
((HLSMuxer)muxer).addID3Data(data);
return true;
}
}
return false;
}
public boolean addSEIData(String data) {
for (Muxer muxer : muxerList) {
if(muxer instanceof HLSMuxer) {
((HLSMuxer)muxer).setSeiData(data);
return true;
}
}
return false;
}
public static class PacketTime {
public final long packetTimeMs;
public final long systemTimeMs;
public PacketTime(long packetTimeMs, long systemTimeMs) {
this.packetTimeMs = packetTimeMs;
this.systemTimeMs = systemTimeMs;
}
}
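//Illustrative sketch (not from the original source): given two PacketTime entries,
//streaming speed can be estimated as the ratio of packet-time progress to wall-clock
//progress; the exact formula used by the server may differ.
//PacketTime first = packetTimeList.peekFirst();
//PacketTime last = packetTimeList.peekLast();
//double speed = (double) (last.packetTimeMs - first.packetTimeMs)
//		/ (last.systemTimeMs - first.systemTimeMs); // ~1.0 means real-time ingest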
protected Vertx vertx;
/**
* Accessed from multiple threads, so it's an AtomicBoolean
*/
private AtomicBoolean buffering = new AtomicBoolean(false);
private int bufferLogCounter;
/**
* The time when buffering has been finished. It's volatile because it's accessed from multiple threads
*/
private volatile long bufferingFinishTimeMs = 0;
/**
* MuxAdaptor is generally used for RTMP ingest.
* However, it can also be used for RTSP pull, in which case isAVC can be false
*/
private boolean avc = true;
private long bufferedPacketWriterId = -1;
private volatile long lastPacketTimeMsInQueue = 0;
private volatile long firstPacketReadyToSentTimeMs = 0;
protected String dataChannelWebHookURL = null;
protected long absoluteTotalIngestTime = 0;
/**
* It's defined here because EncoderAdaptor should access it directly to add new streams.
* Don't access dashMuxer directly; use the getter instead
*/
protected Muxer dashMuxer = null;
private long checkStreamsStartTime = -1;
private byte[] videoDataConf;
private byte[] audioDataConf;
private AtomicInteger queueSize = new AtomicInteger(0);
//private long startTimeMs;
protected long totalIngestTime;
private int fps = 0;
private int width;
protected int height;
protected int keyFramePerMin = 0;
protected long lastKeyFrameStatsTimeMs = -1;
private long totalByteReceived = 0;
protected AVFormatContext streamSourceInputFormatContext;
private AVCodecParameters videoCodecParameters;
protected AVCodecParameters audioCodecParameters;
private BytePointer audioExtraDataPointer;
private BytePointer videoExtraDataPointer;
private AtomicLong endpointStatusUpdaterTimer = new AtomicLong(-1l);
private ConcurrentHashMap<String, String> endpointStatusUpdateMap = new ConcurrentHashMap<>();
protected PacketFeeder packetFeeder;
private static final int COUNT_TO_LOG_BUFFER = 500;
/**
* Helper field to get the timebase for milliseconds.
* Pay attention: use it only for basic conversions (av_rescale); do not pass it directly to
* Muxers or Encoders as their timebase, because they can close the timebase and we'd get an error.
*
* For muxers and encoders, use the getTimeBaseForMs() method
*/
public static final AVRational TIME_BASE_FOR_MS = new AVRational().num(1).den(1000);
@SuppressWarnings("java:S2095")
public static AVRational getTimeBaseForMs() {
//create a new instance each time because callers may keep the reference
return new AVRational().num(1).den(1000);
}
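//Usage sketch (illustrative, not from the original source): getTimeBaseForMs() is meant
//for conversions with av_rescale_q, e.g. converting 1500 ms to a 1/90000 timebase:
//long pts90kHz = av_rescale_q(1500, getTimeBaseForMs(), new AVRational().num(1).den(90000)); // = 135000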
private AVRational videoTimeBase = getTimeBaseForMs();
private AVRational audioTimeBase = getTimeBaseForMs();
//NOSONAR because we need to keep the reference of the field
protected AVChannelLayout channelLayout;
private long lastTotalByteReceived = 0;
private long durationMs;
private int videoCodecId = -1;
public static MuxAdaptor initializeMuxAdaptor(ClientBroadcastStream clientBroadcastStream, Broadcast broadcast, boolean isSource, IScope scope) {
MuxAdaptor muxAdaptor = null;
ApplicationContext applicationContext = scope.getContext().getApplicationContext();
boolean tryEncoderAdaptor = false;
if (applicationContext.containsBean(AppSettings.BEAN_NAME)) {
AppSettings appSettings = (AppSettings) applicationContext.getBean(AppSettings.BEAN_NAME);
tryEncoderAdaptor = isEncoderAdaptorShouldBeTried(broadcast, appSettings);
}
if (tryEncoderAdaptor) {
//if adaptive bitrate is enabled, check whether EncoderAdaptor exists
//if it is not enabled, then initialize only MuxAdaptor
try {
Class<?> transraterClass = Class.forName("io.antmedia.enterprise.adaptive.EncoderAdaptor");
muxAdaptor = (MuxAdaptor) transraterClass.getConstructor(ClientBroadcastStream.class)
.newInstance(clientBroadcastStream);
} catch (Exception e) {
logger.error(e.getMessage());
}
}
if (muxAdaptor == null) {
muxAdaptor = new MuxAdaptor(clientBroadcastStream);
}
muxAdaptor.setStreamSource(isSource);
muxAdaptor.setBroadcast(broadcast);
return muxAdaptor;
}
public static boolean isEncoderAdaptorShouldBeTried(Broadcast broadcast,
AppSettings appSettings)
{
return (broadcast != null && broadcast.getEncoderSettingsList() != null && !broadcast.getEncoderSettingsList().isEmpty())
||
(appSettings.getEncoderSettings() != null && !appSettings.getEncoderSettings().isEmpty())
||
appSettings.isWebRTCEnabled()
|| appSettings.isForceDecoding();
}
protected MuxAdaptor(ClientBroadcastStream clientBroadcastStream) {
this.broadcastStream = clientBroadcastStream;
}
public boolean addMuxer(Muxer muxer) {
return addMuxer(muxer, 0);
}
public boolean addMuxer(Muxer muxer, int resolutionHeight)
{
boolean result = false;
if (directMuxingSupported() && (resolutionHeight == 0 || resolutionHeight == height))
{
if (isRecording.get())
{
result = prepareMuxer(muxer, resolutionHeight);
}
else
{
result = addMuxerInternal(muxer);
}
}
return result;
}
public boolean removeMuxer(Muxer muxer)
{
boolean result = false;
if (muxerList.remove(muxer))
{
muxer.writeTrailer();
result = true;
}
return result;
}
protected boolean addMuxerInternal(Muxer muxer)
{
boolean result = false;
if (!muxerList.contains(muxer))
{
result = muxerList.add(muxer);
}
return result;
}
@Override
public boolean init(IConnection conn, String name, boolean isAppend) {
return init(conn.getScope(), name, isAppend);
}
public void enableSettings() {
AppSettings appSettingsLocal = getAppSettings();
hlsMuxingEnabled = appSettingsLocal.isHlsMuxingEnabled();
dashMuxingEnabled = appSettingsLocal.isDashMuxingEnabled();
mp4MuxingEnabled = appSettingsLocal.isMp4MuxingEnabled();
webMMuxingEnabled = appSettingsLocal.isWebMMuxingEnabled();
objectDetectionEnabled = appSettingsLocal.isObjectDetectionEnabled();
addDateTimeToMp4FileName = appSettingsLocal.isAddDateTimeToMp4FileName();
webRTCEnabled = appSettingsLocal.isWebRTCEnabled();
deleteHLSFilesOnExit = appSettingsLocal.isDeleteHLSFilesOnEnded();
deleteDASHFilesOnExit = appSettingsLocal.isDeleteDASHFilesOnEnded();
hlsListSize = appSettingsLocal.getHlsListSize();
hlsTime = appSettingsLocal.getHlsTime();
hlsPlayListType = appSettingsLocal.getHlsPlayListType();
Broadcast.HLSParameters broadcastHLSParameters = getBroadcast().getHlsParameters();
if(broadcastHLSParameters != null) {
if(StringUtils.isNotBlank(broadcastHLSParameters.getHlsListSize())) {
hlsListSize = broadcastHLSParameters.getHlsListSize();
}
if(StringUtils.isNotBlank(broadcastHLSParameters.getHlsTime())) {
hlsTime = broadcastHLSParameters.getHlsTime();
}
if(StringUtils.isNotBlank(broadcastHLSParameters.getHlsPlayListType())) {
hlsPlayListType = broadcastHLSParameters.getHlsPlayListType();
}
}
dashSegDuration = appSettingsLocal.getDashSegDuration();
dashFragmentDuration = appSettingsLocal.getDashFragmentDuration();
targetLatency = appSettingsLocal.getTargetLatency();
previewOverwrite = appSettingsLocal.isPreviewOverwrite();
encoderSettingsList = (getBroadcast() != null && getBroadcast().getEncoderSettingsList() != null && !getBroadcast().getEncoderSettingsList().isEmpty())
? getBroadcast().getEncoderSettingsList()
: appSettingsLocal.getEncoderSettings();
previewCreatePeriod = appSettingsLocal.getCreatePreviewPeriod();
maxAnalyzeDurationMS = appSettingsLocal.getMaxAnalyzeDurationMS();
generatePreview = appSettingsLocal.isGeneratePreview();
previewHeight = appSettingsLocal.getPreviewHeight();
bufferTimeMs = appSettingsLocal.getRtmpIngestBufferTimeMs();
dataChannelWebHookURL = appSettingsLocal.getDataChannelWebHookURL();
rtmpEndpointRetryLimit = appSettingsLocal.getEndpointRepublishLimit();
healthCheckPeriodMS = appSettingsLocal.getEndpointHealthCheckPeriodMs();
}
public void initStorageClient() {
if (scope.getContext().getApplicationContext().containsBean(StorageClient.BEAN_NAME)) {
storageClient = (StorageClient) scope.getContext().getApplicationContext().getBean(StorageClient.BEAN_NAME);
}
}
@Override
public boolean init(IScope scope, String streamId, boolean isAppend) {
this.streamId = streamId;
this.scope = scope;
packetFeeder = new PacketFeeder(streamId);
getDataStore();
//TODO: Refactor -> saving the broadcast happens twice in RTMP ingesting; it should happen once
getStreamHandler().updateBroadcastStatus(streamId, 0, IAntMediaStreamHandler.PUBLISH_TYPE_RTMP, getDataStore().get(streamId));
enableSettings();
initServerSettings();
initStorageClient();
enableMp4Setting();
enableWebMSetting();
initVertx();
if (mp4MuxingEnabled) {
addMp4Muxer();
logger.info("adding MP4 Muxer, add datetime to file name {}", addDateTimeToMp4FileName);
}
if (hlsMuxingEnabled) {
addHLSMuxer();
}
getDashMuxer();
if (dashMuxer != null) {
addMuxer(dashMuxer);
}
for (Muxer muxer : muxerList) {
muxer.init(scope, streamId, 0, broadcast.getSubFolder(), 0);
}
getStreamHandler().muxAdaptorAdded(this);
return true;
}
public HLSMuxer addHLSMuxer() {
HLSMuxer hlsMuxer = new HLSMuxer(vertx, storageClient, getAppSettings().getS3StreamsFolderPath(), getAppSettings().getUploadExtensionsToS3(), getAppSettings().getHlsHttpEndpoint(), getAppSettings().isAddDateTimeToHlsFileName());
hlsMuxer.setHlsParameters( hlsListSize, hlsTime, hlsPlayListType, getAppSettings().getHlsflags(), getAppSettings().getHlsEncryptionKeyInfoFile(), getAppSettings().getHlsSegmentType());
hlsMuxer.setDeleteFileOnExit(deleteHLSFilesOnExit);
hlsMuxer.setId3Enabled(appSettings.isId3TagEnabled());
addMuxer(hlsMuxer);
logger.info("adding HLS Muxer for {}", streamId);
return hlsMuxer;
}
public Muxer getDashMuxer()
{
if (dashMuxingEnabled && dashMuxer == null) {
try {
Class<?> dashMuxerClass = Class.forName("io.antmedia.enterprise.muxer.DASHMuxer");
logger.info("adding DASH Muxer for {}", streamId);
dashMuxer = (Muxer) dashMuxerClass.getConstructors()[0].newInstance(vertx, dashFragmentDuration, dashSegDuration, targetLatency, deleteDASHFilesOnExit, !appSettings.getEncoderSettings().isEmpty(),
appSettings.getDashWindowSize(), appSettings.getDashExtraWindowSize(), appSettings.islLDashEnabled(), appSettings.islLHLSEnabled(),
appSettings.isHlsEnabledViaDash(), appSettings.isUseTimelineDashMuxing(), appSettings.isDashHttpStreaming(),appSettings.getDashHttpEndpoint(), serverSettings.getDefaultHttpPort());
}
catch (ClassNotFoundException e) {
logger.info("DashMuxer class not found for stream:{}", streamId);
}
catch (Exception e) {
logger.error(ExceptionUtils.getStackTrace(e));
}
}
return dashMuxer;
}
private void initVertx() {
if (scope.getContext().getApplicationContext().containsBean(IAntMediaStreamHandler.VERTX_BEAN_NAME))
{
vertx = (Vertx)scope.getContext().getApplicationContext().getBean(IAntMediaStreamHandler.VERTX_BEAN_NAME);
logger.info("vertx exists {}", vertx);
}
else {
logger.info("No vertx bean for stream {}", streamId);
}
}
protected void initServerSettings() {
if(scope.getContext().getApplicationContext().containsBean(ServerSettings.BEAN_NAME)) {
serverSettings = (ServerSettings)scope.getContext().getApplicationContext().getBean(ServerSettings.BEAN_NAME);
logger.info("serverSettings exists {}", serverSettings);
}
else {
logger.info("No serverSettings bean for stream {}", streamId);
}
}
protected void enableMp4Setting() {
broadcast = getBroadcast();
if (broadcast.getMp4Enabled() == RECORDING_DISABLED_FOR_STREAM)
{
// if stream specific mp4 setting is disabled
mp4MuxingEnabled = false;
}
else if (broadcast.getMp4Enabled() == RECORDING_ENABLED_FOR_STREAM)
{
// if stream specific mp4 setting is enabled
mp4MuxingEnabled = true;
}
}
protected void enableWebMSetting() {
broadcast = getBroadcast();
if (broadcast.getWebMEnabled() == RECORDING_DISABLED_FOR_STREAM)
{
// if stream specific WebM setting is disabled
webMMuxingEnabled = false;
}
else if (broadcast.getWebMEnabled() == RECORDING_ENABLED_FOR_STREAM)
{
// if stream specific WebM setting is enabled
webMMuxingEnabled = true;
}
}
public static void setUpEndPoints(MuxAdaptor muxAdaptor, Broadcast broadcast, Vertx vertx)
{
if (broadcast != null) {
List<Endpoint> endPointList = broadcast.getEndPointList();
if (endPointList != null && !endPointList.isEmpty())
{
for (Endpoint endpoint : endPointList) {
RtmpMuxer rtmpMuxer = new RtmpMuxer(endpoint.getRtmpUrl(), vertx);
rtmpMuxer.setStatusListener(muxAdaptor);
muxAdaptor.addMuxer(rtmpMuxer);
}
}
}
}
public AVCodecParameters getAudioCodecParameters() {
if (audioDataConf != null && audioCodecParameters == null)
{
AACConfigParser aacParser = new AACConfigParser(audioDataConf, 0);
if (!aacParser.isErrorOccured())
{
audioCodecParameters = new AVCodecParameters();
audioCodecParameters.sample_rate(aacParser.getSampleRate());
channelLayout = new AVChannelLayout();
av_channel_layout_default(channelLayout, aacParser.getChannelCount());
audioCodecParameters.ch_layout(channelLayout);
audioCodecParameters.codec_id(AV_CODEC_ID_AAC);
audioCodecParameters.codec_type(AVMEDIA_TYPE_AUDIO);
if (aacParser.getObjectType() == AudioObjectTypes.AAC_LC) {
audioCodecParameters.profile(AVCodecContext.FF_PROFILE_AAC_LOW);
}
else if (aacParser.getObjectType() == AudioObjectTypes.AAC_LTP) {
audioCodecParameters.profile(AVCodecContext.FF_PROFILE_AAC_LTP);
}
else if (aacParser.getObjectType() == AudioObjectTypes.AAC_MAIN) {
audioCodecParameters.profile(AVCodecContext.FF_PROFILE_AAC_MAIN);
}
else if (aacParser.getObjectType() == AudioObjectTypes.AAC_SSR) {
audioCodecParameters.profile(AVCodecContext.FF_PROFILE_AAC_SSR);
}
audioCodecParameters.frame_size(aacParser.getFrameSize());
audioCodecParameters.format(AV_SAMPLE_FMT_FLTP);
audioExtraDataPointer = new BytePointer(av_malloc(audioDataConf.length)).capacity(audioDataConf.length);
audioExtraDataPointer.position(0).put(audioDataConf);
audioCodecParameters.extradata(audioExtraDataPointer);
audioCodecParameters.extradata_size(audioDataConf.length);
audioCodecParameters.codec_tag(0);
}
else {
logger.warn("Cannot parse AAC header successfully for stream:{}", streamId);
}
}
return audioCodecParameters;
}
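//For reference: audioDataConf holds an AAC AudioSpecificConfig. For example, the common
//two-byte config 0x12 0x10 decodes as audioObjectType=2 (AAC LC), samplingFrequencyIndex=4
//(44100 Hz) and channelConfiguration=2 (stereo), which AACConfigParser extracts above.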
public AVCodecParameters getVideoCodecParameters()
{
if (videoDataConf != null && videoCodecParameters == null) {
Parser parser = null;
if (videoCodecId == AV_CODEC_ID_H264)
{
/*
unsigned int(8) configurationVersion = 1;
unsigned int(8) AVCProfileIndication;
unsigned int(8) profile_compatibility;
unsigned int(8) AVCLevelIndication;
bit(6) reserved = '111111'b;
unsigned int(2) lengthSizeMinusOne;
bit(3) reserved = '111'b;
unsigned int(5) numOfSequenceParameterSets;
for (i = 0; i < numOfSequenceParameterSets; i++) {
unsigned int(16) sequenceParameterSetLength ;
bit(8*sequenceParameterSetLength) sequenceParameterSetNALUnit;
}
unsigned int(8) numOfPictureParameterSets;
for (i = 0; i < numOfPictureParameterSets; i++) {
unsigned int(16) pictureParameterSetLength;
bit(8*pictureParameterSetLength) pictureParameterSetNALUnit;
}
if (profile_idc == 100 || profile_idc == 110 ||
profile_idc == 122 || profile_idc == 144)
{
bit(6) reserved = '111111'b;
unsigned int(2) chroma_format;
bit(5) reserved = '11111'b;
unsigned int(3) bit_depth_luma_minus8;
bit(5) reserved = '11111'b;
unsigned int(3) bit_depth_chroma_minus8;
unsigned int(8) numOfSequenceParameterSetExt;
for (i = 0; i < numOfSequenceParameterSetExt; i++) {
unsigned int(16) sequenceParameterSetExtLength;
bit(8*sequenceParameterSetExtLength) sequenceParameterSetExtNALUnit;
}
}
}
*/
//convert the structure above to SPS and PPS in Annex-B format
parser = new SPSParser(getAnnexbExtradata(videoDataConf), 5);
}
else if (videoCodecId == AV_CODEC_ID_H265) {
parser = new HEVCDecoderConfigurationParser(videoDataConf, 0);
}
else {
throw new IllegalArgumentException("Unsupported codec id for video:" + videoCodecId);
}
videoCodecParameters = new AVCodecParameters();
width = parser.getWidth();
height = parser.getHeight();
videoCodecParameters.width(parser.getWidth());
videoCodecParameters.height(parser.getHeight());
videoCodecParameters.codec_id(videoCodecId);
videoCodecParameters.codec_type(AVMEDIA_TYPE_VIDEO);
videoExtraDataPointer = new BytePointer(av_malloc(videoDataConf.length)).capacity(videoDataConf.length);
videoExtraDataPointer.position(0).put(videoDataConf);
videoCodecParameters.extradata_size(videoDataConf.length);
videoCodecParameters.extradata(videoExtraDataPointer);
videoCodecParameters.format(AV_PIX_FMT_YUV420P);
videoCodecParameters.codec_tag(0);
}
return videoCodecParameters;
}
/**
* Prepares the parameters. This method is called in RTMP ingesting
* @return true if preparation succeeds
* @throws Exception
*/
public boolean prepare() throws Exception {
int streamIndex = 0;
AVCodecParameters codecParameters = getVideoCodecParameters();
if (codecParameters != null) {
logger.info("Incoming video width: {} height:{} stream:{}", codecParameters.width(), codecParameters.height(), streamId);
addStream2Muxers(codecParameters, getTimeBaseForMs(), streamIndex);
videoStreamIndex = streamIndex;
streamIndex++;
}
AVCodecParameters parameters = getAudioCodecParameters();
if (parameters != null) {
addStream2Muxers(parameters, getTimeBaseForMs(), streamIndex);
audioStreamIndex = streamIndex;
}
else {
logger.info("There is no audio in the stream or the AAC sequence header has not been received for stream:{}; muting the audio", streamId);
enableAudio = false;
}
prepareMuxerIO();
registerToMainTrackIfExists();
return true;
}
public void registerToMainTrackIfExists() {
if(broadcastStream.getParameters() != null) {
String mainTrack = broadcastStream.getParameters().get("mainTrack");
if(mainTrack != null) {
BroadcastUpdate broadcastUpdate = new BroadcastUpdate();
broadcastUpdate.setMainTrackStreamId(mainTrack);
getDataStore().updateBroadcastFields(streamId, broadcastUpdate);
Broadcast mainBroadcast = getDataStore().get(mainTrack);
if(mainBroadcast == null)
{
mainBroadcast = new Broadcast();
try {
mainBroadcast.setStreamId(mainTrack);
} catch (Exception e) {
logger.error(ExceptionUtils.getStackTrace(e));
}
mainBroadcast.setZombi(true);
mainBroadcast.setStatus(BROADCAST_STATUS_BROADCASTING);
mainBroadcast.getSubTrackStreamIds().add(streamId);
getDataStore().save(mainBroadcast);
}
else
{
mainBroadcast.getSubTrackStreamIds().add(streamId);
BroadcastUpdate broadcastMainUpdate = new BroadcastUpdate();
broadcastMainUpdate.setSubTrackStreamIds(mainBroadcast.getSubTrackStreamIds());
getDataStore().updateBroadcastFields(mainTrack, broadcastMainUpdate);
}
}
}
}
/**
* Prepares parameters and muxers. This method is called when pulling a stream source
* @param inputFormatContext
* @return true on success; false if video is enabled but width or height is zero
* @throws Exception
*/
public boolean prepareFromInputFormatContext(AVFormatContext inputFormatContext) throws Exception {
this.streamSourceInputFormatContext = inputFormatContext;
int streamIndex = 0;
int streamCount = inputFormatContext.nb_streams();
for (int i=0; i < streamCount; i++)
{
AVStream stream = inputFormatContext.streams(i);
AVCodecParameters codecpar = stream.codecpar();
if (codecpar.codec_type() == AVMEDIA_TYPE_VIDEO && !isBlacklistCodec(codecpar.codec_id())) {
videoTimeBase = inputFormatContext.streams(i).time_base();
logger.info("Video format codec Id: {} width:{} height:{} for stream: {} source index:{} target index:{}", codecpar.codec_id(), codecpar.width(), codecpar.height(), streamId, i, streamIndex);
width = codecpar.width();
height = codecpar.height();
addStream2Muxers(codecpar, stream.time_base(), i);
videoStreamIndex = streamIndex;
videoCodecParameters = codecpar;
streamIndex++;
}
else if (codecpar.codec_type() == AVMEDIA_TYPE_AUDIO)
{
logger.info("Audio format sample rate:{} bitrate:{} for stream: {} source index:{} target index:{}",codecpar.sample_rate(), codecpar.bit_rate(), streamId, i, streamIndex);
audioTimeBase = inputFormatContext.streams(i).time_base();
addStream2Muxers(codecpar, stream.time_base(), i);
audioStreamIndex = streamIndex;
audioCodecParameters = codecpar;
streamIndex++;
}
}
if (enableVideo && (width == 0 || height == 0)) {
logger.info("Width or height is zero so returning for stream: {}", streamId);
return false;
}
isRecording.set(true);
prepareMuxerIO();
return true;
}
public static byte[] getAnnexbExtradata(byte[] avcExtradata){
IoBuffer buffer = IoBuffer.wrap(avcExtradata);
buffer.skip(6); //skip first 6 bytes for avc
short spsSize = buffer.getShort();
byte[] sps = new byte[spsSize];
buffer.get(sps);
buffer.skip(1); //skip one byte for pps number
short ppsSize = buffer.getShort();
byte[] pps = new byte[ppsSize];
buffer.get(pps);
byte[] extradataAnnexb = new byte[8 + spsSize + ppsSize];
extradataAnnexb[0] = 0x00;
extradataAnnexb[1] = 0x00;
extradataAnnexb[2] = 0x00;
extradataAnnexb[3] = 0x01;
System.arraycopy(sps, 0, extradataAnnexb, 4, spsSize);
extradataAnnexb[4 + spsSize] = 0x00;
extradataAnnexb[5 + spsSize] = 0x00;
extradataAnnexb[6 + spsSize] = 0x00;
extradataAnnexb[7 + spsSize] = 0x01;
System.arraycopy(pps, 0, extradataAnnexb, 8 + spsSize, ppsSize);
return extradataAnnexb;
}
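//For reference, the output layout produced above is Annex-B:
//[00 00 00 01][SPS bytes][00 00 00 01][PPS bytes]
//e.g. a 20-byte SPS and a 4-byte PPS yield an extradata array of 8 + 20 + 4 = 32 bytes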
public static String getStreamType(int codecType)
{
String streamType = "not_known";
if (codecType == AVMEDIA_TYPE_VIDEO)
{
streamType = "video";
}
else if (codecType == AVMEDIA_TYPE_AUDIO)
{
streamType = "audio";
}
else if (codecType == AVMEDIA_TYPE_DATA)
{
streamType = "data";
}
else if (codecType == AVMEDIA_TYPE_SUBTITLE)
{
streamType = "subtitle";
}
else if (codecType == AVMEDIA_TYPE_ATTACHMENT)
{
streamType = "attachment";
}
return streamType;
}
public void addStream2Muxers(AVCodecParameters codecParameters, AVRational rat, int streamIndex)
{
synchronized (muxerList) {
Iterator<Muxer> iterator = muxerList.iterator();
while (iterator.hasNext())
{
Muxer muxer = iterator.next();
if (!muxer.addStream(codecParameters, rat, streamIndex))
{
logger.warn("addStream returns false {} for stream: {} for {} stream", muxer.getFormat(), streamId, getStreamType(codecParameters.codec_type()));
}
}
}
startTime = System.currentTimeMillis();
}
public void prepareMuxerIO()
{
synchronized (muxerList) {
Iterator<Muxer> iterator = muxerList.iterator();
while (iterator.hasNext())
{
Muxer muxer = iterator.next();
if (!muxer.prepareIO())
{
iterator.remove();
logger.error("prepareIO returns false {} for stream: {}", muxer.getFormat(), streamId);
}
}
}
startTime = System.currentTimeMillis();
}
/**
* @param streamId id of the stream
* @param quality quality string
* @param speed processing speed of the stream; a value close to 1.0 means packets are processed in real time
* @param inputQueueSize input queue size of the packets waiting to be processed
*/
public void updateStreamQualityParameters(String streamId, String quality, double speed, int inputQueueSize) {
long now = System.currentTimeMillis();
latestSpeed = speed;
//throttle updates to every STAT_UPDATE_PERIOD_MS because frequent updates may cause issues in mongodb
//or
//update before STAT_UPDATE_PERIOD_MS if the speed is already meaningful
if ((now - lastQualityUpdateTime) > STAT_UPDATE_PERIOD_MS || (lastQualityUpdateTime == 0 && speed > 0.8))
{
logger.info("Stream queue size:{} speed:{} for streamId:{} ", inputQueueSize, speed, streamId);
lastQualityUpdateTime = now;
long byteTransferred = totalByteReceived - lastTotalByteReceived;
lastTotalByteReceived = totalByteReceived;
PublishStatsEvent publishStatsEvent = new PublishStatsEvent();
publishStatsEvent.setApp(scope.getName());
publishStatsEvent.setStreamId(streamId);
publishStatsEvent.setTotalByteReceived(totalByteReceived);
publishStatsEvent.setByteTransferred(byteTransferred);
durationMs = System.currentTimeMillis() - broadcast.getStartTime();
publishStatsEvent.setDurationMs(durationMs);
publishStatsEvent.setWidth(width);
publishStatsEvent.setHeight(height);
getStreamHandler().setQualityParameters(streamId, quality, speed, inputQueueSize, System.currentTimeMillis());
oldQuality = quality;
}
}
public double getLatestSpeed() {
return latestSpeed;
}
public IAntMediaStreamHandler getStreamHandler() {
if (appAdapter == null) {
IContext context = MuxAdaptor.this.scope.getContext();
ApplicationContext appCtx = context.getApplicationContext();
//this returns the StreamApplication instance
appAdapter = (IAntMediaStreamHandler) appCtx.getBean(AntMediaApplicationAdapter.BEAN_NAME);
}
return appAdapter;
}
public AppSettings getAppSettings() {
if (appSettings == null && scope.getContext().getApplicationContext().containsBean(AppSettings.BEAN_NAME)) {
appSettings = (AppSettings) scope.getContext().getApplicationContext().getBean(AppSettings.BEAN_NAME);
}
return appSettings;
}
public DataStore getDataStore() {
if (dataStore == null) {
IDataStoreFactory dsf = (IDataStoreFactory) scope.getContext().getBean(IDataStoreFactory.BEAN_NAME);
dataStore = dsf.getDataStore();
}
return dataStore;
}
public long correctPacketDtsOverflow(long packetDts) {
/*
* Continuous RTMP streaming for approximately 24 days can cause the DTS values to overflow
* and reset to 0 once they reach the maximum value for a signed integer.
* This method handles the overflow by continuing to increment the DTS values as if they hadn't reset,
* ensuring that the timestamps remain consistent and do not start over from 0.
* If this correction is not applied, errors occur when writing to the HLS muxer, leading to a halt in .ts generation.
*/
if (lastDTS > packetDts && packetDts >= 0) {
//For an overflow there should be a huge difference between lastDTS and packetDts,
//such as the DTS restarting from 0 after Integer.MAX_VALUE.
//We just check that the gap is bigger than half of Integer.MAX_VALUE
if ((lastDTS - packetDts) > (Integer.MAX_VALUE/2)) {
logger.info("Increasing the overflow count for stream:{} because incoming packetDts:{} is lower than the lastDts:{}", streamId, packetDts, lastDTS);
overflowCount++;
}
}
lastDTS = packetDts;
return packetDts + (long) overflowCount * Integer.MAX_VALUE;
}
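//Worked example (illustrative values): suppose lastDTS = 2147483000 and the next
//packetDts = 500 after the 32-bit reset. The gap 2147482500 exceeds Integer.MAX_VALUE / 2,
//so overflowCount becomes 1 and the corrected DTS is 500 + 1 * 2147483647 = 2147484147,
//keeping timestamps monotonic for the muxers.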
/**
* This is the entry point for packets coming from the RTMP stream.
* It's directly used in EncoderAdaptor in Enterprise
*
* We override the videoBufferReceived, audioBufferReceived, and notifyDataReceived methods to handle the packets in EncoderAdaptor
*
* @param packet
*/
public void writeStreamPacket(IStreamPacket packet)
{
//RTMPProtocolDecoder overflows after 24 days (Integer.MAX_VALUE ms) of continuous streaming and starts from zero again.
//According to the protocol it should overflow after 49 days. Either way, we fix the overflow here
long dts = correctPacketDtsOverflow(packet.getTimestamp());
if (packet.getDataType() == Constants.TYPE_VIDEO_DATA)
{
if(!enableVideo) {
logger.warn("Video was disabled at the beginning of the stream, so video packets are being discarded.");
return;
}
CachedEvent videoData = (CachedEvent) packet;
logger.trace("writeVideoBuffer video data packet timestamp:{} and packet timestamp:{} streamId:{}", dts, packet.getTimestamp(), streamId);
measureIngestTime(dts, videoData.getReceivedTime());
//we skip the first video packet because it's the decoder configuration
if (!firstVideoPacketSkipped) {
firstVideoPacketSkipped = true;
return;
}
int bodySize = packet.getData().limit();
boolean isKeyFrame = videoData.getFrameType() == FrameType.KEYFRAME;
long pts = dts;
//first 5 bytes in flv video tag header
byte offset = 5;
long initialCompositionTimeByte = 0;
long shortValueCompositionTime = 0;
if (videoData.isExVideoHeader())
{
//handle composition time offset
// https://veovera.org/docs/enhanced/enhanced-rtmp-v2.pdf
if (videoData.getExVideoPacketType() == ExVideoPacketType.CODED_FRAMES) {
//header implementation is available in VideoData
//when the packet type is coded frames, the first 3 bytes are the composition time offset
//read the first byte (shifted left by 16 bits below), advance the offset by one, then read the next two bytes as a short
initialCompositionTimeByte = Byte.toUnsignedLong(packet.getData().position(offset).get());
//increase offset because we use it below
offset++;
shortValueCompositionTime = Short.toUnsignedLong(packet.getData().position(offset).getShort());
//increase offset because we use it below to get the correct data
offset+=2;
}
}
else
{
//first byte is frametype - u(4) + codecId - u(4)
//second byte is av packet type - u(8)
//next 3 bytes composition time offset is 24 bits signed integer
// VideoTag E.4.3.1 -> https://veovera.org/docs/legacy/video-file-format-v10-1-spec.pdf
initialCompositionTimeByte = Byte.toUnsignedLong(packet.getData().position(2).get());
shortValueCompositionTime = Short.toUnsignedLong(packet.getData().position(3).getShort());
}
long compositionTimeOffset = ((initialCompositionTimeByte << 16) | shortValueCompositionTime);
pts = dts + compositionTimeOffset;
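//worked example (illustrative bytes): composition time bytes 0x00 0x00 0x42 give
//initialCompositionTimeByte = 0x00 and shortValueCompositionTime = 0x0042, so
//compositionTimeOffset = (0x00 << 16) | 0x0042 = 66 and pts = dts + 66 ms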
//we drop the first "offset" bytes because they belong to the FLV video tag header, not the codec payload
ByteBuffer byteBuffer = ByteBuffer.allocateDirect(bodySize-offset);
byteBuffer.put(packet.getData().buf().position(offset));
videoBufferReceived(dts, isKeyFrame, pts, byteBuffer);
}
else if (packet.getDataType() == Constants.TYPE_AUDIO_DATA) {
if(!enableAudio) {
logger.debug("Audio was disabled at the beginning of the stream, so audio packets are being discarded.");
return;
}
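//we skip the first audio packet because it's the AAC sequence header (decoder configuration)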
if (!firstAudioPacketSkipped) {
firstAudioPacketSkipped = true;
return;
}
int bodySize = packet.getData().limit();
//we drop the first 2 bytes because they belong to the FLV audio tag header, not the codec payload
ByteBuffer byteBuffer = ByteBuffer.allocateDirect(bodySize-2);
byteBuffer.put(packet.getData().buf().position(2));
logger.trace("writeAudioBuffer audio data packet timestamp:{} and packet timestamp:{} streamId:{}", dts, packet.getTimestamp(), streamId);
audioBufferReceived(dts, byteBuffer);
}
else if (packet.getDataType() == Constants.TYPE_STREAM_METADATA) {
//it can be onMetadata or it can be onFI action
if (appSettings.isRelayRTMPMetaDataToMuxers()) {
notifyMetaDataReceived(packet, dts);
}
//FYI: action can be "onFI" to deliver timecode
}
}
public void notifyMetaDataReceived(IStreamPacket packet, long dts) {
JSONObject jsonObject = getMetaData((Notify) packet);
if (jsonObject != null) {
String data = jsonObject.toJSONString();
synchronized (muxerList)
{
for (Muxer muxer : muxerList)
{
muxer.writeMetaData(data, dts);
}
}
}
}
public void audioBufferReceived(long dts, ByteBuffer byteBuffer) {
synchronized (muxerList)
{
packetFeeder.writeAudioBuffer(byteBuffer, audioStreamIndex, dts);
for (Muxer muxer : muxerList)
{
muxer.writeAudioBuffer(byteBuffer, audioStreamIndex, dts);
}
}
}
public void videoBufferReceived(long dts, boolean isKeyFrame, long pts, ByteBuffer byteBuffer) {
synchronized (muxerList)
{
packetFeeder.writeVideoBuffer(byteBuffer, dts, 0, videoStreamIndex, isKeyFrame, 0, pts);
for (Muxer muxer : muxerList)
{
muxer.writeVideoBuffer(byteBuffer, dts, 0, videoStreamIndex, isKeyFrame, 0, pts);
}
}
}
public JSONObject getMetaData(Notify notifyEvent)
{
String action = notifyEvent.getAction();
if ("onMetaData".equals(action)) {
// store the metadata
Input input = getInput(notifyEvent);
byte readDataType = input.readDataType();
if (readDataType == DataTypes.CORE_MAP) {
Map