package io.antmedia.muxer;


import static org.bytedeco.ffmpeg.global.avcodec.AV_CODEC_FLAG_GLOBAL_HEADER;
import static org.bytedeco.ffmpeg.global.avcodec.AV_INPUT_BUFFER_PADDING_SIZE;
import static org.bytedeco.ffmpeg.global.avcodec.AV_PKT_DATA_NEW_EXTRADATA;
import static org.bytedeco.ffmpeg.global.avcodec.AV_PKT_FLAG_KEY;
import static org.bytedeco.ffmpeg.global.avcodec.av_bsf_alloc;
import static org.bytedeco.ffmpeg.global.avcodec.av_bsf_free;
import static org.bytedeco.ffmpeg.global.avcodec.av_bsf_get_by_name;
import static org.bytedeco.ffmpeg.global.avcodec.av_bsf_init;
import static org.bytedeco.ffmpeg.global.avcodec.av_bsf_receive_packet;
import static org.bytedeco.ffmpeg.global.avcodec.av_bsf_send_packet;
import static org.bytedeco.ffmpeg.global.avcodec.av_init_packet;
import static org.bytedeco.ffmpeg.global.avcodec.av_packet_free;
import static org.bytedeco.ffmpeg.global.avcodec.av_packet_ref;
import static org.bytedeco.ffmpeg.global.avcodec.av_packet_unref;
import static org.bytedeco.ffmpeg.global.avcodec.avcodec_parameters_copy;
import static org.bytedeco.ffmpeg.global.avcodec.avcodec_parameters_from_context;
import static org.bytedeco.ffmpeg.global.avformat.AVFMT_GLOBALHEADER;
import static org.bytedeco.ffmpeg.global.avformat.AVFMT_NOFILE;
import static org.bytedeco.ffmpeg.global.avformat.AVIO_FLAG_WRITE;
import static org.bytedeco.ffmpeg.global.avformat.av_write_frame;
import static org.bytedeco.ffmpeg.global.avformat.av_write_trailer;
import static org.bytedeco.ffmpeg.global.avformat.avformat_alloc_output_context2;
import static org.bytedeco.ffmpeg.global.avformat.avformat_free_context;
import static org.bytedeco.ffmpeg.global.avformat.avformat_new_stream;
import static org.bytedeco.ffmpeg.global.avformat.avformat_write_header;
import static org.bytedeco.ffmpeg.global.avformat.avio_closep;
import static org.bytedeco.ffmpeg.global.avutil.AVMEDIA_TYPE_AUDIO;
import static org.bytedeco.ffmpeg.global.avutil.AVMEDIA_TYPE_VIDEO;
import static org.bytedeco.ffmpeg.global.avutil.AV_PIX_FMT_YUV420P;
import static org.bytedeco.ffmpeg.global.avutil.AV_ROUND_NEAR_INF;
import static org.bytedeco.ffmpeg.global.avutil.AV_ROUND_PASS_MINMAX;
import static org.bytedeco.ffmpeg.global.avutil.av_dict_free;
import static org.bytedeco.ffmpeg.global.avutil.av_dict_set;
import static org.bytedeco.ffmpeg.global.avutil.av_rescale_q;
import static org.bytedeco.ffmpeg.global.avutil.av_rescale_q_rnd;
import static org.bytedeco.ffmpeg.global.avutil.av_strerror;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.bytedeco.ffmpeg.avcodec.AVBSFContext;
import org.bytedeco.ffmpeg.avcodec.AVBitStreamFilter;
import org.bytedeco.ffmpeg.avcodec.AVCodec;
import org.bytedeco.ffmpeg.avcodec.AVCodecContext;
import org.bytedeco.ffmpeg.avcodec.AVCodecParameters;
import org.bytedeco.ffmpeg.avcodec.AVPacket;
import org.bytedeco.ffmpeg.avformat.AVFormatContext;
import org.bytedeco.ffmpeg.avformat.AVIOContext;
import org.bytedeco.ffmpeg.avformat.AVStream;
import org.bytedeco.ffmpeg.avutil.AVDictionary;
import org.bytedeco.ffmpeg.avutil.AVRational;
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.ffmpeg.global.avformat;
import org.bytedeco.ffmpeg.global.avutil;
import org.bytedeco.javacpp.BytePointer;
import org.bytedeco.javacpp.IntPointer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

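/**
 * Muxer that publishes packets to an RTMP endpoint by driving FFmpeg's FLV muxer
 * (the output format is "flv"). Incoming packets are rescaled from their source
 * timebase to the output stream timebase before writing, and video packets can be
 * run through the "extract_extradata" bitstream filter so that codec extradata
 * (e.g. H.264 SPS/PPS) is available before the header is written.
 *
 * <p>A minimal usage sketch; the exact call order is an assumption inferred from the
 * methods below, not a documented contract, and the URL is a placeholder:</p>
 * <pre>{@code
 * RtmpMuxer muxer = new RtmpMuxer("rtmp://example.com/live/stream1");
 * muxer.addVideoStream(1280, 720, null, avcodec.AV_CODEC_ID_H264, 0, true, null);
 * muxer.prepareIO();
 * // for each encoded frame:
 * // muxer.writeVideoBuffer(frameBuffer, timestampMs, 0, 0, isKeyFrame, 0);
 * muxer.writeTrailer();
 * }</pre>
 */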
public class RtmpMuxer extends Muxer {

	protected static Logger logger = LoggerFactory.getLogger(RtmpMuxer.class);
	private List<Integer> registeredStreamIndexList = new ArrayList<>();
	private String url;
	private AVPacket videoPkt;
	private Map<Integer, AVRational> codecTimeBaseMap = new HashMap<>();
	private AVBSFContext bsfExtractdataContext = null;
	private AVPacket tmpPacket;
	private volatile boolean headerWritten = false;
	private BytePointer allocatedExtraDataPointer = null;

	public RtmpMuxer(String url) {
		super(null);
		format = "flv";
		this.url = url;
		
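		// Reusable packets: videoPkt backs writeVideoBuffer() and tmpPacket backs the
		// bitstream-filter path in writeFrameInternal(); both are freed in clearResource().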
		videoPkt = avcodec.av_packet_alloc();
		av_init_packet(videoPkt);
		
		tmpPacket = avcodec.av_packet_alloc();
		av_init_packet(tmpPacket);
	}

	public String getURL() {
		return url;
	}
	


	/**
	 * {@inheritDoc}
	 */
	@Override
	public synchronized boolean addStream(AVCodec codec, AVCodecContext codecContext, int streamIndex) {

		AVFormatContext outputContext = getOutputFormatContext();

		if (outputContext == null) {
			return false;
		}
		registeredStreamIndexList.add(streamIndex);
		AVStream outStream = avformat_new_stream(outputContext, codec);		
		outStream.time_base(codecContext.time_base());
		
		int ret = avcodec_parameters_from_context(outStream.codecpar(), codecContext);

		if (ret < 0) {
			logger.info("codec context cannot be copied for url: {}", url);
		}
		outStream.codecpar().codec_tag(0);
		codecTimeBaseMap.put(streamIndex, codecContext.time_base());
		return true;
		
	}

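	/**
	 * Lazily allocates the FLV output context that backs the RTMP push.
	 * Returns null (after logging) if avformat_alloc_output_context2 fails.
	 */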
	private AVFormatContext getOutputFormatContext() {
		if (outputFormatContext == null) {
			outputFormatContext = new AVFormatContext(null);
			int ret = avformat_alloc_output_context2(outputFormatContext, null, format, null);
			if (ret < 0) {
				logger.info("Could not create output context for url {}", url);
				return null;
			}
		}
		return outputFormatContext;
	}
	

	/**
	 * {@inheritDoc}
	 */
	@Override
	public boolean prepare(AVFormatContext inputFormatContext) {

		logger.info("preparing rtmp muxer for {}", url);
		AVFormatContext context = getOutputFormatContext();

		for (int i=0; i < inputFormatContext.nb_streams(); i++) {
			AVStream inStream = inputFormatContext.streams(i);
			registeredStreamIndexList.add(i);

			AVStream outStream = avformat_new_stream(context, inStream.codec().codec());

			int ret = avcodec_parameters_copy(outStream.codecpar(), inStream.codecpar());
			if (ret < 0) {
				logger.info("Cannot get codec parameters {}", url);
				return false;
			}

			outStream.codec().codec_tag(0);
			outStream.codecpar().codec_tag(0);

			if ((context.oformat().flags() & AVFMT_GLOBALHEADER) != 0)
				outStream.codec().flags( outStream.codec().flags() | AV_CODEC_FLAG_GLOBAL_HEADER);
		}

		prepareIO();

		return true;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public boolean prepareIO()
	{
		/*
		 * The extradata bitstream-filter context is only created when addVideoStream is
		 * called (e.g. from the WebRTC forwarder); in that case the header is written
		 * later, once extradata has been extracted in writeFrameInternal.
		 */
		AVFormatContext context = getOutputFormatContext();
		if (context != null && context.pb() != null) {
			//return false if it is already prepared
			return false;
		}

		AVIOContext pb = new AVIOContext(null);

		long startTime = System.currentTimeMillis();
		logger.info("rtmp muxer opening: {} time:{}" , url, System.currentTimeMillis());
		int ret = avformat.avio_open(pb,  url, AVIO_FLAG_WRITE);
		if (ret < 0) {
			logger.error("Could not open output file for rtmp url {}", url);
			return false;
		}
		context.pb(pb);
		long diff = System.currentTimeMillis() - startTime;
		logger.info("avio open takes {}", diff);
		
		
		if (bsfExtractdataContext == null)  
		{	
			return writeHeader(); 
		}
		isRunning.set(true);
		return true;
	}
	
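	/**
	 * Writes the container header to the already-opened IO context, applying the
	 * entries of the options map as an AVDictionary. On success the muxer is marked
	 * as running; on failure all native resources are released via clearResource().
	 */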
	private boolean writeHeader() {
		long startTime = System.currentTimeMillis();
		AVDictionary optionsDictionary = null;

		if (!options.isEmpty()) {
			optionsDictionary = new AVDictionary();
			Set<String> keySet = options.keySet();
			for (String key : keySet) {
				av_dict_set(optionsDictionary, key, options.get(key), 0);
			}
		}

		logger.info("before writing rtmp muxer header to {}", url);
		int ret = avformat_write_header(getOutputFormatContext(), optionsDictionary);		
		if (ret < 0) {
			logger.warn("could not write header to rtmp url {}", url);

			clearResource();
			return false;
		}
		if (optionsDictionary != null) {
			av_dict_free(optionsDictionary);
			optionsDictionary = null;
		}
		long diff = System.currentTimeMillis() - startTime;
		logger.info("write header takes {}", diff);
		headerWritten = true;
		isRunning.set(true);
		
		return true;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void writeTrailer() {

		if (!isRunning.get() || outputFormatContext == null || outputFormatContext.pb() == null) {
			//nothing to do if the muxer is not running or the IO context is already closed
			logger.info("RTMPMuxer is not running or output context is null for stream: {}", url);
			return;
		}
		logger.info("Writing trailer for stream id: {}", url);
		isRunning.set(false);

		av_write_trailer(outputFormatContext);
		clearResource();

		isRecording = false;
	}


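	/**
	 * Releases the native resources owned by this muxer: the IO context (unless the
	 * format is AVFMT_NOFILE), the reusable packets, the bitstream filter context and
	 * finally the output format context itself.
	 */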
	private void clearResource() {
		/* close output */
		if ((outputFormatContext.flags() & AVFMT_NOFILE) == 0) {
			avio_closep(outputFormatContext.pb());
		}
		
		if (videoPkt != null) {
			av_packet_free(videoPkt);
			videoPkt = null;
		}
		
		if (tmpPacket != null) {
			av_packet_free(tmpPacket);
			tmpPacket = null;
		}
		
		if (bsfExtractdataContext != null) {
			av_bsf_free(bsfExtractdataContext);
			bsfExtractdataContext = null;
		}
		
		//allocatedExtraDataPointer is freed when the format context is closed

		avformat_free_context(outputFormatContext);
		outputFormatContext.close();
		outputFormatContext = null;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public boolean addVideoStream(int width, int height, AVRational timebase, int codecId, int streamIndex, boolean isAVC, AVCodecParameters codecpar) {
		boolean result = false;
		AVFormatContext outputContext = getOutputFormatContext();
		if (outputContext != null) 
		{
			registeredStreamIndexList.add(streamIndex);
			AVStream outStream = avformat_new_stream(outputContext, null);
			outStream.codecpar().width(width);
			outStream.codecpar().height(height);
			outStream.codecpar().codec_id(codecId);
			outStream.codecpar().codec_type(AVMEDIA_TYPE_VIDEO);
			outStream.codecpar().format(AV_PIX_FMT_YUV420P);
			outStream.codecpar().codec_tag(0);
			outStream.codec().codec_tag(0);
			
			AVRational timeBase = new AVRational();
			timeBase.num(1).den(1000);
						

			AVBitStreamFilter h264bsfc = av_bsf_get_by_name("extract_extradata");
			bsfExtractdataContext = new AVBSFContext(null);

			int ret = av_bsf_alloc(h264bsfc, bsfExtractdataContext);
			if (ret < 0) {
				logger.info("cannot allocate bsf context for {}", file.getName());
				outStream.close();
				timeBase.close();
				return false;
			}

			ret = avcodec_parameters_copy(bsfExtractdataContext.par_in(), outStream.codecpar());
			if (ret < 0) {
				logger.info("cannot copy input codec parameters for {}", file.getName());
				outStream.close();
				timeBase.close();
				h264bsfc.close();
				return false;
			}
			bsfExtractdataContext.time_base_in(timeBase);

			ret = av_bsf_init(bsfExtractdataContext);
			if (ret < 0) {
				logger.info("cannot init bit stream filter context for {}", file.getName());
				outStream.close();
				timeBase.close();
				h264bsfc.close();
				return false;
			}

			ret = avcodec_parameters_copy(outStream.codecpar(), bsfExtractdataContext.par_out());
			if (ret < 0) {
				logger.info("cannot copy codec parameters to output for {}", file.getName());
				outStream.close();
				timeBase.close();
				h264bsfc.close();
				return false;
			}
			outStream.time_base(bsfExtractdataContext.time_base_out());
			
			codecTimeBaseMap.put(streamIndex, timeBase);
			result = true;
		}

		return result;
	}
	
	
	/**
	 * {@inheritDoc}
	 */
	@Override
	public synchronized void writePacket(AVPacket pkt, AVStream stream) {
		AVStream outStream = outputFormatContext.streams(pkt.stream_index());
		writePacket(pkt, stream.time_base(),  outStream.time_base(), outStream.codecpar().codec_type()); 
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public synchronized void writePacket(AVPacket pkt, AVCodecContext codecContext) {
		AVStream outStream = outputFormatContext.streams(pkt.stream_index());
		AVRational codecTimebase = codecTimeBaseMap.get(pkt.stream_index());
		writePacket(pkt, codecTimebase,  outStream.time_base(), outStream.codecpar().codec_type()); 
	}


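	/**
	 * Common packet path for both writePacket overloads. Packets are dropped while the
	 * muxer is not running or the stream index is not registered, and audio packets are
	 * additionally dropped until the header has been written, so that a slow RTMP
	 * connect does not let the audio queue grow.
	 */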
	private void writePacket(AVPacket pkt, final AVRational inputTimebase, final AVRational outputTimebase, int codecType) 
	{

		if (!isRunning.get() || !registeredStreamIndexList.contains(pkt.stream_index())) {
			logger.info("Not writing to muxer because it's not started for {}", url);
			return;
		}
		
		final AVFormatContext context = getOutputFormatContext();
		if (context.streams(pkt.stream_index()).codecpar().codec_type() ==  AVMEDIA_TYPE_AUDIO && !headerWritten) {
			//Opening the RTMP endpoint may take some time; drop audio packets until the header is written so the audio queue does not grow
			logger.info("Not writing audio packet to muxer because header is not written yet for {}", url);
			return;
		}
		
		writeFrameInternal(pkt, inputTimebase, outputTimebase, context, codecType);
	}

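	/**
	 * Rescales pts/dts/duration from the input timebase to the output timebase, runs
	 * video packets through the "extract_extradata" bitstream filter when it is
	 * configured (writing the header as soon as extradata becomes available) and then
	 * writes the frame with av_write_frame. The original timestamps are restored before
	 * returning, presumably so the caller can reuse the same packet instance.
	 */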
	private synchronized void writeFrameInternal(AVPacket pkt, AVRational inputTimebase, AVRational outputTimebase,
			AVFormatContext context, int codecType) {
		
		long pts = pkt.pts();
		long dts = pkt.dts();
		long duration = pkt.duration();
		long pos = pkt.pos();
		
		pkt.pts(av_rescale_q_rnd(pkt.pts(), inputTimebase, outputTimebase, AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
		pkt.dts(av_rescale_q_rnd(pkt.dts(), inputTimebase, outputTimebase, AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
		pkt.duration(av_rescale_q(pkt.duration(), inputTimebase, outputTimebase));
		pkt.pos(-1);
		
		int ret = 0;

		if (codecType == AVMEDIA_TYPE_VIDEO) {
			ret = av_packet_ref(tmpPacket , pkt);
			if (ret < 0) {
				logger.error("Cannot copy packet for {}", file.getName());
				return;
			}
			if (bsfExtractdataContext != null) {

				ret = av_bsf_send_packet(bsfExtractdataContext, tmpPacket);
				if (ret < 0) {
					logger.warn("cannot send packet to the filter");
					return;
				}

				while (av_bsf_receive_packet(bsfExtractdataContext, tmpPacket) == 0) 
				{
					if (!headerWritten) 
					{
						IntPointer size = new IntPointer(1);
						BytePointer extradataBytePointer = avcodec.av_packet_get_side_data(tmpPacket, AV_PKT_DATA_NEW_EXTRADATA,  size);
						if (size.get() != 0) 
						{
							allocatedExtraDataPointer = new BytePointer(avutil.av_malloc(size.get() + AV_INPUT_BUFFER_PADDING_SIZE)).capacity(size.get() + AV_INPUT_BUFFER_PADDING_SIZE);
							byte[] extraDataArray = new byte[size.get()];
							extradataBytePointer.get(extraDataArray, 0, extraDataArray.length);
							allocatedExtraDataPointer.put(extraDataArray, 0, extraDataArray.length);
							logger.info("extradata size:{} extradata: {} allocated pointer: {}", size.get(), extradataBytePointer, allocatedExtraDataPointer);
							context.streams(pkt.stream_index()).codecpar().extradata(allocatedExtraDataPointer);
							context.streams(pkt.stream_index()).codecpar().extradata_size(size.get());
							writeHeader();
						}
					}
					
					if (headerWritten) {
						ret = av_write_frame(context, tmpPacket);
						if (ret < 0 && logger.isInfoEnabled()) {
							byte[] data = new byte[128];
							av_strerror(ret, data, data.length);
							logger.info("cannot write video frame to muxer. Error: {} stream: {}", new String(data, 0, data.length), file != null ? file.getName() : " no name");
						}
					}
					else {
						logger.info("Header is not written yet for writing video packet for stream: {}", file.getName());
					}
				}
			}
			else 
			{
				ret = av_write_frame(context, tmpPacket);
				if (ret < 0 && logger.isInfoEnabled()) {
					byte[] data = new byte[128];
					av_strerror(ret, data, data.length);
					logger.info("cannot write video frame to muxer. Error: {} stream: {}", new String(data, 0, data.length), file != null ? file.getName() : "no name");
				}
			}
			av_packet_unref(tmpPacket);
		}
		else 
		{
			if (headerWritten) 
			{
				ret = av_write_frame(context, pkt);
				if (ret < 0 && logger.isInfoEnabled()) {
					byte[] data = new byte[128];
					av_strerror(ret, data, data.length);
					logger.info("cannot write frame(not video) to muxer. Error is {} ", new String(data, 0, data.length));
				}
			}
		}

		pkt.pts(pts);
		pkt.dts(dts);
		pkt.duration(duration);
		pkt.pos(pos);
	}
	
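	// Video writing only starts after the first key frame has been seen; earlier
	// frames are dropped in writeVideoBuffer().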
	boolean keyFrameReceived = false;

	@Override
	public synchronized void writeVideoBuffer(ByteBuffer encodedVideoFrame, long timestamp, int frameRotation, int streamIndex,
								 boolean isKeyFrame,long firstFrameTimeStamp) 
	{
		
		if (!isRunning.get() || !registeredStreamIndexList.contains(streamIndex)) {
			logger.info("Not writing to muxer because it's not started for {}", url);
			return;
		}
		
		if (!keyFrameReceived && isKeyFrame) {
			keyFrameReceived = true;
			logger.info("First key frame received, starting to write video for {}", url);
		}
		
		if (keyFrameReceived) {
			videoPkt.stream_index(streamIndex);
			videoPkt.pts(timestamp);
			videoPkt.dts(timestamp);
			
			encodedVideoFrame.rewind();
			if (isKeyFrame) {
				videoPkt.flags(videoPkt.flags() | AV_PKT_FLAG_KEY);
			}
			videoPkt.data(new BytePointer(encodedVideoFrame));
			videoPkt.size(encodedVideoFrame.limit());
			videoPkt.position(0);
			
			AVStream outStream = outputFormatContext.streams(videoPkt.stream_index());
			AVRational codecTimebase = codecTimeBaseMap.get(videoPkt.stream_index());
			writePacket(videoPkt, codecTimebase,  outStream.time_base(), outStream.codecpar().codec_type()); 
			
			av_packet_unref(videoPkt);
		}
		
	}


	
	

}