#include "FFNvDecoder.h"

#include <chrono>
#include <thread>
#include <fstream>

#include "FFCuContextManager.h"

#include "logger.hpp"

using namespace std;

// Reference blog: https://blog.csdn.net/qq_40116098/article/details/120704340
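
// Rough usage sketch (illustrative only; the post-decoded / decode-finished
// callbacks are assumed to be wired up through the class's header API, which
// is not shown in this file):
//
//   FFDecConfig cfg;
//   cfg.uri        = "rtsp://host/stream";  // or a local file path
//   cfg.gpuid      = "0";
//   cfg.force_tcp  = true;
//   cfg.skip_frame = 1;                     // forward every decoded frame
//
//   FFNvDecoder dec;
//   if (dec.init(cfg)) {
//       dec.start();   // spawns the decode and post-decode threads
//       // ... frames are delivered via the post-decoded callback ...
//       dec.close();   // stops decoding and joins the threads
//   }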

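// Pixel-format negotiation callback: FFmpeg invokes this during decoder setup
// with the list of formats it can output; we pick the hardware format stored
// in hw_pix_fmt (AV_PIX_FMT_CUDA, set in init()) so frames stay on the GPU.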
static AVPixelFormat get_hw_format(AVCodecContext *avctx, const AVPixelFormat *pix_fmts)
{
	FFNvDecoder* _this = (FFNvDecoder*)avctx->opaque;

	const AVPixelFormat *p;

	for (p = pix_fmts; *p != -1; p++) {
		if (*p == _this->getHwPixFmt())
			return *p;
	}

	LOG_ERROR("Failed to get HW surface format");
	return AV_PIX_FMT_NONE;
}

FFNvDecoder::FFNvDecoder()
{
	// Initialize decoder state
	fmt_ctx = nullptr;
	avctx = nullptr;
	m_bRunning = false;

	stream = nullptr;
	stream_index = -1;
	hw_pix_fmt = AV_PIX_FMT_NONE;
	m_dec_name = "";

	m_bPause = false;
	m_bReal = true;

	m_decode_thread = 0;
	m_post_decode_thread = 0;

	m_bFinished = false;
	m_dec_keyframe = false;
	m_fps = 0.0;
}

FFNvDecoder::~FFNvDecoder()
{
	m_dec_keyframe = false;
}

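// Initialize from a config. If cfg.uri can be opened as a local file, the
// source is treated as file playback (m_bReal = false); otherwise it is
// assumed to be a real-time stream such as RTSP (m_bReal = true).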
bool FFNvDecoder::init(FFDecConfig& cfg)
{
	m_cfg = cfg;

	ifstream infile(cfg.uri);
	if (infile.is_open()){
		m_bReal = false;
		infile.close();
	} else {
		m_bReal = true;
	}

	return init(cfg.uri.c_str(), cfg.gpuid.c_str(), cfg.force_tcp);
}

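// Core initialization: open the input (file or RTSP over TCP/UDP), locate the
// best video stream, create the corresponding <codec>_cuvid decoder context,
// attach the CUDA device context for the requested GPU, and open the codec.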
bool FFNvDecoder::init(const char* uri, const char* gpuid, bool force_tcp)
{
	// av_log_set_level(AV_LOG_DEBUG);

	avformat_network_init();

	// Open the input video (file or network stream)
	AVDictionary *options = nullptr;
	av_dict_set( &options, "bufsize", "655360", 0 );
	av_dict_set( &options, "rtsp_transport", force_tcp ? "tcp" : "udp", 0 );
	// av_dict_set( &options, "listen_timeout", "30", 0 ); // in seconds
	av_dict_set( &options, "stimeout", "30000000", 0 ); // socket timeout, in microseconds
	
	fmt_ctx = avformat_alloc_context();
	const char* input_file = uri;
	if (avformat_open_input(&fmt_ctx, input_file, nullptr, &options) != 0) {
		LOG_ERROR("Cannot open input file: {}", input_file);
		av_dict_free(&options);
		return false;
	}
	av_dict_free(&options); // entries consumed by avformat_open_input are removed; free the rest

	// Retrieve stream information
	if (avformat_find_stream_info(fmt_ctx, nullptr) < 0) {
		LOG_ERROR("Cannot find input stream information");
		return false;
	}

	// Find the best video stream (and its matching software decoder)
	AVCodec *decoder = nullptr;
	stream_index = av_find_best_stream(fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &decoder, 0);
	if (stream_index < 0) {
		LOG_ERROR("Cannot find a video stream in the input file");
		return false;
	}

	string cuvid_dec_name = string(decoder->name) + "_cuvid";
	AVCodec *vcodec = avcodec_find_decoder_by_name(cuvid_dec_name.c_str());
	if (!vcodec) {
		LOG_ERROR("Cannot find decoder: {}", cuvid_dec_name);
		return false;
	}
	if (!(avctx = avcodec_alloc_context3(vcodec)))
		return false; // out of memory

	// Get the video stream and copy its parameters into the codec context
	stream = fmt_ctx->streams[stream_index];
	if (avcodec_parameters_to_context(avctx, stream->codecpar) < 0)
		return false;

	m_fps = av_q2d(stream->avg_frame_rate);

	avctx->opaque = this;
	// Pixel-format negotiation callback (selects the CUDA hardware format)
	avctx->get_format = get_hw_format;

	hw_pix_fmt = AV_PIX_FMT_CUDA;

	FFCuContextManager* pCtxMgr = FFCuContextManager::getInstance();

	AVBufferRef *hw_device_ctx = pCtxMgr->getCuCtx(gpuid);
	if (nullptr == hw_device_ctx){
		LOG_ERROR("Failed to create CUDA context on GPU {}", gpuid);
		return false;
	}
	avctx->hw_device_ctx = av_buffer_ref(hw_device_ctx);
	if (nullptr == avctx->hw_device_ctx)
	{
		return false;
	}

	// Open the decoder
	AVDictionary *op = nullptr;
	av_dict_set( &op, "gpu", gpuid, 0 );
	// av_dict_set( &op, "surfaces", "5", 0 );
	int ret = avcodec_open2(avctx, vcodec, &op);
	av_dict_free(&op);
	if (ret < 0) {
		LOG_ERROR("Failed to open codec for stream");
		return false;
	}
	
	return true;
}

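// Probes whether this source/GPU combination can be decoded by performing a
// full init() and then releasing the contexts again.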
bool FFNvDecoder::isSurport(FFDecConfig& cfg)
{
	bool bRet = init(cfg);
	decode_finished();
	return bRet;
}

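// Launches decode_thread() on a new pthread; the post-decode (consumer)
// thread is created inside decode_thread() itself.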
bool FFNvDecoder::start(){

	m_bRunning = true;

	pthread_create(&m_decode_thread,0,
        [](void* arg)
        {
            FFNvDecoder* a=(FFNvDecoder*)arg;
            a->decode_thread();
            return (void*)0;
        }
    ,this);

	return true;
}

static long long get_cur_time(){
	// Current wall-clock time, truncated to microsecond precision
	chrono::time_point<chrono::system_clock, chrono::microseconds> tpMicro
		= chrono::time_point_cast<chrono::microseconds>(chrono::system_clock::now());
	// Microsecond time point => microseconds since the epoch
	time_t totalMicroSeconds = tpMicro.time_since_epoch().count();

	long long currentTime = ((long long)totalMicroSeconds) / 1000; // microseconds -> milliseconds

    return currentTime;
}

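// Producer loop: reads packets from the demuxer, feeds them to the cuvid
// decoder and pushes the resulting GPU frames onto the tail of mFrameQueue.
// It also spawns post_decode_thread() as the consumer.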
void FFNvDecoder::decode_thread()
{
	AVPacket* pkt = av_packet_alloc(); // already initialized; av_init_packet is not needed

	pthread_create(&m_post_decode_thread,0,
        [](void* arg)
        {
            FFNvDecoder* a=(FFNvDecoder*)arg;
            a->post_decode_thread();
            return (void*)0;
        }
    ,this);

	// long start_time = get_cur_time();

	while (m_bRunning)
	{
		if (!m_bReal)
		{
			if (m_bPause)
			{
				std::this_thread::sleep_for(std::chrono::milliseconds(3));
				continue;
			}
		}

		AVFrame * gpuFrame = mFrameQueue.getTail();
		if (gpuFrame == nullptr)
		{
			std::this_thread::sleep_for(std::chrono::milliseconds(1));
			continue;
		}
		
		int result = av_read_frame(fmt_ctx, pkt);
		if (result < 0)
		{
			if (result == AVERROR_EOF) {
				LOG_INFO("{} - End of stream.", m_dec_name);
			} else {
				LOG_ERROR("{} - Failed to read frame: {}", m_dec_name, result);
			}
			break;
		}

		if (m_dec_keyframe && !(pkt->flags & AV_PKT_FLAG_KEY)) {
			av_packet_unref(pkt);
			continue;
		}

		if (m_bReal)
		{
			if (m_bPause)
			{
				av_packet_unref(pkt);
				std::this_thread::sleep_for(std::chrono::milliseconds(3));
				continue;
			}
		}

		if (stream_index == pkt->stream_index){
			result = avcodec_send_packet(avctx, pkt);
			if (result < 0){
				LOG_ERROR("{} - Failed to send pkt: {}", m_dec_name, result);
				av_packet_unref(pkt);
				continue;
			}

			result = avcodec_receive_frame(avctx, gpuFrame);
			if (result < 0){
				// AVERROR(EAGAIN) just means the decoder needs more input
				if (result != AVERROR(EAGAIN)) {
					LOG_ERROR("{} - Failed to receive frame: {}", m_dec_name, result);
				}
				av_packet_unref(pkt);
				continue;
			}

			mFrameQueue.addTail();
		}
		av_packet_unref(pkt);
	}

	m_bRunning = false;

	av_packet_free(&pkt);

	// long end_time = get_cur_time();
	// cout << "Decoding took: " << end_time - start_time << " ms" << endl;

	if (m_post_decode_thread != 0)
	{
		pthread_join(m_post_decode_thread,0);
	}

	decode_finished_cbk(m_finishedDecArg);

	decode_finished();

	LOG_INFO("{} - decode thread exited.", m_dec_name);
}

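// Releases the codec and format contexts once decoding has stopped.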
void FFNvDecoder::decode_finished(){
	if (avctx)
	{
		avcodec_free_context(&avctx);
	}
	
	if (fmt_ctx)
	{
		avformat_close_input(&fmt_ctx);
	}

	m_bFinished = true;
	m_dec_keyframe = false;
}

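// Consumer loop: pops decoded GPU frames from the head of mFrameQueue and
// forwards every skip_frame-th frame to post_decoded_cbk().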
void FFNvDecoder::post_decode_thread(){
	int skip_frame = m_cfg.skip_frame;
	if (skip_frame <= 0){
		skip_frame = 1;
	}
	
	int index = 0;
	while (m_bRunning || mFrameQueue.length() > 0)
	{
		AVFrame * gpuFrame = mFrameQueue.getHead();
		if (gpuFrame == nullptr)
		{
			std::this_thread::sleep_for(std::chrono::milliseconds(3));
			continue;
		}

		// Frame skipping: forward only every skip_frame-th frame to the callback
		if (skip_frame == 1 || index % skip_frame == 0){
			post_decoded_cbk(m_postDecArg, gpuFrame);
			index = 0;
		}
		
		mFrameQueue.addHead();

		index++;
	}

	LOG_INFO("post decode thread exited.");
}

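// Stops the decode loop and joins the decode thread (which in turn joins the
// post-decode thread before returning).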
void FFNvDecoder::close(){
	m_bRunning=false;
	if(m_decode_thread != 0){
		pthread_join(m_decode_thread,0);
	}
	m_dec_keyframe = false;
}

AVPixelFormat FFNvDecoder::getHwPixFmt(){
	return hw_pix_fmt;
}

bool FFNvDecoder::isRunning(){
	return m_bRunning;
}

bool FFNvDecoder::isFinished(){
	return m_bFinished;
}

bool FFNvDecoder::isPausing(){
	return m_bPause;
}

bool FFNvDecoder::getResolution( int &width, int &height ){
	if (avctx != nullptr)
	{
		width = avctx->width;
		height = avctx->height;
		return true;
	}
	
	return false;
}

void FFNvDecoder::pause(){
	m_bPause = true;
}

void FFNvDecoder::resume(){
	m_bPause = false;
}

void FFNvDecoder::setDecKeyframe(bool bKeyframe)
{
	m_dec_keyframe = bKeyframe;
}

int FFNvDecoder::getCachedQueueLength(){
	return mFrameQueue.length();
}

float FFNvDecoder::fps(){
	return m_fps;
}