#ifndef _RTP_RECEIVER_H_
#define _RTP_RECEIVER_H_

#include "demuxer.h"
#include "buffer.h"
#include "rtpudpv4transmitter.h"
#include "rtpipv4address.h"
#include "rtpsessionparams.h"
#include "rtpsession.h"
#include "rtppacket.h"

// C++ standard library headers required by the declarations below.
#include <atomic>
#include <cstdint>
#include <cstdlib>
#include <mutex>
#include <queue>
#include <string>
#include <thread>

#define OUTTIME_RTCP        (30 * 1000)
#define PAYLOAD             99
#define PAYLOAD_PS          96
#define PAYLOAD_H264        98
#define PAYLOAD_MP4         97
#define UDP_SIZE            1400
#define MIN_PORT            10000
#define MAX_PORT            60000
#define RTP_MAX_PACKET_LEN  1450

using namespace jrtplib;
using namespace std;

typedef unsigned char BYTE;

/** Video data callback.
 *
 * @param userdata  [in] user pointer passed to SetOutputCallback
 * @param videoType [in] stream type: audio-0xC0, H.264-0x1B, MPEG-4-0x01, SVAC-0x80
 * @param data      [in] video data
 * @param len       [in] length of the video data in bytes
 * @param isKey     [in] whether this is a key frame
 * @param pts       [in] presentation timestamp
 * @param localPts  [in] local timestamp
 */
typedef void (*CallBack_Stream)(void* userdata, int videoType, char* data, int len,
                                int isKey, uint64_t pts, uint64_t localPts);

/** Notification callback: playback of a recorded (VOD) file has finished. */
typedef void (*CallBack_VodFileEnd)(void* userdata);

int AllocRtpPort(void);

class MyRTPSession : public RTPSession
{
public:
    MyRTPSession() {}
    virtual ~MyRTPSession() {}

private:
    virtual void OnRTPPacket(RTPPacket* pack, const RTPTime& receiverTime, const RTPAddress* senderAddress)
    {
        // Learn the sender's address so outgoing RTCP reaches it.
        AddDestination(*senderAddress);
    }

    virtual void OnRTCPCompoundPacket(RTCPCompoundPacket* pack, const RTPTime& receivetime, const RTPAddress* senderaddress)
    {
        // Keep-alive reply via an RTCP APP packet is currently disabled.
        //AddDestination(*senderaddress);
        //const char* name = "hi~";
        //SendRTCPAPPPacket(0, (const uint8_t*)name, "keeplive", 8);
        //printf("send rtcp app");
    }
};

// Represents one frame. Note: the buffer is allocated by the caller and
// released in the destructor.
struct Frame
{
    Frame() { buf_ = nullptr; len_ = 0; }
    Frame(BYTE* buf, int len, bool key) : buf_(buf), len_(len), key_(key) {}
    ~Frame()
    {
        if (buf_ != nullptr)
        {
            free(buf_);
            buf_ = nullptr;
        }
    }

    BYTE* buf_;
    int len_;
    bool key_{};
};

class FrameToDecode
{
public:
    FrameToDecode()
        : m_SliceBuf(0)
        , m_localPts(0)
        , m_LastPTS(-1)
        , m_LastIsKeyFrame(false)
    {}

    FrameToDecode(unsigned char streamId)
        : m_SliceBuf(0)
        , m_streamId(streamId)
        , m_localPts(0)
        , m_LastPTS(-1)
        , m_LastIsKeyFrame(false)
    {}

    void operator=(FrameToDecode& temp)
    {
        m_SliceBuf = temp.m_SliceBuf;
        m_streamId = temp.m_streamId;
        m_localPts = temp.m_localPts;
        m_LastPTS = temp.m_LastPTS;
        m_LastIsKeyFrame = temp.m_LastIsKeyFrame;
    }

    CBuffer m_SliceBuf;
    unsigned char m_streamId{};
    uint64_t m_localPts;
    uint64_t m_LastPTS;
    bool m_LastIsKeyFrame;
};

class RTPReceiver
{
    RTPReceiver(const RTPReceiver& other);
    RTPReceiver& operator=(const RTPReceiver& other);

public:
    RTPReceiver();
    ~RTPReceiver();

    bool Open(uint16_t localPort);
    bool IsOpened() const;
    void Close();

    int GetPsFrameListSize();
    void ClearPsVideoFrameList();
    void OnPsDemux(unsigned char streamId, BYTE* data, int len, bool key, uint64_t pts, uint64_t localPts);

    void SetOutputCallback(CallBack_Stream cb, void* param);
    void SetVodEndCallback(CallBack_VodFileEnd cb, void* param);
    CallBack_VodFileEnd GetVodEndFunc() { return m_hVodEndFunc; }
    void* GetUsrParam() { return m_usrParam; }
    void SetDeviceID(string deviceID) { this->m_deviceID = deviceID; }

private:
    static int rtp_revc_thread_(void* param);
    static int ps_demuxer_thread_(void* param);
    static int ps_decode_thread_(void* param);

    int OnRtpRecv();
    int OnPsProcess();
    int OnDecodeProcess();

private:
    std::thread m_rtpThread;            // RTP receive thread
    std::thread m_psThread;             // PS demuxing thread
    uint16_t m_localPort;               // local RTP receive port
    MyRTPSession m_rtpSession;          // RTP session
    std::atomic_bool m_bRtpExit;        // flag: stop the RTP receive thread
    std::atomic_bool m_bPsExit;         // flag: stop the PS demuxing thread
    std::queue<Frame*> m_psVideoFrames; // demuxed video frames (Frame* element type is an assumption)
    mutex m_psFrameMutex;               // guards m_psVideoFrames
    CMpeg2Demux m_psParser;             // PS (program stream) demuxer
    void* m_usrParam;                   // user pointer handed back in the callbacks
    std::atomic_bool m_bOpened;
    CallBack_Stream m_h264DataFunc;     // video stream callback
    CallBack_VodFileEnd m_hVodEndFunc;  // recorded (VOD) stream end callback
    CBuffer m_SliceBuf;
    uint64_t m_LastPTS;
    bool m_LastIsKeyFrame;
    unsigned char m_LastStreamType;
    int64_t m_idleCount;
    int64_t m_noDataCount;              // thread tick count: the stream was opened successfully but no data is actually arriving
    string m_deviceID;
    int64_t m_notToDecodCount{0};       // thread tick count: how long the decode callback has not been invoked (workaround for Dahua cameras)
};

#endif // _RTP_RECEIVER_H_
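
/*
 * Usage sketch (illustrative only, not part of the original header): shows how a
 * caller might wire an RTPReceiver to a stream callback. OnStream, OnVodEnd, the
 * port handling, and main() are assumptions for illustration; only RTPReceiver,
 * AllocRtpPort, CallBack_Stream, and CallBack_VodFileEnd come from this header.
 *
 *   #include "rtp_receiver.h"
 *
 *   static void OnStream(void* userdata, int videoType, char* data, int len,
 *                        int isKey, uint64_t pts, uint64_t localPts)
 *   {
 *       // Forward the demuxed elementary-stream data to a decoder or file writer.
 *   }
 *
 *   static void OnVodEnd(void* userdata)
 *   {
 *       // Recorded-stream (VOD) playback has finished.
 *   }
 *
 *   int main()
 *   {
 *       RTPReceiver receiver;
 *       receiver.SetOutputCallback(OnStream, &receiver);   // userdata is passed back to the callback
 *       receiver.SetVodEndCallback(OnVodEnd, &receiver);
 *
 *       uint16_t port = static_cast<uint16_t>(AllocRtpPort());  // a port in [MIN_PORT, MAX_PORT]
 *       if (!receiver.Open(port))
 *           return 1;
 *
 *       // ... receive until done ...
 *       receiver.Close();
 *       return 0;
 *   }
 */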