#ifndef _RTP_RECEIVER_H_
#define _RTP_RECEIVER_H_
#include "buffer.h"
#include "demuxer.h"
#include "rtppacket.h"
#include <stdint.h>
#include <cstdlib>   // free(), used by Frame
#include <string>
#include <mutex>
#include <queue>
#include <atomic>
#include <thread>
typedef unsigned char BYTE;
using namespace jrtplib;
using namespace std;
/** Video data callback.
 *
 * @param userdata  [in] user context pointer registered with the callback
 * @param videoType [in] stream type: audio 0xC0, H.264 0x1B, MPEG-4 0x01, SVAC 0x80
 * @param data      [in] frame data
 * @param len       [in] frame data length in bytes
 * @param isKey     [in] non-zero if the frame is a key frame
 * @param pts       [in] presentation timestamp carried in the stream
 * @param localPts  [in] local timestamp
 */
typedef void(*CallBack_Stream)(void* userdata, int videoType, char* data, int len, int isKey, uint64_t pts, uint64_t localPts);
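// Illustrative example (not part of the original header): a minimal
// CallBack_Stream handler that only logs each frame it receives; it would be
// registered later via RTPReceiver::SetOutputCallback(OnStreamData, userCtx).
//
//   static void OnStreamData(void* userdata, int videoType, char* data, int len,
//                            int isKey, uint64_t pts, uint64_t localPts)
//   {
//       (void)userdata; (void)data; (void)localPts;
//       printf("frame: type=0x%02X len=%d key=%d pts=%llu\n",
//              videoType, len, isKey, (unsigned long long)pts);
//   }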
/** Callback notifying that playback of a recorded (VOD) file has finished. */
typedef void(*CallBack_VodFileEnd)(void* userdata);
/** Request-stream callback. */
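// NOTE: the original declaration of CallBack_Request_Stream is not shown in
// this listing; only the name is known from SetRequestStreamCallback() and the
// m_callback_request_stream member below. The signature here is an assumed
// placeholder, not the original one.
typedef void(*CallBack_Request_Stream)(void* userdata);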
// One frame. Note: the buffer must be heap-allocated by the caller;
// ~Frame() releases it with free().
struct Frame {
    Frame() : buf_(nullptr), len_(0) {}
    Frame(BYTE* buf, int len, bool key) : buf_(buf), len_(len), key_(key) {}
    ~Frame() {
        if (buf_ != nullptr) {
            free(buf_);
            buf_ = nullptr;
        }
    }
    BYTE* buf_;
    int len_;
    bool key_{};
};
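// Illustrative example (assumption, not from the original code) of how a Frame
// is meant to own its buffer: copy the payload into a malloc'd block so that
// ~Frame() can release it with free().
//
//   BYTE* buf = (BYTE*)malloc(len);
//   memcpy(buf, payload, len);
//   Frame* frame = new Frame(buf, len, isKey);   // buf is freed when frame is deleted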
// A slice buffer plus the metadata needed to decode it.
class FrameToDecode
{
public:
    FrameToDecode()
        : m_SliceBuf(0)
        , m_localPts(0)
        , m_LastPTS(-1)   // (uint64_t)-1 is used as the "not set" sentinel
        , m_LastIsKeyFrame(false) {}
    FrameToDecode(unsigned char streamId)
        : m_SliceBuf(0)
        , m_streamId(streamId)
        , m_localPts(0)
        , m_LastPTS(-1)
        , m_LastIsKeyFrame(false) {}
    FrameToDecode& operator=(FrameToDecode &temp)
    {
        m_SliceBuf = temp.m_SliceBuf;
        m_streamId = temp.m_streamId;
        m_localPts = temp.m_localPts;
        m_LastPTS = temp.m_LastPTS;
        m_LastIsKeyFrame = temp.m_LastIsKeyFrame;
        return *this;
    }
    CBuffer m_SliceBuf;
    unsigned char m_streamId{};
    uint64_t m_localPts;
    uint64_t m_LastPTS;
    bool m_LastIsKeyFrame;
};
// Receives a GB28181 RTP stream, reassembles the PS (program stream) payload
// on a worker thread, and hands demuxed frames to the registered callbacks.
class RTPReceiver {
public:
    RTPReceiver();
    virtual bool Open(uint16_t localPort) = 0;
    virtual bool IsOpened() = 0;
    virtual void Close() = 0;

    void SetVodEndCallback(CallBack_VodFileEnd cb, void* param);
    void SetOutputCallback(CallBack_Stream cb, void* param);
    void SetRequestStreamCallback(CallBack_Request_Stream cb);
    void SetDeviceID(string deviceID);
    int GetPsFrameListSize();

public:
    // Receives one demuxed frame from the PS parser.
    void OnPsDemux(unsigned char streamId, BYTE *data, int len, bool key, uint64_t pts, uint64_t localPts);
    // Processes queued PS frames and dispatches the output callbacks.
    int OnPsProcess();
    void ClearPsVideoFrameList();
    // Parses one incoming RTP packet and feeds its payload to the PS demuxer.
    int ParsePacket(RTPPacket* packet);

public:
    int InitPS();
    void ClosePsThread();
    void *GetUsrParam() { return m_usrParam; }

public:
    CBuffer m_SliceBuf;
    uint64_t m_LastPTS;
    bool m_LastIsKeyFrame;
    unsigned char m_LastStreamType;
    int64_t m_notToDecodCount{0};       // ticks since the decode callback was last invoked (workaround for Dahua cameras)
    void* m_usrParam;
    CallBack_Stream m_h264DataFunc;     // video stream callback
    std::queue<Frame*> m_psVideoFrames;
    mutex m_psFrameMutex;
    string m_deviceID;
    CMpeg2Demux m_psParser;
    std::atomic_bool m_bPsExit;         // signals the PS demux thread to exit
    uint32_t lastPts{0};
    uint64_t last_recv_ts{0};
    int offset{0};
    int mark{0};
    BYTE* recvTmpBuf{nullptr};
    std::thread* m_psThreadPtr;         // PS demux thread
    CallBack_VodFileEnd m_hVodEndFunc;  // callback fired when a recorded (VOD) stream ends
    CallBack_Request_Stream m_callback_request_stream; // request-stream callback
};
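// Usage sketch (illustrative only -- RTPReceiver is abstract, so a concrete UDP
// or TCP subclass implementing Open()/IsOpened()/Close() is assumed, and the
// call order shown is an assumption rather than taken from the original code):
//
//   receiver->SetOutputCallback(OnStreamData, userCtx);  // demuxed video frames
//   receiver->SetVodEndCallback(OnVodEnd, userCtx);      // end of VOD playback
//   receiver->SetDeviceID(deviceId);
//   if (receiver->Open(localRtpPort))
//       receiver->InitPS();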
#endif // _RTP_RECEIVER_H_