Commit 97243f564f60671326b3d25b6294bd5670b1dfe6
1 parent 767bb02f
Optimize task management and data collection
Showing 13 changed files with 419 additions and 353 deletions
vehicle_structure_platform.git0708-3080-trt-face/src/FFNvDecoder/DxDecoderWrap.cpp
... | ... | @@ -151,6 +151,13 @@ bool DxDecoderWrap::DxDecoderIsRun() const |
151 | 151 | return false; |
152 | 152 | } |
153 | 153 | |
154 | +bool DxDecoderWrap::DxDecoderIsFinished() { | |
155 | + if(m_pDec) { | |
156 | + return m_pDec->isFinished(); | |
157 | + } | |
158 | + | |
159 | + return true; | |
160 | +} | |
154 | 161 | |
155 | 162 | bool DxDecoderWrap::DxFrameIsEmpty() |
156 | 163 | { |
... | ... | @@ -175,6 +182,20 @@ int DxDecoderWrap::DxLockFrame(DxGPUFrame& frame) |
175 | 182 | return 0; |
176 | 183 | } |
177 | 184 | |
185 | +DxGPUFrame DxDecoderWrap::DxGetFrame() | |
186 | +{ | |
187 | + std::lock_guard<std::mutex> l(m_queue_frames_mutex); | |
188 | + | |
189 | + DxGPUFrame frame; | |
190 | + if(m_queue_frames.size() <= 0) { | |
191 | + return frame; | |
192 | + } | |
193 | + frame = m_queue_frames.front(); | |
194 | + m_queue_frames.pop(); | |
195 | + | |
196 | + return frame; | |
197 | +} | |
198 | + | |
178 | 199 | void DxDecoderWrap::post_decode_callback(GPUFrame * decodedFrame) { |
179 | 200 | while(!m_bClose) { |
180 | 201 | m_queue_frames_mutex.lock(); |
... | ... | @@ -204,6 +225,7 @@ void DxDecoderWrap::post_decode_callback(GPUFrame * decodedFrame) { |
204 | 225 | frame.size = gpuFrame->width; |
205 | 226 | frame.frame = pHwData; |
206 | 227 | frame.timestamp = decodedFrame->ts; |
228 | + frame.dec_name = m_name; | |
207 | 229 | |
208 | 230 | m_queue_frames.push(frame); |
209 | 231 | m_queue_frames_mutex.unlock(); |
... | ... | @@ -216,5 +238,5 @@ void DxDecoderWrap::post_decode_callback(GPUFrame * decodedFrame) { |
216 | 238 | } |
217 | 239 | |
218 | 240 | void DxDecoderWrap::decode_finished_callback() { |
219 | - m_bClose = true; | |
241 | + // m_bClose = true; | |
220 | 242 | } |
221 | 243 | \ No newline at end of file | ... | ... |
vehicle_structure_platform.git0708-3080-trt-face/src/FFNvDecoder/DxDecoderWrap.h
... | ... | @@ -24,11 +24,12 @@ typedef struct DxConfig |
24 | 24 | |
25 | 25 | typedef struct DxGPUFrame |
26 | 26 | { |
27 | - void * frame; | |
27 | + void * frame {nullptr}; | |
28 | 28 | unsigned int size; |
29 | 29 | unsigned int width; |
30 | 30 | unsigned int height; |
31 | 31 | unsigned long long timestamp; |
32 | + std::string dec_name; | |
32 | 33 | }DxGPUFrame; |
33 | 34 | |
34 | 35 | |
... | ... | @@ -54,12 +55,15 @@ public: |
54 | 55 | int DxCloseDecoder(); |
55 | 56 | bool DxDecoderIsRun() const; |
56 | 57 | |
58 | + bool DxDecoderIsFinished(); | |
59 | + | |
57 | 60 | int DxGetFrameCount(); |
58 | 61 | |
59 | 62 | int DxGetResolution( int &width, int &height ); |
60 | 63 | |
61 | 64 | bool DxFrameIsEmpty(); |
62 | 65 | int DxLockFrame(DxGPUFrame& frame ); |
66 | + DxGPUFrame DxGetFrame(); | |
63 | 67 | |
64 | 68 | int PauseDecoder(); |
65 | 69 | int ResumeDecoder(); | ... | ... |
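Note: DxGetFrame() is the non-blocking counterpart of DxLockFrame(): it pops one decoded frame from the internal queue and returns a default DxGPUFrame (frame == nullptr) when the queue is empty, while DxDecoderIsFinished() reports whether the underlying decoder has drained. A minimal consumer sketch using only the interfaces declared above; the polling loop and sleep interval are illustrative, not part of this commit:

    #include <chrono>
    #include <thread>

    void drain_decoder(DxDecoderWrap& dec) {
        while (true) {
            DxGPUFrame f = dec.DxGetFrame();              // non-blocking pop
            if (nullptr == f.frame) {                     // queue currently empty
                if (!dec.DxDecoderIsRun() && dec.DxFrameIsEmpty()) {
                    break;                                // decoder stopped and nothing left
                }
                std::this_thread::sleep_for(std::chrono::milliseconds(5));
                continue;
            }
            // hand f off to the algorithm pipeline; the consumer owns f.frame
            // and releases it with cudaFree() once processed, as algorthim_process() does.
        }
    }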
vehicle_structure_platform.git0708-3080-trt-face/src/VPT/ImageSaveCache.cpp
... | ... | @@ -38,7 +38,7 @@ void ImageSaveCache::show() |
38 | 38 | } |
39 | 39 | //#include <fstream> |
40 | 40 | //std::ofstream os1("./mp_frameSize.txt", std::ofstream::out | std::ofstream::trunc); |
41 | -void ImageSaveCache::add_frame(const OBJ_KEY &snaphot_id, const FRAME_KEY & frame_id, const DxGPUFrame & frame) | |
41 | +void ImageSaveCache::insert_frame(const OBJ_KEY &snaphot_id, const FRAME_KEY & frame_id, const DxGPUFrame & frame) | |
42 | 42 | { |
43 | 43 | //std::lock_guard<std::mutex> l(tx); |
44 | 44 | //os1 << std::unitbuf; | ... | ... |
vehicle_structure_platform.git0708-3080-trt-face/src/VPT/ImageSaveCache.h
... | ... | @@ -10,7 +10,7 @@ class ImageSaveCache |
10 | 10 | { |
11 | 11 | public: |
12 | 12 | |
13 | - void add_frame(const OBJ_KEY & snaphot_id, const FRAME_KEY & frame_id, const DxGPUFrame & frame); | |
13 | + void insert_frame(const OBJ_KEY & snaphot_id, const FRAME_KEY & frame_id, const DxGPUFrame & frame); | |
14 | 14 | void release(const OBJ_KEY & snaphot_id); |
15 | 15 | DxGPUFrame* get_frame(const OBJ_KEY & snaphot_id); |
16 | 16 | void show(); | ... | ... |
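Note: only the method name changes here (add_frame becomes insert_frame); the cache still keys cached source frames by snapshot ID. A hypothetical call sequence using the methods declared above (the key values are made up for illustration, and the OBJ_KEY/FRAME_KEY field order follows the call sites in snapshot_helper.cpp):

    ImageSaveCache cache;
    OBJ_KEY   snap_id  = { 5, 12 };        // { videoID, objID }, illustrative values
    FRAME_KEY frame_id = { 5, 1001ULL };   // { videoID, frame timestamp }
    DxGPUFrame frame;                      // a decoded frame obtained elsewhere

    cache.insert_frame(snap_id, frame_id, frame);   // cache the full source frame
    DxGPUFrame* cached = cache.get_frame(snap_id);  // look it up when saving the snapshot
    cache.release(snap_id);                         // drop the cached entry once saved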
vehicle_structure_platform.git0708-3080-trt-face/src/VPT/MutliSourceVideoProcess.cpp
... | ... | @@ -226,106 +226,102 @@ int CMutliSourceVideoProcess::InitAlgorthim(mvpt_param vptParam, VIDEO_OBJECT_IN |
226 | 226 | return ret; |
227 | 227 | } |
228 | 228 | |
229 | -void CMutliSourceVideoProcess::FinishDecode(const int taskID) | |
229 | +void CMutliSourceVideoProcess::FinishTask(const int taskID) | |
230 | 230 | { |
231 | - for (int i = 0; i < tasks.size(); i++) | |
232 | - { | |
233 | - if (tasks[i].taskID == taskID && tasks[taskID].taskTcuvid != NULL) | |
234 | - { | |
235 | - tasks[taskID].taskState == FINISH; | |
236 | - tasks[taskID].taskTcuvid->DxCloseDecoder(); | |
237 | - delete tasks[taskID].taskTcuvid; | |
238 | - tasks[taskID].taskTcuvid = NULL; | |
239 | - printf("-----------------------finish task: %d-----------------------\n", taskID); | |
240 | - break; | |
241 | - } | |
231 | + if (m_taskMap.find(taskID) == m_taskMap.end()){ | |
232 | + return; | |
242 | 233 | } |
243 | 234 | |
244 | -} | |
235 | + Task& task = m_taskMap[taskID]; | |
245 | 236 | |
246 | -void CMutliSourceVideoProcess::FinishTask(const int taskID) | |
247 | -{ | |
248 | - for (int i = 0; i < tasks.size(); i++) | |
249 | - { | |
250 | - if (tasks[i].taskID == taskID) | |
251 | - { | |
252 | - //printf("first begin finish\n"); | |
253 | - if (tasks[i].taskState == PLAY) TaskinPlay--; | |
254 | - tasks[i].taskState = FINISH; | |
255 | - tasks[i].taskFileSource = nullptr; | |
256 | - tasks[i].taskObjCallbackFunc = nullptr; | |
257 | - tasks[i].taskRealTimeCallbackFunc = nullptr; | |
237 | + if (task.taskState == PLAY) TaskinPlay--; | |
238 | + task.taskState = FINISH; | |
239 | + task.taskFileSource = nullptr; | |
240 | + task.taskObjCallbackFunc = nullptr; | |
241 | + task.taskRealTimeCallbackFunc = nullptr; | |
258 | 242 | |
259 | - m_snaphot_helper.finish_task_ss_analysis(taskID); | |
243 | + m_snaphot_helper.finish_task_ss_analysis(taskID); | |
260 | 244 | |
261 | - if (tasks[i].folderName) | |
262 | - { | |
263 | - delete tasks[i].folderName; | |
264 | - tasks[i].folderName = nullptr; | |
265 | - } | |
245 | + if (task.folderName) { | |
246 | + delete task.folderName; | |
247 | + task.folderName = nullptr; | |
248 | + } | |
266 | 249 | |
267 | - if (tasks[i].folderNameLittle) | |
268 | - { | |
269 | - delete tasks[i].folderNameLittle; | |
270 | - tasks[i].folderNameLittle = nullptr; | |
271 | - } | |
250 | + if (task.folderNameLittle) { | |
251 | + delete task.folderNameLittle; | |
252 | + task.folderNameLittle = nullptr; | |
253 | + } | |
272 | 254 | |
273 | - tasks[i].frameImage.release(); | |
255 | + task.frameImage.release(); | |
274 | 256 | |
275 | - m_vptProcess.FinishTaskTracker(taskID); | |
257 | + m_vptProcess.FinishTaskTracker(taskID); | |
276 | 258 | |
277 | - if (viewTaskID == taskID) viewTaskID = -1; | |
259 | + if (viewTaskID == taskID) { | |
260 | + viewTaskID = -1; | |
261 | + } | |
278 | 262 | |
279 | - LOG_INFO("task {} is finished. timeusing: {}", taskID, get_cur_time_ms() - tasks[i].timestamp); | |
263 | + m_snaphot_helper.waitSaveAnalysisInfo(task.taskID); | |
280 | 264 | |
281 | - break; | |
282 | - } | |
265 | + task.taskTcuvid->DxCloseDecoder(); | |
266 | + delete task.taskTcuvid; | |
267 | + task.taskTcuvid = NULL; | |
268 | + | |
269 | + // Notify the upper layer via callback that the task has finished | |
270 | + if (taskFinishCallbackFunc != nullptr) { | |
271 | + std::lock_guard<std::mutex> l(m_snaphot_helper.callback_tx); | |
272 | + taskFinishCallbackFunc(task.taskID); | |
283 | 273 | } |
274 | + | |
275 | + LOG_INFO("task {} is finished. timeusing: {}", taskID, get_cur_time_ms() - task.timestamp); | |
276 | + | |
284 | 277 | } |
285 | 278 | |
286 | 279 | void CMutliSourceVideoProcess::PauseTask(const int taskID) |
287 | 280 | { |
288 | - for (int i = 0; i < tasks.size(); i++) | |
289 | - { | |
290 | - if (tasks[i].taskID == taskID) | |
291 | - { | |
292 | - if (tasks[i].taskState == PLAY) TaskinPlay--; | |
293 | - tasks[i].taskState = PAUSE; | |
294 | - m_vptProcess.PauseTaskTracker(taskID); | |
295 | - tasks[i].taskTcuvid->PauseDecoder(); | |
296 | - if (viewTaskID == taskID) viewTaskID = -1; | |
297 | - printf("-----------------------pasue task: %d-----------------------\n", taskID); | |
298 | - break; | |
299 | - } | |
281 | + if (m_taskMap.find(taskID) == m_taskMap.end()){ | |
282 | + return; | |
300 | 283 | } |
284 | + | |
285 | + Task& task = m_taskMap[taskID]; | |
286 | + | |
287 | + if (task.taskState == PLAY) TaskinPlay--; | |
288 | + task.taskState = PAUSE; | |
289 | + m_vptProcess.PauseTaskTracker(taskID); | |
290 | + task.taskTcuvid->PauseDecoder(); | |
291 | + if (viewTaskID == taskID) viewTaskID = -1; | |
292 | + printf("-----------------------pasue task: %d-----------------------\n", taskID); | |
301 | 293 | } |
302 | 294 | |
303 | 295 | void CMutliSourceVideoProcess::RestartTask(const int taskID) |
304 | 296 | { |
305 | - for (int i = 0; i < tasks.size(); i++) | |
306 | - { | |
307 | - if (tasks[i].taskID == taskID) | |
308 | - { | |
309 | - tasks[i].taskState = PLAY; | |
310 | - TaskinPlay++; | |
311 | - m_vptProcess.RestartTaskTraker(taskID); | |
312 | - tasks[i].taskTcuvid->ResumeDecoder(); | |
313 | - printf("-----------------------restart task: %d-----------------------\n", taskID); | |
314 | - break; | |
315 | - } | |
297 | + if (m_taskMap.find(taskID) == m_taskMap.end()){ | |
298 | + return; | |
316 | 299 | } |
300 | + | |
301 | + Task& task = m_taskMap[taskID]; | |
302 | + | |
303 | + task.taskState = PLAY; | |
304 | + TaskinPlay++; | |
305 | + m_vptProcess.RestartTaskTraker(taskID); | |
306 | + task.taskTcuvid->ResumeDecoder(); | |
307 | + printf("-----------------------restart task: %d-----------------------\n", taskID); | |
317 | 308 | } |
318 | 309 | |
319 | 310 | // Real-time view interface: live viewing can be enabled for one channel; its analysis-result OSD is drawn and returned to the user via callback |
320 | 311 | void CMutliSourceVideoProcess::ViewTask(const int taskID) |
321 | 312 | { |
322 | - if (tasks.size() > taskID && tasks[taskID].taskState == PLAY) | |
323 | - { | |
313 | + if (m_taskMap.find(taskID) == m_taskMap.end()){ | |
314 | + printf("Only can view playing task!"); | |
315 | + return; | |
316 | + } | |
317 | + | |
318 | + Task& task = m_taskMap[taskID]; | |
319 | + if (task.taskState == PLAY) { | |
324 | 320 | viewTaskID = taskID; |
325 | 321 | printf("-----------------------view task: %d-----------------------\n", taskID); |
326 | - } | |
327 | - else | |
322 | + } else { | |
328 | 323 | printf("Only can view playing task!"); |
324 | + } | |
329 | 325 | } |
330 | 326 | |
331 | 327 | // Stop the real-time view interface and close the return of real-time frames |
... | ... | @@ -465,9 +461,10 @@ bool CMutliSourceVideoProcess::AddTask(task_param tparam) //debug by zsh |
465 | 461 | new_task_info.obj_callback = new_task.taskObjCallbackFunc; |
466 | 462 | m_snaphot_helper.add_task_info(new_task.taskID, new_task_info); |
467 | 463 | |
464 | + m_taskMap[new_task.taskID] = new_task; | |
465 | + | |
468 | 466 | TotalTask++; |
469 | 467 | TaskinPlay++; |
470 | - tasks.push_back(new_task); | |
471 | 468 | |
472 | 469 | m_vptProcess.AddTaskTracker(new_task.taskID, width, height); |
473 | 470 | |
... | ... | @@ -528,17 +525,14 @@ int CMutliSourceVideoProcess::AddOperator(task_param tparam) |
528 | 525 | |
529 | 526 | int CMutliSourceVideoProcess::get_task_progress(int taskid, double &progress) |
530 | 527 | { |
531 | - int ret = 0; | |
532 | - for (auto &item : tasks) | |
533 | - { | |
534 | - if (item.taskID == taskid) | |
535 | - { | |
536 | - progress = (double)item.taskFrameCount / (double)item.taskTotalFrameCount; | |
537 | - return 0; | |
538 | - } | |
528 | + int ret = -1; | |
529 | + if (m_taskMap.find(taskid) == m_taskMap.end()){ | |
530 | + return ret; | |
539 | 531 | } |
540 | - return -1; | |
541 | 532 | |
533 | + Task& task = m_taskMap[taskid]; | |
534 | + progress = (double)task.taskFrameCount / (double)task.taskTotalFrameCount; | |
535 | + return 0; | |
542 | 536 | } |
543 | 537 | |
544 | 538 | void CMutliSourceVideoProcess::AddOperator(int taskID, int taskOper) |
... | ... | @@ -585,7 +579,14 @@ void CMutliSourceVideoProcess::OperatorTask() |
585 | 579 | RestartTask(newOperator.changeTaskID); |
586 | 580 | break; |
587 | 581 | case FINISHTASK: |
588 | - FinishTask(newOperator.changeTaskID); | |
582 | + { | |
583 | + FinishTask(newOperator.changeTaskID); | |
584 | + auto iter = m_taskMap.find(newOperator.changeTaskID); | |
585 | + if (iter != m_taskMap.end()) { | |
586 | + m_taskMap.erase(iter); | |
587 | + } | |
588 | + | |
589 | + } | |
589 | 590 | break; |
590 | 591 | default: |
591 | 592 | break; |
... | ... | @@ -595,9 +596,9 @@ void CMutliSourceVideoProcess::OperatorTask() |
595 | 596 | } |
596 | 597 | |
597 | 598 | //#define LOG_INFO |
598 | -void CMutliSourceVideoProcess::callTaskObjInfoCallbackFunc(int objCount, VPT_ObjInfo *obj, int taskFrameCount, int taskId) | |
599 | +void CMutliSourceVideoProcess::callTaskObjInfoCallbackFunc(const VPT_Result& vptResult, int taskFrameCount, int taskId) | |
599 | 600 | { |
600 | - if (objCount == 0) | |
601 | + if (vptResult.objCount == 0) | |
601 | 602 | { |
602 | 603 | video_object_info newObjInfo; |
603 | 604 | newObjInfo.task_id = taskId; |
... | ... | @@ -615,8 +616,9 @@ void CMutliSourceVideoProcess::callTaskObjInfoCallbackFunc(int objCount, VPT_Obj |
615 | 616 | } |
616 | 617 | else |
617 | 618 | { |
618 | - for (int c = 0; c < objCount; c++) | |
619 | + for (int c = 0; c < vptResult.objCount; c++) | |
619 | 620 | { |
621 | + const VPT_ObjInfo* obj = vptResult.obj; | |
620 | 622 | OBJ_KEY newObj = { taskId, obj[c].id }; |
621 | 623 | video_object_info newObjInfo; |
622 | 624 | newObjInfo.task_id = taskId; |
... | ... | @@ -645,7 +647,6 @@ void CMutliSourceVideoProcess::callTaskObjInfoCallbackFunc(int objCount, VPT_Obj |
645 | 647 | } |
646 | 648 | void CMutliSourceVideoProcess::algorthim_process() |
647 | 649 | { |
648 | - set<int> k; | |
649 | 650 | int count = 0; |
650 | 651 | |
651 | 652 | DxGPUFrame frame = {}; |
... | ... | @@ -658,10 +659,9 @@ void CMutliSourceVideoProcess::algorthim_process() |
658 | 659 | int process_times = 0; |
659 | 660 | |
660 | 661 | long long last_time = get_cur_time_ms(); |
661 | - while (!m_bProcessExit) | |
662 | - { | |
663 | - if (licence_status <= -3) | |
664 | - { | |
662 | + while (!m_bProcessExit) { | |
663 | + | |
664 | + if (licence_status <= -3) { | |
665 | 665 | printf("authority failed!\n"); |
666 | 666 | break; |
667 | 667 | } |
... | ... | @@ -674,106 +674,31 @@ void CMutliSourceVideoProcess::algorthim_process() |
674 | 674 | |
675 | 675 | taskCondVar.notify_all(); |
676 | 676 | |
677 | - int curTaskSize = tasks.size(); | |
678 | - | |
679 | - count = 0; | |
680 | - static int ncount = 0; | |
681 | - map<int, vector<int>> finishTaskDeleteObj; | |
682 | - int curPlayTaskCount = 0; | |
683 | - | |
684 | - for (int i = 0; i < curTaskSize; i++) | |
685 | - { | |
686 | - if ((tasks[i].taskState == PLAY || tasks[i].taskState == DECODEERROR)) | |
687 | - { | |
688 | - if (!tasks[i].taskTcuvid->DxDecoderIsRun()) | |
689 | - { | |
690 | - cudaError_t cudaStatus = cudaGetLastError(); | |
691 | - if (cudaStatus != cudaSuccess) { | |
692 | - printf("begin finish last error: %s\n", cudaGetErrorString(cudaStatus)); | |
693 | - } | |
694 | - | |
695 | - tasks[i].taskState = FINISH; | |
696 | - | |
697 | - FinishTask(tasks[i].taskID); | |
698 | - | |
699 | - tasks[i].taskTcuvid->DxCloseDecoder(); | |
700 | - delete tasks[i].taskTcuvid; | |
701 | - tasks[i].taskTcuvid = NULL; | |
702 | 677 | |
703 | - m_snaphot_helper.waitSaveAnalysisInfo(tasks[i].taskID); | |
704 | - | |
705 | - // Notify the upper layer via callback that the task has finished | |
706 | - if (taskFinishCallbackFunc != nullptr) | |
707 | - { | |
708 | - std::lock_guard<std::mutex> l(m_snaphot_helper.callback_tx); | |
709 | - taskFinishCallbackFunc(tasks[i].taskID); | |
710 | - } | |
711 | - | |
712 | - TaskinPlay--; | |
713 | - } | |
714 | - } | |
715 | - | |
716 | - if (tasks[i].taskState == FINISH) | |
717 | - count++; | |
718 | - } | |
719 | - | |
720 | - // all tasks are in the FINISH state | |
721 | - if (count >= tasks.size()) //have no decode video, break | |
722 | - { | |
723 | - { | |
724 | - std::lock_guard<std::mutex> l(taskMutex); | |
725 | - // check whether new tasks have been added to the pending-operation queue | |
726 | - if (HasNewTask()) | |
727 | - { | |
728 | - continue; | |
729 | - } | |
730 | - else | |
731 | - { | |
732 | - continue; | |
733 | - } | |
678 | + for (auto it=m_taskMap.begin(); it!=m_taskMap.end(); ) { | |
679 | + Task& task = it->second; | |
680 | + if (!task.taskTcuvid->DxDecoderIsRun() && task.taskTcuvid->DxFrameIsEmpty()) { | |
681 | + FinishTask(task.taskID); | |
682 | + it = m_taskMap.erase(it); | |
683 | + } else { | |
684 | + ++it; | |
734 | 685 | } |
735 | 686 | } |
736 | - | |
737 | - // no task is currently in PLAY state; loop and wait | |
738 | - curPlayTaskCount = TaskinPlay; | |
739 | - if (curPlayTaskCount <= 0) { | |
740 | - Sleep(30); | |
741 | - continue; | |
742 | - } | |
743 | 687 | |
744 | - k.clear(); | |
745 | - TaskinPlayID.clear(); | |
746 | - | |
747 | - // fetch decoded data | |
748 | - getdata_flag: | |
749 | - for (int i = 0; i < curTaskSize; i++) | |
750 | - { | |
751 | - if (k.find(i) == k.end() && tasks[i].taskState == PLAY && tasks[i].taskTcuvid->DxDecoderIsRun()) | |
752 | - { | |
753 | - if(tasks[i].taskTcuvid->DxLockFrame(tasks[i].task_algorithm_data) == 0) { | |
754 | - k.insert(i); | |
755 | - TaskinPlayID.insert(tasks[i].taskID); | |
756 | - } | |
757 | - } | |
758 | - else if (k.find(i) == k.end() && tasks[i].taskState == PLAY && !tasks[i].taskTcuvid->DxDecoderIsRun()) | |
759 | - { | |
760 | - tasks[i].taskState = DECODEERROR; | |
761 | - curPlayTaskCount--; | |
762 | - m_vptProcess.FinishTaskTracker(tasks[i].taskID); | |
688 | + vector<DxGPUFrame> vec_dxGpuFrame; | |
689 | + for (auto it=m_taskMap.begin(); it!=m_taskMap.end(); ++it) { | |
690 | + Task& task = it->second; | |
691 | + DxGPUFrame dxFrame = task.taskTcuvid->DxGetFrame(); | |
692 | + if(nullptr == dxFrame.frame) { | |
693 | + continue; | |
763 | 694 | } |
764 | - } | |
765 | - | |
766 | - if (curPlayTaskCount <= 0) { | |
767 | - Sleep(30); | |
768 | - continue; | |
695 | + vec_dxGpuFrame.push_back(dxFrame); | |
769 | 696 | } |
770 | 697 | |
771 | - // if decoded data has not been obtained for every playing channel, wait and retry | |
772 | - if (k.size() < curPlayTaskCount) | |
773 | - { | |
774 | - std::this_thread::sleep_for(std::chrono::milliseconds(1)); | |
775 | - goto getdata_flag; | |
698 | + if (vec_dxGpuFrame.size() <= 0) { | |
699 | + continue; | |
776 | 700 | } |
701 | + | |
777 | 702 | |
778 | 703 | #ifdef LOG_INFO2 |
779 | 704 | long long gather_data_time = get_cur_time_ms(); |
... | ... | @@ -781,49 +706,23 @@ void CMutliSourceVideoProcess::algorthim_process() |
781 | 706 | #endif |
782 | 707 | |
783 | 708 | cudaDeviceSynchronize(); |
784 | - | |
785 | - int cur_batch_size = 0; | |
786 | - cur_batch_size = section_batch_size; | |
787 | - | |
788 | - if (0) | |
789 | - { | |
790 | - if (section_batch_size == 20) | |
791 | - cur_batch_size = section_batch_size; | |
792 | - else | |
793 | - { | |
794 | - if (curPlayTaskCount <= 2 * section_batch_size) | |
795 | - cur_batch_size = section_batch_size; | |
796 | - else if (curPlayTaskCount >= 2 * MAX_BATCH) | |
797 | - cur_batch_size = MAX_BATCH; | |
798 | - else | |
799 | - cur_batch_size = curPlayTaskCount / 2 + (curPlayTaskCount % 2); | |
800 | - } | |
801 | - } | |
802 | 709 | |
803 | 710 | long long start_time_vpt = get_cur_time_ms(); |
804 | 711 | |
805 | - set<int>::iterator iter = TaskinPlayID.begin(); | |
806 | - | |
807 | - int task_in_play_size = TaskinPlayID.size(); | |
808 | - vector<vector<int>> deleteObjectID(task_in_play_size); | |
809 | - vector<sy_img> batch_img(task_in_play_size); | |
810 | - vector<vector<VPT_Result>> unUsedResult(task_in_play_size); | |
811 | - vector<VPT_Result> VPTResult(task_in_play_size); | |
812 | - vector<unsigned long long> vec_frameIndex; | |
813 | - | |
814 | - for (size_t i = 0; i < TaskinPlayID.size(); i++) { | |
815 | - DxGPUFrame task_algorithm_data = tasks[*iter].task_algorithm_data; | |
712 | + vector<DataInfo> vec_data; | |
713 | + for (auto task_algorithm_data: vec_dxGpuFrame) { | |
816 | 714 | int w = task_algorithm_data.width; |
817 | 715 | int h = task_algorithm_data.height; |
818 | - int npitch = task_algorithm_data.size; | |
819 | - | |
820 | - batch_img[i].set_data(w, h, 3, (unsigned char *)task_algorithm_data.frame); | |
821 | - vec_frameIndex.push_back(task_algorithm_data.timestamp); | |
822 | 716 | |
823 | - iter++; | |
717 | + DataInfo data_info; | |
718 | + data_info.img.set_data(w, h, 3, (unsigned char *)task_algorithm_data.frame); | |
719 | + data_info.task_id = atoi(task_algorithm_data.dec_name.c_str()); | |
720 | + data_info.frameIndex = task_algorithm_data.timestamp; | |
721 | + | |
722 | + vec_data.push_back(data_info); | |
824 | 723 | } |
825 | - | |
826 | - int flag = m_vptProcess.process(batch_img.data(), batch_img.size(), vec_frameIndex, VPTResult, deleteObjectID, unUsedResult); | |
724 | + | |
725 | + vector<VPTProcessResult> vec_vptResult = m_vptProcess.process(vec_data); | |
827 | 726 | |
828 | 727 | #ifdef LOG_INFO2 |
829 | 728 | std::cout << "VPT_Process_GPU time_using: " << get_cur_time_ms() - start_time_vpt << std::endl; |
... | ... | @@ -831,50 +730,49 @@ void CMutliSourceVideoProcess::algorthim_process() |
831 | 730 | |
832 | 731 | long long result_analysis_time = get_cur_time_ms(); |
833 | 732 | |
834 | - iter = TaskinPlayID.begin(); | |
835 | - for (int i = 0; i < curPlayTaskCount; i++) | |
733 | + for (int i = 0; i < vec_vptResult.size(); i++) | |
836 | 734 | { |
837 | - Task task = tasks[*iter]; | |
838 | - task.taskFrameCount = task.task_algorithm_data.timestamp; | |
839 | - // if no object is detected in this channel's current frame, return an object with ID -1 to indicate that | |
840 | - if (VPTResult[i].objCount == 0) | |
841 | - { | |
842 | - callTaskObjInfoCallbackFunc(0, nullptr, task.taskFrameCount, *iter); | |
843 | - } | |
735 | + int task_id = vec_vptResult[i].task_id; | |
736 | + Task& task = m_taskMap[task_id]; | |
737 | + | |
738 | + DxGPUFrame& task_algorithm_data = vec_dxGpuFrame[i]; | |
739 | + | |
740 | + task.taskFrameCount = task_algorithm_data.timestamp; | |
844 | 741 | |
845 | 742 | // Real-time view module: if live viewing is active, copy the current frame back to host memory |
846 | 743 | bool view = false; |
847 | - int frameHeight = task.task_algorithm_data.height; | |
848 | - int frameWidth = task.task_algorithm_data.width; | |
744 | + int frameHeight = task_algorithm_data.height; | |
745 | + int frameWidth = task_algorithm_data.width; | |
849 | 746 | |
850 | - if (*iter == viewTaskID) | |
851 | - { | |
852 | - cudaMemcpy(task.frameImage.data, task.task_algorithm_data.frame, 3 * frameWidth * frameHeight * sizeof(unsigned char), cudaMemcpyDeviceToHost); | |
747 | + if (task_id == viewTaskID) { | |
748 | + cudaMemcpy(task.frameImage.data, task_algorithm_data.frame, 3 * frameWidth * frameHeight * sizeof(unsigned char), cudaMemcpyDeviceToHost); | |
853 | 749 | view = true; |
854 | 750 | } |
855 | 751 | |
856 | 752 | // tracking-only (skipped) frames also need to return their tracking results |
857 | - if (task.taskLastFrameCount > 0) | |
858 | - { | |
859 | - vector<VPT_Result> OneUnUsedResult = unUsedResult[i]; | |
860 | - if (OneUnUsedResult.size() == 0) | |
861 | - { | |
862 | - callTaskObjInfoCallbackFunc(0, nullptr, task.taskLastFrameCount + 1, *iter); | |
753 | + if (task.taskLastFrameCount > 0) { | |
754 | + | |
755 | + VPT_Result default_vptResult; | |
756 | + default_vptResult.objCount = 0; | |
757 | + | |
758 | + vector<VPT_Result> OneUnUsedResult = vec_vptResult[i].vecUnUsedResult; | |
759 | + if (OneUnUsedResult.size() == 0) { | |
760 | + callTaskObjInfoCallbackFunc(default_vptResult, task.taskLastFrameCount + 1, task_id); | |
863 | 761 | } |
864 | - for (int k = 0; k < OneUnUsedResult.size(); ++k) | |
865 | - { | |
866 | - if (OneUnUsedResult[k].objCount == 0) | |
867 | - { | |
868 | - callTaskObjInfoCallbackFunc(0, nullptr, task.taskLastFrameCount + k + 1, *iter); | |
869 | - } | |
870 | - else | |
871 | - { | |
872 | - //cout << "OneUnUsedResult.size = " << OneUnUsedResult.size() << " k=" << k << " OneUnUsedResult[k].objCount = " << OneUnUsedResult[k].objCount << endl; | |
873 | - callTaskObjInfoCallbackFunc(OneUnUsedResult[k].objCount, OneUnUsedResult[k].obj, task.taskLastFrameCount + k + 1, *iter); | |
762 | + | |
763 | + for (int k = 0; k < OneUnUsedResult.size(); ++k) { | |
764 | + if (OneUnUsedResult[k].objCount == 0) { | |
765 | + callTaskObjInfoCallbackFunc(default_vptResult, task.taskLastFrameCount + k + 1, task_id); | |
766 | + } else { | |
767 | + callTaskObjInfoCallbackFunc(OneUnUsedResult[k], task.taskLastFrameCount + k + 1, task_id); | |
874 | 768 | } |
875 | 769 | } |
876 | 770 | } |
877 | - task.taskLastFrameCount = task.taskFrameCount; | |
771 | + | |
772 | + VPT_Result vptResult = vec_vptResult[i].vptResult; | |
773 | + callTaskObjInfoCallbackFunc(vptResult, task_algorithm_data.timestamp, task_id); | |
774 | + | |
775 | + task.taskLastFrameCount = task_algorithm_data.timestamp; | |
878 | 776 | |
879 | 777 | unsigned char* snapshot_image_data[MAX_OBJ_COUNT]{}; |
880 | 778 | int snapshot_left[MAX_OBJ_COUNT]{}; |
... | ... | @@ -888,13 +786,11 @@ void CMutliSourceVideoProcess::algorthim_process() |
888 | 786 | vector<int> human_idx; // records which entries in the snapshot arrays are faces |
889 | 787 | vector<OBJ_KEY> human_obj_keys; |
890 | 788 | |
891 | - callTaskObjInfoCallbackFunc(VPTResult[i].objCount, VPTResult[i].obj, task.taskFrameCount, *iter); | |
892 | - | |
893 | - for (int c = 0; c < VPTResult[i].objCount; c++) | |
789 | + for (int c = 0; c < vptResult.objCount; c++) | |
894 | 790 | { |
895 | - VPT_ObjInfo obj = VPTResult[i].obj[c]; | |
791 | + VPT_ObjInfo obj = vptResult.obj[c]; | |
896 | 792 | |
897 | - OBJ_KEY newObj = { (*iter), obj.id }; | |
793 | + OBJ_KEY newObj = { task_id, obj.id }; | |
898 | 794 | |
899 | 795 | // Real-time view module: draw object boxes onto the frame |
900 | 796 | if (view) |
... | ... | @@ -908,9 +804,7 @@ void CMutliSourceVideoProcess::algorthim_process() |
908 | 804 | int p1 = obj.left - 10 > 0 ? obj.left - 10 : 0; |
909 | 805 | int p2 = obj.top - 15 > 0 ? obj.top - 15 : 0; |
910 | 806 | |
911 | - cv::rectangle(task.frameImage, Rect(obj.left, obj.top, | |
912 | - obj.right - obj.left, | |
913 | - obj.bottom - obj.top), Scalar(158, 52, 254), 3, 1, 0); | |
807 | + cv::rectangle(task.frameImage, Rect(obj.left, obj.top, obj.right - obj.left, obj.bottom - obj.top), Scalar(158, 52, 254), 3, 1, 0); | |
914 | 808 | #ifdef _MSC_VER |
915 | 809 | string resss = "" + to_string(index) + " " + ObjTypes[index]; |
916 | 810 | putTextZH(task.frameImage, resss.c_str(), { p1, p2 }, Scalar(20, 255, 20), 14, "Arial"); |
... | ... | @@ -920,7 +814,17 @@ void CMutliSourceVideoProcess::algorthim_process() |
920 | 814 | #endif |
921 | 815 | } |
922 | 816 | |
923 | - CropInfo crop_info = m_snaphot_helper.cacheSnapShotInfo(newObj, obj, task); | |
817 | + bool bCacheSrc = false; | |
818 | + if (task.folderName != NULL){ | |
819 | + bCacheSrc = true; | |
820 | + } | |
821 | + | |
822 | + bool bCacheLittle = false; | |
823 | + if (task.folderNameLittle != NULL) { | |
824 | + bCacheLittle = true; | |
825 | + } | |
826 | + | |
827 | + CropInfo crop_info = m_snaphot_helper.cacheSnapShotInfo(newObj, obj, task.task_min_boxsize, bCacheSrc, bCacheLittle, task_algorithm_data); | |
924 | 828 | if(crop_info.bCrop){ |
925 | 829 | snapshot_image_data[copy_obj_count] = crop_info.snapshot_image_data; |
926 | 830 | snapshot_left[copy_obj_count] = crop_info.snapshot_left; |
... | ... | @@ -943,7 +847,7 @@ void CMutliSourceVideoProcess::algorthim_process() |
943 | 847 | // if the number of snapshots awaiting cropping is not zero, crop them in batch |
944 | 848 | if (0 != copy_obj_count) |
945 | 849 | { |
946 | - PartMemResizeBatch((unsigned char*)task.task_algorithm_data.frame, frameWidth, frameHeight, | |
850 | + PartMemResizeBatch((unsigned char*)task_algorithm_data.frame, frameWidth, frameHeight, | |
947 | 851 | snapshot_image_data, copy_obj_count, snapshot_left, snapshot_top, snapshot_right, snapshot_bottom, snapshot_dst_width, snapshot_dst_height, 0, 0, 0, 1, 1, 1); |
948 | 852 | |
949 | 853 | // newly added face-detection module: run face detection on pedestrian snapshots and pick the best face box |
... | ... | @@ -961,21 +865,27 @@ void CMutliSourceVideoProcess::algorthim_process() |
961 | 865 | ori_points[idx].y_ = (snapshot_bottom[ii] - snapshot_top[ii]); |
962 | 866 | } |
963 | 867 | |
964 | - m_snaphot_helper.cacheFaceSnapshotInfo(human_img, human_count, ori_points, human_idx, human_obj_keys, snapshot_left, snapshot_top, task); | |
868 | + m_snaphot_helper.cacheFaceSnapshotInfo(human_img, human_count, ori_points, human_idx, human_obj_keys, snapshot_left, snapshot_top, task_algorithm_data); | |
965 | 869 | } |
966 | 870 | } |
967 | 871 | |
968 | 872 | // real-time view: draw object trajectories and return the frame via callback |
969 | - if (view) | |
970 | - { | |
971 | - m_vptProcess.DrawTracker(*iter, &task.frameImage); | |
873 | + if (view) { | |
874 | + m_vptProcess.DrawTracker(task_id, &task.frameImage); | |
972 | 875 | if (task.taskRealTimeCallbackFunc != nullptr) |
973 | 876 | task.taskRealTimeCallbackFunc(task.frameImage.data, task.frameImage.rows, task.frameImage.cols); |
974 | 877 | } |
975 | - // tasks[*iter].taskFrameCount += skip_frame_; | |
976 | - iter++; | |
977 | 878 | } |
978 | 879 | |
880 | + { | |
881 | + cudaError_t cudaStatus = cudaGetLastError(); | |
882 | + if (cudaStatus != cudaSuccess) { | |
883 | + LOG_ERROR("result last error: {}", cudaGetErrorString(cudaStatus)); | |
884 | + } | |
885 | + } | |
886 | + | |
887 | + | |
888 | + | |
979 | 889 | #ifdef LOG_INFO2 |
980 | 890 | long long result_analysis_time2 = get_cur_time_ms(); |
981 | 891 | cout << "result_analysis time_using:" << result_analysis_time2 - result_analysis_time << endl; |
... | ... | @@ -983,21 +893,28 @@ void CMutliSourceVideoProcess::algorthim_process() |
983 | 893 | |
984 | 894 | long long second_analysis_time = get_cur_time_ms(); |
985 | 895 | |
986 | - auto task_iter = TaskinPlayID.begin(); | |
987 | - for (int i = 0; i < curPlayTaskCount; i++) | |
896 | + for (int i = 0; i < vec_vptResult.size(); i++) | |
988 | 897 | { |
989 | - for (int j = 0; j < deleteObjectID[i].size(); j++) | |
898 | + vector<int>& vecDeleteObj = vec_vptResult[i].vecDeleteObj; | |
899 | + for (int j = 0; j < vecDeleteObj.size(); j++) | |
990 | 900 | { |
991 | - OBJ_KEY deleteObj = { *task_iter, deleteObjectID[i][j] }; | |
901 | + OBJ_KEY deleteObj = { vec_vptResult[i].task_id, vecDeleteObj[j] }; | |
992 | 902 | m_snaphot_helper.SaveResultInFile(deleteObj); |
993 | 903 | } |
904 | + } | |
905 | + | |
994 | 906 | |
995 | - task_iter++; | |
907 | + for (auto task_algorithm_data: vec_dxGpuFrame) { | |
908 | + cudaFree(task_algorithm_data.frame); | |
909 | + task_algorithm_data.frame = nullptr; | |
996 | 910 | } |
997 | 911 | |
998 | - for (auto task_id: TaskinPlayID) { | |
999 | - cudaFree(tasks[task_id].task_algorithm_data.frame); | |
1000 | - tasks[task_id].task_algorithm_data.frame = nullptr; | |
912 | + | |
913 | + { | |
914 | + cudaError_t cudaStatus = cudaGetLastError(); | |
915 | + if (cudaStatus != cudaSuccess) { | |
916 | + LOG_ERROR("cudaFree last error: {}", cudaGetErrorString(cudaStatus)); | |
917 | + } | |
1001 | 918 | } |
1002 | 919 | |
1003 | 920 | m_snaphot_helper.object_attri_analysis(); |
... | ... | @@ -1012,7 +929,6 @@ void CMutliSourceVideoProcess::algorthim_process() |
1012 | 929 | #endif |
1013 | 930 | |
1014 | 931 | ++total_count; |
1015 | - ++ncount; | |
1016 | 932 | |
1017 | 933 | #ifdef LOG_INFO2 |
1018 | 934 | last_time = get_cur_time_ms(); |
... | ... | @@ -1029,12 +945,12 @@ void CMutliSourceVideoProcess::algorthim_process() |
1029 | 945 | |
1030 | 946 | int CMutliSourceVideoProcess::GetRuningNb() { |
1031 | 947 | int no = 0; |
1032 | - for(int i=0; i < tasks.size(); i++){ | |
1033 | - if(tasks[i].taskState == PLAY){ | |
948 | + for (auto it=m_taskMap.begin(); it!=m_taskMap.end(); ++it) { | |
949 | + Task& task = it->second; | |
950 | + if(task.taskState == PLAY){ | |
1034 | 951 | no ++; |
1035 | 952 | } |
1036 | 953 | } |
1037 | - | |
1038 | 954 | return no; |
1039 | 955 | } |
1040 | 956 | ... | ... |
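Note: with this commit the main processing loop no longer juggles index sets (k, TaskinPlayID) or the getdata_flag retry label; per iteration it (1) finishes and erases any task whose decoder has stopped and whose frame queue is empty, (2) pulls at most one frame per remaining task via DxGetFrame(), and (3) hands the collected frames to VPTProcess::process() as DataInfo records keyed by task ID. A condensed sketch of that flow, using only types and calls that appear in the hunks above (snapshot handling and logging omitted):

    // inside while (!m_bProcessExit)
    for (auto it = m_taskMap.begin(); it != m_taskMap.end(); ) {
        Task& task = it->second;
        if (!task.taskTcuvid->DxDecoderIsRun() && task.taskTcuvid->DxFrameIsEmpty()) {
            FinishTask(task.taskID);                 // closes the decoder, fires the finish callback
            it = m_taskMap.erase(it);
        } else {
            ++it;
        }
    }

    vector<DataInfo> vec_data;
    for (auto& kv : m_taskMap) {
        DxGPUFrame f = kv.second.taskTcuvid->DxGetFrame();
        if (nullptr == f.frame) continue;            // nothing decoded for this task yet
        DataInfo d;
        d.img.set_data(f.width, f.height, 3, (unsigned char*)f.frame);
        d.task_id    = atoi(f.dec_name.c_str());     // decoder name carries the task ID
        d.frameIndex = f.timestamp;
        vec_data.push_back(d);
    }
    if (vec_data.empty()) continue;

    vector<VPTProcessResult> vec_vptResult = m_vptProcess.process(vec_data);
    // ... per-result callbacks and snapshot caching, then cudaFree() of every pulled frame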
vehicle_structure_platform.git0708-3080-trt-face/src/VPT/MutliSourceVideoProcess.h
... | ... | @@ -13,6 +13,7 @@ |
13 | 13 | #include "VPTProcess.h" |
14 | 14 | #include <queue> |
15 | 15 | #include <set> |
16 | +#include <map> | |
16 | 17 | #include "common.h" |
17 | 18 | #include "../FFNvDecoder/ImageSaveGPU.h" |
18 | 19 | |
... | ... | @@ -112,12 +113,9 @@ public: |
112 | 113 | |
113 | 114 | int get_task_progress(int taskid, double & progress); |
114 | 115 | void OperatorTask(); |
115 | - bool HasNewTask() { | |
116 | - return !TaskOperatorQ.empty(); | |
117 | - } | |
118 | 116 | void AddOperator(int taskID, int taskOper); |
119 | 117 | int AddOperator(task_param tparam); |
120 | - void callTaskObjInfoCallbackFunc(int objCount, VPT_ObjInfo *obj, int taskFrameCount, int taskId); | |
118 | + void callTaskObjInfoCallbackFunc(const VPT_Result& vptResult, int taskFrameCount, int taskId); | |
121 | 119 | bool AddTask(task_param tparam); |
122 | 120 | void PauseTask(const int taskID); |
123 | 121 | void RestartTask(const int taskID); |
... | ... | @@ -125,7 +123,6 @@ public: |
125 | 123 | void ViewTask(const int taskID); |
126 | 124 | void FinishViewTask(); |
127 | 125 | int FinishProcessThread(); |
128 | - void FinishDecode(const int taskID); | |
129 | 126 | |
130 | 127 | int GetRuningNb(); |
131 | 128 | |
... | ... | @@ -137,7 +134,7 @@ public: |
137 | 134 | int thrd_status; |
138 | 135 | int mgpuid; |
139 | 136 | int skip_frame_ {5}; // frame-skip control parameter |
140 | - vector<Task> tasks; | |
137 | + map<int, Task> m_taskMap; | |
141 | 138 | int AddTaskSucFlag; // 0: initial state, 1: task added successfully, -1: failed to add task |
142 | 139 | int TaskinPlay; |
143 | 140 | int TotalTask; | ... | ... |
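Note: replacing vector<Task> tasks with map<int, Task> m_taskMap makes every task operation a lookup by task ID instead of a linear scan (the removed FinishDecode() even mixed the loop index with the task ID when indexing tasks[taskID]). The access pattern now used throughout the .cpp above is simply:

    auto it = m_taskMap.find(taskID);
    if (it == m_taskMap.end()) {
        return;                    // unknown or already-finished task
    }
    Task& task = it->second;       // reference stays valid until the entry is erased
    // ... mutate task, or m_taskMap.erase(it) after FinishTask(taskID) has run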
vehicle_structure_platform.git0708-3080-trt-face/src/VPT/VPTProcess.cpp
... | ... | @@ -25,6 +25,13 @@ |
25 | 25 | // #include "../../model/vptModeTrt/ga_trt_fpn_vpt_calibrator.h" |
26 | 26 | |
27 | 27 | |
28 | +struct DetectResultInfo { | |
29 | + vector< vector <float>> det_result; | |
30 | + int task_id; | |
31 | + unsigned long long ts; | |
32 | +}; | |
33 | + | |
34 | + | |
28 | 35 | |
29 | 36 | static long long get_cur_time_ms(){ |
30 | 37 | chrono::time_point<chrono::system_clock, chrono::milliseconds> tpMicro |
... | ... | @@ -193,6 +200,116 @@ void VPTProcess::check_VPT_Result(VPT_Result & vResult) { |
193 | 200 | vResult.objCount = index; |
194 | 201 | } |
195 | 202 | |
203 | +vector<VPTProcessResult> VPTProcess::process(vector<DataInfo> vec_data) { | |
204 | + | |
205 | + vector<VPTProcessResult> vec_result; | |
206 | + | |
207 | + if(nullptr == det_handle){ | |
208 | + return vec_result; | |
209 | + } | |
210 | + | |
211 | + long long t1 = get_cur_time_ms(); | |
212 | + | |
213 | + int batchsize = vec_data.size(); | |
214 | + | |
215 | + vector<DetectResultInfo> vec_detectResult; | |
216 | + | |
217 | + int cycle_time = batchsize / m_max_batch_size; | |
218 | + cycle_time = (batchsize % m_max_batch_size) == 0 ? cycle_time : (cycle_time + 1) ; | |
219 | + | |
220 | + for (int i = 0; i < cycle_time; i++) { | |
221 | + int start_index = i * m_max_batch_size; | |
222 | + int end_index = start_index + m_max_batch_size; | |
223 | + if(end_index >= batchsize) { | |
224 | + end_index = batchsize; | |
225 | + } | |
226 | + | |
227 | + vector<sy_img> vec_img; | |
228 | + vector<int> vec_task_id; | |
229 | + vector<unsigned long long> vec_ts; | |
230 | + for (int j = start_index; j < end_index; j++) { | |
231 | + vec_img.push_back(vec_data[j].img); | |
232 | + vec_task_id.push_back(vec_data[j].task_id); | |
233 | + vec_ts.push_back(vec_data[j].frameIndex); | |
234 | + } | |
235 | + | |
236 | + ctools_result *detresult; | |
237 | + int res_status = ctools_process(det_handle, vec_img.data(), vec_img.size(), &detresult); | |
238 | + | |
239 | + for (size_t b = 0; b < vec_img.size(); b++) { | |
240 | + ctools_result &cur_result = detresult[b]; | |
241 | + | |
242 | + DetectResultInfo result_info; | |
243 | + result_info.task_id = vec_task_id[b]; | |
244 | + result_info.ts = vec_ts[b]; | |
245 | + for (int c = 0; c < cur_result.obj_count_ && c < MAX_OBJ_COUNT; c++) | |
246 | + { | |
247 | + float x1 = cur_result.obj_results_[c].data_[2]; | |
248 | + float y1 = cur_result.obj_results_[c].data_[3]; | |
249 | + float x2 = cur_result.obj_results_[c].data_[4]; | |
250 | + float y2 = cur_result.obj_results_[c].data_[5]; | |
251 | + | |
252 | + float class_id = cur_result.obj_results_[c].data_[0]; | |
253 | + float score = cur_result.obj_results_[c].data_[1]; | |
254 | + | |
255 | + if (score >= THRESHOLD) | |
256 | + { | |
257 | + vector <float> obj; | |
258 | + | |
259 | + obj.push_back(x1); | |
260 | + obj.push_back(y1); | |
261 | + obj.push_back(x2); | |
262 | + obj.push_back(y2); | |
263 | + obj.push_back(score); | |
264 | + obj.push_back(class_id); | |
265 | + // detectResult[real_index].push_back(obj); | |
266 | + result_info.det_result.push_back(obj); | |
267 | + } | |
268 | + } | |
269 | + | |
270 | + vec_detectResult.push_back(result_info); | |
271 | + } | |
272 | + } | |
273 | + | |
274 | + for (int i = 0; i < vec_detectResult.size(); i++) { | |
275 | + DetectResultInfo& det_result_info = vec_detectResult[i]; | |
276 | + TaskTracker& task_tracker = m_taskTrackerMap[det_result_info.task_id]; | |
277 | + | |
278 | + // TaskTracker& task_tracker = tools->taskTrackers[i]; | |
279 | + if (!task_tracker.tracker.GetState()) { | |
280 | + continue; | |
281 | + } | |
282 | + | |
283 | + VPTProcessResult oneResult; | |
284 | + oneResult.task_id = det_result_info.task_id; | |
285 | + if (task_tracker.lastFrameIndex > 0) { | |
286 | + // not the first frame | |
287 | + int update_times = det_result_info.ts - task_tracker.lastFrameIndex - 1; | |
288 | + if (update_times < 0) { | |
289 | + cout << "FrameIndex error !! lastFrameIndex= " << task_tracker.lastFrameIndex << " cur_frameindex = " << det_result_info.ts << endl; | |
290 | + } | |
291 | + | |
292 | + for (int j = 0; j < update_times; j++) { // update the tracker for skipped frames that had no detection run | |
293 | + VPT_Result unresult; | |
294 | + unresult.objCount = task_tracker.tracker.update(task_tracker.ratioWidth, task_tracker.ratioHeight, false, task_tracker.lastDetectResult, unresult.obj, task_tracker.lastDeleteObjectID); | |
295 | + check_VPT_Result(unresult); | |
296 | + oneResult.vecUnUsedResult.push_back(unresult); | |
297 | + } | |
298 | + } | |
299 | + oneResult.vptResult.objCount = task_tracker.tracker.update(task_tracker.ratioWidth, task_tracker.ratioHeight, true, det_result_info.det_result, oneResult.vptResult.obj, oneResult.vecDeleteObj); | |
300 | + | |
301 | + check_VPT_Result(oneResult.vptResult); | |
302 | + | |
303 | + task_tracker.lastDetectResult = det_result_info.det_result; | |
304 | + task_tracker.lastDeleteObjectID = oneResult.vecDeleteObj; | |
305 | + task_tracker.lastFrameIndex = det_result_info.ts; | |
306 | + | |
307 | + oneResult.ts = det_result_info.ts; | |
308 | + | |
309 | + vec_result.push_back(oneResult); | |
310 | + } | |
311 | + return vec_result; | |
312 | +} | |
313 | + | |
196 | 313 | int VPTProcess::process(sy_img * batch_img, int batchsize, vector<unsigned long long> vec_frameIndex, vector<VPT_Result>& result, vector<vector<int>>& deleteObjectID, vector<vector<VPT_Result>>& unUsedResult) |
197 | 314 | { |
198 | 315 | if(nullptr == det_handle){ |
... | ... | @@ -252,7 +369,7 @@ int VPTProcess::process(sy_img * batch_img, int batchsize, vector<unsigned long |
252 | 369 | } |
253 | 370 | |
254 | 371 | for (int i = 0; i < batchsize; i++) { |
255 | - TaskTracker& task_tracker = tools->taskTrackers[i]; | |
372 | + TaskTracker& task_tracker = m_taskTrackerMap[i]; | |
256 | 373 | if (!task_tracker.tracker.GetState()) { |
257 | 374 | continue; |
258 | 375 | } |
... | ... | @@ -289,7 +406,7 @@ void VPTProcess::release() { |
289 | 406 | det_handle = NULL; |
290 | 407 | } |
291 | 408 | |
292 | - vector<TaskTracker>().swap(taskTrackers); | |
409 | + m_taskTrackerMap.clear(); | |
293 | 410 | } |
294 | 411 | |
295 | 412 | void VPTProcess::AddTaskTracker(const int taskID, const double rWidth, const double rHeight) |
... | ... | @@ -300,53 +417,45 @@ void VPTProcess::AddTaskTracker(const int taskID, const double rWidth, const dou |
300 | 417 | t.ratioHeight = rHeight; |
301 | 418 | t.lastFrameIndex = 0; |
302 | 419 | t.tracker.setYOLOv5(true); // must be true for YOLOv5, false for FPN |
303 | - taskTrackers.push_back(t); | |
420 | + | |
421 | + m_taskTrackerMap[taskID] = t; | |
304 | 422 | } |
305 | 423 | |
306 | 424 | void VPTProcess::FinishTaskTracker(const int taskID) |
307 | 425 | { |
308 | - for (int i = 0; i < taskTrackers.size(); i++) | |
309 | - { | |
310 | - if (taskTrackers[i].TaskID == taskID) | |
311 | - { | |
312 | - taskTrackers.erase(taskTrackers.begin() + i); | |
313 | - break; | |
314 | - } | |
426 | + if (m_taskTrackerMap.find(taskID) == m_taskTrackerMap.end()) { | |
427 | + return; | |
315 | 428 | } |
429 | + | |
430 | + m_taskTrackerMap.erase(taskID); | |
316 | 431 | } |
317 | 432 | |
318 | 433 | void VPTProcess::PauseTaskTracker(const int taskID) |
319 | 434 | { |
320 | - for (int i = 0; i < taskTrackers.size(); i++) | |
321 | - { | |
322 | - if (taskTrackers[i].TaskID == taskID) | |
323 | - { | |
324 | - taskTrackers[i].tracker.Pause(); | |
325 | - break; | |
326 | - } | |
435 | + if (m_taskTrackerMap.find(taskID) == m_taskTrackerMap.end()) { | |
436 | + return; | |
327 | 437 | } |
438 | + | |
439 | + TaskTracker& t = m_taskTrackerMap[taskID]; | |
440 | + t.tracker.Pause(); | |
328 | 441 | } |
329 | 442 | |
330 | 443 | void VPTProcess::RestartTaskTraker(const int taskID) |
331 | 444 | { |
332 | - for (int i = 0; i < taskTrackers.size(); i++) | |
333 | - { | |
334 | - if (taskTrackers[i].TaskID == taskID) | |
335 | - { | |
336 | - taskTrackers[i].tracker.ReSet(); | |
337 | - break; | |
338 | - } | |
445 | + if (m_taskTrackerMap.find(taskID) == m_taskTrackerMap.end()) { | |
446 | + return; | |
339 | 447 | } |
448 | + | |
449 | + TaskTracker& t = m_taskTrackerMap[taskID]; | |
450 | + t.tracker.ReSet(); | |
340 | 451 | } |
341 | 452 | |
342 | 453 | void VPTProcess::DrawTracker(const int taskID, cv::Mat *img) |
343 | 454 | { |
344 | - for (int i = 0; i < taskTrackers.size(); i++) | |
345 | - { | |
346 | - if (taskTrackers[i].TaskID == taskID) | |
347 | - { | |
348 | - taskTrackers[i].tracker.addTracker(img); | |
349 | - break; | |
350 | - } | |
455 | + if (m_taskTrackerMap.find(taskID) == m_taskTrackerMap.end()) { | |
456 | + return; | |
351 | 457 | } |
458 | + | |
459 | + TaskTracker& t = m_taskTrackerMap[taskID]; | |
460 | + t.tracker.addTracker(img); | |
352 | 461 | } |
353 | 462 | \ No newline at end of file | ... | ... |
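Note: the new process(vector<DataInfo>) overload splits the incoming frames into sub-batches of at most m_max_batch_size before each ctools_process() call. A stand-alone check of that splitting arithmetic; the batch sizes below are made-up example values, not taken from the commit:

    #include <cstdio>

    int main() {
        const int m_max_batch_size = 4;   // illustrative
        const int batchsize = 7;          // illustrative
        int cycle_time = batchsize / m_max_batch_size;
        cycle_time = (batchsize % m_max_batch_size) == 0 ? cycle_time : (cycle_time + 1);
        for (int i = 0; i < cycle_time; i++) {
            int start_index = i * m_max_batch_size;
            int end_index = start_index + m_max_batch_size;
            if (end_index >= batchsize) {
                end_index = batchsize;
            }
            std::printf("chunk %d: [%d, %d)\n", i, start_index, end_index);  // prints [0,4) then [4,7)
        }
        return 0;
    }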
vehicle_structure_platform.git0708-3080-trt-face/src/VPT/VPTProcess.h
... | ... | @@ -9,6 +9,7 @@ |
9 | 9 | #include <iostream> |
10 | 10 | #include "utools.h" |
11 | 11 | #include <vector> |
12 | +#include <map> | |
12 | 13 | #include "common.h" |
13 | 14 | |
14 | 15 | #include "./sort/Sort.h" //tracker |
... | ... | @@ -28,6 +29,20 @@ struct TaskTracker |
28 | 29 | vector<int> lastDeleteObjectID; |
29 | 30 | }; |
30 | 31 | |
32 | +struct DataInfo { | |
33 | + int task_id {-1}; | |
34 | + sy_img img; | |
35 | + unsigned long long frameIndex; | |
36 | +}; | |
37 | + | |
38 | +struct VPTProcessResult { | |
39 | + int task_id {-1}; | |
40 | + VPT_Result vptResult; | |
41 | + vector<int> vecDeleteObj; | |
42 | + vector<VPT_Result> vecUnUsedResult; | |
43 | + unsigned long long ts; | |
44 | +}; | |
45 | + | |
31 | 46 | |
32 | 47 | class VPTProcess |
33 | 48 | { |
... | ... | @@ -59,6 +74,8 @@ public: |
59 | 74 | *************************************************************************/ |
60 | 75 | int process(sy_img * batch_img, int batchsize, vector<unsigned long long> vec_frameIndex, vector<VPT_Result>& result, vector<vector<int>>& deleteObjectID, vector<vector<VPT_Result>>& unUsedResult); |
61 | 76 | |
77 | + vector<VPTProcessResult> process(vector<DataInfo> vec_data); | |
78 | + | |
62 | 79 | /************************************************************************* |
63 | 80 | * PURPOSE: release resources |
64 | 81 | * PARAM: |
... | ... | @@ -86,4 +103,5 @@ private: |
86 | 103 | |
87 | 104 | void* det_handle {nullptr}; |
88 | 105 | vector<TaskTracker> taskTrackers; |
106 | + map<int, TaskTracker> m_taskTrackerMap; | |
89 | 107 | }; | ... | ... |
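Note: the new overload returns one VPTProcessResult per tracked input frame instead of filling parallel output vectors, and each result carries its own task_id and timestamp. A hypothetical call site (initialization and GPU image filling omitted; only fields declared above are used):

    VPTProcess vpt;                      // assumed already initialized elsewhere
    vector<DataInfo> batch;

    DataInfo d;
    d.task_id    = 7;                    // illustrative task ID
    d.frameIndex = 1001;                 // decoder timestamp used as the frame index
    // d.img.set_data(w, h, 3, gpu_ptr); // GPU frame obtained from DxGetFrame()
    batch.push_back(d);

    vector<VPTProcessResult> out = vpt.process(batch);
    for (const VPTProcessResult& r : out) {
        // r.vptResult       : detections/tracks for frame r.ts of task r.task_id
        // r.vecUnUsedResult : tracker-only results for frames skipped since the last call
        // r.vecDeleteObj    : IDs of objects whose tracks ended on this frame
    }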
vehicle_structure_platform.git0708-3080-trt-face/src/VPT/common.h
vehicle_structure_platform.git0708-3080-trt-face/src/VPT/mvpt.cpp
... | ... | @@ -47,7 +47,6 @@ void finish_task(void *handle, int task_id) |
47 | 47 | CMutliSourceVideoProcess* tools = (CMutliSourceVideoProcess*)handle; |
48 | 48 | std::unique_lock<std::mutex> l(tools->taskMutex); |
49 | 49 | tools->AddOperator(task_id, 3); |
50 | - tools->FinishDecode(task_id); | |
51 | 50 | } |
52 | 51 | |
53 | 52 | ... | ... |
vehicle_structure_platform.git0708-3080-trt-face/src/VPT/snapshot_analysis/snapshot_helper.cpp
... | ... | @@ -2038,7 +2038,7 @@ int snapshot_helper::SaveResultInFile(OBJ_KEY deleteObj) |
2038 | 2038 | return 0; |
2039 | 2039 | } |
2040 | 2040 | |
2041 | -CropInfo snapshot_helper::cacheSnapShotInfo(OBJ_KEY newObj, VPT_ObjInfo obj, Task task) { | |
2041 | +CropInfo snapshot_helper::cacheSnapShotInfo(OBJ_KEY newObj, VPT_ObjInfo obj, sy_rect task_min_boxsize[DETECTTYPE], bool bCacheSrc, bool bCacheLittle, DxGPUFrame& dxGpuFrame) { | |
2042 | 2042 | |
2043 | 2043 | // update the snapshot for each object in turn |
2044 | 2044 | int boundary = 10; |
... | ... | @@ -2048,8 +2048,8 @@ CropInfo snapshot_helper::cacheSnapShotInfo(OBJ_KEY newObj, VPT_ObjInfo obj, Tas |
2048 | 2048 | int cur_real_height = (obj.bottom - obj.top); |
2049 | 2049 | int cur_real_index = obj.index; |
2050 | 2050 | |
2051 | - int frameHeight = task.task_algorithm_data.height; | |
2052 | - int frameWidth = task.task_algorithm_data.width; | |
2051 | + int frameHeight = dxGpuFrame.height; | |
2052 | + int frameWidth = dxGpuFrame.width; | |
2053 | 2053 | |
2054 | 2054 | int minDistance[EDGESIZE]; |
2055 | 2055 | minDistance[0] = minDistance[2] = 35; //left right |
... | ... | @@ -2060,7 +2060,7 @@ CropInfo snapshot_helper::cacheSnapShotInfo(OBJ_KEY newObj, VPT_ObjInfo obj, Tas |
2060 | 2060 | if (snapShotInfo.find(newObj) == snapShotInfo.end()) |
2061 | 2061 | { |
2062 | 2062 | //DxAppendLog(DxLOG_INFO, "30"); |
2063 | - if (LegalMinArea(cur_real_width, cur_real_height, task.task_min_boxsize[cur_real_index])) | |
2063 | + if (LegalMinArea(cur_real_width, cur_real_height, task_min_boxsize[cur_real_index])) | |
2064 | 2064 | { |
2065 | 2065 | //DxAppendLog(DxLOG_INFO, "31"); |
2066 | 2066 | //--------------------- save the snapshot's source video frame -----------------------------// |
... | ... | @@ -2069,7 +2069,7 @@ CropInfo snapshot_helper::cacheSnapShotInfo(OBJ_KEY newObj, VPT_ObjInfo obj, Tas |
2069 | 2069 | int top = max(0, (int)(obj.top - boundaryLittle)); |
2070 | 2070 | int right = min({ frameWidth - 1, (int)(obj.right + boundaryLittle) }); |
2071 | 2071 | int bottom = min({ frameHeight - 1, (int)(obj.bottom + boundaryLittle) }); |
2072 | - snapShotInfo[newObj].frameCount = task.taskFrameCount; | |
2072 | + snapShotInfo[newObj].frameCount = dxGpuFrame.timestamp; | |
2073 | 2073 | snapShotInfo[newObj].isupdate = true; |
2074 | 2074 | snapShotInfo[newObj].lost = 0; |
2075 | 2075 | |
... | ... | @@ -2092,9 +2092,9 @@ CropInfo snapshot_helper::cacheSnapShotInfo(OBJ_KEY newObj, VPT_ObjInfo obj, Tas |
2092 | 2092 | snapShotInfo[newObj].snapShotLittle.frame = NULL; |
2093 | 2093 | snapShotInfo[newObj].snapShot.frame = NULL; |
2094 | 2094 | |
2095 | - if (task.folderName != NULL){ | |
2096 | - FRAME_KEY frame_id = { newObj.videoID, task.taskFrameCount }; | |
2097 | - ImgSaveCache.add_frame(newObj, frame_id, task.task_algorithm_data); | |
2095 | + if (bCacheSrc) { | |
2096 | + FRAME_KEY frame_id = { newObj.videoID, dxGpuFrame.timestamp }; | |
2097 | + ImgSaveCache.insert_frame(newObj, frame_id, dxGpuFrame); | |
2098 | 2098 | |
2099 | 2099 | snapShotInfo[newObj].snapShot.height = frameHeight; |
2100 | 2100 | snapShotInfo[newObj].snapShot.width = frameWidth; |
... | ... | @@ -2107,8 +2107,7 @@ CropInfo snapshot_helper::cacheSnapShotInfo(OBJ_KEY newObj, VPT_ObjInfo obj, Tas |
2107 | 2107 | int vRight = min({ frameWidth - 1, obj.right + boundary }); |
2108 | 2108 | int vBottom = min({ frameHeight - 1, obj.bottom + boundary }); |
2109 | 2109 | |
2110 | - if (task.folderNameLittle != NULL) | |
2111 | - { | |
2110 | + if (bCacheLittle) { | |
2112 | 2111 | int cur_width = 0; |
2113 | 2112 | int cur_height = 0; |
2114 | 2113 | |
... | ... | @@ -2162,7 +2161,7 @@ CropInfo snapshot_helper::cacheSnapShotInfo(OBJ_KEY newObj, VPT_ObjInfo obj, Tas |
2162 | 2161 | //DxAppendLog(DxLOG_INFO, "36"); |
2163 | 2162 | bool updateShotInfo = false; |
2164 | 2163 | int oldIndex = snapShotInfo[newObj].index.index; |
2165 | - snapShotInfo[newObj].frameCount = task.taskFrameCount; | |
2164 | + snapShotInfo[newObj].frameCount = dxGpuFrame.timestamp; | |
2166 | 2165 | snapShotInfo[newObj].isupdate = true; |
2167 | 2166 | snapShotInfo[newObj].lost = 0; |
2168 | 2167 | |
... | ... | @@ -2192,7 +2191,7 @@ CropInfo snapshot_helper::cacheSnapShotInfo(OBJ_KEY newObj, VPT_ObjInfo obj, Tas |
2192 | 2191 | |
2193 | 2192 | if ((LegalArea(maxArea, snapShotInfo[newObj].lastArea, left, top, right, bottom) |
2194 | 2193 | && LegalPos(snapShotInfo[newObj].flags, left, top, right, bottom, frameHeight, frameWidth, minDistance) |
2195 | - && LegalMinArea(cur_real_width, cur_real_height, task.task_min_boxsize[cur_real_index])) || updateShotInfo) | |
2194 | + && LegalMinArea(cur_real_width, cur_real_height, task_min_boxsize[cur_real_index])) || updateShotInfo) | |
2196 | 2195 | { |
2197 | 2196 | //DxAppendLog(DxLOG_INFO, "37"); |
2198 | 2197 | int boundary_w = (obj.right - obj.left) * 0.1; |
... | ... | @@ -2209,10 +2208,9 @@ CropInfo snapshot_helper::cacheSnapShotInfo(OBJ_KEY newObj, VPT_ObjInfo obj, Tas |
2209 | 2208 | snapShotInfo[newObj].box.bottom = bottom; |
2210 | 2209 | //printf("ori: %d %d %d %d\n", left, top, right - left, bottom - top); |
2211 | 2210 | snapShotInfo[newObj].confidence = obj.confidence; |
2212 | - if (task.folderName != NULL) | |
2213 | - { | |
2214 | - FRAME_KEY frame_id = { newObj.videoID, task.taskFrameCount }; | |
2215 | - ImgSaveCache.add_frame(newObj, frame_id, task.task_algorithm_data); | |
2211 | + if (bCacheSrc) { | |
2212 | + FRAME_KEY frame_id = { newObj.videoID, dxGpuFrame.timestamp }; | |
2213 | + ImgSaveCache.insert_frame(newObj, frame_id, dxGpuFrame); | |
2216 | 2214 | } |
2217 | 2215 | |
2218 | 2216 | //--------------------- save the snapshot crop -----------------------------// |
... | ... | @@ -2220,8 +2218,7 @@ CropInfo snapshot_helper::cacheSnapShotInfo(OBJ_KEY newObj, VPT_ObjInfo obj, Tas |
2220 | 2218 | int vTop = max(0, obj.top - boundary_top); |
2221 | 2219 | int vRight = min(frameWidth - 1, obj.right + boundary_right); |
2222 | 2220 | int vBottom = min(frameHeight - 1, obj.bottom + boundary_bottom); |
2223 | - if (task.folderNameLittle != NULL) | |
2224 | - { | |
2221 | + if (bCacheLittle) { | |
2225 | 2222 | if (0 == snapShotInfo[newObj].index.index) |
2226 | 2223 | { |
2227 | 2224 | if (snapShotInfo[newObj].snapShotLittle.width != HP_WIDTH || snapShotInfo[newObj].snapShotLittle.height != HP_HEIGHT) |
... | ... | @@ -2247,7 +2244,7 @@ CropInfo snapshot_helper::cacheSnapShotInfo(OBJ_KEY newObj, VPT_ObjInfo obj, Tas |
2247 | 2244 | // { |
2248 | 2245 | // LOG_DEBUG("else , task_id:{} obj_id:{} index = {}", newObj.videoID, newObj.objID, snapShotInfo[newObj].index.index); |
2249 | 2246 | // } |
2250 | - // if (snapShotInfo[newObj].snapShotLittle.width != HCP_WIDTH || snapShotInfo[newObj].snapShotLittle.height != HCP_HEIGHT) | |
2247 | + if (snapShotInfo[newObj].snapShotLittle.width != HCP_WIDTH || snapShotInfo[newObj].snapShotLittle.height != HCP_HEIGHT) | |
2251 | 2248 | { |
2252 | 2249 | cudaFree(snapShotInfo[newObj].snapShotLittle.frame); //释放显存 |
2253 | 2250 | cudaMalloc((void**)&snapShotInfo[newObj].snapShotLittle.frame, 3 * HCP_WIDTH * HCP_HEIGHT * sizeof(unsigned char)); |
... | ... | @@ -2296,7 +2293,7 @@ CropInfo snapshot_helper::cacheSnapShotInfo(OBJ_KEY newObj, VPT_ObjInfo obj, Tas |
2296 | 2293 | snapShotInfo[newObj].snapShotLittle.width = vRight - vLeft; |
2297 | 2294 | |
2298 | 2295 | //printf("begin partMemCopy: %d %d %d %d %d %d\n", vLeft, vTop, vRight, vBottom, frameWidth, frameHeight); |
2299 | - partMemCopy((unsigned char*)task.task_algorithm_data.frame, frameWidth, frameHeight, | |
2296 | + partMemCopy((unsigned char*)dxGpuFrame.frame, frameWidth, frameHeight, | |
2300 | 2297 | (unsigned char*)snapShotInfo[newObj].snapShotLittle.frame, vLeft, vTop, vRight, vBottom); |
2301 | 2298 | } |
2302 | 2299 | } |
... | ... | @@ -2317,7 +2314,7 @@ int snapshot_helper::getIndexByKey(OBJ_KEY newObj) { |
2317 | 2314 | return -1; |
2318 | 2315 | } |
2319 | 2316 | |
2320 | -void snapshot_helper::cacheFaceSnapshotInfo(sy_img *human_img, int human_count, sy_point* ori_points, vector<int> human_idx, vector<OBJ_KEY> human_obj_keys, int snapshot_left[], int snapshot_top[], Task task) { | |
2317 | +void snapshot_helper::cacheFaceSnapshotInfo(sy_img *human_img, int human_count, sy_point* ori_points, vector<int> human_idx, vector<OBJ_KEY> human_obj_keys, int snapshot_left[], int snapshot_top[], DxGPUFrame& dxGpuFrame) { | |
2321 | 2318 | fd_result *face_det_result = new fd_result[human_count]; |
2322 | 2319 | for (int fd_i = 0; fd_i < human_count; fd_i++) |
2323 | 2320 | { |
... | ... | @@ -2364,8 +2361,8 @@ void snapshot_helper::cacheFaceSnapshotInfo(sy_img *human_img, int human_count, |
2364 | 2361 | |
2365 | 2362 | int new_left = max(0, face_ori_rect.left_ - face_ori_rect.width_); |
2366 | 2363 | int new_top = max(0, face_ori_rect.top_ - face_ori_rect.height_); |
2367 | - int new_right = min((int)task.task_algorithm_data.width - 1, (face_ori_rect.left_ + 2 * face_ori_rect.width_)); | |
2368 | - int new_bottom = min((int)task.task_algorithm_data.height - 1, (face_ori_rect.top_ + 2 * face_ori_rect.height_)); | |
2364 | + int new_right = min((int)dxGpuFrame.width - 1, (face_ori_rect.left_ + 2 * face_ori_rect.width_)); | |
2365 | + int new_bottom = min((int)dxGpuFrame.height - 1, (face_ori_rect.top_ + 2 * face_ori_rect.height_)); | |
2369 | 2366 | int new_width = new_right - new_left; |
2370 | 2367 | int new_height = new_bottom - new_top; |
2371 | 2368 | |
... | ... | @@ -2398,7 +2395,7 @@ void snapshot_helper::cacheFaceSnapshotInfo(sy_img *human_img, int human_count, |
2398 | 2395 | { (face_ori_rect.left_ - face_expand_rect.left_), (face_ori_rect.top_ - face_expand_rect.top_), face_ori_rect.width_, face_ori_rect.height_ }; |
2399 | 2396 | |
2400 | 2397 | |
2401 | - cudacommon::CropImgGpu((unsigned char*)task.task_algorithm_data.frame, task.task_algorithm_data.width, task.task_algorithm_data.height, | |
2398 | + cudacommon::CropImgGpu((unsigned char*)dxGpuFrame.frame, dxGpuFrame.width, dxGpuFrame.height, | |
2402 | 2399 | (unsigned char*)snapShotInfo[cur_obj_key].snapShotFace.frame, face_expand_rect.left_, face_expand_rect.top_, face_expand_rect.width_, face_expand_rect.height_); |
2403 | 2400 | } |
2404 | 2401 | else |
... | ... | @@ -2413,8 +2410,8 @@ void snapshot_helper::cacheFaceSnapshotInfo(sy_img *human_img, int human_count, |
2413 | 2410 | { |
2414 | 2411 | int new_left = max(0, face_ori_rect.left_ - face_ori_rect.width_); |
2415 | 2412 | int new_top = max(0, face_ori_rect.top_ - face_ori_rect.height_); |
2416 | - int new_right = min((int)task.task_algorithm_data.width - 1, (face_ori_rect.left_ + 2 * face_ori_rect.width_)); | |
2417 | - int new_bottom = min((int)task.task_algorithm_data.height - 1, (face_ori_rect.top_ + 2 * face_ori_rect.height_)); | |
2413 | + int new_right = min((int)dxGpuFrame.width - 1, (face_ori_rect.left_ + 2 * face_ori_rect.width_)); | |
2414 | + int new_bottom = min((int)dxGpuFrame.height - 1, (face_ori_rect.top_ + 2 * face_ori_rect.height_)); | |
2418 | 2415 | int new_width = new_right - new_left; |
2419 | 2416 | int new_height = new_bottom - new_top; |
2420 | 2417 | |
... | ... | @@ -2448,7 +2445,7 @@ void snapshot_helper::cacheFaceSnapshotInfo(sy_img *human_img, int human_count, |
2448 | 2445 | snapShotInfo[cur_obj_key].face_info.face_position = |
2449 | 2446 | {(face_ori_rect.left_ - face_expand_rect.left_), (face_ori_rect.top_ - face_expand_rect.top_), face_ori_rect.width_, face_ori_rect.height_}; |
2450 | 2447 | |
2451 | - cudacommon::CropImgGpu((unsigned char*)task.task_algorithm_data.frame, task.task_algorithm_data.width, task.task_algorithm_data.height, | |
2448 | + cudacommon::CropImgGpu((unsigned char*)dxGpuFrame.frame, dxGpuFrame.width, dxGpuFrame.height, | |
2452 | 2449 | (unsigned char*)snapShotInfo[cur_obj_key].snapShotFace.frame, face_expand_rect.left_, face_expand_rect.top_, face_expand_rect.width_, face_expand_rect.height_); |
2453 | 2450 | } |
2454 | 2451 | } |
... | ... | @@ -2470,7 +2467,7 @@ void snapshot_helper::clearSnapshotInfo() { |
2470 | 2467 | void snapshot_helper::erase_snapshot_info(OBJ_KEY& objKey){ |
2471 | 2468 | // if (5 == objKey.videoID) |
2472 | 2469 | // { |
2473 | - // LOG_DEBUG("task:{} objId: {}", objKey.videoID, objKey.objID); | |
2470 | + // LOG_DEBUG("task_id:{} objId: {}", objKey.videoID, objKey.objID); | |
2474 | 2471 | // } |
2475 | 2472 | |
2476 | 2473 | snapShotInfo.erase(objKey); | ... | ... |
vehicle_structure_platform.git0708-3080-trt-face/src/VPT/snapshot_analysis/snapshot_helper.h
... | ... | @@ -68,7 +68,7 @@ struct OBJ_INDEX { |
68 | 68 | |
69 | 69 | struct OBJ_VALUE { |
70 | 70 | bool finishTracker; //轨迹结束可以保存了 |
71 | - int frameCount; | |
71 | + unsigned long long frameCount; | |
72 | 72 | bool isupdate; |
73 | 73 | int lost; |
74 | 74 | DxGPUFrame snapShot; |
... | ... | @@ -191,9 +191,9 @@ public: |
191 | 191 | void object_attri_analysis(); |
192 | 192 | void snapshot_res_callback(OBJ_KEY obj_key,/* OBJ_VALUE obj_value,*/ void* analysisRes = NULL); |
193 | 193 | |
194 | - CropInfo cacheSnapShotInfo(OBJ_KEY newObj, VPT_ObjInfo obj, Task task); | |
194 | + CropInfo cacheSnapShotInfo(OBJ_KEY newObj, VPT_ObjInfo obj, sy_rect task_min_boxsize[DETECTTYPE], bool bCacheSrc, bool bCacheLittle, DxGPUFrame& dxGpuFrame); | |
195 | 195 | |
196 | - void cacheFaceSnapshotInfo(sy_img *human_img, int human_count, sy_point* ori_points, vector<int> human_idx, vector<OBJ_KEY> human_obj_keys, int snapshot_left[], int snapshot_top[], Task task); | |
196 | + void cacheFaceSnapshotInfo(sy_img *human_img, int human_count, sy_point* ori_points, vector<int> human_idx, vector<OBJ_KEY> human_obj_keys, int snapshot_left[], int snapshot_top[], DxGPUFrame& dxGpuFrame); | |
197 | 197 | |
198 | 198 | int getIndexByKey(OBJ_KEY newObj); |
199 | 199 | std::map<OBJ_KEY, OBJ_VALUE>::iterator getValueByKey(OBJ_KEY newObj); | ... | ... |
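Note: cacheSnapShotInfo() and cacheFaceSnapshotInfo() no longer take the whole Task; callers now pass only what the helper needs (the per-class minimum box sizes, two flags derived from the task's folder settings, and the decoded frame). A call mirroring the new call site in MutliSourceVideoProcess.cpp:

    bool bCacheSrc    = (task.folderName != NULL);        // keep full source frames?
    bool bCacheLittle = (task.folderNameLittle != NULL);  // keep cropped snapshots?

    CropInfo crop_info = m_snaphot_helper.cacheSnapShotInfo(
        newObj, obj, task.task_min_boxsize, bCacheSrc, bCacheLittle, task_algorithm_data);
    if (crop_info.bCrop) {
        // queue the crop for the batched GPU copy (PartMemResizeBatch), as before
    }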
vehicle_structure_platform.git0708-3080-trt-face/src/test/main.cpp
... | ... | @@ -209,9 +209,6 @@ static int sum = 0; |
209 | 209 | void videoObjSnapshotCallback(void * handle, video_object_snapshot *snapshot_info) |
210 | 210 | { |
211 | 211 | // printf("snapshot_info: %d \n", sum++); |
212 | - if(1) { | |
213 | - return; | |
214 | - } | |
215 | 212 | //#ifdef PRINTF_ATTRIRES |
216 | 213 | // cout << "handle :" << handle << " snapshot " << snapshot_info->task_id << " " << snapshot_info->object_id << " " << " " << snapshot_info->task_frame_count << " " << |
217 | 214 | // snapshot_info->video_image_path << " " << snapshot_info->snapshot_image_path << " " << snapshot_info->face_image_path << " " |
... | ... | @@ -264,7 +261,7 @@ void videoObjSnapshotCallback(void * handle, video_object_snapshot *snapshot_inf |
264 | 261 | //------------------------------------------------------------------------------------------------------------- |
265 | 262 | |
266 | 263 | // print the secondary attribute-analysis results |
267 | - if (0) | |
264 | + if (1) | |
268 | 265 | // if (snapshot_info->analysisRes != NULL) |
269 | 266 | { |
270 | 267 | if (0 == snapshot_info->object_type_index) |
... | ... | @@ -757,6 +754,8 @@ int main(int argc, char* argv[]) |
757 | 754 | } |
758 | 755 | beginTime = std::chrono::system_clock::now(); |
759 | 756 | |
757 | + int time_count = 0; | |
758 | + | |
760 | 759 | printf("begin add task\n"); |
761 | 760 | do { |
762 | 761 | if(get_running_task_number(handle) > 0){ |
... | ... | @@ -784,6 +783,11 @@ int main(int argc, char* argv[]) |
784 | 783 | |
785 | 784 | } |
786 | 785 | |
786 | + time_count ++; | |
787 | + if(time_count > 10) { | |
788 | + break; | |
789 | + } | |
790 | + | |
787 | 791 | } while(1) ; |
788 | 792 | |
789 | 793 | // printf("-------------------Begin rt_view_task 1 !----------------------\n"); | ... | ... |