// traffic_light_process.cpp
#include <algorithm>
#include "./traffic_light_process.h"
#include <cmath>
#include "../decoder/interface/DeviceMemory.hpp"
#include "../common/logger.hpp"
#include "../ai_platform/mvpt_process_assist.h"
#if 1
#include "opencv2/opencv.hpp"
#endif
namespace ai_engine_module
{
namespace traffic_light_process
{
static std::set<algorithm_type_t> algor_type_list_ = {
algorithm_type_t::PERSON_RUNNING_REDLIGHTS,
algorithm_type_t::NONMOTOR_RUNNING_REDLIGHTS,
};
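// Only red lights are relevant to the red-light-running algorithms handled by this module.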
inline bool is_valid_label(const label_t &label) {
return ((label == label_t::red));
}
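// Collect the subset of a task's configured algorithm types that this module handles
// (see algor_type_list_ above).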
std::set<algorithm_type_t> task_id_to_algorithm_type_seq(const task_id_t &task_id,
task_param_manager *const task_param) {
std::set<algorithm_type_t> seq;
auto &&algor_map = task_param->get_task_other_param(task_id);
if (algor_map) {
// LOG_TRACE("task id is {} size algor type {}", task_id, algor_map->size());
for (auto iter = algor_map->begin(); iter != algor_map->end(); ++iter) {
if (algor_type_list_.count(iter->first) > 0)
seq.emplace(iter->first);
}
}
return seq; // NRVO (named return value optimization)
}
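// Validate a detection box for a given algorithm: it must lie inside the task's valid rectangle,
// its width/height (normalized to a 1080p reference) must reach the configured minimums, and its
// confidence must reach the configured threshold. Returns false if any required parameter is missing.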
bool is_valid_box(const int top, const int left, const int right, const int bottom, const float score,
const algorithm_type_t &algor_type,
const int src_img_w, const int src_img_h,
const task_param_manager::algo_param_type_t_ *params_ptr = nullptr) {
if (!params_ptr)
return false;
if (!snapshot_legal_inarea(params_ptr->basic_param->algor_valid_rect, left, top, right, bottom))
return false;
if (params_ptr->algor_param == nullptr)
return false;
float scale_w = src_img_w / 1920.0;
float scale_h = src_img_h / 1080.0;
const unsigned width = (right - left) / scale_w; // normalize the box size to a 1080p reference for filtering (the size thresholds are configured for 1080p)
const unsigned height = (bottom - top) / scale_h;
if (width == 0 || height == 0)
return false;
//! TODO: use switch to replace.
using data_t = algor_config_param_manned_incident;
data_t *algor_params_ptr = (data_t *) (params_ptr->algor_param);
if ((width < algor_params_ptr->obj_min_width || height < algor_params_ptr->obj_min_height || score < algor_params_ptr->obj_confidence_threshold))
return false;
return true;
}
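// Build a polygon (vector of tr_point) from a flat coordinate list laid out as {x0, y0, x1, y1, ...}.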
vector<tr_point> Mbuild_area(vector<int> args)
{
vector<tr_point> result;
int points_check = args.size();
// coordinates must come as x/y pairs, so the count must be even
if (points_check % 2 == 1) {
LOG_ERROR("region points must be given as x/y pairs, but an odd number of values was received");
exit(0);
}
for (int i = 0; i < points_check; i++) {
if (i % 2 == 0) {
tr_point tmp_point;
tmp_point.x = args[i];
result.push_back(tmp_point);
}
else {
result[result.size() - 1].y = args[i];
}
}
return result;
}
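// Ray-casting point-in-polygon test: cast a horizontal ray from pCur towards +x and count the
// polygon edges it crosses; an odd crossing count means the point lies inside.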
bool McheckPointPolygon(vector<tr_point> tr_boxes, tr_point pCur) {
// copy the polygon vertices into a fixed-size buffer (assumes at most MAX_LANE_NUM vertices)
int nCount = tr_boxes.size();
tr_point RectPoints[MAX_LANE_NUM];
// std::cout << "ponits: ";
for (int i = 0; i < nCount; i++) {
RectPoints[i] = tr_boxes[i];
// std::cout << tr_boxes[i].x << " " << tr_boxes[i].y << " ";
}
// std::cout << std::endl;
int nCross = 0;
pCur.y += 0.1; // nudge the test point slightly (assuming tr_point stores floating-point coordinates) so it does not sit exactly on a horizontal edge
for (int i = 0; i < nCount; i++) {
// take each pair of adjacent vertices in turn
tr_point pStart = RectPoints[i];
tr_point pEnd = RectPoints[(i + 1) % nCount];
// an edge parallel to the x axis can never be crossed by the horizontal ray, skip it
if (pStart.y == pEnd.y) continue;
// the intersection would lie on the extension of pStart-pEnd, so the ray cannot cross this edge, skip it
if (pCur.y < min(pStart.y, pEnd.y) || pCur.y > max(pStart.y, pEnd.y)) continue;
// x coordinate of the intersection between the horizontal ray through pCur and the line pStart-pEnd
double x = (double)(pCur.y - pStart.y) * (double)(pEnd.x - pStart.x) / (double)(pEnd.y - pStart.y) + pStart.x;
// the ray goes towards +x, so only intersections to the right of pCur count
if (x > pCur.x) nCross++;
}
// an even number of crossings means the point is outside the polygon; odd means inside
return (nCross % 2 == 1);
}
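// Approximate a segmentation region by a quadrilateral: fit the minimum-area rotated rectangle to
// the polygon, then snap each rectangle corner to the closest polygon vertex (vertical offsets are
// weighted more heavily). The corners are written to quad_point and the absolute rectangle angle is
// returned (despite the name, no area is computed).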
float contourArea(const vector<tr_point>& polygon, cv::Point2f* quad_point) {
std::vector<cv::Point> contour;
for (int i = 0; i < polygon.size(); ++i) {
cv::Point tmp_p; tmp_p.x = polygon[i].x; tmp_p.y = polygon[i].y;
contour.push_back(tmp_p);
}
cv::Point2f rect_point[4];
cv::RotatedRect rect = cv::minAreaRect(contour); // minimum-area bounding rectangle: rect gives center, width/height and rotation angle
rect.points(rect_point); // corner coordinates, clockwise as box[0]..box[3]; for -45 <= angle <= 0 box[0] is the bottom-left corner, for angle > 45 the bottom-right
// derive a bounding quadrilateral from the rectangle (it may degenerate into a triangle): pick the 4 polygon vertices closest to the rectangle corners
for (int k = 0; k < 4; k ++) {
quad_point[k] = rect_point[k];
float sum_min_dist = 100000;
for (int j = 0; j < polygon.size(); ++j) {
float t_diff_x = polygon[j].x - rect_point[k].x, t_diff_y = polygon[j].y - rect_point[k].y;
float temp_dist = sqrt(t_diff_x * t_diff_x + t_diff_y * t_diff_y);
float temp_min_dist = temp_dist + 3*fabs(t_diff_y); // weight the vertical offset more heavily
if (temp_min_dist < sum_min_dist) {
quad_point[k].x = polygon[j].x; quad_point[k].y = polygon[j].y;
sum_min_dist = temp_min_dist;
}
}
}
return fabs(rect.angle);
}
// compare a value against zero with the PRECISION tolerance: returns 0 if |d| < PRECISION, otherwise 1 or -1 according to the sign
int cmpzero(double d) { return (fabs(d) < PRECISION)? 0: (d > 0? 1:-1); }
// 2D cross product (z component) of vectors (x1, y1) and (x2, y2)
double cross_det(double x1, double y1, double x2, double y2) { return x1*y2 - x2*y1; }
// check whether the target lies on either flank of the intersection area (left of the left boundary or right of the right boundary)
bool inBothSidesOfArea(tr_point curpos, cv::Point2f* boxPoints, float angle) {
// the sign of the cross product gives the target's position relative to a boundary edge: >0 left, <0 right, ==0 collinear (left/right depends on the edge direction; here the edge is taken to start from the end with the smaller y)
if (angle > 45) {
// left boundary
int l_flag = cmpzero(cross_det(boxPoints[1].x-boxPoints[2].x, boxPoints[1].y-boxPoints[2].y, curpos.x-boxPoints[2].x, curpos.y-boxPoints[2].y));
// right boundary (the point-relative vector must use the same base point as the edge vector)
int r_flag = cmpzero(cross_det(boxPoints[0].x-boxPoints[3].x, boxPoints[0].y-boxPoints[3].y, curpos.x-boxPoints[3].x, curpos.y-boxPoints[3].y));
if (l_flag > 0 || r_flag < 0) return true;
}
else {
// left boundary
int l_flag = cmpzero(cross_det(boxPoints[0].x-boxPoints[1].x, boxPoints[0].y-boxPoints[1].y, curpos.x-boxPoints[1].x, curpos.y-boxPoints[1].y));
// right boundary
int r_flag = cmpzero(cross_det(boxPoints[3].x-boxPoints[2].x, boxPoints[3].y-boxPoints[2].y, curpos.x-boxPoints[2].x, curpos.y-boxPoints[2].y));
if (l_flag > 0 || r_flag < 0) return true;
}
return false;
}
// check whether the target lies on either flank of the intersection area and its trajectory runs roughly north-south
bool inBothSidesOfAreaV2(tr_point curpos, cv::Point2f* boxPoints, float angle, const vector<sy_point>& tracker_list, int check_frames) {
int trackcnt = tracker_list.size();
tr_point p_start, p_end;
p_start.x = tracker_list[trackcnt-check_frames-1].x_; p_start.y = tracker_list[trackcnt-check_frames-1].y_;
p_end.x = tracker_list[trackcnt-1].x_; p_end.y = tracker_list[trackcnt-1].y_;
// movement direction of the target over the last check_frames frames
tr_point obj_toward;
obj_toward.x = p_end.x - p_start.x;
obj_toward.y = p_end.y - p_start.y;
// the sign of the cross product gives the target's position relative to a boundary edge: >0 left, <0 right, ==0 collinear (left/right depends on the edge direction; here the edge is taken to start from the end with the smaller y)
bool flag = false;
if (angle > 45) {
// left boundary
int l_flag = cmpzero(cross_det(boxPoints[1].x-boxPoints[2].x, boxPoints[1].y-boxPoints[2].y, curpos.x-boxPoints[2].x, curpos.y-boxPoints[2].y));
// right boundary (the point-relative vector must use the same base point as the edge vector)
int r_flag = cmpzero(cross_det(boxPoints[0].x-boxPoints[3].x, boxPoints[0].y-boxPoints[3].y, curpos.x-boxPoints[3].x, curpos.y-boxPoints[3].y));
if (l_flag > 0 || r_flag < 0) flag = true;
}
else {
// left boundary
int l_flag = cmpzero(cross_det(boxPoints[0].x-boxPoints[1].x, boxPoints[0].y-boxPoints[1].y, curpos.x-boxPoints[1].x, curpos.y-boxPoints[1].y));
// right boundary
int r_flag = cmpzero(cross_det(boxPoints[3].x-boxPoints[2].x, boxPoints[3].y-boxPoints[2].y, curpos.x-boxPoints[2].x, curpos.y-boxPoints[2].y));
if (l_flag > 0 || r_flag < 0) flag = true;
}
if (flag) {
int deg = atan2(obj_toward.y, obj_toward.x) * 180 / Pi; // angle between the trajectory and the x axis, in degrees
if (abs(deg) < 30 || abs(deg) > 150) return false; // nearly horizontal (east-west) movement does not count
else return true;
}
return false;
}
#if 0
void CvtYuvToBgr(sy_img src, sy_img dst) {
const int uvStart = src.w_ * src.h_;
for (int h = 0; h < dst.h_; h++) {
for (int w = 0; w < dst.w_; w++) {
int uvIndex = h / 2 * src.w_ + w - w % 2;
unsigned char y = *(src.data_ + w + h * src.w_);
unsigned char u = *(src.data_ + uvStart + uvIndex);
unsigned char v = *(src.data_ + uvStart + uvIndex + 1);
int r = (int)(y + 1.402 * (v - 128));
int g = (int)(y - 0.34414 * (u - 128) - 0.71414 * (v - 128));
int b = (int)(y + 1.772 * (u -128));
if (r > 255) r = 255;
if (g > 255) g = 255;
if (b > 255) b = 255;
if (r < 0) r = 0;
if (g < 0) g = 0;
if (b < 0) b = 0;
*(dst.data_ + h * dst.w_ * 3 + w * 3) = b;
*(dst.data_ + h * dst.w_ * 3 + w * 3 + 1) = g;
*(dst.data_ + h * dst.w_ * 3 + w * 3 + 2) = r;
}
}
}
#endif
TrafficLightProcess::TrafficLightProcess()
: task_param_manager_(nullptr)
{
}
TrafficLightProcess::~TrafficLightProcess()
{
if (tools_) {
traffic_light_release(&tools_);
tools_ = nullptr;
}
if (m_algorthim_ctx) {
aclrtDestroyContext(m_algorthim_ctx);
}
}
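// Load the traffic-light detection OM model and set up the Ascend device context (aclrtSetDevice /
// aclrtCreateContext). Returns true only when traffic_light_init() succeeds; every later call
// checks this flag via check_initied().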
bool TrafficLightProcess::init(int gpu_id, string models_dir)
{
init_ = false;
// string model_path = models_dir + "/models/village/trlight_det_310p.om" ;
string model_path = models_dir + "/models/village/trlight_det_b8_310p.om" ;
LOG_INFO("traffic_light 版本:{} 模型路径:{}", traffic_light_getversion(), model_path);
traffic_light_param param;
char modelNames[256] = {0};
strncpy(modelNames, model_path.c_str(), sizeof(modelNames) - 1); // bounded copy so a long path cannot overflow the buffer
// param.modelNames = modelNames;
param.modelNames_b = modelNames;
param.thresld = 0.25;
param.devId = gpu_id;
param.max_batch = 8;
m_devId = param.devId;
ACL_CALL(aclrtSetDevice(m_devId), ACL_SUCCESS, -1);
ACL_CALL(aclrtCreateContext(&m_algorthim_ctx, m_devId), ACL_SUCCESS, -1);
int status;
if (!(init_ = (0 == (status = traffic_light_init(&tools_, param)))))
LOG_ERROR("Init TrafficLightProcessSdk failed error code is {}", status);
else
if (!task_param_manager_)
task_param_manager_ = task_param_manager::getInstance();
return init_;
}
bool TrafficLightProcess::check_initied()
{
if (!init_)
LOG_ERROR("[%s:%d] call init function please.", __FILE__, __LINE__);
return init_;
}
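// Release everything cached for a task that is being torn down: free any VPC picture descriptors
// held by its pending results and drop its m/n frame counters.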
void TrafficLightProcess::force_release_result(const task_id_t& task_id) {
for (auto iter = id_to_result_.begin(); iter != id_to_result_.end();) {
const auto& key = iter->first;
if (key.task_id == task_id) {
auto& value = iter->second;
if (value.origin_img_desc != nullptr) {
VPCUtil::vpc_pic_desc_release(value.origin_img_desc);
}
if (value.roi_img_desc != nullptr) {
VPCUtil::vpc_pic_desc_release(value.roi_img_desc);
}
iter = id_to_result_.erase(iter);
}
else {
++iter;
}
}
for (auto iter = id_to_mn_.begin(); iter != id_to_mn_.end();) {
const auto& key = iter->first;
if (key.task_id == task_id) { iter = id_to_mn_.erase(iter);}
else { ++iter; }
}
}
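// Return a copy of the cached alarm result for the given object key, or nullptr if none exists.
// When do_erase is set the cached entry and its m/n counter are removed so the result is only
// delivered once.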
std::shared_ptr<results_data_t> TrafficLightProcess::get_result_by_objectid(const id_t& id, bool do_erase)
{
auto it = id_to_result_.find(id);
if (it == id_to_result_.end())
return std::shared_ptr<results_data_t>(nullptr);
std::shared_ptr<results_data_t> res = std::make_shared<results_data_t>(it->second);
if (do_erase) {
id_to_result_.erase(id);
if (id_to_mn_.count(id)) id_to_mn_.erase(id);
}
return res;
}
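// Main per-batch entry point:
//   1. for every detected person / bicycle / motorcycle, keep only targets that are inside a
//      crosswalk region and sit on a flank of the intersection area with a roughly north-south
//      trajectory, cropping each kept target from the source frame via VPC;
//   2. run the traffic-light detector on the interest images in batches of MAX_BATCH;
//   3. for every kept target, count red lights and apply the per-algorithm m/n frame logic before
//      caching an alarm result in id_to_result_.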
bool TrafficLightProcess::update_mstreams(const std::vector<task_id_t>& taskIds, vector<sy_img> src_interest_imgs, vector<DeviceMemory*> vec_det_input_images, const vector<vector<vector<int>>>& traffic_region, const vector<vector<int>>& labels, map<OBJ_KEY, OBJ_INDEX> &m_total_obj_info, const std::vector<onelevel_det_result> &det_results)
{
if (!check_initied())
return false;
if (det_results.empty())
{
LOG_DEBUG("detection result is empty.");
return false;
}
struct stream_idx_and_algor_seq_t {
unsigned stream_idx;
std::set<algorithm_type_t> algors;
};
int n_images = det_results.size(); // or n_stream
unsigned flattened_idx = 0;
std::map<int, int> flattened_idx_to_batch_idx;
//! for every flattened box, record the enabled algorithms and the source stream index.
std::map<unsigned, stream_idx_and_algor_seq_t> flattened_idx_to_algor_seq;
/* 1. Crop boxes of the classes of interest. */
auto taskId_iter = taskIds.begin();
std::vector<sy_img> flattened_imgs(0);
std::vector<vpc_img_info> flattened_vpc_imgs(0);
std::vector<input_data_wrap_t> flattened_interest_data(0); //
VPCUtil* pVpcUtil = VPCUtil::getInstance();
for (int n = 0; n < n_images; ++n)
{
int n_interest_obj = 0;
auto& src_img = vec_det_input_images[n];
int src_img_w = src_img->getWidth();
int src_img_h = src_img->getHeight();
auto seg_regions = traffic_region[n];
auto seg_labels = labels[n];
vector<vector<int>> crosswalk_regions; // crosswalk (pedestrian crossing) regions
vector<vector<int>> interestion_regions; // road-intersection regions
for (unsigned i = 0; i < seg_labels.size(); ++i) {
const seg_label_t seg_label = static_cast<seg_label_t>(seg_labels[i]);
if ((seg_label == seg_label_t::crosswalk)) {
vector<int> cur_region(seg_regions[i].begin(),seg_regions[i].end());
crosswalk_regions.push_back(cur_region);
}
if ((seg_label == seg_label_t::interestion_area)) {
vector<int> cur_region(seg_regions[i].begin(),seg_regions[i].end());
interestion_regions.push_back(cur_region);
}
}
auto& boxes_of_one_image = det_results[n].obj;
for (int i = 0; i < det_results[n].obj_count; ++i)
{
auto& box = boxes_of_one_image[i];
if (static_cast<det_class_label_t>(box.index) == det_class_label_t::MOTOCYCLE || static_cast<det_class_label_t>(box.index) == det_class_label_t::BICYCLE || static_cast<det_class_label_t>(box.index) == det_class_label_t::HUMAN)
{
auto& taskId = *taskId_iter;
input_data_wrap_t data;
int top = std::max(int(box.top - (IMAGE_CROP_EXPAND_RATIO * box.top)), 0);
int left = std::max(int(box.left - (IMAGE_CROP_EXPAND_RATIO * box.left)), 0);
int right = std::min(int(box.right + (IMAGE_CROP_EXPAND_RATIO * box.right)), src_img_w);
int bottom = std::min(int(box.bottom + (IMAGE_CROP_EXPAND_RATIO * box.bottom)), src_img_h);
//! loop per algor from set.
stream_idx_and_algor_seq_t stream_idx_and_algor_seq{n, {}};
std::set<algorithm_type_t> algorithm_type_seq = task_id_to_algorithm_type_seq(taskId,
task_param_manager_); // NRVO
for (auto algor_iter = algorithm_type_seq.begin();algor_iter != algorithm_type_seq.end(); ++algor_iter) {
const algorithm_type_t algor_type = *algor_iter;
auto &&algor_param_wrap = task_param_manager_->get_task_other_param(taskId, algor_type);
if (!algor_param_wrap) {
LOG_ERROR("{} is nullptr when get algor param from task_param", taskId.c_str());
continue;
}
if (!is_valid_box(top, left, right, bottom, box.confidence, algor_type, src_img_w, src_img_h, algor_param_wrap))
continue;
//DEBUG: polygon-region selection and containment check ======================================
tr_point curpos;
curpos.x = (left + right) * 0.5; curpos.y = bottom; // bottom-center of the detection box is used as the target's ground position
#if 0
vector<int> args = {1552,435,1756,537,1915,537,1915,499,1725,435}; // hard-coded region (泰兴黄桥锦润福府路口 intersection)
// vector<int> args = {0,0,0,1080,1920,1080,1920,0}; // full-frame region
vector<tr_point> tr_boxes = Mbuild_area(args);
int cur_flag = McheckPointPolygon(tr_boxes, curpos); // cur_flag == true means the point is inside the region
if (!cur_flag) continue;
#else
// the target must be on a crosswalk and on a flank of the intersection area ----> only north-south crossings are judged
int check_frames = 5; // history length used for the direction check (at 25 fps, 1 s = 25 frames)
OBJ_KEY trace_obj_key = {taskId, box.id};
if ((int)m_total_obj_info[trace_obj_key].center_points.size() <= check_frames) continue; // ignore tracks that are too short (the direction check needs check_frames + 1 points)
bool interestion_flag = false;
for (auto region : interestion_regions) {
vector<tr_point> tr_boxes = Mbuild_area(region);
cv::Point2f quad_point[4];
float angle = contourArea(tr_boxes, quad_point);
// interestion_flag = inBothSidesOfArea(curpos, quad_point, angle);
interestion_flag = inBothSidesOfAreaV2(curpos, quad_point, angle, m_total_obj_info[trace_obj_key].center_points, check_frames); // also checks that the trajectory runs north-south
if (interestion_flag) break;
}
if (!interestion_flag) continue;
bool cur_flag = false;
for (auto region : crosswalk_regions) {
vector<tr_point> tr_boxes = Mbuild_area(region);
cur_flag = McheckPointPolygon(tr_boxes, curpos); // cur_flag == true means the target is inside the crosswalk region
if (cur_flag) break;
}
if (!cur_flag) continue;
#endif
//DEBUG END==================================================================================
stream_idx_and_algor_seq.algors.emplace(algor_type);
}
if (stream_idx_and_algor_seq.algors.empty())
continue;
int width = right - left;
int height = bottom - top;
data.box.top = top; data.box.left = left;
data.box.right = right; data.box.bottom = bottom;
data.box.score = box.confidence; data.box.cls = box.index;
data.taskId = taskId; data.objId = box.id;
// crop the object region from the source frame via VPC
video_object_info obj;
strcpy(obj.task_id, taskId.c_str());
obj.object_id = box.id;
obj.left = left; obj.top = top;
obj.right = right; obj.bottom = bottom;
vpc_img_info img_info = pVpcUtil->crop(src_img, obj);
sy_img img;
img.w_ = width;
img.h_ = height;
img.c_ = src_img->getChannel();
if (img_info.pic_desc != nullptr) {
void *outputDataDev = acldvppGetPicDescData(img_info.pic_desc);
img.data_ = reinterpret_cast<unsigned char*>(outputDataDev);
}
else {
LOG_ERROR("Crop image NPU failed wh is [{}, {}] ltrb is [{} {} {} {}]",
src_img_w, src_img_h, data.box.left, data.box.top, data.box.right, data.box.bottom);
continue;
}
flattened_imgs.emplace_back(std::move(img));
flattened_vpc_imgs.emplace_back(std::move(img_info));
flattened_interest_data.emplace_back(std::move(data));
flattened_idx_to_algor_seq[flattened_idx] = std::move(stream_idx_and_algor_seq);
flattened_idx_to_batch_idx[flattened_idx++] = n;
}
}
++taskId_iter;
}
int ret = aclrtSetCurrentContext(m_algorthim_ctx);
if (ACL_SUCCESS != ret) {
return false;
}
/* 2. Run traffic-light detection on the interest images in batches and collect the results. */
int n_input_image = flattened_imgs.size();
int n_input_src_image = src_interest_imgs.size();
std::vector<traffic_light_result> model_results(n_input_src_image); // std::vector instead of a variable-length array (a non-standard extension)
{
int steps = (n_input_src_image + MAX_BATCH - 1) / MAX_BATCH; // number of batches, rounded up
for (int step = 0; step < steps; ++step)
{
int offset = step * MAX_BATCH;
int batch_size = (step == steps - 1) ? n_input_src_image - offset : MAX_BATCH;
// traffic_light_process_batch(tools_, src_interest_imgs.data() + offset, batch_size, model_results.data() + offset);
traffic_light_process_batchV2(tools_, src_interest_imgs.data() + offset, batch_size, model_results.data() + offset);
}
}
/* 3. postprocess. */
{
for (int n = 0; n < n_input_image; ++n)
{
auto& det_result = flattened_interest_data[n];
auto& objId = det_result.objId;
auto& task_id = det_result.taskId;
auto &stream_idx_and_algor_seq = flattened_idx_to_algor_seq[n];
auto &algors = stream_idx_and_algor_seq.algors;
// auto &steram_idx = stream_idx_and_algor_seq.stream_idx;
const auto& src_img = vec_det_input_images[flattened_idx_to_batch_idx[n]];
auto &model_result = model_results[flattened_idx_to_batch_idx[n]];
int red_cnt = 0; // number of red lights (for going straight) detected in this frame
for (unsigned i = 0; i < model_result.objcount; ++i) {
auto &box = model_result.objinfo[i];
const label_t label = static_cast<label_t>(box.index);
if (!is_valid_label(label))
continue;
// if (box.left < 1200 || box.top < 159 || box.right > 1307 || box.bottom > 212) continue; // restrict where the traffic light may appear (泰兴黄桥锦润福府路口 intersection)
LOG_TRACE("task id is {} obj_id {} label {} index {} score {}", task_id, objId, label, box.index, box.confidence);
red_cnt ++;
}
#if 0
if (red_cnt > 0) {
sy_img img = src_interest_imgs[flattened_idx_to_batch_idx[n]];
int dataSize = img.w_ * img.h_ * 3 / 2;
uint8_t* buffer = new uint8_t[dataSize];
uint8_t* imgBgr = new uint8_t[dataSize];
aclError aclRet = aclrtMemcpy(buffer, dataSize, img.data_, dataSize, ACL_MEMCPY_DEVICE_TO_HOST);
sy_img imgyuv_local, tmpbgr;
imgyuv_local.data_ = (unsigned char*)buffer; imgyuv_local.w_ = img.w_; imgyuv_local.h_ = img.h_;
tmpbgr.w_ = img.w_; tmpbgr.h_ = img.h_; tmpbgr.data_ = imgBgr;
CvtYuvToBgr(imgyuv_local, tmpbgr);
cv::Mat cvImg = cv::Mat(tmpbgr.h_, tmpbgr.w_, CV_8UC3, tmpbgr.data_);
string jpgSaveName1 = "result/oral/" + task_id + "_" + to_string(objId) + ".jpg";
cv::imwrite(jpgSaveName1, cvImg);
for (unsigned i = 0; i < model_result.objcount; ++i) {
cv::Point lt(model_result.objinfo[i].left, model_result.objinfo[i].top);
cv::Point rb(model_result.objinfo[i].right, model_result.objinfo[i].bottom);
cv::rectangle(cvImg, lt, rb, cv::Scalar(0, 0, 255), 3);
char buffer[50]; int fontface = cv::FONT_HERSHEY_SIMPLEX; double scale = 0.8; int thickness = 2; int baseline = 0;
snprintf(buffer, sizeof(buffer), "%d:%.2f", model_result.objinfo[i].index, model_result.objinfo[i].confidence);
cv::Size text = cv::getTextSize(buffer, fontface, scale, thickness, &baseline);
cv::putText(cvImg, buffer, lt - cv::Point(0, baseline), fontface, scale, cv::Scalar(0, 0, 255), thickness, 8);
}
string jpgSaveName2 = "result/draw/" + task_id + "_" + to_string(objId) + ".jpg";
cv::imwrite(jpgSaveName2, cvImg);
delete[] buffer; buffer = NULL;
delete[] imgBgr; imgBgr = NULL;
}
#endif
for (auto algor_type_iter = algors.begin();algor_type_iter != algors.end(); ++algor_type_iter) {
const algorithm_type_t algor_type = *algor_type_iter;
auto &&algor_param_wrap = task_param_manager_->get_task_other_param(task_id, algor_type);
if (!algor_param_wrap) {
LOG_ERROR("{} is nullptr when get algor param from task_param", task_id);
continue;
}
auto algor_param = ((algor_param_type)algor_param_wrap->algor_param);
id_t obj_key = obj_key_t{ objId, task_id, algor_type};
if (id_to_result_.find(obj_key) != id_to_result_.end())
continue;
// LOG_TRACE("task id is {} algor type is {} obj_id {}", task_id, int(algor_type), objId);
if (algor_type == algorithm_type_t::PERSON_RUNNING_REDLIGHTS && static_cast<det_class_label_t>(det_result.box.cls) != det_class_label_t::HUMAN)
continue;
// if (algor_type == algorithm_type_t::NONMOTOR_RUNNING_REDLIGHTS && (static_cast<det_class_label_t>(det_result.box.cls) != det_class_label_t::MOTOCYCLE ||
// static_cast<det_class_label_t>(det_result.box.cls) != det_class_label_t::BICYCLE))
// continue;
if (algor_type == algorithm_type_t::NONMOTOR_RUNNING_REDLIGHTS && static_cast<det_class_label_t>(det_result.box.cls) == det_class_label_t::HUMAN)
continue;
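// m/n alarm logic: m_frame counts every processed frame for this object/algorithm pair and n_frame
// counts frames whose red-light count reached hs_count_threshold; a result is cached once n_frame
// reaches algor_param->n, and the counters are reset via e.reset() once m_frame reaches algor_param->m.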
auto& e = id_to_mn_[obj_key];
++e.m_frame;
// do not raise an alarm when the red-light count is below the configured threshold
if (red_cnt < algor_param->hs_count_threshold)
continue;
{
if (++e.n_frame == algor_param->n)
{
results_data_t result;
{ LOG_TRACE("task id is {} obj_id {} red_cnt {}", task_id, det_result.objId, red_cnt);
result.box = det_result.box;
result.taskId = det_result.taskId;
result.objId = det_result.objId;
result.algor_type = algor_type;
#if 0 /* saving the snapshot images at alarm time is disabled for now; re-enable when needed */
// full source frame
vpc_img_info src_img_info = VPCUtil::vpc_devMem2vpcImg(src_img);
result.origin_img_desc = src_img_info.pic_desc;
// cropped patch -- copy the device buffer before storing it
void *outputDataDev = acldvppGetPicDescData(flattened_vpc_imgs[n].pic_desc);
int nBufferSize = acldvppGetPicDescSize(flattened_vpc_imgs[n].pic_desc);
void *devBuffer = nullptr;
auto ret = acldvppMalloc(&devBuffer, nBufferSize);
if (ret != ACL_SUCCESS) {
LOG_ERROR("acldvppMalloc failed, size = %u, errorCode = %d.", nBufferSize, static_cast<int32_t>(ret));
return false;
}
aclrtMemcpy(devBuffer, nBufferSize, outputDataDev, nBufferSize, ACL_MEMCPY_DEVICE_TO_DEVICE);
acldvppPicDesc *vpcInputDesc_= acldvppCreatePicDesc();
acldvppSetPicDescData(vpcInputDesc_, devBuffer);
acldvppSetPicDescFormat(vpcInputDesc_, PIXEL_FORMAT_YUV_SEMIPLANAR_420);
acldvppSetPicDescWidth(vpcInputDesc_, acldvppGetPicDescWidth(flattened_vpc_imgs[n].pic_desc));
acldvppSetPicDescHeight(vpcInputDesc_, acldvppGetPicDescHeight(flattened_vpc_imgs[n].pic_desc));
acldvppSetPicDescWidthStride(vpcInputDesc_, acldvppGetPicDescWidthStride(flattened_vpc_imgs[n].pic_desc));
acldvppSetPicDescHeightStride(vpcInputDesc_, acldvppGetPicDescHeightStride(flattened_vpc_imgs[n].pic_desc));
acldvppSetPicDescSize(vpcInputDesc_, nBufferSize);
result.roi_img_desc = vpcInputDesc_; // must hold its own copy of the cropped data
#endif
}
id_to_result_.emplace(obj_key, std::move(result));
}
}
if (e.m_frame == algor_param->m)
e.reset();
}
VPCUtil::vpc_img_release(flattened_vpc_imgs[n]); // also releases the device memory referenced by flattened_imgs[n].data_
}
}
return true;
}
} // namespace traffic_light_process
} // namespace ai_engine_module