Commit d2d37be1 authored by xin.wang.waytous

add-pub_classes

parent a7247020
-rgb_sub_topic_name: /sensor/front/image/bgr
-pointcloud_sub_topic_name: /sensor/lidar/livox/front_middle/points
+rgb_sub_topic_name: /sensor/camera/front_rgb/image
+pointcloud_sub_topic_name: /vehicle_all_pointcloud
 detection_pub_topic_name: /perception/camera/obstacle
 # static_gridmap_pub_topic_name: /perception/camera/bev_static_gridmap
 creator_id: 0000
@@ -11,20 +11,24 @@ rgb_config_path: configs/weights/multi_yolov5m.yaml
 device: 0
 period_time: 5
 vis_res: true
-vis_frame_id: front_middle_livox
+vis_frame_id: livox
+# ["pedestrian", "two_wheel", "car", "truck", "construction_machine", "fence", "stone", "dust", "cone"]
+pub_classes: ["stone", "cone"]
 obstacle_height_threshold: 30 # cm
-camera_intrinsic: [969.796542854634, -0.129283640950858, 992.981470492944,
-                   0., 970.128231641552, 596.651376445781,
-                   0., 0., 1.]
-distortion_coefficients: [-0.359234564377185, 0.169945565409037, 0.000788823270375011, 0.000877958277585529, -0.0433517026779682]
-camera2lidar_extrinsic: [-0.00223004, -0.999324, -0.0367048, 0.70583,
-                         0.0536961, 0.0365323, -0.997889, -0.761989,
-                         0.998555, -0.00419624, 0.0535783, 0.781821,
-                         0, 0, 0, 1]
+camera_intrinsic: [1346.61241105747, 0, 951.014999441565,
+                   0, 1344.43795511862, 530.562496955385,
+                   0, 0, 1]
+distortion_coefficients: [-0.449196898930979, 0.253956763950171, -0.00139403693782871, -0.00111424348026907, -0.0870171627456232]
+# camera2lidar_extrinsic: [-0.0423904, -0.999009, 0.0135863, 0.0656235,
+#                          0.0313214, -0.0149206, -0.999398, -0.167858,
+#                          0.99861, -0.0419393, 0.0319228, -0.0162872,
+#                          0, 0, 0, 1]
+camera2lidar_extrinsic: [ 0.0029218, -0.9999551, 0.0090201, -0.339,
+                          0.0170034, -0.0089692, -0.9998152, 3.404,
+                          0.9998512, 0.0030746, 0.0169764, -0.164,
+                          0, 0, 0, 1 ]
 src_img_width: 1920
 src_img_height: 1080
 dst_img_width: 960
 dst_img_height: 540
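The hunk above swaps in a new pinhole calibration and a re-measured camera-lidar extrinsic, keeping the previous matrix as a comment. A minimal sketch of how these numbers are typically applied, assuming Eigen and assuming `camera2lidar_extrinsic` maps lidar-frame points into the camera frame (verify the direction against the node); distortion is omitted because the node projects against undistorted images:

```cpp
// Illustrative only, not part of this repo: project one lidar point
// into pixel coordinates using the config values above.
#include <Eigen/Dense>
#include <iostream>

int main() {
    Eigen::Matrix3d K;  // camera_intrinsic (new calibration)
    K << 1346.61241105747, 0, 951.014999441565,
         0, 1344.43795511862, 530.562496955385,
         0, 0, 1;
    Eigen::Matrix4d T;  // camera2lidar_extrinsic (active entry)
    T << 0.0029218, -0.9999551, 0.0090201, -0.339,
         0.0170034, -0.0089692, -0.9998152, 3.404,
         0.9998512, 0.0030746, 0.0169764, -0.164,
         0, 0, 0, 1;
    Eigen::Vector4d p_lidar(10.0, 0.0, 0.0, 1.0);   // a point 10 m ahead
    Eigen::Vector3d p_cam = (T * p_lidar).head<3>();
    if (p_cam.z() > 0) {                            // in front of the camera
        Eigen::Vector3d uv = K * (p_cam / p_cam.z());
        std::cout << "pixel: " << uv.x() << ", " << uv.y() << std::endl;
    }
    return 0;
}
```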
@@ -12,15 +12,21 @@ device: 0
 period_time: 5
 vis_res: true
 vis_frame_id: livox
+# ["pedestrian", "two_wheel", "car", "truck", "construction_machine", "fence", "stone", "dust", "cone"]
+pub_classes: ["stone", "cone"]
 obstacle_height_threshold: 30 # cm
 camera_intrinsic: [1346.61241105747, 0, 951.014999441565,
                    0, 1344.43795511862, 530.562496955385,
                    0, 0, 1]
 distortion_coefficients: [-0.449196898930979, 0.253956763950171, -0.00139403693782871, -0.00111424348026907, -0.0870171627456232]
-camera2lidar_extrinsic: [-0.0423904, -0.999009, 0.0135863, 0.0656235,
-                         0.0313214, -0.0149206, -0.999398, -0.167858,
-                         0.99861, -0.0419393, 0.0319228, -0.0162872,
-                         0, 0, 0, 1]
+# camera2lidar_extrinsic: [-0.0423904, -0.999009, 0.0135863, 0.0656235,
+#                          0.0313214, -0.0149206, -0.999398, -0.167858,
+#                          0.99861, -0.0419393, 0.0319228, -0.0162872,
+#                          0, 0, 0, 1]
+camera2lidar_extrinsic: [ 0.0029218, -0.9999551, 0.0090201, -0.339,
+                          0.0170034, -0.0089692, -0.9998152, 3.404,
+                          0.9998512, 0.0030746, 0.0169764, -0.164,
+                          0, 0, 0, 1 ]
 src_img_width: 1920
 src_img_height: 1080
 dst_img_width: 960
@@ -5,7 +5,7 @@
 * @Date: 2023-09-03 03:14:28
 * @email: xin.wang@waytous.com
 * @LastEditors: wxin
-* @LastEditTime: 2023-09-21 09:11:08
+* @LastEditTime: 2023-11-27 07:42:56
 */
 #ifndef COMMON_MAP_H_
@@ -56,7 +56,7 @@ static std::map<std::string, int> name2subtype = {
     {"construction_machine", waytous_perception_msgs::Object::SUBTYPE_DOZER},
     {"vehicle", waytous_perception_msgs::Object::SUBTYPE_TRUCK},
     {"fence", waytous_perception_msgs::Object::SUBTYPE_UNKNOWN_STATIC},
-    {"stone", waytous_perception_msgs::Object::SUBTYPE_TRAFFIC_CONE},
+    {"stone", waytous_perception_msgs::Object::SUBTYPE_UNKNOWN_STATIC},
     {"dust", waytous_perception_msgs::Object::SUBTYPE_UNKNOWN_MOVABLE},
     {"cone", waytous_perception_msgs::Object::SUBTYPE_TRAFFIC_CONE},
     {"road", waytous_perception_msgs::Object::SUBTYPE_UNKNOWN_STATIC},
@@ -5,7 +5,7 @@
 * @Date: 2023-09-03 01:23:43
 * @email: xin.wang@waytous.com
 * @LastEditors: wxin
-* @LastEditTime: 2023-09-27 06:26:04
+* @LastEditTime: 2023-11-23 09:57:11
 */
 #include <string>
@@ -23,7 +23,7 @@ int main(int argc, char** argv)
 {
     google::InitGoogleLogging("WARN");
-    ros::init(argc, argv, "vision_bev_node");
+    ros::init(argc, argv, "ros_camera_bev_node");
     ros::NodeHandle nh;
     // read parameters
@@ -5,7 +5,7 @@
 * @Date: 2023-09-03 02:18:07
 * @email: xin.wang@waytous.com
 * @LastEditors: wxin
-* @LastEditTime: 2023-11-22 09:22:01
+* @LastEditTime: 2023-11-27 08:05:00
 */
@@ -113,13 +113,23 @@ cameraFramePtr sensorManager::infer()
     cv::Mat undistort_img(uImg->rows(), uImg->cols(), CV_8UC3);
     memcpy(undistort_img.data, uImg->cpu_data(), uImg->total() * sizeof(uint8_t));
     cptr->setImageSrc(undistort_img);
-    // get semantic segmentation results
-    cptr->setSemantics(std::dynamic_pointer_cast<waytous::deepinfer::ios::Semantics>(outputs[1]));
+    // get semantic segmentation results // semantics are no longer published
+    // cptr->setSemantics(std::dynamic_pointer_cast<waytous::deepinfer::ios::Semantics>(outputs[1]));
     // get instance segmentation results
     cptr->setDetections(std::dynamic_pointer_cast<waytous::deepinfer::ios::Detection2Ds>(outputs[0]));
     // get depth estimation results
     cptr->setDepth(std::dynamic_pointer_cast<waytous::deepinfer::ios::Depth>(outputs[2]));
+    // filter out objects whose class is not configured for publishing
+    if(infer_param.pub_classes.size() > 0){
+        for(auto& obj: cptr->objs_){
+            if(std::find(infer_param.pub_classes.begin(), infer_param.pub_classes.end(), obj.name) == infer_param.pub_classes.end())
+            {
+                obj.valid = -100;
+            }
+        }
+    }
     // get timestamp, sensor_header, etc.
     auto header = ros_img->header;
     int64_t timestamp = header.stamp.sec * 1e3 + header.stamp.nsec / 1e6; // to ms
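The new block marks detections outside `pub_classes` with a `valid = -100` sentinel instead of erasing them; an empty list disables filtering. The same test could be shared between `infer()` and `publishDetection()` as a single predicate, keeping the two call sites consistent; a hypothetical helper, not in the repo:

```cpp
#include <algorithm>
#include <string>
#include <vector>

// Hypothetical helper mirroring the new filter: an empty whitelist
// means "publish every class".
bool inPubClasses(const std::vector<std::string>& pub_classes,
                  const std::string& name) {
    return pub_classes.empty() ||
           std::find(pub_classes.begin(), pub_classes.end(), name) != pub_classes.end();
}
```

In `infer()` the negated result would set `obj.valid = -100`; in `publishDetection()` it would drive the `continue`.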
@@ -168,6 +178,10 @@ void sensorManager::publishDetection()
     int8_t obj_id = 0;
     for(auto& obj: vGrid->vInsObjs)
     {
+        // skip classes that are not in the pub_classes whitelist
+        if(infer_param.pub_classes.size() > 0 && std::find(infer_param.pub_classes.begin(), infer_param.pub_classes.end(), obj.names) == infer_param.pub_classes.end())
+        {
+            continue;
+        }
         waytous_perception_msgs::Object ob;
         //int8 id = 1;
@@ -5,7 +5,7 @@
 * @Date: 2023-07-25 06:16:52
 * @email: xin.wang@waytous.com
 * @LastEditors: wxin
-* @LastEditTime: 2023-10-11 02:48:38
+* @LastEditTime: 2023-11-27 07:04:53
 */
@@ -42,6 +42,9 @@ bool readInferParam(ImageInferNodeParam& param, const std::string& param_path){
     param.vis_res = node["vis_res"].as<bool>();
     param.vis_frame_id = node["vis_frame_id"].as<std::string>();
+    if(node["pub_classes"].IsDefined() && (!node["pub_classes"].IsNull())){
+        param.pub_classes = node["pub_classes"].as<std::vector<std::string>>();
+    }
     param.obstacle_height_threshold = node["obstacle_height_threshold"].as<float>();
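The guard makes `pub_classes` optional: a missing or null key leaves the vector empty, which the rest of the code treats as "publish everything". A standalone sketch of the same yaml-cpp pattern, using an inline document for illustration where the real code loads `param_path`:

```cpp
#include <iostream>
#include <string>
#include <vector>
#include <yaml-cpp/yaml.h>

int main() {
    YAML::Node node = YAML::Load("pub_classes: [stone, cone]");

    std::vector<std::string> pub_classes;  // stays empty -> no filtering
    if (node["pub_classes"].IsDefined() && !node["pub_classes"].IsNull()) {
        pub_classes = node["pub_classes"].as<std::vector<std::string>>();
    }
    for (const auto& c : pub_classes) std::cout << c << "\n";
    return 0;
}
```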
@@ -5,7 +5,7 @@
 * @Date: 2023-09-03 03:14:28
 * @email: xin.wang@waytous.com
 * @LastEditors: wxin
-* @LastEditTime: 2023-10-11 02:48:24
+* @LastEditTime: 2023-11-27 07:02:45
 */
 #ifndef UTILS_H_
@@ -49,6 +49,8 @@ struct ImageInferNodeParam{
     bool vis_res = true;
     std::string vis_frame_id = "Lidar";
+    std::vector<std::string> pub_classes;  // empty: publish all classes
+
     // camera parameters
     waytous::deepinfer::ios::CameraParamPtr cameraParam_ = nullptr;
@@ -5,7 +5,7 @@
 * @Date: 2023-09-03 03:13:41
 * @email: xin.wang@waytous.com
 * @LastEditors: wxin
-* @LastEditTime: 2023-11-23 08:56:03
+* @LastEditTime: 2023-11-27 07:58:07
 */
@@ -393,7 +393,7 @@ int Visione_3D::Process_objects(const cv::Mat& image, const cv::Mat& imgdist, st
 {
     // g_Obj_count = mm+1;
     auto & obj = objs[mm];
-    if( 0 == obj.bDetectType ){
+    if( 0 == obj.bDetectType){
         Process_Instance_Objects(obj, imgdist, pTable, vGrid);
     }
     else{
@@ -429,7 +429,7 @@ int Visione_3D::Process_Segmantic_Objects(object_Seg_info& obj, const cv::Mat& d
 {
     cv::Scalar objColor = obj.color;
     const cv::Rect& rc = obj.rc;
-    obj.valid = 2;
+    obj.valid = obj.valid + 2;
     uchar objcls = (perception::camera::name2type.find(obj.name) == perception::camera::name2type.end() ?
                     perception::camera::Perception_Default_Classification : perception::camera::name2type[obj.name]);
     // cv::Mat objBin = cv::Mat::zeros(vGrid.m_imgBEV_static.size(), CV_8UC1);
@@ -564,7 +564,7 @@ int Visione_3D::Process_Instance_Objects(object_Seg_info& obj, const cv::Mat& di
         insobj.fConfidence = obj.fConfidence;
         insobj.fromDepth = true;
         vGrid.vInsObjs.push_back(insobj);
-        obj.valid = 2;
+        obj.valid = obj.valid + 2;
     }
 }
 // std::cout<<" instance rc vision "<<double(clock()-start_)/CLOCKS_PER_SEC<<"s"<<std::endl; // print elapsed time (unit: s)
@@ -842,7 +842,7 @@ int Visione_3D::Process_objects(const cv::Mat& image, Cloud::Ptr& pcl_cloud, std
 int Visione_3D::Process_Segmantic_Objects(object_Seg_info& obj, const cv::Mat& indices, Cloud::Ptr& pcl_cloud, BEV_GridMap& vGrid){
     cv::Scalar objColor = obj.color;
     const cv::Rect& rc = obj.rc;
-    obj.valid = 1;
+    obj.valid = obj.valid + 1;
     uchar objcls = (perception::camera::name2type.find(obj.name) == perception::camera::name2type.end() ?
                     perception::camera::Perception_Default_Classification : perception::camera::name2type[obj.name]);
@@ -1002,7 +1002,7 @@ int Visione_3D::Process_Instance_Objects(object_Seg_info& obj, const cv::Mat& in
         insobj.names = obj.name;
         insobj.fConfidence = obj.fConfidence;
         vGrid.vInsObjs.push_back(insobj);
-        obj.valid = 1;
+        obj.valid = obj.valid + 1;
     }
 }
 }
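The four `obj.valid = obj.valid + N` hunks above all serve the sentinel: a plain `obj.valid = N` would overwrite the `-100` set in `infer()` and a filtered object would be published anyway. A self-contained sketch of the flag arithmetic, where the `valid > 0` publish test is an assumption about the downstream check:

```cpp
#include <algorithm>
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

struct Obj { std::string name; int8_t valid = 0; };

int main() {
    std::vector<std::string> pub_classes = {"stone", "cone"};
    std::vector<Obj> objs = {{"stone"}, {"truck"}};

    // infer(): sentinel for classes outside the whitelist
    for (auto& obj : objs)
        if (std::find(pub_classes.begin(), pub_classes.end(), obj.name) == pub_classes.end())
            obj.valid = -100;

    // Process_*_Objects(): accumulate instead of assign, so the sentinel survives
    for (auto& obj : objs) obj.valid = obj.valid + 2;

    for (const auto& obj : objs)  // stone: 2 -> publish, truck: -98 -> drop
        std::cout << obj.name << " valid=" << int(obj.valid)
                  << (obj.valid > 0 ? " publish\n" : " drop\n");
    return 0;
}
```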
@@ -5,7 +5,7 @@
 * @Date: 2023-04-13 14:37:39
 * @email: qingshui.cheng@waytous.com
 * @LastEditors: wxin
-* @LastEditTime: 2023-11-23 08:53:21
+* @LastEditTime: 2023-11-27 07:46:08
 */
 #ifndef VISION_3D_H
@@ -75,7 +75,7 @@ struct object_Seg_info
     int bDetectType; // 0: instance, 1: semantics
     cv::Rect rc;     // object bounding box
     cv::Mat mask;    // binary object mask, same size as the bounding box
-    uint8_t valid = 0;
+    int8_t valid = 0;  // signed: must hold the -100 "do not publish" sentinel
 };
@@ -6,7 +6,6 @@ Panels:
     Expanded:
       - /Global Options1
       - /PointCloud22
-      - /PointCloud22/Status1
     Splitter Ratio: 0.5
     Tree Height: 191
   - Class: rviz/Selection
@@ -124,7 +123,7 @@ Visualization Manager:
     Size (Pixels): 2
     Size (m): 0.009999999776482582
     Style: Points
-    Topic: /vehicle_all_pointcloud
+    Topic: /sensor/lidar/livox/front_middle/points
     Unreliable: false
     Use Fixed Frame: true
     Use rainbow: true
@@ -133,7 +132,7 @@ Visualization Manager:
   Global Options:
     Background Color: 48; 48; 48
     Default Light: true
-    Fixed Frame: Lidar
+    Fixed Frame: front_middle_livox
    Frame Rate: 30
  Name: root
  Tools:
@@ -157,7 +156,7 @@ Visualization Manager:
   Views:
     Current:
       Class: rviz/Orbit
-      Distance: 78.24835205078125
+      Distance: 193.74002075195312
      Enable Stereo Rendering:
        Stereo Eye Separation: 0.05999999865889549
        Stereo Focal Distance: 1
@@ -173,9 +172,9 @@ Visualization Manager:
      Invert Z Axis: false
      Name: Current View
      Near Clip Distance: 0.009999999776482582
-     Pitch: 0.9147961735725403
+     Pitch: 1.4097964763641357
      Target Frame: <Fixed Frame>
-     Yaw: 0.16403070092201233
+     Yaw: 2.9990310668945312
  Saved: ~
Window Geometry:
  Displays: