Commit 5ca6257e authored by yangxue's avatar yangxue

transform pointcloud to vehicle_link

parent 3c675112
......@@ -45,6 +45,8 @@ find_package(catkin REQUIRED COMPONENTS
geometry_msgs
std_msgs
waytous_perception_msgs
pcl_ros
tf
)
find_package(PCL REQUIRED)
......
......@@ -11,7 +11,7 @@ rgb_config_path: configs/weights/multi_yolov5m.yaml
device: 0
period_time: 5
vis_res: true
vis_frame_id: Lidar
vis_frame_id: vehicle_link
vis_project: true
# ["pedestrian", "vehicle", "stone"]
pub_classes: ["pedestrian", "vehicle", "stone"] # ["stone", "cone"]
......
......@@ -5,7 +5,7 @@
* @Date: 2023-09-03 02:18:07
* @email: xin.wang@waytous.com
* @LastEditors: yangxue xue.yang@waytous.com
* @LastEditTime: 2025-04-07 03:55:10
* @LastEditTime: 2025-04-07 08:57:20
*/
......@@ -24,6 +24,8 @@ namespace perception::camera{
*/
sensorManager::sensorManager(ros::NodeHandle& nodehandle, ImageInferNodeParam& param)
{
// Initialize the TF transform listener (buffers up to 100 s of transforms)
tran_ = new tf::TransformListener(ros::Duration(100));
infer_param = param;
// last_header_ = eon::proto::common::Header();
......@@ -41,8 +43,10 @@ sensorManager::sensorManager(ros::NodeHandle& nodehandle, ImageInferNodeParam& p
// Initialize camera parameters
std::string param_name = "cameraParam";
multi_model->modelUnitMap->SetIOPtr(param_name, infer_param.cameraParam_);
// 2D-to-BEV conversion class
vision_converter_ = std::make_shared<Visione_3D>(param);
// Visualization class
if(param.vis_res){
ros_visualization_ptr_ = std::make_shared<RosVisualization>(nodehandle, param.vis_frame_id);
......@@ -162,9 +166,9 @@ void sensorManager::publishDetection()
}
std::cout << "Infer objects: " << frame_ptr->objs_.size() << std::endl;
auto vGrid = std::make_shared<BEV_GridMap>();
auto point_cloud_ptr = getCurPointcloud();
if(point_cloud_ptr != nullptr){
Cloud::Ptr pcl_cloud(new Cloud);
pcl::fromROSMsg(*point_cloud_ptr, *pcl_cloud);
......@@ -291,7 +295,7 @@ void sensorManager::publishDetection()
// geometry_msgs/Vector3[] trajectory # optional
// FleetObject fleet_object # optional
obFrame.foreground_objects.emplace_back(ob);
obTrackFrame.foreground_objects.emplace_back(obTrack);
obTrackFrame.object_track.emplace_back(obTrack);
}
obj_publisher_.publish(obTrackFrame); // obFrame or obTrackFrame
std::cout<<" onRun : "<<double(clock()-start_)/CLOCKS_PER_SEC<<"s. "<< "Detected Objects: "
......
......@@ -5,7 +5,7 @@
* @Date: 2023-09-03 01:53:20
* @email: xin.wang@waytous.com
* @LastEditors: yangxue xue.yang@waytous.com
* @LastEditTime: 2025-03-28 08:31:29
* @LastEditTime: 2025-04-07 09:02:10
*/
#ifndef SENSORMANAGER_H
......@@ -21,6 +21,7 @@
#include <pcl/point_types.h>
#include <pcl/common/transforms.h>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl_ros/transforms.h>
#include <interfaces/base_model.h>
#include <libs/ios/detection.h>
......@@ -31,9 +32,13 @@
#include <geometry_msgs/Pose.h>
#include <geometry_msgs/Vector3.h>
#include <geometry_msgs/Polygon.h>
#include <tf/transform_listener.h>
#include <tf/transform_datatypes.h>
// msg
#include "waytous_perception_msgs/ObjectArray.h"
#include "waytous_perception_msgs/Object.h"
#include "waytous_perception_msgs/ObjectTrackArray.h"
#include "waytous_perception_msgs/ObjectTrack.h"
// #include "waytous_perception_msgs/Rect.h"
#include "common_maps.h"
......@@ -64,6 +69,35 @@ public:
pub_thread_ = nullptr;
}
/**
 * @name: transformTFToEigen
 * @msg: Convert a tf::Transform into a homogeneous 4x4 Eigen matrix.
 * @param {tf::Transform} t  TF transform (rotation quaternion + translation)
 * @return {Eigen::Matrix4d} equivalent homogeneous transform matrix
 */
Eigen::Matrix4d transformTFToEigen(const tf::Transform& t)
{
    // Pull rotation and translation out of the TF transform.
    const tf::Quaternion rot = t.getRotation();
    const tf::Vector3 pos = t.getOrigin();
    // Assemble them in an isometry (rigid transform), then return its
    // homogeneous 4x4 matrix representation.
    Eigen::Isometry3d iso = Eigen::Isometry3d::Identity();
    iso.linear() = Eigen::Quaterniond(rot.w(), rot.x(), rot.y(), rot.z()).toRotationMatrix();
    iso.translation() = Eigen::Vector3d(pos.x(), pos.y(), pos.z());
    return iso.matrix();
}
/**
* @name: infer
 * @msg: model inference
......@@ -112,9 +146,27 @@ public:
*/
/**
 * @name: updatePointcloud
 * @msg: Point-cloud subscriber callback. Re-expresses the incoming cloud in
 *       base_frame_ (vehicle_link) via TF and refreshes the camera extrinsic
 *       used by the 2D->BEV converter so it maps vehicle-frame points into
 *       the camera.
 * @param {sensor_msgs::PointCloud2ConstPtr} pdata incoming cloud (lidar frame)
 */
void updatePointcloud(sensor_msgs::PointCloud2ConstPtr pdata){
{
    std::lock_guard<std::mutex> lock_(pointcloud_mutex);
    pc_timestamp_sys_ = GetMillSecondTimeStamp();
    const ros::Time input_time = pdata->header.stamp;
    const std::string cloud_frame = pdata->header.frame_id;
    // Fresh output buffer for the transformed cloud.
    current_pointcloud_ = boost::make_shared<sensor_msgs::PointCloud2>();
    // waitForTransform / lookupTransform / transformPointCloud all throw
    // tf::TransformException when the transform is unavailable; an uncaught
    // exception here would terminate the node, so fail soft instead.
    try {
        // destination_frame, original_frame — blocks until the TF tree can
        // answer at the cloud's own stamp (or the timeout elapses).
        if(!tran_->waitForTransform(base_frame_, cloud_frame, input_time, ros::Duration(10.0))){
            ROS_WARN("updatePointcloud: timed out waiting for %s -> %s transform",
                     cloud_frame.c_str(), base_frame_.c_str());
            current_pointcloud_ = nullptr;
            return;
        }
        // Look up at the same stamp we waited for (was ros::Time(0), which
        // could return a transform from a different instant than the cloud).
        tran_->lookupTransform(base_frame_, cloud_frame, input_time, lidar2vehicle_);
        pcl_ros::transformPointCloud(base_frame_, *pdata, *current_pointcloud_, *tran_);
    } catch (const tf::TransformException& ex) {
        ROS_WARN("updatePointcloud: TF failure (%s)", ex.what());
        current_pointcloud_ = nullptr;
        return;
    }
    // Update lidar2camera with lidar2vehicle_: since the cloud now lives in
    // the vehicle frame, the converter needs the vehicle->camera extrinsic.
    Eigen::Matrix4d lidar2vehicle = transformTFToEigen(lidar2vehicle_);
    Eigen::Matrix4d camera2lidar = infer_param.cameraParam_->camera2lidar_extrinsic;
    Eigen::Matrix4d camera2lidar_new = lidar2vehicle * camera2lidar; // camera2vehicle actually!
    vision_converter_->setLidar2Camera(camera2lidar_new.inverse());
}
};
......@@ -134,6 +186,7 @@ public:
if(std::abs(tm_now - pc_timestamp_sys_) > pc_timevalid_interval_){
current_pointcloud_ = nullptr;
}
std::cout << "getCurPointcloud: " << temp->header.stamp << std::endl;
return temp;
}
}
......@@ -161,6 +214,11 @@ private:
// 节点需要的参数
ImageInferNodeParam infer_param;
tf::TransformListener *tran_; // tf监听器
std::string base_frame_ = "vehicle_link"; // 目标坐标系
tf::StampedTransform lidar2vehicle_; // lidar-vehicle transform
ros::Publisher obj_publisher_; // 交通障碍物发布者
ros::Subscriber image_subscriber_; // 图像订阅者
ros::Subscriber pointcloud_subscriber_; // 点云订阅者
......@@ -189,7 +247,8 @@ private:
// 图像和点云数据指针
sensor_msgs::ImageConstPtr current_image_ = nullptr;
sensor_msgs::PointCloud2ConstPtr current_pointcloud_ = nullptr;
sensor_msgs::PointCloud2Ptr current_pointcloud_ = nullptr;
/// CloudConstPtr current_pointcloud_ = nullptr; // 点云数据指针
};
......
......@@ -4,8 +4,8 @@
* @Author: wxin
* @Date: 2023-11-07 01:52:23
* @email: xin.wang@waytous.com
* @LastEditors: wxin
* @LastEditTime: 2023-11-07 08:30:49
* @LastEditors: yangxue xue.yang@waytous.com
* @LastEditTime: 2025-04-07 09:01:30
*/
#ifndef ROS_VISUAL_H_
......@@ -67,7 +67,7 @@ public:
private:
// ros::Publisher m_pubCloudPoint_3D;
std::string current_frame_id = "Lidar";
std::string current_frame_id = "vehicle_link";
ros::Publisher m_pubCloudPoint_BEV;
ros::Publisher m_pub_bounding_boxes;
ros::NodeHandle m_nodehandle;
......
......@@ -4,8 +4,8 @@
* @Author: chengqingshui
* @Date: 2023-04-13 14:37:39
* @emal: qingshui.cheng@waytous.com
* @LastEditors: wxin
* @LastEditTime: 2024-02-28 09:14:51
* @LastEditors: yangxue xue.yang@waytous.com
* @LastEditTime: 2025-04-07 07:22:03
*/
#ifndef VISION_3D_H
......@@ -186,6 +186,10 @@ public:
*/
int Process_objects(const cv::Mat& image, const cv::Mat& depth, Cloud::Ptr& pcl_cloud, std::vector<object_Seg_info>&objs, BEV_GridMap& vGrid);
/**
 * @name: setLidar2Camera
 * @msg: Overwrite the extrinsic used to project points from the point-cloud
 *       frame into the camera frame.
 * @param {Eigen::Matrix4d} lidar2camera cloud-frame -> camera transform
 * NOTE(review): the caller in sensorManager::updatePointcloud passes
 * (lidar2vehicle * camera2lidar).inverse(), i.e. effectively a
 * vehicle->camera matrix once the cloud is re-expressed in vehicle_link;
 * the parameter name reflects the historical lidar frame — confirm.
 */
void setLidar2Camera(const Eigen::Matrix4d& lidar2camera)
{
    m_lidar2camera = lidar2camera;
}
private:
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment