diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9b37763..3b9dd1a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -35,7 +35,7 @@ repos: args: [-w] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.14.14 + rev: v0.15.0 hooks: - id: ruff-check args: [--fix, --exit-non-zero-on-fix] @@ -57,7 +57,7 @@ repos: # Protobuf & gRPC - repo: https://github.com/bufbuild/buf - rev: v1.64.0 + rev: v1.65.0 hooks: - id: buf-format diff --git a/aegis_grpc/CHANGELOG.md b/aegis_grpc/CHANGELOG.md index 8a1d06c..664e10a 100644 --- a/aegis_grpc/CHANGELOG.md +++ b/aegis_grpc/CHANGELOG.md @@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added +* [PR-98](https://github.com/AGH-CEAI/aegis_ros/pull/98) - Added gRPC-based camera image reading. * [PR-97](https://github.com/AGH-CEAI/aegis_ros/pull/97) - Added gRPC func: MoveIt2 servo start & stop service. * [PR-96](https://github.com/AGH-CEAI/aegis_ros/pull/96) - Added gRPC func: ros2_control controller switching. * [PR-92](https://github.com/AGH-CEAI/aegis_ros/pull/92) - Added Python client installation script. diff --git a/aegis_grpc/CMakeLists.txt b/aegis_grpc/CMakeLists.txt index 7dd531f..019f5bd 100644 --- a/aegis_grpc/CMakeLists.txt +++ b/aegis_grpc/CMakeLists.txt @@ -13,6 +13,7 @@ set(CMAKE_CXX_STANDARD_REQUIRED ON) # ------------------------------ # Dependencies +find_package(OpenCV REQUIRED) find_package(ament_cmake REQUIRED) set(ROS_DEPENDS @@ -20,6 +21,7 @@ set(ROS_DEPENDS rclcpp_action control_msgs controller_manager_msgs + cv_bridge geometry_msgs moveit_msgs moveit_ros_planning_interface @@ -177,6 +179,8 @@ target_link_libraries( gRPC::grpc++ grpc++_reflection protobuf::libprotobuf + opencv_core + opencv_imgproc ${PROJECT_NAME}_lib) ament_target_dependencies(grpc_server ${ROS_DEPENDS}) diff --git a/aegis_grpc/README.md b/aegis_grpc/README.md index 7613cb5..52d0df5 100644 --- a/aegis_grpc/README.md +++ b/aegis_grpc/README.md @@ -71,15 +71,15 @@ grpcurl -plaintext 127.0.0.1:50051 list proto_aegis_grpc.v1.RobotControlService grpcurl -plaintext 127.0.0.1:50051 list proto_aegis_grpc.v1.RobotReadService grpcurl -plaintext 127.0.0.1:50051 describe proto_aegis_grpc.v1.RobotReadService.GetAll # or with tool from container -podman run --network=host docker.io/fullstorydev/grpcurl -plaintext 127.0.0.1:50051 list +podman run --rm --network=host docker.io/fullstorydev/grpcurl -plaintext 127.0.0.1:50051 list # map grpcurl container alias -alias grpcurl="podman run --network=host docker.io/fullstorydev/grpcurl" +alias grpcurl="podman run --rm --network=host docker.io/fullstorydev/grpcurl" ``` Example call to the `GetAll` method with result as a plain json: ```bash -grpcurl -plaintext -d '{}' 127.0.0.1:50051 proto_aegis_grpc.v1.RobotReadService.GetAll +grpcurl -max-msg-sz 10485760 -plaintext -d '{}' 127.0.0.1:50051 proto_aegis_grpc.v1.RobotReadService.GetAll ``` ## Messages architecture @@ -93,12 +93,18 @@ The server is split into 2 services defined in [`proto_aegis_grpc.v1.robot_srvs` The "ROS-getters" are implemented in the [`RobotReadServiceImpl`](./include/aegis_grpc/robot_read_service.hpp) class as the following methods: -| Method name | Desc. | Impl. 
| gRPC Request | gRPC Response | -|-------------------------------------------------------|-----------------------------|-------|-------------------------|-----------------------------------------------------------------------------------------| -| `proto_aegis_grpc.v1.RobotReadService.GetAll` | Get the all available data. | ✅ | `google.protobuf.Empty` | [`proto_aegis_grpc.v1.robot_srvs.RobotState`](./proto_aegis_grpc/v1/robot_srvs.proto) | -| `proto_aegis_grpc.v1.RobotReadService.GetJointStates` | Get the joints' states. | ✅ | `google.protobuf.Empty` | [`proto_aegis_grpc.v1.sensor_msgs.JointState`](./proto_aegis_grpc/v1/sensor_msgs.proto) | -| `proto_aegis_grpc.v1.RobotReadService.GetTCPPose` | Get the TCP pose. | ✅ | `google.protobuf.Empty` | [`proto_aegis_grpc.v1.geometry_msgs.Pose`](./proto_aegis_grpc/v1/geometry_msgs.proto) | -| `proto_aegis_grpc.v1.RobotReadService.GetWrench` | Read force/torque sensor. | ✅ | `google.protobuf.Empty` | [`proto_aegis_grpc.v1.geometry_msgs.Wrench`](./proto_aegis_grpc/v1/geometry_msgs.proto) | +| Method name | Desc. | Impl. | gRPC Request | gRPC Response | +|------------------------------------------------------------|------------------------------------------------------|-------|-------------------------|---------------------------------------------------------------------------------------------| +| `proto_aegis_grpc.v1.RobotReadService.GetAll` | Get all available robot data. | ✅ | `google.protobuf.Empty` | [`proto_aegis_grpc.v1.robot_srvs.RobotObservation`](./proto_aegis_grpc/v1/robot_srvs.proto) | +| `proto_aegis_grpc.v1.RobotReadService.GetRobotState` | Get consolidated robot state (pose, joints, wrench). | ✅ | `google.protobuf.Empty` | [`proto_aegis_grpc.v1.robot_srvs.RobotState`](./proto_aegis_grpc/v1/robot_srvs.proto) | +| `proto_aegis_grpc.v1.RobotReadService.GetRobotVision` | Get all camera images (scene, left, right). | ✅ | `google.protobuf.Empty` | [`proto_aegis_grpc.v1.robot_srvs.RobotVision`](./proto_aegis_grpc/v1/robot_srvs.proto) | +| `proto_aegis_grpc.v1.RobotReadService.GetJointStates` | Get the joints' states. | ✅ | `google.protobuf.Empty` | [`proto_aegis_grpc.v1.sensor_msgs.JointState`](./proto_aegis_grpc/v1/sensor_msgs.proto) | +| `proto_aegis_grpc.v1.RobotReadService.GetTCPPose` | Get the TCP pose. | ✅ | `google.protobuf.Empty` | [`proto_aegis_grpc.v1.geometry_msgs.Pose`](./proto_aegis_grpc/v1/geometry_msgs.proto) | +| `proto_aegis_grpc.v1.RobotReadService.GetWrench` | Read force/torque sensor. | ✅ | `google.protobuf.Empty` | [`proto_aegis_grpc.v1.geometry_msgs.Wrench`](./proto_aegis_grpc/v1/geometry_msgs.proto) | +| `proto_aegis_grpc.v1.RobotReadService.GetCameraSceneImage` | Get scene camera image. | ✅ | `google.protobuf.Empty` | [`proto_aegis_grpc.v1.sensor_msgs.Image`](./proto_aegis_grpc/v1/sensor_msgs.proto) | +| `proto_aegis_grpc.v1.RobotReadService.GetCameraRightImage` | Get right tool camera image. | ✅ | `google.protobuf.Empty` | [`proto_aegis_grpc.v1.sensor_msgs.Image`](./proto_aegis_grpc/v1/sensor_msgs.proto) | +| `proto_aegis_grpc.v1.RobotReadService.GetCameraLeftImage` | Get left tool camera image. 
| ✅ | `google.protobuf.Empty` | [`proto_aegis_grpc.v1.sensor_msgs.Image`](./proto_aegis_grpc/v1/sensor_msgs.proto) | + You can always list the methods with the `grpcurl` command: ```bash diff --git a/aegis_grpc/include/aegis_grpc/robot_read_service.hpp b/aegis_grpc/include/aegis_grpc/robot_read_service.hpp index 6794187..7bee92e 100644 --- a/aegis_grpc/include/aegis_grpc/robot_read_service.hpp +++ b/aegis_grpc/include/aegis_grpc/robot_read_service.hpp @@ -1,17 +1,20 @@ #ifndef AEGIS_GRPC__ROBOT_READ_SERVICE_HPP_ #define AEGIS_GRPC__ROBOT_READ_SERVICE_HPP_ #include +#include #include #include #include "proto_aegis_grpc/v1/robot_srvs.grpc.pb.h" #include +#include #include #include #include #include #include +#include namespace aegis_grpc { @@ -20,58 +23,121 @@ class RobotReadServiceImpl final public: explicit RobotReadServiceImpl(std::shared_ptr node); - grpc::Status GetTCPPose(grpc::ServerContext* context, - const google::protobuf::Empty* request, - proto_aegis_grpc::v1::Pose* response) override; - - grpc::Status GetWrench(grpc::ServerContext* context, - const google::protobuf::Empty* request, - proto_aegis_grpc::v1::Wrench* response) override; - - grpc::Status - GetJointStates(grpc::ServerContext* context, - const google::protobuf::Empty* request, - proto_aegis_grpc::v1::JointState* response) override; - - grpc::Status GetAll(grpc::ServerContext* context, - const google::protobuf::Empty* request, - proto_aegis_grpc::v1::RobotState* response) override; + grpc::Status GetTCPPose( + grpc::ServerContext* context, + const google::protobuf::Empty* request, + proto_aegis_grpc::v1::Pose* response + ) override; + + grpc::Status GetWrench( + grpc::ServerContext* context, + const google::protobuf::Empty* request, + proto_aegis_grpc::v1::Wrench* response + ) override; + + grpc::Status GetJointStates( + grpc::ServerContext* context, + const google::protobuf::Empty* request, + proto_aegis_grpc::v1::JointState* response + ) override; + + grpc::Status GetCameraSceneImage( + grpc::ServerContext* context, + const google::protobuf::Empty* request, + proto_aegis_grpc::v1::Image* response + ) override; + + grpc::Status GetCameraRightImage( + grpc::ServerContext* context, + const google::protobuf::Empty* request, + proto_aegis_grpc::v1::Image* response + ) override; + + grpc::Status GetCameraLeftImage( + grpc::ServerContext* context, + const google::protobuf::Empty* request, + proto_aegis_grpc::v1::Image* response + ) override; + + grpc::Status GetRobotState( + grpc::ServerContext* context, + const google::protobuf::Empty* request, + proto_aegis_grpc::v1::RobotState* response + ) override; + + grpc::Status GetRobotVision( + grpc::ServerContext* context, + const google::protobuf::Empty* request, + proto_aegis_grpc::v1::RobotVision* response + ) override; + + grpc::Status GetAll( + grpc::ServerContext* context, + const google::protobuf::Empty* request, + proto_aegis_grpc::v1::RobotObservation* response + ) override; // TODO(issue#84) implement getters for images from cameras (RGB & RGBD) // TODO(issue#85) develop synchronization guard to monitor the freshness of the data private: - void DeclareROSParameter(const std::string& name, const std::string& default_val, const std::string& description); + template + void DeclareROSParameter(const std::string& name, const T& default_val, const std::string& description); void PoseSubCb(const geometry_msgs::msg::PoseStamped::SharedPtr msg); void WrenchSubCb(const geometry_msgs::msg::WrenchStamped::SharedPtr msg); void JointStateSubCb(const 
sensor_msgs::msg::JointState::SharedPtr msg); + void ImageSceneSubCb(const sensor_msgs::msg::Image::SharedPtr msg); + void ImageRightSubCb(const sensor_msgs::msg::Image::SharedPtr msg); + void ImageLeftSubCb(const sensor_msgs::msg::Image::SharedPtr msg); static void FillProtoPose( - const geometry_msgs::msg::Pose& ros, - proto_aegis_grpc::v1::Pose* out); + const geometry_msgs::msg::Pose& ros, + proto_aegis_grpc::v1::Pose* out); static void FillProtoWrench( - const geometry_msgs::msg::Wrench& ros, - proto_aegis_grpc::v1::Wrench* out); + const geometry_msgs::msg::Wrench& ros, + proto_aegis_grpc::v1::Wrench* out); static void FillProtoJointState( const sensor_msgs::msg::JointState& ros, proto_aegis_grpc::v1::JointState* out); + void FillProtoImage( + const sensor_msgs::msg::Image& ros, + proto_aegis_grpc::v1::Image* out, + cv::Mat& resize_buffer); + std::shared_ptr node_; rclcpp::Subscription::SharedPtr pose_sub_; rclcpp::Subscription::SharedPtr wrench_sub_; - rclcpp::Subscription::SharedPtr - joint_state_sub_; + rclcpp::Subscription::SharedPtr joint_state_sub_; + rclcpp::Subscription::SharedPtr image_scene_sub_; + rclcpp::Subscription::SharedPtr image_right_sub_; + rclcpp::Subscription::SharedPtr image_left_sub_; geometry_msgs::msg::Pose pose_data_; geometry_msgs::msg::Wrench wrench_data_; sensor_msgs::msg::JointState joint_state_data_; + sensor_msgs::msg::Image image_scene_data_; + sensor_msgs::msg::Image image_right_data_; + sensor_msgs::msg::Image image_left_data_; std::mutex pose_mutex_; std::mutex wrench_mutex_; std::mutex joint_state_mutex_; + std::mutex image_scene_mutex_; + std::mutex image_right_mutex_; + std::mutex image_left_mutex_; + + uint32_t target_img_width_; + uint32_t target_img_height_; + + cv::Mat scene_resized_; + cv::Mat right_resized_; + cv::Mat left_resized_; + + bool enable_image_resize_; }; } // namespace aegis_grpc diff --git a/aegis_grpc/package.xml b/aegis_grpc/package.xml index 64570c9..7a7aa04 100644 --- a/aegis_grpc/package.xml +++ b/aegis_grpc/package.xml @@ -19,6 +19,7 @@ control_msgs controller_manager_msgs + cv_bridge geometry_msgs libgrpc moveit_msgs diff --git a/aegis_grpc/proto_aegis_grpc/v1/robot_srvs.proto b/aegis_grpc/proto_aegis_grpc/v1/robot_srvs.proto index 35496f8..9df15b6 100644 --- a/aegis_grpc/proto_aegis_grpc/v1/robot_srvs.proto +++ b/aegis_grpc/proto_aegis_grpc/v1/robot_srvs.proto @@ -11,7 +11,12 @@ service RobotReadService { rpc GetTCPPose(google.protobuf.Empty) returns (Pose); rpc GetWrench(google.protobuf.Empty) returns (Wrench); rpc GetJointStates(google.protobuf.Empty) returns (JointState); - rpc GetAll(google.protobuf.Empty) returns (RobotState); + rpc GetCameraSceneImage(google.protobuf.Empty) returns (Image); + rpc GetCameraRightImage(google.protobuf.Empty) returns (Image); + rpc GetCameraLeftImage(google.protobuf.Empty) returns (Image); + rpc GetRobotState(google.protobuf.Empty) returns (RobotState); + rpc GetRobotVision(google.protobuf.Empty) returns (RobotVision); + rpc GetAll(google.protobuf.Empty) returns (RobotObservation); } message RobotState { @@ -20,6 +25,17 @@ message RobotState { JointState joint_state = 3; } +message RobotVision { + Image image_scene = 1; + Image image_right = 2; + Image image_left = 3; +} + +message RobotObservation { + RobotState robot_state = 1; + RobotVision robot_vision = 2; +} + service RobotControlService { rpc ServoEnable(google.protobuf.Empty) returns (TriggerResponse); rpc ServoDisable(google.protobuf.Empty) returns (TriggerResponse); diff --git 
a/aegis_grpc/proto_aegis_grpc/v1/sensor_msgs.proto b/aegis_grpc/proto_aegis_grpc/v1/sensor_msgs.proto index eedaabb..e9012fd 100644 --- a/aegis_grpc/proto_aegis_grpc/v1/sensor_msgs.proto +++ b/aegis_grpc/proto_aegis_grpc/v1/sensor_msgs.proto @@ -20,3 +20,11 @@ message JointState { repeated double velocity = 3; repeated double effort = 4; } + +// https://github.com/ros2/common_interfaces/blob/rolling/sensor_msgs/msg/Image.msg +message Image { + uint32 height = 1; + uint32 width = 2; + uint32 step = 3; + bytes data = 4; +} diff --git a/aegis_grpc/python_client/aegis_grpc_client/grpc_client.py b/aegis_grpc/python_client/aegis_grpc_client/grpc_client.py index 6c83b5c..66f5518 100644 --- a/aegis_grpc/python_client/aegis_grpc_client/grpc_client.py +++ b/aegis_grpc/python_client/aegis_grpc_client/grpc_client.py @@ -95,20 +95,6 @@ async def get_tcp_pose(self) -> np.ndarray: self.logger.error(f"GetTCPPose failed: {e}") raise - def _pose_to_array(self, pose: geometry_msgs_pb2.Pose) -> np.ndarray: - return np.array( - [ - pose.position.x, - pose.position.y, - pose.position.z, - pose.orientation.x, - pose.orientation.y, - pose.orientation.z, - pose.orientation.w, - ], - dtype=np.float32, - ) - async def get_wrench(self) -> np.ndarray: """ Get current force and torque measurements. @@ -124,19 +110,6 @@ async def get_wrench(self) -> np.ndarray: self.logger.error(f"GetWrench failed: {e}") raise - def _wrench_to_array(self, wrench: geometry_msgs_pb2.Wrench) -> np.ndarray: - return np.array( - [ - wrench.force.x, - wrench.force.y, - wrench.force.z, - wrench.torque.x, - wrench.torque.y, - wrench.torque.z, - ], - dtype=np.float32, - ) - async def get_joint_states(self) -> np.ndarray: """ Get current joint positions, velocities, and efforts. @@ -167,33 +140,118 @@ async def get_joint_names(self) -> tuple[str]: self.logger.error(f"GetJointStates (names) failed: {e}") raise - def _joints_to_array(self, joints: sensor_msgs_pb2.JointState) -> np.ndarray: - return np.array( - [joints.position, joints.velocity, joints.effort], dtype=np.float32 - ).T - - async def get_all(self) -> dict[str, np.ndarray]: - """ - Get complete robot state (pose, wrench, joint states) in one call. - More efficient than calling individual methods. 
- - Returns: - dict of numpy arrays with: pose, wrench, n_joints*3] - """ + async def get_robot_state(self) -> dict[str, np.ndarray]: self._check_connected() try: - state = await self.read_stub.GetAll(Empty()) + state = await self.read_stub.GetRobotState(Empty()) return { "pose": self._pose_to_array(state.pose), "wrench": self._wrench_to_array(state.wrench), - "joints_pos": np.array([state.joint_state.position], dtype=np.float32), - "joints_vel": np.array([state.joint_state.velocity], dtype=np.float32), - "joints_eff": np.array([state.joint_state.effort], dtype=np.float32), + "joints": self._joints_to_array(state.joint_state), + } + except grpc.RpcError as e: + self.logger.error(f"GetRobotState failed: {e}") + raise + + async def get_camera_scene_image(self) -> np.ndarray: + self._check_connected() + try: + vision = await self.read_stub.GetCameraSceneImage(Empty()) + return self._image_to_array(vision) + except grpc.RpcError as e: + self.logger.error(f"GetCameraSceneImage failed: {e}") + raise + + async def get_camera_right_image(self) -> np.ndarray: + self._check_connected() + try: + vision = await self.read_stub.GetCameraRightImage(Empty()) + return self._image_to_array(vision) + except grpc.RpcError as e: + self.logger.error(f"GetCameraRightImage failed: {e}") + raise + + async def get_camera_left_image(self) -> np.ndarray: + self._check_connected() + try: + vision = await self.read_stub.GetCameraLeftImage(Empty()) + return self._image_to_array(vision) + except grpc.RpcError as e: + self.logger.error(f"GetCameraLeftImage failed: {e}") + raise + + async def get_robot_vision(self) -> dict[str, np.ndarray]: + self._check_connected() + try: + vision = await self.read_stub.GetRobotVision(Empty()) + return { + "scene": self._image_to_array(vision.image_scene), + "right": self._image_to_array(vision.image_right), + "left": self._image_to_array(vision.image_left), + } + except grpc.RpcError as e: + self.logger.error(f"GetRobotVision failed: {e}") + raise + + async def get_all(self) -> dict[str, object]: + self._check_connected() + try: + obs = await self.read_stub.GetAll(Empty()) + return { + "state": { + "pose": self._pose_to_array(obs.robot_state.pose), + "wrench": self._wrench_to_array(obs.robot_state.wrench), + "joints": self._joints_to_array(obs.robot_state.joint_state), + }, + "vision": { + "scene": self._image_to_array(obs.robot_vision.image_scene), + "right": self._image_to_array(obs.robot_vision.image_right), + "left": self._image_to_array(obs.robot_vision.image_left), + }, } except grpc.RpcError as e: self.logger.error(f"GetAll failed: {e}") raise + def _pose_to_array(self, pose: geometry_msgs_pb2.Pose) -> np.ndarray: + return np.array( + [ + pose.position.x, + pose.position.y, + pose.position.z, + pose.orientation.x, + pose.orientation.y, + pose.orientation.z, + pose.orientation.w, + ], + dtype=np.float32, + ) + + def _wrench_to_array(self, wrench: geometry_msgs_pb2.Wrench) -> np.ndarray: + return np.array( + [ + wrench.force.x, + wrench.force.y, + wrench.force.z, + wrench.torque.x, + wrench.torque.y, + wrench.torque.z, + ], + dtype=np.float32, + ) + + def _joints_to_array(self, joints: sensor_msgs_pb2.JointState) -> np.ndarray: + return np.array( + [joints.position, joints.velocity, joints.effort], dtype=np.float32 + ).T + + def _image_to_array(self, img: sensor_msgs_pb2.Image) -> np.ndarray: + return np.moveaxis( + np.frombuffer(img.data, dtype=np.uint8).reshape(img.height, img.width, 3), + -1, + 0, + ) + # ==================== RobotControlService Methods ==================== async 
def servo_joint( diff --git a/aegis_grpc/src/robot_read_service.cpp b/aegis_grpc/src/robot_read_service.cpp index a2ffe15..533b39f 100644 --- a/aegis_grpc/src/robot_read_service.cpp +++ b/aegis_grpc/src/robot_read_service.cpp @@ -1,3 +1,4 @@ +#include #include "aegis_grpc/robot_read_service.hpp" namespace aegis_grpc { @@ -6,9 +7,15 @@ RobotReadServiceImpl::RobotReadServiceImpl(std::shared_ptr<rclcpp::Node> node) : node_(node), pose_data_(), wrench_data_(), joint_state_data_() { // Initialization parameters - DeclareROSParameter("topic_pose", "/tcp_pose", "[str] Init; Sub: topic with the TCP pose data."); - DeclareROSParameter("topic_wrench", "/wrench", "[str] Init; Sub: topic with the F/T data."); - DeclareROSParameter("topic_joints", "/joint_states", "[str] Init; Sub: topic with the joint states."); + DeclareROSParameter("topic_pose", std::string("/tcp_pose"), "[str] Init; Sub: topic with the TCP pose data."); + DeclareROSParameter("topic_wrench", std::string("/wrench"), "[str] Init; Sub: topic with the F/T data."); + DeclareROSParameter("topic_joints", std::string("/joint_states"), "[str] Init; Sub: topic with the joint states."); + DeclareROSParameter("topic_camera_scene", std::string("/cam_scene/rgb/image_rect"), "[str] Init; Sub: Camera scene image topic."); + DeclareROSParameter("topic_camera_right", std::string("/cam_tool_right/image_color"), "[str] Init; Sub: Right tool camera image topic."); + DeclareROSParameter("topic_camera_left", std::string("/cam_tool_left/image_color"), "[str] Init; Sub: Left tool camera image topic."); + DeclareROSParameter("target_image_width", 64, "[int] Init; Target output image width in pixels."); + DeclareROSParameter("target_image_height", 64, "[int] Init; Target output image height in pixels."); + DeclareROSParameter("enable_image_resize", true, "[bool] Init; Enable resizing images before sending over gRPC."); pose_sub_ = node_->create_subscription<geometry_msgs::msg::PoseStamped>( node_->get_parameter("topic_pose").as_string(), 10, @@ -22,12 +29,32 @@ RobotReadServiceImpl::RobotReadServiceImpl(std::shared_ptr<rclcpp::Node> node) node_->get_parameter("topic_joints").as_string(), 10, std::bind(&RobotReadServiceImpl::JointStateSubCb, this, std::placeholders::_1)); + image_scene_sub_ = node_->create_subscription<sensor_msgs::msg::Image>( + node_->get_parameter("topic_camera_scene").as_string(), 10, + std::bind(&RobotReadServiceImpl::ImageSceneSubCb, this, + std::placeholders::_1)); + image_right_sub_ = node_->create_subscription<sensor_msgs::msg::Image>( + node_->get_parameter("topic_camera_right").as_string(), 10, + std::bind(&RobotReadServiceImpl::ImageRightSubCb, this, + std::placeholders::_1)); + image_left_sub_ = node_->create_subscription<sensor_msgs::msg::Image>( + node_->get_parameter("topic_camera_left").as_string(), 10, + std::bind(&RobotReadServiceImpl::ImageLeftSubCb, this, + std::placeholders::_1)); + target_img_width_ = node_->get_parameter("target_image_width").as_int(); + target_img_height_ = node_->get_parameter("target_image_height").as_int(); + enable_image_resize_ = node_->get_parameter("enable_image_resize").as_bool(); } -void RobotReadServiceImpl::DeclareROSParameter(const std::string& name, const std::string& default_val, const std::string& description) { +template <typename T> +void RobotReadServiceImpl::DeclareROSParameter( + const std::string &name, + const T& default_val, + const std::string &description) { auto param_desc = rcl_interfaces::msg::ParameterDescriptor{}; param_desc.description = description; - node_->declare_parameter(name, default_val, param_desc); + + node_->declare_parameter(name, default_val, param_desc); const auto p = node_->get_parameter(name); 
RCLCPP_INFO(node_->get_logger(), "> %s := %s", @@ -36,27 +63,45 @@ void RobotReadServiceImpl::DeclareROSParameter(const std::string& name, const st } void RobotReadServiceImpl::PoseSubCb( - const geometry_msgs::msg::PoseStamped::SharedPtr msg) { - std::lock_guard lock(pose_mutex_); - pose_data_ = msg->pose; + const geometry_msgs::msg::PoseStamped::SharedPtr msg) { + std::lock_guard lock(pose_mutex_); + pose_data_ = msg->pose; } void RobotReadServiceImpl::WrenchSubCb( - const geometry_msgs::msg::WrenchStamped::SharedPtr msg) { - std::lock_guard lock(wrench_mutex_); - wrench_data_ = msg->wrench; + const geometry_msgs::msg::WrenchStamped::SharedPtr msg) { + std::lock_guard lock(wrench_mutex_); + wrench_data_ = msg->wrench; } void RobotReadServiceImpl::JointStateSubCb( - const sensor_msgs::msg::JointState::SharedPtr msg) { - std::lock_guard lock(joint_state_mutex_); - joint_state_data_ = *msg; + const sensor_msgs::msg::JointState::SharedPtr msg) { + std::lock_guard lock(joint_state_mutex_); + joint_state_data_ = *msg; +} + +void RobotReadServiceImpl::ImageSceneSubCb( + const sensor_msgs::msg::Image::SharedPtr msg) { + std::lock_guard lock(image_scene_mutex_); + image_scene_data_ = *msg; +} + +void RobotReadServiceImpl::ImageRightSubCb( + const sensor_msgs::msg::Image::SharedPtr msg) { + std::lock_guard lock(image_right_mutex_); + image_right_data_ = *msg; +} + +void RobotReadServiceImpl::ImageLeftSubCb( + const sensor_msgs::msg::Image::SharedPtr msg) { + std::lock_guard lock(image_left_mutex_); + image_left_data_ = *msg; } -grpc::Status -RobotReadServiceImpl::GetTCPPose(grpc::ServerContext *context, - const google::protobuf::Empty *request, - proto_aegis_grpc::v1::Pose *response) { +grpc::Status RobotReadServiceImpl::GetTCPPose( + grpc::ServerContext *context, + const google::protobuf::Empty *request, + proto_aegis_grpc::v1::Pose *response) { (void) context; (void) request; { @@ -66,94 +111,228 @@ RobotReadServiceImpl::GetTCPPose(grpc::ServerContext *context, return grpc::Status::OK; } -grpc::Status -RobotReadServiceImpl::GetWrench(grpc::ServerContext *context, - const google::protobuf::Empty *request, - proto_aegis_grpc::v1::Wrench *response) { - (void) context; - (void) request; - { - std::lock_guard lock(wrench_mutex_); - FillProtoWrench(wrench_data_, response); - } - return grpc::Status::OK; +grpc::Status RobotReadServiceImpl::GetWrench( + grpc::ServerContext *context, + const google::protobuf::Empty *request, + proto_aegis_grpc::v1::Wrench *response) { + (void) context; + (void) request; + { + std::lock_guard lock(wrench_mutex_); + FillProtoWrench(wrench_data_, response); + } + return grpc::Status::OK; } grpc::Status RobotReadServiceImpl::GetJointStates( - grpc::ServerContext *context, const google::protobuf::Empty *request, - proto_aegis_grpc::v1::JointState *response) { - (void) context; - (void) request; - { - std::lock_guard lock(joint_state_mutex_); - FillProtoJointState(joint_state_data_, response); - } - return grpc::Status::OK; + grpc::ServerContext *context, const google::protobuf::Empty *request, + proto_aegis_grpc::v1::JointState *response) { + (void) context; + (void) request; + { + std::lock_guard lock(joint_state_mutex_); + FillProtoJointState(joint_state_data_, response); + } + return grpc::Status::OK; } -grpc::Status -RobotReadServiceImpl::GetAll(grpc::ServerContext *context, - const google::protobuf::Empty *request, - proto_aegis_grpc::v1::RobotState *response) { - (void) context; - (void) request; - { +grpc::Status RobotReadServiceImpl::GetCameraSceneImage( + 
grpc::ServerContext* context, + const google::protobuf::Empty* request, + proto_aegis_grpc::v1::Image* response) { + (void)context; + (void)request; + { + std::lock_guard lock(image_scene_mutex_); + FillProtoImage(image_scene_data_, response, scene_resized_); + } + return grpc::Status::OK; +} + +grpc::Status RobotReadServiceImpl::GetCameraRightImage( + grpc::ServerContext* context, + const google::protobuf::Empty* request, + proto_aegis_grpc::v1::Image* response) { + (void)context; + (void)request; + { + std::lock_guard lock(image_right_mutex_); + FillProtoImage(image_right_data_, response, right_resized_); + } + return grpc::Status::OK; +} + +grpc::Status RobotReadServiceImpl::GetCameraLeftImage( + grpc::ServerContext* context, + const google::protobuf::Empty* request, + proto_aegis_grpc::v1::Image* response) { + (void)context; + (void)request; + { + std::lock_guard lock(image_left_mutex_); + FillProtoImage(image_left_data_, response, left_resized_); + } + return grpc::Status::OK; +} + +grpc::Status RobotReadServiceImpl::GetRobotState( + grpc::ServerContext* context, + const google::protobuf::Empty* request, + proto_aegis_grpc::v1::RobotState* response) { + // TODO(issue#85): Guard the freshness of the data in gRPC server + (void)context; + (void)request; + { std::lock_guard lock(pose_mutex_); FillProtoPose(pose_data_, response->mutable_pose()); - } - { + } + { std::lock_guard lock(wrench_mutex_); FillProtoWrench(wrench_data_, response->mutable_wrench()); - } - { + } + { std::lock_guard lock(joint_state_mutex_); FillProtoJointState(joint_state_data_, response->mutable_joint_state()); - } - return grpc::Status::OK; + } + return grpc::Status::OK; +} + + +grpc::Status RobotReadServiceImpl::GetRobotVision( + grpc::ServerContext* context, + const google::protobuf::Empty* request, + proto_aegis_grpc::v1::RobotVision* response) { + // TODO(issue#85): Guard the freshness of the data in gRPC server + (void) context; + (void) request; + { + std::lock_guard lock(image_scene_mutex_); + FillProtoImage(image_scene_data_, response->mutable_image_scene(), scene_resized_); + } + { + std::lock_guard lock(image_right_mutex_); + FillProtoImage(image_right_data_, response->mutable_image_right(), right_resized_); + } + { + std::lock_guard lock(image_left_mutex_); + FillProtoImage(image_left_data_, response->mutable_image_left(), left_resized_); + } + return grpc::Status::OK; +} + +grpc::Status RobotReadServiceImpl::GetAll( + grpc::ServerContext* context, + const google::protobuf::Empty* request, + proto_aegis_grpc::v1::RobotObservation* response) { + (void) context; + (void) request; + { + std::lock_guard lock(pose_mutex_); + FillProtoPose(pose_data_, response->mutable_robot_state()->mutable_pose()); + } + { + std::lock_guard lock(wrench_mutex_); + FillProtoWrench(wrench_data_, response->mutable_robot_state()->mutable_wrench()); + } + { + std::lock_guard lock(joint_state_mutex_); + FillProtoJointState(joint_state_data_, response->mutable_robot_state()->mutable_joint_state()); + } + { + std::lock_guard lock(image_scene_mutex_); + FillProtoImage(image_scene_data_, response->mutable_robot_vision()->mutable_image_scene(), scene_resized_); + } + { + std::lock_guard lock(image_right_mutex_); + FillProtoImage(image_right_data_, response->mutable_robot_vision()->mutable_image_right(), right_resized_); + } + { + std::lock_guard lock(image_left_mutex_); + FillProtoImage(image_left_data_, response->mutable_robot_vision()->mutable_image_left(), left_resized_); + } + return grpc::Status::OK; } void 
RobotReadServiceImpl::FillProtoPose( - const geometry_msgs::msg::Pose& ros, - proto_aegis_grpc::v1::Pose* out) { + const geometry_msgs::msg::Pose& ros, + proto_aegis_grpc::v1::Pose* out) { auto* pos = out->mutable_position(); - pos->set_x(ros.position.x); - pos->set_y(ros.position.y); - pos->set_z(ros.position.z); + pos->set_x(ros.position.x); + pos->set_y(ros.position.y); + pos->set_z(ros.position.z); - auto* ori = out->mutable_orientation(); - ori->set_x(ros.orientation.x); - ori->set_y(ros.orientation.y); - ori->set_z(ros.orientation.z); - ori->set_w(ros.orientation.w); + auto* ori = out->mutable_orientation(); + ori->set_x(ros.orientation.x); + ori->set_y(ros.orientation.y); + ori->set_z(ros.orientation.z); + ori->set_w(ros.orientation.w); } void RobotReadServiceImpl::FillProtoWrench( - const geometry_msgs::msg::Wrench& ros, - proto_aegis_grpc::v1::Wrench* out) { - auto* f = out->mutable_force(); - f->set_x(ros.force.x); - f->set_y(ros.force.y); - f->set_z(ros.force.z); + const geometry_msgs::msg::Wrench& ros, + proto_aegis_grpc::v1::Wrench* out) { + auto* f = out->mutable_force(); + f->set_x(ros.force.x); + f->set_y(ros.force.y); + f->set_z(ros.force.z); - auto* t = out->mutable_torque(); - t->set_x(ros.torque.x); - t->set_y(ros.torque.y); - t->set_z(ros.torque.z); + auto* t = out->mutable_torque(); + t->set_x(ros.torque.x); + t->set_y(ros.torque.y); + t->set_z(ros.torque.z); } void RobotReadServiceImpl::FillProtoJointState( - const sensor_msgs::msg::JointState& ros, - proto_aegis_grpc::v1::JointState* out) { - out->clear_name(); - out->clear_position(); - out->clear_velocity(); - out->clear_effort(); - - for (const auto& v : ros.name) out->add_name(v); - for (const auto& v : ros.position) out->add_position(v); - for (const auto& v : ros.velocity) out->add_velocity(v); - for (const auto& v : ros.effort) out->add_effort(v); + const sensor_msgs::msg::JointState& ros, + proto_aegis_grpc::v1::JointState* out) { + out->clear_name(); + out->clear_position(); + out->clear_velocity(); + out->clear_effort(); + + for (const auto& v : ros.name) out->add_name(v); + for (const auto& v : ros.position) out->add_position(v); + for (const auto& v : ros.velocity) out->add_velocity(v); + for (const auto& v : ros.effort) out->add_effort(v); } + void RobotReadServiceImpl::FillProtoImage( + const sensor_msgs::msg::Image& ros, + proto_aegis_grpc::v1::Image* out, + cv::Mat& buffer) + { + assert((ros.encoding == "bgr8" && "Unsupported image encoding (expected BGR8).")); + if (!enable_image_resize_) { + out->set_height(ros.height); + out->set_width(ros.width); + out->set_step(ros.step); + out->set_data( + const_cast(ros.data.data()), + static_cast(ros.height * ros.step) + ); + return; + } + + const cv::Mat src( + ros.height, + ros.width, + CV_8UC3, + const_cast(ros.data.data()), + ros.step + ); + + cv::resize( + src, + buffer, + cv::Size(target_img_width_, target_img_height_), + 0, 0, cv::INTER_LINEAR + ); + + out->set_height(buffer.rows); + out->set_width(buffer.cols); + out->set_step(buffer.step); + out->set_data(buffer.data, buffer.total() * buffer.elemSize()); + } + } // namespace aegis_grpc
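
Example client usage (illustrative sketch, not part of this changeset): the snippet below calls the new `GetRobotVision` and `GetAll` RPCs with plain synchronous gRPC stubs. The module path `proto_aegis_grpc.v1.robot_srvs_pb2_grpc`, the helper `image_to_hwc`, and the enlarged receive limit are assumptions for illustration; generated module names depend on how the protos are compiled, and the bundled async client in `grpc_client.py` remains the supported entry point. The larger limit mirrors the README's `-max-msg-sz 10485760`: with `enable_image_resize` off, a single 1920x1080 `bgr8` frame is already about 6.2 MB, above gRPC's 4 MB default.

```python
# Hypothetical standalone example; adjust imports to the actual generated package.
import grpc
import numpy as np
from google.protobuf.empty_pb2 import Empty

from proto_aegis_grpc.v1 import robot_srvs_pb2_grpc  # assumed generated module name


def image_to_hwc(img) -> np.ndarray:
    # Counterpart of the client's _image_to_array, but kept in H x W x C order.
    # The server publishes bgr8, i.e. 3 bytes per pixel.
    return np.frombuffer(img.data, dtype=np.uint8).reshape(img.height, img.width, 3)


def main() -> None:
    # Raise the receive limit to 10 MiB so full-resolution images still fit
    # when enable_image_resize is false.
    channel = grpc.insecure_channel(
        "127.0.0.1:50051",
        options=[("grpc.max_receive_message_length", 10 * 1024 * 1024)],
    )
    stub = robot_srvs_pb2_grpc.RobotReadServiceStub(channel)

    vision = stub.GetRobotVision(Empty())
    scene = image_to_hwc(vision.image_scene)
    print("scene image:", scene.shape, scene.dtype)

    obs = stub.GetAll(Empty())
    print("joint names:", list(obs.robot_state.joint_state.name))


if __name__ == "__main__":
    main()
```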