diff --git a/jsk_2023_04_human_approach/CMakeLists.txt b/jsk_2023_04_human_approach/CMakeLists.txt
new file mode 100644
index 0000000000..1aafa6224a
--- /dev/null
+++ b/jsk_2023_04_human_approach/CMakeLists.txt
@@ -0,0 +1,202 @@
+cmake_minimum_required(VERSION 3.0.2)
+project(jsk_2023_04_human_approach)
+
+## Compile as C++11, supported in ROS Kinetic and newer
+# add_compile_options(-std=c++11)
+
+## Find catkin macros and libraries
+## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
+## is used, also find other catkin packages
+find_package(catkin REQUIRED)
+
+## System dependencies are found with CMake's conventions
+# find_package(Boost REQUIRED COMPONENTS system)
+
+
+## Uncomment this if the package has a setup.py. This macro ensures
+## modules and global scripts declared therein get installed
+## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
+# catkin_python_setup()
+
+################################################
+## Declare ROS messages, services and actions ##
+################################################
+
+## To declare and build messages, services or actions from within this
+## package, follow these steps:
+## * Let MSG_DEP_SET be the set of packages whose message types you use in
+## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
+## * In the file package.xml:
+## * add a build_depend tag for "message_generation"
+## * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET
+## * If MSG_DEP_SET isn't empty the following dependency has been pulled in
+## but can be declared for certainty nonetheless:
+## * add a exec_depend tag for "message_runtime"
+## * In this file (CMakeLists.txt):
+## * add "message_generation" and every package in MSG_DEP_SET to
+## find_package(catkin REQUIRED COMPONENTS ...)
+## * add "message_runtime" and every package in MSG_DEP_SET to
+## catkin_package(CATKIN_DEPENDS ...)
+## * uncomment the add_*_files sections below as needed
+## and list every .msg/.srv/.action file to be processed
+## * uncomment the generate_messages entry below
+## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
+
+## Generate messages in the 'msg' folder
+# add_message_files(
+# FILES
+# Message1.msg
+# Message2.msg
+# )
+
+## Generate services in the 'srv' folder
+# add_service_files(
+# FILES
+# Service1.srv
+# Service2.srv
+# )
+
+## Generate actions in the 'action' folder
+# add_action_files(
+# FILES
+# Action1.action
+# Action2.action
+# )
+
+## Generate added messages and services with any dependencies listed here
+# generate_messages(
+# DEPENDENCIES
+# std_msgs # Or other packages containing msgs
+# )
+
+################################################
+## Declare ROS dynamic reconfigure parameters ##
+################################################
+
+## To declare and build dynamic reconfigure parameters within this
+## package, follow these steps:
+## * In the file package.xml:
+## * add a build_depend and a exec_depend tag for "dynamic_reconfigure"
+## * In this file (CMakeLists.txt):
+## * add "dynamic_reconfigure" to
+## find_package(catkin REQUIRED COMPONENTS ...)
+## * uncomment the "generate_dynamic_reconfigure_options" section below
+## and list every .cfg file to be processed
+
+## Generate dynamic reconfigure parameters in the 'cfg' folder
+# generate_dynamic_reconfigure_options(
+# cfg/DynReconf1.cfg
+# cfg/DynReconf2.cfg
+# )
+
+###################################
+## catkin specific configuration ##
+###################################
+## The catkin_package macro generates cmake config files for your package
+## Declare things to be passed to dependent projects
+## INCLUDE_DIRS: uncomment this if your package contains header files
+## LIBRARIES: libraries you create in this project that dependent projects also need
+## CATKIN_DEPENDS: catkin_packages dependent projects also need
+## DEPENDS: system dependencies of this project that dependent projects also need
+catkin_package(
+# INCLUDE_DIRS include
+# LIBRARIES 202304_human_approach
+# CATKIN_DEPENDS other_catkin_pkg
+# DEPENDS system_lib
+)
+
+###########
+## Build ##
+###########
+
+## Specify additional locations of header files
+## Your package locations should be listed before other locations
+include_directories(
+# include
+# ${catkin_INCLUDE_DIRS}
+)
+
+## Declare a C++ library
+# add_library(${PROJECT_NAME}
+# src/${PROJECT_NAME}/202304_human_approach.cpp
+# )
+
+## Add cmake target dependencies of the library
+## as an example, code may need to be generated before libraries
+## either from message generation or dynamic reconfigure
+# add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Declare a C++ executable
+## With catkin_make all packages are built within a single CMake context
+## The recommended prefix ensures that target names across packages don't collide
+# add_executable(${PROJECT_NAME}_node src/202304_human_approach_node.cpp)
+
+## Rename C++ executable without prefix
+## The above recommended prefix causes long target names, the following renames the
+## target back to the shorter version for ease of user use
+## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node"
+# set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "")
+
+## Add cmake target dependencies of the executable
+## same as for the library above
+# add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Specify libraries to link a library or executable target against
+# target_link_libraries(${PROJECT_NAME}_node
+# ${catkin_LIBRARIES}
+# )
+
+#############
+## Install ##
+#############
+
+# all install targets should use catkin DESTINATION variables
+# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
+
+## Mark executable scripts (Python etc.) for installation
+## in contrast to setup.py, you can choose the destination
+# catkin_install_python(PROGRAMS
+# scripts/my_python_script
+# DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark executables for installation
+## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html
+# install(TARGETS ${PROJECT_NAME}_node
+# RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark libraries for installation
+## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_libraries.html
+# install(TARGETS ${PROJECT_NAME}
+# ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+# LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+# RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION}
+# )
+
+## Mark cpp header files for installation
+# install(DIRECTORY include/${PROJECT_NAME}/
+# DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
+# FILES_MATCHING PATTERN "*.h"
+# PATTERN ".svn" EXCLUDE
+# )
+
+## Mark other files for installation (e.g. launch and bag files, etc.)
+# install(FILES
+# # myfile1
+# # myfile2
+# DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
+# )
+
+#############
+## Testing ##
+#############
+
+## Add gtest based cpp test target and link libraries
+# catkin_add_gtest(${PROJECT_NAME}-test test/test_202304_human_approach.cpp)
+# if(TARGET ${PROJECT_NAME}-test)
+# target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
+# endif()
+
+## Add folders to be run by python nosetests
+# catkin_add_nosetests(test)
diff --git a/jsk_2023_04_human_approach/config/coral.yaml b/jsk_2023_04_human_approach/config/coral.yaml
new file mode 100644
index 0000000000..6afa61813e
--- /dev/null
+++ b/jsk_2023_04_human_approach/config/coral.yaml
@@ -0,0 +1,7 @@
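+# Node manager configuration for the Coral Edge TPU: register the panorama
+# object detector and the human pose estimator; the detector runs by default.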
+edgetpu_node_manager:
+  nodes:
+    - name: edgetpu_panorama_object_detector
+      type: panorama_object_detector
+    - name: edgetpu_human_pose_estimator
+      type: human_pose_estimator
+  default: edgetpu_panorama_object_detector
\ No newline at end of file
diff --git a/jsk_2023_04_human_approach/config/miyamichi.rviz b/jsk_2023_04_human_approach/config/miyamichi.rviz
new file mode 100644
index 0000000000..e774b9b887
--- /dev/null
+++ b/jsk_2023_04_human_approach/config/miyamichi.rviz
@@ -0,0 +1,756 @@
+Panels:
+ - Class: rviz/Displays
+ Help Height: 0
+ Name: Displays
+ Property Tree Widget:
+ Expanded:
+ - /TF1/Frames1
+ - /DepthClouds1/FrontLeftDepthCloud1/Autocompute Value Bounds1
+ - /DepthClouds1/FrontRightDepthCloud1/Autocompute Value Bounds1
+ - /DepthClouds1/LeftDepthCloud1/Autocompute Value Bounds1
+ - /DepthClouds1/RightDepthCloud1/Autocompute Value Bounds1
+ - /DepthClouds1/RearDepthCloud1/Autocompute Value Bounds1
+ - /HandPoseArray1/Status1
+ - /DepthCloud1/Auto Size1
+ Splitter Ratio: 0.5465393662452698
+ Tree Height: 2054
+ - Class: rviz/Selection
+ Name: Selection
+ - Class: rviz/Tool Properties
+ Expanded:
+ - /2D Pose Estimate1
+ - /2D Nav Goal1
+ - /Publish Point1
+ Name: Tool Properties
+ Splitter Ratio: 0.5886790156364441
+ - Class: rviz/Views
+ Expanded:
+ - /Current View1
+ - /Current View1/Focal Point1
+ Name: Views
+ Splitter Ratio: 0.5
+ - Class: rviz/Time
+ Name: Time
+ SyncMode: 0
+ SyncSource: DepthCloud
+Preferences:
+ PromptSaveOnExit: true
+Toolbars:
+ toolButtonStyle: 2
+Visualization Manager:
+ Class: ""
+ Displays:
+ - Alpha: 1
+ Class: rviz/RobotModel
+ Collision Enabled: false
+ Enabled: true
+ Links:
+ All Links Enabled: true
+ Expand Joint Details: false
+ Expand Link Details: false
+ Expand Tree: false
+ Link Tree Style: Links in Alphabetic Order
+ arm0.link_el0:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ arm0.link_el1:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ arm0.link_fngr:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ arm0.link_hr0:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ arm0.link_sh0:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ arm0.link_sh1:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ arm0.link_wr0:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ arm0.link_wr1:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ base_link:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ body:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ front_left_hip:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ front_left_lower_leg:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ front_left_upper_leg:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ front_rail:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ front_right_hip:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ front_right_lower_leg:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ front_right_upper_leg:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ rear_left_hip:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ rear_left_lower_leg:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ rear_left_upper_leg:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ rear_rail:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ rear_right_hip:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ rear_right_lower_leg:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ rear_right_upper_leg:
+ Alpha: 1
+ Show Axes: false
+ Show Trail: false
+ Value: true
+ Name: RobotModel
+ Robot Description: robot_description
+ TF Prefix: ""
+ Update Interval: 0
+ Value: true
+ Visual Enabled: true
+ - Class: rviz/TF
+ Enabled: true
+ Frame Timeout: 15
+ Frames:
+ All Enabled: false
+ arm0.link_el0:
+ Value: false
+ arm0.link_el1:
+ Value: false
+ arm0.link_fngr:
+ Value: false
+ arm0.link_hr0:
+ Value: false
+ arm0.link_sh0:
+ Value: false
+ arm0.link_sh1:
+ Value: false
+ arm0.link_wr0:
+ Value: false
+ arm0.link_wr1:
+ Value: false
+ back:
+ Value: false
+ back_fisheye:
+ Value: false
+ base_link:
+ Value: false
+ body:
+ Value: false
+ flat_body:
+ Value: false
+ front_left_hip:
+ Value: false
+ front_left_lower_leg:
+ Value: false
+ front_left_upper_leg:
+ Value: false
+ front_rail:
+ Value: false
+ front_right_hip:
+ Value: false
+ front_right_lower_leg:
+ Value: false
+ front_right_upper_leg:
+ Value: false
+ frontleft:
+ Value: false
+ frontleft_fisheye:
+ Value: false
+ frontright:
+ Value: false
+ frontright_fisheye:
+ Value: false
+ gpe:
+ Value: false
+ hand:
+ Value: false
+ hand_color_image_sensor:
+ Value: true
+ hand_depth_sensor:
+ Value: false
+ head:
+ Value: false
+ insta360_link:
+ Value: false
+ left:
+ Value: false
+ left_fisheye:
+ Value: false
+ link_wr1:
+ Value: false
+ odom:
+ Value: false
+ rear_left_hip:
+ Value: false
+ rear_left_lower_leg:
+ Value: false
+ rear_left_upper_leg:
+ Value: false
+ rear_rail:
+ Value: true
+ rear_right_hip:
+ Value: false
+ rear_right_lower_leg:
+ Value: false
+ rear_right_upper_leg:
+ Value: false
+ right:
+ Value: false
+ right_fisheye:
+ Value: false
+ vision:
+ Value: false
+ Marker Alpha: 1
+ Marker Scale: 0.75
+ Name: TF
+ Show Arrows: true
+ Show Axes: true
+ Show Names: true
+ Tree:
+ vision:
+ body:
+ arm0.link_sh0:
+ arm0.link_sh1:
+ arm0.link_hr0:
+ arm0.link_el0:
+ arm0.link_el1:
+ arm0.link_wr0:
+ arm0.link_wr1:
+ arm0.link_fngr:
+ {}
+ hand_color_image_sensor:
+ {}
+ hand_depth_sensor:
+ {}
+ base_link:
+ {}
+ flat_body:
+ {}
+ front_left_hip:
+ front_left_upper_leg:
+ front_left_lower_leg:
+ {}
+ front_rail:
+ {}
+ front_right_hip:
+ front_right_upper_leg:
+ front_right_lower_leg:
+ {}
+ hand:
+ {}
+ head:
+ back:
+ back_fisheye:
+ {}
+ frontleft:
+ frontleft_fisheye:
+ {}
+ frontright:
+ frontright_fisheye:
+ {}
+ left:
+ left_fisheye:
+ {}
+ right:
+ right_fisheye:
+ {}
+ link_wr1:
+ {}
+ odom:
+ gpe:
+ {}
+ rear_left_hip:
+ rear_left_upper_leg:
+ rear_left_lower_leg:
+ {}
+ rear_rail:
+ insta360_link:
+ {}
+ rear_right_hip:
+ rear_right_upper_leg:
+ rear_right_lower_leg:
+ {}
+ Update Interval: 0
+ Value: true
+ - Class: rviz/Group
+ Displays:
+ - Alpha: 1
+ Auto Size:
+ Auto Size Factor: 1
+ Value: true
+ Autocompute Intensity Bounds: true
+ Autocompute Value Bounds:
+ Max Value: 11.731683731079102
+ Min Value: 7.7589921951293945
+ Value: true
+ Axis: Z
+ Channel Name: intensity
+ Class: rviz/DepthCloud
+ Color: 255; 255; 255
+ Color Image Topic: ""
+ Color Transformer: AxisColor
+ Color Transport Hint: raw
+ Decay Time: 0
+ Depth Map Topic: /spot/depth/frontleft/image
+ Depth Map Transport Hint: raw
+ Enabled: true
+ Invert Rainbow: false
+ Max Color: 255; 255; 255
+ Min Color: 0; 0; 0
+ Name: FrontLeftDepthCloud
+ Occlusion Compensation:
+ Occlusion Time-Out: 30
+ Value: false
+ Position Transformer: XYZ
+ Queue Size: 5
+ Selectable: true
+ Size (Pixels): 3
+ Style: Flat Squares
+ Topic Filter: true
+ Use Fixed Frame: true
+ Use rainbow: true
+ Value: true
+ - Alpha: 1
+ Auto Size:
+ Auto Size Factor: 1
+ Value: true
+ Autocompute Intensity Bounds: true
+ Autocompute Value Bounds:
+ Max Value: 10.185791015625
+ Min Value: 7.813115119934082
+ Value: true
+ Axis: Z
+ Channel Name: intensity
+ Class: rviz/DepthCloud
+ Color: 255; 255; 255
+ Color Image Topic: ""
+ Color Transformer: AxisColor
+ Color Transport Hint: raw
+ Decay Time: 0
+ Depth Map Topic: /spot/depth/frontright/image
+ Depth Map Transport Hint: raw
+ Enabled: true
+ Invert Rainbow: false
+ Max Color: 255; 255; 255
+ Min Color: 0; 0; 0
+ Name: FrontRightDepthCloud
+ Occlusion Compensation:
+ Occlusion Time-Out: 30
+ Value: false
+ Position Transformer: XYZ
+ Queue Size: 5
+ Selectable: true
+ Size (Pixels): 3
+ Style: Flat Squares
+ Topic Filter: true
+ Use Fixed Frame: true
+ Use rainbow: true
+ Value: true
+ - Alpha: 1
+ Auto Size:
+ Auto Size Factor: 1
+ Value: true
+ Autocompute Intensity Bounds: true
+ Autocompute Value Bounds:
+ Max Value: 8.268538475036621
+ Min Value: 7.727081775665283
+ Value: true
+ Axis: Z
+ Channel Name: intensity
+ Class: rviz/DepthCloud
+ Color: 255; 255; 255
+ Color Image Topic: ""
+ Color Transformer: AxisColor
+ Color Transport Hint: raw
+ Decay Time: 0
+ Depth Map Topic: /spot/depth/left/image
+ Depth Map Transport Hint: raw
+ Enabled: true
+ Invert Rainbow: false
+ Max Color: 255; 255; 255
+ Min Color: 0; 0; 0
+ Name: LeftDepthCloud
+ Occlusion Compensation:
+ Occlusion Time-Out: 30
+ Value: false
+ Position Transformer: XYZ
+ Queue Size: 5
+ Selectable: true
+ Size (Pixels): 3
+ Style: Flat Squares
+ Topic Filter: true
+ Use Fixed Frame: true
+ Use rainbow: true
+ Value: true
+ - Alpha: 1
+ Auto Size:
+ Auto Size Factor: 1
+ Value: true
+ Autocompute Intensity Bounds: true
+ Autocompute Value Bounds:
+ Max Value: 8.739740371704102
+ Min Value: 7.83408784866333
+ Value: true
+ Axis: Z
+ Channel Name: intensity
+ Class: rviz/DepthCloud
+ Color: 255; 255; 255
+ Color Image Topic: ""
+ Color Transformer: AxisColor
+ Color Transport Hint: raw
+ Decay Time: 0
+ Depth Map Topic: /spot/depth/right/image
+ Depth Map Transport Hint: raw
+ Enabled: true
+ Invert Rainbow: false
+ Max Color: 255; 255; 255
+ Min Color: 0; 0; 0
+ Name: RightDepthCloud
+ Occlusion Compensation:
+ Occlusion Time-Out: 30
+ Value: false
+ Position Transformer: XYZ
+ Queue Size: 5
+ Selectable: true
+ Size (Pixels): 3
+ Style: Flat Squares
+ Topic Filter: true
+ Use Fixed Frame: true
+ Use rainbow: true
+ Value: true
+ - Alpha: 1
+ Auto Size:
+ Auto Size Factor: 1
+ Value: true
+ Autocompute Intensity Bounds: true
+ Autocompute Value Bounds:
+ Max Value: 8.420552253723145
+ Min Value: 6.867978572845459
+ Value: true
+ Axis: Z
+ Channel Name: intensity
+ Class: rviz/DepthCloud
+ Color: 255; 255; 255
+ Color Image Topic: ""
+ Color Transformer: AxisColor
+ Color Transport Hint: raw
+ Decay Time: 0
+ Depth Map Topic: /spot/depth/back/image
+ Depth Map Transport Hint: raw
+ Enabled: true
+ Invert Rainbow: false
+ Max Color: 255; 255; 255
+ Min Color: 0; 0; 0
+ Name: RearDepthCloud
+ Occlusion Compensation:
+ Occlusion Time-Out: 30
+ Value: false
+ Position Transformer: XYZ
+ Queue Size: 5
+ Selectable: true
+ Size (Pixels): 3
+ Style: Flat Squares
+ Topic Filter: true
+ Use Fixed Frame: true
+ Use rainbow: true
+ Value: true
+ Enabled: false
+ Name: DepthClouds
+ - Class: jsk_rviz_plugin/OverlayImage
+ Enabled: true
+ Name: PanoramaImage
+ Topic: /dual_fisheye_to_panorama/output
+ Value: true
+ alpha: 0.800000011920929
+ height: 128
+ keep aspect ratio: true
+ left: 0
+ overwrite alpha value: false
+ top: 0
+ transport hint: compressed
+ width: 700
+ - Class: jsk_rviz_plugin/OverlayImage
+ Enabled: false
+ Name: PanoramaDetection
+ Topic: /spot_recognition/object_detection_image
+ Value: false
+ alpha: 0.800000011920929
+ height: 128
+ keep aspect ratio: true
+ left: 0
+ overwrite alpha value: false
+ top: 0
+ transport hint: raw
+ width: 700
+ - Class: jsk_rviz_plugin/TFTrajectory
+ Enabled: true
+ Name: BodyTrajectory
+ Value: true
+ color: 25; 255; 240
+ duration: 1000
+ frame: base_link
+ line_width: 0.009999999776482582
+ - Class: jsk_rviz_plugin/PieChart
+ Enabled: true
+ Name: SpotBattery
+ Topic: /spot/status/battery_percentage
+ Value: true
+ auto color change: false
+ background color: 0; 0; 0
+ backround alpha: 0
+ clockwise rotate direction: false
+ foreground alpha: 0.699999988079071
+ foreground alpha 2: 0.4000000059604645
+ foreground color: 25; 255; 240
+ left: 750
+ max color: 255; 0; 0
+ max color change threthold: 0
+ max value: 100
+ med color: 255; 0; 0
+ med color change threthold: 0
+ min value: 0
+ show caption: true
+ size: 128
+ text size: 10
+ top: 50
+ - Class: jsk_rviz_plugin/PieChart
+ Enabled: true
+ Name: LaptopBattery
+ Topic: /spot/status/laptop_battery_percentage
+ Value: true
+ auto color change: false
+ background color: 0; 0; 0
+ backround alpha: 0
+ clockwise rotate direction: false
+ foreground alpha: 0.699999988079071
+ foreground alpha 2: 0.4000000059604645
+ foreground color: 25; 255; 240
+ left: 900
+ max color: 255; 0; 0
+ max color change threthold: 0
+ max value: 100
+ med color: 255; 0; 0
+ med color change threthold: 0
+ min value: 0
+ show caption: true
+ size: 128
+ text size: 10
+ top: 50
+ - Class: jsk_rviz_plugin/BoundingBoxArray
+ Enabled: false
+ Name: DetectedObjects
+ Queue Size: 10
+ Topic: /spot_recognition/bbox_array
+ Unreliable: false
+ Value: false
+ alpha: 0.800000011920929
+ color: 25; 255; 0
+ coloring: Label
+ line width: 0.05000000074505806
+ only edge: true
+ show coords: false
+ - Class: jsk_rviz_plugin/BoundingBoxArray
+ Enabled: true
+ Name: HumanPos
+ Queue Size: 10
+ Topic: /rect_array_in_panorama_to_bounding_box_array/bbox_array
+ Unreliable: false
+ Value: true
+ alpha: 0.800000011920929
+ color: 25; 255; 0
+ coloring: Auto
+ line width: 0.004999999888241291
+ only edge: false
+ show coords: false
+ - Class: jsk_rviz_plugin/HumanSkeletonArray
+ Enabled: true
+ Name: HandPoseArray
+ Queue Size: 10
+ Topic: /hand_pose_estimation_2d/skeleton
+ Unreliable: false
+ Value: true
+ alpha: 1
+ color: 25; 255; 0
+ coloring: Auto
+ line width: 0.004999999888241291
+ - Class: jsk_rviz_plugin/OverlayImage
+ Enabled: true
+ Name: OverlayImage
+ Topic: /hand_pose_estimation_2d/output/vis
+ Value: true
+ alpha: 0.800000011920929
+ height: 128
+ keep aspect ratio: true
+ left: 25
+ overwrite alpha value: false
+ top: 375
+ transport hint: raw
+ width: 400
+ - Alpha: 1
+ Auto Size:
+ Auto Size Factor: 1
+ Value: true
+ Autocompute Intensity Bounds: true
+ Autocompute Value Bounds:
+ Max Value: 10
+ Min Value: -10
+ Value: true
+ Axis: Z
+ Channel Name: intensity
+ Class: rviz/DepthCloud
+ Color: 255; 255; 255
+ Color Image Topic: ""
+ Color Transformer: RGB8
+ Color Transport Hint: raw
+ Decay Time: 0
+ Depth Map Topic: /spot/camera/hand_depth/depth_registered/image
+ Depth Map Transport Hint: raw
+ Enabled: false
+ Invert Rainbow: false
+ Max Color: 255; 255; 255
+ Min Color: 0; 0; 0
+ Name: DepthCloud
+ Occlusion Compensation:
+ Occlusion Time-Out: 30
+ Value: false
+ Position Transformer: XYZ
+ Queue Size: 5
+ Selectable: true
+ Size (Pixels): 3
+ Style: Flat Squares
+ Topic Filter: false
+ Use Fixed Frame: true
+ Use rainbow: true
+ Value: false
+ Enabled: true
+ Global Options:
+ Background Color: 48; 48; 48
+ Default Light: true
+ Fixed Frame: odom
+ Frame Rate: 30
+ Name: root
+ Tools:
+ - Class: rviz/Interact
+ Hide Inactive Objects: true
+ - Class: rviz/MoveCamera
+ - Class: rviz/Select
+ - Class: rviz/FocusCamera
+ - Class: rviz/Measure
+ - Class: rviz/SetInitialPose
+ Theta std deviation: 0.2617993950843811
+ Topic: /initialpose
+ X std deviation: 0.5
+ Y std deviation: 0.5
+ - Class: rviz/SetGoal
+ Topic: /move_base_simple/goal
+ - Class: rviz/PublishPoint
+ Single click: true
+ Topic: /clicked_point
+ Value: true
+ Views:
+ Current:
+ Class: rviz/Orbit
+ Distance: 13.387189865112305
+ Enable Stereo Rendering:
+ Stereo Eye Separation: 0.05999999865889549
+ Stereo Focal Distance: 1
+ Swap Stereo Eyes: false
+ Value: false
+ Field of View: 0.7853981852531433
+ Focal Point:
+ X: 4.6255669593811035
+ Y: -0.2518952488899231
+ Z: -4.357930660247803
+ Focal Shape Fixed Size: true
+ Focal Shape Size: 0.05000000074505806
+ Invert Z Axis: false
+ Name: Current View
+ Near Clip Distance: 0.009999999776482582
+ Pitch: 0.759796679019928
+ Target Frame: base_link
+ Yaw: 3.1918535232543945
+ Saved: ~
+Window Geometry:
+ Displays:
+ collapsed: false
+ Height: 2432
+ Hide Left Dock: false
+ Hide Right Dock: true
+ QMainWindow State: 000000ff00000000fd00000004000000000000034800000876fc0200000013fb0000001200530065006c0065006300740069006f006e00000001e10000009b000000b100fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000b1fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000006f000008760000018400fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb000000280020002d0020005400720061006a006500630074006f0072007900200053006c00690064006500720000000000ffffffff0000000000000000fb0000001c004d006f00740069006f006e0050006c0061006e006e0069006e006700000002300000018e0000000000000000fb00000044004d006f00740069006f006e0050006c0061006e006e0069006e00670020002d0020005400720061006a006500630074006f0072007900200053006c00690064006500720000000000ffffffff0000000000000000fb0000000a0049006d00610067006501000001f2000001ae0000000000000000fb0000000a0049006d00610067006501000002f4000000ca0000000000000000fb0000000a0049006d006100670065000000013f000000660000000000000000fb0000000a0049006d0061006700650000000164000000940000000000000000fb0000000a0049006d00610067006500000001a3000000e80000000000000000fc00000227000001970000000000fffffffa000000000100000002fb0000000a0049006d0061006700650000000000ffffffff0000000000000000fb0000000a0049006d0061006700650000000000000002ad0000000000000000fb0000000a0049006d00610067006503000004f5000002820000028800000157fb0000000a0049006d006100670065000000065a0000028b0000000000000000000000010000010f00000363fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000003d000003630000013500fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000100000000060fc0100000002fb0000000800540069006d0065010000000000001000000005cd00fffffffb0000000800540069006d0065010000000000000450000000000000000000000cac0000087600000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
+ Selection:
+ collapsed: false
+ Time:
+ collapsed: false
+ Tool Properties:
+ collapsed: false
+ Views:
+ collapsed: true
+ Width: 4096
+ X: 0
+ Y: 54
diff --git a/jsk_2023_04_human_approach/euslisp/arm.l b/jsk_2023_04_human_approach/euslisp/arm.l
new file mode 100755
index 0000000000..5928f08269
--- /dev/null
+++ b/jsk_2023_04_human_approach/euslisp/arm.l
@@ -0,0 +1,38 @@
+#!/usr/bin/env roseus
+
+(ros::load-ros-package "jsk_recognition_msgs")
+(load "package://spoteus/spot-interface.l")
+(spot-init)
+(ros::roseus "spot-angle")
+
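+;; look-at: sort the detected human rects by area and take the largest one,
+;; then close the gripper, update the model pose, and send a hard-coded
+;; whole-body angle-vector.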
+(defun look-at (msg)
+ (let (rect-list max-rect)
+ (setq rect-list (send msg :rects))
+ (if (not (= (length rect-list) 0))
+ (progn
+ (setq rect-list (sort rect-list #'compare-area-of-rects))
+ (setq max-rect (car rect-list))
+ (send *ri* :speak-en "start")
+ (send *ri* :gripper-close)
+ (send *spot* :reset-pose)
+ (send *spot* :head :look-at (float-vector 3000 0 1000))
+
+ ;;(send *ri* :angle-vector (send *spot* :angle-vector #f(0.0 -150.0 150.0 0.0 0.0 10.0) 2000)
+ (send *ri* :angle-vector #f(0.0 45.0 -90.0 0.0 45.0 -90.0 0.0 45.0 -90.0 0.0 45.0 -90.0 0.0 -170.0 150.0 30.0 10.0 45.0 0.0))
+
+
+ ;;(send *ri* :gripper-open)
+ )
+ )
+ )
+ )
+
+(defun compare-area-of-rects (a b)
+ (>= (* (send a :width) (send a :height)) (* (send b :width) (send b :height))))
+
+;;(wait-for-julius-trigger "")
+
+(setq *human* (one-shot-subscribe "human" jsk_recognition_msgs::RectArray))
+(look-at *human*)
+;;(ros::subscribe "/human" jsk_recognition_msgs::RectArray #'cb)
+;;(ros::spin)
diff --git a/jsk_2023_04_human_approach/euslisp/frames.gv b/jsk_2023_04_human_approach/euslisp/frames.gv
new file mode 100644
index 0000000000..d46ecb9227
--- /dev/null
+++ b/jsk_2023_04_human_approach/euslisp/frames.gv
@@ -0,0 +1,49 @@
+digraph G {
+"body" -> "base_link"[label="Broadcaster: /robot_state_publisher\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1681984981.006 sec old)\nBuffer length: 0.000 sec\n"];
+"vision" -> "body"[label="Broadcaster: /spot/spot_ros\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"body" -> "front_rail"[label="Broadcaster: /robot_state_publisher\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1681984981.006 sec old)\nBuffer length: 0.000 sec\n"];
+"body" -> "rear_rail"[label="Broadcaster: /robot_state_publisher\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1681984981.006 sec old)\nBuffer length: 0.000 sec\n"];
+"head" -> "frontleft"[label="Broadcaster: /spot/spot_ros\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1681984981.006 sec old)\nBuffer length: 0.000 sec\n"];
+"body" -> "head"[label="Broadcaster: /spot/spot_ros\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1681984981.006 sec old)\nBuffer length: 0.000 sec\n"];
+"frontleft" -> "frontleft_fisheye"[label="Broadcaster: /spot/spot_ros\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1681984981.006 sec old)\nBuffer length: 0.000 sec\n"];
+"head" -> "frontright"[label="Broadcaster: /spot/spot_ros\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1681984981.006 sec old)\nBuffer length: 0.000 sec\n"];
+"frontright" -> "frontright_fisheye"[label="Broadcaster: /spot/spot_ros\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1681984981.006 sec old)\nBuffer length: 0.000 sec\n"];
+"head" -> "back"[label="Broadcaster: /spot/spot_ros\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1681984981.006 sec old)\nBuffer length: 0.000 sec\n"];
+"back" -> "back_fisheye"[label="Broadcaster: /spot/spot_ros\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1681984981.006 sec old)\nBuffer length: 0.000 sec\n"];
+"head" -> "left"[label="Broadcaster: /spot/spot_ros\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1681984981.006 sec old)\nBuffer length: 0.000 sec\n"];
+"left" -> "left_fisheye"[label="Broadcaster: /spot/spot_ros\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1681984981.006 sec old)\nBuffer length: 0.000 sec\n"];
+"right" -> "right_fisheye"[label="Broadcaster: /spot/spot_ros\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1681984981.006 sec old)\nBuffer length: 0.000 sec\n"];
+"head" -> "right"[label="Broadcaster: /spot/spot_ros\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1681984981.006 sec old)\nBuffer length: 0.000 sec\n"];
+"arm0.link_wr1" -> "hand_color_image_sensor"[label="Broadcaster: /spot/spot_ros\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1681984981.006 sec old)\nBuffer length: 0.000 sec\n"];
+"arm0.link_wr0" -> "arm0.link_wr1"[label="Broadcaster: /robot_state_publisher\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1681984981.006 sec old)\nBuffer length: 0.000 sec\n"];
+"arm0.link_wr1" -> "hand_depth_sensor"[label="Broadcaster: /spot/spot_ros\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1681984981.006 sec old)\nBuffer length: 0.000 sec\n"];
+"arm0.link_hr0" -> "arm0.link_el0"[label="Broadcaster: /robot_state_publisher\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"arm0.link_sh1" -> "arm0.link_hr0"[label="Broadcaster: /robot_state_publisher\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"arm0.link_el0" -> "arm0.link_el1"[label="Broadcaster: /robot_state_publisher\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"arm0.link_wr1" -> "arm0.link_fngr"[label="Broadcaster: /robot_state_publisher\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"arm0.link_sh0" -> "arm0.link_sh1"[label="Broadcaster: /robot_state_publisher\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"body" -> "arm0.link_sh0"[label="Broadcaster: /robot_state_publisher\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"arm0.link_el1" -> "arm0.link_wr0"[label="Broadcaster: /robot_state_publisher\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"body" -> "front_left_hip"[label="Broadcaster: /robot_state_publisher\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"front_left_hip" -> "front_left_upper_leg"[label="Broadcaster: /robot_state_publisher\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"front_left_upper_leg" -> "front_left_lower_leg"[label="Broadcaster: /robot_state_publisher\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"body" -> "front_right_hip"[label="Broadcaster: /robot_state_publisher\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"front_right_hip" -> "front_right_upper_leg"[label="Broadcaster: /robot_state_publisher\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"front_right_upper_leg" -> "front_right_lower_leg"[label="Broadcaster: /robot_state_publisher\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"body" -> "rear_left_hip"[label="Broadcaster: /robot_state_publisher\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"rear_left_hip" -> "rear_left_upper_leg"[label="Broadcaster: /robot_state_publisher\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"rear_left_upper_leg" -> "rear_left_lower_leg"[label="Broadcaster: /robot_state_publisher\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"body" -> "rear_right_hip"[label="Broadcaster: /robot_state_publisher\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"rear_right_hip" -> "rear_right_upper_leg"[label="Broadcaster: /robot_state_publisher\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"rear_right_upper_leg" -> "rear_right_lower_leg"[label="Broadcaster: /robot_state_publisher\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"body" -> "flat_body"[label="Broadcaster: /spot/spot_ros\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"body" -> "hand"[label="Broadcaster: /spot/spot_ros\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"body" -> "link_wr1"[label="Broadcaster: /spot/spot_ros\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"body" -> "odom"[label="Broadcaster: /spot/spot_ros\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"odom" -> "gpe"[label="Broadcaster: /spot/spot_ros\nAverage rate: 16.758 Hz\nMost recent transform: 1681984818.480 ( 162.527 sec old)\nBuffer length: 4.893 sec\n"];
+"rear_rail" -> "insta360_link"[label="Broadcaster: /camera_tf_publisher\nAverage rate: 2.222 Hz\nMost recent transform: 1681984818.836 ( 162.170 sec old)\nBuffer length: 4.501 sec\n"];
+edge [style=invis];
+ subgraph cluster_legend { style=bold; color=black; label ="view_frames Result";
+"Recorded at time: 1681984981.006"[ shape=plaintext ] ;
+ }->"vision";
+}
\ No newline at end of file
diff --git a/jsk_2023_04_human_approach/euslisp/frames.pdf b/jsk_2023_04_human_approach/euslisp/frames.pdf
new file mode 100644
index 0000000000..4e73665e8a
Binary files /dev/null and b/jsk_2023_04_human_approach/euslisp/frames.pdf differ
diff --git a/jsk_2023_04_human_approach/euslisp/human_recog.l b/jsk_2023_04_human_approach/euslisp/human_recog.l
new file mode 100755
index 0000000000..56eac441ad
--- /dev/null
+++ b/jsk_2023_04_human_approach/euslisp/human_recog.l
@@ -0,0 +1,244 @@
+#!/usr/bin/env roseus
+
+(ros::load-ros-package "jsk_recognition_msgs")
+(ros::load-ros-package "speech_recognition_msgs")
+(load "package://spoteus/spot-interface.l")
+;;(spot-init)
+(ros::roseus "human_recog")
+
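+;; move-to-human: pick the first detected human bounding box, compute the yaw
+;; angle toward it from its (x, y) position, and store the human-to-robot
+;; vector used later to face back toward the person (the go-pos calls that
+;; would actually drive the robot are left commented out here).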
+(defun move-to-human (msg)
+ ;;(print "move-to-human-do")
+ (let (rect-list distance max-rect)
+ (setq rect-list (send msg :boxes))
+ (if (not (= (length rect-list) 0))
+ (progn
+ (print "data-start")
+ (if (= (length rect-list) 1)
+ (setq max-rect (car rect-list))
+ )
+ ;;(setq sorted-list (sort rect-list #'compare-distance-of-rects))
+ ;;(setq max-rect (car sorted-list))
+ (setq max-rect (car rect-list))
+ (setq *x* (send (send (send max-rect :pose) :position) :x))
+ (setq *y* (send (send (send max-rect :pose) :position) :y))
+ (setq *angle* (rad2deg (atan *y* *x*)))
+ (setq *human-to-spot-vector* (float-vector (* *x* -1) (* *y* -1) 0))
+ (print *angle*)
+ ;;;(send *ri* :go-pos 0 0 *angle*)
+ ;;;(send *ri* :go-pos 1.0 0 0)
+ )
+ )
+ )
+ )
+
+(defun ask()
+ (print "ask-do")
+ ;; (send *ri* :angle-vector #f(0.0 45.0 -90.0 0.0 45.0 -90.0 0.0 45.0 -90.0 0.0 45.0 -90.0 0.0 -170.0 150.0 0.0 10.0 45.0 0.0))
+ ;; (send *ri* :gripper-open)
+ ;; (send *ri* :speak-jp "どちらに行きますか? また、何かわからないことがありますか?")
+ ;; (unix::sleep 7)
+ ;; (send *ri* :angle-vector (send *spot* :reset-pose))
+ ;; (send *ri* :gripper-close)
+)
+
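+;; recognition-skeleton: look for a forearm bone ("elbow->wrist" in either
+;; direction) in the HumanSkeletonArray, convert it to a pointing vector with
+;; tf-vector, rotate by the angle between that vector and the stored
+;; human-to-robot vector, then run switch-mode for the pending speech command.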
+(defun recognition-skeleton (msgs)
+ (print "recog-skeleton-do")
+ (let (physical-order physical-reverse-order skeleton-list bone-names-list bones index target-bone)
+ (setq physical-order (list "right elbow->right wrist" "left elbow->left wrist"))
+ (setq physical-reverse-order (list "right wrist->right elbow" "left wrist->left elbow"))
+ (ros::ros-info "skeletons ~A" (length (send msgs :skeletons)))
+
+ (if (>= (length (send msgs :skeletons)) 1)
+ (progn
+ (setq skeleton-list (send msgs :skeletons))
+ (dolist (skeleton skeleton-list)
+ (if (send skeleton :bone_names)
+ (progn
+ (setq bone-names-list (send skeleton :bone_names))
+ (dolist (bone-name bone-names-list)
+ (print bone-name)
+ ;;(print (send skelton :bones)
+
+ (dolist (target-bone-name (append physical-order physical-reverse-order))
+ (if (string= bone-name target-bone-name)
+ (progn
+ (setq bones (send skeleton :bones))
+ (setq index (position bone-name bone-names-list :test 'equal))
+ (setq target-bone (elt bones index))
+
+ (cond ((find target-bone-name physical-order :test #'equal)
+ (progn
+ (setq *bone-start-point* (send target-bone :start_point))
+ (setq *bone-end-point* (send target-bone :end_point))
+ ))
+ ((find bone-name physical-reverse-order :test #'equal)
+ (progn
+ (setq *bone-end-point* (send target-bone :start_point))
+ (setq *bone-start-point* (send target-bone :end_point))
+ )))
+ (tf-vector)
+ (send *ri* :go-pos 0 0 (degree-vector *human-to-spot-vector* *target-vector-xy*))
+ (switch-mode)
+ (setq *skeleton-recog-flag* t)
+ (setq *speech-recog-res* 0)
+ ))))))))))
+ )
+
+(defun switch-mode()
+ (cond ((= *speech-recog-res* 2)
+ (progn
+ (send *ri* :speak-jp "これはキッチンです。")
+ (send *ri* :go-pos 0 0 (* -1 (degree-vector *human-to-spot-vector* *target-vector-xy*)))))
+ ((= *speech-recog-res* 3)
+ (progn
+ (send *ri* :speak-jp "いきましょう。ついてきてください。")
+ (unix::sleep 5)
+ (send *ri* :go-pos 2.0 0 0)
+ )))
+ )
+
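+;; tf-vector: transform the selected bone's start/end points from the hand
+;; camera frame ("hand_color_image_sensor") into "base_link" and keep the XY
+;; component of the resulting pointing vector in *target-vector-xy*.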
+(defun tf-vector()
+ (let (tf-base-to-arm tf-start-point tf-end-point target-vector)
+ (setq *tfl* (instance ros::transform-listener :init))
+ (send *tfl* :wait-for-transform "base_link" "hand_color_image_sensor" (ros::time 0) 1)
+ (setq tf-base-to-arm (send *tfl* :lookup-transform "base_link" "hand_color_image_sensor" (ros::time 0)))
+
+ (setq tf-start-point (send tf-base-to-arm :transform-vector (vector *bone-start-point*)))
+ (setq tf-end-point (send tf-base-to-arm :transform-vector (vector *bone-end-point*)))
+ (print tf-start-point)
+ (print tf-end-point)
+ (setq target-vector (v- tf-end-point tf-start-point))
+
+ (setq *target-vector-xy* (float-vector (elt target-vector 0) (elt target-vector 1) 0))
+ )
+ )
+
+
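+;; speech-recognition: map a recognized Japanese phrase onto a command code in
+;; *speech-recog-res*: 1 = stop / come here, 2 = "what is this?",
+;; 3 = "I want to go that way"; a finish word sets *speech-recog-flag*
+;; and ends the main loop.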
+(defun speech-recognition(msgs)
+ (setq *recognition-word* (car (send msgs :transcript)))
+ (print *recognition-word*)
+ (setq *stop-words-list* (list "止まって" "待って" "ストップ" "こっちに来て" "おいで")) ;; "stop", "wait", "stop", "come here", "come"
+ (setq *what-is-it-list* (list "これは何ですか" "あれは何ですか" "これは何" "あれは何" "それは何")) ;; "what is this/that?"
+ (setq *where-list* (list "こっちに行きたいです" "あちらに行きたいです" "あっちに行きたいです" "そっちに行きたいです" "こっちに行きたい" "あっちに行きたい" "そっちに行きたい")) ;; "I want to go this/that way"
+ (setq *finish-words-list* (list "終了" "終わり" "ありがとう")) ;; "finish", "the end", "thank you"
+ (cond ((find *recognition-word* *stop-words-list* :test #'equal)
+ (progn
+ (ros::ros-error "true ~A" *recognition-word*)
+ (setq *speech-recog-res* 1)
+ ))
+ ((find *recognition-word* *what-is-it-list* :test #'equal)
+ (progn
+ ;;(send *ri* :speak-jp "承知しました。何をしりたいか指差しして教えてください。")
+ (setq *speech-recog-res* 2)
+ (unix::sleep 10)
+ ))
+ ((find *recognition-word* *where-list* :test #'equal)
+ (progn
+ ;;(send *ri* :speak-jp "承知しました。どちらに行きたいか指差しして教えてください")
+ (setq *speech-recog-res* 3)
+ (unix::sleep 10)
+ ))
+ ((find *recognition-word* *finish-words-list* :test #'equal)
+ (progn
+ ;;(send *ri* :speak-jp "これにて終了します。お疲れ様でした。")
+ (setq *speech-recog-flag* t)
+ ))
+ )
+ (ros::ros-error "~A" *speech-recog-res*)
+ )
+
+(defun vector (a)
+ (float-vector (send a :x) (send a :y) (send a :z))
+ )
+
+(defun compare-distance-of-rects (a b)
+ (>= (norm (vector (send (send a :pose) :position))) (norm (vector (send (send b :pose) :position))))
+ )
+
+(defun degree-vector (a b)
+ (rad2deg (acos (/ (v. a b) (* (norm a) (norm b)))))
+ )
+
+(ros::roseus-add-srvs "topic_tools")
+(defun call-service-coral (req-topic)
+ (ros::wait-for-service "/coral_input/image/select")
+ ;; If the input image topic is not being published, wait-for-service can hang at this point.
+ (setq req (instance topic_tools::MuxSelectRequest :init))
+ (send req :topic req-topic)
+ (ros::service-call "/coral_input/image/select" req)
+ )
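+;; The hang noted above could be guarded with the optional timeout argument of
+;; ros::wait-for-service (untested sketch; assumes it returns nil on timeout):
+;; (unless (ros::wait-for-service "/coral_input/image/select" 10)
+;;   (ros::ros-warn "mux select service not available; is the input image being published?"))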
+
+(ros::roseus-add-srvs "coral_usb")
+(defun call-service-edgetpu (req-topic)
+ (ros::wait-for-service "/edgetpu_node_manager/start")
+ (setq req-1 (instance coral_usb::StartNodeRequest :init))
+ (send req-1 :name req-topic)
+ (ros::service-call "/edgetpu_node_manager/start" req-1)
+ )
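+;; The edgetpu_node_manager runs one model at a time on the Coral TPU: the
+;; start service stops the currently running node and starts the one named
+;; here (see config/coral.yaml for the registered names).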
+
+(defun move-to-human-call ()
+ (call-service-coral "/dual_fisheye_to_panorama/output")
+ (print "call-coral-human")
+ (call-service-edgetpu "edgetpu_panorama_object_detector")
+ (print "call-edgetpu-human")
+ (setq *human* (one-shot-subscribe "/rect_array_in_panorama_to_bounding_box_array/bbox_array" jsk_recognition_msgs::BoundingBoxArray))
+ (move-to-human *human*)
+ )
+
+(defun recognition-skeleton-call ()
+ (ros::ros-error "recog-skeleton-call")
+ ;;(call-service-coral "/spot/camera/hand_color/image")
+ (call-service-coral "/camera/color/image_raw")
+ (print "call-coral-skeleton")
+ (call-service-edgetpu "edgetpu_human_pose_estimator")
+ ;;(setq *skeleton* (one-shot-subscribe "/skeleton_with_depth/output/skeleton" jsk_recognition_msgs::HumanSkeletonArray))
+ ;;(recognition-skeleton *skeleton*)
+ (ros::subscribe "/skeleton_with_depth/output/skeleton" jsk_recognition_msgs::HumanSkeletonArray #'recognition-skeleton)
+ )
+
+
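+;; main: subscribe to /speech_to_text and loop until a finish word arrives.
+;; While *speech-recog-res* is 1 the robot re-approaches the human; once a
+;; question (2) or a "go there" request (3) has been heard, it keeps running
+;; skeleton recognition until the pointing gesture has been handled.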
+(defun main()
+ (initialization-and-move)
+ (ros::subscribe "/speech_to_text" speech_recognition_msgs::SpeechRecognitionCandidates #'speech-recognition)
+ (until *speech-recog-flag*
+ (cond ((= *speech-recog-res* 1)
+ (progn
+ ;;(send *ri* :speak-jp "とまります。")
+ ;;(send *ri* :go-pos 0 0 0)
+ (move-to-human-call)
+ ;;(move-to-human)
+ ))
+ ((> *speech-recog-res* 1)
+ (progn
+ (until *skeleton-recog-flag*
+ (recognition-skeleton-call)
+ ;;(recognition-skeleton)
+ ;;(ros::unsubscribe "/skeleton_with_depth/output/skeleton")
+ ))))
+ (ros::spin-once)
+ ;;(print "*speech-recog-res*: ~A" *speech-recog-res*)
+ (ros::sleep))
+ )
+
+(defun initialization-and-move()
+ (setq *speech-recog-flag* nil)
+ (setq *skeleton-recog-flag* nil)
+ (setq *speech-recog-res* 0)
+ ;;(send *ri* :angle-vector (send *spot* :reset-pose))
+ ;;(send *ri* :gripper-close)
+ (move-to-human-call)
+ (print "human-call-done")
+ ;;(ask)
+ ;;(print "ask-done")
+ )
+
+(main)
+
+
+
+
+;;(ros::subscribe "/speech_to_text" speech_recognition_msgs::SpeechRecognitionCandidates #'speech-recognition)
+
+;;(ros::spin)
+
+;;(ros::subscribe "/rect_array_in_panorama_to_bounding_box_array/bbox_array" jsk_recognition_msgs::BoundingBoxArray #'cb)
diff --git a/jsk_2023_04_human_approach/euslisp/skeleton_test.l b/jsk_2023_04_human_approach/euslisp/skeleton_test.l
new file mode 100755
index 0000000000..17dcaba54a
--- /dev/null
+++ b/jsk_2023_04_human_approach/euslisp/skeleton_test.l
@@ -0,0 +1,134 @@
+#!/usr/bin/env roseus
+
+(ros::load-ros-package "jsk_recognition_msgs")
+(ros::load-ros-package "speech_recognition_msgs")
+(load "package://spoteus/spot-interface.l")
+(ros::roseus "human_recog")
+
+(defun move-to-human (msg)
+ (let (rect-list distance max-rect)
+ (setq rect-list (send msg :boxes))
+ (if (not (= (length rect-list) 0))
+ (progn
+ ;;(print "data-start")
+ (if (= (length rect-list) 1)
+ (setq max-rect (car rect-list))
+ )
+ (setq sorted-list (sort rect-list #'compare-distance-of-rects))
+ (setq max-rect (car sorted-list))
+ (setq *x* (send (send (send max-rect :pose) :position) :x))
+ (setq *y* (send (send (send max-rect :pose) :position) :y))
+ (setq *angle* (rad2deg (atan *y* *x*)))
+ (setq *human-to-spot-vector* (float-vector (* *x* -1) (* *y* -1) 0))
+ (print *angle*)
+ (send *ri* :go-pos 0 0 *angle*)
+ (send *ri* :go-pos 1.0 0 0)
+ )
+ )
+ )
+ )
+
+
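+;; recognition-skeleton: debug version of the handler in human_recog.l; it
+;; prints every matching arm bone and the raw pointing vector, with the TF and
+;; go-pos steps left commented out for experimenting.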
+(defun recognition-skeleton (msgs)
+ ;;(send *ri* :gripper-open)
+ ;;(send *ri* :angle-vector (send *spot* :reset-pose))
+ (let (physical-order physical-reverse-order skeleton-list bone-names-list bones index target-bone start-point end-point)
+ (setq physical-order (list "right shoulder->right elbow" "right elbow->right wrist" "left shoulder->left elbow" "left elbow->left wrist"))
+ (setq physical-reverse-order (list "right eldow->right shoulder" "right wrist->right elbow" "left elbow->left shoulder" "left wrist->left elbow"))
+
+ (ros::ros-info "skeletons ~A" (length (send msgs :skeletons)))
+
+ (if (>= (length (send msgs :skeletons)) 1)
+ (progn
+ (setq skeleton-list (send msgs :skeletons))
+ ;;(ros::ros-error "~A" skeleton-list)
+
+ (dolist (skeleton skeleton-list)
+ (if (send skeleton :bone_names)
+ (progn
+ (setq bone-names-list (send skeleton :bone_names))
+ (dolist (bone-name bone-names-list)
+ (print bone-name)
+ ;;(print (send skelton :bones)
+ (dolist (target-bone-name (append physical-order physical-reverse-order))
+ (if (string= bone-name target-bone-name)
+ (progn
+ (setq bones (send skeleton :bones))
+ (ros::ros-error "target-bone ~A" bone-name)
+
+ (setq index (position bone-name bone-names-list :test 'equal))
+ (setq target-bone (elt bones index))
+
+ (cond ((find target-bone-name physical-order :test #'equal)
+ (progn
+ (setq start-point (send target-bone :start_point))
+ (setq end-point (send target-bone :end_point))
+ (setq *purpose-vector* (v- (vector end-point) (vector start-point)))
+ (print *purpose-vector*)
+ ))
+ ((find bone-name physical-reverse-order :test #'equal)
+ (progn
+ (setq end-point (send target-bone :start_point))
+ (setq start-point (send target-bone :end_point))
+ ;;(setq *purpose-vector* (v- (vector start-point) (vector end-point)))
+ ))
+ )
+ ;;(ros::ros-error "purpose-vector ~A" *purpose-vector*)
+ ;;(setq *tfl* (instance ros::transform-listener :init))
+ ;;(send *tfl* :wait-for-transform "base_link" "hand_color_image_sensor" (ros::time 0) 1)
+ ;;(setq tf-base-to-arm (send *tfl* :lookup-transform "base_link" "hand_color_image_sensor" (ros::time 0)))
+ ;;(setq target-vector (send tf-base-to-arm :transform-vector *purpose-vector*))
+ ;;(send *ri* :angle-vector (send *spot* :head :look-at target-vector))
+
+ ;;(setq tf-start-point (send tf-base-to-arm :transform-vector (vector start-point)))
+ ;;(setq tf-end-point (send tf-base-to-arm :transform-vector (vector end-point)))
+ ;;(print tf-start-point)
+ ;;(print tf-end-point)
+ ;;(setq target-vector (v- tf-end-point tf-start-point))
+ ;;(ros::ros-error "target-vector ~A" target-vector)
+
+ ;;(send *ri* :go-pos (elt target-vector 0) (elt target-vector 1) 0)
+ ;;(setq *target-vector-xy* (float-vector (elt target-vector 0) (elt target-vector 1) 0))
+ ;;(setq *start-to-spot-vector* (float-vector (elt tf-start-point 0) (elt tf-start-point 1) 0))
+ ;;(send *ri* :go-pos 0 0 (degree-vector *start-to-spot-vector* *target-vector-xy*))
+ )))))))))))
+
+(defun vector (a)
+ (float-vector (send a :x) (send a :y) (send a :z))
+ )
+
+
+(defun compare-distance-of-rects (a b)
+ (>= (norm (vector (send (send a :pose) :position))) (norm (vector (send (send b :pose) :position))))
+ )
+
+(defun degree-vector (a b)
+ (rad2deg (acos (/ (v. a b) (* (norm a) (norm b)))))
+ )
+
+(ros::roseus-add-srvs "topic_tools")
+(defun call-service-coral (req-topic)
+ (ros::wait-for-service "/coral_input/image/select")
+ (setq req (instance topic_tools::MuxSelectRequest :init))
+ (send req :topic req-topic)
+ (ros::service-call "/coral_input/image/select" req)
+ )
+
+(ros::roseus-add-srvs "coral_usb")
+(defun call-service-edgetpu (req-topic)
+ (ros::wait-for-service "/edgetpu_node_manager/start")
+ (setq req-1 (instance coral_usb::StartNodeRequest :init))
+ (send req-1 :name req-topic)
+ (ros::service-call "/edgetpu_node_manager/start" req-1)
+ )
+
+
+
+;;(call-service-coral "/spot/camera/hand_color/image")
+(call-service-coral "/camera/color/image_raw")
+(call-service-edgetpu "edgetpu_human_pose_estimator")
+;;(setq *skeleton* (one-shot-subscribe "/skeleton_with_depth/output/skeleton" jsk_recognition_msgs::HumanSkeletonArray))
+;;(recognition-skeleton *skeleton*)
+(ros::subscribe "/skeleton_with_depth/output/skeleton" jsk_recognition_msgs::HumanSkeletonArray #'recognition-skeleton)
+
+(ros::spin)
diff --git a/jsk_2023_04_human_approach/euslisp/spot_object_recog.l b/jsk_2023_04_human_approach/euslisp/spot_object_recog.l
new file mode 100755
index 0000000000..706fabd0c5
--- /dev/null
+++ b/jsk_2023_04_human_approach/euslisp/spot_object_recog.l
@@ -0,0 +1,68 @@
+#!/usr/bin/env roseus
+
+(ros::load-ros-package "jsk_recognition_msgs")
+
+(load "package://spoteus/spot-interface.l")
+(spot-init)
+
+(ros::roseus "test")
+
+
+
+(defun move-to-human (msg)
+ (let (rect-list sorted-list max-rect pu pv px py)
+ (setq rect-list (send msg :rects))
+ (if (not (= (length rect-list) 0))
+ (progn
+ (setq sorted-list (sort rect-list #'compare-area-of-rects))
+ (setq max-rect (car sorted-list))
+ (setq pu (+ (send max-rect :x) (/ (send max-rect :width) 2)))
+ (setq pv (+ (send max-rect :y) (/ (send max-rect :height) 2)))
+ (setq px (convert-x pu))
+ (setq py (convert-y pv))
+ (setq *angle* (* (* -1.0 px) (/ 180.0 2000.0)))
+ (print *angle*)
+ (send *ri* :go-pos 0 0 *angle*)
+ (send *ri* :go-pos 0.8 0 0)
+ ;;(while (<= (area-of-rects max-rect) 80000)
+ ;;(send *ri* :go-pos 0.5 0 0)
+ ;;)
+ (send *ri* :speak-jp "どちらにいきますか?")
+ ;;(if (>= (area-of-rects max-rect) 80000)
+ ;;(send *ri* :go-pos 0 0 0)
+ ;;)
+ ;;(dolist (x sorted-list)
+ ;;(progn
+ ;;(print (area-of-rects x))
+ ;;(print px)
+ ;;(print py)
+ ;;(send *ri* :go-pos 0 0 (* (* -1.0 px) (/ 180.0 2000.0)))
+ ;;))
+ (print "done")
+ )
+
+ )
+ )
+ )
+
+;;(print (sen (car (send msg :rects)) :y))))
+(defun area-of-rects (x)
+ (* (send x :width) (send x :height))
+ )
+(defun compare-area-of-rects (a b)
+ (>= (* (send a :width) (send a :height)) (* (send b :width) (send b :height))))
+
+(defun convert-x (u)
+ (- u 2000)
+ )
+(defun convert-y (v)
+ (+ (* -1 v) 1000)
+ )
+
+
+(setq *human* (one-shot-subscribe "human" jsk_recognition_msgs::RectArray))
+
+(move-to-human *human*)
+
+;; (ros::subscribe "/human" jsk_recognition_msgs::RectArray #'cb)
+(ros::spin)
diff --git a/jsk_2023_04_human_approach/launch/recognition.launch b/jsk_2023_04_human_approach/launch/recognition.launch
new file mode 100644
index 0000000000..73dba3bedc
--- /dev/null
+++ b/jsk_2023_04_human_approach/launch/recognition.launch
@@ -0,0 +1,72 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ frame_fixed: "rear_rail"
+ image_transport: compressed
+ dimensions_labels:
+ person: [0.5, 0.5, 1.5]
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -->
+
+
+
+
+
+
diff --git a/jsk_2023_04_human_approach/launch/rviz.launch b/jsk_2023_04_human_approach/launch/rviz.launch
new file mode 100644
index 0000000000..62afa4a906
--- /dev/null
+++ b/jsk_2023_04_human_approach/launch/rviz.launch
@@ -0,0 +1,5 @@
+
+
+
+
diff --git a/jsk_2023_04_human_approach/package.xml b/jsk_2023_04_human_approach/package.xml
new file mode 100644
index 0000000000..116c19860a
--- /dev/null
+++ b/jsk_2023_04_human_approach/package.xml
@@ -0,0 +1,59 @@
+<?xml version="1.0"?>
+<package format="2">
+  <name>jsk_2023_04_human_approach</name>
+  <version>0.0.0</version>
+  <description>The 202304_human_approach package</description>
+
+  <maintainer email="obinata@todo.todo">obinata</maintainer>
+
+  <license>TODO</license>
+
+  <buildtool_depend>catkin</buildtool_depend>
+
+  <export>
+  </export>
+</package>
diff --git a/jsk_2023_04_human_approach/python/merge.py b/jsk_2023_04_human_approach/python/merge.py
new file mode 100755
index 0000000000..9307541186
--- /dev/null
+++ b/jsk_2023_04_human_approach/python/merge.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+import rospy
+import message_filters
+from jsk_recognition_msgs.msg import ClassificationResult
+from jsk_recognition_msgs.msg import RectArray
+
+rospy.init_node('miyamichi')
+pub = rospy.Publisher('human', RectArray, queue_size=4)
+
+
+def callback(rects, classes):
+ msg = RectArray()
+ if "person" in classes.label_names:
+ for i, label_name in enumerate(classes.label_names):
+ if label_name == "person":
+ msg.rects.append(rects.rects[i])
+ msg.header = rects.header
+ pub.publish(msg)
+
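+# TimeSynchronizer only fires when both messages carry exactly the same header
+# stamp; the edgetpu detector publishes rects and class results from the same
+# input image, so their stamps should match. If they ever diverge,
+# message_filters.ApproximateTimeSynchronizer is the drop-in alternative.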
+Rect_sub = message_filters.Subscriber('/edgetpu_panorama_object_detector/output/rects', RectArray)
+Class_sub = message_filters.Subscriber('/edgetpu_panorama_object_detector/output/class', ClassificationResult)
+
+ts = message_filters.TimeSynchronizer([Rect_sub, Class_sub], 10)
+ts.registerCallback(callback)
+rospy.spin()
diff --git a/jsk_2023_04_human_approach/rosinstall b/jsk_2023_04_human_approach/rosinstall
new file mode 100644
index 0000000000..4a9fc251bc
--- /dev/null
+++ b/jsk_2023_04_human_approach/rosinstall
@@ -0,0 +1,14 @@
+- git:
+    # It can be checked out to origin/master after https://github.com/jsk-ros-pkg/coral_usb_ros/pull/130 has been merged
+    local-name: coral_usb_ros
+    uri: https://github.com/mqcmd196/coral_usb_ros.git
+    version: PR/publish_skel
+- git:
+    local-name: jsk_demos
+    uri: git@github.com:heissereal/jsk_demos.git
+    version: master
+- git:
+    # It can be checked out to origin/master after https://github.com/jsk-ros-pkg/jsk_recognition/pull/2769 has been merged
+    local-name: jsk_recognition
+    uri: https://github.com/iory/jsk_recognition.git
+    version: skeleton-with-depth