Skip to content

Conversion of articulated objects #2154

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 3 commits into
base: eundersander/isaac_vr
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 5 additions & 3 deletions examples/hitl/isaacsim_viewer/isaacsim_viewer.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ def reset(self):

self._spot_wrapper._target_arm_joint_positions = [0.0, -2.36, 0.0, 2.25, 0.0, 1.67, 0.0, 0.0]

pos_usd = isaac_prim_utils.habitat_to_usd_position([random.uniform(-2.0, -4.0), 0.8, -4.8])
pos_usd = isaac_prim_utils.habitat_to_usd_position([-4.5, 0.1, -3.5])
self._spot_wrapper._robot.set_world_pose(pos_usd, [1.0, 0.0, 0.0, 0.0])


Expand Down Expand Up @@ -479,13 +479,15 @@ def __init__(self, app_service: AppService):
isaac_world.set_simulation_dt(physics_dt = self._isaac_physics_dt, rendering_dt = self._isaac_physics_dt)

# asset_path = "/home/eric/projects/habitat-lab/data/usd/scenes/102817140.usda"
asset_path = "/home/eric/projects/habitat-lab/data/usd/scenes/102344193.usda"
asset_path = "/home/joanne/habitat-lab/data/usd/scenes/fremont_static_objects.usda"
from omni.isaac.core.utils.stage import add_reference_to_stage
add_reference_to_stage(usd_path=asset_path, prim_path="/World/test_scene")
self._usd_visualizer.on_add_reference_to_stage(usd_path=asset_path, prim_path="/World/test_scene")

from habitat.isaac_sim._internal.spot_robot_wrapper import SpotRobotWrapper
self._spot_wrapper = SpotRobotWrapper(self._isaac_wrapper.service)
from habitat.isaac_sim._internal.murp_robot_wrapper import MurpRobotWrapper

self._spot_wrapper = MurpRobotWrapper(self._isaac_wrapper.service)

from habitat.isaac_sim._internal.metahand_robot_wrapper import MetahandRobotWrapper
self._metahand_wrapper = MetahandRobotWrapper(self._isaac_wrapper.service)
Expand Down
171 changes: 171 additions & 0 deletions habitat-lab/habitat/articulated_agents/robots/murp_robot.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,171 @@
# Copyright (c) Meta Platforms, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from typing import Dict, List, Optional, Set

import attr
import magnum as mn
import numpy as np

from habitat.articulated_agents.mobile_manipulator import (
ArticulatedAgentCameraParams,
MobileManipulator,
)


@attr.s(auto_attribs=True, slots=True)
class MurpParams:
    """Data to configure a MURP mobile manipulator.

    :property arm_joints: The joint ids of the arm joints.
    :property gripper_joints: The habitat sim joint ids of any grippers.
    :property arm_init_params: The starting joint angles of the arm. If None,
        resets to 0.
    :property gripper_init_params: The starting joint positions of the gripper. If None,
        resets to 0.
    :property ee_offset: The 3D offset from each end-effector link to the true
        end-effector position (one entry per end effector).
    :property ee_links: The Habitat Sim link IDs of the end effectors
        (one entry per end effector).
    :property ee_constraint: Array of per-end-effector 3D position limits;
        the value used by MurpRobot has shape (ee_count, 3, 2) with
        (min, max) per axis — confirm shape convention against consumers.
    :property cameras: The cameras and where they should go. The key is the
        prefix to match in the sensor names. For example, a key of `"head"`
        will match sensors `"head_rgb"` and `"head_depth"`
    :property gripper_closed_state: All gripper joints must achieve this
        state for the gripper to be considered closed.
    :property gripper_open_state: All gripper joints must achieve this
        state for the gripper to be considered open.
    :property gripper_state_eps: Error margin for detecting whether gripper is closed.
    :property arm_mtr_pos_gain: The position gain of the arm motor.
    :property arm_mtr_vel_gain: The velocity gain of the arm motor.
    :property arm_mtr_max_impulse: The maximum impulse of the arm motor.
    :property base_offset: The offset of the root transform from the center ground point for navmesh kinematic control.
    :property base_link_names: The name of the links
    :property ik_pb_link_idx: Link index used as the IK target link.
    :property ik_arm_len: Number of links in the arm URDF used for IK.
    :property ik_arm_start_idx: Starting link index of the arm in the URDF for IK.
    :property ee_count: how many end effectors
    """

    arm_joints: List[int]
    gripper_joints: List[int]

    arm_init_params: Optional[List[float]]
    gripper_init_params: Optional[List[float]]

    ee_offset: List[mn.Vector3]
    ee_links: List[int]
    ee_constraint: np.ndarray

    cameras: Dict[str, ArticulatedAgentCameraParams]

    gripper_closed_state: List[float]
    gripper_open_state: List[float]
    gripper_state_eps: float

    arm_mtr_pos_gain: float
    arm_mtr_vel_gain: float
    arm_mtr_max_impulse: float

    base_offset: mn.Vector3
    base_link_names: Set[str]

    ik_pb_link_idx: int
    ik_arm_len: int
    ik_arm_start_idx: int

    ee_count: Optional[int] = 1


class MurpRobot(MobileManipulator):
    """Mobile manipulator wrapper for the MURP robot.

    Supplies MURP-specific kinematic/camera parameters to the generic
    :class:`MobileManipulator` base and exposes the robot base transform.
    """

    @classmethod
    def _get_murp_params(cls):
        """Build the :class:`MurpParams` describing the MURP robot.

        Joint indices, camera mounts, gripper open/closed states, motor
        gains, and IK link indices are hard-coded for the MURP URDF.
        """
        return MurpParams(
            arm_joints=[0, 2, 4, 6, 8, 10, 12],
            gripper_joints=[19],
            arm_init_params=[
                2.6116285,
                1.5283098,
                1.0930868,
                -0.50559217,
                0.48147443,
                2.628784,
                -1.3962275,
            ],
            gripper_init_params=[-1.56],
            ee_offset=[mn.Vector3(0.08, 0, 0)],
            ee_links=[7],
            ee_constraint=np.array([[[0.4, 1.2], [-0.7, 0.7], [0.25, 1.5]]]),
            cameras={
                "articulated_agent_arm_depth": ArticulatedAgentCameraParams(
                    cam_offset_pos=mn.Vector3(0.166, 0.0, 0.018),
                    cam_orientation=mn.Vector3(0.0, -1.571, 0.0),
                    attached_link_id=36,
                    relative_transform=mn.Matrix4.rotation_z(mn.Deg(-90)),
                ),
                "articulated_agent_arm_rgb": ArticulatedAgentCameraParams(
                    cam_offset_pos=mn.Vector3(0.166, 0.023, 0.03),
                    cam_orientation=mn.Vector3(0, -1.571, 0.0),
                    attached_link_id=36,
                    relative_transform=mn.Matrix4.rotation_z(mn.Deg(-90)),
                ),
                "articulated_agent_arm_panoptic": ArticulatedAgentCameraParams(
                    cam_offset_pos=mn.Vector3(0.166, 0.0, 0.018),
                    cam_orientation=mn.Vector3(0, -1.571, 0.0),
                    attached_link_id=6,
                    relative_transform=mn.Matrix4.rotation_z(mn.Deg(-90)),
                ),
                "head_stereo_right": ArticulatedAgentCameraParams(
                    cam_offset_pos=mn.Vector3(
                        0.4164822634134684, 0.0, 0.03614789234067159
                    ),
                    cam_orientation=mn.Vector3(
                        0.0290787, -0.940569, -0.38998877
                    ),
                    attached_link_id=-1,
                ),
                "head_stereo_left": ArticulatedAgentCameraParams(
                    cam_offset_pos=mn.Vector3(
                        0.4164822634134684, 0.0, -0.03740343144695029
                    ),
                    cam_orientation=mn.Vector3(
                        -3.1125141, -0.940569, 2.751605
                    ),
                    attached_link_id=-1,
                ),
                "third": ArticulatedAgentCameraParams(
                    cam_offset_pos=mn.Vector3(0.5, 1.9, 0.0),
                    cam_look_at_pos=mn.Vector3(1, 0.0, -0.75),
                    attached_link_id=-1,
                ),
            },
            gripper_closed_state=[0.0],
            gripper_open_state=[-1.56],
            gripper_state_eps=0.01,
            arm_mtr_pos_gain=0.3,
            arm_mtr_vel_gain=0.3,
            arm_mtr_max_impulse=10.0,
            base_offset=mn.Vector3(0.0, -0.48, 0.0),
            base_link_names={"base_link", "spine"},
            # for left arm
            ik_pb_link_idx=9,  # link to use for IK
            ik_arm_len=7,  # num links in arm URDF for IK
            ik_arm_start_idx=3,  # starting link for arm in URDF for IK
        )

    @property
    def base_transformation(self):
        """Return the simulated object's world transform as the base transform.

        NOTE(review): an extra -pi/2 rotation about X was previously composed
        here (the disabled `@ add_rot`); the dead computation has been removed.
        If a Habitat/USD frame mismatch resurfaces, re-apply
        ``self.sim_obj.transformation @ mn.Matrix4.rotation(
        mn.Rad(-np.pi / 2), mn.Vector3(1.0, 0, 0))``.
        """
        return self.sim_obj.transformation

    def __init__(
        self, agent_cfg, sim, limit_robo_joints=True, fixed_base=True
    ):
        """Initialize the MURP robot as a mobile-base manipulator.

        :param agent_cfg: Agent configuration passed through to the base class.
        :param sim: The simulator instance.
        :param limit_robo_joints: Whether to clamp joints to their limits.
        :param fixed_base: Whether the base is fixed in place.
        """
        super().__init__(
            self._get_murp_params(),
            agent_cfg,
            sim,
            limit_robo_joints,
            fixed_base,
            base_type="mobile",
        )
Loading