1 change: 0 additions & 1 deletion .github/workflows/examples.yml
@@ -28,7 +28,6 @@ jobs:
TI_ENABLE_VULKAN: "0"
TI_DEBUG: "0"
OMNI_KIT_ACCEPT_EULA: "yes"
OMNI_KIT_ALLOW_ROOT: "1"

steps:
- name: Checkout code
1 change: 0 additions & 1 deletion .github/workflows/generic.yml
@@ -78,7 +78,6 @@ jobs:
TI_ENABLE_VULKAN: "0"
TI_DEBUG: "0"
OMNI_KIT_ACCEPT_EULA: "yes"
OMNI_KIT_ALLOW_ROOT: "1"

runs-on: ${{ matrix.OS }}
if: github.event_name != 'release'
5 changes: 2 additions & 3 deletions .github/workflows/production.yml
@@ -25,7 +25,6 @@ env:
PY_COLORS: 1
MADRONA_DISABLE_CUDA_HEAP_SIZE: "1"
OMNI_KIT_ACCEPT_EULA: "yes"
OMNI_KIT_ALLOW_ROOT: "1"

jobs:
unit-tests:
@@ -55,7 +54,7 @@ jobs:
--container-mounts=${{ github.workspace }}:/root/workspace,${HOME}/.cache/uv:/root/.cache/uv \
--no-container-mount-home \
--container-workdir=/root/workspace"
SLURM_ENV_VARS="NVIDIA_DRIVER_CAPABILITIES=all,BASH_ENV=/root/.bashrc,HF_TOKEN,GS_ENABLE_NDARRAY=${GS_ENABLE_NDARRAY},OMNI_KIT_ACCEPT_EULA,OMNI_KIT_ALLOW_ROOT"
SLURM_ENV_VARS="NVIDIA_DRIVER_CAPABILITIES=all,BASH_ENV=/root/.bashrc,HF_TOKEN,GS_ENABLE_NDARRAY=${GS_ENABLE_NDARRAY},OMNI_KIT_ACCEPT_EULA"

JOBID_FIFO="${{ github.workspace }}/.slurm_job_id_fifo"
[[ -e "$JOBID_FIFO" ]] && rm -f "$JOBID_FIFO"
@@ -132,7 +131,7 @@ jobs:
--container-mounts=/mnt/data/artifacts:/mnt/data/artifacts,${{ github.workspace }}:/root/workspace,${HOME}/.cache/uv:/root/.cache/uv \
--no-container-mount-home \
--container-workdir=/root/workspace"
SLURM_ENV_VARS="NVIDIA_DRIVER_CAPABILITIES=all,BASH_ENV=/root/.bashrc,HF_TOKEN,GS_ENABLE_NDARRAY=${GS_ENABLE_NDARRAY},OMNI_KIT_ACCEPT_EULA,OMNI_KIT_ALLOW_ROOT"
SLURM_ENV_VARS="NVIDIA_DRIVER_CAPABILITIES=all,BASH_ENV=/root/.bashrc,HF_TOKEN,GS_ENABLE_NDARRAY=${GS_ENABLE_NDARRAY},OMNI_KIT_ACCEPT_EULA"
if [[ "${{ github.repository }}" == 'Genesis-Embodied-AI/Genesis' && "${{ github.ref }}" == 'refs/heads/main' ]] ; then
SLURM_ENV_VARS="${SLURM_ENV_VARS},WANDB_API_KEY"
fi
19 changes: 9 additions & 10 deletions genesis/utils/usd/usd_context.py
@@ -3,6 +3,7 @@
import os
import shutil
import subprocess
import sys
from pathlib import Path

import numpy as np
@@ -256,11 +257,11 @@ def find_all_materials(self):
self.replace_asset_symlinks()
os.makedirs(self._bake_folder, exist_ok=True)

# Note that it is necessary to call 'bake_usd_material' via a subprocess to ensure proper isolation of
# omninerse kit, otherwise the global conversion registry of some Python bindings will be conflicting between
# each, ultimately leading to segfault...
# Note that it is necessary to call 'bake_usd_material' as a subprocess to ensure proper isolation of omniverse
# kit, otherwise the global conversion registries of some Python bindings would conflict with each other,
# ultimately leading to a segfault...
commands = [
"python",
sys.executable,
os.path.join(os.path.dirname(os.path.abspath(__file__)), "usd_bake.py"),
"--input_file",
self._stage_file,
@@ -275,13 +276,11 @@ def find_all_materials(self):
]
gs.logger.debug(f"Execute: {' '.join(commands)}")

env = dict(os.environ)
env["OMNI_KIT_ALLOW_ROOT"] = "1"

try:
result = subprocess.run(
commands,
capture_output=True,
check=True,
text=True,
)
result = subprocess.run(commands, capture_output=True, check=True, text=True, env=env)
if result.stdout:
gs.logger.debug(result.stdout)
if result.stderr:
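For reference, a minimal sketch of the call pattern that results from this change: the bake script is launched with the current interpreter (sys.executable, so the subprocess runs in the same virtual environment as the parent) and with OMNI_KIT_ALLOW_ROOT set in a copied environment, instead of being exported by every CI workflow. Only the '--input_file' flag is visible in the hunk above; the function name and the elision of the remaining flags are illustrative, not part of the diff.

import os
import subprocess
import sys

def run_bake(stage_file: str) -> subprocess.CompletedProcess:
    # Build the command with the current interpreter rather than whatever
    # 'python' happens to be first on PATH.
    commands = [
        sys.executable,
        os.path.join(os.path.dirname(os.path.abspath(__file__)), "usd_bake.py"),
        "--input_file",
        stage_file,
        # ... remaining flags elided, see the hunk above
    ]
    # Copy the parent environment and allow omniverse-kit to run as root from here,
    # so the CI workflows no longer need to export OMNI_KIT_ALLOW_ROOT themselves.
    env = dict(os.environ)
    env["OMNI_KIT_ALLOW_ROOT"] = "1"
    return subprocess.run(commands, capture_output=True, check=True, text=True, env=env)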
10 changes: 8 additions & 2 deletions genesis/utils/usd/usd_rigid_entity.py
@@ -427,12 +427,18 @@ def parse_usd_rigid_entity(morph: gs.morphs.USD, surface: gs.surfaces.Surface):
stage: Usd.Stage = context.stage

if morph.prim_path is None:
gs.logger.info("USD morph has no prim path. Fallback to its default prim path.")
gs.logger.debug("USD morph has no prim path. Fallback to its default prim path.")
entity_prim = stage.GetDefaultPrim()
else:
entity_prim = stage.GetPrimAtPath(morph.prim_path)
if not entity_prim.IsValid():
gs.raise_exception(f"Invalid prim path {morph.prim_path} in USD file {morph.file}.")
if morph.prim_path is None:
err_msg = (
f"Invalid default prim path {entity_prim} in USD file {morph.file}. Please specify 'morph.prim_path'."
)
else:
err_msg = f"Invalid user-specified prim path {entity_prim} in USD file {morph.file}."
gs.raise_exception(err_msg)

# find joints
links, link_joints, link_path_to_idx = _parse_articulation_structure(stage, entity_prim)
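Because the deleted and added lines are interleaved above, here is a consolidated sketch of the prim-resolution logic as it reads after this change. The helper name and the imports are illustrative; gs.logger, gs.raise_exception, and the morph/stage objects are as in the surrounding file, and 'import genesis as gs' is an assumed alias.

from pxr import Usd

import genesis as gs  # assumed import alias, matching the 'gs.' usage in the diff

def resolve_entity_prim(stage: Usd.Stage, morph) -> Usd.Prim:
    # Fall back to the stage's default prim when no prim path is given.
    if morph.prim_path is None:
        gs.logger.debug("USD morph has no prim path. Fallback to its default prim path.")
        entity_prim = stage.GetDefaultPrim()
    else:
        entity_prim = stage.GetPrimAtPath(morph.prim_path)
    # Report whether the failure comes from a missing default prim or from a
    # user-specified path, so the fix is obvious from the error message.
    if not entity_prim.IsValid():
        if morph.prim_path is None:
            err_msg = (
                f"Invalid default prim path {entity_prim} in USD file {morph.file}. "
                "Please specify 'morph.prim_path'."
            )
        else:
            err_msg = f"Invalid user-specified prim path {entity_prim} in USD file {morph.file}."
        gs.raise_exception(err_msg)
    return entity_prim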
3 changes: 2 additions & 1 deletion pyproject.toml
@@ -72,8 +72,9 @@ dev = [
"pytest-forked",
"pytest-random-order",
"pytest-print",
# Note that 'pytest-rerunfailures' is incompatible with 'pytest-forked'
# - 16.0 is causing pytest-xdist to crash in case of failure or skipped tests
"pytest-rerunfailures!=16.0",
# "pytest-rerunfailures!=16.0",
"setproctitle", # allows renaming the test processes on the cluster
"syrupy",
"huggingface_hub[hf_xet]",
106 changes: 80 additions & 26 deletions tests/test_usd.py
@@ -6,6 +6,7 @@
"""

import os
import time

import xml.etree.ElementTree as ET
import numpy as np
@@ -177,8 +178,8 @@ def compare_geoms(compared_geoms, usd_geoms, tol):
assert len(compared_geoms) == len(usd_geoms)

# Sort geoms by link name for consistent comparison
compared_geoms_sorted = sorted(compared_geoms, key=lambda g: (g.link.name, g._idx))
usd_geoms_sorted = sorted(usd_geoms, key=lambda g: (g.link.name, g._idx))
compared_geoms_sorted = sorted(compared_geoms, key=lambda g: (g.link.name, g.idx))
usd_geoms_sorted = sorted(usd_geoms, key=lambda g: (g.link.name, g.idx))

for compared_geom, usd_geom in zip(compared_geoms_sorted, usd_geoms_sorted):
assert compared_geom.type == usd_geom.type
@@ -266,9 +267,20 @@ def build_mjcf_scene(xml_path: str, scale: float):
"""
# Create MJCF scene
mjcf_scene = gs.Scene()
mjcf_morph = gs.morphs.MJCF(file=xml_path, scale=scale)
mjcf_scene.add_entity(mjcf_morph, material=gs.materials.Rigid(rho=1000.0))

mjcf_scene.add_entity(
gs.morphs.MJCF(
file=xml_path,
scale=scale,
convexify=False,
),
material=gs.materials.Rigid(
rho=1000.0,
),
)

mjcf_scene.build()

return mjcf_scene


@@ -300,18 +312,34 @@ def build_usd_scene(
The USD scene
"""
# Create USD scene
usd_scene = gs.Scene()
usd_morph = gs.morphs.USD(
usd_ctx=UsdContext(usd_file, use_bake_cache=False),
scale=scale,
fixed=fixed,
scene = gs.Scene()

kwargs = dict(
morph=gs.morphs.USD(
usd_ctx=UsdContext(
usd_file,
use_bake_cache=False,
),
scale=scale,
fixed=fixed,
convexify=False,
),
material=gs.materials.Rigid(
rho=1000.0,
),
vis_mode=vis_mode,
)

if is_stage:
usd_scene.add_stage(usd_morph, vis_mode=vis_mode, material=gs.materials.Rigid(rho=1000.0))
scene.add_stage(**kwargs)
else:
usd_scene.add_entity(usd_morph, vis_mode=vis_mode, material=gs.materials.Rigid(rho=1000.0))
usd_scene.build()
return usd_scene
scene.add_entity(**kwargs)

# Note that it is necessary to build the scene because the spatial inertia of some geometries may not be specified.
# In such a case, it is estimated from the geometry during build (RigidLink._build to be specific).
scene.build()

return scene


def build_mesh_scene(mesh_file: str, scale: float):
@@ -438,6 +466,7 @@ def box_plane_usd(asset_tmp_path, box_plane_mjcf: ET.ElementTree):
rigid_body_api.GetKinematicEnabledAttr().Set(False)

stage.Save()

return usd_file


@@ -864,6 +893,7 @@ def spherical_joint_usd(asset_tmp_path, spherical_joint_mjcf: ET.ElementTree):
joint_prim.CreateLocalPos1Attr().Set(Gf.Vec3f(0.0, 0.0, 0.0))

stage.Save()

return usd_file


@@ -883,10 +913,14 @@ def test_spherical_joint_mjcf_vs_usd(xml_path, spherical_joint_usd, scale, tol):
@pytest.mark.parametrize("model_name", ["usd/sneaker_airforce", "usd/RoughnessTest"])
@pytest.mark.skipif(not HAS_USD_SUPPORT, reason="USD support not available")
def test_usd_visual_parse(model_name, tol):
glb_file = os.path.join(get_hf_dataset(pattern=f"{model_name}.glb"), f"{model_name}.glb")
usd_file = os.path.join(get_hf_dataset(pattern=f"{model_name}.usdz"), f"{model_name}.usdz")
glb_asset_path = get_hf_dataset(pattern=f"{model_name}.glb")
glb_file = os.path.join(glb_asset_path, f"{model_name}.glb")
usd_asset_path = get_hf_dataset(pattern=f"{model_name}.usdz")
usd_file = os.path.join(usd_asset_path, f"{model_name}.usdz")

mesh_scene = build_mesh_scene(glb_file, scale=1.0)
usd_scene = build_usd_scene(usd_file, scale=1.0, vis_mode="visual", is_stage=False)

compare_mesh_scene(mesh_scene, usd_scene, tol=tol)


@@ -897,7 +931,9 @@ def test_usd_parse_nodegraph(usd_file):
def test_usd_parse_nodegraph(usd_file):
asset_path = get_hf_dataset(pattern=usd_file)
usd_file = os.path.join(asset_path, usd_file)

usd_scene = build_usd_scene(usd_file, scale=1.0, vis_mode="visual", is_stage=False)

texture0 = usd_scene.entities[0].vgeoms[0].vmesh.surface.diffuse_texture
texture1 = usd_scene.entities[0].vgeoms[1].vmesh.surface.diffuse_texture
assert isinstance(texture0, gs.textures.ColorTexture)
@@ -914,15 +950,33 @@ def test_usd_parse_nodegraph(usd_file):
@pytest.mark.parametrize("backend", [gs.cuda])
@pytest.mark.skipif(not HAS_USD_SUPPORT, reason="USD support not available")
@pytest.mark.skipif(not HAS_OMNIVERSE_KIT_SUPPORT, reason="omniverse-kit support not available")
def test_usd_bake(usd_file):
asset_path = get_hf_dataset(pattern=os.path.join(os.path.dirname(usd_file), "*"), local_dir_use_symlinks=False)
def test_usd_bake(usd_file, tmp_path):
RETRY_NUM = 3 if "PYTEST_XDIST_WORKER" in os.environ else 0
RETRY_DELAY = 30.0

asset_path = get_hf_dataset(pattern=os.path.join(os.path.dirname(usd_file), "*"), local_dir=tmp_path)
usd_file = os.path.join(asset_path, usd_file)
usd_scene = build_usd_scene(usd_file, scale=1.0, vis_mode="visual", is_stage=False, fixed=True)

success_count = 0
for vgeom in usd_scene.entities[0].vgeoms:
vmesh = vgeom.vmesh
bake_success = vmesh.metadata["bake_success"]
assert bake_success is None or bake_success
success_count += 1 if bake_success else 0
assert success_count > 0

# Note that bootstrapping omni-kit from multiple workers concurrently causes failures.
# There is no easy way to get around this limitation other than retrying after some delay...
retry_idx = 0
while True:
usd_scene = build_usd_scene(usd_file, scale=1.0, vis_mode="visual", is_stage=False, fixed=True)

is_any_baked = False
for vgeom in usd_scene.entities[0].vgeoms:
vmesh = vgeom.vmesh
bake_success = vmesh.metadata["bake_success"]
try:
assert bake_success is None or bake_success
except AssertionError:
if retry_idx < RETRY_NUM:
usd_scene.destroy()
print(f"Failed to bake usd. Trying again in {RETRY_DELAY}s...")
time.sleep(RETRY_DELAY)
break
raise
is_any_baked |= bake_success
else:
assert is_any_baked
break
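The retry logic added above is hard to follow with the deleted lines interleaved, so the sketch below isolates the while/for-else pattern it implements, assuming one retry attempt is consumed per failure. The helper name and the build/destroy/result callables are stand-ins for the scene construction in the test, not part of the diff.

import time

RETRY_NUM = 3
RETRY_DELAY = 30.0

def check_bake_with_retry(build_scene, destroy_scene, get_bake_results):
    # build_scene/destroy_scene/get_bake_results are illustrative stand-ins for
    # build_usd_scene(...), usd_scene.destroy(), and the vgeom metadata lookup above.
    retry_idx = 0
    while True:
        scene = build_scene()
        is_any_baked = False
        for bake_success in get_bake_results(scene):
            try:
                # Baking must either be skipped (None) or succeed for every geometry.
                assert bake_success is None or bake_success
            except AssertionError:
                if retry_idx < RETRY_NUM:
                    # Tear down and retry: concurrent omni-kit bootstrap by several
                    # pytest-xdist workers occasionally fails.
                    destroy_scene(scene)
                    retry_idx += 1
                    time.sleep(RETRY_DELAY)
                    break  # leave the for-loop; the while-loop rebuilds the scene
                raise
            is_any_baked |= bool(bake_success)
        else:
            # No break occurred: every geometry passed, and at least one was baked.
            assert is_any_baked
            break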
2 changes: 0 additions & 2 deletions tests/utils.py
@@ -186,7 +186,6 @@ def get_hf_dataset(
local_dir: str | None = None,
num_retry: int = 4,
retry_delay: float = 30.0,
local_dir_use_symlinks: bool = True,
):
assert num_retry >= 1

Expand All @@ -207,7 +206,6 @@ def get_hf_dataset(
allow_patterns=pattern,
max_workers=1,
local_dir=local_dir,
local_dir_use_symlinks=local_dir_use_symlinks,
)

# Make sure that download was successful