1+ import itertools
12import queue
23import sys
34from io import BytesIO
@@ -246,7 +247,7 @@ def test_batched_offscreen_rendering(show_viewer, tol):
246247 )
247248 scene .build (n_envs = 3 , env_spacing = (2.0 , 2.0 ))
248249
249- for _ in range (10 ):
250+ for _ in range (7 ):
250251 dofs_lower_bound , dofs_upper_bound = robot .get_dofs_limit ()
251252 qpos = dofs_lower_bound + (dofs_upper_bound - dofs_lower_bound ) * torch .rand (robot .n_qs )
252253
@@ -271,6 +272,47 @@ def test_batched_offscreen_rendering(show_viewer, tol):
271272 assert_allclose (steps_rgb_arrays [0 ][i ], steps_rgb_arrays [1 ][i ], tol = tol )
272273
273274
@pytest.mark.required
def test_render_api(show_viewer):
    """Render a simple scene with every combination of output flags.

    For each buffer kind (rgb / depth / segmentation / normal), every render
    call that requested it must yield the same image, regardless of which
    other buffers were requested alongside it.
    """
    scene = gs.Scene(
        show_viewer=show_viewer,
        show_FPS=False,
    )
    scene.add_entity(
        morph=gs.morphs.Sphere(
            pos=(0.0, 0.0, 0.0),
            radius=1.0,
            fixed=True,
        ),
    )
    camera = scene.add_camera(
        pos=(0.0, 0.0, 10.0),
        lookat=(0.0, 0.0, 0.0),
        GUI=show_viewer,
    )
    scene.build()

    # Collect each requested buffer across all 16 on/off flag combinations.
    # Key order matches both camera.render's keyword names and its output order.
    buffers = {name: [] for name in ("rgb", "depth", "segmentation", "normal")}
    for flags in itertools.product((True, False), repeat=4):
        outputs = camera.render(**dict(zip(buffers, flags)))
        for name, requested, arr in zip(buffers, flags, outputs):
            if requested:
                buffers[name].append(arr.astype(np.float32))

    assert_allclose(np.diff(buffers["rgb"], axis=0), 0.0, tol=gs.EPS)
    assert_allclose(np.diff(buffers["segmentation"], axis=0), 0.0, tol=gs.EPS)
    assert_allclose(np.diff(buffers["normal"], axis=0), 0.0, tol=gs.EPS)
    # Depth is not matching at machine-precision because of MSAA being disabled for depth-only
    # FIXME: There is one pixel off on MacOS with Apple's Software Rendering, probably due to a bug...
    tol = 2e-4 if sys.platform == "darwin" else gs.EPS
    assert_allclose(np.diff(buffers["depth"], axis=0)[[0, 1, 2, 4, 5, 6]], 0.0, tol=tol)
315+
274316@pytest .mark .required
275317def test_point_cloud (show_viewer ):
276318 CAMERA_DIST = 8.0
@@ -312,7 +354,8 @@ def test_point_cloud(show_viewer):
312354 GUI = show_viewer ,
313355 )
314356 for camera in scene .visualizer .cameras :
315- camera ._near = 1.0
357+ camera ._near = 2.0
358+ camera ._far = 15.0
316359 scene .build ()
317360
318361 if show_viewer :
@@ -329,12 +372,14 @@ def test_point_cloud(show_viewer):
329372 point_cloud = point_cloud [mask ]
330373 point_cloud = point_cloud @ gu .z_up_to_R (np .array ((1.0 , 1.0 , 1.0 )), np .array ((0.0 , 0.0 , 1.0 ))).T
331374 point_cloud -= np .array ((CAMERA_DIST , CAMERA_DIST , CAMERA_DIST ))
332- assert_allclose (np .linalg .norm (point_cloud , ord = float ("inf" ), axis = - 1 ), BOX_HALFSIZE , atol = 1e-4 )
375+ # FIXME: Tolerance must be increased when using Apple's Software Rendering, probably due to a bug...
376+ tol = 2e-4 if sys .platform == "darwin" else 1e-4
377+ assert_allclose (np .linalg .norm (point_cloud , ord = float ("inf" ), axis = - 1 ), BOX_HALFSIZE , atol = tol )
333378
334379 point_cloud , mask = camera_box_2 .render_pointcloud (world_frame = True )
335380 point_cloud = point_cloud [mask ]
336381 point_cloud += np .array ((0.0 , OBJ_OFFSET , 0.0 ))
337- assert_allclose (np .linalg .norm (point_cloud , ord = float ("inf" ), axis = - 1 ), BOX_HALFSIZE , atol = 1e-4 )
382+ assert_allclose (np .linalg .norm (point_cloud , ord = float ("inf" ), axis = - 1 ), BOX_HALFSIZE , atol = tol )
338383
339384 # It is not possible to get higher accuracy because of tesselation
340385 point_cloud , mask = camera_sphere .render_pointcloud (world_frame = False )
@@ -387,21 +432,19 @@ def test_batched_mounted_camera_rendering(show_viewer, tol):
387432 for cam in cams :
388433 cam .attach (robot .get_link ("hand" ), gu .trans_R_to_T (trans , R ))
389434
390- target_quat = np .tile (np .array ([0 , 1 , 0 , 0 ]), [ n_envs , 1 ] ) # pointing downwards
391- center = np .tile (np .array ([- 0.25 , - 0.25 , 0.5 ]), [ n_envs , 1 ] )
435+ target_quat = np .tile (np .array ([0 , 1 , 0 , 0 ]), ( n_envs , 1 ) ) # pointing downwards
436+ center = np .tile (np .array ([- 0.25 , - 0.25 , 0.5 ]), ( n_envs , 1 ) )
392437 rng = np .random .default_rng (42 )
393438 angular_speed = rng .uniform (- 10 , 10 , n_envs )
394439 r = 0.25
395440
396441 ee_link = robot .get_link ("hand" )
397442
398443 steps_rgb_queue : queue .Queue [list [np .ndarray ]] = queue .Queue (maxsize = 2 )
399-
400- for i in range (50 ):
401- target_pos = np .zeros ([n_envs , 3 ])
402- target_pos [:, 0 ] = center [:, 0 ] + np .cos (i / 360 * np .pi * angular_speed ) * r
403- target_pos [:, 1 ] = center [:, 1 ] + np .sin (i / 360 * np .pi * angular_speed ) * r
404- target_pos [:, 2 ] = center [:, 2 ]
444+ for i in range (20 ):
445+ target_pos = center .copy ()
446+ target_pos [:, 0 ] += np .cos (i / 360 * np .pi * angular_speed ) * r
447+ target_pos [:, 1 ] += np .sin (i / 360 * np .pi * angular_speed ) * r
405448
406449 q = robot .inverse_kinematics (
407450 link = ee_link ,
0 commit comments