
Commit 5804bb9

PR #14389 from AviaAv: Add page detection to texture mapping test
2 parents 83b7109 + 8daeee1 commit 5804bb9

4 files changed: 282 additions & 141 deletions


unit-tests/live/image-quality/iq_helper.py

Lines changed: 5 additions & 1 deletion
@@ -7,7 +7,7 @@
 import time
 import pyrealsense2 as rs

-
+# targets are available on the Wiki page: https://rsconf.realsenseai.com/display/RealSense/Image+Quality+Tests
 # standard size to display / process the target
 WIDTH = 1280
 HEIGHT = 720
@@ -101,3 +101,7 @@ def get_roi_from_frame(frame):
     np_frame = np.asanyarray(frame.get_data())
     warped = cv2.warpPerspective(np_frame, M, (WIDTH, HEIGHT)) # using A4 size for its ratio
     return warped
+
+
+def is_color_close(actual, expected, tolerance):
+    return all(abs(int(a) - int(e)) <= tolerance for a, e in zip(actual, expected))
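For reference, a minimal sketch of how the new is_color_close helper can be exercised on its own. It assumes iq_helper.py is importable from the working directory; the BGR triples and tolerance below are illustrative values, not the ones used by the tests.

# Hypothetical standalone usage of the is_color_close helper added above.
# Assumes iq_helper.py is on the Python path; values are illustrative only.
from iq_helper import is_color_close

measured_bgr = (12, 9, 240)   # e.g. an averaged ROI color in BGR order
expected_bgr = (0, 0, 255)    # pure red in BGR
tolerance = 60                # allowed per-channel deviation

if is_color_close(measured_bgr, expected_bgr, tolerance):
    print("color is within tolerance")
else:
    print("color is out of tolerance")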

unit-tests/live/image-quality/test-basic-color.py

Lines changed: 13 additions & 24 deletions
@@ -7,7 +7,7 @@
 from rspy import log, test
 import numpy as np
 import cv2
-from iq_helper import find_roi_location, get_roi_from_frame, WIDTH, HEIGHT
+from iq_helper import find_roi_location, get_roi_from_frame, is_color_close, WIDTH, HEIGHT

 NUM_FRAMES = 100 # Number of frames to check
 COLOR_TOLERANCE = 60 # Acceptable per-channel deviation in RGB values
@@ -37,9 +37,6 @@

 dev, ctx = test.find_first_device_or_exit()

-def is_color_close(actual, expected, tolerance):
-    return all(abs(int(a) - int(e)) <= tolerance for a, e in zip(actual, expected))
-
 def draw_debug(frame_bgr, a4_page_bgr):
     """
     Simple debug view:
@@ -74,22 +71,15 @@ def draw_debug(frame_bgr, a4_page_bgr):
     return np.hstack([left, right])


-def is_cfg_supported(resolution, fps):
-    color_sensor = dev.first_color_sensor()
-    for p in color_sensor.get_stream_profiles():
-        if p.stream_type() == rs.stream.color and p.format() == rs.format.bgr8:
-            v = p.as_video_stream_profile()
-            if (v.width(), v.height()) == resolution and v.fps() == fps:
-                return True
-    return False
-
-
 def run_test(resolution, fps):
     test.start("Basic Color Image Quality Test:", f"{resolution[0]}x{resolution[1]} @ {fps}fps")
     color_match_count = {color: 0 for color in expected_colors.keys()}
     pipeline = rs.pipeline(ctx)
     cfg = rs.config()
     cfg.enable_stream(rs.stream.color, resolution[0], resolution[1], rs.format.bgr8, fps)
+    if not cfg.can_resolve(pipeline):
+        log.i(f"Configuration {resolution[0]}x{resolution[1]}@{fps}fps is not supported by the device")
+        return
     pipeline_profile = pipeline.start(cfg)
     for i in range(60): # skip initial frames
         pipeline.wait_for_frames()
@@ -138,16 +128,16 @@ def run_test(resolution, fps):
         raise e
     finally:
         cv2.destroyAllWindows()
-
-        pipeline.stop()
-    test.finish()
+        pipeline.stop()
+    test.finish()


 log.d("context:", test.context)
-if "nightly" not in test.context:
-    configurations = [((1280, 720), 30)]
-else:
-    configurations = [
+
+configurations = [((1280, 720), 30)]
+# on nightly we check additional arbitrary configurations
+if "nightly" in test.context:
+    configurations += [
         ((640,480), 15),
         ((640,480), 30),
         ((640,480), 60),
@@ -159,8 +149,7 @@ def run_test(resolution, fps):
         ((1280,720), 15),
     ]

-for cfg in configurations:
-    if is_cfg_supported(*cfg):
-        run_test(*cfg)
+for resolution, fps in configurations:
+    run_test(resolution, fps)

 test.print_results_and_exit()
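The diff above replaces the manual is_cfg_supported() profile scan with rs.config.can_resolve(), which asks librealsense whether the requested streams can be satisfied before the pipeline is started. A minimal standalone sketch of that pattern, assuming a RealSense device is connected; the 1280x720 @ 30fps values are only an example:

# Standalone sketch of the cfg.can_resolve() check used in the test above.
# Assumes a RealSense device is connected; resolution and fps are example values.
import pyrealsense2 as rs

pipeline = rs.pipeline()
cfg = rs.config()
cfg.enable_stream(rs.stream.color, 1280, 720, rs.format.bgr8, 30)

if cfg.can_resolve(pipeline):
    pipeline.start(cfg)   # the device can satisfy the requested profile
    pipeline.stop()
else:
    print("Requested color profile is not supported by the connected device")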

unit-tests/live/image-quality/test-basic-depth.py

Lines changed: 89 additions & 64 deletions
@@ -9,84 +9,109 @@
 import numpy as np
 import cv2
 import time
-from iq_helper import find_roi_location, get_roi_from_frame
+from iq_helper import find_roi_location, get_roi_from_frame, WIDTH, HEIGHT

 NUM_FRAMES = 100 # Number of frames to check
 DEPTH_TOLERANCE = 0.05 # Acceptable deviation from expected depth in meters
 FRAMES_PASS_THRESHOLD =0.8 # Percentage of frames that needs to pass
 DEBUG_MODE = False

-test.start("Basic Depth Image Quality Test")
+DISTANCE_FROM_CUBE = 0.53
+DISTANCE_FROM_BACKGROUND = 0.67
+dev, ctx = test.find_first_device_or_exit()

-try:
-    dev, ctx = test.find_first_device_or_exit()
+def run_test(resolution, fps):
+    test.start("Basic Depth Image Quality Test", f"{resolution[0]}x{resolution[1]} @ {fps}fps")

-    pipeline = rs.pipeline(ctx)
-    cfg = rs.config()
-    cfg.enable_stream(rs.stream.depth, 640, 480, rs.format.z16, 30)
-    cfg.enable_stream(rs.stream.infrared, 1, 640, 480, rs.format.y8, 30) # needed for finding the ArUco markers
-    profile = pipeline.start(cfg)
-    time.sleep(2)
+    try:
+        pipeline = rs.pipeline(ctx)
+        cfg = rs.config()
+        cfg.enable_stream(rs.stream.depth, resolution[0], resolution[1], rs.format.z16, fps)
+        cfg.enable_stream(rs.stream.infrared, 1, 640, 480, rs.format.y8, 30) # needed for finding the ArUco markers
+        if not cfg.can_resolve(pipeline):
+            log.i(f"Configuration {resolution[0]}x{resolution[1]} @ {fps}fps is not supported by the device")
+            return
+        profile = pipeline.start(cfg)
+        time.sleep(2)

-    depth_sensor = profile.get_device().first_depth_sensor()
-    depth_scale = depth_sensor.get_depth_scale()
+        depth_sensor = profile.get_device().first_depth_sensor()
+        depth_scale = depth_sensor.get_depth_scale()

-    # find region of interest (page) and get the transformation matrix
-    find_roi_location(pipeline, (4,5,6,7), DEBUG_MODE) # markers in the lab are 4,5,6,7
-    depth_passes = {}
-    for i in range(NUM_FRAMES):
-        frames = pipeline.wait_for_frames()
-        depth_frame = frames.get_depth_frame()
-        infrared_frame = frames.get_infrared_frame()
-        if not depth_frame:
-            continue
-
-        depth_image = get_roi_from_frame(depth_frame)
+        # find region of interest (page) and get the transformation matrix
+        find_roi_location(pipeline, (4,5,6,7), DEBUG_MODE) # markers in the lab are 4,5,6,7

         # Known pixel positions and expected depth values (in meters)
         # Using temporary values until setup in lab is completed
-        h, w = depth_image.shape
         depth_points = {
-            "cube": ((h // 2, w // 2), 0.45), # center of page, cube at 0.45m
-            "background": ((h // 2, int(w * 0.1)), 0.6), # left edge, background at 0.6m
+            "cube": ((HEIGHT // 2, WIDTH // 2), DISTANCE_FROM_CUBE), # cube expected to be at the center of page
+            "background": ((HEIGHT // 2, int(WIDTH * 0.1)), DISTANCE_FROM_BACKGROUND) # left edge, background
         }
-        if not depth_passes:
-            depth_passes = {name: 0 for name in depth_points}
-        for point_name, ((x, y), expected_depth) in depth_points.items():
-            raw_depth = depth_image[y, x]
-            depth_value = raw_depth * depth_scale # Convert to meters
-
-            if abs(depth_value - expected_depth) <= DEPTH_TOLERANCE:
-                depth_passes[point_name] += 1
-            else:
-                log.d(f"Frame {i} - {point_name} at ({x},{y}): {depth_value:.3f}m ≠ {expected_depth:.3f}m")
-
+        depth_passes = {name: 0 for name in depth_points}
+        for i in range(NUM_FRAMES):
+            frames = pipeline.wait_for_frames()
+            depth_frame = frames.get_depth_frame()
+            infrared_frame = frames.get_infrared_frame()
+            if not depth_frame:
+                continue
+
+            depth_image = get_roi_from_frame(depth_frame)
+
+            for point_name, ((x, y), expected_depth) in depth_points.items():
+                raw_depth = depth_image[y, x]
+                depth_value = raw_depth * depth_scale # Convert to meters
+
+                if abs(depth_value - expected_depth) <= DEPTH_TOLERANCE:
+                    depth_passes[point_name] += 1
+                else:
+                    log.d(f"Frame {i} - {point_name} at ({x},{y}): {depth_value:.3f}m ≠ {expected_depth:.3f}m")
+
+            if DEBUG_MODE:
+                # display IR image along with transformed view of IR, get_roi_from_frame(infrared_frame)
+                infrared_np = np.asanyarray(infrared_frame.get_data())
+                w, h = infrared_np.shape
+                dbg_resized = cv2.resize(get_roi_from_frame(infrared_frame), (h, w))
+
+                dbg = np.hstack([infrared_np, dbg_resized])
+                cv2.imshow("Depth IQ - IR | Depth", dbg)
+                cv2.waitKey(1)
+
+        # wait for close
         if DEBUG_MODE:
-            # display IR image along with transformed view of IR, get_roi_from_frame(infrared_frame)
-            infrared_np = np.asanyarray(infrared_frame.get_data())
-            w, h = infrared_np.shape
-            dbg_resized = cv2.resize(get_roi_from_frame(infrared_frame), (h, w))
-
-            dbg = np.hstack([infrared_np, dbg_resized])
-            cv2.imshow("Depth IQ - IR | Depth", dbg)
-            cv2.waitKey(1)
-
-    # wait for close
-    if DEBUG_MODE:
-        cv2.waitKey(0)
-
-    # Check that each point passed the threshold
-    min_passes = int(NUM_FRAMES * FRAMES_PASS_THRESHOLD)
-    for point_name, count in depth_passes.items():
-        log.i(f"{point_name.title()} passed in {count}/{NUM_FRAMES} frames")
-        test.check(count >= min_passes)
-
-except Exception as e:
-    test.fail()
-    raise e
-finally:
-    cv2.destroyAllWindows()
-
-    pipeline.stop()
-test.finish()
+            cv2.waitKey(0)
+
+        # Check that each point passed the threshold
+        min_passes = int(NUM_FRAMES * FRAMES_PASS_THRESHOLD)
+        for point_name, count in depth_passes.items():
+            log.i(f"{point_name.title()} passed in {count}/{NUM_FRAMES} frames")
+            test.check(count >= min_passes)
+
+    except Exception as e:
+        test.fail()
+        raise e
+    finally:
+        cv2.destroyAllWindows()
+        pipeline.stop()
+    test.finish()
+
+
+log.d("context:", test.context)
+
+configurations = [((1280, 720), 30)]
+# on nightly we check additional arbitrary configurations
+if "nightly" in test.context:
+    configurations += [
+        ((640,480), 15),
+        ((640,480), 30),
+        ((640,480), 60),
+        ((848,480), 15),
+        ((848,480), 30),
+        ((848,480), 60),
+        ((1280,720), 5),
+        ((1280,720), 10),
+        ((1280,720), 15),
+    ]
+
+for resolution, fps in configurations:
+    run_test(resolution, fps)
+
 test.print_results_and_exit()
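The pass/fail check in this test multiplies each raw z16 pixel by the sensor's depth scale to get meters, then compares against the expected distance within DEPTH_TOLERANCE. A minimal standalone sketch of that conversion, assuming a connected device; the pixel coordinate and stream settings below are illustrative, not the lab setup values.

# Standalone sketch of the raw-z16-to-meters conversion used by the depth test.
# Assumes a RealSense device is connected; pixel and stream values are illustrative.
import numpy as np
import pyrealsense2 as rs

pipeline = rs.pipeline()
cfg = rs.config()
cfg.enable_stream(rs.stream.depth, 640, 480, rs.format.z16, 30)
profile = pipeline.start(cfg)
try:
    depth_scale = profile.get_device().first_depth_sensor().get_depth_scale()
    frames = pipeline.wait_for_frames()
    depth_image = np.asanyarray(frames.get_depth_frame().get_data())  # uint16 raw depth

    x, y = 320, 240                            # example pixel (center of 640x480)
    depth_m = depth_image[y, x] * depth_scale  # convert raw units to meters
    print(f"depth at ({x},{y}): {depth_m:.3f} m")
finally:
    pipeline.stop()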
