
Commit 4133e3e

Merge pull request #51 from AGH-CEAI/feature/add_calib_extrinsics
Add calibration of camera extrinsics
2 parents b953f01 + 76dff3f commit 4133e3e

File tree

6 files changed: +303, -51 lines changed


aegis_utils/CHANGELOG.md

Lines changed: 1 addition & 0 deletions

@@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

 ### Added

+* [PR-51](https://github.com/AGH-CEAI/aegis_ros/pull/51) - Added calibration of camera extrinsics program.
 * [PR-50](https://github.com/AGH-CEAI/aegis_ros/pull/50) - Added calibration of camera intrinsics program.
 * [PR-47](https://github.com/AGH-CEAI/aegis_ros/pull/47) - Initial version of the `aegis_utils` package. Added calibration data collection program and corresponding positions configuration for Luxonis OAK-D SR tool camera.

aegis_utils/README.md

Lines changed: 13 additions & 1 deletion

@@ -45,10 +45,22 @@ ros2 run aegis_utils calib_data_collect -c tool_front_right -p ~/Documents/calib

 ## Calibration of camera intrinsics
 This tool computes the intrinsic parameters of a camera using previously collected calibration images.
-It processes a set of chessboard pattern images and saves the resulting camera matrix and distortion coefficients.
+
+It outputs the resulting camera matrix and distortion coefficients and saves them to the calibration data folder.

 ### Example usage
 It accepts the same arguments and uses the same default data path as the data collection tool described above. For example:
 ```bash
 ros2 run aegis_utils calib_intrinsics -c tool_front_right
 ```
+
+## Calibration of camera extrinsics
+This tool computes the extrinsic parameters of a camera using previously collected calibration images, TCP poses, and the computed camera intrinsics.
+
+It outputs the resulting transformation matrix from the robot TCP to the camera frame and saves it to the calibration data folder.
+
+### Example usage
+It accepts the same arguments and uses the same default data path as the data collection tool described above. For example:
+```bash
+ros2 run aegis_utils calib_extrinsics -c tool_front_right
+```
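
The calibration artifacts are plain JSON, so downstream code can consume them directly. The sketch below is not part of this PR; it assumes the file names and keys written by the calibration programs shown further down in this diff (`<camera>_intrinsics.json` with `camera_matrix`/`dist_coeffs`, `<camera>_extrinsics.json` with `T_tcp2cam`) and the default data path used by the tools, and illustrates how the TCP-to-camera transform could be composed with a current TCP pose to get the camera pose in the robot base frame:

```python
# Minimal sketch (not part of this PR): loading and using the saved
# calibration files. File names, JSON keys, and the default data path
# are taken from the calib_intrinsics / calib_extrinsics programs;
# the TCP pose values below are hypothetical.
import json
from pathlib import Path

import numpy as np
from scipy.spatial.transform import Rotation as R

camera = "tool_front_right"
data_path = Path("~/ceai_ws/calibration_data").expanduser() / camera

with open(data_path / f"{camera}_intrinsics.json") as f:
    intr = json.load(f)
camera_matrix = np.array(intr["camera_matrix"], dtype=float)
dist_coeffs = np.array(intr["dist_coeffs"], dtype=float)

with open(data_path / f"{camera}_extrinsics.json") as f:
    T_tcp2cam = np.array(json.load(f)["T_tcp2cam"], dtype=float)  # 4x4 homogeneous

# Given a current TCP pose (position plus x, y, z, w quaternion, matching the
# tcp_pose YAML format), chain the transforms to place the camera in the base frame.
tcp_pos = np.array([0.4, 0.0, 0.3])        # hypothetical values
tcp_quat = np.array([0.0, 0.0, 0.0, 1.0])  # hypothetical values

T_base2tcp = np.eye(4)
T_base2tcp[:3, :3] = R.from_quat(tcp_quat).as_matrix()
T_base2tcp[:3, 3] = tcp_pos

T_base2cam = T_base2tcp @ T_tcp2cam
print(T_base2cam)
```

Whether this exact composition order applies depends on the frame conventions of the robot driver; the chain above follows the naming used in the README and the extrinsics script.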

aegis_utils/aegis_utils/calib_data_collect.py

Lines changed: 26 additions & 25 deletions

@@ -2,23 +2,20 @@
 import threading
 import time
 from pathlib import Path
-from typing import List, Dict, Optional
+from typing import Dict, List, Optional

 import cv2
 import numpy as np
-import yaml
-
 import rclpy
-from rclpy.executors import SingleThreadedExecutor
-from rclpy.node import Node
+import yaml
+from aegis_director.robot_director import RobotDirector
 from ament_index_python.packages import get_package_share_directory
 from builtin_interfaces.msg import Time
 from cv_bridge import CvBridge
+from rclpy.executors import SingleThreadedExecutor
+from rclpy.node import Node
 from sensor_msgs.msg import Image

-from aegis_director.robot_director import RobotDirector
-
-
 CAMERA_CONFIG = {
     "scene": {"pos_config": "cam_scene.yaml", "topic": "/cam_scene/rgb/image_rect"},
     "tool_front_right": {
@@ -40,6 +37,26 @@
 }


+def parse_args() -> argparse.Namespace:
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "-c",
+        "--camera",
+        type=str,
+        required=True,
+        choices=CAMERA_CONFIG.keys(),
+        help="Which camera: scene, tool_front_right, tool_front_left, tool_right, tool_left",
+    )
+    parser.add_argument(
+        "-p",
+        "--path",
+        type=str,
+        default=None,
+        help="Optional path to calibration data folder",
+    )
+    return parser.parse_args()
+
+
 def load_positions(config_file_path: Path) -> List[Dict[str, float]]:
     with open(config_file_path, "r") as f:
         data = yaml.safe_load(f)
@@ -155,23 +172,7 @@ def collect_data(


 def main() -> None:
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        "-c",
-        "--camera",
-        type=str,
-        required=True,
-        choices=CAMERA_CONFIG.keys(),
-        help="Which camera: scene, tool_front_right, tool_front_left, tool_right, tool_left",
-    )
-    parser.add_argument(
-        "-p",
-        "--path",
-        type=str,
-        default=None,
-        help="Optional path to calibration data folder",
-    )
-    args = parser.parse_args()
+    args = parse_args()

     if args.path:
         data_path = Path(args.path).expanduser() / args.camera

New file (the `calib_extrinsics` program)

Lines changed: 227 additions & 0 deletions

@@ -0,0 +1,227 @@
import argparse
import json
from pathlib import Path
from typing import Optional, Tuple

import cv2
import numpy as np
import yaml
from natsort import natsorted
from scipy.spatial.transform import Rotation as R

CAMERA_CONFIG = {
    "scene": {},
    "tool_front_right": {},
    "tool_front_left": {},
    "tool_right": {},
    "tool_left": {},
}


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-c",
        "--camera",
        type=str,
        required=True,
        choices=CAMERA_CONFIG.keys(),
        help="Which camera: scene, tool_front_right, tool_front_left, tool_right, tool_left",
    )
    parser.add_argument(
        "-p",
        "--path",
        type=str,
        default=None,
        help="Optional path to calibration data folder",
    )
    return parser.parse_args()


def load_tcp(tcp_path: Path) -> Tuple[np.ndarray, np.ndarray]:
    with open(tcp_path, "r") as f:
        data = yaml.safe_load(f)
    pos = np.array(data["tcp_pose"]["position"], dtype=float)
    ori = np.array(data["tcp_pose"]["orientation"], dtype=float)
    return pos, ori


def load_intrinsics(intrinsics_path: Path) -> Tuple[np.ndarray, np.ndarray]:
    with open(intrinsics_path, "r") as f:
        data = json.load(f)
    camera_matrix = np.array(data["camera_matrix"], dtype=float)
    dist_coeffs = np.array(data["dist_coeffs"], dtype=float)
    return camera_matrix, dist_coeffs


def process_view(
    image_path: Path,
    tcp_path: Path,
    board: cv2.aruco_CharucoBoard,
    aruco_dict: cv2.aruco_Dictionary,
    camera_matrix: np.ndarray,
    dist_coeffs: np.ndarray,
) -> Optional[Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]]:
    try:
        image_idx = int(image_path.stem.split("_")[-1])
        tcp_idx = int(tcp_path.stem.split("_")[-1])
    except ValueError:
        print(f"Invalid index in filenames: {image_path}, {tcp_path}\nSkipping")
        return None

    if image_idx != tcp_idx:
        print(f"Index mismatch: {image_path} vs {tcp_path}\nSkipping")
        return None

    img = cv2.imread(str(image_path))
    img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    corners, ids, _ = cv2.aruco.detectMarkers(img_gray, aruco_dict)

    if ids is None or len(ids) == 0:
        print(f"No ArUco markers detected in {image_path}\nSkipping")
        return None

    retval, charuco_corners, charuco_ids = cv2.aruco.interpolateCornersCharuco(
        corners, ids, img_gray, board
    )

    if retval < 4:
        print(f"Not enough Charuco corners detected in view {image_path}\nSkipping")
        return None

    valid, rvec, tvec = cv2.aruco.estimatePoseBoard(
        charuco_corners, charuco_ids, board, camera_matrix, dist_coeffs, None, None
    )

    if not valid:
        print(f"Could not estimate pose for {image_path}\nSkipping")
        return None

    tcp_pos, tcp_ori = load_tcp(tcp_path)

    R_robot2tcp = R.from_quat(tcp_ori).as_matrix()
    t_robot2tcp = tcp_pos.reshape(3, 1)
    R_cam2target, _ = cv2.Rodrigues(rvec)
    t_cam2target = tvec.reshape(3, 1)

    return R_robot2tcp, t_robot2tcp, R_cam2target, t_cam2target


def calibrate_extrinsics(
    data_path: Path,
    squares_x: int,
    squares_y: int,
    square_size: float,
    marker_size: float,
    aruco_dict: cv2.aruco_Dictionary,
) -> None:
    board = cv2.aruco.CharucoBoard(
        (squares_x, squares_y),
        square_size,
        marker_size,
        aruco_dict,
    )
    board.setLegacyPattern(True)

    image_paths = natsorted(data_path.glob("*_image_*.png"))
    tcp_paths = natsorted(data_path.glob("*_tcp_*.yaml"))
    intrinsics_path = data_path / f"{data_path.name}_intrinsics.json"

    if not image_paths:
        print(f"No images found in {data_path}")
        return
    if not tcp_paths:
        print(f"No TCP poses found in {data_path}")
        return
    if not intrinsics_path.is_file():
        print(f"Intrinsics file not found in {intrinsics_path}")
        return
    if len(image_paths) != len(tcp_paths):
        print(
            f"Number of images ({len(image_paths)}) and TCP poses ({len(tcp_paths)}) do not match!"
        )

    camera_matrix, dist_coeffs = load_intrinsics(intrinsics_path)

    tcp_poses_R = []
    tcp_poses_t = []
    target_poses_R = []
    target_poses_t = []

    valid_views = 0

    for image_path, tcp_path in zip(image_paths, tcp_paths):
        poses = process_view(
            image_path, tcp_path, board, aruco_dict, camera_matrix, dist_coeffs
        )
        if poses is None:
            continue
        R_robot2tcp, t_robot2tcp, R_cam2target, t_cam2target = poses

        tcp_poses_R.append(R_robot2tcp)
        tcp_poses_t.append(t_robot2tcp)
        target_poses_R.append(R_cam2target)
        target_poses_t.append(t_cam2target)

        valid_views += 1

    print(f"Processed {valid_views} valid views for extrinsic calibration")

    if valid_views < 3:
        print("Not enough valid views for calibration")
        return

    R_tcp2cam, t_tcp2cam = cv2.calibrateHandEye(
        tcp_poses_R,
        tcp_poses_t,
        target_poses_R,
        target_poses_t,
        method=cv2.CALIB_HAND_EYE_TSAI,
    )

    T_tcp2cam = np.eye(4)
    T_tcp2cam[:3, :3] = R_tcp2cam
    T_tcp2cam[:3, 3] = t_tcp2cam.ravel()

    print("Transformation matrix (TCP to camera)\n")
    print(T_tcp2cam)

    extrinsics_path = data_path / f"{data_path.name}_extrinsics.json"
    with open(extrinsics_path, "w") as f:
        json.dump({"T_tcp2cam": T_tcp2cam.tolist()}, f, indent=2)

    print(f"Extrinsics saved to {extrinsics_path}")


def main() -> None:
    args = parse_args()

    if args.path:
        data_path = Path(args.path).expanduser() / args.camera
    else:
        data_path = Path("~/ceai_ws/calibration_data").expanduser() / args.camera

    board_path = Path(__file__).parent.parent / "config" / "charuco_board.yaml"
    with open(board_path, "r") as f:
        board_cfg = yaml.safe_load(f)

    if args.camera.startswith("tool"):
        board_params = board_cfg["tool"]
    else:
        board_params = board_cfg["scene"]

    squares_x = board_params["squares_x"]
    squares_y = board_params["squares_y"]
    square_size = board_params["square_size"]
    marker_size = board_params["marker_size"]
    aruco_dict = cv2.aruco.getPredefinedDictionary(
        getattr(cv2.aruco, board_params["aruco_dict"])
    )

    calibrate_extrinsics(
        data_path, squares_x, squares_y, square_size, marker_size, aruco_dict
    )


if __name__ == "__main__":
    main()
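
A common way to judge a hand-eye result like the one computed above is to check that the calibrated transform makes the independently observed board poses agree across views. The sketch below is not part of the PR; it reuses the per-view lists built in `calibrate_extrinsics` (`tcp_poses_R`, `tcp_poses_t`, `target_poses_R`, `target_poses_t`) together with the computed `T_tcp2cam`, follows the frame names used in the script, and leaves the exact composition order as an assumption that depends on the robot driver's pose convention:

```python
# Possible sanity check (not part of this PR): with an eye-in-hand setup the
# ChArUco board stays fixed in the robot base frame, so chaining each view's
# poses through the calibrated T_tcp2cam should give nearly the same
# base-to-target transform for every view. It could be called at the end of
# calibrate_extrinsics, before the result is saved.
import numpy as np


def to_homogeneous(rot: np.ndarray, trans: np.ndarray) -> np.ndarray:
    # Pack a 3x3 rotation and a translation vector into a 4x4 matrix.
    T = np.eye(4)
    T[:3, :3] = rot
    T[:3, 3] = trans.ravel()
    return T


def check_consistency(tcp_poses_R, tcp_poses_t, target_poses_R, target_poses_t, T_tcp2cam):
    # One base-to-target estimate per view; they should roughly agree.
    estimates = []
    for R_tcp, t_tcp, R_tgt, t_tgt in zip(
        tcp_poses_R, tcp_poses_t, target_poses_R, target_poses_t
    ):
        T_base2tcp = to_homogeneous(R_tcp, t_tcp)
        T_cam2target = to_homogeneous(R_tgt, t_tgt)
        estimates.append(T_base2tcp @ T_tcp2cam @ T_cam2target)

    translations = np.array([T[:3, 3] for T in estimates])
    spread = translations.std(axis=0)
    print(f"Per-axis std of the board position across views: {spread}")
```

A large spread would suggest bad views, a mismatch between images and TCP poses, or inconsistent frame conventions rather than a usable calibration.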
