Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
234 changes: 166 additions & 68 deletions riegl2raycloud.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import os
import argparse
import glob
import csv
# import csv
import json
import time

Expand All @@ -13,19 +13,22 @@
from boundingrectangle import boundingrectangle
from tile import tile_from_corner_points

OUT_DIR = "out"
def read_dat(matrix_dir):
"""
Reads all transformation matrix files from the specified directory.

def read_csv(file):
pos_dict = {}
with open(file) as csv_file:
reader = csv.reader(csv_file, delimiter=',')
for row in reader:
pos_dict[row[0]] = [float(i) for i in row[1:]]
return pos_dict
Parameters
----------
matrix_dir : str
Path to the directory containing matrix files.

def read_dat(folder):
Returns
-------
dict
A dictionary where keys are scan position names and values are 2D numpy arrays of transformation matrices.
"""
pos_dict = {}
for datfile in glob.glob(os.path.join(folder, "*.DAT")):
for datfile in glob.glob(os.path.join(matrix_dir, "*")):
scanpos = os.path.splitext(os.path.basename(datfile))[0]
with open(datfile) as f:
mat = []
Expand All @@ -35,8 +38,38 @@ def read_dat(folder):
pos_dict[scanpos] = np.array(mat)
return pos_dict

def scanposfromdat(matrix):
    """
    Extract the sensor origin (translation part) from a transformation matrix.

    Parameters
    ----------
    matrix : numpy.ndarray
        A 4x4 transformation matrix.

    Returns
    -------
    numpy.ndarray
        1D array [x, y, z] — the translation column of the matrix.
    """
    # the last column holds the translation; drop its final (homogeneous) entry
    translation_column = matrix[:, -1]
    return translation_column[:-1]

def plot_dat_positions(positions, visualisation: bool = False, out_dir = None):
plt.scatter([scanposfromdat(el)[0] for el in positions.values()], [scanposfromdat(el)[1] for el in positions.values()])
"""
Plot the scan positions from the matrix files.

Parameters
----------
positions : dict
Dictionary of scan positions, where keys are names and values are transformation matrices.
visualisation : bool, optional
If True, displays the plot.
out_dir : str, optional
Directory where the plot image will be saved, if specified.

"""
plt.scatter([scanposfromdat(el)[0] for el in positions.values()],
[scanposfromdat(el)[1] for el in positions.values()])
for txt in positions:
plt.annotate(txt, (scanposfromdat(positions[txt])[0], scanposfromdat(positions[txt])[1] + 1))
if out_dir:
Expand All @@ -47,15 +80,38 @@ def plot_dat_positions(positions, visualisation: bool = False, out_dir = None):
plt.show()
return

def get_pipeline(rxp_file):
def get_pipeline(rxp_file, args):
"""
Constructs a PDAL pipeline for processing .rxp files.

Parameters
----------
rxp_file : str
Path to the .rxp file.
args : argparse.Namespace
Parsed command-line arguments containing pipeline options.

Returns
-------
pdal.Pipeline
A configured PDAL pipeline instance.
"""
cmds = []
read = {"type":"readers.rxp",
"filename": rxp_file,
"sync_to_pps": "false",
"reflectance_as_intensity": "false"}
"filename": rxp_file,
"sync_to_pps": "false",
"reflectance_as_intensity": "false"}

cmds.append(read)
# option to add PDAL filters here
## add filters
dev_filter = {"type":"filters.range",
"limits":"Deviation[0:{}]".format(args.deviation)}
cmds.append(dev_filter)

refl_filter = {"type":"filters.range",
"limits":"Reflectance[{}:{}]".format(*args.reflectance)}
cmds.append(refl_filter)

dmp = json.dumps(cmds)
pipeline = pdal.Pipeline(dmp)
Expand All @@ -64,7 +120,7 @@ def get_pipeline(rxp_file):

def transform_rxp(rxp_array, matrix):
"""
Returns numpy array with xyz's of rxp file transformed using .dat matrix
Returns numpy array with xyz coords from rxp file transformed using .dat matrix

Parameters
----------
Expand All @@ -83,52 +139,63 @@ def transform_rxp(rxp_array, matrix):
xyz_np = np.hstack((xyz_np, extra_dim))
# transpose
xyz_np = np.transpose(xyz_np)
#perform transformation
# perform transformation
xyz_np = np.matmul(matrix, xyz_np)
#retranspose to get nx4 array again
# retranspose to get nx4 array again
xyz_np = np.transpose(xyz_np)
# remove final column of 1's
xyz_np = xyz_np[:,:-1]

return xyz_np

def read_rxps(scanpos_dirs, pos_dict, args):
    """
    Read and process the .rxp file of every scan position.

    Parameters
    ----------
    scanpos_dirs : list of str
        Paths of all ScanPos directories.
    pos_dict : dict
        Transformation (SOP) matrices keyed by scan position label.
    args : argparse.Namespace
        Parsed command-line arguments containing pipeline options.

    Yields
    ------
    tuple
        (scanpos_label, transformed_points), where `transformed_points` is a
        numpy array of points registered into the project coordinate system.
    """
    for scan_dir in sorted(scanpos_dirs):
        base, scan = os.path.split(scan_dir)
        # a *.SCNPOS directory keeps its rxp files in a 'scans' subfolder
        subdir = 'scans' if 'SCNPOS' in scan else ''
        candidates = sorted(glob.glob(os.path.join(base, scan, subdir, '??????_??????.rxp')))
        # explicit empty check instead of a bare `except:` around `[-1]`,
        # which also swallowed SystemExit/KeyboardInterrupt and real errors
        if not candidates:
            print(f"Error: Cannot find {os.path.join(base, scan, '??????_??????.rxp')}")
            return
        rxp = candidates[-1]  # take the latest timestamped file
        print(rxp)

        scanposlabel = scan.split('.')[0]
        if scanposlabel not in pos_dict:
            # no SOP matrix for this position: skip instead of raising KeyError
            print(f"Can't read rxp {scanposlabel}: no matrix in pos_dict (skipping)")
            continue

        # get and execute pdal pipeline
        pipeline = get_pipeline(rxp, args)
        pipeline.execute()

        # register all raw scans (.rxp files) into a unified coordinate system
        # using the SOP matrix for each scan
        points = transform_rxp(pipeline.arrays[0], pos_dict[scanposlabel])

        # yield makes the function a generator: one scan is processed at a
        # time, keeping peak memory low
        yield scanposlabel, points
        del points  # hint to release the array before the next iteration

def scanposfromdat(matrix):
return matrix[:-1,-1]

def appendray(points, scanpos, time) -> o3d.t.geometry.PointCloud:
"""
Returns o3d Pointcloud with scanposition coordinates saved in normal field and appended time field
Expand All @@ -141,6 +208,11 @@ def appendray(points, scanpos, time) -> o3d.t.geometry.PointCloud:
np array of xyz of scanpos
time
time value of scan pos

Returns
-------
o3d.t.geometry.PointCloud
An Open3D point cloud object.
"""

device = o3d.core.Device("CPU:0")
Expand Down Expand Up @@ -173,6 +245,7 @@ def pc2rc(pos_dict, out_dir, args):
out_dir
path to output directory
args
Parsed command-line arguments containing processing options.
"""
if not os.path.exists(out_dir):
os.makedirs(out_dir)
Expand All @@ -198,10 +271,15 @@ def pc2rc(pos_dict, out_dir, args):
crop_bbox = corners_pc.get_oriented_bounding_box()

merged = None

# get all scanpos directories
scanpos_dirs = sorted(glob.glob(os.path.join(args.project, f'{args.prefix}*')))
if len(scanpos_dirs) == 0: raise Exception('no scan positions found')

# call generator function for rxps
for scanpos, points in read_rxps(args.project, pos_dict):
print(f"Processing position {scanpos}")
pcd = appendray(points, scanposfromdat(pos_dict[scanpos]), 1)
for scanposlabel, points in read_rxps(scanpos_dirs, pos_dict, args):
print(f"Processing position {scanposlabel}")
pcd = appendray(points, scanposfromdat(pos_dict[scanposlabel]), 1)
# crop using bbox
print("Cropping")
pcd = pcd.crop(crop_bbox)
Expand All @@ -218,11 +296,12 @@ def pc2rc(pos_dict, out_dir, args):
merged = merged.append(pcd)
if(args.debug):
# for debugging: also write single point clouds
pos_dir = out_dir + "/pos/"
pos_dir = os.path.join(out_dir, "pos")
if not os.path.exists(pos_dir):
os.makedirs(pos_dir)
filename = pos_dir+scanpos+"_raycloud.ply"
o3d.t.io.write_point_cloud( filename, pcd, write_ascii=False, compressed=False, print_progress=False)
filename = os.path.join(pos_dir, f'{scanposlabel}_raycloud.ply')
o3d.t.io.write_point_cloud(filename, pcd, write_ascii=False,
compressed=False, print_progress=False)
del pcd, points

print("Tiling merged point cloud")
Expand All @@ -231,49 +310,68 @@ def pc2rc(pos_dict, out_dir, args):
if not os.path.exists(tile_out_dir):
os.makedirs(tile_out_dir)

tiles = tile_from_corner_points(bbox_xy_corners, merged, size=args.tilesize, buffer=args.tilebuffer, exact_size=args.exact_tiles, visualization=False, out_dir=tile_out_dir)
tiles = tile_from_corner_points(bbox_xy_corners, merged, size=args.tilesize,
buffer=args.tilebuffer, exact_size=args.exact_tiles,
visualization=False, out_dir=tile_out_dir)

for i, tile in enumerate(tiles):
o3d.t.io.write_point_cloud(os.path.join(tile_out_dir,"Tile"+str(i)+".ply"), tile, write_ascii=False, compressed=False, print_progress=False)
o3d.t.io.write_point_cloud(os.path.join(tile_out_dir,"Tile"+str(i)+".ply"), tile,
write_ascii=False, compressed=False, print_progress=False)

#write merged pointcloud
print("Writing merged pointcloud")
o3d.t.io.write_point_cloud(os.path.join(out_dir, "merged_raycloud.ply"), merged, write_ascii=False, compressed=False, print_progress=False)
o3d.t.io.write_point_cloud(os.path.join(out_dir, "merged_raycloud.ply"), merged,
write_ascii=False, compressed=False, print_progress=False)
return

# @profile
def main():
    """
    Entry point: parse CLI arguments, read the SOP matrices, and convert
    the RIEGL scan positions into a merged, tiled raycloud.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-p", "--project", type=str, required=True,
                        help="Path to folder containing ScanPosXXX dirs")
    parser.add_argument("-m", "--matrix", type=str, required=True,
                        help='Path to folder containing sop matrix files')
    parser.add_argument("-o", "--odir", type=str, default='.',
                        help='Path to output dir')
    parser.add_argument('--deviation', type=float, default=15,
                        help='deviation filter')
    parser.add_argument('--reflectance', type=float, nargs=2, default=[-20, 5],
                        help='reflectance filter')
    parser.add_argument("--prefix", type=str, default='ScanPos',
                        help='scanpos dir name prefix, default:ScanPos')
    parser.add_argument("--debug", action='store_true',
                        help='use for debugging, will write individual ScanPos rayclouds')
    parser.add_argument("-r", "--resolution", type=float, default=0.02,
                        help='Voxel size for downsampling, default:0.02 m')
    # tile related
    parser.add_argument("-b", "--edgebuffer", type=int, default=5,
                        help='Buffer around edge of plot, default:5 m')
    parser.add_argument("-t", "--tilebuffer", type=int, default=2,
                        help='overlap between / buffer around each tile, default:2 m')
    parser.add_argument("-s", "--tilesize", type=int, default=20,
                        help='The length of the side of each square tile, default:20 m')
    parser.add_argument("--exact_tiles", action='store_true')

    args = parser.parse_args()

    if not os.path.exists(args.project):
        # SystemExit instead of os._exit(1): os._exit skips interpreter
        # cleanup (atexit handlers, stdio flushing)
        print("couldn't find project folder")
        raise SystemExit(1)

    pos_dict = read_dat(args.matrix)

    print("Converting raycloud")
    plot_dat_positions(pos_dict, out_dir=args.odir)

    # NOTE(review): process_time() counts CPU time only — I/O wait is not
    # included in the figure reported below
    t = time.process_time()
    pc2rc(pos_dict, args.odir, args)
    t2 = time.process_time()
    total_seconds = int(t2 - t)
    hours, remainder = divmod(total_seconds, 3600)
    minutes, seconds = divmod(remainder, 60)
    print(f"Converted pointclouds to rayclouds in {hours} h {minutes} min {seconds} s")

    return

Expand Down