Skip to content

Commit e4aed11

Browse files
authored
Merge pull request #162 from IGNF/dev
dev -> master 1.15.5
2 parents 1510f8c + b4d1736 commit e4aed11

File tree

5 files changed

+89
-7
lines changed

5 files changed

+89
-7
lines changed

CHANGELOG.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,8 @@
1+
# 1.15.5
2+
- [optimization] replace_area_in_pointcloud: select polygons in replace_area that intersect target LAS.
3+
- make the normal case faster
4+
- when there is no polygon, a simple copy of target file is enough.
5+
16
# 1.15.4
27
- [fix] replace_area_in_pointcloud: better handling of source cloud with all points filtered
38

pdaltools/_version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
__version__ = "1.15.4"
1+
__version__ = "1.15.5"
22

33

44
if __name__ == "__main__":

pdaltools/replace_area_in_pointcloud.py

Lines changed: 56 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,22 @@
11
import argparse
2+
import os
3+
import shutil
4+
import tempfile
5+
import time
26
import warnings
37

8+
import geopandas as gpd
49
import numpy as np
510
import pdal
611
from numpy.lib import recfunctions as rfn
712
from osgeo import gdal
13+
from shapely.geometry import box
814

9-
from pdaltools.las_info import get_writer_parameters_from_reader_metadata
15+
from pdaltools.las_info import (
16+
get_bounds_from_header_info,
17+
get_writer_parameters_from_reader_metadata,
18+
las_info_metadata,
19+
)
1020

1121

1222
def argument_parser():
@@ -139,6 +149,18 @@ def pipeline_read_from_DSM(dsm, ground_mask, classification):
139149
return pipeline
140150

141151

152+
def clip_area(area_input, area_output, las_file):
    """Clip the replacement-area polygons to the extent of a LAS file.

    Reads the polygons from `area_input`, keeps only those that intersect the
    bounding box of `las_file` (taken from its header metadata), and writes the
    selection to `area_output`.

    Args:
        area_input (str): path to the input vector file (readable by geopandas).
        area_output (str): path where the clipped selection is written.
        las_file (str): path to the LAS/LAZ file whose header bounds define the clip box.

    Returns:
        int: number of polygons intersecting the LAS bounding box.
    """
    start = time.time()
    gdf = gpd.read_file(area_input)
    # Header order is (minx, maxx, miny, maxy); shapely's box wants (minx, miny, maxx, maxy).
    minx, maxx, miny, maxy = get_bounds_from_header_info(las_info_metadata(las_file))
    selection = gdf[gdf.intersects(box(minx, miny, maxx, maxy))]
    num_polygon = len(selection)
    # Writing an empty GeoDataFrame can fail or create an invalid file with some
    # OGR drivers; callers ignore `area_output` when no polygon is selected anyway.
    if num_polygon:
        selection.to_file(area_output)
    end = time.time()
    print(f"Create replacement area cropped: {num_polygon} polygons in {end-start:.2f} seconds")
    return num_polygon
162+
163+
142164
def replace_area(
143165
target_cloud, pipeline_source, replacement_area, output_cloud, source_pdal_filter="", target_pdal_filter=""
144166
):
@@ -147,6 +169,24 @@ def replace_area(
147169
print("output cloud: ", output_cloud)
148170
print("source pdal filter: ", source_pdal_filter)
149171
print("target pdal filter: ", target_pdal_filter)
172+
173+
start = time.time()
174+
tmpdir = tempfile.mkdtemp()
175+
replacement_name, ext = os.path.splitext(os.path.basename(replacement_area))
176+
las_name, _ = os.path.splitext(os.path.basename(target_cloud))
177+
replacement_area_crop = os.path.join(tmpdir, f"{replacement_name}_{las_name}{ext}")
178+
print("replacement area crop: ", replacement_area_crop)
179+
180+
num_polygon = clip_area(replacement_area, replacement_area_crop, target_cloud)
181+
if num_polygon == 0:
182+
print("No polygon inside target cloud, output file is target cloud. We copy target in output.")
183+
184+
shutil.copy2(src=target_cloud, dst=output_cloud)
185+
end = time.time()
186+
print("all steps done in ", f"{end-start:.2f}", " seconds")
187+
return
188+
189+
replacement_area = replacement_area_crop
150190
crops = []
151191
# pipeline to read target_cloud and remove points inside the polygon
152192
pipeline_target = pdal.Pipeline()
@@ -160,7 +200,9 @@ def replace_area(
160200
# Keep only points out of the area
161201
pipeline_target |= pdal.Filter.expression(expression="geometryFid==-1", tag="A")
162202
target_count = pipeline_target.execute()
163-
print("Step 1: target count: ", target_count)
203+
204+
t1 = time.time()
205+
print(f"Step 1: target count: {target_count} points in {t1-start:.2f} seconds")
164206

165207
# get input dimensions dtype from target
166208
if pipeline_target.arrays:
@@ -171,15 +213,18 @@ def replace_area(
171213
pipeline_target2 |= pdal.Reader.las(filename=target_cloud)
172214
pipeline_target2.execute()
173215
input_dim_dtype = pipeline_target2.arrays[0].dtype
216+
t1_bis = time.time()
217+
print(f"Step 1-bis: re-read to have dimensions: {t1_bis-t1:.2f} seconds")
174218

219+
t2 = time.time()
175220
# get input dimensions names
176221
input_dimensions = list(input_dim_dtype.fields.keys())
177222

178223
# do not keep geometryFid
179224
output_dimensions = [dim for dim in input_dimensions if dim not in "geometryFid"]
180225

181226
# add target to the result after keeping only the expected dimensions
182-
if pipeline_target.arrays:
227+
if target_count:
183228
target_cloud_pruned = pipeline_target.arrays[0][output_dimensions]
184229
crops.append(target_cloud_pruned)
185230

@@ -192,7 +237,9 @@ def replace_area(
192237
# Keep only points in the area
193238
pipeline_source |= pdal.Filter.expression(expression="geometryFid>=0", tag="B")
194239
source_count = pipeline_source.execute()
195-
print("Step 2: source count: ", source_count)
240+
241+
t3 = time.time()
242+
print(f"Step 2: source count: {source_count} points in {t3-t2:.2f} seconds")
196243

197244
# add source to the result
198245
if source_count:
@@ -221,7 +268,11 @@ def replace_area(
221268

222269
writer_params = get_writer_params(target_cloud)
223270
pipeline |= pdal.Writer.las(filename=output_cloud, **writer_params)
224-
pipeline.execute()
271+
points = pipeline.execute()
272+
273+
end = time.time()
274+
print(f"Step 3: merge: { points }, points in {end-t3:.2f} seconds")
275+
print("all steps done in ", f"{end-start:.2f}", " seconds")
225276

226277

227278
if __name__ == "__main__":
Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
{
2+
"type": "FeatureCollection",
3+
"name": "replace_area",
4+
"crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:EPSG::2154" } },
5+
"features": [
6+
{ "type": "Feature", "properties": { "FID": 1 }, "geometry": { "type": "MultiPolygon", "coordinates": [ [ [ [ 961402.888587764813565, 6431696.486596554517746 ], [ 961420.320232908939943, 6431696.887324028648436 ], [ 961421.322051595314406, 6431676.750768431462348 ], [ 961402.988769633462653, 6431677.852768986485898 ], [ 961402.888587764813565, 6431696.486596554517746 ] ] ] ] } }
7+
]
8+
}

test/test_replace_area_in_pointcloud.py

Lines changed: 19 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -325,7 +325,7 @@ def test_main_from_cloud_with_filter():
325325
assert get_nb_points(output_file) == 6390
326326

327327

328-
def test_main_from_DSM():
328+
def test_main_from_DSM_light():
329329
output_file = os.path.join(TMP_PATH, "main_from_DSM", "output_main_from_DSM.laz")
330330
os.makedirs(os.path.dirname(output_file))
331331
cmd = (
@@ -338,3 +338,21 @@ def test_main_from_DSM():
338338
# same result as test_from_DMS
339339
counts = compute_count_one_file(output_file, "Classification")
340340
assert counts == {"1": 3841, "2": 2355, str(SOURCE_CLASSIF): 45}
341+
342+
343+
def test_main_from_DSM_light_replace_all():
    """Run the `from_DSM` CLI with a replacement area covering the whole tile:
    the output must contain only points generated from the source DSM."""
    out_las = os.path.join(TMP_PATH, "main_from_DSM_replace_around", "output_main_from_DSM.laz")
    os.makedirs(os.path.dirname(out_las))

    # Replacement area deliberately bigger than the tile itself.
    replace_area_2 = os.path.join(INPUT_DIR, "replace_area_2.geojson")
    argv = (
        f"from_DSM -d {SOURCE_DSM} -g {SOURCE_GROUND_MASK} -c {SOURCE_CLASSIF}"
        f" -t {TARGET_CLOUD} -r {replace_area_2} -o {out_las}"
    ).split()
    parsed = argument_parser().parse_args(argv)
    parsed.func(parsed)

    # Every surviving point carries the source classification: the target was fully replaced.
    per_class = compute_count_one_file(out_las, "Classification")
    assert per_class == {str(SOURCE_CLASSIF): 81}

0 commit comments

Comments
 (0)