Skip to content

Commit ecca895

Browse files
authored
Merge pull request #107 from IGNF/dev
dev dans master pour v1.10.0
2 parents 539611f + 0cdf951 commit ecca895

17 files changed

+462
-34
lines changed

.github/workflows/cicd_deploy.yml

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,8 +27,12 @@ jobs:
2727
id: build
2828
uses: docker/build-push-action@v5
2929
with:
30+
file: ./Dockerfile.pdal
3031
load: true
3132
tags: ${{ env.TEST_TAG }}
33+
build-args: |
34+
GITHUB_REPOSITORY=alavenant/PDAL
35+
GITHUB_SHA=master_07_05_25
3236
3337
# run the test on the docker image
3438
- name: Run tests in docker image
@@ -64,11 +68,16 @@ jobs:
6468
uses: docker/build-push-action@v5
6569
with:
6670
context: .
71+
file: ./Dockerfile.pdal
6772
push: true
6873
tags: ${{ steps.meta.outputs.tags }}
6974
labels: ${{ steps.meta.outputs.labels }}
75+
build-args: |
76+
GITHUB_REPOSITORY=alavenant/PDAL
77+
GITHUB_SHA=master_07_05_25
7078
7179
deploy-pypi:
80+
7281
runs-on: ubuntu-latest
7382

7483
environment:
@@ -97,11 +106,11 @@ jobs:
97106

98107
- name: Run tests with pytest
99108
shell: micromamba-shell {0}
100-
run: python -m pytest ./test -s --log-cli-level DEBUG
109+
run: python -m pytest ./test -s -m "not pdal_custom" --log-cli-level DEBUG
101110

102111
- name: Build pip package
103112
shell: micromamba-shell {0}
104113
run: make build
105114

106115
- name: pypi-publish
107-
uses: pypa/gh-action-pypi-publish@v1.12.4
116+
uses: pypa/gh-action-pypi-publish@v1.12.4

.github/workflows/cicd_full.yml

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,8 +32,13 @@ jobs:
3232
id: build
3333
uses: docker/build-push-action@v5
3434
with:
35+
context: .
36+
file: ./Dockerfile.pdal
3537
load: true
3638
tags: ${{ env.TEST_TAG }}
39+
build-args: |
40+
GITHUB_REPOSITORY=alavenant/PDAL
41+
GITHUB_SHA=master_07_05_25
3742
3843
# run the test on the docker image
3944
- name: Run tests in docker image
@@ -66,4 +71,4 @@ jobs:
6671

6772
- name: Run tests with pytest
6873
shell: micromamba-shell {0}
69-
run: python -m pytest ./test -s --log-cli-level DEBUG
74+
run: python -m pytest ./test -s -m "not pdal_custom" --log-cli-level DEBUG

.github/workflows/cicd_light.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -34,5 +34,5 @@ jobs:
3434

3535
- name: Run tests with pytest
3636
shell: micromamba-shell {0}
37-
run: python -m pytest ./test -s --log-cli-level DEBUG -m "not geopf"
37+
run: python -m pytest ./test -s --log-cli-level DEBUG -m "not geopf and not pdal_custom"
3838

CHANGELOG.md

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,13 @@
1+
# 1.10.0
2+
- custom PDAL: fix CI for cicd_full (build docker image with custom PDAL, and skip custom PDAL test for local pytest)
3+
- las_rename_dimension: new tool to rename one or many dimensions
4+
5+
# 1.9.1
6+
- las_add_points_to_pointcloud: Fix add points to LAS (use PDAL instead of Laspy)
7+
8+
# 1.9.0
9+
- custom PDAL: in the docker image, compile custom PDAL (waiting for PDAL 2.9)
10+
111
# 1.8.1
212
- add_points_in_pointcloud: fix case when there is no points to add in the las file extent (copy input file to the output)
313
- color: temporarily disable tests on no_data values in downloaded images

Dockerfile.pdal

Lines changed: 100 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,100 @@
1+
# code from https://github.com/PDAL/PDAL/blob/master/scripts/docker/ubuntu/Dockerfile
2+
FROM condaforge/mambaforge:latest AS mamba_pdal
3+
4+
ENV LANG=C.UTF-8 LC_ALL=C.UTF-8
5+
6+
RUN conda create -n pdaltools -y
7+
ARG GITHUB_SHA
8+
ARG GITHUB_REPOSITORY="PDAL/PDAL"
9+
ARG GITHUB_SERVER_URL="https://github.com"
10+
11+
SHELL ["conda", "run", "-n", "pdaltools", "/bin/bash", "-c"]
12+
13+
RUN mamba install -c conda-forge git compilers conda-pack cmake make ninja sysroot_linux-64=2.17 && \
14+
mamba install --yes -c conda-forge pdal --only-deps
15+
16+
RUN git clone "${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}" pdal && \
17+
cd pdal ; \
18+
git checkout ${GITHUB_SHA}
19+
20+
RUN mkdir -p pdal/build && \
21+
cd pdal/build && \
22+
CXXFLAGS="-Werror=strict-aliasing" LDFLAGS="-Wl,-rpath-link,$CONDA_PREFIX/lib" cmake -G Ninja \
23+
-DCMAKE_BUILD_TYPE=Release \
24+
-DCMAKE_LIBRARY_PATH:FILEPATH="$CONDA_PREFIX/lib" \
25+
-DCMAKE_INCLUDE_PATH:FILEPATH="$CONDA_PREFIX/include" \
26+
-DCMAKE_INSTALL_PREFIX="$CONDA_PREFIX" \
27+
-DBUILD_PLUGIN_CPD=OFF \
28+
-DBUILD_PLUGIN_PGPOINTCLOUD=ON \
29+
-DBUILD_PLUGIN_NITF=ON \
30+
-DBUILD_PLUGIN_ICEBRIDGE=ON \
31+
-DBUILD_PLUGIN_HDF=ON \
32+
-DBUILD_PLUGIN_TILEDB=ON \
33+
-DBUILD_PLUGIN_E57=ON \
34+
-DBUILD_PGPOINTCLOUD_TESTS=OFF \
35+
-DWITH_ZSTD=ON \
36+
..
37+
38+
RUN cd pdal/build && \
39+
ninja
40+
41+
RUN cd pdal/build && \
42+
ctest -V
43+
44+
RUN cd pdal/build && \
45+
ninja install
46+
47+
RUN git clone https://github.com/PDAL/python.git
48+
49+
RUN mamba install --yes -c conda-forge pybind11
50+
51+
RUN mkdir -p python/build && \
52+
cd python/build && \
53+
CXXFLAGS="-Werror=strict-aliasing" LDFLAGS="-Wl,-rpath-link,$CONDA_PREFIX/lib" cmake -G Ninja \
54+
-DCMAKE_BUILD_TYPE=Release \
55+
-DCMAKE_LIBRARY_PATH:FILEPATH="$CONDA_PREFIX/lib" \
56+
-DCMAKE_INCLUDE_PATH:FILEPATH="$CONDA_PREFIX/include" \
57+
-DCMAKE_INSTALL_PREFIX="$CONDA_PREFIX/lib/python3.13/site-packages/" \
58+
..
59+
60+
RUN cd python/build && ninja
61+
62+
RUN cd python/build && ctest -V
63+
64+
RUN cd python/build && ninja install
65+
66+
67+
RUN git clone https://github.com/PDAL/python-plugins.git pdal-python && \
68+
cd pdal-python && git checkout 1.6.2 && \
69+
pip install -vv . --no-deps
70+
71+
RUN conda-pack -n pdaltools --dest-prefix=/opt/conda/envs/pdaltools -o /tmp/env.tar && \
72+
mkdir /venv && cd /venv && tar xf /tmp/env.tar && \
73+
rm /tmp/env.tar
74+
75+
76+
# Add our environment
77+
RUN mamba install numpy requests gdal lastools geopandas pytest requests-mock tqdm pip
78+
# Install laspy with laszip laz compression (as laszip is an implementation of the laz 1.4 standard)
79+
RUN pip install laspy[lazrs]
80+
81+
82+
FROM debian:bullseye-slim
83+
84+
# install PDAL
85+
COPY --from=mamba_pdal /opt/conda/envs/pdaltools/bin/pdal /opt/conda/envs/pdaltools/bin/pdal
86+
COPY --from=mamba_pdal /opt/conda/envs/pdaltools/bin/python /opt/conda/envs/pdaltools/bin/python
87+
COPY --from=mamba_pdal /opt/conda/envs/pdaltools/bin/las2las /opt/conda/envs/pdaltools/bin/las2las
88+
COPY --from=mamba_pdal /opt/conda/envs/pdaltools/bin/lasinfo /opt/conda/envs/pdaltools/bin/lasinfo
89+
COPY --from=mamba_pdal /opt/conda/envs/pdaltools/lib/ /opt/conda/envs/pdaltools/lib/
90+
COPY --from=mamba_pdal /opt/conda/envs/pdaltools/ssl /opt/conda/envs/pdaltools/ssl
91+
COPY --from=mamba_pdal /opt/conda/envs/pdaltools/share/proj/proj.db /opt/conda/envs/pdaltools/share/proj/proj.db
92+
93+
ENV PATH=$PATH:/opt/conda/envs/pdaltools/bin/
94+
ENV PROJ_LIB=/opt/conda/envs/pdaltools/share/proj/
95+
ENV GDAL_DATA=/opt/conda/envs/pdaltools/share/gdal
96+
97+
WORKDIR /pdaltools
98+
RUN mkdir tmp
99+
COPY pdaltools pdaltools
100+
COPY test test

Makefile

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ install:
1919
##############################
2020

2121
testing:
22-
python -m pytest ./test -s --log-cli-level DEBUG -m "not geopf"
22+
python -m pytest ./test -s --log-cli-level DEBUG -m "not geopf and not pdal_custom"
2323

2424
testing_full:
2525
python -m pytest ./test -s --log-cli-level DEBUG
@@ -62,6 +62,12 @@ FULL_IMAGE_NAME=${REGISTRY}/${NAMESPACE}/${IMAGE_NAME}:${VERSION}
6262
docker-build: clean
6363
docker build --no-cache -t ${IMAGE_NAME}:${VERSION} -f Dockerfile .
6464

65+
docker-build-pdal: clean
66+
docker build --build-arg GITHUB_REPOSITORY=alavenant/PDAL --build-arg GITHUB_SHA=master_07_05_25 -t ${IMAGE_NAME}:${VERSION} -f Dockerfile.pdal .
67+
68+
docker-test-pdal: clean
69+
docker run --rm -t ${IMAGE_NAME}:${VERSION} python -m pytest -m "not geopf" --log-cli-level=debug
70+
6571
docker-test:
6672
docker run --rm -it ${IMAGE_NAME}:${VERSION} python -m pytest -s
6773

pdaltools/_version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
__version__ = "1.8.1"
1+
__version__ = "1.10.0"
22

33

44
if __name__ == "__main__":

pdaltools/add_points_in_pointcloud.py

Lines changed: 27 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import argparse
22
from shutil import copy2
3+
import tempfile
34

45
import geopandas as gpd
56
import laspy
@@ -10,6 +11,8 @@
1011

1112
from pdaltools.las_info import get_epsg_from_las, get_tile_bbox
1213

14+
import pdal
15+
1316

1417
def parse_args(argv=None):
1518
parser = argparse.ArgumentParser("Add points from GeoJSON in LIDAR tile")
@@ -127,13 +130,12 @@ def add_points_to_las(
127130
crs (str): CRS of the data.
128131
virtual_points_classes (int): The classification value to assign to those virtual points (default: 66).
129132
"""
130-
# Copy data pointcloud
131-
copy2(input_las, output_las)
132133

133134
if input_points_with_z.empty:
134135
print(
135136
"No points to add. All points of the geojson file are outside the tile. Copying the input file to output"
136137
)
138+
copy2(input_las, output_las)
137139
return
138140

139141
# Extract XYZ coordinates and additional attribute (classification)
@@ -148,24 +150,30 @@ def add_points_to_las(
148150
header = las.header
149151
if not header:
150152
header = laspy.LasHeader(point_format=8, version="1.4")
153+
154+
new_points = laspy.ScaleAwarePointRecord.zeros(nb_points, header=header) # use header for input_las
155+
# then fill in the gaps (X, Y, Z and classification)
156+
new_points.x = x_coords.astype(new_points.x.dtype)
157+
new_points.y = y_coords.astype(new_points.y.dtype)
158+
new_points.z = z_coords.astype(new_points.z.dtype)
159+
new_points.classification = classes.astype(new_points.classification.dtype)
160+
161+
with tempfile.NamedTemporaryFile(suffix="_new_points.las") as tmp:
162+
with laspy.open(tmp.name, mode="w", header=header) as las_file:
163+
las_file.write_points(new_points)
164+
151165
if crs:
152-
try:
153-
crs_obj = CRS.from_user_input(crs) # Convert to a pyproj.CRS object
154-
except CRSError:
155-
raise ValueError(f"Invalid CRS: {crs}")
156-
header.add_crs(crs_obj)
157-
158-
# Add the new points with 3D points
159-
with laspy.open(output_las, mode="a", header=header) as output_las: # mode `a` for adding points
160-
# create nb_points points with "0" everywhere
161-
new_points = laspy.ScaleAwarePointRecord.zeros(nb_points, header=header) # use header for input_las
162-
# then fill in the gaps (X, Y, Z an classification)
163-
new_points.x = x_coords.astype(new_points.x.dtype)
164-
new_points.y = y_coords.astype(new_points.y.dtype)
165-
new_points.z = z_coords.astype(new_points.z.dtype)
166-
new_points.classification = classes.astype(new_points.classification.dtype)
167-
168-
output_las.append_points(new_points)
166+
a_srs = crs
167+
else:
168+
a_srs = get_epsg_from_las(input_las)
169+
170+
# Use pdal to merge the new points with the existing points
171+
pipeline = pdal.Pipeline()
172+
pipeline |= pdal.Reader.las(filename=input_las)
173+
pipeline |= pdal.Reader.las(filename=tmp.name)
174+
pipeline |= pdal.Filter.merge()
175+
pipeline |= pdal.Writer.las(filename=output_las, forward="all", a_srs=a_srs)
176+
pipeline.execute()
169177

170178

171179
def line_to_multipoint(line, spacing: float, z_value: float = None):

pdaltools/count_occurences/count_occurences_for_attribute.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
"""Count occurences of each value of a given attribute in a set of pointclouds.
2-
Eg. to count points of each class in classified point clouds """
2+
Eg. to count points of each class in classified point clouds"""
33

44
import argparse
55
import json

pdaltools/las_add_buffer.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,6 @@ def remove_points_from_buffer(input_file: str, output_file: str):
158158
pipeline |= pdal.Filter.range(limits=f"{ORIGINAL_TILE_TAG}[1:1]")
159159
pipeline |= pdal.Writer.las(filename=tmp_las.name, forward="all", extra_dims="all")
160160
pipeline.execute()
161-
162161
remove_dimensions_from_las(tmp_las.name, dimensions=[ORIGINAL_TILE_TAG], output_las=output_file)
163162

164163

0 commit comments

Comments
 (0)