Skip to content

Commit

Permalink
Merge pull request #67 from IGNF/dev
Browse files Browse the repository at this point in the history
Merge dev to master for version 1.7.4
  • Loading branch information
leavauchier authored Oct 10, 2024
2 parents 2e25149 + 8ab8905 commit 543796f
Show file tree
Hide file tree
Showing 10 changed files with 123 additions and 40 deletions.
7 changes: 5 additions & 2 deletions .github/workflows/cicd_light.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,11 @@ on:


jobs:
docker_build_and_test:
runs-on: ubuntu-latest
test_light:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
permissions:
contents: read
packages: write
Expand Down
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# 1.7.4
- Color: fix images bbox to prevent edge cases where points were located exactly on the edge of the last pixel
- Add possibility to remove points of some classes in standardize

# 1.7.3
- Add method to get a point cloud origin

Expand Down
2 changes: 1 addition & 1 deletion pdaltools/_version.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
__version__ = "1.7.3"
__version__ = "1.7.4"


if __name__ == "__main__":
Expand Down
22 changes: 13 additions & 9 deletions pdaltools/color.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,11 +69,11 @@ def is_image_white(filename: str):
def download_image_from_geoplateforme(
proj, layer, minx, miny, maxx, maxy, pixel_per_meter, outfile, timeout, check_images
):
# Give single-point clouds a width/height of at least one pixel to have valid BBOX and SIZE
if minx == maxx:
maxx = minx + 1 / pixel_per_meter
if miny == maxy:
maxy = miny + 1 / pixel_per_meter
# Force a 1-pixel margin in the east and south borders
# to make sure that no point of the pointcloud is on the limit of the last pixel
# to prevent interpolation issues
maxx = maxx + 1 / pixel_per_meter
miny = miny - 1 / pixel_per_meter

# for layer in layers:
URL_GPP = "https://data.geopf.fr/wms-r/wms?"
Expand Down Expand Up @@ -136,22 +136,26 @@ def color(

tmp_ortho = None
if color_rvb_enabled:
tmp_ortho = tempfile.NamedTemporaryFile()
tmp_ortho = tempfile.NamedTemporaryFile(suffix="_rvb.tif")
download_image_from_geoplateforme_retrying(
proj, stream_RGB, minx, miny, maxx, maxy, pixel_per_meter, tmp_ortho.name, timeout_second, check_images
)

# Warning: the initial color is multiplied by 256 despite its initial 8-bits encoding
# which turns it to a 0 to 255*256 range.
# It is kept this way because of other dependencies that have been tuned to fit this range
pipeline |= pdal.Filter.colorization(
raster=tmp_ortho.name, dimensions="Red:1:256.0, Green:2:256.0, Blue:3:256.0"
)

tmp_ortho_irc = None
if color_ir_enabled:
tmp_ortho_irc = tempfile.NamedTemporaryFile()
tmp_ortho_irc = tempfile.NamedTemporaryFile(suffix="_irc.tif")
download_image_from_geoplateforme_retrying(
proj, stream_IRC, minx, miny, maxx, maxy, pixel_per_meter, tmp_ortho_irc.name, timeout_second, check_images
)

# Warning: the initial color is multiplied by 256 despite its initial 8-bits encoding
# which turns it to a 0 to 255*256 range.
# It is kept this way because of other dependencies that have been tuned to fit this range
pipeline |= pdal.Filter.colorization(raster=tmp_ortho_irc.name, dimensions="Infrared:1:256.0")

pipeline |= pdal.Writer.las(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def compute_count_one_file(filepath: str, attribute: str = "Classification") ->
pipeline |= pdal.Filter.stats(dimensions=attribute, count=attribute)
pipeline.execute()
# List of "class/count" on the only dimension that is counted
raw_counts = pipeline.metadata["metadata"]["filters.stats"]["statistic"][0]["counts"]
raw_counts = pipeline.metadata["metadata"]["filters.stats"]["statistic"][0].get("counts", [])
split_counts = [c.split("/") for c in raw_counts]
try:
# Try to prettify the value by converting it to an integer (eg. for Classification that
Expand Down
3 changes: 2 additions & 1 deletion pdaltools/las_remove_dimensions.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import pdal
from pdaltools.las_info import get_writer_parameters_from_reader_metadata


def remove_dimensions_from_las(input_las: str, dimensions: [str], output_las: str):
"""
export new las without some dimensions
Expand Down Expand Up @@ -43,7 +44,7 @@ def parse_args():
required=True,
nargs="+",
help="The dimension we would like to remove from the point cloud file ; be aware to not remove mandatory "
"dimensions of las"
"dimensions of las",
)

return parser.parse_args()
Expand Down
42 changes: 31 additions & 11 deletions pdaltools/standardize_format.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,10 @@

import argparse
import os
import platform
import subprocess as sp
import tempfile
from typing import Dict
from typing import Dict, List

import pdal

Expand All @@ -32,6 +33,7 @@
offset_z=0,
dataformat_id=6, # No color by default
a_srs="EPSG:2154",
class_points_removed=[], # remove points from class
)


Expand All @@ -43,6 +45,13 @@ def parse_args():
"--record_format", choices=[6, 8], type=int, help="Record format: 6 (no color) or 8 (4 color channels)"
)
parser.add_argument("--projection", default="EPSG:2154", type=str, help="Projection, eg. EPSG:2154")
parser.add_argument(
"--class_points_removed",
default=[],
nargs="*",
type=str,
help="List of classes number. Points of this classes will be removed from the file",
)
parser.add_argument(
"--extra_dims",
default=[],
Expand All @@ -51,7 +60,6 @@ def parse_args():
help="List of extra dims to keep in the output (default=[], use 'all' to keep all extra dims), "
"extra_dims must be specified with their type (see pdal.writers.las documentation, eg 'dim1=double')",
)

return parser.parse_args()


Expand All @@ -61,20 +69,28 @@ def get_writer_parameters(new_parameters: Dict) -> Dict:
override the standard ones
"""
params = STANDARD_PARAMETERS | new_parameters

return params


def rewrite_with_pdal(input_file: str, output_file: str, params_from_parser: Dict) -> None:
# Update parameters with command line values
def rewrite_with_pdal(
    input_file: str, output_file: str, params_from_parser: Dict, classes_to_remove: "List | None" = None
) -> None:
    """Rewrite a las/laz file with standardized writer parameters, optionally dropping points.

    Args:
        input_file: path to the input las/laz file.
        output_file: path to the output file, written with pdal.Writer.
        params_from_parser: writer parameters that override the standard ones
            (merged via get_writer_parameters).
        classes_to_remove: classification codes; points whose Classification matches
            any of these codes are removed. Defaults to None (keep all points).
            A None sentinel is used instead of a mutable [] default (B006).
    """
    params = get_writer_parameters(params_from_parser)
    pipeline = pdal.Pipeline()
    pipeline |= pdal.Reader.las(input_file)
    if classes_to_remove:
        # pdal expression syntax: chain one exclusion per class with logical AND
        expression = "&&".join([f"Classification != {c}" for c in classes_to_remove])
        pipeline |= pdal.Filter.expression(expression=expression)
    pipeline |= pdal.Writer(filename=output_file, forward="all", **params)
    pipeline.execute()


def exec_las2las(input_file: str, output_file: str):
r = sp.run(["las2las", "-i", input_file, "-o", output_file], stderr=sp.PIPE, stdout=sp.PIPE)
if platform.processor() == "arm" and platform.architecture()[0] == "64bit":
las2las = "las2las64"
else:
las2las = "las2las"
r = sp.run([las2las, "-i", input_file, "-o", output_file], stderr=sp.PIPE, stdout=sp.PIPE)
if r.returncode == 1:
msg = r.stderr.decode()
print(msg)
Expand All @@ -86,14 +102,18 @@ def exec_las2las(input_file: str, output_file: str):


@copy_and_hack_decorator
def standardize(input_file: str, output_file: str, params_from_parser: Dict, class_points_removed: List) -> None:
    """Standardize a las/laz file: rewrite it with pdal, then pass it through las2las.

    Args:
        input_file: path to the input las/laz file.
        output_file: path to the standardized output file.
        params_from_parser: writer parameters overriding the standard ones.
        class_points_removed: classification codes whose points are dropped during
            the pdal rewrite. (Annotation fixed: a bare `[]` literal is not a type.)
    """
    filename = os.path.basename(output_file)
    # Rewrite to a temporary file first so las2las reads a fully written,
    # pdal-standardized file before producing the final output.
    with tempfile.NamedTemporaryFile(suffix=filename) as tmp:
        rewrite_with_pdal(input_file, tmp.name, params_from_parser, class_points_removed)
        exec_las2las(tmp.name, output_file)


if __name__ == "__main__":
args = parse_args()
params_from_parser = dict(dataformat_id=args.record_format, a_srs=args.projection, extra_dims=args.extra_dims)
standardize(args.input_file, args.output_file, params_from_parser)
params_from_parser = dict(
dataformat_id=args.record_format,
a_srs=args.projection,
extra_dims=args.extra_dims,
)
standardize(args.input_file, args.output_file, params_from_parser, args.class_points_removed)
5 changes: 5 additions & 0 deletions script/test/test_run_remove_classes_in_las.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
python -m pdaltools.standardize_format \
--input_file test/data/classified_laz/test_data_77050_627755_LA93_IGN69.laz \
--output_file test/tmp/replaced_cmdline.laz \
--record_format 6 \
--class_points_removed 2 \
17 changes: 11 additions & 6 deletions test/test_las_remove_dimensions.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,16 +13,22 @@
ini_las = os.path.join(INPUT_DIR, "test_data_77055_627760_LA93_IGN69.laz")
added_dimensions = ["DIM_1", "DIM_2"]

def get_points(input_las : str):

def get_points(input_las: str):
    """Read a las/laz file and return its first pdal point array."""
    reader_pipeline = pdal.Pipeline()
    reader_pipeline |= pdal.Reader.las(input_las)
    reader_pipeline.execute()
    return reader_pipeline.arrays[0]

def append_dimension(input_las : str, output_las : str):

def append_dimension(input_las: str, output_las: str):
    """Copy input_las to output_las, adding the module-level `added_dimensions` as extra dims."""
    # pdal ferry syntax "=>DIM" creates DIM without copying from an existing dimension
    ferry_spec = "=>" + ", =>".join(added_dimensions)
    stages = pdal.Pipeline()
    stages |= pdal.Reader.las(input_las)
    stages |= pdal.Filter.ferry(dimensions=ferry_spec)
    stages |= pdal.Writer.las(output_las, extra_dims="all", forward="all")
    stages.execute()


Expand Down Expand Up @@ -52,10 +58,9 @@ def test_remove_one_dimension():
las_remove_dimensions.remove_dimensions_from_las(tmp_las.name, ["DIM_1"], tmp_las_rm.name)
points_end = get_points(tmp_las_rm.name)

assert list(points_end.dtype.fields.keys()).index("DIM_2") >= 0# should still contains DIM_2
assert list(points_end.dtype.fields.keys()).index("DIM_2") >= 0 # should still contains DIM_2

with pytest.raises(ValueError):
list(points_end.dtype.fields.keys()).index("DIM_1") # should not have DIM_1
assert "DIM_1" not in points_end.dtype.fields.keys(), "LAS should not have dimension DIM_1"

with pytest.raises(TypeError):
numpy.array_equal(points_ini, points_end) # output data should not be the same
Expand Down
59 changes: 50 additions & 9 deletions test/test_standardize_format.py
Original file line number Diff line number Diff line change
@@ -1,20 +1,26 @@
import logging
import os
import platform
import shutil
import subprocess as sp
from test.utils import EXPECTED_DIMS_BY_DATAFORMAT, get_pdal_infos_summary

import pdal
import pytest

from pdaltools.count_occurences.count_occurences_for_attribute import (
compute_count_one_file,
)
from pdaltools.standardize_format import exec_las2las, rewrite_with_pdal, standardize

TEST_PATH = os.path.dirname(os.path.abspath(__file__))
TMP_PATH = os.path.join(TEST_PATH, "tmp")
INPUT_DIR = os.path.join(TEST_PATH, "data")

DEFAULT_PARAMS = {"dataformat_id": 6, "a_srs": "EPSG:2154", "extra_dims": []}

MUTLIPLE_PARAMS = [
{"dataformat_id": 6, "a_srs": "EPSG:2154", "extra_dims": []},
DEFAULT_PARAMS,
{"dataformat_id": 8, "a_srs": "EPSG:4326", "extra_dims": []},
{"dataformat_id": 8, "a_srs": "EPSG:2154", "extra_dims": ["dtm_marker=double", "dsm_marker=double"]},
{"dataformat_id": 8, "a_srs": "EPSG:2154", "extra_dims": "all"},
Expand All @@ -30,8 +36,19 @@ def setup_module(module):
os.mkdir(TMP_PATH)


def _test_standardize_format_one_params_set(input_file, output_file, params):
rewrite_with_pdal(input_file, output_file, params)
@pytest.mark.parametrize(
"params",
[
DEFAULT_PARAMS,
{"dataformat_id": 8, "a_srs": "EPSG:4326", "extra_dims": []},
{"dataformat_id": 8, "a_srs": "EPSG:2154", "extra_dims": ["dtm_marker=double", "dsm_marker=double"]},
{"dataformat_id": 8, "a_srs": "EPSG:2154", "extra_dims": "all"},
],
)
def test_standardize_format(params):
input_file = os.path.join(INPUT_DIR, "test_data_77055_627755_LA93_IGN69_extra_dims.laz")
output_file = os.path.join(TMP_PATH, "formatted.laz")
rewrite_with_pdal(input_file, output_file, params, [])
# check file exists
assert os.path.isfile(output_file)
# check values from metadata
Expand All @@ -54,19 +71,43 @@ def _test_standardize_format_one_params_set(input_file, output_file, params):
extra_dims_names = [dim.split("=")[0] for dim in params["extra_dims"]]
assert dimensions == EXPECTED_DIMS_BY_DATAFORMAT[params["dataformat_id"]].union(extra_dims_names)

# Check that there is the expected number of points for each class
expected_points_counts = compute_count_one_file(input_file)

output_points_counts = compute_count_one_file(output_file)
assert output_points_counts == expected_points_counts

# TODO: Check srs
# TODO: check precision


def test_standardize_format():
@pytest.mark.parametrize(
"classes_to_remove",
[
[],
[2, 3],
[1, 2, 3, 4, 5, 6, 64], # remove all classes
],
)
def test_standardize_classes(classes_to_remove):
input_file = os.path.join(INPUT_DIR, "test_data_77055_627755_LA93_IGN69_extra_dims.laz")
output_file = os.path.join(TMP_PATH, "formatted.laz")
for params in MUTLIPLE_PARAMS:
_test_standardize_format_one_params_set(input_file, output_file, params)
rewrite_with_pdal(input_file, output_file, DEFAULT_PARAMS, classes_to_remove)
# Check that there is the expected number of points for each class
expected_points_counts = compute_count_one_file(input_file)
for cl in classes_to_remove:
expected_points_counts.pop(str(cl))

output_points_counts = compute_count_one_file(output_file)
assert output_points_counts == expected_points_counts


def exec_lasinfo(input_file: str):
r = sp.run(["lasinfo", "-stdout", input_file], stderr=sp.PIPE, stdout=sp.PIPE)
if platform.processor() == "arm" and platform.architecture()[0] == "64bit":
lasinfo = "lasinfo64"
else:
lasinfo = "lasinfo"
r = sp.run([lasinfo, "-stdout", input_file], stderr=sp.PIPE, stdout=sp.PIPE)
if r.returncode == 1:
msg = r.stderr.decode()
print(msg)
Expand Down Expand Up @@ -102,14 +143,14 @@ def test_standardize_does_NOT_produce_any_warning_with_Lasinfo():
# if you want to see input_file warnings
# assert_lasinfo_no_warning(input_file)

standardize(input_file, output_file, MUTLIPLE_PARAMS[0])
standardize(input_file, output_file, DEFAULT_PARAMS, [])
assert_lasinfo_no_warning(output_file)


def test_standardize_malformed_laz():
input_file = os.path.join(TEST_PATH, "data/test_pdalfail_0643_6319_LA93_IGN69.laz")
output_file = os.path.join(TMP_PATH, "standardize_pdalfail_0643_6319_LA93_IGN69.laz")
standardize(input_file, output_file, MUTLIPLE_PARAMS[0])
standardize(input_file, output_file, DEFAULT_PARAMS, [])
assert os.path.isfile(output_file)


Expand Down

0 comments on commit 543796f

Please sign in to comment.