Merge pull request #44 from IGNF/dev
Merge version 1.5.2 to master
leavauchier authored Feb 13, 2024
2 parents caeb5ed + 839e822 commit 2b4664f
Showing 8 changed files with 122 additions and 45 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -1,6 +1,10 @@
# 1.5.2
- refactor the tool that propagates header info from one pipeline to another so that it can be used on its own

# 1.5.1
- fix add_buffer: propagate header info from the input to the output
- update pdal.Writer params to make sure the input format is forwarded, except for the specified parameters
- add test for colorization with epsg != 2154

# 1.5.0
- switch colorisation source from Geoportail to Geoplateforme
12 changes: 12 additions & 0 deletions README.md
@@ -26,10 +26,22 @@ This library can be used in different ways:
* used in a docker container: see documentation [Dockerfile](Dockerfile)

# More details on the contained tools

## Colorization

* [color.py](pdaltools/color.py): Colorize a point cloud from Geoplateforme data (see the sketch below)
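
A minimal usage sketch (the input/output paths here are hypothetical; the call mirrors the ones used in this repository's tests, and the EPSG argument can be omitted to let it be detected from the file):

```python
from pdaltools import color

# Hypothetical paths, for illustration only
input_path = "Semis_2021_0435_6292_LA93_IGN69.laz"
output_path = "Semis_2021_0435_6292_LA93_IGN69.colorized.laz"

# Colorize the point cloud from Geoplateforme orthoimages, forcing EPSG:2154
color.color(input_path, output_path, 2154)
```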

## Las infos

Miscellaneous tools to get information about a LAS file, e.g. retrieve its metadata, find its EPSG value, find its bounds, or get parameters to pass to a writer. They are intended to be used from the pdaltools module, for example:

```python
from pdaltools import las_info

filename = ...
las_info.las_info_metadata(filename)
```

## Stitching

* [las_clip.py](pdaltools/las_clip.py): crop a LAS file using 2d bounding box
2 changes: 1 addition & 1 deletion pdaltools/_version.py
@@ -1,4 +1,4 @@
__version__ = "1.5.1"
__version__ = "1.5.2"


if __name__ == "__main__":
41 changes: 5 additions & 36 deletions pdaltools/las_add_buffer.py
@@ -1,11 +1,14 @@
import argparse
import logging
import os
from typing import Dict, List
from typing import List

import pdal

from pdaltools.las_info import get_buffered_bounds_from_filename
from pdaltools.las_info import (
    get_buffered_bounds_from_filename,
    get_writer_parameters_from_reader_metadata,
)
from pdaltools.las_merge import create_list


@@ -46,40 +49,6 @@ def create_las_with_buffer(
)


def get_writer_parameters_from_reader_metadata(metadata: Dict, a_srs=None) -> Dict:
    """As pdal las writers do not make it easy to pass metadata from one file as
    parameters for a writer, use a trick to generate writer parameters from the
    reader metadata of a previous pipeline:
    this function uses the metadata from the reader of a pipeline to provide parameters
    to pass to the writer of another pipeline.
    To be removed once https://github.com/PDAL/python/issues/147 is solved.
    Args:
        metadata (Dict): metadata of an executed pipeline (that can be accessed using pipeline.metadata)
    Returns:
        Dict: parameters to pass to a pdal writer
    """

    reader_metadata = metadata["metadata"]["readers.las"]

    params = {
        "major_version": reader_metadata["major_version"],
        "minor_version": reader_metadata["minor_version"],
        "global_encoding": reader_metadata["global_encoding"],
        "extra_dims": "all",
        "scale_x": reader_metadata["scale_x"],
        "scale_y": reader_metadata["scale_y"],
        "scale_z": reader_metadata["scale_z"],
        "offset_x": reader_metadata["offset_x"],
        "offset_y": reader_metadata["offset_y"],
        "offset_z": reader_metadata["offset_z"],
        "dataformat_id": reader_metadata["dataformat_id"],
        "a_srs": a_srs if a_srs else reader_metadata["comp_spatialreference"],
    }
    return params


def las_merge_and_crop(
    input_dir: str,
    tile_filename: str,
36 changes: 35 additions & 1 deletion pdaltools/las_info.py
@@ -1,7 +1,7 @@
import json
import logging
import os
from typing import Tuple
from typing import Dict, Tuple

import osgeo.osr as osr
import pdal
@@ -151,3 +151,37 @@ def get_buffered_bounds_from_filename(
    ys = [minY - buffer_width, maxY + buffer_width]

    return (xs, ys)


def get_writer_parameters_from_reader_metadata(metadata: Dict, a_srs=None) -> Dict:
    """As pdal las writers do not make it easy to pass metadata from one file as
    parameters for a writer, use a trick to generate writer parameters from the
    reader metadata of a previous pipeline:
    this function uses the metadata from the reader of a pipeline to provide parameters
    to pass to the writer of another pipeline.
    To be removed once https://github.com/PDAL/python/issues/147 is solved.
    Args:
        metadata (Dict): metadata of an executed pipeline (that can be accessed using pipeline.metadata)
    Returns:
        Dict: parameters to pass to a pdal writer
    """

    reader_metadata = metadata["metadata"]["readers.las"]

    params = {
        "major_version": reader_metadata["major_version"],
        "minor_version": reader_metadata["minor_version"],
        "global_encoding": reader_metadata["global_encoding"],
        "extra_dims": "all",
        "scale_x": reader_metadata["scale_x"],
        "scale_y": reader_metadata["scale_y"],
        "scale_z": reader_metadata["scale_z"],
        "offset_x": reader_metadata["offset_x"],
        "offset_y": reader_metadata["offset_y"],
        "offset_z": reader_metadata["offset_z"],
        "dataformat_id": reader_metadata["dataformat_id"],
        "a_srs": a_srs if a_srs else reader_metadata["comp_spatialreference"],
    }
    return params
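
A minimal sketch of the intended two-pipeline usage (the file paths are hypothetical; the calls mirror the test added in test/test_las_info.py below):

```python
import pdal

from pdaltools.las_info import get_writer_parameters_from_reader_metadata

# Hypothetical input/output paths
input_file = "input.laz"
output_file = "output.laz"

# First pipeline: read the data (a real use case would add processing stages here)
pipeline1 = pdal.Pipeline()
pipeline1 |= pdal.Reader.las(filename=input_file)
pipeline1.execute()

# Second pipeline: write the data, reusing the header parameters of the input file
params = get_writer_parameters_from_reader_metadata(pipeline1.metadata)
pipeline2 = pdal.Pipeline(arrays=[pipeline1.arrays[0]])
pipeline2 |= pdal.Writer.las(filename=output_file, **params)
pipeline2.execute()
```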
Binary file added test/data/sample_lareunion_epsg2975.laz
31 changes: 24 additions & 7 deletions test/test_color.py
@@ -10,7 +10,8 @@

cwd = os.getcwd()

TMPDIR = cwd + "/tmp/"
TEST_PATH = os.path.dirname(os.path.abspath(__file__))
TMPDIR = os.path.join(TEST_PATH, "tmp")


def setup_module(module):
@@ -21,12 +22,9 @@ def setup_module(module):
    os.mkdir(TMPDIR)


TEST_PATH = os.path.dirname(os.path.abspath(__file__))
INPUT_PATH = os.path.join(TEST_PATH, "data/test_noepsg_043500_629205_IGN69.laz")
INPUT_PATH_SINGLE_POINT_CLOUD = os.path.join(TEST_PATH, "data/test_data_0436_6384_LA93_IGN69_single_point.laz")

OUTPUT_FILE = TMPDIR + "Semis_2021_0435_6292_LA93_IGN69.las"
OUTPUT_FILE_SINGLE_POINT_CLOUD = TMPDIR + "test_data_0436_6384_LA93_IGN69_single_point.colorized.laz"
OUTPUT_FILE = os.path.join(TMPDIR, "Semis_2021_0435_6292_LA93_IGN69.colorized.las")


@pytest.mark.geopf
@@ -56,13 +54,32 @@ def test_color_and_keeping_orthoimages():

@pytest.mark.geopf
def test_color_narrow_cloud():
input_path = os.path.join(TEST_PATH, "data/test_data_0436_6384_LA93_IGN69_single_point.laz")
output_path = os.path.join(TMPDIR, "test_data_0436_6384_LA93_IGN69_single_point.colorized.laz")
# Test that clouds that are smaller in width or height to 20cm are still clorized without an error.
color.color(INPUT_PATH_SINGLE_POINT_CLOUD, OUTPUT_FILE_SINGLE_POINT_CLOUD, epsg)
color.color(input_path, output_path, epsg)


@pytest.mark.geopf
def test_download_image_ok():
    color.download_image_from_geoplateforme(epsg, layer, minx, miny, maxx, maxy, pixel_per_meter, OUTPUT_FILE, 15)
    tif_output = os.path.join(TMPDIR, "download_image.tif")
    color.download_image_from_geoplateforme(epsg, layer, minx, miny, maxx, maxy, pixel_per_meter, tif_output, 15)


@pytest.mark.geopf
def test_color_epsg_2975_forced():
input_path = os.path.join(TEST_PATH, "data/sample_lareunion_epsg2975.laz")
output_path = os.path.join(TMPDIR, "sample_lareunion_epsg2975.colorized.laz")
# Test that clouds that are smaller in width or height to 20cm are still clorized without an error.
color.color(input_path, output_path, 2975)


@pytest.mark.geopf
def test_color_epsg_2975_detected():
input_path = os.path.join(TEST_PATH, "data/sample_lareunion_epsg2975.laz")
output_path = os.path.join(TMPDIR, "sample_lareunion_epsg2975.colorized.laz")
# Test that clouds that are smaller in width or height to 20cm are still clorized without an error.
color.color(input_path, output_path)


@pytest.mark.geopf
41 changes: 41 additions & 0 deletions test/test_las_info.py
@@ -1,6 +1,7 @@
import os
import test.utils as tu

import pdal
import pytest

from pdaltools import las_info
@@ -90,3 +91,43 @@ def test_get_buffered_bounds_from_filename_with_buffer():
    )
    assert xs == [770550 - buffer_width, 770600 + buffer_width]
    assert ys == [6277550 - buffer_width, 6277600 + buffer_width]


def test_get_writer_parameters_from_reader_metadata():
    output_file = os.path.join(TMP_PATH, "writer_with_parameters.las")
    output_expected = os.path.join(TMP_PATH, "writer_with_forward.las")

    # First generate the expected output from a pipeline in a single part:
    pipeline = pdal.Pipeline()
    pipeline |= pdal.Reader.las(filename=INPUT_FILE)
    pipeline |= pdal.Writer.las(
        filename=output_expected,
        forward="all",
    )
    pipeline.execute()

    # Use pdal info summary to get metadata about the las file format, not only about the data
    out_expected_metadata = tu.get_pdal_infos_summary(output_expected)["summary"]["metadata"]

    # Generate output from a pipeline separated in 2 parts
    pipeline1 = pdal.Pipeline()
    pipeline1 |= pdal.Reader.las(filename=INPUT_FILE)
    # At this point, a useful pipeline would contain additional processing stages
    pipeline1.execute()
    out_data = pipeline1.arrays[0]
    metadata = pipeline1.metadata

    # In the expected use case, you would modify the data here
    params = las_info.get_writer_parameters_from_reader_metadata(metadata)
    pipeline2 = pdal.Pipeline(arrays=[out_data])
    pipeline2 |= pdal.Writer(filename=output_file, forward="all", **params)
    pipeline2.execute()

    out_metadata = tu.get_pdal_infos_summary(output_file)["summary"]["metadata"]

    # Pop metadata that we don't expect to be the same
    for key in ["creation_year", "creation_doy", "software_id"]:
        out_metadata.pop(key)
        out_expected_metadata.pop(key)

    assert out_metadata == out_expected_metadata
