Merge pull request #29 from IGNF/dev
Version 1.4.0 to master
leavauchier authored Oct 31, 2023
2 parents d6d5504 + 7dbc37b commit 7addb4d
Showing 13 changed files with 88 additions and 72 deletions.
5 changes: 4 additions & 1 deletion CHANGELOG.md
@@ -1,4 +1,7 @@
-# dev
+# 1.4.0
+- count_occurences / replace_value: add copy_and_hack decorator to run on tscan output files
+- Update to pdal 2.6+ to better handle classification values and flags in replace_attribute_in_las
+(was treating values over 31 as {classification under 31 + flag} even when saving to LAS 1.4)

# 1.3.1
- fix color: ensure that tmp orthoimages are deleted after use by using the namedTemporaryFile properly.
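A note on the pdal 2.6 entry above: in LAS 1.4 point formats 6 and up, classification values above 31 are valid on their own and are distinct from the classification flags (synthetic, key-point, withheld, overlap). The sketch below only illustrates writing such values with python-pdal, with hypothetical file names; it is not code from this repository.

import pdal

# Reclassify some points to an extended class (> 31) and keep it as-is in LAS 1.4.
pipeline = pdal.Reader.las("input.laz")  # hypothetical input
pipeline |= pdal.Filter.assign(value=["Classification = 65 WHERE Classification == 6"])
pipeline |= pdal.Writer.las("output.laz", minor_version=4, dataformat_id=6)
pipeline.execute()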
2 changes: 1 addition & 1 deletion environment.yml
@@ -3,7 +3,7 @@ channels:
- conda-forge
dependencies:
- python=3.10.*
-  - conda-forge:pdal==2.5.*
+  - conda-forge:pdal>=2.6.*
- conda-forge:python-pdal==3.2.*
- requests
- gdal
2 changes: 1 addition & 1 deletion pdaltools/_version.py
@@ -1,4 +1,4 @@
-__version__ = "1.3.1"
+__version__ = "1.4.0"


if __name__ == "__main__":
3 changes: 3 additions & 0 deletions pdaltools/count_occurences/count_occurences_for_attribute.py
@@ -10,6 +10,8 @@
from tqdm import tqdm
from typing import List

+from pdaltools.unlock_file import copy_and_hack_decorator


def parse_args():
parser = argparse.ArgumentParser("Count points with each value of an attribute.")
@@ -25,6 +27,7 @@ def parse_args():
return parser.parse_args()


+@copy_and_hack_decorator
def compute_count_one_file(filepath: str, attribute: str = "Classification") -> Counter:
    pipeline = pdal.Reader.las(filepath)
    pipeline |= pdal.Filter.stats(dimensions=attribute, count=attribute)
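copy_and_hack_decorator is imported from pdaltools.unlock_file; its implementation is not part of this diff. As a rough, assumed illustration of the pattern (the real decorator may behave differently, e.g. patch TerraScan headers or only retry on failure), a wrapper of this kind runs the decorated function on a temporary copy of the input file:

import functools
import os
import shutil
import tempfile


def copy_and_run_decorator(func):  # illustrative name, not the project's decorator
    @functools.wraps(func)
    def wrapper(filepath, *args, **kwargs):
        with tempfile.TemporaryDirectory() as tmpdir:
            tmp_copy = os.path.join(tmpdir, os.path.basename(filepath))
            shutil.copy(filepath, tmp_copy)  # work on a copy; the original stays untouched
            # a real "hack" step could patch the copied file's header here
            return func(tmp_copy, *args, **kwargs)
    return wrapper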
8 changes: 5 additions & 3 deletions pdaltools/replace_attribute_in_las.py
@@ -10,6 +10,8 @@
import tempfile
from typing import List, Dict

+from pdaltools.unlock_file import copy_and_hack_decorator


def parse_args():
parser = argparse.ArgumentParser("Replace values of a given attribute in a las/laz file.")
@@ -65,6 +67,7 @@ def dict_to_pdal_assign_list(d: Dict, output_attribute: str = "Classification",
return assignment_list


+@copy_and_hack_decorator
def replace_values(
    input_file: str,
    output_file: str,
@@ -109,9 +112,8 @@ def replace_values_clean(
attribute: str = "Classification",
writer_parameters: Dict = {},
):
-    _, extension = os.path.splitext(output_file)
-    with tempfile.NamedTemporaryFile(suffix=extension) as tmp:
-        tmp.close()
+    filename = os.path.basename(output_file)
+    with tempfile.NamedTemporaryFile(suffix=filename) as tmp:
        replace_values(input_file, tmp.name, replacement_map, attribute, writer_parameters)
        exec_las2las(tmp.name, output_file)

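For orientation: replace_values takes a replacement map of the form {output_value: [input_values, ...]}, and dict_to_pdal_assign_list (whose signature appears in the hunk header above) turns it into pdal filters.assign statements. The helper below is a simplified sketch of that idea, not the project's implementation; in particular, the real code must ensure that a value which is both a replacement target and a replacement source (like "2" in the tests further down) is only substituted once.

from typing import Dict, List


def to_assign_statements(replacement_map: Dict[str, List[str]], attribute: str = "Classification") -> List[str]:
    # One "Attr = out WHERE Attr == in" statement per (output value, input value) pair.
    statements = []
    for output_value, input_values in replacement_map.items():
        for input_value in input_values:
            statements.append(f"{attribute} = {output_value} WHERE {attribute} == {input_value}")
    return statements


print(to_assign_statements({"201": ["1", "2"], "65": ["6"]}))
# ['Classification = 201 WHERE Classification == 1',
#  'Classification = 201 WHERE Classification == 2',
#  'Classification = 65 WHERE Classification == 6']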
5 changes: 2 additions & 3 deletions pdaltools/standardize_format.py
@@ -76,9 +76,8 @@ def exec_las2las(input_file: str, output_file: str):

@copy_and_hack_decorator
def standardize(input_file: str, output_file: str, params_from_parser: Dict) -> None:
-    _, extension = os.path.splitext(output_file)
-    with tempfile.NamedTemporaryFile(suffix=extension) as tmp:
-        tmp.close()
+    filename = os.path.basename(output_file)
+    with tempfile.NamedTemporaryFile(suffix=filename) as tmp:
        rewrite_with_pdal(input_file, tmp.name, params_from_parser)
        exec_las2las(tmp.name, output_file)

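Both replace_values_clean and standardize now derive the temporary file's suffix from the output file's basename instead of its extension alone. A minimal sketch of what that suffix does (hypothetical file name; the temporary path shown is only an example):

import tempfile

# The suffix keeps the ".laz"/".las" extension on the temporary path,
# so PDAL and las2las infer the right format from tmp.name.
with tempfile.NamedTemporaryFile(suffix="my_output.laz") as tmp:
    print(tmp.name)  # e.g. /tmp/tmpab12cd34my_output.laz
    # write the intermediate file to tmp.name, then post-process it into the final output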
@@ -24,7 +24,7 @@
"4": 2160,
"5": 42546,
"6": 33595,
-"64": 83,
+"0": 83,
}
)

2 changes: 1 addition & 1 deletion test/count_occurences/test_merge_occurences_counts.py
@@ -21,7 +21,7 @@
"4": 2160,
"5": 42546,
"6": 33595,
-"64": 83,
+"0": 83,
}
)

@@ -5,5 +5,5 @@
"4": 1227,
"5": 30392,
"6": 29447,
-"64": 13
+"0": 13
}
@@ -5,5 +5,5 @@
"4": 933,
"5": 12154,
"6": 4148,
-"64": 70
+"0": 70
}
26 changes: 14 additions & 12 deletions test/test_replace_attribute_in_las.py
@@ -10,7 +10,7 @@
from pdaltools.standardize_format import get_writer_parameters
import pytest
import shutil
-from test.utils import get_pdal_infos_summary
+from test.utils import get_pdal_infos_summary, EXPECTED_DIMS_BY_DATAFORMAT
from typing import Dict
from test.test_standardize_format import assert_lasinfo_no_warning

@@ -28,20 +28,22 @@
"4": 1227,
"5": 30392,
"6": 29447,
-"64": 13,
+"0": 13,
}
)
colored_las_params = get_writer_parameters({"dataformat_id": 8})

-expected_counts = Counter({"2": 21172, "3": 226, "4": 1227, "5": 30392, "64": 29447, "201": 2047 + 13})
+expected_counts = Counter({"0": 13, "2": 226, "4": 1227, "5": 30392, "65": 29447, "201": 2047 + 21172})

replacement_map_fail = {
    "201": ["1", "64"],
    "6": ["64"],
-} # has duplicatevalue to replace
+} # has duplicate value to replace, so it should fail

replacement_map_success = {
-    "201": ["1", "64"],
-    "64": ["6"],
+    "201": ["1", "2"],
+    "2": ["3"], # check that the replacement is correct when a value is both replaced and to replace
+    "65": ["6"], # check that values over 31 are interpreted correctly in las 1.4 output
}

# test replacement map parsing
@@ -59,8 +61,8 @@ def setup_module(module):
os.mkdir(tmp_path)


-def test_replace_values():
-    replace_values(input_file, output_file, replacement_map_success, attribute)
+def test_replace_values_ok():
+    replace_values(input_file, output_file, replacement_map_success, attribute, colored_las_params)
    count = compute_count_one_file(output_file, attribute)

    assert count == expected_counts
@@ -78,11 +80,11 @@ def test_replace_values_duplicate_input():


def check_dimensions(input_file, output_file):
-    input_summary = get_pdal_infos_summary(input_file)
-    input_dimensions = set(input_summary["summary"]["dimensions"])
    output_summary = get_pdal_infos_summary(output_file)
-    output_dimensions = set(output_summary["summary"]["dimensions"])
-    assert input_dimensions == output_dimensions
+    output_dimensions = [s.strip() for s in output_summary["summary"]["dimensions"].split(",")]
+    print(sorted(output_dimensions))
+    print(sorted(EXPECTED_DIMS_BY_DATAFORMAT[8]))
+    assert set(output_dimensions) == set(EXPECTED_DIMS_BY_DATAFORMAT[8])


def test_parse_replacement_map_from_path_or_json_string_path_ok():
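The updated expected_counts can be re-derived from the input class counts and replacement_map_success. The helper below is hypothetical (it is not in the repository) and assumes each input value is replaced at most once; the input counts are inferred from the expected figures in this test file:

from collections import Counter


def apply_replacement_map(counts: Counter, replacement_map: dict) -> Counter:
    # Invert {output: [inputs]} into {input: output}, then remap each count once.
    inverse = {src: dst for dst, srcs in replacement_map.items() for src in srcs}
    out = Counter()
    for value, n in counts.items():
        out[inverse.get(value, value)] += n
    return out


input_counts = Counter({"0": 13, "1": 2047, "2": 21172, "3": 226, "4": 1227, "5": 30392, "6": 29447})
print(apply_replacement_map(input_counts, {"201": ["1", "2"], "2": ["3"], "65": ["6"]}))
# Counter({'5': 30392, '65': 29447, '201': 23219, '4': 1227, '2': 226, '0': 13})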
49 changes: 2 additions & 47 deletions test/test_standardize_format.py
@@ -3,7 +3,7 @@
import shutil
from pdaltools.standardize_format import rewrite_with_pdal, standardize, exec_las2las
import logging
-from test.utils import get_pdal_infos_summary
+from test.utils import get_pdal_infos_summary, EXPECTED_DIMS_BY_DATAFORMAT
import pdal
import subprocess as sp

@@ -21,51 +21,6 @@
{"dataformat_id": 8, "a_srs": "EPSG:4326"},
]

-expected_dims = {
-    6: set(
-        [
-            "X",
-            "Y",
-            "Z",
-            "Intensity",
-            "ReturnNumber",
-            "NumberOfReturns",
-            "ClassFlags",
-            "ScanChannel",
-            "ScanDirectionFlag",
-            "EdgeOfFlightLine",
-            "Classification",
-            "UserData",
-            "ScanAngleRank",
-            "PointSourceId",
-            "GpsTime",
-        ]
-    ),
-    8: set(
-        [
-            "X",
-            "Y",
-            "Z",
-            "Intensity",
-            "ReturnNumber",
-            "NumberOfReturns",
-            "ClassFlags",
-            "ScanChannel",
-            "ScanDirectionFlag",
-            "EdgeOfFlightLine",
-            "Classification",
-            "UserData",
-            "ScanAngleRank",
-            "PointSourceId",
-            "GpsTime",
-            "Red",
-            "Green",
-            "Blue",
-            "Infrared",
-        ]
-    ),
-}


def setup_module(module):
try:
@@ -94,7 +49,7 @@ def _test_standardize_format_one_params_set(params):
    assert metadata["dataformat_id"] == params["dataformat_id"]
    # Check that there is no extra dim
    dimensions = set([d.strip() for d in json_info["summary"]["dimensions"].split(",")])
-    assert dimensions == expected_dims[params["dataformat_id"]]
+    assert dimensions == EXPECTED_DIMS_BY_DATAFORMAT[params["dataformat_id"]]

# TODO: Check srs
# TODO: check precision
52 changes: 52 additions & 0 deletions test/utils.py
@@ -9,3 +9,55 @@ def get_pdal_infos_summary(f: str):
r = sp.run(["pdal", "info", "--summary", f], stderr=sp.PIPE, stdout=sp.PIPE)
json_info = json.loads(r.stdout.decode())
return json_info


+EXPECTED_DIMS_BY_DATAFORMAT = {
+    6: set(
+        [
+            "X",
+            "Y",
+            "Z",
+            "Intensity",
+            "ReturnNumber",
+            "NumberOfReturns",
+            "ScanChannel",
+            "ScanDirectionFlag",
+            "EdgeOfFlightLine",
+            "Classification",
+            "UserData",
+            "ScanAngleRank",
+            "PointSourceId",
+            "GpsTime",
+            "KeyPoint",
+            "Overlap",
+            "Synthetic",
+            "Withheld",
+        ]
+    ),
+    8: set(
+        [
+            "X",
+            "Y",
+            "Z",
+            "Intensity",
+            "ReturnNumber",
+            "NumberOfReturns",
+            "ScanChannel",
+            "ScanDirectionFlag",
+            "EdgeOfFlightLine",
+            "Classification",
+            "UserData",
+            "ScanAngleRank",
+            "PointSourceId",
+            "GpsTime",
+            "Red",
+            "Green",
+            "Blue",
+            "Infrared",
+            "KeyPoint",
+            "Overlap",
+            "Synthetic",
+            "Withheld",
+        ]
+    ),
+}
