Skip to content

Process Module

process_with_tiles(ept_file, tile_size, output_path, metric, voxel_size, voxel_height=1, buffer_size=0.1, srs=None, hag=False, hag_dtm=False, dtm=None, bounds=None, interpolation=None, remove_outliers=False)

Process a large EPT point cloud by tiling, compute CHM or other metrics for each tile, and write the results to the specified output directory.

Parameters:

Name Type Description Default
ept_file str

Path to the EPT file containing the point cloud data.

required
tile_size tuple

Size of each tile as (tile_width, tile_height).

required
output_path str

Directory where the output files will be saved.

required
metric str

Metric to compute for each tile ("chm", "fhd", or "pai").

required
voxel_size tuple

Voxel resolution as (x_resolution, y_resolution, z_resolution).

required
voxel_height float

Height of each voxel in meters. Used when metric is "pai". Defaults to 1.

1
buffer_size float

Fractional buffer size relative to tile size (e.g., 0.1 for 10% buffer). Defaults to 0.1.

0.1
srs str

Spatial Reference System for the output. If None, uses SRS from the EPT file.

None
hag bool

If True, compute Height Above Ground using Delaunay triangulation. Defaults to False.

False
hag_dtm bool

If True, compute Height Above Ground using a provided DTM raster. Defaults to False.

False
dtm str

Path to the DTM raster file. Required if hag_dtm is True.

None
bounds tuple

Spatial bounds to crop the data. Must be of the form ([xmin, xmax], [ymin, ymax], [zmin, zmax]) or ([xmin, xmax], [ymin, ymax]). If None, tiling is done over the entire dataset.

None
interpolation str or None

Interpolation method for CHM calculation ("linear", "cubic", "nearest", or None).

None
remove_outliers bool

Whether to remove statistical outliers before calculating metrics. Defaults to False.

False

Returns:

Type Description
None

None

Raises:

Type Description
ValueError

If an unsupported metric is requested, if buffer or voxel sizes are invalid, or required arguments are missing.

FileNotFoundError

If the EPT or DTM file does not exist, or a required file for processing is missing.

Source code in pyforestscan/process.py
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
def process_with_tiles(ept_file, tile_size, output_path, metric, voxel_size,
                       voxel_height=1, buffer_size=0.1, srs=None, hag=False,
                       hag_dtm=False, dtm=None, bounds=None, interpolation=None, remove_outliers=False) -> None:
    """
    Process a large EPT point cloud by tiling, compute CHM or other metrics for each tile,
    and write the results to the specified output directory.

    Args:
        ept_file (str): Path to the EPT file containing the point cloud data.
        tile_size (tuple): Size of each tile as (tile_width, tile_height).
        output_path (str): Directory where the output files will be saved.
        metric (str): Metric to compute for each tile ("chm", "fhd", or "pai").
        voxel_size (tuple): Voxel resolution as (x_resolution, y_resolution, z_resolution).
        voxel_height (float, optional): Height of each voxel in meters. Used when metric is "pai". Defaults to 1.
        buffer_size (float, optional): Fractional buffer size relative to tile size (e.g., 0.1 for 10% buffer). Defaults to 0.1.
        srs (str, optional): Spatial Reference System for the output. If None, uses SRS from the EPT file.
        hag (bool, optional): If True, compute Height Above Ground using Delaunay triangulation. Defaults to False.
        hag_dtm (bool, optional): If True, compute Height Above Ground using a provided DTM raster. Defaults to False.
        dtm (str, optional): Path to the DTM raster file. Required if hag_dtm is True.
        bounds (tuple, optional): Spatial bounds to crop the data. Must be of the form
            ([xmin, xmax], [ymin, ymax], [zmin, zmax]) or ([xmin, xmax], [ymin, ymax]).
            If None, tiling is done over the entire dataset.
        interpolation (str or None, optional): Interpolation method for CHM calculation ("linear", "cubic", "nearest", or None).
        remove_outliers (bool, optional): Whether to remove statistical outliers before calculating metrics. Defaults to False.

    Returns:
        None

    Raises:
        ValueError: If an unsupported metric is requested, if buffer or voxel sizes are invalid, or required arguments are missing.
        FileNotFoundError: If the EPT or DTM file does not exist, or a required file for processing is missing.
    """
    if metric not in ["chm", "fhd", "pai"]:
        raise ValueError(f"Unsupported metric: {metric}")

    def _trim_buffer(arr, pixels_y, pixels_x):
        """Strip the buffer border from a 2D array; a zero-pixel buffer is a no-op.

        A plain ``arr[b:-b, b:-b]`` slice would turn a zero buffer into
        ``arr[0:0]`` (empty array), because ``-0 == 0``; map 0 to ``None``.
        """
        stop_y = -pixels_y if pixels_y else None
        stop_x = -pixels_x if pixels_x else None
        return arr[pixels_y:stop_y, pixels_x:stop_x]

    (min_z, max_z) = (None, None)
    if bounds:
        if len(bounds) == 2:
            (min_x, max_x), (min_y, max_y) = bounds
        else:
            (min_x, max_x), (min_y, max_y), (min_z, max_z) = bounds
    else:
        min_x, max_x, min_y, max_y, min_z, max_z = get_bounds_from_ept(ept_file)

    if not srs:
        srs = get_srs_from_ept(ept_file)

    num_tiles_x = int(np.ceil((max_x - min_x) / tile_size[0]))
    num_tiles_y = int(np.ceil((max_y - min_y) / tile_size[1]))
    total_tiles = num_tiles_x * num_tiles_y

    os.makedirs(output_path, exist_ok=True)

    with tqdm(total=total_tiles, desc="Processing tiles") as pbar:
        for i in range(num_tiles_x):
            for j in range(num_tiles_y):
                # Only the CHM is buffered (interpolation reaches across tile
                # edges); FHD/PAI operate per voxel column and tile cleanly.
                if metric == "chm":
                    current_buffer_size = buffer_size
                else:
                    current_buffer_size = 0.0

                buffer_x = current_buffer_size * tile_size[0]
                buffer_y = current_buffer_size * tile_size[1]
                tile_min_x = min_x + i * tile_size[0] - buffer_x
                tile_max_x = min_x + (i + 1) * tile_size[0] + buffer_x
                tile_min_y = min_y + j * tile_size[1] - buffer_y
                tile_max_y = min_y + (j + 1) * tile_size[1] + buffer_y

                # Clamp buffered tiles to the overall dataset extent.
                tile_min_x = max(min_x, tile_min_x)
                tile_max_x = min(max_x, tile_max_x)
                tile_min_y = max(min_y, tile_min_y)
                tile_max_y = min(max_y, tile_max_y)

                if tile_max_x <= tile_min_x or tile_max_y <= tile_min_y:
                    print(f"Warning: Skipping tile ({i}, {j}) due to invalid spatial extent.")
                    pbar.update(1)
                    continue

                # `is not None` rather than truthiness: a z bound of exactly 0
                # (e.g. sea level) must still be forwarded to the reader.
                if min_z is not None and max_z is not None:
                    tile_bounds = ([tile_min_x, tile_max_x], [tile_min_y, tile_max_y], [min_z, max_z])
                else:
                    tile_bounds = ([tile_min_x, tile_max_x], [tile_min_y, tile_max_y])
                tile_pipeline_stages = []

                if hag:
                    tile_pipeline_stages.append(_hag_delaunay())
                elif hag_dtm:
                    if not dtm or not os.path.isfile(dtm):
                        raise FileNotFoundError(f"DTM file is required for HAG calculation using DTM: {dtm}")
                    # Crop the DTM to the (buffered) tile so only the needed
                    # raster window is loaded.
                    cropped_dtm_path = _crop_dtm(
                        dtm,
                        tile_min_x, tile_min_y,
                        tile_max_x, tile_max_y
                    )
                    tile_pipeline_stages.append(_hag_raster(cropped_dtm_path))
                base_pipeline = {
                    "type": "readers.ept",
                    "filename": ept_file,
                    "bounds": f"{tile_bounds}",
                }
                tile_pipeline_json = {
                    "pipeline": [base_pipeline] + tile_pipeline_stages
                }

                tile_pipeline = pdal.Pipeline(json.dumps(tile_pipeline_json))
                tile_pipeline.execute()
                if remove_outliers:
                    tile_points = remove_outliers_and_clean(tile_pipeline.arrays)[0]
                else:
                    tile_points = tile_pipeline.arrays[0]

                if tile_points.size == 0:
                    print(f"Warning: No data in tile ({i}, {j}). Skipping.")
                    pbar.update(1)
                    continue

                buffer_pixels_x = int(np.ceil(buffer_x / voxel_size[0]))
                buffer_pixels_y = int(np.ceil(buffer_y / voxel_size[1]))

                # The core (unbuffered) footprint georeferences the output
                # raster. Validate it up front for every metric — previously
                # only the fhd/pai path checked, so a degenerate CHM extent
                # could reach create_geotiff.
                core_extent = (
                    tile_min_x + buffer_x,
                    tile_max_x - buffer_x,
                    tile_min_y + buffer_y,
                    tile_max_y - buffer_y,
                )
                if core_extent[1] <= core_extent[0] or core_extent[3] <= core_extent[2]:
                    print(f"Warning: Invalid core extent for tile ({i}, {j}): {core_extent}. Skipping.")
                    pbar.update(1)
                    continue

                if metric == "chm":
                    chm, extent = calculate_chm(tile_points, voxel_size, interpolation=interpolation)

                    if buffer_pixels_x * 2 >= chm.shape[1] or buffer_pixels_y * 2 >= chm.shape[0]:
                        print(
                            f"Warning: Buffer size exceeds CHM dimensions for tile ({i}, {j}). Adjusting buffer size.")
                        buffer_pixels_x = max(0, chm.shape[1] // 2 - 1)
                        buffer_pixels_y = max(0, chm.shape[0] // 2 - 1)

                    chm = _trim_buffer(chm, buffer_pixels_y, buffer_pixels_x)

                    result_file = os.path.join(output_path, f"tile_{i}_{j}_chm.tif")
                    create_geotiff(chm, result_file, srs, core_extent)
                else:  # metric is "fhd" or "pai" (validated at function entry)
                    voxels, spatial_extent = assign_voxels(tile_points, voxel_size)

                    if metric == "fhd":
                        result = calculate_fhd(voxels)
                    else:
                        if not voxel_height:
                            raise ValueError(f"voxel_height is required for metric {metric}")

                        pad = calculate_pad(voxels, voxel_size[-1])

                        # An all-zero PAD tile (no vegetation returns) would
                        # yield NaN/inf from calculate_pai; emit zeros instead.
                        if np.all(pad == 0):
                            result = np.zeros((pad.shape[0], pad.shape[1]))
                        else:
                            result = calculate_pai(pad, voxel_height)
                        result = np.where(np.isfinite(result), result, 0)

                    if current_buffer_size > 0:
                        if buffer_pixels_x * 2 >= result.shape[1] or buffer_pixels_y * 2 >= result.shape[0]:
                            print(
                                f"Warning: Buffer size exceeds {metric.upper()} dimensions for tile ({i}, {j}). "
                                f"Adjusting buffer size."
                            )
                            buffer_pixels_x = max(0, result.shape[1] // 2 - 1)
                            buffer_pixels_y = max(0, result.shape[0] // 2 - 1)

                        result = _trim_buffer(result, buffer_pixels_y, buffer_pixels_x)

                    result_file = os.path.join(output_path, f"tile_{i}_{j}_{metric}.tif")
                    create_geotiff(result, result_file, srs, core_extent)

                pbar.update(1)