Median filtering using fastrasterfilter
pierotofy committed Dec 7, 2023
1 parent d419d9f commit 5674e68
Showing 2 changed files with 24 additions and 93 deletions.
11 changes: 11 additions & 0 deletions SuperBuild/CMakeLists.txt
@@ -251,6 +251,17 @@ externalproject_add(odm_orthophoto
${WIN32_CMAKE_ARGS} ${WIN32_GDAL_ARGS}
)

externalproject_add(fastrasterfilter
DEPENDS eigen34
GIT_REPOSITORY https://github.com/OpenDroneMap/FastRasterFilter.git
GIT_TAG main
PREFIX ${SB_BINARY_DIR}/fastrasterfilter
SOURCE_DIR ${SB_SOURCE_DIR}/fastrasterfilter
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
-DEIGEN3_INCLUDE_DIR=${SB_SOURCE_DIR}/eigen34/
${WIN32_CMAKE_ARGS} ${WIN32_GDAL_ARGS}
)

externalproject_add(lastools
GIT_REPOSITORY https://github.com/OpenDroneMap/LAStools.git
GIT_TAG 250
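The SuperBuild entry added above builds the FastRasterFilter command-line tool against Eigen and installs it into the SuperBuild install prefix; the DEM code below then shells out to it instead of filtering in Python. As a rough illustration only (the paths and thread count here are hypothetical, not part of this commit), an equivalent manual invocation from Python could look like:

import os
import subprocess

# Hypothetical input/output paths; the real pipeline builds this command
# in opendm/dem/commands.py via system.run (see below).
env = dict(os.environ, OMP_NUM_THREADS="4")
subprocess.run([
    "fastrasterfilter", "dsm.tif",
    "--output", "dsm_smoothed.tif",
    "--window-size", "512",
    "--radius", "5",
    "--co", "TILED=YES",
    "--co", "BIGTIFF=IF_SAFER",
    "--co", "COMPRESS=DEFLATE",
], check=True, env=env)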
106 changes: 13 additions & 93 deletions opendm/dem/commands.py
@@ -5,8 +5,6 @@
import math
import time
import shutil
import functools
import threading
import glob
import re
from joblib import delayed, Parallel
@@ -15,11 +13,9 @@
from opendm import io
from opendm import system
from opendm.concurrency import get_max_memory, parallel_map, get_total_memory
from scipy import ndimage
from datetime import datetime
from opendm.vendor.gdal_fillnodata import main as gdal_fillnodata
from opendm import log
import threading

from .ground_rectification.rectify import run_rectification
from . import pdal
@@ -237,106 +233,30 @@ def compute_euclidean_map(geotiff_path, output_path, overwrite=False):
return output_path


def median_smoothing(geotiff_path, output_path, smoothing_iterations=1, window_size=512, num_workers=1):
def median_smoothing(geotiff_path, output_path, window_size=512, num_workers=1):
""" Apply median smoothing """
start = datetime.now()

if not os.path.exists(geotiff_path):
raise Exception('File %s does not exist!' % geotiff_path)

# Prepare temporary files
folder_path, output_filename = os.path.split(output_path)
basename, ext = os.path.splitext(output_filename)

output_dirty_in = os.path.join(folder_path, "{}.dirty_1{}".format(basename, ext))
output_dirty_out = os.path.join(folder_path, "{}.dirty_2{}".format(basename, ext))

log.ODM_INFO('Starting smoothing...')

with rasterio.open(geotiff_path, num_threads=num_workers) as img, rasterio.open(output_dirty_in, "w+", BIGTIFF="IF_SAFER", num_threads=num_workers, **img.profile) as imgout, rasterio.open(output_dirty_out, "w+", BIGTIFF="IF_SAFER", num_threads=num_workers, **img.profile) as imgout2:
nodata = img.nodatavals[0]
dtype = img.dtypes[0]
shape = img.shape
for i in range(smoothing_iterations):
log.ODM_INFO("Smoothing iteration %s" % str(i + 1))
rows, cols = numpy.meshgrid(numpy.arange(0, shape[0], window_size), numpy.arange(0, shape[1], window_size))
rows = rows.flatten()
cols = cols.flatten()
rows_end = numpy.minimum(rows + window_size, shape[0])
cols_end= numpy.minimum(cols + window_size, shape[1])
windows = numpy.dstack((rows, cols, rows_end, cols_end)).reshape(-1, 4)

filt = functools.partial(ndimage.median_filter, size=9, output=dtype, mode='nearest')

# We cannot read/write to the same file from multiple threads without causing race conditions.
# To safely read/write from multiple threads, we use a lock to protect the DatasetReader/Writer.
read_lock = threading.Lock()
write_lock = threading.Lock()

# The threading backend and a GIL-releasing filter are important for memory efficiency and multi-core performance
Parallel(n_jobs=num_workers, backend='threading')(delayed(window_filter_2d)(img, imgout, nodata , window, 9, filt, read_lock, write_lock) for window in windows)

# Between each iteration we swap the input and output temporary files
#img_in, img_out = img_out, img_in
if (i == 0):
img = imgout
imgout = imgout2
else:
img, imgout = imgout, img

# If the number of iterations was odd, the final result is in the first temporary file, so swap the names
if (smoothing_iterations % 2 != 0):
output_dirty_in, output_dirty_out = output_dirty_out, output_dirty_in

# Cleaning temporary files
if os.path.exists(output_dirty_out):
os.replace(output_dirty_out, output_path)
if os.path.exists(output_dirty_in):
os.remove(output_dirty_in)
kwargs = {
'input': geotiff_path,
'output': output_path,
'window': window_size,
}
system.run('fastrasterfilter "{input}" '
'--output "{output}" '
'--window-size {window} '
'--radius 5 '
'--co TILED=YES '
'--co BIGTIFF=IF_SAFER '
'--co COMPRESS=DEFLATE '.format(**kwargs), env_vars={'OMP_NUM_THREADS': num_workers})

log.ODM_INFO('Completed smoothing to create %s in %s' % (output_path, datetime.now() - start))
return output_path
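For context, a minimal usage sketch of the reworked function (the paths and worker count are illustrative, not taken from this commit):

from opendm.dem.commands import median_smoothing

# Hypothetical example: smooth a DSM into a new file using 4 OpenMP threads.
smoothed_path = median_smoothing("odm_dem/dsm.tif", "odm_dem/dsm_smoothed.tif",
                                 window_size=512, num_workers=4)
print(smoothed_path)  # the function returns the output path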


def window_filter_2d(img, imgout, nodata, window, kernel_size, filter, read_lock, write_lock):
"""
Apply a filter to a DEM within a window; expects to work with kernel-based filters.
:param img: open rasterio dataset to read from
:param imgout: open rasterio dataset to write the filtered result to. It can be the same as img to modify in place.
:param window: the window to apply the filter to, a list containing row_start, col_start, row_end, col_end
:param kernel_size: the size of the kernel for the filter; works with odd numbers (even numbers are untested)
:param filter: the filter function, which takes a 2D array as input and returns the filtered result
:param read_lock: threading lock for the read operation
:param write_lock: threading lock for the write operation
"""
shape = img.shape[:2]
if window[0] < 0 or window[1] < 0 or window[2] > shape[0] or window[3] > shape[1]:
raise Exception('Window is out of bounds')
expanded_window = [ max(0, window[0] - kernel_size // 2), max(0, window[1] - kernel_size // 2), min(shape[0], window[2] + kernel_size // 2), min(shape[1], window[3] + kernel_size // 2) ]

# Read input window
width = expanded_window[3] - expanded_window[1]
height = expanded_window[2] - expanded_window[0]
rasterio_window = rasterio.windows.Window(col_off=expanded_window[1], row_off=expanded_window[0], width=width, height=height)
with read_lock:
win_arr = img.read(indexes=1, window=rasterio_window)

# Ideally nodata would be handled the way the filter algorithms handle borders (reflection, nearest, interpolation, etc.);
# for now we follow the old approach to guarantee identical outputs
nodata_locs = win_arr == nodata
win_arr = filter(win_arr)
win_arr[nodata_locs] = nodata
win_arr = win_arr[window[0] - expanded_window[0] : window[2] - expanded_window[0], window[1] - expanded_window[1] : window[3] - expanded_window[1]]

# Write output window
width = window[3] - window[1]
height = window[2] - window[0]
rasterio_window = rasterio.windows.Window(col_off=window[1], row_off=window[0], width=width, height=height)
with write_lock:
imgout.write(win_arr, indexes=1, window=rasterio_window)
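The removed helper above used an expand-and-crop pattern: each window is grown by half the kernel size (clamped to the raster bounds) so the median filter has valid neighbors at window edges, and the filtered block is cropped back to the original window before writing. A self-contained sketch of that arithmetic, independent of rasterio and with illustrative array and window values:

import numpy
from scipy import ndimage

def filter_window(arr, window, kernel_size=9):
    # window = (row_start, col_start, row_end, col_end), as in the removed helper
    half = kernel_size // 2
    r0, c0, r1, c1 = window
    # Expand the window by half the kernel, clamped to the array bounds
    er0, ec0 = max(0, r0 - half), max(0, c0 - half)
    er1, ec1 = min(arr.shape[0], r1 + half), min(arr.shape[1], c1 + half)
    # Filter the expanded block, then crop back to the requested window
    block = ndimage.median_filter(arr[er0:er1, ec0:ec1], size=kernel_size, mode='nearest')
    return block[r0 - er0:r1 - er0, c0 - ec0:c1 - ec0]

out = filter_window(numpy.random.rand(100, 100), (0, 0, 64, 64))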


def get_dem_radius_steps(stats_file, steps, resolution, multiplier = 1.0):
radius_steps = [point_cloud.get_spacing(stats_file, resolution) * multiplier]
for _ in range(steps - 1):
