@article{bnh-7907,
  title    = {High-Resolution Estimates of Fire Severity -- An Evaluation of {UAS} Image and {LiDAR} Mapping Approaches on a Sedgeland Forest Boundary in {Tasmania}, {Australia}},
  journal  = {Fire},
  volume   = {4},
  number   = {1},
  pages    = {14},
  year     = {2021},
  month    = mar,
  abstract = {With an increase in the frequency and severity of wildfires across the globe and resultant changes to long-established fire regimes, the mapping of fire severity is a vital part of monitoring ecosystem resilience and recovery. The emergence of unoccupied aircraft systems (UAS) and compact sensors (RGB and LiDAR) provide new opportunities to map fire severity. This paper conducts a comparison of metrics derived from UAS Light Detecting and Ranging (LiDAR) point clouds and UAS image based products to classify fire severity. A workflow which derives novel metrics describing vegetation structure and fire severity from UAS remote sensing data is developed that fully utilises the vegetation information available in both data sources. UAS imagery and LiDAR data were captured pre- and post-fire over a 300 m by 300 m study area in Tasmania, Australia. The study area featured a vegetation gradient from sedgeland vegetation (e.g., button grass 0.2m) to forest (e.g., Eucalyptus obliqua and Eucalyptus globulus 50m). To classify the vegetation and fire severity, a comprehensive set of variables describing structural, textural and spectral characteristics were gathered using UAS images and UAS LiDAR datasets. A recursive feature elimination process was used to highlight the subsets of variables to be included in random forest classifiers. The classifier was then used to map vegetation and severity across the study area. The results indicate that UAS LiDAR provided similar overall accuracy to UAS image and combined (UAS LiDAR and UAS image predictor values) data streams to classify vegetation (UAS image: 80.6\%; UAS LiDAR: 78.9\%; and Combined: 83.1\%) and severity in areas of forest (UAS image: 76.6\%, UAS LiDAR: 74.5\%; and Combined: 78.5\%) and areas of sedgeland (UAS image: 72.4\%; UAS LiDAR: 75.2\%; and Combined: 76.6\%). These results indicate that UAS SfM and LiDAR point clouds can be used to assess fire severity at very high spatial resolution.},
  keywords = {3D remote sensing, drone, fire severity, fuel structure, LiDAR, photogrammetry, RPAS, structure, UAS, vegetation},
  doi      = {10.3390/fire4010014},
  url      = {https://www.mdpi.com/2571-6255/4/1/14/htm},
  author   = {Hillman, Samuel and Hally, Bryan and Wallace, Luke and Turner, Darren and Lucieer, Arko and Reinke, Karin and Jones, Simon},
}

@techreport{bnh-8165,
  title       = {Using Pre- and Post-Fire {LiDAR} to Assess the Severity of the 2019 {Tasmanian} Bushfires},
  number      = {698},
  year        = {2021},
  month       = aug,
  institution = {Bushfire and Natural Hazards CRC},
  address     = {Melbourne},
  abstract    = {In January 2019, over 64,000 ha of bushland burned in the Riveaux Road fire in Tasmania{\textquoteright}s southern forests. Most of the area burned occurred in tall wet eucalypt forest. These forests are considered to be highly flammable in dry conditions, but fires are infrequent due to the generally cool, wet climate in which they grow. As a result, limited data exists on the behaviour and effects of wildfire in these forests. Prior to these fires, extensive areas of these southern forests have been studied in-depth. In 2014, a large area of the forests that burned were mapped with aerial LiDAR, a remote-sensing technology that can characterise three-dimensional forest structure. Further, in 2016, detailed field-based measurements of fuel load, structure, and hazard were taken at 12 permanent plots which subsequently burned in 2019. Hence, the 2019 fires in Tasmania represent a globally-rare opportunity to characterise the severity of a large wildfire using pre-fire and post-fire data. In October 2019, the Department of Primary Industries, Parks, Water and Environment (DPIPWE) in Tasmania, along with five other BNHCRC end-users and the University of Tasmania, launched a project to use remote-sensing and field-based data to create a detailed case study of the 2019 Riveaux Rd. Fire, and to untangle the drivers of fire severity in tall wet eucalypt forests.\  To do this we (i) remeasured plots to assess tree mortality and changes in fuel loads post fire; (ii) acquired LiDAR data from a transect across a burned buttongrass-forest boundary on the Weld River enabling comparison with pre-fire LiDAR data; (iii) established a baseline post-fire LiDAR buttongrass-forest boundary transect on the Huon River at Blakes Opening.\  Here we describe the data sets and report some preliminary analyses.},
  keywords    = {bushfire, fire severity, LiDAR, post-fire, pre-fire, Tasmania},
  author      = {Furlaud, James M. and Lucieer, Arko and Foyster, Scott and Matala, Anna and Bowman, David},
}

@article{bnh-7523,
  title    = {A Comparison of Terrestrial and {UAS} Sensors for Measuring Fuel Hazard in a Dry Sclerophyll Forest},
  journal  = {International Journal of Applied Earth Observation and Geoinformation},
  volume   = {95},
  year     = {2020},
  month    = nov,
  abstract = {In recent years, Unoccupied Aircraft Systems (UAS) have been used to capture information on forest structure in unprecedented detail. Pioneering studies in this field have shown that high spatial resolution images and Light Detecting And Ranging (LiDAR) data captured from these platforms provide detailed information describing the dominant tree elements of canopy cover and biomass. However, to date, few studies have investigated the arrangement of vegetation elements that contribute directly to fire propagation in UAS LiDAR point clouds; that is the surface, near-surface, elevated and intermediate-canopy vegetation. This paper begins to address this gap in the literature by exploring the use of image-based and LiDAR 3D representations collected using UAS platforms, for describing forest structure properties. Airborne and terrestrial 3D datasets were captured in a dry sclerophyll forest in south-eastern Australia. Results indicate that UAS LiDAR point clouds contain information that can describe fuel properties in all strata. Similar estimates of canopy cover (TLS: 68.27\% and UAS LiDAR: 64.20\%) and sub-canopy cover (Elevated cover TLS: 44.94\%, UAS LiDAR: 32.27\%, combined surface and near-surface cover TLS: 96.10\% UAS LiDAR: 93.56\%) to TLS were achieved using this technology. It was also shown that the UAS SfM photogrammetric technique significantly under performed in the representation of the canopy and below canopy structure (canopy cover - 20.31\%, elevated cover 10.09\%). This caused errors to be propagated in the estimate of heights in the elevated fuel layer (TLS: 0.51\ m, UAS LiDAR: 0.34\ m, UAS SfM: 0.15\ m). A method for classifying fuel hazard layers is also presented which identifies vegetation connectivity. These results indicate that information describing the below canopy vertical structure is present within the UAS LiDAR point clouds and can be exploited through this novel classification approach for fire hazard assessment. For fire prone countries, this type of information can provide important insight into forest fuels and the potential fire behaviour and impact of fire under different scenarios.},
  author   = {Hillman, Samuel and Wallace, Luke and Lucieer, Arko and Reinke, Karin and Turner, Darren and Jones, Simon},
}