<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="2.3" xml:lang="EN">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Plant Sci.</journal-id>
<journal-title>Frontiers in Plant Science</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Plant Sci.</abbrev-journal-title>
<issn pub-type="epub">1664-462X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpls.2024.1393592</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Plant Science</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Effects of different ground segmentation methods on the accuracy of UAV-based canopy volume measurements</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name>
<surname>Han</surname>
<given-names>Leng</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<xref ref-type="author-notes" rid="fn002">
<sup>&#x2020;</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/2669136"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Wang</surname>
<given-names>Zhichong</given-names>
</name>
<xref ref-type="aff" rid="aff4">
<sup>4</sup>
</xref>
<xref ref-type="author-notes" rid="fn002">
<sup>&#x2020;</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1561529"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>He</surname>
<given-names>Miao</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<xref ref-type="author-notes" rid="fn002">
<sup>&#x2020;</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/2669965"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>He</surname>
<given-names>Xiongkui</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<xref ref-type="author-notes" rid="fn001">
<sup>*</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1350991"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
</contrib-group>
<aff id="aff1">
<sup>1</sup>
<institution>College of Science, China Agricultural University</institution>, <addr-line>Beijing</addr-line>, <country>China</country>
</aff>
<aff id="aff2">
<sup>2</sup>
<institution>Centre for Chemicals Application Technology, China Agricultural University</institution>, <addr-line>Beijing</addr-line>, <country>China</country>
</aff>
<aff id="aff3">
<sup>3</sup>
<institution>College of Agricultural Unmanned System, China Agricultural University</institution>, <addr-line>Beijing</addr-line>, <country>China</country>
</aff>
<aff id="aff4">
<sup>4</sup>
<institution>Tropics and Subtropics Group, Institute of Agricultural Engineering, University of Hohenheim</institution>, <addr-line>Stuttgart</addr-line>, <country>Germany</country>
</aff>
<author-notes>
<fn fn-type="edited-by">
<p>Edited by: Wei Qiu, Nanjing Agricultural University, China</p>
</fn>
<fn fn-type="edited-by">
<p>Reviewed by: Renata Retkute, University of Cambridge, United Kingdom</p>
<p>Sergio V&#xe9;lez, Wageningen University and Research, Netherlands</p>
</fn>
<fn fn-type="corresp" id="fn001">
<p>*Correspondence: Xiongkui He, <email xlink:href="mailto:xiongkui@cau.edu.cn">xiongkui@cau.edu.cn</email>
</p>
</fn>
<fn fn-type="other" id="fn002">
<p>&#x2020;ORCID: Leng Han, <uri xlink:href="https://orcid.org/0000-0003-0190-0919">orcid.org/0000-0003-0190-0919</uri>; Zhichong Wang, <uri xlink:href="https://orcid.org/0000-0002-9720-5496">orcid.org/0000-0002-9720-5496</uri>; Miao He, <uri xlink:href="https://orcid.org/0009-0000-9064-2602">orcid.org/0009-0000-9064-2602</uri>
</p>
</fn>
</author-notes>
<pub-date pub-type="epub">
<day>18</day>
<month>06</month>
<year>2024</year>
</pub-date>
<pub-date pub-type="collection">
<year>2024</year>
</pub-date>
<volume>15</volume>
<elocation-id>1393592</elocation-id>
<history>
<date date-type="received">
<day>29</day>
<month>02</month>
<year>2024</year>
</date>
<date date-type="accepted">
<day>30</day>
<month>05</month>
<year>2024</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2024 Han, Wang, He and He</copyright-statement>
<copyright-year>2024</copyright-year>
<copyright-holder>Han, Wang, He and He</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<p>The nonuniform distribution of fruit tree canopies in space poses a challenge for precision management. In recent years, with the development of Structure from Motion (SFM) technology, unmanned aerial vehicle (UAV) remote sensing has been widely used to measure canopy features in orchards to balance efficiency and accuracy. A pipeline of canopy volume measurement based on UAV remote sensing was developed, in which RGB and digital surface model (DSM) orthophotos were constructed from captured RGB images, and then the canopy was segmented using U-Net, OTSU, and RANSAC methods, and the volume was calculated. The accuracy of the segmentation and the canopy volume measurement were compared. The results show that the U-Net trained with RGB and DSM achieves the best accuracy in the segmentation task, with mean intersection over union (MIoU) of 84.75% and mean pixel accuracy (MPA) of 92.58%. However, in the canopy volume estimation task, the U-Net trained with DSM only achieved the best accuracy with root mean square error (RMSE) of 0.410 m<sup>3</sup>, relative root mean square error (rRMSE) of 6.40%, and mean absolute percentage error (MAPE) of 4.74%. The deep learning-based segmentation method achieved higher accuracy in both the segmentation task and the canopy volume measurement task. For canopy volumes up to 7.50 m<sup>3</sup>, OTSU and RANSAC achieve an RMSE of 0.521 m<sup>3</sup> and 0.580 m<sup>3</sup>, respectively. Therefore, in the case of manually labeled datasets, the use of U-Net to segment the canopy region can achieve higher accuracy of canopy volume measurement. If it is difficult to cover the cost of data labeling, ground segmentation using partitioned OTSU can yield more accurate canopy volumes than RANSAC.</p>
</abstract>
<kwd-group>
<kwd>UAV</kwd>
<kwd>ground segmentation</kwd>
<kwd>canopy volume</kwd>
<kwd>OTSU</kwd>
<kwd>RANSAC</kwd>
</kwd-group>
<counts>
<fig-count count="13"/>
<table-count count="1"/>
<equation-count count="6"/>
<ref-count count="57"/>
<page-count count="12"/>
<word-count count="4712"/>
</counts>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-in-acceptance</meta-name>
<meta-value>Sustainable and Intelligent Phytoprotection</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<sec id="s1" sec-type="intro">
<label>1</label>
<title>Introduction</title>
<p>Precise management, such as application and pruning, of the canopy is important for fruit yield and quality. Canopy volume can provide a reference for precise pesticide application and pruning. Several pesticide application models require the use of canopy volume as an input variable (<xref ref-type="bibr" rid="B13">Gil et&#xa0;al., 2013</xref>; <xref ref-type="bibr" rid="B23">Nan et&#xa0;al., 2019</xref>; <xref ref-type="bibr" rid="B38">Sultan Mahmud et&#xa0;al., 2021</xref>). However, accurate measurement of canopy volume relative to tree height is more difficult (<xref ref-type="bibr" rid="B40">Tsoulias et&#xa0;al., 2019</xref>). To accurately obtain canopy volume, traditional methods require a number of expensive manual measurements, which increases the management cost of the production process. With the development of sensor technology, LiDAR, ultrasonic sensors, and cameras are used for nondestructive and rapid measurement of canopy volume.</p>
<p>Terrestrial LiDAR has a wide range of applications in orchard phenology, such as canopy volume measurement and tree height measurement (<xref ref-type="bibr" rid="B27">Pfeiffer et&#xa0;al., 2018</xref>; <xref ref-type="bibr" rid="B6">Brede et&#xa0;al., 2019</xref>). Due to the long time required for a single scan and the need to scan as many locations as possible to avoid occlusions, a complete scan of a 1-ha orchard can take 3&#x2013;6 days, even for an experienced team (<xref ref-type="bibr" rid="B51">Wilkes et&#xa0;al., 2017</xref>). Mobile LiDAR scanning with a real-time kinematic (RTK) receiver was developed to improve the efficiency of canopy point cloud collection (<xref ref-type="bibr" rid="B15">Karp et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B48">Wang et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B12">Gen&#xe9;-Mola et&#xa0;al., 2019</xref>; <xref ref-type="bibr" rid="B21">Mokro&#x161; et&#xa0;al., 2021</xref>). It has been shown that tree segmentation and canopy parameter extraction can also be achieved by LiDAR on UAV platforms, which reduce the effect of vibration and are easy to register (<xref ref-type="bibr" rid="B52">Yoshii et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B53">Yuan et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B8">Caras et&#xa0;al., 2024</xref>).</p>
<p>LiDAR still has a high cost compared to cameras. UAV remote sensing imagery has been widely used in the precision management of orchards (<xref ref-type="bibr" rid="B36">Stateras and Kalivas, 2020</xref>; <xref ref-type="bibr" rid="B57">Zhang et&#xa0;al., 2021</xref>; <xref ref-type="bibr" rid="B25">Pagliai&#xa0;et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B34">Sinha et&#xa0;al., 2022</xref>). With structure from motion (SFM) technology, three-dimensional information such as tree heights and canopy volume in orchards can be obtained using drone imagery (<xref ref-type="bibr" rid="B22">Mu et&#xa0;al., 2018</xref>; <xref ref-type="bibr" rid="B3">Anifantis et&#xa0;al., 2019</xref>; <xref ref-type="bibr" rid="B20">Maimaitijiang et&#xa0;al., 2019</xref>; <xref ref-type="bibr" rid="B31">Ross et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B43">V&#xe9;lez et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B44">Vinci et&#xa0;al., 2023</xref>). For some orchards, crown diameter also can be measured (<xref ref-type="bibr" rid="B9">Chang et&#xa0;al., 2020</xref>). Leaf area index (LAI) and leaf porosity can even be obtained using multispectral images (<xref ref-type="bibr" rid="B29">Raj et&#xa0;al., 2021</xref>; <xref ref-type="bibr" rid="B56">Zhang et&#xa0;al., 2024</xref>), while crop water stress index can also be assessed to inform precision irrigation (<xref ref-type="bibr" rid="B9">Chang et&#xa0;al., 2020</xref>). Combined with computer vision technology, it can even enable fruit recognition to provide growers with yield information in the early stages of crop growth (<xref ref-type="bibr" rid="B4">Ariza-Sent&#xed;s et&#xa0;al., 2023</xref>). 
The digital surface modeling (DSM) created by images contains the height of the crop (<xref ref-type="bibr" rid="B55">Zarco-Tejada et&#xa0;al., 2014</xref>; <xref ref-type="bibr" rid="B54">Yurtseven et&#xa0;al., 2019</xref>; <xref ref-type="bibr" rid="B18">Lu et&#xa0;al., 2021</xref>; <xref ref-type="bibr" rid="B41">Tunca et&#xa0;al., 2024</xref>).</p>
<p>Furthermore, to obtain the volume of the canopy, the digital terrain model (DTM) needs to be split from the DSM (<xref ref-type="bibr" rid="B26">Patrignani and Ochsner, 2015</xref>; <xref ref-type="bibr" rid="B2">Ali-Sisto et&#xa0;al., 2020</xref>; <xref ref-type="bibr" rid="B1">Ali et&#xa0;al., 2021</xref>), and the canopy height model (CHM) is created by taking the difference between the DSM and the DTM (<xref ref-type="bibr" rid="B10">Eitel et&#xa0;al., 2014</xref>; <xref ref-type="bibr" rid="B47">Walter et&#xa0;al., 2018</xref>). Next, canopy volume can be obtained by summing the CHM with voxel (<xref ref-type="bibr" rid="B37">Stovall et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B45">Wallace et&#xa0;al., 2017</xref>). For field crops, it is easier to obtain the DSM as DTM when the crop is not planted (<xref ref-type="bibr" rid="B20">Maimaitijiang et&#xa0;al., 2019</xref>). However, for orchards, accurate ground segmentation is required for canopy volume measurement tasks. Algorithms that have been developed for ground segmentation include zone thresholding methods and plane-fitting (<xref ref-type="bibr" rid="B35">Sithole and Vosselman, 2004</xref>; <xref ref-type="bibr" rid="B24">Oniga et&#xa0;al., 2023</xref>; <xref ref-type="bibr" rid="B49">Wen et&#xa0;al., 2023</xref>). The results of ground segmentation can significantly affect the measurement of canopy volume. Therefore, exploring different ground segmentation methods can improve the accuracy of canopy volume measurements.</p>
<p>This study presents a canopy volume measurement pipeline based on UAV remote sensing images, which first constructs the RGB and DSM of the target orchard, then segments the ground and canopy regions, and finally calculates the canopy volume based on the segmented masks using DSM. The effects of different segmentation algorithms on the accuracy of canopy volume measurements are also investigated.</p>
</sec>
<sec id="s2" sec-type="materials|methods">
<label>2</label>
<title>Materials and methods</title>
<p>The point cloud acquired from the moving LiDAR scan was voxelated, the voxel volumes were summed, and the calculated canopy volume was taken as the true value. The RGB and DSM images acquired by the UAV are segmented into plots, and the canopy and ground sections are segmented by different segmentation methods, and the volume of the canopy is calculated without the use of high-resolution DTM data. A diagram of the experimental design is shown in <xref ref-type="fig" rid="f1">
<bold>Figure&#xa0;1</bold>
</xref>.</p>
<fig id="f1" position="float">
<label>Figure&#xa0;1</label>
<caption>
<p>Diagram of the experimental design, LiDAR scanning as a baseline method for canopy volume measurement, and comparison of the effects of different ground segmentation methods on the accuracy of canopy volume measurement.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1393592-g001.tif"/>
</fig>
<sec id="s2_1">
<label>2.1</label>
<title>Image data collection</title>
<sec id="s2_1_1">
<label>2.1.1</label>
<title>Data collection</title>
<p>Field experiments were conducted in a pear (<italic>Pyrus bretschneideri</italic> &#x2018;Zaosuhong&#x2019;) orchard in Pinggu District, Beijing (40.18&#xb0;N, 116.97&#xb0;E, WGS-84). The orchard covered an area of about 3&#xa0;ha, and the pear trees were BBCH 91 when photographed. The trees were spaced in rows with a 4.5-m interval (<xref ref-type="fig" rid="f2">
<bold>Figure&#xa0;2A</bold>
</xref>) and with a 1.5-m interval between trees within each row, with an average tree height of about 4&#xa0;m.</p>
<fig id="f2" position="float">
<label>Figure&#xa0;2</label>
<caption>
<p>Experimental site and UAV conducting experiment. <bold>(A)</bold> Pear orchard with a row-to-row distance of 4.5&#xa0;m. <bold>(B)</bold> UAV used for image acquisition.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1393592-g002.tif"/>
</fig>
<p>The P4 Multispectral (DJI Technology Inc., Shenzhen, China), which has one RGB camera and a multispectral camera array with five cameras covering blue, green, red, red edge, and near-infrared bands, all at 2 megapixels (MP) with global shutter, was used to acquire images (<xref ref-type="fig" rid="f2">
<bold>Figure&#xa0;2B</bold>
</xref>), and only its RGB channel (2 MP) was used in this experiment. The flight height was 30&#xa0;m, resulting in a ground sample distance (GSD) of 0.016 m/pix. The head and side overlap were both 70%, and the images were taken at equal time intervals. Images were captured between 11:00 and 13:00 to ensure photograph quality. During the capture period, the weather was clear and windless, which eliminated the blur caused by swaying branches. During the flight, the network core service provided by Qianxun Inc. (Shanghai, China) was used to get more accurate RTK positioning.</p>
</sec>
<sec id="s2_1_2">
<label>2.1.2</label>
<title>Image processing</title>
<p>The image processing utilized a workstation with Windows 10 (64-bit), 32 GB of RAM, i7-8700K, and GTX 1080Ti. The orthophotos and DSM were reconstructed with Terra (3.5.5, DJI Technology Inc., Shenzhen, China). A high reconstruction quality was selected to get high accuracy and resolution. The geographic coordinates were based on the WGS84 (EPSG: 4326) coordinate system in this study. As the P4M is equipped with an RTK receiver and the manufacturer supports phase-free control point technology, no ground image control point was set during the experiment.</p>
</sec>
</sec>
<sec id="s2_2">
<label>2.2</label>
<title>UAV-based canopy volume measurements</title>
<p>The DSM of an orchard can be utilized to calculate the canopy volume (<xref ref-type="bibr" rid="B19">Mahmud et&#xa0;al., 2023</xref>). First, a segmentation operation was performed to extract the mask of the canopy region. Next, the volume of the region between the canopy and the ground was calculated as the final measured canopy volume. In this study, a U-Net-based deep learning method, grid-based OTSU, and RANSAC methods were used to segment the canopy, and the accuracy of different segment methods was compared. The orchard is situated on a gentle hillside, resulting in the ground in the orchard not being on the same plane. To avoid misclassification in the ground segmentation, it is important to segment the elevation data of the orchard area separately. In this study, the orchard was divided into multiple 4.5&#xa0;m &#xd7; 4.5&#xa0;m plots. The ground within each plot was considered to be in one plane.</p>
<sec id="s2_2_1">
<label>2.2.1</label>
<title>Deep-learning-based ground segmentation methods</title>
<p>U-Net (<xref ref-type="bibr" rid="B30">Ronneberger et&#xa0;al., 2015</xref>) is a widely used deep learning network in remote sensing for efficient semantic segmentation of input images through an encoder and decoder. The classical U-Net can obtain fast segmentation results on smaller datasets with a lightweight structure. In this study, the classical U-Net was directly used, containing 31 million parameters, and the inputs were a four-channel image of 281 pixels &#xd7; 281 pixels (RGB and DSM) or a single-channel image of 281 pixels &#xd7; 281 pixels (DSM) for&#xa0;training, and the outputs were the segmented masked images (<xref ref-type="fig" rid="f3">
<bold>Figure&#xa0;3</bold>
</xref>).</p>
<fig id="f3" position="float">
<label>Figure&#xa0;3</label>
<caption>
<p>U-Net network structure used in the study. Blue boxes correspond to multichannel feature maps with a number of channels marked on the top of each box. Conv, convolution; up-conv, upconvolution; max pool, max pooling with the size of the convolution kernel.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1393592-g003.tif"/>
</fig>
<p>Three subfields from the orthophoto and the corresponding labeled files were divided as the sampling regions for the training set, validation set, and test set, with the number of samples being 400, 50, and 100, respectively (<xref ref-type="fig" rid="f4">
<bold>Figure&#xa0;4</bold>
</xref>). The validation set was used to adjust the epoch, batch size, and learning rate. Labels were created by an open-source annotation tool called labelme (V5.2.1).</p>
<fig id="f4" position="float">
<label>Figure&#xa0;4</label>
<caption>
<p>Sampling region of the data in training and the RGB images, DSM images, and corresponding labels used. <bold>(A)</bold> The red subfield is the random sampling region for the training set, and the blue and yellow are the test and validation sets, respectively. <bold>(B)</bold> Sampled training images and corresponding labels with a total of four channels of RGB and DSM were fed into the network, where the dark-green color in the labels are the canopy and the light-green-colored regions are the ground.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1393592-g004.tif"/>
</fig>
</sec>
<sec id="s2_2_2">
<label>2.2.2</label>
<title>OTSU-based ground segmentation method</title>
<p>The OTSU threshold segmentation method is widely used in the field of remote sensing. The values of the background and the target have different distributions, and the OTSU method selects the threshold corresponding to when the value of the interclass variance is taken to be the maximum as the optimal threshold. The points belonging to the ground had a low elevation, and the canopy points had a higher elevation in the distribution histogram of DSM, showing two peaks in the histogram. The OTSU method was used to automatically find the threshold value in the middle of the two peaks to maximize the interclass variance of the ground and canopy elevation distributions. Points with elevation above the threshold are categorized as canopy regions, and the rest are ground. <xref ref-type="fig" rid="f5">
<bold>Figure&#xa0;5</bold>
</xref> illustrates the segmentation process and the binarized mask map.</p>
<fig id="f5" position="float">
<label>Figure&#xa0;5</label>
<caption>
<p>Segmentation thresholds for DSM obtained by OTSU and a mask of the canopy region, where dark green is the canopy region and light green is the ground.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1393592-g005.tif"/>
</fig>
</sec>
<sec id="s2_2_3">
<label>2.2.3</label>
<title>RANSAC-based ground segmentation method</title>
<p>RANSAC is an iteration-based fitting method that obtains the parameters of a model by randomly sampling the data points and calculating the probability of a successful fit. Given its good robustness, it is often used to extract planes within a point cloud. Open3D is an open-source library that supports rapid development of software that deals with 3D data. The plane fitting function therein was used in this study based on the empirical selection of 50 sampling points, 10,000 iterations (<italic>N</italic>), and a distance threshold of 0.2&#xa0;m (<italic>D</italic>). The ground was fitted and split between the canopy and the ground (<xref ref-type="fig" rid="f6">
<bold>Figure&#xa0;6</bold>
</xref>).</p>
<fig id="f6" position="float">
<label>Figure&#xa0;6</label>
<caption>
<p>Conversion of DSM to 3D point cloud and segmentation by RANSAC.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1393592-g006.tif"/>
</fig>
</sec>
<sec id="s2_2_4">
<label>2.2.4</label>
<title>Method of tree height and canopy volume calculation</title>
<p>The positions of the tree base were measured using a tilt-featured RTK receiver (E500, Beijing UniStrong Science and Technology Co. Ltd., Beijing, China), and the true height of the tree was measured using a tower ruler with a height accuracy of &#xb1; 5&#xa0;cm. The coordinates and tree heights of 20 trees were measured in the scanned area and used to analyze the accuracy of the tree height measurements. The tree height measured by the LiDAR or UAV was achieved by selecting points within 0.25&#xa0;m from the root coordinates of the tree and calculating the difference between the maximum and minimum heights.</p>
<p>The volume of the canopy was accumulated from the volume of each pixel in the mask (<xref ref-type="disp-formula" rid="eq1">Equation 1</xref>). The volume of the pixel was calculated by the area multiplied by the height, which was the difference between the pixel and the ground mean altitude.</p>
<disp-formula id="eq1">
<label>(1)</label>
<mml:math display="block" id="M1">
<mml:mrow>
<mml:msub>
<mml:mi>V</mml:mi>
<mml:mrow>
<mml:mi>c</mml:mi>
<mml:mi>a</mml:mi>
<mml:mi>n</mml:mi>
<mml:mi>o</mml:mi>
<mml:mi>p</mml:mi>
<mml:mi>y</mml:mi>
</mml:mrow>
</mml:msub>
<mml:mo>=</mml:mo>
<mml:msup>
<mml:mo>&#x2211;</mml:mo>
<mml:mo>&#x200b;</mml:mo>
</mml:msup>
<mml:mi>G</mml:mi>
<mml:mi>S</mml:mi>
<mml:msup>
<mml:mi>D</mml:mi>
<mml:mn>2</mml:mn>
</mml:msup>
<mml:mo>&#xd7;</mml:mo>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:msub>
<mml:mi>h</mml:mi>
<mml:mrow>
<mml:mi>p</mml:mi>
<mml:mi>i</mml:mi>
<mml:mi>x</mml:mi>
</mml:mrow>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mover accent="true">
<mml:mi>h</mml:mi>
<mml:mo>&#xaf;</mml:mo>
</mml:mover>
<mml:mrow>
<mml:mi>g</mml:mi>
<mml:mi>r</mml:mi>
<mml:mi>o</mml:mi>
<mml:mi>u</mml:mi>
<mml:mi>n</mml:mi>
<mml:mi>d</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
</mml:mrow>
</mml:math>
</disp-formula>
<p>Where <italic>V<sub>canopy</sub>
</italic> is the final volume, GSD is the resolution of the DSM (0.016 m/pix in this study), <italic>h<sub>pix</sub>
</italic> is the altitude of each pixel, and <inline-formula>
<mml:math display="inline" id="im1">
<mml:mrow>
<mml:msub>
<mml:mover accent="true">
<mml:mi>h</mml:mi>
<mml:mo>&#xaf;</mml:mo>
</mml:mover>
<mml:mrow>
<mml:mi>g</mml:mi>
<mml:mi>r</mml:mi>
<mml:mi>o</mml:mi>
<mml:mi>u</mml:mi>
<mml:mi>n</mml:mi>
<mml:mi>d</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> is the mean value of the altitude of the ground in the plot.</p>
</sec>
</sec>
<sec id="s2_3">
<label>2.3</label>
<title>LiDAR data acquisition methods</title>
<p>The true value of canopy volume is difficult to measure, and canopy calculations from moving LiDAR scans have typically been used as the true volume in previous studies (<xref ref-type="bibr" rid="B17">Li et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B38">Sultan Mahmud et&#xa0;al., 2021</xref>). A previously developed LiDAR-RTK fusion information acquisition system (<xref ref-type="bibr" rid="B14">Han et&#xa0;al., 2023</xref>) was used to acquire point clouds of the canopy. The LiDAR and RTK mobile stations were mounted on a frame on top of a vehicle, which allowed for smooth travel through the orchard (<xref ref-type="fig" rid="f7">
<bold>Figure&#xa0;7A</bold>
</xref>). Data acquisition was carried out during clear and windless hours, traveling at a speed of about 1&#xa0;m/s.</p>
<fig id="f7" position="float">
<label>Figure&#xa0;7</label>
<caption>
<p>Voxel volume of the canopy obtained by LiDAR. <bold>(A)</bold> Vehicle collecting canopy point cloud with LiDAR and RTK rover. <bold>(B)</bold> Processing pipeline of point cloud data acquired by mobile laser scanning.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1393592-g007.tif"/>
</fig>
<p>The acquired point cloud was synchronized with the recorded RTK packet to obtain position and heading, and each frame of the point cloud was converted to a geographic coordinate system to obtain a complete point cloud of the scanned area. The ground portion was carefully removed from the complete point cloud manually with Meshlab software (2023.12). It was later constructed as voxel data at 0.1&#xa0;m in size. Multiplying the number of voxels by the volume of a single voxel calculates the measured canopy volume (<xref ref-type="fig" rid="f7">
<bold>Figure&#xa0;7B</bold>
</xref>). The canopy volume calculated from the moving LiDAR-scanned point cloud was taken as the true value.</p>
</sec>
<sec id="s2_4">
<label>2.4</label>
<title>Statistical methods for precision evaluation</title>
<p>Mean intersection over union (MIoU, <xref ref-type="disp-formula" rid="eq2">Equation 2</xref>) and mean pixel accuracy (MPA, <xref ref-type="disp-formula" rid="eq3">Equation 3</xref>) were used to evaluate the segmentation accuracy of the model. The data in the training set was segmented using OTSU and RANSAC, and the segmentation accuracy was also evaluated.</p>
<disp-formula id="eq2">
<label>(2)</label>
<mml:math display="block" id="M2">
<mml:mrow>
<mml:mtext>MIoU</mml:mtext>
<mml:mo>=</mml:mo>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:mfrac>
<mml:mrow>
<mml:msub>
<mml:mi>T</mml:mi>
<mml:mi>P</mml:mi>
</mml:msub>
</mml:mrow>
<mml:mrow>
<mml:msub>
<mml:mi>T</mml:mi>
<mml:mi>P</mml:mi>
</mml:msub>
<mml:mo>+</mml:mo>
<mml:msub>
<mml:mi>F</mml:mi>
<mml:mi>P</mml:mi>
</mml:msub>
<mml:mo>+</mml:mo>
<mml:msub>
<mml:mi>F</mml:mi>
<mml:mi>N</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfrac>
<mml:mo>+</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:msub>
<mml:mi>T</mml:mi>
<mml:mi>N</mml:mi>
</mml:msub>
</mml:mrow>
<mml:mrow>
<mml:msub>
<mml:mi>T</mml:mi>
<mml:mi>N</mml:mi>
</mml:msub>
<mml:mo>+</mml:mo>
<mml:msub>
<mml:mi>F</mml:mi>
<mml:mi>P</mml:mi>
</mml:msub>
<mml:mo>+</mml:mo>
<mml:msub>
<mml:mi>F</mml:mi>
<mml:mi>N</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfrac>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
<mml:mo stretchy="false">/</mml:mo>
<mml:mn>2</mml:mn>
</mml:mrow>
</mml:math>
</disp-formula>
<disp-formula id="eq3">
<label>(3)</label>
<mml:math display="block" id="M3">
<mml:mrow>
<mml:mtext>MPA</mml:mtext>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mn>1</mml:mn>
<mml:mrow>
<mml:mi>k</mml:mi>
<mml:mo>+</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
</mml:mfrac>
<mml:msubsup>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>=</mml:mo>
<mml:mn>0</mml:mn>
</mml:mrow>
<mml:mi>k</mml:mi>
</mml:msubsup>
<mml:msub>
<mml:mi>P</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</disp-formula>
<p>where <italic>T<sub>P</sub>
</italic> is the number of correctly classified pixels in canopy samples, <italic>T<sub>N</sub>
</italic> is the number of correctly classified pixels in ground samples, <italic>F<sub>P</sub>
</italic> is the number of wrongly classified pixels in canopy samples, <italic>F<sub>N</sub>
</italic> is the number of incorrectly classified pixels in ground samples, and <italic>P<sub>i</sub>
</italic> represents the proportion of correctly classified pixels in each category.</p>
<p>In total, 50 zones of size 4.5&#xa0;m &#xd7; 4.5&#xa0;m were selected from the scanned area of the LiDAR. The canopy volumes obtained by different methods were calculated, and the accuracy of the volumetric measurements was assessed using the moving LiDAR scans as the true values. Root mean square error (RMSE, <xref ref-type="disp-formula" rid="eq4">Equation 4</xref>), relative root mean square error (rRMSE, <xref ref-type="disp-formula" rid="eq5">Equation 5</xref>), and mean absolute percentage error (MAPE, <xref ref-type="disp-formula" rid="eq6">Equation 6</xref>) were used to assess the error between the measured and true values.</p>
<disp-formula id="eq4">
<label>(4)</label>
<mml:math display="block" id="M4">
<mml:mrow>
<mml:mtext>RMSE</mml:mtext>
<mml:mo>=</mml:mo>
<mml:msqrt>
<mml:mrow>
<mml:mfrac>
<mml:mrow>
<mml:msup>
<mml:mo>&#x2211;</mml:mo>
<mml:mo>&#x200b;</mml:mo>
</mml:msup>
<mml:msup>
<mml:mrow>
<mml:mrow>
<mml:mo stretchy="false">(</mml:mo>
<mml:mrow>
<mml:msub>
<mml:mi>V</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msubsup>
<mml:mi>V</mml:mi>
<mml:mi>i</mml:mi>
<mml:mo>'</mml:mo>
</mml:msubsup>
</mml:mrow>
<mml:mo stretchy="false">)</mml:mo>
</mml:mrow>
</mml:mrow>
<mml:mn>2</mml:mn>
</mml:msup>
</mml:mrow>
<mml:mi>n</mml:mi>
</mml:mfrac>
</mml:mrow>
</mml:msqrt>
</mml:mrow>
</mml:math>
</disp-formula>
<disp-formula id="eq5">
<label>(5)</label>
<mml:math display="block" id="M5">
<mml:mrow>
<mml:mtext>rRMSE</mml:mtext>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mi>R</mml:mi>
<mml:mi>M</mml:mi>
<mml:mi>S</mml:mi>
<mml:mi>E</mml:mi>
</mml:mrow>
<mml:mover accent="true">
<mml:mi>V</mml:mi>
<mml:mo>&#xaf;</mml:mo>
</mml:mover>
</mml:mfrac>
<mml:mo>*</mml:mo>
<mml:mn>100</mml:mn>
<mml:mo>%</mml:mo>
</mml:mrow>
</mml:math>
</disp-formula>
<disp-formula id="eq6">
<label>(6)</label>
<mml:math display="block" id="M6">
<mml:mrow>
<mml:mtext>MAPE</mml:mtext>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mn>100</mml:mn>
<mml:mo>%</mml:mo>
</mml:mrow>
<mml:mi>n</mml:mi>
</mml:mfrac>
<mml:msup>
<mml:mo>&#x2211;</mml:mo>
<mml:mo>&#x200b;</mml:mo>
</mml:msup>
<mml:mrow>
<mml:mo>|</mml:mo>
<mml:mrow>
<mml:mfrac>
<mml:mrow>
<mml:msubsup>
<mml:mi>V</mml:mi>
<mml:mi>i</mml:mi>
<mml:mo>'</mml:mo>
</mml:msubsup>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mi>V</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:mrow>
<mml:mrow>
<mml:msub>
<mml:mi>V</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfrac>
</mml:mrow>
<mml:mo>|</mml:mo>
</mml:mrow>
</mml:mrow>
</mml:math>
</disp-formula>
<p>where <italic>V<sub>i</sub>
</italic> is the measured volume, <inline-formula>
<mml:math display="inline" id="im2">
<mml:mrow>
<mml:msubsup>
<mml:mi>V</mml:mi>
<mml:mi>i</mml:mi>
<mml:mo>'</mml:mo>
</mml:msubsup>
</mml:mrow>
</mml:math>
</inline-formula> is the true volume (measured by moving LiDAR), <italic>n</italic> is the sample number, and <inline-formula>
<mml:math display="inline" id="im3">
<mml:mover accent="true">
<mml:mi>V</mml:mi>
<mml:mo>&#xaf;</mml:mo>
</mml:mover>
</mml:math>
</inline-formula> is the mean value of the true volume.</p>
</sec>
</sec>
<sec id="s3" sec-type="results">
<label>3</label>
<title>Results</title>
<sec id="s3_1">
<label>3.1</label>
<title>Segmentation of canopy and ground</title>
<p>Different hyperparameter settings will have an impact on the training results. In this study, by modifying the default parameters and pretraining, the finalized hyperparameters were an epoch of 20, a batch size of 5, and a learning rate of 10<sup>&#x2212;5</sup>. The losses of the U-Net networks trained with different input data during training are shown in <xref ref-type="app" rid="app1">
<bold>Appendix A</bold>
</xref>. Both drop faster in the first three epochs, and the loss stabilizes after 10 epochs of training.</p>
<p>The data in the training set were segmented by trained U-Net, OTSU, and RANSAC methods, and the example results are shown in <xref ref-type="fig" rid="f8">
<bold>Figure&#xa0;8</bold>
</xref>. Deep learning-based segmentation methods produced smoother edges. The neural network trained on both RGB and DSM received more input information, so its edges were smoother and more closely fit the actual canopy region. Because OTSU and RANSAC segment by simple thresholding, their edges contained a lot of noise, which reduced the recognition accuracy. In addition, RANSAC uses a fixed distance threshold, unlike OTSU, and therefore incorrectly classified the ground as tree canopy in some areas.</p>
<fig id="f8" position="float">
<label>Figure&#xa0;8</label>
<caption>
<p>Results of different segmentation methods for tree rows: dark green for the canopy and light green for the ground.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1393592-g008.tif"/>
</fig>
<p>
<xref ref-type="table" rid="T1">
<bold>Table&#xa0;1</bold>
</xref> shows the accuracy of different segmentation methods. U-Net trained by RGB images and DSM achieved the highest MIoU and MPA of 84.75% and 92.58%, respectively. RANSAC had the worst segmentation accuracy, with MIoU and MPA of 64.48% and 90.20%, respectively. The MPAs of the four methods were close to each other, indicating that the segmentation accuracies of the different methods were similar for canopy and ground level. While the difference in MIoU suggested that the different segmentation methods had different overlaps of the canopy region, the deep learning method had tidier edges, and the pixel classification accuracy would be higher at the edges. Therefore, the overlap with the correct classification was higher, resulting in a higher MIoU. Although the DSM-trained U-Net, OTSU, and RANSAC methods all used only altitude data, the results of the deep learning segmentation method were smooth, with a higher overlap with the actual canopy region.</p>
<table-wrap id="T1" position="float">
<label>Table&#xa0;1</label>
<caption>
<p>MIoU and MPA for different segmentation methods.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="top" align="center">Method</th>
<th valign="top" align="center">MIoU</th>
<th valign="top" align="center">MPA</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="center">U-Net (RGBD)</td>
<td valign="top" align="center">84.75%</td>
<td valign="top" align="center">92.58%</td>
</tr>
<tr>
<td valign="top" align="center">U-Net (DSM)</td>
<td valign="top" align="center">83.37%</td>
<td valign="top" align="center">91.55%</td>
</tr>
<tr>
<td valign="top" align="center">OTSU</td>
<td valign="top" align="center">65.33%</td>
<td valign="top" align="center">90.56%</td>
</tr>
<tr>
<td valign="top" align="center">RANSAC</td>
<td valign="top" align="center">64.48%</td>
<td valign="top" align="center">90.20%</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s3_2">
<label>3.2</label>
<title>Measurement of tree height and canopy volume</title>
<p>
<xref ref-type="fig" rid="f9">
<bold>Figure&#xa0;9</bold>
</xref> shows the results of tree height measurements using moving LiDAR scanning and a UAV. The RMSE of the tree height measured by LiDAR was 0.430&#xa0;m and MAPE was 8.16%, while the RMSE and MAPE of UAV were 0.644&#xa0;m and 14.26%, respectively. The UAV showed a greater error, which was probably due to the DSM construction process with some errors. Comparing the measurements of the two methods for the same tree, it could be found that the UAV&#x2019;s measurements were lower.</p>
<fig id="f9" position="float">
<label>Figure&#xa0;9</label>
<caption>
<p>Results of different methods of measuring tree height. <bold>(A)</bold> Tree height measured by LiDAR. <bold>(B)</bold> Tree height measured by UAV. <bold>(C)</bold> Comparison of the results of the two methods of measuring tree height.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1393592-g009.tif"/>
</fig>
<p>
<xref ref-type="fig" rid="f10">
<bold>Figure&#xa0;10</bold>
</xref> shows the accuracy of canopy volume measurements for different segmentation methods. U-Net trained with DSM obtained the highest canopy volume measurement accuracy with an RMSE of 0.410 m<sup>3</sup>. However, RANSAC segmentation had the worst canopy volume measurement accuracy, with an RMSE of 0.580 m<sup>3</sup>. The accuracy of deep learning-based segmentation approaches was higher than that of traditional methods, consistent with the results of segmentation accuracy. In the OTSU and RANSAC methods, the measurement accuracy of OTSU was higher than that of RANSAC.</p>
<fig id="f10" position="float">
<label>Figure&#xa0;10</label>
<caption>
<p>Results of canopy volume measurements for different segmentation methods, with the gray dashed line in the figure being the 1:1 line. <bold>(A)</bold> U-Net (RGB and DSM). <bold>(B)</bold> U-Net (DSM). <bold>(C)</bold> OTSU. <bold>(D)</bold> RANSAC.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1393592-g010.tif"/>
</fig>
</sec>
</sec>
<sec id="s4" sec-type="discussion">
<label>4</label>
<title>Discussion</title>
<p>This study presented a pipeline for measuring canopy volume using UAVs and evaluated the impact of different ground segmentation methods on the accuracy of the measurements. The results indicated that the deep learning-based segmentation method had higher accuracy than the OTSU and RANSAC methods, whether trained with RGB and DSM or only with DSM. The U-Net model trained with an additional RGB channel input provided more color and texture information, improved the segmentation accuracy (<xref ref-type="bibr" rid="B11">Geirhos et&#xa0;al., 2018</xref>), and resulted in the highest segmentation accuracy with MIoU of 84.75%. Since the edges of manual labels were smooth, the neural network learned the feature of smooth edges, and the segmentation results do not need filtering operations.</p>
<p>Since the filtering operation on the mask image involved different filtering algorithms and hyperparameters, the output results were not filtered in this study but were directly used to calculate the canopy volume in the next step in order to evaluate the&#xa0;differences between the different algorithms themselves. The difference in the performance of OTSU and RANSAC at the edges might result in the identified mask area being larger than the manually labeled one, which leads to a lower MIoU. At the same time, RANSAC incorrectly classified a small number of ground points as canopy, and some &#x201c;pretzel-like&#x201d; noise points were visible in the ground portion of the segmentation results. In the DSM constructed using the UAV, there were weeds in the ground part, and the ground part had a certain &#x201c;thickness&#x201d; due to the accuracy of the SFM method, so the fixed distance threshold used by RANSAC (0.2&#xa0;m in this study) misclassified some of the ground points as canopy (<xref ref-type="fig" rid="f11">
<bold>Figure&#xa0;11</bold>
</xref>). This might be the reason why RANSAC has the worst accuracy in segmentation and canopy volume measurements.</p>
<fig id="f11" position="float">
<label>Figure&#xa0;11</label>
<caption>
<p>The RANSAC method incorrectly segments undulating ground and wild weeds.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1393592-g011.tif"/>
</fig>
<p>LiDAR tended to give higher results compared to manually measured true tree heights due to the fact that the LiDAR-equipped vehicle had a slight wobble when traveling, leading to incorrect measurements of tree heights (<xref ref-type="bibr" rid="B14">Han et&#xa0;al., 2023</xref>). In contrast, the tree heights measured from the UAV-constructed DSM tended to be low. This might be due to the small area of the treetops, which makes it difficult to recognize the features when constructing the DSM, thus resulting in the highest point of the tree canopy not being correctly identified. Considering the difference in measurement efficiency, it is possible to measure tree height using a UAV.</p>
<p>The accuracy of canopy volume measurements did not follow the&#xa0;same order as the accuracy of canopy area segmentation due&#xa0;to&#xa0;multiple sources of error. The UAV-constructed DSM underestimated the canopy height compared to the point cloud acquired by the moving LiDAR (<xref ref-type="fig" rid="f9">
<bold>Figures&#xa0;9C</bold>
</xref>, <xref ref-type="fig" rid="f12">
<bold>12</bold>
</xref>), resulting in underestimated volumes, while the UAV was unable to access the structure of the inner and lower canopy (<xref ref-type="bibr" rid="B7">Brede et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B33">Schneider et&#xa0;al., 2019</xref>). Other studies have reported underestimates of UAV measurement (<xref ref-type="bibr" rid="B16">Krause et&#xa0;al., 2019</xref>; <xref ref-type="bibr" rid="B28">Pourreza et&#xa0;al., 2022</xref>). The overhead captured from the UAV resulted in higher leaves obscuring details of the lower canopy, and the reconstructed DSM contained only the upper surface of the canopy, which could lead to an overestimation of volume. The two errors had opposite effects on the volume measurements. With the combination of these two factors, U-Net trained based on DSM alone obtained the best canopy volume measurement with an RMSE of 0.410 m<sup>3</sup>. The RMSE of the UAV-based tree height measurements in this study was 0.644&#xa0;m. An RMSE of 0.51&#xa0;m was also obtained in young trees (<xref ref-type="bibr" rid="B42">Vacca and Vecchi, 2024</xref>). In a previous study, an RMSE of 0.28&#xa0;m could be obtained using a high-resolution camera on a flat orchard (<xref ref-type="bibr" rid="B19">Mahmud et&#xa0;al., 2023</xref>), and an RMSE of about 0.3&#xa0;m has been reported in a similar study (<xref ref-type="bibr" rid="B46">Wallace et&#xa0;al., 2016</xref>; <xref ref-type="bibr" rid="B5">Birdal et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B16">Krause et&#xa0;al., 2019</xref>). There is almost only one branch extending upwards at the treetop in this study, while the RGB camera only has 200 million pixels, which affects the accuracy of the reconstruction. There are various ways to measure canopy volume, such as the envelope polygon method and the voxel method. The voxel method was used in this study as a baseline method, and the RMSE of the canopy volume measured with the UAV was 0.410 m<sup>3</sup>. 
Using the envelope polygon method as a baseline and adjusting the different parameters, the RMSE of measurement is between 0.33 m<sup>3</sup> and 0.43&#xa0;m<sup>3</sup> by the UAV (<xref ref-type="bibr" rid="B19">Mahmud et&#xa0;al., 2023</xref>). Based on UAV measurements of canopy volume in apple orchards, the best measurements obtained at different flight heights had an RMSE of 1.41 m<sup>3</sup>, using the ellipsoid fitted by manual measurements as a baseline (<xref ref-type="bibr" rid="B39">Sun et&#xa0;al., 2019</xref>). In contrast, the reconstruction process in this study did not use ground control points. With the combination of errors, the accuracy of canopy volume measurements was acceptable.</p>
<fig id="f12" position="float">
<label>Figure&#xa0;12</label>
<caption>
<p>Aligned LiDAR point cloud (yellow) with UAV-constructed DSM (using RGB coloring).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1393592-g012.tif"/>
</fig>
<p>The canopy measurement process proposed in this study can be used for orchard phenology, pruning, and light interception estimation (<xref ref-type="bibr" rid="B50">Westling et&#xa0;al., 2020</xref>; <xref ref-type="bibr" rid="B32">Scalisi et&#xa0;al., 2021</xref>). The developed method can obtain the variability of the canopy in spatial distribution and provide prescription maps for precise pesticide spraying, pruning, and other field management work. Compared to LiDAR, the UAV-reconstructed DSM is missing branch details at the treetops, leading to an underestimation of tree height. Also, the ground control point-free reconstruction method may have affected the accuracy of tree height measurement. The UAV-based canopy volume testing process can balance efficiency and accuracy and is particularly suitable for larger orchards.</p>
</sec>
<sec id="s5" sec-type="conclusions">
<label>5</label>
<title>Conclusion</title>
<p>This study evaluated the effect of different canopy region segmentation methods on the accuracy of UAV-based canopy volume measurements. RGB and DSM orthophotos constructed from UAV imagery were used to segment the canopy by U-Net, OTSU, and RANSAC methods and calculate the canopy volume. The results showed that U-Net trained by RGB and DSM achieved the best accuracy in the segmentation task, with 84.75% MIoU and 92.58% MPA. The MPA of segmentation by the OTSU and RANSAC methods was similar to that of the deep learning method, but the MIoUs were 65.33% and 64.48%, respectively, lower than those of the deep learning method due to the lower overlap of the segmented regions and the large amount of noise in the obtained canopy masks. In tree height measurement, the RMSE of tree height measured by LiDAR was 0.430&#xa0;m, while that of the UAV was 0.644&#xa0;m. However, the canopy volume measurement task was less affected by the accuracy of tree height measurements. The U-Net trained using only DSM achieved the best accuracy with an RMSE of 0.410 m<sup>3</sup>, an rRMSE of 6.40%, and a MAPE of 4.74%. In contrast, the RMSE of the U-Net segmentation method trained with RGB and DSM was 0.471 m<sup>3</sup>. The canopy volume measurement accuracy of the traditional OTSU and RANSAC methods was lower than that of the deep learning method, with RMSE of 0.521 m<sup>3</sup> and 0.580 m<sup>3</sup>, respectively. Therefore, in the case of having manually labeled datasets, the segmentation of the canopy region using the deep learning approach can achieve higher accuracy of canopy volume measurement.</p>
</sec>
<sec id="s6" sec-type="data-availability">
<title>Data availability statement</title>
<p>The raw data supporting the conclusions of this article will be made available by the authors, without undue reservation.</p>
</sec>
<sec id="s7" sec-type="author-contributions">
<title>Author contributions</title>
<p>LH: Conceptualization, Investigation, Methodology, Validation, Writing &#x2013; original draft. ZW: Conceptualization, Investigation, Writing &#x2013; review &amp; editing. MH: Conceptualization, Investigation, Methodology, Validation, Writing &#x2013; review &amp; editing. XH: Funding acquisition, Writing &#x2013; review &amp; editing.</p>
</sec>
</body>
<back>
<sec id="s8" sec-type="funding-information">
<title>Funding</title>
<p>The author(s) declare financial support was received for the research, authorship, and/or publication of this article. This work was supported by the earmarked fund for CARS (CARS-28), the 2115 talent development program of China Agricultural University, National Natural Science Foundation of China No.31761133019 and the Sanya Institute of China Agricultural University Guiding Fund Project, Grant No. SYND-2021&#x2013;06.</p>
</sec>
<sec id="s9" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec id="s10" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ali</surname> <given-names>M. E. N. O.</given-names>
</name>
<name>
<surname>Taha</surname> <given-names>L. G. E.-D.</given-names>
</name>
<name>
<surname>Mohamed</surname> <given-names>M. H. A.</given-names>
</name>
<name>
<surname>Mandouh</surname> <given-names>A. A.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Generation of digital terrain model from multispectral LiDar using different ground filtering techniques</article-title>. <source>Egypt. J. Remote Sens. Sp. Sci.</source> <volume>24</volume>, <fpage>181</fpage>&#x2013;<lpage>189</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.ejrs.2020.12.004</pub-id>
</citation>
</ref>
<ref id="B2">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ali-Sisto</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Gopalakrishnan</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Kukkonen</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Savolainen</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Packalen</surname> <given-names>P.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>A method for vertical adjustment of digital aerial photogrammetry data by using a high-quality digital terrain model</article-title>. <source>Int. J. Appl. Earth Obs. Geoinf.</source> <volume>84</volume>, <elocation-id>101954</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jag.2019.101954</pub-id>
</citation>
</ref>
<ref id="B3">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Anifantis</surname> <given-names>A. S.</given-names>
</name>
<name>
<surname>Camposeo</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Vivaldi</surname> <given-names>G. S.</given-names>
</name>
<name>
<surname>Santoro</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Pascuzzi</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Comparison of UAV photogrammetry and 3D modeling techniques with other currently used methods for estimation of the tree row volume of a super-high-density olive orchard</article-title>. <source>Agriculture</source> <volume>9</volume>, <elocation-id>233</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture9110233</pub-id>
</citation>
</ref>
<ref id="B4">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ariza-Sent&#xed;s</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Baja</surname> <given-names>H.</given-names>
</name>
<name>
<surname>V&#xe9;lez</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Valente</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Object detection and tracking on UAV RGB videos for early extraction of grape phenotypic traits</article-title>. <source>Comput. Electron. Agric.</source> <volume>211</volume>, <elocation-id>108051</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2023.108051</pub-id>
</citation>
</ref>
<ref id="B5">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Birdal</surname> <given-names>A. C.</given-names>
</name>
<name>
<surname>Avdan</surname> <given-names>U.</given-names>
</name>
<name>
<surname>T&#xfc;rk</surname> <given-names>T.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Estimating tree heights with images from an unmanned aerial vehicle</article-title>. <source>Geomatics Nat. Hazards Risk</source> <volume>8</volume>, <fpage>1144</fpage>&#x2013;<lpage>1156</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/19475705.2017.1300608</pub-id>
</citation>
</ref>
<ref id="B6">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Brede</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Calders</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Lau</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Raumonen</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Bartholomeus</surname> <given-names>H. M.</given-names>
</name>
<name>
<surname>Herold</surname> <given-names>M.</given-names>
</name>
<etal/>
</person-group>. (<year>2019</year>). <article-title>Non-destructive tree volume estimation through quantitative structure modelling: Comparing UAV laser scanning with terrestrial LIDAR</article-title>. <source>Remote Sens. Environ.</source> <volume>233</volume>, <elocation-id>111355</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rse.2019.111355</pub-id>
</citation>
</ref>
<ref id="B7">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Brede</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Lau</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Bartholomeus</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Kooistra</surname> <given-names>L.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Comparing RIEGL RiCOPTER UAV LiDAR derived canopy height and DBH with terrestrial LiDAR</article-title>. <source>Sensors</source> <volume>17</volume>, <elocation-id>2371</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s17102371</pub-id>
</citation>
</ref>
<ref id="B8">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Caras</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Lati</surname> <given-names>R. N.</given-names>
</name>
<name>
<surname>Holland</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Dubinin</surname> <given-names>V. M.</given-names>
</name>
<name>
<surname>Hatib</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Shulner</surname> <given-names>I.</given-names>
</name>
<etal/>
</person-group>. (<year>2024</year>). <article-title>Monitoring the effects of weed management strategies on tree canopy structure and growth using UAV-LiDAR in a young almond orchard</article-title>. <source>Comput. Electron. Agric.</source> <volume>216</volume>, <elocation-id>108467</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2023.108467</pub-id>
</citation>
</ref>
<ref id="B9">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chang</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Yeom</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Jung</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Landivar</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Comparison of canopy shape and vegetation indices of citrus trees derived from UAV multispectral images for characterization of citrus greening disease</article-title>. <source>Remote Sens.</source> <volume>12</volume>, <elocation-id>4122</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs12244122</pub-id>
</citation>
</ref>
<ref id="B10">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Eitel</surname> <given-names>J. U. H.</given-names>
</name>
<name>
<surname>Magney</surname> <given-names>T. S.</given-names>
</name>
<name>
<surname>Vierling</surname> <given-names>L. A.</given-names>
</name>
<name>
<surname>Brown</surname> <given-names>T. T.</given-names>
</name>
<name>
<surname>Huggins</surname> <given-names>D. R.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>LiDAR based biomass and crop nitrogen estimates for rapid, non-destructive assessment of wheat nitrogen status</article-title>. <source>F. Crop Res.</source> <volume>159</volume>, <fpage>21</fpage>&#x2013;<lpage>32</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.fcr.2014.01.008</pub-id>
</citation>
</ref>
<ref id="B11">
<citation citation-type="web">
<person-group person-group-type="author">
<name>
<surname>Geirhos</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Rubisch</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Michaelis</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Bethge</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Wichmann</surname> <given-names>F. A.</given-names>
</name>
<name>
<surname>Brendel</surname> <given-names>W.</given-names>
</name>
</person-group> (<year>2018</year>) <source>ImageNet-trained CNNs are biased towards texture; increasing shape bias improves accuracy and robustness</source>. Available online at: <uri xlink:href="http://arxiv.org/abs/1811.12231">http://arxiv.org/abs/1811.12231</uri>.</citation>
</ref>
<ref id="B12">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gen&#xe9;-Mola</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Gregorio</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Guevara</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Auat</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Sanz-Cortiella</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Escol&#xe0;</surname> <given-names>A.</given-names>
</name>
<etal/>
</person-group>. (<year>2019</year>). <article-title>Fruit detection in an apple orchard using a mobile terrestrial laser scanner</article-title>. <source>Biosyst. Eng.</source> <volume>187</volume>, <fpage>171</fpage>&#x2013;<lpage>184</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2019.08.017</pub-id>
</citation>
</ref>
<ref id="B13">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gil</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Llorens</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Llop</surname> <given-names>J.</given-names>
</name>
<name>
<surname>F&#xe0;bregas</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Escol&#xe0;</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Rosell-Polo</surname> <given-names>J. R.</given-names>
</name>
</person-group> (<year>2013</year>). <article-title>Variable rate sprayer. Part 2 &#x2013; Vineyard prototype: Design, implementation, and validation</article-title>. <source>Comput. Electron. Agric.</source> <volume>95</volume>, <fpage>136</fpage>&#x2013;<lpage>150</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2013.02.010</pub-id>
</citation>
</ref>
<ref id="B14">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Han</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Jin</surname> <given-names>L.</given-names>
</name>
<name>
<surname>He</surname> <given-names>X.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Method of 3D voxel prescription map construction in digital orchard management based on LiDAR-RTK boarded on a UGV</article-title>. <source>Drones</source> <volume>7</volume>, <elocation-id>242</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/drones7040242</pub-id>
</citation>
</ref>
<ref id="B15">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Karp</surname> <given-names>F. H. S.</given-names>
</name>
<name>
<surname>Cola&#xe7;o</surname> <given-names>A. F.</given-names>
</name>
<name>
<surname>Trevisan</surname> <given-names>R. G.</given-names>
</name>
<name>
<surname>Molin</surname> <given-names>J. P.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Accuracy assessment of a mobile terrestrial laser scanner for tree crops</article-title>. <source>Adv. Anim. Biosci.</source> <volume>8</volume>, <fpage>178</fpage>&#x2013;<lpage>182</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1017/S2040470017000073</pub-id>
</citation>
</ref>
<ref id="B16">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Krause</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Sanders</surname> <given-names>T. G. M.</given-names>
</name>
<name>
<surname>Mund</surname> <given-names>J.-P.</given-names>
</name>
<name>
<surname>Greve</surname> <given-names>K.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>UAV-based photogrammetric tree height measurement for intensive forest monitoring</article-title>. <source>Remote Sens.</source> <volume>11</volume>, <elocation-id>758</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs11070758</pub-id>
</citation>
</ref>
<ref id="B17">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Li</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Dai</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>He</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Lin</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Estimating leaf area density of individual trees using the point cloud segmentation of terrestrial LiDAR data and a voxel-based model</article-title>. <source>Remote Sens.</source> <volume>9</volume>, <elocation-id>1202</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs9111202</pub-id>
</citation>
</ref>
<ref id="B18">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lu</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Cheng</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Geng</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Xiang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Hu</surname> <given-names>T.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Combining plant height, canopy coverage and vegetation index from UAV-based RGB images to estimate leaf nitrogen concentration of summer maize</article-title>. <source>Biosyst. Eng.</source> <volume>202</volume>, <fpage>42</fpage>&#x2013;<lpage>54</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2020.11.010</pub-id>
</citation>
</ref>
<ref id="B19">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mahmud</surname> <given-names>M. S.</given-names>
</name>
<name>
<surname>He</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Heinemann</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Choi</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Zhu</surname> <given-names>H.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Unmanned aerial vehicle based tree canopy characteristics measurement for precision spray applications</article-title>. <source>Smart Agric. Technol.</source> <volume>4</volume>, <elocation-id>100153</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.atech.2022.100153</pub-id>
</citation>
</ref>
<ref id="B20">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Maimaitijiang</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Sagan</surname> <given-names>V.</given-names>
</name>
<name>
<surname>Sidike</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Maimaitiyiming</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Hartling</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Peterson</surname> <given-names>K. T.</given-names>
</name>
<etal/>
</person-group>. (<year>2019</year>). <article-title>Vegetation Index Weighted Canopy Volume Model (CVMVI) for soybean biomass estimation from Unmanned Aerial System-based RGB imagery</article-title>. <source>ISPRS J. Photogramm. Remote Sens.</source> <volume>151</volume>, <fpage>27</fpage>&#x2013;<lpage>41</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.isprsjprs.2019.03.003</pub-id>
</citation>
</ref>
<ref id="B21">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mokro&#x161;</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Mikita</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Singh</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Toma&#x161;t&#xed;k</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Chud&#xe1;</surname> <given-names>J.</given-names>
</name>
<name>
<surname>W&#x119;&#x17c;yk</surname> <given-names>P.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). <article-title>Novel low-cost mobile mapping systems for forest inventories as terrestrial laser scanning alternatives</article-title>. <source>Int. J. Appl. Earth Obs. Geoinf.</source> <volume>104</volume>, <elocation-id>102512</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jag.2021.102512</pub-id>
</citation>
</ref>
<ref id="B22">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Fujii</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Takata</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Zheng</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Noshita</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Honda</surname> <given-names>K.</given-names>
</name>
<etal/>
</person-group>. (<year>2018</year>). <article-title>Characterization of peach tree crown by using high-resolution images from an unmanned aerial vehicle</article-title>. <source>Hortic. Res.</source> <volume>5</volume>, <fpage>74</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41438-018-0097-z</pub-id>
</citation>
</ref>
<ref id="B23">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Nan</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Zheng</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Bian</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>Y.</given-names>
</name>
<etal/>
</person-group>. (<year>2019</year>). <article-title>Estimating leaf area density of Osmanthus trees using ultrasonic sensing</article-title>. <source>Biosyst. Eng.</source> <volume>186</volume>, <fpage>60</fpage>&#x2013;<lpage>70</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2019.06.020</pub-id>
</citation>
</ref>
<ref id="B24">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Oniga</surname> <given-names>V.-E.</given-names>
</name>
<name>
<surname>Loghin</surname> <given-names>A.-M.</given-names>
</name>
<name>
<surname>Macovei</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Lazar</surname> <given-names>A.-A.</given-names>
</name>
<name>
<surname>Boroianu</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Sestras</surname> <given-names>P.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Enhancing LiDAR-UAS derived digital terrain models with hierarchic robust and volume-based filtering approaches for precision topographic mapping</article-title>. <source>Remote Sens.</source> <volume>16</volume>, <elocation-id>78</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs16010078</pub-id>
</citation>
</ref>
<ref id="B25">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Pagliai</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Ammoniaci</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Sarri</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Lisci</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Perria</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Vieri</surname> <given-names>M.</given-names>
</name>
<etal/>
</person-group>. (<year>2022</year>). <article-title>Comparison of aerial and ground 3D point clouds for canopy size assessment in precision viticulture</article-title>. <source>Remote Sens.</source> <volume>14</volume>, <elocation-id>1145</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs14051145</pub-id>
</citation>
</ref>
<ref id="B26">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Patrignani</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Ochsner</surname> <given-names>T. E.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>Canopeo: A powerful new tool for measuring fractional green canopy cover</article-title>. <source>Agron. J.</source> <volume>107</volume>, <fpage>2312</fpage>&#x2013;<lpage>2320</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.2134/agronj15.0150</pub-id>
</citation>
</ref>
<ref id="B27">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Pfeiffer</surname> <given-names>S. A.</given-names>
</name>
<name>
<surname>Guevara</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Cheein</surname> <given-names>F. A.</given-names>
</name>
<name>
<surname>Sanz</surname> <given-names>R.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Mechatronic terrestrial LiDAR for canopy porosity and crown surface estimation</article-title>. <source>Comput. Electron. Agric.</source> <volume>146</volume>, <fpage>104</fpage>&#x2013;<lpage>113</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2018.01.022</pub-id>
</citation>
</ref>
<ref id="B28">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Pourreza</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Moradi</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Khosravi</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Deljouei</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Vanderhoof</surname> <given-names>M. K.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>GCPs-free photogrammetry for estimating tree height and crown diameter in Arizona cypress plantation using UAV-mounted GNSS RTK</article-title>. <source>Forests</source> <volume>13</volume>, <elocation-id>1905</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/f13111905</pub-id>
</citation>
</ref>
<ref id="B29">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Raj</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Walker</surname> <given-names>J. P.</given-names>
</name>
<name>
<surname>Pingale</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Nandan</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Naik</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Jagarlapudi</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Leaf area index estimation using top-of-canopy airborne RGB images</article-title>. <source>Int. J. Appl. Earth Obs. Geoinf.</source> <volume>96</volume>, <elocation-id>102282</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jag.2020.102282</pub-id>
</citation>
</ref>
<ref id="B30">
<citation citation-type="web">
<person-group person-group-type="author">
<name>
<surname>Ronneberger</surname> <given-names>O.</given-names>
</name>
<name>
<surname>Fischer</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Brox</surname> <given-names>T.</given-names>
</name>
</person-group> (<year>2015</year>). <source>U-net: convolutional networks for biomedical image segmentation</source>. Available online at: <uri xlink:href="http://arxiv.org/abs/1505.04597">http://arxiv.org/abs/1505.04597</uri>.</citation>
</ref>
<ref id="B31">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ross</surname> <given-names>C. W.</given-names>
</name>
<name>
<surname>Loudermilk</surname> <given-names>E. L.</given-names>
</name>
<name>
<surname>Skowronski</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Pokswinski</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Hiers</surname> <given-names>J. K.</given-names>
</name>
<name>
<surname>O&#x2019;Brien</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>LiDAR voxel-size optimization for canopy gap estimation</article-title>. <source>Remote Sens.</source> <volume>14</volume>, <elocation-id>1054</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs14051054</pub-id>
</citation>
</ref>
<ref id="B32">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Scalisi</surname> <given-names>A.</given-names>
</name>
<name>
<surname>McClymont</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Underwood</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Morton</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Scheding</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Goodwin</surname> <given-names>I.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Reliability of a commercial platform for estimating flower cluster and fruit number, yield, tree geometry and light interception in apple trees under different rootstocks and row orientations</article-title>. <source>Comput. Electron. Agric.</source> <volume>191</volume>, <elocation-id>106519</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2021.106519</pub-id>
</citation>
</ref>
<ref id="B33">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Schneider</surname> <given-names>F. D.</given-names>
</name>
<name>
<surname>K&#xfc;kenbrink</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Schaepman</surname> <given-names>M. E.</given-names>
</name>
<name>
<surname>Schimel</surname> <given-names>D. S.</given-names>
</name>
<name>
<surname>Morsdorf</surname> <given-names>F.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Quantifying 3D structure and occlusion in dense tropical and temperate forests using close-range LiDAR</article-title>. <source>Agric. For. Meteorol.</source> <volume>268</volume>, <fpage>249</fpage>&#x2013;<lpage>257</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.agrformet.2019.01.033</pub-id>
</citation>
</ref>
<ref id="B34">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sinha</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Quir&#xf3;s</surname> <given-names>J. J.</given-names>
</name>
<name>
<surname>Sankaran</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Khot</surname> <given-names>L. R.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>High resolution aerial photogrammetry based 3D mapping of fruit crop canopies for precision inputs management</article-title>. <source>Inf. Process. Agric.</source> <volume>9</volume>, <fpage>11</fpage>&#x2013;<lpage>23</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.inpa.2021.01.006</pub-id>
</citation>
</ref>
<ref id="B35">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sithole</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Vosselman</surname> <given-names>G.</given-names>
</name>
</person-group> (<year>2004</year>). <article-title>Experimental comparison of filter algorithms for bare-Earth extraction from airborne laser scanning point clouds</article-title>. <source>ISPRS J. Photogramm. Remote Sens.</source> <volume>59</volume>, <fpage>85</fpage>&#x2013;<lpage>101</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.isprsjprs.2004.05.004</pub-id>
</citation>
</ref>
<ref id="B36">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Stateras</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Kalivas</surname> <given-names>D.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Assessment of olive tree canopy characteristics and yield forecast model using high resolution UAV imagery</article-title>. <source>Agriculture</source> <volume>10</volume>, <elocation-id>385</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture10090385</pub-id>
</citation>
</ref>
<ref id="B37">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Stovall</surname> <given-names>A. E. L.</given-names>
</name>
<name>
<surname>Vorster</surname> <given-names>A. G.</given-names>
</name>
<name>
<surname>Anderson</surname> <given-names>R. S.</given-names>
</name>
<name>
<surname>Evangelista</surname> <given-names>P. H.</given-names>
</name>
<name>
<surname>Shugart</surname> <given-names>H. H.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Non-destructive aboveground biomass estimation of coniferous trees using terrestrial LiDAR</article-title>. <source>Remote Sens. Environ.</source> <volume>200</volume>, <fpage>31</fpage>&#x2013;<lpage>42</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rse.2017.08.013</pub-id>
</citation>
</ref>
<ref id="B38">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sultan Mahmud</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Zahid</surname> <given-names>A.</given-names>
</name>
<name>
<surname>He</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Choi</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Krawczyk</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Zhu</surname> <given-names>H.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). <article-title>Development of a LiDAR-guided section-based tree canopy density measurement system for precision spray applications</article-title>. <source>Comput. Electron. Agric.</source> <volume>182</volume>, <elocation-id>106053</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2021.106053</pub-id>
</citation>
</ref>
<ref id="B39">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sun</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Ding</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Lu</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Sun</surname> <given-names>Y.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Remote measurement of apple orchard canopy information using unmanned aerial vehicle photogrammetry</article-title>. <source>Agronomy</source> <volume>9</volume>, <elocation-id>774</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy9110774</pub-id>
</citation>
</ref>
<ref id="B40">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Tsoulias</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Paraforos</surname> <given-names>D. S.</given-names>
</name>
<name>
<surname>Fountas</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Zude-Sasse</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Estimating canopy parameters based on the stem position in apple trees using a 2D LiDAR</article-title>. <source>Agronomy</source> <volume>9</volume>, <elocation-id>740</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy9110740</pub-id>
</citation>
</ref>
<ref id="B41">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Tunca</surname> <given-names>E.</given-names>
</name>
<name>
<surname>K&#xf6;ksal</surname> <given-names>E. S.</given-names>
</name>
<name>
<surname>&#xd6;zt&#xfc;rk</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Akay</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Taner</surname> <given-names>S. &#xc7;.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Accurate leaf area index estimation in sorghum using high-resolution UAV data and machine learning models</article-title>. <source>Phys. Chem. Earth Parts A/B/C</source> <volume>133</volume>, <elocation-id>103537</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.pce.2023.103537</pub-id>
</citation>
</ref>
<ref id="B42">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Vacca</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Vecchi</surname> <given-names>E.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>UAV photogrammetric surveys for tree height estimation</article-title>. <source>Drones</source> <volume>8</volume>, <elocation-id>106</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/drones8030106</pub-id>
</citation>
</ref>
<ref id="B43">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>V&#xe9;lez</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Vacas</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Mart&#xed;n</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Ruano-Rosa</surname> <given-names>D.</given-names>
</name>
<name>
<surname>&#xc1;lvarez</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>A novel technique using planar area and ground shadows calculated from UAV RGB imagery to estimate pistachio tree (Pistacia vera L.) canopy volume</article-title>. <source>Remote Sens.</source> <volume>14</volume>, <elocation-id>6006</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs14236006</pub-id>
</citation>
</ref>
<ref id="B44">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Vinci</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Brigante</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Traini</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Farinelli</surname> <given-names>D.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Geometrical characterization of hazelnut trees in an intensive orchard by an unmanned aerial vehicle (UAV) for precision agriculture applications</article-title>. <source>Remote Sens.</source> <volume>15</volume>, <elocation-id>541</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs15020541</pub-id>
</citation>
</ref>
<ref id="B45">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wallace</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Hillman</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Reinke</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Hally</surname> <given-names>B.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Non-destructive estimation of above-ground surface and near-surface biomass using 3D terrestrial remote sensing techniques</article-title>. <source>Methods Ecol. Evol.</source> <volume>8</volume>, <fpage>1607</fpage>&#x2013;<lpage>1616</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/2041-210X.12759</pub-id>
</citation>
</ref>
<ref id="B46">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wallace</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Lucieer</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Malenovsk&#xfd;</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Turner</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Vop&#x11b;nka</surname> <given-names>P.</given-names>
</name>
</person-group> (<year>2016</year>). <article-title>Assessment of forest structure using two UAV techniques: A comparison of airborne laser scanning and structure from motion (SfM) point clouds</article-title>. <source>Forests</source> <volume>7</volume>, <elocation-id>62</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/f7030062</pub-id>
</citation>
</ref>
<ref id="B47">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Walter</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Edwards</surname> <given-names>J.</given-names>
</name>
<name>
<surname>McDonald</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Kuchel</surname> <given-names>H.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Photogrammetry for the estimation of wheat biomass and harvest index</article-title>. <source>Field Crops Res.</source> <volume>216</volume>, <fpage>165</fpage>&#x2013;<lpage>174</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.fcr.2017.11.024</pub-id>
</citation>
</ref>
<ref id="B48">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Lin</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Yao</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Wu</surname> <given-names>L.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Validation of a low-cost 2D laser scanner in development of a more-affordable mobile terrestrial proximal sensing system for 3D plant structure phenotyping in indoor environment</article-title>. <source>Comput. Electron. Agric.</source> <volume>140</volume>, <fpage>180</fpage>&#x2013;<lpage>189</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2017.06.002</pub-id>
</citation>
</ref>
<ref id="B49">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wen</surname> <given-names>L.</given-names>
</name>
<name>
<surname>He</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>X.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Mountain segmentation based on global optimization with the cloth simulation constraint</article-title>. <source>Remote Sens.</source> <volume>15</volume>, <elocation-id>2966</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs15122966</pub-id>
</citation>
</ref>
<ref id="B50">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Westling</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Mahmud</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Underwood</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Bally</surname> <given-names>I.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Replacing traditional light measurement with LiDAR based methods in orchards</article-title>. <source>Comput. Electron. Agric.</source> <volume>179</volume>, <elocation-id>105798</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2020.105798</pub-id>
</citation>
</ref>
<ref id="B51">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wilkes</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Lau</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Disney</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Calders</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Burt</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Gonzalez de Tanago</surname> <given-names>J.</given-names>
</name>
<etal/>
</person-group>. (<year>2017</year>). <article-title>Data acquisition considerations for Terrestrial Laser Scanning of forest plots</article-title>. <source>Remote Sens. Environ.</source> <volume>196</volume>, <fpage>140</fpage>&#x2013;<lpage>153</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rse.2017.04.030</pub-id>
</citation>
</ref>
<ref id="B52">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yoshii</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Matsumura</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Lin</surname> <given-names>C.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Integrating UAV-SfM and airborne LiDAR point cloud data to plantation forest feature extraction</article-title>. <source>Remote Sens.</source> <volume>14</volume>, <elocation-id>1713</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs14071713</pub-id>
</citation>
</ref>
<ref id="B53">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yuan</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Choi</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Bolkas</surname> <given-names>D.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>GNSS-IMU-assisted colored ICP for UAV-LiDAR point cloud registration of peach trees</article-title>. <source>Comput. Electron. Agric.</source> <volume>197</volume>, <elocation-id>106966</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2022.106966</pub-id>
</citation>
</ref>
<ref id="B54">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yurtseven</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Akgul</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Coban</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Gulci</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Determination and accuracy analysis of individual tree crown parameters using UAV based imagery and OBIA techniques</article-title>. <source>Measurement</source> <volume>145</volume>, <fpage>651</fpage>&#x2013;<lpage>664</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.measurement.2019.05.092</pub-id>
</citation>
</ref>
<ref id="B55">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zarco-Tejada</surname> <given-names>P. J.</given-names>
</name>
<name>
<surname>Diaz-Varela</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Angileri</surname> <given-names>V.</given-names>
</name>
<name>
<surname>Loudjani</surname> <given-names>P.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Tree height quantification using very high resolution imagery acquired from an unmanned aerial vehicle (UAV) and automatic 3D photo-reconstruction methods</article-title>. <source>Eur. J. Agron.</source> <volume>55</volume>, <fpage>89</fpage>&#x2013;<lpage>99</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.eja.2014.01.004</pub-id>
</citation>
</ref>
<ref id="B56">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Feng</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>R.</given-names>
</name>
<etal/>
</person-group>. (<year>2024</year>). <article-title>Removal of canopy shadows improved retrieval accuracy of individual apple tree crowns LAI and chlorophyll content using UAV multispectral imagery and PROSAIL model</article-title>. <source>Comput. Electron. Agric.</source> <volume>221</volume>, <elocation-id>108959</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2024.108959</pub-id>
</citation>
</ref>
<ref id="B57">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Valente</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Kooistra</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Guo</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>W.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Orchard management with small unmanned aerial vehicles: a survey of sensing and analysis approaches</article-title>. <source>Precis. Agric.</source> <volume>22</volume>, <fpage>2007</fpage>&#x2013;<lpage>2052</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11119-021-09813-y</pub-id>
</citation>
</ref>
</ref-list>
<app-group>
<app id="app1">
<title>Appendix A</title>
<fig id="f13" position="float">
<label>Figure&#xa0;1</label>
<caption>
<p>Loss of the U-Net network during training as a function of training epoch. <bold>(A)</bold> Trained with RGB and DSM. <bold>(B)</bold> Trained only with DSM.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1393592-g013.tif"/>
</fig>
</app>
</app-group>
</back>
</article>