<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="2.3" xml:lang="EN">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Ecol. Evol.</journal-id>
<journal-title>Frontiers in Ecology and Evolution</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Ecol. Evol.</abbrev-journal-title>
<issn pub-type="epub">2296-701X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fevo.2023.1171358</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Ecology and Evolution</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Extraction and monitoring of vegetation coverage based on uncrewed aerial vehicle visible image in a post gold mining area</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name>
<surname>Chen</surname>
<given-names>Rui</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1819477"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Han</surname>
<given-names>Lei</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<xref ref-type="author-notes" rid="fn001">
<sup>*</sup>
</xref>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Zhao</surname>
<given-names>Yonghua</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1968153"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Zhao</surname>
<given-names>Zilin</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Liu</surname>
<given-names>Zhao</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Li</surname>
<given-names>Risheng</given-names>
</name>
<xref ref-type="aff" rid="aff4">
<sup>4</sup>
</xref>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Xia</surname>
<given-names>Longfei</given-names>
</name>
<xref ref-type="aff" rid="aff4">
<sup>4</sup>
</xref>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Zhai</surname>
<given-names>Yunmeng</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
</contrib>
</contrib-group>
<aff id="aff1">
<sup>1</sup>
<institution>School of Earth Science and Resources, Chang&#x2019;an University</institution>, <addr-line>Xi&#x2019;an</addr-line>, <country>China</country>
</aff>
<aff id="aff2">
<sup>2</sup>
<institution>School of Land Engineering, Shaanxi Key Laboratory of Land Consolidation, Chang&#x2019;an University</institution>, <addr-line>Xi&#x2019;an</addr-line>, <country>China</country>
</aff>
<aff id="aff3">
<sup>3</sup>
<institution>Key Laboratory of Degraded and Unused Land Consolidation Engineering, Ministry of Natural Resources of the People&#x2019;s Republic of China, Chang&#x2019;an University</institution>, <addr-line>Xi&#x2019;an</addr-line>, <country>China</country>
</aff>
<aff id="aff4">
<sup>4</sup>
<institution>Institute of Land Engineering and Technology, Shaanxi Provincial Land Engineering Construction Group</institution>, <addr-line>Xi&#x2019;an</addr-line>, <country>China</country>
</aff>
<author-notes>
<fn fn-type="edited-by">
<p>Edited by: Jie Lou, Zhejiang Lab, China</p>
</fn>
<fn fn-type="edited-by">
<p>Reviewed by: Jinman Wang, China University of Geosciences, China; Vishal Mishra, Indian Institute of Technology Roorkee, India</p>
</fn>
<fn fn-type="corresp" id="fn001">
<p>*Correspondence: Lei Han, <email xlink:href="mailto:hanshuanglei@chd.edu.cn">hanshuanglei@chd.edu.cn</email>
</p>
</fn>
</author-notes>
<pub-date pub-type="epub">
<day>12</day>
<month>07</month>
<year>2023</year>
</pub-date>
<pub-date pub-type="collection">
<year>2023</year>
</pub-date>
<volume>11</volume>
<elocation-id>1171358</elocation-id>
<history>
<date date-type="received">
<day>22</day>
<month>02</month>
<year>2023</year>
</date>
<date date-type="accepted">
<day>20</day>
<month>06</month>
<year>2023</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2023 Chen, Han, Zhao, Zhao, Liu, Li, Xia and Zhai</copyright-statement>
<copyright-year>2023</copyright-year>
<copyright-holder>Chen, Han, Zhao, Zhao, Liu, Li, Xia and Zhai</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<p>Vegetation coverage reflects the degree of environmental degradation. Timely and effective monitoring of vegetation conditions is the basis for promoting vegetation protection and improving the ecological environment of mining areas. Exploring vegetation coverage extraction methods and selecting the optimal vegetation index in mining areas can provide scientific reference for estimating vegetation coverage based on vegetation index in mining areas. Uncrewed aerial vehicles (UAVs) are widely used because of their fast real-time performance, high spatial resolution, and easy accessibility. In this study, the performances of nine visible vegetation indices and two threshold segmentation methods for extracting vegetation coverage in a post-gold mining area in the Qinling Mountains were comprehensively compared using visible spectrum UAV images. Of the nine indices, the excess green index (EXG) and visible-band difference vegetation index (VDVI) were the most effective in discriminating between vegetation and non-vegetation by visual interpretation. In addition, the accuracy of the bimodal histogram threshold method in extracting vegetation coverage was higher than that of Otsu&#x2019;s threshold method. The bimodal histogram threshold method combined with EXG yielded optimal extraction results. Based on optimal methods, the total percentages of fractional vegetation coverage in 2019, 2020, and 2021 were 31.47%, 34.08%, and 42.77%, respectively, indicating that the vegetation in the mining area improved. These results provide valuable guidance for extracting vegetation information and evaluating vegetation restoration in mining areas.</p>
</abstract>
<kwd-group>
<kwd>remote sensing</kwd>
<kwd>uncrewed aerial vehicle</kwd>
<kwd>vegetation coverage</kwd>
<kwd>eco-monitoring</kwd>
<kwd>post-mining area</kwd>
</kwd-group>
<counts>
<fig-count count="8"/>
<table-count count="5"/>
<equation-count count="3"/>
<ref-count count="50"/>
<page-count count="13"/>
<word-count count="4788"/>
</counts>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-in-acceptance</meta-name>
<meta-value>Environmental Informatics and Remote Sensing</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<sec id="s1" sec-type="intro">
<label>1</label>
<title>Introduction</title>
<p>Vegetation restoration and reconstruction are key components of ecological restoration in mining areas and are effective ways to improve the quality of the local ecological environment (<xref ref-type="bibr" rid="B22">Li et&#xa0;al., 2019</xref>). Therefore, obtaining vegetation information quickly and accurately in mining areas to evaluate the status of local ecological restoration is an urgent issue. As an important indicator of vegetation status, fractional vegetation coverage (FVC) is defined as the percentage ratio of the vertical projection area of vegetation (including leaves, stems, and branches) on the ground in a statistical area (<xref ref-type="bibr" rid="B18">Jia et&#xa0;al., 2015</xref>). FVC is not only the main indicator of regional environmental status and quality assessment but also an important part of terrestrial ecosystem research. Thus, accurate and rapid extraction of vegetation coverage requires timely monitoring of vegetation change, which is crucial for protecting biodiversity and promoting economic development.</p>
<p>Currently, remote sensing observations and land surface measurements are primarily used to monitor FVC (<xref ref-type="bibr" rid="B25">Lu et&#xa0;al., 2020</xref>). As a low-cost and highly efficient monitoring technology, remote sensing can provide objective and accurate environmental monitoring for large-scale mining areas. With the rapid development of satellite remote sensing technologies, many vegetation products, such as those derived from NOAA/AVHRR (<xref ref-type="bibr" rid="B3">Boyd et&#xa0;al., 2002</xref>), TM/Landsat (<xref ref-type="bibr" rid="B40">Voorde et&#xa0;al., 2008</xref>; <xref ref-type="bibr" rid="B21">Leng et&#xa0;al., 2019</xref>), and Terra &amp; Aqua/MODIS (<xref ref-type="bibr" rid="B36">Song et&#xa0;al., 2017</xref>), have facilitated large-scale monitoring of vegetation coverage. However, for small-scale areas, such as mining areas with complex topography and heterogeneous habitats, monitoring FVC using satellite remote sensing technologies is challenging because of their relatively coarse spatial resolution and long revisit period. Furthermore, although <italic>in situ</italic> measurements have high accuracy, they are usually time consuming and labor intensive, rendering them unsuitable for real-time and long-term monitoring. 
Notably, uncrewed aerial vehicles (UAVs) have the advantages of strong real-time performance, high spatial resolution, and easy access; thus, they have attracted wide attention as a novel and improved method to extract vegetation coverage with high efficiency and precision on small spatial scales in agriculture, forestry, surveying, mapping, and other related fields (<xref ref-type="bibr" rid="B42">Watanabe et&#xa0;al., 2017</xref>; <xref ref-type="bibr" rid="B33">Schofield et&#xa0;al., 2019</xref>; <xref ref-type="bibr" rid="B2">Ana et&#xa0;al., 2021</xref>; <xref ref-type="bibr" rid="B13">Guo et&#xa0;al., 2021</xref>; <xref ref-type="bibr" rid="B29">Park et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B27">Mishra et&#xa0;al., 2023</xref>).</p>
<p>Compared with multispectral, hyperspectral, and other sensors, visible light sensors are better options for extracting vegetation coverage via UAV technology owing to their outstanding advantages, such as low cost and being less affected by weather and light (<xref ref-type="bibr" rid="B6">Coy et&#xa0;al., 2016</xref>; <xref ref-type="bibr" rid="B17">Jay et&#xa0;al., 2019</xref>; <xref ref-type="bibr" rid="B31">Ren et&#xa0;al., 2021</xref>). The vegetation index can effectively reflect vegetation vitality and information and is a commonly used method for extracting vegetation coverage (<xref ref-type="bibr" rid="B43">Woebbecke et&#xa0;al., 1995</xref>; <xref ref-type="bibr" rid="B14">Hague et&#xa0;al., 2006</xref>; <xref ref-type="bibr" rid="B30">Rasmussen et&#xa0;al., 2016</xref>; <xref ref-type="bibr" rid="B20">Kim et&#xa0;al., 2018</xref>; <xref ref-type="bibr" rid="B9">Geng et&#xa0;al., 2022</xref>). Various vegetation indices have been developed based on the spectral characteristics of green vegetation in the visible light band, such as the green leaf index (<xref ref-type="bibr" rid="B34">Shane et&#xa0;al., 2021</xref>), green-red vegetation index (<xref ref-type="bibr" rid="B47">Zhang et&#xa0;al., 2019</xref>), and difference-enhanced vegetation index (<xref ref-type="bibr" rid="B50">Zhou et&#xa0;al., 2021</xref>). An increasing number of studies have shown that vegetation coverage can be extracted using a vegetation index derived from UAV-visible images. Therefore, the limited wavelength information of UAV visible-light images must be used to construct a universally applicable and highly accurate vegetation index and effectively extract green vegetation information. Another key aspect of vegetation coverage extraction by vegetation indices is the determination of a suitable threshold, which can be based on threshold segmentation methods (<xref ref-type="bibr" rid="B1">Akash et&#xa0;al., 2019</xref>). 
However, few studies have used this method to determine the thresholds in mining areas, and the effectiveness of vegetation indices in mountainous mining areas has not yet been evaluated.</p>
<p>The Qinling Mountains are an important ecological security barrier in China and provide many ecosystem services, such as climate regulation, water yield, carbon sequestration, and biodiversity preservation (<xref ref-type="bibr" rid="B7">Fu et&#xa0;al., 2022</xref>). Rich gold mineral resources in the Qinling Mountains provide a good foundation for mining activities; however, long-term mining has resulted in serious vegetation destruction (<xref ref-type="bibr" rid="B23">Li et&#xa0;al., 2022</xref>), which has plagued sustainable local development (<xref ref-type="bibr" rid="B16">Huo et&#xa0;al., 2022</xref>). Therefore, a rapid and accurate method for acquisition of mine vegetation cover is required. Currently, research on vegetation coverage extraction based on visible vegetation index focuses mostly on cities, forests, grasslands, and farmlands with well-growing plants (<xref ref-type="bibr" rid="B9">Geng et&#xa0;al., 2022</xref>). However, an optimal vegetation index for extracting vegetation coverage suitable for Qinling gold mining areas with sparse vegetation and complex terrain has not yet been determined. Furthermore, previous studies focused on extraction methods for the current vegetation situation and lacked long-term monitoring. Therefore, an abandoned gold mining area in the Qinling Mountains was selected as the research area, and high spatial resolution visible spectrum images obtained by a UAV were used as the data source. 
The objectives of this study were to (1) compare the performances of nine visible light vegetation indices (RGRI, BGRI, EXG, EXGR, NGRDI, NGBDI, RGBVI, VDVI, and VEG) and two threshold segmentation methods (bimodal histogram method and Otsu&#x2019;s threshold method) in the extraction of vegetation coverage information; (2) select the optimal combination of the vegetation index and threshold segmentation method with high extraction accuracy and wide applicability; and (3) analyze the interannual variation of FVC in the study area using results obtained by the optimal combination. This study provides scientific guidance for rapidly and accurately extracting vegetation coverage and offers technical support for evaluating vegetation restoration in mining areas.</p>
</sec>
<sec id="s2" sec-type="materials|methods">
<label>2</label>
<title>Materials and methods</title>
<sec id="s2_1">
<label>2.1</label>
<title>Study area</title>
<p>The study area is located in the southeastern part of Shangluo City, Shaanxi Province, China (<xref ref-type="fig" rid="f1">
<bold>Figure&#xa0;1</bold>
</xref>). It is between 108&#xb0;34&#x2032;20&#x2032;&#x2032;&#x2013;111&#xb0;1&#x2032;25&#x2032;&#x2032; E and 33&#xb0;2&#x2032;30&#x2032;&#x2032;&#x2013;34&#xb0;24&#x2032;40&#x2032;&#x2032; N. The study area is located in the Qinling Mountains and has a warm, temperate climate. The mean annual temperature is 12.2&#xb0;C, the mean annual precipitation is 804.8&#xa0;mm, and the mean annual sunshine duration is 1947.4&#xa0;h. The soil type is yellow cinnamon. The terrain is high in the northwest and low in the southeast. A gold production company in the research area began operations in 1999 and ceased production after a dam failure in 2006. Even after several years, bare slag still poses a serious threat to human health, and this research area has been listed as a key area for heavy metal prevention and control (<xref ref-type="bibr" rid="B5">Chen et&#xa0;al., 2022</xref>).</p>
<fig id="f1" position="float">
<label>Figure&#xa0;1</label>
<caption>
<p>
<bold>(A)</bold> Geographical location and <bold>(B)</bold> UAV image with a spatial resolution of 0.0436&#xa0;m of the study area.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fevo-11-1171358-g001.tif"/>
</fig>
</sec>
<sec id="s2_2">
<label>2.2</label>
<title>UAV image acquisition and processing</title>
<p>Field and UAV aerial surveys were conducted in August 2019, 2020, and 2021 to monitor the vegetation coverage at the research site in the post-mining area. The UAV flight test was conducted using a DJI Phantom 4 Pro on clear and cloudless days, and RGB-visible images were acquired. The flight parameters are listed in <xref ref-type="table" rid="T1">
<bold>Table&#xa0;1</bold>
</xref>. The automatic cruise mode was used for route planning during the flight. The flight area and route were designed prior to conducting the experiment. The flight covered 0.68 km<sup>2</sup>. Orthoimages of the study area are shown in <xref ref-type="fig" rid="f1">
<bold>Figure&#xa0;1B</bold>
</xref>.</p>
<table-wrap id="T1" position="float">
<label>Table&#xa0;1</label>
<caption>
<p>Flight setting of the UAV and image parameters.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Flight setting</th>
<th valign="middle" align="center">Parameter</th>
<th valign="middle" align="center">Acquired image content</th>
<th valign="middle" align="center">Parameter</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="center">Flight speed</td>
<td valign="middle" align="center">14.1 m/s</td>
<td valign="middle" align="center">Number of original images</td>
<td valign="middle" align="center">300+</td>
</tr>
<tr>
<td valign="middle" align="center">Photo interval</td>
<td valign="middle" align="center">2 s</td>
<td valign="middle" align="center">Picture resolution</td>
<td valign="middle" align="center">72 dpi</td>
</tr>
<tr>
<td valign="middle" align="center">Number of routes</td>
<td valign="middle" align="center">13</td>
<td valign="middle" align="center">Graphic form</td>
<td valign="middle" align="center">JPEG</td>
</tr>
<tr>
<td valign="middle" align="center">Number of waypoints</td>
<td valign="middle" align="center">26</td>
<td valign="middle" align="center">Shutter speed</td>
<td valign="middle" align="center">1/1600</td>
</tr>
<tr>
<td valign="middle" align="center">Course overlap rate</td>
<td valign="middle" align="center">80%</td>
<td valign="middle" align="center">ISO</td>
<td valign="middle" align="center">800</td>
</tr>
<tr>
<td valign="middle" align="center">Side overlap rate</td>
<td valign="middle" align="center">60%</td>
<td valign="middle" align="left"/>
<td valign="middle" align="left"/>
</tr>
<tr>
<td valign="middle" align="center">Flight altitude</td>
<td valign="middle" align="center">140 m</td>
<td valign="middle" align="left"/>
<td valign="middle" align="left"/>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s2_3">
<label>2.3</label>
<title>Calculation of visible light vegetation index</title>
<p>The basic principle behind the construction of a vegetation index is that vegetation absorbs and reflects light of different wavelengths. The corresponding vegetation index can be obtained by combining different bands of remote sensing images to enhance vegetation (<xref ref-type="bibr" rid="B11">Guilherme et&#xa0;al., 2018</xref>). The visible vegetation index is mainly constructed based on the red, green, and blue bands of the image because healthy green vegetation has a strong reflection in the green band and weak reflections in the red and blue bands. The nine commonly used visible light vegetation indices are listed in <xref ref-type="table" rid="T2">
<bold>Table&#xa0;2</bold>
</xref>.</p>
<table-wrap id="T2" position="float">
<label>Table&#xa0;2</label>
<caption>
<p>Nine commonly used visible light vegetation indices considered in this study and the calculation formulas based on the visible spectrum.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Visible vegetation index</th>
<th valign="middle" align="center">Full name</th>
<th valign="middle" align="center">Calculation formula</th>
<th valign="middle" align="center">Reference</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="center">RGRI</td>
<td valign="middle" align="center">Red&#x2013;green ratio index</td>
<td valign="middle" align="center">R/G</td>
<td valign="middle" align="center">(<xref ref-type="bibr" rid="B39">Verrelst et&#xa0;al., 2008</xref>)</td>
</tr>
<tr>
<td valign="middle" align="center">BGRI</td>
<td valign="middle" align="center">Blue&#x2013;green ratio index</td>
<td valign="middle" align="center">B/G</td>
<td valign="middle" align="center">(<xref ref-type="bibr" rid="B32">Romina et&#xa0;al., 2010</xref>)</td>
</tr>
<tr>
<td valign="middle" align="center">EXG</td>
<td valign="middle" align="center">Excess green index</td>
<td valign="middle" align="center">2g&#x2212;r&#x2212;b</td>
<td valign="middle" align="center">(<xref ref-type="bibr" rid="B20">Kim et&#xa0;al., 2018</xref>)</td>
</tr>
<tr>
<td valign="middle" align="center">EXGR</td>
<td valign="middle" align="center">Excess green minus red index</td>
<td valign="middle" align="center">EXG&#x2212;1.4r&#x2212;g</td>
<td valign="middle" align="center">(<xref ref-type="bibr" rid="B38">Sun et&#xa0;al., 2014</xref>)</td>
</tr>
<tr>
<td valign="middle" align="center">NGRDI</td>
<td valign="middle" align="center">Normalized green&#x2013;red difference index</td>
<td valign="middle" align="center">(G&#x2212;R)/(G+R)</td>
<td valign="middle" align="center">(<xref ref-type="bibr" rid="B10">Gitelson et&#xa0;al., 2002</xref>)</td>
</tr>
<tr>
<td valign="middle" align="center">NGBDI</td>
<td valign="middle" align="center">Normalized green&#x2013;blue difference index</td>
<td valign="middle" align="center">(G&#x2212;B)/(G+B)</td>
<td valign="middle" align="center">(<xref ref-type="bibr" rid="B15">Hunt et&#xa0;al., 2005</xref>)</td>
</tr>
<tr>
<td valign="middle" align="center">RGBVI</td>
<td valign="middle" align="center">Red&#x2013;green&#x2013;blue vegetation index</td>
<td valign="middle" align="center">(G<sup>2</sup>&#x2212;B&#xd7;R)/(G<sup>2</sup>+B&#xd7;R)</td>
<td valign="middle" align="center">(<xref ref-type="bibr" rid="B19">Juliane et&#xa0;al., 2015</xref>)</td>
</tr>
<tr>
<td valign="middle" align="center">VDVI</td>
<td valign="middle" align="center">Visible-band difference vegetation index</td>
<td valign="middle" align="center">(2G&#x2212;R&#x2212;B)/(2G+R+B)</td>
<td valign="middle" align="center">(<xref ref-type="bibr" rid="B41">Wang et&#xa0;al., 2015</xref>)</td>
</tr>
<tr>
<td valign="middle" align="center">VEG</td>
<td valign="middle" align="center">Vegetative index</td>
<td valign="middle" align="center">g/r<sup>0.67</sup>b<sup>0.33</sup>
</td>
<td valign="middle" align="center">(<xref ref-type="bibr" rid="B9">Geng et&#xa0;al., 2022</xref>)</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s2_4">
<label>2.4</label>
<title>Vegetation information extraction based on threshold</title>
<p>The vegetation index threshold method is effective for discriminating between vegetation and non-vegetation information in an image. Three steps are required to extract the vegetation coverage using the visible light vegetation index. The first step is to calculate the vegetation index, the second is to set an appropriate threshold, and the final is to separate the vegetation and non-vegetation parts. The accuracy of vegetation coverage extraction largely depends on threshold selection (<xref ref-type="bibr" rid="B41">Wang et&#xa0;al., 2015</xref>). Two commonly used methods, the bimodal histogram threshold method and Otsu&#x2019;s threshold method, were applied to determine the threshold for each vegetation index.</p>
<sec id="s2_4_1">
<label>2.4.1</label>
<title>Bimodal histogram method</title>
<p>A bimodal histogram is an image with two obvious peaks in a gray histogram (<xref ref-type="bibr" rid="B50">Zhou et&#xa0;al., 2021</xref>). These two wave peaks correspond to the internal and external target points. The wave trough between the two wave peaks corresponds to the target point near the edge of the object. Typically, the value at the wave trough is selected as the threshold. The calculation process of the bimodal histogram used in this study is as follows. (1) Calculate the average gray value (avg) and standard deviation of the pixels. (2) Considering the average pixel value as the dividing point, find the positions of the maximum values of the left (small peak) and right (large peak) parts. (3) If the two peak positions are close (within the standard deviation range), then one of the two peaks of the histogram is very low; hence, another low peak position must be found; otherwise, proceed to step (7). (4) Determine the position of the pixel gray median point (midpoint). (5) If midpoint&gt;avg, then the small peak is on the left side of the large peak (lower gray level); otherwise, the small peak is on the right side of the large peak (higher gray level), and the position of the dividing point should be adjusted accordingly. (6) Re-find the positions of the large and small peaks. (7) The wave trough of the two peak positions is considered the required threshold (<xref ref-type="bibr" rid="B24">Liang, 2002</xref>).</p>
</sec>
<sec id="s2_4_2">
<label>2.4.2</label>
<title>Otsu&#x2019;s threshold method</title>
<p>Otsu&#x2019;s threshold method, also known as the maximum between-cluster variance method, is a global threshold selection method (<xref ref-type="bibr" rid="B28">Otsu, 2007</xref>). This method divides an image into background and target images based on a threshold. When the optimal threshold is considered, the variance between the background and target and the difference between the two parts of the image are the largest; that is, the optimal threshold is determined based on the maximum between-cluster variance. The calculation process of Otsu&#x2019;s method is as follows. (1) Identify the highest gray level in the image. (2) Take each gray level as a threshold. (3) Calculate the number of pixels and the average value of the two categories segmented by the threshold. (4) Calculate the variance between the two clusters. (5) Determine the threshold of the maximum variance (<xref ref-type="bibr" rid="B44">Xu et&#xa0;al., 2022</xref>).</p>
</sec>
</sec>
<sec id="s2_5">
<label>2.5</label>
<title>Extraction accuracy evaluation</title>
<p>Accuracy, Precision, and Recall were calculated as follows to evaluate the classification accuracy (<xref ref-type="bibr" rid="B35">Shukla and Jain, 2020</xref>):</p>
<disp-formula>
<mml:math display="block" id="M1">
<mml:mrow>
<mml:mtext>Accuracy</mml:mtext>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mtext>TP</mml:mtext>
<mml:mo>+</mml:mo>
<mml:mtext>TN</mml:mtext>
</mml:mrow>
<mml:mrow>
<mml:mtext>TP</mml:mtext>
<mml:mo>+</mml:mo>
<mml:mtext>TN</mml:mtext>
<mml:mo>+</mml:mo>
<mml:mtext>FP</mml:mtext>
<mml:mo>+</mml:mo>
<mml:mtext>FN</mml:mtext>
</mml:mrow>
</mml:mfrac>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mi>T</mml:mi>
<mml:mi>P</mml:mi>
<mml:mo>+</mml:mo>
<mml:mi>T</mml:mi>
<mml:mi>N</mml:mi>
</mml:mrow>
<mml:mrow>
<mml:mi>P</mml:mi>
<mml:mo>+</mml:mo>
<mml:mi>N</mml:mi>
</mml:mrow>
</mml:mfrac>
</mml:mrow>
</mml:math>
</disp-formula>
<disp-formula>
<mml:math display="block" id="M2">
<mml:mrow>
<mml:mtext>Precision</mml:mtext>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mi>T</mml:mi>
<mml:mi>P</mml:mi>
</mml:mrow>
<mml:mrow>
<mml:mi>T</mml:mi>
<mml:mi>P</mml:mi>
<mml:mo>+</mml:mo>
<mml:mi>F</mml:mi>
<mml:mi>P</mml:mi>
</mml:mrow>
</mml:mfrac>
</mml:mrow>
</mml:math>
</disp-formula>
<disp-formula>
<mml:math display="block" id="M3">
<mml:mrow>
<mml:mtext>Recall</mml:mtext>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mi>T</mml:mi>
<mml:mi>P</mml:mi>
</mml:mrow>
<mml:mrow>
<mml:mi>T</mml:mi>
<mml:mi>P</mml:mi>
<mml:mo>+</mml:mo>
<mml:mi>F</mml:mi>
<mml:mi>N</mml:mi>
</mml:mrow>
</mml:mfrac>
</mml:mrow>
</mml:math>
</disp-formula>
<p>where TP, which stands for &#x201c;true positive,&#x201d; is the object that is correctly classified as vegetation among all the extracted objects; TN, which stands for &#x201c;true negative,&#x201d; is the object that is correctly classified as non-vegetation among all the extracted objects; FP, which stands for &#x201c;false positive,&#x201d; is the object that is misclassified as vegetation among all extracted objects; and FN, which stands for &#x201c;false negative,&#x201d; is the object that is misclassified as non-vegetation among all the extracted objects.</p>
</sec>
<sec id="s2_6">
<label>2.6</label>
<title>Data analysis</title>
<p>The UAV images were converted into orthoimages using DJI Terra v.3.3 software developed by DJI (Shenzhen, China). Supervised classification, calculation of vegetation indices, threshold segmentation, and extraction of vegetation coverage were performed using ENVI 5.3 software.</p>
</sec>
</sec>
<sec id="s3" sec-type="results">
<label>3</label>
<title>Results</title>
<sec id="s3_1">
<label>3.1</label>
<title>Calculation results of vegetation index</title>
<sec id="s3_1_1">
<label>3.1.1</label>
<title>Visual interpretation and supervision classification</title>
<p>The performance of the visible vegetation indices in extracting vegetation was evaluated by comparison with the results of the maximum likelihood classification method (<xref ref-type="fig" rid="f2">
<bold>Figure&#xa0;2</bold>
</xref>). Fifty regions of interest (ROIs) with non-vegetation and fifty ROIs with vegetation were uniformly selected to verify the classification accuracy. The overall accuracies of the typical and validation quadrats were 99.99% and 99.39%, respectively.</p>
<fig id="f2" position="float">
<label>Figure&#xa0;2</label>
<caption>
<p>Original images of <bold>(A1)</bold> typical and <bold>(B1)</bold> validation quadrats and classification results of the <bold>(A2)</bold> typical and <bold>(B2)</bold> verification quadrats.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fevo-11-1171358-g002.tif"/>
</fig>
</sec>
<sec id="s3_1_2">
<label>3.1.2</label>
<title>Vegetation index calculation results</title>
<p>The vegetation indices derived from the gray image of a typical quadrat are shown in <xref ref-type="fig" rid="f3">
<bold>Figure&#xa0;3</bold>
</xref>. Most visible light vegetation indices can be used to effectively distinguish vegetation from non-vegetation information; however, the extraction effects are different. Some vegetation indices, such as EXG, EXGR, RGBVI, VEG, and VDVI, can clearly discriminate between vegetation and non-vegetation areas; however, BGRI, RGRI, NGBDI, and NGRDI cannot clearly distinguish between the two and resulted in some misclassifications, indicating poor extraction performance. Furthermore, to analyze the pixel value ranges of vegetation and non-vegetation in the gray image of each band and vegetation index, 75 representative ROIs were randomly selected to count the pixel eigenvalues of each visible band and vegetation index (<xref ref-type="table" rid="T3">
<bold>Table&#xa0;3</bold>
</xref>). The results indicated that the reflectance in the green band of the vegetation was significantly higher than that of the non-vegetation. In the BGRI, RGRI, and NGRDI gray images, the pixel values of vegetation and non-vegetation overlapped over a large range; therefore, vegetation and non-vegetation areas overlapped. In addition, the calculation formulas for RGRI, NGBDI, and NGRDI only used blue + green or red + green bands, indicating that the red, green, and blue bands should be combined when calculating the visible light vegetation index.</p>
<fig id="f3" position="float">
<label>Figure&#xa0;3</label>
<caption>
<p>Calculation results of nine vegetation indices for the typical quadrat.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fevo-11-1171358-g003.tif"/>
</fig>
<table-wrap id="T3" position="float">
<label>Table&#xa0;3</label>
<caption>
<p>Differences in pixel values of visible bands and vegetation indices of the typical quadrat.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" rowspan="2" align="center">Indicators</th>
<th valign="middle" colspan="4" align="center">Vegetation</th>
<th valign="middle" colspan="4" align="center">Non-vegetation</th>
<th valign="middle" rowspan="2" align="center">ANOVA<break/>P value</th>
</tr>
<tr>
<th valign="middle" align="center">Min</th>
<th valign="middle" align="center">Max</th>
<th valign="middle" align="center">Mean</th>
<th valign="middle" align="center">Standard<break/>deviation</th>
<th valign="middle" align="center">Min</th>
<th valign="middle" align="center">Max</th>
<th valign="middle" align="center">Mean</th>
<th valign="middle" align="center">Standard<break/>deviation</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="center">Red band</td>
<td valign="middle" align="center">30.00</td>
<td valign="middle" align="center">194.00</td>
<td valign="middle" align="center">96.57</td>
<td valign="middle" align="center">23.89</td>
<td valign="middle" align="center">68.00</td>
<td valign="middle" align="center">254.00</td>
<td valign="middle" align="center">177.23</td>
<td valign="middle" align="center">43.07</td>
<td valign="middle" align="center">0.054</td>
</tr>
<tr>
<td valign="middle" align="center">Green band</td>
<td valign="middle" align="center">66.00</td>
<td valign="middle" align="center">211.00</td>
<td valign="middle" align="center">132.26</td>
<td valign="middle" align="center">20.03</td>
<td valign="middle" align="center">78.00</td>
<td valign="middle" align="center">249.00</td>
<td valign="middle" align="center">176.73</td>
<td valign="middle" align="center">38.20</td>
<td valign="middle" align="center">0.033</td>
</tr>
<tr>
<td valign="middle" align="center">Blue band</td>
<td valign="middle" align="center">27.00</td>
<td valign="middle" align="center">163.00</td>
<td valign="middle" align="center">84.02</td>
<td valign="middle" align="center">17.51</td>
<td valign="middle" align="center">86.00</td>
<td valign="middle" align="center">249.00</td>
<td valign="middle" align="center">174.07</td>
<td valign="middle" align="center">29.24</td>
<td valign="middle" align="center">0.048</td>
</tr>
<tr>
<td valign="middle" align="center">BGRI</td>
<td valign="middle" align="center">0.32</td>
<td valign="middle" align="center">0.85</td>
<td valign="middle" align="center">0.63</td>
<td valign="middle" align="center">0.08</td>
<td valign="middle" align="center">0.85</td>
<td valign="middle" align="center">1.20</td>
<td valign="middle" align="center">1.00</td>
<td valign="middle" align="center">0.07</td>
<td valign="middle" align="center">0.007</td>
</tr>
<tr>
<td valign="middle" align="center">RGRI</td>
<td valign="middle" align="center">0.43</td>
<td valign="middle" align="center">0.96</td>
<td valign="middle" align="center">0.73</td>
<td valign="middle" align="center">0.10</td>
<td valign="middle" align="center">0.86</td>
<td valign="middle" align="center">1.13</td>
<td valign="middle" align="center">1.00</td>
<td valign="middle" align="center">0.04</td>
<td valign="middle" align="center">0.031</td>
</tr>
<tr>
<td valign="middle" align="center">EXG</td>
<td valign="middle" align="center">0.07</td>
<td valign="middle" align="center">0.70</td>
<td valign="middle" align="center">0.28</td>
<td valign="middle" align="center">0.10</td>
<td valign="middle" align="center">&#x2212;0.03</td>
<td valign="middle" align="center">0.05</td>
<td valign="middle" align="center">0.00</td>
<td valign="middle" align="center">0.01</td>
<td valign="middle" align="center">0.001</td>
</tr>
<tr>
<td valign="middle" align="center">EXGR</td>
<td valign="middle" align="center">&#x2212;0.76</td>
<td valign="middle" align="center">&#x2212;0.22</td>
<td valign="middle" align="center">&#x2212;0.57</td>
<td valign="middle" align="center">0.09</td>
<td valign="middle" align="center">&#x2212;0.86</td>
<td valign="middle" align="center">&#x2212;0.73</td>
<td valign="middle" align="center">&#x2212;0.80</td>
<td valign="middle" align="center">0.02</td>
<td valign="middle" align="center">0.012</td>
</tr>
<tr>
<td valign="middle" align="center">NGRDI</td>
<td valign="middle" align="center">0.02</td>
<td valign="middle" align="center">0.40</td>
<td valign="middle" align="center">0.16</td>
<td valign="middle" align="center">0.07</td>
<td valign="middle" align="center">&#x2212;0.06</td>
<td valign="middle" align="center">0.08</td>
<td valign="middle" align="center">0.00</td>
<td valign="middle" align="center">0.02</td>
<td valign="middle" align="center">0.151</td>
</tr>
<tr>
<td valign="middle" align="center">NGBDI</td>
<td valign="middle" align="center">0.08</td>
<td valign="middle" align="center">0.51</td>
<td valign="middle" align="center">0.23</td>
<td valign="middle" align="center">0.06</td>
<td valign="middle" align="center">&#x2212;0.09</td>
<td valign="middle" align="center">0.08</td>
<td valign="middle" align="center">0.00</td>
<td valign="middle" align="center">0.04</td>
<td valign="middle" align="center">0.023</td>
</tr>
<tr>
<td valign="middle" align="center">RGBVI</td>
<td valign="middle" align="center">0.11</td>
<td valign="middle" align="center">0.75</td>
<td valign="middle" align="center">0.37</td>
<td valign="middle" align="center">0.11</td>
<td valign="middle" align="center">&#x2212;0.05</td>
<td valign="middle" align="center">0.07</td>
<td valign="middle" align="center">0.00</td>
<td valign="middle" align="center">0.02</td>
<td valign="middle" align="center">0.017</td>
</tr>
<tr>
<td valign="middle" align="center">VDVI</td>
<td valign="middle" align="center">0.05</td>
<td valign="middle" align="center">0.45</td>
<td valign="middle" align="center">0.19</td>
<td valign="middle" align="center">0.06</td>
<td valign="middle" align="center">&#x2212;0.03</td>
<td valign="middle" align="center">0.03</td>
<td valign="middle" align="center">0.00</td>
<td valign="middle" align="center">0.01</td>
<td valign="middle" align="center">0.001</td>
</tr>
<tr>
<td valign="middle" align="center">VEG</td>
<td valign="middle" align="center">1.10</td>
<td valign="middle" align="center">2.51</td>
<td valign="middle" align="center">1.47</td>
<td valign="middle" align="center">0.20</td>
<td valign="middle" align="center">0.95</td>
<td valign="middle" align="center">1.06</td>
<td valign="middle" align="center">1.01</td>
<td valign="middle" align="center">0.01</td>
<td valign="middle" align="center">0.002</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s3_1_3">
<label>3.1.3</label>
<title>Threshold segmentation and vegetation index selection</title>
<p>The bimodal histogram threshold and Otsu&#x2019;s threshold methods were employed to determine the threshold of each visible light vegetation index grey image. The vegetation and non-vegetation areas were discriminated based on the thresholds, and the extraction accuracy was verified by comparison with the supervised classification results. The threshold segmentation results are shown in <xref ref-type="fig" rid="f4">
<bold>Figures&#xa0;4</bold>
</xref>, <xref ref-type="fig" rid="f5">
<bold>5</bold>
</xref>. By jointly viewing the orthoimages and supervised classification results (<xref ref-type="fig" rid="f2">
<bold>Figure&#xa0;2A1, A2</bold>
</xref>), we found that the extraction results of the bimodal histogram threshold method had fewer misclassifications, and the segmentation effect was obviously better than that of Otsu&#x2019;s threshold method. In the segmentation results of the bimodal histogram threshold method, RGRI, EXGR, NGRDI, and VEG misclassified vegetation as non-vegetation, whereas BGRI and EXG misclassified non-vegetation as vegetation, indicating relatively poor extraction accuracy. A quantitative accuracy assessment is required to accurately evaluate the effects of the segmentation results. The Accuracy, Precision and Recall of the threshold segmentation results were calculated based on the maximum likelihood classification results (<xref ref-type="table" rid="T4">
<bold>Table&#xa0;4</bold>
</xref>). Overall, the classification accuracy of the bimodal histogram method was higher than that of Otsu&#x2019;s threshold method. Among the visible light vegetation indices, EXG, based on the bimodal histogram method, had the highest classification accuracy, with an Accuracy of 98.264%, a Precision of 99.811% and 97.572% for vegetation and non-vegetation, respectively, and a Recall of 99.913% and 94.847% for vegetation and non-vegetation, respectively.</p>
<fig id="f4" position="float">
<label>Figure&#xa0;4</label>
<caption>
<p>Segmentation results of the bimodal histogram threshold method.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fevo-11-1171358-g004.tif"/>
</fig>
<fig id="f5" position="float">
<label>Figure&#xa0;5</label>
<caption>
<p>Segmentation results of Otsu&#x2019;s threshold method.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fevo-11-1171358-g005.tif"/>
</fig>
<table-wrap id="T4" position="float">
<label>Table&#xa0;4</label>
<caption>
<p>Accuracy evaluation of the typical quadrat.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" rowspan="2" colspan="2" align="center">Segmentation</th>
<th valign="middle" rowspan="2" align="center">Accuracy (%)</th>
<th valign="middle" colspan="2" align="center">Precision (%)</th>
<th valign="middle" colspan="2" align="center">Recall (%)</th>
</tr>
<tr>
<th valign="middle" align="center">Vegetation</th>
<th valign="middle" align="center">Non-vegetation</th>
<th valign="middle" align="center">Vegetation</th>
<th valign="middle" align="center">Non-vegetation</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" rowspan="2" align="center">BGRI</td>
<td valign="middle" align="center">Otsu&#x2019;s</td>
<td valign="middle" align="center">92.741</td>
<td valign="middle" align="center">99.330</td>
<td valign="middle" align="center">89.790</td>
<td valign="middle" align="center">99.667</td>
<td valign="middle" align="center">81.332</td>
</tr>
<tr>
<td valign="middle" align="center">Bimodal histogram</td>
<td valign="middle" align="center">96.275</td>
<td valign="middle" align="center">96.233</td>
<td valign="middle" align="center">96.294</td>
<td valign="middle" align="center">98.278</td>
<td valign="middle" align="center">0.920</td>
</tr>
<tr>
<td valign="middle" rowspan="2" align="center">EXG</td>
<td valign="middle" align="center">Otsu&#x2019;s</td>
<td valign="middle" align="center">81.656</td>
<td valign="middle" align="center">100.000</td>
<td valign="middle" align="center">73.441</td>
<td valign="middle" align="center">99.999</td>
<td valign="middle" align="center">62.772</td>
</tr>
<tr>
<td valign="middle" align="center">Bimodal histogram</td>
<td valign="middle" align="center">98.264</td>
<td valign="middle" align="center">99.811</td>
<td valign="middle" align="center">97.572</td>
<td valign="middle" align="center">99.913</td>
<td valign="middle" align="center">94.847</td>
</tr>
<tr>
<td valign="middle" rowspan="2" align="center">EXGR</td>
<td valign="middle" align="center">Otsu&#x2019;s</td>
<td valign="middle" align="center">75.787</td>
<td valign="middle" align="center">99.998</td>
<td valign="middle" align="center">64.956</td>
<td valign="middle" align="center">99.998</td>
<td valign="middle" align="center">56.086</td>
</tr>
<tr>
<td valign="middle" align="center">Bimodal histogram</td>
<td valign="middle" align="center">82.903</td>
<td valign="middle" align="center">99.999</td>
<td valign="middle" align="center">75.315</td>
<td valign="middle" align="center">99.998</td>
<td valign="middle" align="center">66.471</td>
</tr>
<tr>
<td valign="middle" rowspan="2" align="center">MGRVI</td>
<td valign="middle" align="center">Otsu&#x2019;s</td>
<td valign="middle" align="center">75.024</td>
<td valign="middle" align="center">99.996</td>
<td valign="middle" align="center">64.967</td>
<td valign="middle" align="center">99.997</td>
<td valign="middle" align="center">56.106</td>
</tr>
<tr>
<td valign="middle" align="center">Bimodal histogram</td>
<td valign="middle" align="center">82.524</td>
<td valign="middle" align="center">99.811</td>
<td valign="middle" align="center">74.389</td>
<td valign="middle" align="center">99.886</td>
<td valign="middle" align="center">63.573</td>
</tr>
<tr>
<td valign="middle" rowspan="2" align="center">NGBDI</td>
<td valign="middle" align="center">Otsu&#x2019;s</td>
<td valign="middle" align="center">91.216</td>
<td valign="middle" align="center">99.627</td>
<td valign="middle" align="center">87.449</td>
<td valign="middle" align="center">99.809</td>
<td valign="middle" align="center">78.045</td>
</tr>
<tr>
<td valign="middle" align="center">Bimodal histogram</td>
<td valign="middle" align="center">96.421</td>
<td valign="middle" align="center">95.192</td>
<td valign="middle" align="center">96.979</td>
<td valign="middle" align="center">97.828</td>
<td valign="middle" align="center">93.384</td>
</tr>
<tr>
<td valign="middle" rowspan="2" align="center">NGRDI</td>
<td valign="middle" align="center">Otsu&#x2019;s</td>
<td valign="middle" align="center">75.184</td>
<td valign="middle" align="center">99.997</td>
<td valign="middle" align="center">64.006</td>
<td valign="middle" align="center">99.998</td>
<td valign="middle" align="center">55.439</td>
</tr>
<tr>
<td valign="middle" align="center">Bimodal histogram</td>
<td valign="middle" align="center">80.634</td>
<td valign="middle" align="center">99.924</td>
<td valign="middle" align="center">71.996</td>
<td valign="middle" align="center">99.953</td>
<td valign="middle" align="center">61.507</td>
</tr>
<tr>
<td valign="middle" rowspan="2" align="center">RGBVI</td>
<td valign="middle" align="center">Otsu&#x2019;s</td>
<td valign="middle" align="center">84.722</td>
<td valign="middle" align="center">99.999</td>
<td valign="middle" align="center">77.880</td>
<td valign="middle" align="center">99.999</td>
<td valign="middle" align="center">66.936</td>
</tr>
<tr>
<td valign="middle" align="center">Bimodal histogram</td>
<td valign="middle" align="center">97.824</td>
<td valign="middle" align="center">99.795</td>
<td valign="middle" align="center">96.941</td>
<td valign="middle" align="center">99.905</td>
<td valign="middle" align="center">93.595</td>
</tr>
<tr>
<td valign="middle" rowspan="2" align="center">RGRI</td>
<td valign="middle" align="center">Otsu&#x2019;s</td>
<td valign="middle" align="center">76.536</td>
<td valign="middle" align="center">99.993</td>
<td valign="middle" align="center">66.031</td>
<td valign="middle" align="center">99.995</td>
<td valign="middle" align="center">56.864</td>
</tr>
<tr>
<td valign="middle" align="center">Bimodal histogram</td>
<td valign="middle" align="center">81.725</td>
<td valign="middle" align="center">99.866</td>
<td valign="middle" align="center">73.602</td>
<td valign="middle" align="center">99.918</td>
<td valign="middle" align="center">62.882</td>
</tr>
<tr>
<td valign="middle" rowspan="2" align="center">VDVI</td>
<td valign="middle" align="center">Otsu&#x2019;s</td>
<td valign="middle" align="center">83.005</td>
<td valign="middle" align="center">99.999</td>
<td valign="middle" align="center">75.395</td>
<td valign="middle" align="center">99.999</td>
<td valign="middle" align="center">64.539</td>
</tr>
<tr>
<td valign="middle" align="center">Bimodal histogram</td>
<td valign="middle" align="center">97.993</td>
<td valign="middle" align="center">99.895</td>
<td valign="middle" align="center">97.141</td>
<td valign="middle" align="center">99.952</td>
<td valign="middle" align="center">93.994</td>
</tr>
<tr>
<td valign="middle" rowspan="2" align="center">VEG</td>
<td valign="middle" align="center">Otsu&#x2019;s</td>
<td valign="middle" align="center">77.195</td>
<td valign="middle" align="center">99.997</td>
<td valign="middle" align="center">66.982</td>
<td valign="middle" align="center">99.996</td>
<td valign="middle" align="center">57.560</td>
</tr>
<tr>
<td valign="middle" align="center">Bimodal histogram</td>
<td valign="middle" align="center">91.142</td>
<td valign="middle" align="center">99.999</td>
<td valign="middle" align="center">87.175</td>
<td valign="middle" align="center">99.999</td>
<td valign="middle" align="center">77.737</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
</sec>
<sec id="s3_2">
<label>3.2</label>
<title>Suitability performance test</title>
<p>The reliability and applicability of EXG, VDVI, and RGBVI for extracting vegetation coverage were verified based on the supervised classification results of the verification quadrat. The vegetation extracted based on EXG, VDVI, RGBVI, and the bimodal histogram threshold method (the thresholds were 0.047603, 0.041258, and 0.075669, respectively) are shown in <xref ref-type="fig" rid="f6">
<bold>Figure&#xa0;6</bold>
</xref>. The results of vegetation coverage extraction were compared with those of the maximum likelihood classification (<xref ref-type="table" rid="T5">
<bold>Table&#xa0;5</bold>
</xref>). EXG combined with the bimodal histogram method still had the highest accuracy in extracting vegetation coverage, followed by VDVI and RGBVI, suggesting that EXG had the highest precision in extracting vegetation information and could be used to estimate vegetation coverage in mining areas.</p>
<fig id="f6" position="float">
<label>Figure&#xa0;6</label>
<caption>
<p>Verification results of the verification quadrat.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fevo-11-1171358-g006.tif"/>
</fig>
<table-wrap id="T5" position="float">
<label>Table&#xa0;5</label>
<caption>
<p>Accuracy evaluation of the verification quadrat.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" rowspan="2" align="center">Segmentation</th>
<th valign="middle" rowspan="2" align="center">Accuracy (%)</th>
<th valign="middle" colspan="2" align="center">Precision (%)</th>
<th valign="middle" colspan="2" align="center">Recall (%)</th>
</tr>
<tr>
<th valign="middle" align="center">Vegetation</th>
<th valign="middle" align="center">Non-vegetation</th>
<th valign="middle" align="center">Vegetation</th>
<th valign="middle" align="center">Non-vegetation</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="center">EXG</td>
<td valign="middle" align="center">95.073</td>
<td valign="middle" align="center">99.967</td>
<td valign="middle" align="center">91.114</td>
<td valign="middle" align="center">99.989</td>
<td valign="middle" align="center">77.046</td>
</tr>
<tr>
<td valign="middle" align="center">RGBVI</td>
<td valign="middle" align="center">91.421</td>
<td valign="middle" align="center">99.999</td>
<td valign="middle" align="center">88.861</td>
<td valign="middle" align="center">99.999</td>
<td valign="middle" align="center">72.816</td>
</tr>
<tr>
<td valign="middle" align="center">VDVI</td>
<td valign="middle" align="center">93.107</td>
<td valign="middle" align="center">99.928</td>
<td valign="middle" align="center">91.072</td>
<td valign="middle" align="center">99.976</td>
<td valign="middle" align="center">76.957</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s3_3">
<label>3.3</label>
<title>Vegetation coverage assessment</title>
<p>According to the above results, EXG combined with the bimodal histogram threshold method was used to estimate vegetation coverage in 2019, 2020, and 2021 (the thresholds were 0.07848, 0.122353, and 0.125108, respectively). The extraction results were statistically classified as follows: vegetation coverage of 0&#x2013;0.05 was considered a zero-coverage area, 0.05&#x2013;0.2 was a low vegetation coverage area, 0.2&#x2013;0.4 was a low&#x2013;moderate vegetation coverage area, 0.4&#x2013;0.6 was a moderate vegetation coverage area, 0.6&#x2013;0.8 was a moderate&#x2013;high vegetation coverage area, and 0.8&#x2013;1 was a high vegetation coverage area (<xref ref-type="bibr" rid="B48">Zhao et&#xa0;al., 2022</xref>). <xref ref-type="fig" rid="f7">
<bold>Figure&#xa0;7</bold>
</xref> shows that EXG can clearly discriminate between vegetation and non-vegetation areas. From 2019 to 2021, the non-vegetation area decreased, and the proportion of moderate&#x2013;high and high vegetation coverage areas increased significantly. The average FVC values in 2019, 2020, and 2021 were 31.47%, 34.08%, and 42.77%, respectively, indicating that the FVC in the mining area increased. The results suggest that the effect of vegetation restoration was remarkable, and the quality of the ecological environment improved. However, most areas in the post-mining area had low, low&#x2013;moderate, and moderate vegetation coverage, and vegetation restoration requires further strengthening.</p>
<fig id="f7" position="float">
<label>Figure&#xa0;7</label>
<caption>
<p>Estimated results of vegetation coverage based on EXG combined with the bimodal histogram threshold method in <bold>(A)</bold> 2019, <bold>(B) </bold>2020, and <bold>(C)</bold> 2021. <bold>(D)</bold> Inter-annual variation of vegetation coverage from 2019 to 2021.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fevo-11-1171358-g007.tif"/>
</fig>
</sec>
</sec>
<sec id="s4" sec-type="discussion">
<label>4</label>
<title>Discussion</title>
<sec id="s4_1">
<label>4.1</label>
<title>Extraction accuracy of vegetation coverage</title>
<p>The accuracy of vegetation coverage extraction was related to both the vegetation index and threshold segmentation method. In this study, EXG exhibited the highest extraction accuracy, followed by VDVI and RGBVI, which is consistent with the results of <xref ref-type="bibr" rid="B41">Wang et&#xa0;al. (2015)</xref> and <xref ref-type="bibr" rid="B4">Chen and Deng (2019)</xref>. The calculation formulas for EXG, VDVI, and RGBVI show the reflectance characteristics of vegetation in the visible bands, which effectively increase the sensitivity of vegetation to green bands and make full use of the information in the red, green, and blue bands. Currently, the bimodal histogram threshold and Otsu&#x2019;s threshold methods are widely used for threshold segmentation. In this study, the results of the threshold methods for vegetation coverage extraction suggested that the accuracy of the bimodal histogram method was significantly better than that of Otsu&#x2019;s threshold method (<xref ref-type="fig" rid="f5">
<bold>Figure&#xa0;5</bold>
</xref>), reaffirming the results of <xref ref-type="bibr" rid="B49">Zhao et&#xa0;al. (2019)</xref>. Using the bimodal histogram method, the accuracies of RGRI, EXGR, and NGRDI were relatively low, which may be related to the histogram characteristics. As shown in the histogram of each vegetation index (<xref ref-type="fig" rid="f8">
<bold>Figure&#xa0;8</bold>
</xref>), EXG, VDVI, and RGBVI showed similar changes and obvious bimodal characteristics, whereas the histograms of RGRI, EXGR, and NGRDI had no obvious bimodal characteristics. Therefore, the accuracy of vegetation coverage extraction varied greatly.</p>
<fig id="f8" position="float">
<label>Figure&#xa0;8</label>
<caption>
<p>Statistical histogram of the nine vegetation indices considered in this study.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fevo-11-1171358-g008.tif"/>
</fig>
</sec>
<sec id="s4_2">
<label>4.2</label>
<title>Characteristics of UAV visible vegetation indices</title>
<p>Satellite remote sensing images have advantages, such as large image areas and multiple bands (<xref ref-type="bibr" rid="B45">Xu et&#xa0;al., 2020</xref>; <xref ref-type="bibr" rid="B12">Guo and Guo, 2021</xref>). However, owing to the relatively coarse spatial resolution, the interpretation accuracy is relatively limited, and the temporal resolution often cannot meet the real-time requirements of vegetation monitoring on a small spatial scale, such as in mining areas. With the popularization of UAV technology, UAV images have compensated for the deficiencies in satellite remote sensing images in terms of spatial and temporal resolution. UAVs provide a new data source for the acquisition of vegetation coverage information in mining areas and offer new approaches for monitoring vegetation growth and recovery in mining areas (<xref ref-type="bibr" rid="B37">Sun et&#xa0;al., 2021</xref>). The results of this study indicate that vegetation coverage data can be accurately extracted from UAV images. As an unsupervised classification method, the visible light vegetation index can be used to extract vegetation coverage quickly and accurately without manual visual discrimination of vegetation areas or non-vegetation areas. Two major advantages are commonly associated with using visible spectrum images for extracting vegetation coverage. One is that RGB images are low cost, convenient to process, and less affected by weather and light. The other is that RGB images have a relatively high spatial and temporal resolution, which is more suitable for local studies. For example, <xref ref-type="bibr" rid="B26">Marcial-Pablo et&#xa0;al. (2019)</xref> indicated that the accuracy of visible vegetation indices is higher than that of visible NIR vegetation indices for early crop cover. <xref ref-type="bibr" rid="B8">Furukawa et&#xa0;al. (2021)</xref> reported that RGB images provide reliable information for vegetation monitoring. 
For the mining areas, the land-use type was relatively single, and vegetation coverage could be quickly obtained via UAV images. Moreover, the UAV-visible images were acquired in summer, when vegetation growth was the best. For most vegetation, summer is the most vigorous period for plant growth, during which the vegetation exhibits the strongest reflected spectral features. Thus, vegetation coverage can be accurately estimated using the vegetation index.</p>
</sec>
<sec id="s4_3">
<label>4.3</label>
<title>Variation characteristics of vegetation coverage</title>
<p>According to previous investigation and research results, the soil arsenic contamination in the gold mining area is serious (the average soil arsenic content was 93.96 mg/kg) (<xref ref-type="bibr" rid="B5">Chen et&#xa0;al., 2022</xref>). Vegetation types are scarce (mostly herbaceous plants), and vegetation coverage is low. The results of the vegetation coverage change from 2019 to 2021 indicated that most natural vegetation restoration sites had low to low&#x2013;moderate vegetation coverage. This was mainly because soil As contamination limited the normal growth and development of plants in the early stages of vegetation restoration (<xref ref-type="bibr" rid="B46">Yang et&#xa0;al., 2020</xref>), and community succession was relatively slow. Increased vegetation coverage improves the quality of regional ecological environments. With the progress of ecological restoration, the soil arsenic content has decreased, and plants have developed their own unique physiological and ecological characteristics after a period of adaptation. The number of pixels with zero vegetation and low and low-moderate vegetation coverage decreased, those with moderate&#x2013;high and high vegetation coverage increased, and the overall vegetation coverage increased.</p>
</sec>
</sec>
<sec id="s5" sec-type="conclusions">
<label>5</label>
<title>Conclusions</title>
<p>In this study, a disused gold mining area in the Qinling Mountains was selected as the research area, and UAVs were deployed to obtain image data with high spatial resolution in the visible light. The performance of different visible light vegetation indices combined with two threshold segmentation methods for extracting vegetation coverage was evaluated. The main conclusions are as follows. (1) Except for RGRI, NGRDI, and NGBDI, the other visible light vegetation indices effectively discriminated between vegetation and non-vegetation in the study area. (2) EXG, VDVI, and RGBVI combined with the bimodal histogram threshold method had higher extraction accuracy in distinguishing between vegetation and non-vegetation areas. (3) EXG and the bimodal histogram threshold method had the highest accuracy for vegetation identification, which was the closest to the results of the monitored and actual situations. (4) The spatiotemporal analysis of vegetation coverage in 2019, 2020, and 2021 showed that most mining areas had low, low&#x2013;moderate, and moderate vegetation coverage, whereas the overall vegetation coverage was low. The average FVC values for the three years were 31.47%, 34.08%, and 42.77%, respectively, indicating an increasing trend. Future studies should continue monitoring vegetation coverage changes to provide technical support for land reclamation and ecological restoration in mining areas.</p>
</sec>
<sec id="s6" sec-type="data-availability">
<title>Data availability statement</title>
<p>The original contributions presented in the study are included in the article/supplementary material. Further inquiries can be directed to the corresponding author.</p>
</sec>
<sec id="s7" sec-type="author-contributions">
<title>Author contributions</title>
<p>RC: conceptualization, methodology, analysis, and writing &#x2013; original manuscript. LH: conceptualization, reviewing and editing, and funding acquisition. YHZ: reviewing and editing, supervision. ZZ: methodology, statistical analysis, and reviewing and editing. ZL: reviewing and editing, supervision. RL: methodology, reviewing and editing. LX: reviewing and editing, providing revisions and comments, and supervision. YMZ: methodology, providing revisions and comments, and reviewing and editing. All authors contributed to the article and approved the submitted version.</p>
</sec>
</body>
<back>
<sec id="s8" sec-type="funding-information">
<title>Funding</title>
<p>This work was supported by the Key Laboratory of Degraded and Unused Land Consolidation Engineering, Ministry of Natural Resources of the People&#x2019;s Republic of China (Program No. SXDJ2017-9), and the Shaanxi Key Laboratory of Land Reclamation Engineering: (Program No. 2018-ZZ03).</p>
</sec>
<ack>
<title>Acknowledgments</title>
<p>We are grateful to the reviewers whose comments have helped to clarify and improve the manuscript.</p>
</ack>
<sec id="s9" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>RL and LX were employed by the Institute of Land Engineering and Technology, Shaanxi Provincial Land Engineering Construction Group, Xi&#x2019;an, China.</p>
<p>The remaining authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec id="s10" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors&#xa0;and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Akash</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Jinha</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Anjin</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Sungchan</surname> <given-names>O.</given-names>
</name>
<name>
<surname>Murilo</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Juan</surname> <given-names>L.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>A comparative study of RGB and multispectral sensor-based cotton canopy cover modelling using multi-temporal UAS data</article-title>. <source>Remote Sens.</source> <volume>11</volume> (<issue>23</issue>), <elocation-id>2757</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs11232757</pub-id>
</citation>
</ref>
<ref id="B2">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ana</surname> <given-names>D. C.</given-names>
</name>
<name>
<surname>Shi</surname> <given-names>Y. Y.</given-names>
</name>
<name>
<surname>Maja</surname> <given-names>J. M.</given-names>
</name>
<name>
<surname>Pe&#xf1;a</surname> <given-names>J. M.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>UAVs for vegetation monitoring: overview and recent scientific contributions</article-title>. <source>Remote Sens-Basel</source> <volume>13</volume>, <elocation-id>2139</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs13112139</pub-id>
</citation>
</ref>
<ref id="B3">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Boyd</surname> <given-names>D. S.</given-names>
</name>
<name>
<surname>Foody</surname> <given-names>G. M.</given-names>
</name>
<name>
<surname>Ripple</surname> <given-names>W. J.</given-names>
</name>
</person-group> (<year>2002</year>). <article-title>Evaluation of approaches for forest cover estimation in the Pacific Northwest, USA, using remote sensing</article-title>. <source>Appl. Geogr.</source> <volume>22</volume> (<issue>4</issue>), <fpage>375</fpage>&#x2013;<lpage>393</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/S0143-6228(02)00048-6</pub-id>
</citation>
</ref>
<ref id="B4">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chen</surname> <given-names>X. D.</given-names>
</name>
<name>
<surname>Deng</surname> <given-names>J. H.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Study on extraction method of vegetation coverage of summer maize based on visible image</article-title>. <source>Exp. Technol. Manage.</source> <volume>36</volume> (<issue>12</issue>), <fpage>131</fpage>&#x2013;<lpage>136</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.6041/j.issn.1000-1298.2019.05.027</pub-id>
</citation>
</ref>
<ref id="B5">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chen</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Han</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Zhao</surname> <given-names>Y. H.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>R. S.</given-names>
</name>
<name>
<surname>Xia</surname> <given-names>L. F.</given-names>
</name>
<etal/>
</person-group>. (<year>2022</year>). <article-title>Assessment of soil-heavy metal pollution and the health risks in a mining area from southern Shaanxi Province, China</article-title>. <source>Toxics</source> <volume>10</volume>, <elocation-id>385</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/toxics10070385</pub-id>
</citation>
</ref>
<ref id="B6">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Coy</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Rankine</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Taylor</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Nielsen</surname> <given-names>D. C.</given-names>
</name>
</person-group> (<year>2016</year>). <article-title>Increasing the accuracy and automation of fractional vegetation cover estimation from digital photographs</article-title>. <source>Remote Sens.</source> <volume>8</volume> (<issue>7</issue>), <elocation-id>474</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs8070474</pub-id>
</citation>
</ref>
<ref id="B7">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Fu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Song</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>Y.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Relationship between brain size and digestive tract length support the expensive-tissue hypothesis in Feirana quadranus</article-title>. <source>Front. Ecol. Evol.</source> <volume>10</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fevo.2022.982590</pub-id>
</citation>
</ref>
<ref id="B8">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Furukawa</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Laneng</surname> <given-names>L. A.</given-names>
</name>
<name>
<surname>Ando</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Yoshimura</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Kaneko</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Morimoto</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Comparison of RGB and multispectral unmanned aerial vehicle for monitoring vegetation coverage changes on a landslide area</article-title>. <source>Drones</source> <volume>5</volume>, <elocation-id>97</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/drones5030097</pub-id>
</citation>
</ref>
<ref id="B9">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Geng</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>X. M.</given-names>
</name>
<name>
<surname>Fang</surname> <given-names>H. L.</given-names>
</name>
<name>
<surname>Ye</surname> <given-names>J. S.</given-names>
</name>
<name>
<surname>Han</surname> <given-names>L. K.</given-names>
</name>
<name>
<surname>Gong</surname> <given-names>Y.</given-names>
</name>
<etal/>
</person-group>. (<year>2022</year>). <article-title>Vegetation coverage of desert ecosystems in the Qinghai-Tibet Plateau is underestimated</article-title>. <source>Ecol. Indic</source> <volume>137</volume>, <elocation-id>108780</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.ecolind.2022.108780</pub-id>
</citation>
</ref>
<ref id="B10">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gitelson</surname> <given-names>A. A.</given-names>
</name>
<name>
<surname>Kaufman</surname> <given-names>Y. J.</given-names>
</name>
<name>
<surname>Stark</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Rundquist</surname> <given-names>D.</given-names>
</name>
</person-group> (<year>2002</year>). <article-title>Novel algorithms for remote estimation of vegetation fraction</article-title>. <source>Remote Sens. Environ.</source> <volume>80</volume> (<issue>1</issue>), <fpage>76</fpage>&#x2013;<lpage>87</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/S0034-4257(01)00289-9</pub-id>
</citation>
</ref>
<ref id="B11">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Guilherme</surname> <given-names>M. S.</given-names>
</name>
<name>
<surname>Daniel</surname> <given-names>G. D.</given-names>
</name>
<name>
<surname>Oriel</surname> <given-names>T. K.</given-names>
</name>
<name>
<surname>Ana</surname> <given-names>C. S. L.</given-names>
</name>
<name>
<surname>S&#xe9;rgio</surname> <given-names>G.</given-names>
</name>
<name>
<surname>F&#xe1;bio</surname> <given-names>M. O.</given-names>
</name>
<etal/>
</person-group>. (<year>2018</year>). <article-title>The potential for RGB images obtained using unmanned aerial vehicle to assess and predict yield in sugarcane fields</article-title>. <source>Int. J. Remote Sens.</source> <volume>39</volume> (<issue>15-16</issue>), <fpage>5402</fpage>&#x2013;<lpage>5414</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/01431161.2018.1448484</pub-id>
</citation>
</ref>
<ref id="B12">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Guo</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Guo</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Feng</surname> <given-names>Z.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Detecting the vegetation change related to the creep of 2018 Baige landslide in Jinsha River, SE Tibet using SPOT data</article-title>. <source>Front. Earth Sci.</source> <volume>9</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/feart.2021.706998</pub-id>
</citation>
</ref>
<ref id="B13">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Guo</surname> <given-names>Z. C.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>S. L.</given-names>
</name>
<name>
<surname>Kang</surname> <given-names>W. P.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Feng</surname> <given-names>K.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). <article-title>Biomass and vegetation coverage survey in the Mu Us sandy land - based on unmanned aerial vehicle RGB images</article-title>. <source>Int. J. Appl. Earth Obs.</source> <volume>94</volume>, <elocation-id>102239</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jag.2020.102239</pub-id>
</citation>
</ref>
<ref id="B14">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hague</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Tillett</surname> <given-names>N. D.</given-names>
</name>
<name>
<surname>Wheeler</surname> <given-names>H.</given-names>
</name>
</person-group> (<year>2006</year>). <article-title>Automated crop and weed monitoring in widely spaced cereals</article-title>. <source>Precis. Agric.</source> <volume>7</volume> (<issue>1</issue>), <fpage>21</fpage>&#x2013;<lpage>32</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11119-005-6787-1</pub-id>
</citation>
</ref>
<ref id="B15">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hunt</surname> <given-names>E. R.</given-names>
</name>
<name>
<surname>Cavigelli</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Daughtry</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Mcmurtrey</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Walthall</surname> <given-names>C.</given-names>
</name>
</person-group> (<year>2005</year>). <article-title>Evaluation of digital photography from model aircraft for remote sensing of crop biomass and nitrogen status</article-title>. <source>Precis. Agric.</source> <volume>6</volume> (<issue>4</issue>), <fpage>359</fpage>&#x2013;<lpage>378</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11119-005-2324-5</pub-id>
</citation>
</ref>
<ref id="B16">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Huo</surname> <given-names>A. D.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Zhao</surname> <given-names>Z. X.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>L. Y.</given-names>
</name>
<name>
<surname>Zhong</surname> <given-names>F. Q.</given-names>
</name>
<name>
<surname>Zheng</surname> <given-names>C. L.</given-names>
</name>
<etal/>
</person-group>. (<year>2022</year>). <article-title>Risk assessment of heavy metal pollution in farmland soils at the northern foot of the Qinling Mountains, China</article-title>. <source>Int. J. Environ. Res. Public Health</source> <volume>19</volume> (<issue>22</issue>), <elocation-id>14962</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/ijerph192214962</pub-id>
</citation>
</ref>
<ref id="B17">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jay</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Baret</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Dutartre</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Malatesta</surname> <given-names>G.</given-names>
</name>
<name>
<surname>H&#xe9;no</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Comar</surname> <given-names>A.</given-names>
</name>
<etal/>
</person-group>. (<year>2019</year>). <article-title>Exploiting the centimeter resolution of UAV multispectral imagery to improve remote-sensing estimates of canopy structure and biochemistry in sugar beet crops</article-title>. <source>Remote Sens. Environ.</source> <volume>231</volume>, <fpage>110898</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rse.2018.09.011</pub-id>
</citation>
</ref>
<ref id="B18">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jia</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Liang</surname> <given-names>S. L.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>S. H.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>Y. W.</given-names>
</name>
<name>
<surname>Xiao</surname> <given-names>Z. Q.</given-names>
</name>
<name>
<surname>Yao</surname> <given-names>Y. J.</given-names>
</name>
<etal/>
</person-group>. (<year>2015</year>). <article-title>Global land surface fractional vegetation cover estimation using general regression neural networks from MODIS surface reflectance</article-title>. <source>IEEE T Geosci Remote</source> <volume>53</volume> (<issue>9</issue>), <fpage>4787</fpage>&#x2013;<lpage>4796</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TGRS.2015.2409563</pub-id>
</citation>
</ref>
<ref id="B19">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Juliane</surname> <given-names>Be</given-names>
</name>
<name>
<surname>Kang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Helge</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Andreas</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Simon</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Janis</surname> <given-names>B.</given-names>
</name>
<etal/>
</person-group>. (<year>2015</year>). <article-title>Combining UAV-based plant height from crop surface models, visible, and near infrared vegetation indices for biomass monitoring in barley</article-title>. <source>Int. J. Appl. Earth Obs.</source> <volume>39</volume>, <fpage>79</fpage>&#x2013;<lpage>87</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jag.2015.02.012</pub-id>
</citation>
</ref>
<ref id="B20">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kim</surname> <given-names>D. W.</given-names>
</name>
<name>
<surname>Yun</surname> <given-names>H. S.</given-names>
</name>
<name>
<surname>Jeong</surname> <given-names>S. J.</given-names>
</name>
<name>
<surname>Kwon</surname> <given-names>Y. S.</given-names>
</name>
<name>
<surname>Kim</surname> <given-names>S. G.</given-names>
</name>
<name>
<surname>Lee</surname> <given-names>W. S.</given-names>
</name>
<etal/>
</person-group>. (<year>2018</year>). <article-title>Modeling and testing of growth status for Chinese cabbage and white radish with UAV-based RGB imagery</article-title>. <source>Remote Sens.</source> <volume>10</volume> (<issue>4</issue>), <elocation-id>563</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs10040563</pub-id>
</citation>
</ref>
<ref id="B21">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Leng</surname> <given-names>R. L.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>Y. Y.</given-names>
</name>
<name>
<surname>Xie</surname> <given-names>J. Q.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>F. N.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Cui</surname> <given-names>X.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>An analysis of fractional vegetation cover of the Gannan grassland in the non-growing season based on multispectral data and small UAVs</article-title>. <source>Pratacultural Sci.</source> <volume>36</volume> (<issue>11</issue>), <fpage>2742</fpage>&#x2013;<lpage>2751</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.11829/j.issn.1001-0629.2019-0013</pub-id>
</citation>
</ref>
<ref id="B22">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Li</surname> <given-names>X. L.</given-names>
</name>
<name>
<surname>Gao</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Jin</surname> <given-names>L. Q.</given-names>
</name>
<name>
<surname>Zhou</surname> <given-names>H. K.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Adaptive strategies to overcome challenges in vegetation restoration to coalmine wasteland in a frigid alpine setting</article-title>. <source>Catena</source> <volume>182</volume>, <elocation-id>104142</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.catena.2019.104142</pub-id>
</citation>
</ref>
<ref id="B23">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Li</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Wu</surname> <given-names>M. H.</given-names>
</name>
<name>
<surname>Duan</surname> <given-names>C. G.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>S. Y.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>C. E.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>The effect of different restoration approaches on vegetation development in metal mines</article-title>. <source>Sci. Total Environ.</source> <volume>806</volume> (<issue>2</issue>), <elocation-id>150626</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/J.Scitotenv.2021.150626</pub-id>
</citation>
</ref>
<ref id="B24">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liang</surname> <given-names>H. W.</given-names>
</name>
</person-group> (<year>2002</year>). <article-title>Direct determination of threshold from bimodal histogram</article-title>. <source>Pattern Recognition Artificial Intell.</source> <volume>15</volume> (<issue>2</issue>), <fpage>253</fpage>&#x2013;<lpage>256</lpage>.</citation>
</ref>
<ref id="B25">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lu</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Liao</surname> <given-names>X. H.</given-names>
</name>
<name>
<surname>Yue</surname> <given-names>H. Y.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>Y. H.</given-names>
</name>
<name>
<surname>Ye</surname> <given-names>H. P.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>C. C.</given-names>
</name>
<etal/>
</person-group>. (<year>2020</year>). <article-title>Optimizing distribution of drone ports for emergency monitoring of flood disasters in China</article-title>. <source>J. Flood Risk Manag.</source> <volume>13</volume> (<issue>1</issue>), <fpage>e12593</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/jfr3.12593</pub-id>
</citation>
</ref>
<ref id="B26">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Marcial-Pablo</surname> <given-names>M. D. J.</given-names>
</name>
<name>
<surname>Gonzalez-Sanchez</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Jimenez-Jimenez</surname> <given-names>S. I.</given-names>
</name>
<name>
<surname>Ontiveros-Capurata</surname> <given-names>R. E.</given-names>
</name>
<name>
<surname>Ojeda-Bustamante</surname> <given-names>W.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Estimation of vegetation fraction using RGB and multispectral images from UAV</article-title>. <source>Int. J. Remote Sens.</source> <volume>40</volume> (<issue>2</issue>), <fpage>420</fpage>&#x2013;<lpage>438</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/01431161.2018.1528017</pub-id>
</citation>
</ref>
<ref id="B27">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mishra</surname> <given-names>V.</given-names>
</name>
<name>
<surname>Avtar</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Prathiba</surname> <given-names>A. P.</given-names>
</name>
<name>
<surname>Mishra</surname> <given-names>P. K.</given-names>
</name>
<name>
<surname>Tiwari</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Sharma</surname> <given-names>S. K.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Uncrewed aerial systems in water resource management and monitoring: a review of sensors, applications, software, and issues</article-title>. <source>Adv. Civ. Eng.</source> <volume>28</volume>, <elocation-id>3544724</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1155/2023/3544724</pub-id>
</citation>
</ref>
<ref id="B28">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Otsu</surname> <given-names>N.</given-names>
</name>
</person-group> (<year>2007</year>). <article-title>A threshold selection method from gray-level histograms</article-title>. <source>IEEE T Syst. Man Cy-S</source> <volume>9</volume> (<issue>1</issue>), <fpage>62</fpage>&#x2013;<lpage>66</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TSMC.1979.4310076</pub-id>
</citation>
</ref>
<ref id="B29">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Park</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Park</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Song</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Lee</surname> <given-names>H.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Analyzing impact of types of UAV-derived images on the object-based classification of land cover in an urban area</article-title>. <source>Drones</source> <volume>6</volume>, <elocation-id>71</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/drones6030071</pub-id>
</citation>
</ref>
<ref id="B30">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Rasmussen</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Ntakos</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Nielsen</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Svensgaard</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Poulsen</surname> <given-names>R. N.</given-names>
</name>
<name>
<surname>Christensen</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2016</year>). <article-title>Are vegetation indices derived from consumer-grade cameras mounted on UAVs sufficiently reliable for assessing experimental plots</article-title>? <source>Eur. J. Agron.</source> <volume>74</volume>, <fpage>75</fpage>&#x2013;<lpage>92</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.eja.2015.11.026</pub-id>
</citation>
</ref>
<ref id="B31">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ren</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Cheng</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Guo</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Ding</surname> <given-names>A.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Vegetation properties in human-impacted riparian zones based on unmanned aerial vehicle (UAV) imagery: an analysis of river reaches in the yongding river basin</article-title>. <source>Forests</source> <volume>12</volume>, <elocation-id>22</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/f12010022</pub-id>
</citation>
</ref>
<ref id="B32">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Romina</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Mar&#xed;a</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Santiago</surname> <given-names>A. T.</given-names>
</name>
<name>
<surname>Elizabeth</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Ana</surname> <given-names>S. B.</given-names>
</name>
<name>
<surname>Constanza</surname> <given-names>R.</given-names>
</name>
<etal/>
</person-group>. (<year>2010</year>). <article-title>Cryptochrome as a sensor of the blue/green ratio of natural radiation in Arabidopsis</article-title>. <source>Plant Physiol.</source> <volume>154</volume> (<issue>1</issue>), <fpage>401</fpage>&#x2013;<lpage>409</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1104/pp.110.160820</pub-id>
</citation>
</ref>
<ref id="B33">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Schofield</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Esteban</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Katselidis</surname> <given-names>K. A.</given-names>
</name>
<name>
<surname>Graeme</surname> <given-names>C. H.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Drones for research on sea turtles and other marine vertebrates &#x2013; a review</article-title>. <source>Biol. Conserv.</source> <volume>238</volume>, <elocation-id>108214</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biocon.2019.108214</pub-id>
</citation>
</ref>
<ref id="B34">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Shane</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Kevin</surname> <given-names>L.</given-names>
</name>
<name>
<surname>John</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Kenny</surname> <given-names>S.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Use of an unmanned aerial vehicle (UAV) to document vegetation coverage rate in managed grasslands following a historic river flood</article-title>. <source>J. Anim. Sci.</source> <volume>99</volume> (<issue>2</issue>), <fpage>9</fpage>&#x2013;<lpage>10</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1093/jas/skab096.015</pub-id>
</citation>
</ref>
<ref id="B35">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Shukla</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Jain</surname> <given-names>K.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Automatic extraction of urban land information from unmanned aerial vehicle (UAV) data</article-title>. <source>Earth Sci. Inform</source> <volume>13</volume>, <fpage>1225</fpage>&#x2013;<lpage>1236</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s12145-020-00498-x</pub-id>
</citation>
</ref>
<ref id="B36">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Song</surname> <given-names>W. J.</given-names>
</name>
<name>
<surname>Mu</surname> <given-names>X. H.</given-names>
</name>
<name>
<surname>Ruan</surname> <given-names>G. Y.</given-names>
</name>
<name>
<surname>Gao</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>L. Y.</given-names>
</name>
<name>
<surname>Yan</surname> <given-names>G. Y.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Estimating fractional vegetation cover and the vegetation index of bare soil and highly dense vegetation with a physically based method</article-title>. <source>Int. J. Appl. Earth Obs.</source> <volume>58</volume>, <fpage>168</fpage>&#x2013;<lpage>176</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jag.2017.01.015</pub-id>
</citation>
</ref>
<ref id="B37">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sun</surname> <given-names>Z. Y.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>X. N.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>Z. H.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Xie</surname> <given-names>Y. C.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>Y. H.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>UAVs as remote sensing platforms in plant ecology: review of applications and challenges</article-title>. <source>J. Plant Ecol.</source> <volume>14</volume> (<issue>6</issue>), <fpage>1003</fpage>&#x2013;<lpage>1023</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1093/jpe/rtab089</pub-id>
</citation>
</ref>
<ref id="B38">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sun</surname> <given-names>G. X.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>X. C.</given-names>
</name>
<name>
<surname>Yan</surname> <given-names>T. T.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Shi</surname> <given-names>Y. Y.</given-names>
</name>
<etal/>
</person-group>. (<year>2014</year>). <article-title>Inversion method of flora growth parameters based on machine vision</article-title>. <source>Trans. Chin. Soc. Agric. Eng.</source> <volume>30</volume> (<issue>20</issue>), <fpage>187</fpage>&#x2013;<lpage>195</lpage>.</citation>
</ref>
<ref id="B39">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Verrelst</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Schaepman</surname> <given-names>M. E.</given-names>
</name>
<name>
<surname>Koetz</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Kneub&#xfc;hler</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2008</year>). <article-title>Angular sensitivity analysis of vegetation indices derived from CHRIS/PROBA data</article-title>. <source>Remote Sens. Environ.</source> <volume>112</volume> (<issue>5</issue>), <fpage>2341</fpage>&#x2013;<lpage>2353</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rse.2007.11.001</pub-id>
</citation>
</ref>
<ref id="B40">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Van de Voorde</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Vlaeminck</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Canters</surname> <given-names>F.</given-names>
</name>
</person-group> (<year>2008</year>). <article-title>Comparing different approaches for mapping urban vegetation cover from Landsat ETM+ data: a case study on Brussels</article-title>. <source>Sensors</source> <volume>8</volume>, <fpage>3880</fpage>&#x2013;<lpage>3902</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s8063880</pub-id>
</citation>
</ref>
<ref id="B41">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname> <given-names>X. Q.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>M. M.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>S. Q.</given-names>
</name>
<name>
<surname>Wu</surname> <given-names>D. P.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>Extraction of vegetation information from visible unmanned aerial vehicle images</article-title>. <source>Trans. Chin. Soc. Agric. Eng.</source> <volume>31</volume> (<issue>05</issue>), <fpage>152</fpage>&#x2013;<lpage>159</lpage>.</citation>
</ref>
<ref id="B42">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Watanabe</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Guo</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Arai</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Takanashi</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Kajiya-Kanegae</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Kobayashi</surname> <given-names>M.</given-names>
</name>
<etal/>
</person-group>. (<year>2017</year>). <article-title>High-throughput phenotyping of sorghum plant height using an unmanned aerial vehicle and its application to genomic prediction modeling</article-title>. <source>Front. Plant Sci.</source> <volume>8</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2017.00421</pub-id>
</citation>
</ref>
<ref id="B43">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Woebbecke</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Meyer</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Bargen</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Mortensen</surname> <given-names>D.</given-names>
</name>
</person-group> (<year>1995</year>). <article-title>Color indices for weed identification under various soil, residue, and lighting conditions</article-title>. <source>T ASABE</source> <volume>38</volume>, <fpage>259</fpage>&#x2013;<lpage>269</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.13031/2013.27838</pub-id>
</citation>
</ref>
<ref id="B44">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Xu</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Han</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Gong</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>Q.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Accuracy of vegetation indices in assessing different grades of grassland desertification from UAV</article-title>. <source>Int. J. Env. Res. Pub. He.</source> <volume>19</volume> (<issue>24</issue>), <elocation-id>16793</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/ijerph192416793</pub-id>
</citation>
</ref>
<ref id="B45">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Xu</surname> <given-names>K. X.</given-names>
</name>
<name>
<surname>Su</surname> <given-names>Y. J.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Hu</surname> <given-names>T. Y.</given-names>
</name>
<name>
<surname>Jin</surname> <given-names>S. C.</given-names>
</name>
<name>
<surname>Ma</surname> <given-names>Q.</given-names>
</name>
<etal/>
</person-group>. (<year>2020</year>). <article-title>Estimation of degraded grassland aboveground biomass using machine learning methods from terrestrial laser scanning data</article-title>. <source>Ecol. Indic.</source> <volume>108</volume> (<issue>C</issue>), <elocation-id>105747</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.ecolind.2019.105747</pub-id>
</citation>
</ref>
<ref id="B46">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yang</surname> <given-names>G. Y.</given-names>
</name>
<name>
<surname>Zhong</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>C. E.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>S. Y.</given-names>
</name>
<name>
<surname>Hou</surname> <given-names>L.</given-names>
</name>
<etal/>
</person-group>. (<year>2020</year>). <article-title>Arsenic distribution, accumulation and tolerance mechanisms of Typha angustifolia in different phenological growth stages</article-title>. <source>B Environ. Contam. Tox.</source> <volume>104</volume> (<issue>3</issue>), <fpage>358</fpage>&#x2013;<lpage>365</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00128-020-02796-y</pub-id>
</citation>
</ref>
<ref id="B47">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>X. L.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Qi</surname> <given-names>Y. X.</given-names>
</name>
<name>
<surname>Deng</surname> <given-names>L. F.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>X. L.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>S. T.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>New research methods for vegetation information extraction based on visible light remote sensing images from an unmanned aerial vehicle (UAV)</article-title>. <source>Int. J. Appl. Earth Obs.</source> <volume>78</volume>, <fpage>215</fpage>&#x2013;<lpage>226</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jag.2019.01.001</pub-id>
</citation>
</ref>
<ref id="B48">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhao</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>Z. W.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>G. Z.</given-names>
</name>
<name>
<surname>Xu</surname> <given-names>Y. M.</given-names>
</name>
<name>
<surname>Sun</surname> <given-names>L. J.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Exploration of influence factors on regional fractional vegetation cover based on a combination of factor regression and interaction&#x2013;take the three-river headwaters region as an example</article-title>. <source>China Environ. Sci.</source> <volume>42</volume> (<issue>8</issue>), <fpage>3903</fpage>&#x2013;<lpage>3912</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.19674/j.cnki.issn1000-6923.20220329d011</pub-id>
</citation>
</ref>
<ref id="B49">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhao</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>H. B.</given-names>
</name>
<name>
<surname>Lan</surname> <given-names>Y. B.</given-names>
</name>
<name>
<surname>Lu</surname> <given-names>L. Q.</given-names>
</name>
<name>
<surname>Jia</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>Z. M.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Extraction method of summer corn vegetation coverage based on visible light image of unmanned aerial vehicle</article-title>. <source>Trans. Chin. Soc. Agric. Machinery</source> <volume>050</volume> (<issue>005</issue>), <fpage>232</fpage>&#x2013;<lpage>240</lpage>.</citation>
</ref>
<ref id="B50">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhou</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Hu</surname> <given-names>Z. Q.</given-names>
</name>
<name>
<surname>Han</surname> <given-names>J. Z.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>H.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Green vegetation extraction based on visible light image of UAV</article-title>. <source>China Environ. Sci.</source> <volume>41</volume> (<issue>05</issue>), <fpage>2380</fpage>&#x2013;<lpage>2390</lpage>.</citation>
</ref>
</ref-list>
</back>
</article>