<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3-mathml3.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="1.3" xml:lang="en">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Plant Sci.</journal-id>
<journal-title-group>
<journal-title>Frontiers in Plant Science</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Plant Sci.</abbrev-journal-title>
</journal-title-group>
<issn pub-type="epub">1664-462X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpls.2025.1730583</article-id>
<article-version article-version-type="Version of Record" vocab="NISO-RP-8-2008"/>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Original Research</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>Integrating UAV visible and multispectral imagery to assess grazing-induced vegetation responses in sandy grasslands</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name><surname>Guan</surname><given-names>Qiang</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/3238683/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Investigation" vocab-term-identifier="https://credit.niso.org/contributor-roles/investigation/">Investigation</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Jiang</surname><given-names>Mingyang</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/3136409/overview"/>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Funding acquisition" vocab-term-identifier="https://credit.niso.org/contributor-roles/funding-acquisition/">Funding acquisition</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Software" vocab-term-identifier="https://credit.niso.org/contributor-roles/software/">Software</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Visualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/visualization/">Visualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Du</surname><given-names>Wen</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>*</sup></xref>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Project administration" vocab-term-identifier="https://credit.niso.org/contributor-roles/project-administration/">Project administration</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &amp; editing</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Chen</surname><given-names>Xueyan</given-names></name>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Validation" vocab-term-identifier="https://credit.niso.org/contributor-roles/validation/">Validation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Formal analysis" vocab-term-identifier="https://credit.niso.org/contributor-roles/formal-analysis/">Formal analysis</role>
</contrib>
<contrib contrib-type="author">
<name><surname>Yan</surname><given-names>Baolong</given-names></name>
<xref ref-type="aff" rid="aff5"><sup>5</sup></xref>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Data curation" vocab-term-identifier="https://credit.niso.org/contributor-roles/data-curation/">Data curation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Resources" vocab-term-identifier="https://credit.niso.org/contributor-roles/resources/">Resources</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
</contrib>
</contrib-group>
<aff id="aff1"><label>1</label><institution>College of Computer Science and Technology, Inner Mongolia Minzu University</institution>, <city>Tongliao</city>,&#xa0;<country country="CN">China</country></aff>
<aff id="aff2"><label>2</label><institution>College of Information and Electrical Engineering, Shenyang Agricultural University</institution>, <city>Shenyang</city>,&#xa0;<country country="CN">China</country></aff>
<aff id="aff3"><label>3</label><institution>College of Engineering, Inner Mongolia Minzu University</institution>, <city>Tongliao</city>,&#xa0;<country country="CN">China</country></aff>
<aff id="aff4"><label>4</label><institution>Innovation Center for Intelligent Forage Equipment, Inner Mongolia Minzu University</institution>, <city>Tongliao</city>,&#xa0;<country country="CN">China</country></aff>
<aff id="aff5"><label>5</label><institution>College of Grassland Science, Inner Mongolia Minzu University</institution>, <city>Tongliao</city>,&#xa0;<country country="CN">China</country></aff>
<author-notes>
<corresp id="c001"><label>*</label>Correspondence: Wen Du, <email xlink:href="mailto:duwen@syau.edu.cn">duwen@syau.edu.cn</email></corresp>
</author-notes>
<pub-date publication-format="electronic" date-type="pub" iso-8601-date="2025-12-11">
<day>11</day>
<month>12</month>
<year>2025</year>
</pub-date>
<pub-date publication-format="electronic" date-type="corrected" iso-8601-date="2026-01-16">
<day>16</day>
<month>01</month>
<year>2026</year></pub-date>
<pub-date publication-format="electronic" date-type="collection">
<year>2025</year>
</pub-date>
<volume>16</volume>
<elocation-id>1730583</elocation-id>
<history>
<date date-type="received">
<day>23</day>
<month>10</month>
<year>2025</year>
</date>
<date date-type="accepted">
<day>25</day>
<month>11</month>
<year>2025</year>
</date>
<date date-type="rev-recd">
<day>21</day>
<month>11</month>
<year>2025</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2025 Guan, Jiang, Du, Chen and Yan.</copyright-statement>
<copyright-year>2025</copyright-year>
<copyright-holder>Guan, Jiang, Du, Chen and Yan</copyright-holder>
<license>
<ali:license_ref start_date="2025-12-11">https://creativecommons.org/licenses/by/4.0/</ali:license_ref>
<license-p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</license-p>
</license>
</permissions>
<abstract>
<sec>
<title>Introduction</title>
<p>Monitoring grazing intensity is crucial for maintaining ecological balance and promoting the sustainable management of sandy grasslands. Traditional ground surveys and single-source remote sensing often lack the spatial resolution, spectral richness, and robustness required to accurately characterize heterogeneous grazing impacts. Unmanned aerial vehicle (UAV)-based multi-source remote sensing provides fine-scale, repeatable observations that can overcome the limitations of traditional field surveys.</p>
</sec>
<sec>
<title>Methods</title>
<p>Grazing experiments were conducted in the sandy grasslands of Inner Mongolia, China, using UAVs to capture visible and multispectral imagery across plots subjected to different grazing intensities. Spectral responses were analyzed using mean&#x2013;variance statistics and Tukey&#x2019;s multiple comparison tests. A series of novel spectral indices were constructed based on separability analysis and integrated with traditional vegetation indices to address the limited sensitivity of conventional indices and multi-index feature redundancy. An automatic incremental feature selection (AIFS) algorithm was developed to adaptively optimize the feature subset and enhance model robustness, with a support vector machine classifier, k-nearest neighbor, and random forest used for grazing intensity recognition.</p>
</sec>
<sec>
<title>Results</title>
<p>Distinct spectral responses to grazing disturbance were observed: visible bands increased with grazing intensity due to enhanced soil background effects, while red-edge and near-infrared bands effectively captured reductions in chlorophyll content and canopy structure under moderate to severe grazing. Traditional vegetation indices were sensitive to extreme grazing, whereas the proposed indices showed superior performance in distinguishing moderate grazing levels. The AIFS-optimized feature subset reduced redundancy and improved model accuracy, achieving the highest recognition performance (OA=92.13%, Kappa=88.99%)&#x2014;outperforming models using all features or single-source data.</p>
</sec>
<sec>
<title>Discussion</title>
<p>Integrating UAV visible and multispectral imagery with intelligent feature selection enhances the detection of grazing-induced vegetation responses. This approach provides a robust framework for high-precision grassland monitoring and sustainable ecological management in arid and semi-arid regions.</p>
</sec>
</abstract>
<kwd-group>
<kwd>sandy grasslands</kwd>
<kwd>multi-source remote sensing</kwd>
<kwd>unmanned aerial vehicle</kwd>
<kwd>grazing intensity</kwd>
<kwd>spectral indices</kwd>
<kwd>feature selection</kwd>
</kwd-group>
<funding-group>
<funding-statement>The author(s) declared that financial support was received for this work and/or its publication. This study was supported by the Science and Technology Plan Project of Inner Mongolia Autonomous Region (2025YFDZ0025), Inner Mongolia Natural Science Foundation Project (2025MS06053), Open Fund Project of the Innovation Centre for Intelligent Equipment for Pasture in Inner Mongolia Autonomous Region (MDK2025049), Basic Research Operating Expenses Project for Universities Directly Affiliated with the Inner Mongolia Autonomous Region (GXKY25Z016) and Inner Mongolia University for Nationalities Doctoral Research Start-up Fund (BSZ006).</funding-statement>
</funding-group>
<counts>
<fig-count count="11"/>
<table-count count="3"/>
<equation-count count="12"/>
<ref-count count="67"/>
<page-count count="17"/>
<word-count count="8264"/>
</counts>
<custom-meta-group>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Sustainable and Intelligent Phytoprotection</meta-value>
</custom-meta>
</custom-meta-group>
</article-meta>
</front>
<body>
<sec id="s1" sec-type="intro">
<label>1</label>
<title>Introduction</title>
<p>Grasslands are globally significant ecosystems, providing essential resources for livestock production and playing a vital role in maintaining ecological balance, sequestering carbon, and conserving water (<xref ref-type="bibr" rid="B64">Zhao et&#xa0;al., 2020</xref>). China has extensive grasslands, with the sandy grasslands of Inner Mongolia serving as a typical example, which function in both ecological protection and grazing production. However, long-term overgrazing and other human disturbances have caused severe degradation of sandy grasslands, leading to vegetation loss, wind erosion, and biodiversity decline (<xref ref-type="bibr" rid="B23">Lin et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B18">Jiang et&#xa0;al., 2024</xref>). Grazing intensity is a key indicator of grassland exploitation and degradation, and its accurate monitoring is vital for the ecological protection and sustainable use of sandy grasslands. Therefore, there is an urgent need to develop efficient and precise monitoring methods to support scientific management and policy formulation (<xref ref-type="bibr" rid="B37">Reinermann et&#xa0;al., 2020</xref>; <xref ref-type="bibr" rid="B3">Ali and Kaul, 2025</xref>).</p>
<p>Currently, monitoring grazing intensity on grasslands mainly relies on traditional field surveys and sampling plots. These methods remain direct and reliable, but are time-consuming, labor-intensive, and limited in spatial coverage, making dynamic monitoring challenging (<xref ref-type="bibr" rid="B47">Wang et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B56">Yu et&#xa0;al., 2025</xref>). Satellite remote sensing has been widely used for large-scale grassland monitoring; however, its spatial and temporal resolution often cannot meet the requirements for small-scale, detailed detection of grazing intensity in sandy grasslands (<xref ref-type="bibr" rid="B2">Ali et&#xa0;al., 2016</xref>; <xref ref-type="bibr" rid="B48">Wang et&#xa0;al., 2023</xref>). In contrast, unmanned aerial vehicle (UAV) remote sensing offers advantages such as flexibility, high resolution, and timely data acquisition, making it particularly suitable for detailed studies of sandy grassland surface characteristics and grazing disturbances (<xref ref-type="bibr" rid="B28">Lyu et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B6">Bazzo et&#xa0;al., 2023</xref>; <xref ref-type="bibr" rid="B29">Lyu et&#xa0;al., 2024</xref>). UAV-based studies typically acquire visible, multispectral, or hyperspectral imagery. Previous studies have demonstrated that hyperspectral data can capture subtle spectral variations, leading to improved accuracy in vegetation classification, degradation detection, and biomass estimation. However, its high equipment costs and complex data processing limit its large-scale application (<xref ref-type="bibr" rid="B11">Fern&#xe1;ndez-Habas et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B49">Wengert et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B24">Liu et&#xa0;al., 2023</xref>; <xref ref-type="bibr" rid="B66">Zhu et&#xa0;al., 2023</xref>; <xref ref-type="bibr" rid="B59">Zhang et&#xa0;al., 2024a</xref>). 
Therefore, cost-effective visible and multispectral data have become increasingly crucial for monitoring sandy grasslands (<xref ref-type="bibr" rid="B39">Rossi et&#xa0;al., 2019</xref>).</p>
<p>In recent years, visible-light imagery has been utilized to estimate grassland coverage and degradation levels due to its ease of acquisition and simplicity of processing (<xref ref-type="bibr" rid="B35">Possoch et&#xa0;al., 2016</xref>; <xref ref-type="bibr" rid="B27">Lussem et&#xa0;al., 2017</xref>, <xref ref-type="bibr" rid="B26">2018</xref>; <xref ref-type="bibr" rid="B63">Zhang et&#xa0;al., 2022</xref>). Multispectral imagery, due to its rich spectral information, has been widely applied for estimating grass biomass and monitoring grazing disturbances (<xref ref-type="bibr" rid="B31">Orlando et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B40">Schucknecht et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B12">Gao et&#xa0;al., 2024</xref>; <xref ref-type="bibr" rid="B67">Zwick et&#xa0;al., 2024</xref>). However, monitoring based on a single data source often suffers from limited robustness across different grazing conditions, low generalizability, and inconsistent performance in distinguishing moderate grazing levels&#x2014;one of the most critical yet difficult categories to detect. To address these limitations, multi-source data fusion has gradually emerged as a popular research focus (<xref ref-type="bibr" rid="B5">Barber et&#xa0;al., 2021</xref>; <xref ref-type="bibr" rid="B25">Liu et&#xa0;al., 2025</xref>). For example, some researchers extract key indicators, such as vegetation indices, and perform multi-indicator analyses to identify <italic>Zoysia japonica</italic>, thereby evaluating its cold tolerance (<xref ref-type="bibr" rid="B19">Ku et&#xa0;al., 2023</xref>). Other studies have developed multiple linear regression (MLR) and generalized additive models (GAM) to estimate grassland aboveground biomass (AGB) using texture features from UAV RGB imagery and vegetation indices from multispectral imagery (<xref ref-type="bibr" rid="B34">Pan et&#xa0;al., 2024</xref>). 
Researchers collected UAV RGB and multispectral imagery, combined vegetation structure variables with spectral features, and applied machine learning algorithms for image segmentation and species classification (<xref ref-type="bibr" rid="B36">Pranga et&#xa0;al., 2024</xref>). However, despite progress in multi-source fusion, existing studies still exhibit three major gaps: (1) fusion is often simple (e.g., direct concatenation of indices or original bands) and lacks targeted design based on spectral separability; (2) redundant or irrelevant features in multi-source datasets reduce model stability and generalizability; (3) limited attention has been given to constructing spectral indices specifically sensitive to grazing-induced vegetation responses, especially at moderate grazing intensities.</p>
<p>To address these challenges, this study focuses on detecting grazing intensity in a typical sandy grassland. By integrating visible and multispectral UAV data, the study examines the correlations and separability of spectral features. We construct targeted spectral indices (SIs) based on the physical mechanisms of vegetation response to grazing and fuse them with traditional vegetation indices. Concurrently, an automatic incremental feature selection (AIFS) method is applied to select multi-source features, reducing redundancy and enhancing adaptability to heterogeneous grazing conditions. Ultimately, by integrating and optimizing multiple-source SIs, the study achieves efficient and precise detection of grazing intensity in sandy grasslands, providing a novel approach for ecological monitoring and grassland management.</p>
</sec>
<sec id="s2" sec-type="materials|methods">
<label>2</label>
<title>Materials and methods</title>
<sec id="s2_1">
<label>2.1</label>
<title>Overview of the experiment site</title>
<p>The study was conducted at Manghatu Village, Gerchaolu Sumu, Zhalute Banner, Tongliao City, Inner Mongolia, China (44&#xb0;62&#x2032;24&#x2033;N, 120&#xb0;45&#x2032;14&#x2033;E) at an altitude of 482 m. The region features a temperate semi-arid continental climate with distinct seasons: dry, windy springs; hot summers with scarce rainfall; cool autumns with significant diurnal temperature variations; and long, cold winters. The climatic parameters described in this section were obtained from the China Meteorological Data Service Center and represent long-term averages based on a 30-year period (1995&#x2013;2024). The long-term mean temperature is 5.5 &#xb0;C, and the annual accumulated temperature ranges from 2400 to 2600 &#xb0;C. Annual sunshine averages approximately 3000 hours, with a frost-free period of 115&#x2013;130 days. Annual rainfall ranges from 300 to 400 mm, with the majority occurring between June and August, accounting for ~70% of the total precipitation. Annual evaporation exceeds 1800 mm, with an average relative humidity of 49%. The grassland is classified as a temperate mountain grassland, characterized by sandy soil with a pH of 7.6. The dominant species is <italic>Agropyron cristatum</italic>, with key associated species including <italic>Lespedeza davurica</italic>, <italic>Cleistogenes squarrosa</italic>, and <italic>Carex duriuscula</italic>.</p>
</sec>
<sec id="s2_2">
<label>2.2</label>
<title>Experimental design</title>
<p>This study investigated the effects of varying grazing intensities on grassland ecosystems, commencing in June 2025. Grazing occurred from 15 June to 15 September, during which the sheep remained in their assigned subplots without interruption. The experimental site covered a total area of 3 ha, divided into four subplots of 0.75 ha each. A randomized block design was used to establish four grazing stocking rate treatments. Each subplot was grazed by 0, 3, 6, or 9 six-month-old rams, corresponding to 0, 4, 8, or 12 sheep units per ha, respectively. These treatments corresponded to no grazing (NG), light grazing (LG), moderate grazing (MG), and severe grazing (SG) (<xref ref-type="bibr" rid="B60">Zhang et&#xa0;al., 2024b</xref>). The overview of the experimental site and the distribution of the grazing intensity treatments are shown in <xref ref-type="fig" rid="f1"><bold>Figure&#xa0;1</bold></xref>.</p>
<fig id="f1" position="float">
<label>Figure&#xa0;1</label>
<caption>
<p>Location of the experimental sites and distribution of areas under different grazing intensities. <bold>(a)</bold> China, <bold>(b)</bold> Inner Mongolia, and <bold>(c)</bold> the study site. (NG, No Grazing; LG, Light Grazing; MG, Moderate Grazing; SG, Severe Grazing).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1730583-g001.tif">
<alt-text content-type="machine-generated">Map illustrating three sections: (a) a map of China highlighting a region, (b) a close-up of the highlighted region, and (c) an aerial view of the land divided into sections labeled LG, SG, NG, and MG. Coordinates and a compass rose are included for orientation.</alt-text>
</graphic></fig>
</sec>
<sec id="s2_3">
<label>2.3</label>
<title>Data acquisition</title>
<sec id="s2_3_1">
<label>2.3.1</label>
<title>Ground investigation</title>
<p>Grassland technical personnel conducted ground surveys and established sample plots within experimental areas subjected to varying levels of grazing intensity. Sample plots were manually established for four grazing intensity levels&#x2014;no grazing, light grazing, moderate grazing, and severe grazing&#x2014;to identify herbaceous species influenced by grazing. This enabled precise identification and extraction of regions of interest in the UAV imagery for subsequent analysis.</p>
</sec>
<sec id="s2_3_2">
<label>2.3.2</label>
<title>Image collection and region of interest extraction</title>
<p>This study used a Mavic 3M UAV (SZ DJI Technology Co., Ltd., Shenzhen, China) to acquire imagery (<xref ref-type="bibr" rid="B54">Yu et&#xa0;al., 2023</xref>). The UAV is equipped with a visible light camera and a four-channel multispectral camera. The visible light camera features a 4/3-inch CMOS sensor with 20 million effective pixels. The multispectral camera captures four bands&#x2014;green (560 &#xb1; 16 nm), red (650 &#xb1; 16 nm), red edge (730 &#xb1; 16 nm), and near-infrared (860 &#xb1; 26 nm)&#x2014;with a resolution of 2592 &#xd7; 1944 pixels per band. The UAV integrates a Global Navigation Satellite System (GNSS) with a Real-Time Kinematic (RTK) module, enabling centimeter-level positioning accuracy.</p>
<p>Imagery was acquired on 19 August 2025, between 11:00 and 12:00, at a flight altitude of 30 m. Both forward and side overlaps were set at 80% to ensure high-quality stitching and subsequent processing. A total of 1,536 visible and 3,155 multispectral images were acquired during the flight. Images were stitched using DJTerra software V3.0 (SZ DJI Technology Co., Ltd., Shenzhen, China), producing visible light orthoimages at ~0.87 cm/pixel and multispectral images at ~1.54 cm/pixel. RGB imagery corresponding to different grazing intensity areas is shown in <xref ref-type="fig" rid="f2"><bold>Figure&#xa0;2</bold></xref>. After UAV image stitching, the red, green, and blue channels from the visible imagery, along with the green, red, red-edge, and near-infrared reflectance from the multispectral imagery, were extracted for the marked ground areas using ENVI 5.6 software (Harris Geospatial, Boulder, CO, USA) (<xref ref-type="bibr" rid="B57">Yu et&#xa0;al., 2024</xref>). To ensure representativeness, region of interest (ROI) samples were selected using a stratified random sampling strategy within each grazing treatment subplot. A total of 720 samples were extracted, with 180 samples assigned to each of the four grazing intensity levels (NG, LG, MG, SG), ensuring balanced class distribution and consistent spatial coverage across treatments.</p>
<fig id="f2" position="float">
<label>Figure&#xa0;2</label>
<caption>
<p>Visible-light UAV images illustrating different grazing intensities. <bold>(a)</bold> No grazing, <bold>(b)</bold> light grazing, <bold>(c)</bold> moderate grazing, and <bold>(d)</bold> severe grazing.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1730583-g002.tif">
<alt-text content-type="machine-generated">Four panels labeled (a) to (d) show grass at different stages of dryness and vegetation density. Panel (a) has lush green grass, (b) shows slightly less green, (c) has more brown patches, and (d) is mostly dry with sparse green areas.</alt-text>
</graphic></fig>
</sec>
</sec>
<sec id="s2_4">
<label>2.4</label>
<title>Grazing intensity monitoring method based on UAV multi-source remote sensing</title>
<p><xref ref-type="fig" rid="f3"><bold>Figure&#xa0;3</bold></xref> illustrates the framework for monitoring grazing intensity using multi-source UAV imagery. The process mainly involves four steps: (1) data acquisition and preprocessing, (2) construction and fusion of multi-source SIs, (3) AIFS, and (4) classification modeling and accuracy evaluation. Data pre-processing includes normalization, standardization, and outlier removal. Multi-source SIs are derived from multiple features, integrating conventional and self-constructed indices to capture complementary information across data sources. An AIFS method, which combines feature importance assessment with redundancy removal, is employed to select the optimal combination of indices gradually. Finally, the performance of the optimized fused features is evaluated using various classification algorithms to achieve high accuracy and robust monitoring.</p>
<fig id="f3" position="float">
<label>Figure&#xa0;3</label>
<caption>
<p>Schematic framework for monitoring grazing intensity using multi-source UAV imagery.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1730583-g003.tif">
<alt-text content-type="machine-generated">Flowchart illustrating a process from data acquisition and preprocessing to selection of spectral indices. Includes steps like image calibration, spectral index construction, evaluation metrics, classification methods, and final feature selection through techniques such as feature importance ranking and cross-validation accuracy testing.</alt-text>
</graphic></fig>
<sec id="s2_4_1">
<label>2.4.1</label>
<title>Data preprocessing</title>
<p>To ensure balanced contributions from visible and multispectral data in subsequent multi-source feature fusion, the raw data were first preprocessed. The pixel values of the red (<inline-formula>
<mml:math display="inline" id="im1"><mml:mrow><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi></mml:mrow></mml:math></inline-formula>), green (<inline-formula>
<mml:math display="inline" id="im2"><mml:mrow><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi></mml:mrow></mml:math></inline-formula>), and blue (<inline-formula>
<mml:math display="inline" id="im3"><mml:mrow><mml:mi>B</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi></mml:mrow></mml:math></inline-formula>) bands in the visible imagery were normalized as follows (<xref ref-type="bibr" rid="B9">Cao et&#xa0;al., 2021</xref>):</p>
<disp-formula>
<mml:math display="block" id="M1"><mml:mrow><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:msub><mml:mi>B</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>m</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi></mml:mrow><mml:mrow><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>+</mml:mo><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>+</mml:mo><mml:mi>B</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi></mml:mrow></mml:mfrac></mml:mrow></mml:math>
</disp-formula>
<disp-formula>
<mml:math display="block" id="M2"><mml:mrow><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:msub><mml:mi>B</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>m</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi></mml:mrow><mml:mrow><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>+</mml:mo><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>+</mml:mo><mml:mi>B</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi></mml:mrow></mml:mfrac></mml:mrow></mml:math>
</disp-formula>
<disp-formula>
<mml:math display="block" id="M3"><mml:mrow><mml:mi>B</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:msub><mml:mi>B</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>m</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mi>B</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi></mml:mrow><mml:mrow><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>+</mml:mo><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>+</mml:mo><mml:mi>B</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi></mml:mrow></mml:mfrac></mml:mrow></mml:math>
</disp-formula>
<p>The reflectance values of the green (<inline-formula>
<mml:math display="inline" id="im4"><mml:mrow><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi></mml:mrow></mml:math></inline-formula>), red (<inline-formula>
<mml:math display="inline" id="im5"><mml:mrow><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi></mml:mrow></mml:math></inline-formula>), red edge (<inline-formula>
<mml:math display="inline" id="im6"><mml:mrow><mml:mi>R</mml:mi><mml:mi>E</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi></mml:mrow></mml:math></inline-formula>), and near-infrared (<inline-formula>
<mml:math display="inline" id="im7"><mml:mrow><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi></mml:mrow></mml:math></inline-formula>) bands in multispectral imagery did not require further normalization. This preprocessing lays the foundation for subsequent construction and fusion of multi-source SIs, preventing feature bias caused by differences in numerical scales.</p>
</sec>
<sec id="s2_4_2">
<label>2.4.2</label>
<title>Multi-source SIs construction</title>
<p>SIs were calculated from combinations of different spectral bands to enhance target features, improve stability, and mitigate interference from factors such as illumination (<xref ref-type="bibr" rid="B14">Guan et&#xa0;al., 2022</xref>). Common approaches for constructing SIs include ratio-based, difference-based, and interaction-based combinations. In this study, 14 self-constructed indices were developed by integrating visible and multispectral band information, optimizing inter-band complementarity, and enhancing feature recognition. Both conventional and self-constructed SIs are summarized in <xref ref-type="table" rid="T1"><bold>Table&#xa0;1</bold></xref> (<xref ref-type="bibr" rid="B46">Varela et&#xa0;al., 2021</xref>) (<xref ref-type="bibr" rid="B62">Zhang et&#xa0;al., 2021b</xref>) (<xref ref-type="bibr" rid="B45">Tilly et&#xa0;al., 2015</xref>) (<xref ref-type="bibr" rid="B8">Bendig et&#xa0;al., 2015</xref>) (<xref ref-type="bibr" rid="B22">Liedtke et&#xa0;al., 2020</xref>) (<xref ref-type="bibr" rid="B65">Zhou et&#xa0;al., 2019</xref>) (<xref ref-type="bibr" rid="B58">Yue et&#xa0;al., 2019</xref>) (<xref ref-type="bibr" rid="B33">Pamungkas, 2023</xref>) (<xref ref-type="bibr" rid="B65">Zhou et&#xa0;al., 2019</xref>) (<xref ref-type="bibr" rid="B33">Pamungkas, 2023</xref>).</p>
<table-wrap id="T1" position="float">
<label>Table&#xa0;1</label>
<caption>
<p>Spectral indices used in this study.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Type</th>
<th valign="middle" align="center">SI</th>
<th valign="middle" align="center">Formula</th>
<th valign="middle" align="center">Reference</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" rowspan="4" align="center">Visible light SIs</td>
<td valign="middle" align="center">ExG</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im8"><mml:mrow><mml:mn>2</mml:mn><mml:mo>&#xb7;</mml:mo><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>B</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">(<xref ref-type="bibr" rid="B46">Varela et&#xa0;al., 2021</xref>)</td>
</tr>
<tr>
<td valign="middle" align="center">VARI</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im9"><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">/</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>+</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>B</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">(<xref ref-type="bibr" rid="B62">Zhang et&#xa0;al., 2021b</xref>)</td>
</tr>
<tr>
<td valign="middle" align="center">ExGR</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im10"><mml:mrow><mml:mi>E</mml:mi><mml:mi>x</mml:mi><mml:mi>G</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mn>1.4</mml:mn><mml:mo>&#xb7;</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">(<xref ref-type="bibr" rid="B45">Tilly et&#xa0;al., 2015</xref>)</td>
</tr>
<tr>
<td valign="middle" align="center">RGBVI</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im11"><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:msup><mml:mi>B</mml:mi><mml:mn>2</mml:mn></mml:msup><mml:mo>&#x2212;</mml:mo><mml:mo>&#xa0;</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>&#xb7;</mml:mo><mml:mi>B</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">/</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:msup><mml:mi>B</mml:mi><mml:mn>2</mml:mn></mml:msup><mml:mo>+</mml:mo><mml:mo>&#xa0;</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>&#xb7;</mml:mo><mml:mi>B</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>&#xa0;</mml:mo><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">(<xref ref-type="bibr" rid="B8">Bendig et&#xa0;al., 2015</xref>)</td>
</tr>
<tr>
<td valign="middle" rowspan="6" align="center">Multispectral SIs</td>
<td valign="middle" align="center">NDVI</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im12"><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">/</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>+</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">(<xref ref-type="bibr" rid="B22">Liedtke et&#xa0;al., 2020</xref>)</td>
</tr>
<tr>
<td valign="middle" align="center">GNDVI</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im13"><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">/</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>+</mml:mo><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">(<xref ref-type="bibr" rid="B65">Zhou et&#xa0;al., 2019</xref>)</td>
</tr>
<tr>
<td valign="middle" align="center">SAVI</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im14"><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mn>1</mml:mn><mml:mo>+</mml:mo><mml:mi>L</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">/</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>+</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>+</mml:mo><mml:mi>L</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">(<xref ref-type="bibr" rid="B58">Yue et&#xa0;al., 2019</xref>)</td>
</tr>
<tr>
<td valign="middle" align="center">MSAVI</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im15"><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mn>2</mml:mn><mml:mo>&#xb7;</mml:mo><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn><mml:mo>&#x2212;</mml:mo><mml:msqrt><mml:mrow><mml:msup><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mn>2</mml:mn><mml:mo>&#xb7;</mml:mo><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mn>2</mml:mn></mml:msup><mml:mo>&#x2212;</mml:mo><mml:mn>8</mml:mn><mml:mo>&#xd7;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:msqrt><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">/</mml:mo><mml:mn>2</mml:mn></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">(<xref ref-type="bibr" rid="B33">Pamungkas, 2023</xref>)</td>
</tr>
<tr>
<td valign="middle" align="center">NDRE</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im16"><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>R</mml:mi><mml:mi>E</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">/</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>+</mml:mo><mml:mi>R</mml:mi><mml:mi>E</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">(<xref ref-type="bibr" rid="B65">Zhou et&#xa0;al., 2019</xref>)</td>
</tr>
<tr>
<td valign="middle" align="center">EVI2</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im17"><mml:mrow><mml:mn>2.5</mml:mn><mml:mo>&#xb7;</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">/</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>+</mml:mo><mml:mn>2.4</mml:mn><mml:mo>&#xb7;</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>+</mml:mo><mml:mn>1</mml:mn><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">(<xref ref-type="bibr" rid="B33">Pamungkas, 2023</xref>)</td>
</tr>
<tr>
<td valign="middle" rowspan="14" align="center">Self-constructed SIs</td>
<td valign="middle" align="center">idx1</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im18"><mml:mrow><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo stretchy="false">/</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>B</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">This study</td>
</tr>
<tr>
<td valign="middle" align="center">idx2</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im19"><mml:mrow><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo stretchy="false">/</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">This study</td>
</tr>
<tr>
<td valign="middle" align="center">idx3</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im20"><mml:mrow><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">/</mml:mo><mml:mi>R</mml:mi><mml:mi>E</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">This study</td>
</tr>
<tr>
<td valign="middle" align="center">idx4</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im21"><mml:mrow><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">/</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">This study</td>
</tr>
<tr>
<td valign="middle" align="center">idx5</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im22"><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">/</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>+</mml:mo><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">This study</td>
</tr>
<tr>
<td valign="middle" align="center">idx6</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im23"><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mi>R</mml:mi><mml:mi>E</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">/</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>R</mml:mi><mml:mi>E</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>+</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">This study</td>
</tr>
<tr>
<td valign="middle" align="center">idx7</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im24"><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mi>B</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:msub><mml:mi>B</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>m</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2212;</mml:mo><mml:mi>R</mml:mi><mml:mi>E</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">/</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>B</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:msub><mml:mi>B</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>m</mml:mi></mml:mrow></mml:msub><mml:mo>+</mml:mo><mml:mi>R</mml:mi><mml:mi>E</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">This study</td>
</tr>
<tr>
<td valign="middle" align="center">idx8</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im25"><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mo>&#xa0;</mml:mo><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:msub><mml:mi>B</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>m</mml:mi></mml:mrow></mml:msub><mml:mo>&#xb7;</mml:mo><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">/</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>&#xb7;</mml:mo><mml:mi>R</mml:mi><mml:mi>E</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">This study</td>
</tr>
<tr>
<td valign="middle" align="center">idx9</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im26"><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mi>B</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:msub><mml:mi>B</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>m</mml:mi></mml:mrow></mml:msub><mml:mo>&#xb7;</mml:mo><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">/</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:msub><mml:mi>B</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>m</mml:mi></mml:mrow></mml:msub><mml:mo>&#xb7;</mml:mo><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:msub><mml:mi>B</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>m</mml:mi></mml:mrow></mml:msub><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">This study</td>
</tr>
<tr>
<td valign="middle" align="center">idx10</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im27"><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>&#xb7;</mml:mo><mml:mi>R</mml:mi><mml:mi>E</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">/</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>&#xb7;</mml:mo><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">This study</td>
</tr>
<tr>
<td valign="middle" align="center">idx11</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im28"><mml:mrow><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo stretchy="false">/</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>+</mml:mo><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>+</mml:mo><mml:mi>B</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">This study</td>
</tr>
<tr>
<td valign="middle" align="center">idx12</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im29"><mml:mrow><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo stretchy="false">/</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>+</mml:mo><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>+</mml:mo><mml:mi>B</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">This study</td>
</tr>
<tr>
<td valign="middle" align="center">idx13</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im30"><mml:mrow><mml:mi>B</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo stretchy="false">/</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>+</mml:mo><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo>+</mml:mo><mml:mi>B</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">This study</td>
</tr>
<tr>
<td valign="middle" align="center">idx14</td>
<td valign="middle" align="center"><inline-formula>
<mml:math display="inline" id="im31"><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>m</mml:mi><mml:mi>e</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:msub><mml:mi>B</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>m</mml:mi></mml:mrow></mml:msub><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">/</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mo>+</mml:mo><mml:mi>m</mml:mi><mml:mi>e</mml:mi><mml:mi>a</mml:mi><mml:mi>n</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:msub><mml:mi>B</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>m</mml:mi></mml:mrow></mml:msub><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
<td valign="middle" align="center">This study</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s2_4_3">
<label>2.4.3</label>
<title>Automatic incremental feature selection</title>
<p>To further enhance the discriminative power of the integrated features, this study employed an AIFS method on the feature matrix <inline-formula>
<mml:math display="inline" id="im32"><mml:mrow><mml:msub><mml:mtext>X</mml:mtext><mml:mrow><mml:mtext>all</mml:mtext></mml:mrow></mml:msub></mml:mrow></mml:math></inline-formula>, which combines visible indices, multispectral indices, and self-constructed indices.</p>
<sec id="s2_4_3_1">
<label>2.4.3.1</label>
<title>Feature importance ranking</title>
<p>Feature importance was evaluated using the Random Forest (RF) algorithm (<xref ref-type="bibr" rid="B7">Belgiu and Dr&#x103;gu&#x163;, 2016</xref>). Let the feature fusion matrix be denoted as <inline-formula>
<mml:math display="inline" id="im33"><mml:mrow><mml:msub><mml:mtext>X</mml:mtext><mml:mrow><mml:mi>a</mml:mi><mml:mi>l</mml:mi><mml:mi>l</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2208;</mml:mo><mml:msup><mml:mi>&#x211d;</mml:mi><mml:mrow><mml:mi>n</mml:mi><mml:mo>&#xd7;</mml:mo><mml:mi>m</mml:mi></mml:mrow></mml:msup></mml:mrow></mml:math></inline-formula>, where n is the number of samples and m is the total number of features, and let the class vector be <inline-formula>
<mml:math display="inline" id="im34"><mml:mrow><mml:msub><mml:mtext>Y</mml:mtext><mml:mrow><mml:mi>a</mml:mi><mml:mi>l</mml:mi><mml:mi>l</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math></inline-formula>. The RF model builds multiple decision trees and calculates the incremental prediction error on out-of-bag (OOB) samples for each tree to obtain the importance score <inline-formula>
<mml:math display="inline" id="im35"><mml:mrow><mml:msub><mml:mi>I</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> for each feature:</p>
<disp-formula>
<mml:math display="block" id="M4"><mml:mrow><mml:msub><mml:mi>I</mml:mi><mml:mi>j</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:mfrac><mml:mn>1</mml:mn><mml:mi>T</mml:mi></mml:mfrac><mml:munderover><mml:mstyle displaystyle="true"><mml:mo>&#x2211;</mml:mo></mml:mstyle><mml:mrow><mml:mi>t</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>T</mml:mi></mml:munderover><mml:mi>&#x394;</mml:mi><mml:msubsup><mml:mrow><mml:mtext>Err</mml:mtext></mml:mrow><mml:mrow><mml:mi>t</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow><mml:mrow><mml:mi>O</mml:mi><mml:mi>O</mml:mi><mml:mi>B</mml:mi></mml:mrow></mml:msubsup></mml:mrow></mml:math>
</disp-formula>
<p>Here, <italic>T</italic> is the total number of decision trees, and <inline-formula>
<mml:math display="inline" id="im36"><mml:mrow><mml:mi>&#x394;</mml:mi><mml:msubsup><mml:mrow><mml:mtext>Err</mml:mtext></mml:mrow><mml:mrow><mml:mi>t</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow><mml:mrow><mml:mi>O</mml:mi><mml:mi>O</mml:mi><mml:mi>B</mml:mi></mml:mrow></mml:msubsup></mml:mrow></mml:math></inline-formula> represents the increase in OOB error caused by randomly permuting the values of feature <italic>j</italic> in the <italic>t</italic>-th tree. Features were then ranked in descending order based on <inline-formula>
<mml:math display="inline" id="im37"><mml:mrow><mml:msub><mml:mi>I</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula>, resulting in the sorted feature matrix <inline-formula>
<mml:math display="inline" id="im38"><mml:mrow><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>s</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>d</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math></inline-formula>.</p>
</sec>
<sec id="s2_4_3_2">
<label>2.4.3.2</label>
<title>Incremental feature selection for combinations</title>
<p>To minimize the impact of redundant features while maintaining classification discriminative power, an incremental combination strategy was used to select features gradually. The procedure is as follows:</p>
<p>1) Candidate features were sequentially added from the sorted feature matrix.</p>
<p>2) The Pearson correlation coefficient between the candidate feature <inline-formula>
<mml:math display="inline" id="im39"><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> and the selected feature set <italic>S</italic> was calculated (<xref ref-type="bibr" rid="B15">Guan et&#xa0;al., 2023</xref>):</p>
<disp-formula>
<mml:math display="block" id="M5"><mml:mrow><mml:msub><mml:mi>&#x3c1;</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>k</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mtext>cov</mml:mtext><mml:mo stretchy="false">(</mml:mo><mml:msub><mml:mi>x</mml:mi><mml:mi>j</mml:mi></mml:msub><mml:mo>,</mml:mo><mml:msub><mml:mi>x</mml:mi><mml:mi>k</mml:mi></mml:msub><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mrow><mml:msub><mml:mi>&#x3c3;</mml:mi><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow></mml:msub><mml:msub><mml:mi>&#x3c3;</mml:mi><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mi>k</mml:mi></mml:msub></mml:mrow></mml:msub></mml:mrow></mml:mfrac><mml:mo>,</mml:mo><mml:mo>&#xa0;</mml:mo><mml:mo>&#xa0;</mml:mo><mml:mi>k</mml:mi><mml:mo>&#x2208;</mml:mo><mml:mi>S</mml:mi></mml:mrow></mml:math>
</disp-formula>
<p>If the absolute Pearson correlation <inline-formula>
<mml:math display="inline" id="im40"><mml:mrow><mml:mo>&#x2223;</mml:mo><mml:msub><mml:mi>&#x3c1;</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>k</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2223;</mml:mo></mml:mrow></mml:math></inline-formula> between the candidate feature <inline-formula>
<mml:math display="inline" id="im41"><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> and any feature <inline-formula>
<mml:math display="inline" id="im42"><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mi>k</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> in the selected set <italic>S</italic> exceeds the threshold <inline-formula>
<mml:math display="inline" id="im43"><mml:mrow><mml:msub><mml:mi>&#x3c1;</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mi>h</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>s</mml:mi><mml:mi>h</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math></inline-formula>=0.8, the candidate feature is considered highly correlated with existing features and is excluded; otherwise, it is added to the selected feature set.</p>
<p>3) For each newly added feature, overall accuracy (OA) was calculated using 5-fold cross-validation:</p>
<disp-formula>
<mml:math display="block" id="M6"><mml:mrow><mml:mtext>OA</mml:mtext><mml:mo>=</mml:mo><mml:mn>1</mml:mn><mml:mo>&#x2212;</mml:mo><mml:mtext>CV&#xa0;Loss</mml:mtext><mml:mo>=</mml:mo><mml:mn>1</mml:mn><mml:mo>&#x2212;</mml:mo><mml:mfrac><mml:mn>1</mml:mn><mml:mi>K</mml:mi></mml:mfrac><mml:munderover><mml:mstyle displaystyle="true"><mml:mo>&#x2211;</mml:mo></mml:mstyle><mml:mrow><mml:mi>i</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>K</mml:mi></mml:munderover><mml:msub><mml:mi>L</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:math>
</disp-formula>
<p>Here, <inline-formula>
<mml:math display="inline" id="im44"><mml:mrow><mml:msub><mml:mi>L</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> denotes the error rate for the <italic>i</italic>-th fold, with <inline-formula>
<mml:math display="inline" id="im45"><mml:mrow><mml:mi>K</mml:mi><mml:mo>=</mml:mo><mml:mn>5</mml:mn></mml:mrow></mml:math></inline-formula>. Newly added features were retained in the final subset only if they improved the OA. This strategy ensured that the selected features minimized redundancy while maximizing classification performance.</p>
</sec>
<sec id="s2_4_3_3">
<label>2.4.3.3</label>
<title>Output the final feature subset</title>
<p>After iterative incremental selection, the optimal feature subset, <inline-formula>
<mml:math display="inline" id="im46"><mml:mrow><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>c</mml:mi><mml:mi>o</mml:mi><mml:mi>n</mml:mi><mml:mi>s</mml:mi><mml:mi>t</mml:mi><mml:mi>r</mml:mi><mml:mi>u</mml:mi><mml:mi>c</mml:mi><mml:mi>t</mml:mi><mml:mi>e</mml:mi><mml:mi>d</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math></inline-formula>, is obtained. This subset serves as input for subsequent grazing intensity classification, enabling high-precision and robust classification performance.</p>
<p><xref ref-type="statement" rid="st1"><bold>Algorithm 1</bold></xref> summarizes the AIFS procedure. The algorithm ranks features using RF importance, incrementally evaluates candidate features based on Pearson correlation and cross-validated accuracy, and retains only those that improve classification while minimizing redundancy.</p>
<statement content-type="algorithm" id="st1">
<label>Algorithm 1</label>
<p><graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1730583-sg001.tif"/></p>
</statement>
</sec>
</sec>
<sec id="s2_4_4">
<label>2.4.4</label>
<title>Classification methods</title>
<p>This study used three widely applied classification algorithms&#x2014;K-nearest neighbor (KNN) (<xref ref-type="bibr" rid="B1">Abdullah et&#xa0;al., 2001</xref>), support vector machine (SVM) (<xref ref-type="bibr" rid="B13">Guan et&#xa0;al., 2025</xref>), and Random Forest (RF) (<xref ref-type="bibr" rid="B16">Guo et&#xa0;al., 2024</xref>)&#x2014;to classify grazing intensity based on the fused features.</p>
<sec id="s2_4_4_1">
<label>2.4.4.1</label>
<title>K-nearest neighbor</title>
<p>KNN is a non-parametric classification method based on the similarity of samples. For a sample <inline-formula>
<mml:math display="inline" id="im72"><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mn>0</mml:mn></mml:msub></mml:mrow></mml:math></inline-formula>, the algorithm calculates distances to all training samples, selects the K nearest neighbors, and assigns <inline-formula>
<mml:math display="inline" id="im73"><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mn>0</mml:mn></mml:msub></mml:mrow></mml:math></inline-formula> to the majority class of these neighbors. The procedure is expressed as:</p>
<disp-formula>
<mml:math display="block" id="M7"><mml:mrow><mml:mover accent="true"><mml:mi>y</mml:mi><mml:mo>^</mml:mo></mml:mover><mml:mo>=</mml:mo><mml:munder><mml:mrow><mml:mi>arg</mml:mi><mml:mi>max</mml:mi></mml:mrow><mml:mi>c</mml:mi></mml:munder><mml:munder><mml:mstyle displaystyle="true"><mml:mo>&#x2211;</mml:mo></mml:mstyle><mml:mrow><mml:mi>i</mml:mi><mml:mo>&#x2208;</mml:mo><mml:msub><mml:mi>N</mml:mi><mml:mi>K</mml:mi></mml:msub><mml:mo stretchy="false">(</mml:mo><mml:msub><mml:mi>x</mml:mi><mml:mn>0</mml:mn></mml:msub><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:munder><mml:msub><mml:mi>&#x3c9;</mml:mi><mml:mi>i</mml:mi></mml:msub><mml:mo>&#xb7;</mml:mo><mml:mi mathvariant="double-struck">I</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:msub><mml:mi>y</mml:mi><mml:mi>i</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:mi>c</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math>
</disp-formula>
<p>Here, <inline-formula>
<mml:math display="inline" id="im74"><mml:mrow><mml:msub><mml:mi>N</mml:mi><mml:mi>K</mml:mi></mml:msub><mml:mo stretchy="false">(</mml:mo><mml:msub><mml:mi>x</mml:mi><mml:mn>0</mml:mn></mml:msub><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula> denotes the set of K nearest neighbors of <inline-formula>
<mml:math display="inline" id="im75"><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mn>0</mml:mn></mml:msub></mml:mrow></mml:math></inline-formula>, <inline-formula>
<mml:math display="inline" id="im76"><mml:mrow><mml:msub><mml:mi>&#x3c9;</mml:mi><mml:mi>i</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:mn>1</mml:mn><mml:mo stretchy="false">/</mml:mo><mml:mtext>d</mml:mtext><mml:mo stretchy="false">(</mml:mo><mml:msub><mml:mi>x</mml:mi><mml:mn>0</mml:mn></mml:msub><mml:mo>,</mml:mo><mml:msub><mml:mi>x</mml:mi><mml:mi>i</mml:mi></mml:msub><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula> represents the distance weight, and <inline-formula>
<mml:math display="inline" id="im77"><mml:mrow><mml:mi mathvariant="double-struck">I</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mo>&#xb7;</mml:mo><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula> is the indicator function.</p>
</sec>
<sec id="s2_4_4_2">
<label>2.4.4.2</label>
<title>Support vector machine</title>
<p>SVM is a maximum-margin method for binary or multi-class classification, separating samples by finding the optimal hyperplane that maximizes the margin between classes. In this study, a radial basis function (RBF) kernel was used for non-linear mapping. Kernel scale and box constraint parameters were optimized via grid search. The SVM classifier is expressed as:</p>
<disp-formula>
<mml:math display="block" id="M8"><mml:mrow><mml:mi>f</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>x</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo>=</mml:mo><mml:mtext>sign</mml:mtext><mml:mo stretchy="false">(</mml:mo><mml:munderover><mml:mstyle displaystyle="true"><mml:mo>&#x2211;</mml:mo></mml:mstyle><mml:mrow><mml:mi>i</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>N</mml:mi></mml:munderover><mml:msub><mml:mi>&#x3b1;</mml:mi><mml:mi>i</mml:mi></mml:msub><mml:msub><mml:mi>y</mml:mi><mml:mi>i</mml:mi></mml:msub><mml:mi>K</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:msub><mml:mi>x</mml:mi><mml:mi>i</mml:mi></mml:msub><mml:mo>,</mml:mo><mml:mi>x</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo>+</mml:mo><mml:mi>b</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math>
</disp-formula>
<p>Here, <inline-formula>
<mml:math display="inline" id="im78"><mml:mrow><mml:mi>K</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:msub><mml:mi>x</mml:mi><mml:mi>i</mml:mi></mml:msub><mml:mo>,</mml:mo><mml:mi>x</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula> denotes the RBF kernel; <inline-formula>
<mml:math display="inline" id="im79"><mml:mrow><mml:msub><mml:mi>&#x3b1;</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> is the Lagrange multiplier; and <italic>b</italic> is the bias term.</p>
</sec>
<sec id="s2_4_4_3">
<label>2.4.4.3</label>
<title>Random forest</title>
<p>Random Forest (RF) is an ensemble learning method that classifies data by constructing multiple decision trees. Each tree is trained using bootstrap sampling, which randomly selects a subset of features at each node, thereby reducing overfitting and improving stability. Final classification is determined by majority voting:</p>
<disp-formula>
<mml:math display="block" id="M9"><mml:mrow><mml:mover accent="true"><mml:mrow><mml:msub><mml:mi>y</mml:mi><mml:mn>0</mml:mn></mml:msub></mml:mrow><mml:mo stretchy="true">^</mml:mo></mml:mover><mml:mo>=</mml:mo><mml:mtext>mode</mml:mtext><mml:mo>{</mml:mo><mml:msub><mml:mi>h</mml:mi><mml:mn>1</mml:mn></mml:msub><mml:mo stretchy="false">(</mml:mo><mml:mi>x</mml:mi><mml:mn>0</mml:mn><mml:mo stretchy="false">)</mml:mo><mml:mo>,</mml:mo><mml:msub><mml:mi>h</mml:mi><mml:mn>2</mml:mn></mml:msub><mml:mo stretchy="false">(</mml:mo><mml:mi>x</mml:mi><mml:mn>0</mml:mn><mml:mo stretchy="false">)</mml:mo><mml:mo>,</mml:mo><mml:mo>&#x2026;</mml:mo><mml:mo>,</mml:mo><mml:msub><mml:mi>h</mml:mi><mml:mi>T</mml:mi></mml:msub><mml:mo stretchy="false">(</mml:mo><mml:mi>x</mml:mi><mml:mn>0</mml:mn><mml:mo stretchy="false">)</mml:mo><mml:mo>}</mml:mo></mml:mrow></mml:math>
</disp-formula>
<p>Here, <inline-formula>
<mml:math display="inline" id="im80"><mml:mrow><mml:msub><mml:mi>h</mml:mi><mml:mi>t</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> is the prediction of the <italic>t</italic>-th decision tree, and T is the total number of trees.</p>
</sec>
<sec id="s2_4_4_4">
<label>2.4.4.4</label>
<title>Classification test and parameter setting</title>
<p>To ensure balanced training and test sets, the dataset was divided using the Sample set Partitioning based on joint X&#x2013;Y distances (SPXY) method, with 70% allocated to training and 30% to testing (<xref ref-type="bibr" rid="B50">Wu et&#xa0;al., 1996</xref>). Grid search optimization was performed for all classifiers. For KNN, the number of neighbors K was set to [3, 5, 7, 9] with distance metrics including Euclidean, Manhattan, and cosine. For SVM, the RBF kernel scale was set to [0.1, 0.5, 1, 2], and the box constraint parameter C was set to [0.1, 1, 10]. For RF, the number of trees was set to [100, 200, 300], with minimum leaf nodes [1, 5, 10].</p>
</sec>
</sec>
<sec id="s2_4_5">
<label>2.4.5</label>
<title>Evaluation indicators</title>
<sec id="s2_4_5_1">
<label>2.4.5.1</label>
<title>Separability evaluation</title>
<p>To assess the discriminative capability of spectral features under different grazing intensities, a qualitative analysis was first performed by comparing means and variances. Quantitative assessment was conducted using one-way analysis of variance (ANOVA) followed by Tukey&#x2019;s multiple comparison test (<xref ref-type="bibr" rid="B43">Stoline, 1981</xref>). To quantify the classification ability of SIs, the M statistic was introduced, defined as: <inline-formula>
<mml:math display="inline" id="im81"><mml:mrow><mml:mi>M</mml:mi><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mo>|</mml:mo><mml:mover accent="true"><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow><mml:mo stretchy="true">&#xaf;</mml:mo></mml:mover><mml:mo>&#x2212;</mml:mo><mml:mover accent="true"><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow><mml:mo stretchy="true">&#xaf;</mml:mo></mml:mover><mml:mo>|</mml:mo></mml:mrow><mml:mrow><mml:msubsup><mml:mi>s</mml:mi><mml:mn>1</mml:mn><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msubsup><mml:mo>+</mml:mo><mml:msubsup><mml:mi>s</mml:mi><mml:mn>2</mml:mn><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msubsup></mml:mrow></mml:mfrac></mml:mrow></mml:math></inline-formula>Here, <inline-formula>
<mml:math display="inline" id="im82"><mml:mrow><mml:mover accent="true"><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mn>1</mml:mn></mml:msub></mml:mrow><mml:mo stretchy="true">&#xaf;</mml:mo></mml:mover></mml:mrow></mml:math></inline-formula> and <inline-formula>
<mml:math display="inline" id="im83"><mml:mrow><mml:mover accent="true"><mml:mrow><mml:msub><mml:mi>x</mml:mi><mml:mn>2</mml:mn></mml:msub></mml:mrow><mml:mo stretchy="true">&#xaf;</mml:mo></mml:mover></mml:mrow></mml:math></inline-formula> denote the sample means of the two categories, and <inline-formula>
<mml:math display="inline" id="im84"><mml:mrow><mml:msubsup><mml:mi>s</mml:mi><mml:mn>1</mml:mn><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msubsup></mml:mrow></mml:math></inline-formula> and <inline-formula>
<mml:math display="inline" id="im85"><mml:mrow><mml:msubsup><mml:mi>s</mml:mi><mml:mn>2</mml:mn><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msubsup></mml:mrow></mml:math></inline-formula> represent the corresponding within-class variances. A higher M value indicates a greater difference in feature distribution between the two categories, reflecting stronger separability (<xref ref-type="bibr" rid="B42">Smith et&#xa0;al., 2007</xref>).</p>
</sec>
<sec id="s2_4_5_2">
<label>2.4.5.2</label>
<title>Evaluation of classification accuracy</title>
<p>After fitting the classifier to the training set, its performance was evaluated on the test set using overall accuracy (OA) and the Kappa coefficient (<inline-formula>
<mml:math display="inline" id="im86"><mml:mrow><mml:mi>&#x3ba;</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula> (<xref ref-type="bibr" rid="B14">Guan et&#xa0;al., 2022</xref>). Overall OA was calculated as:</p>
<disp-formula>
<mml:math display="block" id="M10"><mml:mrow><mml:mtext>OA</mml:mtext><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:msubsup><mml:mstyle displaystyle="true"><mml:mo>&#x2211;</mml:mo></mml:mstyle><mml:mrow><mml:mi>i</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>k</mml:mi></mml:msubsup><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mi>i</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mtext>n</mml:mtext></mml:mfrac></mml:mrow></mml:math>
</disp-formula>
<p>Here, <italic>k</italic> represents the total number of categories, and <inline-formula>
<mml:math display="inline" id="im87"><mml:mrow><mml:msubsup><mml:mstyle displaystyle="true"><mml:mo>&#x2211;</mml:mo></mml:mstyle><mml:mrow><mml:mi>i</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>k</mml:mi></mml:msubsup><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mi>i</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:math></inline-formula> represents the number of correctly classified samples.</p>
<p>The Kappa coefficient (<inline-formula>
<mml:math display="inline" id="im88"><mml:mi>&#x3ba;</mml:mi></mml:math></inline-formula>) measures the agreement between the observed classification accuracy and that expected by random chance, calculated as:</p>
<disp-formula>
<mml:math display="block" id="M11"><mml:mrow><mml:mi>&#x3ba;</mml:mi><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:msub><mml:mi>p</mml:mi><mml:mi>o</mml:mi></mml:msub><mml:mo>&#x2212;</mml:mo><mml:msub><mml:mi>p</mml:mi><mml:mi>e</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mn>1</mml:mn><mml:mo>&#x2212;</mml:mo><mml:msub><mml:mi>p</mml:mi><mml:mi>e</mml:mi></mml:msub></mml:mrow></mml:mfrac></mml:mrow></mml:math>
</disp-formula>
<p>Here,</p>
<disp-formula>
<mml:math display="block" id="M12"><mml:mrow><mml:msub><mml:mi>p</mml:mi><mml:mi>o</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:mi>O</mml:mi><mml:mi>A</mml:mi><mml:mo>&#xa0;</mml:mo><mml:mo>,</mml:mo><mml:mo>&#xa0;</mml:mo><mml:msub><mml:mi>p</mml:mi><mml:mi>e</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:msubsup><mml:mstyle displaystyle="true"><mml:mo>&#x2211;</mml:mo></mml:mstyle><mml:mrow><mml:mi>i</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>k</mml:mi></mml:msubsup><mml:mo stretchy="false">(</mml:mo><mml:msubsup><mml:mstyle displaystyle="true"><mml:mo>&#x2211;</mml:mo></mml:mstyle><mml:mrow><mml:mi>j</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>k</mml:mi></mml:msubsup><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mi>j</mml:mi></mml:mrow></mml:msub><mml:mo stretchy="false">)</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:msubsup><mml:mstyle displaystyle="true"><mml:mo>&#x2211;</mml:mo></mml:mstyle><mml:mrow><mml:mi>j</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mi>k</mml:mi></mml:msubsup><mml:msub><mml:mi>c</mml:mi><mml:mrow><mml:mi>j</mml:mi><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mrow><mml:msup><mml:mi>n</mml:mi><mml:mn>2</mml:mn></mml:msup></mml:mrow></mml:mfrac></mml:mrow></mml:math>
</disp-formula>
<p>Here, <inline-formula>
<mml:math display="inline" id="im89"><mml:mrow><mml:msub><mml:mi>p</mml:mi><mml:mi>o</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> denotes the observed classification accuracy, and <inline-formula>
<mml:math display="inline" id="im90"><mml:mrow><mml:msub><mml:mi>p</mml:mi><mml:mi>e</mml:mi></mml:msub></mml:mrow></mml:math></inline-formula> represents the expected accuracy under random classification.</p>
</sec>
</sec>
</sec>
</sec>
<sec id="s3" sec-type="results">
<label>3</label>
<title>Results</title>
<sec id="s3_1">
<label>3.1</label>
<title>Detection of grazing intensity by spectral features</title>
<p>The spectral mean and variance curves under different grazing intensities are presented in <xref ref-type="fig" rid="f4"><bold>Figure&#xa0;4</bold></xref>. The grey values of the three visible-light features generally increased with grazing intensity, but decreased under severe grazing. In the multispectral bands, green and red reflectance increased with grazing intensity, whereas the red-edge and near-infrared bands showed decreasing trends. However, under severe grazing, both the red-edge and near-infrared bands exhibited an increasing trend.</p>
<fig id="f4" position="float">
<label>Figure&#xa0;4</label>
<caption>
<p>Mean and variance curves under different grazing intensities. <bold>(a)</bold> Visible-light features, and <bold>(b)</bold> multispectral features. (NG, No Grazing; LG, Light Grazing; MG, Moderate Grazing; SG, Severe Grazing).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1730583-g004.tif">
<alt-text content-type="machine-generated">Two graphs labeled (a) and (b). Graph (a) shows greyscale value against RGB color components with four lines (NG, LG, MG, SG) showing decreasing trends. Graph (b) shows reflectance against waveband (G_MS to NIR_MS) with the same four lines showing increasing trends.</alt-text>
</graphic></fig>
<p>ANOVA results for visible and multispectral features revealed significant differences (p &lt; 0.05) across grazing intensities, indicating that grazing intensity has a substantial impact on spectral responses. Tukey&#x2019;s <italic>post-hoc</italic> test (<xref ref-type="fig" rid="f5"><bold>Figure&#xa0;5</bold></xref>) showed that the red (<inline-formula>
<mml:math display="inline" id="im98"><mml:mrow><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi></mml:mrow></mml:math></inline-formula>) and blue (<inline-formula>
<mml:math display="inline" id="im99"><mml:mrow><mml:mi>B</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi></mml:mrow></mml:math></inline-formula>) components significantly distinguished between low, medium, and severe grazing intensities, with the most substantial differences observed under extreme grazing conditions. The green (<inline-formula>
<mml:math display="inline" id="im100"><mml:mrow><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi></mml:mrow></mml:math></inline-formula>) component demonstrated a limited ability to separate low and medium grazing intensities, but retained significant differentiation under severe grazing conditions. For multispectral bands, the green (<inline-formula>
<mml:math display="inline" id="im101"><mml:mrow><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi></mml:mrow></mml:math></inline-formula>) and red (<inline-formula>
<mml:math display="inline" id="im102"><mml:mrow><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi></mml:mrow></mml:math></inline-formula>) bands showed substantial differences between low and severe grazing intensities. The red-edge (<inline-formula>
<mml:math display="inline" id="im103"><mml:mrow><mml:mi>R</mml:mi><mml:mi>E</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi></mml:mrow></mml:math></inline-formula>) and near-infrared (<inline-formula>
<mml:math display="inline" id="im104"><mml:mrow><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi></mml:mrow></mml:math></inline-formula>) bands were sensitive not only to the transition between low and severe grazing, but also to the transition between medium and severe grazing, highlighting their strong ability to capture variations in vegetation status and grazing conditions. Overall, multispectral features offered more comprehensive discrimination of grazing levels than visible-light features. Specifically, the red-edge and near-infrared bands effectively captured vegetation changes under medium-to-severe grazing, whereas visible-light features were more sensitive to extreme grazing.</p>
<fig id="f5" position="float">
<label>Figure&#xa0;5</label>
<caption>
<p>Tukey&#x2019;s multiple comparison results for original spectral features across grazing intensities. <bold>(a)</bold><inline-formula>
<mml:math display="inline" id="im91"><mml:mrow><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi></mml:mrow></mml:math></inline-formula>, <bold>(b)</bold><inline-formula>
<mml:math display="inline" id="im92"><mml:mrow><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi></mml:mrow></mml:math></inline-formula>, <bold>(c)</bold><inline-formula>
<mml:math display="inline" id="im93"><mml:mrow><mml:mi>B</mml:mi><mml:mo>_</mml:mo><mml:mi>R</mml:mi><mml:mi>G</mml:mi><mml:mi>B</mml:mi></mml:mrow></mml:math></inline-formula>, <bold>(d)</bold><inline-formula>
<mml:math display="inline" id="im94"><mml:mrow><mml:mi>G</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi></mml:mrow></mml:math></inline-formula>, <bold>(e)</bold><inline-formula>
<mml:math display="inline" id="im95"><mml:mrow><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi></mml:mrow></mml:math></inline-formula>, <bold>(f)</bold><inline-formula>
<mml:math display="inline" id="im96"><mml:mrow><mml:mi>R</mml:mi><mml:mi>E</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi></mml:mrow></mml:math></inline-formula>, and <bold>(g)</bold><inline-formula>
<mml:math display="inline" id="im97"><mml:mrow><mml:mi>N</mml:mi><mml:mi>I</mml:mi><mml:mi>R</mml:mi><mml:mo>_</mml:mo><mml:mi>M</mml:mi><mml:mi>S</mml:mi></mml:mrow></mml:math></inline-formula>. Significant differences among grazing groups are indicated (p &lt; 0.05). (e.g., NG, No Grazing; LG, Light Grazing; MG, Moderate Grazing; SG, Severe Grazing).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1730583-g005.tif">
<alt-text content-type="machine-generated">Box plots illustrating RGB and MS values across different treatments (CK, LG, MG, SG) at a significance level of 0.05. Panels (a) to (g) display R_RGB, G_RGB, B_RGB, G_MS, R_MS, RE_MS, and NIR_MS respectively. Each plot uses different letters (a, b, c, etc.) to denote statistical differences among treatments. Outliers are shown as individual points.</alt-text>
</graphic></fig>
<p>In this study, the classification performance of grazing intensity was evaluated using KNN, SVM, and RF based on three feature sets: visible light (RGB), multispectral (Multi), and multi-source fusion (RGB+Multi). The results are presented in <xref ref-type="fig" rid="f6"><bold>Figure&#xa0;6</bold></xref>. Among single-source features, RGB achieved the highest accuracy with KNN, yielding an OA of 78.77% and a Kappa of 70.86%. The Multi-features performed best with RF, achieving an OA of 75.94% and a Kappa of 66.74%. In contrast, the multi-source fusion features markedly improved classification, achieving OA values of 83.49%, 86.79%, and 81.13%, with corresponding Kappa values of 76.62%, 81.28%, and 73.25%. These results demonstrate that integrating visible and multispectral features leverages complementary spectral information to improve discrimination among grazing intensity levels. Overall, the combination of multi-source fusion features with the SVM classifier yielded the highest accuracy and the most consistent results.</p>
<fig id="f6" position="float">
<label>Figure&#xa0;6</label>
<caption>
<p>Classification performance of three feature sets (RGB, Multi, and RGB+Multi) using KNN, SVM, and RF classifiers. <bold>(a)</bold> OA, and <bold>(b)</bold> Kappa coefficient.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1730583-g006.tif">
<alt-text content-type="machine-generated">Bar charts comparing Overall Accuracy (OA) and Kappa for RGB, Multi, and RGB+Multi data using KNN, SVM, and RF classifiers. Chart (a) shows OA, with RGB+Multi performing best. Chart (b) shows Kappa, with similar trends and RGB+Multi showing higher consistency.</alt-text>
</graphic></fig>
<p><xref ref-type="fig" rid="f7"><bold>Figure&#xa0;7</bold></xref> presents pixel-level classification results under different combinations of original features and classifiers. As shown in <xref ref-type="fig" rid="f7"><bold>Figure&#xa0;7a</bold></xref>, the experimental area was divided into four grazing intensity levels: no grazing (NG), light grazing (LG), moderate grazing (MG), and severe grazing (SG). <xref ref-type="fig" rid="f7"><bold>Figures&#xa0;7b&#x2013;d</bold></xref> illustrate the classification maps generated by different feature combinations and classifiers. The classification map based on visible (RGB) features using the KNN classifier (<xref ref-type="fig" rid="f7"><bold>Figure&#xa0;7b</bold></xref>) roughly captures the grazing intensity distribution but shows some confusion between LG and MG regions. When using multispectral (Multi) features with the KNN classifier (<xref ref-type="fig" rid="f7"><bold>Figure&#xa0;7c</bold></xref>), the classification accuracy improves slightly, particularly in distinguishing NG and SG areas. However, misclassifications remain at moderate levels. In contrast, the multi-source fusion (RGB+Multi) combined with the SVM classifier (<xref ref-type="fig" rid="f7"><bold>Figure&#xa0;7d</bold></xref>) produces the most accurate and spatially consistent classification, with clear boundaries between different grazing intensities that closely align with the actual distribution. This demonstrates that multi-source feature fusion effectively enhances spatial discrimination of grazing intensity and mitigates spectral confusion caused by soil&#x2013;vegetation mixing in sandy grasslands.</p>
<fig id="f7" position="float">
<label>Figure&#xa0;7</label>
<caption>
<p>Pixel-level classification maps under different combinations of original features and classifiers. <bold>(a)</bold> Reference grazing intensity map; <bold>(b)</bold> RGB features with KNN, <bold>(c)</bold> multispectral (Multi) features with KNN, and <bold>(d)</bold> multi-source fusion (RGB+Multi) features with SVM. (NG, No Grazing; LG, Light Grazing; MG, Moderate Grazing; SG, Severe Grazing).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1730583-g007.tif">
<alt-text content-type="machine-generated">Map images divided into sections labeled LG (green), SG (red), NG (blue), and MG (yellow). Panels (b), (c), and (d) show similar maps with varied color pixel distributions. A color legend is included.</alt-text>
</graphic></fig>
</sec>
<sec id="s3_2">
<label>3.2</label>
<title>Detection of grazing intensity by spectral indices</title>
<p>The M-statistic was applied to assess the separability of the SIs, with results presented in <xref ref-type="fig" rid="f8"><bold>Figure&#xa0;8</bold></xref>. For most indices, M values exceeded 1, indicating good separability among grazing intensity levels. Traditional visible indices (ExG, ExGR, RGBVI) and red&#x2013;NIR indices (NDVI, GNDVI, SAVI, MSAVI, EVI2) generally exhibited high separability. Their maximum M values all exceeded 1, while the mean values were mainly within the range of 0.6 to 1.0. In contrast, NDRE, idx3, and several other indices showed poor separability, with maximum M values below 0.4. Notably, among the self-constructed indices, idx6, idx7, and idx14 performed best, with maximum M values exceeding 1.5. In particular, idx7 and idx14 reached extreme values above 2, demonstrating substantial advantages in capturing vegetation responses to grazing intensity and outperforming traditional indices.</p>
<fig id="f8" position="float">
<label>Figure&#xa0;8</label>
<caption>
<p>Separability of spectral indices evaluated using the M statistic.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1730583-g008.tif">
<alt-text content-type="machine-generated">Bar chart showing various indices labeled from ExG to idx14 along the x-axis. The y-axis ranges from negative one to positive two, with a dashed line at one. Bar heights vary, with indices such as idx6 and idx14 having higher values around two.</alt-text>
</graphic></fig>
<p><xref ref-type="fig" rid="f9"><bold>Figure&#xa0;9</bold></xref> presents the classification results of three classifiers (KNN, SVM, and RF) using different combinations of SIs. The classification accuracy of visible indices (RGB) was comparable to that of multispectral indices (Multi), with Multi showing slightly better overall performance. The results indicate that near-infrared and red-edge bands provide stronger discriminative capability for grazing level identification. Integrating RGB and Multi (RGB+Multi) markedly improved accuracy, with OA values of 84.91%, 88.68%, and 82.55% for KNN, SVM, and RF, respectively. Corresponding Kappa coefficients all exceeded 75%, indicating strong complementarity between visible and multispectral features. With the newly constructed indices (New-indices), classification performance further improved: all three classifiers achieved OA values above 87% and Kappa values above 82%. These results demonstrate that the newly proposed indices more effectively capture differences in grazing intensity and enhance class separability. Under the AllFusion condition (RGB+Multi+New-indices), results were comparable to those from RGB+Multi and New-indices. Overall, SVM showed the most consistent performance, with a maximum OA of 88.68% and a Kappa above 83%. Comparative analysis indicates that the newly constructed indices substantially improve discrimination of grazing intensity, outperforming single-band combinations, while multi-source feature fusion further enhances model robustness.</p>
<fig id="f9" position="float">
<label>Figure&#xa0;9</label>
<caption>
<p>Classification performance for different combinations of spectral indices across three classifiers. <bold>(a)</bold> OA, and <bold>(b)</bold> Kappa coefficient.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1730583-g009.tif">
<alt-text content-type="machine-generated">Two bar charts compare the performance of three classifiers: KNN, SVM, and RF, across various feature sets: RGB, Multi, RGB+Multi, New-indices, and AllFusion. Chart (a) shows Overall Accuracy (OA) and chart (b) shows Kappa values. Each classifier is represented by different-colored bars. Classifiers generally perform consistently across different feature sets.</alt-text>
</graphic></fig>
<p><xref ref-type="fig" rid="f10"><bold>Figure&#xa0;10</bold></xref> presents pixel-level classification maps generated from different SI sets and classifiers. <xref ref-type="fig" rid="f10"><bold>Figure&#xa0;10a</bold></xref> shows the reference grazing-intensity partition (NG, LG, MG, SG). <xref ref-type="fig" rid="f10"><bold>Figure&#xa0;10b-f</bold></xref> display classification maps produced using different index sets and the best-performing classifier for each set. The map produced from RGB-derived indices with RF (<xref ref-type="fig" rid="f10"><bold>Figure&#xa0;10b</bold></xref>) captures the broad spatial pattern but shows misclassification between adjacent LG and MG patches. Using multispectral indices with SVM (<xref ref-type="fig" rid="f10"><bold>Figure&#xa0;10c</bold></xref>) improves discrimination of NG and SG zones and reduces local speckle. The fused RGB+Multi set under SVM (<xref ref-type="fig" rid="f10"><bold>Figure&#xa0;10d</bold></xref>) increases spatial continuity and sharpens boundaries. Notably, the AllFusion map (<xref ref-type="fig" rid="f10"><bold>Figure&#xa0;10f</bold></xref>), which uses the full index suite, attains the best overall spatial agreement with the reference&#x2014;reflecting the benefit of combining complementary information across index families. The map based on the newly constructed indices (<xref ref-type="fig" rid="f10"><bold>Figure&#xa0;10e</bold></xref>) also yields high spatial consistency and particularly good detection of moderate grazing, despite using a more compact feature set. Overall, AllFusion and the new-index set both perform strongly: AllFusion slightly surpasses other sets in overall spatial agreement, while the new indices offer nearly comparable accuracy with fewer features, demonstrating a favorable trade-off between completeness and parsimony.</p>
<fig id="f10" position="float">
<label>Figure&#xa0;10</label>
<caption>
<p>Pixel-level classification maps derived from different spectral-index combinations and classifiers. <bold>(a)</bold> Reference grazing intensity map, <bold>(b)</bold> RGB-derived indices with RF, <bold>(c)</bold> Multi-derived indices with SVM, <bold>(d)</bold> RGB+Multi indices with SVM, <bold>(e)</bold> New indices set with SVM, and <bold>(f)</bold> AllFusion with SVM. (NG, No Grazing; LG, Light Grazing; MG, Moderate Grazing; SG, Severe Grazing).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1730583-g010.tif">
<alt-text content-type="machine-generated">Six-panel image showing segmented geological analysis. Panel (a) labels distinct zones: LG, SG, NG, and MG in green, red, blue, and yellow. Panels (b) to (f) depict maps with color-coded areas corresponding to LG, SG, NG, and MG, reflecting variability across the region.</alt-text>
</graphic></fig>
</sec>
<sec id="s3_3">
<label>3.3</label>
<title>Detection of grazing intensity by feature selection</title>
<p>This study applied multiple feature selection methods for full-feature fusion, including random frog (RF) (<xref ref-type="bibr" rid="B21">Li et&#xa0;al., 2012</xref>), ReliefF (<xref ref-type="bibr" rid="B38">Robnik-&#x160;ikonja and Kononenko, 2003</xref>), least-squares mutual information (LSMI) (<xref ref-type="bibr" rid="B44">Suzuki et&#xa0;al., 2009</xref>), successive projections algorithm (SPA) (<xref ref-type="bibr" rid="B4">Ara&#xfa;jo et&#xa0;al., 2001</xref>; <xref ref-type="bibr" rid="B55">Yu et&#xa0;al., 2020</xref>), elimination of uninformative variables (UVE) (<xref ref-type="bibr" rid="B10">Centner et&#xa0;al., 1996</xref>), competitive adaptive reweighted sampling (CARS) (<xref ref-type="bibr" rid="B20">Li et&#xa0;al., 2009</xref>), and AIFS. Each method used its own scoring or importance distribution to determine the selected features automatically. As shown in <xref ref-type="table" rid="T2"><bold>Table&#xa0;2</bold></xref>, the number and type of features selected by different methods varied slightly but showed overall consistency. Most methods consistently selected traditional vegetation indices (e.g., ExG, GNDVI, NDRE, RGBVI, MSAVI) and several self-constructed indices (e.g., idx7, idx8, idx9, idx14), suggesting that these features have strong discriminatory power for grazing intensity across different algorithms. Among these, idx7, idx9, and idx14 appeared most frequently across the six methods, highlighting their stability and central importance. In contrast, the RF and AIFS methods selected fewer features.</p>
<table-wrap id="T2" position="float">
<label>Table&#xa0;2</label>
<caption>
<p>Spectral indices selected by different feature selection methods.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="center">Method</th>
<th valign="middle" align="center">Number of indices</th>
<th valign="middle" align="center">Specific spectral indices</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="center">RF</td>
<td valign="middle" align="center">7</td>
<td valign="middle" align="left">idx1, idx6, idx7, idx8, idx9, idx13, idx14</td>
</tr>
<tr>
<td valign="middle" align="center">ReliefF</td>
<td valign="middle" align="center">10</td>
<td valign="middle" align="left">ExG, GNDVI, NDRE, idx3, idx5, idx6, idx7, idx8, idx9, idx14</td>
</tr>
<tr>
<td valign="middle" align="center">LSMI</td>
<td valign="middle" align="center">12</td>
<td valign="middle" align="left">ExG, RGBVI, MSAVI, idx1, idx3, idx5, idx6, idx7, idx8, idx9, idx11, idx14</td>
</tr>
<tr>
<td valign="middle" align="center">SPA</td>
<td valign="middle" align="center">12</td>
<td valign="middle" align="left">VARI, ExGR, GNDVI, MSAVI, NDRE, idx1, idx4, idx7, idx8, idx9, idx10, idx11</td>
</tr>
<tr>
<td valign="middle" align="center">UVE</td>
<td valign="middle" align="center">11</td>
<td valign="middle" align="left">ExG, ExGR, RGBVI, idx1, idx2, idx3, idx4, idx7, idx9, idx10, idx14</td>
</tr>
<tr>
<td valign="middle" align="center">CARS</td>
<td valign="middle" align="center">11</td>
<td valign="middle" align="left">VARI, RGBVI, GNDVI, NDRE, idx3, idx4, idx6, idx7, idx9, idx10, idx14</td>
</tr>
<tr>
<td valign="middle" align="center">AIFS</td>
<td valign="middle" align="center">7</td>
<td valign="middle" align="left">idx9, idx8, idx7, NDVI, idx2, idx3, NDRE</td>
</tr>
</tbody>
</table>
</table-wrap>
<p><xref ref-type="table" rid="T3"><bold>Table&#xa0;3</bold></xref> shows that various feature selection methods exhibit distinct performances across different classifiers (KNN, SVM, RF). Overall, the AIFS method produced the best classification results. SVM achieved high accuracy, with OA and Kappa values of 92.13% and 88.99%, respectively, and also showed strong performance under KNN and RF. This result demonstrates that the method achieves optimal discrimination while requiring fewer features. In contrast, the AllFusion approach, which utilizes all features, yielded the lowest accuracy. The OA and Kappa values across all three classifiers were significantly lower than those of other methods, indicating that redundant features not only fail to improve performance but may also impair classification effectiveness. Overall, appropriate feature selection substantially improves the accuracy of grazing intensity classification, with the AIFS method providing both efficiency and precision.</p>
<table-wrap id="T3" position="float">
<label>Table&#xa0;3</label>
<caption>
<p>Classification results of feature selection methods under different classifiers.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" rowspan="2" align="center">Method</th>
<th valign="middle" colspan="2" align="center">KNN</th>
<th valign="middle" colspan="2" align="center">SVM</th>
<th valign="middle" colspan="2" align="center">RF</th>
</tr>
<tr>
<th valign="middle" align="center">OA (%)</th>
<th valign="middle" align="center">Kappa (%)</th>
<th valign="middle" align="center">OA (%)</th>
<th valign="middle" align="center">Kappa (%)</th>
<th valign="middle" align="center">OA (%)</th>
<th valign="middle" align="center">Kappa (%)</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="center">RF</td>
<td valign="middle" align="center">87.96</td>
<td valign="middle" align="center">83.14</td>
<td valign="middle" align="center">87.04</td>
<td valign="middle" align="center">81.90</td>
<td valign="middle" align="center">85.65</td>
<td valign="middle" align="center">79.87</td>
</tr>
<tr>
<td valign="middle" align="center">ReliefF</td>
<td valign="middle" align="center">89.35</td>
<td valign="middle" align="center">84.95</td>
<td valign="middle" align="center">90.28</td>
<td valign="middle" align="center">86.23</td>
<td valign="middle" align="center">87.50</td>
<td valign="middle" align="center">82.27</td>
</tr>
<tr>
<td valign="middle" align="center">LSMI</td>
<td valign="middle" align="center">89.82</td>
<td valign="middle" align="center">85.23</td>
<td valign="middle" align="center">88.43</td>
<td valign="middle" align="center">83.15</td>
<td valign="middle" align="center">85.65</td>
<td valign="middle" align="center">79.08</td>
</tr>
<tr>
<td valign="middle" align="center">SPA</td>
<td valign="middle" align="center">86.11</td>
<td valign="middle" align="center">79.81</td>
<td valign="middle" align="center">87.96</td>
<td valign="middle" align="center">82.47</td>
<td valign="middle" align="center">86.57</td>
<td valign="middle" align="center">80.57</td>
</tr>
<tr>
<td valign="middle" align="center">UVE</td>
<td valign="middle" align="center">87.50</td>
<td valign="middle" align="center">81.82</td>
<td valign="middle" align="center">87.50</td>
<td valign="middle" align="center">81.80</td>
<td valign="middle" align="center">84.72</td>
<td valign="middle" align="center">77.72</td>
</tr>
<tr>
<td valign="middle" align="center">CARS</td>
<td valign="middle" align="center">86.11</td>
<td valign="middle" align="center">80.29</td>
<td valign="middle" align="center">88.89</td>
<td valign="middle" align="center">84.15</td>
<td valign="middle" align="center">84.72</td>
<td valign="middle" align="center">78.18</td>
</tr>
<tr>
<td valign="middle" align="center">AIFS</td>
<td valign="middle" align="center">91.20</td>
<td valign="middle" align="center">87.72</td>
<td valign="middle" align="center"><bold>92.13</bold></td>
<td valign="middle" align="center"><bold>88.99</bold></td>
<td valign="middle" align="center">87.96</td>
<td valign="middle" align="center">83.17</td>
</tr>
<tr>
<td valign="middle" align="center">AllFusion</td>
<td valign="middle" align="center">83.49</td>
<td valign="middle" align="center">76.63</td>
<td valign="middle" align="center">86.79</td>
<td valign="middle" align="center">81.28</td>
<td valign="middle" align="center">81.13</td>
<td valign="middle" align="center">73.25</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn>
<p>Bold values indicate the highest accuracy and Kappa coefficient among all combinations.</p></fn>
</table-wrap-foot>
</table-wrap>
<p>The classification model developed in this study was applied to perform pixel-level classification on images generated by fusing visible and multispectral data. To reduce noise, a 3 &#xd7; 3 filter was used to smooth the classification map (<xref ref-type="bibr" rid="B13">Guan et&#xa0;al., 2025</xref>), as shown in <xref ref-type="fig" rid="f11"><bold>Figure&#xa0;11</bold></xref>. The predicted grazing intensities (blue, green, yellow, and red areas) show a clear correspondence with the ground-truth grazing intensity areas (dashed regions representing NG, LG, MG, and SG). Regions with actual grazing intensities of NG, LG, MG, and SG were predominantly predicted as blue, green, yellow, and red, respectively. These results indicate that the model can accurately predict varying grazing intensities, demonstrating robust overall performance. However, minor deviations exist between some local predictions and actual conditions, likely due to factors such as terrain undulations and the grazing behavior of sheep.</p>
<fig id="f11" position="float">
<label>Figure&#xa0;11</label>
<caption>
<p>Pixel-level classification map of grazing intensity. (NG, No Grazing; LG, Light Grazing; MG, Moderate Grazing; SG, Severe Grazing).</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-16-1730583-g011.tif">
<alt-text content-type="machine-generated">Color-coded map showing land sections divided by dashed lines. Blue represents NG, green indicates LG, yellow signifies MG, and red denotes SG. Four sections labeled MG, NG, SG, and LG are marked with varied color distributions.</alt-text>
</graphic></fig>
</sec>
</sec>
<sec id="s4" sec-type="discussion">
<label>4</label>
<title>Discussion</title>
<p>This study investigated the remote sensing-based monitoring of grazing intensity in sandy grasslands by integrating visible and multispectral data acquired from UAVs with spectral indices and an automatic incremental feature selection (AIFS) method. The results demonstrate that multi-source data fusion and feature optimization substantially improve the discrimination of grazing intensities, with the SVM classifier achieving the highest accuracy (OA=92.13%, Kappa=88.99%). These findings highlight that, in complex ecosystems such as sandy grasslands&#x2014;characterized by sparse vegetation and strong soil background effects&#x2014;the integration of UAV multi-source imagery with intelligent feature selection effectively addresses the limitations of traditional approaches and provides a robust methodological framework for grassland ecological monitoring.</p>
<sec id="s4_1">
<label>4.1</label>
<title>Spectral response characteristics and ecological interpretation</title>
<p>The spectral mean curves revealed that visible bands generally increased with grazing intensity but declined under severe grazing conditions. This nonlinear pattern can be explained by the pronounced soil&#x2013;vegetation hybrid effect typical of sandy grasslands. Light to moderate grazing increases soil exposure, enhancing reflectance in blue, green, and red bands due to the bright sandy background. Under heavy grazing, however, trampling induces surface crusting and litter accumulation, which reduce overall reflectance. In the multispectral region, the green and red bands similarly increased with grazing intensity, while the red-edge and near-infrared (NIR) bands decreased, reflecting reductions in chlorophyll content and canopy integrity. The slight rebound of red-edge and NIR reflectance under severe grazing likely arises from soil high-albedo effects and the persistence of grazing-tolerant species.</p>
<p>These results differ from the monotonic spectral decline patterns reported in humid or dense grasslands (<xref ref-type="bibr" rid="B30">Oliveira et&#xa0;al., 2019</xref>; <xref ref-type="bibr" rid="B61">Zhang et&#xa0;al., 2021a</xref>), illustrating that vegetation spectral responses in semi-arid grasslands are co-regulated by physiological degradation of plants and enhanced background reflectance from soil. This interactive mechanism underscores the need to account for both vegetation and soil components when interpreting spectral responses in dryland ecosystems (<xref ref-type="bibr" rid="B41">Shi et&#xa0;al., 2021</xref>).</p>
</sec>
<sec id="s4_2">
<label>4.2</label>
<title>Performance of spectral indices and feature selection</title>
<p>Statistical analyses (ANOVA and Tukey&#x2019;s tests) confirmed that all spectral indices exhibited significant differences among grazing intensities. Traditional vegetation indices (NDVI, GNDVI) effectively captured the decline in vegetation cover and photosynthetic activity between light and heavy grazing but were less sensitive to moderate grazing levels. Indices that integrate red and NIR information (NDRE, MSAVI) showed superior discrimination between low&#x2013;moderate and moderate&#x2013;heavy grazing intensities, aligning with previous findings from semi-arid regions (<xref ref-type="bibr" rid="B17">Hernandez et&#xa0;al., 2024</xref>; <xref ref-type="bibr" rid="B32">P&#xe1;dua et&#xa0;al., 2024</xref>; <xref ref-type="bibr" rid="B36">Pranga et&#xa0;al., 2024</xref>).</p>
<p>The self-developed indices (idx7, idx9, idx14) demonstrated even greater adaptability and stability, reflecting the advantages of region-specific spectral design tailored to sandy grasslands. Feature selection consistently identified ExG, NDRE, MSAVI, and the newly constructed indices (idx7, idx9, idx14) as key predictors, confirming their robustness for grazing intensity monitoring (<xref ref-type="bibr" rid="B53">Xu et&#xa0;al., 2022</xref>).</p>
<p>Interestingly, NDRE and idx3 exhibited relatively low M-statistic values (&lt;0.4), suggesting weak separability when evaluated independently. However, both were included in the final feature subset derived through AIFS. This apparent discrepancy highlights the difference between univariate separability and multivariate synergy. While the M-statistic evaluates features in isolation, the AIFS approach optimizes combinations based on their joint contribution to classification accuracy. Thus, even indices with limited standalone performance can provide complementary information that enhances the discriminative power and stability of the final model. The inclusion of correlation constraints further ensures non-redundancy among selected features, reinforcing the unique role of each feature in the optimized subset.</p>
<p>In addition, compared with commonly used feature selection algorithms such as ReliefF and CARS, AIFS also shows practical advantages in computational efficiency. ReliefF requires repeated distance-based sampling across the entire feature space, which becomes increasingly expensive as feature dimensionality grows. CARS relies on iterative Monte-Carlo sampling and partial-least-squares regression, making its computational burden dependent on repeated model fitting. In contrast, AIFS performs a single-pass importance ranking using Random Forests followed by lightweight incremental evaluation with correlation filtering and cross-validation. Because redundant features are removed early during correlation screening, subsequent computations are substantially reduced. As a result, AIFS generally achieves competitive or higher classification accuracy while requiring fewer iterations and fewer full model evaluations than ReliefF and CARS, making it more suitable for UAV-based multi-source datasets where dozens of features must be processed efficiently.</p>
</sec>
<sec id="s4_3">
<label>4.3</label>
<title>Ecological and methodological implications</title>
<p>From an ecological perspective, the observed spectral responses reflect key plant physiological mechanisms under grazing disturbance (<xref ref-type="bibr" rid="B51">Xiang et&#xa0;al., 2025a</xref>, <xref ref-type="bibr" rid="B52">b</xref>). Moderate grazing stimulates compensatory growth and chlorophyll regeneration, leading to increased red-edge reflectance, whereas severe grazing reduces leaf area index (LAI) and internal scattering, lowering NIR reflectance. Concurrently, increased soil exposure elevates visible-band reflectance, generating a nonlinear response in the composite spectral signal. This &#x201c;hybrid soil&#x2013;vegetation effect&#x201d; emphasizes that spectral variability in sandy grasslands arises from coupled changes in vegetation physiology, community composition, and soil optical properties.</p>
<p>Methodologically, this study demonstrates that multi-source UAV imagery combined with AIFS enhances classification performance while reducing redundancy, outperforming both full-feature and single-source approaches. The optimized feature set (idx9, idx8, idx7, NDVI, idx2, idx3, NDRE) achieved the highest classification accuracy and robustness, validating the efficiency of automated, data-driven feature selection in heterogeneous environments.</p>
</sec>
<sec id="s4_4">
<label>4.4</label>
<title>Limitations and future directions</title>
<p>Despite these advances, several limitations remain. The study was conducted on a controlled grazing trial with limited spatial coverage, which may not fully capture the spatial heterogeneity of sandy grasslands. In particular, the relatively small study area restricts the ability to test the generalizability of the proposed method across broader ecological gradients. The use of single-period UAV imagery restricted the analysis of seasonal dynamics. Additionally, some confusion persisted between light and moderate grazing levels, suggesting the need for features more sensitive to early degradation signals. Furthermore, although UAV-based data acquisition reduces many operational uncertainties, potential sources of error remain, including illumination variability, minor fluctuations in flight altitude, and atmospheric effects. In this study, flights were conducted under clear-sky conditions near solar noon to minimize illumination differences, and radiometric calibration panels along with the UAV&#x2019;s RTK module were used to ensure consistent reflectance retrieval and high positioning accuracy. These measures help reduce uncertainty, but residual variability may still influence spectral responses.</p>
<p>Future studies should expand monitoring to diverse soil&#x2013;vegetation types, extend the research area to larger and more heterogeneous sandy grassland regions, and incorporate multi-temporal UAV datasets for dynamic assessments. Integrating medium- and high-resolution satellite data (e.g., Sentinel-2, GF-6) with UAV observations would also allow multi-scale validation and facilitate the application of the method to regional monitoring. Moreover, integrating structural information from LiDAR or radar could enhance discrimination under low vegetation cover. Further development of hybrid feature selection and deep learning approaches could also improve model robustness and generalizability.</p>
</sec>
</sec>
<sec id="s5" sec-type="conclusions">
<label>5</label>
<title>Conclusions</title>
<p>This study utilized the fusion of visible and multispectral UAV data to systematically analyze spectral response features across different grazing intensities in sandy grasslands, which are characterized by low vegetation cover and substantial soil background interference. Integration of feature construction with an AIFS method improved the accuracy of grazing intensity classification. The main findings of this study are summarized as follows:</p>
<list list-type="order">
<list-item>
<p>Visible and multispectral bands responded differently to grazing disturbance. Notably, the red edge and near-infrared bands were particularly effective in distinguishing between medium- and severe-grazing intensities. Nonlinear variations resulting from soil-vegetation interactions highlight the distinctive spectral characteristics of sandy grasslands.</p></list-item>
<list-item>
<p>Traditional vegetation indices (NDVI, GNDVI) were sensitive primarily to extreme grazing levels, whereas the proposed indices (idx7, idx9, idx14) outperformed them in distinguishing moderate grazing intensities, demonstrating strong regional adaptability.</p></list-item>
<list-item>
<p>The AIFS method efficiently reduced redundant information and enhanced model robustness, achieving the highest accuracy with the SVM classifier (OA=92.13%, Kappa=88.99%), surpassing the performance obtained using all features or single-source data.</p></list-item>
</list>
<p>The study demonstrates that multi-source UAV remote sensing, combined with intelligent feature selection, provides a promising approach for high-precision monitoring of grazing intensity in sandy grasslands, offering valuable insights for assessing grassland degradation and informing ecological management. However, the study is limited by the use of single-period data and small plot sizes. Future studies could incorporate multi-temporal and multi-scale observations, along with the integration of structural data, to further elucidate the dynamic processes and mechanisms underlying grazing disturbance. This study not only provides technical support for monitoring grazing intensity in sandy grasslands but also offers novel insights for ecological remote sensing research in low-vegetation-cover grasslands.</p>
</sec>
</body>
<back>
<sec id="s6" sec-type="data-availability">
<title>Data availability statement</title>
<p>The original contributions presented in the study are included in the article/supplementary material. Further inquiries can be directed to the corresponding author/s.</p></sec>
<sec id="s7" sec-type="author-contributions">
<title>Author contributions</title>
<p>QG: Conceptualization, Writing &#x2013; original draft, Methodology, Investigation. MJ: Funding acquisition, Software, Visualization, Writing &#x2013; review &amp; editing. WD: Project administration, Writing &#x2013; review &amp; editing, Supervision. XC: Validation, Writing &#x2013; original draft, Formal Analysis. BY: Data curation, Resources, Writing &#x2013; original draft.</p></sec>
<sec id="s9" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The authors declared that this work was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p></sec>
<sec id="s10" sec-type="correction-statement">
<title>Correction note</title>
<p>A correction has been made to this article. Details can be found at: <ext-link xlink:href="https://doi.org/10.3389/fpls.2025.1774249" ext-link-type="uri">10.3389/fpls.2025.1774249</ext-link>.</p></sec>
<sec id="s11" sec-type="ai-statement">
<title>Generative AI statement</title>
<p>The author(s) declared that generative AI was not used in the creation of this manuscript.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If you identify any issues, please contact us.</p></sec>
<sec id="s12" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p></sec>
<ref-list>
<title>References</title>
<ref id="B1">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Abdullah</surname> <given-names>M. Z.</given-names></name>
<name><surname>Guan</surname> <given-names>L. C.</given-names></name>
<name><surname>Mohd Azemi</surname> <given-names>B. M. N.</given-names></name>
</person-group> (<year>2001</year>). 
<article-title>Stepwise discriminant analysis for colour grading of oil palm using machine vision system</article-title>. <source>Food Bioproducts Process.</source> <volume>79</volume>, <fpage>223</fpage>&#x2013;<lpage>231</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1205/096030801753252298</pub-id>
</mixed-citation>
</ref>
<ref id="B2">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ali</surname> <given-names>I.</given-names></name>
<name><surname>Cawkwell</surname> <given-names>F.</given-names></name>
<name><surname>Dwyer</surname> <given-names>E.</given-names></name>
<name><surname>Barrett</surname> <given-names>B.</given-names></name>
<name><surname>Green</surname> <given-names>S.</given-names></name>
</person-group> (<year>2016</year>). 
<article-title>Satellite remote sensing of grasslands: from observation to management</article-title>. <source>J. Plant Ecol.</source> <volume>9</volume>, <fpage>649</fpage>&#x2013;<lpage>671</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1093/jpe/rtw005</pub-id>
</mixed-citation>
</ref>
<ref id="B3">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ali</surname> <given-names>A.</given-names></name>
<name><surname>Kaul</surname> <given-names>H.-P.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>Monitoring yield and quality of forages and grassland in the view of precision agriculture applications&#x2014;A review</article-title>. <source>Remote Sens.</source> <volume>17</volume>, <fpage>279</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs17020279</pub-id>
</mixed-citation>
</ref>
<ref id="B4">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ara&#xfa;jo</surname> <given-names>M. C. U.</given-names></name>
<name><surname>Saldanha</surname> <given-names>T. C. B.</given-names></name>
<name><surname>Galv&#xe3;o</surname> <given-names>R. K. H.</given-names></name>
<name><surname>Yoneyama</surname> <given-names>T.</given-names></name>
<name><surname>Chame</surname> <given-names>H. C.</given-names></name>
<name><surname>Visani</surname> <given-names>V.</given-names></name>
</person-group> (<year>2001</year>). 
<article-title>The successive projections algorithm for variable selection in spectroscopic multicomponent analysis</article-title>. <source>Chemometrics Intelligent Lab. Syst.</source> <volume>57</volume>, <fpage>65</fpage>&#x2013;<lpage>73</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/S0169-7439(01)00119-8</pub-id>
</mixed-citation>
</ref>
<ref id="B5">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Barber</surname> <given-names>N.</given-names></name>
<name><surname>Alvarado</surname> <given-names>E.</given-names></name>
<name><surname>Kane</surname> <given-names>V. R.</given-names></name>
<name><surname>Mell</surname> <given-names>W. E.</given-names></name>
<name><surname>Moskal</surname> <given-names>L. M.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Estimating fuel moisture in grasslands using UAV-mounted infrared and visible light sensors</article-title>. <source>Sensors</source> <volume>21</volume>, <fpage>6350</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/s21196350</pub-id>, PMID: <pub-id pub-id-type="pmid">34640670</pub-id>
</mixed-citation>
</ref>
<ref id="B6">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Bazzo</surname> <given-names>C. O.</given-names></name>
<name><surname>Kamali</surname> <given-names>B.</given-names></name>
<name><surname>H&#xfc;tt</surname> <given-names>C.</given-names></name>
<name><surname>Bareth</surname> <given-names>G.</given-names></name>
<name><surname>Gaiser</surname> <given-names>T.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>A review of estimation methods for aboveground biomass in grasslands using UAV</article-title>. <source>Remote Sens.</source> <volume>15</volume>, <fpage>639</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs15030639</pub-id>
</mixed-citation>
</ref>
<ref id="B7">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Belgiu</surname> <given-names>M.</given-names></name>
<name><surname>Dr&#x103;gu&#x163;</surname> <given-names>L.</given-names></name>
</person-group> (<year>2016</year>). 
<article-title>Random forest in remote sensing: A review of applications and future directions</article-title>. <source>ISPRS J. Photogrammetry Remote Sens.</source> <volume>114</volume>, <fpage>24</fpage>&#x2013;<lpage>31</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.isprsjprs.2016.01.011</pub-id>
</mixed-citation>
</ref>
<ref id="B8">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Bendig</surname> <given-names>J.</given-names></name>
<name><surname>Yu</surname> <given-names>K.</given-names></name>
<name><surname>Aasen</surname> <given-names>H.</given-names></name>
<name><surname>Bolten</surname> <given-names>A.</given-names></name>
<name><surname>Bennertz</surname> <given-names>S.</given-names></name>
<name><surname>Broscheit</surname> <given-names>J.</given-names></name>
<etal/>
</person-group>. (<year>2015</year>). 
<article-title>Combining UAV-based plant height from crop surface models, visible, and near infrared vegetation indices for biomass monitoring in barley</article-title>. <source>Int. J. Appl. Earth Observation Geoinformation</source> <volume>39</volume>, <fpage>79</fpage>&#x2013;<lpage>87</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jag.2015.02.012</pub-id>
</mixed-citation>
</ref>
<ref id="B9">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Cao</surname> <given-names>X.</given-names></name>
<name><surname>Liu</surname> <given-names>Y.</given-names></name>
<name><surname>Yu</surname> <given-names>R.</given-names></name>
<name><surname>Han</surname> <given-names>D.</given-names></name>
<name><surname>Su</surname> <given-names>B.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>A comparison of UAV RGB and multispectral imaging in phenotyping for stay green of wheat population</article-title>. <source>Remote Sens.</source> <volume>13</volume>, <fpage>5173</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs13245173</pub-id>
</mixed-citation>
</ref>
<ref id="B10">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Centner</surname> <given-names>V.</given-names></name>
<name><surname>Massart</surname> <given-names>D.-L.</given-names></name>
<name><surname>de Noord</surname> <given-names>O. E.</given-names></name>
<name><surname>de Jong</surname> <given-names>S.</given-names></name>
<name><surname>Vandeginste</surname> <given-names>B. M.</given-names></name>
<name><surname>Sterna</surname> <given-names>C.</given-names></name>
</person-group> (<year>1996</year>). 
<article-title>Elimination of uninformative variables for multivariate calibration</article-title>. <source>Analytical Chem.</source> <volume>68</volume>, <fpage>3851</fpage>&#x2013;<lpage>3858</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1021/ac960321m</pub-id>, PMID: <pub-id pub-id-type="pmid">21619260</pub-id>
</mixed-citation>
</ref>
<ref id="B11">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Fern&#xe1;ndez-Habas</surname> <given-names>J.</given-names></name>
<name><surname>Carriere Ca&#xf1;ada</surname> <given-names>M.</given-names></name>
<name><surname>Garc&#xed;a Moreno</surname> <given-names>A. M.</given-names></name>
<name><surname>Leal-Murillo</surname> <given-names>J. R.</given-names></name>
<name><surname>Gonz&#xe1;lez-Dugo</surname> <given-names>M. P.</given-names></name>
<name><surname>Abellanas Oar</surname> <given-names>B.</given-names></name>
<etal/>
</person-group>. (<year>2022</year>). 
<article-title>Estimating pasture quality of Mediterranean grasslands using hyperspectral narrow bands from field spectroscopy by Random Forest and PLS regressions</article-title>. <source>Comput. Electron. Agric.</source> <volume>192</volume>, <elocation-id>106614</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2021.106614</pub-id>
</mixed-citation>
</ref>
<ref id="B12">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Gao</surname> <given-names>S.-h.</given-names></name>
<name><surname>Yan</surname> <given-names>Y.-z.</given-names></name>
<name><surname>Yuan</surname> <given-names>Y.</given-names></name>
<name><surname>Zhang</surname> <given-names>N.</given-names></name>
<name><surname>Ma</surname> <given-names>L.</given-names></name>
<name><surname>Zhang</surname> <given-names>Q.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Comprehensive degradation index for monitoring desert grassland using UAV multispectral imagery</article-title>. <source>Ecol. Indic.</source> <volume>165</volume>, <elocation-id>112194</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.ecolind.2024.112194</pub-id>
</mixed-citation>
</ref>
<ref id="B13">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Guan</surname> <given-names>Q.</given-names></name>
<name><surname>Qiao</surname> <given-names>S.</given-names></name>
<name><surname>Feng</surname> <given-names>S.</given-names></name>
<name><surname>Du</surname> <given-names>W.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>Investigation of peanut leaf spot detection using superpixel unmixing technology for hyperspectral UAV images</article-title>. <source>Agriculture</source> <volume>15</volume>, <fpage>597</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture15060597</pub-id>
</mixed-citation>
</ref>
<ref id="B14">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Guan</surname> <given-names>Q.</given-names></name>
<name><surname>Song</surname> <given-names>K.</given-names></name>
<name><surname>Feng</surname> <given-names>S.</given-names></name>
<name><surname>Yu</surname> <given-names>F.</given-names></name>
<name><surname>Xu</surname> <given-names>T.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Detection of peanut leaf spot disease based on leaf-, plant-, and field-scale hyperspectral reflectance</article-title>. <source>Remote Sens.</source> <volume>14</volume>, <fpage>4988</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs14194988</pub-id>
</mixed-citation>
</ref>
<ref id="B15">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Guan</surname> <given-names>Q.</given-names></name>
<name><surname>Zhao</surname> <given-names>D.</given-names></name>
<name><surname>Feng</surname> <given-names>S.</given-names></name>
<name><surname>Xu</surname> <given-names>T.</given-names></name>
<name><surname>Wang</surname> <given-names>H.</given-names></name>
<name><surname>Song</surname> <given-names>K.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Hyperspectral technique for detection of peanut leaf spot disease based on improved PCA loading</article-title>. <source>Agronomy</source> <volume>13</volume>, <fpage>1153</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy13041153</pub-id>
</mixed-citation>
</ref>
<ref id="B16">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Guo</surname> <given-names>S.</given-names></name>
<name><surname>Feng</surname> <given-names>Z.</given-names></name>
<name><surname>Wang</surname> <given-names>P.</given-names></name>
<name><surname>Chang</surname> <given-names>J.</given-names></name>
<name><surname>Han</surname> <given-names>H.</given-names></name>
<name><surname>Li</surname> <given-names>H.</given-names></name>
<etal/>
</person-group>. (<year>2024</year>). 
<article-title>Mapping and classification of the liaohe estuary wetland based on the combination of object-oriented and temporal features</article-title>. <source>IEEE Access</source> <volume>12</volume>, <fpage>60496</fpage>&#x2013;<lpage>60512</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/ACCESS.2024.3389935</pub-id>
</mixed-citation>
</ref>
<ref id="B17">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Hernandez</surname> <given-names>A.</given-names></name>
<name><surname>Jensen</surname> <given-names>K.</given-names></name>
<name><surname>Larson</surname> <given-names>S.</given-names></name>
<name><surname>Larsen</surname> <given-names>R.</given-names></name>
<name><surname>Rigby</surname> <given-names>C.</given-names></name>
<name><surname>Johnson</surname> <given-names>B.</given-names></name>
<etal/>
</person-group>. (<year>2024</year>). 
<article-title>Using unmanned aerial vehicles and multispectral sensors to model forage yield for grasses of semiarid landscapes</article-title>. <source>Grasses</source> <volume>3</volume>, <fpage>84</fpage>&#x2013;<lpage>109</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/grasses3020007</pub-id>
</mixed-citation>
</ref>
<ref id="B18">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Jiang</surname> <given-names>K.</given-names></name>
<name><surname>Zhang</surname> <given-names>Q.</given-names></name>
<name><surname>Wang</surname> <given-names>Y.</given-names></name>
<name><surname>Li</surname> <given-names>H.</given-names></name>
<name><surname>Yang</surname> <given-names>Y.</given-names></name>
<name><surname>Reyimu</surname> <given-names>T.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Effects of grazing on the grassland ecosystem multifunctionality of montane meadow on the northern slope of the Tianshan Mountains, China</article-title>. <source>Environ. Earth Sci.</source> <volume>83</volume>, <fpage>70</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s12665-023-11292-5</pub-id>
</mixed-citation>
</ref>
<ref id="B19">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Ku</surname> <given-names>K.-B.</given-names></name>
<name><surname>Mansoor</surname> <given-names>S.</given-names></name>
<name><surname>Han</surname> <given-names>G. D.</given-names></name>
<name><surname>Chung</surname> <given-names>Y. S.</given-names></name>
<name><surname>Tuan</surname> <given-names>T. T.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Identification of new cold tolerant Zoysia grass species using high-resolution RGB and multi-spectral imaging</article-title>. <source>Sci. Rep.</source> <volume>13</volume>, <fpage>13209</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41598-023-40128-2</pub-id>, PMID: <pub-id pub-id-type="pmid">37580436</pub-id>
</mixed-citation>
</ref>
<ref id="B20">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Li</surname> <given-names>H.</given-names></name>
<name><surname>Liang</surname> <given-names>Y.</given-names></name>
<name><surname>Xu</surname> <given-names>Q.</given-names></name>
<name><surname>Cao</surname> <given-names>D.</given-names></name>
</person-group> (<year>2009</year>). 
<article-title>Key wavelengths screening using competitive adaptive reweighted sampling method for multivariate calibration</article-title>. <source>Analytica Chimica Acta</source> <volume>648</volume>, <fpage>77</fpage>&#x2013;<lpage>84</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.aca.2009.06.046</pub-id>, PMID: <pub-id pub-id-type="pmid">19616692</pub-id>
</mixed-citation>
</ref>
<ref id="B21">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Li</surname> <given-names>H.-D.</given-names></name>
<name><surname>Xu</surname> <given-names>Q.-S.</given-names></name>
<name><surname>Liang</surname> <given-names>Y.-Z.</given-names></name>
</person-group> (<year>2012</year>). 
<article-title>Random frog: An efficient reversible jump Markov Chain Monte Carlo-like approach for variable selection with applications to gene selection and disease classification</article-title>. <source>Analytica Chimica Acta</source> <volume>740</volume>, <fpage>20</fpage>&#x2013;<lpage>26</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.aca.2012.06.031</pub-id>, PMID: <pub-id pub-id-type="pmid">22840646</pub-id>
</mixed-citation>
</ref>
<ref id="B22">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Liedtke</surname> <given-names>J. D.</given-names></name>
<name><surname>Hunt</surname> <given-names>C. H.</given-names></name>
<name><surname>George-Jaeggli</surname> <given-names>B.</given-names></name>
<name><surname>Laws</surname> <given-names>K.</given-names></name>
<name><surname>Watson</surname> <given-names>J.</given-names></name>
<name><surname>Potgieter</surname> <given-names>A. B.</given-names></name>
<etal/>
</person-group>. (<year>2020</year>). 
<article-title>High-throughput phenotyping of dynamic canopy traits associated with stay-green in grain sorghum</article-title>. <source>Plant Phenomics</source> <volume>2020</volume>, <elocation-id>4635153</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.34133/2020/4635153</pub-id>, PMID: <pub-id pub-id-type="pmid">33313557</pub-id>
</mixed-citation>
</ref>
<ref id="B23">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lin</surname> <given-names>X.</given-names></name>
<name><surname>Zhao</surname> <given-names>H.</given-names></name>
<name><surname>Zhang</surname> <given-names>S.</given-names></name>
<name><surname>Li</surname> <given-names>X.</given-names></name>
<name><surname>Gao</surname> <given-names>W.</given-names></name>
<name><surname>Ren</surname> <given-names>Z.</given-names></name>
<etal/>
</person-group>. (<year>2022</year>). 
<article-title>Effects of animal grazing on vegetation biomass and soil moisture on a typical steppe in Inner Mongolia, China</article-title>. <source>Ecohydrology</source> <volume>15</volume>, <fpage>e2350</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1002/eco.2350</pub-id>
</mixed-citation>
</ref>
<ref id="B24">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Liu</surname> <given-names>X.</given-names></name>
<name><surname>Wang</surname> <given-names>H.</given-names></name>
<name><surname>Cao</surname> <given-names>Y.</given-names></name>
<name><surname>Yang</surname> <given-names>Y.</given-names></name>
<name><surname>Sun</surname> <given-names>X.</given-names></name>
<name><surname>Sun</surname> <given-names>K.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Comprehensive growth index monitoring of desert steppe grassland vegetation based on UAV hyperspectral</article-title>. <source>Front. Plant Sci.</source> <volume>13</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2022.1050999</pub-id>, PMID: <pub-id pub-id-type="pmid">36762180</pub-id>
</mixed-citation>
</ref>
<ref id="B25">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Liu</surname> <given-names>B.</given-names></name>
<name><surname>Ye</surname> <given-names>H.</given-names></name>
<name><surname>Liao</surname> <given-names>X.</given-names></name>
<name><surname>Zhang</surname> <given-names>X.</given-names></name>
<name><surname>Mao</surname> <given-names>G.</given-names></name>
<name><surname>Pan</surname> <given-names>T.</given-names></name>
</person-group> (<year>2025</year>). 
<article-title>UAV Data for herbaceous community’s aboveground biomass upscaling: a new perspective on LiDAR and multispectral information fusion</article-title>. <source>Int. J. Digital Earth</source> <volume>18</volume>, <elocation-id>2543563</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/17538947.2025.2543563</pub-id>
</mixed-citation>
</ref>
<ref id="B26">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lussem</surname> <given-names>U.</given-names></name>
<name><surname>Bolten</surname> <given-names>A.</given-names></name>
<name><surname>Gnyp</surname> <given-names>M. L.</given-names></name>
<name><surname>Jasper</surname> <given-names>J.</given-names></name>
<name><surname>Bareth</surname> <given-names>G.</given-names></name>
</person-group> (<year>2018</year>). 
<article-title>Evaluation of RGB-based vegetation indices from UAV imagery to estimate forage yield in grassland</article-title>. <source>Int. Arch. Photogramm. Remote Sens. Spatial Inf. Sci.</source> <volume>XLII-3</volume>, <fpage>1215</fpage>&#x2013;<lpage>1219</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.5194/isprs-archives-XLII-3-1215-2018</pub-id>
</mixed-citation>
</ref>
<ref id="B27">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lussem</surname> <given-names>U.</given-names></name>
<name><surname>Hollberg</surname> <given-names>J.</given-names></name>
<name><surname>Menne</surname> <given-names>J.</given-names></name>
<name><surname>Schellberg</surname> <given-names>J.</given-names></name>
<name><surname>Bareth</surname> <given-names>G.</given-names></name>
</person-group> (<year>2017</year>). 
<article-title>Using calibrated RGB imagery from low-cost UAVs for grassland monitoring: case study at the Rengen Grassland Experiment (RGE), Germany</article-title>. <source>Int. Arch. Photogramm. Remote Sens. Spatial Inf. Sci.</source> <volume>XLII-2/W6</volume>, <fpage>229</fpage>&#x2013;<lpage>233</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.5194/isprs-archives-XLII-2-W6-229-2017</pub-id>
</mixed-citation>
</ref>
<ref id="B28">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lyu</surname> <given-names>X.</given-names></name>
<name><surname>Li</surname> <given-names>X.</given-names></name>
<name><surname>Dang</surname> <given-names>D.</given-names></name>
<name><surname>Dou</surname> <given-names>H.</given-names></name>
<name><surname>Wang</surname> <given-names>K.</given-names></name>
<name><surname>Lou</surname> <given-names>A.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Unmanned aerial vehicle (UAV) remote sensing in grassland ecosystem monitoring: A systematic review</article-title>. <source>Remote Sens.</source> <volume>14</volume>, <fpage>1096</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs14051096</pub-id>
</mixed-citation>
</ref>
<ref id="B29">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Lyu</surname> <given-names>X.</given-names></name>
<name><surname>Li</surname> <given-names>X.</given-names></name>
<name><surname>Dang</surname> <given-names>D.</given-names></name>
<name><surname>Wang</surname> <given-names>K.</given-names></name>
<name><surname>Zhang</surname> <given-names>C.</given-names></name>
<name><surname>Cao</surname> <given-names>W.</given-names></name>
<etal/>
</person-group>. (<year>2024</year>). 
<article-title>Systematic review of remote sensing technology for grassland biodiversity monitoring: Current status and challenges</article-title>. <source>Global Ecol. Conserv.</source> <volume>54</volume>, <elocation-id>e03196</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.gecco.2024.e03196</pub-id>
</mixed-citation>
</ref>
<ref id="B30">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Oliveira</surname> <given-names>R. A.</given-names></name>
<name><surname>N&#xe4;si</surname> <given-names>R.</given-names></name>
<name><surname>Niemel&#xe4;inen</surname> <given-names>O.</given-names></name>
<name><surname>Nyholm</surname> <given-names>L.</given-names></name>
<name><surname>Alhonoja</surname> <given-names>K.</given-names></name>
<name><surname>Kaivosoja</surname> <given-names>J.</given-names></name>
<etal/>
</person-group>. (<year>2019</year>). 
<article-title>Assessment of RGB and hyperspectral UAV remote sensing for grass quantity and quality estimation</article-title>. <source>Int. Arch. Photogramm. Remote Sens. Spatial Inf. Sci.</source> <volume>XLII-2/W13</volume>, <fpage>489</fpage>&#x2013;<lpage>494</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.5194/isprs-archives-XLII-2-W13-489-2019</pub-id>
</mixed-citation>
</ref>
<ref id="B31">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Orlando</surname> <given-names>S.</given-names></name>
<name><surname>Minacapilli</surname> <given-names>M.</given-names></name>
<name><surname>Sarno</surname> <given-names>M.</given-names></name>
<name><surname>Carrubba</surname> <given-names>A.</given-names></name>
<name><surname>Motisi</surname> <given-names>A.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>A low-cost multispectral imaging system for the characterisation of soil and small vegetation properties using visible and near-infrared reflectance</article-title>. <source>Comput. Electron. Agric.</source> <volume>202</volume>, <elocation-id>107359</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2022.107359</pub-id>
</mixed-citation>
</ref>
<ref id="B32">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>P&#xe1;dua</surname> <given-names>L.</given-names></name>
<name><surname>Castro</surname> <given-names>J. P.</given-names></name>
<name><surname>Castro</surname> <given-names>J.</given-names></name>
<name><surname>Sousa</surname> <given-names>J. J.</given-names></name>
<name><surname>Castro</surname> <given-names>M.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Assessing the impact of clearing and grazing on fuel management in a Mediterranean oak forest through unmanned aerial vehicle multispectral data</article-title>. <source>Drones</source> <volume>8</volume>, <fpage>364</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/drones8080364</pub-id>
</mixed-citation>
</ref>
<ref id="B33">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Pamungkas</surname> <given-names>S.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Analysis of vegetation index for NDVI, EVI-2, and SAVI for mangrove forest density using Google Earth Engine in Lembar Bay, Lombok Island</article-title>. <source>IOP Conf. Series: Earth Environ. Sci.</source> <volume>1127</volume>, <elocation-id>012034</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1088/1755-1315/1127/1/012034</pub-id>
</mixed-citation>
</ref>
<ref id="B34">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Pan</surname> <given-names>T.</given-names></name>
<name><surname>Ye</surname> <given-names>H.</given-names></name>
<name><surname>Zhang</surname> <given-names>X.</given-names></name>
<name><surname>Liao</surname> <given-names>X.</given-names></name>
<name><surname>Wang</surname> <given-names>D.</given-names></name>
<name><surname>Bayin</surname> <given-names>D.</given-names></name>
<etal/>
</person-group>. (<year>2024</year>). 
<article-title>Estimating aboveground biomass of grassland in central Asia mountainous areas using unmanned aerial vehicle vegetation indices and image textures &#x2013; A case study of typical grassland in Tajikistan</article-title>. <source>Environ. Sustainability Indic.</source> <volume>22</volume>, <elocation-id>100345</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.indic.2024.100345</pub-id>
</mixed-citation>
</ref>
<ref id="B35">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Possoch</surname> <given-names>M.</given-names></name>
<name><surname>Bieker</surname> <given-names>S.</given-names></name>
<name><surname>Hoffmeister</surname> <given-names>D.</given-names></name>
<name><surname>Bolten</surname> <given-names>A.</given-names></name>
<name><surname>Schellberg</surname> <given-names>J.</given-names></name>
<name><surname>Bareth</surname> <given-names>G.</given-names></name>
</person-group> (<year>2016</year>). 
<article-title>Multi-temporal crop surface models combined with the RGB vegetation index from UAV-based images for forage monitoring in grassland</article-title>. <source>Int. Arch. Photogramm. Remote Sens. Spatial Inf. Sci.</source> <volume>XLI-B1</volume>, <fpage>991</fpage>&#x2013;<lpage>998</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.5194/isprs-archives-XLI-B1-991-2016</pub-id>
</mixed-citation>
</ref>
<ref id="B36">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Pranga</surname> <given-names>J.</given-names></name>
<name><surname>Borra-Serrano</surname> <given-names>I.</given-names></name>
<name><surname>Quataert</surname> <given-names>P.</given-names></name>
<name><surname>De Swaef</surname> <given-names>T.</given-names></name>
<name><surname>Vanden Nest</surname> <given-names>T.</given-names></name>
<name><surname>Willekens</surname> <given-names>K.</given-names></name>
<etal/>
</person-group>. (<year>2024</year>). 
<article-title>Quantification of species composition in grass-clover swards using RGB and multispectral UAV imagery and machine learning</article-title>. <source>Front. Plant Sci.</source> <volume>15</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2024.1414181</pub-id>, PMID: <pub-id pub-id-type="pmid">38962243</pub-id>
</mixed-citation>
</ref>
<ref id="B37">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Reinermann</surname> <given-names>S.</given-names></name>
<name><surname>Asam</surname> <given-names>S.</given-names></name>
<name><surname>Kuenzer</surname> <given-names>C.</given-names></name>
</person-group> (<year>2020</year>). 
<article-title>Remote sensing of grassland production and management&#x2014;A review</article-title>. <source>Remote Sens.</source> <volume>12</volume>, <fpage>1949</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs12121949</pub-id>
</mixed-citation>
</ref>
<ref id="B38">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Robnik-&#x160;ikonja</surname> <given-names>M.</given-names></name>
<name><surname>Kononenko</surname> <given-names>I.</given-names></name>
</person-group> (<year>2003</year>). 
<article-title>Theoretical and empirical analysis of reliefF and RReliefF</article-title>. <source>Mach. Learn.</source> <volume>53</volume>, <fpage>23</fpage>&#x2013;<lpage>69</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1023/A:1025667309714</pub-id>
</mixed-citation>
</ref>
<ref id="B39">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Rossi</surname> <given-names>M.</given-names></name>
<name><surname>Niedrist</surname> <given-names>G.</given-names></name>
<name><surname>Asam</surname> <given-names>S.</given-names></name>
<name><surname>Tonon</surname> <given-names>G.</given-names></name>
<name><surname>Tomelleri</surname> <given-names>E.</given-names></name>
<name><surname>Zebisch</surname> <given-names>M.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>A comparison of the signal from diverse optical sensors for monitoring alpine grassland dynamics</article-title>. <source>Remote Sens.</source> <volume>11</volume>, <fpage>296</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs11030296</pub-id>
</mixed-citation>
</ref>
<ref id="B40">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Schucknecht</surname> <given-names>A.</given-names></name>
<name><surname>Seo</surname> <given-names>B.</given-names></name>
<name><surname>Kr&#xe4;mer</surname> <given-names>A.</given-names></name>
<name><surname>Asam</surname> <given-names>S.</given-names></name>
<name><surname>Atzberger</surname> <given-names>C.</given-names></name>
<name><surname>Kiese</surname> <given-names>R.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Estimating dry biomass and plant nitrogen concentration in pre-Alpine grasslands with low-cost UAS-borne multispectral data &#x2013; a comparison of sensors, algorithms, and predictor sets</article-title>. <source>Biogeosciences</source> <volume>19</volume>, <fpage>2699</fpage>&#x2013;<lpage>2727</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.5194/bg-19-2699-2022</pub-id>
</mixed-citation>
</ref>
<ref id="B41">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Shi</surname> <given-names>Y.</given-names></name>
<name><surname>Gao</surname> <given-names>J.</given-names></name>
<name><surname>Li</surname> <given-names>X.</given-names></name>
<name><surname>Li</surname> <given-names>J.</given-names></name>
<name><surname>dela Torre</surname> <given-names>D. M. G.</given-names></name>
<name><surname>Brierley</surname> <given-names>G. J.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Improved estimation of aboveground biomass of disturbed grassland through including bare ground and grazing intensity</article-title>. <source>Remote Sens.</source> <volume>13</volume>, <fpage>2105</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs13112105</pub-id>
</mixed-citation>
</ref>
<ref id="B42">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Smith</surname> <given-names>A. M. S.</given-names></name>
<name><surname>Drake</surname> <given-names>N. A.</given-names></name>
<name><surname>Wooster</surname> <given-names>M. J.</given-names></name>
<name><surname>Hudak</surname> <given-names>A. T.</given-names></name>
<name><surname>Holden</surname> <given-names>Z. A.</given-names></name>
<name><surname>Gibbons</surname> <given-names>C. J.</given-names></name>
</person-group> (<year>2007</year>). 
<article-title>Production of Landsat ETM+ reference imagery of burned areas within Southern African savannahs: comparison of methods and application to MODIS</article-title>. <source>Int. J. Remote Sens.</source> <volume>28</volume>, <fpage>2753</fpage>&#x2013;<lpage>2775</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/01431160600954704</pub-id>
</mixed-citation>
</ref>
<ref id="B43">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Stoline</surname> <given-names>M. R.</given-names></name>
</person-group> (<year>1981</year>). 
<article-title>The status of multiple comparisons: simultaneous estimation of all pairwise comparisons in one-way ANOVA designs</article-title>. <source>Am. Statistician</source> <volume>35</volume>, <fpage>134</fpage>&#x2013;<lpage>141</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/00031305.1981.10479331</pub-id>
</mixed-citation>
</ref>
<ref id="B44">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Suzuki</surname> <given-names>T.</given-names></name>
<name><surname>Sugiyama</surname> <given-names>M.</given-names></name>
<name><surname>Kanamori</surname> <given-names>T.</given-names></name>
<name><surname>Sese</surname> <given-names>J.</given-names></name>
</person-group> (<year>2009</year>). 
<article-title>Mutual information estimation reveals global associations between stimuli and biological processes</article-title>. <source>BMC Bioinf.</source> <volume>10</volume>, <elocation-id>S52</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1186/1471-2105-10-S1-S52</pub-id>, PMID: <pub-id pub-id-type="pmid">19208155</pub-id>
</mixed-citation>
</ref>
<ref id="B45">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Tilly</surname> <given-names>N.</given-names></name>
<name><surname>Aasen</surname> <given-names>H.</given-names></name>
<name><surname>Bareth</surname> <given-names>G.</given-names></name>
</person-group> (<year>2015</year>). 
<article-title>Fusion of plant height and vegetation indices for the estimation of barley biomass</article-title>. <source>Remote Sens.</source> <volume>7</volume>, <fpage>11449</fpage>&#x2013;<lpage>11480</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs70911449</pub-id>
</mixed-citation>
</ref>
<ref id="B46">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Varela</surname> <given-names>S.</given-names></name>
<name><surname>Pederson</surname> <given-names>T.</given-names></name>
<name><surname>Bernacchi</surname> <given-names>C. J.</given-names></name>
<name><surname>Leakey</surname> <given-names>A. D. B.</given-names></name>
</person-group> (<year>2021</year>). 
<article-title>Understanding growth dynamics and yield prediction of sorghum using high temporal resolution UAV imagery time series and machine learning</article-title>. <source>Remote Sens.</source> <volume>13</volume>, <fpage>1763</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs13091763</pub-id>
</mixed-citation>
</ref>
<ref id="B47">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wang</surname> <given-names>Z.</given-names></name>
<name><surname>Ma</surname> <given-names>Y.</given-names></name>
<name><surname>Zhang</surname> <given-names>Y.</given-names></name>
<name><surname>Shang</surname> <given-names>J.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Review of remote sensing applications in grassland monitoring</article-title>. <source>Remote Sens.</source> <volume>14</volume>, <fpage>2903</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs14122903</pub-id>
</mixed-citation>
</ref>
<ref id="B48">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wang</surname> <given-names>S.</given-names></name>
<name><surname>Tuya</surname> <given-names>H.</given-names></name>
<name><surname>Zhang</surname> <given-names>S.</given-names></name>
<name><surname>Zhao</surname> <given-names>X.</given-names></name>
<name><surname>Liu</surname> <given-names>Z.</given-names></name>
<name><surname>Li</surname> <given-names>R.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Random forest method for analysis of remote sensing inversion of aboveground biomass and grazing intensity of grasslands in Inner Mongolia, China</article-title>. <source>Int. J. Remote Sens.</source> <volume>44</volume>, <fpage>2867</fpage>&#x2013;<lpage>2884</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/01431161.2023.2210724</pub-id>
</mixed-citation>
</ref>
<ref id="B49">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wengert</surname> <given-names>M.</given-names></name>
<name><surname>Wijesingha</surname> <given-names>J.</given-names></name>
<name><surname>Schulze-Br&#xfc;ninghoff</surname> <given-names>D.</given-names></name>
<name><surname>Wachendorf</surname> <given-names>M.</given-names></name>
<name><surname>Astor</surname> <given-names>T.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Multisite and multitemporal grassland yield estimation using UAV-borne hyperspectral data</article-title>. <source>Remote Sens.</source> <volume>14</volume>, <fpage>2068</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs14092068</pub-id>
</mixed-citation>
</ref>
<ref id="B50">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Wu</surname> <given-names>W.</given-names></name>
<name><surname>Walczak</surname> <given-names>B.</given-names></name>
<name><surname>Massart</surname> <given-names>D. L.</given-names></name>
<name><surname>Heuerding</surname> <given-names>S.</given-names></name>
<name><surname>Erni</surname> <given-names>F.</given-names></name>
<name><surname>Last</surname> <given-names>I. R.</given-names></name>
<etal/>
</person-group>. (<year>1996</year>). 
<article-title>Artificial neural networks in classification of NIR spectral data: Design of the training set</article-title>. <source>Chemometrics Intelligent Lab. Syst.</source> <volume>33</volume>, <fpage>35</fpage>&#x2013;<lpage>46</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/0169-7439(95)00077-1</pub-id>
</mixed-citation>
</ref>
<ref id="B51">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Xiang</surname> <given-names>S.</given-names></name>
<name><surname>Wang</surname> <given-names>S.</given-names></name>
<name><surname>Guo</surname> <given-names>Z.</given-names></name>
<name><surname>Wang</surname> <given-names>N.</given-names></name>
<name><surname>Jin</surname> <given-names>Z.</given-names></name>
<name><surname>Yu</surname> <given-names>F.</given-names></name>
<etal/>
</person-group>. (<year>2025</year>a). 
<article-title>Inversion of nitrogen concentration in crop leaves based on improved radiative transfer model</article-title>. <source>Comput. Electron. Agric.</source> <volume>239</volume>, <elocation-id>111017</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2025.111017</pub-id>
</mixed-citation>
</ref>
<ref id="B52">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Xiang</surname> <given-names>S.</given-names></name>
<name><surname>Wang</surname> <given-names>S.</given-names></name>
<name><surname>Jin</surname> <given-names>Z.</given-names></name>
<name><surname>Xiao</surname> <given-names>Y.</given-names></name>
<name><surname>Liu</surname> <given-names>M.</given-names></name>
<name><surname>Yang</surname> <given-names>H.</given-names></name>
<etal/>
</person-group>. (<year>2025</year>b). 
<article-title>RSPECT: A PROSPECT-based model incorporating the real structure of rice leaves</article-title>. <source>Remote Sens. Environ.</source> <volume>330</volume>, <elocation-id>114962</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rse.2025.114962</pub-id>
</mixed-citation>
</ref>
<ref id="B53">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Xu</surname> <given-names>X.</given-names></name>
<name><surname>Liu</surname> <given-names>L.</given-names></name>
<name><surname>Han</surname> <given-names>P.</given-names></name>
<name><surname>Gong</surname> <given-names>X.</given-names></name>
<name><surname>Zhang</surname> <given-names>Q.</given-names></name>
</person-group> (<year>2022</year>). 
<article-title>Accuracy of vegetation indices in assessing different grades of grassland desertification from UAV</article-title>. <source>Int. J. Environ. Res. Public Health</source> <volume>19</volume>, <fpage>16793</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/ijerph192416793</pub-id>, PMID: <pub-id pub-id-type="pmid">36554681</pub-id>
</mixed-citation>
</ref>
<ref id="B54">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yu</surname> <given-names>F.-h.</given-names></name>
<name><surname>Bai</surname> <given-names>J.-c.</given-names></name>
<name><surname>Jin</surname> <given-names>Z.-y.</given-names></name>
<name><surname>Guo</surname> <given-names>Z.-h.</given-names></name>
<name><surname>Yang</surname> <given-names>J.-x.</given-names></name>
<name><surname>Chen</surname> <given-names>C.-l.</given-names></name>
</person-group> (<year>2023</year>). 
<article-title>Combining the critical nitrogen concentration and machine learning algorithms to estimate nitrogen deficiency in rice from UAV hyperspectral data</article-title>. <source>J. Integr. Agric.</source> <volume>22</volume>, <fpage>1216</fpage>&#x2013;<lpage>1229</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.jia.2022.12.007</pub-id>
</mixed-citation>
</ref>
<ref id="B55">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yu</surname> <given-names>F.</given-names></name>
<name><surname>Feng</surname> <given-names>S.</given-names></name>
<name><surname>Du</surname> <given-names>W.</given-names></name>
<name><surname>Wang</surname> <given-names>D.</given-names></name>
<name><surname>Guo</surname> <given-names>Z.</given-names></name>
<name><surname>Xing</surname> <given-names>S.</given-names></name>
<etal/>
</person-group>. (<year>2020</year>). 
<article-title>A study of nitrogen deficiency inversion in rice leaves based on the hyperspectral reflectance differential</article-title>. <source>Front. Plant Sci.</source> <volume>11</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2020.573272</pub-id>, PMID: <pub-id pub-id-type="pmid">33343590</pub-id>
</mixed-citation>
</ref>
<ref id="B56">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yu</surname> <given-names>F.</given-names></name>
<name><surname>Xu</surname> <given-names>C.</given-names></name>
<name><surname>Xiang</surname> <given-names>S.</given-names></name>
<name><surname>Bai</surname> <given-names>J.</given-names></name>
<name><surname>Jin</surname> <given-names>Z.</given-names></name>
<name><surname>Zhang</surname> <given-names>H.</given-names></name>
<etal/>
</person-group>. (<year>2025</year>). 
<article-title>Hyperspectral leaf reflectance simulation considering internal structure</article-title>. <source>Sci. Rep.</source> <volume>15</volume>, <fpage>13639</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1038/s41598-025-98299-z</pub-id>, PMID: <pub-id pub-id-type="pmid">40254694</pub-id>
</mixed-citation>
</ref>
<ref id="B57">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yu</surname> <given-names>F.</given-names></name>
<name><surname>Zhang</surname> <given-names>H.</given-names></name>
<name><surname>Bai</surname> <given-names>J.</given-names></name>
<name><surname>Xiang</surname> <given-names>S.</given-names></name>
<name><surname>Xu</surname> <given-names>T.</given-names></name>
</person-group> (<year>2024</year>). 
<article-title>Method for the hyperspectral inversion of the phosphorus content of rice leaves in cold northern China</article-title>. <source>Int. J. Agric. Biol. Eng.</source> <volume>17</volume>, <fpage>256</fpage>&#x2013;<lpage>263</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.25165/j.ijabe.20241706.8464</pub-id>
</mixed-citation>
</ref>
<ref id="B58">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Yue</surname> <given-names>J.</given-names></name>
<name><surname>Yang</surname> <given-names>G.</given-names></name>
<name><surname>Tian</surname> <given-names>Q.</given-names></name>
<name><surname>Feng</surname> <given-names>H.</given-names></name>
<name><surname>Xu</surname> <given-names>K.</given-names></name>
<name><surname>Zhou</surname> <given-names>C.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>Estimate of winter-wheat above-ground biomass based on UAV ultrahigh-ground-resolution image textures and vegetation indices</article-title>. <source>ISPRS J. Photogrammetry Remote Sens.</source> <volume>150</volume>, <fpage>226</fpage>&#x2013;<lpage>244</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.isprsjprs.2019.02.022</pub-id>
</mixed-citation>
</ref>
<ref id="B59">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhang</surname> <given-names>T.</given-names></name>
<name><surname>Bi</surname> <given-names>Y.</given-names></name>
<name><surname>Xuan</surname> <given-names>C.</given-names></name>
</person-group> (<year>2024</year>a). 
<article-title>Convolutional transformer attention network with few-shot learning for grassland degradation monitoring using UAV hyperspectral imagery</article-title>. <source>Int. J. Remote Sens.</source> <volume>45</volume>, <fpage>2109</fpage>&#x2013;<lpage>2135</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/01431161.2024.2326042</pub-id>
</mixed-citation>
</ref>
<ref id="B60">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhang</surname> <given-names>Z.</given-names></name>
<name><surname>Gong</surname> <given-names>J.</given-names></name>
<name><surname>Song</surname> <given-names>L.</given-names></name>
<name><surname>Zhang</surname> <given-names>S.</given-names></name>
<name><surname>Zhang</surname> <given-names>W.</given-names></name>
<name><surname>Dong</surname> <given-names>J.</given-names></name>
<etal/>
</person-group>. (<year>2024</year>b). 
<article-title>Adaptations of soil microbes to stoichiometric imbalances in regulating their carbon use efficiency under a range of different grazing intensities</article-title>. <source>Appl. Soil Ecol.</source> <volume>193</volume>, <elocation-id>105141</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.apsoil.2023.105141</pub-id>
</mixed-citation>
</ref>
<ref id="B61">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhang</surname> <given-names>A.</given-names></name>
<name><surname>Hu</surname> <given-names>S.</given-names></name>
<name><surname>Zhang</surname> <given-names>X.</given-names></name>
<name><surname>Zhang</surname> <given-names>T.</given-names></name>
<name><surname>Li</surname> <given-names>M.</given-names></name>
<name><surname>Tao</surname> <given-names>H.</given-names></name>
<etal/>
</person-group>. (<year>2021</year>a). 
<article-title>A handheld grassland vegetation monitoring system based on multispectral imaging</article-title>. <source>Agriculture</source> <volume>11</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agriculture11121262</pub-id>
</mixed-citation>
</ref>
<ref id="B62">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhang</surname> <given-names>J.</given-names></name>
<name><surname>Qiu</surname> <given-names>X.</given-names></name>
<name><surname>Wu</surname> <given-names>Y.</given-names></name>
<name><surname>Zhu</surname> <given-names>Y.</given-names></name>
<name><surname>Cao</surname> <given-names>Q.</given-names></name>
<name><surname>Liu</surname> <given-names>X.</given-names></name>
<etal/>
</person-group>. (<year>2021</year>b). 
<article-title>Combining texture, color, and vegetation indices from fixed-wing UAS imagery to estimate wheat growth parameters using multivariate regression methods</article-title>. <source>Comput. Electron. Agric.</source> <volume>185</volume>, <elocation-id>106138</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.compag.2021.106138</pub-id>
</mixed-citation>
</ref>
<ref id="B63">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhang</surname> <given-names>H.</given-names></name>
<name><surname>Tang</surname> <given-names>Z.</given-names></name>
<name><surname>Wang</surname> <given-names>B.</given-names></name>
<name><surname>Meng</surname> <given-names>B.</given-names></name>
<name><surname>Qin</surname> <given-names>Y.</given-names></name>
<name><surname>Sun</surname> <given-names>Y.</given-names></name>
<etal/>
</person-group>. (<year>2022</year>). 
<article-title>A non-destructive method for rapid acquisition of grassland aboveground biomass for satellite ground verification using UAV RGB images</article-title>. <source>Global Ecol. Conserv.</source> <volume>33</volume>, <elocation-id>e01999</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.gecco.2022.e01999</pub-id>
</mixed-citation>
</ref>
<ref id="B64">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhao</surname> <given-names>Y.</given-names></name>
<name><surname>Liu</surname> <given-names>Z.</given-names></name>
<name><surname>Wu</surname> <given-names>J.</given-names></name>
</person-group> (<year>2020</year>). 
<article-title>Grassland ecosystem services: a systematic review of research advances and future directions</article-title>. <source>Landscape Ecol.</source> <volume>35</volume>, <fpage>793</fpage>&#x2013;<lpage>814</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s10980-020-00980-3</pub-id>
</mixed-citation>
</ref>
<ref id="B65">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhou</surname> <given-names>J.</given-names></name>
<name><surname>Yungbluth</surname> <given-names>D.</given-names></name>
<name><surname>Vong</surname> <given-names>C. N.</given-names></name>
<name><surname>Scaboo</surname> <given-names>A.</given-names></name>
<name><surname>Zhou</surname> <given-names>J.</given-names></name>
</person-group> (<year>2019</year>). 
<article-title>Estimation of the maturity date of soybean breeding lines using UAV-based multispectral imagery</article-title>. <source>Remote Sens.</source> <volume>11</volume>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/rs11182075</pub-id>
</mixed-citation>
</ref>
<ref id="B66">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zhu</surname> <given-names>X.</given-names></name>
<name><surname>Bi</surname> <given-names>Y.</given-names></name>
<name><surname>Du</surname> <given-names>J.</given-names></name>
<name><surname>Gao</surname> <given-names>X.</given-names></name>
<name><surname>Zhang</surname> <given-names>T.</given-names></name>
<name><surname>Pi</surname> <given-names>W.</given-names></name>
<etal/>
</person-group>. (<year>2023</year>). 
<article-title>Research on deep learning method recognition and a classification model of grassland grass species based on unmanned aerial vehicle hyperspectral remote sensing</article-title>. <source>Grassland Sci.</source> <volume>69</volume>, <fpage>3</fpage>&#x2013;<lpage>11</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1111/grs.12379</pub-id>
</mixed-citation>
</ref>
<ref id="B67">
<mixed-citation publication-type="journal">
<person-group person-group-type="author">
<name><surname>Zwick</surname> <given-names>M.</given-names></name>
<name><surname>Cardoso</surname> <given-names>J. A.</given-names></name>
<name><surname>Guti&#xe9;rrez-Zapata</surname> <given-names>D. M.</given-names></name>
<name><surname>Cer&#xf3;n-Mu&#xf1;oz</surname> <given-names>M.</given-names></name>
<name><surname>Guti&#xe9;rrez</surname> <given-names>J. F.</given-names></name>
<name><surname>Raab</surname> <given-names>C.</given-names></name>
<etal/>
</person-group>. (<year>2024</year>). 
<article-title>Pixels to pasture: Using machine learning and multispectral remote sensing to predict biomass and nutrient quality in tropical grasslands</article-title>. <source>Remote Sens. Applications: Soc. Environ.</source> <volume>36</volume>, <elocation-id>101282</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.rsase.2024.101282</pub-id>
</mixed-citation>
</ref>
</ref-list>
<fn-group>
<fn id="n1" fn-type="custom" custom-type="edited-by">
<p>Edited by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/3063817">Yu Weiguo</ext-link>, Seoul National University, Republic of Korea</p></fn>
<fn id="n2" fn-type="custom" custom-type="reviewed-by">
<p>Reviewed by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2975813">Dayang Liu</ext-link>, Northeast Forestry University, China</p>
<p><ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/3267128">Fa Zhao</ext-link>, Anhui Polytechnic University, China</p></fn>
</fn-group>
</back>
</article>